import cimodel.data.simple.util.branch_filters as branch_filters
from cimodel.data.simple.util.docker_constants import (
DOCKER_IMAGE_NDK, DOCKER_REQUIREMENT_NDK
)
class AndroidJob:
def __init__(self,
variant,
template_name,
is_master_only=True):
self.variant = variant
self.template_name = template_name
self.is_master_only = is_master_only
def gen_tree(self):
base_name_parts = [
"pytorch",
"linux",
"xenial",
"py3",
"clang5",
"android",
"ndk",
"r19c",
] + self.variant + [
"build",
]
full_job_name = "_".join(base_name_parts)
build_env_name = "-".join(base_name_parts)
props_dict = {
"name": full_job_name,
"build_environment": "\"{}\"".format(build_env_name),
"docker_image": "\"{}\"".format(DOCKER_IMAGE_NDK),
"requires": [DOCKER_REQUIREMENT_NDK]
}
if self.is_master_only:
props_dict["filters"] = branch_filters.gen_filter_dict(branch_filters.NON_PR_BRANCH_LIST)
return [{self.template_name: props_dict}]
class AndroidGradleJob:
def __init__(self,
job_name,
template_name,
dependencies,
is_master_only=True,
is_pr_only=False):
self.job_name = job_name
self.template_name = template_name
self.dependencies = dependencies
self.is_master_only = is_master_only
self.is_pr_only = is_pr_only
def gen_tree(self):
props_dict = {
"name": self.job_name,
"requires": self.dependencies,
}
if self.is_master_only:
props_dict["filters"] = branch_filters.gen_filter_dict(branch_filters.NON_PR_BRANCH_LIST)
elif self.is_pr_only:
props_dict["filters"] = branch_filters.gen_filter_dict(branch_filters.PR_BRANCH_LIST)
return [{self.template_name: props_dict}]
WORKFLOW_DATA = [
AndroidJob(["x86_32"], "pytorch_linux_build", is_master_only=False),
AndroidJob(["x86_64"], "pytorch_linux_build"),
AndroidJob(["arm", "v7a"], "pytorch_linux_build"),
AndroidJob(["arm", "v8a"], "pytorch_linux_build"),
AndroidJob(["vulkan", "x86_32"], "pytorch_linux_build", is_master_only=False),
AndroidGradleJob(
"pytorch-linux-xenial-py3-clang5-android-ndk-r19c-gradle-build-x86_32",
"pytorch_android_gradle_build-x86_32",
["pytorch_linux_xenial_py3_clang5_android_ndk_r19c_x86_32_build"],
is_master_only=False,
is_pr_only=True),
AndroidGradleJob(
"pytorch-linux-xenial-py3-clang5-android-ndk-r19c-gradle-custom-build-single",
"pytorch_android_gradle_custom_build_single",
[DOCKER_REQUIREMENT_NDK],
is_master_only=False,
is_pr_only=True),
AndroidGradleJob(
"pytorch-linux-xenial-py3-clang5-android-ndk-r19c-gradle-build",
"pytorch_android_gradle_build",
["pytorch_linux_xenial_py3_clang5_android_ndk_r19c_x86_32_build",
"pytorch_linux_xenial_py3_clang5_android_ndk_r19c_x86_64_build",
"pytorch_linux_xenial_py3_clang5_android_ndk_r19c_arm_v7a_build",
"pytorch_linux_xenial_py3_clang5_android_ndk_r19c_arm_v8a_build"]),
]
def get_workflow_jobs():
return [item.gen_tree() for item in WORKFLOW_DATA]
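# A minimal inspection sketch (not part of the original config generation): dump the
# generated job dictionaries as JSON. The __main__ guard below is illustrative only.
if __name__ == "__main__":
    import json

    # get_workflow_jobs() returns one sub-tree per entry in WORKFLOW_DATA, each of the
    # form [{template_name: {"name": ..., "requires": [...], ...}}].
    for job_subtree in get_workflow_jobs():
        print(json.dumps(job_subtree, indent=2, default=str))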
# Copyright 2019-2020 The ASReview Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import logging
import sys
import numpy as np
import pandas as pd
from asreview.compat import convert_id_to_idx, convert_idx_to_id
from asreview.review.factory import get_reviewer
from asreview.state.utils import open_state
from asreview.webapp.sqlock import SQLiteLock
from asreview.webapp.utils import get_lock_path
from asreview.webapp.utils import get_state_path
from asreview.webapp.utils.io import read_label_history
from asreview.webapp.utils.io import read_pool
from asreview.webapp.utils.io import write_pool
from asreview.webapp.utils.io import write_proba
from asreview.webapp.utils.paths import get_data_file_path
from asreview.webapp.utils.paths import get_project_path
from asreview.webapp.utils.paths import get_kwargs_path
from asreview.webapp.utils.project import read_data
def _get_diff_history(new_history, old_history):
for i in range(len(new_history)):
try:
if old_history[i] != new_history[i]:
return new_history[i:]
except IndexError:
return new_history[i:]
return []
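# Illustrative behaviour of _get_diff_history (the label tuples below are made up):
#   _get_diff_history([(1, 1), (2, 0), (3, 1)], [(1, 1)])   -> [(2, 0), (3, 1)]
#   _get_diff_history([(1, 1), (9, 0)], [(1, 1), (2, 0)])   -> [(9, 0)]
#   _get_diff_history([(1, 1)], [(1, 1)])                   -> []
# i.e. it returns the suffix of new_history starting at the first position where the
# two histories differ (or where old_history runs out).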
def _get_label_train_history(state):
label_idx = []
inclusions = []
for query_i in range(state.n_queries()):
try:
new_labels = state.get("label_idx", query_i=query_i)
new_inclusions = state.get("inclusions", query_i=query_i)
except KeyError:
new_labels = None
if new_labels is not None:
label_idx.extend(new_labels)
inclusions.extend(new_inclusions)
return list(zip(label_idx, inclusions))
def train_model(project_id, label_method=None):
"""Add the new labels to the review and do the modeling.
It uses a lock to ensure only one model is running at the same time.
Old results directories are deleted after 4 iterations.
It takes one argument on the CLI: the project id.
"""
logging.info(f"Project {project_id} - Train a new model for project")
# get file locations
asr_kwargs_file = get_kwargs_path(project_id)
lock_file = get_lock_path(project_id)
# Lock so that only one training run is running at the same time.
# It doesn't lock the flask server/client.
with SQLiteLock(
lock_file, blocking=False, lock_name="training",
project_id=project_id) as lock:
# If the lock is not acquired, another training instance is running.
if not lock.locked():
logging.info("Project {project_id} - "
"Cannot acquire lock, other instance running.")
return
# Lock the current state. We want to have a consistent active state.
# This does communicate with the flask backend; it prevents writing and
# reading to the same files at the same time.
with SQLiteLock(
lock_file,
blocking=True,
lock_name="active",
project_id=project_id) as lock:
# Get all labels since the last run. If there are no new labels, quit.
new_label_history = read_label_history(project_id)
data_fp = str(get_data_file_path(project_id))
as_data = read_data(project_id)
state_file = get_state_path(project_id)
# collect command line arguments and pass them to the reviewer
with open(asr_kwargs_file, "r") as fp:
asr_kwargs = json.load(fp)
try:
del asr_kwargs["abstract_only"]
except KeyError:
pass
asr_kwargs['state_file'] = str(state_file)
reviewer = get_reviewer(dataset=data_fp, mode="minimal", **asr_kwargs)
with open_state(state_file) as state:
old_label_history = _get_label_train_history(state)
diff_history = _get_diff_history(new_label_history, old_label_history)
if len(diff_history) == 0:
logging.info(
f"Project {project_id} - No new labels since last run.")
return
query_record_ids = np.array([x[0] for x in diff_history], dtype=int)
inclusions = np.array([x[1] for x in diff_history], dtype=int)
query_idx = convert_id_to_idx(as_data, query_record_ids)
# Classify the new labels, train and store the results.
with open_state(state_file) as state:
reviewer.classify(
query_idx, inclusions, state, method=label_method)
reviewer.train()
reviewer.log_probabilities(state)
new_query_idx = reviewer.query(reviewer.n_pool()).tolist()
reviewer.log_current_query(state)
# write the proba to a pandas dataframe with record_ids as index
proba = pd.DataFrame(
{"proba": state.pred_proba.tolist()},
index=pd.Index(as_data.record_ids, name="record_id")
)
# update the pool and output the proba's
# important: pool is sorted on query
with SQLiteLock(
lock_file,
blocking=True,
lock_name="active",
project_id=project_id) as lock:
# read the pool
current_pool = read_pool(project_id)
# diff pool and new_query_ind
current_pool_idx = convert_id_to_idx(as_data, current_pool)
current_pool_idx = frozenset(current_pool_idx)
new_pool_idx = [x for x in new_query_idx if x in current_pool_idx]
# convert new_pool_idx back to record_ids
new_pool = convert_idx_to_id(as_data, new_pool_idx)
# write the pool and proba
write_pool(project_id, new_pool)
write_proba(project_id, proba)
def main(argv):
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument("project_id", type=str, help="Project id")
parser.add_argument(
"--label_method",
type=str,
default=None,
help="Label method (for example 'prior')")
args = parser.parse_args(argv)
try:
train_model(args.project_id, args.label_method)
except Exception as err:
err_type = type(err).__name__
logging.error(f"Project {args.project_id} - {err_type}: {err}")
# write error to file if label method is prior (first iteration)
if args.label_method == "prior":
message = {"message": f"{err_type}: {err}"}
fp = get_project_path(args.project_id) / "error.json"
with open(fp, 'w') as f:
json.dump(message, f)
# raise the error for full traceback
raise err
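# Usage sketch (the script name and project id below are placeholders, not taken from
# the project itself):
#   python train_model.py example-project --label_method prior
# When --label_method is "prior", any training error is also written to
# <project path>/error.json before being re-raised (see main() above).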
if __name__ == "__main__":
main(sys.argv[1:])
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# GUI module generated by PAGE version 4.21
# in conjunction with Tcl version 8.6
# Apr 21, 2019 05:48:53 PM +0530 platform: Windows NT
import sys
import tkinter
from tkinter import messagebox
from tkinter import *
import mysql.connector
import dbConnect
from dbConnect import DBConnect
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import extramainpage_support
import os.path
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
sys.stdout.flush()
root = tk.Tk()
top = Toplevel1 (root)
extramainpage_support.init(root, top)
root.mainloop()
w = None
def create_Toplevel1(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
rt = root
w = tk.Toplevel (root)
top = Toplevel1 (w)
extramainpage_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_Toplevel1():
global w
w.destroy()
w = None
def world1():
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import connect_support
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
root = tk.Tk()
top = Toplevel1 (root)
connect_support.init(root, top)
root.mainloop()
w = None
def create_Toplevel1(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
rt = root
w = tk.Toplevel (root)
top = Toplevel1 (w)
connect_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_Toplevel1():
global w
w.destroy()
w = None
class Toplevel1:
def exits(self):
msg=tkinter.messagebox.askyesno("etax-2019","Do You Want To Exit ?")
if msg:
exit()
def viewnames(self):
vill=str(self.villagename.get());
if (vill == ""):
tkinter.messagebox.showerror("etax2019","Please Enter villagename")
return
try :
mydb=mysql.connector.connect(host='localhost',user='root',passwd='Pass@123',database='etax2019')
except :
tkinter.messagebox.showerror('etax-2019','Failed to connect to server, please contact your administrator')
return
mycursor=mydb.cursor()
query = ("SELECT idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest FROM "+vill)
mycursor.execute(query)
for(idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest) in mycursor:
s="{} {}".format(idnumber, name)
self.box1o1.insert(0,s)
def viewalls(self):
vill=str(self.villagename.get());
if (vill == ""):
tkinter.messagebox.showerror("etax2019","Please Enter villagename")
return
try :
mydb=mysql.connector.connect(host='localhost',user='root',passwd='Pass@123',database='etax2019')
except :
tkinter.messagebox.showerror('etax-2019','Failed to connect to server, please contact your administrator')
return
mycursor=mydb.cursor()
query = ("SELECT idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest FROM "+vill)
mycursor.execute(query)
for(idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest) in mycursor:
s="{} {} {} {} {} {} {} {} {} {} {} {} {} {} {}".format(idnumber, meternumber, wardnumber, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest)
self.box2o1.insert(0,s)
def clearalls(self):
self.box2o1.delete(0,tk.END)
self.box1o1.delete(0,tk.END)
def findss(self):
vill=str(self.villagename.get());
uid=str(self.Entry2.get());
if (vill == ""):
tkinter.messagebox.showerror("etax2019","Please Enter villagename")
return
try :
mydb=mysql.connector.connect(host='localhost',user='root',passwd='Pass@123',database='etax2019')
except :
tkinter.messagebox.showerror('etax-2019','Failed to connect to server, please contact your administrator')
return
mycursor=mydb.cursor()
query = ("SELECT idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest FROM %s WHERE idnumber= %s" %(vill,uid))
mycursor.execute(query)
for(idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest) in mycursor:
s="{} {} {} {} {} {} {} {} {} {} {} {} {} {} {}".format(idnumber, meternumber, wardnumber, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest)
self.box2o1.insert(0,s)
try :
mydb=mysql.connector.connect(host='localhost',user='root',passwd='Pass@123',database='etax2019')
except :
tkinter.messagebox.showerror('etax-2019','Failed to connect to server, please contact your administrator')
return
mycursor=mydb.cursor()
query = ("SELECT idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest FROM %s WHERE idnumber= %s" %(vill,uid))
mycursor.execute(query)
for(idnumber, meternumber, wardnumber, name, housetax, healthtax, lighttax, watertax, total, reciptnumber, housetaxpaid, healthtaxpaid, lighttaxpaid, watertaxpaid, totalpaid, rest) in mycursor:
s="{} {}".format(idnumber, name)
self.box1o1.insert(0,s)
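# Hedged sketch: the queries above interpolate both the table name and the id with
# Python string formatting. Only the table name needs string interpolation (and should
# be validated against a known list); the id value can be passed as a bound parameter,
# which mysql.connector's cursor.execute supports:
#
#     query = ("SELECT idnumber, name FROM " + vill     # vill assumed validated
#              + " WHERE idnumber = %s")
#     mycursor.execute(query, (uid,))
#
# This snippet illustrates parameter binding and is not part of the original module.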
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
font10 = "-family {Rockwell Extra Bold} -size 12 -weight bold " \
"-slant roman -underline 0 -overstrike 0"
font11 = "-family {Rockwell Extra Bold} -size 40 -weight bold " \
"-slant roman -underline 0 -overstrike 0"
font12 = "-family Rockwell -size 15 -weight normal -slant " \
"roman -underline 0 -overstrike 0"
font13 = "-family Rockwell -size 9 -weight normal -slant roman" \
" -underline 0 -overstrike 0"
font14 = "-family Rockwell -size 12 -weight normal -slant " \
"roman -underline 0 -overstrike 0"
font15 = "-family Rockwell -size 13 -weight bold -slant roman " \
"-underline 0 -overstrike 0"
font20 = "-family {Britannic Bold} -size 48 -weight bold " \
"-slant roman -underline 0 -overstrike 0"
font16 = "-family {Palatino Linotype} -size 11 -weight normal " \
"-slant roman -underline 0 -overstrike 0"
font9 = "-family {Segoe Script} -size 12 -weight normal -slant" \
" italic -underline 0 -overstrike 0"
font17 = "-family {Berlin Sans FB} -size 15"
self.style = ttk.Style()
if sys.platform == "win32":
self.style.theme_use('winnative')
self.style.configure('.',background=_bgcolor)
self.style.configure('.',foreground=_fgcolor)
self.style.configure('.',font="TkDefaultFont")
self.style.map('.',background=
[('selected', _compcolor), ('active',_ana2color)])
top.geometry("1538x878+41+60")
top.title("New Toplevel")
top.configure(background="#ffff24")
top.configure(highlightbackground="#d9d9d9")
top.configure(highlightcolor="black")
self.Label1 = tk.Label(top)
self.Label1.place(relx=0.013, rely=0.023, height=81, width=156)
self.Label1.configure(activebackground="#f9f9f9")
self.Label1.configure(activeforeground="black")
self.Label1.configure(background="#ffff24")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font=font20)
self.Label1.configure(foreground="#ff250d")
self.Label1.configure(highlightbackground="#d9d9d9")
self.Label1.configure(highlightcolor="black")
self.Label1.configure(text='''eTAX''')
self.Label1_1 = tk.Label(top)
self.Label1_1.place(relx=0.117, rely=0.023, height=81, width=156)
self.Label1_1.configure(activebackground="#f9f9f9")
self.Label1_1.configure(activeforeground="black")
self.Label1_1.configure(background="#ffff24")
self.Label1_1.configure(disabledforeground="#a3a3a3")
self.Label1_1.configure(font=font20)
self.Label1_1.configure(foreground="#2212ff")
self.Label1_1.configure(highlightbackground="#d9d9d9")
self.Label1_1.configure(highlightcolor="black")
self.Label1_1.configure(text='''2019''')
self.Label2 = tk.Label(top)
self.Label2.place(relx=0.072, rely=0.103, height=31, width=141)
self.Label2.configure(activebackground="#f9f9f9")
self.Label2.configure(activeforeground="black")
self.Label2.configure(background="#ffff24")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font=font9)
self.Label2.configure(foreground="#13c12a")
self.Label2.configure(highlightbackground="#d9d9d9")
self.Label2.configure(highlightcolor="black")
self.Label2.configure(text='''working for you''')
self.backbutton = tk.Button(top)
self.backbutton.place(relx=0.013, rely=0.159, height=44, width=97)
self.backbutton.configure(activebackground="#ececec")
self.backbutton.configure(activeforeground="#000000")
self.backbutton.configure(background="#120bd8")
self.backbutton.configure(disabledforeground="#a3a3a3")
self.backbutton.configure(font=font10)
self.backbutton.configure(foreground="#fcffff")
self.backbutton.configure(highlightbackground="#d9d9d9")
self.backbutton.configure(highlightcolor="black")
self.backbutton.configure(pady="0")
self.backbutton.configure(text='''Back''')
self.exit = tk.Button(top)
self.exit.place(relx=0.104, rely=0.159, height=44, width=97)
self.exit.configure(activebackground="#ececec")
self.exit.configure(activeforeground="#000000")
self.exit.configure(background="#120bd8")
self.exit.configure(disabledforeground="#a3a3a3")
self.exit.configure(font=font10)
self.exit.configure(foreground="#fcffff")
self.exit.configure(highlightbackground="#d9d9d9")
self.exit.configure(highlightcolor="black")
self.exit.configure(pady="0")
self.exit.configure(command = self.exits, text='''Exit''')
self.Label3 = tk.Label(top)
self.Label3.place(relx=0.013, rely=0.9, height=21, width=56)
self.Label3.configure(activebackground="#f9f9f9")
self.Label3.configure(activeforeground="black")
self.Label3.configure(background="#ffff24")
self.Label3.configure(disabledforeground="#a3a3a3")
self.Label3.configure(foreground="#000000")
self.Label3.configure(highlightbackground="#d9d9d9")
self.Label3.configure(highlightcolor="black")
self.Label3.configure(text='''etax-2019''')
self.Label3_3 = tk.Label(top)
self.Label3_3.place(relx=0.013, rely=0.923, height=21, width=34)
self.Label3_3.configure(activebackground="#f9f9f9")
self.Label3_3.configure(activeforeground="black")
self.Label3_3.configure(background="#ffff24")
self.Label3_3.configure(disabledforeground="#a3a3a3")
self.Label3_3.configure(foreground="#000000")
self.Label3_3.configure(highlightbackground="#d9d9d9")
self.Label3_3.configure(highlightcolor="black")
self.Label3_3.configure(text='''v 1.0.2''')
self.Label3_4 = tk.Label(top)
self.Label3_4.place(relx=0.007, rely=0.968, height=21, width=134)
self.Label3_4.configure(activebackground="#f9f9f9")
self.Label3_4.configure(activeforeground="black")
self.Label3_4.configure(background="#ffff24")
self.Label3_4.configure(disabledforeground="#a3a3a3")
self.Label3_4.configure(foreground="#000000")
self.Label3_4.configure(highlightbackground="#d9d9d9")
self.Label3_4.configure(highlightcolor="black")
self.Label3_4.configure(text='''Working On Windows''')
self.Label3_1 = tk.Label(top)
self.Label3_1.place(relx=0.013, rely=0.945, height=21, width=164)
self.Label3_1.configure(activebackground="#f9f9f9")
self.Label3_1.configure(activeforeground="black")
self.Label3_1.configure(background="#ffff24")
self.Label3_1.configure(disabledforeground="#a3a3a3")
self.Label3_1.configure(foreground="#000000")
self.Label3_1.configure(highlightbackground="#d9d9d9")
self.Label3_1.configure(highlightcolor="black")
self.Label3_1.configure(text='''Connected to MySQL server 8.0''')
self.Label4 = tk.Label(top)
self.Label4.place(relx=0.316, rely=0.034, height=68, width=651)
self.Label4.configure(activebackground="#f9f9f9")
self.Label4.configure(activeforeground="black")
self.Label4.configure(background="#ffff24")
self.Label4.configure(disabledforeground="#36911a")
self.Label4.configure(font=font11)
self.Label4.configure(foreground="#36911a")
self.Label4.configure(highlightbackground="#d9d9d9")
self.Label4.configure(highlightcolor="black")
self.Label4.configure(text='''CONNECTING WORLD''')
self.Label5 = tk.Label(top)
self.Label5.place(relx=0.793, rely=0.034, height=28, width=192)
self.Label5.configure(activebackground="#f9f9f9")
self.Label5.configure(activeforeground="black")
self.Label5.configure(background="#ffff24")
self.Label5.configure(disabledforeground="#a3a3a3")
self.Label5.configure(font=font12)
self.Label5.configure(foreground="#000000")
self.Label5.configure(highlightbackground="#d9d9d9")
self.Label5.configure(highlightcolor="black")
self.Label5.configure(text='''Village : Kalamwadi''')
self.Label5_2 = tk.Label(top)
self.Label5_2.place(relx=0.813, rely=0.068, height=28, width=172)
self.Label5_2.configure(activebackground="#f9f9f9")
self.Label5_2.configure(activeforeground="black")
self.Label5_2.configure(background="#ffff24")
self.Label5_2.configure(disabledforeground="#a3a3a3")
self.Label5_2.configure(font=font12)
self.Label5_2.configure(foreground="#000000")
self.Label5_2.configure(highlightbackground="#d9d9d9")
self.Label5_2.configure(highlightcolor="black")
self.Label5_2.configure(text='''District : Sangli''')
self.Label5_3 = tk.Label(top)
self.Label5_3.place(relx=0.897, rely=0.923, height=28, width=172)
self.Label5_3.configure(activebackground="#f9f9f9")
self.Label5_3.configure(activeforeground="black")
self.Label5_3.configure(background="#ffff24")
self.Label5_3.configure(disabledforeground="#a3a3a3")
self.Label5_3.configure(font=font13)
self.Label5_3.configure(foreground="#000000")
self.Label5_3.configure(highlightbackground="#d9d9d9")
self.Label5_3.configure(highlightcolor="black")
self.Label5_3.configure(text='''Server Status : Online''')
self.Label5_4 = tk.Label(top)
self.Label5_4.place(relx=0.904, rely=0.945, height=28, width=172)
self.Label5_4.configure(activebackground="#f9f9f9")
self.Label5_4.configure(activeforeground="black")
self.Label5_4.configure(background="#ffff24")
self.Label5_4.configure(disabledforeground="#a3a3a3")
self.Label5_4.configure(font=font13)
self.Label5_4.configure(foreground="#000000")
self.Label5_4.configure(highlightbackground="#d9d9d9")
self.Label5_4.configure(highlightcolor="black")
self.Label5_4.configure(text='''Host : localhost''')
self.Label5_5 = tk.Label(top)
self.Label5_5.place(relx=0.904, rely=0.968, height=28, width=172)
self.Label5_5.configure(activebackground="#f9f9f9")
self.Label5_5.configure(activeforeground="black")
self.Label5_5.configure(background="#ffff24")
self.Label5_5.configure(disabledforeground="#a3a3a3")
self.Label5_5.configure(font=font13)
self.Label5_5.configure(foreground="#000000")
self.Label5_5.configure(highlightbackground="#d9d9d9")
self.Label5_5.configure(highlightcolor="black")
self.Label5_5.configure(text='''Port : 3306''')
self.Label5_1 = tk.Label(top)
self.Label5_1.place(relx=0.91, rely=0.091, height=28, width=172)
self.Label5_1.configure(activebackground="#f9f9f9")
self.Label5_1.configure(activeforeground="black")
self.Label5_1.configure(background="#ffff24")
self.Label5_1.configure(disabledforeground="#a3a3a3")
self.Label5_1.configure(font=font14)
self.Label5_1.configure(foreground="#000000")
self.Label5_1.configure(highlightbackground="#d9d9d9")
self.Label5_1.configure(highlightcolor="black")
self.Label5_1.configure(text='''User : user''')
self.box1o1 = ScrolledListBox(top)
self.box1o1.place(relx=0.013, rely=0.285, relheight=0.598
, relwidth=0.274)
self.box1o1.configure(background="white")
self.box1o1.configure(disabledforeground="#a3a3a3")
self.box1o1.configure(font="TkFixedFont")
self.box1o1.configure(foreground="black")
self.box1o1.configure(highlightbackground="#d9d9d9")
self.box1o1.configure(highlightcolor="#d9d9d9")
self.box1o1.configure(selectbackground="#c4c4c4")
self.box1o1.configure(selectforeground="black")
self.box1o1.configure(width=10)
self.box2o1 = ScrolledListBox(top)
self.box2o1.place(relx=0.293, rely=0.285, relheight=0.598
, relwidth=0.703)
self.box2o1.configure(background="white")
self.box2o1.configure(disabledforeground="#a3a3a3")
self.box2o1.configure(font="TkFixedFont")
self.box2o1.configure(foreground="black")
self.box2o1.configure(highlightbackground="#d9d9d9")
self.box2o1.configure(highlightcolor="#d9d9d9")
self.box2o1.configure(selectbackground="#c4c4c4")
self.box2o1.configure(selectforeground="black")
self.box2o1.configure(width=10)
self.TSeparator1 = ttk.Separator(top)
self.TSeparator1.place(relx=0.923, rely=0.011, relheight=0.114)
self.TSeparator1.configure(orient="vertical")
self.TSeparator2 = ttk.Separator(top)
self.TSeparator2.place(relx=0.013, rely=0.137, relwidth=0.202)
self.TSeparator3 = ttk.Separator(top)
self.TSeparator3.place(relx=0.013, rely=0.228, relwidth=0.975)
self.TSeparator3_6 = ttk.Separator(top)
self.TSeparator3_6.place(relx=0.013, rely=0.894, relwidth=0.975)
self.viewbutton = tk.Button(top)
self.viewbutton.place(relx=0.364, rely=0.923, height=33, width=148)
self.viewbutton.configure(activebackground="#ececec")
self.viewbutton.configure(activeforeground="#000000")
self.viewbutton.configure(background="#2020d8")
self.viewbutton.configure(disabledforeground="#a3a3a3")
self.viewbutton.configure(font=font15)
self.viewbutton.configure(foreground="#ffffff")
self.viewbutton.configure(highlightbackground="#d9d9d9")
self.viewbutton.configure(highlightcolor="black")
self.viewbutton.configure(pady="0")
self.viewbutton.configure(takefocus="0")
self.viewbutton.configure(command = self.viewnames , text='''View all Names''')
self.viewbutton_8 = tk.Button(top)
self.viewbutton_8.place(relx=0.501, rely=0.923, height=33, width=148)
self.viewbutton_8.configure(activebackground="#ececec")
self.viewbutton_8.configure(activeforeground="#000000")
self.viewbutton_8.configure(background="#2020d8")
self.viewbutton_8.configure(disabledforeground="#a3a3a3")
self.viewbutton_8.configure(font=font15)
self.viewbutton_8.configure(foreground="#ffffff")
self.viewbutton_8.configure(highlightbackground="#d9d9d9")
self.viewbutton_8.configure(highlightcolor="black")
self.viewbutton_8.configure(pady="0")
self.viewbutton_8.configure(takefocus="0")
self.viewbutton_8.configure(command = self.viewalls , text='''View all Data''')
self.viewbutton_9 = tk.Button(top)
self.viewbutton_9.place(relx=0.637, rely=0.923, height=33, width=108)
self.viewbutton_9.configure(activebackground="#ececec")
self.viewbutton_9.configure(activeforeground="#000000")
self.viewbutton_9.configure(background="#2020d8")
self.viewbutton_9.configure(disabledforeground="#a3a3a3")
self.viewbutton_9.configure(font=font15)
self.viewbutton_9.configure(foreground="#ffffff")
self.viewbutton_9.configure(highlightbackground="#d9d9d9")
self.viewbutton_9.configure(highlightcolor="black")
self.viewbutton_9.configure(pady="0")
self.viewbutton_9.configure(takefocus="0")
self.viewbutton_9.configure(command = self.clearalls , text='''Clear all''')
self.Label7 = tk.Label(top)
self.Label7.place(relx=0.013, rely=0.251, height=26, width=78)
self.Label7.configure(activebackground="#f9f9f9")
self.Label7.configure(activeforeground="black")
self.Label7.configure(background="#ffff24")
self.Label7.configure(disabledforeground="#a3a3a3")
self.Label7.configure(font=font16)
self.Label7.configure(foreground="#1220e0")
self.Label7.configure(highlightbackground="#d9d9d9")
self.Label7.configure(highlightcolor="black")
self.Label7.configure(text='''Id number''')
self.menubar = tk.Menu(top,font="TkMenuFont",bg=_bgcolor,fg=_fgcolor)
top.configure(menu = self.menubar)
self.Label7_1 = tk.Label(top)
self.Label7_1.place(relx=0.104, rely=0.251, height=26, width=78)
self.Label7_1.configure(activebackground="#f9f9f9")
self.Label7_1.configure(activeforeground="black")
self.Label7_1.configure(background="#ffff24")
self.Label7_1.configure(disabledforeground="#a3a3a3")
self.Label7_1.configure(font=font16)
self.Label7_1.configure(foreground="#1220e0")
self.Label7_1.configure(highlightbackground="#d9d9d9")
self.Label7_1.configure(highlightcolor="black")
self.Label7_1.configure(text='''Name''')
self.Label7_2 = tk.Label(top)
self.Label7_2.place(relx=0.293, rely=0.251, height=26, width=68)
self.Label7_2.configure(activebackground="#f9f9f9")
self.Label7_2.configure(activeforeground="black")
self.Label7_2.configure(background="#ffff24")
self.Label7_2.configure(disabledforeground="#a3a3a3")
self.Label7_2.configure(font=font16)
self.Label7_2.configure(foreground="#1220e0")
self.Label7_2.configure(highlightbackground="#d9d9d9")
self.Label7_2.configure(highlightcolor="black")
self.Label7_2.configure(text='''Id number''')
self.Label7_3 = tk.Label(top)
self.Label7_3.place(relx=0.345, rely=0.251, height=26, width=68)
self.Label7_3.configure(activebackground="#f9f9f9")
self.Label7_3.configure(activeforeground="black")
self.Label7_3.configure(background="#ffff24")
self.Label7_3.configure(disabledforeground="#a3a3a3")
self.Label7_3.configure(font=font16)
self.Label7_3.configure(foreground="#1220e0")
self.Label7_3.configure(highlightbackground="#d9d9d9")
self.Label7_3.configure(highlightcolor="black")
self.Label7_3.configure(text='''Meter No.''')
self.Label7_4 = tk.Label(top)
self.Label7_4.place(relx=0.397, rely=0.251, height=26, width=68)
self.Label7_4.configure(activebackground="#f9f9f9")
self.Label7_4.configure(activeforeground="black")
self.Label7_4.configure(background="#ffff24")
self.Label7_4.configure(disabledforeground="#a3a3a3")
self.Label7_4.configure(font=font16)
self.Label7_4.configure(foreground="#1220e0")
self.Label7_4.configure(highlightbackground="#d9d9d9")
self.Label7_4.configure(highlightcolor="black")
self.Label7_4.configure(text='''Ward No.''')
self.Label7_1 = tk.Label(top)
self.Label7_1.place(relx=0.449, rely=0.251, height=26, width=58)
self.Label7_1.configure(activebackground="#f9f9f9")
self.Label7_1.configure(activeforeground="black")
self.Label7_1.configure(background="#ffff24")
self.Label7_1.configure(disabledforeground="#a3a3a3")
self.Label7_1.configure(font=font16)
self.Label7_1.configure(foreground="#1220e0")
self.Label7_1.configure(highlightbackground="#d9d9d9")
self.Label7_1.configure(highlightcolor="black")
self.Label7_1.configure(text='''Housetax''')
self.Label7_2 = tk.Label(top)
self.Label7_2.place(relx=0.494, rely=0.251, height=26, width=68)
self.Label7_2.configure(activebackground="#f9f9f9")
self.Label7_2.configure(activeforeground="black")
self.Label7_2.configure(background="#ffff24")
self.Label7_2.configure(disabledforeground="#a3a3a3")
self.Label7_2.configure(font=font16)
self.Label7_2.configure(foreground="#1220e0")
self.Label7_2.configure(highlightbackground="#d9d9d9")
self.Label7_2.configure(highlightcolor="black")
self.Label7_2.configure(text='''Healthtax''')
self.Label7_3 = tk.Label(top)
self.Label7_3.place(relx=0.54, rely=0.251, height=26, width=58)
self.Label7_3.configure(activebackground="#f9f9f9")
self.Label7_3.configure(activeforeground="black")
self.Label7_3.configure(background="#ffff24")
self.Label7_3.configure(disabledforeground="#a3a3a3")
self.Label7_3.configure(font=font16)
self.Label7_3.configure(foreground="#1220e0")
self.Label7_3.configure(highlightbackground="#d9d9d9")
self.Label7_3.configure(highlightcolor="black")
self.Label7_3.configure(text='''Lighttax''')
self.Label7_5 = tk.Label(top)
self.Label7_5.place(relx=0.579, rely=0.251, height=26, width=68)
self.Label7_5.configure(activebackground="#f9f9f9")
self.Label7_5.configure(activeforeground="black")
self.Label7_5.configure(background="#ffff24")
self.Label7_5.configure(disabledforeground="#a3a3a3")
self.Label7_5.configure(font=font16)
self.Label7_5.configure(foreground="#1220e0")
self.Label7_5.configure(highlightbackground="#d9d9d9")
self.Label7_5.configure(highlightcolor="black")
self.Label7_5.configure(text='''Watertax''')
self.Label7_5 = tk.Label(top)
self.Label7_5.place(relx=0.624, rely=0.251, height=26, width=48)
self.Label7_5.configure(activebackground="#f9f9f9")
self.Label7_5.configure(activeforeground="black")
self.Label7_5.configure(background="#ffff24")
self.Label7_5.configure(disabledforeground="#a3a3a3")
self.Label7_5.configure(font=font16)
self.Label7_5.configure(foreground="#1220e0")
self.Label7_5.configure(highlightbackground="#d9d9d9")
self.Label7_5.configure(highlightcolor="black")
self.Label7_5.configure(text='''Total''')
self.Label7_5 = tk.Label(top)
self.Label7_5.place(relx=0.663, rely=0.251, height=26, width=78)
self.Label7_5.configure(activebackground="#f9f9f9")
self.Label7_5.configure(activeforeground="black")
self.Label7_5.configure(background="#ffff24")
self.Label7_5.configure(disabledforeground="#a3a3a3")
self.Label7_5.configure(font=font16)
self.Label7_5.configure(foreground="#1220e0")
self.Label7_5.configure(highlightbackground="#d9d9d9")
self.Label7_5.configure(highlightcolor="black")
self.Label7_5.configure(text='''Receipt No.''')
self.Label7_6 = tk.Label(top)
self.Label7_6.place(relx=0.722, rely=0.251, height=26, width=68)
self.Label7_6.configure(activebackground="#f9f9f9")
self.Label7_6.configure(activeforeground="black")
self.Label7_6.configure(background="#ffff24")
self.Label7_6.configure(disabledforeground="#a3a3a3")
self.Label7_6.configure(font=font16)
self.Label7_6.configure(foreground="#1220e0")
self.Label7_6.configure(highlightbackground="#d9d9d9")
self.Label7_6.configure(highlightcolor="black")
self.Label7_6.configure(text='''Housetax''')
self.Label7_6.configure(width=68)
self.Label7_6 = tk.Label(top)
self.Label7_6.place(relx=0.767, rely=0.251, height=26, width=68)
self.Label7_6.configure(activebackground="#f9f9f9")
self.Label7_6.configure(activeforeground="black")
self.Label7_6.configure(background="#ffff24")
self.Label7_6.configure(disabledforeground="#a3a3a3")
self.Label7_6.configure(font=font16)
self.Label7_6.configure(foreground="#1220e0")
self.Label7_6.configure(highlightbackground="#d9d9d9")
self.Label7_6.configure(highlightcolor="black")
self.Label7_6.configure(text='''Lighttax''')
self.Label7_6 = tk.Label(top)
self.Label7_6.place(relx=0.813, rely=0.251, height=26, width=68)
self.Label7_6.configure(activebackground="#f9f9f9")
self.Label7_6.configure(activeforeground="black")
self.Label7_6.configure(background="#ffff24")
self.Label7_6.configure(disabledforeground="#a3a3a3")
self.Label7_6.configure(font=font16)
self.Label7_6.configure(foreground="#1220e0")
self.Label7_6.configure(highlightbackground="#d9d9d9")
self.Label7_6.configure(highlightcolor="black")
self.Label7_6.configure(text='''Watertax''')
self.Label7_6 = tk.Label(top)
self.Label7_6.place(relx=0.904, rely=0.251, height=26, width=48)
self.Label7_6.configure(activebackground="#f9f9f9")
self.Label7_6.configure(activeforeground="black")
self.Label7_6.configure(background="#ffff24")
self.Label7_6.configure(disabledforeground="#a3a3a3")
self.Label7_6.configure(font=font16)
self.Label7_6.configure(foreground="#1220e0")
self.Label7_6.configure(highlightbackground="#d9d9d9")
self.Label7_6.configure(highlightcolor="black")
self.Label7_6.configure(text='''Total''')
self.Label7_3 = tk.Label(top)
self.Label7_3.place(relx=0.858, rely=0.251, height=26, width=68)
self.Label7_3.configure(activebackground="#f9f9f9")
self.Label7_3.configure(activeforeground="black")
self.Label7_3.configure(background="#ffff24")
self.Label7_3.configure(disabledforeground="#a3a3a3")
self.Label7_3.configure(font=font16)
self.Label7_3.configure(foreground="#1220e0")
self.Label7_3.configure(highlightbackground="#d9d9d9")
self.Label7_3.configure(highlightcolor="black")
self.Label7_3.configure(text='''Healthtax''')
self.Label7_4 = tk.Label(top)
self.Label7_4.place(relx=0.943, rely=0.251, height=26, width=48)
self.Label7_4.configure(activebackground="#f9f9f9")
self.Label7_4.configure(activeforeground="black")
self.Label7_4.configure(background="#ffff24")
self.Label7_4.configure(disabledforeground="#a3a3a3")
self.Label7_4.configure(font=font16)
self.Label7_4.configure(foreground="#1220e0")
self.Label7_4.configure(highlightbackground="#d9d9d9")
self.Label7_4.configure(highlightcolor="black")
self.Label7_4.configure(text='''Rest''')
self.Label7_5 = tk.Label(top)
self.Label7_5.place(relx=0.806, rely=0.216, height=26, width=68)
self.Label7_5.configure(activebackground="#f9f9f9")
self.Label7_5.configure(activeforeground="black")
self.Label7_5.configure(background="#ffff24")
self.Label7_5.configure(disabledforeground="#a3a3a3")
self.Label7_5.configure(font=font16)
self.Label7_5.configure(foreground="#1220e0")
self.Label7_5.configure(highlightbackground="#d9d9d9")
self.Label7_5.configure(highlightcolor="black")
self.Label7_5.configure(text='''Paid''')
self.TSeparator4 = ttk.Separator(top)
self.TSeparator4.place(relx=0.715, rely=0.228, relheight=0.057)
self.TSeparator4.configure(orient="vertical")
self.TSeparator4_6 = ttk.Separator(top)
self.TSeparator4_6.place(relx=0.936, rely=0.228, relheight=0.057)
self.TSeparator4_6.configure(orient="vertical")
self.villagename = tk.Entry(top)
self.villagename.place(relx=0.375, rely=0.179, height=20, relwidth=0.153)
self.villagename.configure(background="white")
self.villagename.configure(disabledforeground="#a3a3a3")
self.villagename.configure(font="TkFixedFont")
self.villagename.configure(foreground="#1b1391")
self.villagename.configure(insertbackground="black")
self.villagename.configure(width=244)
self.Entry2 = tk.Entry(top)
self.Entry2.place(relx=0.675, rely=0.179,height=20, relwidth=0.146)
self.Entry2.configure(background="white")
self.Entry2.configure(disabledforeground="#a3a3a3")
self.Entry2.configure(font="TkFixedFont")
self.Entry2.configure(foreground="#000000")
self.Entry2.configure(insertbackground="black")
self.Entry2.configure(width=234)
self.Label7 = tk.Label(top)
self.Label7.place(relx=0.269, rely=0.179, height=21, width=154)
self.Label7.configure(background="#ffff24")
self.Label7.configure(disabledforeground="#a3a3a3")
self.Label7.configure(font=font17)
self.Label7.configure(foreground="#000000")
self.Label7.configure(text='''Village Name :''')
self.Label7.configure(width=154)
self.Label7_1 = tk.Label(top)
self.Label7_1.place(relx=0.575, rely=0.179, height=21, width=154)
self.Label7_1.configure(activebackground="#f9f9f9")
self.Label7_1.configure(activeforeground="black")
self.Label7_1.configure(background="#ffff24")
self.Label7_1.configure(disabledforeground="#a3a3a3")
self.Label7_1.configure(font="-family {Berlin Sans FB} -size 15")
self.Label7_1.configure(foreground="#000000")
self.Label7_1.configure(highlightbackground="#d9d9d9")
self.Label7_1.configure(highlightcolor="black")
self.Label7_1.configure(text='''UID Number :''')
self.Label7_1.configure(width=154)
self.btn_find = tk.Button(top)
self.btn_find.place(relx=0.856, rely=0.167, height=34, width=97)
self.btn_find.configure(activebackground="#ececec")
self.btn_find.configure(activeforeground="#000000")
self.btn_find.configure(background="#ff330a")
self.btn_find.configure(disabledforeground="#a3a3a3")
self.btn_find.configure(font="-family {Rockwell Extra Bold} -size 12 -weight bold")
self.btn_find.configure(foreground="#fcffff")
self.btn_find.configure(highlightbackground="#d9d9d9")
self.btn_find.configure(highlightcolor="black")
self.btn_find.configure(pady="0")
self.btn_find.configure(text='''FIND''')
self.btn_find.configure(width=97,command=self.findss)
"""self.Label6 = tk.Label(top)
self.Label6.place(relx=0.949, rely=0.034, height=44, width=44)
self.Label6.configure(background="#d9d9d9")
self.Label6.configure(disabledforeground="#a3a3a3")
self.Label6.configure(foreground="#000000")
self._img1 = tk.PhotoImage(file="./login3.png")
self.Label6.configure(image=self._img1)
self.Label6.configure(text='''Label''')"""
# The following code is added to facilitate the Scrolled widgets you specified.
class AutoScroll(object):
'''Configure the scrollbars for a widget.'''
def __init__(self, master):
# Rozen. Added the try-except clauses so that this class
# could be used for scrolled entry widget for which vertical
# scrolling is not supported. 5/7/14.
try:
vsb = ttk.Scrollbar(master, orient='vertical', command=self.yview)
except:
pass
hsb = ttk.Scrollbar(master, orient='horizontal', command=self.xview)
#self.configure(yscrollcommand=_autoscroll(vsb),
# xscrollcommand=_autoscroll(hsb))
try:
self.configure(yscrollcommand=self._autoscroll(vsb))
except:
pass
self.configure(xscrollcommand=self._autoscroll(hsb))
self.grid(column=0, row=0, sticky='nsew')
try:
vsb.grid(column=1, row=0, sticky='ns')
except:
pass
hsb.grid(column=0, row=1, sticky='ew')
master.grid_columnconfigure(0, weight=1)
master.grid_rowconfigure(0, weight=1)
# Copy geometry methods of master (taken from ScrolledText.py)
if py3:
methods = tk.Pack.__dict__.keys() | tk.Grid.__dict__.keys() \
| tk.Place.__dict__.keys()
else:
methods = tk.Pack.__dict__.keys() + tk.Grid.__dict__.keys() \
+ tk.Place.__dict__.keys()
for meth in methods:
if meth[0] != '_' and meth not in ('config', 'configure'):
setattr(self, meth, getattr(master, meth))
@staticmethod
def _autoscroll(sbar):
'''Hide and show scrollbar as needed.'''
def wrapped(first, last):
first, last = float(first), float(last)
if first <= 0 and last >= 1:
sbar.grid_remove()
else:
sbar.grid()
sbar.set(first, last)
return wrapped
def __str__(self):
return str(self.master)
def _create_container(func):
'''Creates a ttk Frame with a given master, and use this new frame to
place the scrollbars and the widget.'''
def wrapped(cls, master, **kw):
container = ttk.Frame(master)
container.bind('<Enter>', lambda e: _bound_to_mousewheel(e, container))
container.bind('<Leave>', lambda e: _unbound_to_mousewheel(e, container))
return func(cls, container, **kw)
return wrapped
class ScrolledListBox(AutoScroll, tk.Listbox):
'''A standard Tkinter Text widget with scrollbars that will
automatically show/hide as needed.'''
@_create_container
def __init__(self, master, **kw):
tk.Listbox.__init__(self, master, **kw)
AutoScroll.__init__(self, master)
import platform
def _bound_to_mousewheel(event, widget):
child = widget.winfo_children()[0]
if platform.system() == 'Windows' or platform.system() == 'Darwin':
child.bind_all('<MouseWheel>', lambda e: _on_mousewheel(e, child))
child.bind_all('<Shift-MouseWheel>', lambda e: _on_shiftmouse(e, child))
else:
child.bind_all('<Button-4>', lambda e: _on_mousewheel(e, child))
child.bind_all('<Button-5>', lambda e: _on_mousewheel(e, child))
child.bind_all('<Shift-Button-4>', lambda e: _on_shiftmouse(e, child))
child.bind_all('<Shift-Button-5>', lambda e: _on_shiftmouse(e, child))
def _unbound_to_mousewheel(event, widget):
if platform.system() == 'Windows' or platform.system() == 'Darwin':
widget.unbind_all('<MouseWheel>')
widget.unbind_all('<Shift-MouseWheel>')
else:
widget.unbind_all('<Button-4>')
widget.unbind_all('<Button-5>')
widget.unbind_all('<Shift-Button-4>')
widget.unbind_all('<Shift-Button-5>')
def _on_mousewheel(event, widget):
if platform.system() == 'Windows':
widget.yview_scroll(-1*int(event.delta/120),'units')
elif platform.system() == 'Darwin':
widget.yview_scroll(-1*int(event.delta),'units')
else:
if event.num == 4:
widget.yview_scroll(-1, 'units')
elif event.num == 5:
widget.yview_scroll(1, 'units')
def _on_shiftmouse(event, widget):
if platform.system() == 'Windows':
widget.xview_scroll(-1*int(event.delta/120), 'units')
elif platform.system() == 'Darwin':
widget.xview_scroll(-1*int(event.delta), 'units')
else:
if event.num == 4:
widget.xview_scroll(-1, 'units')
elif event.num == 5:
widget.xview_scroll(1, 'units')
if __name__ == '__main__':
vp_start_gui()
def calculator1():
window=Tk()
window.title("welcome")
lbl=Label(window,text="hello")
lbl.pack()
"""---------------------------------------------calculator module """
#-------------------------------------------------------------------------------------------------------------------------------
def online1():
import sys
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
import webbrowser
import tkinter
from tkinter import messagebox
import smtplib
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import onlinesupport_support
import os.path
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
sys.stdout.flush()
root = tk.Tk()
onlinesupport_support.set_Tk_var()
top = e_TAX_2019 (root)
onlinesupport_support.init(root, top)
root.mainloop()
w = None
def create_e_TAX_2019(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
rt = root
w = tk.Toplevel (root)
onlinesupport_support.set_Tk_var()
top = e_TAX_2019 (w)
onlinesupport_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_e_TAX_2019():
global w
w.destroy()
w = None
class e_TAX_2019:
def facebooks(self):
new =2
url= "https://www.facebook.com/pranesh.kulkarni.359"
webbrowser.open(url,new=new);
def calls(self):
new =2
url= "https://api.whatsapp.com/send?phone=918956795667&text=Hi+User+here&fbclid=IwAR05YmoAuyw0dzyDMetLj3Zcya3QrJxfOW6sVmo4ydcqaaLf019RR4fNT0M"
webbrowser.open(url,new=new);
def websites(self):
new =2
url= "https://etax20194.webnode.com/"
webbrowser.open(url,new=new);
def submit2s(self):
r1=str(self.spin1.get())
r2=str(self.spin2.get())
r3=str(self.spin3.get())
r4=str(self.spin4.get())
rev=str(self.txt_review.get())
tot= r1+r2+r3+r4+rev
mail= smtplib.SMTP('smtp.gmail.com',587)
mail.ehlo()
mail.starttls()
mail.login('etaxsupp2019@gmail.com','Pass@123')
mail.sendmail('etaxsupp2019@gmail.com','etaxsupp2019@gmail.com',tot)
mail.close()
messagebox.showinfo("e-TAX 2019","Thank You For Rating")
def submit1s(self):
a=str(self.txt_village.get())
b=str(self.txt_username.get())
c=str(self.txt_productid.get())
d=str(self.txt_query.get())
tota = a+b+c+d
mail= smtplib.SMTP('smtp.gmail.com',587)
mail.ehlo()
mail.starttls()
mail.login('etaxsupp2019@gmail.com','Pass@123')
mail.sendmail('etaxsupp2019@gmail.com','etaxsupp2019@gmail.com',tota)
mail.close()
messagebox.showinfo("e-TAX 2019","Bug Reported.")
def backs(self):
root.destroy()
print("wj")
def exits(self):
msg=tkinter.messagebox.askyesno("e-TAX 2019","Do You Want To EXIT ?")
if msg:
exit()
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
top.geometry("1162x780+138+34")
top.title("e-TAX 2019")
top.configure(background="#727272")
top.configure(highlightbackground="#d9d9d9")
top.configure(highlightcolor="black")
self.Frame1 = tk.Frame(top)
self.Frame1.place(relx=0.164, rely=0.026, relheight=0.186
, relwidth=0.667)
self.Frame1.configure(relief='ridge')
self.Frame1.configure(borderwidth="10")
self.Frame1.configure(relief='ridge')
self.Frame1.configure(background="#595959")
self.Frame1.configure(highlightbackground="#d9d9d9")
self.Frame1.configure(highlightcolor="black")
self.Frame1.configure(width=775)
self.Label1 = tk.Label(self.Frame1)
self.Label1.place(relx=0.039, rely=0.207, height=59, width=196)
self.Label1.configure(activebackground="#f9f9f9")
self.Label1.configure(activeforeground="black")
self.Label1.configure(background="#595959")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1.configure(foreground="#0d4dff")
self.Label1.configure(highlightbackground="#d9d9d9")
self.Label1.configure(highlightcolor="black")
self.Label1.configure(text='''eTAX''')
self.Label1_1 = tk.Label(self.Frame1)
self.Label1_1.place(relx=0.271, rely=0.207, height=59, width=146)
self.Label1_1.configure(activebackground="#f9f9f9")
self.Label1_1.configure(activeforeground="black")
self.Label1_1.configure(background="#595959")
self.Label1_1.configure(disabledforeground="#a3a3a3")
self.Label1_1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1_1.configure(foreground="#ff2b0a")
self.Label1_1.configure(highlightbackground="#d9d9d9")
self.Label1_1.configure(highlightcolor="black")
self.Label1_1.configure(text='''2019''')
self.Label2 = tk.Label(self.Frame1)
self.Label2.place(relx=0.142, rely=0.621, height=31, width=184)
self.Label2.configure(activebackground="#f9f9f9")
self.Label2.configure(activeforeground="black")
self.Label2.configure(background="#595959")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font="-family {Rage Italic} -size 19 -slant italic")
self.Label2.configure(foreground="#f7ff0a")
self.Label2.configure(highlightbackground="#d9d9d9")
self.Label2.configure(highlightcolor="black")
self.Label2.configure(text='''working for you''')
self.Label1_2 = tk.Label(self.Frame1)
self.Label1_2.place(relx=0.465, rely=0.207, height=69, width=356)
self.Label1_2.configure(activebackground="#f9f9f9")
self.Label1_2.configure(activeforeground="black")
self.Label1_2.configure(background="#595959")
self.Label1_2.configure(disabledforeground="#a3a3a3")
self.Label1_2.configure(font="-family {Rockwell Extra} -size 28 -weight bold")
self.Label1_2.configure(foreground="#5faa14")
self.Label1_2.configure(highlightbackground="#d9d9d9")
self.Label1_2.configure(highlightcolor="black")
self.Label1_2.configure(text='''Online Support''')
self.Frame2 = tk.Frame(top)
self.Frame2.place(relx=0.052, rely=0.256, relheight=0.455, relwidth=0.46)
self.Frame2.configure(relief='ridge')
self.Frame2.configure(borderwidth="10")
self.Frame2.configure(relief='ridge')
self.Frame2.configure(background="#595959")
self.Frame2.configure(highlightbackground="#d9d9d9")
self.Frame2.configure(highlightcolor="black")
self.Frame2.configure(width=535)
self.Label6 = tk.Label(self.Frame2)
self.Label6.place(relx=0.224, rely=0.056, height=49, width=284)
self.Label6.configure(activebackground="#f9f9f9")
self.Label6.configure(activeforeground="black")
self.Label6.configure(background="#595959")
self.Label6.configure(disabledforeground="#a3a3a3")
self.Label6.configure(font="-family {Franklin Gothic Demi Cond} -size 36")
self.Label6.configure(foreground="#ff9e1f")
self.Label6.configure(highlightbackground="#d9d9d9")
self.Label6.configure(highlightcolor="black")
self.Label6.configure(text='''REPORT A BUG''')
self.Label6_5 = tk.Label(self.Frame2)
self.Label6_5.place(relx=0.056, rely=0.254, height=39, width=94)
self.Label6_5.configure(activebackground="#f9f9f9")
self.Label6_5.configure(activeforeground="black")
self.Label6_5.configure(background="#595959")
self.Label6_5.configure(disabledforeground="#a3a3a3")
self.Label6_5.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_5.configure(foreground="aqua")
self.Label6_5.configure(highlightbackground="#d9d9d9")
self.Label6_5.configure(highlightcolor="black")
self.Label6_5.configure(text='''Village :''')
self.Label6_6 = tk.Label(self.Frame2)
self.Label6_6.place(relx=0.056, rely=0.366, height=39, width=94)
self.Label6_6.configure(activebackground="#f9f9f9")
self.Label6_6.configure(activeforeground="black")
self.Label6_6.configure(background="#595959")
self.Label6_6.configure(disabledforeground="#a3a3a3")
self.Label6_6.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_6.configure(foreground="aqua")
self.Label6_6.configure(highlightbackground="#d9d9d9")
self.Label6_6.configure(highlightcolor="black")
self.Label6_6.configure(text='''User Name :''')
self.Label6_1 = tk.Label(self.Frame2)
self.Label6_1.place(relx=0.056, rely=0.592, height=39, width=94)
self.Label6_1.configure(activebackground="#f9f9f9")
self.Label6_1.configure(activeforeground="black")
self.Label6_1.configure(background="#595959")
self.Label6_1.configure(disabledforeground="#a3a3a3")
self.Label6_1.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_1.configure(foreground="aqua")
self.Label6_1.configure(highlightbackground="#d9d9d9")
self.Label6_1.configure(highlightcolor="black")
self.Label6_1.configure(text='''Query :''')
self.Label6_2 = tk.Label(self.Frame2)
self.Label6_2.place(relx=0.056, rely=0.479, height=39, width=94)
self.Label6_2.configure(activebackground="#f9f9f9")
self.Label6_2.configure(activeforeground="black")
self.Label6_2.configure(background="#595959")
self.Label6_2.configure(disabledforeground="#a3a3a3")
self.Label6_2.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_2.configure(foreground="aqua")
self.Label6_2.configure(highlightbackground="#d9d9d9")
self.Label6_2.configure(highlightcolor="black")
self.Label6_2.configure(text='''Product ID :''')
self.txt_village = tk.Entry(self.Frame2)
self.txt_village.place(relx=0.28, rely=0.282,height=20, relwidth=0.643)
self.txt_village.configure(background="white")
self.txt_village.configure(disabledforeground="#a3a3a3")
self.txt_village.configure(font="TkFixedFont")
self.txt_village.configure(foreground="#000000")
self.txt_village.configure(highlightbackground="#d9d9d9")
self.txt_village.configure(highlightcolor="black")
self.txt_village.configure(insertbackground="black")
self.txt_village.configure(selectbackground="#c4c4c4")
self.txt_village.configure(selectforeground="black")
self.txt_username = tk.Entry(self.Frame2)
self.txt_username.place(relx=0.28, rely=0.394, height=20, relwidth=0.643)
self.txt_username.configure(background="white")
self.txt_username.configure(disabledforeground="#a3a3a3")
self.txt_username.configure(font="TkFixedFont")
self.txt_username.configure(foreground="#000000")
self.txt_username.configure(highlightbackground="#d9d9d9")
self.txt_username.configure(highlightcolor="black")
self.txt_username.configure(insertbackground="black")
self.txt_username.configure(selectbackground="#c4c4c4")
self.txt_username.configure(selectforeground="black")
self.txt_productid = tk.Entry(self.Frame2)
self.txt_productid.place(relx=0.28, rely=0.507, height=20
, relwidth=0.643)
self.txt_productid.configure(background="white")
self.txt_productid.configure(disabledforeground="#a3a3a3")
self.txt_productid.configure(font="TkFixedFont")
self.txt_productid.configure(foreground="#000000")
self.txt_productid.configure(highlightbackground="#d9d9d9")
self.txt_productid.configure(highlightcolor="black")
self.txt_productid.configure(insertbackground="black")
self.txt_productid.configure(selectbackground="#c4c4c4")
self.txt_productid.configure(selectforeground="black")
self.txt_query = tk.Entry(self.Frame2)
self.txt_query.place(relx=0.28, rely=0.62,height=70, relwidth=0.643)
self.txt_query.configure(background="white")
self.txt_query.configure(disabledforeground="#a3a3a3")
self.txt_query.configure(font="TkFixedFont")
self.txt_query.configure(foreground="#000000")
self.txt_query.configure(highlightbackground="#d9d9d9")
self.txt_query.configure(highlightcolor="black")
self.txt_query.configure(insertbackground="black")
self.txt_query.configure(selectbackground="#c4c4c4")
self.txt_query.configure(selectforeground="black")
self.btn_submit1 = tk.Button(self.Frame2)
self.btn_submit1.place(relx=0.729, rely=0.845, height=34, width=80)
self.btn_submit1.configure(activebackground="#ececec")
self.btn_submit1.configure(activeforeground="#000000")
self.btn_submit1.configure(background="#ff350d")
self.btn_submit1.configure(borderwidth="10")
self.btn_submit1.configure(disabledforeground="#a3a3a3")
self.btn_submit1.configure(font="-family {Swatch it} -size 11 -weight bold")
self.btn_submit1.configure(foreground="#000000")
self.btn_submit1.configure(highlightbackground="#d9d9d9")
self.btn_submit1.configure(highlightcolor="black")
self.btn_submit1.configure(pady="0")
self.btn_submit1.configure(text='''Submit''',command= self.submit1s)
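# Rating panel (Frame2_1): 1-5 spinboxes for UI, server connectivity, customer care service and the
# overall rating, plus a free-text review entry and its own Submit button.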
self.Frame2_1 = tk.Frame(top)
self.Frame2_1.place(relx=0.525, rely=0.256, relheight=0.455
, relwidth=0.443)
self.Frame2_1.configure(relief='ridge')
self.Frame2_1.configure(borderwidth="10")
self.Frame2_1.configure(relief='ridge')
self.Frame2_1.configure(background="#595959")
self.Frame2_1.configure(highlightbackground="#d9d9d9")
self.Frame2_1.configure(highlightcolor="black")
self.Frame2_1.configure(width=515)
self.Label6_11 = tk.Label(self.Frame2_1)
self.Label6_11.place(relx=0.214, rely=0.028, height=49, width=284)
self.Label6_11.configure(activebackground="#f9f9f9")
self.Label6_11.configure(activeforeground="black")
self.Label6_11.configure(background="#595959")
self.Label6_11.configure(disabledforeground="#a3a3a3")
self.Label6_11.configure(font="-family {Franklin Gothic Demi Cond} -size 36")
self.Label6_11.configure(foreground="#ff9e1f")
self.Label6_11.configure(highlightbackground="#d9d9d9")
self.Label6_11.configure(highlightcolor="black")
self.Label6_11.configure(text='''Give Rating''')
self.Label6_3 = tk.Label(self.Frame2_1)
self.Label6_3.place(relx=0.117, rely=0.225, height=39, width=94)
self.Label6_3.configure(activebackground="#f9f9f9")
self.Label6_3.configure(activeforeground="black")
self.Label6_3.configure(background="#595959")
self.Label6_3.configure(disabledforeground="#a3a3a3")
self.Label6_3.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_3.configure(foreground="aqua")
self.Label6_3.configure(highlightbackground="#d9d9d9")
self.Label6_3.configure(highlightcolor="black")
self.Label6_3.configure(text='''UI :''')
self.Label6_7 = tk.Label(self.Frame2_1)
self.Label6_7.place(relx=0.058, rely=0.31, height=39, width=174)
self.Label6_7.configure(activebackground="#f9f9f9")
self.Label6_7.configure(activeforeground="black")
self.Label6_7.configure(background="#595959")
self.Label6_7.configure(disabledforeground="#a3a3a3")
self.Label6_7.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_7.configure(foreground="aqua")
self.Label6_7.configure(highlightbackground="#d9d9d9")
self.Label6_7.configure(highlightcolor="black")
self.Label6_7.configure(text='''Server Connectivity :''')
self.Label6_4 = tk.Label(self.Frame2_1)
self.Label6_4.place(relx=0.019, rely=0.394, height=39, width=204)
self.Label6_4.configure(activebackground="#f9f9f9")
self.Label6_4.configure(activeforeground="black")
self.Label6_4.configure(background="#595959")
self.Label6_4.configure(disabledforeground="#a3a3a3")
self.Label6_4.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_4.configure(foreground="aqua")
self.Label6_4.configure(highlightbackground="#d9d9d9")
self.Label6_4.configure(highlightcolor="black")
self.Label6_4.configure(text='''Customer Care Service :''')
self.Label6_5 = tk.Label(self.Frame2_1)
self.Label6_5.place(relx=0.058, rely=0.479, height=39, width=154)
self.Label6_5.configure(activebackground="#f9f9f9")
self.Label6_5.configure(activeforeground="black")
self.Label6_5.configure(background="#595959")
self.Label6_5.configure(disabledforeground="#a3a3a3")
self.Label6_5.configure(font="-family {Franklin Gothic Demi Cond} -size 16")
self.Label6_5.configure(foreground="aqua")
self.Label6_5.configure(highlightbackground="#d9d9d9")
self.Label6_5.configure(highlightcolor="black")
self.Label6_5.configure(text='''Overall Ratings :''')
self.spin1 = tk.Spinbox(self.Frame2_1, from_=1.0, to=5.0)
self.spin1.place(relx=0.544, rely=0.254, relheight=0.054, relwidth=0.146)
self.spin1.configure(activebackground="#f9f9f9")
self.spin1.configure(background="white")
self.spin1.configure(buttonbackground="#a3a5a8")
self.spin1.configure(disabledforeground="#a3a3a3")
self.spin1.configure(foreground="black")
self.spin1.configure(highlightbackground="black")
self.spin1.configure(highlightcolor="black")
self.spin1.configure(insertbackground="black")
self.spin1.configure(selectbackground="#c4c4c4")
self.spin1.configure(selectforeground="black")
self.spin1.configure(textvariable=onlinesupport_support.spinbox1)
self.Label5_4 = tk.Label(self.Frame2_1)
self.Label5_4.place(relx=0.35, rely=0.169, height=22, width=152)
self.Label5_4.configure(activebackground="#f9f9f9")
self.Label5_4.configure(activeforeground="black")
self.Label5_4.configure(background="#595959")
self.Label5_4.configure(disabledforeground="#a3a3a3")
self.Label5_4.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_4.configure(foreground="#f7ff14")
self.Label5_4.configure(highlightbackground="#d9d9d9")
self.Label5_4.configure(highlightcolor="black")
self.Label5_4.configure(text='''Give Out of 5''')
self.spin4 = tk.Spinbox(self.Frame2_1, from_=1.0, to=5.0)
self.spin4.place(relx=0.544, rely=0.507, relheight=0.054, relwidth=0.146)
self.spin4.configure(activebackground="#f9f9f9")
self.spin4.configure(background="white")
self.spin4.configure(buttonbackground="#a3a5a8")
self.spin4.configure(disabledforeground="#a3a3a3")
self.spin4.configure(foreground="black")
self.spin4.configure(highlightbackground="black")
self.spin4.configure(highlightcolor="black")
self.spin4.configure(insertbackground="black")
self.spin4.configure(selectbackground="#c4c4c4")
self.spin4.configure(selectforeground="black")
self.spin4.configure(textvariable=onlinesupport_support.spinbox4)
self.spin3 = tk.Spinbox(self.Frame2_1, from_=1.0, to=5.0)
self.spin3.place(relx=0.544, rely=0.423, relheight=0.054, relwidth=0.146)
self.spin3.configure(activebackground="#f9f9f9")
self.spin3.configure(background="white")
self.spin3.configure(buttonbackground="#a3a5a8")
self.spin3.configure(disabledforeground="#a3a3a3")
self.spin3.configure(foreground="black")
self.spin3.configure(highlightbackground="black")
self.spin3.configure(highlightcolor="black")
self.spin3.configure(insertbackground="black")
self.spin3.configure(selectbackground="#c4c4c4")
self.spin3.configure(selectforeground="black")
self.spin3.configure(textvariable=onlinesupport_support.spinbox3)
self.spin2 = tk.Spinbox(self.Frame2_1, from_=1.0, to=5.0)
self.spin2.place(relx=0.544, rely=0.338, relheight=0.054, relwidth=0.146)
self.spin2.configure(activebackground="#f9f9f9")
self.spin2.configure(background="white")
self.spin2.configure(buttonbackground="#a3a5a8")
self.spin2.configure(disabledforeground="#a3a3a3")
self.spin2.configure(foreground="black")
self.spin2.configure(highlightbackground="black")
self.spin2.configure(highlightcolor="black")
self.spin2.configure(insertbackground="black")
self.spin2.configure(selectbackground="#c4c4c4")
self.spin2.configure(selectforeground="black")
self.spin2.configure(textvariable=onlinesupport_support.spinbox2)
self.Label6_9 = tk.Label(self.Frame2_1)
self.Label6_9.place(relx=0.252, rely=0.592, height=49, width=284)
self.Label6_9.configure(activebackground="#f9f9f9")
self.Label6_9.configure(activeforeground="black")
self.Label6_9.configure(background="#595959")
self.Label6_9.configure(disabledforeground="#a3a3a3")
self.Label6_9.configure(font="-family {Franklin Gothic Demi Cond} -size 36")
self.Label6_9.configure(foreground="#ff9e1f")
self.Label6_9.configure(highlightbackground="#d9d9d9")
self.Label6_9.configure(highlightcolor="black")
self.Label6_9.configure(text='''Write A Review''')
self.txt_review = tk.Entry(self.Frame2_1)
self.txt_review.place(relx=0.078, rely=0.732,height=30, relwidth=0.804)
self.txt_review.configure(background="white")
self.txt_review.configure(disabledforeground="#a3a3a3")
self.txt_review.configure(font="TkFixedFont")
self.txt_review.configure(foreground="#000000")
self.txt_review.configure(highlightbackground="#d9d9d9")
self.txt_review.configure(highlightcolor="black")
self.txt_review.configure(insertbackground="black")
self.txt_review.configure(selectbackground="#c4c4c4")
self.txt_review.configure(selectforeground="black")
self.btn_submit2 = tk.Button(self.Frame2_1)
self.btn_submit2.place(relx=0.408, rely=0.845, height=34, width=80)
self.btn_submit2.configure(activebackground="#ececec")
self.btn_submit2.configure(activeforeground="#000000")
self.btn_submit2.configure(background="#ff350d")
self.btn_submit2.configure(borderwidth="10")
self.btn_submit2.configure(disabledforeground="#a3a3a3")
self.btn_submit2.configure(font="-family {Swatch it} -size 11 -weight bold")
self.btn_submit2.configure(foreground="#000000")
self.btn_submit2.configure(highlightbackground="#d9d9d9")
self.btn_submit2.configure(highlightcolor="black")
self.btn_submit2.configure(pady="0")
self.btn_submit2.configure(text='''Submit''',command= self.submit2s)
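# Footer area: a promotional image (online2.png), contact shortcuts (Facebook, website and phone
# buttons) and the developers list.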
self.Label3 = tk.Label(top)
self.Label3.place(relx=0.034, rely=0.731, height=176, width=304)
self.Label3.configure(activebackground="#f9f9f9")
self.Label3.configure(activeforeground="black")
self.Label3.configure(background="#d9d9d9")
self.Label3.configure(disabledforeground="#a3a3a3")
self.Label3.configure(foreground="#000000")
self.Label3.configure(highlightbackground="#d9d9d9")
self.Label3.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./online2.png")
self._img0 = tk.PhotoImage(file=photo_location)
self.Label3.configure(image=self._img0)
self.Label3.configure(text='''Label''')
self.Frame2_2 = tk.Frame(top)
self.Frame2_2.place(relx=0.336, rely=0.731, relheight=0.224
, relwidth=0.331)
self.Frame2_2.configure(relief='ridge')
self.Frame2_2.configure(borderwidth="10")
self.Frame2_2.configure(relief='ridge')
self.Frame2_2.configure(background="#595959")
self.Frame2_2.configure(highlightbackground="#d9d9d9")
self.Frame2_2.configure(highlightcolor="black")
self.Frame2_2.configure(width=385)
self.Label4 = tk.Label(self.Frame2_2)
self.Label4.place(relx=0.39, rely=0.114, height=23, width=99)
self.Label4.configure(activebackground="#f9f9f9")
self.Label4.configure(activeforeground="black")
self.Label4.configure(background="#595959")
self.Label4.configure(disabledforeground="#a3a3a3")
self.Label4.configure(font="-family {SF Distant Galaxy} -size 13")
self.Label4.configure(foreground="#ffff12")
self.Label4.configure(highlightbackground="#d9d9d9")
self.Label4.configure(highlightcolor="black")
self.Label4.configure(text='''Contact''')
self.btn_facebook = tk.Button(self.Frame2_2)
self.btn_facebook.place(relx=0.104, rely=0.343, height=91, width=96)
self.btn_facebook.configure(activebackground="#ececec")
self.btn_facebook.configure(activeforeground="#000000")
self.btn_facebook.configure(background="#d9d9d9")
self.btn_facebook.configure(borderwidth="10")
self.btn_facebook.configure(disabledforeground="#a3a3a3")
self.btn_facebook.configure(foreground="#000000")
self.btn_facebook.configure(highlightbackground="#d9d9d9")
self.btn_facebook.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./facebook-logo-png-5a35528eaa4f08.7998622015134439826976.png")
self._img1 = tk.PhotoImage(file=photo_location)
self.btn_facebook.configure(image=self._img1)
self.btn_facebook.configure(overrelief="raised")
self.btn_facebook.configure(pady="0")
self.btn_facebook.configure(text='''Button''',command= self.facebooks)
self.btn_website = tk.Button(self.Frame2_2)
self.btn_website.place(relx=0.416, rely=0.343, height=86, width=86)
self.btn_website.configure(activebackground="#ececec")
self.btn_website.configure(activeforeground="#000000")
self.btn_website.configure(background="#d9d9d9")
self.btn_website.configure(borderwidth="10")
self.btn_website.configure(disabledforeground="#a3a3a3")
self.btn_website.configure(foreground="#000000")
self.btn_website.configure(highlightbackground="#d9d9d9")
self.btn_website.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./icon_weebly.com_bot_02e090.png")
self._img2 = tk.PhotoImage(file=photo_location)
self.btn_website.configure(image=self._img2)
self.btn_website.configure(pady="0")
self.btn_website.configure(text='''Button''',command= self.websites)
self.btn_call = tk.Button(self.Frame2_2)
self.btn_call.place(relx=0.701, rely=0.343, height=92, width=91)
self.btn_call.configure(activebackground="#ececec")
self.btn_call.configure(activeforeground="#000000")
self.btn_call.configure(background="#d9d9d9")
self.btn_call.configure(borderwidth="10")
self.btn_call.configure(disabledforeground="#a3a3a3")
self.btn_call.configure(foreground="#000000")
self.btn_call.configure(highlightbackground="#d9d9d9")
self.btn_call.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./2019-04-25 14_03_03-Downloads.png")
self._img3 = tk.PhotoImage(file=photo_location)
self.btn_call.configure(image=self._img3)
self.btn_call.configure(pady="0")
self.btn_call.configure(text='''Button''',command= self.calls)
self.Frame2_3 = tk.Frame(top)
self.Frame2_3.place(relx=0.68, rely=0.731, relheight=0.25
, relwidth=0.297)
self.Frame2_3.configure(relief='ridge')
self.Frame2_3.configure(borderwidth="10")
self.Frame2_3.configure(relief='ridge')
self.Frame2_3.configure(background="#595959")
self.Frame2_3.configure(highlightbackground="#d9d9d9")
self.Frame2_3.configure(highlightcolor="black")
self.Frame2_3.configure(width=345)
self.Label5 = tk.Label(self.Frame2_3)
self.Label5.place(relx=0.232, rely=0.051, height=32, width=172)
self.Label5.configure(activebackground="#f9f9f9")
self.Label5.configure(activeforeground="black")
self.Label5.configure(background="#595959")
self.Label5.configure(disabledforeground="#a3a3a3")
self.Label5.configure(font="-family {Rockwell} -size 17 -slant italic")
self.Label5.configure(foreground="#1fff0f")
self.Label5.configure(highlightbackground="#d9d9d9")
self.Label5.configure(highlightcolor="black")
self.Label5.configure(text='''::: Developers :::''')
self.Label5_3 = tk.Label(self.Frame2_3)
self.Label5_3.place(relx=0.261, rely=0.205, height=32, width=152)
self.Label5_3.configure(activebackground="#f9f9f9")
self.Label5_3.configure(activeforeground="black")
self.Label5_3.configure(background="#595959")
self.Label5_3.configure(disabledforeground="#a3a3a3")
self.Label5_3.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_3.configure(foreground="aqua")
self.Label5_3.configure(highlightbackground="#d9d9d9")
self.Label5_3.configure(highlightcolor="black")
self.Label5_3.configure(text='''Pranesh Kulkarni''')
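# Note: the developer-name labels below all reuse the attribute name self.Label5_5, so only the
# last one stays referenced on the instance; the widgets themselves still display correctly.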
self.Label5_5 = tk.Label(self.Frame2_3)
self.Label5_5.place(relx=0.261, rely=0.564, height=22, width=152)
self.Label5_5.configure(activebackground="#f9f9f9")
self.Label5_5.configure(activeforeground="black")
self.Label5_5.configure(background="#595959")
self.Label5_5.configure(disabledforeground="#a3a3a3")
self.Label5_5.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_5.configure(foreground="aqua")
self.Label5_5.configure(highlightbackground="#d9d9d9")
self.Label5_5.configure(highlightcolor="black")
self.Label5_5.configure(text='''Prathamesh Dhasade''')
self.Label5_5 = tk.Label(self.Frame2_3)
self.Label5_5.place(relx=0.261, rely=0.667, height=22, width=152)
self.Label5_5.configure(activebackground="#f9f9f9")
self.Label5_5.configure(activeforeground="black")
self.Label5_5.configure(background="#595959")
self.Label5_5.configure(disabledforeground="#a3a3a3")
self.Label5_5.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_5.configure(foreground="aqua")
self.Label5_5.configure(highlightbackground="#d9d9d9")
self.Label5_5.configure(highlightcolor="black")
self.Label5_5.configure(text='''Anjali Gayakwad''')
self.Label5_5 = tk.Label(self.Frame2_3)
self.Label5_5.place(relx=0.261, rely=0.769, height=32, width=152)
self.Label5_5.configure(activebackground="#f9f9f9")
self.Label5_5.configure(activeforeground="black")
self.Label5_5.configure(background="#595959")
self.Label5_5.configure(disabledforeground="#a3a3a3")
self.Label5_5.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_5.configure(foreground="aqua")
self.Label5_5.configure(highlightbackground="#d9d9d9")
self.Label5_5.configure(highlightcolor="black")
self.Label5_5.configure(text='''Shraddha Kshirsagar''')
self.Label5_5 = tk.Label(self.Frame2_3)
self.Label5_5.place(relx=0.261, rely=0.462, height=22, width=152)
self.Label5_5.configure(activebackground="#f9f9f9")
self.Label5_5.configure(activeforeground="black")
self.Label5_5.configure(background="#595959")
self.Label5_5.configure(disabledforeground="#a3a3a3")
self.Label5_5.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_5.configure(foreground="aqua")
self.Label5_5.configure(highlightbackground="#d9d9d9")
self.Label5_5.configure(highlightcolor="black")
self.Label5_5.configure(text='''Ankita Chirame''')
self.Label5_5 = tk.Label(self.Frame2_3)
self.Label5_5.place(relx=0.261, rely=0.359, height=22, width=152)
self.Label5_5.configure(activebackground="#f9f9f9")
self.Label5_5.configure(activeforeground="black")
self.Label5_5.configure(background="#595959")
self.Label5_5.configure(disabledforeground="#a3a3a3")
self.Label5_5.configure(font="-family {Rockwell} -size 11 -slant italic")
self.Label5_5.configure(foreground="aqua")
self.Label5_5.configure(highlightbackground="#d9d9d9")
self.Label5_5.configure(highlightcolor="black")
self.Label5_5.configure(text='''Anamika Rathod''')
self.btn_submit2_11 = tk.Button(top)
self.btn_submit2_11.place(relx=0.869, rely=0.115, height=44, width=90)
self.btn_submit2_11.configure(activebackground="#ececec")
self.btn_submit2_11.configure(activeforeground="#000000")
self.btn_submit2_11.configure(background="#3045ff")
self.btn_submit2_11.configure(borderwidth="10")
self.btn_submit2_11.configure(disabledforeground="#a3a3a3")
self.btn_submit2_11.configure(font="-family {Swatch it} -size 11 -weight bold")
self.btn_submit2_11.configure(foreground="#000000")
self.btn_submit2_11.configure(highlightbackground="#d9d9d9")
self.btn_submit2_11.configure(highlightcolor="black",command= self.exits)
self.btn_submit2_11.configure(pady="0")
self.btn_submit2_11.configure(text='''Exit''')
self.btn_back = tk.Button(top)
self.btn_back.place(relx=0.869, rely=0.051, height=44, width=90)
self.btn_back.configure(activebackground="#ececec")
self.btn_back.configure(activeforeground="#000000")
self.btn_back.configure(background="#2ba30d")
self.btn_back.configure(borderwidth="10")
self.btn_back.configure(disabledforeground="#a3a3a3")
self.btn_back.configure(font="-family {Swatch it} -size 11 -weight bold")
self.btn_back.configure(foreground="#000000")
self.btn_back.configure(highlightbackground="#d9d9d9")
self.btn_back.configure(highlightcolor="black")
self.btn_back.configure(pady="0")
self.btn_back.configure(text='''Back''',command = self.backs)
if __name__ == '__main__':
vp_start_gui()
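# update1() wraps the "eTAX 2019 PERMISSION" window: the user fills in product and village details
# and asks the administrator, by email, for permission to modify the database
# (modificationmainpage_support provides the init hook).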
def update1():
import sys
import tkinter
from tkinter import messagebox
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import modificationmainpage_support
import os.path
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
sys.stdout.flush()
root = tk.Tk()
top = e_TAX_2019 (root)
modificationmainpage_support.init(root, top)
root.mainloop()
w = None
def create_e_TAX_2019(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
rt = root
w = tk.Toplevel (root)
top = e_TAX_2019 (w)
modificationmainpage_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_e_TAX_2019():
global w
w.destroy()
w = None
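# access1() repeats the same boilerplate (imports plus vp_start_gui/create/destroy helpers) for the
# permission window, but wires it to permit_support instead of modificationmainpage_support.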
def access1():
import sys
import tkinter
import time
from tkinter import messagebox
import smtplib
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import permit_support
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
root = tk.Tk()
top = e_TAX_2019 (root)
permit_support.init(root, top)
root.mainloop()
w = None
def create_e_TAX_2019(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
rt = root
w = tk.Toplevel (root)
top = e_TAX_2019 (w)
permit_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_e_TAX_2019():
global w
w.destroy()
w = None
class e_TAX_2019:
def exits(self):
msg=tkinter.messagebox.askyesno("e-TAX 2019","Do You Want To EXIT ?")
if msg :
exit()
def backs(self):
root.destroy()
update1()
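# submits(): send the "ask for permission" request (product key, village, username, number of
# entries and reason) to the administrator by email, then close the window.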
def submits(self):
# Read the permission-request form fields.
p = str(self.txt_productkey.get())
v = str(self.txt_village.get())
u = str(self.txt_username.get())
n = str(self.txt_noofentries.get())
r = str(self.txt_reason.get())
t = str(time.asctime(time.localtime(time.time())))
mail_body = ("Hi, there is a new request for updating the e-TAX 2019 database.\n\n\nHere are the DETAILS:\n\n\nProduct key : " + p + "\nVillage name : " + v + "\nUsername : " + u + "\nNo. of entries to modify : " + n + "\nReason to modify data : " + r + "\n\n\n\n\nRequested on : " + t)
# Send the request to the administrator via Gmail SMTP.
mail = smtplib.SMTP('smtp.gmail.com', 587)
mail.ehlo()
mail.starttls()
mail.login('etaxsupp2019@gmail.com', 'Pass@123')
mail.sendmail('etaxsupp2019@gmail.com', 'kulkarnipranesh1767@gmail.com', mail_body)
mail.close()
messagebox.showinfo("e-TAX 2019", "Request for updating data sent. Wait for confirmation from the administrator. After receiving the confirmation email you can modify the database through the respective modules.")
root.destroy()
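# A minimal optional sketch (not called anywhere in this file) of the same smtplib flow with the
# credentials read from environment variables instead of being hard-coded; the variable names
# ETAX_MAIL_USER and ETAX_MAIL_PASS are assumptions, not part of the original application.
def _send_admin_mail_sketch(self, body):
    import os
    import smtplib
    # Fall back to the account used elsewhere in this file if no override is set.
    sender = os.environ.get('ETAX_MAIL_USER', 'etaxsupp2019@gmail.com')
    password = os.environ.get('ETAX_MAIL_PASS', '')
    recipient = 'kulkarnipranesh1767@gmail.com'
    mail = smtplib.SMTP('smtp.gmail.com', 587)
    mail.ehlo()
    mail.starttls()
    mail.login(sender, password)
    mail.sendmail(sender, recipient, body)
    mail.quit()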
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
font12 = "-family {GoudyHandtooled BT} -size 22"
font15 = "-family {System} -size 10 -weight bold"
font16 = "-family {Segoe UI} -size 12"
top.geometry("990x650+273+127")
top.title("e-TAX 2019")
top.configure(background="#727272")
top.configure(highlightbackground="#d9d9d9")
top.configure(highlightcolor="black")
self.Frame1 = tk.Frame(top)
self.Frame1.place(relx=0.141, rely=0.046, relheight=0.223
, relwidth=0.712)
self.Frame1.configure(relief='ridge')
self.Frame1.configure(borderwidth="10")
self.Frame1.configure(relief='ridge')
self.Frame1.configure(background="#595959")
self.Frame1.configure(highlightbackground="#d9d9d9")
self.Frame1.configure(highlightcolor="black")
self.Frame1.configure(width=705)
self.Label1 = tk.Label(self.Frame1)
self.Label1.place(relx=0.043, rely=0.207, height=59, width=196)
self.Label1.configure(activebackground="#f9f9f9")
self.Label1.configure(activeforeground="black")
self.Label1.configure(background="#595959")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1.configure(foreground="#0d4dff")
self.Label1.configure(highlightbackground="#d9d9d9")
self.Label1.configure(highlightcolor="black")
self.Label1.configure(text='''eTAX''')
self.Label1_1 = tk.Label(self.Frame1)
self.Label1_1.place(relx=0.298, rely=0.207, height=59, width=146)
self.Label1_1.configure(activebackground="#f9f9f9")
self.Label1_1.configure(activeforeground="black")
self.Label1_1.configure(background="#595959")
self.Label1_1.configure(disabledforeground="#a3a3a3")
self.Label1_1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1_1.configure(foreground="#ff2b0a")
self.Label1_1.configure(highlightbackground="#d9d9d9")
self.Label1_1.configure(highlightcolor="black")
self.Label1_1.configure(text='''2019''')
self.Label2 = tk.Label(self.Frame1)
self.Label2.place(relx=0.156, rely=0.621, height=31, width=184)
self.Label2.configure(activebackground="#f9f9f9")
self.Label2.configure(activeforeground="black")
self.Label2.configure(background="#595959")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font="-family {Rage Italic} -size 19 -slant italic")
self.Label2.configure(foreground="#f7ff0a")
self.Label2.configure(highlightbackground="#d9d9d9")
self.Label2.configure(highlightcolor="black")
self.Label2.configure(text='''working for you''')
self.Label1_2 = tk.Label(self.Frame1)
self.Label1_2.place(relx=0.525, rely=0.207, height=79, width=306)
self.Label1_2.configure(activebackground="#f9f9f9")
self.Label1_2.configure(activeforeground="black")
self.Label1_2.configure(background="#595959")
self.Label1_2.configure(disabledforeground="#a3a3a3")
self.Label1_2.configure(font="-family {Rockwell Extra} -size 28 -weight bold")
self.Label1_2.configure(foreground="#5faa14")
self.Label1_2.configure(highlightbackground="#d9d9d9")
self.Label1_2.configure(highlightcolor="black")
self.Label1_2.configure(text='''PERMISSION''')
self.Label1_2.configure(width=306)
self.Frame1_1 = tk.Frame(top)
self.Frame1_1.place(relx=0.141, rely=0.308, relheight=0.577
, relwidth=0.712)
self.Frame1_1.configure(relief='ridge')
self.Frame1_1.configure(borderwidth="10")
self.Frame1_1.configure(relief='ridge')
self.Frame1_1.configure(background="#595959")
self.Frame1_1.configure(highlightbackground="#d9d9d9")
self.Frame1_1.configure(highlightcolor="black")
self.Frame1_1.configure(width=705)
self.Label3 = tk.Label(self.Frame1_1)
self.Label3.place(relx=0.156, rely=0.08, height=41, width=446)
self.Label3.configure(background="#595959")
self.Label3.configure(disabledforeground="#a3a3a3")
self.Label3.configure(font=font12)
self.Label3.configure(foreground="#fbf2ff")
self.Label3.configure(text='''FILL YOUR PRODUCT DETAILS''')
self.Label4 = tk.Label(self.Frame1_1)
self.Label4.place(relx=0.142, rely=0.24, height=22, width=102)
self.Label4.configure(background="#595959")
self.Label4.configure(disabledforeground="#a3a3a3")
self.Label4.configure(font=font15)
self.Label4.configure(foreground="AQUA")
self.Label4.configure(text='''Product Key :''')
self.Label4.configure(width=102)
self.Label4_5 = tk.Label(self.Frame1_1)
self.Label4_5.place(relx=0.142, rely=0.32, height=22, width=102)
self.Label4_5.configure(activebackground="#f9f9f9")
self.Label4_5.configure(activeforeground="black")
self.Label4_5.configure(background="#595959")
self.Label4_5.configure(disabledforeground="#a3a3a3")
self.Label4_5.configure(font="-family {System} -size 10 -weight bold")
self.Label4_5.configure(foreground="AQUA")
self.Label4_5.configure(highlightbackground="#d9d9d9")
self.Label4_5.configure(highlightcolor="black")
self.Label4_5.configure(text='''Village :''')
self.Label4_6 = tk.Label(self.Frame1_1)
self.Label4_6.place(relx=0.142, rely=0.4, height=22, width=102)
self.Label4_6.configure(activebackground="#f9f9f9")
self.Label4_6.configure(activeforeground="black")
self.Label4_6.configure(background="#595959")
self.Label4_6.configure(disabledforeground="#a3a3a3")
self.Label4_6.configure(font="-family {System} -size 10 -weight bold")
self.Label4_6.configure(foreground="AQUA")
self.Label4_6.configure(highlightbackground="#d9d9d9")
self.Label4_6.configure(highlightcolor="black")
self.Label4_6.configure(text='''Username :''')
self.Label4_7 = tk.Label(self.Frame1_1)
self.Label4_7.place(relx=0.085, rely=0.48, height=22, width=202)
self.Label4_7.configure(activebackground="#f9f9f9")
self.Label4_7.configure(activeforeground="black")
self.Label4_7.configure(background="#595959")
self.Label4_7.configure(disabledforeground="#a3a3a3")
self.Label4_7.configure(font="-family {System} -size 10 -weight bold")
self.Label4_7.configure(foreground="AQUA")
self.Label4_7.configure(highlightbackground="#d9d9d9")
self.Label4_7.configure(highlightcolor="black")
self.Label4_7.configure(text='''No. of entries to update :''')
self.Label4_7.configure(width=202)
self.Label4_8 = tk.Label(self.Frame1_1)
self.Label4_8.place(relx=0.099, rely=0.56, height=22, width=172)
self.Label4_8.configure(activebackground="#f9f9f9")
self.Label4_8.configure(activeforeground="black")
self.Label4_8.configure(background="#595959")
self.Label4_8.configure(disabledforeground="#a3a3a3")
self.Label4_8.configure(font="-family {System} -size 10 -weight bold")
self.Label4_8.configure(foreground="AQUA")
self.Label4_8.configure(highlightbackground="#d9d9d9")
self.Label4_8.configure(highlightcolor="black")
self.Label4_8.configure(text='''Reason to modify data :''')
self.Label4_8.configure(width=172)
self.txt_productkey = tk.Entry(self.Frame1_1)
self.txt_productkey.place(relx=0.397, rely=0.24, height=20
, relwidth=0.445)
self.txt_productkey.configure(background="white")
self.txt_productkey.configure(disabledforeground="#a3a3a3")
self.txt_productkey.configure(font="TkFixedFont")
self.txt_productkey.configure(foreground="#000000")
self.txt_productkey.configure(insertbackground="black")
self.txt_productkey.configure(width=314)
self.txt_reason = tk.Entry(self.Frame1_1)
self.txt_reason.place(relx=0.397, rely=0.56,height=20, relwidth=0.445)
self.txt_reason.configure(background="white")
self.txt_reason.configure(disabledforeground="#a3a3a3")
self.txt_reason.configure(font="TkFixedFont")
self.txt_reason.configure(foreground="#000000")
self.txt_reason.configure(highlightbackground="#d9d9d9")
self.txt_reason.configure(highlightcolor="black")
self.txt_reason.configure(insertbackground="black")
self.txt_reason.configure(selectbackground="#c4c4c4")
self.txt_reason.configure(selectforeground="black")
self.txt_noofentries = tk.Entry(self.Frame1_1)
self.txt_noofentries.place(relx=0.397, rely=0.48, height=20
, relwidth=0.445)
self.txt_noofentries.configure(background="white")
self.txt_noofentries.configure(disabledforeground="#a3a3a3")
self.txt_noofentries.configure(font="TkFixedFont")
self.txt_noofentries.configure(foreground="#000000")
self.txt_noofentries.configure(highlightbackground="#d9d9d9")
self.txt_noofentries.configure(highlightcolor="black")
self.txt_noofentries.configure(insertbackground="black")
self.txt_noofentries.configure(selectbackground="#c4c4c4")
self.txt_noofentries.configure(selectforeground="black")
self.txt_username = tk.Entry(self.Frame1_1)
self.txt_username.place(relx=0.397, rely=0.4,height=20, relwidth=0.445)
self.txt_username.configure(background="white")
self.txt_username.configure(disabledforeground="#a3a3a3")
self.txt_username.configure(font="TkFixedFont")
self.txt_username.configure(foreground="#000000")
self.txt_username.configure(highlightbackground="#d9d9d9")
self.txt_username.configure(highlightcolor="black")
self.txt_username.configure(insertbackground="black")
self.txt_username.configure(selectbackground="#c4c4c4")
self.txt_username.configure(selectforeground="black")
self.txt_village = tk.Entry(self.Frame1_1)
self.txt_village.place(relx=0.397, rely=0.32,height=20, relwidth=0.445)
self.txt_village.configure(background="white")
self.txt_village.configure(disabledforeground="#a3a3a3")
self.txt_village.configure(font="TkFixedFont")
self.txt_village.configure(foreground="#000000")
self.txt_village.configure(highlightbackground="#d9d9d9")
self.txt_village.configure(highlightcolor="black")
self.txt_village.configure(insertbackground="black")
self.txt_village.configure(selectbackground="#c4c4c4")
self.txt_village.configure(selectforeground="black")
self.btn_submit = tk.Button(self.Frame1_1)
self.btn_submit.place(relx=0.454, rely=0.667, height=44, width=227)
self.btn_submit.configure(activebackground="#ececec")
self.btn_submit.configure(activeforeground="#000000")
self.btn_submit.configure(background="#3028a0")
self.btn_submit.configure(borderwidth="10")
self.btn_submit.configure(disabledforeground="#a3a3a3")
self.btn_submit.configure(font=font16)
self.btn_submit.configure(foreground="#ffffff")
self.btn_submit.configure(highlightbackground="#d9d9d9")
self.btn_submit.configure(highlightcolor="black")
self.btn_submit.configure(pady="0")
self.btn_submit.configure(text='''Ask for permission''',command = self.submits)
self.btn_submit.configure(width=227)
self.btn_exit = tk.Button(top)
self.btn_exit.place(relx=0.879, rely=0.169, height=44, width=107)
self.btn_exit.configure(activebackground="#ececec")
self.btn_exit.configure(activeforeground="#000000")
self.btn_exit.configure(background="#ff002b")
self.btn_exit.configure(borderwidth="10")
self.btn_exit.configure(disabledforeground="#a3a3a3")
self.btn_exit.configure(font="-family {Segoe UI} -size 12")
self.btn_exit.configure(foreground="#ffffff")
self.btn_exit.configure(highlightbackground="#d9d9d9")
self.btn_exit.configure(highlightcolor="black")
self.btn_exit.configure(pady="0")
self.btn_exit.configure(text='''EXIT''',command = self.exits)
self.btn_exit.configure(width=107)
self.btn_back = tk.Button(top)
self.btn_back.place(relx=0.879, rely=0.077, height=44, width=107)
self.btn_back.configure(activebackground="#ececec")
self.btn_back.configure(activeforeground="#000000")
self.btn_back.configure(background="#39a324")
self.btn_back.configure(borderwidth="10")
self.btn_back.configure(disabledforeground="#a3a3a3")
self.btn_back.configure(font="-family {Segoe UI} -size 12")
self.btn_back.configure(foreground="#ffffff")
self.btn_back.configure(highlightbackground="#d9d9d9")
self.btn_back.configure(highlightcolor="black")
self.btn_back.configure(pady="0")
self.btn_back.configure(text='''BACK''',command = self.backs)
if __name__ == '__main__':
vp_start_gui()
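# modify1() wraps the "DATA MODIFICATION" window (class Toplevel1 below): the corrected tax record
# is inserted into the 'updateddata' MySQL table through DBConnect and the administrator is
# notified by email.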
def modify1():
import sys
import tkinter
from tkinter import messagebox
import mysql.connector
from dbConnect import DBConnect
import time
import smtplib
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import modifymod_support
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
root = tk.Tk()
top = Toplevel1 (root)
modifymod_support.init(root, top)
root.mainloop()
w = None
def create_Toplevel1(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
rt = root
w = tk.Toplevel (root)
top = Toplevel1 (w)
modifymod_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_Toplevel1():
global w
w.destroy()
w = None
class Toplevel1:
def exits(self):
msg = tkinter.messagebox.askyesno('etax-2019', 'Do You Want To Exit ?')
if msg:
exit()
def backs(self):
root.destroy()
update1()
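# submit4(): read both forms, compute the outstanding balance (total due minus total paid), store
# the request in the 'updateddata' table and email the details to the administrator.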
def submit4(self):
# Read the original tax record fields from the left-hand form.
a = str(self.txt_idnumber.get())
b = str(self.txt_name.get())
c = str(self.txt_meternumber.get())
d = str(self.txt_wardnumber.get())
e = str(self.txt_house.get())
f = str(self.txt_health.get())
g = str(self.txt_light.get())
h = str(self.txt_water.get())
i = str(self.txt_total.get())
# Read the receipt details and paid amounts from the right-hand form.
j = str(self.txt_reciept.get())
k = str(self.txt_housepaid.get())
l = str(self.txt_healthpaid.get())
m = str(self.txt_lightpaid.get())
n = str(self.txt_waterpaid.get())
o = str(self.txt_totalpaid.get())
v = str(self.txt_village.get())
localtime = str(time.asctime(time.localtime(time.time())))
# Outstanding balance = total due minus total paid.
t = int(i)
u = int(o)
p = str(t - u)
database = DBConnect(host='localhost', user='root', password='Pass@123', database='etax2019')
new_user = {'village': v, 'idnumber': a, 'meternumber': c, 'wardnumber': d, 'name': b, 'housetax': e, 'healthtax': f, 'lighttax': g, 'watertax': h, 'total': i, 'reciptnumber': j, 'housetaxpaid': k, 'healthtaxpaid': l, 'lighttaxpaid': m, 'watertaxpaid': n, 'totalpaid': o, 'rest': p}
database.insert(new_user, 'updateddata')
database.commit()
# Notify the administrator about the modification request by email.
content1 = "WARNING !!!!!! \n\n\nHi, there is a new request to modify the eTAX 2019 database.\nHere are the details:\nVillage Name : " + v + "\n\n"
content2 = "\nReceipt Number : " + j + "\nName : " + b + "\nID Number : " + a + "\nMeter Number : " + c + "\nWard Number : " + d + "\nHouse Tax : " + e + "\nHealth Tax : " + f + "\nLight Tax : " + g + "\nWater Tax : " + h + "\nTotal amount of tax to be paid : " + i + "\nPaid House Tax : " + k + "\nPaid Health Tax : " + l + "\nPaid Light Tax : " + m + "\nPaid Water Tax : " + n + "\nTotal Tax paid : " + o
mail_body = content1 + content2
mail = smtplib.SMTP('smtp.gmail.com', 587)
mail.ehlo()
mail.starttls()
mail.login('etaxsupp2019@gmail.com', 'Pass@123')
mail.sendmail('etaxsupp2019@gmail.com', 'kulkarnipranesh1767@gmail.com', mail_body)
mail.close()
messagebox.showinfo("etax-2019","Data Deleted Successfully")
root.destroy()
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
top.geometry("1043x729+179+114")
top.title("New Toplevel")
top.configure(background="#515154")
top.configure(highlightbackground="#d9d9d9")
top.configure(highlightcolor="black")
self.Label1 = tk.Label(top)
self.Label1.place(relx=0.019, rely=0.027, height=81, width=156)
self.Label1.configure(activebackground="#f9f9f9")
self.Label1.configure(activeforeground="black")
self.Label1.configure(background="#515154")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font="-family {Britannic Bold} -size 48 -weight bold")
self.Label1.configure(foreground="#ff250d")
self.Label1.configure(highlightbackground="#d9d9d9")
self.Label1.configure(highlightcolor="black")
self.Label1.configure(text='''eTAX''')
self.Label1_1 = tk.Label(top)
self.Label1_1.place(relx=0.173, rely=0.027, height=81, width=156)
self.Label1_1.configure(activebackground="#f9f9f9")
self.Label1_1.configure(activeforeground="black")
self.Label1_1.configure(background="#515154")
self.Label1_1.configure(disabledforeground="#a3a3a3")
self.Label1_1.configure(font="-family {Britannic Bold} -size 48 -weight bold")
self.Label1_1.configure(foreground="#2212ff")
self.Label1_1.configure(highlightbackground="#d9d9d9")
self.Label1_1.configure(highlightcolor="black")
self.Label1_1.configure(text='''2019''')
self.Label2 = tk.Label(top)
self.Label2.place(relx=0.105, rely=0.123, height=31, width=141)
self.Label2.configure(activebackground="#f9f9f9")
self.Label2.configure(activeforeground="black")
self.Label2.configure(background="#515154")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font="-family {Segoe Script} -size 12 -slant italic")
self.Label2.configure(foreground="#f7ff0d")
self.Label2.configure(highlightbackground="#d9d9d9")
self.Label2.configure(highlightcolor="black")
self.Label2.configure(text='''working for you''')
self.backbutton = tk.Button(top)
self.backbutton.place(relx=0.058, rely=0.192, height=44, width=97)
self.backbutton.configure(activebackground="#ececec")
self.backbutton.configure(activeforeground="#000000")
self.backbutton.configure(background="#120bd8")
self.backbutton.configure(borderwidth="10")
self.backbutton.configure(disabledforeground="#a3a3a3")
self.backbutton.configure(font="-family {Rockwell Extra Bold} -size 12 -weight bold")
self.backbutton.configure(foreground="#fcffff")
self.backbutton.configure(highlightbackground="#d9d9d9")
self.backbutton.configure(highlightcolor="black")
self.backbutton.configure(pady="0")
self.backbutton.configure(text='''Back''',command = self.backs)
self.exit = tk.Button(top)
self.exit.place(relx=0.163, rely=0.192, height=44, width=97)
self.exit.configure(activebackground="#ececec")
self.exit.configure(activeforeground="#000000")
self.exit.configure(background="#120bd8")
self.exit.configure(borderwidth="10")
self.exit.configure(disabledforeground="#a3a3a3")
self.exit.configure(font="-family {Rockwell Extra Bold} -size 12 -weight bold")
self.exit.configure(foreground="#fcffff")
self.exit.configure(highlightbackground="#d9d9d9")
self.exit.configure(highlightcolor="black")
self.exit.configure(pady="0")
self.exit.configure(text='''Exit''',command = self.exits)
self.Label4 = tk.Label(top)
self.Label4.place(relx=0.326, rely=0.041, height=68, width=699)
self.Label4.configure(activebackground="#f9f9f9")
self.Label4.configure(activeforeground="black")
self.Label4.configure(background="#515154")
self.Label4.configure(disabledforeground="#36911a")
self.Label4.configure(font="-family {Rockwell Extra Bold} -size 40 -weight bold")
self.Label4.configure(foreground="#fff71c")
self.Label4.configure(highlightbackground="#d9d9d9")
self.Label4.configure(highlightcolor="black")
self.Label4.configure(text='''DATA MODIFICATION''')
self.Label4.configure(width=699)
self.Frame1 = tk.Frame(top)
self.Frame1.place(relx=0.24, rely=0.274, relheight=0.569, relwidth=0.292)
self.Frame1.configure(relief='ridge')
self.Frame1.configure(borderwidth="10")
self.Frame1.configure(relief='ridge')
self.Frame1.configure(background="#50e82a")
self.Frame1.configure(highlightbackground="#d9d9d9")
self.Frame1.configure(highlightcolor="black")
self.Frame1.configure(width=305)
self.txt_idnumber = tk.Entry(self.Frame1)
self.txt_idnumber.place(relx=0.426, rely=0.096, height=20
, relwidth=0.538)
self.txt_idnumber.configure(background="white")
self.txt_idnumber.configure(disabledforeground="#a3a3a3")
self.txt_idnumber.configure(font="TkFixedFont")
self.txt_idnumber.configure(foreground="#000000")
self.txt_idnumber.configure(highlightbackground="#d9d9d9")
self.txt_idnumber.configure(highlightcolor="black")
self.txt_idnumber.configure(insertbackground="black")
self.txt_idnumber.configure(selectbackground="#c4c4c4")
self.txt_idnumber.configure(selectforeground="black")
self.Label7_1 = tk.Label(self.Frame1)
self.Label7_1.place(relx=0.033, rely=0.169, height=39, width=106)
self.Label7_1.configure(activebackground="#f9f9f9")
self.Label7_1.configure(activeforeground="black")
self.Label7_1.configure(background="#50e82a")
self.Label7_1.configure(disabledforeground="#a3a3a3")
self.Label7_1.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_1.configure(foreground="#000000")
self.Label7_1.configure(highlightbackground="#d9d9d9")
self.Label7_1.configure(highlightcolor="black")
self.Label7_1.configure(text='''Name :''')
self.Label7_2 = tk.Label(self.Frame1)
self.Label7_2.place(relx=0.033, rely=0.289, height=29, width=116)
self.Label7_2.configure(activebackground="#f9f9f9")
self.Label7_2.configure(activeforeground="black")
self.Label7_2.configure(background="#50e82a")
self.Label7_2.configure(disabledforeground="#a3a3a3")
self.Label7_2.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_2.configure(foreground="#000000")
self.Label7_2.configure(highlightbackground="#d9d9d9")
self.Label7_2.configure(highlightcolor="black")
self.Label7_2.configure(text='''Meter Number :''')
self.Label7_3 = tk.Label(self.Frame1)
self.Label7_3.place(relx=0.033, rely=0.361, height=39, width=116)
self.Label7_3.configure(activebackground="#f9f9f9")
self.Label7_3.configure(activeforeground="black")
self.Label7_3.configure(background="#50e82a")
self.Label7_3.configure(disabledforeground="#a3a3a3")
self.Label7_3.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_3.configure(foreground="#000000")
self.Label7_3.configure(highlightbackground="#d9d9d9")
self.Label7_3.configure(highlightcolor="black")
self.Label7_3.configure(text='''Ward Number :''')
self.Label7_4 = tk.Label(self.Frame1)
self.Label7_4.place(relx=0.033, rely=0.458, height=39, width=106)
self.Label7_4.configure(activebackground="#f9f9f9")
self.Label7_4.configure(activeforeground="black")
self.Label7_4.configure(background="#50e82a")
self.Label7_4.configure(disabledforeground="#a3a3a3")
self.Label7_4.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_4.configure(foreground="#000000")
self.Label7_4.configure(highlightbackground="#d9d9d9")
self.Label7_4.configure(highlightcolor="black")
self.Label7_4.configure(text='''House Tax :''')
self.Label7_5 = tk.Label(self.Frame1)
self.Label7_5.place(relx=0.033, rely=0.554, height=39, width=106)
self.Label7_5.configure(activebackground="#f9f9f9")
self.Label7_5.configure(activeforeground="black")
self.Label7_5.configure(background="#50e82a")
self.Label7_5.configure(disabledforeground="#a3a3a3")
self.Label7_5.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_5.configure(foreground="#000000")
self.Label7_5.configure(highlightbackground="#d9d9d9")
self.Label7_5.configure(highlightcolor="black")
self.Label7_5.configure(text='''Health Tax :''')
self.Label7_6 = tk.Label(self.Frame1)
self.Label7_6.place(relx=0.033, rely=0.651, height=39, width=106)
self.Label7_6.configure(activebackground="#f9f9f9")
self.Label7_6.configure(activeforeground="black")
self.Label7_6.configure(background="#50e82a")
self.Label7_6.configure(disabledforeground="#a3a3a3")
self.Label7_6.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_6.configure(foreground="#000000")
self.Label7_6.configure(highlightbackground="#d9d9d9")
self.Label7_6.configure(highlightcolor="black")
self.Label7_6.configure(text='''Light Tax :''')
self.Label7_7 = tk.Label(self.Frame1)
self.Label7_7.place(relx=0.033, rely=0.747, height=39, width=106)
self.Label7_7.configure(activebackground="#f9f9f9")
self.Label7_7.configure(activeforeground="black")
self.Label7_7.configure(background="#50e82a")
self.Label7_7.configure(disabledforeground="#a3a3a3")
self.Label7_7.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_7.configure(foreground="#000000")
self.Label7_7.configure(highlightbackground="#d9d9d9")
self.Label7_7.configure(highlightcolor="black")
self.Label7_7.configure(text='''Water Tax :''')
self.Label7_8 = tk.Label(self.Frame1)
self.Label7_8.place(relx=0.033, rely=0.843, height=39, width=106)
self.Label7_8.configure(activebackground="#f9f9f9")
self.Label7_8.configure(activeforeground="black")
self.Label7_8.configure(background="#50e82a")
self.Label7_8.configure(disabledforeground="#a3a3a3")
self.Label7_8.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_8.configure(foreground="#000000")
self.Label7_8.configure(highlightbackground="#d9d9d9")
self.Label7_8.configure(highlightcolor="black")
self.Label7_8.configure(text='''Total :''')
self.txt_name = tk.Entry(self.Frame1)
self.txt_name.place(relx=0.426, rely=0.193,height=20, relwidth=0.538)
self.txt_name.configure(background="white")
self.txt_name.configure(disabledforeground="#a3a3a3")
self.txt_name.configure(font="TkFixedFont")
self.txt_name.configure(foreground="#000000")
self.txt_name.configure(highlightbackground="#d9d9d9")
self.txt_name.configure(highlightcolor="black")
self.txt_name.configure(insertbackground="black")
self.txt_name.configure(selectbackground="#c4c4c4")
self.txt_name.configure(selectforeground="black")
self.txt_meternumber = tk.Entry(self.Frame1)
self.txt_meternumber.place(relx=0.426, rely=0.289, height=20
, relwidth=0.505)
self.txt_meternumber.configure(background="white")
self.txt_meternumber.configure(disabledforeground="#a3a3a3")
self.txt_meternumber.configure(font="TkFixedFont")
self.txt_meternumber.configure(foreground="#000000")
self.txt_meternumber.configure(highlightbackground="#d9d9d9")
self.txt_meternumber.configure(highlightcolor="black")
self.txt_meternumber.configure(insertbackground="black")
self.txt_meternumber.configure(selectbackground="#c4c4c4")
self.txt_meternumber.configure(selectforeground="black")
self.txt_wardnumber = tk.Entry(self.Frame1)
self.txt_wardnumber.place(relx=0.426, rely=0.361, height=20
, relwidth=0.505)
self.txt_wardnumber.configure(background="white")
self.txt_wardnumber.configure(disabledforeground="#a3a3a3")
self.txt_wardnumber.configure(font="TkFixedFont")
self.txt_wardnumber.configure(foreground="#000000")
self.txt_wardnumber.configure(highlightbackground="#d9d9d9")
self.txt_wardnumber.configure(highlightcolor="black")
self.txt_wardnumber.configure(insertbackground="black")
self.txt_wardnumber.configure(selectbackground="#c4c4c4")
self.txt_wardnumber.configure(selectforeground="black")
self.txt_house = tk.Entry(self.Frame1)
self.txt_house.place(relx=0.393, rely=0.482,height=20, relwidth=0.538)
self.txt_house.configure(background="white")
self.txt_house.configure(disabledforeground="#a3a3a3")
self.txt_house.configure(font="TkFixedFont")
self.txt_house.configure(foreground="#000000")
self.txt_house.configure(highlightbackground="#d9d9d9")
self.txt_house.configure(highlightcolor="black")
self.txt_house.configure(insertbackground="black")
self.txt_house.configure(selectbackground="#c4c4c4")
self.txt_house.configure(selectforeground="black")
self.txt_total = tk.Entry(self.Frame1)
self.txt_total.place(relx=0.393, rely=0.867,height=20, relwidth=0.538)
self.txt_total.configure(background="white")
self.txt_total.configure(disabledforeground="#a3a3a3")
self.txt_total.configure(font="TkFixedFont")
self.txt_total.configure(foreground="#000000")
self.txt_total.configure(highlightbackground="#d9d9d9")
self.txt_total.configure(highlightcolor="black")
self.txt_total.configure(insertbackground="black")
self.txt_total.configure(selectbackground="#c4c4c4")
self.txt_total.configure(selectforeground="black")
self.txt_light = tk.Entry(self.Frame1)
self.txt_light.place(relx=0.393, rely=0.675,height=20, relwidth=0.538)
self.txt_light.configure(background="white")
self.txt_light.configure(disabledforeground="#a3a3a3")
self.txt_light.configure(font="TkFixedFont")
self.txt_light.configure(foreground="#000000")
self.txt_light.configure(highlightbackground="#d9d9d9")
self.txt_light.configure(highlightcolor="black")
self.txt_light.configure(insertbackground="black")
self.txt_light.configure(selectbackground="#c4c4c4")
self.txt_light.configure(selectforeground="black")
self.txt_health = tk.Entry(self.Frame1)
self.txt_health.place(relx=0.393, rely=0.578,height=20, relwidth=0.538)
self.txt_health.configure(background="white")
self.txt_health.configure(disabledforeground="#a3a3a3")
self.txt_health.configure(font="TkFixedFont")
self.txt_health.configure(foreground="#000000")
self.txt_health.configure(highlightbackground="#d9d9d9")
self.txt_health.configure(highlightcolor="black")
self.txt_health.configure(insertbackground="black")
self.txt_health.configure(selectbackground="#c4c4c4")
self.txt_health.configure(selectforeground="black")
self.txt_water = tk.Entry(self.Frame1)
self.txt_water.place(relx=0.393, rely=0.771,height=20, relwidth=0.538)
self.txt_water.configure(background="white")
self.txt_water.configure(disabledforeground="#a3a3a3")
self.txt_water.configure(font="TkFixedFont")
self.txt_water.configure(foreground="#000000")
self.txt_water.configure(highlightbackground="#d9d9d9")
self.txt_water.configure(highlightcolor="black")
self.txt_water.configure(insertbackground="black")
self.txt_water.configure(selectbackground="#c4c4c4")
self.txt_water.configure(selectforeground="black")
self.Label7 = tk.Label(self.Frame1)
self.Label7.place(relx=0.066, rely=0.072, height=39, width=106)
self.Label7.configure(activebackground="#f9f9f9")
self.Label7.configure(activeforeground="black")
self.Label7.configure(background="#50e82a")
self.Label7.configure(disabledforeground="#a3a3a3")
self.Label7.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7.configure(foreground="#000000")
self.Label7.configure(highlightbackground="#d9d9d9")
self.Label7.configure(highlightcolor="black")
self.Label7.configure(text='''ID Number :''')
self.Frame1_20 = tk.Frame(top)
self.Frame1_20.place(relx=0.537, rely=0.37, relheight=0.473
, relwidth=0.292)
self.Frame1_20.configure(relief='ridge')
self.Frame1_20.configure(borderwidth="10")
self.Frame1_20.configure(relief='ridge')
self.Frame1_20.configure(background="#50e82a")
self.Frame1_20.configure(highlightbackground="#d9d9d9")
self.Frame1_20.configure(highlightcolor="black")
self.Label7_21 = tk.Label(self.Frame1_20)
self.Label7_21.place(relx=0.033, rely=0.087, height=39, width=126)
self.Label7_21.configure(activebackground="#f9f9f9")
self.Label7_21.configure(activeforeground="black")
self.Label7_21.configure(background="#50e82a")
self.Label7_21.configure(disabledforeground="#a3a3a3")
self.Label7_21.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_21.configure(foreground="#000000")
self.Label7_21.configure(highlightbackground="#d9d9d9")
self.Label7_21.configure(highlightcolor="black")
self.Label7_21.configure(text='''Receipt Number :''')
self.txt_reciept = tk.Entry(self.Frame1_20)
self.txt_reciept.place(relx=0.492, rely=0.116, height=20, relwidth=0.439)
self.txt_reciept.configure(background="white")
self.txt_reciept.configure(disabledforeground="#a3a3a3")
self.txt_reciept.configure(font="TkFixedFont")
self.txt_reciept.configure(foreground="#000000")
self.txt_reciept.configure(highlightbackground="#d9d9d9")
self.txt_reciept.configure(highlightcolor="black")
self.txt_reciept.configure(insertbackground="black")
self.txt_reciept.configure(selectbackground="#c4c4c4")
self.txt_reciept.configure(selectforeground="black")
self.Label7_2 = tk.Label(self.Frame1_20)
self.Label7_2.place(relx=0.033, rely=0.203, height=39, width=126)
self.Label7_2.configure(activebackground="#f9f9f9")
self.Label7_2.configure(activeforeground="black")
self.Label7_2.configure(background="#50e82a")
self.Label7_2.configure(disabledforeground="#a3a3a3")
self.Label7_2.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_2.configure(foreground="#000000")
self.Label7_2.configure(highlightbackground="#d9d9d9")
self.Label7_2.configure(highlightcolor="black")
self.Label7_2.configure(text='''House Tax (Paid) :''')
self.Label7_3 = tk.Label(self.Frame1_20)
self.Label7_3.place(relx=0.033, rely=0.319, height=39, width=126)
self.Label7_3.configure(activebackground="#f9f9f9")
self.Label7_3.configure(activeforeground="black")
self.Label7_3.configure(background="#50e82a")
self.Label7_3.configure(disabledforeground="#a3a3a3")
self.Label7_3.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_3.configure(foreground="#000000")
self.Label7_3.configure(highlightbackground="#d9d9d9")
self.Label7_3.configure(highlightcolor="black")
self.Label7_3.configure(text='''Health Tax (Paid) :''')
self.Label7_4 = tk.Label(self.Frame1_20)
self.Label7_4.place(relx=0.066, rely=0.435, height=39, width=116)
self.Label7_4.configure(activebackground="#f9f9f9")
self.Label7_4.configure(activeforeground="black")
self.Label7_4.configure(background="#50e82a")
self.Label7_4.configure(disabledforeground="#a3a3a3")
self.Label7_4.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_4.configure(foreground="#000000")
self.Label7_4.configure(highlightbackground="#d9d9d9")
self.Label7_4.configure(highlightcolor="black")
self.Label7_4.configure(text='''Light Tax (Paid) :''')
self.Label7_5 = tk.Label(self.Frame1_20)
self.Label7_5.place(relx=0.033, rely=0.551, height=39, width=116)
self.Label7_5.configure(activebackground="#f9f9f9")
self.Label7_5.configure(activeforeground="black")
self.Label7_5.configure(background="#50e82a")
self.Label7_5.configure(disabledforeground="#a3a3a3")
self.Label7_5.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_5.configure(foreground="#000000")
self.Label7_5.configure(highlightbackground="#d9d9d9")
self.Label7_5.configure(highlightcolor="black")
self.Label7_5.configure(text='''Water Tax (Paid) :''')
self.Label7_6 = tk.Label(self.Frame1_20)
self.Label7_6.place(relx=0.033, rely=0.667, height=39, width=106)
self.Label7_6.configure(activebackground="#f9f9f9")
self.Label7_6.configure(activeforeground="black")
self.Label7_6.configure(background="#50e82a")
self.Label7_6.configure(disabledforeground="#a3a3a3")
self.Label7_6.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_6.configure(foreground="#000000")
self.Label7_6.configure(highlightbackground="#d9d9d9")
self.Label7_6.configure(highlightcolor="black")
self.Label7_6.configure(text='''Total (Paid) :''')
self.txt_housepaid = tk.Entry(self.Frame1_20)
self.txt_housepaid.place(relx=0.492, rely=0.232, height=20
, relwidth=0.439)
self.txt_housepaid.configure(background="white")
self.txt_housepaid.configure(disabledforeground="#a3a3a3")
self.txt_housepaid.configure(font="TkFixedFont")
self.txt_housepaid.configure(foreground="#000000")
self.txt_housepaid.configure(highlightbackground="#d9d9d9")
self.txt_housepaid.configure(highlightcolor="black")
self.txt_housepaid.configure(insertbackground="black")
self.txt_housepaid.configure(selectbackground="#c4c4c4")
self.txt_housepaid.configure(selectforeground="black")
self.txt_healthpaid = tk.Entry(self.Frame1_20)
self.txt_healthpaid.place(relx=0.492, rely=0.348, height=20
, relwidth=0.439)
self.txt_healthpaid.configure(background="white")
self.txt_healthpaid.configure(disabledforeground="#a3a3a3")
self.txt_healthpaid.configure(font="TkFixedFont")
self.txt_healthpaid.configure(foreground="#000000")
self.txt_healthpaid.configure(highlightbackground="#d9d9d9")
self.txt_healthpaid.configure(highlightcolor="black")
self.txt_healthpaid.configure(insertbackground="black")
self.txt_healthpaid.configure(selectbackground="#c4c4c4")
self.txt_healthpaid.configure(selectforeground="black")
self.txt_lightpaid = tk.Entry(self.Frame1_20)
self.txt_lightpaid.place(relx=0.492, rely=0.464, height=20
, relwidth=0.439)
self.txt_lightpaid.configure(background="white")
self.txt_lightpaid.configure(disabledforeground="#a3a3a3")
self.txt_lightpaid.configure(font="TkFixedFont")
self.txt_lightpaid.configure(foreground="#000000")
self.txt_lightpaid.configure(highlightbackground="#d9d9d9")
self.txt_lightpaid.configure(highlightcolor="black")
self.txt_lightpaid.configure(insertbackground="black")
self.txt_lightpaid.configure(selectbackground="#c4c4c4")
self.txt_lightpaid.configure(selectforeground="black")
self.txt_waterpaid = tk.Entry(self.Frame1_20)
self.txt_waterpaid.place(relx=0.492, rely=0.58, height=20
, relwidth=0.439)
self.txt_waterpaid.configure(background="white")
self.txt_waterpaid.configure(disabledforeground="#a3a3a3")
self.txt_waterpaid.configure(font="TkFixedFont")
self.txt_waterpaid.configure(foreground="#000000")
self.txt_waterpaid.configure(highlightbackground="#d9d9d9")
self.txt_waterpaid.configure(highlightcolor="black")
self.txt_waterpaid.configure(insertbackground="black")
self.txt_waterpaid.configure(selectbackground="#c4c4c4")
self.txt_waterpaid.configure(selectforeground="black")
self.txt_totalpaid = tk.Entry(self.Frame1_20)
self.txt_totalpaid.place(relx=0.492, rely=0.696, height=20
, relwidth=0.439)
self.txt_totalpaid.configure(background="white")
self.txt_totalpaid.configure(disabledforeground="#a3a3a3")
self.txt_totalpaid.configure(font="TkFixedFont")
self.txt_totalpaid.configure(foreground="#000000")
self.txt_totalpaid.configure(highlightbackground="#d9d9d9")
self.txt_totalpaid.configure(highlightcolor="black")
self.txt_totalpaid.configure(insertbackground="black")
self.txt_totalpaid.configure(selectbackground="#c4c4c4")
self.txt_totalpaid.configure(selectforeground="black")
self.btn_submit = tk.Button(self.Frame1_20)
self.btn_submit.place(relx=0.328, rely=0.812, height=44, width=97)
self.btn_submit.configure(activebackground="#ececec")
self.btn_submit.configure(activeforeground="#000000")
self.btn_submit.configure(background="#ff350d")
self.btn_submit.configure(disabledforeground="#a3a3a3")
self.btn_submit.configure(font="-family {Segoe UI} -size 13")
self.btn_submit.configure(foreground="#ffffff")
self.btn_submit.configure(highlightbackground="#d9d9d9")
self.btn_submit.configure(highlightcolor="black")
self.btn_submit.configure(pady="0")
self.btn_submit.configure(text='''SUBMIT''',command = self.submit4)
self.Frame1_21 = tk.Frame(top)
self.Frame1_21.place(relx=0.537, rely=0.274, relheight=0.089
, relwidth=0.292)
self.Frame1_21.configure(relief='ridge')
self.Frame1_21.configure(borderwidth="10")
self.Frame1_21.configure(relief='ridge')
self.Frame1_21.configure(background="#50e82a")
self.Frame1_21.configure(highlightbackground="#d9d9d9")
self.Frame1_21.configure(highlightcolor="black")
self.Frame1_21.configure(width=305)
self.Label7_22 = tk.Label(self.Frame1_21)
self.Label7_22.place(relx=0.033, rely=0.154, height=39, width=126)
self.Label7_22.configure(activebackground="#f9f9f9")
self.Label7_22.configure(activeforeground="black")
self.Label7_22.configure(background="#50e82a")
self.Label7_22.configure(disabledforeground="#a3a3a3")
self.Label7_22.configure(font="-family {Plantagenet Cherokee} -size 13")
self.Label7_22.configure(foreground="#000000")
self.Label7_22.configure(highlightbackground="#d9d9d9")
self.Label7_22.configure(highlightcolor="black")
self.Label7_22.configure(text='''Village Name :''')
self.txt_village = tk.Entry(self.Frame1_21)
self.txt_village.place(relx=0.492, rely=0.308, height=20, relwidth=0.439)
self.txt_village.configure(background="white")
self.txt_village.configure(disabledforeground="#a3a3a3")
self.txt_village.configure(font="TkFixedFont")
self.txt_village.configure(foreground="#000000")
self.txt_village.configure(highlightbackground="#d9d9d9")
self.txt_village.configure(highlightcolor="black")
self.txt_village.configure(insertbackground="black")
self.txt_village.configure(selectbackground="#c4c4c4")
self.txt_village.configure(selectforeground="black")
if __name__ == '__main__':
vp_start_gui()
def delete1():
import sys
import tkinter
from tkinter import messagebox
import mysql.connector
import time
import dbConnect
from dbConnect import DBConnect
import smtplib
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import deletedata_support
import os.path
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
sys.stdout.flush()
root = tk.Tk()
top = e_TAX_2019 (root)
deletedata_support.init(root, top)
root.mainloop()
w = None
def create_e_TAX_2019(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
global prog_location
prog_call = sys.argv[0]
print ('prog_call = {}'.format(prog_call))
prog_location = os.path.split(prog_call)[0]
print ('prog_location = {}'.format(prog_location))
rt = root
w = tk.Toplevel (root)
top = e_TAX_2019 (w)
deletedata_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_e_TAX_2019():
global w
w.destroy()
w = None
class e_TAX_2019:
def exits(self):
msg=tkinter.messagebox.askyesno("e-TAX 2019","Do You Want To EXIT ?")
if msg :
exit()
def backs(self):
root.destroy()
update1()
def submit2(self):
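        # Records the village, UID and name of the deleted entry in the
        # `deleteddata` table, then emails the details to the support inbox
        # via Gmail SMTP and closes the window.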
v=str(self.txt_villagename.get());
n=str(self.txt_name.get());
u=str(self.txt_uidnumber.get());
t=str(time.asctime(time.localtime(time.time())))
mydb=mysql.connector.connect(host='localhost',user='root',passwd='Pass@123',database='etax2019')
mycursor=mydb.cursor()
query=("INSERT INTO deleteddata (village, uidnumber, name)VALUES(%s,%s,%s)")
datac=(v,u,n)
mycursor.execute(query,datac)
mydb.commit()
totcontaint=str("Warning !!!!! \n\n New Request To Delete Entry \n\n Some Entries has been changed \n\n Here are the details : \n\n\n Village Name :"+v+"\n UID Number :"+u+"\n Name Of individual"+n+"\n Data deleted on :"+t)
mail= smtplib.SMTP('smtp.gmail.com',587)
mail.ehlo()
mail.starttls()
mail.login('etaxsupp2019@gmail.com','Pass@123')
mail.sendmail('etaxsupp2019@gmail.com','kulkarnipranesh1767@gmail.com',totcontaint)
mail.close()
messagebox.showinfo("e-TAX 2019","Successfully deleted The Entry")
root.destroy()
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
font12 = "-family {Swatch it} -size 16"
font13 = "-family {Segoe UI Black} -size 13 -weight bold"
font14 = "-family {Shonar Bangla} -size 22"
top.geometry("843x469+274+133")
top.title("e-TAX 2019")
top.configure(background="#727272")
top.configure(highlightbackground="#d9d9d9")
top.configure(highlightcolor="black")
self.Frame1 = tk.Frame(top)
self.Frame1.place(relx=0.012, rely=0.021, relheight=0.309
, relwidth=0.836)
self.Frame1.configure(relief='ridge')
self.Frame1.configure(borderwidth="10")
self.Frame1.configure(relief='ridge')
self.Frame1.configure(background="#595959")
self.Frame1.configure(highlightbackground="#d9d9d9")
self.Frame1.configure(highlightcolor="black")
self.Frame1.configure(width=705)
self.Label1 = tk.Label(self.Frame1)
self.Label1.place(relx=0.043, rely=0.207, height=59, width=196)
self.Label1.configure(activebackground="#f9f9f9")
self.Label1.configure(activeforeground="black")
self.Label1.configure(background="#595959")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1.configure(foreground="#0d4dff")
self.Label1.configure(highlightbackground="#d9d9d9")
self.Label1.configure(highlightcolor="black")
self.Label1.configure(text='''eTAX''')
self.Label1_1 = tk.Label(self.Frame1)
self.Label1_1.place(relx=0.298, rely=0.207, height=59, width=146)
self.Label1_1.configure(activebackground="#f9f9f9")
self.Label1_1.configure(activeforeground="black")
self.Label1_1.configure(background="#595959")
self.Label1_1.configure(disabledforeground="#a3a3a3")
self.Label1_1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1_1.configure(foreground="#ff2b0a")
self.Label1_1.configure(highlightbackground="#d9d9d9")
self.Label1_1.configure(highlightcolor="black")
self.Label1_1.configure(text='''2019''')
self.Label2 = tk.Label(self.Frame1)
self.Label2.place(relx=0.156, rely=0.621, height=31, width=184)
self.Label2.configure(activebackground="#f9f9f9")
self.Label2.configure(activeforeground="black")
self.Label2.configure(background="#595959")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font="-family {Rage Italic} -size 19 -slant italic")
self.Label2.configure(foreground="#f7ff0a")
self.Label2.configure(highlightbackground="#d9d9d9")
self.Label2.configure(highlightcolor="black")
self.Label2.configure(text='''working for you''')
self.Label1_2 = tk.Label(self.Frame1)
self.Label1_2.place(relx=0.525, rely=0.207, height=69, width=306)
self.Label1_2.configure(activebackground="#f9f9f9")
self.Label1_2.configure(activeforeground="black")
self.Label1_2.configure(background="#595959")
self.Label1_2.configure(disabledforeground="#a3a3a3")
self.Label1_2.configure(font="-family {Rockwell Extra} -size 28 -weight bold")
self.Label1_2.configure(foreground="#5faa14")
self.Label1_2.configure(highlightbackground="#d9d9d9")
self.Label1_2.configure(highlightcolor="black")
self.Label1_2.configure(text='''delete DATA''')
self.Label1_2.configure(width=306)
self.menubar = tk.Menu(top,font="TkMenuFont",bg=_bgcolor,fg=_fgcolor)
top.configure(menu = self.menubar)
self.Label3 = tk.Label(top)
self.Label3.place(relx=0.107, rely=0.448, height=39, width=186)
self.Label3.configure(background="#727272")
self.Label3.configure(disabledforeground="#a3a3a3")
self.Label3.configure(font=font12)
self.Label3.configure(foreground="#f7ff1c")
self.Label3.configure(text='''Village Name :''')
self.Label3.configure(width=186)
self.Label3_1 = tk.Label(top)
self.Label3_1.place(relx=0.107, rely=0.554, height=29, width=186)
self.Label3_1.configure(activebackground="#f9f9f9")
self.Label3_1.configure(activeforeground="black")
self.Label3_1.configure(background="#727272")
self.Label3_1.configure(disabledforeground="#a3a3a3")
self.Label3_1.configure(font="-family {Swatch it} -size 16")
self.Label3_1.configure(foreground="#f7ff1c")
self.Label3_1.configure(highlightbackground="#d9d9d9")
self.Label3_1.configure(highlightcolor="black")
self.Label3_1.configure(text='''UID Number :''')
self.Label3_2 = tk.Label(top)
self.Label3_2.place(relx=0.142, rely=0.64, height=39, width=116)
self.Label3_2.configure(activebackground="#f9f9f9")
self.Label3_2.configure(activeforeground="black")
self.Label3_2.configure(background="#727272")
self.Label3_2.configure(disabledforeground="#a3a3a3")
self.Label3_2.configure(font="-family {Swatch it} -size 16")
self.Label3_2.configure(foreground="#f7ff1c")
self.Label3_2.configure(highlightbackground="#d9d9d9")
self.Label3_2.configure(highlightcolor="black")
self.Label3_2.configure(text='''Name :''')
self.Label3_2.configure(width=116)
self.txt_villagename = tk.Entry(top)
self.txt_villagename.place(relx=0.368, rely=0.469, height=20
, relwidth=0.361)
self.txt_villagename.configure(background="white")
self.txt_villagename.configure(disabledforeground="#a3a3a3")
self.txt_villagename.configure(font="TkFixedFont")
self.txt_villagename.configure(foreground="#000000")
self.txt_villagename.configure(insertbackground="black")
self.txt_villagename.configure(width=304)
self.txt_uidnumber = tk.Entry(top)
self.txt_uidnumber.place(relx=0.368, rely=0.576, height=20
, relwidth=0.361)
self.txt_uidnumber.configure(background="white")
self.txt_uidnumber.configure(disabledforeground="#a3a3a3")
self.txt_uidnumber.configure(font="TkFixedFont")
self.txt_uidnumber.configure(foreground="#000000")
self.txt_uidnumber.configure(highlightbackground="#d9d9d9")
self.txt_uidnumber.configure(highlightcolor="black")
self.txt_uidnumber.configure(insertbackground="black")
self.txt_uidnumber.configure(selectbackground="#c4c4c4")
self.txt_uidnumber.configure(selectforeground="black")
self.txt_name = tk.Entry(top)
self.txt_name.place(relx=0.368, rely=0.661,height=20, relwidth=0.361)
self.txt_name.configure(background="white")
self.txt_name.configure(disabledforeground="#a3a3a3")
self.txt_name.configure(font="TkFixedFont")
self.txt_name.configure(foreground="#000000")
self.txt_name.configure(highlightbackground="#d9d9d9")
self.txt_name.configure(highlightcolor="black")
self.txt_name.configure(insertbackground="black")
self.txt_name.configure(selectbackground="#c4c4c4")
self.txt_name.configure(selectforeground="black")
self.Label4 = tk.Label(top)
self.Label4.place(relx=0.771, rely=0.49, height=100, width=174)
self.Label4.configure(background="#d9d9d9")
self.Label4.configure(borderwidth="10")
self.Label4.configure(disabledforeground="#a3a3a3")
self.Label4.configure(foreground="#000000")
photo_location = os.path.join(prog_location,"./delete1.png")
self._img0 = tk.PhotoImage(file=photo_location)
self.Label4.configure(image=self._img0)
self.Label4.configure(relief='raised')
self.Label4.configure(text='''Label''')
self.Label4.configure(width=174)
self.btn_submit = tk.Button(top)
self.btn_submit.place(relx=0.427, rely=0.832, height=54, width=194)
self.btn_submit.configure(activebackground="#ececec")
self.btn_submit.configure(activeforeground="#000000")
self.btn_submit.configure(background="#3920d8")
self.btn_submit.configure(borderwidth="10")
self.btn_submit.configure(disabledforeground="#a3a3a3")
self.btn_submit.configure(font=font13)
self.btn_submit.configure(foreground="#ffffff")
self.btn_submit.configure(highlightbackground="#d9d9d9")
self.btn_submit.configure(highlightcolor="black")
self.btn_submit.configure(pady="0")
self.btn_submit.configure(text='''I Wish To Continue''',command = self.submit2)
self.Label5 = tk.Label(top)
self.Label5.place(relx=0.095, rely=0.725, height=43, width=560)
self.Label5.configure(background="#727272")
self.Label5.configure(disabledforeground="#a3a3a3")
self.Label5.configure(font=font14)
self.Label5.configure(foreground="#5bff3b")
        self.Label5.configure(text='''Disclaimer : You cannot recover the data once deleted''')
self.btn_exit = tk.Button(top)
self.btn_exit.place(relx=0.866, rely=0.192, height=44, width=104)
self.btn_exit.configure(activebackground="#ececec")
self.btn_exit.configure(activeforeground="#000000")
self.btn_exit.configure(background="#ff3f0f")
self.btn_exit.configure(borderwidth="10")
self.btn_exit.configure(disabledforeground="#a3a3a3")
self.btn_exit.configure(font="-family {Segoe UI Black} -size 13 -weight bold")
self.btn_exit.configure(foreground="#ffffff")
self.btn_exit.configure(highlightbackground="#d9d9d9")
self.btn_exit.configure(highlightcolor="black")
self.btn_exit.configure(pady="0")
self.btn_exit.configure(text='''EXIT''',command = self.exits)
self.btn_exit.configure(width=104)
self.btn_back = tk.Button(top)
self.btn_back.place(relx=0.866, rely=0.064, height=44, width=104)
self.btn_back.configure(activebackground="#ececec")
self.btn_back.configure(activeforeground="#000000")
self.btn_back.configure(background="#3ba825")
self.btn_back.configure(borderwidth="10")
self.btn_back.configure(disabledforeground="#a3a3a3")
self.btn_back.configure(font="-family {Segoe UI Black} -size 13 -weight bold")
self.btn_back.configure(foreground="#ffffff")
self.btn_back.configure(highlightbackground="#d9d9d9")
self.btn_back.configure(highlightcolor="black")
self.btn_back.configure(pady="0")
self.btn_back.configure(text='''BACK''',command = self.backs)
if __name__ == '__main__':
vp_start_gui()
class e_TAX_2019:
def backs(self):
root.destroy()
print("duchdhci")
def exits(self):
msg=tkinter.messagebox.askyesno("eTAX 2019","Do you want to exit")
if msg :
exit()
def accesss(self):
root.destroy()
access1()
def deletes(self):
root.destroy()
delete1()
def modifys(self):
root.destroy()
modify1()
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
font10 = "-family {Rockwell Extra} -size 40 -weight bold"
font11 = "-family {Rage Italic} -size 19 -slant italic"
font12 = "-family {Rockwell Extra} -size 28 -weight bold"
font15 = "-family {Segoe UI Black} -size 23 -weight bold"
font16 = "-family {Postmaster} -size 13"
top.geometry("990x650+274+133")
top.title("e-TAX 2019")
top.configure(background="#727272")
self.Frame1 = tk.Frame(top)
self.Frame1.place(relx=0.091, rely=0.046, relheight=0.223
, relwidth=0.843)
self.Frame1.configure(relief='ridge')
self.Frame1.configure(borderwidth="10")
self.Frame1.configure(relief='ridge')
self.Frame1.configure(background="#595959")
self.Frame1.configure(width=835)
self.Label1 = tk.Label(self.Frame1)
self.Label1.place(relx=0.036, rely=0.207, height=59, width=196)
self.Label1.configure(background="#595959")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font=font10)
self.Label1.configure(foreground="#0d4dff")
self.Label1.configure(text='''eTAX''')
self.Label1.configure(width=196)
self.Label1_1 = tk.Label(self.Frame1)
self.Label1_1.place(relx=0.251, rely=0.207, height=59, width=146)
self.Label1_1.configure(activebackground="#f9f9f9")
self.Label1_1.configure(activeforeground="black")
self.Label1_1.configure(background="#595959")
self.Label1_1.configure(disabledforeground="#a3a3a3")
self.Label1_1.configure(font="-family {Rockwell Extra} -size 40 -weight bold")
self.Label1_1.configure(foreground="#ff2b0a")
self.Label1_1.configure(highlightbackground="#d9d9d9")
self.Label1_1.configure(highlightcolor="black")
self.Label1_1.configure(text='''2019''')
self.Label1_1.configure(width=146)
self.Label2 = tk.Label(self.Frame1)
self.Label2.place(relx=0.132, rely=0.621, height=31, width=184)
self.Label2.configure(background="#595959")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font=font11)
self.Label2.configure(foreground="#f7ff0a")
self.Label2.configure(text='''working for you''')
self.Label2.configure(width=184)
self.Label1_2 = tk.Label(self.Frame1)
self.Label1_2.place(relx=0.431, rely=0.207, height=69, width=446)
self.Label1_2.configure(activebackground="#f9f9f9")
self.Label1_2.configure(activeforeground="black")
self.Label1_2.configure(background="#595959")
self.Label1_2.configure(disabledforeground="#a3a3a3")
self.Label1_2.configure(font=font12)
self.Label1_2.configure(foreground="#5faa14")
self.Label1_2.configure(highlightbackground="#d9d9d9")
self.Label1_2.configure(highlightcolor="black")
self.Label1_2.configure(text='''UPDATE DATABASE''')
self.Label1_2.configure(width=446)
self.Frame2 = tk.Frame(top)
self.Frame2.place(relx=0.01, rely=0.369, relheight=0.531, relwidth=0.328)
self.Frame2.configure(relief='ridge')
self.Frame2.configure(borderwidth="10")
self.Frame2.configure(relief='ridge')
self.Frame2.configure(background="#595959")
self.Frame2.configure(width=325)
self.Label3 = tk.Label(self.Frame2)
self.Label3.place(relx=0.123, rely=0.638, height=45, width=234)
self.Label3.configure(background="#595959")
self.Label3.configure(disabledforeground="#a3a3a3")
self.Label3.configure(font=font15)
self.Label3.configure(foreground="#efff0f")
self.Label3.configure(text='''MODIFY DATA''')
self.Label3.configure(width=234)
self.Label4 = tk.Label(self.Frame2)
self.Label4.place(relx=0.154, rely=0.783, height=22, width=213)
self.Label4.configure(background="#595959")
self.Label4.configure(disabledforeground="#a3a3a3")
self.Label4.configure(font=font16)
self.Label4.configure(foreground="#4cad1c")
self.Label4.configure(text='''CLICK HERE TO UPDATE''')
self.Label4_3 = tk.Label(self.Frame2)
self.Label4_3.place(relx=0.338, rely=0.841, height=32, width=83)
self.Label4_3.configure(activebackground="#f9f9f9")
self.Label4_3.configure(activeforeground="black")
self.Label4_3.configure(background="#595959")
self.Label4_3.configure(disabledforeground="#a3a3a3")
self.Label4_3.configure(font="-family {Postmaster} -size 13")
self.Label4_3.configure(foreground="#4cad1c")
self.Label4_3.configure(highlightbackground="#d9d9d9")
self.Label4_3.configure(highlightcolor="black")
self.Label4_3.configure(text='''DATA''')
self.Label4_3.configure(width=83)
self.btn_modify = tk.Button(self.Frame2)
self.btn_modify.place(relx=0.215, rely=0.058, height=186, width=186)
self.btn_modify.configure(activebackground="#ececec")
self.btn_modify.configure(activeforeground="#000000")
self.btn_modify.configure(background="#d9d9d9")
self.btn_modify.configure(disabledforeground="#a3a3a3")
self.btn_modify.configure(foreground="#000000")
self.btn_modify.configure(highlightbackground="#d9d9d9")
self.btn_modify.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./updatepic1.png")
self._img0 = tk.PhotoImage(file=photo_location)
self.btn_modify.configure(image=self._img0)
self.btn_modify.configure(pady="0")
self.btn_modify.configure(text='''Button''',command= self.modifys)
self.btn_modify.configure(width=186)
self.Frame2_4 = tk.Frame(top)
self.Frame2_4.place(relx=0.333, rely=0.369, relheight=0.531
, relwidth=0.328)
self.Frame2_4.configure(relief='ridge')
self.Frame2_4.configure(borderwidth="10")
self.Frame2_4.configure(relief='ridge')
self.Frame2_4.configure(background="#595959")
self.Frame2_4.configure(highlightbackground="#d9d9d9")
self.Frame2_4.configure(highlightcolor="black")
self.Frame2_4.configure(width=325)
self.Label3_5 = tk.Label(self.Frame2_4)
self.Label3_5.place(relx=0.123, rely=0.638, height=45, width=234)
self.Label3_5.configure(activebackground="#f9f9f9")
self.Label3_5.configure(activeforeground="black")
self.Label3_5.configure(background="#595959")
self.Label3_5.configure(disabledforeground="#a3a3a3")
self.Label3_5.configure(font="-family {Segoe UI Black} -size 23 -weight bold")
self.Label3_5.configure(foreground="#efff0f")
self.Label3_5.configure(highlightbackground="#d9d9d9")
self.Label3_5.configure(highlightcolor="black")
self.Label3_5.configure(text='''DELETE DATA''')
self.Label4_6 = tk.Label(self.Frame2_4)
self.Label4_6.place(relx=0.154, rely=0.783, height=22, width=209)
self.Label4_6.configure(activebackground="#f9f9f9")
self.Label4_6.configure(activeforeground="black")
self.Label4_6.configure(background="#595959")
self.Label4_6.configure(disabledforeground="#a3a3a3")
self.Label4_6.configure(font="-family {Postmaster} -size 13")
self.Label4_6.configure(foreground="#4cad1c")
self.Label4_6.configure(highlightbackground="#d9d9d9")
self.Label4_6.configure(highlightcolor="black")
self.Label4_6.configure(text='''CLICK HERE TO DELETE''')
self.Label4_4 = tk.Label(self.Frame2_4)
self.Label4_4.place(relx=0.338, rely=0.841, height=32, width=83)
self.Label4_4.configure(activebackground="#f9f9f9")
self.Label4_4.configure(activeforeground="black")
self.Label4_4.configure(background="#595959")
self.Label4_4.configure(disabledforeground="#a3a3a3")
self.Label4_4.configure(font="-family {Postmaster} -size 13")
self.Label4_4.configure(foreground="#4cad1c")
self.Label4_4.configure(highlightbackground="#d9d9d9")
self.Label4_4.configure(highlightcolor="black")
self.Label4_4.configure(text='''DATA''')
self.btn_delete = tk.Button(self.Frame2_4)
self.btn_delete.place(relx=0.185, rely=0.174, height=122, width=206)
self.btn_delete.configure(activebackground="#ececec")
self.btn_delete.configure(activeforeground="#000000")
self.btn_delete.configure(background="#d9d9d9")
self.btn_delete.configure(disabledforeground="#a3a3a3")
self.btn_delete.configure(foreground="#000000")
self.btn_delete.configure(highlightbackground="#d9d9d9")
self.btn_delete.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./delete1.png")
self._img1 = tk.PhotoImage(file=photo_location)
self.btn_delete.configure(image=self._img1)
self.btn_delete.configure(pady="0")
self.btn_delete.configure(text='''Button''',command= self.deletes)
self.Frame2_5 = tk.Frame(top)
self.Frame2_5.place(relx=0.657, rely=0.369, relheight=0.531
, relwidth=0.328)
self.Frame2_5.configure(relief='ridge')
self.Frame2_5.configure(borderwidth="10")
self.Frame2_5.configure(relief='ridge')
self.Frame2_5.configure(background="#595959")
self.Frame2_5.configure(highlightbackground="#d9d9d9")
self.Frame2_5.configure(highlightcolor="black")
self.Frame2_5.configure(width=325)
self.Label3_6 = tk.Label(self.Frame2_5)
self.Label3_6.place(relx=0.123, rely=0.638, height=45, width=234)
self.Label3_6.configure(activebackground="#f9f9f9")
self.Label3_6.configure(activeforeground="black")
self.Label3_6.configure(background="#595959")
self.Label3_6.configure(disabledforeground="#a3a3a3")
self.Label3_6.configure(font="-family {Segoe UI Black} -size 23 -weight bold")
self.Label3_6.configure(foreground="#efff0f")
self.Label3_6.configure(highlightbackground="#d9d9d9")
self.Label3_6.configure(highlightcolor="black")
self.Label3_6.configure(text='''PERMISSION''')
self.Label4_7 = tk.Label(self.Frame2_5)
self.Label4_7.place(relx=0.154, rely=0.783, height=22, width=219)
self.Label4_7.configure(activebackground="#f9f9f9")
self.Label4_7.configure(activeforeground="black")
self.Label4_7.configure(background="#595959")
self.Label4_7.configure(disabledforeground="#a3a3a3")
self.Label4_7.configure(font="-family {Postmaster} -size 13")
self.Label4_7.configure(foreground="#4cad1c")
self.Label4_7.configure(highlightbackground="#d9d9d9")
self.Label4_7.configure(highlightcolor="black")
self.Label4_7.configure(text='''CLICK HERE TO ASK FOR''')
self.Label4_4 = tk.Label(self.Frame2_5)
self.Label4_4.place(relx=0.338, rely=0.841, height=32, width=103)
self.Label4_4.configure(activebackground="#f9f9f9")
self.Label4_4.configure(activeforeground="black")
self.Label4_4.configure(background="#595959")
self.Label4_4.configure(disabledforeground="#a3a3a3")
self.Label4_4.configure(font="-family {Postmaster} -size 13")
self.Label4_4.configure(foreground="#4cad1c")
self.Label4_4.configure(highlightbackground="#d9d9d9")
self.Label4_4.configure(highlightcolor="black")
self.Label4_4.configure(text='''PERMISSION''')
self.Label4_4.configure(width=103)
self.btn_access = tk.Button(self.Frame2_5)
self.btn_access.place(relx=0.185, rely=0.174, height=139, width=206)
self.btn_access.configure(activebackground="#ececec")
self.btn_access.configure(activeforeground="#000000")
self.btn_access.configure(background="#d9d9d9")
self.btn_access.configure(disabledforeground="#a3a3a3")
self.btn_access.configure(foreground="#000000")
self.btn_access.configure(highlightbackground="#d9d9d9")
self.btn_access.configure(highlightcolor="black")
photo_location = os.path.join(prog_location,"./access.png")
self._img2 = tk.PhotoImage(file=photo_location)
self.btn_access.configure(image=self._img2)
self.btn_access.configure(pady="0")
self.btn_access.configure(text='''Button''',command= self.accesss)
self.btn_exit = tk.Button(top)
self.btn_exit.place(relx=0.869, rely=0.292, height=40, width=90)
self.btn_exit.configure(activebackground="#ececec")
self.btn_exit.configure(activeforeground="#000000")
self.btn_exit.configure(background="#264aff")
self.btn_exit.configure(borderwidth="10")
self.btn_exit.configure(disabledforeground="#a3a3a3")
self.btn_exit.configure(foreground="#ffffff")
self.btn_exit.configure(highlightbackground="#d9d9d9")
self.btn_exit.configure(highlightcolor="black")
self.btn_exit.configure(pady="0")
self.btn_exit.configure(text='''EXIT''')
self.btn_exit.configure(width=90,command= self.exits)
self.btn_back = tk.Button(top)
self.btn_back.place(relx=0.747, rely=0.292, height=40, width=90)
self.btn_back.configure(activebackground="#ececec")
self.btn_back.configure(activeforeground="#000000")
self.btn_back.configure(background="#264aff")
self.btn_back.configure(borderwidth="10")
self.btn_back.configure(disabledforeground="#a3a3a3")
self.btn_back.configure(foreground="#ffffff")
self.btn_back.configure(highlightbackground="#d9d9d9")
self.btn_back.configure(highlightcolor="black")
self.btn_back.configure(pady="0")
self.btn_back.configure(text='''BACK''',command= self.backs)
if __name__ == '__main__':
vp_start_gui()
class Toplevel1:
def backs(self):
root.destroy()
print("sjc")
def exits(self):
msg=tkinter.messagebox.askyesno("Main","Do You Want To Exit ?")
if msg:
exit()
def worlds(self):
root.destroy()
world1()
def calculators(self):
root.destroy()
calculator1()
def onlines(self):
root.destroy()
online1()
def updates(self):
root.destroy()
update1()
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
font10 = "-family {Harlow Solid Italic} -size 40 -slant italic" \
""
font12 = "-family {BadBlocks TT} -size 28 -weight bold"
font13 = "-family {BadBlocks TT} -size 24 -weight bold"
font14 = "-family {Serifa BT} -size 14 -weight bold"
font15 = "-family {Sky Skunk} -size 12"
font9 = "-family {BankGothic Md BT} -size 40 -weight bold"
self.style = ttk.Style()
if sys.platform == "win32":
self.style.theme_use('winnative')
self.style.configure('.',background=_bgcolor)
self.style.configure('.',foreground=_fgcolor)
self.style.configure('.',font="TkDefaultFont")
self.style.map('.',background=
[('selected', _compcolor), ('active',_ana2color)])
top.geometry("992x628+286+152")
top.title("eTAX- 2019")
top.configure(background="#939393")
self.menubar = tk.Menu(top,font="TkMenuFont",bg=_bgcolor,fg=_fgcolor)
top.configure(menu = self.menubar)
self.Frame1 = tk.Frame(top)
self.Frame1.place(relx=0.02, rely=0.048, relheight=0.135, relwidth=0.64)
self.Frame1.configure(relief='ridge')
self.Frame1.configure(borderwidth="10")
self.Frame1.configure(relief='ridge')
self.Frame1.configure(background="#595959")
self.Frame1.configure(width=635)
self.Label1 = tk.Label(self.Frame1)
self.Label1.place(relx=0.11, rely=0.235, height=41, width=197)
self.Label1.configure(background="#595959")
self.Label1.configure(disabledforeground="#a3a3a3")
self.Label1.configure(font=font9)
self.Label1.configure(foreground="#ff2a00")
self.Label1.configure(text='''e-TAX''')
self.Label1.configure(width=197)
self.Label1_3 = tk.Label(self.Frame1)
self.Label1_3.place(relx=0.409, rely=0.235, height=41, width=177)
self.Label1_3.configure(activebackground="#f9f9f9")
self.Label1_3.configure(activeforeground="black")
self.Label1_3.configure(background="#595959")
self.Label1_3.configure(disabledforeground="#a3a3a3")
self.Label1_3.configure(font="-family {BankGothic Md BT} -size 40 -weight bold")
self.Label1_3.configure(foreground="#143bff")
self.Label1_3.configure(highlightbackground="#d9d9d9")
self.Label1_3.configure(highlightcolor="black")
self.Label1_3.configure(text='''2019''')
self.Label1_3.configure(width=177)
self.Label2 = tk.Label(self.Frame1)
self.Label2.place(relx=0.709, rely=0.235, height=43, width=158)
self.Label2.configure(background="#595959")
self.Label2.configure(disabledforeground="#a3a3a3")
self.Label2.configure(font=font10)
self.Label2.configure(foreground="#f0ff1c")
self.Label2.configure(text='''extra''')
self.Label2.configure(width=158)
self.Frame1_1 = tk.Frame(top)
self.Frame1_1.place(relx=0.03, rely=0.239, relheight=0.693
, relwidth=0.247)
self.Frame1_1.configure(relief='ridge')
self.Frame1_1.configure(borderwidth="10")
self.Frame1_1.configure(relief='ridge')
self.Frame1_1.configure(background="#595959")
self.Frame1_1.configure(highlightbackground="#d9d9d9")
self.Frame1_1.configure(highlightcolor="black")
self.Frame1_1.configure(width=245)
self.Label3 = tk.Label(self.Frame1_1)
self.Label3.place(relx=0.163, rely=0.069, height=39, width=168)
self.Label3.configure(background="#595959")
self.Label3.configure(disabledforeground="#a3a3a3")
self.Label3.configure(font=font12)
self.Label3.configure(foreground="#82ffff")
self.Label3.configure(text='''UPDATE''')
self.Label3.configure(width=168)
self.Label3_5 = tk.Label(self.Frame1_1)
self.Label3_5.place(relx=0.204, rely=0.184, height=39, width=128)
self.Label3_5.configure(activebackground="#f9f9f9")
self.Label3_5.configure(activeforeground="black")
self.Label3_5.configure(background="#595959")
self.Label3_5.configure(disabledforeground="#a3a3a3")
self.Label3_5.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_5.configure(foreground="#82ffff")
self.Label3_5.configure(highlightbackground="#d9d9d9")
self.Label3_5.configure(highlightcolor="black")
self.Label3_5.configure(text='''DATA''')
self.Label3_5.configure(width=128)
self.Label9 = tk.Label(self.Frame1_1)
self.Label9.place(relx=0.163, rely=0.437, height=65, width=154)
self.Label9.configure(background="#d9d9d9")
self.Label9.configure(disabledforeground="#a3a3a3")
self.Label9.configure(foreground="#000000")
photo_location = os.path.join(prog_location,"./bg4.png")
self._img0 = tk.PhotoImage(file=photo_location)
self.Label9.configure(image=self._img0)
self.Label9.configure(text='''Label''')
self.btn_update = tk.Button(self.Frame1_1)
self.btn_update.place(relx=0.204, rely=0.736, height=44, width=137)
self.btn_update.configure(activebackground="#ececec")
self.btn_update.configure(activeforeground="#000000")
self.btn_update.configure(background="#3045ff")
self.btn_update.configure(borderwidth="10")
self.btn_update.configure(disabledforeground="#a3a3a3")
self.btn_update.configure(font=font15)
self.btn_update.configure(foreground="#f5fcff")
self.btn_update.configure(highlightbackground="#d9d9d9")
self.btn_update.configure(highlightcolor="black")
self.btn_update.configure(pady="0")
self.btn_update.configure(text='''ENTER''')
self.btn_update.configure(width=137,command= self.updates)
self.Frame1_2 = tk.Frame(top)
self.Frame1_2.place(relx=0.282, rely=0.239, relheight=0.693
, relwidth=0.247)
self.Frame1_2.configure(relief='ridge')
self.Frame1_2.configure(borderwidth="10")
self.Frame1_2.configure(relief='ridge')
self.Frame1_2.configure(background="#595959")
self.Frame1_2.configure(highlightbackground="#d9d9d9")
self.Frame1_2.configure(highlightcolor="black")
self.Frame1_2.configure(width=245)
self.Label3_6 = tk.Label(self.Frame1_2)
self.Label3_6.place(relx=0.163, rely=0.069, height=39, width=168)
self.Label3_6.configure(activebackground="#f9f9f9")
self.Label3_6.configure(activeforeground="black")
self.Label3_6.configure(background="#595959")
self.Label3_6.configure(disabledforeground="#a3a3a3")
self.Label3_6.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_6.configure(foreground="#82ffff")
self.Label3_6.configure(highlightbackground="#d9d9d9")
self.Label3_6.configure(highlightcolor="black")
self.Label3_6.configure(text='''ONLINE''')
self.Label3_7 = tk.Label(self.Frame1_2)
self.Label3_7.place(relx=0.163, rely=0.184, height=39, width=168)
self.Label3_7.configure(activebackground="#f9f9f9")
self.Label3_7.configure(activeforeground="black")
self.Label3_7.configure(background="#595959")
self.Label3_7.configure(disabledforeground="#a3a3a3")
self.Label3_7.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_7.configure(foreground="#82ffff")
self.Label3_7.configure(highlightbackground="#d9d9d9")
self.Label3_7.configure(highlightcolor="black")
self.Label3_7.configure(text='''SUPPORT''')
self.Label8 = tk.Label(self.Frame1_2)
self.Label8.place(relx=0.204, rely=0.414, height=88, width=154)
self.Label8.configure(background="#d9d9d9")
self.Label8.configure(disabledforeground="#a3a3a3")
self.Label8.configure(foreground="#000000")
photo_location = os.path.join(prog_location,"./bg3.png")
self._img1 = tk.PhotoImage(file=photo_location)
self.Label8.configure(image=self._img1)
self.Label8.configure(text='''Label''')
self.btn_online = tk.Button(self.Frame1_2)
self.btn_online.place(relx=0.204, rely=0.736, height=44, width=137)
self.btn_online.configure(activebackground="#ececec")
self.btn_online.configure(activeforeground="#000000")
self.btn_online.configure(background="#3045ff")
self.btn_online.configure(borderwidth="10")
self.btn_online.configure(disabledforeground="#a3a3a3")
self.btn_online.configure(font="-family {Sky Skunk} -size 12")
self.btn_online.configure(foreground="#f5fcff")
self.btn_online.configure(highlightbackground="#d9d9d9")
self.btn_online.configure(highlightcolor="black")
self.btn_online.configure(pady="0")
self.btn_online.configure(text='''ENTER''',command= self.onlines)
self.Frame1_2 = tk.Frame(top)
self.Frame1_2.place(relx=0.534, rely=0.239, relheight=0.693
, relwidth=0.237)
self.Frame1_2.configure(relief='ridge')
self.Frame1_2.configure(borderwidth="10")
self.Frame1_2.configure(relief='ridge')
self.Frame1_2.configure(background="#595959")
self.Frame1_2.configure(highlightbackground="#d9d9d9")
self.Frame1_2.configure(highlightcolor="black")
self.Frame1_2.configure(width=235)
self.Label3_8 = tk.Label(self.Frame1_2)
self.Label3_8.place(relx=0.255, rely=0.069, height=39, width=118)
self.Label3_8.configure(activebackground="#f9f9f9")
self.Label3_8.configure(activeforeground="black")
self.Label3_8.configure(background="#595959")
self.Label3_8.configure(disabledforeground="#a3a3a3")
self.Label3_8.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_8.configure(foreground="#82ffff")
self.Label3_8.configure(highlightbackground="#d9d9d9")
self.Label3_8.configure(highlightcolor="black")
self.Label3_8.configure(text='''TAX''')
self.Label3_8.configure(width=118)
self.Label3_9 = tk.Label(self.Frame1_2)
self.Label3_9.place(relx=0.085, rely=0.184, height=39, width=198)
self.Label3_9.configure(activebackground="#f9f9f9")
self.Label3_9.configure(activeforeground="black")
self.Label3_9.configure(background="#595959")
self.Label3_9.configure(disabledforeground="#a3a3a3")
self.Label3_9.configure(font=font13)
self.Label3_9.configure(foreground="#82ffff")
self.Label3_9.configure(highlightbackground="#d9d9d9")
self.Label3_9.configure(highlightcolor="black")
self.Label3_9.configure(text='''CALCULATOR''')
self.Label3_9.configure(width=198)
self.Label7 = tk.Label(self.Frame1_2)
self.Label7.place(relx=0.17, rely=0.414, height=97, width=154)
self.Label7.configure(background="#d9d9d9")
self.Label7.configure(disabledforeground="#a3a3a3")
self.Label7.configure(foreground="#000000")
photo_location = os.path.join(prog_location,"./bg2.png")
self._img2 = tk.PhotoImage(file=photo_location)
self.Label7.configure(image=self._img2)
self.Label7.configure(text='''Label''')
self.btn_calculator = tk.Button(self.Frame1_2)
self.btn_calculator.place(relx=0.213, rely=0.736, height=44, width=137)
self.btn_calculator.configure(activebackground="#ececec")
self.btn_calculator.configure(activeforeground="#000000")
self.btn_calculator.configure(background="#3045ff")
self.btn_calculator.configure(borderwidth="10")
self.btn_calculator.configure(disabledforeground="#a3a3a3")
self.btn_calculator.configure(font="-family {Sky Skunk} -size 12")
self.btn_calculator.configure(foreground="#f5fcff")
self.btn_calculator.configure(highlightbackground="#d9d9d9")
self.btn_calculator.configure(highlightcolor="black")
self.btn_calculator.configure(pady="0")
self.btn_calculator.configure(text='''ENTER''',command= self.calculators)
self.Frame1_2 = tk.Frame(top)
self.Frame1_2.place(relx=0.776, rely=0.239, relheight=0.693
, relwidth=0.217)
self.Frame1_2.configure(relief='ridge')
self.Frame1_2.configure(borderwidth="10")
self.Frame1_2.configure(relief='ridge')
self.Frame1_2.configure(background="#595959")
self.Frame1_2.configure(highlightbackground="#d9d9d9")
self.Frame1_2.configure(highlightcolor="black")
self.Frame1_2.configure(width=215)
self.Label3_9 = tk.Label(self.Frame1_2)
self.Label3_9.place(relx=0.093, rely=0.069, height=39, width=178)
self.Label3_9.configure(activebackground="#f9f9f9")
self.Label3_9.configure(activeforeground="black")
self.Label3_9.configure(background="#595959")
self.Label3_9.configure(disabledforeground="#a3a3a3")
self.Label3_9.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_9.configure(foreground="#82ffff")
self.Label3_9.configure(highlightbackground="#d9d9d9")
self.Label3_9.configure(highlightcolor="black")
self.Label3_9.configure(text='''CONNECT''')
self.Label3_9.configure(width=178)
self.Label3_9 = tk.Label(self.Frame1_2)
self.Label3_9.place(relx=0.186, rely=0.161, height=39, width=118)
self.Label3_9.configure(activebackground="#f9f9f9")
self.Label3_9.configure(activeforeground="black")
self.Label3_9.configure(background="#595959")
self.Label3_9.configure(disabledforeground="#a3a3a3")
self.Label3_9.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_9.configure(foreground="#82ffff")
self.Label3_9.configure(highlightbackground="#d9d9d9")
self.Label3_9.configure(highlightcolor="black")
self.Label3_9.configure(text='''TO''')
self.Label3_9 = tk.Label(self.Frame1_2)
self.Label3_9.place(relx=0.186, rely=0.253, height=39, width=118)
self.Label3_9.configure(activebackground="#f9f9f9")
self.Label3_9.configure(activeforeground="black")
self.Label3_9.configure(background="#595959")
self.Label3_9.configure(disabledforeground="#a3a3a3")
self.Label3_9.configure(font="-family {BadBlocks TT} -size 28 -weight bold")
self.Label3_9.configure(foreground="#82ffff")
self.Label3_9.configure(highlightbackground="#d9d9d9")
self.Label3_9.configure(highlightcolor="black")
self.Label3_9.configure(text='''WORLD''')
self.Label6 = tk.Label(self.Frame1_2)
self.Label6.place(relx=0.14, rely=0.414, height=101, width=154)
self.Label6.configure(background="#d9d9d9")
self.Label6.configure(disabledforeground="#a3a3a3")
self.Label6.configure(foreground="#000000")
photo_location = os.path.join(prog_location,"./bg1.png")
self._img3 = tk.PhotoImage(file=photo_location)
self.Label6.configure(image=self._img3)
self.Label6.configure(text='''Label''')
self.btn_world = tk.Button(self.Frame1_2)
self.btn_world.place(relx=0.186, rely=0.736, height=44, width=137)
self.btn_world.configure(activebackground="#ececec")
self.btn_world.configure(activeforeground="#000000")
self.btn_world.configure(background="#3045ff")
self.btn_world.configure(borderwidth="10")
self.btn_world.configure(disabledforeground="#a3a3a3")
self.btn_world.configure(font="-family {Sky Skunk} -size 12")
self.btn_world.configure(foreground="#f5fcff")
self.btn_world.configure(highlightbackground="#d9d9d9")
self.btn_world.configure(highlightcolor="black")
self.btn_world.configure(pady="0")
        self.btn_world.configure(text='''ENTER''', command=self.worlds)
self.Label4 = tk.Label(top)
self.Label4.place(relx=0.685, rely=0.048, height=28, width=184)
self.Label4.configure(background="#939393")
self.Label4.configure(disabledforeground="#a3a3a3")
self.Label4.configure(font=font14)
self.Label4.configure(foreground="#0e4705")
self.Label4.configure(text='''Village : Kalamwadi''')
self.Label4_10 = tk.Label(top)
self.Label4_10.place(relx=0.716, rely=0.096, height=28, width=154)
self.Label4_10.configure(activebackground="#f9f9f9")
self.Label4_10.configure(activeforeground="black")
self.Label4_10.configure(background="#939393")
self.Label4_10.configure(disabledforeground="#a3a3a3")
self.Label4_10.configure(font="-family {Serifa BT} -size 14 -weight bold")
self.Label4_10.configure(foreground="#0e4705")
self.Label4_10.configure(highlightbackground="#d9d9d9")
self.Label4_10.configure(highlightcolor="black")
self.Label4_10.configure(text='''District : Sangli''')
self.Label4_10.configure(width=154)
self.TSeparator1 = ttk.Separator(top)
self.TSeparator1.place(relx=0.877, rely=0.032, relheight=0.127)
self.TSeparator1.configure(orient="vertical")
self.Label5 = tk.Label(top)
self.Label5.place(relx=0.917, rely=0.048, height=44, width=44)
self.Label5.configure(background="#d9d9d9")
self.Label5.configure(disabledforeground="#a3a3a3")
self.Label5.configure(foreground="#000000")
photo_location = os.path.join(prog_location,"./login3.png")
self._img4 = tk.PhotoImage(file=photo_location)
self.Label5.configure(image=self._img4)
self.Label5.configure(text='''Label''')
self.Label4_11 = tk.Label(top)
self.Label4_11.place(relx=0.907, rely=0.127, height=28, width=64)
self.Label4_11.configure(activebackground="#f9f9f9")
self.Label4_11.configure(activeforeground="black")
self.Label4_11.configure(background="#939393")
self.Label4_11.configure(disabledforeground="#a3a3a3")
self.Label4_11.configure(font="-family {Serifa BT} -size 14 -weight bold")
self.Label4_11.configure(foreground="#0e4705")
self.Label4_11.configure(highlightbackground="#d9d9d9")
self.Label4_11.configure(highlightcolor="black")
self.Label4_11.configure(text='''User''')
self.Label4_11.configure(width=64)
self.btn_exit = tk.Button(top)
self.btn_exit.place(relx=0.847, rely=0.939, height=34, width=107)
self.btn_exit.configure(activebackground="#ececec")
self.btn_exit.configure(activeforeground="#000000")
self.btn_exit.configure(background="#ff4f19")
self.btn_exit.configure(borderwidth="10")
self.btn_exit.configure(disabledforeground="#a3a3a3")
self.btn_exit.configure(font="-family {Sky Skunk} -size 12")
self.btn_exit.configure(foreground="#f5fcff")
self.btn_exit.configure(highlightbackground="#d9d9d9")
self.btn_exit.configure(highlightcolor="black")
self.btn_exit.configure(pady="0")
self.btn_exit.configure(text='''EXIT''',command=self.exits)
self.Label10 = tk.Label(top)
self.Label10.place(relx=0.03, rely=0.939, height=21, width=84)
self.Label10.configure(background="#939393")
self.Label10.configure(disabledforeground="#a3a3a3")
self.Label10.configure(foreground="#000000")
self.Label10.configure(text='''e-TAX 2019''')
self.Label10.configure(width=84)
self.Label10_9 = tk.Label(top)
self.Label10_9.place(relx=0.03, rely=0.971, height=11, width=84)
self.Label10_9.configure(activebackground="#f9f9f9")
self.Label10_9.configure(activeforeground="black")
self.Label10_9.configure(background="#939393")
self.Label10_9.configure(disabledforeground="#a3a3a3")
self.Label10_9.configure(foreground="#000000")
self.Label10_9.configure(highlightbackground="#d9d9d9")
self.Label10_9.configure(highlightcolor="black")
self.Label10_9.configure(text='''V 1 . 0 . 2''')
self.Label10_9.configure(width=84)
self.btn_back = tk.Button(top)
self.btn_back.place(relx=0.716, rely=0.939, height=34, width=107)
self.btn_back.configure(activebackground="#ececec")
self.btn_back.configure(activeforeground="#000000")
self.btn_back.configure(background="#387738")
self.btn_back.configure(borderwidth="10")
self.btn_back.configure(disabledforeground="#a3a3a3")
self.btn_back.configure(font="-family {Sky Skunk} -size 12")
self.btn_back.configure(foreground="#f5fcff")
self.btn_back.configure(highlightbackground="#d9d9d9")
self.btn_back.configure(highlightcolor="black")
self.btn_back.configure(pady="0")
self.btn_back.configure(text='''BACK''')
self.btn_back.configure(width=107,command=self.backs)
if __name__ == '__main__':
vp_start_gui()
|
'''
Code to evaluate a particular trained network. TODO: format the results here
so they can be presented as tables in the paper.
'''
import sklearn.metrics as skmetrics
from collections import Counter
from util import get_window
class Eval:
'''
Methods to evaluate different models.
'''
def __init__(self, test_data):
'''
Arguments:
test_data (util.MetaphorDataTest): test data
'''
self.test_data = test_data
self.test_df = test_data.test_df
self.test_sentences = self.test_data.text_sentences
self.false_negatives = self.test_df[['text', 'word']][
(self.test_data.predicted_is_metaphor_vec == 0) &
(self.test_data.is_metaphor_vec == 1)
]
self.false_positives = self.test_df[['text', 'word']][
(self.test_data.predicted_is_metaphor_vec == 1) &
(self.test_data.is_metaphor_vec == 0)
]
self.true_negatives = self.test_df[['text', 'word']][
(self.test_data.predicted_is_metaphor_vec == 0) &
(self.test_data.is_metaphor_vec == 0)
]
self.true_positives = self.test_df[['text', 'word']][
(self.test_data.predicted_is_metaphor_vec == 1) &
(self.test_data.is_metaphor_vec == 1)
]
self.true = self.test_data.is_metaphor_vec
self.pred = self.test_data.predicted_is_metaphor_vec
# Compute performance measures.
# http://scikit-learn.org/stable/modules/model_evaluation.html
self.confusion_matrix = \
skmetrics.confusion_matrix(self.true, self.pred)
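        # For binary labels, sklearn's confusion matrix is [[tn, fp], [fn, tp]],
        # so ravel() unpacks as (tn, fp, fn, tp).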
tn, fp, fn, tp = self.confusion_matrix.ravel()
self.accuracy = (tp + tn) / (tp + tn + fp + fn)
self.precision = tp / (tp + fp) # how often a positive is true pos.
self.sensitivity = tp / (tp + fn) # recall, true positive rate
self.specificity = tn / (tn + fp) # true negative rate
self.auc = skmetrics.roc_auc_score(
self.true, self.test_data.probabilities[:, 1]
)
def word_counts(self, n_most_common=10):
'''
For both the false negatives and false positives, create word counts
Returns:
(dict): keyed by false_{negative,positive} true_{negative,positive}
with word counts as values
'''
def count_words(df, n_most_common):
'Count of most common words in window of text/focal_word'
# Compact way to flatten list of all windowings and do word count.
words = (
word
                for row in df.to_numpy()  # .as_matrix() was removed in newer pandas versions
for word in get_window(row[0], row[1], 5)
)
return Counter(words).most_common(n_most_common)
return {
k: count_words(getattr(self, k), n_most_common)
for k in [
'false_negatives',
'false_positives',
'true_negatives',
'true_positives'
]
}
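# Minimal usage sketch (not part of the original module). It assumes a
# util.MetaphorDataTest object exposing the attributes used above (test_df,
# text_sentences, is_metaphor_vec, predicted_is_metaphor_vec, probabilities);
# the constructor call below is purely hypothetical.
#
#     from util import MetaphorDataTest
#     test_data = MetaphorDataTest('test.csv')   # hypothetical constructor
#     ev = Eval(test_data)
#     print('accuracy:', ev.accuracy, 'AUC:', ev.auc)
#     print(ev.word_counts(n_most_common=5))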
|
#!/usr/bin/env python
# coding=utf-8
"""
A sample application for cmd2.
This example is very similar to example.py, but has additional
code in main() that shows how to accept a command from
the command line at invocation:
$ python cmd_as_argument.py speak -p hello there
"""
import argparse
import random
import cmd2
class CmdLineApp(cmd2.Cmd):
"""Example cmd2 application."""
# Setting this true makes it run a shell command if a cmd2/cmd command doesn't exist
# default_to_shell = True
MUMBLES = ['like', '...', 'um', 'er', 'hmmm', 'ahh']
MUMBLE_FIRST = ['so', 'like', 'well']
MUMBLE_LAST = ['right?']
def __init__(self):
shortcuts = dict(cmd2.DEFAULT_SHORTCUTS)
shortcuts.update({'&': 'speak'})
# Set include_ipy to True to enable the "ipy" command which runs an interactive IPython shell
super().__init__(allow_cli_args=False, include_ipy=True, multiline_commands=['orate'], shortcuts=shortcuts)
self.self_in_py = True
self.maxrepeats = 3
# Make maxrepeats settable at runtime
self.add_settable(cmd2.Settable('maxrepeats', int, 'max repetitions for speak command', self))
speak_parser = argparse.ArgumentParser()
speak_parser.add_argument('-p', '--piglatin', action='store_true', help='atinLay')
speak_parser.add_argument('-s', '--shout', action='store_true', help='N00B EMULATION MODE')
speak_parser.add_argument('-r', '--repeat', type=int, help='output [n] times')
speak_parser.add_argument('words', nargs='+', help='words to say')
@cmd2.with_argparser(speak_parser)
def do_speak(self, args):
"""Repeats what you tell me to."""
words = []
for word in args.words:
if args.piglatin:
word = '%s%say' % (word[1:], word[0])
if args.shout:
word = word.upper()
words.append(word)
repetitions = args.repeat or 1
for i in range(min(repetitions, self.maxrepeats)):
# .poutput handles newlines, and accommodates output redirection too
self.poutput(' '.join(words))
do_say = do_speak # now "say" is a synonym for "speak"
do_orate = do_speak # another synonym, but this one takes multi-line input
mumble_parser = argparse.ArgumentParser()
mumble_parser.add_argument('-r', '--repeat', type=int, help='how many times to repeat')
mumble_parser.add_argument('words', nargs='+', help='words to say')
@cmd2.with_argparser(mumble_parser)
def do_mumble(self, args):
"""Mumbles what you tell me to."""
repetitions = args.repeat or 1
for i in range(min(repetitions, self.maxrepeats)):
output = []
if random.random() < 0.33:
output.append(random.choice(self.MUMBLE_FIRST))
for word in args.words:
if random.random() < 0.40:
output.append(random.choice(self.MUMBLES))
output.append(word)
if random.random() < 0.25:
output.append(random.choice(self.MUMBLE_LAST))
self.poutput(' '.join(output))
def main(argv=None):
"""Run when invoked from the operating system shell"""
parser = argparse.ArgumentParser(description='Commands as arguments')
command_help = 'optional command to run, if no command given, enter an interactive shell'
parser.add_argument('command', nargs='?', help=command_help)
arg_help = 'optional arguments for command'
parser.add_argument('command_args', nargs=argparse.REMAINDER, help=arg_help)
args = parser.parse_args(argv)
c = CmdLineApp()
sys_exit_code = 0
if args.command:
# we have a command, run it and then exit
c.onecmd_plus_hooks('{} {}'.format(args.command, ' '.join(args.command_args)))
else:
# we have no command, drop into interactive mode
sys_exit_code = c.cmdloop()
return sys_exit_code
if __name__ == '__main__':
import sys
sys.exit(main())
|
from pynotifier import Notification
from .ims import IMS
from threading import Timer, Thread
import time
import pandas as pd
class Bot:
def __init__(self, id, pw, interval = 5 * 60, alert_types = ['new']):
self.ims = IMS(id, pw)
self.interval = interval
self.init_variables()
self.alert_types = alert_types
def init_variables(self):
self.is_running_job = False
self.is_start = False
self.first_run = True
self.previous_data = pd.DataFrame([])
self.callback = self.send_noti
def set_callback(self, callback):
self.callback = callback
def reset_callback(self):
self.callback = self.send_noti
def send_noti(self, changed, _type):
# print(changed)
Notification(
title='(' + _type + ') ' + changed['Issue Number'],
description=changed['Subject'],
duration=5,
urgency=Notification.URGENCY_CRITICAL
).send()
def job(self):
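        # Fetch the current issue list and diff it against the previous snapshot:
        # fire the callback for issue numbers that appeared ('new') or disappeared
        # ('delete'), then store the new snapshot.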
data = self.ims.fetch()
if self.first_run:
self.first_run = False
self.previous_data = data
# new test
old = self.previous_data['Issue Number'].values.tolist()
new = data['Issue Number'].values.tolist()
if 'new' in self.alert_types:
for issue_number in new:
if issue_number not in old:
row = pd.DataFrame(data[data['Issue Number'].isin([issue_number])])
self.callback(row.iloc[0], 'new')
# delete test
if 'delete' in self.alert_types:
for issue_number in old:
if issue_number not in new:
row = pd.DataFrame(self.previous_data[self.previous_data['Issue Number'].isin([issue_number])])
self.callback(row.iloc[0], 'delete')
self.previous_data = data
# change test
# compare = data.compare(self.previous_data)
# if len(compare) > 0:
# changed_list = []
# for ind in compare.index:
# changed_list.append(data.iloc[ind])
# self.callback(changed_list)
def run(self):
self.is_running_job = True
self.job()
self.is_running_job = False
if self.is_start:
self.t = Timer(self.interval, self.run)
self.t.start()
def start(self):
self.is_start = True
self.run()
    def stop(self):
        # Clearing is_start stops a currently running job from rescheduling itself;
        # if no job is running, also cancel the pending timer (if one was scheduled).
        self.is_start = False
        if not self.is_running_job and hasattr(self, 't'):
            self.t.cancel()
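# Minimal usage sketch (illustrative, not part of the original module). It assumes
# IMS(id, pw).fetch() returns a DataFrame with 'Issue Number' and 'Subject' columns,
# which is what job() relies on; the credentials and interval below are placeholders.
#
#     bot = Bot('my_id', 'my_password', interval=300, alert_types=['new', 'delete'])
#     bot.start()            # runs job() immediately, then every `interval` seconds
#     time.sleep(3600)
#     bot.stop()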
|
import random
from collections import defaultdict
import numpy as np
class Member:
def __init__(self, r_d, label=None, doc_id=None):
self._r_d = r_d
self._label = label
self._doc_id = doc_id
class Cluster:
def __init__(self):
self._centroid = None
self._members = []
def reset_members(self):
self._members = []
def add_member(self, member):
self._members.append(member)
class Kmeans:
def __init__(self, num_clusters):
self._num_clusters = num_clusters
self._clusters = [Cluster() for _ in range(self._num_clusters)]
self._E = []
self._S = 0
def load_data(self, data_path):
def sparse_to_dense(sparse_r_d, vocab_size):
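            # sparse_r_d is a string of "index:tfidf" pairs (e.g. "12:0.031 57:0.220");
            # expand it into a dense vector of length vocab_size.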
r_d = [0.0 for _ in range(vocab_size)]
indices_tfidfs = sparse_r_d.split()
for index_tfidf in indices_tfidfs:
index = int(index_tfidf.split(':')[0])
tfidf = float(index_tfidf.split(':')[1])
r_d[index] = tfidf
return np.array(r_d)
with open(data_path) as f:
d_lines = f.read().splitlines()
with open('../datasets/20news-bydate/words_idfs.txt') as f:
vocab_size = len(f.read().splitlines())
self._data = []
self._label_count = defaultdict(int)
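        # Each line has the form "<label><fff><doc_id><fff><sparse tf-idf vector>".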
for data_id, d in enumerate(d_lines):
features = d.split('<fff>')
label, doc_id = int(features[0]), int(features[1])
self._label_count[label] += 1
r_d = sparse_to_dense(sparse_r_d=features[2], vocab_size=vocab_size)
self._data.append(Member(r_d=r_d, label=label, doc_id=doc_id))
def random_init(self, seed_value):
# Normal
random.seed(seed_value)
for i in range(self._num_clusters):
            self._clusters[i]._centroid = self._data[random.randrange(len(self._data))]._r_d  # randrange avoids randint's out-of-range upper bound
def random_init_kmeans_plus(self, seed_value):
# Kmeans++
random.seed(seed_value)
# Initialize one point at random.
self._clusters[0]._centroid = self._data[random.randrange(len(self._data))]._r_d
num_centroids_chosen = 1
# Calculate for each point the distance of the point from its nearest center. Sample a point with the largest
# probability proportional to the square of the distance of the point from its nearest center
for i in range(1, self._num_clusters):
            # compute_similarity is a similarity (higher = closer), so a point's nearest
            # centroid is the one with the maximum similarity; pick as the next centroid
            # the point whose nearest centroid is least similar to it (the farthest point).
            nearest_sim = [np.max(np.array([self.compute_similarity(self._data[member_id], self._clusters[cluster_id]._centroid)
                                            for cluster_id in range(num_centroids_chosen)]))
                           for member_id in range(len(self._data))]
            self._clusters[i]._centroid = self._data[np.argmin(np.array(nearest_sim))]._r_d
num_centroids_chosen += 1
def compute_similarity(self, member, centroid):
# Using Cosine similarity:
numerator = np.sum(member._r_d * centroid)
denominator = np.sqrt(np.sum(member._r_d * member._r_d) * np.sum(centroid * centroid))
return numerator / denominator
def select_cluster_for(self, member):
best_fit_cluster = None
max_similarity = -1
for cluster in self._clusters:
similarity = self.compute_similarity(member, cluster._centroid)
if similarity > max_similarity:
best_fit_cluster = cluster
max_similarity = similarity
best_fit_cluster.add_member(member)
return max_similarity
def update_centroid_of(self, cluster):
member_r_ds = [member._r_d for member in cluster._members]
aver_r_d = np.mean(member_r_ds, axis=0)
sqrt_sum_sqr = np.sqrt(np.sum(aver_r_d ** 2))
new_centroid = np.array([value / sqrt_sum_sqr for value in aver_r_d])
cluster._centroid = new_centroid
def stopping_condition(self, criterion, threshold):
criteria = ['centroid', 'similarity', 'max_iters']
assert criterion in criteria
if criterion == 'max_iters':
if self._iteration >= threshold:
return True
else:
return False
elif criterion == 'centroid':
E_new = [list(cluster._centroid) for cluster in self._clusters]
E_new_minus_E = [centroid for centroid in E_new
if centroid not in self._E]
self._E = E_new
if len(E_new_minus_E) <= threshold:
return True
else:
return False
else:
new_S_minus_S = self._new_S - self._S
self._S = self._new_S
if new_S_minus_S <= threshold:
return True
else:
return False
def run(self, seed_value, criterion, threshold):
self.random_init(seed_value)
# continually update clusters until convergence
self._iteration = 0
while True:
# reset clusters, retain only centroids
for cluster in self._clusters:
cluster.reset_members()
self._new_S = 0
for member in self._data:
max_s = self.select_cluster_for(member)
self._new_S += max_s
for cluster in self._clusters:
self.update_centroid_of(cluster)
self._iteration += 1
if self.stopping_condition(criterion, threshold):
break
def compute_purity(self):
majority_sum = 0
        for cluster in self._clusters:
            member_labels = [member._label for member in cluster._members]
max_count = max([member_labels.count(label) for label in range(20)])
majority_sum += max_count
return majority_sum * 1. / len(self._data)
def compute_NMI(self):
I_value, H_omega, H_C, N = 0., 0., 0., len(self._data)
for cluster in self._clusters:
wk = len(cluster._members) * 1.
H_omega += - wk / N * np.log10(wk / N)
member_labels = [member._label for member in cluster._members]
for label in range(20):
wk_cj = member_labels.count(label) * 1.
cj = self._label_count[label]
I_value += wk_cj / N * \
np.log10(N * wk_cj / (wk * cj) + 1e-12)
for label in range(20):
cj = self._label_count[label] * 1.
H_C += -cj / N * np.log10(cj / N)
return I_value * 2. / (H_omega + H_C)
if __name__ == '__main__':
kmeans = Kmeans(num_clusters=10)
kmeans.load_data(data_path='../datasets/20news-bydate/data_tf_idf.txt')
kmeans.run(seed_value=2020, criterion='centroid', threshold=2)
print('Purity:', kmeans.compute_purity()) # 0.3808
print('NMI:', kmeans.compute_NMI()) # 0.525
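    # A k-means++ style start is also available; a minimal sketch (same data,
    # calling the initializer directly, since run() uses random_init()):
    #     kmeans_pp = Kmeans(num_clusters=10)
    #     kmeans_pp.load_data(data_path='../datasets/20news-bydate/data_tf_idf.txt')
    #     kmeans_pp.random_init_kmeans_plus(seed_value=2020)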
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import logging
import sys
from django.utils.functional import wraps
from desktop.lib.django_util import render
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.rest.http_client import RestException
from sqoop.api.exception import handle_rest_exception
from sqoop import client, conf
if sys.version_info[0] > 2:
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
__all__ = ['get_job_or_exception']
LOG = logging.getLogger(__name__)
def get_connector_or_exception(exception_class=PopupException):
def inner(view_func):
def decorate(request, connector_id, *args, **kwargs):
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE, ssl_cert_ca_verify=conf.SSL_CERT_CA_VERIFY.get())
connector = c.get_connector(int(connector_id))
except RestException as e:
handle_rest_exception(e, _('Could not get connector.'))
return view_func(request, connector=connector, *args, **kwargs)
return wraps(view_func)(decorate)
return inner
def get_link_or_exception(exception_class=PopupException):
def inner(view_func):
def decorate(request, link_id, *args, **kwargs):
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE, ssl_cert_ca_verify=conf.SSL_CERT_CA_VERIFY.get())
link = c.get_link(int(link_id))
except RestException as e:
handle_rest_exception(e, _('Could not get link.'))
return view_func(request, link=link, *args, **kwargs)
return wraps(view_func)(decorate)
return inner
def get_job_or_exception(exception_class=PopupException):
def inner(view_func):
def decorate(request, job_id, *args, **kwargs):
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE, ssl_cert_ca_verify=conf.SSL_CERT_CA_VERIFY.get())
job = c.get_job(int(job_id))
except RestException as e:
handle_rest_exception(e, _('Could not get job.'))
return view_func(request, job=job, *args, **kwargs)
return wraps(view_func)(decorate)
return inner
def get_submission_or_exception(exception_class=PopupException):
def inner(view_func):
def decorate(request, submission_id, *args, **kwargs):
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE, ssl_cert_ca_verify=conf.SSL_CERT_CA_VERIFY.get())
submission = c.get_submission(int(submission_id))
except RestException as e:
handle_rest_exception(e, _('Could not get submission.'))
return view_func(request, submission=submission, *args, **kwargs)
return wraps(view_func)(decorate)
return inner
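# Illustrative usage (hypothetical view, not part of this module): each decorator
# factory above resolves the object from the URL kwarg and hands it to the view
# as a keyword argument, e.g.
#
#     @get_job_or_exception()
#     def job_show(request, job=None):
#         ...  # `job` is the sqoop job fetched for the requested job_id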
|
from app import db
class Position(db.Model):
id = db.Column(db.Integer, primary_key=True)
account_id = db.Column(db.Integer, db.ForeignKey('account.id'), nullable=False)
stock_id = db.Column(db.Integer, db.ForeignKey('stock.id'), nullable=False)
cost_basis = db.Column(db.Float(decimal_return_scale=2), nullable=False)
quantity = db.Column(db.Integer, nullable=False)
account = db.relationship('Account')
stock = db.relationship('Stock')
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.HomePageView.as_view(), name='index'),
url(r'^good/$', views.GoodView.as_view(), name='good'),
url(r'^bad/$', views.BadView.as_view(), name='bad'),
]
|
import os
from . import minneapolis_park_board
from . import migrated
from . import mn_state
def _load_minneapolis_park_board(resource_path):
results = []
path = os.path.join(resource_path, 'minneapolis_park_board')
for year in range(2007, 2018):
filename = '{0}.csv'.format(year)
full_path = os.path.join(path, filename)
results.extend(minneapolis_park_board.etl(full_path, year))
return results
def _load_migrated(resource_path):
results = []
path = os.path.join(resource_path, 'migrated')
for file_number in range(1, 19):
filename = 'file_{0}.json'.format(file_number)
full_path = os.path.join(path, filename)
results.extend(migrated.etl(full_path))
return results
def _load_mn_state(resource_path):
results = []
path = os.path.join(resource_path, 'mn_state')
for year in [2016, 2017]:
filename = 'fiscal-year-{0}.xlsx'.format(year)
full_path = os.path.join(path, filename)
results.extend(mn_state.etl(full_path, year))
return results
def load_all(resource_path):
results = []
results.extend(_load_mn_state(resource_path))
results.extend(_load_minneapolis_park_board(resource_path))
results.extend(_load_migrated(resource_path))
return list(set(results))
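# Expected layout under `resource_path`, as implied by the loaders above:
#   minneapolis_park_board/2007.csv ... 2017.csv
#   migrated/file_1.json ... file_18.json
#   mn_state/fiscal-year-2016.xlsx, fiscal-year-2017.xlsx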
|
import boto3
import base64
def decrypt(event, context):
encrypted = bytes(event['secret'], 'ascii')
decoded = base64.b64decode(encrypted)
kms = boto3.client('kms')
decrypted = kms.decrypt(CiphertextBlob=decoded)
return decrypted['Plaintext']
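# Illustrative round trip (hypothetical key alias; not part of the handler):
#
#     ciphertext = boto3.client('kms').encrypt(
#         KeyId='alias/my-key', Plaintext=b'hunter2')['CiphertextBlob']
#     event = {'secret': base64.b64encode(ciphertext).decode('ascii')}
#     decrypt(event, None)   # -> b'hunter2'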
|
import os
import django
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "uzbLoOIYlJnzGDYlUfynNyocjZH9NLSc3AAREwLDaugQkCzsQn"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'OPTIONS': {
}
}
}
if os.environ.get('GITHUB_WORKFLOW', False):
DATABASE_ENGINE = os.environ.get('DATABASE_ENGINE', 'sqlite')
if 'mysql' in DATABASE_ENGINE:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'test',
'USER': 'root',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': '3306',
},
}
elif 'postgres' in DATABASE_ENGINE:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': '127.0.0.1',
'PORT': '5432',
},
}
ROOT_URLCONF = "tests.urls"
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"django_scrubber",
]
SITE_ID = 1
if django.VERSION >= (1, 10):
MIDDLEWARE = ()
else:
MIDDLEWARE_CLASSES = ()
|
import sys
import os
# make the parent directory importable before importing the package
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import automl_data_processing as app
|
import os
import cv2
import numpy as np
def draw_output(all_matches, obs_matches, kp1, output):
""" Writes output image to file.
Params:
all_matches: All matches found from Orb matcher.
obs_matches: Matches after matches have been filtered.
kp1: Keypoints from image 1 (current image).
output: The keypoints will be drawn on output.
"""
for m in all_matches:
c, r = kp1[m.queryIdx].pt
cv2.circle(output, (int(c), int(r)), int(
kp1[m.queryIdx].size), (255, 255, 255), thickness=1)
for m in obs_matches:
c, r = kp1[m.queryIdx].pt
cv2.circle(output, (int(c), int(r)), int(
kp1[m.queryIdx].size), (0, 0, 255), thickness=3)
cv2.imwrite("./output/output.jpg", output)
def show_kp(image1, kp1, image2, kp2, matches, tname, output_folder):
""" Creates grid to show top 10 keypoints between two images.
Params:
image1: First image.
kp1: Keypoints associated with image1.
image2: Second image.
kp2: Keypoints associated with image2.
matches: All the matches between image1 and image2.
tname: Name for the table (grid).
output_folder: Folder name to output grid.
"""
# create grid, to place 20 images, 10x2
# each image is a square, dxd, dimensions of grid is d*10 x d*2
d = 300
y1bnd, x1bnd, _ = image1.shape
y2bnd, x2bnd, _ = image2.shape
grid = np.zeros((d * 10, d * 2, 3))
# take top 10
matches = matches[:10]
for i, m in enumerate(matches):
img1 = image1.copy()
img2 = image2.copy()
tmp1 = np.zeros((d, d))
tmp2 = np.zeros((d, d))
x1, y1 = (int(kp1[m.queryIdx].pt[0]), int(kp1[m.queryIdx].pt[1]))
x2, y2 = (int(kp2[m.trainIdx].pt[0]), int(kp2[m.trainIdx].pt[1]))
cv2.circle(img1, (x1, y1), 5, (0, 0, 255), thickness=-1)
cv2.circle(img2, (x2, y2), 5, (0, 0, 255), thickness=-1)
        # use range() and integer division so slice indices stay ints under Python 3
        for channel in range(3):
            # calculate offsets for image 1
            t1 = abs(np.min([y1 - d // 2, 0]))
            b1 = np.max([y1 + d // 2 - y1bnd, 0])
            l1 = abs(np.min([x1 - d // 2, 0]))
            r1 = np.max([x1 + d // 2 - x1bnd, 0])
            # calculate offsets for image 2
            t2 = abs(np.min([y2 - d // 2, 0]))
            b2 = np.max([y2 + d // 2 - y2bnd, 0])
            l2 = abs(np.min([x2 - d // 2, 0]))
            r2 = np.max([x2 + d // 2 - x2bnd, 0])
            tmp1[t1:d - b1, l1:d - r1] = img1[np.max(
                [y1 - d // 2, 0]):np.min([y1 + d // 2, y1bnd]),
                np.max([x1 - d // 2, 0]):np.min([x1 + d // 2, x1bnd]),
                channel]
            tmp2[t2:d - b2, l2:d - r2] = img2[np.max(
                [y2 - d // 2, 0]):np.min([y2 + d // 2, y2bnd]),
                np.max([x2 - d // 2, 0]):np.min([x2 + d // 2, x2bnd]),
                channel]
            # add to main grid to return
            grid[(d * i):(d * i + d), :d, channel] = tmp1
            grid[(d * i):(d * i + d), d:, channel] = tmp2
cv2.imwrite(
os.path.join(output_folder, 'grid_{}.jpg'.format(tname)), grid)
def draw_matches(image_1, image_1_keypoints, image_2, image_2_keypoints, matches):
""" Draws the matches between the image_1 and image_2.
(Credit: GT CP2017 course provided source)
Params:
image_1: The first image (can be color or grayscale).
image_1_keypoints: The image_1 keypoints.
image_2: The image to search in (can be color or grayscale)
image_2_keypoints: The image_2 keypoints.
Returns:
output: Image with a line drawn between matched keypoints.
"""
# Compute number of channels.
num_channels = 1
if len(image_1.shape) == 3:
num_channels = image_1.shape[2]
# Separation between images.
margin = 10
# Create an array that will fit both images (with a margin of 10 to
# separate the two images)
joined_image = np.zeros((max(image_1.shape[0], image_2.shape[0]),
image_1.shape[1] + image_2.shape[1] + margin,
3))
if num_channels == 1:
for channel_idx in range(3):
joined_image[:image_1.shape[0],
:image_1.shape[1],
channel_idx] = image_1
joined_image[:image_2.shape[0],
image_1.shape[1] + margin:,
channel_idx] = image_2
else:
joined_image[:image_1.shape[0], :image_1.shape[1]] = image_1
joined_image[:image_2.shape[0], image_1.shape[1] + margin:] = image_2
for match in matches:
image_1_point = (int(image_1_keypoints[match.queryIdx].pt[0]),
int(image_1_keypoints[match.queryIdx].pt[1]))
image_2_point = (int(image_2_keypoints[match.trainIdx].pt[0] +
image_1.shape[1] + margin),
int(image_2_keypoints[match.trainIdx].pt[1]))
        # np.int was removed from NumPy; pass the colour as a plain tuple of ints
        rgb = tuple(int(c) for c in np.random.rand(3) * 255)
cv2.circle(joined_image, image_1_point, 5, rgb, thickness=-1)
cv2.circle(joined_image, image_2_point, 5, rgb, thickness=-1)
cv2.line(joined_image, image_1_point, image_2_point, rgb, thickness=3)
return joined_image
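# Illustrative pipeline (hypothetical file names, not part of the original module):
#
#     img1, img2 = cv2.imread('frame1.jpg'), cv2.imread('frame2.jpg')
#     orb = cv2.ORB_create()
#     kp1, des1 = orb.detectAndCompute(img1, None)
#     kp2, des2 = orb.detectAndCompute(img2, None)
#     matcher = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
#     matches = sorted(matcher.match(des1, des2), key=lambda m: m.distance)
#     cv2.imwrite('matches.jpg', draw_matches(img1, kp1, img2, kp2, matches[:20]))
#     draw_output(matches, matches[:20], kp1, img1.copy())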
|
# netchat session object
import pexpect
import sys
from .exception import ParameterError
from .constant import status, spawn
from .script import Script
class Session():
"""connect to a listening TCP port and perform expect/send interaction
:param: address: (host, port) for TCP connection
:type: address: tuple
:param: script: script composed of EXPECT,SEND element pairs as string or Script
:type: script: str/Script
:param: wait_timeout: seconds before an EXPECT wait will return TIMEOUT, defaults to Infinite
:type: wait_timeout: int, optional
:param: out: stream for writing receive data from the connection, defaults to stdout
:type: out: file-type, optional
:param: err: stream for writing diagnostic and status messages, defaults to stderr
:type: out: file-type, optional
:param: events: a list of status events for which diagnostics should be emitted
:type: events: netchat.status, optional
:param: spawn_type: type of subprocess used for TCP connection (spawn.internal, spawn.socat, spawn.nc)
:type: spawn_type: netchat.spawn
..note:: ``script`` can be a ``Script`` or a string
..note:: a chat script is composed of one or more pairs of two-word elements separated by whitespace
in the format ``EXPECT [SEND] [EXPECT [SEND]]...``
- elements may be exact-match strings or regular expressions
- elements are separated by whitespace and may be quote-delimited
- a single element is treated as an EXPECT
- an EXPECT without a SEND will have a null SEND appended
"""
def __init__(
self, address, script, *, wait_timeout=None, out=sys.stdout, err=sys.stderr, events=[status.EXPECT,status.SEND], spawn_type=spawn.internal
):
"""constructor"""
address, port = address
if isinstance(script, str):
self.script = Script(script=script)
elif isinstance(script, Script):
self.script = script
else:
raise ParameterError(f'script must be of type {str} or {Script}')
self.wait_timeout = wait_timeout
self.out = out
self.err = err
self.events = events
if spawn_type == spawn.socat:
self.command = f'socat stdio tcp4-connect:{address}:{port}'
elif spawn_type == spawn.nc:
self.command = f'nc {address} {port}'
elif spawn_type == spawn.internal:
self.command = f'python3 -m netchat {address}:{port} --subprocess'
else:
raise ParameterError(f'invalid spawn_type {spawn_type}')
def run(self, callback=None):
"""connect to the server and iterate through the script, waiting for EXPECT and sending SEND
:param: callback: function to be called on state change events
:type: callback: callback_function(netchat.status, data)
:return: EOF, TIMEOUT, or DONE
:rtype: netchat.state
"""
with Handler(self.command, self.wait_timeout, self.out, self.err, self.events, callback) as handler:
for step in self.script:
try:
handler.expect(step.expect)
handler.send(step.send)
except pexpect.exceptions.EOF as ex:
return handler.event(status.EOF)
except pexpect.exceptions.TIMEOUT as ex:
return handler.event(status.TIMEOUT)
return status.DONE
class Handler():
"""context manager for the pexpect subprocess
:param: command: command line for the spawned subprocess
:type: command: str
:param: timeout: expect timeout
:type: timeout: int
:param: out: stream for writing connection receive data
:type: out: file-type
:param: err: stream for writing diagnostic messages
:type: out: file-type
:param: events: list of desired status change diagnostics
:type: events: status
:param: callback: function to be called on state change events
:type: callback: function
"""
def __init__(self, command, timeout, out, err, events, callback):
self.command = command
self.timeout = timeout
self.out = out
self.err = err
self.events = events
self.callback = callback
def __enter__(self):
self.event(status.CONNECTING)
self.child = pexpect.spawn(self.command, encoding='utf-8', timeout=self.timeout, logfile=self.out, echo=False)
self.event(status.CONNECTED)
return self
def __exit__(self, _, exception, traceback):
if self.child.isalive():
self.child.terminate()
self.event(status.CLOSED)
return False
def event(self, event, data=None):
if event in self.events:
if self.err:
if data:
self.err.write(f"{str(event)} {repr(data)}\n")
else:
self.err.write(f"{str(event)}\n")
if self.callback:
self.callback(event, data)
return event
def expect(self, data):
if data:
self.event(status.EXPECT, data)
ret = self.child.expect(data)
self.event(status.FOUND, data)
else:
self.event(status.EXPECT_SKIPPED)
def send(self, data):
if data:
self.event(status.SEND, data)
self.child.sendline(data)
self.event(status.SENT, data)
else:
self.event(status.SEND_SKIPPED)
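# Illustrative usage (placeholder host/port and prompts, not part of this module):
#
#     session = Session(('localhost', 2323), '"login:" admin "password:" secret',
#                       wait_timeout=10, spawn_type=spawn.nc)
#     result = session.run()   # returns status.DONE, status.EOF or status.TIMEOUT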
|
import sys
import filetype
import magic
import shutil
import os
import xml.etree.ElementTree as ET
import base64
import datetime
from Crypto.Cipher import AES
from Crypto.Util.Padding import unpad
from pathlib import Path
from os.path import isfile, join, basename, dirname, getsize, abspath
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, media_to_html
def get_playgroundVault(files_found, report_folder, seeker, wrap_text):
data_list = []
for file_found in files_found:
file_found = str(file_found)
#filesize = (getsize(file_found))
if not isfile(file_found):
continue
filename = basename(file_found)
if filename.startswith('._'):
continue
if filename.startswith('crypto.KEY_256.xml'):
tree = ET.parse(file_found)
root = tree.getroot()
key = base64.b64decode(root.findall('./string[@name="cipher_key"]')[0].text)
logfunc(f'Encryption key found: {key}')
for file_found in files_found:
file_found = str(file_found)
if not isfile(file_found):
continue
filename = basename(file_found)
if filename.startswith('._'):
continue
if filename.startswith('crypto.KEY_256.xml'):
continue
with open (file_found, 'rb') as openFile:
#print('Atttempting to decrypt...')
fullFile = openFile.read()
### IV is after the first 2 bytes
IV = fullFile[2:14]
### Following IV encrypted data minus the GCM validation (16 bytes) at the end
encryptedData = fullFile[14:-16]
### New encryption algo
cipher = AES.new((key), AES.MODE_GCM, (IV))
### Decrypt the data
decryptedData = cipher.decrypt((encryptedData))
### Determine the correct file extension
fileExtension = filetype.guess(decryptedData)
### Open the new output file for writing
with open (join(report_folder, basename(file_found)) , 'wb') as decryptedFile:
decryptedFile.write(decryptedData)
decryptedFile.close()
tolink = []
pathdec = join(report_folder, basename(file_found))
tolink.append(pathdec)
thumb = media_to_html(pathdec, tolink, report_folder)
filename = basename(file_found)
if 'EIF' in filename:
utctime = filename.split('EIF')
enctimestamp = datetime.datetime.fromtimestamp(int(utctime[1]) / 1000)
elif 'EVF' in filename:
utctime = filename.split('EVF')
enctimestamp = datetime.datetime.fromtimestamp(int(utctime[1]) / 1000)
else:
enctimestamp = ''
data_list.append((thumb, filename, enctimestamp, file_found))
if data_list:
report = ArtifactHtmlReport('Playground Vault')
report.start_artifact_report(report_folder, 'Playground Vault')
report.add_script()
data_headers = ('Media', 'Filename', 'Encrypted On Timestamp', 'Full Path')
maindirectory = str(Path(file_found).parents[1])
report.write_artifact_data_table(data_headers, data_list, maindirectory, html_no_escape=['Media'])
report.end_artifact_report()
tsvname = f'Playground Vault'
tsv(report_folder, data_headers, data_list, tsvname)
else:
logfunc('No Playground Vault data available')
|
import os
import pytest
from pytest_insta import SnapshotFixture
from beet.toolchain.config import load_config
@pytest.mark.parametrize("directory", os.listdir("tests/config_examples"))
def test_config_resolution(snapshot: SnapshotFixture, directory: str):
project_config = load_config(f"tests/config_examples/{directory}/beet.json")
assert snapshot("beet-config") == project_config
|
# coding=utf-8
# @Time: 2022/1/12 15:42
# @Author: forevermessi@foxmail.com
|
"""Test cases for io.py."""
import json
from ..unittest.util import get_test_file
from .io import dump, group_and_sort, load, parse
from .typing import Frame
def test_parse() -> None:
"""Test parse label string."""
raw = json.loads(
'{"name": 1, "videoName": "a", "size": {"width": 10, "height": 20}, '
'"labels":[{"id": 1, "box2d": '
'{"x1": 1, "y1": 2, "x2": 3, "y2": 4}, "ignored": true, '
'"attributes": {"trafficLightColor": "G", "speed": 10}}]}'
)
frame = parse(raw)
assert frame.name == "1"
assert frame.videoName == "a"
assert isinstance(frame.labels, list)
labels = frame.labels
assert isinstance(labels, list)
assert len(labels) == 1
label = labels[0] # pylint: disable=unsubscriptable-object
assert label.id == "1"
assert label.attributes is not None
assert label.attributes["trafficLightColor"] == "G"
assert label.attributes["speed"] == 10.0
b = label.box2d
assert b is not None
assert b.y2 == 4
def test_load() -> None:
"""Test loading labels."""
filepath = get_test_file("image_list_with_auto_labels.json")
def assert_correctness(inputs: str, nprocs: int) -> None:
frames = load(inputs, nprocs).frames
assert len(frames) == 10
assert (
frames[0].url == "https://s3-us-west-2.amazonaws.com/bdd-label/"
"bdd100k/frames-20000/val/c1ba5ee6-b2cb1e51.jpg"
)
assert frames[0].frameIndex == 0
assert frames[-1].frameIndex == 9
assert frames[0].labels is not None
assert frames[-1].labels is not None
assert frames[0].labels[0].id == "0"
assert frames[0].labels[0].box2d is not None
assert frames[-1].labels[-1].box2d is not None
box = frames[-1].labels[-1].box2d
assert box.x1 == 218.7211456298828
assert box.x2 == 383.5201416015625
assert box.y1 == 362.24761962890625
assert box.y2 == 482.4760437011719
assert frames[0].labels[0].poly2d is not None
polys = frames[0].labels[0].poly2d
assert isinstance(polys, list)
poly = polys[0]
assert len(poly.vertices) == len(poly.types)
assert len(poly.vertices[0]) == 2
for char in poly.types:
assert char in ["C", "L"]
assert_correctness(filepath, nprocs=0)
assert_correctness(filepath, nprocs=2)
def test_load_graph() -> None:
"""Test loading labels."""
filepath = get_test_file("image_list_with_auto_labels_graph.json")
def assert_correctness(inputs: str, nprocs: int) -> None:
frames = load(inputs, nprocs).frames
assert len(frames) == 10
assert (
frames[0].url == "https://s3-us-west-2.amazonaws.com/bdd-label/"
"bdd100k/frames-20000/val/c1ba5ee6-b2cb1e51.jpg"
)
assert frames[2].frameIndex == 2
assert frames[4].frameIndex == 4
assert frames[2].labels is not None
assert frames[4].labels is not None
assert frames[2].labels[0].id == "0"
assert frames[2].labels[0].box2d is not None
assert frames[4].labels[1].box2d is not None
box = frames[4].labels[1].box2d
assert box.x1 == 1181.4259033203125
assert box.x2 == 1241.681396484375
assert box.y1 == 101.82328796386719
assert box.y2 == 155.20513916015625
assert frames[0].labels is not None
assert frames[0].labels[0].poly2d is not None
polys = frames[0].labels[0].poly2d
assert isinstance(polys, list)
poly = polys[0]
assert len(poly.vertices) == len(poly.types)
assert len(poly.vertices[0]) == 2
for char in poly.types:
assert char in ["C", "L"]
assert frames[0].labels[0].graph is not None
assert frames[0].labels[0].graph.nodes is not None
assert frames[0].labels[0].graph.edges is not None
nodes = frames[0].labels[0].graph.nodes
edges = frames[0].labels[0].graph.edges
assert isinstance(nodes, list)
assert isinstance(edges, list)
assert len(nodes) == 9
assert len(edges) == 9
assert nodes[1].location[0] == 205.20687963549207
assert nodes[1].location[1] == 278.4950509338821
assert nodes[1].category == "polygon"
assert edges[2].source == "5vowGRmRHjolm1-G"
assert edges[2].target == "MqOQsu8Tqn6sLoLM"
_ = load(inputs, nprocs, False)
assert_correctness(filepath, nprocs=0)
assert_correctness(filepath, nprocs=2)
def test_group_and_sort() -> None:
"""Check the group and sort results."""
# frames = [
# Frame(name="bbb-1", videoName="bbb", frameIndex=1, labels=[]),
# Frame(name="aaa-2", videoName="aaa", frameIndex=2, labels=[]),
# Frame(name="aaa-2", videoName="aaa", frameIndex=1, labels=[]),
# ]
frames = [
Frame(name="bbb-1", videoName="bbb", frameIndex=1, labels=[]),
Frame(name="aaa-2", videoName="aaa", frameIndex=2, labels=[]),
Frame(name="bbb-2", videoName="bbb", frameIndex=2, labels=[]),
Frame(name="aaa-2", videoName="aaa", frameIndex=1, labels=[]),
Frame(name="bbb-3", videoName="bbb", frameIndex=3, labels=[]),
]
frames_list = group_and_sort(frames)
assert len(frames_list) == 2
assert len(frames_list[0]) == 2
assert len(frames_list[1]) == 3
assert str(frames_list[0][0].videoName) == "aaa"
assert frames_list[0][1].name == "aaa-2"
assert frames_list[0][1].frameIndex == 2
assert str(frames_list[1][0].videoName) == "bbb"
assert frames_list[1][1].frameIndex == 2
assert frames_list[1][1].name == "bbb-2"
def test_dump() -> None:
"""Test dump labels."""
filepath = get_test_file("image_list_with_auto_labels.json")
labels = load(filepath).frames
labels_dict = [dump(label.dict()) for label in labels]
assert labels_dict[0]["frameIndex"] == labels[0].frameIndex
assert labels_dict[-1]["frameIndex"] == labels[-1].frameIndex
assert "box3d" not in labels_dict[0]["labels"][0]
assert "box2d" in labels_dict[0]["labels"][0]
assert labels[0].labels is not None
assert labels[0].labels[0].box2d is not None
assert (
labels_dict[0]["labels"][0]["box2d"]["x1"]
== labels[0].labels[0].box2d.x1
)
|
# Generated by Django 3.1 on 2020-09-29 05:34
import django.db.models.deletion
import django_extensions.db.fields
from django.conf import settings
from django.db import migrations, models
import library.django_utils.django_file_system_storage
class Migration(migrations.Migration):
initial = True
dependencies = [
('expression', '0001_initial'),
('genes', '0001_initial'),
('annotation', '0002_auto_20200929_1503'),
('snpdb', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('classification', '0001_initial'),
('patients', '0001_initial'),
('seqauto', '0001_initial'),
('pedigree', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ToolVersion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('version', models.TextField()),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='UploadedFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('path', models.TextField(null=True)),
('uploaded_file', models.FileField(max_length=256, null=True, storage=library.django_utils.django_file_system_storage.PrivateUploadStorage(), upload_to='')),
('md5_hash', models.CharField(max_length=32, null=True)),
('file_type', models.CharField(choices=[('B', 'BED'), ('L', 'Clinvar'), ('T', 'Clinvar Citations'), ('C', 'CuffDiff'), ('G', 'Gene List'), ('O', 'Gene Coverage'), ('I', 'Liftover'), ('P', 'Pedigree'), ('R', 'Patient Records'), ('V', 'VCF'), ('Y', 'VCF - Insert variants only (no samples etc)'), ('S', 'Variant Classifications')], max_length=1, null=True)),
('import_source', models.CharField(choices=[('A', 'API'), ('C', 'Command Line'), ('S', 'SeqAuto'), ('W', 'Web'), ('U', 'Web Upload')], max_length=1)),
('name', models.TextField()),
('visible', models.BooleanField(default=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'get_latest_by': 'modified',
'abstract': False,
},
),
migrations.CreateModel(
name='UploadedVCF',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('max_variant', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='snpdb.variant')),
],
),
migrations.CreateModel(
name='UploadPipeline',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(choices=[('C', 'Created'), ('P', 'Processing'), ('E', 'Error'), ('S', 'Success'), ('K', 'Skipped'), ('T', 'Terminated Early'), ('Z', 'Timed Out')], default='C', max_length=1)),
('items_processed', models.BigIntegerField(null=True)),
('processing_seconds_wall_time', models.IntegerField(null=True)),
('processing_seconds_cpu_time', models.IntegerField(null=True)),
('progress_status', models.TextField(null=True)),
('progress_percent', models.FloatField(default=0)),
('celery_task', models.CharField(max_length=36, null=True)),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadStep',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('sort_order', models.IntegerField()),
('status', models.CharField(choices=[('C', 'Created'), ('P', 'Processing'), ('E', 'Error'), ('S', 'Success'), ('K', 'Skipped'), ('T', 'Terminated Early'), ('Z', 'Timed Out')], default='C', max_length=1)),
('origin', models.CharField(choices=[('A', 'User Addition'), ('I', 'Import Task Factory')], default='I', max_length=1)),
('items_to_process', models.BigIntegerField(default=0)),
('items_processed', models.IntegerField(null=True)),
('error_message', models.TextField()),
('input_filename', models.TextField()),
('output_filename', models.TextField()),
('start_date', models.DateTimeField(null=True)),
('end_date', models.DateTimeField(null=True)),
('task_type', models.CharField(choices=[('C', 'Celery'), ('Q', 'SQL'), ('T', 'Tool')], max_length=1)),
('pipeline_stage', models.CharField(choices=[('U', 'Insert Unknown Variants'), ('D', 'Data Insertion'), ('A', 'Annotation Complete'), ('F', 'Finish')], max_length=1, null=True)),
('pipeline_stage_dependency', models.CharField(choices=[('U', 'Insert Unknown Variants'), ('D', 'Data Insertion'), ('A', 'Annotation Complete'), ('F', 'Finish')], max_length=1, null=True)),
('script', models.TextField()),
('child_script', models.TextField(null=True)),
('import_variant_table', models.TextField(blank=True, null=True)),
('celery_task', models.CharField(max_length=36, null=True)),
('output_text', models.TextField(null=True)),
('input_upload_step', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='upload.uploadstep')),
('parent_upload_step', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='substep_set', to='upload.uploadstep')),
('tool_version', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='upload.toolversion')),
('upload_pipeline', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadpipeline')),
],
),
migrations.CreateModel(
name='VCFImportInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('severity', models.CharField(choices=[('W', 'WARNING'), ('E', 'ERROR')], default='W', max_length=1)),
('accepted_date', models.DateTimeField(null=True)),
('upload_step', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadstep')),
],
),
migrations.CreateModel(
name='ModifiedImportedVariants',
fields=[
('vcfimportinfo_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='upload.vcfimportinfo')),
],
bases=('upload.vcfimportinfo',),
),
migrations.CreateModel(
name='VCFSkippedContigs',
fields=[
('vcfimportinfo_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='upload.vcfimportinfo')),
],
bases=('upload.vcfimportinfo',),
),
migrations.CreateModel(
name='VCFSkippedGVCFNonVarBlocks',
fields=[
('vcfimportinfo_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='upload.vcfimportinfo')),
('num_skipped', models.IntegerField()),
],
bases=('upload.vcfimportinfo',),
),
migrations.CreateModel(
name='VCFImporter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('version', models.IntegerField()),
('vcf_parser', models.TextField()),
('vcf_parser_version', models.TextField()),
('code_git_hash', models.TextField()),
],
options={
'unique_together': {('name', 'version', 'vcf_parser', 'vcf_parser_version', 'code_git_hash')},
},
),
migrations.CreateModel(
name='UploadStepMultiFileOutput',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('output_filename', models.TextField()),
('items_to_process', models.IntegerField()),
('upload_step', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadstep')),
],
),
migrations.CreateModel(
name='UploadSettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time_filter_method', models.CharField(choices=[('D', 'days'), ('R', 'records')], default='R', max_length=1)),
('time_filter_value', models.IntegerField(default=5)),
('show_all', models.BooleanField(default=False)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UploadedVCFPendingAnnotation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('finished', models.DateTimeField(null=True)),
('schedule_pipeline_stage_steps_celery_task', models.CharField(max_length=36, null=True)),
('uploaded_vcf', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedvcf')),
],
),
migrations.AddField(
model_name='uploadedvcf',
name='upload_pipeline',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='upload.uploadpipeline'),
),
migrations.AddField(
model_name='uploadedvcf',
name='uploaded_file',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile'),
),
migrations.AddField(
model_name='uploadedvcf',
name='vcf',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='snpdb.vcf'),
),
migrations.AddField(
model_name='uploadedvcf',
name='vcf_importer',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='upload.vcfimporter'),
),
migrations.CreateModel(
name='UploadedVariantClassificationImport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
('variant_classification_import', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='classification.variantclassificationimport')),
],
),
migrations.CreateModel(
name='UploadedPedFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ped_file', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='pedigree.pedfile')),
('uploaded_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedPatientRecords',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('patient_records', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='patients.patientrecords')),
('uploaded_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedManualVariantEntryCollection',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('collection', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='annotation.manualvariantentrycollection')),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedLiftover',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('liftover', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='snpdb.liftover')),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedGeneList',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gene_list', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='genes.genelist')),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedGeneCoverage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gene_coverage_collection', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='genes.genecoveragecollection')),
('sample', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='snpdb.sample')),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedExpressionFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('format', models.CharField(choices=[('C', 'CuffDiff')], max_length=1)),
('annotation_level', models.CharField(choices=[('T', 'Transcript'), ('G', 'Gene Symbol')], max_length=1)),
('cuff_diff_file', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='expression.cuffdifffile')),
('uploaded_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedClinVarVersion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('clinvar_version', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='annotation.clinvarversion')),
('uploaded_file', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedClinVarCitations',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('md5_hash', models.CharField(max_length=32)),
('clinvar_citations_collection', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='annotation.clinvarcitationscollection')),
('uploaded_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='UploadedBed',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('genomic_intervals_collection', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='snpdb.genomicintervalscollection')),
('uploaded_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedfile')),
],
),
migrations.CreateModel(
name='BackendVCF',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('combo_vcf', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='seqauto.samplesheetcombinedvcffile')),
('uploaded_vcf', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='upload.uploadedvcf')),
('vcf_file', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='seqauto.vcffile')),
],
),
migrations.CreateModel(
name='VCFSkippedContig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contig', models.TextField()),
('num_skipped', models.IntegerField()),
('import_info', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='upload.vcfskippedcontigs')),
],
),
migrations.CreateModel(
name='ModifiedImportedVariant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('old_multiallelic', models.TextField(null=True)),
('old_variant', models.TextField(null=True)),
('old_variant_formatted', models.TextField(null=True)),
('variant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='snpdb.variant')),
('import_info', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='upload.modifiedimportedvariants')),
],
),
]
|
# settings/local_pydanny.py
# to use this setting you must run:
# python manage.py runserver --settings=config.settings.local_pydanny
from .local import *
# Set short cache timeout
CACHE_TIMEOUT = 30
|
#!/usr/bin/python3
EMPTY_SEQUENCE = 'ε'
class Rule:
def __init__(self, input_symbols, output_symbols):
if input_symbols is None or output_symbols is None:
raise ValueError("Invalid rule arguments")
self._input_symbols = input_symbols
self._output_symbols = output_symbols
@property
def input_symbols(self):
return self._input_symbols
@property
def output_symbols(self):
return self._output_symbols
    def is_regular(self, context):
        """Check if the grammar rule is regular."""
        terminals, nonterminals = context
        return (
            self.is_context_free(context) and
            (
                (
                    len(self._output_symbols) == 2 and
                    set(self._output_symbols) & terminals and
                    set(self._output_symbols) & nonterminals
                ) or (
                    len(self._output_symbols) == 1 and
                    set(self._output_symbols) <= nonterminals | terminals | {EMPTY_SEQUENCE}
                )
            )
        )
def is_context_free(self, context):
"""Check if the grammar rule is context-free."""
_, nonterminals = context
return len(self._input_symbols) == 1 and self._input_symbols in nonterminals
def is_context_dependent(self, context):
"""Check if the grammar rule is context-dependent."""
terminals, nonterminals = context
return (
len(self._output_symbols) >= len(self._input_symbols) and
set(self._input_symbols) <= terminals | nonterminals and
            set(self._output_symbols) <= terminals | nonterminals | {EMPTY_SEQUENCE}
)
@staticmethod
def parse(rule_string):
"""Parse grammar rule"""
left_part, right_part = rule_string.split(sep='->')
right_parts = right_part.split(sep='|')
return [Rule(left_part, right_part) for right_part in right_parts]
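# Illustrative usage (not part of the original module):
#
#     rules = Rule.parse('S->aA|b')                  # -> [Rule('S', 'aA'), Rule('S', 'b')]
#     context = ({'a', 'b'}, {'S', 'A'})              # (terminals, nonterminals)
#     all(rule.is_context_free(context) for rule in rules)   # -> True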
|
#
# PySNMP MIB module F3-ESM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/F3-ESM-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:11:36 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
fsp150cm, = mibBuilder.importSymbols("ADVA-MIB", "fsp150cm")
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Unsigned32, TimeTicks, IpAddress, MibIdentifier, Counter32, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, iso, Gauge32, ModuleIdentity, Integer32, ObjectIdentity, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "TimeTicks", "IpAddress", "MibIdentifier", "Counter32", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "iso", "Gauge32", "ModuleIdentity", "Integer32", "ObjectIdentity", "Bits")
TextualConvention, DisplayString, StorageType, RowStatus, VariablePointer = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "StorageType", "RowStatus", "VariablePointer")
f3ESMMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23))
f3ESMMIB.setRevisions(('2012-10-03 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: f3ESMMIB.setRevisionsDescriptions((' Notes from release 201210030000Z, (1)MIB version ready for release FSP150CC 5.6CC.',))
if mibBuilder.loadTexts: f3ESMMIB.setLastUpdated('201209300000Z')
if mibBuilder.loadTexts: f3ESMMIB.setOrganization('ADVA Optical Networking')
if mibBuilder.loadTexts: f3ESMMIB.setContactInfo(' Jakub Zalewski ADVA Optical Networking, Inc. Tel: +48 58 7716 411 E-mail: jzalewski@advaoptical.com Postal: ul. Slaska 35/37 81-310 Gdynia, Poland')
if mibBuilder.loadTexts: f3ESMMIB.setDescription('This module defines the Ethernet Service Manager MIB definitions used by the F3 (FSP150CM/CC) product lines. Copyright (C) ADVA Optical Networking.')
f3EsmConfigObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1))
f3EsmConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 2))
esmConfigTable = MibTable((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 1), )
if mibBuilder.loadTexts: esmConfigTable.setStatus('current')
if mibBuilder.loadTexts: esmConfigTable.setDescription('This table has list of entries with which FSPNM can associate some information in the form of name-value pairs. NOTE: This table is for FSPNM use only.')
esmConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 1, 1), ).setIndexNames((0, "F3-ESM-MIB", "esmConfigIndex"))
if mibBuilder.loadTexts: esmConfigEntry.setStatus('current')
if mibBuilder.loadTexts: esmConfigEntry.setDescription('The conceptual row in esmConfigTable.')
esmConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 1, 1, 1), Unsigned32())
if mibBuilder.loadTexts: esmConfigIndex.setStatus('current')
if mibBuilder.loadTexts: esmConfigIndex.setDescription('This is the unique index for ESM Config.')
esmConfigAssociatedEntity = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 1, 1, 2), VariablePointer()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: esmConfigAssociatedEntity.setStatus('current')
if mibBuilder.loadTexts: esmConfigAssociatedEntity.setDescription('This is the entity with which name-value pairs shall be associated.')
esmConfigStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 1, 1, 3), StorageType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: esmConfigStorageType.setStatus('current')
if mibBuilder.loadTexts: esmConfigStorageType.setDescription('The storage type for this conceptual row.')
esmConfigRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: esmConfigRowStatus.setStatus('current')
if mibBuilder.loadTexts: esmConfigRowStatus.setDescription('The status of this row. An entry MUST NOT exist in the active state unless all objects in the entry have an appropriate value, as described in the description clause for each writable object. The values of esmConfigRowStatus supported are createAndGo(4) and destroy(6). All mandatory attributes must be specified in a single SNMP SET request with neRowStatus value as createAndGo(4). Upon successful row creation, this object has a value of active(1). The esmConfigRowStatus object may be modified if the associated instance of this object is equal to active(1).')
esmNameValuePairTable = MibTable((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 2), )
if mibBuilder.loadTexts: esmNameValuePairTable.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairTable.setDescription('This table has name-value pairs associated with entities. NOTE: This table is for FSPNM use only.')
esmNameValuePairEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 2, 1), ).setIndexNames((0, "F3-ESM-MIB", "esmConfigIndex"), (0, "F3-ESM-MIB", "esmNameValuePairName"))
if mibBuilder.loadTexts: esmNameValuePairEntry.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairEntry.setDescription('The conceptual row in esmNameValuePairTable.')
esmNameValuePairName = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 2, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16)))
if mibBuilder.loadTexts: esmNameValuePairName.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairName.setDescription('This is the name in the name-value pair.')
esmNameValuePairValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: esmNameValuePairValue.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairValue.setDescription('This is the value in the name-value pair.')
esmNameValuePairStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 2, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: esmNameValuePairStorageType.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairStorageType.setDescription('The storage type for this conceptual row.')
esmNameValuePairRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 1, 2, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: esmNameValuePairRowStatus.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairRowStatus.setDescription('The status of this row. An entry MUST NOT exist in the active state unless all objects in the entry have an appropriate value, as described in the description clause for each writable object. The values of esmNameValuePairRowStatus supported are createAndGo(4) and destroy(6). All mandatory attributes must be specified in a single SNMP SET request with neRowStatus value as createAndGo(4). Upon successful row creation, this object has a value of active(1). The esmNameValuePairRowStatus object may be modified if the associated instance of this object is equal to active(1).')
f3EsmCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 2, 1))
f3EsmGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 2, 2))
f3EsmCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 2, 1, 1)).setObjects(("F3-ESM-MIB", "esmConfigGroup"), ("F3-ESM-MIB", "esmNameValuePairGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
f3EsmCompliance = f3EsmCompliance.setStatus('current')
if mibBuilder.loadTexts: f3EsmCompliance.setDescription('Describes the requirements for conformance to the F3-ESM-MIB compliance.')
esmConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 2, 2, 1)).setObjects(("F3-ESM-MIB", "esmConfigAssociatedEntity"), ("F3-ESM-MIB", "esmConfigStorageType"), ("F3-ESM-MIB", "esmConfigRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
esmConfigGroup = esmConfigGroup.setStatus('current')
if mibBuilder.loadTexts: esmConfigGroup.setDescription('A collection of objects used to manage the EMS Configuration objects.')
esmNameValuePairGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2544, 1, 12, 23, 2, 2, 2)).setObjects(("F3-ESM-MIB", "esmNameValuePairValue"), ("F3-ESM-MIB", "esmNameValuePairStorageType"), ("F3-ESM-MIB", "esmNameValuePairRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
esmNameValuePairGroup = esmNameValuePairGroup.setStatus('current')
if mibBuilder.loadTexts: esmNameValuePairGroup.setDescription('A collection of objects used to manage the EMS Name-Value Pair objects.')
mibBuilder.exportSymbols("F3-ESM-MIB", f3EsmCompliances=f3EsmCompliances, esmConfigTable=esmConfigTable, esmNameValuePairTable=esmNameValuePairTable, f3EsmGroups=f3EsmGroups, esmConfigStorageType=esmConfigStorageType, esmNameValuePairRowStatus=esmNameValuePairRowStatus, esmConfigRowStatus=esmConfigRowStatus, esmNameValuePairValue=esmNameValuePairValue, esmConfigGroup=esmConfigGroup, PYSNMP_MODULE_ID=f3ESMMIB, f3EsmConfigObjects=f3EsmConfigObjects, esmNameValuePairEntry=esmNameValuePairEntry, esmNameValuePairName=esmNameValuePairName, esmConfigIndex=esmConfigIndex, esmConfigEntry=esmConfigEntry, f3EsmConformance=f3EsmConformance, esmConfigAssociatedEntity=esmConfigAssociatedEntity, f3EsmCompliance=f3EsmCompliance, f3ESMMIB=f3ESMMIB, esmNameValuePairStorageType=esmNameValuePairStorageType, esmNameValuePairGroup=esmNameValuePairGroup)
|
from django.forms import ModelForm
from .models import Post, Comment
class PostForm(ModelForm):
class Meta:
model = Post
fields = ['title', 'post_text', 'subreddit']
class CommentForm(ModelForm):
class Meta:
model = Comment
fields = ['text']
|
from importlib import reload
import argparse
import sys
import asyncio
import sc2
import bot
from bot import SimpleBot
from sc2 import Race, Difficulty
from sc2.player import Bot, Computer
import random
from sc2.constants import *
from sc2.position import Point2
class RampWallBot(sc2.BotAI):
async def on_step(self, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
return
else:
cc = cc.first
if self.can_afford(SCV) and self.workers.amount < 16 and cc.noqueue:
await self.do(cc.train(SCV))
# Raise depos when enemies are nearby
for depo in self.units(SUPPLYDEPOT).ready:
for unit in self.known_enemy_units.not_structure:
if unit.position.to2.distance_to(depo.position.to2) < 15:
break
else:
await self.do(depo(MORPH_SUPPLYDEPOT_LOWER))
# Lower depos when no enemies are nearby
for depo in self.units(SUPPLYDEPOTLOWERED).ready:
for unit in self.known_enemy_units.not_structure:
if unit.position.to2.distance_to(depo.position.to2) < 10:
await self.do(depo(MORPH_SUPPLYDEPOT_RAISE))
break
depos = [
Point2((max({p.x for p in d}), min({p.y for p in d})))
for d in self.main_base_ramp.top_wall_depos
]
depo_count = (self.units(SUPPLYDEPOT) | self.units(SUPPLYDEPOTLOWERED)).amount
if self.can_afford(SUPPLYDEPOT) and not self.already_pending(SUPPLYDEPOT):
if depo_count >= len(depos):
return
depo = list(depos)[depo_count]
r = await self.build(SUPPLYDEPOT, near=depo, max_distance=2, placement_step=1)
class WorkerRushBot(sc2.BotAI):
async def on_step(self, iteration):
if iteration == 0:
for worker in self.workers:
await self.do(worker.attack(self.enemy_start_locations[0]))
def main():
player_config = [
Bot(Race.Terran, SimpleBot()),
Computer(Race.Random, Difficulty.Easy)
]
gen = sc2.main._host_game_iter(
sc2.maps.get("Abyssal Reef LE"),
player_config,
realtime=False
)
while True:
r = next(gen)
input("Press enter to reload ")
reload(bot)
player_config[0].ai = bot.SimpleBot()
gen.send(player_config)
if __name__ == "__main__":
main()
|
def is_valid(checksum: int) -> bool:
return checksum % 11 == 0
def verify(isbn: str) -> bool:
isbn = isbn.replace("-", "")
check_sum = 0
if not len(isbn) == 10:
return False
for index, digit in enumerate(isbn):
if index == len(isbn) - 1 and digit == "X":
digit = "10"
if digit.isdigit():
check_sum += int(digit) * (10 - index)
else:
            return False
return is_valid(check_sum)
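# Worked example (illustrative): for the ISBN-10 "3-598-21508-8" the weighted sum is
# 3*10 + 5*9 + 9*8 + 8*7 + 2*6 + 1*5 + 5*4 + 0*3 + 8*2 + 8*1 = 264, which is
# divisible by 11, so verify("3-598-21508-8") returns True.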
|
#########################
# Demonstrates common String operations
#########################
from pkg.Tools import print_divider
str1 = "abcdefghijklmnopqrstuvwxyz"
str2 = 'life is short, i use python, i love python'
str3 = '演示各种 String 的常用操作方法'
str4 = 'python.py'
str5 = 'ad dsf \t sfdf \n sdfdf j sd l a\t dsf da r \t sdfa fd '
# find & rfind
print_divider("find")
index = str1.find("d", 2, len(str1)) # len() returns the length of the string
print("find result = %d" % index)
# index
print_divider("index")
index = str1.index("d", 2, len(str1)) # len() returns the length of the string
print("index result = %d" % index)
# count
print_divider("count")
count = str1.count('b', 0, 7)
print("count = " + str(count))
# replace
print_divider("replace")
print(str2.replace('python', 'java'))
# split
print_divider('split')
print(str2.split(' '))
print(str5.split()) # split() with no separator splits on any run of whitespace
# endswith / startswith
print_divider('endswith startswith')
print(str4.endswith('.py'))
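# startswith, to complement the endswith check above
print(str4.startswith('python'))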
|
import os
import shutil
import stat
import tempfile
from pathlib import Path
import pytest
from pyprojectx.wrapper import pw
@pytest.fixture
def tmp_dir():
path = tempfile.mkdtemp(prefix="build-env-")
yield Path(path)
shutil.rmtree(path)
@pytest.fixture(scope="session")
def tmp_project():
tmp = Path(tempfile.mkdtemp(prefix="build-env-"))
toml = Path(__file__).with_name("data").joinpath("pw-test.toml")
shutil.copyfile(toml, tmp.joinpath(pw.PYPROJECT_TOML))
pw_copy = Path(tmp, "pw")
project_dir = Path(__file__).parent.parent
shutil.copyfile(project_dir.joinpath("src/pyprojectx/wrapper/pw.py"), pw_copy)
os.chmod(pw_copy, stat.S_IRWXU | stat.S_IRWXG)
shutil.copy(project_dir.joinpath("src/pyprojectx/wrapper/pw.bat"), tmp)
env = os.environ.copy()
env["PYPROJECTX_PACKAGE"] = str(project_dir.absolute())
yield tmp, env
shutil.rmtree(tmp)
|
from __future__ import print_function
import csv, collections, os, sys
# args
# 1: cdr -- a3 or b3
# 2: stcr_data directory -- ...stcr_data/ -- should contain <cdr>/details/*.tsv
# 3: IMGT-renumbered structures directory -- ...all_structures/imgt/
# ex: python get_cdrs_stcr.py b3 ../datasets/stcr_data/ ~/Downloads/all_structures/imgt/
cdr = sys.argv[1]
root = sys.argv[2]
if root[-1] != '/': root += '/'
structs = sys.argv[3]
if structs[-1] != '/': structs += '/'
if not os.path.exists(root+cdr+'/structs'): os.mkdir(root+cdr+'/structs')
# http://www.imgt.org/IMGTScientificChart/Numbering/IMGTIGVLsuperfamily.html
# cdr:(start,end) as in range()
numbering = {'b3':(105,118), 'a3':(105,118)}
# convert 3-char to 1-char code
AA31 = dict([('ALA','A'),('CYS','C'),('ASP','D'),('GLU','E'),('PHE','F'),('GLY','G'),('HIS','H'),('ILE','I'),('LYS','K'),('LEU','L'),('MET','M'),('ASN','N'),('PRO','P'),('GLN','Q'),('ARG','R'),('SER','S'),('THR','T'),('VAL','V'),('TRP','W'),('TYR','Y')])
# all the canonical clusters for this cdr, in tsv files downloaded from stcrdab
groups = [g[:-4] for g in os.listdir(root+cdr+'/details') if g[-4:]=='.tsv']
# which residues to extract for this cdr
resi_range = set(range(*numbering[cdr]))
cdrs = [] # list of cdr specs to be saved out
pdb_seqs = collections.defaultdict(set) # pdb_id => cdr sequences, for duplicate checking
for group in groups:
for record in csv.DictReader(open(root+cdr+'/details/'+group+'.tsv'), delimiter='\t'):
pdb_id = record['pdb']
pdb_chain = record[cdr[0].upper()+'chain']
if pdb_chain == 'NA': continue # no structure?
out_pdb_id = pdb_id+pdb_chain+'_'+cdr
atoms = [] # list of ATOM records within the cdr
aas = {} # AA sequence of the cdr, by position
# extract atoms and seq
for row in open(structs+pdb_id+'.pdb'):
if row[:4]=='ATOM' and row[21]==pdb_chain and int(row[23:26]) in resi_range:
atoms.append(row)
aas[int(row[23:26])] = AA31[row[17:20]]
seq = ''.join(aas[pos] for pos in sorted(aas))
pdb_seqs[pdb_id].add(seq)
cdrs.append((group,len(seq),seq,pdb_id,pdb_chain))
# save pdb file of just the cdr
with open(root+cdr+'/structs/'+out_pdb_id+'.pdb','w') as loopfile:
for atom in atoms:
loopfile.write(atom)
# note when different cdrs (presumably some missing electron density)
with open(root+cdr+'/log.txt','w') as logfile:
for pdb_id,seqs in pdb_seqs.items():
if len(seqs)>1:
print('different cdrs in',pdb_id,seqs, file=logfile)
# save all the cdr specs
with open(root+cdr+'/cdrs.csv','w') as outfile:
outcsv = csv.writer(outfile)
outcsv.writerow(('group','len','seq','pdb','chain'))
outcsv.writerows(cdrs)
|
from flask import Flask, abort, request, jsonify
from datetime import date, datetime
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.sql import func
import os, json, logging, enum
import playerRankings as rank
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['DATABASE_URL']
db = SQLAlchemy(app)
from models import *
gunicorn_error_logger = logging.getLogger('gunicorn.error')
app.logger.handlers.extend(gunicorn_error_logger.handlers)
app.logger.setLevel(logging.DEBUG)
def createTables():
app.logger.debug('creating tables')
db.create_all()
app.logger.debug('created tables')
def dropTables():
app.logger.debug('dropping tables')
db.drop_all()
app.logger.debug('dropped tables')
def jsonSerial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, (datetime, date)):
return obj.isoformat()
if isinstance(obj, enum.Enum):
return obj.value
raise TypeError ("Type %s not serializable" % type(obj))
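# Used below as json.dumps(obj, default=jsonSerial) so date/datetime and enum values serialize cleanly.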
####################
# GET
####################
@app.route('/')
def homepage():
createTables()
return """
<h1>Welcome to the Foosball Data Service.</h1>
<p>Here is a list of available routes for reading data:</p>
<ul>
<li><a href="/">/</a></li>
<li><a href="/players">/players</a></li>
<li><a>/players/<id></a></li>
<li><a>/players/user/<username></a></li>
<li><a href="/games">/games</a></li>
<li><a>/games/<id></a></li>
<li><a href="/series">/series</a></li>
<li><a href="/history">/history</a></li>
<li><a href="/rankings">/rankings</a></li>
<li><a href="/leaderboard">/leaderboard</a></li>
</ul>
"""
@app.route('/players', methods=['GET'])
def getPlayers():
players = Players.query.all()
return json.dumps([player.as_dict() for player in players], default=jsonSerial)
@app.route('/players/<int:id>', methods=['GET'])
def getPlayerById(id):
player = Players.query.get_or_404(id)
return json.dumps(player.as_dict(), default=jsonSerial)
@app.route('/players/user/<int:username>', methods=['GET'])
def getPlayerByUsername(username):
username = str(username)
app.logger.debug(username)
app.logger.debug(type(username))
player = Players.query.filter_by(Username=username).first_or_404()
return json.dumps(player.as_dict(), default=jsonSerial)
@app.route('/games', methods=['GET'])
def getGames():
app.logger.debug("getting games")
games = Games.query.all()
return json.dumps([game.as_dict() for game in games], default=jsonSerial)
@app.route('/games/<int:id>', methods=['GET'])
def getGameById(id):
game = Games.query.get_or_404(id)
return json.dumps(game.as_dict(), default=jsonSerial)
@app.route('/series', methods=['GET'])
def getSeries():
series = Series.query.all()
return json.dumps([match.as_dict() for match in series], default=jsonSerial)
@app.route('/history', methods=['GET'])
def getHistory():
history = History.query.all()
return json.dumps([entry.as_dict() for entry in history], default=jsonSerial)
@app.route('/rankings', methods=['GET'])
def getRankings():
sortedPlayers = Players.query.filter(Players.Ranking != 0).order_by(Players.Ranking).all()
return json.dumps([player.as_dict() for player in sortedPlayers], default=jsonSerial)
@app.route('/leaderboard', methods=['GET'])
def getLeaderboard():
leaderboard = []
sortedPlayers = Players.query.filter(Players.Ranking != 0).order_by(Players.Ranking).all()
for player in sortedPlayers:
row = {}
row["First Name"] = player.FirstName
row["Last Name"] = player.LastName
row["Ranking"] = player.Ranking
row["Series Wins"] = player.SeriesWins
row["Game Wins"] = player.GameWins
row["Game Losses"] = player.TotalGamesPlayed - player.GameWins
row["Points Scored"] = player.TotalPoints
row["Shutouts"] = player.Shutouts
row["Game Win %"] = round(100.0*player.GameWins / player.TotalGamesPlayed,2)
numSeriesPlayed = db.session.query(func.distinct(Series.Id)).join(History).filter(History.PlayerId == player.Id).count()
row["Series Win %"] = round(100.0*player.SeriesWins / numSeriesPlayed,2)
row["Avg Points/Game"] = round(player.TotalPoints / player.TotalGamesPlayed,2)
sumWinMargin = db.session.query(func.sum(Games.WinMargin)).join(History).filter(Games.Winner == History.Side).filter(History.PlayerId == player.Id).scalar()
row["Avg Win Margin"] = round(sumWinMargin / player.TotalGamesPlayed,2)
leaderboard.append(row)
return json.dumps(leaderboard)
####################
# POST
####################
@app.route('/players', methods=['POST'])
def createPlayer():
if not request.json:
abort(400)
for attribute in ['FirstName', 'LastName', 'Username', 'Email']:
if attribute not in request.json:
abort(400)
newPlayer = Players(request.json['FirstName'],
request.json['LastName'],
request.json['Username'],
request.json['Email'])
db.session.add(newPlayer)
db.session.commit()
return (json.dumps(newPlayer.as_dict(), default=jsonSerial), 201)
@app.route('/games', methods=['POST'])
def createGame():
if not request.json:
abort(400)
for attribute in ['Single', 'LeftScore', 'RightScore', 'WinMargin', 'Winner']: # add EndTime later
if attribute not in request.json:
abort(400)
if request.json['Winner'] not in ('Left', 'Right'):
abort(400)
newGame = Games(request.json['Duration'],
request.json['Single'],
request.json['LeftScore'],
request.json['RightScore'],
request.json['WinMargin'],
TableSide.LEFT if request.json['Winner'] == 'Left' else TableSide.RIGHT)
db.session.add(newGame)
db.session.commit()
return (json.dumps(newGame.as_dict(), default=jsonSerial), 201)
@app.route('/series', methods=['POST'])
def createSeries():
if not request.json:
abort(400)
for attribute in ['NumGames', 'LeftWins', 'RightWins']: # might not be necessary
if attribute not in request.json:
abort(400)
newSeries = Series(request.json['NumGames'],
request.json['LeftWins'],
request.json['RightWins'])
db.session.add(newSeries)
db.session.commit()
return (json.dumps(newSeries.as_dict(), default=jsonSerial), 201)
@app.route('/history', methods=['POST'])
def createHistory():
if not request.json:
abort(400)
for attribute in ['GameId', 'PlayerId', 'SeriesId', 'Side']:
if attribute not in request.json:
abort(400)
newHistory = History(request.json['GameId'],
request.json['PlayerId'],
request.json['SeriesId'],
TableSide.LEFT if request.json['Side'] == 'Left' else TableSide.RIGHT)
db.session.add(newHistory)
db.session.commit()
return (json.dumps(newHistory.as_dict(), default=jsonSerial), 201)
####################
# PUT
####################
@app.route('/players/<int:id>', methods=["PUT"])
def updatePlayer(id):
if not request.json:
abort(400)
db.session.query(Players).filter_by(Id = id).update(request.json)
db.session.commit()
app.logger.debug(request.json)
return (jsonify(request.json), 202)
@app.route('/updateRankings', methods=['PUT'])
def updateRankings():
games = Games.query.all()
matchupsSeen = set()
gameTups = []
for game in games:
gameHistory = History.query.filter_by(GameId = game.Id)
gameHists = [game.as_dict() for game in gameHistory]
        # If this pair of players has not been seen yet, find the number of games the pair has played together
        if len(gameHists) == 2:
            if (gameHists[0]["PlayerId"], gameHists[1]["PlayerId"]) not in matchupsSeen:
commonGames = []
player1 = Players.query.filter_by(Id = gameHists[0]["PlayerId"]).first()
player2 = Players.query.filter_by(Id = gameHists[1]["PlayerId"]).first()
play1Games = History.query.filter_by(PlayerId = player1.Id)
play1Games = [game.as_dict() for game in play1Games]
play2Games = History.query.filter_by(PlayerId = player2.Id)
play2Games = [game.as_dict() for game in play2Games]
play2GamesById = {}
for game in play2Games:
_id = game["GameId"]
play2GamesById[_id] = game
for game in play1Games:
_id = game["GameId"]
if _id in play2GamesById:
commonGames.append(game)
# create two entries mapping (player1, player2, numTimesPlayed, player1Wins, player1Loss)
p1Entry = (player1.Id, player2.Id, len(commonGames), player1.GameWins, len(play1Games) - player1.GameWins)
p2Entry = (player2.Id, player1.Id, len(commonGames), player2.GameWins, len(play2Games) - player2.GameWins)
gameTups.append(p1Entry)
gameTups.append(p2Entry)
matchupsSeen.add((player1.Id, player2.Id))
matchupsSeen.add((player2.Id, player1.Id))
# Get new rankings based on game data, sort ids based on ranking, and update player rankings
rankings = rank.updateRankings(gameTups)
sortedRanks = sorted([(value,key) for (key,value) in rankings.items()], reverse=True)
allPlayers = Players.query.all()
allPlayersDict = {}
for player in allPlayers:
allPlayersDict[player.Id] = player
for i in range(len(sortedRanks)):
_id = sortedRanks[i][1]
player = allPlayersDict[_id]
player.Ranking = i + 1
db.session.commit()
return (json.dumps([player.as_dict() for player in allPlayers], default=jsonSerial), 202)
####################
# DELETE
####################
@app.route('/players/<int:id>', methods=['DELETE'])
def removePlayerById(id):
db.session.delete(Players.query.get(id))
db.session.commit()
return jsonify({ 'result': True })
@app.route('/games/<int:id>', methods=['DELETE'])
def removeGameById(id):
db.session.delete(Games.query.get(id))
db.session.commit()
return jsonify({ 'result': True })
@app.route('/series/<int:id>', methods=['DELETE'])
def removeSeriesById(id):
db.session.delete(Series.query.get(id))
db.session.commit()
return jsonify({ 'result': True })
# TODO: figure out how to access history by primary key and delete row
if __name__ == '__main__':
app.run()
|
#!/usr/bin/env python3
''' Creates Neo4j index and constraints for Canonicalized KG2
    Usage: create_indexes_constraints_canonicalized.py [--configFile=<config-file>] [--user=<neo4j-username>] [--password=<neo4j-password>]
'''
import argparse
import neo4j
import getpass
import sys
import json
__author__ = 'Erica Wood'
__copyright__ = 'Oregon State University'
__credits__ = ['Stephen Ramsey', 'Erica Wood']
__license__ = 'MIT'
__version__ = '0.1.0'
__maintainer__ = ''
__email__ = ''
__status__ = 'Prototype'
def run_query(query):
"""
:param query: a cypher statement as a string to run
"""
# Start a neo4j session, run a query, then close the session
session = driver.session()
query = session.run(query)
session.close()
return query
def node_labels():
# Create a list of dictionaries where each key is "labels(n)"
# and each value is a list containing a node label
labels = "MATCH (n) RETURN distinct labels(n)"
query = run_query(labels)
data = query.data()
label_list = []
    # Iterate through the list and dictionaries to create a list
# of node labels
for dictionary in data:
for key in dictionary:
value = dictionary[key]
value_string = value[0]
label_list.append(value_string)
return label_list
def create_index(label_list, property_name):
"""
:param label_list: a list of the node labels in Neo4j
"""
# For every label in the label list, create an index
# on the given property name
for label in label_list:
if label.find(":") < 0: # CREATE INDEX ON :BFO:0000050 (edge_label) gives error
index_query = "CREATE INDEX ON :" + label + " (" + property_name + ")"
run_query(index_query)
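# For example, for label "Base" and property "name" the generated statement is: CREATE INDEX ON :Base (name)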
def constraint(label_list):
"""
:param label_list: a list of the node labels in Neo4j
"""
    # Create a unique constraint on the id property of nodes
    # carrying the Base label
constraint_query = "CREATE CONSTRAINT ON (n:Base) ASSERT n.id IS UNIQUE"
run_query(constraint_query)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--configFile", type=str, help="RTXConfiguration JSON file containing the password", required=False, default=None)
parser.add_argument("-u", "--user", type=str, help="Neo4j Username", default=None, required=False)
parser.add_argument("-p", "--password", help="Neo4j Password", type=str, default=None, required=False)
arguments = parser.parse_args()
config_file_name = arguments.configFile
if arguments.password is not None and arguments.configFile is not None:
print("Not allowed to specify both password_file and password command-line options", file=sys.stderr)
sys.exit()
if arguments.user is None and arguments.configFile is None:
print("Must specify a username on the command-line or via the RTXConfiguration config file", file=sys.stderr)
sys.exit()
if arguments.user is not None and arguments.configFile is not None:
print("Cannot specify the username on both the command-line and the RTXConfiguration config file", file=sys.stderr)
sys.exit()
password = None
neo4j_password = None
neo4j_user = None
if config_file_name is not None:
print(config_file_name)
config_data = json.load(open(config_file_name, 'r'))
config_data_kg2_neo4j = config_data['KG2']['neo4j']
neo4j_user = config_data_kg2_neo4j['username']
neo4j_password = config_data_kg2_neo4j['password']
if neo4j_password is None:
neo4j_password = arguments.password
if neo4j_password is None:
neo4j_password = getpass.getpass("Please enter the Neo4j database password: ")
if arguments.user is not None:
neo4j_user = arguments.user
bolt = 'bolt://127.0.0.1:7687'
driver = neo4j.GraphDatabase.driver(bolt, auth=(neo4j_user, neo4j_password))
node_label_list = node_labels() + ['Base']
print("NOTE: If you are running create_indexes_constraints.py standalone and not via tsv-to-neo4j-canonicalized.sh, please make sure to re-set the read-only status of" +
" the Neo4j database to TRUE", file=sys.stderr)
# Create Indexes on Node Properties
create_index(node_label_list, "name")
create_index(node_label_list, "preferred_type")
constraint(node_label_list)
driver.close()
|
from plenum.common.roles import Roles
from plenum.common.transactions import PlenumTransactions
class Command:
def __init__(self, id, title, usage, note=None, examples=None):
self.id = id # unique command identifier
self.title = title # brief explanation about the command
self.usage = usage # syntax with all available clauses
self.note = note # any additional description/note
self.examples = examples if isinstance(examples, list) else [examples] \
if examples else examples
def __str__(self):
detailIndent = " "
header = "\n{}\n{}\n".format(self.id, '-' * (len(self.id)))
note = "{} note: {}\n\n".format(
detailIndent, self.note) if self.note else ""
examplesStr = '\n{}{}'.format(detailIndent, detailIndent).join(
self.examples) if self.examples else ""
examples = "{} example(s):\n{} {}\n".format(
detailIndent, detailIndent, examplesStr) \
if len(examplesStr) else ""
helpInfo = "{} title: {}\n\n" \
"{} usage: {}\n\n" \
"{}" \
"{}".format(detailIndent, self.title,
detailIndent, self.usage, note, examples)
return header + helpInfo
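# str(cmd) renders roughly as (indentation approximate):
#
#   <id>
#   ----
#      title: <title>
#
#      usage: <usage>
#
#      note: <note>            (only when a note is set)
#      example(s):
#         <one example per line>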
helpCmd = Command(
id="help",
title="Shows this or specific help message for given command",
usage="help [<command name>]",
examples=["help", "help list ids"])
statusCmd = Command(
id="status",
title="Shows general status of the sandbox",
usage="status")
licenseCmd = Command(
id="license",
title="Shows the license",
usage="license")
exitCmd = Command(
id="exit",
title="Exit the command-line interface ('quit' also works)",
usage="exit")
quitCmd = Command(
id="quit",
title="Exit the command-line interface ('exit' also works)",
usage="quit")
newNodeCmd = Command(
id="new node",
title="Starts new node",
usage="new node <name>",
examples=["new node Alpha", "new node all"])
newClientCmd = Command(
id="new client",
title="Starts new client",
usage="new client <name>",
examples="new client Alice")
statusNodeCmd = Command(
id="status node",
title="Shows status for given node",
usage="status node <name>",
examples="status node Alpha")
statusClientCmd = Command(
id="status client",
title="Shows status for given client",
usage="status client <name>",
examples="status client Alice")
# TODO: Obsolete, Needs to be removed
# keyShareCmd = Command(
# id="keyshare",
# title="Manually starts key sharing of a node",
# usage="keyshare node <name>",
# examples="keyshare node Alpha")
loadPluginsCmd = Command(
id="load plugins",
title="load plugins from given directory",
usage="load plugins from <dir path>",
examples="load plugins from /home/ubuntu/plenum/plenum/test/plugin/stats_consumer")
clientSendCmd = Command(
id="client send",
title="Client sends a message to pool",
usage="client <client-name> send {<json request data>}",
examples="client Alice send {'type':'GET_NYM', 'dest':'4QxzWk3ajdnEA37NdNU5Kt'}")
clientShowCmd = Command(
id="client show request status",
title="Shows status of a sent request",
usage="client <client-name> show <req-id>",
note="This will only show status for the request sent by 'client send' command",
examples="client Alice show 1486651494426621")
newKeyCmd = Command(
id="new key",
title="Adds new key to active wallet",
usage="new key [with seed <32 character seed>] [[as] <alias>]",
examples=[
"new key",
"new key with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"new key with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa myalias",
"new key with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa as myalias"])
listIdsCmd = Command(
id="list ids",
title="Lists all DIDs of active wallet",
usage="list ids [with verkeys]",
examples=["list ids", "list ids with verkeys"])
useIdCmd = Command(
id="use DID",
title="Marks given DID active/default",
usage="use DID <DID>",
note="Note: To see all DIDs in active wallet, use 'list ids' command",
examples="use DID 5pJcAEAQqW7B8aGSxDArGaeXvb1G1MQwwqLMLmG2fAy9")
addGenesisTxnCmd = Command(
id="add genesis transaction",
title="Adds given genesis transaction",
usage="add genesis transaction <type> for <dest-DID> [by <DID>] [with data {<json data>}] [role=<role>]",
examples=[
'add genesis transaction {nym} for 2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML role={role}'.format(
nym=PlenumTransactions.NYM.name,
role=Roles.STEWARD.name),
'add genesis transaction {nym} for 2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML with data {{"alias": "Alice"}} role={role}'.format(
nym=PlenumTransactions.NYM.name,
role=Roles.STEWARD.name),
'add genesis transaction {node} for 2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML by FvDi9xQZd1CZitbK15BNKFbA7izCdXZjvxf91u3rQVzW '
'with data {{"node_ip": "localhost", "node_port": "9701", "client_ip": "localhost", "client_port": "9702", "alias": "AliceNode"}}'.format(
node=PlenumTransactions.NODE.name)])
createGenesisTxnFileCmd = Command(
id="create genesis transaction file",
title="Creates genesis transaction file with in memory genesis transaction data",
usage="create genesis transaction file",
examples="create genesis transaction file")
changePromptCmd = Command(
id="prompt",
title="Changes the prompt to given principal (a person like Alice, an organization like Faber College, or an IoT-style thing)",
usage="prompt <principal-name>",
examples="prompt Alice")
newWalletCmd = Command(
id="new wallet",
title="Creates new wallet",
usage="new wallet <name>",
examples="new wallet mywallet")
useWalletCmd = Command(
id="use wallet",
title="Loads given wallet and marks it active/default",
usage="use wallet <name|absolute-wallet-file-path>",
examples=[
"use wallet mywallet",
"use wallet /home/ubuntu/.indy/wallets/test/mywallet.wallet"])
saveWalletCmd = Command(
id="save wallet",
title="Saves active wallet",
usage="save wallet [<active-wallet-name>]",
examples=["save wallet", "save wallet mywallet"])
renameWalletCmd = Command(
id="rename wallet",
title="Renames given wallet",
usage="rename wallet <old-name> to <new-name>",
examples="rename wallet mywallet to yourwallet")
listWalletCmd = Command(
id="list wallets",
title="Lists all wallets",
usage="list wallets")
|
#
# Copyright (c) Microsoft Corporation. All Rights Reserved.
#
"""
This module is a wrapper around ivy2's logic representation for
compatibility with ivy1 code.
Ivy1 uses a sorted first-order logic.
Constants (both first and second-order) are represented by class
Symbol. Variables are represented by class Variable. Instances of both
classes are distinguished by string names. Note: in the surface syntax
of Ivy, variables are distinguished from constants by having initial
capitals, but there is no such distinction made in AST's.
A Symbol or Variable has a Sort. Symbols and Variables of distinct
sorts are considered distinct (TODO: hashing and equality testing on
symbols will be faster if names are required to be
unique). First-order sorts are subclasses of ConstantSort, while
second-order sorts are subclasses of FunctionSort. Relation sorts
have range sort BooleanSort. There is a distinction between a
ConstantSort S and a 0-ary FunctionSort of range S. The first-order
sorts are uniquely identified by string names.
TODO: ivy used a separate RelationSort class. All occurrences of the
pattern isinstance(X,[RelationSort,FunctionSort]) will need attention.
Symbols whose names begin with a digit are *numerals*. Numerals always
have a sort of class ConstantSort, but are not declared in the
signature. In the surface syntax, a numeral without a sort field is
given the default numeric sort, which is fixed in the signature.
A *term* is represented by class Term and is either a constant, a
variable or a second-order constant applied to zero or more term
arguments. All AST nodes have a property "args" that is a list of AST
nodes. For constants and variables, args is empty, while for
applications (class App) args is the list of arguments the constant
symbol is applied to (but does not include the applied symbol itself).
TODO: extracting "args" is linear time for applications, since we must
extract all but the first element of ivy2's term list. We should
either cache it, or fix the code to expect ivy2's representation.
Terms have a method get_sort that computes the term's sort (for an
application, the range of the applied symbol's sort).
TODO: Symbols of sort TopSort in Ivy1 were represented using str. This
has to change.
TODO: There is no longer any sort Atom for applications of
relation. The constructor can be aliased to App, but we must fix any
other uses of type Atom.
An *enumerated* sort S is a ConstantSort with a set of constants of
sort S that act as its constructors (in other words, distinct
constructors are never equal).
Since logical terms and sorts must act as Ivy AST nodes, they must also
implement the "clone" method, which copies the object replacing its
args. This is a bit tricky since for Apps, clone must replace only the
args and not the applied symbol.
"""
from ivy_utils import flatten, IvyError
import ivy_utils as iu
import logic as lg
import logic_util as lu
from logic import And,Or,Not,Globally,Eventually,Implies,Iff,Ite,ForAll,Exists,Lambda,NamedBinder
from type_inference import concretize_sorts, concretize_terms
from collections import defaultdict
from itertools import chain
import ivy_smtlib
allow_unsorted = False
repr = str
class UnsortedContext(object):
""" Allow unsorted symbols. Useful for parsing.
"""
def __init__(self):
pass
def __enter__(self):
global allow_unsorted
self.old_allow_unsorted = allow_unsorted
allow_unsorted = True
return self
def __exit__(self,exc_type, exc_val, exc_tb):
global allow_unsorted
allow_unsorted = self.old_allow_unsorted
return False # don't block any exceptions
class sort_as_default(object):
""" Context in which a given sort is the default sort
"""
def __init__(self,sort):
self.sort = sort
def __enter__(self):
global sig
self.old_default_sort = sig.sorts.get('S',None)
sig.sorts['S'] = self.sort
return self
def __exit__(self,exc_type, exc_val, exc_tb):
global sig
if self.old_default_sort is not None:
sig.sorts['S'] = self.old_default_sort
else:
del sig.sorts['S']
return False # don't block any exceptions
class top_sort_as_default(sort_as_default):
""" Context in which TopSort is the default sort
"""
def __init__(self):
self.sort = lg.TopS
class alpha_sort_as_default(sort_as_default):
""" Context in which alpha is the default sort
"""
def __init__(self):
self.sort = lg.TopSort('alpha')
def is_numeral_name(s):
    return s[0].isdigit() or s[0] == '"' or (s[0] == '-' and len(s) > 1 and s[1].isdigit())
Symbol = lg.Const
Symbol.rep = property(lambda self: self)
Symbol.relname = property(lambda self: self)
# this is just not needed, as it is implemented exactly the same
# in logic.py
#Symbol.__str__ = lambda self: self.name
# the following two lines cause bugs when converting to Z3 and
# when caching in general, since they make constants with the same
# name but with different sorts equal, and the code relies on the fact
# that they are different. Do we really need this behavior?
#Symbol.__hash__ = lambda self: hash(self.name)
#Symbol.__eq__ = lambda self,other: (type(self) == type(other) and self.name == other.name)
Symbol.rename = lambda self,rn: Symbol(rn(self.name),self.sort)
Symbol.prefix = lambda self,s: Symbol(s+self.name,self.sort)
Symbol.startswith = lambda self,s: self.name.startswith(s)
Symbol.suffix = lambda self,s: Symbol(self.name+s,self.sort)
Symbol.endswith = lambda self,s: self.name.endswith(s)
Symbol.drop_prefix = lambda self,s: Symbol(self.name[len(s):],self.sort)
Symbol.drop_suffix = lambda self,s: Symbol(self.name[:-len(s)],self.sort)
Symbol.contains = lambda self,s: (s in self.name)
Symbol.skolem = lambda self: self.prefix('__')
Symbol.is_skolem = lambda self: self.contains('__')
Symbol.deskolem = lambda self,s: self.drop_prefix('__')
Symbol.__call__ = lambda self,*args: App(self,*args) if len(args) > 0 or isinstance(self.sort,FunctionSort) else self
Symbol.is_relation = lambda self: isinstance(self.sort.rng,lg.BooleanSort)
Symbol.args = property(lambda self : [])
Symbol.is_numeral = lambda self : is_numeral_name(self.name)
Symbol.clone = lambda self,args : self
Symbol.resort = lambda self,sort : Symbol(self.name,sort)
BooleanSort = lg.BooleanSort
class AST(object):
"""
Base class for abstract syntax.
"""
def clone(self,args):
return type(self)(*args)
#
# def __eq__(self,other):
# return type(self) == type(other) and self.args == other.args
def nary_repr(op,args):
res = (' ' + op + ' ').join([repr(a) for a in args])
return ('(' + res + ')') if len(args) > 1 else res
class Let(AST):
"""
Formula of the form let p(X,...,Z) <-> fmla[X,...,Z], ... in fmla
"""
def __init__(self,*args):
assert len(args) >= 1
self.args = args
def __str__(self):
res = str(self.args[-1])
if len(self.args) > 1:
res = 'let ' + ', '.join([str(x) for x in self.args[0:-1]]) + ' in ' + res
return res
class Some(AST):
def __init__(self,*args):
assert len(args) >= 2
self.args = args
def __str__(self):
res = 'some ' + str(self.args[0]) + '. ' + str(self.args[1])
if len(self.args) >= 3:
res += ' in ' + str(self.args[2])
if len(self.args) >= 4:
res += ' else ' + str(self.args[3])
return res
def params(self):
return [self.args[0]]
def fmla(self):
return self.args[1]
def if_value(self):
return self.args[2] if len(self.args) == 4 else None
def else_value(self):
return self.args[3] if len(self.args) == 4 else None
@property
def variables(self):
return [self.args[0]]
def clone_binder(self,vs,body):
return Some(*(vs+self.args[1:]))
class Definition(AST):
"""
Formula of the form p(X,...,Z) <-> fmla[X,...,Z]
"""
def __init__(self,*args):
assert len(args) == 2
self.args = args
def __str__(self):
return ' = '.join([repr(x) for x in self.args])
def defines(self):
return self.args[0].rep
def lhs(self):
return self.args[0]
def rhs(self):
return self.args[1]
def to_constraint(self):
if isinstance(self.args[1],Some):
if self.args[1].if_value() != None:
return And(Implies(self.args[1].args[1],
lg.Exists([self.args[1].args[0]],
And(self.args[1].args[1],Equals(self.args[0],self.args[1].if_value())))),
Or(lg.Exists([self.args[1].args[0]],self.args[1].args[1]),
Equals(self.args[0],self.args[1].else_value())))
return lg.ForAll([self.args[1].args[0]],
Implies(self.args[1].args[1],
lu.substitute(self.args[1].args[1],{self.args[1].args[0]:self.args[0]})))
if is_individual(self.args[0]):
return Equals(*self.args)
return Iff(*self.args)
@property
def sort(self):
return lg.Boolean
def __eq__(self,other):
return type(self) is type(other) and all(x == y for (x,y) in zip(self.args,other.args))
class DefinitionSchema(Definition):
pass
lg_ops = [lg.Eq, lg.Not, lg.Globally, lg.Eventually, lg.And, lg.Or, lg.Implies, lg.Iff, lg.Ite, lg.ForAll, lg.Exists, lg.Lambda, lg.NamedBinder]
for cls in lg_ops:
cls.args = property(lambda self: [ a for a in self])
cls.clone = lambda self,args: type(self)(*args)
for cls in [lg.ForAll, lg.Exists, lg.Lambda]:
cls.clone = lambda self,args: type(self)(self.variables,*args)
lg.NamedBinder.clone = lambda self,args: lg.NamedBinder(self.name, self.variables, *args)
lg.NamedBinder.rep = property(lambda self: self)
lg.Apply.clone = lambda self,args: type(self)(self.func, *args)
lg.Apply.args = property(lambda self: self.terms)
lg.Apply.rep = property(lambda self: self.func)
lg.Apply.relname = property(lambda self: self.func)
for cls in [lg.Apply] + lg_ops:
cls.is_numeral = lambda self: False
def is_numeral(term):
return isinstance(term,Symbol) and term.is_numeral()
for cls in [lg.Const, lg.Var, lg.Apply]:
cls.get_sort = lambda self: self.sort
lg.Eq.rep = property(lambda self: Symbol('=',RelationSort([x.sort for x in self.args])))
App = lg.Apply
def Atom(rel,args):
return Equals(*args) if rel == equals else lg.Apply(rel,*args) if args else rel
def is_atom(term):
return (isinstance(term,App) or isinstance(term,Symbol)) and term.sort == lg.Boolean or isinstance(term,lg.Eq)
# note: ivy1 treats instances of a constant in a formula as an app
def is_app(term):
return (
isinstance(term,App) or
isinstance(term,lg.Const) or
isinstance(term,lg.NamedBinder) and len(term.variables) == 0
)
def is_rel_app(term):
return isinstance(term,App) and term.rep.is_relation()
def _find_sort(type_name):
if allow_unsorted:
if type_name == 'S': return lg.TopS
return lg.UninterpretedSort(type_name)
try:
return sig.sorts[type_name]
except KeyError:
if type_name == 'S':
if iu.get_numeric_version() <= [1,2]:
return default_sort()
raise IvyError(None,"unspecified type")
raise IvyError(None,"unknown type: {}".format(type_name))
def find_sort(type_name):
res = _find_sort(type_name)
# print "sort {!r} = {!r}".format(type_name,res)
return res
def add_sort(sort):
if sort.name in sig.sorts:
IvyError(None,"redefinition of sort {}".format(sort_name))
sig.sorts[sort.name] = sort
def find_symbol(symbol_name,throw=True):
if allow_unsorted:
return Symbol(symbol_name,lg.TopS)
try:
# print "find symbol: {!r}".format(symbol_name)
s = sig.symbols[symbol_name]
# print "find symbol: {} : {}".format(s,s.sort)
return sig.symbols[symbol_name]
except KeyError:
if symbol_name == '=':
return equals
if not throw:
return None
else:
if symbol_name in sig.sorts:
IvyError(None,"type {} used where a function or individual symbol is expected".format(symbol_name))
raise IvyError(None,"unknown symbol: {}".format(symbol_name))
def find_polymorphic_symbol(symbol_name,throw=True):
if iu.ivy_have_polymorphism and symbol_name in polymorphic_symbols:
return polymorphic_symbols[symbol_name]
if symbol_name[0].isdigit() or symbol_name[0] == '"':
return Symbol(symbol_name,alpha)
return find_symbol(symbol_name,throw)
def normalize_symbol(symbol):
if iu.ivy_use_polymorphic_macros and symbol.name in polymorphic_macros_map:
return Symbol(polymorphic_macros_map[symbol.name],symbol.sort)
return symbol
class UnionSort(object):
def __init__(self):
self.sorts = []
def __str__(self):
return "UnionSort(" + ','.join(map(str,self.sorts)) + ")"
def add_symbol(symbol_name,sort):
# print "add symbol: {} : {}".format(symbol_name,sort)
return sig.add_symbol(symbol_name,sort)
def remove_symbol(symbol):
sig.remove_symbol(symbol)
def all_symbols():
return sig.all_symbols()
def get_sort_term(term):
if hasattr(term,'sort'):
return term.sort
return term.rep.sort.rng
def is_qf(term):
if is_quantifier(term):
return False
return all(is_qf(t) for t in term.args)
def is_prenex_universal(term):
if isinstance(term,lg.ForAll):
return is_prenex_universal(term.body)
if isinstance(term,lg.Not):
return is_prenex_existential(term.args[0])
return is_qf(term)
def is_prenex_existential(term):
if isinstance(term,lg.Exists):
return is_prenex_existential(term.body)
if isinstance(term,lg.Not):
return is_prenex_universal(term.args[0])
return is_qf(term)
def drop_universals(term):
if isinstance(term,lg.ForAll):
return drop_universals(term.body)
if isinstance(term,lg.Not):
return lg.Not(drop_existentials(term.args[0]))
if isinstance(term,lg.And) and len(term.args) == 1:
return drop_universals(term.args[0])
return term
def drop_existentials(term):
if isinstance(term,lg.Exists):
return drop_existentials(term.body)
if isinstance(term,lg.Not):
return lg.Not(drop_universals(term.args[0]))
return term
def is_alternation_free(term):
return is_prenex_universal(term) or is_prenex_existential(term) and not lu.free_variables(term)
def is_ae(term):
if isinstance(term,lg.ForAll):
return is_ae(term.body)
if isinstance(term,lg.Exists):
return is_prenex_existential(term.body)
if isinstance(term,lg.Not):
return is_ea(term.args[0])
return is_qf(term)
def is_ea(term):
if isinstance(term,lg.Exists):
return is_ea(term.body)
if isinstance(term,lg.ForAll):
return is_prenex_universal(term.body)
if isinstance(term,lg.Not):
return is_ae(term.args[0])
return is_qf(term)
logics = ["epr","qf","fo"]
decidable_logics = ["epr","qf"]
default_logics = ["epr"]
def subterms(term):
yield term
for a in term.args:
for b in subterms(a):
yield b
def seg_var_pat(t):
    return tuple(x if isinstance(x,lg.Var) else None for x in t.args)
def is_segregated(fmla):
    global reason_text
    fmla = drop_existentials(fmla)
vs = lu.used_variables(fmla)
apps = list(t for t in subterms(fmla) if isinstance(t,lg.Apply) and lu.used_variables(t))
byname = iu.partition(apps,lambda self:self.func.name)
for name,terms in byname.iteritems():
pat = seg_var_pat(terms[0])
pvs = set(x for x in pat if x != None)
if pvs != vs:
reason_text = "{} is not segrated (not all variables appear)".format(name)
return False
for t in terms[1:]:
if seg_var_pat(t) != pat:
reason_text = "{} is not segrated (variable positions differ)".format(name)
return False
return True
def reason():
global reason_text
return reason_text
class NotEssentiallyUninterpreted(Exception):
pass
def check_essentially_uninterpreted(fmla):
if is_variable(fmla):
return False
if is_binder(fmla):
return check_essentially_uninterpreted(fmla.body) and len(fmla.variables) == 0
argres = all([check_essentially_uninterpreted(a) for a in fmla.args])
if is_app(fmla) and is_interpreted_symbol(fmla.rep):
if not argres:
raise NotEssentiallyUninterpreted()
return argres
def symbols_over_universals_rec(fmla,syms,pos,univs):
if is_variable(fmla):
return fmla not in univs
if is_quantifier(fmla):
if pos == isinstance(fmla,lg.ForAll) or len(univs) > 0:
univs.update(fmla.variables)
res = symbols_over_universals_rec(fmla.body,syms,pos,univs)
for v in fmla.variables:
univs.remove(v)
return res
if isinstance(fmla,Not):
pos = not pos
argres = all([symbols_over_universals_rec(a,syms,pos,univs) for a in fmla.args])
if is_app(fmla) and not is_eq(fmla) and not argres:
syms.add(fmla.rep)
return argres
def symbols_over_universals(fmlas):
""" Return the set of function symbols that occur over universally
quantified variables after skolemization. In the formula 'forall
X. exists Y. p(X)', p occurs over a universal, since this
skolemizes to 'forall X. p(f(X))'. We don't count free variables,
however. If you want free variabes to be considered quantified,
you have to add a quantifier (see close_formula)."""
syms = set()
for fmla in fmlas:
try:
symbols_over_universals_rec(fmla,syms,True,set())
except Exception as foo:
            print(fmla)
raise foo
return syms
def universal_variables_rec(fmla,pos,univs):
if is_quantifier(fmla):
if pos == isinstance(fmla,lg.ForAll):
univs.update(fmla.variables)
return
if isinstance(fmla,Implies):
universal_variables_rec(fmla.args[0],not pos,univs),
universal_variables_rec(fmla.args[1],pos,univs)
return
if isinstance(fmla,Not):
pos = not pos
for a in fmla.args:
universal_variables_rec(a,pos,univs)
def universal_variables(fmlas):
""" Return the set of variables quantified universally after skolemization."""
univs = set()
for fmla in fmlas:
universal_variables_rec(fmla,True,univs)
return univs
# def check_essentially_uninterpreted(fmla):
# syms = symbols_over_universals([fmla])
# if any(is_interpreted_symbol(sym) for sym in syms):
# raise NotEssentiallyUninterpreted()
def is_in_logic(term,logic,unstrat = False):
global reason_text
assert logic in logics
if logic == "epr":
# ok = (is_prenex_universal(term)
# if lu.free_variables(term) else is_ea(term))
# if not ok:
# reason_text = "of quantifier alternation"
# return False
try:
check_essentially_uninterpreted(term)
except NotEssentiallyUninterpreted:
reason_text = "a variable occurs under an interpreted function symbol"
return False
cs = lu.used_constants(term)
for s in cs:
if s.name in sig.interp:
reason_text = "'{}' is iterpreted".format(s)
return False
if unstrat:
reason_text = "functions are not stratified"
return False
if not is_segregated(term):
reason_text = "formula is unsegregated"
return False
return True
elif logic == "qf":
reason_text = "a formula contains a quantifier"
return is_qf(term)
elif logic == "fo":
cs = lu.used_constants(term)
for s in cs:
if s.name in sig.interp:
reason_text = "'{}' is iterpreted".format(s)
return False
return True
def Constant(sym):
return sym # first-order constants are not applied in ivy2
def is_forall(term):
return isinstance(term,lg.ForAll)
def is_exists(term):
return isinstance(term,lg.Exists)
def is_lambda(term):
return isinstance(term,lg.Lambda)
def is_quantifier(term):
return isinstance(term,lg.ForAll) or isinstance(term,lg.Exists)
def is_binder(term):
return isinstance(term, (lg.ForAll, lg.Exists, lg.Lambda, lg.NamedBinder, Some))
for b in [lg.ForAll,lg.Exists,lg.Lambda]:
b.clone_binder = lambda self, variables, body, b = b: b(variables,body)
lg.NamedBinder.clone_binder = lambda self, variables, body: lg.NamedBinder(self.name,variables,body)
def is_named_binder(term):
return isinstance(term, lg.NamedBinder)
def is_temporal(term):
return isinstance(term, (lg.Globally, lg.Eventually))
def quantifier_vars(term):
return term.variables
def binder_vars(term):
return term.variables
def quantifier_body(term):
return term.body
def binder_args(term):
if isinstance(term,Some):
return term.args[1:]
return [term.body]
def is_epr_rec(term,uvars):
if is_forall(term):
return is_epr_rec(term.body,frozenset.union(uvars,term.variables))
if is_exists(term):
if frozenset.intersection(lu.free_variables(term),uvars):
return False
return is_epr_rec(term.body,frozenset())
return all(is_epr_rec(a,uvars) for a in term.args)
def is_epr(term):
return is_epr_rec(term,lu.free_variables(term))
def variables(sorts):
return [Variable('V'+str(idx),s) for idx,s in enumerate(sorts)]
def extensionality(destrs):
if not destrs:
return Or()
c = []
sort = destrs[0].sort.dom[0]
x,y = Variable("X",sort),Variable("Y",sort)
for d in destrs:
vs = variables(d.sort.dom[1:])
eqn = Equals(d(*([x]+vs)),d(*([y]+vs)))
if vs:
eqn = lg.ForAll(vs,eqn)
c.append(eqn)
res = Implies(And(*c),Equals(x,y))
return res
# Return a predicate stating that relation "rel" is a partial function
def partial_function(rel):
lsort,rsort = rel.sort.dom
x,y,z = [Variable(n,s) for n,s in [('X',lsort),('Y',rsort),('Z',rsort)]]
return ForAll([x,y,z],Implies(And(rel(x,y),rel(x,z)),Equals(y,z)))
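# For a binary relation rel over (s, t) this builds:
#   forall X:s, Y:t, Z:t. rel(X,Y) & rel(X,Z) -> Y = Z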
# Return a predicate stating that an element of "sort" can point to
# only one variant of "sort". This is sadly quadratic. TODO: maybe
# use a function to an enumerated type to express this constraint.
# We also include here extensionality for variants, that is, two values
# that point to the same value are equal. A sore point, however, is that
# null values may not be equal.
def exclusivity(sort,variants):
    # partial function
def pto(s):
return Symbol('*>',RelationSort([sort,s]))
excs = [partial_function(pto(s)) for s in variants]
for s in variants:
x,y,z = [Variable(n,s) for n,s in [('X',sort),('Y',sort),('Z',s)]]
excs.append(Implies(And(pto(s)(x,z),pto(s)(y,z)),Equals(x,y)))
for i1,s1 in enumerate(variants):
for s2 in variants[:i1]:
x,y,z = [Variable(n,s) for n,s in [('X',sort),('Y',s1),('Z',s2)]]
excs.append(Not(And(pto(s1)(x,y),pto(s2)(x,z))))
return And(*excs)
Variable = lg.Var
Variable.args = property(lambda self: [])
Variable.clone = lambda self,args: self
Variable.rep = property(lambda self: self.name)
Variable.__call__ = lambda self,*args: App(self,*args) if isinstance(self.sort,FunctionSort) else self
Variable.rename = lambda self,name: Variable(name if isinstance(name,str) else name(self.name),self.sort)
Variable.resort = lambda self,sort : Variable(self.name,sort)
class Literal(AST):
"""
Either a positive or negative atomic formula. Literals are not
formulas! Use Not(Atom(...)) to get a formula.
"""
def __init__(self, polarity, atom):
# assert isinstance(atom,Atom) or isinstance(atom,And) and len(atom.args) == 0
self.polarity = polarity
self.atom = atom
def __repr__(self):
return "Literal({!r},{!r})".format(self.polarity,self.atom)
def __str__(self):
return ('~' if self.polarity == 0 else '') + str(self.atom)
def __invert__(self):
"""
x.__invert__() <==> ~x
        Used to negate a literal.
"""
return Literal(1 - self.polarity, self.atom)
def clone(self,args):
return Literal(self.polarity,*args)
def __eq__(self,other):
return type(self) == type(other) and self.polarity == other.polarity and self.args == other.args
@property
def args(self):
return [self.atom]
@args.setter
def args(self,value):
assert len(value) == 1
self.atom = value[0]
def _eq_lit(x, y):
return Literal(1, Atom(equals, (x, y)))
def _neq_lit(x, y):
return Literal(0, Atom(equals, (x, y)))
class Predicate(object):
"""
A predicate is a literal factory. It's not an AST object.
"""
def __init__(self, name, arity):
self.name = name
self.arity = arity
def __call__(self, *terms):
assert len(terms) == self.arity
return Literal(1, Atom(self.name, terms))
ConstantSort = lg.UninterpretedSort
ConstantSort.rng = property((lambda self: self))
ConstantSort.dom = property((lambda self: []))
ConstantSort.defines = lambda self: []
ConstantSort.rep = property(lambda self: self.name)
UninterpretedSort = ConstantSort
UninterpretedSort.is_relational = lambda self: False
UninterpretedSort.rename = lambda self,rn: UninterpretedSort(rn(self.name))
EnumeratedSort = lg.EnumeratedSort
EnumeratedSort.defines = lambda self: self.extension
EnumeratedSort.is_relational = lambda self: False
EnumeratedSort.dom = property(lambda self: [])
EnumeratedSort.rng = property(lambda self: self)
# class EnumeratedSort(object):
# def __init__(self,name,extension):
# self.name = name
# self.extension = extension
# self.rng = self
# self.dom = []
# def __str__(self):
# return '{' + ','.join(self.extension) + '}'
# def __hash__(self):
# return hash(tuple(self.extension))
# def defines(self):
# return self.extension
# @property
# def constructors(self):
# return [Symbol(n,self) for n in extension]
# def __iter__(self): # make it iterable so it pretends to be an actual sort
# return self
# def next(self): # Python 3: def __next__(self)
# raise StopIteration
# @property
# def card(self):
# return len(self.extension)
# def is_relational(self):
# return False
FunctionSort = lg.FunctionSort
FunctionSort.rng = FunctionSort.range
FunctionSort.dom = FunctionSort.domain
FunctionSort.defines = lambda self: []
FunctionSort.is_relational = lambda self: self.rng == lg.Boolean
lg.BooleanSort.is_relational = lambda self: True
lg.BooleanSort.rng = property(lambda self: self)
lg.BooleanSort.dom = property(lambda self: [])
def is_enumerated_sort(s):
return isinstance(s,EnumeratedSort)
def is_boolean_sort(s):
return s == lg.Boolean
def is_boolean(term):
return term.sort == lg.Boolean
# TODO: arguably boolean and enumerated are first-order sorts
def is_first_order_sort(s):
return isinstance(s,UninterpretedSort)
def is_function_sort(s):
return isinstance(s,FunctionSort)
def FuncConstSort(*sorts):
return FunctionSort(*sorts) if len(sorts) > 1 else sorts[0]
def RelationSort(dom):
return FunctionSort(*(list(dom) + [lg.Boolean])) if len(dom) else lg.Boolean
def TopFunctionSort(arity):
if arity == 0:
return lg.TopSort('alpha')
res = FunctionSort(*[lg.TopSort('alpha{}'.format(idx)) for idx in range(arity+1)])
return res
TopS = lg.TopS
def apply(symbol,args):
return App(symbol,*args)
def is_topsort(sort):
return isinstance(sort,lg.TopSort)
def sortify(ast):
args = [sortify(arg) for arg in ast.args]
if (isinstance(ast,App)) and isinstance(ast.rep.sort,lg.TopSort):
return apply(find_symbol(ast.rep),args)
return ast.clone(args)
# Signatures
class Sig(object):
""" First-order signature """
def __init__(self):
self.sorts = {}
self.symbols = dict()
self.constructors = set()
self.interp = {}
self._default_sort = None
self.default_numeric_sort = ConstantSort("int")
self.sorts["bool"] = RelationSort([])
self.old_sigs = []
def __enter__(self):
global sig
self.old_sigs.append(sig)
sig = self
return self
def __exit__(self,exc_type, exc_val, exc_tb):
global sig
sig = self.old_sigs[-1]
self.old_sigs = self.old_sigs[:-1]
return False # don't block any exceptions
def copy(self):
res = Sig()
res.sorts = dict(self.sorts)
res.symbols = dict(self.symbols)
res.constructors = set(self.constructors)
res.interp = dict(self.interp)
res._default_sort = self._default_sort
res.default_numeric_sort = self.default_numeric_sort
return res
def all_symbols(self):
for name,sym in self.symbols.iteritems():
if isinstance(sym.sort,UnionSort):
for sort in sym.sort.sorts:
yield Symbol(sym.name,sort)
else:
yield sym
def add_symbol(self,symbol_name,sort):
# print "add symbol: {} : {}".format(symbol_name,sort)
if iu.ivy_have_polymorphism and symbol_name in polymorphic_symbols:
if symbol_name not in self.symbols:
self.symbols[symbol_name] = Symbol(symbol_name,UnionSort())
u = self.symbols[symbol_name].sort
if sort not in u.sorts:
u.sorts.append(sort)
return Symbol(symbol_name,sort)
elif symbol_name in self.symbols:
if sort != self.symbols[symbol_name].sort:
raise IvyError(None,"redefining symbol: {}".format(symbol_name))
else:
self.symbols[symbol_name] = Symbol(symbol_name,sort)
return self.symbols[symbol_name]
def remove_symbol(self,symbol):
assert symbol.name in self.symbols, symbol.name
sort = self.symbols[symbol.name].sort
if isinstance(sort,UnionSort):
assert symbol.sort in sort.sorts
sort.sorts.remove(symbol.sort)
return
del self.symbols[symbol.name]
def contains_symbol(self,symbol):
if symbol.name not in self.symbols:
return False
sort = self.symbols[symbol.name].sort
if isinstance(sort,UnionSort):
return symbol.sort in sort.sorts
return True
def contains(self,sort_or_symbol):
if isinstance(sort_or_symbol,Symbol):
return self.contains_symbol(sort_or_symbol)
return self.sorts.get(sort_or_symbol.name,None) == sort_or_symbol
def all_symbols_named(self,name):
if name not in self.symbols:
return []
sort = self.symbols[name].sort
if isinstance(sort,UnionSort):
return [Symbol(name,s) for s in sort.sorts]
return [Symbol(name,sort)]
def __str__(self):
return sig_to_str(self)
# Environment that temporarily adds symbols to a signature.
class WithSymbols(object):
def __init__(self,symbols):
self.symbols = list(symbols)
def __enter__(self):
global sig
self.saved = []
for sym in self.symbols:
if sym.name in sig.symbols:
self.saved.append((sym.name,sig.symbols[sym.name]))
del sig.symbols[sym.name]
sig.symbols[sym.name] = sym
return self
def __exit__(self,exc_type, exc_val, exc_tb):
global sig
for sym in self.symbols:
del sig.symbols[sym.name]
for name,sym in self.saved:
sig.symbols[name] = sym
return False # don't block any exceptions
class WithSorts(object):
def __init__(self,sorts):
self.sorts = list(sorts)
def __enter__(self):
global sig
self.saved = []
for sym in self.sorts:
if sym.name in sig.sorts:
self.saved.append(sym)
sig.sorts[sym.name] = sym
return self
def __exit__(self,exc_type, exc_val, exc_tb):
global sig
for sym in self.sorts:
del sig.sorts[sym.name]
for sym in self.saved:
sig.sorts[sym.name] = sym
return False # don't block any exceptions
class BindSymbols(object):
def __init__(self,env,symbols):
self.env, self.symbols = env, list(symbols)
def __enter__(self):
self.saved = []
for sym in self.symbols:
if sym in self.env:
self.saved.append(sym)
self.env.remove(sym)
self.env.add(sym)
return self
def __exit__(self,exc_type, exc_val, exc_tb):
for sym in self.symbols:
self.env.remove(sym)
for sym in self.saved:
self.env.add(sym)
return False # don't block any exceptions
class BindSymbolValues(object):
def __init__(self,env,bindings):
self.env, self.bindings = env, list(bindings)
def __enter__(self):
self.saved = []
for sym,val in self.bindings:
if sym in self.env:
                self.saved.append((sym,self.env[sym]))
del self.env[sym]
self.env[sym] = val
return self
def __exit__(self,exc_type, exc_val, exc_tb):
for sym,val in self.bindings:
del self.env[sym]
for sym,val in self.saved:
self.env[sym] = val
return False # don't block any exceptions
alpha = lg.TopSort('alpha')
beta = lg.TopSort('beta')
lg.BooleanSort.name = 'bool'
polymorphic_symbols_list = [
('<' , [alpha,alpha,lg.Boolean]),
('<=' , [alpha,alpha,lg.Boolean]),
('>' , [alpha,alpha,lg.Boolean]),
('>=' , [alpha,alpha,lg.Boolean]),
('+' , [alpha,alpha,alpha]),
('*' , [alpha,alpha,alpha]),
('-' , [alpha,alpha,alpha]),
('/' , [alpha,alpha,alpha]),
('*>' , [alpha,beta,lg.Boolean]),
('bvand' , [alpha,alpha,alpha]),
('bvor' , [alpha,alpha,alpha]),
('bvnot' , [alpha,alpha]),
# for liveness to safety reduction:
('l2s_waiting', [lg.Boolean]),
('l2s_frozen', [lg.Boolean]),
('l2s_saved', [lg.Boolean]),
('l2s_d', [alpha, lg.Boolean]),
('l2s_a', [alpha, lg.Boolean]),
]
# Tricky: since the bfe operator is parameterized, we add instances of it to
# the polymorphic_symbols table on demand.
class PolySymsDict(dict):
def __contains__(self,name):
return dict.__contains__(self,name) or name.startswith('bfe[')
def __getitem__(self,name):
if name.startswith('bfe[') and not dict.__contains__(self,name):
dict.__setitem__(self,name,lg.Const(name,lg.FunctionSort(alpha,beta)))
return dict.__getitem__(self,name)
polymorphic_symbols = PolySymsDict((x,lg.Const(x,lg.FunctionSort(*y) if len(y) > 1 else y[0]))
for x,y in polymorphic_symbols_list)
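# Lookups of names beginning with 'bfe[' (e.g. polymorphic_symbols['bfe[7][0]'],
# a hypothetical instance name) create and cache a fresh polymorphic Const of
# sort alpha -> beta on first access.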
polymorphic_macros_map = {
'<=' : '<',
'>' : '<',
'>=' : '<',
}
macros_expansions = {
'<=' : lambda t: Or(normalize_symbol(t.func)(*t.args),Equals(*t.args)),
'>' : lambda t: normalize_symbol(t.func)(t.args[1],t.args[0]),
'>=' : lambda t: Or(normalize_symbol(t.func)(t.args[1],t.args[0]),Equals(*t.args)),
}
def is_macro(term):
return isinstance(term,lg.Apply) and term.func.name in macros_expansions and iu.ivy_use_polymorphic_macros
def expand_macro(term):
return macros_expansions[term.func.name](term)
def is_inequality_symbol(sym):
return sym.name in ['<','<=','>','>=']
def is_strict_inequality_symbol(sym,pol=0):
""" Determine whether an inequality symbol is strict under a given
number of negations, where pol == 0 indicates an even number, pol
== 1 an odd number and None indicates neither even nor odd."""
return sym.name in ['<','>'] if pol == 0 else sym.name in ['<=','>='] if pol == 1 else False
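# For example, with pol == 0 only '<' and '>' count as strict, while with pol == 1
# (an odd number of negations) '<=' and '>=' do, since e.g. ~(x <= y) is the strict x > y.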
def default_sort():
ds = sig._default_sort
if ds != None: return ds
if not iu.get_numeric_version() <= [1,2]:
raise IvyError(None,'unspecified type')
ds = lg.UninterpretedSort('S')
add_sort(ds)
sig._default_sort = ds
return ds
def is_default_sort(s):
return s == sig._default_sort
def is_default_numeric_sort(s):
return s == sig.default_numeric_sort
equals = Symbol('=',RelationSort([lg.TopSort(),lg.TopSort()]))
lg.Eq.relname = property(lambda self: equals)
def Equals(x,y):
return lg.Eq(x,y)
def is_eq(ast):
return isinstance(ast,lg.Eq)
def is_equals(symbol):
return isinstance(symbol,Symbol) and symbol.name == '='
def is_ite(ast):
return isinstance(ast,lg.Ite)
def is_enumerated(term):
return is_app(term) and isinstance(term.get_sort(),EnumeratedSort)
def is_individual(term):
assert hasattr(term,'sort'),term
return term.sort != lg.Boolean
# return isinstance(term,lg.Const) or (isinstance(term,App) and term.sort != lg.Boolean)
def is_constant(term):
return isinstance(term,lg.Const)
def is_variable(term):
return isinstance(term,lg.Var)
def all_concretely_sorted(*terms):
return True
def check_concretely_sorted(term,no_error=False,unsorted_var_names=()):
for x in chain(lu.used_variables(term),lu.used_constants(term)):
if lg.contains_topsort(x.sort) or lg.is_polymorphic(x.sort):
if x.name not in unsorted_var_names:
if no_error:
raise lg.SortError
raise IvyError(None,"cannot infer sort of {} in {}".format(x,repr(term)))
def sort_infer(term,sort=None,no_error=False):
res = concretize_sorts(term,sort)
check_concretely_sorted(res,no_error)
return res
def sort_infer_list(terms,sorts=None,no_error=False,unsorted_var_names=()):
res = concretize_terms(terms,sorts)
for term in res:
check_concretely_sorted(term,no_error,unsorted_var_names)
return res
def sorts():
return [s for n,s in sig.sorts.iteritems()]
def is_ui_sort(s):
return type(s) is lg.UninterpretedSort
def is_concretely_sorted(term):
return not lg.contains_topsort(term) and not lg.is_polymorphic(term)
# TODO: this class doesn't belong here
def ast_match_lists(lx, ly, placeholders,subst):
## print "ml: %s %s %s %s" % (lx,ly, placeholders,subst)
if len(lx) != len(ly):
return False
for x,y in zip(lx,ly):
if not ast_match(x,y, placeholders,subst):
return False
return True
def ast_match(x, y, placeholders, subst):
## print "m: %s %s %s %s" % (x,y, placeholders,subst)
if type(x) is not type(y):
return False
elif is_variable(y) or is_constant(y):
if y in placeholders:
if y in subst:
return x == subst[y]
else:
subst[y] = x
return True
return x == y
elif isinstance(y,lg.Apply):
if x.rep != y.rep:
return False
return ast_match_lists(x.args,y.args,placeholders,subst)
elif isinstance(y,Literal):
if x.polarity != y.polarity:
return False
return ast_match(x.atom,y.atom,placeholders,subst)
elif hasattr(y,'args'):
return ast_match_lists(x.args,y.args,placeholders,subst)
return False # shouldn't happen
Error = lg.IvyError
""" current signature """
sig = Sig()
# string conversions
infix_symbols = set(['<','<=','>','>=','+','-','*','/'])
show_variable_sorts = True
show_numeral_sorts = True
# This converts to string with all type decorations
def to_str_with_var_sorts(t):
res = t.ugly()
return res
# This converts to string with no type decorations
def fmla_to_str_ambiguous(term):
global show_variable_sorts
global show_numeral_sorts
show_variable_sorts = False
show_numeral_sorts = False
res = term.ugly()
show_variable_sorts = True
show_numeral_sorts = True
return res
def app_ugly(self):
if type(self.func) is lg.NamedBinder:
name = str(self.func)
else:
name = self.func.name
args = [a.ugly() for a in self.args]
if name in infix_symbols:
return (' ' + name + ' ').join(args)
if len(args) == 0: # shouldn't happen
return name
return name + '(' + ','.join(args) + ')'
def nary_ugly(op,args,parens = True):
res = (' ' + op + ' ').join([a.ugly() for a in args])
return ('(' + res + ')') if len(args) > 1 and parens else res
lg.Var.ugly = (lambda self: (self.name+':'+self.sort.name)
if show_variable_sorts and not isinstance(self.sort,lg.TopSort) else self.name)
lg.Const.ugly = (lambda self: (self.name+':'+self.sort.name)
if show_numeral_sorts and self.is_numeral() and not isinstance(self.sort,lg.TopSort)
else self.name)
lg.Eq.ugly = lambda self: nary_ugly('=',self.args,parens=False)
lg.And.ugly = lambda self: nary_ugly('&',self.args) if self.args else 'true'
lg.Or.ugly = lambda self: nary_ugly('|',self.args) if self.args else 'false'
lg.Not.ugly = lambda self: (nary_ugly('~=',self.body.args,parens=False)
if type(self.body) is lg.Eq
else '~{}'.format(self.body.ugly()))
lg.Globally.ugly = lambda self: ('globally {}'.format(self.body.ugly()))
lg.Eventually.ugly = lambda self: ('eventually {}'.format(self.body.ugly()))
lg.Implies.ugly = lambda self: nary_ugly('->',self.args,parens=False)
lg.Iff.ugly = lambda self: nary_ugly('<->',self.args,parens=False)
lg.Ite.ugly = lambda self: '({} if {} else {})'.format(*[self.args[idx].ugly() for idx in (1,0,2)])
Definition.ugly = lambda self: nary_ugly('=',self.args,parens=False)
lg.Apply.ugly = app_ugly
def quant_ugly(self):
res = ('forall ' if isinstance(self,lg.ForAll) else
'exists ' if isinstance(self,lg.Exists) else
'lambda ' if isinstance(self,lg.Lambda) else
'$' + self.name + ' ')
res += ','.join(v.ugly() for v in self.variables)
res += '. ' + self.body.ugly()
return res
for cls in [lg.ForAll,lg.Exists, lg.Lambda, lg.NamedBinder]:
cls.ugly = quant_ugly
# Drop the type annotations of variables and polymorphic
# constants that can be inferred using the current signature. Here,
# "inferred_sort" is the sort of fmla that has been inferred, or
# None, and annotated_vars is the set of variable names that have
# already been annotated.
def var_drop_annotations(self,inferred_sort,annotated_vars):
if inferred_sort or self.name in annotated_vars:
annotated_vars.add(self.name)
return lg.Var(self.name,lg.TopS)
if not isinstance(self.sort,lg.TopSort):
annotated_vars.add(self.name)
return self
lg.Var.drop_annotations = var_drop_annotations
def const_drop_annotations(self,inferred_sort,annotated_vars):
if inferred_sort and self.is_numeral():
return lg.Const(self.name,lg.TopS)
return self
lg.Const.drop_annotations = const_drop_annotations
def eq_drop_annotations(self,inferred_sort,annotated_vars):
arg0 = self.args[0].drop_annotations(False,annotated_vars)
arg1 = self.args[1].drop_annotations(True,annotated_vars)
return lg.Eq(arg0,arg1)
lg.Eq.drop_annotations = eq_drop_annotations
def ite_drop_annotations(self,inferred_sort,annotated_vars):
arg1 = self.args[1].drop_annotations(inferred_sort,annotated_vars)
arg2 = self.args[2].drop_annotations(True,annotated_vars)
arg0 = self.args[0].drop_annotations(True,annotated_vars)
return lg.Ite(arg0,arg1,arg2)
lg.Ite.drop_annotations = ite_drop_annotations
def apply_drop_annotations(self,inferred_sort,annotated_vars):
name = self.func.name
if name in polymorphic_symbols:
arg0 = self.args[0].drop_annotations(inferred_sort and self.sort != lg.Boolean,annotated_vars)
rest = [arg.drop_annotations(name != '*>',annotated_vars) for arg in self.args[1:]]
return self.clone([arg0]+rest)
return self.clone([arg.drop_annotations(True,annotated_vars) for arg in self.args])
lg.Apply.drop_annotations = apply_drop_annotations
def symbol_is_polymorphic(func):
return func.name in polymorphic_symbols
# convert symbol to string with its type
def typed_symbol(func):
return func.name + ' : ' + str(func.sort)
def quant_drop_annotations(self,inferred_sort,annotated_vars):
body = self.body.drop_annotations(True,annotated_vars)
return type(self)([v.drop_annotations(False,annotated_vars) for v in self.variables],body)
for cls in [lg.ForAll, lg.Exists, lg.Lambda]:
cls.drop_annotations = quant_drop_annotations
lg.NamedBinder.drop_annotations = lambda self,inferred_sort,annotated_vars: lg.NamedBinder(
self.name,
[v.drop_annotations(False,annotated_vars) for v in self.variables],
self.body.drop_annotations(True,annotated_vars)
)
def default_drop_annotations(self,inferred_sort,annotated_vars):
return self.clone([arg.drop_annotations(True,annotated_vars) for arg in self.args])
for cls in [lg.Not, lg.Globally, lg.Eventually, lg.And, lg.Or, lg.Implies, lg.Iff, Definition]: # should binder be here?
cls.drop_annotations = default_drop_annotations
def pretty_fmla(self):
d = self.drop_annotations(False,set())
return d.ugly()
for cls in [lg.Eq, lg.Not, lg.And, lg.Or, lg.Implies, lg.Iff, lg.Ite, lg.ForAll, lg.Exists,
lg.Apply, lg.Var, lg.Const, lg.Lambda, lg.NamedBinder]:
cls.__str__ = pretty_fmla
# end string conversion stuff
def close_formula(fmla):
variables = list(lu.free_variables(fmla))
if variables == []:
return fmla
else:
return ForAll(variables,fmla)
free_variables = lu.free_variables
def implement_type(sort1,sort2):
sig.interp[sort1.name] = sort2
def is_canonical_sort(sort):
if isinstance(sort,UninterpretedSort):
s = sig.interp.get(sort.name,None)
return not isinstance(s,UninterpretedSort)
return True
def canonize_sort(sort):
if isinstance(sort,UninterpretedSort):
s = sig.interp.get(sort.name,None)
if isinstance(s,UninterpretedSort):
return canonize_sort(s)
return sort
def sort_refinement():
return dict((s,canonize_sort(s)) for s in sig.sorts.values() if not is_canonical_sort(s))
# This returns only the *canonical* uninterpreted sorts
def uninterpreted_sorts():
return [s for s in sig.sorts.values() if isinstance(s,UninterpretedSort) and s.name not in sig.interp]
def interpreted_sorts():
return [s for s in sig.sorts.values() if is_interpreted_sort(s)]
def is_uninterpreted_sort(s):
s = canonize_sort(s)
return isinstance(s,UninterpretedSort) and s.name not in sig.interp
# For now, int is the only infinite interpreted sort
def has_infinite_interpretation(s):
s = canonize_sort(s)
return s.name in sig.interp and not ivy_smtlib.quantifiers_decidable(sig.interp[s.name])
def is_interpreted_sort(s):
s = canonize_sort(s)
return (isinstance(s,UninterpretedSort) or isinstance(s,EnumeratedSort)) and s.name in sig.interp
def sort_interp(s):
return sig.interp.get(canonize_sort(s).name,None)
def is_numeral(term):
return isinstance(term,Symbol) and term.is_numeral()
def is_interpreted_symbol(s):
return is_numeral(s) and is_interpreted_sort(s.sort) or symbol_is_polymorphic(s) and is_interpreted_sort(s.sort.dom[0])
def is_deterministic_fmla(f):
if isinstance(f,Some) and len(f.args) < 4:
return False
return all(is_deterministic_fmla(a) for a in f.args)
def sym_decl_to_str(sym):
sort = sym.sort
res = 'relation ' if sort.is_relational() else 'function ' if sort.dom else 'individual '
res += sym.name
if sort.dom:
res += '(' + ','.join('V{}:{}'.format(idx,s) for idx,s in enumerate(sort.dom)) + ')'
if not sort.is_relational():
res += ' : {}'.format(sort.rng)
return res
def sig_to_str(self):
res = ''
for name,sort in self.sorts.iteritems():
if name == 'bool':
continue
res += 'type {}'.format(name)
if not isinstance(sort,UninterpretedSort):
res += ' = {}'.format(sort)
res += '\n'
for name,sym in self.symbols.iteritems():
sorts = sym.sort.sorts if isinstance(sym.sort,UnionSort) else [sym.sort]
for sort in sorts:
res += 'relation ' if sort.is_relational() else 'function ' if sort.dom else 'individual '
res += name
if sort.dom:
res += '(' + ','.join('V{}:{}'.format(idx,s) for idx,s in enumerate(sort.dom)) + ')'
if not sort.is_relational():
res += ' : {}'.format(sort.rng)
res += '\n'
return res
if __name__ == '__main__':
V1 = Variable('V1')
V2 = Variable('V2')
x = Constant('x')
y = Constant('y')
n = Predicate('n', 2)
is_ = Predicate('is', 1)
print [[~n(V1, V2), ~n(x, V1), n(x, y), is_(V2), is_(V1)],
[V1 == x,V1 != x],
[y == x, y != x],
[V1 == V2, V1 != V2],
[x == V2, x != V2],
]
def is_true(ast):
return isinstance(ast,And) and not ast.args
def is_false(ast):
return isinstance(ast,Or) and not ast.args
def simp_and(x,y):
if is_true(x):
return y
if is_false(x):
return x
if is_true(y):
return x
if is_false(y):
return y
return And(x,y)
def simp_or(x,y):
if is_false(x):
return y
if is_true(x):
return x
if is_false(y):
return x
if is_true(y):
return y
return Or(x,y)
def simp_not(x):
if isinstance(x,Not):
return x.args[0]
if is_true(x):
return Or()
if is_false(x):
return And()
return Not(x)
def simp_ite(i,t,e):
if t == e:
return t
if is_true(i):
return t
if is_false(i):
return e
if is_true(t):
return simp_or(i,e)
if is_false(t):
return simp_and(simp_not(i),e)
if is_true(e):
return simp_or(simp_not(i),t)
if is_false(e):
return simp_and(i,t)
return Ite(i,t,e)
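# Illustration (added): And() plays the role of 'true' and Or() the role of
# 'false' here, so for example simp_ite(c,t,t) == t, simp_ite(And(),t,e) == t
# and simp_ite(c,And(),e) == simp_or(c,e).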
def pto(*asorts):
return Symbol('*>',RelationSort(asorts))
CaptureError = lu.CaptureError
def lambda_apply(self,args):
assert len(args) == len(self.variables)
return lu.substitute(self.body,dict(zip(self.variables,args)))
lg.Lambda.__call__ = lambda self,*args: lambda_apply(self,args)
substitute = lu.substitute
def rename_vars_no_clash(fmlas1,fmlas2):
""" Rename the free variables in formula list fmlas1
so they occur nowhere in fmlas2, avoiding capture """
uvs = lu.used_variables(*fmlas2)
iu.dbg('uvs')
uvs = lu.union(uvs,lu.bound_variables(*fmlas1))
iu.dbg('uvs')
rn = iu.UniqueRenamer('',(v.name for v in uvs))
vs = lu.free_variables(*fmlas1)
vmap = dict((v,Variable(rn(v.name),v.sort)) for v in vs)
iu.dbg('vmap')
return [lu.substitute(f,vmap) for f in fmlas1]
class VariableUniqifier(object):
""" An object that alpha-converts formulas so that all variables are unique. """
def __init__(self):
self.rn = iu.UniqueRenamer()
self.invmap = dict()
def __call__(self,fmla):
vmap = dict()
res = self.rec(fmla,vmap)
return res
def rec(self,fmla,vmap):
if is_binder(fmla):
# save the old bindings
obs = [(v,vmap[v]) for v in fmla.variables if v in vmap]
newvars = tuple(Variable(self.rn(v.name),v.sort) for v in fmla.variables)
vmap.update(zip(fmla.variables,newvars))
self.invmap.update(zip(newvars,fmla.variables))
try:
res = fmla.clone_binder(newvars,self.rec(fmla.body,vmap))
except TypeError:
assert False,fmla
for v in fmla.variables:
del vmap[v]
vmap.update(obs)
return res
if is_variable(fmla):
if fmla not in vmap:
newv = Variable(self.rn(fmla.name),fmla.sort)
vmap[fmla] = newv
self.invmap[newv] = fmla
return vmap[fmla]
args = [self.rec(f,vmap) for f in fmla.args]
return fmla.clone(args)
def undo(self,fmla):
return lu.substitute(fmla,self.invmap)
def alpha_avoid(fmla,vs):
""" Alpha-convert a formula so that bound variable names do not clash with vs. """
vu = VariableUniqifier()
freevars = set(vs)
freevars.update(lu.free_variables(fmla)) # avoid capturing free variables
vmap = dict()
for v in freevars:
vu.rn(v.name) # reserve the name
vmap[v] = v # preserve the variable in formula
res = vu.rec(fmla,vmap)
return res
def equal_mod_alpha(t,u):
if isinstance(t,Definition):
return isinstance(u,Definition) and all(equal_mod_alpha(x,y) for (x,y) in zip(t.args,u.args))
return lu.equal_mod_alpha(t,u)
def alpha_rename(nmap,fmla):
""" alpha-rename a formula using a map from variable names to
variable names. assumes the map is one-one.
"""
vmap = dict()
def rec(fmla):
if is_binder(fmla):
newvars = tuple(v.rename(nmap.get(v.name,v.name)) for v in fmla.variables)
forbidden = frozenset(vmap.get(v,v) for v in lu.free_variables(fmla))
if not forbidden.isdisjoint(newvars):
raise CaptureError(forbidden.intersection(newvars))
bndgs = [(v,v.rename(nmap[v.name])) for v in fmla.variables if v.name in nmap]
with BindSymbolValues(vmap,bndgs):
return fmla.clone_binder(newvars,rec(fmla.body))
if is_variable(fmla):
return vmap.get(fmla,fmla)
return fmla.clone([rec(f) for f in fmla.args])
return rec(fmla)
def normalize_ops(fmla):
"""Convert conjunctions and disjunctions to binary ops and quantifiers
to single-variable quantifiers. """
args = map(normalize_ops,fmla.args)
def mkbin(op,first,rest):
if len(rest) == 0:
return first
return mkbin(op,op(first,rest[0]),rest[1:])
def mkquant(op,vs,body):
if len(vs) == 0:
return body
return op(vs[0:1],mkquant(op,vs[1:],body))
if isinstance(fmla,And) or isinstance(fmla,Or):
return fmla.clone([]) if len(args) == 0 else mkbin(type(fmla),args[0],args[1:])
if is_quantifier(fmla):
return mkquant(type(fmla),list(fmla.variables),args[0])
return fmla.clone(args)
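# Illustration (added): normalize_ops turns And(a,b,c) into And(And(a,b),c)
# and ForAll([X,Y],phi) into ForAll([X],ForAll([Y],phi)); empty conjunctions
# and disjunctions are returned unchanged.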
def negate_polarity(pol):
return 1 - pol if pol is not None else None
def polar(fmla,pos,pol):
""" Return the polarity of the `pos` argument of `fmla` assuming the
polarity of `fmla` is `pol`. The polarity indicates the
number of negations under which the formula occurs. It is 0 for an
even number, one for an odd number and None if the formula occurs
under both an even number and an odd number of negations. """
if isinstance(fmla,Not):
return negate_polarity(pol)
if isinstance(fmla,Implies):
return pol if pos == 1 else negate_polarity(pol)
if is_quantifier(fmla) or isinstance(fmla,And) or isinstance(fmla,Or):
return pol
if isinstance(fmla,Ite):
return None if pos == 0 else pol
return None
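# Illustration (added), assuming p, q and c are formulas:
#   polar(Not(p),0,0) == 1          # one more negation
#   polar(Implies(p,q),0,0) == 1    # the antecedent position flips polarity
#   polar(Implies(p,q),1,0) == 0    # the consequent keeps it
#   polar(Ite(c,p,q),0,0) is None   # the condition occurs under both polarities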
|
import pandas as pd
import numpy as np
import math
c_mean=2
k_mean=1
c_sig=2
k_sig=1
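# Hedged helper (added, not part of the original script): if no 'normal.csv'
# is at hand, a synthetic file with the expected columns x, y and censored can
# be generated roughly as below; the true coefficients and the censoring rule
# are assumptions chosen only for illustration (uncomment to use).
# _n = 1000
# _x = np.random.uniform(0.0, 5.0, _n)
# _y = (3.0 + 2.0*_x) + np.random.normal(0.0, 1.0 + 0.5*_x)
# _censored = _y < 0.0
# _y = np.where(_censored, 0.0, _y)
# pd.DataFrame({'x': _x, 'y': _y, 'censored': _censored}).to_csv('normal.csv', index=False)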
df = pd.read_csv('normal.csv')
lr=0.07/len(df)
nrounds=10000
def erf(x):
# save the sign of x
sign = np.sign(x)
x = abs(x)
# constants
a1 = 0.254829592
a2 = -0.284496736
a3 = 1.421413741
a4 = -1.453152027
a5 = 1.061405429
p = 0.3275911
# A&S formula 7.1.26
t = 1.0/(1.0 + p*x)
y = 1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*np.exp(-x*x)
return sign*y # erf(-x) = -erf(x)
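# Sanity check (added): the Abramowitz & Stegun 7.1.26 approximation above
# agrees with math.erf to within roughly 1.5e-7 for scalar inputs.
for _x in (0.0, 0.5, 1.0, 2.0):
    assert abs(erf(_x) - math.erf(_x)) < 1e-6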
def meandiff(x, u, s):
return (x-u)/(s*s)
def sigdiff(x, u, s):
return ((x-u)**2 - s**2)/(s**3)
def meandiff_censored(x, u, s):
PDF = (1/s)*math.sqrt(2/math.pi)*np.exp(-(x-u)*(x-u)/(2*s*s))
CDF = (1 + erf((x-u)/(s*math.sqrt(2))))
return PDF/CDF
def sigdiff_censored(x, u, s):
return meandiff_censored(x, u, s)*((x-u)/s)
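# Note (added): meandiff and sigdiff are the per-observation derivatives of
# the normal log-likelihood, d/du log phi = (x-u)/s^2 and
# d/ds log phi = ((x-u)^2 - s^2)/s^3. The *_censored variants compute the
# ratio PDF/CDF appearing in the derivatives of log Phi((x-u)/s), the
# contribution of a censored observation under a (left-)censored likelihood;
# the factor-of-2 scaling in PDF and CDF above cancels in the ratio.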
for i in range(nrounds):
sigs = c_sig + df['x']*k_sig
means = c_mean + df['x']*k_mean
c_mean += sum(meandiff(df['y'][~df["censored"]], means[~df["censored"]], sigs[~df["censored"]]))*lr
k_mean += sum(meandiff(df['y'][~df["censored"]], means[~df["censored"]], sigs[~df["censored"]])*df['x'][~df["censored"]])*lr
c_sig += sum(sigdiff(df['y'][~df["censored"]], means[~df["censored"]], sigs[~df["censored"]]))*lr
k_sig += sum(sigdiff(df['y'][~df["censored"]], means[~df["censored"]], sigs[~df["censored"]])*df['x'][~df["censored"]])*lr
#print(i, c_mean, k_mean, c_sig, k_sig)
c_mean -= sum(meandiff_censored(df['y'][df["censored"]], means[df["censored"]], sigs[df["censored"]]))*lr
k_mean -= sum(meandiff_censored(df['y'][df["censored"]], means[df["censored"]], sigs[df["censored"]])*df['x'][df["censored"]])*lr
c_sig -= sum(sigdiff_censored(df['y'][df["censored"]], means[df["censored"]], sigs[df["censored"]]))*lr
k_sig -= sum(sigdiff_censored(df['y'][df["censored"]], means[df["censored"]], sigs[df["censored"]])*df['x'][df["censored"]])*lr
print(i, c_mean, k_mean, c_sig, k_sig)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'variable.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import FFT_Variable
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_variableTime(object):
def messagePoint1(self):
self.temp_messageBox= QtGui.QMessageBox()
self.temp_messageBox.setInformativeText("One of the Input values havent been entered. Please enter them all")
self.temp_messageBox.setStandardButtons(QtGui.QMessageBox.Ok)
ret = self.temp_messageBox.exec_()
if(ret == QtGui.QMessageBox.Yes):
return
def messagePoint2(self):
self.temp_messageBox= QtGui.QMessageBox()
self.temp_messageBox.setInformativeText("Value of l2 < l1. Please enter the correct values")
self.temp_messageBox.setStandardButtons(QtGui.QMessageBox.Ok)
ret = self.temp_messageBox.exec_()
if(ret == QtGui.QMessageBox.Yes):
return
def checkPoint(self):
if(self.lineEdit.text() == "" or self.lineEdit_2.text() == "" or self.lineEdit_3.text() == ""):
self.messagePoint1()
return
if( int(self.lineEdit_3.text()) < int(self.lineEdit_2.text())):
self.messagePoint2()
return
else:
#print(self.lineEdit.text(), " ", self.lineEdit_2.text(), " ", self.lineEdit_3.text(), " ", self.lineEdit_4.text())
FFT_Variable.main(self.lineEdit.text(), self.lineEdit_2.text(), self.lineEdit_3.text())
def setupUi(self, variableTime):
variableTime.setObjectName(_fromUtf8("variableTime"))
variableTime.resize(688, 300)
self.layoutWidget = QtGui.QWidget(variableTime)
self.layoutWidget.setGeometry(QtCore.QRect(10, 110, 586, 101))
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.gridLayout_3 = QtGui.QGridLayout(self.layoutWidget)
self.gridLayout_3.setMargin(0)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.gridLayout_2 = QtGui.QGridLayout()
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.label_2 = QtGui.QLabel(self.layoutWidget)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout_2.addWidget(self.label_2, 1, 0, 1, 1)
self.label = QtGui.QLabel(self.layoutWidget)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label_3 = QtGui.QLabel(self.layoutWidget)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout.addWidget(self.label_3, 0, 0, 1, 1)
self.label_4 = QtGui.QLabel(self.layoutWidget)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout.addWidget(self.label_4, 1, 0, 1, 1)
self.gridLayout_2.addLayout(self.gridLayout, 2, 0, 1, 1)
self.gridLayout_3.addLayout(self.gridLayout_2, 0, 0, 3, 1)
self.lineEdit = QtGui.QLineEdit(self.layoutWidget)
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.gridLayout_3.addWidget(self.lineEdit, 0, 1, 1, 1)
self.lineEdit_2 = QtGui.QLineEdit(self.layoutWidget)
self.lineEdit_2.setObjectName(_fromUtf8("lineEdit_2"))
self.gridLayout_3.addWidget(self.lineEdit_2, 1, 1, 1, 1)
self.lineEdit_3 = QtGui.QLineEdit(self.layoutWidget)
self.lineEdit_3.setObjectName(_fromUtf8("lineEdit_3"))
self.gridLayout_3.addWidget(self.lineEdit_3, 2, 1, 1, 1)
self.pushButton = QtGui.QPushButton(variableTime)
self.pushButton.setGeometry(QtCore.QRect(10, 220, 88, 29))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.retranslateUi(variableTime)
QtCore.QMetaObject.connectSlotsByName(variableTime)
def retranslateUi(self, variableTime):
variableTime.setWindowTitle(_translate("variableTime", "Form", None))
self.label_2.setText(_translate("variableTime", "2) Enter the sampling rate (integer) at which the reading was taken", None))
self.label.setText(_translate("variableTime", "1) Enter the Name of the file", None))
self.label_3.setText(_translate("variableTime", "3) Enter the sampling rate (integer) at which you want the FFT to be done, in case", None))
self.label_4.setText(_translate("variableTime", " of doubt enter the sampling rate you entered previously for observing results", None))
self.pushButton.setText(_translate("variableTime", "OK", None))
self.pushButton.clicked.connect(self.checkPoint)
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
variableTime = QtGui.QWidget()
ui = Ui_variableTime()
ui.setupUi(variableTime)
variableTime.show()
sys.exit(app.exec_())
|
"""
This section covers functionality for computing predictions
with a [NERDA.models.NERDA][] model.
"""
from NERDA.preprocessing import create_dataloader
import torch
import numpy as np
from tqdm import tqdm
from nltk.tokenize import sent_tokenize, word_tokenize
from typing import List, Callable
import transformers
import sklearn.preprocessing
def sigmoid_transform(x):
prob = 1/(1 + np.exp(-x))
return prob
def predict(network: torch.nn.Module,
sentences: List[List[str]],
transformer_tokenizer: transformers.PreTrainedTokenizer,
transformer_config: transformers.PretrainedConfig,
max_len: int,
device: str,
tag_encoder: sklearn.preprocessing.LabelEncoder,
tag_outside: str,
batch_size: int = 8,
num_workers: int = 1,
return_tensors: bool = False,
return_confidence: bool = False,
pad_sequences: bool = True) -> List[List[str]]:
"""Compute predictions.
Computes predictions for a list with word-tokenized sentences
with a `NERDA` model.
Args:
network (torch.nn.Module): Network.
sentences (List[List[str]]): List of lists with word-tokenized
sentences.
transformer_tokenizer (transformers.PreTrainedTokenizer):
tokenizer for transformer model.
transformer_config (transformers.PretrainedConfig): config
for transformer model.
max_len (int): Maximum length of sentence after applying
transformer tokenizer.
device (str): Computational device.
tag_encoder (sklearn.preprocessing.LabelEncoder): Encoder
for Named-Entity tags.
tag_outside (str): Special 'outside' NER tag.
batch_size (int, optional): Batch Size for DataLoader.
Defaults to 8.
num_workers (int, optional): Number of workers. Defaults
to 1.
return_tensors (bool, optional): if True, return tensors.
return_confidence (bool, optional): if True, return
confidence scores for all predicted tokens. Defaults
to False.
pad_sequences (bool, optional): if True, pad sequences.
Defaults to True.
Returns:
List[List[str]]: List of lists with predicted Entity
tags.
"""
# make sure that the input has the correct format.
assert isinstance(sentences, list), "'sentences' must be a list of list of word-tokens"
assert isinstance(sentences[0], list), "'sentences' must be a list of list of word-tokens"
assert isinstance(sentences[0][0], str), "'sentences' must be a list of list of word-tokens"
# set network to appropriate mode.
network.eval()
# fill 'dummy' tags (expected input for dataloader).
tag_fill = [tag_encoder.classes_[0]]
tags_dummy = [tag_fill * len(sent) for sent in sentences]
dl = create_dataloader(sentences = sentences,
tags = tags_dummy,
transformer_tokenizer = transformer_tokenizer,
transformer_config = transformer_config,
max_len = max_len,
batch_size = batch_size,
tag_encoder = tag_encoder,
tag_outside = tag_outside,
num_workers = num_workers,
pad_sequences = pad_sequences)
predictions = []
probabilities = []
tensors = []
with torch.no_grad():
for _, dl in enumerate(dl):
outputs = network(**dl)
# conduct operations on sentence level.
for i in range(outputs.shape[0]):
# extract prediction and transform.
# find max by row.
values, indices = outputs[i].max(dim=1)
preds = tag_encoder.inverse_transform(indices.cpu().numpy())
probs = values.cpu().numpy()
if return_tensors:
tensors.append(outputs)
# subset predictions for original word tokens.
preds = [prediction for prediction, offset in zip(preds.tolist(), dl.get('offsets')[i]) if offset]
if return_confidence:
probs = [prob for prob, offset in zip(probs.tolist(), dl.get('offsets')[i]) if offset]
# Remove special tokens ('CLS' + 'SEP').
preds = preds[1:-1]
if return_confidence:
probs = probs[1:-1]
# make sure resulting predictions have same length as
# original sentence.
# TODO: Move assert statement to unit tests. Does not work
# in boundary.
# assert len(preds) == len(sentences[i])
predictions.append(preds)
if return_confidence:
probabilities.append(probs)
if return_confidence:
return predictions, probabilities
if return_tensors:
return tensors
return predictions
def predict_text(network: torch.nn.Module,
text: str,
transformer_tokenizer: transformers.PreTrainedTokenizer,
transformer_config: transformers.PretrainedConfig,
max_len: int,
device: str,
tag_encoder: sklearn.preprocessing.LabelEncoder,
tag_outside: str,
batch_size: int = 8,
num_workers: int = 1,
pad_sequences: bool = True,
return_confidence: bool = False,
sent_tokenize: Callable = sent_tokenize,
word_tokenize: Callable = word_tokenize) -> tuple:
"""Compute Predictions for Text.
Computes predictions for a text with `NERDA` model.
Text is tokenized into sentences before computing predictions.
Args:
network (torch.nn.Module): Network.
text (str): text to predict entities in.
transformer_tokenizer (transformers.PreTrainedTokenizer):
tokenizer for transformer model.
transformer_config (transformers.PretrainedConfig): config
for transformer model.
max_len (int): Maximum length of sentence after applying
transformer tokenizer.
device (str): Computational device.
tag_encoder (sklearn.preprocessing.LabelEncoder): Encoder
for Named-Entity tags.
tag_outside (str): Special 'outside' NER tag.
batch_size (int, optional): Batch Size for DataLoader.
Defaults to 8.
num_workers (int, optional): Number of workers. Defaults
to 1.
pad_sequences (bool, optional): if True, pad sequences.
Defaults to True.
return_confidence (bool, optional): if True, return
confidence scores for predicted tokens. Defaults
to False.
Returns:
tuple: sentence- and word-tokenized text with corresponding
predicted named-entity tags.
"""
assert isinstance(text, str), "'text' must be a string."
sentences = sent_tokenize(text)
sentences = [word_tokenize(sentence) for sentence in sentences]
predictions = predict(network = network,
sentences = sentences,
transformer_tokenizer = transformer_tokenizer,
transformer_config = transformer_config,
max_len = max_len,
device = device,
return_confidence = return_confidence,
batch_size = batch_size,
num_workers = num_workers,
pad_sequences = pad_sequences,
tag_encoder = tag_encoder,
tag_outside = tag_outside)
return sentences, predictions
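# Usage sketch (added, hedged): these functions are normally reached through a
# trained NERDA model object rather than called directly; the attribute names
# below are assumptions used only for illustration.
#
#     sentences, tags = predict_text(network=model.network,
#                                    text="Jens Hansen works for Novo Nordisk.",
#                                    transformer_tokenizer=model.transformer_tokenizer,
#                                    transformer_config=model.transformer_config,
#                                    max_len=model.max_len,
#                                    device=model.device,
#                                    tag_encoder=model.tag_encoder,
#                                    tag_outside=model.tag_outside)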
|
from bs4 import BeautifulSoup
import urllib2
import cookielib
from getpass import getpass
import sys
import time
import click
def run():
cric=urllib2.urlopen("http://www.cricbuzz.com")
html=cric.read()
soup=BeautifulSoup(html)
scoreall=soup.find_all("div",class_="cb-ovr-flo")
list=[]
c=0
for link in scoreall:
if link.string!=None:
c+=1
if c<6:
list.append(link.string)
s=list[0]+"\n"+list[1]+"\n"+list[2]+"\n"+list[3]+"\n"+list[4]
message=s
message = "+".join(message.split(' '))
url = 'http://site24.way2sms.com/Login1.action?'
data = 'username=' + username + '&password=' + passwd + '&Submit=Sign+in'
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
opener.addheaders = [('User-Agent','Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36')]
try:
usock = opener.open(url, data)
except IOError:
print("Error while logging in.")
sys.exit(1)
jession_id = str(cj).split('~')[1].split(' ')[0]
send_sms_url = 'http://site24.way2sms.com/smstoss.action?'
send_sms_data = 'ssaction=ss&Token=' + jession_id + '&mobile=' + number + '&message=' + message + '&msgLen=136'
opener.addheaders = [('Referer', 'http://site25.way2sms.com/sendSMS?Token=' + jession_id)]
try:
sms_sent_page = opener.open(send_sms_url, send_sms_data)
except IOError:
print("Error while sending message")
print("SMS has been sent.")
@click.command()
def cli():
global username
username = str(input("Enter Username: "))
global passwd
passwd = getpass()
global number
number = str(input("Enter Mobile number: "))
while True:
run()
time.sleep(2*60)
|
import setuptools
import codecs
with codecs.open('README.rst', encoding='utf8') as fh:
long_description = fh.read()
setuptools.setup(name='xlogit',
version='0.2.0-beta1',
description='A Python package for GPU-accelerated ' +
'estimation of mixed logit models.',
long_description=long_description,
long_description_content_type="text/x-rst",
url='https://github.com/arteagac/xlogit',
author='Cristian Arteaga',
author_email='cristiandavidarteaga@gmail.com',
license='MIT',
packages=['xlogit'],
zip_safe=False,
python_requires='>=3.5',
install_requires=[
'numpy>=1.13.1',
'scipy>=1.0.0'
])
|
import subprocess
from subprocess import PIPE
import json
import cson as cson_lib
cson_lib.loads
def cson2json(cson):
if cson is None:
return None
result = cson_lib.loads(cson)
return result
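# Minimal example (added, hedged): CSON accepts unquoted keys and
# single-quoted strings, so a small round trip through json looks like this.
if __name__ == '__main__':
    parsed = cson2json("title: 'hello'\ncount: 3")
    print(json.dumps(parsed))  # expected: {"title": "hello", "count": 3}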
|
from flaskeztest import RouteEZTestCase, expect_fixture
class IndexOneTestCase(RouteEZTestCase):
FIXTURE = 'oneuser'
def runTest(self):
RouteEZTestCase.runTest(self)
expectation = expect_fixture('oneuser')
self.assert_expectation_correct(expectation)
|
# Single quotes and double quotes do the same thing. However, to use punctuation marks such as apostrophes we may need both.
print("Neler neler yapıyorsun bensizken Ankara'da?")
# Triple quotes can be used for multi-line definitions.
print("""Neler neler yapıyorsun
bensizken Ankara'da?""")
################################
var1 = "YemekSepeti"
# Indices start at 0 and run to the end of the string.
print(var1[0]) # "Y"
# With the (-) sign you can also index from the end.
print(var1[-10]) # "e"
# Using (:) between indices we can take the slice of characters we want.
print(var1[0:5]) # "Yemek"
# If the start index is omitted, the slice starts from element 0.
print(var1[:5]) # "Yemek"
# If the end index is omitted, the slice runs from element 5 to the last element.
print(var1[5:]) # "Sepeti"
# The end index can likewise be given from the end with a negative value.
print(var1[:-6]) # "Yemek"
var2 = "Yaşamak"
# Start at index 1 and step by 2.
print(var2[1::2]) # "aaa"
# Start at index 0 and step by 2.
print(var2[::2]) # "Yşmk"
var3 = "Teşekkürler Süpermen"
var3.replace("S", "T") # S harfini T ile değiştirir
var3.replace("e", "ı").replace("ü", "ı")
var3.replace(old, new, 3)
var4 = "______________Yemek_Sepeti_____________"
print(var4.strip("_")) => "Yemek_Sepeti"
var5 = "___ ______sssss__aaaa__________Yemek_Sepeti___"
print(var5.strip("_sa ")) => "Yemek_Sepeti"
var1 = "Yemek_Sepeti"
print(var1.split('_')) # ['Yemek', 'Sepeti']
# Splits on the first space only (maxsplit=1), producing two parts
var1 = "Herkesin hayatına kimse karışamaz"
print(var1.split(' ',maxsplit=1)) # ['Herkesin', 'hayatına kimse karışamaz']
# With join
liste = ["Yemek","Sepeti","216222222222"]
print(";".join(liste)) # Yemek;Sepeti;216222222222
a = [1,2,3,4]
b = a.copy() # This copies a into b; appending to b does not change a.
b.append(3)
print(a) # [1, 2, 3, 4]
# insert(0, 0) adds 0 at index 0; the remaining elements shift right
liste.insert(0, 0)
print(liste.pop()) # pop() removes and returns the last element
print(liste.pop(1)) # pop(1) removes and returns the element at index 1
# "set" kullanımı (Tekrarlı elemanları ortadan kaldırabilirsin)
liste = [1, 2, 2, 2, 2, 2, 3, 4, 5]
liste2 = [5, 4, 6, 6, 6, 6, 6, 7, 8]
kume1 = set(liste1)
kume2 = set(liste2)
print(kume1) => "[1,2,3,4,5]"
print(kume2) => "[4,5,6,7,8]"
# print(kume1.difference(kume2)) # {1, 2, 3}
# print(kume2.difference(kume1)) # {8, 6, 7}
# print(kume1.symmetric_difference(kume2)) # {1, 2, 3, 6, 7, 8}
# print(kume1.intersection(kume2)) # {4, 5}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) Merchise Autrement [~º/~] and Contributors
# All rights reserved.
#
# This is free software; you can do what the LICENCE file allows you to.
#
from xoeuf import fields, models, api
MIXIN_NAME = "typed.reference.example.mixin"
class ExampleMixin(models.AbstractModel):
_name = MIXIN_NAME
test = fields.Char(default="Hello")
partner_id = fields.Many2one("res.partner")
test2 = fields.Char(default="Hello")
name = fields.Char(related="partner_id.name", readonly=False)
comment = fields.Text(related="partner_id.comment", readonly=False)
ref = fields.Char(related="partner_id.ref", store=True, readonly=False)
street = fields.Char(related="partner_id.street", store=True, readonly=False)
phone = fields.Char(related="partner_id.phone", readonly=False)
mobile = fields.Char(related="partner_id.mobile", readonly=False)
class Model1(models.Model):
_name = "test.model1"
_inherit = MIXIN_NAME
_description = "Model 1"
class Model2(models.Model):
_name = "test.model2"
_inherit = MIXIN_NAME
class SubModel2(models.Model):
_name = "test.sub.model2"
_inherit = "test.model2"
class TestModel(models.Model):
_name = "test.model"
typed_ref = fields.TypedReference(mixin=MIXIN_NAME, delegate=True)
filtered_typed_ref = fields.TypedReference(
mixin=MIXIN_NAME, selection=[("test.model2", "model2")]
)
# define to set store=True
test2 = fields.Char(compute="_compute_test2", store=True)
# name = fields.Char(compute="_compute_name", store=True)
ref = fields.Char(compute="_compute_ref", store=True)
@api.one
@api.depends("typed_ref", "typed_ref.test2")
def _compute_test2(self):
if self.typed_ref:
self.test2 = self.typed_ref.test2
# @api.one
# @api.depends("typed_ref", "typed_ref.partner_id.name")
# def _compute_name(self):
# if self.typed_ref:
# self.name = self.typed_ref.name
@api.one
@api.depends("typed_ref", "typed_ref.ref")
def _compute_ref(self):
if self.typed_ref:
self.ref = self.typed_ref.ref
|
import numpy
from datetime import datetime
import Files.FetchData as FD
dbName = 'market_data'
stockCollName = 'stock_data'
indexCollName= 'benchmark_data'
data = FD.findDataWithFilters_AllFields(dbName,stockCollName,{'Ticker':'AAPL'},[('Date',1)])
# for val in data:
# print(val.get('Close'))
# for data in FD.findDataWithFilters_AllFields(dbName,collName,{'Ticker':'AAPL'}):
# print(data)
print("\n")
# for data in FD.findDataWithFilters_CustomFields(dbName,collName,{'Ticker':'AAPL'},{'CompanyName':1,'_id':0}):
# print(data)
disStocks = FD.getDistinctValues(dbName,stockCollName,'Ticker')
disIndexes = FD.getDistinctValues(dbName,indexCollName,'BenchmarkTicker')
start = datetime(2014, 5, 2, 0, 0, 0)
end = datetime(2019, 4, 30, 0, 0, 0)
for stock in disStocks:
print(stock)
data = FD.findDataWithFilters_CustomFields(dbName,stockCollName,{'Ticker':stock,'Date':{'$lte':end,'$gte':start}},{'Close':1,'Date':1},[('Date',1)])
stockReturns =[]
# stockDate =[]
iterData = iter(data)
prev = next(iterData)
for cur in iterData:
prevClose = prev.get('Close')
curClose = cur.get('Close')
perDayReturn = (curClose-prevClose)/prevClose
stockReturns.append(perDayReturn)
prev = cur
# stockDate.append(cur.get('Date'))
for index in disIndexes:
print(index)
indexData = FD.findDataWithFilters_CustomFields(dbName,indexCollName,{'BenchmarkTicker':index,'Date':{'$lte':end,'$gte':start}},{'Close':1,'Date':1},[('Date',1)])
indexReturns =[]
iterIndexData = iter(indexData)
prevIndex = next(iterIndexData)
indexDate =[]
for cur in iterIndexData:
prevIndexClose = prevIndex.get('Close')
curIndexClose = cur.get('Close')
perDayIndexReturn = (curIndexClose-prevIndexClose)/prevIndexClose
indexReturns.append(perDayIndexReturn)
prevIndex = cur
break
break
cov = numpy.cov(stockReturns,indexReturns,bias=True)[0][1]
var = numpy.var(indexReturns)
beta = cov/var
rfr = 0.0253
mktAvg = numpy.average(indexReturns)
expectedReturnFromFormula = rfr+beta*(mktAvg-rfr) #ER = rfr+beta*(mktAvg-rfr)
expectedReturnWithoutFormula = numpy.average(stockReturns)
# print('variance is ')
# print(var)
#
# print('beta is ')
# print(beta)
#
# print('Mkt avgs is ')
# print(mktAvg)
#
#
# print(expectedReturnFromFormula)
# print(expectedReturnWithoutFormula)
# for returnVal in stockReturns:
# print(returnVal)
# for oneDate in stockDate:
# print(oneDate)
# print('##################')
# for returnVal in indexReturns:
# print(returnVal)
# for oneDate in indexDate:
# print(oneDate)
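# Hedged illustration (added): the same CAPM arithmetic as above on a tiny
# made-up sample, independent of the database, showing
# beta = cov(stock,index)/var(index) and ER = rfr + beta*(mktAvg - rfr).
exStockReturns = [0.010, -0.020, 0.015, 0.005]
exIndexReturns = [0.008, -0.010, 0.012, 0.004]
exBeta = numpy.cov(exStockReturns, exIndexReturns, bias=True)[0][1]/numpy.var(exIndexReturns)
exExpectedReturn = rfr + exBeta*(numpy.average(exIndexReturns) - rfr)
# print(exBeta, exExpectedReturn)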
|
from django.http import HttpResponse
import datetime
from django.template import Template, Context
from django.template.loader import get_template
from django.shortcuts import render
class Persona(object):
def __init__(self, name, last_name):
self.name=name
self.last_name=last_name
def saludo(request):
doc_externo=open('/var/www/html/proyectos-django/learnDjango/learnDjango/templates/home.html')
plt=Template(doc_externo.read())
doc_externo.close()
ctx=Context()
document=plt.render(ctx)
return HttpResponse(document)
def template(request):
view=get_template('home.html')
document=view.render({})
return HttpResponse(document)
def shortcuts(request):
return render(request, 'extend.html', {'name_person': 'Laura'})
def variables(request):
p1=Persona('Lalo', 'Díaz')
temas=['Templates', 'Models']
otros_temas=[]
# name='Juanito'
# last_name='Ramirez'
doc_externo=open('/var/www/html/proyectos-django/learnDjango/learnDjango/templates/home.html')
plt=Template(doc_externo.read())
doc_externo.close()
ctx=Context({
'name_person': p1.name,
'last_name': p1.last_name,
'temas': temas,
'otros_temas': otros_temas
})
document=plt.render(ctx)
return HttpResponse(document)
def date(request):
current_date=datetime.datetime.now()
return HttpResponse(current_date)
def calculaEdad(request, old, year):
edadActual=old
periodo=year-2019
edadFutura=edadActual + periodo
documento="<html><h2>En el año %s tendrás %s años</h2></html>" % (year, edadFutura)
return HttpResponse(documento)
|
from peewee import *
from faker import Faker
# Used to generate random values for the rows
fake = Faker()
# Create the connection to the DB
db = PostgresqlDatabase(
'random_cats',
user='the_cat',
password='secretcat123',
host='localhost',
port=5432
)
# ORM model for a single table
class MyCats(Model):
nombre = CharField()
imagen = CharField()
class Meta:
database = db
table_name = 'my_cats'
def __str__(self):
return f"ID: {self.id}\nNombre: {self.nombre}\nImagen: {self.imagen}"
# Connect to the DB
db.connect()
print("¡Conectado!")
# Create a record
some_cat = MyCats.create(nombre=fake.unique.first_name(), imagen=fake.url())
print("Inserté un nuevo registro:")
print(some_cat)
# Fetch just one record
some_other_cat = MyCats.get()
print("Busqué un registro:")
print(some_other_cat)
# Update some records (the model field is 'nombre', not 'name')
some_cat.nombre = fake.unique.first_name()
some_other_cat.nombre = fake.unique.first_name()
some_other_cat.imagen = fake.url()
some_cat.save()
some_other_cat.save()
print("Actualicé mis registros:")
print(some_cat)
print(some_other_cat)
# Add a new record if it does not already exist
charizardo = MyCats.get_or_create(nombre='Charizardo', imagen='https://purr.objects-us-east-1.dream.io/i/G4Iu1.jpg')
print("Inserté un nuevo registro:")
print(charizardo)
# Fetch multiple records
print("Busco múltiples registros:")
cats = MyCats.select()
for _cat in cats:
print(_cat)
listita = [_cat.nombre for _cat in cats]
print(listita)
# Delete some records
print("Borro algunos registros:")
print(MyCats.delete().where(MyCats.id == MyCats.get().id).execute())
# print(some_cat.delete_instance())
# *ALWAYS* remember to close the connection
db.close()
|
#!/usr/bin/env python3
# Copyright © 2019-2020 Broadcom. All rights reserved.
# The term “Broadcom” refers to Broadcom Inc. and/or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may also obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pyfos_brocade_extension.py(pyGen v1.0.0)
"""
:mod:`pyfos_brocade_extension` - PyFOS module for management and\
statistics for brocade extension.
******************************************************************************\
*******************************************************************************
The :mod:`pyfos_brocade_extension` module provides PyFOS support for management and\
statistics for Brocade extension.
"""
# Start module imports
from pyfos import pyfos_rest_util
from pyfos.pyfos_type import pyfos_type
from pyfos import pyfos_version as version
# End module imports
class global_lan_statistics(pyfos_rest_util.rest_object):
"""Class of global_lan_statistics
*Description global_lan_statistics*
Represents global LAN DP statistics for extension blade or system.
Important class members of global_lan_statistics:
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| Attribute Name | Description | Frequently Used Methods |
+==========================================================+==================================+============================================================================+
| slot | In case of non-chassis system, | :func:`peek_slot` |
| | the slot number is always 0. | :func:`set_slot` |
| | In case of chassis system, it | |
| | is the slot number of chassis | |
| | in which the extension blade | |
| | is inserted in. In case of | |
| | chassis, slot number is | |
| | non-zero value. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| dp-id | Extension Data Path Processor | :func:`peek_dp_id` |
| | ID. Based on platform either | :func:`set_dp_id` |
| | it will have a single DP or | |
| | dual DP. In case of single DP | |
| | only DP0 is supported, and in | |
| | case of dual DP both DP0 and | |
| | DP1 are supported 0 : DP0 1 : | |
| | DP1. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-1500-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_1500_bytes` |
| | of size equal to & greater | |
| | than 1024 bytes but less than | |
| | 1500 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-as-is-ip-pdu-drops | Number of as-is Tx IP PDUs | :func:`peek_out_as_is_ip_pdu_drops` |
| | dropped. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-error-parity | Number of Parity errors | :func:`peek_in_error_parity` |
| | detected on Rx packets. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| active-tcp-connections | Active TCP connection count. | :func:`peek_active_tcp_connections` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-pdus | Number of UDP PDUs received | :func:`peek_in_udp_pdus` |
| | from host. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| total-ipv6-packets | Total IPv6 packets received - | :func:`peek_total_ipv6_packets` |
| | IPv6 WQEs received by IP API | |
| | layer. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| syn-fail | Number of SYN packets dropped | :func:`peek_syn_fail` |
| | due to error. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-tcp-bytes | Total number of TCP bytes | :func:`peek_out_tcp_bytes` |
| | transmitted. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-icmp-pdu-drops-due-to-stream-flow-control | Number of ICMP PDU drop due to | :func:`peek_in_icmp_pdu_drops_due_to_stream_flow_control` |
| | stream flow control. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-128-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_128_bytes` |
| | of size equal to & greater | |
| | than 64 bytes but less than | |
| | 128 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| tcp-tcl-deny-connections | Number of TCP connection | :func:`peek_tcp_tcl_deny_connections` |
| | denied based on TCL deny rule. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| established-udp-connections | Number of UDP connections | :func:`peek_established_udp_connections` |
| | opened since bootup. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-tcl-lookup-fail-pdus | Number of UDP PDUs dropped due | :func:`peek_in_udp_tcl_lookup_fail_pdus` |
| | to TCL lookup. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-total-udp-pdu-drops | Total UDP PDUs dropped due to | :func:`peek_in_total_udp_pdu_drops` |
| | multiple reasons. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-error-tcp-checksum | Number of TCP checksum errors | :func:`peek_in_error_tcp_checksum` |
| | detected on Rx packets. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-256-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_256_bytes` |
| | of size equal to & greater | |
| | than 128 bytes but less than | |
| | 256 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-error-length | Number of Length errors | :func:`peek_in_error_length` |
| | detected on Rx packets. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-1500-bytes | Number of UDP packets | :func:`peek_out_udp_packets_1500_bytes` |
| | transmitted of size equal to & | |
| | greater than 1024 bytes but | |
| | less than 1500 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| maximum-udp-connection-exceeded-on-egress | Total UDP flows failed due to | :func:`peek_maximum_udp_connection_exceeded_on_egress` |
| | Maximum context on egress. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-total-icmp-pdu-drops | Number of ICMP PDU drops. | :func:`peek_in_total_icmp_pdu_drops` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-error-crc | Number of CRC errors detected | :func:`peek_in_error_crc` |
| | on Rx packets. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-64-bytes | Number of UDP packets | :func:`peek_out_udp_packets_64_bytes` |
| | transmitted of size less than | |
| | 64 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| drop-packets | Number of LSM packet dropped | :func:`peek_drop_packets` |
| | in egress. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| flow-control-on | Flow control on from FTNL. | :func:`peek_flow_control_on` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| active-tcp-connections-on-remote-backup | Active clone HCL TCP | :func:`peek_active_tcp_connections_on_remote_backup` |
| | connection count on Remote | |
| | Backup tunnel. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| flow-control-off | Flow control off from FTNL. | :func:`peek_flow_control_off` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| maximum-udp-connection-exceeded | Total UDP flows failed due to | :func:`peek_maximum_udp_connection_exceeded` |
| | Maximum context on ingress and | |
| | egress. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-pdu-preserve-on | Number of times transmit with | :func:`peek_out_pdu_preserve_on` |
| | PDU preserve ON. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| maximum-tcp-connection-exceeded-as-client | Maximum connection exceeded on | :func:`peek_maximum_tcp_connection_exceeded_as_client` |
| | listen connection allocation. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-as-is-ip-tcl-deny-pdus | non-terminated TCP, | :func:`peek_in_as_is_ip_tcl_deny_pdus` |
| | non-batched UDP, Non-ICMP PDU | |
| | dropped due to TCL deny | |
| | status. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-total-as-is-ip-pdu-drops | Total non-terminated TCP, | :func:`peek_in_total_as_is_ip_pdu_drops` |
| | non-batched UDP, Non-ICMP PDU | |
| | dropped in ingress. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| udp-packets-sent-as-is | Total UDP packets sent as-is. | :func:`peek_udp_packets_sent_as_is` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-tcp-bytes | Total number of TCP bytes | :func:`peek_in_tcp_bytes` |
| | received. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-tcl-deny-pdus | Number of UDP PDUs dropped due | :func:`peek_in_udp_tcl_deny_pdus` |
| | to TCL deny. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-1024-bytes | Number of UDP packets | :func:`peek_out_udp_packets_1024_bytes` |
| | transmitted of size equal to & | |
| | greater than 512 bytes but | |
| | less than 1024 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-3000-bytes | Number of UDP packets | :func:`peek_out_udp_packets_3000_bytes` |
| | transmitted of size equal to & | |
| | greater than 1500 bytes but | |
| | less than 3000 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| stale-reset-from-host | Stale reset from host. | :func:`peek_stale_reset_from_host` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-3000-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_3000_bytes` |
| | of size equal to & greater | |
| | than 1500 bytes but less than | |
| | 3000 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-icmp-tcl-deny-pdus | Number of ICMP PDUs dropped | :func:`peek_in_icmp_tcl_deny_pdus` |
| | due to TCL returning deny | |
| | status. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| closed-udp-connections | Number of UDP connections | :func:`peek_closed_udp_connections` |
| | closed since bootup. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| tcp-tcl-lookup-fail | Number of TCP connection | :func:`peek_tcp_tcl_lookup_fail` |
| | denied based on TCL lookup | |
| | failure. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-as-is-ip-tcl-lookup-fail-pdus | non-terminated TCP, | :func:`peek_in_as_is_ip_tcl_lookup_fail_pdus` |
| | non-batched UDP, Non-ICMP PDU | |
| | dropped due to TCL lookup | |
| | failure. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| established-tcp-connections | Total LAN TCP connections | :func:`peek_established_tcp_connections` |
| | established. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-6000-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_6000_bytes` |
| | of size equal to & greater | |
| | than 4500 bytes but less than | |
| | 6000 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| closed-tcp-connections | Total LAN TCP connections | :func:`peek_closed_tcp_connections` |
| | closed. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-pdu-preserve-on | Number of packets received | :func:`peek_in_pdu_preserve_on` |
| | with PDU preserve ON. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-1024-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_1024_bytes` |
| | of size equal to & greater | |
| | than 512 bytes but less than | |
| | 1024 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-error-mac | Number of MAC errors detected | :func:`peek_in_error_mac` |
| | on Rx packets. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-4500-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_4500_bytes` |
| | of size equal to & greater | |
| | than 3000 bytes but less than | |
| | 4500 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-4500-bytes | Number of UDP packets | :func:`peek_out_udp_packets_4500_bytes` |
| | transmitted of size equal to & | |
| | greater than 3000 bytes but | |
| | less than 4500 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| maximum-tcp-connection-per-second-exceeded-as-client | Maximum connection per second | :func:`peek_maximum_tcp_connection_per_second_exceeded_as_client` |
| | exceeded on listen connection | |
| | allocation. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| udp-route-lookup-fail | Drop of NT PDUs on egress due | :func:`peek_udp_route_lookup_fail` |
| | to route lookup failure. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-as-is-ip-pdu-drops-due-to-stream-flow-control | non-terminated TCP, | :func:`peek_in_as_is_ip_pdu_drops_due_to_stream_flow_control` |
| | non-batched UDP, Non-ICMP PDU | |
| | dropped due to stream flow | |
| | control. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| maximum-tcp-connection-exceeded-as-server | Maximum connection exceeded | :func:`peek_maximum_tcp_connection_exceeded_as_server` |
| | during active connection | |
| | allocation. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-pdu-drops-due-to-stream-flow-control | Number of UDP PDUs dropped due | :func:`peek_in_udp_pdu_drops_due_to_stream_flow_control` |
| | to stream flow control. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-icmp-tcl-lookup-fail-pdus | Number of ICMP PDUs dropped | :func:`peek_in_icmp_tcl_lookup_fail_pdus` |
| | due to TCL lookup failure. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| active-tcp-connections-on-local-backup | Active clone HCL TCP | :func:`peek_active_tcp_connections_on_local_backup` |
| | connection count on Local | |
| | Backup tunnel. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| syn-received | Number of SYN packets | :func:`peek_syn_received` |
| | received. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-256-bytes | Number of UDP packets | :func:`peek_out_udp_packets_256_bytes` |
| | transmitted of size equal to & | |
| | greater than 128 bytes but | |
| | less than 256 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| drop-bytes | LSM Tx TCP drop bytes. | :func:`peek_drop_bytes` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| udp-pdu-drops-due-to-pko-flow-control | Drop of NT PDUs on egress due | :func:`peek_udp_pdu_drops_due_to_pko_flow_control` |
| | to PKO flow control. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-512-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_512_bytes` |
| | of size equal to & greater | |
| | than 256 bytes but less than | |
| | 512 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| active-udp-connections | Number of active NT UDP flows. | :func:`peek_active_udp_connections` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-as-is-ip-pdus | Number of as-is Tx IP PDUs. | :func:`peek_out_as_is_ip_pdus` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-icmp-pdus | Number of ICMP PDUs received. | :func:`peek_in_icmp_pdus` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-6000-bytes | Number of UDP packets | :func:`peek_out_udp_packets_6000_bytes` |
| | transmitted of size equal to & | |
| | greater than 4500 bytes but | |
| | less than 6000 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-64-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_64_bytes` |
| | of size less than 64 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-udp-packets-9000-bytes | Number of UDP packets received | :func:`peek_in_udp_packets_9000_bytes` |
| | of size equal to & greater | |
| | than 6000 bytes but less than | |
| | 9000 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-512-bytes | Number of UDP packets | :func:`peek_out_udp_packets_512_bytes` |
| | transmitted of size equal to & | |
| | greater than 256 bytes but | |
| | less than 512 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-icmp-pdu-drops | Number of Tx ICMP PDU dropped. | :func:`peek_out_icmp_pdu_drops` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-as-is-ip-pdus | non-terminated TCP, | :func:`peek_in_as_is_ip_pdus` |
| | non-batched UDP, Non-ICMP PDU | |
| | received. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-pdu-drops | Number of Tx UDP PDUs dropped | :func:`peek_out_udp_pdu_drops` |
| | due to different reasons. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-128-bytes | Number of UDP packets | :func:`peek_out_udp_packets_128_bytes` |
| | transmitted of size equal to & | |
| | greater than 64 bytes but less | |
| | than 128 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| in-error-ip-checksum | Number of IP checksum errors | :func:`peek_in_error_ip_checksum` |
| | detected on Rx packets. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| maximum-tcp-connection-per-second-exceeded-as-server | Maximum connection per second | :func:`peek_maximum_tcp_connection_per_second_exceeded_as_server` |
| | exceeded on listen connection | |
| | allocation. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-packets-9000-bytes | Number of UDP packets | :func:`peek_out_udp_packets_9000_bytes` |
| | transmitted of size equal to & | |
| | greater than 6000 bytes but | |
| | less than 9000 bytes. | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-udp-pdus | Number of Tx UDP PDU. | :func:`peek_out_udp_pdus` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
| out-icmp-pdus | Number of Tx ICMP PDUs. | :func:`peek_out_icmp_pdus` |
| | | |
+----------------------------------------------------------+----------------------------------+----------------------------------------------------------------------------+
*Object functions for global_lan_statistics*
.. function:: get(session)
Get the instances of class "global_lan_statistics" from the switch.
The object can be printed using :func:`pyfos_util.response_print`.
:param session: The session handler returned by
:func:`pyfos_auth.login`.
:rtype: A dictionary of errors or a success response.
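
A minimal usage sketch, assuming ``session`` is a handle already
returned by :func:`pyfos_auth.login` and that this class is importable
from ``pyfos.pyfos_brocade_extension`` (adjust the import path to your
installation)::

    from pyfos import pyfos_util
    from pyfos.pyfos_brocade_extension import global_lan_statistics

    # "session" is assumed to be a live handle from pyfos_auth.login();
    # obtaining and closing it is not shown here.
    stats = global_lan_statistics.get(session)
    # Prints either the retrieved object(s) or the error dictionary.
    pyfos_util.response_print(stats)
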
*Class functions for global_lan_statistics*
.. function:: peek_slot()
Reads the value assigned to slot in the object.
:rtype: None on error and a value on success.
.. function:: set_slot(value)
Set the value of slot in the object.
:rtype: A dictionary of errors or a success response and a value
with "slot" as the key.
.. function:: peek_dp_id()
Reads the value assigned to dp-id in the object.
:rtype: None on error and a value on success.
.. function:: set_dp_id(value)
Set the value of dp-id in the object.
:rtype: A dictionary of errors or a success response and a value
with "dp-id" as the key.
.. function:: peek_in_udp_packets_1500_bytes()
Reads the value assigned to in-udp-packets-1500-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_as_is_ip_pdu_drops()
Reads the value assigned to out-as-is-ip-pdu-drops in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_error_parity()
Reads the value assigned to in-error-parity in the object.
:rtype: None on error and a value on success.
.. function:: peek_active_tcp_connections()
Reads the value assigned to active-tcp-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_pdus()
Reads the value assigned to in-udp-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_total_ipv6_packets()
Reads the value assigned to total-ipv6-packets in the object.
:rtype: None on error and a value on success.
.. function:: peek_syn_fail()
Reads the value assigned to syn-fail in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_tcp_bytes()
Reads the value assigned to out-tcp-bytes in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_icmp_pdu_drops_due_to_stream_flow_control()
Reads the value assigned to
in-icmp-pdu-drops-due-to-stream-flow-control in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_128_bytes()
Reads the value assigned to in-udp-packets-128-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_tcp_tcl_deny_connections()
Reads the value assigned to tcp-tcl-deny-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_established_udp_connections()
Reads the value assigned to established-udp-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_tcl_lookup_fail_pdus()
Reads the value assigned to in-udp-tcl-lookup-fail-pdus in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_total_udp_pdu_drops()
Reads the value assigned to in-total-udp-pdu-drops in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_error_tcp_checksum()
Reads the value assigned to in-error-tcp-checksum in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_256_bytes()
Reads the value assigned to in-udp-packets-256-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_error_length()
Reads the value assigned to in-error-length in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_1500_bytes()
Reads the value assigned to out-udp-packets-1500-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_maximum_udp_connection_exceeded_on_egress()
Reads the value assigned to
maximum-udp-connection-exceeded-on-egress in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_total_icmp_pdu_drops()
Reads the value assigned to in-total-icmp-pdu-drops in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_error_crc()
Reads the value assigned to in-error-crc in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_64_bytes()
Reads the value assigned to out-udp-packets-64-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_drop_packets()
Reads the value assigned to drop-packets in the object.
:rtype: None on error and a value on success.
.. function:: peek_flow_control_on()
Reads the value assigned to flow-control-on in the object.
:rtype: None on error and a value on success.
.. function:: peek_active_tcp_connections_on_remote_backup()
Reads the value assigned to
active-tcp-connections-on-remote-backup in the object.
:rtype: None on error and a value on success.
.. function:: peek_flow_control_off()
Reads the value assigned to flow-control-off in the object.
:rtype: None on error and a value on success.
.. function:: peek_maximum_udp_connection_exceeded()
Reads the value assigned to maximum-udp-connection-exceeded in
the object.
:rtype: None on error and a value on success.
.. function:: peek_out_pdu_preserve_on()
Reads the value assigned to out-pdu-preserve-on in the object.
:rtype: None on error and a value on success.
.. function:: peek_maximum_tcp_connection_exceeded_as_client()
Reads the value assigned to
maximum-tcp-connection-exceeded-as-client in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_as_is_ip_tcl_deny_pdus()
Reads the value assigned to in-as-is-ip-tcl-deny-pdus in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_total_as_is_ip_pdu_drops()
Reads the value assigned to in-total-as-is-ip-pdu-drops in the
object.
:rtype: None on error and a value on success.
.. function:: peek_udp_packets_sent_as_is()
Reads the value assigned to udp-packets-sent-as-is in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_tcp_bytes()
Reads the value assigned to in-tcp-bytes in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_tcl_deny_pdus()
Reads the value assigned to in-udp-tcl-deny-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_1024_bytes()
Reads the value assigned to out-udp-packets-1024-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_3000_bytes()
Reads the value assigned to out-udp-packets-3000-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_stale_reset_from_host()
Reads the value assigned to stale-reset-from-host in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_3000_bytes()
Reads the value assigned to in-udp-packets-3000-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_icmp_tcl_deny_pdus()
Reads the value assigned to in-icmp-tcl-deny-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_closed_udp_connections()
Reads the value assigned to closed-udp-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_tcp_tcl_lookup_fail()
Reads the value assigned to tcp-tcl-lookup-fail in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_as_is_ip_tcl_lookup_fail_pdus()
Reads the value assigned to in-as-is-ip-tcl-lookup-fail-pdus in
the object.
:rtype: None on error and a value on success.
.. function:: peek_established_tcp_connections()
Reads the value assigned to established-tcp-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_6000_bytes()
Reads the value assigned to in-udp-packets-6000-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_closed_tcp_connections()
Reads the value assigned to closed-tcp-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_pdu_preserve_on()
Reads the value assigned to in-pdu-preserve-on in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_1024_bytes()
Reads the value assigned to in-udp-packets-1024-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_error_mac()
Reads the value assigned to in-error-mac in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_4500_bytes()
Reads the value assigned to in-udp-packets-4500-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_4500_bytes()
Reads the value assigned to out-udp-packets-4500-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_maximum_tcp_connection_per_second_exceeded_as_client()
Reads the value assigned to
maximum-tcp-connection-per-second-exceeded-as-client in the
object.
:rtype: None on error and a value on success.
.. function:: peek_udp_route_lookup_fail()
Reads the value assigned to udp-route-lookup-fail in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_as_is_ip_pdu_drops_due_to_stream_flow_control()
Reads the value assigned to
in-as-is-ip-pdu-drops-due-to-stream-flow-control in the
object.
:rtype: None on error and a value on success.
.. function:: peek_maximum_tcp_connection_exceeded_as_server()
Reads the value assigned to
maximum-tcp-connection-exceeded-as-server in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_pdu_drops_due_to_stream_flow_control()
Reads the value assigned to
in-udp-pdu-drops-due-to-stream-flow-control in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_icmp_tcl_lookup_fail_pdus()
Reads the value assigned to in-icmp-tcl-lookup-fail-pdus in the
object.
:rtype: None on error and a value on success.
.. function:: peek_active_tcp_connections_on_local_backup()
Reads the value assigned to
active-tcp-connections-on-local-backup in the object.
:rtype: None on error and a value on success.
.. function:: peek_syn_received()
Reads the value assigned to syn-received in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_256_bytes()
Reads the value assigned to out-udp-packets-256-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_drop_bytes()
Reads the value assigned to drop-bytes in the object.
:rtype: None on error and a value on success.
.. function:: peek_udp_pdu_drops_due_to_pko_flow_control()
Reads the value assigned to udp-pdu-drops-due-to-pko-flow-control
in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_512_bytes()
Reads the value assigned to in-udp-packets-512-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_active_udp_connections()
Reads the value assigned to active-udp-connections in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_as_is_ip_pdus()
Reads the value assigned to out-as-is-ip-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_icmp_pdus()
Reads the value assigned to in-icmp-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_6000_bytes()
Reads the value assigned to out-udp-packets-6000-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_64_bytes()
Reads the value assigned to in-udp-packets-64-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_udp_packets_9000_bytes()
Reads the value assigned to in-udp-packets-9000-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_512_bytes()
Reads the value assigned to out-udp-packets-512-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_icmp_pdu_drops()
Reads the value assigned to out-icmp-pdu-drops in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_as_is_ip_pdus()
Reads the value assigned to in-as-is-ip-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_pdu_drops()
Reads the value assigned to out-udp-pdu-drops in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_128_bytes()
Reads the value assigned to out-udp-packets-128-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_error_ip_checksum()
Reads the value assigned to in-error-ip-checksum in the object.
:rtype: None on error and a value on success.
.. function:: peek_maximum_tcp_connection_per_second_exceeded_as_server()
Reads the value assigned to
maximum-tcp-connection-per-second-exceeded-as-server in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_packets_9000_bytes()
Reads the value assigned to out-udp-packets-9000-bytes in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_udp_pdus()
Reads the value assigned to out-udp-pdus in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_icmp_pdus()
Reads the value assigned to out-icmp-pdus in the object.
:rtype: None on error and a value on success.
"""
def __init__(self, dictvalues=None):
clsuri = "/rest" + "/running" + "/brocade-extension" +\
"/global-lan-statistics"
clstype = pyfos_rest_util.rest_obj_type.global_lan_statistics
clsver = version.VER_RANGE_900_and_ABOVE
super().__init__(clstype, clsuri, clsver)
self.add(pyfos_rest_util.rest_attribute("slot", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("dp-id", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_KEY))
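# All attributes registered below are read-only counters
# (REST_ATTRIBUTE_NOT_CONFIG); only slot and dp-id above are keys.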
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-1500-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-as-is-ip-pdu-drops",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-error-parity",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("active-tcp-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("total-ipv6-packets",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("syn-fail",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-tcp-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-icmp-pdu-drops-due-to-stream-flow-control",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-128-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("tcp-tcl-deny-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("established-udp-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-tcl-lookup-fail-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-total-udp-pdu-drops",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-error-tcp-checksum",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-256-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-error-length",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-1500-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"maximum-udp-connection-exceeded-on-egress",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-total-icmp-pdu-drops",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-error-crc",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-64-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("drop-packets",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("flow-control-on",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"active-tcp-connections-on-remote-backup",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("flow-control-off",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"maximum-udp-connection-exceeded", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-pdu-preserve-on",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"maximum-tcp-connection-exceeded-as-client",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-as-is-ip-tcl-deny-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-total-as-is-ip-pdu-drops",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("udp-packets-sent-as-is",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-tcp-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-tcl-deny-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-1024-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-3000-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("stale-reset-from-host",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-3000-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-icmp-tcl-deny-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("closed-udp-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("tcp-tcl-lookup-fail",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-as-is-ip-tcl-lookup-fail-pdus", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("established-tcp-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-6000-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("closed-tcp-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-pdu-preserve-on",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-1024-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-error-mac",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-4500-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-4500-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"maximum-tcp-connection-per-second-exceeded-as-client",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("udp-route-lookup-fail",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-as-is-ip-pdu-drops-due-to-stream-flow-control",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"maximum-tcp-connection-exceeded-as-server",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-udp-pdu-drops-due-to-stream-flow-control",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-icmp-tcl-lookup-fail-pdus", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"active-tcp-connections-on-local-backup",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("syn-received",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-256-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("drop-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"udp-pdu-drops-due-to-pko-flow-control",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-512-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("active-udp-connections",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-as-is-ip-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-icmp-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-6000-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-64-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-udp-packets-9000-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-512-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-icmp-pdu-drops",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-as-is-ip-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-pdu-drops",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-128-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-error-ip-checksum",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"maximum-tcp-connection-per-second-exceeded-as-server",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-packets-9000-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-udp-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-icmp-pdus",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.load(dictvalues, 1)
class lan_flow_statistics(pyfos_rest_util.rest_object):
"""Class of lan_flow_statistics
*Description lan_flow_statistics*
The LAN per-flow statistics.
Important class members of lan_flow_statistics:
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| Attribute Name | Description | Frequently Used Methods |
+=====================================+==============================================+=======================================================+
| dp-id | Extension Data Path Processor ID | :func:`peek_dp_id` |
| | associated with flow. Based on platform | :func:`set_dp_id` |
| | either it will have a single DP or dual | |
| | DP. In case of single DP only DP0 is | |
| | supported, and in case of dual DP both DP0 | |
| | and DP1 are supported 0 : DP0 1 : DP1. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| slot | In case of non-chassis system, the slot | :func:`peek_slot` |
| | number is always 0. In case of chassis | :func:`set_slot` |
| | system, it is the slot number of chassis | |
| | in which the extension blade is inserted | |
| | in. In case of chassis, slot number is | |
| | non-zero value. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| flow-index | flow index associated with the LAN flow. | :func:`peek_flow_index` |
| | This is a dynamic index associated with | :func:`set_flow_index` |
| | the LAN flow. Depending on the LAN flow | |
| | behavior the index may change and also can | |
| | get reused after some time but at any | |
| | given time they will be unique. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-bytes-wan-compression | Total bytes sent compression engine on | :func:`peek_out_bytes_wan_compression` |
| | WAN. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-packets-lan-session-manager | Total packets received by LAN session | :func:`peek_in_packets_lan_session_manager` |
| | manager. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| vlan-priority | Specifies the VLAN priority associated | :func:`peek_vlan_priority` |
| | with the flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| traffic-control-list-name | The traffic-control-list name matching the | :func:`peek_traffic_control_list_name` |
| | flow filter to allow the traffic. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-bytes-lan-session-manager | Total bytes sent by LAN session manager. | :func:`peek_out_bytes_lan_session_manager` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| tcp-retransmits | TCP retransmits /lost packets. | :func:`peek_tcp_retransmits` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| zero-window-count | The count of TCP zero window encountered. | :func:`peek_zero_window_count` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| active-flow | Indicates that LAN flow is currently | :func:`peek_active_flow` |
| | active. true: The flow is active. false: | |
| | The flow is not active. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-drops-lan-session-manager | The number of drops at the ingress from | :func:`peek_in_drops_lan_session_manager` |
| | LAN session manager. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| destination-port | Remote destination port number of the LAN | :func:`peek_destination_port` |
| | flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-bytes-average | The throughput in bps for packets received | :func:`peek_in_bytes_average` |
| | via an extension tunnel over WAN per 30s | |
| | average. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| destination-ip-address | Destination IP address corresponding to | :func:`peek_destination_ip_address` |
| | the LAN flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-bytes-lan-session-manager | Total bytes received by LAN session | :func:`peek_in_bytes_lan_session_manager` |
| | manager. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-bytes-lan-compression | Total bytes received by compression engine | :func:`peek_in_bytes_lan_compression` |
| | from LAN. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-tcp-packets | Total TCP packets sent. | :func:`peek_out_tcp_packets` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| duplicate-acknowledgement | TCP duplicate ACK received. | :func:`peek_duplicate_acknowledgement` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| source-port | Source port number of the LAN flow. | :func:`peek_source_port` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-bytes-wan-compression | Total bytes received by compression engine | :func:`peek_in_bytes_wan_compression` |
| | from WAN. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-tcp-bytes | Total bytes received. | :func:`peek_in_tcp_bytes` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| vlan-id | Specifies the VLAN ID associated with the | :func:`peek_vlan_id` |
| | flow. When not set, this value will show | |
| | up as 0. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-packets-lan-session-manager | Total packets sent by LAN session manager. | :func:`peek_out_packets_lan_session_manager` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| zero-window-maximum-duration | The maximum of zero window duration | :func:`peek_zero_window_maximum_duration` |
| | encountered. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| source-ip-address | Source IP address corresponding to the LAN | :func:`peek_source_ip_address` |
| | flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| fast-retransmits | TCP fast retransmits count. | :func:`peek_fast_retransmits` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| rtt | round trip time. | :func:`peek_rtt` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| hcl-flow | Indicates that LAN flow is in HCL. true: | :func:`peek_hcl_flow` |
| | The flow is in HCL. false: The flow is not | |
| | in HCL. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| dscp | DSCP value for the LAN flow. | :func:`peek_dscp` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| crc-errors | Number of CRC errors encountered. | :func:`peek_crc_errors` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| lan-interface | The interface corresponding to the | :func:`peek_lan_interface` |
| | traffic. This could be either a GE port or | |
| | a LAG name associated with the LAN flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| local-host-mss | The local-host-mss is the MSS of the TCP | :func:`peek_local_host_mss` |
| | connection at the LAN ingress side | |
| | connected host. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| start-time | Indicates the LAN flow start time. | :func:`peek_start_time` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-tcp-bytes | Total bytes sent. | :func:`peek_out_tcp_bytes` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-bytes-average | The throughput in bps for packets sent | :func:`peek_out_bytes_average` |
| | over the extension tunnel on WAN per 30s | |
| | average. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| slow-retransmits | TCP slow retransmits count. | :func:`peek_slow_retransmits` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-drops-lan-session-manager | The number of drops at the egress from LAN | :func:`peek_out_drops_lan_session_manager` |
| | session manager. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| out-bytes-lan-compression | Total bytes sent by compression engine on | :func:`peek_out_bytes_lan_compression` |
| | LAN. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| in-tcp-packets | Total TCP packets received. | :func:`peek_in_tcp_packets` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| tcp-out-of-order-packets | TCP total out of order packets. | :func:`peek_tcp_out_of_order_packets` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| end-time | Indicates the LAN flow end time. | :func:`peek_end_time` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| remote-host-mss | The remote-host-mss is the MSS of the TCP | :func:`peek_remote_host_mss` |
| | connection at peer extension tunnel | |
| | endpoint connected host to its the LAN | |
| | ingress side. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| protocol | Describes that the Layer 4 protocol of the | :func:`peek_protocol` |
| | flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| mapped-tunnel | The interface used for extension-tunnel. | :func:`peek_mapped_tunnel` |
| | | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| ve-port | The VE port of the extension-tunnel | :func:`peek_mapped_tunnel_ve_port` |
| | interface. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
| qos | The IP priority QOS associated with the | :func:`peek_mapped_tunnel_qos` |
| | flow. | |
+-------------------------------------+----------------------------------------------+-------------------------------------------------------+
*Object functions for lan_flow_statistics*
.. function:: get(session)
Get the instances of class "lan_flow_statistics" from the switch.
The object can be printed using :func:`pyfos_util.response_print`.
:param session: The session handler returned by
:func:`pyfos_auth.login`.
:rtype: A dictionary of errors or a success response.
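
A minimal retrieval sketch, under the same assumptions as above
(``session`` comes from :func:`pyfos_auth.login`, the import path may
need adjusting, and error handling is omitted)::

    from pyfos.pyfos_brocade_extension import lan_flow_statistics

    result = lan_flow_statistics.get(session)
    # A GET on this list resource may return a list of objects; normalize.
    flows = result if isinstance(result, list) else [result]
    for flow in flows:
        print(flow.peek_source_ip_address(),
              flow.peek_destination_ip_address(),
              flow.peek_in_tcp_bytes(),
              flow.peek_out_tcp_bytes())
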
*Class functions for lan_flow_statistics*
.. function:: peek_dp_id()
Reads the value assigned to dp-id in the object.
:rtype: None on error and a value on success.
.. function:: set_dp_id(value)
Set the value of dp-id in the object.
:rtype: A dictionary of errors or a success response and a value
with "dp-id" as the key.
.. function:: peek_slot()
Reads the value assigned to slot in the object.
:rtype: None on error and a value on success.
.. function:: set_slot(value)
Set the value of slot in the object.
:rtype: A dictionary of errors or a success response and a value
with "slot" as the key.
.. function:: peek_flow_index()
Reads the value assigned to flow-index in the object.
:rtype: None on error and a value on success.
.. function:: set_flow_index(value)
Set the value of flow-index in the object.
:rtype: A dictionary of errors or a success response and a value
with "flow-index" as the key.
.. function:: peek_out_bytes_wan_compression()
Reads the value assigned to out-bytes-wan-compression in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_packets_lan_session_manager()
Reads the value assigned to in-packets-lan-session-manager in the
object.
:rtype: None on error and a value on success.
.. function:: peek_vlan_priority()
Reads the value assigned to vlan-priority in the object.
:rtype: None on error and a value on success.
.. function:: peek_traffic_control_list_name()
Reads the value assigned to traffic-control-list-name in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_bytes_lan_session_manager()
Reads the value assigned to out-bytes-lan-session-manager in the
object.
:rtype: None on error and a value on success.
.. function:: peek_tcp_retransmits()
Reads the value assigned to tcp-retransmits in the object.
:rtype: None on error and a value on success.
.. function:: peek_zero_window_count()
Reads the value assigned to zero-window-count in the object.
:rtype: None on error and a value on success.
.. function:: peek_active_flow()
Reads the value assigned to active-flow in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_drops_lan_session_manager()
Reads the value assigned to in-drops-lan-session-manager in the
object.
:rtype: None on error and a value on success.
.. function:: peek_destination_port()
Reads the value assigned to destination-port in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_bytes_average()
Reads the value assigned to in-bytes-average in the object.
:rtype: None on error and a value on success.
.. function:: peek_destination_ip_address()
Reads the value assigned to destination-ip-address in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_bytes_lan_session_manager()
Reads the value assigned to in-bytes-lan-session-manager in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_bytes_lan_compression()
Reads the value assigned to in-bytes-lan-compression in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_tcp_packets()
Reads the value assigned to out-tcp-packets in the object.
:rtype: None on error and a value on success.
.. function:: peek_duplicate_acknowledgement()
Reads the value assigned to duplicate-acknowledgement in the
object.
:rtype: None on error and a value on success.
.. function:: peek_source_port()
Reads the value assigned to source-port in the object.
:rtype: None on error and a value on success.
.. function:: peek_in_bytes_wan_compression()
Reads the value assigned to in-bytes-wan-compression in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_tcp_bytes()
Reads the value assigned to in-tcp-bytes in the object.
:rtype: None on error and a value on success.
.. function:: peek_vlan_id()
Reads the value assigned to vlan-id in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_packets_lan_session_manager()
Reads the value assigned to out-packets-lan-session-manager in
the object.
:rtype: None on error and a value on success.
.. function:: peek_zero_window_maximum_duration()
Reads the value assigned to zero-window-maximum-duration in the
object.
:rtype: None on error and a value on success.
.. function:: peek_source_ip_address()
Reads the value assigned to source-ip-address in the object.
:rtype: None on error and a value on success.
.. function:: peek_fast_retransmits()
Reads the value assigned to fast-retransmits in the object.
:rtype: None on error and a value on success.
.. function:: peek_rtt()
Reads the value assigned to rtt in the object.
:rtype: None on error and a value on success.
.. function:: peek_hcl_flow()
Reads the value assigned to hcl-flow in the object.
:rtype: None on error and a value on success.
.. function:: peek_dscp()
Reads the value assigned to dscp in the object.
:rtype: None on error and a value on success.
.. function:: peek_crc_errors()
Reads the value assigned to crc-errors in the object.
:rtype: None on error and a value on success.
.. function:: peek_lan_interface()
Reads the value assigned to lan-interface in the object.
:rtype: None on error and a value on success.
.. function:: peek_local_host_mss()
Reads the value assigned to local-host-mss in the object.
:rtype: None on error and a value on success.
.. function:: peek_start_time()
Reads the value assigned to start-time in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_tcp_bytes()
Reads the value assigned to out-tcp-bytes in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_bytes_average()
Reads the value assigned to out-bytes-average in the object.
:rtype: None on error and a value on success.
.. function:: peek_slow_retransmits()
Reads the value assigned to slow-retransmits in the object.
:rtype: None on error and a value on success.
.. function:: peek_out_drops_lan_session_manager()
Reads the value assigned to out-drops-lan-session-manager in the
object.
:rtype: None on error and a value on success.
.. function:: peek_out_bytes_lan_compression()
Reads the value assigned to out-bytes-lan-compression in the
object.
:rtype: None on error and a value on success.
.. function:: peek_in_tcp_packets()
Reads the value assigned to in-tcp-packets in the object.
:rtype: None on error and a value on success.
.. function:: peek_tcp_out_of_order_packets()
Reads the value assigned to tcp-out-of-order-packets in the
object.
:rtype: None on error and a value on success.
.. function:: peek_end_time()
Reads the value assigned to end-time in the object.
:rtype: None on error and a value on success.
.. function:: peek_remote_host_mss()
Reads the value assigned to remote-host-mss in the object.
:rtype: None on error and a value on success.
.. function:: peek_protocol()
Reads the value assigned to protocol in the object.
:rtype: None on error and a value on success.
.. function:: peek_mapped_tunnel()
Reads the value assigned to mapped-tunnel in the object.
:rtype: None on error and a value on success.
.. function:: peek_mapped_tunnel_ve_port()
Reads the value assigned to ve-port in the object.
:rtype: None on error and a value on success.
.. function:: peek_mapped_tunnel_qos()
Reads the value assigned to qos in the object.
:rtype: None on error and a value on success.
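    *Illustrative usage for lan_flow_statistics*

        A minimal usage sketch, assuming a ``session`` returned by
        :func:`pyfos_auth.login` and the standard ``get()`` class function of
        pyfos rest objects; the fields read below are examples only::

            # Fetch every LAN flow statistics entry from the switch.
            flows = lan_flow_statistics.get(session)
            if not isinstance(flows, list):
                flows = [flows]
            for flow in flows:
                # Read a few frequently used counters for each flow.
                print(flow.peek_source_ip_address(), flow.peek_source_port(),
                      flow.peek_protocol(), flow.peek_in_tcp_bytes(),
                      flow.peek_out_tcp_bytes(), flow.peek_rtt())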
"""
def __init__(self, dictvalues=None):
clsuri = "/rest" + "/running" + "/brocade-extension" +\
"/lan-flow-statistics"
clstype = pyfos_rest_util.rest_obj_type.lan_flow_statistics
clsver = version.VER_RANGE_900_and_ABOVE
super().__init__(clstype, clsuri, clsver)
self.add(pyfos_rest_util.rest_attribute("dp-id", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("slot", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("flow-index",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("out-bytes-wan-compression",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-packets-lan-session-manager", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("vlan-priority",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("traffic-control-list-name",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"out-bytes-lan-session-manager", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("tcp-retransmits",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("zero-window-count",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("active-flow",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-drops-lan-session-manager", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("destination-port",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-bytes-average",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("destination-ip-address",
pyfos_type.type_ip_addr, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"in-bytes-lan-session-manager", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-bytes-lan-compression",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-tcp-packets",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("duplicate-acknowledgement",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("source-port",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-bytes-wan-compression",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-tcp-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("vlan-id",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"out-packets-lan-session-manager", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"zero-window-maximum-duration", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("source-ip-address",
pyfos_type.type_ip_addr, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("fast-retransmits",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("rtt", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("hcl-flow",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("dscp", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("crc-errors",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("lan-interface",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("local-host-mss",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("start-time",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-tcp-bytes",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-bytes-average",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("slow-retransmits",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"out-drops-lan-session-manager", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("out-bytes-lan-compression",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("in-tcp-packets",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("tcp-out-of-order-packets",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("end-time",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("remote-host-mss",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("protocol",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("mapped-tunnel",
pyfos_type.type_na, dict(),
pyfos_rest_util.REST_ATTRIBUTE_CONTAINER_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("ve-port",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG),
['mapped-tunnel'])
self.add(pyfos_rest_util.rest_attribute("qos", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG),
['mapped-tunnel'])
self.load(dictvalues, 1)
class traffic_control_list(pyfos_rest_util.rest_object):
"""Class of traffic_control_list
*Description traffic_control_list*
Represents traffic control lists in order to manage IP Extension LAN
flows.
Important class members of traffic_control_list:
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| Attribute Name | Description | Frequently Used Methods |
+=======================================+==========================================+=========================================================+
| traffic-control-list-name | Name of the Traffic-Control-List. | :func:`peek_traffic_control_list_name` |
| | | :func:`set_traffic_control_list_name` |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| target-ve-port | The VE port of the extension-tunnel | :func:`peek_target_ve_port` |
| | used for allowing a LAN ingress | :func:`set_target_ve_port` |
| | traffic to be sent over the WAN. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| action | Set the action for this TCL. The TCL | :func:`peek_action` |
| | can be programmed to allow a traffic | :func:`set_action` |
| | or deny the traffic. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| segment-preservation-enabled | Is segment preservation for this TCL | :func:`peek_segment_preservation_enabled` |
| | enabled. Default: false, Values | :func:`set_segment_preservation_enabled` |
| | supported are true/false. false - | |
| | Disabled true - Enabled | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| port | The protocol port input filter for | :func:`peek_port` |
| | this TCL. The port arguments can be | :func:`set_port` |
| | specified as a single port or in case | |
| | of multiple ports a comma separated | |
| | list of ports or else a range of ports | |
| | can be specified or a combination. | |
| | example : 22 or 600-603 or 300,302,305 | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| source-address | Source IP address input filter for | :func:`peek_source_address` |
| | this TCL. | :func:`set_source_address` |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| target-slot | In case of non-chassis system, the | :func:`peek_target_slot` |
| | slot number is always 0. In case of | :func:`set_target_slot` |
| | chassis system, it is the slot number | |
| | of chassis in which the extension | |
| | blade is inserted in. In case of | |
| | chassis, slot number is non-zero | |
| | value. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| target-dp-id | Extension Data Path Processor ID. | :func:`peek_target_dp_id` |
| | Based on platform either it will have | :func:`set_target_dp_id` |
| | a single DP or dual DP. In case of | |
| | single DP only DP0 is supported, and | |
| | in case of dual DP both DP0 and DP1 | |
| | are supported 0 : DP0 1 : DP1. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| l4-protocol | The Layer 4 protocol input filter for | :func:`peek_l4_protocol` |
| | this TCL. The value can be a well | :func:`set_l4_protocol` |
| | known protocol string value or | |
| | otherwise a L4 protocol number. The | |
| | 'any' protocol string is the default | |
| | value and is meant to match any L4 | |
| | protocol value. The valid range for L4 | |
| | protocol is from 0-255. The list of | |
| | known protocol string values from | |
| | system are as below: ICMP ICMP6 TCP | |
| | UDP VRRP | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| target-qos | QoS priority associated with an | :func:`peek_target_qos` |
| | extension-tunnel to be used to allow a | :func:`set_target_qos` |
| | LAN ingress traffic over WAN. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| reset-propagation-enabled | Is End to End reset propagation for | :func:`peek_reset_propagation_enabled` |
| | this TCL enabled. Default: false, | :func:`set_reset_propagation_enabled` |
| | Values supported are true/false. false | |
| | - Disabled true - Enabled | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| source-address-prefix-length | The prefix length operator for source | :func:`peek_source_address_prefix_length` |
| | IP address input filter. | :func:`set_source_address_prefix_length` |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| application | The application input filter for this | :func:`peek_application` |
| | TCL. This includes a list of known | :func:`set_application` |
| | apps already present or a user defined | |
| | app-type name. The 'any' application | |
| | type name is a special value to | |
| | identify any application. Below are | |
| | few examples of system defined known | |
| | application types: CIFS Data-Domain | |
| | FCIP FTP HTTP HTTPS | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| destination-address-prefix-length | The prefix length operator for | :func:`peek_destination_address_prefix_length` |
| | destination IP address input filter. | :func:`set_destination_address_prefix_length` |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| dscp | The DSCP input filter for this TCL. | :func:`peek_dscp` |
| | The values supported are from 0-63. | :func:`set_dscp` |
| | The value 'any' is default value and | |
| | is meant to match any dscp value | |
| | specified. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| vlan | The VLAN input filter for this TCL. | :func:`peek_vlan` |
| | The values supported are from 1-4095. | :func:`set_vlan` |
| | The value 'any' is the default value | |
| | and is meant to match any vlan-id | |
| | value specified. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| l2cos | The L2CoS input filter for this TCL. | :func:`peek_l2cos` |
| | The valid values are from 0-7. The | :func:`set_l2cos` |
| | value 'any' is the default value and | |
| | is meant to match any value of l2CoS. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| destination-address | Destination IP address input filter | :func:`peek_destination_address` |
| | for this TCL. | :func:`set_destination_address` |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| admin-state-enabled | Is TCL admin status enabled. Default: | :func:`peek_admin_state_enabled` |
| | false, Values supported are | :func:`set_admin_state_enabled` |
| | true/false. false - Disabled true - | |
| | Enabled | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| priority | TCL priority provides an order of | :func:`peek_priority` |
| | precedence to the TCL rule within the | :func:`set_priority` |
| | overall TCL list. The priority 65535 | |
| | is a special priority associated or | |
| | applicable to only the default TCL | |
| | rule and no other user configured TCL | |
| | is allowed this value. The priority 0 | |
| | is a also treated as a special | |
| | priority and is only to indicate that | |
| | the priority is not set. A valid user | |
| | configured TCL priority must use a | |
| | value from 1 to 65534 only. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| non-terminated-enabled | Is non terminated traffic for this TCL | :func:`peek_non_terminated_enabled` |
| | enabled. Default: false, Values | :func:`set_non_terminated_enabled` |
| | supported are true/false. false - | |
| | Disabled true - Enabled | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| hit-count | Total number of times this TCL rule | :func:`peek_hit_count` |
| | was hit. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
| cp-dp-synchronized | Indicates whether an | :func:`peek_cp_dp_synchronized` |
| | admin-statatus-enabled TCL is | |
| | synchronized between the CP->DP. In | |
| | case of error the value will be set to | |
| | false false - CP-DP synchronizing | |
| | failed. true - CP-DP is synchronized. | |
+---------------------------------------+------------------------------------------+---------------------------------------------------------+
*Object functions for traffic_control_list*
.. function:: get()
Get the instances of class "traffic_control_list from switch. The
object can be printed using :func:`pyfos_utils.response_print`.
:param session: The session handler returned by
:func:`pyfos_auth.login`.
:rtype: A dictionary of errors or a success response.
*Class functions for traffic_control_list*
.. function:: peek_traffic_control_list_name()
Reads the value assigned to traffic-control-list-name in the
object.
:rtype: None on error and a value on success.
.. function:: set_traffic_control_list_name(value)
Set the value of traffic-control-list-name in the object.
:rtype: A dictionary of error or a success response and a value
with "traffic-control-list-name" as the key
.. function:: peek_target_ve_port()
Reads the value assigned to target-ve-port in the object.
:rtype: None on error and a value on success.
.. function:: set_target_ve_port(value)
Set the value of target-ve-port in the object.
:rtype: A dictionary of error or a success response and a value
with "target-ve-port" as the key
.. function:: peek_action()
Reads the value assigned to action in the object.
:rtype: None on error and a value on success.
.. function:: set_action(value)
Set the value of action in the object.
:rtype: A dictionary of error or a success response and a value
with "action" as the key
.. function:: peek_segment_preservation_enabled()
Reads the value assigned to segment-preservation-enabled in the
object.
:rtype: None on error and a value on success.
.. function:: set_segment_preservation_enabled(value)
Set the value of segment-preservation-enabled in the object.
:rtype: A dictionary of error or a success response and a value
with "segment-preservation-enabled" as the key
.. function:: peek_port()
Reads the value assigned to port in the object.
:rtype: None on error and a value on success.
.. function:: set_port(value)
Set the value of port in the object.
:rtype: A dictionary of error or a success response and a value
with "port" as the key
.. function:: peek_source_address()
Reads the value assigned to source-address in the object.
:rtype: None on error and a value on success.
.. function:: set_source_address(value)
Set the value of source-address in the object.
:rtype: A dictionary of error or a success response and a value
with "source-address" as the key
.. function:: peek_target_slot()
Reads the value assigned to target-slot in the object.
:rtype: None on error and a value on success.
.. function:: set_target_slot(value)
Set the value of target-slot in the object.
:rtype: A dictionary of error or a success response and a value
with "target-slot" as the key
.. function:: peek_target_dp_id()
Reads the value assigned to target-dp-id in the object.
:rtype: None on error and a value on success.
.. function:: set_target_dp_id(value)
Set the value of target-dp-id in the object.
:rtype: A dictionary of error or a success response and a value
with "target-dp-id" as the key
.. function:: peek_l4_protocol()
Reads the value assigned to l4-protocol in the object.
:rtype: None on error and a value on success.
.. function:: set_l4_protocol(value)
Set the value of l4-protocol in the object.
:rtype: A dictionary of error or a success response and a value
with "l4-protocol" as the key
.. function:: peek_target_qos()
Reads the value assigned to target-qos in the object.
:rtype: None on error and a value on success.
.. function:: set_target_qos(value)
Set the value of target-qos in the object.
:rtype: A dictionary of error or a success response and a value
with "target-qos" as the key
.. function:: peek_reset_propagation_enabled()
Reads the value assigned to reset-propagation-enabled in the
object.
:rtype: None on error and a value on success.
.. function:: set_reset_propagation_enabled(value)
Set the value of reset-propagation-enabled in the object.
:rtype: A dictionary of error or a success response and a value
with "reset-propagation-enabled" as the key
.. function:: peek_source_address_prefix_length()
Reads the value assigned to source-address-prefix-length in the
object.
:rtype: None on error and a value on success.
.. function:: set_source_address_prefix_length(value)
Set the value of source-address-prefix-length in the object.
:rtype: A dictionary of error or a success response and a value
with "source-address-prefix-length" as the key
.. function:: peek_application()
Reads the value assigned to application in the object.
:rtype: None on error and a value on success.
.. function:: set_application(value)
Set the value of application in the object.
:rtype: A dictionary of error or a success response and a value
with "application" as the key
.. function:: peek_destination_address_prefix_length()
Reads the value assigned to destination-address-prefix-length in
the object.
:rtype: None on error and a value on success.
.. function:: set_destination_address_prefix_length(value)
Set the value of destination-address-prefix-length in the
object.
:rtype: A dictionary of error or a success response and a value
with "destination-address-prefix-length" as the key
.. function:: peek_dscp()
Reads the value assigned to dscp in the object.
:rtype: None on error and a value on success.
.. function:: set_dscp(value)
Set the value of dscp in the object.
:rtype: A dictionary of error or a success response and a value
with "dscp" as the key
.. function:: peek_vlan()
Reads the value assigned to vlan in the object.
:rtype: None on error and a value on success.
.. function:: set_vlan(value)
Set the value of vlan in the object.
:rtype: A dictionary of error or a success response and a value
with "vlan" as the key
.. function:: peek_l2cos()
Reads the value assigned to l2cos in the object.
:rtype: None on error and a value on success.
.. function:: set_l2cos(value)
Set the value of l2cos in the object.
:rtype: A dictionary of error or a success response and a value
with "l2cos" as the key
.. function:: peek_destination_address()
Reads the value assigned to destination-address in the object.
:rtype: None on error and a value on success.
.. function:: set_destination_address(value)
Set the value of destination-address in the object.
:rtype: A dictionary of error or a success response and a value
with "destination-address" as the key
.. function:: peek_admin_state_enabled()
Reads the value assigned to admin-state-enabled in the object.
:rtype: None on error and a value on success.
.. function:: set_admin_state_enabled(value)
Set the value of admin-state-enabled in the object.
:rtype: A dictionary of error or a success response and a value
with "admin-state-enabled" as the key
.. function:: peek_priority()
Reads the value assigned to priority in the object.
:rtype: None on error and a value on success.
.. function:: set_priority(value)
Set the value of priority in the object.
:rtype: A dictionary of error or a success response and a value
with "priority" as the key
.. function:: peek_non_terminated_enabled()
Reads the value assigned to non-terminated-enabled in the
object.
:rtype: None on error and a value on success.
.. function:: set_non_terminated_enabled(value)
Set the value of non-terminated-enabled in the object.
:rtype: A dictionary of error or a success response and a value
with "non-terminated-enabled" as the key
.. function:: peek_hit_count()
Reads the value assigned to hit-count in the object.
:rtype: None on error and a value on success.
.. function:: peek_cp_dp_synchronized()
Reads the value assigned to cp-dp-synchronized in the object.
:rtype: None on error and a value on success.
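    *Illustrative usage for traffic_control_list*

        A minimal usage sketch, assuming a ``session`` returned by
        :func:`pyfos_auth.login`; the attribute values shown are hypothetical,
        and sending the object to the switch is not covered here::

            # Fetch the configured TCLs and print their rule ordering.
            tcls = traffic_control_list.get(session)
            if not isinstance(tcls, list):
                tcls = [tcls]
            for tcl in tcls:
                print(tcl.peek_priority(),
                      tcl.peek_traffic_control_list_name(),
                      tcl.peek_action(), tcl.peek_hit_count())

            # Build a TCL object locally using the set_* functions above.
            new_tcl = traffic_control_list()
            new_tcl.set_traffic_control_list_name("example_tcl")
            new_tcl.set_priority(10)
            new_tcl.set_admin_state_enabled(True)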
"""
def __init__(self, dictvalues=None):
clsuri = "/rest" + "/running" + "/brocade-extension" +\
"/traffic-control-list"
clstype = pyfos_rest_util.rest_obj_type.traffic_control_list
clsver = version.VER_RANGE_900_and_ABOVE
super().__init__(clstype, clsuri, clsver)
self.add(pyfos_rest_util.rest_attribute("traffic-control-list-name",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("target-ve-port",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("action", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"segment-preservation-enabled", pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("port", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("source-address",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("target-slot",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("target-dp-id",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("l4-protocol",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("target-qos",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("reset-propagation-enabled",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"source-address-prefix-length", pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("application",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute(
"destination-address-prefix-length", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("dscp", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("vlan", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("l2cos", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("destination-address",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("admin-state-enabled",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("priority",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("non-terminated-enabled",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_CONFIG))
self.add(pyfos_rest_util.rest_attribute("hit-count",
pyfos_type.type_int, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("cp-dp-synchronized",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.load(dictvalues, 1)
class dp_hcl_status(pyfos_rest_util.rest_object):
"""Class of dp_hcl_status
*Description dp_hcl_status*
Represents the HCL status on extension datapath process.
Important class members of dp_hcl_status:
+-----------------------------+------------------------------+-------------------------------------------------+
| Attribute Name | Description | Frequently Used Methods |
+=============================+==============================+=================================================+
| dp-id | Extension Data Path | :func:`peek_dp_id` |
| | Processor ID. Based on | :func:`set_dp_id` |
| | platform either it will | |
| | have a single DP or dual | |
| | DP. In case of single DP | |
| | only DP0 is supported, and | |
| | in case of dual DP both | |
| | DP0 and DP1 are supported | |
| | 0 : DP0 1 : DP1. | |
+-----------------------------+------------------------------+-------------------------------------------------+
| slot | The slot number of for the | :func:`peek_slot` |
| | datapath processor. | :func:`set_slot` |
+-----------------------------+------------------------------+-------------------------------------------------+
| ip-hcl-stage | The current DP HCL stage | :func:`peek_ip_hcl_stage` |
| | for IP protocol. | |
+-----------------------------+------------------------------+-------------------------------------------------+
| firmware-version | A human readable string | :func:`peek_firmware_version` |
| | identifying the firmware | |
| | version running on the | |
| | datapath process of the | |
| | switch/blade. | |
+-----------------------------+------------------------------+-------------------------------------------------+
| state | The current DP HCL state | :func:`peek_state` |
| | | |
+-----------------------------+------------------------------+-------------------------------------------------+
| svi-swapped | Is the SVI swapped for HCL | :func:`peek_svi_swapped` |
| | processing | |
+-----------------------------+------------------------------+-------------------------------------------------+
| dp-communication-status | The current state of DP-DP | :func:`peek_dp_communication_status` |
| | communication | |
+-----------------------------+------------------------------+-------------------------------------------------+
| status | The current DP status. | :func:`peek_status` |
| | | |
+-----------------------------+------------------------------+-------------------------------------------------+
| fc-hcl-stage | The current DP HCL stage | :func:`peek_fc_hcl_stage` |
| | for FC protocol. | |
+-----------------------------+------------------------------+-------------------------------------------------+
*Object functions for dp_hcl_status*
.. function:: get()
Get the instances of class "dp_hcl_status from switch. The object can
be printed using :func:`pyfos_utils.response_print`.
:param session: The session handler returned by
:func:`pyfos_auth.login`.
:rtype: A dictionary of errors or a success response.
*Class functions for dp_hcl_status*
.. function:: peek_dp_id()
Reads the value assigned to dp-id in the object.
:rtype: None on error and a value on success.
.. function:: set_dp_id(value)
Set the value of dp-id in the object.
:rtype: A dictionary of error or a success response and a value
with "dp-id" as the key
.. function:: peek_slot()
Reads the value assigned to slot in the object.
:rtype: None on error and a value on success.
.. function:: set_slot(value)
Set the value of slot in the object.
:rtype: A dictionary of error or a success response and a value
with "slot" as the key
.. function:: peek_ip_hcl_stage()
Reads the value assigned to ip-hcl-stage in the object.
:rtype: None on error and a value on success.
.. function:: peek_firmware_version()
Reads the value assigned to firmware-version in the object.
:rtype: None on error and a value on success.
.. function:: peek_state()
Reads the value assigned to state in the object.
:rtype: None on error and a value on success.
.. function:: peek_svi_swapped()
Reads the value assigned to svi-swapped in the object.
:rtype: None on error and a value on success.
.. function:: peek_dp_communication_status()
Reads the value assigned to dp-communication-status in the
object.
:rtype: None on error and a value on success.
.. function:: peek_status()
Reads the value assigned to status in the object.
:rtype: None on error and a value on success.
.. function:: peek_fc_hcl_stage()
Reads the value assigned to fc-hcl-stage in the object.
:rtype: None on error and a value on success.
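    *Illustrative usage for dp_hcl_status*

        A minimal usage sketch, assuming a ``session`` returned by
        :func:`pyfos_auth.login`::

            # Fetch the HCL status of each extension datapath processor.
            entries = dp_hcl_status.get(session)
            if not isinstance(entries, list):
                entries = [entries]
            for dp in entries:
                print(dp.peek_slot(), dp.peek_dp_id(), dp.peek_state(),
                      dp.peek_status(), dp.peek_firmware_version())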
"""
def __init__(self, dictvalues=None):
clsuri = "/rest" + "/running" + "/brocade-extension" +\
"/dp-hcl-status"
clstype = pyfos_rest_util.rest_obj_type.dp_hcl_status
clsver = version.VER_RANGE_900_and_ABOVE
super().__init__(clstype, clsuri, clsver)
self.add(pyfos_rest_util.rest_attribute("dp-id", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("slot", pyfos_type.type_int,
None, pyfos_rest_util.REST_ATTRIBUTE_KEY))
self.add(pyfos_rest_util.rest_attribute("ip-hcl-stage",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("firmware-version",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("state", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("svi-swapped",
pyfos_type.type_bool, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("dp-communication-status",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("status", pyfos_type.type_str,
None, pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.add(pyfos_rest_util.rest_attribute("fc-hcl-stage",
pyfos_type.type_str, None,
pyfos_rest_util.REST_ATTRIBUTE_NOT_CONFIG))
self.load(dictvalues, 1)
|
import datetime
from django.db import models
class Trade(models.Model):
"""
A model to hold trade information - stored, per `symbol` per `time_stamp`.
"""
SYM_LFZ = "LFZ"
SYM_EURGBP = "EUR/GBP"
SYM_6A = "6A"
SYM_FDAX = "FDAX"
SYMBOL_CHOICES = (
(SYM_LFZ, "LFZ"),
(SYM_EURGBP, "EUR/GBP"),
(SYM_6A, "6A"),
(SYM_FDAX, "FDAX"),
)
time_stamp = models.DateTimeField(
verbose_name="Time Stamp", editable=False,
default=datetime.datetime.now, blank=False)
symbol = models.CharField(
verbose_name="Symbol", editable=False, default="", blank=False,
max_length=8, choices=SYMBOL_CHOICES)
quote_count = models.IntegerField(
verbose_name="Quote Count", editable=False, default=0, blank=False)
trade_count = models.IntegerField(
verbose_name="Trade Count", editable=False, default=0, blank=False)
open_px = models.DecimalField(
verbose_name="Open Px", editable=False, default=1.0, blank=False,
max_digits=20, decimal_places=10)
close_px = models.DecimalField(
verbose_name="Close Px", editable=False, default=1.0, blank=False,
max_digits=20, decimal_places=10)
high_px = models.DecimalField(
verbose_name="High Px", editable=False, default=1.0, blank=False,
max_digits=20, decimal_places=10)
low_px = models.DecimalField(
verbose_name="Low Px", editable=False, default=1.0, blank=False,
max_digits=20, decimal_places=10)
# these two, `created` and `modified`, refer to the db record only
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
get_latest_by = "modified"
ordering = ("time_stamp", "symbol", )
unique_together = (("time_stamp", "symbol"),)
|
import unittest
from netengine import get_version, __version__
from netengine.backends import BaseBackend
from netengine.exceptions import NetEngineError
__all__ = ['TestBaseBackend']
class TestBaseBackend(unittest.TestCase):
    def test_version(self):
        self.assertIsNotNone(get_version())
        self.assertIsNotNone(__version__)
def test_dict(self):
device = BaseBackend()
dictionary = device._dict({})
self.assertTrue(isinstance(dictionary, dict))
def test_base_backend(self):
device = BaseBackend()
self.assertTrue(device.__netengine__)
with self.assertRaises(NotImplementedError):
device.validate()
with self.assertRaises(NotImplementedError):
device.to_dict()
with self.assertRaises(NotImplementedError):
device.to_json()
with self.assertRaises(NotImplementedError):
str(device)
with self.assertRaises(NotImplementedError):
device.__repr__()
with self.assertRaises(NotImplementedError):
device.__unicode__()
with self.assertRaises(NotImplementedError):
device.os
with self.assertRaises(NotImplementedError):
device.name
with self.assertRaises(NotImplementedError):
device.model
with self.assertRaises(NotImplementedError):
device.RAM_total
with self.assertRaises(NotImplementedError):
device.uptime
with self.assertRaises(NotImplementedError):
device.uptime_tuple
with self.assertRaises(NotImplementedError):
device.ethernet_standard
with self.assertRaises(NotImplementedError):
device.ethernet_duplex
with self.assertRaises(NotImplementedError):
device.wireless_channel_width
with self.assertRaises(NotImplementedError):
device.wireless_mode
with self.assertRaises(NotImplementedError):
device.wireless_channel
with self.assertRaises(NotImplementedError):
device.wireless_output_power
with self.assertRaises(NotImplementedError):
device.wireless_dbm
with self.assertRaises(NotImplementedError):
device.wireless_noise
with self.assertRaises(NotImplementedError):
device.olsr
def test_get_manufacturer_unicode(self):
device = BaseBackend()
self.assertIsNone(device.get_manufacturer(u"wrong MAC"))
|
# Picon Zero Servo Test
# Use arrow keys to move 2 servos on outputs 0 and 1 for Pan and Tilt
# Use G and H to open and close the Gripper arm
# Press Ctrl-C to stop
import tty
import termios
import sys
sys.path.insert(0, "../../lib/PiconZero/Python")
import piconzero as pz
#======================================================================
# Reading single character by forcing stdin to raw mode
def readchar():
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    if ch == '\x03':  # Ctrl-C
raise KeyboardInterrupt
return ch
def readkey(getchar_fn=None):
getchar = getchar_fn or readchar
c1 = getchar()
if ord(c1) != 0x1b:
return c1
c2 = getchar()
if ord(c2) != 0x5b:
return c1
c3 = getchar()
return chr(0x10 + ord(c3) - 65) # 16=Up, 17=Down, 18=Right, 19=Left arrows
# End of single character reading
#======================================================================
# speed = 60
print "Tests the servos by using the arrow keys to control"
print "Press <space> key to centre"
print "Press Ctrl-C to end"
print
# Define which pins are the servos
pan = 1
tilt = 0
pz.init()
# Set output mode to Servo
pz.setOutputConfig(pan, 2)
pz.setOutputConfig(tilt, 2)
# Value to increase or decrease the servo value by to rotate by 1 degree (ish)
degrees = 3
# Centre all servos
defaultVal = tiltVal = panVal = 100
maxTilt = 170
minTilt = 100
maxPan = 160
minPan = 40
pz.setOutput (pan, panVal)
pz.setOutput (tilt, tiltVal)
# main loop
try:
while True:
keyp = readkey()
if keyp == 's' or ord(keyp) == 18:
panVal = max (minPan, panVal - degrees)
print 'Right', panVal
elif keyp == 'a' or ord(keyp) == 19:
panVal = min (maxPan, panVal + degrees)
print 'Left', panVal
elif keyp == 'w' or ord(keyp) == 16:
tiltVal = max (minTilt, tiltVal - degrees)
print 'Up', tiltVal
elif keyp == 'z' or ord(keyp) == 17:
tiltVal = min (maxTilt, tiltVal + degrees)
print 'Down', tiltVal
elif keyp == ' ':
panVal = tiltVal = defaultVal
print 'Centre'
elif ord(keyp) == 3:
break
pz.setOutput (pan, panVal)
pz.setOutput (tilt, tiltVal)
except KeyboardInterrupt:
print
finally:
pz.cleanup()
|
# -*- coding: utf-8 -*-
"""AG module containing base Asset class and associated enums
This module contains all of the necessary components for the proper
function of standard asset classes within AlphaGradient, as well as
the API.
Todo:
* Complete google style docstrings for all module components
* Complete function/class header typing
* Replace is_numeric with Number instance checks
* Interpret more than just isoformat for normalizing datestrings
* IMPLEMENT SAVING ASSETS LOCALLY TO BE REUSED, REFRESH/ONLINE INPUT
"""
# Standard imports
from __future__ import annotations
from abc import ABC, abstractmethod
from datetime import datetime, time, timedelta
from functools import lru_cache
import math
from numbers import Number
from pathlib import Path
from weakref import WeakValueDictionary as WeakDict
# Third Party imports
from aenum import Enum, unique, auto, extend_enum
import pandas as pd
import numpy as np
# Local imports
from .._data import get_data, AssetData, ValidData
from .. import utils
# Typing
from typing import (
TYPE_CHECKING,
Any,
cast,
no_type_check,
NoReturn,
Optional,
Type,
Union,
)
from ..utils import DateOrTime, DatetimeLike
if TYPE_CHECKING:
from ._standard import Currency
from ._portfolio import Portfolio, Position
_currency_info_path: Path = Path(__file__).parent.joinpath("currency_info.p")
"""The path to local currency info"""
_currency_info: pd.DataFrame = pd.read_pickle(_currency_info_path)
"""Currency information stored locally"""
class Instances(WeakDict):
"""A weakly referential dictionary of all instances of the
subclass to which the enum member corresponds"""
def __init__(self, name: str) -> None:
self.name = name
super().__init__()
def __getattr__(self, attr: str) -> Any:
try:
return self[attr.upper()]
except KeyError:
raise AttributeError(f"Asset type '{self.name}' has no instance '{attr}'")
def __str__(self) -> str:
return str(dict(self))
@property
def base(self) -> Currency:
"""The monetary basis of this asset subclass,
represented by a currency code. Only used by Currency
subclass"""
if self and getattr(list(self.values())[0], "base", False):
return [c for c in self.values() if c.is_base][0]
else:
raise AttributeError(f"Asset type '{self.name}' has no instance 'base'")
@unique
class types(Enum):
"""
    An enumeration with members for all asset subclasses
The types enum is a special enumeration which dynamically creates
new members for all subclasses of ag.Asset. Enum members store a
weakly referential dictionary to all instances of that asset
subclass, which can be accessed as attributes or dynamically
through indexing.
Examples:
* TYPES.STOCK.instances returns all instances of Stock
* TYPES.STOCK.SPY returns Spy Stock (if instantiated)
* TYPES.STOCK["SPY"] does the same
.. code:: python
spy = ag.Stock("SPY")
dia = ag.Stock("DIA")
qqq = ag.Stock("QQQ")
.. code:: pycon
>>> ag.stock
{'SPY': <STOCK SPY: $143.53 /share>, 'DIA': <STOCK DIA: $112.28 /share>, 'QQQ': <STOCK QQQ: $92.0 /share>}
>>> ag.stock.spy
<STOCK SPY: $143.53 /share>
>>> spy is ag.stock.spy
True
>>> ag.types.stock
STOCK
>>> ag.types.stock.spy is ag.stock.spy
True
"""
def _generate_next_value_(name: str, *args: Any) -> tuple[str, WeakDict]:
"""Determines how new enum members are generated when new asset
subclasses are created"""
class _Instances(WeakDict):
"""A weakly referential dictionary of all instances of the
subclass to which the enum member corresponds"""
def __getattr__(self, attr: str) -> Any:
try:
return self[attr.upper()]
except KeyError:
raise AttributeError(
f"Asset type '{name}' has no instance '{attr}'"
)
def __str__(self) -> str:
return str(dict(self))
@property
def base(self) -> Currency:
"""The monetary basis of this asset subclass,
represented by a currency code. Only used by Currency
subclass"""
if self and getattr(list(self.values())[0], "base", False):
return [c for c in self.values() if c.is_base][0]
else:
raise AttributeError(f"Asset type '{self}' has no instance 'base'")
return (name, Instances(name))
# Non-asset types that need to be instantiated manually
undefined = auto() # Used when the subclass is hidden
portfolio = auto()
# Reconsider if these are necessary
algorithm = auto()
environment = auto()
universe = auto()
def __init__(self, *args: Any, **kwargs: Any) -> None:
self.c = object
super().__init__(*args, **kwargs)
def __str__(self) -> str:
return self.name.upper()
def __repr__(self) -> str:
return self.__str__()
def __getitem__(self, item: str) -> Any:
return self.instances[item]
@property
def instances(self) -> WeakDict[str, Any]:
"""A list of all instances of a certain asset type"""
return self.value[1]
@classmethod
def to_list(cls) -> list[types]:
"""
Returns the types enum as a list of enum members with attribute access
Returns:
types_list (TypeList):
The list created from types with special attribute access methods
"""
class TypeList(list[types]):
"""A list of types enum members with attribute access for enum members"""
def __init__(self, types_enum: Type[types]) -> None:
self += [t for t in types_enum][1:] # type: ignore[attr-defined]
self.enum = types_enum
def __getitem__(self, item: Any) -> Any:
if item.__class__ is str:
try:
return self.enum[item] # type: ignore[index]
except KeyError:
raise KeyError(f"Asset type '{item}' does not exist")
return super().__getitem__(item)
def __getattr__(self, item: str) -> Any:
try:
return self.enum[item] # type: ignore[index]
except KeyError:
raise AttributeError(f"Asset type '{item}' does not exist")
return TypeList(cls)
@classmethod
def instantiable(cls) -> list[Type]:
"""
A list of all instantiable asset subclasses
Returns all subclasses of ag.Asset that are currently instantiable,
meaning they have successfully defined all abstract methods.
Returns:
All asset subclasses which contain no abstract methods.
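        Examples:
            A sketch of typical use; the exact contents of the returned list
            depend on which Asset subclasses have been defined:

            .. code:: pycon

                >>> all(issubclass(sub, Asset) for sub in types.instantiable())
                True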
"""
check = Asset.__subclasses__()
instantiable = []
for sub in check:
check += sub.__subclasses__()
if ABC not in sub.__bases__:
instantiable.append(sub)
return instantiable
class AssetDuplicationError(Exception):
"""Raised when asset duplication is forced via copy methods"""
def __init__(self, asset: Asset) -> None:
message = (
f"Attempted duplication of {asset.name} " # type: ignore[attr-defined]
f"{asset.type}. Multiple instances of this asset are not "
"permitted"
)
super().__init__(message)
class DataProtocol(Enum):
"""
Enumeration of different data requirement protocols for assets
This enumeration and its members control data requirements for the
instantiation of different asset subclasses.
Attributes:
        REQUIRED (DataProtocol):
Data MUST be supplied in some form, or the asset will fail to
instantiate. For assets that require that the asset.data attribute
is a pandas dataframe object
FLEXIBLE (DataProtocol):
Asset will be instantiated whether or not data is supplied,
asset.data attribute can be None or pd.DataFrame
PROHIBITED (DataProtocol):
Asset will fail to instantiate if any data is supplied. For assets
that require that the asset.data attribute is None (eg. simulated
assets)
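    Examples:
        A sketch of how the subclass keyword arguments map to protocol
        members; ``Historical`` and ``Simulated`` are hypothetical subclasses
        used only for illustration:

        .. code:: python

            class Historical(Asset, require_data=True):
                def valuate(self, date):
                    return self.value

            class Simulated(Asset, prohibit_data=True):
                def valuate(self, date):
                    return self.value

        .. code:: pycon

            >>> Historical._data_protocol is DataProtocol.REQUIRED
            True
            >>> Simulated._data_protocol is DataProtocol.PROHIBITED
            True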
"""
REQUIRED = auto()
FLEXIBLE = auto()
PROHIBITED = auto()
@classmethod
def _get(cls, require_data: bool, prohibit_data: bool) -> DataProtocol:
"""
Returns the appropriate data protocol based on asset
subclass initialization settings
Parameters:
require_data:
Whether or not the asset requires data
prohibit_data:
Whether or not the asset prohibits data
Returns:
The member protocol corresponding to the settings
Raises:
TypeError: raised when inputs are not booleans
"""
if require_data is prohibit_data:
return cls.FLEXIBLE
elif require_data and not prohibit_data:
return cls.REQUIRED
elif not require_data and prohibit_data:
return cls.PROHIBITED
raise TypeError(f"inputs must be booleans")
@classmethod
def _decompose(cls, member: DataProtocol) -> tuple[bool, bool]:
"""
Returns the respective subclass initialization settings that
correspond to each protocol member
Parameters:
member:
The member protocol to be decomposed
Returns:
            The DataProtocol member decomposed into a tuple (structured as
            tuple(require_data: bool, prohibit_data: bool))
Raises:
KeyError: When member is not a valid member of DataProtocol
"""
if member is cls.FLEXIBLE:
return False, False
elif member is cls.REQUIRED:
return True, False
        elif member is cls.PROHIBITED:
return False, True
raise KeyError(
f"{member} is not a valid member of the DataProtocol enumeration"
)
class Asset(ABC):
"""
Abstract base class representing a financial asset
The abstract base class underlying all standard and custom asset classes
within AlphaGradient, designed to be used in conjunction with other standard
ag objects such as portfolios and algorithms. All Assets are singletons.
When defining a new Asset subclass, the user must define a valuate method,
which takes in a datetime input and returns a float representing the price
of the asset at that datetime. For Assets that intend to require data for
valuation, this method can simply return the current value (self.value)
Parameters:
name:
The name of the asset. This asset will be accessible through the
global api under this name. For example, instantiating MyAsset("test")
will allow the user to reference that object at ag.myasset.test,
so long as it has not been garbage collected.
data (Optional[ValidData]):
The data input for this asset. The input will be coerced into an
            AssetData object. When passed as "online", the asset initialization
will force the asset to gather data through its online_data()
method, if one has been defined.
columns:
When passing in an array-like input for data, use this parameter
to specify what elements of the array refer to what columns.
Columns are interpreted/assigned in the same order that they come
in via data.
force:
Force an Asset to be instantiated when it would otherwise fail.
When data initialization fails, forces the asset to instantiate
by creating a fake AssetData input with only a single data point.
base:
The base currency this object's value and price history are
represented in. If passing in data that is not represented in the
global default currency (defaults to USD), use this parameter to
specify what currency the data is represented in.
Examples:
.. code:: python
import alphagradient as ag
# Creating a class which will require a data input
class MyAsset(ag.Asset, require_data=True):
def valuate(self, date):
return self.value
"""
_args: dict[str, Any]
_base: str
_benchmark: Asset
_close_value: str = "CLOSE"
_data: Optional[AssetData]
_data_protocol: DataProtocol = DataProtocol.FLEXIBLE
_date: datetime
_global_base: str
_global_res: timedelta
_global_persistent_path: Path
_market_close: time = time.fromisoformat("00:00:00")
_market_open: time = time.fromisoformat("00:00:00")
_name: str
_open_value: str = "OPEN"
_optional: list[str] = [_open_value]
_required: list[str] = [_close_value]
_rfr: float
_value: float
_unit: str = "unit"
_units: str = "units"
def __init_subclass__(
cls,
*args: Any,
hidden: bool = False,
require_data: Optional[bool] = None,
prohibit_data: Optional[bool] = None,
required: Optional[list[str]] = None,
optional: Optional[list[str]] = None,
open_value: Optional[str] = None,
close_value: Optional[str] = None,
market_open: Optional[DateOrTime] = None,
market_close: Optional[DateOrTime] = None,
        units: Optional[tuple[str, str]] = None,
        settings: Optional[dict[str, Any]] = None,
**kwargs: Any,
):
"""
Controls behavior for instantiation of Asset subclasses.
Creates new enumerations within the TYPES enum for newly
        created subclasses of Asset. Also sets some class-level
attributes that control behavior during instantiation of that type.
"""
# Asset subclass (class-level) attribute initialization
# These will all be coerced to appropriate types later in this function,
        # so this initial assignment is for convenience. MyPy doesn't like it, though.
cls._required = required # type: ignore[assignment]
cls._optional = optional # type: ignore[assignment]
cls._open_value = open_value # type: ignore[assignment]
cls._close_value = close_value # type: ignore[assignment]
cls._market_open = market_open # type: ignore[assignment]
cls._market_close = market_close # type: ignore[assignment]
cls._unit, cls._units = units if units is not None else (None, None) # type: ignore[assignment]
# Ensuring safe access if nothing is passed
settings = {} if settings is None else settings
# Hiding the asset from the types enumeration
hidden = hidden or settings.get("hidden", False)
# Using the passed in settings object to set class attributes
# in the case they are not provided by kwargs
if settings:
for attr_name in [
"required",
"optional",
"open_value",
"close_value",
"market_open",
"market_close",
]:
if not getattr(cls, "_" + attr_name, False):
try:
setattr(cls, "_" + attr_name, settings[attr_name])
except KeyError as e:
pass
if require_data is None:
require_data = settings.get("require_data", False)
if prohibit_data is None:
prohibit_data = settings.get("prohibit_data", False)
if settings.get("units", False):
cls._unit, cls._units = settings["units"]
# Setting the data protocol
# MyPy
assert isinstance(require_data, bool) and isinstance(prohibit_data, bool)
cls._data_protocol = DataProtocol._get(require_data, prohibit_data)
# Setting asset-level market close and open
for attr_name in ("market_open", "market_close"):
attr = getattr(cls, "_" + attr_name, None)
if attr is not None:
try:
setattr(cls, "_" + attr_name, utils.get_time(attr))
except ValueError:
raise ValueError(
f"Invalid input for {attr_name} during initialization of {cls.__name__} asset subclass. Unable to convert {attr} to a time object."
)
else:
                setattr(cls, "_" + attr_name, time(minute=0, second=0, microsecond=0))
# What will become the name of this subclass' type
TYPE = cls.__name__.lower()
# Extending the enum to accomodate new type
if ABC not in cls.__bases__ and not hidden:
if TYPE not in [t.name for t in types]: # type: ignore[attr-defined]
extend_enum(types, TYPE)
cls.type = types[TYPE] # type: ignore
cls.type.c = cls # type: ignore[attr-defined]
# Used when a new asset subclass is hidden from the AG api
if not getattr(cls, "type", None):
cls.type = types.undefined # type: ignore[attr-defined]
if cls.unit is None or cls.units is None:
cls.unit = "unit"
cls.units = "units"
def __new__(cls, *args: Any, **kwargs: Any) -> Asset:
# Seeing if this asset is already instantiated
if args or kwargs.get("name", False):
name = kwargs["name"] if kwargs.get("name") else args[0]
# Returning the asset if exists
if name in cls.type.instances: # type: ignore[attr-defined]
return cls.type.instances[name] # type: ignore[attr-defined]
# Returning a new asset
return cast(Asset, super().__new__(cls))
def __init__(
self,
name: str,
data: Optional[ValidData] = None,
columns: Optional[list[str]] = None,
force: bool = False,
base: Optional[str] = None,
) -> None:
# Standard style guideline for all asset names. Ensures that
# they are accessible via attribute access through the global env
# eg. ag.stock.stocknamehere
self._initialized = False
name = str(name).upper().replace(" ", "_")
# Checks if arguments have changed materially from previous
        # initialization, and skips initialization if they haven't.
if self.type.instances.get(name) is self: # type: ignore[attr-defined]
skip = True
if data is not None:
if isinstance(data, pd.DataFrame) and isinstance(
self._args["data"], pd.DataFrame
):
if not data.equals(self._args["data"]):
skip = False
elif type(data) is not type(self._args["data"]):
skip = False
elif data != self._args["data"]:
skip = False
if skip:
return
# Saving new args, storing new instance
self._args = locals()
del self._args["self"]
self.type.instances[name] = self # type: ignore[attr-defined]
# Attribute Initialization
self._name = name
valid_bases: list[str] = list(_currency_info["CODE"])
self._base: str = base if base in valid_bases else self._global_base
self._value: float = data if isinstance(data, (int, float)) else 0
self.close: bool = True
self._data = None
# Data entry is prohibited, data must always be None
if self.protocol is DataProtocol.PROHIBITED:
self._data = None
# Data entry is not prohibited, initialize dataset
else:
if data is None:
# First attempt to get data from saved files
data = get_data(self)
# Second attempt to get data from online data
if data is None and getattr(self, "online_data", False):
try:
data = self.online_data() # type: ignore[attr-defined]
except ValueError:
pass
# Third attempt to get data from nothing...?
if data is None and self.protocol is DataProtocol.REQUIRED:
# When we need to force the instantiation of an
# asset that requires data, but no data is available
if force:
data = AssetData(self.__class__, 1)
else:
raise ValueError(
f"{self.name} {self.type} " # type: ignore[attr-defined]
"could not be initialized "
"without data. If this is "
"the first time this asset is "
"being instantiated, please "
"provide a valid dataset or "
"instantiate with force=True."
)
assert isinstance(data, (AssetData, type(None)))
self._data = data
# Explicit reinstancing from online data
elif isinstance(data, str) and data.lower() == "online":
try:
data = self.online_data() # type: ignore[attr-defined]
except AttributeError as e:
raise ValueError(
f"{self.name} unable to retrieve online data, " # type: ignore[attr-defined]
f"{self.type} has not implemented an online_data method"
) from e
if data is None:
raise ValueError(
f"Unable to retreive online data for {self.type} " # type: ignore[attr-defined]
f'"{self.name}", initialization failed'
)
self._data = data
# Data input received, make a new asset dataset
else:
self._data = AssetData(self.__class__, data, columns)
# Data verification when required
if self.protocol is DataProtocol.REQUIRED and not self.data:
raise ValueError(
f"{self.name} {self.type} could not " # type: ignore[attr-defined]
"be initialized without data. If this "
"is the first time this asset is "
"being instantiated, please provide "
"a valid dataset or instantiate with "
"force=True."
)
# Ensures that the initial price is set properly
self._valuate()
self._initialized = True
self._save()
def __str__(self) -> str:
return f"<{self.type} {self.name}: {self.price} /{self.unit}>" # type: ignore[attr-defined]
def __hash__(self) -> int:
return self.key.__hash__()
def __repr__(self) -> str:
return self.__str__()
def __eq__(self, other: object) -> bool:
return self is other
def __getstate__(self) -> dict[str, Any]:
return self.__dict__
def __setstate__(self, state: dict[str, Any]) -> None:
self.__dict__ = state
def __copy__(self) -> NoReturn:
raise AssetDuplicationError(self)
def __deepcopy__(self) -> NoReturn:
raise AssetDuplicationError(self)
@property
def base(self) -> str:
"""
The asset's base currency
Represented as an international currency code; a string of uppercase
alphabetic characters of length 2-5.
"""
return self._base
@property
def benchmark(self) -> Asset:
"""
The benchmark asset to use in calculations of alpha, beta, etc
"""
return self._benchmark
@property
def close_value(self) -> str:
"""The name of the column to associate with market close prices for this asset type"""
return self._close_value
@property
def data(self) -> Optional[AssetData]:
"""
        The historical price data for this asset
Either None or an instance of AssetData. Depending on their ``protocol``,
Assets may either require the presence of historical data (eg. Stocks),
forbid it (eg. BrownianStocks), or operate regardless of circumstance. AssetData
objects always evaluate as ``True``, so it is safe to evaluate ``self.data``
as a boolean, unlike a pandas DataFrame.
"""
return self._data
@property
def date(self) -> datetime:
"""The date of this asset's most recent valuation, which to which its
current value corresponds"""
return self._date
@property
def expired(self) -> bool:
"""
Whether or not this asset is expired
Most assets will never expire, so the default behavior is to
        always return ``False``. Some assets, like options, can expire,
        however. Override this property to determine when an asset
        expires.
"""
return False
@property
def key(self) -> str:
"""
Returns a key used for accessing stored files relevant to this asset
Creates a key from information unique to this asset. These
keys are used to access locally stored data relevant to this
asset
"""
return f"{self.type}_{self.name}" # type: ignore[attr-defined]
@property
def market_close(self) -> time:
"""The market closing time for this asset type"""
return self._market_close
@property
def market_open(self) -> time:
"""The market opening time for this asset type"""
return self._market_open
@property
def name(self) -> str:
"""
The name of this asset
A string of uppercase alphanumeric characters that denotes the name of this
asset. Names are unique to assets by type; no two assets of the same type
may share the same name (but assets of different types may be named
identically). Used to access this asset in the global environment, as well
as encode its key for storage in positions.
Examples:
.. code:: pycon
>>> spy = ag.Stock(name="SPY")
>>> spy.name
SPY
>>> spy
<STOCK SPY: $429.06 /share>
>>> ag.stock.spy
<STOCK SPY: $429.06 /share>
>>> spy is ag.stock.spy
True
>>> spy.key
STOCK_SPY
"""
return self._name
@property
def next(self) -> datetime:
"""The next available datetime"""
if self.data:
return self.data.next(self.date)
else:
return self.date + self._global_res
@property
def open(self) -> bool:
"""Whether or not this asset is tradeable based on the current date"""
return (
self.date.time() >= self.market_open
and self.date.time() <= self.market_close
if self.market_close != self.market_open
else True
)
@property
def open_value(self) -> str:
"""The name of the column to associate with market open prices for this asset type"""
return self._open_value
@property
def optional(self) -> list[str]:
"""A list of optional columns for any data input to this asset type"""
return self._optional
@property
def price(self) -> str:
"""
A print friendly version of the asset value with the asset's base
currency symbol attached
"""
symbol = types.currency.instances[self.base].symbol
price = abs(self._value)
if price != 0:
r = 2
while price < 1:
r += 1
price *= 10
price = round(self._value, r)
return f"{symbol}{price}"
@property
def protocol(self) -> DataProtocol:
"""The data requirement protocol that this asset type operates under"""
return self._data_protocol
@property
def required(self) -> list[str]:
"""A list of required columns for any data input to this asset type"""
return self._required
@property
def rfr(self) -> float:
"""The risk free rate used for this asset type"""
return self._rfr
@property
def value(self) -> float:
"""This asset's current value"""
return self._value
@value.setter
def value(self, value: float) -> None:
if isinstance(value, Number):
            if not math.isnan(value):
self._value = value
else:
raise TypeError(
f"Can not update value of {self.name} " # type: ignore[attr-defined]
f"{self.type} to "
f"{value.__class__.__name__} "
f"{value}. Value must be a numnber"
)
@property
def unit(self) -> str:
"""How to refer to a single unit of this asset type"""
return self._unit
@property
def units(self) -> str:
"""How to refer to multiple units of this asset type"""
return self._units
def _data_valuate(self, date: Optional[DatetimeLike] = None) -> float:
"""
Determines how asset prices update when using data
        Determines how assets are valuated when data is available. Keeps
        track of whether the asset is at the beginning or the end of a
        time interval, and valuates accordingly.
Parameters:
date (DatetimeLike):
DatetimeLike object that determines at what point in time the
valuation will be
Returns:
The price as of the given datetime
"""
assert self.data is not None
date = self.date if date is None else utils.to_datetime(date)
value = float(self.data.valuate(date, self)) # TODO Should this use quote??
if not math.isnan(value):
return value
else:
if not self._initialized:
data = self.data.asof(self.data.index[0])
return data[self.close_value]
else:
return self.value
def _save(self) -> None:
"""Saves this asset's data locally"""
if self.data and self._global_persistent_path is not None:
path = self._global_persistent_path.joinpath(f"{self.key}.p")
with open(path, "wb") as p:
self.data._data.to_pickle(p)
def _step(self, date: DatetimeLike) -> None:
"""
Automatically called before this asset is valuated during time steps
This function is a hook to perform some behavior on the asset
prior to its valuation at each time step. It should return None;
it modifies this asset in place.
Parameters:
date (DatetimeLike):
The date of the valuation that will occur after this function
is executed
"""
return None
def _valuate(self) -> None:
"""
Updates asset prices when time steps take place
This is the method that is actually called under the hood when
time steps take place, which properly directs valuation
behavior to either valuate or _data_valuate depending on the
asset type.
"""
self.value = self.quote(self.date)
    def alpha(self, days: float = 365, benchmark: Optional[Asset] = None) -> float:
"""
Returns this asset's alpha for the given period
Calculates the return of this asset relative to its risk adjusted
        expected return, using some other asset as a basis.
Parameters:
days:
The period across which to calculate alpha
benchmark:
                The benchmark to act as a basis for the calculation of risk
adjusted expected return
Returns:
This asset's alpha for the period
"""
benchmark = benchmark or self.benchmark
assert benchmark.data is not None
asset_roi = self.roi(days)
        bench_roi = benchmark.roi(days)
expected_roi = self.beta(days, benchmark) * (bench_roi - self.rfr)
return asset_roi - expected_roi
    def beta(self, days: float = 365, benchmark: Optional[Asset] = None) -> float:
"""
Returns the beta for the period
Returns the beta for the period. Calculated by weighting the covariance
of this asset and the benchmark asset by the ratio of their
volatilities.
Parameters:
days:
The period across which to calculate beta
benchmark:
The benchmark asset to compare to
Returns:
This asset's beta for the given period
"""
benchmark = benchmark or self.benchmark
assert benchmark.data is not None
start = self.date - timedelta(days=days)
self_vol = self.vol(days)
bench_vol = benchmark.vol(days)
self_data = self.range(start, self.date)["CHANGE"]
bench_data = benchmark.range(start, self.date)["CHANGE"].asof(self_data.index)
r = self_data.corr(bench_data)
if bench_vol == 0:
return 0
return r * (self_vol / bench_vol)
def cagr(self, days: float = 365) -> float:
"""
Returns this asset's compounding annualized growth rate for the given period
Parameters:
days:
period across which to calculate cagr
Returns:
Compounding annualized growth rate for this asset
"""
return (self.roi(days) + 1) ** (365 / days) - 1
def expire(self, portfolio: Portfolio, position: Position) -> None:
"""
Controls the behavior of this asset and positions in this asset when
it expires inside of a portfolio
Positions in portfolios are automatically removed from that
portfolio when they expire. Their expiration is determined
based on two conditions: Whether or not the position.quantity > 0,
and whether or not the position's underlying asset is expired.
Changing this method in an asset subclass will change its
behavior as it expires. The conditions for expiring can also
be changed by overloading the asset's 'expired' property
(must be a property)
This function should return None; it modifies this asset, as well as the
given Portfolio and Position in-place
Parameters:
portfolio:
                The portfolio holding a position in this asset
position:
The above portfolio's current position in this asset that is
becoming expired after this call.
"""
return None
@classmethod
def get_settings(cls, unpack: bool = False) -> Union[list, dict[str, Any]]:
"""
Returns a dictionary of class attributes
This settings object is the same one used in the class header for
defining class attributes.
Parameters:
unpack:
                When ``True``, returns the setting values as a list, for
                easy unpacking
Returns:
Class-level settings for this asset type
"""
require, prohibit = DataProtocol._decompose(cls._data_protocol)
settings = {
"hidden": cls.type is types.undefined, # type: ignore[attr-defined]
"require_data": require,
"prohibit_data": prohibit,
"required": cls._required,
"optional": cls._optional,
"close_value": cls._close_value,
"open_value": cls._open_value,
}
return list(settings.values()) if unpack else settings
def ma(self, days: float = 365) -> float:
"""
Returns the moving average of this asset's value over the period given by days
Parameters:
days:
A number indicating the number of days for which the moving
average should be calculated
Returns:
A floating point value representing the average price over the
period given by days.
"""
start = self.date - timedelta(days=days)
data = self.range(start, self.date)
return data[self.close_value].mean()
def quote(self, date: DatetimeLike) -> float:
"""
Returns the value of this asset on the given date
Parameters:
date (DatetimeLike):
The date on which to return the value of this asset
Returns:
The asset's value on the given date
"""
date = utils.to_datetime(date)
if self.data:
return self.data.valuate(date, self)
else:
return self.valuate(date)
def range(self, start: DatetimeLike, end: DatetimeLike) -> pd.DataFrame:
"""
Returns a datetime-indexed dataframe of asset values from start to end
TODO: Inefficient implementation, not necessary.
Parameters:
start (DatetimeLike):
The date corresponding to the beginning of the period
            end (DatetimeLike):
                The date corresponding to the end of the period
        Returns:
            A datetime-indexed dataframe of asset values for the period
            defined by start and end
"""
if self.data:
return self.data.range(start, end)
@np.vectorize
@lru_cache
def vquote(date):
return self.valuate(date)
dates = pd.date_range(start, end)
prices = vquote(dates)
data = pd.DataFrame(data=prices, index=dates, columns=[self.close_value])
if len(data) > 1:
shifted = data[self.close_value].shift(1)
            shifted.iloc[0] = shifted.iloc[1]
data["CHANGE"] = (data[self.close_value] / shifted) - 1
elif not data.empty:
data["CHANGE"] = [0]
else:
data["CHANGE"] = []
return data
def roi(self, days: float = 365) -> float:
"""
This assets return on investment for the input period
Returns the difference between the starting value and the current value
as a percentage of the starting value
Parameters:
days:
The period across which to calculate
Returns:
The percentage difference from start to end
"""
start = self.date - timedelta(days=days)
initial = self.quote(start)
if initial == 0:
return 0
return (self.value / initial) - 1
@no_type_check
@abstractmethod
def valuate(self, date: DatetimeLike) -> float:
"""
Determines how asset prices update when not using data
This is the method that defines non-data-based valuation
behavior for an asset subclass. The standard implementation
essentially does nothing--prices stay constant. New asset
subclasses are required to replace this method in order to be
instantiable.
Parameters:
date (DatetimeLike):
The date on which to valuate this asset
Returns:
Value information for this asset at the given datetime
"""
return self.value
def vol(self, days: float = 365) -> float:
"""Returns the historical volatility of this asset's value over the
period given by days, as a percentage of the current valuation
Parameters:
days:
A number indicating the number of days for which the historical
volatility should be calculated
Returns:
A floating point number representing the average deviation of this
asset's price from its moving average over the same period,
expressed as a percentage of the current value.
"""
### TODO ### THIS MUST CONSIDER THE INTERVALS OF TIME BETWEEN EACH
# INDEX, RIGHT NOW ASSUMES THAT ALL VALUES ARE EQUIDISTANT / DAILY.
# TIME RESOLUTION OF THE DATA WILL AFFECT THE STD
# 252 Trading days per year
multiplier = (days / (252 / 365)) ** 0.5
start = self.date - timedelta(days=days)
data = self.range(start, self.date)
return data["CHANGE"].std() * multiplier
|
from __future__ import unicode_literals
from django.apps import AppConfig
class PollsConfig(AppConfig):
name = 'polls'
|
import cherrypy, os, sys, json
import helpers
class Settings(object):
def __init__(self):
cherrypy.engine.subscribe('settings-broadcast', self.listen)
return
def listen(self, m):
if m['type'] == 'settings':
# print(m)
            with open('settings.json', 'r') as f:
                settings = json.load(f)
settings['format'] = m['format']
settings['snoozeInterval'] = m['snoozeInterval']
settings['seconds'] = m['seconds']
settings['date'] = m['date']
            if hasattr(self, 'settings') and self.settings != settings:
self.settings = settings
m = {
"type": "update",
"format": self.settings['format'],
"seconds": self.settings['seconds'],
"date": self.settings['date']
}
cherrypy.engine.publish('websocket-broadcast', json.dumps(m))
if not hasattr(self, 'settings'):
self.settings = settings
            with open('settings.json', 'w') as f:
                json.dump(settings, f)
return
@cherrypy.expose
def index(self):
return helpers.loadTemplate('settings')
|
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('common', '0002_address'),
]
operations = [
migrations.AddField(
model_name='address',
name='label',
field=models.CharField(default='Default', max_length=20),
preserve_default=False,
),
]
|
import types
try:
from .config import *
except ImportError:
from .config_example import *
DB_SQLITE_SQL_GETVAR = "SELECT data FROM `main` WHERE variable=? AND user=?"
DB_SQLITE_SQL_INSERTVAR = "INSERT INTO `main` (`user`, `variable`, `data`) VALUES (?, ?, ?)"
DB_SQLITE_SQL_UPDATEVAR = """
UPDATE main
SET user = ?, variable = ?, data = ?
WHERE user = ? AND variable = ?;
"""
# State scope types.
UNKNOWN = -1 # not used
INDIVIDUAL = 1
GROUP = 0
GLOBAL = 2
TYPE_RULE_ALL = {
}
TYPE_RULE_MSG_ONLY = {
"post_type": ["message"],
"message_type": ["group", "private"],
"^sub_type": ["notice"]
}
def print_all():
    tmp = globals()
    for k, v in tmp.items():
        if (not k.startswith('_') and not isinstance(v, types.ModuleType)
                and k not in ('tmp', 'In', 'Out') and not callable(v)):
            print(k, '=', v)
|
import numpy as np
# Load data
data = np.loadtxt('challenges/01-sonar-sweep/input.txt', dtype=int)
# Perform a sliding window summation per 3 elements
data = np.convolve(data, np.ones(3, dtype=int), 'valid')
# Calculate the differences
diffs = np.diff(data)
# Get the ones that have positive difference
has_increased = diffs > 0
# Count their sum
result = np.sum(has_increased)
print(result)
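# Hedged worked example of the approach above (toy data, not the puzzle input):
#   measurements        [199, 200, 208, 210]
#   3-wide window sums  [607, 618]   # np.convolve(..., np.ones(3), 'valid')
#   differences         [11]         # np.diff
#   increases           1            # np.sum(diffs > 0)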
|
import sys
# TODO: Investigate more precise timing libraries
import time
from contentcuration.models import ContentKind, ContentNode, File
def print_progress(text):
sys.stdout.write("\r" + text)
sys.stdout.flush()
class Objective:
"""
Objective is a class that provides tools for measuring and exploring the performance impacts of performing
various operations on ORM objects.
"""
def __init__(self):
self.topic, topic_created = ContentKind.objects.get_or_create(kind='Topic')
self.root_node = ContentNode.objects.create(title='test_server_perf Root Node', kind=self.topic)
def __del__(self):
if self.root_node:
raise Exception("Test cleanup not run. Ensure you manually delete root node with id {} and all nodes and files that are connected to it.".format(self.root_node.pk))
def cleanup(self):
print("Performing clean up, please wait...")
try:
if self.root_node:
files = File.objects.filter(contentnode=self.root_node)
if files:
files.delete()
self.root_node.delete()
self.root_node = None
except Exception as e:
if self.root_node:
print("Error in cleanup. Root node with id {} may still exist.".format(self.root_node.pk))
raise
def create_content_nodes(self, num_nodes=100):
"""
Creates the specified number of ContentNode objects, and returns the time taken.
:param num_nodes: Number of nodes to create.
:return: Time taken in seconds to perform the operation.
"""
# Allow calling this method multiple times
current_nodes = ContentNode.objects.count()
parent = self.root_node
start = time.time()
for i in range(num_nodes):
node = ContentNode.objects.create(title="test_server_perf Node {}".format(i), parent=parent, kind=self.topic)
# try to create a multi-level tree structure to better test tree recalc operations
if num_nodes > 20:
if i % (num_nodes / 10) == 0:
sys.stdout.write('.')
sys.stdout.flush()
parent = node
elapsed = time.time() - start
if ContentNode.objects.count() != current_nodes + num_nodes:
raise AssertionError
return elapsed
def create_files(self, num_files=100):
"""
Create File database objects.
:param num_files: Number of File database objects to create
:return: Time taken in seconds to perform the operation.
"""
current_files = File.objects.count()
start = time.time()
for i in range(num_files):
file_obj = File.objects.create()
elapsed = time.time() - start
if File.objects.count() != current_files + num_files:
raise AssertionError
return elapsed
def get_object_creation_stats(self, object_type, num_objects=100, num_runs=10):
"""
Runs the create_content_nodes logic multiple times and reports the min, max and avg times the operation takes.
:param object_type: Type of object, can be "ContentNode" or "File"
        :param num_objects: Number of objects to create each run
:param num_runs: Number of time to run the function
:return: A dictionary with keys 'min', 'max', 'average', representing reported times.
"""
creation_func = self.create_content_nodes
if object_type == "File":
creation_func = self.create_files
run_times = []
for i in range(num_runs):
print_progress("Creating {} {} objects. Test run {} of {}".format(num_objects, object_type, i+1, num_runs))
run_times.append(creation_func(num_objects))
return self._calc_stats(run_times, num_objects)
def get_object_creation_stats_mptt_delay(self, num_objects=100, num_runs=10):
"""
        Creates the specified number of ContentNode objects within a delay_mptt_updates block, and returns the time taken.
        :param num_objects: Number of objects to create each run
:param num_runs: Number of time to run the function
:return: A dictionary with keys 'min', 'max', 'average', representing reported times.
"""
run_times = []
for i in range(num_runs):
print_progress("Creating {} {} objects with delay_mptt_updates. Test run {} of {}".format(num_objects, 'ContentNode', i+1, num_runs))
with ContentNode.objects.delay_mptt_updates():
run_times.append(self.create_content_nodes(num_objects))
return self._calc_stats(run_times, num_objects)
def get_large_channel_creation_stats(self):
# Let's use the stats from KA as a base
num_nodes = 44000
num_files = num_nodes * 3
stats = {}
stats['Node creation time'] = self.get_object_creation_stats_mptt_delay(num_nodes, num_runs=1)['min']
stats['File creation time'] = self.create_files(num_files)
return stats
def _calc_stats(self, run_times, num_items):
run_times.sort()
total_time = 0
for run_time in run_times:
total_time += run_time
average = total_time / len(run_times)
return {
'min': run_times[0],
'max': run_times[-1],
'average': average,
'per_record_average': average / num_items
}
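# Hedged usage sketch (not part of the original module); assumes a configured
# Django environment for contentcuration, e.g. inside `./manage.py shell`:
#   obj = Objective()
#   try:
#       stats = obj.get_object_creation_stats("ContentNode", num_objects=100, num_runs=3)
#       print(stats)  # {'min': ..., 'max': ..., 'average': ..., 'per_record_average': ...}
#   finally:
#       obj.cleanup()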
|
# Copyright 2019 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import requests
from requests.exceptions import ConnectionError
from personal_mycroft_backend.settings import DEBUG, SSL
class BackendMycroftAPI(object):
def __init__(self, api, lang="en-us", url="https://0.0.0.0:6712/v0.1/",
debug=DEBUG, ssl=SSL):
if not ssl:
url = url.replace("https", "http")
if debug:
# filter warnings, TODO this should be removed once we stop using self signed certs
from requests.packages.urllib3.exceptions import \
InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
self.api = api
self.headers = {"Authorization": self.api.encode("utf-8")}
self.lang = lang
self.url = url
self.timeout = 10
self.wait_time = 0.5
def pair(self, code, uuid, mail, name="jarbas"):
''' add a new user, requires admin api '''
try:
response = requests.put(
self.url+"pair/"+code+"/"+uuid+"/"+name+"/"+mail,
headers=self.headers, verify=not DEBUG
)
try:
return response.json()
            except ValueError:
print(response.text)
raise ValueError("Invalid admin api key")
except ConnectionError as e:
raise ConnectionError("Could not connect: " + str(e))
if __name__ == "__main__":
ap = BackendMycroftAPI("admin_key")
username = "jarbasX"
code = "XQFTNM"
uuid = "cc3524c7-ff52-42b3-af8f-de89249b19c8"
mail = "fakemail2@not_real.com"
print(ap.pair(code, uuid, mail, username))
|
import sqlite3
import pandas as pd
import numpy as np
from uszipcode import SearchEngine
from pyspark.sql.functions import col, udf
search = SearchEngine(db_file_dir="/tmp/db")
conn = sqlite3.connect("/tmp/db/simple_db.sqlite")
pdf = pd.read_sql_query(
    "select zipcode, lat, lng, radius_in_miles, "
    "bounds_west, bounds_east, bounds_north, bounds_south "
    "from simple_zipcode",
    conn)
# `sc` (SparkContext) and `df` (the input Spark DataFrame with `latitude` and
# `longitude` columns) are assumed to be provided by the surrounding Spark session.
brd_pdf = sc.broadcast(pdf)
@udf('string')
def get_zip_b(lat, lng):
pdf = brd_pdf.value
try:
out = pdf[(pdf['bounds_north']>=lat) &
(pdf['bounds_south']<=lat) &
(pdf['bounds_west']<=lng) &
(pdf['bounds_east']>=lng) ]
dist = [None]*len(out)
for i in range(len(out)):
dist[i] = (out['lat'].iloc[i]-lat)**2 + (out['lng'].iloc[i]-lng)**2
        zipcode = out['zipcode'].iloc[dist.index(min(dist))]
    except (ValueError, IndexError):
        # no candidate rows matched this point's bounding boxes
        zipcode = 'bad'
    return zipcode
output_df = df.withColumn('zip', get_zip_b(col("latitude"),col("longitude"))).cache()
|
"""Tests for selection functions."""
import numpy as np
import condense.optimizer.masking_functions as mf
from logging import info
def test_mask_min_value():
"""Testing default selection function."""
test_array = np.array([[1, 4, 3], [4, 2, 6]])
mask = mf.mask_min_value(test_array, 0.5)
assert (mask != test_array).any(), 'check for mask'
assert mask.shape == test_array.shape, 'check if shapes changed'
assert mask.dtype == bool, 'check mask datatype'
assert (mask != mf.mask_min_value(test_array, 0.2)).any(), 'check if p argument affects mask'
# check values
info(f'Mask Values: {mask}')
assert (mask == np.array([[True, False, True], [False, True, False]])).all(), 'correct mask values'
|
import discord
ICE_BLUE = discord.Color.from_rgb(207, 242, 255)
|
import logging
import typing
from abc import ABC
from abc import abstractmethod
from vk.utils.mixins import MetaMixin
if typing.TYPE_CHECKING:
from vk.bot_framework.dispatcher.dispatcher import Dispatcher
T = typing.TypeVar("T")
logger = logging.getLogger(__name__)
class AbstractExtension(ABC, MetaMixin):
@abstractmethod
async def get_events(self) -> typing.List:
"""
Get events from any resource and return list of events.
:return: list of coming events.
"""
@abstractmethod
async def run(self, dp: "Dispatcher"):
"""
        Get events from the self.get_events method in an endless loop
        and call the dispatcher method dp._process_events.
:param dp: dispatcher
:return:
"""
class BaseExtension(AbstractExtension, ABC):
"""
Can be added to extensions with ExtensionsManager and
used to get events.
>>> extension_manager.run_extension(name=unique_key)
"""
key = None # unique key to access the extension
class ExtensionsManager:
def __init__(
self,
dp: "Dispatcher",
default_extensions: typing.Dict[str, typing.Type[BaseExtension]],
):
self.dp: "Dispatcher" = dp
self.extensions: typing.Dict[str, typing.Type[BaseExtension]] = {}
self.extensions.update(default_extensions)
def setup(self, extension: typing.Type[BaseExtension]):
if extension.key is None:
raise RuntimeError("Unallowed key for extension")
self.extensions[extension.key] = extension
def run_extension(self, name: str, **extension_init_params) -> None:
"""
:param name: name of the extension
:param extension_init_params: params which accept the extension constructor
:return:
"""
if typing.TYPE_CHECKING:
BaseExtension = typing.Type[T] # noqa
extension: BaseExtension = self.extensions.get(name) # noqa
if not extension:
raise RuntimeError("Undefined extension")
extension: BaseExtension = extension(**extension_init_params)
self.dp.vk.loop.create_task(extension.run(self.dp))
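# ---------------------------------------------------------------------------
# Hedged illustrative sketch (not part of the framework): a minimal polling
# extension following the contract documented by AbstractExtension. The event
# source, the awaitability of dp._process_events, and the class name are
# assumptions for illustration only.
# ---------------------------------------------------------------------------
class _ExamplePollingExtension(BaseExtension):
    key = "example_polling"  # unique key used by ExtensionsManager.setup()
    def __init__(self, interval: float = 1.0):
        self.interval = interval
    async def get_events(self) -> typing.List:
        # A real extension would poll an external resource here.
        return []
    async def run(self, dp: "Dispatcher") -> None:
        import asyncio
        while True:
            events = await self.get_events()
            if events:
                # hand events to the dispatcher, per the AbstractExtension docstring
                await dp._process_events(events)
            await asyncio.sleep(self.interval)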
|
from plot_events import *
from helper_functions import *
from random import randint
# TODO: Consider separating out all conversion functions into their own file
def test_convert_event_to_boxes(faker) -> None:
"""Test that an `Event` can be converted into a list of `EventBox`s on different days (one event can be split across
multiple days, and when plotted on a calendar, this event must be drawn with multiple boxes)."""
# Case when an event starts and finished on the same day
start_datetime = datetime(2020, 11, 5, 9, 0)
end_datetime = datetime(2020, 11, 5, 14, 0)
test_event = Event(faker.word(), start_datetime, end_datetime)
expected_output = [
EventBox(0, time_to_float(start_datetime.time()), time_to_float(end_datetime.time()))
]
assert convert_event_to_boxes(test_event) == expected_output
# Case when an event overlaps one day boundary
start_datetime = datetime(2020, 11, 5, 18, 10)
end_datetime = datetime(2020, 11, 6, 9, 0)
test_event = Event(faker.word(), start_datetime, end_datetime)
expected_output = [
EventBox(0, time_to_float(start_datetime.time()), 24.0),
EventBox(1, 0.0, time_to_float(end_datetime.time()))
]
assert convert_event_to_boxes(test_event) == expected_output
# Case when an event overlaps two day boundaries
start_datetime = datetime(2020, 11, 5, 18, 10)
end_datetime = datetime(2020, 11, 7, 9, 0)
test_event = Event(faker.word(), start_datetime, end_datetime)
expected_output = [
EventBox(0, time_to_float(start_datetime.time()), 24.0), # the first day
EventBox(1, 0.0, 24.0), # second day
EventBox(2, 0.0, time_to_float(end_datetime.time())) # third day
]
assert convert_event_to_boxes(test_event) == expected_output
|
#
# Copyright (c) 2021 Joe Todd
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import platform
from cffi import FFI
if platform.system() == 'Windows':
ffi = FFI()
base_path = os.path.dirname(os.path.abspath(__file__))
if platform.architecture()[0] == '32bit':
soundio = ffi.dlopen(os.path.join(base_path, 'libraries', 'win32', 'libsoundio.dll'))
elif platform.architecture()[0] == '64bit':
soundio = ffi.dlopen(os.path.join(base_path, 'libraries', 'win64', 'libsoundio.dll'))
from pysoundio._soundio.lib import ( # noqa: F401
SoundIoBackendNone, SoundIoBackendJack, SoundIoBackendPulseAudio,
SoundIoBackendAlsa, SoundIoBackendCoreAudio, SoundIoBackendWasapi,
SoundIoBackendDummy,
SoundIoFormatS8, SoundIoFormatU8, SoundIoFormatS16LE,
SoundIoFormatS16BE, SoundIoFormatU16LE, SoundIoFormatU16BE,
SoundIoFormatS24LE, SoundIoFormatS24BE, SoundIoFormatU24LE,
SoundIoFormatU24BE, SoundIoFormatS32LE, SoundIoFormatS32BE,
SoundIoFormatU32LE, SoundIoFormatU32BE,
SoundIoFormatFloat32LE, SoundIoFormatFloat32BE, SoundIoFormatFloat64LE,
SoundIoFormatFloat64BE, SoundIoFormatInvalid
)
from .constants import ( # noqa: F401
BACKENDS,
FORMATS
)
from .pysoundio import PySoundIo, PySoundIoError # noqa: F401
|
import os
import pytest
import mne
from meegpowreg.power_features import compute_features
data_path = mne.datasets.sample.data_path()
data_dir = os.path.join(data_path, 'MEG', 'sample')
raw_fname = os.path.join(data_dir, 'sample_audvis_raw.fif')
frequency_bands = {'alpha': (8.0, 15.0), 'beta': (15.0, 30.0)}
def test_compute_features_raw():
raw = mne.io.read_raw_fif(raw_fname, verbose=False)
raw = raw.copy().crop(0, 200).pick(
[0, 1, 330, 331, 332] # take some MEG and EEG
)
raw.info.normalize_proj()
computed_features, res = compute_features(
raw, features=['psds', 'covs', 'cross_frequency_covs',
'cross_frequency_corrs', 'cospectral_covs'],
frequency_bands=frequency_bands)
n_channels = len(raw.ch_names)
n_freqs = len(res['freqs'])
n_fb = len(frequency_bands)
assert (
set(computed_features.keys()) ==
{'psds', 'covs', 'cross_frequency_covs',
'cross_frequency_corrs', 'cospectral_covs'}
)
assert computed_features['psds'].shape == (n_channels, n_freqs)
assert computed_features['covs'].shape == (n_fb, n_channels, n_channels)
assert (computed_features['cross_frequency_covs'].shape ==
(n_fb * n_channels, n_fb * n_channels))
assert (computed_features['cross_frequency_corrs'].shape ==
(n_fb * n_channels, n_fb * n_channels))
assert (computed_features['cospectral_covs'].shape[1:] ==
(n_channels, n_channels))
with pytest.raises(ValueError, match=r".* specified .*"):
computed_features, res = compute_features(
raw, features='covs haha',
frequency_bands=frequency_bands)
def test_compute_features_epochs():
raw = mne.io.read_raw_fif(raw_fname, verbose=False)
raw = raw.copy().crop(0, 200).pick(
[0, 1, 330, 331, 332] # take some MEG and EEG
)
raw.info.normalize_proj()
events = mne.make_fixed_length_events(raw, id=3000,
start=0,
duration=10.,
stop=raw.times[-1] - 60.)
epochs = mne.Epochs(raw, events, event_id=3000, tmin=0, tmax=60.,
proj=True, baseline=None, reject=None,
preload=True, decim=1)
computed_features, res = compute_features(
epochs, features=['psds', 'covs', 'cross_frequency_covs',
'cross_frequency_corrs', 'cospectral_covs'],
frequency_bands=frequency_bands)
n_channels = len(raw.ch_names)
n_freqs = len(res['freqs'])
n_fb = len(frequency_bands)
assert set(computed_features.keys()) == {'psds', 'covs',
'cross_frequency_covs',
'cross_frequency_corrs',
'cospectral_covs'}
assert computed_features['psds'].shape == (n_channels, n_freqs)
assert computed_features['covs'].shape == (n_fb, n_channels, n_channels)
assert (computed_features['cross_frequency_covs'].shape ==
(n_fb * n_channels, n_fb * n_channels))
assert (computed_features['cross_frequency_corrs'].shape ==
(n_fb * n_channels, n_fb * n_channels))
assert (computed_features['cospectral_covs'].shape[1:] ==
(n_channels, n_channels))
|
import numpy as np, os, itertools
import pandas as pd
from rpy2 import robjects
import rpy2.robjects.numpy2ri
rpy2.robjects.numpy2ri.activate()
import rpy2.robjects.pandas2ri
from rpy2.robjects.packages import importr
from .comparison_metrics import (sim_xy,
glmnet_lasso,
relative_risk)
from .risk_comparisons import risk_comparison
def output_file(n=200,
p=500,
rho=0.35,
s=5,
beta_type=1,
snr_values=np.array([0.10, 0.15, 0.20, 0.25, 0.30,
0.35, 0.42, 0.71, 1.22, 2.07]),
tuning_nonrand="lambda.1se",
tuning_rand="lambda.1se",
randomizing_scale=np.sqrt(0.50),
ndraw=50,
outpath = None):
df_risk = pd.DataFrame()
if n > p:
full_dispersion = True
else:
full_dispersion = False
snr_list = []
for snr in snr_values:
snr_list.append(snr)
relative_risk = np.squeeze(risk_comparison(n=n,
p=p,
nval=n,
rho=rho,
s=s,
beta_type=beta_type,
snr=snr,
randomizer_scale=randomizing_scale,
full_dispersion=full_dispersion,
tuning_nonrand =tuning_nonrand,
tuning_rand=tuning_rand, ndraw = ndraw))
        df_risk = pd.concat([df_risk, pd.DataFrame(data=relative_risk.reshape((1, 6)),
                            columns=['sel-MLE', 'ind-est', 'rand-LASSO',
                                     'rel-rand-LASSO', 'rel-LASSO', 'LASSO'])], ignore_index=True)
df_risk['n'] = n
df_risk['p'] = p
df_risk['s'] = s
df_risk['rho'] = rho
df_risk['beta-type'] = beta_type
df_risk['snr'] = pd.Series(np.asarray(snr_list))
df_risk['target'] = "selected"
if outpath is None:
outpath = os.path.dirname(__file__)
outfile_risk_csv = os.path.join(outpath, "dims_" + str(n) + "_" + str(p) + "_risk_betatype" + str(beta_type) + "_rho_" + str(rho) + ".csv")
outfile_risk_html = os.path.join(outpath, "dims_" + str(n) + "_" + str(p) + "_risk_betatype" + str(beta_type) + "_rho_" + str(rho) + ".html")
df_risk.to_csv(outfile_risk_csv, index=False)
df_risk.to_html(outfile_risk_html)
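# Hedged usage sketch (not part of the original module): with the call below,
# "dims_200_500_risk_betatype1_rho_0.35.csv" and the matching ".html" are
# written into /tmp (an R installation with the packages used by
# risk_comparison is assumed to be available).
#   output_file(n=200, p=500, rho=0.35, beta_type=1, ndraw=50, outpath="/tmp")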
|
from pyxdameraulevenshtein import damerau_levenshtein_distance
from doublemetaphone import dm
import json
import sys
#---------------------------------------------------------------------------
# EDIT DISTANCE SEARCH
#---------------------------------------------------------------------------
def load_vocab(vocab):
"""
Transforms a vocabulary to the dictionary format required for the candidate generation.
:param vocab: a list containing the vocabulary
:return: vocab_dict
"""
# TRANSFORM VOCABULARY TO DICTIONARY
# initialize vocab word length keys and character set length keys
vocab_dict = {}
min_len = len(min(vocab, key=len))
max_len = len(max(vocab, key=len))
item_lens = range(min_len, max_len+1)
for item in item_lens:
vocab_dict[item] = {}
for i in range(1, max_len+1):
vocab_dict[item][i] = set()
# fill vocab according to word length and character set length
for word in vocab:
vocab_dict[len(word)][len(set(word))].add(word)
return vocab_dict
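# Hedged illustration (not part of the original script): the nested dict built
# above is keyed first by word length, then by number of distinct characters,
# e.g. for a toy vocabulary:
#   load_vocab(["to", "cat", "cart"])[3][3]  ->  {"cat"}
#   load_vocab(["to", "cat", "cart"])[4][4]  ->  {"cart"}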
def levenshtein_candidates(word, vocab_dict, editdistance=2):
"""
Generates candidates
:param word: the misspelling for which to generate replacement candidates
:param vocab_dict: the output of load_vocab()
:param editdistance: the maximum Damerau-Levenshtein edit distance
:return:
"""
candidates = []
word_len = len(word)
set_len = len(set(word))
if word_len <= 2:
word_lengths = range(word_len, word_len + 1 + editdistance)
else:
word_lengths = range(word_len-editdistance, word_len+1+editdistance)
if set_len-editdistance > 0:
set_lengths = range(set_len-editdistance, set_len+1+editdistance)
else:
set_lengths = range(set_len, set_len + 1 + editdistance)
selection = []
    for i in word_lengths:
        key = vocab_dict.get(i, {})
        for j in set_lengths:
            selection += key.get(j, set())
for item in set(selection):
if damerau_levenshtein_distance(word, item) <= editdistance:
candidates.append(item)
full_candidates = list(set(candidates))
return full_candidates
#---------------------------------------------------------------------------
# METAPHONE SEARCH (~ Aspell 'soundslike' suggestions)
#---------------------------------------------------------------------------
def load_metaphones(vocab):
"""
    :param vocab: a list containing the vocabulary
:return: dictionary with mappings between Double Metaphone representations and corresponding lexical items
"""
# MAKE METAPHONE-LEXICAL MAPPING
metaphone_dict = {}
for item in vocab:
metaphones = dm(item)
for metaphone in metaphones:
if metaphone:
try:
metaphone_dict[metaphone].append(item)
except KeyError:
metaphone_dict[metaphone] = []
metaphone_dict[metaphone].append(item)
return metaphone_dict
def convert_candidates(metaphone_candidates, detection, metaphone_dict):
"""
    :param metaphone_candidates: replacement candidates in Double Metaphone representation
:param detection: misspelling
:param metaphone_dict: output of load_metaphones()
:return: candidates converted from Double Metaphone representation to normal lexical representation
"""
converted_candidates = []
    for candidate in metaphone_candidates:
for item in metaphone_dict[candidate]:
if len(set(item).intersection(set(candidate))) >= 1: # have at least one character in common
if damerau_levenshtein_distance(item, detection) <= 3: # enough overlap
converted_candidates.append(item)
return converted_candidates
if __name__ == "__main__":
"""
argv[1] = json file containing devcorpus
argv[2] = edit distance of generated candidates: 1, 2 or all
argv[3] = name of outfile to write candidate lists to
argv[4] = language from ["en", "nl"]
"""
language = sys.argv[4]
assert language in ["en", "nl"]
vocab = json.load(open("lexicon_" + language + ".json", 'r'))
vocab_dict = load_vocab(vocab)
with open(sys.argv[1], 'r') as f:
detection_list = json.load(f)[1]
print(str(len(detection_list)) + ' misspellings to generate candidates for')
candidates_list = []
if sys.argv[2] == "1":
print("Generating Damerau-Levenshtein candidates edit distance 1")
for i, misspelling in enumerate(detection_list):
print(i)
candidates_list.append(levenshtein_candidates(misspelling, vocab_dict, editdistance=1))
else:
print("Generating Damerau-Levenshtein candidates edit distance 2")
for i, misspelling in enumerate(detection_list):
print(i)
candidates_list.append(levenshtein_candidates(misspelling, vocab_dict, editdistance=2))
if sys.argv[2] == "all":
print("Generating Double Metaphone candidates edit distance 1")
metaphone_dict = load_metaphones(vocab)
vocab_dict = load_vocab(list(metaphone_dict.keys()))
metaphone_candidates = [levenshtein_candidates(dm(misspelling)[0], vocab_dict, editdistance=1)
for misspelling in detection_list]
soundslike_candidates = [convert_candidates(candidates, detection, metaphone_dict) for
candidates, detection in zip(metaphone_candidates, detection_list)]
candidates_list = [list(set(candidates1 + candidates2)) for candidates1, candidates2 in
zip(candidates_list, soundslike_candidates)]
with open(sys.argv[3], 'w') as f:
json.dump(candidates_list, f)
|
#!/usr/bin/env python
# filename: pl.py
#
# Copyright (c) 2021 Bryan Briney
# License: The MIT license (http://opensource.org/licenses/MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from collections import Counter
import itertools
import os
import re
import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib.lines import Line2D
from matplotlib.patches import Patch
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import scanpy as sc
import seaborn as sns
from anndata import AnnData
from natsort import natsorted
from abutils.utils.utilities import nested_dict_lookup
# ===========================
# QUALITY CONTROL
# ===========================
def qc_metrics(adata, ngenes_cutoff=2500, mito_cutoff=10, ig_cutoff=50,
fig_dir=None, fig_prefix=None):
if 'ig' not in adata.var:
pattern = re.compile('IG[HKL][VDJ][1-9].+|TR[ABDG][VDJ][1-9]')
adata.var['ig'] = [False if re.match(pattern, a) is None else True for a in adata.var.index]
if 'mt' not in adata.var:
adata.var['mt'] = adata.var_names.str.startswith('MT-')
sc.pp.calculate_qc_metrics(adata, qc_vars=['ig', 'mt'],
percent_top=None, log1p=False, inplace=True)
palette = {'include': '#202020', 'exclude': '#C0C0C0'}
hue_order = ['include', 'exclude']
# plot Ig
ig_hue = ['include' if i < ig_cutoff else 'exclude' for i in adata.obs.pct_counts_ig]
ig_counter = Counter(ig_hue)
g = sns.JointGrid(data=adata.obs, x='total_counts', y='pct_counts_ig')
g.plot_joint(sns.scatterplot, s=10, linewidth=0,
hue=ig_hue, hue_order=hue_order, palette=palette)
g.plot_marginals(sns.kdeplot, shade=True, color='#404040')
g.ax_joint.set_xlabel('total counts', fontsize=16)
g.ax_joint.set_ylabel('immunoglobulin counts (%)', fontsize=16)
g.ax_joint.tick_params(axis='both', labelsize=13)
handles, labels = g.ax_joint.get_legend_handles_labels()
labels = [f'{l} ({ig_counter[l]})' for l in labels]
g.ax_joint.legend(handles, labels, title='ig filter', title_fontsize=14, fontsize=13)
if fig_dir is not None:
plt.tight_layout()
if fig_prefix is not None:
fig_name = f'{fig_prefix}_pct-counts-ig.pdf'
else:
fig_name = 'pct_counts_ig.pdf'
plt.savefig(os.path.join(fig_dir, fig_name))
else:
plt.show()
# plot mito
mito_hue = ['include' if i < mito_cutoff else 'exclude' for i in adata.obs.pct_counts_mt]
mito_counter = Counter(mito_hue)
g = sns.JointGrid(data=adata.obs, x='total_counts', y='pct_counts_mt')
g.plot_joint(sns.scatterplot, s=10, linewidth=0,
hue=mito_hue, hue_order=hue_order, palette=palette)
g.plot_marginals(sns.kdeplot, shade=True, color='#404040')
g.ax_joint.set_xlabel('total counts', fontsize=16)
g.ax_joint.set_ylabel('mitochondrial counts (%)', fontsize=16)
g.ax_joint.tick_params(axis='both', labelsize=13)
handles, labels = g.ax_joint.get_legend_handles_labels()
labels = [f'{l} ({mito_counter[l]})' for l in labels]
g.ax_joint.legend(handles, labels, title='mito filter', title_fontsize=14, fontsize=13)
if fig_dir is not None:
plt.tight_layout()
if fig_prefix is not None:
fig_name = f'{fig_prefix}_pct-counts-mt.pdf'
else:
fig_name = 'pct_counts_mt.pdf'
plt.savefig(os.path.join(fig_dir, fig_name))
else:
plt.show()
# plot N genes by counts
ngenes_hue = ['include' if i < ngenes_cutoff else 'exclude' for i in adata.obs.n_genes_by_counts]
ngenes_counter = Counter(ngenes_hue)
g = sns.JointGrid(data=adata.obs, x='total_counts', y='n_genes_by_counts')
g.plot_joint(sns.scatterplot, s=10, linewidth=0,
hue=ngenes_hue, hue_order=hue_order, palette=palette)
g.plot_marginals(sns.kdeplot, shade=True, color='#404040')
g.ax_joint.set_xlabel('total counts', fontsize=16)
g.ax_joint.set_ylabel('number of genes', fontsize=16)
g.ax_joint.tick_params(axis='both', labelsize=13)
handles, labels = g.ax_joint.get_legend_handles_labels()
labels = [f'{l} ({ngenes_counter[l]})' for l in labels]
g.ax_joint.legend(handles, labels, title='genes filter', title_fontsize=14, fontsize=13)
if fig_dir is not None:
plt.tight_layout()
if fig_prefix is not None:
fig_name = f'{fig_prefix}_n-genes-by-counts.pdf'
else:
fig_name = 'n_genes_by_counts.pdf'
plt.savefig(os.path.join(fig_dir, fig_name))
else:
plt.show()
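# Hedged usage sketch (not part of the original module); the path and prefix
# below are placeholders:
#   adata = sc.read_h5ad("pbmc.h5ad")
#   qc_metrics(adata, ngenes_cutoff=2500, mito_cutoff=10, ig_cutoff=50,
#              fig_dir="./figures", fig_prefix="pbmc")
# With fig_dir set, three PDFs are written (pct-counts-ig, pct-counts-mt,
# n-genes-by-counts); otherwise the plots are shown interactively.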
# ===========================
# FEATURES
# ===========================
def feature_kde(data, x, y, hue=None, hue_order=None, colors=None, thresh=0.1,
show_scatter=True, scatter_size=5, scatter_alpha=0.2,
fill=False, kde_fill_alpha=0.7, kde_line_alpha=1.0,
highlight_index=None, highlight_x=None, highlight_y=None, highlight_marker='x',
highlight_size=90, highlight_color='k', highlight_name=None, highlight_alpha=0.8,
xlabel=None, ylabel=None, equal_axes=True,
legend_loc='best', legend_title=None, show_legend_title=True, legend_fontsize=12,
return_ax=False, figsize=[6, 6], figfile=None, **kwargs):
'''
Produces a 2-dimensional KDE plot of two features.
Args:
        data (anndata.AnnData or pd.DataFrame): An ``AnnData`` object or a ``Pandas`` dataframe
containing the input data. Required.
x (str): Name of the column in ``data`` containing the feature to be plotted on the x-axis. Required.
y (str): Name of the column in ``data`` containing the feature to be plotted on the y-axis. Required.
hue (str): Name of the column in ``data`` containing categories for hue values. For scatter plots,
the categories in ``hue`` will be plotted as different colored points. For KDE plots,
                   ``hue`` categories will each be plotted as differently colored KDE plots
on the same plot.
hue_order (iterable): Iterable of hue categories, in the order they should be plotted and listed
in the legend. If ```hue_order``` contains only a subset of the categories
present in ```data[hue]```, only the categories supplied in ```hue_order```
will be plotted.
colors (iterable): List of colors to be used for ```'hue'``` categories. If ```'colors'``` is
shorter than the list of hue categories, colors will be reused.
thresh (float): Threshold for the KDE. Default is ```0.1```.
show_scatter (bool): Show the scatterplot beneath the transparent KDE plot. Default is ```True```.
scatter_size (int, float): Size of the scatter points. Default is ```5```.
scatter_alpha (float): Alpha of the scatter points. Default is ```0.2```.
        fill (bool): Fill the KDE plot. Default is ```False```.
kde_fill_alpha (float): Alpha for the filled KDE plot. If ```fill``` is ```False```,
this option is ignored. Default is ```0.7```.
kde_line_alpha (float): Alpha for the KDE plot lines. Default is ```1.0```.
        highlight_index (iterable): An iterable of index names (present in ```data```) of points
to be highlighted on the KDE plot. If provided, ```highlight_x```
and ```highlight_y``` are ignored.
highlight_x (iterable): An iterable of x-values for highlighted points. Also requires
```highlight_y```.
highlight_y (iterable): An iterable of y-values for highlighted points. Also requires
```highlight_x```.
highlight_marker (str): The marker style to be used for highlight points. Accepts
standard matplotlib marker styles. Default is ```'x'```.
highlight_size (int): Size of the highlight marker. Default is ```90```.
highlight_color (string or RGB list): Color of the highlight points. Default is black.
highlight_name (str): Name of the highlights, to be used in the legend. If not supplied,
highlight points will not be included in the legend.
highlight_alpha (float): Alpha for the highlight points. Default is ```0.8```.
xlabel (str): Label for the x-axis. By default, the value for ```x``` is used.
ylabel (str): Label for the y-axis. By default, the value for ```y``` is used.
        equal_axes (bool): If ```True```, the limits of the x- and y-axis will be equal.
Default is ```True```.
legend_loc (str): Location for the legend. Uses standard matplotlib locations. Default
is ```'best'```.
        legend_title (str): Title for the legend. By default, ```hue``` is used.
show_legend_title (bool): Whether or not to show the legend title. Default is ```True```.
return_ax (bool): If ```True```, return the plot's ```ax``` object. Will not show or save
the plot. Default is ```False```.
        figsize (list): A list containing the dimensions of the plot. Default is ```[6, 6]```.
figfile (str): Path to which the figure will be saved. If not provided, the figure will be
shown but not saved to file.
kwargs: All other keyword arguments are passed to ``seaborn.kdeplot()``.
'''
# input data
if isinstance(data, AnnData):
_data = {}
for var in [x, y, hue]:
if var is not None:
if any([var in data.obs.columns.values, var in data.var_names]):
_data[var] = data.obs_vector(var)
else:
print('"{}" was not found in the supplied AnnData object.'.format(var))
return
df = pd.DataFrame(_data, index=data.obs_names)
else:
_data = {}
for var in [x, y, hue]:
if var is not None:
if var in data.columns.values:
_data[var] = data[var]
else:
print('"{}" is not a column in the supplied dataframe'.format(x))
return
df = pd.DataFrame(_data, index=data.index.values)
# hue
if hue is not None:
if hue_order is None:
hue_order = natsorted(list(set(df[hue])))
df = df[df[hue].isin(hue_order)]
else:
hue_order = []
# colors
n_colors = max(1, len(hue_order))
if colors is None:
colors = sns.hls_palette(n_colors=n_colors)
plt.figure(figsize=figsize)
# scatterplots
if show_scatter:
if hue_order:
for h, c in zip(hue_order, colors):
d = df[df[hue] == h]
plt.scatter(d[x], d[y], c=[c], s=scatter_size,
alpha=scatter_alpha, linewidths=0)
else:
plt.scatter(df[x], df[y], c=[colors[0]], s=scatter_size,
alpha=scatter_alpha, linewidths=0)
# kdeplot
if fill:
if hue_order:
sns.kdeplot(data=df, x=x, y=y, hue=hue, fill=True, alpha=kde_fill_alpha,
hue_order=hue_order, palette=colors, thresh=thresh, **kwargs)
else:
sns.kdeplot(data=df, x=x, y=y, fill=True, alpha=kde_fill_alpha,
color=colors[0], thresh=thresh, **kwargs)
if hue_order:
ax = sns.kdeplot(data=df, x=x, y=y, hue=hue, alpha=kde_line_alpha,
hue_order=hue_order, palette=colors, thresh=thresh, **kwargs)
else:
ax = sns.kdeplot(data=df, x=x, y=y, alpha=kde_line_alpha,
color=colors[0], thresh=thresh, **kwargs)
# highlighted points
highlight = any([highlight_index is not None, all([highlight_x is not None, highlight_y is not None])])
if highlight:
if highlight_index is not None:
hidata = df.loc[highlight_index]
highlight_x = hidata[x]
highlight_y = hidata[y]
plt.scatter(highlight_x, highlight_y, zorder=10,
s=highlight_size,
c=highlight_color,
alpha=highlight_alpha,
marker=highlight_marker)
# legend
legend_labels = hue_order
if fill:
handles = []
for c in colors:
f = Patch(fc=c, alpha=kde_fill_alpha / 3)
e = Patch(ec=c, fill=False, lw=1.5)
handles.append((f, e))
# handles = [Patch(fc=c, ec=c, alpha=kde_fill_alpha / 3, label=h) for c, h in zip(colors, hue_order)]
else:
handles = [Line2D([0], [0], color=c) for c in colors]
if highlight_name is not None:
legend_labels.append(highlight_name)
handles.append(Line2D([0], [0], marker=highlight_marker, color='w',
mec=highlight_color,
mfc=highlight_color,
ms=highlight_size / 10))
if show_legend_title:
legend_title = legend_title if legend_title is not None else hue
else:
legend_title = None
ax.legend(handles, legend_labels, loc=legend_loc, fontsize=legend_fontsize, title=legend_title, frameon=False)
# style the plot
ax.set_xlabel(xlabel if xlabel is not None else x, fontsize=16)
ax.set_ylabel(ylabel if ylabel is not None else y, fontsize=16)
ax.tick_params(axis='both', labelsize=13)
for spine in ['right', 'top']:
ax.spines[spine].set_visible(False)
for spine in ['left', 'bottom']:
ax.spines[spine].set_position(('outward', 10))
if equal_axes:
xlim = ax.get_xlim()
ylim = ax.get_ylim()
axlim = [min([xlim[0], ylim[0]]), max([xlim[1], ylim[1]])]
ax.set_xlim(axlim)
ax.set_ylim(axlim)
if return_ax:
return ax
elif figfile is not None:
plt.tight_layout()
plt.savefig(figfile)
else:
plt.show()
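# Hedged usage sketch (not part of the original module); the column names are
# placeholders for anything present in adata.obs or adata.var_names:
#   feature_kde(adata, x="CD4", y="CD8A", hue="leiden",
#               fill=True, figfile="cd4_vs_cd8_kde.pdf")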
def feature_scatter(data, x, y, hue=None, hue_order=None, color=None, cmap=None, marker='o', size=20, alpha=0.6,
highlight_index=None, highlight_x=None, highlight_y=None, highlight_marker='x',
highlight_size=90, highlight_color='k', highlight_name=None, highlight_alpha=0.9,
xlabel=None, ylabel=None, equal_axes=True, force_categorical_hue=False,
legend_loc='best', legend_title=None, legend_fontsize=13, legend_frameon=True,
cbar_width=35, cbar_height=5, cbar_loc='lower right', cbar_orientation='horizontal',
cbar_bbox_to_anchor=None, cbar_flip_ticks=False,
cbar_title=None, cbar_title_loc=None, cbar_title_fontsize=12,
return_ax=False, figsize=[6, 6], figfile=None):
'''
Produces a scatter plot of two features, optionally colored by a third feature.
Args:
-----
        data (anndata.AnnData or pd.DataFrame): An ``AnnData`` object or a ``Pandas`` dataframe
containing the input data. Required.
x (str): Name of the column in ``data`` containing the feature to be plotted on the x-axis. Required.
y (str): Name of the column in ``data`` containing the feature to be plotted on the y-axis. Required.
hue (str): Name of the column in ``data`` containing categories for hue values. If ``hue`` is categorical,
each category will be plotted in a different color (using the ``color`` for the colors). If
``hue`` is continuous, points will be colored using a colormap (using ``cmap`` if supplied).
hue_order (iterable): Iterable of hue categories, in the order they should be plotted and listed
in the legend. If ```hue_order``` contains only a subset of the categories
present in ```data[hue]```, only the categories supplied in ```hue_order```
will be plotted.
force_categorical_hue (bool): If ``True``, ``hue`` data will be treated as categorical, even if
the data appear to be continuous. This results in ``color`` being used
to color the points rather than ``cmap``. Default is ``False``.
color (iterable): List of colors to be used for ``hue`` categories. If ``colors`` is
shorter than the list of hue categories, colors will be reused. Only used
if ``hue`` contains categorical data (``cmap`` is used for continuous data).
Default is to use ``sns.color_palette()``.
        cmap (str or matplotlib.colors.Colormap): Colormap to be used for continuous ``hue`` data. Default
is to use ``'flare'``.
marker (str): Marker for the scatter plot. Accepts standard matplotlib marker styles.
Default is ``'o'``.
size (int, float): Size of the scatter points. Default is ``20``.
alpha (float): Alpha of the scatter points. Default is ``0.6``.
        highlight_index (iterable): An iterable of index names (present in ```data```) of points
to be highlighted on the scatter plot. If provided, ```highlight_x```
and ```highlight_y``` are ignored.
highlight_x (iterable): An iterable of x-values for highlighted points. Also requires
```highlight_y```.
highlight_y (iterable): An iterable of y-values for highlighted points. Also requires
```highlight_x```.
highlight_marker (str): The marker style to be used for highlight points. Accepts
standard matplotlib marker styles. Default is ``'x'``.
highlight_size (int): Size of the highlight marker. Default is ``90``.
highlight_color (string or RGB list): Color of the highlight points. Default is ``'k'`` (black).
highlight_name (str): Name of the highlights, to be used in the legend. If not supplied,
highlight points will not be included in the legend.
highlight_alpha (float): Alpha for the highlight points. Default is ``0.9``.
xlabel (str): Label for the x-axis. By default, the value for ``x`` is used.
ylabel (str): Label for the y-axis. By default, the value for ``y`` is used.
        equal_axes (bool): If ``True``, the limits of the x- and y-axis will be equal.
Default is ``True``.
legend_loc (str): Location for the legend. Uses standard matplotlib locations. Default
is ``'best'``.
        legend_title (str): Title for the legend. By default, ``hue`` is used.
return_ax (bool): If ``True``, return the plot's ``ax`` object. Will not show or save
the plot. Default is ``False``.
        figsize (list): A list containing the dimensions of the plot. Default is ``[6, 6]``.
figfile (str): Path to which the figure will be saved. If not provided, the figure will be
shown but not saved to file.
'''
# input data
if isinstance(data, AnnData):
_data = {}
for var in [x, y, hue]:
if var is not None:
if any([var in data.obs.columns.values, var in data.var_names]):
_data[var] = data.obs_vector(var)
else:
print('"{}" was not found in the supplied AnnData object.'.format(var))
return
df = pd.DataFrame(_data, index=data.obs_names)
else:
_data = {}
for var in [x, y, hue]:
if var is not None:
if var in data.columns.values:
_data[var] = data[var]
else:
print('"{}" is not a column in the supplied dataframe'.format(var))
return
df = pd.DataFrame(_data, index=data.index.values)
# hue and color
continuous_hue = False
if hue is not None:
if all([isinstance(h, float) for h in df[hue]]) and not force_categorical_hue:
continuous_hue = True
hue_order = []
if cmap is None:
cmap = sns.color_palette("flare", as_cmap=True)
else:
cmap = plt.get_cmap(cmap)
max_hue = df[hue].max()
min_hue = df[hue].min()
df['color'] = [cmap((h - min_hue) / (max_hue - min_hue)) for h in df[hue]]
else:
if hue_order is None:
hue_order = natsorted(list(set(df[hue])))
n_colors = max(1, len(hue_order))
if color is None:
color = sns.color_palette(n_colors=n_colors)
if len(color) < n_colors:
color = itertools.cycle(color)
hue_dict = {h: c for h, c in zip(hue_order, color)}
df['color'] = [hue_dict[h] for h in df[hue]]
else:
hue_order = []
if color is not None:
df['color'] = [color] * df.shape[0]
else:
df['color'] = [sns.color_palette()[0]] * df.shape[0]
# scatterplot
plt.figure(figsize=figsize)
ax = plt.gca()
if hue_order:
for h in hue_order[::-1]:
d = df[df[hue] == h]
plt.scatter(d[x], d[y], c=d['color'], s=size, marker=marker,
alpha=alpha, linewidths=0, label=h)
else:
plt.scatter(df[x], df[y], c=df['color'], s=size, marker=marker,
alpha=alpha, linewidths=0)
# highlighted points
highlight = any([highlight_index is not None, all([highlight_x is not None, highlight_y is not None])])
if highlight:
if highlight_index is not None:
hi_index = [h for h in highlight_index if h in df.index.values]
hidata = df.loc[hi_index]
highlight_x = hidata[x]
highlight_y = hidata[y]
plt.scatter(highlight_x, highlight_y, zorder=10,
s=highlight_size,
c=highlight_color,
alpha=highlight_alpha,
marker=highlight_marker,
label=highlight_name)
# legend
if not continuous_hue:
if hue is not None:
ax.legend(loc=legend_loc, fontsize=legend_fontsize, title=legend_title, frameon=legend_frameon)
# colorbar
else:
cbax = inset_axes(ax, width=f'{cbar_width}%', height=f'{cbar_height}%',
loc=cbar_loc, bbox_to_anchor=cbar_bbox_to_anchor,
bbox_transform=ax.transAxes)
fig = plt.gcf()
norm = mpl.colors.Normalize(vmin=min_hue, vmax=max_hue)
ticks = [round(t, 2) for t in np.linspace(min_hue, max_hue, num=4)]
fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=cbax,
orientation=cbar_orientation, ticks=ticks,)
if cbar_orientation == 'horizontal':
ticks_position = 'bottom' if cbar_flip_ticks else 'top'
cbax.xaxis.set_ticks_position(ticks_position)
else:
ticks_position = 'left' if cbar_flip_ticks else 'right'
cbax.yaxis.set_ticks_position(ticks_position)
cbax.set_title(cbar_title, fontsize=cbar_title_fontsize, fontweight='medium')
# style the plot
ax.set_xlabel(xlabel if xlabel is not None else x, fontsize=16)
ax.set_ylabel(ylabel if ylabel is not None else y, fontsize=16)
ax.tick_params(axis='both', labelsize=13)
for spine in ['right', 'top']:
ax.spines[spine].set_visible(False)
for spine in ['left', 'bottom']:
ax.spines[spine].set_position(('outward', 10))
if equal_axes:
xlim = ax.get_xlim()
ylim = ax.get_ylim()
axlim = [min([xlim[0], ylim[0]]), max([xlim[1], ylim[1]])]
ax.set_xlim(axlim)
ax.set_ylim(axlim)
if return_ax:
return ax
elif figfile is not None:
plt.tight_layout()
plt.savefig(figfile)
else:
plt.show()
def cellhash_ridge(adata, hashname, category, colors=None, alpha=1.0,
categories=None, hide_extra_categories=False, rename=None, xmax=14,
ylabel_fontsize=11, xlabel=None, xlabel_fontsize=12, xtick_labelsize=11,
feature_label_xoffset=5, figfile=None):
'''
Ridge plot of cell hashing data: plots the distribution of ``hashname`` values (from ``adata.obs``) for each category in ``category``.
'''
# input data
data = adata.obs.copy()
data = data[data[hashname] <= xmax]
if category not in data.columns.values:
print('"{}" is not a column in the supplied dataframe'.format(category))
return
# rename
if rename is None:
rename = {}
else:
if not any([k in data.columns.values for k in rename.keys()]):
rename = {v: k for k, v in rename.items()}
# categories
category_set = data[category].unique()
if categories is None:
feature_cats = natsorted([c for c in category_set if rename.get(c, c) in data.columns.values])
extra_cats = natsorted([c for c in category_set if rename.get(c, c) not in feature_cats])
categories = feature_cats + extra_cats
else:
feature_cats = categories
if hide_extra_categories:
extra_cats = []
else:
extra_cats = [c for c in category_set if rename.get(c, c) not in feature_cats]
categories = feature_cats + extra_cats
# colors
if colors is None:
n_colors = len(feature_cats)
colors = list(sns.color_palette(n_colors=n_colors))
n_greys = len(extra_cats)
greys = list(plt.get_cmap('Greys')(np.linspace(0, 1, n_greys + 2))[1:-1, :3])
cdict = {h: c for h, c in zip(categories, colors + greys)}
elif isinstance(colors, (list, tuple, np.ndarray, pd.core.series.Series)):
colors = list(colors)
if len(colors) < len(categories):
n_greys = len(categories) - len(colors)
greys = list(plt.get_cmap('Greys')(np.linspace(0, 1, n_greys + 2))[1:-1, :3])
cdict = {h: c for h, c in zip(categories, colors + greys)}
else:
cdict = colors
if len(cdict) < len(categories):
missing = [k for k in categories if k not in cdict]
n_greys = len(missing)
greys = list(plt.get_cmap('Greys')(np.linspace(0, 1, n_greys + 2))[1:-1, :3])
for m, g in zip(missing, greys):
cdict[m] = g
colors = [cdict[c] for c in categories]
# plot
g = sns.FacetGrid(data, row=category, hue=category,
aspect=7.5, height=0.75, palette=colors,
row_order=categories, hue_order=categories)
g.map(sns.kdeplot, hashname, clip=[None, xmax], shade=True, alpha=alpha, lw=1.5)
g.map(sns.kdeplot, hashname, clip=[None, xmax], color="w", lw=3)
g.map(plt.axhline, y=0, lw=2, clip_on=False)
def label(x, color, label):
ax = plt.gca()
ax.text(0, .2, label, fontweight="bold", color=color,
fontsize=ylabel_fontsize,
ha="left", va="center", transform=ax.transAxes)
g.map(label, hashname)
# set the subplots to overlap
g.fig.subplots_adjust(hspace=0.1)
# remove axes details that don't play well with overlap
g.set_titles("")
g.set(xticks=range(0, xmax + 1, 2))
g.set(yticks=[])
g.set(xlim=[-feature_label_xoffset, xmax + 0.25])
g.despine(bottom=True, left=True)
# xlabel
if xlabel is not None:
g.set(xlabel=xlabel)
xlabel_position = ((xmax / 2) + feature_label_xoffset) / (xmax + feature_label_xoffset)
for ax in g.axes.flat:
ax.set_xlabel(ax.get_xlabel(),
x=xlabel_position,
fontsize=xlabel_fontsize)
ax.tick_params(axis='x', labelsize=xtick_labelsize)
# for ax in g.axes.flat:
# ax.set_xlabel(ax.get_xlabel(), fontsize=xlabel_fontsize)
if figfile is not None:
g.savefig(figfile)
else:
plt.show()
def feature_ridge(data, features, colors=None, rename=None,
xlabel=r'UMI count ($\mathregular{log_2}$)',
ylabel_fontsize=11, xlabel_fontsize=12,
feature_label_xoffset=5, xmax=14, alpha=1.0,
figfile=None):
'''
Ridge plot of feature distributions: plots the distribution of each feature in ``features`` (columns of ``data``) as a separate row.
'''
# input data
data = data.copy()
features = [f for f in features if f in data.columns.values]
melted = data.melt(value_vars=features, var_name='feature')
# rename
if rename is None:
rename = {}
else:
if not any([k in data.columns.values for k in rename.keys()]):
rename = {v: k for k, v in rename.items()}
# colors
if colors is None:
n_colors = len(features)
colors = list(sns.color_palette(n_colors=n_colors))
cdict = {h: c for h, c in zip(features, colors)}
elif isinstance(colors, (list, tuple, np.ndarray, pd.core.series.Series)):
cdict = {h: c for h, c in zip(features, colors)}
else:
cdict = colors
colors = [cdict[f] for f in features]
# plot
g = sns.FacetGrid(melted, row='feature', hue='feature',
aspect=7.5, height=0.75, palette=colors,
row_order=features, hue_order=features)
g.map(sns.kdeplot, 'value', clip_on=False, shade=True, alpha=alpha, lw=1.5)
g.map(sns.kdeplot, 'value', clip_on=False, color="w", lw=3)
g.map(plt.axhline, y=0, lw=2, clip_on=False)
def label(x, color, label):
ax = plt.gca()
ax.text(0, .2, label, fontweight="bold", color=color,
fontsize=ylabel_fontsize,
ha="left", va="center", transform=ax.transAxes)
g.map(label, 'feature')
# set the subplots to overlap
g.fig.subplots_adjust(hspace=0.1)
# remove axes details that don't play well with overlap
g.set_titles("")
g.set(xticks=range(0, xmax + 1, 2))
g.set(yticks=[])
g.set(xlim=[-feature_label_xoffset, xmax + 0.25])
g.despine(bottom=True, left=True)
# xlabel
g.set(xlabel=xlabel)
xlabel_position = ((xmax / 2) + feature_label_xoffset) / (xmax + feature_label_xoffset)
for ax in g.axes.flat:
ax.set_xlabel(ax.get_xlabel(),
x=xlabel_position,
fontsize=xlabel_fontsize)
if figfile is not None:
g.savefig(figfile)
else:
plt.show()
# ===========================
# VDJ
# ===========================
def germline_use_barplot(adata, gene_names=None, chain='heavy',
germline_key='v_gene', batch_key=None, batch_names=None,
palette=None, color=None, germline_colors=None,
pairs_only=False, normalize=False,
plot_kwargs=None, legend_kwargs=None, hide_legend=False,
ylabel=None, ylabel_fontsize=16,
xtick_labelsize=14, ytick_labelsize=14, xtick_labelrotation=90,
show=False, figsize=None, figfile=None):
'''
Produces a bar plot of germline gene usage. For datasets containing multiple batches, a stacked
bar plot can optionally be generated.
Args:
-----
adata (anndata.AnnData): An ``AnnData`` object containing the input data. ``adata`` must have
``adata.obs.vdj`` populated with annotated VDJ information. Required.
gene_names (iterable): A list of germline gene names to be plotted. If not provided, all
germline genes found in the dataset will be shown.
chain (str): Chain for which germline gene usage will be plotted. Options are ``'heavy'``,
``'light'``, ``'kappa'`` and ``'lambda'``. Default is ``'heavy'``.
germline_key (str): Field (found in ``vdj.heavy`` or ``vdj.light``) containing the germline
gene to be plotted. Default is ``'v_gene'``, which plots Variable (V) gene use.
batch_key (str): Field (found in ``adata.obs``) containing batch names. If provided, batches
will be plotted as stacked bars, one per batch. If not provided, all of the input data is
assumed to be from a single batch and a standard bar plot is generated.
batch_names (iterable): List of batch names to be plotted. Useful when only a subset of the
batches found in ``adata.obs.batch_key`` are to be plotted or when the desired order of batches
is something other than the order produced by ``natsort.natsorted()``. Default is ``None``,
which results in all batches being plotted in ``natsort.natsorted()`` order.
palette (iterable): List of batch colors. If none of ``palette``, ``color`` or ``germline_colors``
are provided, bars are colored by the germline gene.
color (str): Single color to be used for all bars in the plot. If none of ``palette``, ``color``
or ``germline_colors`` are provided, bars are colored by the germline gene. If provided in
combination with ``germline_colors``, ``color`` will be used as the default color for genes
not found in ``germline_colors``.
germline_colors (dict): Dictionary mapping germline genes to colors. Particularly useful when
highlighting one or more germline genes is desired. Germline genes not found as keys in
``germline_colors`` will be colored using ``color``.
pairs_only (bool): If ``True``, only sequences for which a heavy/light pair is present will be
plotted. Default is ``False``, which plots all sequences of the desired ``chain``.
normalize (bool): If ``True``, normalized frequencies are plotted. Note that normalization is
performed separately for each batch, so the total frequency may exceed ``1.0``. Default is
``False``, which plots sequence counts.
plot_kwargs (dict): Dictionary containing keyword arguments that will be passed to ``pyplot.bar()``.
legend_kwargs (dict): Dictionary containing keyword arguments that will be passed to ``ax.legend()``.
hide_legend (bool): By default, a plot legend will be shown if multiple batches are plotted. If
``True``, the legend will not be shown. Default is ``False``.
ylabel (str): Text for the Y-axis label.
ylabel_fontsize (float): Fontsize for the Y-axis label text. Default is ``16``.
xtick_labelsize (float): Fontsize for the X-axis tick labels. Default is ``14``.
ytick_labelsize (float): Fontsize for the Y-axis tick labels. Default is ``14``.
xtick_labelrotation (float): Rotation of the X-axis tick labels. Default is ``90``.
show (bool): If ``True``, plot is shown and the plot ``Axes`` object is not returned. Default
is ``False``, which does not call ``pyplot.show()`` and returns the ``Axes`` object.
figsize (list): List containing the figure size (as ``[x-dimension, y-dimension]``) in inches.
If not provided, the figure size will be determined based on the number of germline genes
found in the data.
figfile (str): Path at which to save the figure file. If not provided, the figure is not saved
and is either shown (if ``show`` is ``True``) or the ``Axes`` object is returned.
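Example (illustrative only; assumes ``adata`` is an ``AnnData`` object with ``adata.obs.vdj`` populated
and a ``'batch'`` column in ``adata.obs``)::
ax = germline_use_barplot(adata, chain='heavy', batch_key='batch', normalize=True)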
'''
# split input into batches
if batch_key is not None:
batch_names = batch_names if batch_names is not None else natsorted(adata.obs[batch_key].unique())
batches = [adata[adata.obs[batch_key] == batch] for batch in batch_names]
else:
batch_names = [None, ]
batches = [adata, ]
# process batches
batch_data = []
all_gene_names = []
for batch in batches:
vdjs = batch.obs.vdj
if pairs_only:
vdjs = [v for v in vdjs if v.is_pair]
# parse sequences
if chain == 'heavy':
seqs = [v.heavy for v in vdjs if v.heavy is not None]
elif chain == 'light':
seqs = [v.light for v in vdjs if v.light is not None]
elif chain == 'kappa':
lights = [v.light for v in vdjs if v.light is not None]
seqs = [l for l in lights if l['chain'] == 'kappa']
elif chain == 'lambda':
lights = [v.light for v in vdjs if v.light is not None]
seqs = [l for l in lights if l['chain'] == 'lambda']
# retrieve germline genes
klist = germline_key.split('.')
germ_counts = Counter([nested_dict_lookup(s, klist) for s in seqs])
if normalize:
total = sum(germ_counts.values())
germ_counts = {k: v / total for k, v in germ_counts.items()}
batch_data.append(germ_counts)
for gname in germ_counts.keys():
if gname not in all_gene_names:
all_gene_names.append(gname)
gene_names = gene_names if gene_names is not None else natsorted(all_gene_names)
# colors
if palette is not None:
colors = [[p] * len(gene_names) for _, p in itertools.zip_longest(batches, palette)]
elif germline_colors is not None:
default_color = color if color is not None else '#D3D3D3'
germ_color_list = [germline_colors.get(g, default_color) for g in gene_names]
colors = [germ_color_list] * len(batches)
elif color is not None:
colors = [[color] * len(gene_names) for _ in batches]
else:
fams = natsorted(set([g.split('-')[0] for g in gene_names]))
germ_color_dict = {f: c for f, c in zip(fams, sns.hls_palette(len(fams)))}
germ_color_list = [germ_color_dict[g.split('-')[0]] for g in gene_names]
colors = [germ_color_list] * len(batches)
# plot kwargs
default_plot_kwargs = {'width': 0.8, 'linewidth': 1.5, 'edgecolor':'w'}
if plot_kwargs is not None:
default_plot_kwargs.update(plot_kwargs)
plot_kwargs = default_plot_kwargs
# legend kwargs
default_legend_kwargs = {'frameon': True, 'loc': 'best', 'fontsize':12}
if legend_kwargs is not None:
default_legend_kwargs.update(legend_kwargs)
legend_kwargs = default_legend_kwargs
# make the plot
if figsize is None:
figsize = [len(gene_names) / 3, 4]
plt.figure(figsize=figsize)
bottom = np.zeros(len(gene_names))
for n, d, c in zip(batch_names, batch_data, colors):
ys = np.asarray([d.get(g, 0) for g in gene_names])
plt.bar(gene_names, ys, bottom=bottom, color=c, label=n, **plot_kwargs)
bottom += ys
# style the plot
ax = plt.gca()
if ylabel is None:
ylabel = 'Frequency (%)' if normalize else 'Sequence count'
ax.set_ylabel(ylabel, fontsize=ylabel_fontsize)
ax.tick_params(axis='x', labelsize=xtick_labelsize, labelrotation=xtick_labelrotation)
ax.tick_params(axis='y', labelsize=ytick_labelsize)
for s in ['left', 'right', 'top']:
ax.spines[s].set_visible(False)
ax.set_xlim([-0.75, len(gene_names) - 0.25])
# legend
if len(batches) > 1 and not hide_legend:
ax.legend(**legend_kwargs)
if (hide_legend or palette is None) and ax.get_legend() is not None:
ax.get_legend().remove()
# save, show or return the ax
if figfile is not None:
plt.tight_layout()
plt.savefig(figfile)
elif show:
plt.show()
else:
return ax
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from downward import suites
import common_setup
REVS = ["issue546-base", "issue546-v1"]
LIMITS = {"search_time": 1800}
SUITE = suites.suite_optimal_with_ipc11()
CONFIGS = {
"seq_opt_fdss_1": ["--alias", "seq-opt-fdss-1"],
"seq_opt_fdss_2": ["--alias", "seq-opt-fdss-2"],
}
exp = common_setup.IssueExperiment(
search_revisions=REVS,
configs=CONFIGS,
suite=SUITE,
limits=LIMITS,
)
exp.add_comparison_table_step(
attributes=common_setup.IssueExperiment.PORTFOLIO_ATTRIBUTES)
exp()
|
from __future__ import absolute_import
from __future__ import unicode_literals
from datetime import datetime
from corehq.toggles import EMG_AND_REC_SMS_HANDLERS, NAMESPACE_DOMAIN
from custom.ilsgateway.tanzania.reminders import REC_HELP, REC_CONFIRMATION, REC_ERROR, INVALID_PRODUCT_CODE
from custom.ilsgateway.tests.handlers.utils import ILSTestScript, TEST_DOMAIN
from custom.zipline.api import ProductQuantity
from custom.zipline.models import EmergencyOrder, update_product_quantity_json_field, EmergencyOrderStatusUpdate
import six
class ReceiptTest(ILSTestScript):
@classmethod
def setUpClass(cls):
super(ReceiptTest, cls).setUpClass()
EMG_AND_REC_SMS_HANDLERS.set('ils-test-domain', True, namespace=NAMESPACE_DOMAIN)
cls.order = EmergencyOrder(
domain=TEST_DOMAIN,
requesting_user_id=cls.user1.get_id,
requesting_phone_number='5551234',
location=cls.user1.sql_location,
location_code=cls.user1.sql_location.site_code,
timestamp=datetime.utcnow()
)
update_product_quantity_json_field(
cls.order.products_requested, [ProductQuantity('dp', 100), ProductQuantity('fs', 50)]
)
cls.order.save()
@classmethod
def tearDownClass(cls):
EmergencyOrder.objects.update(confirmed_status=None)
EmergencyOrderStatusUpdate.objects.all().delete()
EmergencyOrder.objects.all().delete()
super(ReceiptTest, cls).tearDownClass()
def test_help(self):
script = """
5551234 > rec
5551234 < {}
""".format(six.text_type(REC_HELP))
self.run_script(script)
def test_valid_message(self):
script = """
5551234 > rec dp 100 fs 50
5551234 < {}
""".format(six.text_type(REC_CONFIRMATION))
self.run_script(script)
order = EmergencyOrder.objects.get(pk=self.order.pk)
self.assertDictEqual(
order.confirmed_status.products,
{'dp': {'quantity': '100'}, 'fs': {'quantity': '50'}}
)
def test_invalid_quantity(self):
script = """
5551234 > rec dp quantity fs 50
5551234 < {}
""".format(six.text_type(REC_ERROR))
self.run_script(script)
def test_incomplete_message(self):
script = """
5551234 > rec dp fs 50
5551234 < {}
""".format(six.text_type(REC_ERROR))
self.run_script(script)
def test_invalid_product_code(self):
script = """
5551234 > rec invalid_code 40 fs 50
5551234 < {}
""".format(six.text_type(INVALID_PRODUCT_CODE % {'product_code': 'invalid_code'}))
self.run_script(script)
|
"""
TODO:
1) Move final data to a real object; needs better and named structure
2) Give all data on return; fix connections across project
3) Move plotting to a separate optional place
"""
import json
import statistics as s
from pathlib import Path
from copy import deepcopy
from datetime import datetime, timezone, timedelta
from collections import defaultdict
import pandas as pd
from settings import JSON_PRIVATE_DIR, CORE_DIR
from finalization.averaging import get_final_average_two_sample_data, get_final_single_sample_data
from IO.db import DBConnection, OldData
from processing.constants import DETECTION_LIMITS, EBAS_REPORTING_COMPOUNDS, PROPOSED_AUTOMATIC_DETECTION_LIMITS
from finalization.constants import (MEDIAN_10_COMPOUNDS, MEDIAN_25_COMPOUNDS, SEASONAL_CYCLE_COMPOUNDS, NONE)
from plotting import MixingRatioPlot
FINAL_FILTERS_DIR = JSON_PRIVATE_DIR / 'filters/final_manual_filtering'
EBAS_REPORTING_COMPOUNDS_SET = frozenset(EBAS_REPORTING_COMPOUNDS)
def jsonify_data(data, rel_dir):
"""
Create json files that can be used with DataSelector to filter any bad averages, etc.
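Files are written into ``rel_dir``, one ``<compound>_filtered.json`` per compound, each containing a list of
records shaped like ``{"date": <epoch timestamp>, "mr": <mixing ratio>}`` (illustrative shape, not real values).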
:param dict data: data to be jsonified, formatted: {compound: [dates, mrs], ...}
:return:
"""
for compound in data.keys():
data_for_json = []
for date, mr in zip(*data[compound]):
if mr is not None:
date = date.replace(tzinfo=timezone(timedelta(hours=1))).timestamp()
compound_obj = {'date': date, 'mr': mr} # report time as epoch UTC
data_for_json.append(compound_obj)
# print(r.date, datetime.fromtimestamp(date, tz=timezone(timedelta(hours=1)))) # shows conversion works
file = Path(rel_dir) / f'{compound}_filtered.json'
with file.open('w') as f:
f.write(json.dumps(data_for_json))
def print_stats_on_ratios_by_compound(ratios):
"""
Print statistics on ratios between two-sample data so they can be used in reporting the data.
:param ratios:
:return:
"""
for name, data in ratios.items():
print(f'{name}: {data}')
def join_2018_and_newer_data():
single_sample_data = get_final_single_sample_data(EBAS_REPORTING_COMPOUNDS)
two_sample_data = get_final_average_two_sample_data(datetime(2018, 12, 20),
datetime(2021, 3, 1),
EBAS_REPORTING_COMPOUNDS)
joined_new_data = {}
for compound in EBAS_REPORTING_COMPOUNDS:
single_samples = single_sample_data.get(compound)
two_samples = two_sample_data.get(compound)
joined_new_data[compound] = (single_samples[0] + two_samples[0], single_samples[1] + two_samples[1])
# final data is now a dict of compound: (dates, mrs) for every compound
return joined_new_data
def prepend_historic_data(new_final_data):
"""
Add data from prior to 2018 (more or less, 2013 - 2017 data), which are stored as "OldData" objects in the database.
:param new_final_data:
:return:
"""
all_final_data = {}
with DBConnection() as session:
# connect to db and grab old data from previous project
for compound in EBAS_REPORTING_COMPOUNDS:
old_results = (session.query(OldData.date, OldData.mr)
.filter(OldData.name == compound)
.filter(OldData.filtered == False)
.order_by(OldData.date)
.all())
dates = [o.date for o in old_results]
mrs = [o.mr for o in old_results]
# prepend older dates and mrs
all_final_data[compound] = [dates + new_final_data[compound][0], mrs + new_final_data[compound][1]]
return all_final_data
def fork_and_filter_with_moving_median(final_data, plot=False):
"""
Accepts near-final data, and filters based on a moving median or stdev, and excludes values according to their
deviation from the moving median by some fixed or supplied percentage.
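For example (illustrative numbers only): for a compound in MEDIAN_25_COMPOUNDS whose median over a
+/-28-day window is 100, a value of 120 stays in the clean data (within +/-25% of the median), while a
value of 130 is moved to the flagged data.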
:param final_data:
:return:
"""
final_flagged_data = deepcopy(final_data) # create an entirely separate copy to hold flagged-only data
final_clean_data = deepcopy(final_data) # create an entirely separate copy for clean data only
for compound in EBAS_REPORTING_COMPOUNDS:
final_data[compound].append([None] * len(final_data[compound][0])) # add a new list which will hold the median values
stdev_all = s.stdev([d for d in final_data[compound][1] if d is not None])
for index, (date, mr) in enumerate(zip(final_data[compound][0], final_data[compound][1])):
if date is None:
continue
if compound in SEASONAL_CYCLE_COMPOUNDS:
days = 14
else:
days = 28
date_start = date - timedelta(days=days)
date_end = date + timedelta(days=days)
# this will be slow! It's a linear all-points check every time, but guarantees we get it right
# some iterator magic would be faster but riskier without substantial testing
cleaned_points = [
point for date, point in zip(final_data[compound][0], final_data[compound][1])
if date_start <= date < date_end and point is not None
]
median = None if not cleaned_points else s.median(cleaned_points)
if mr is not None:
if compound in SEASONAL_CYCLE_COMPOUNDS and stdev_all is not None:
if median - (stdev_all * 2) <= mr < median + (stdev_all * 2):
# data is consistent with median; remove it from the flagged data
final_flagged_data[compound][1][index] = None
else:
# data is outside the bounds; remove from clean data
final_clean_data[compound][1][index] = None
elif compound in MEDIAN_10_COMPOUNDS and median is not None:
if median * .9 <= mr < median * 1.1:
# data is consistent with median; remove it from the flagged data
final_flagged_data[compound][1][index] = None
else:
# data is outside the bounds; remove from clean data
final_clean_data[compound][1][index] = None
elif compound in MEDIAN_25_COMPOUNDS and median is not None:
if median * .75 <= mr < median * 1.25:
# data is consistent with median; remove it from the flagged data
final_flagged_data[compound][1][index] = None
else:
# data is outside the bounds; remove from clean data
final_clean_data[compound][1][index] = None
else: # is in group NONE or some other non-filtered list
final_flagged_data[compound][1][index] = None
if plot:
if compound in SEASONAL_CYCLE_COMPOUNDS:
flag_policy = 'stdev'
elif compound in MEDIAN_10_COMPOUNDS:
flag_policy = 'median 10%'
elif compound in MEDIAN_25_COMPOUNDS:
flag_policy = 'median 25%'
elif compound in NONE:
flag_policy = 'no flag'
else:
flag_policy = 'none given' # just in case
MixingRatioPlot(
{
f'{compound} (clean)': (final_clean_data[compound][0], final_clean_data[compound][1]),
f'{compound} ({flag_policy})': (final_flagged_data[compound][0], final_flagged_data[compound][1])
},
title=f'{compound} Mixing Ratios',
limits={'left': datetime(2013, 1, 1), 'right': datetime(2021, 3, 1)},
show=False,
save=True,
filepath=Path(
CORE_DIR /
f'finalization/scratch_plots/flagged_data_comparisons/{compound}_flagged_mrs.png'
)
).plot()
clean = [v for v in final_clean_data[compound][1] if v is not None]
flagged = [v for v in final_flagged_data[compound][1] if v is not None]
clean_max = max(clean) if clean else 0
flagged_max = max(flagged) if flagged else 0
top_limit = max((clean_max, flagged_max))
MixingRatioPlot(
{
f'{compound} (clean)': (final_clean_data[compound][0], final_clean_data[compound][1]),
f'{compound} ({flag_policy})': (final_flagged_data[compound][0], final_flagged_data[compound][1])
},
title=f'{compound} Mixing Ratios',
limits={'left': datetime(2013, 1, 1), 'right': datetime(2021, 3, 1), 'bottom': 0, 'top': top_limit * 1.25},
show=False,
save=True,
filepath=Path(
CORE_DIR /
f'finalization/scratch_plots/flagged_data_comparisons_zeroed/{compound}_flagged_mrs_zero.png'
)
).plot()
# after plotting; strip all mr-as-None entries from flagged data
final_flagged_data = {
c: [
[date for date, mr in zip(final_flagged_data[c][0], final_flagged_data[c][1]) if mr is not None],
[mr for mr in final_flagged_data[c][1] if mr is not None]
]
for c in final_flagged_data.keys()
}
# print(f'FINAL FLAGGED DATA:')
# print(final_flagged_data)
return final_clean_data, final_flagged_data
def filter_all_final_data(final_data):
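"""
Apply the manual JSON filter files in FINAL_FILTERS_DIR to the near-final data, replace zero values with
half of the proposed detection limit, write per-compound below-detection-limit statistics to
'detection_limits_calculated.csv', then fork the data into clean and flagged sets using
fork_and_filter_with_moving_median().
:param final_data: final data as {compound: [dates, mrs], ...}
:return: tuple[dict, dict]: (final_clean_data, final_flagged_data)
"""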
filter_data = defaultdict(list)
for filter_file in FINAL_FILTERS_DIR.iterdir():
if filter_file.suffix == '.json':
filters = json.loads(filter_file.read_text())
for date, compounds in filters.items():
filter_data[datetime.strptime(date, '%Y-%m-%d %H:%M')].extend(compounds)
for date, compounds in filter_data.items():
for compound in compounds:
if compound not in EBAS_REPORTING_COMPOUNDS:
continue # ignore any filters that don't apply to final data (eg SF6)
try:
if __name__ == '__main__': # only write the filter output if run directly in module
print(
f'Filtering {compound} for {date}, which was '
+ f'{final_data[compound][1][final_data[compound][0].index(date)]}'
)
final_data[compound][1][final_data[compound][0].index(date)] = None
except ValueError:
# if the date isn't found in the list, we can't/won't bother to filter it
# this can happen if something was wholesale-filtered beforehand and no longer appears in the data,
# even though a manual filter created specifically while finalizing still references it; it's perfectly okay
continue
non_null_data = defaultdict(int)
detection_limit_occurences = defaultdict(int)
for compound in EBAS_REPORTING_COMPOUNDS:
# iterate explicitly over indices instead of the list! Python Rule #1: Don't iterate over and modify
for index in range(len(final_data[compound][1])):
# if the mr is below the detection limit, set to half the limit
if final_data[compound][1][index] is not None:
# track a per-compound number of valid data points
non_null_data[compound] += 1
# proposed detection limit testing; see how many of each compound are below dl
if final_data[compound][1][index] < PROPOSED_AUTOMATIC_DETECTION_LIMITS.get(compound, 0):
detection_limit_occurences[compound] += 1
# if something is set to 0, set it to half the detection limit
if final_data[compound][1][index] == 0:
final_data[compound][1][index] = PROPOSED_AUTOMATIC_DETECTION_LIMITS.get(compound, 0) / 2
with open('detection_limits_calculated.csv', 'w') as f:
f.write('compound\tdetection_limit\tpercent below DL\n')
for compound in EBAS_REPORTING_COMPOUNDS:
print(
f'{compound} had {detection_limit_occurences[compound]} below detection limit values out of '
+ f'{non_null_data[compound]}, or {detection_limit_occurences[compound] / non_null_data[compound]:.2%}'
)
f.write(
f'{compound}\t{PROPOSED_AUTOMATIC_DETECTION_LIMITS[compound]:.3f}\t{detection_limit_occurences[compound] / non_null_data[compound]:.2%}\n'
)
# filter data and plot it if this script is being run directly, ie __name__ == '__main__'
final_clean_data, final_flagged_data = fork_and_filter_with_moving_median(final_data, plot=__name__ == '__main__')
return final_clean_data, final_flagged_data
def get_all_final_data_as_dicts():
"""
Sequentially build the final data by joining the single sample data to averaged two sample data, adding in the old
historic data, then applying all manual filter files and applying detection limits after.
:return: tuple[dict, dict]: (final_data, final_filtered_data)
"""
return filter_all_final_data(prepend_historic_data(join_2018_and_newer_data()))
def rejoin_all_final_data(final_data, final_filtered_data):
"""
Takes two dictionaries of final data, one clean and the other filtered, and rejoins them, but with a boolean flag
to denote those that have been flagged.
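The returned dictionary is keyed by compound, with ``(dates, mrs, flags)`` values in which ``flags`` is
``True`` for filtered points, e.g. ``{'comp': ([d1, d2], [1.2, 3.4], [False, True])}`` (illustrative values).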
:param dict final_data: final data as {'comp': (dates, mrs), 'comp2': (dates, mrs)}
:param dict final_filtered_data: final data that's been filtered as {'comp': (dates, mrs), 'comp2': (dates, mrs)}
:return:
"""
final_joined_data = {}
for compound, (dates, mrs) in final_data.items():
filtered_dates, filtered_mrs = final_filtered_data.get(compound, ([], []))
# add False flags to all clean data
dates, mrs, flags = dates, mrs, [False] * len(dates)
# add True flags to all filtered data
filtered_dates, filtered_mrs, filtered_flags = filtered_dates, filtered_mrs, [True] * len(filtered_dates)
# join with simple concatenation
joined_dates, joined_mrs, joined_flags = dates + filtered_dates, mrs + filtered_mrs, flags + filtered_flags
joined_dates, joined_mrs, joined_flags = [
list(tple) for tple in zip(*sorted(zip(joined_dates, joined_mrs, joined_flags), key=lambda x: x[0]))
]
final_joined_data[compound] = (joined_dates, joined_mrs, joined_flags)
return final_joined_data
def final_data_to_df(data):
"""
Takes in data and returns a Pandas DataFrame.
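The resulting frame is indexed by date and has two columns per compound, named f'{compound}_mr' and
f'{compound}_flag' (e.g. 'CFC-11_mr' and 'CFC-11_flag' for an illustrative compound name).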
:param dict data: comes in as {'compound_name': (dates, mrs, flags), ...} where dates, mrs and flags are iterables of equal length
:return:
"""
all_data = defaultdict(dict)
# unpack all compounds, either creating an entry for date (defaultdict behavior), or adding f'{compound}_mr' and
# f'{compound}_flag' entries to each date
for compound, (dates, mrs, flags) in data.items():
for date, mr, flag in zip(dates, mrs, flags):
all_data[date][f'{compound}_mr'] = mr
all_data[date][f'{compound}_flag'] = flag
# create a df, using the index (dates) as index/rows
final_df = pd.DataFrame.from_dict(all_data, orient='index')
return final_df
def main():
final_data, final_filtered_data = get_all_final_data_as_dicts()
final_joined_data = rejoin_all_final_data(final_data, final_filtered_data)
df = final_data_to_df(final_joined_data).sort_index()
df.to_csv(f'final_data_{datetime.now().strftime("%Y_%m_%d")}.csv', float_format='%.3f')
if __name__ == '__main__':
main()
|
import sys
from typing import List
sys.setrecursionlimit(10 ** 5)
def partition(array: List, start: int, end: int) -> int:
"""
Helper function for quick_sort
Partitions array around a pivot
such that elements to the right of the pivot are >= pivot,
elements to the left of the pivot are < pivot,
and the pivot is in its correct position,
and returns the index of the pivot in the partitioned array
>>> array = [4,1,5,6,3,5,2]
>>> p = partition(array,0,6)
>>> p
3
"""
pivot = array[start] # pivot element to partition the array around
i = start + 1 # pointer to keep track of partition elements
for j in range(i, end + 1):
"""
loop that runs through all elements in the sub array
and partitions around the pivot
"""
if array[j] < pivot:
array[j], array[i] = array[i], array[j]
i += 1
"""
Swapping pivot so that it ends up in its right place
"""
array[start], array[i - 1] = array[i - 1], array[start]
return i - 1
def quick_sort(array: List, start: int = 0, end: int = None) -> List:
"""
function that takes in a list as input
and returns the sorted list
>>> array = [4 , 1, 6, 5, 3, 2, 5]
>>> sorted_array = quick_sort(array)
>>> sorted_array
[1, 2, 3, 4, 5, 5, 6]
"""
if end is None:
"""
Overriding default pointer to end of original array
"""
end = len(array) - 1
if len(array) <= 1:
return array
elif start >= end:
return array
else:
pivot_index = partition(array, start, end) # partition array around a pivot
array = quick_sort(
array, start, pivot_index - 1
) # run quicksort on left subarray on elements < pivot
array = quick_sort(
array, pivot_index + 1, end
) # run quicksort on right subarray on elements >= pivot
return array
if __name__ == "__main__":
import doctest
doctest.testmod()
|
# coding: utf-8
import socketserver
import os
# Copyright 2013 Abram Hindle, Eddie Antonio Santos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Furthermore it is derived from the Python documentation examples thus
# some of the code is Copyright © 2001-2013 Python Software
# Foundation; All Rights Reserved
#
# http://docs.python.org/2/library/socketserver.html
#
# run: python freetests.py
# try: curl -v -X GET http://127.0.0.1:8080/
class MyWebServer(socketserver.BaseRequestHandler):
def handle(self):
#receive the requests from client
self.status = 200 #self.status is used to check the status code and control the status_200 function
self.data = self.request.recv(1024).strip()
print ("Got a request of: %s\r\n" % self.data)
print(self.data.decode("utf-8"))
request_type = self.get_request_method(self.data.decode('utf-8'))
file_location = self.get_file_location(self.data.decode('utf-8'))
file_content = self.check_file_content(file_location)
print(request_type)
print("\r\n")
print(file_location)
self.status_405(request_type)
self.status_404(file_location)
self.status_301(file_location)
self.status_200(file_location, file_content)
def get_request_method(self, data):
"""
Find what type of Request we are getting
"""
#return the string of request_type
return str(data).split(' ')[0]
def get_file_location(self, data):
"""
This function returns the location where the requested file lies
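e.g. 'GET /foo/ HTTP/1.1' -> './www/foo/index.html' (illustrative request)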
"""
file_location = './www' + str(data).split(' ')[1]
if file_location[-1] == '/':
file_location += 'index.html'
return file_location
def check_file_content(self, file_location):
"""
This function returns the type of current file
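e.g. './www/base.css' -> 'Content-Type:text/css' (illustrative path); files that are neither html nor
css fall back to 'Content-Type:text/plain'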
"""
file_types = ['html', 'css']
for type in file_types:
if type in file_location:
return 'Content-Type:text/' + type
return 'Content-Type:text/plain'
# 200: OK
def status_200(self, file_location, file_content):
if self.status == 200:
self.request.sendall(bytearray("HTTP/1.1 200 OK\r\n",'utf-8'))
self.request.sendall(bytearray(file_content + '\r\n' + '\r\n\r\n', 'utf-8'))
self.request.sendall(bytearray(open(file_location, 'r').read() + '\r\n', 'utf-8'))
return
# 301: Moved Permanently
def status_301(self, URL):
"""
This function checks URLs to see if there is a match
"""
if "./www/deep" == URL:
print("redirected to: " + URL + " :Sending 301 status code\r\n")
self.request.sendall(bytearray('HTTP/1.1 301 Moved Permanently\r\n','utf-8'))
self.request.sendall(bytearray('Correct location: /deep/\r\n', 'utf-8'))
self.status = 301
return
# 404: Not Found
def status_404(self, URL):
"""
This function checks for a 404 error
(i.e. the requested path does not exist)
"""
if not os.path.exists(URL):
print("Non-existent path: " + URL + " :Sending 404 status code\r\n")
self.request.sendall(bytearray('HTTP/1.1 404 Not Found\r\n', 'utf-8'))
self.status = 404
return
if '../' in URL:
print("Non-existent path: " + URL + " :Sending 404 status code\r\n")
self.request.sendall(bytearray('HTTP/1.1 404 Not Found\r\n', 'utf-8'))
self.status = 404
return
# 405: Method Not Allowed
def status_405(self, type):
"""
VALID HANDLE: GET
INVALID HANDLE: POST/PUT/DELETE return "405 Method Not Allowed"
"""
if not type == 'GET':
print("Invalid request: " + type + ":Sending 405 status code\r\n")
self.request.sendall(bytearray('HTTP/1.1 405 Method Not Allowed\r\n', 'utf-8'))
self.status = 405
return
if __name__ == "__main__":
HOST, PORT = "localhost", 8080
socketserver.TCPServer.allow_reuse_address = True
# Create the server, binding to localhost on port 8080
server = socketserver.TCPServer((HOST, PORT), MyWebServer)
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
server.serve_forever()
|
from ftis.corpus import Corpus
from ftis.world import World
import argparse
parser = argparse.ArgumentParser(description="Process input and output location")
parser.add_argument(
"-i",
"--input",
default="~/corpus-folder/corpus1",
type=str,
help="Folder for input. This should contain some audio files.",
)
parser.add_argument(
"-o",
"--output",
default="~/corpus-folder/corpus-management",
type=str,
help="Folder for output. This will be made if it doesn't exist.",
)
args = parser.parse_args()
"""
Corpus objects come bundled with some filtering processes to remove items that you don't want or need.
This example demonstrates filtering a corpus so only the top ten percent of samples by loudness are let through.
"""
# Corpora can be pre-processed to remove files that match certain patterns or constraints
# In this case I am taking the top 10% of files by EBU R 128 loudness and filtering the corpus to that selection
corpus = Corpus(args.input)
print(f"Corpus began with {len(corpus.items)} items")
corpus.loudness(min_loudness=90)
print(f"Corpus filtered to {len(corpus.items)} items")
# You can also use a more declarative syntax like so:
# new_corpus = (
# Corpus("~/corpus-folder/corpus1")
# .loudness(max_loudness=10) #filter to bottom 10%
# )
# This becomes more clear when you use multiple filters
|
import os
import pytz
settings = {}
settings["TIMEZONE"] = pytz.timezone("America/Chicago")
settings["FPS"] = 29.969664
settings["FRAME_INTERVAL"] = "0.03336707S"
settings["starttime_file"] = "../../dataset/Sense2StopSync/start_time.csv"
settings["starttime_train_file"] = "../../dataset/Sense2StopSync/start_time_train.csv"
settings["starttime_test_file"] = "../../dataset/Sense2StopSync/start_time_test.csv"
settings["starttime_val_file"] = "../../dataset/Sense2StopSync/start_time_val.csv"
RAW_DIR = os.path.join(os.path.dirname(__file__), "../../dataset/Sense2StopSync/")
settings["RAW_DIR"] = RAW_DIR
settings["sensor_path"] = os.path.join(RAW_DIR, "SENSOR/")
settings["reliability_resample_path"] = os.path.join(RAW_DIR, "RESAMPLE/")
settings["flow_path"] = os.path.join(RAW_DIR, "flow_pwc/")
TEMP_DIR = os.path.join(os.path.dirname(__file__), "../../data/Sense2StopSync")
settings["TEMP_DIR"] = TEMP_DIR
settings["vid_feat_path"] = os.path.join(TEMP_DIR, "vid_feat")
settings["qualified_window_num"] = 10
settings["window_size_sec"] = 10
settings["window_criterion"] = 0.8
settings["stride_sec"] = 1
settings["video_max_len"] = (17*60+43)*1000
settings["val_set_ratio"] = 0.2
settings["sample_counts"] = 7
|
"""
Tests for Docking
"""
from __future__ import division
from __future__ import unicode_literals
__author__ = "Bharath Ramsundar"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "MIT"
import unittest
import os
from nose.plugins.attrib import attr
from nose.tools import nottest
import sys
import deepchem as dc
class TestDocking(unittest.TestCase):
"""
Does sanity checks on pose generation.
"""
@nottest
def test_vina_grid_rf_docker_init(self):
"""Test that VinaGridRFDocker can be initialized."""
if sys.version_info >= (3, 0):
return
docker = dc.dock.VinaGridRFDocker(exhaustiveness=1, detect_pockets=False)
@nottest
def test_pocket_vina_grid_rf_docker_init(self):
"""Test that VinaGridRFDocker w/pockets can be initialized."""
if sys.version_info >= (3, 0):
return
docker = dc.dock.VinaGridRFDocker(exhaustiveness=1, detect_pockets=True)
'''
@attr("slow")
def test_vina_grid_dnn_docker_init(self):
"""Test that VinaGridDNNDocker can be initialized."""
docker = dc.dock.VinaGridDNNDocker(exhaustiveness=1, detect_pockets=False)
def test_pocket_vina_grid_dnn_docker_init(self):
"""Test that VinaGridDNNDocker can be initialized."""
if sys.version_info >= (3, 0):
return
docker = dc.dock.VinaGridDNNDocker(exhaustiveness=1, detect_pockets=True)
'''
@attr("slow")
def test_vina_grid_rf_docker_dock(self):
"""Test that VinaGridRFDocker can dock."""
if sys.version_info >= (3, 0):
return
current_dir = os.path.dirname(os.path.realpath(__file__))
protein_file = os.path.join(current_dir, "1jld_protein.pdb")
ligand_file = os.path.join(current_dir, "1jld_ligand.sdf")
docker = dc.dock.VinaGridRFDocker(exhaustiveness=1, detect_pockets=False)
(score, (protein_docked, ligand_docked)) = docker.dock(
protein_file, ligand_file)
# Check returned files exist
assert score.shape == (1,)
assert os.path.exists(protein_docked)
assert os.path.exists(ligand_docked)
@nottest
def test_vina_grid_rf_docker_specified_pocket(self):
"""Test that VinaGridRFDocker can dock into spec. pocket."""
if sys.version_info >= (3, 0):
return
current_dir = os.path.dirname(os.path.realpath(__file__))
protein_file = os.path.join(current_dir, "1jld_protein.pdb")
ligand_file = os.path.join(current_dir, "1jld_ligand.sdf")
docker = dc.dock.VinaGridRFDocker(exhaustiveness=1, detect_pockets=False)
(score, (protein_docked, ligand_docked)) = docker.dock(
protein_file,
ligand_file,
centroid=(10, 10, 10),
box_dims=(1, 1, 1),
dry_run=True)
# Check returned files exist
assert score.shape == (1,)
@nottest
def test_pocket_vina_grid_rf_docker_dock(self):
"""Test that VinaGridRFDocker can dock."""
if sys.version_info >= (3, 0):
return
current_dir = os.path.dirname(os.path.realpath(__file__))
protein_file = os.path.join(current_dir, "1jld_protein.pdb")
ligand_file = os.path.join(current_dir, "1jld_ligand.sdf")
docker = dc.dock.VinaGridRFDocker(exhaustiveness=1, detect_pockets=True)
(score, (protein_docked, ligand_docked)) = docker.dock(
protein_file, ligand_file, dry_run=True)
# Check returned files exist
if sys.version_info >= (3, 0):
return
assert score.shape == (1,)
'''
@attr("slow")
def test_vina_grid_dnn_docker_dock(self):
"""Test that VinaGridDNNDocker can dock."""
current_dir = os.path.dirname(os.path.realpath(__file__))
protein_file = os.path.join(current_dir, "1jld_protein.pdb")
ligand_file = os.path.join(current_dir, "1jld_ligand.sdf")
docker = dc.dock.VinaGridDNNDocker(exhaustiveness=1, detect_pockets=False)
(score, (protein_docked, ligand_docked)) = docker.dock(
protein_file, ligand_file)
# Check returned files exist
assert score.shape == (1,)
assert os.path.exists(protein_docked)
assert os.path.exists(ligand_docked)
@attr('slow')
def test_pocket_vina_grid_dnn_docker_dock(self):
"""Test that VinaGridDNNDocker can dock."""
if sys.version_info >= (3, 0):
return
current_dir = os.path.dirname(os.path.realpath(__file__))
protein_file = os.path.join(current_dir, "1jld_protein.pdb")
ligand_file = os.path.join(current_dir, "1jld_ligand.sdf")
docker = dc.dock.VinaGridDNNDocker(exhaustiveness=1, detect_pockets=True)
(score, (protein_docked, ligand_docked)) = docker.dock(
protein_file, ligand_file, dry_run=True)
# Check returned files exist
assert score.shape == (1,)
'''
|
# -*- coding:utf-8 -*-
def application(environ,start_response):
start_response('200 OK', [('Content-Type','text/html')])
return [b'<h1>Hello world!</h1>']
|
from pythondjangoapp.settings.base import *
DEBUG = True
INSTALLED_APPS += (
# other apps for local development
)
|
import unittest
from queue import Queue
from threading import Thread
from time import sleep
from satella.coding import Monitor
class MonitorTest(unittest.TestCase):
def test_synchronize_on(self):
class TestedMasterClass(Monitor):
def __init__(self):
self.value = 0
super().__init__()
def get_locking_class(self):
class LockingClass:
@Monitor.synchronize_on(self)
def get_value(self2):
self.value += 1
return LockingClass()
msc = TestedMasterClass()
lc = msc.get_locking_class()
class TesterThread(Thread):
def run(self):
lc.get_value()
with Monitor.acquire(msc):
TesterThread().start()
sleep(0.1)
self.assertEqual(msc.value, 0)
with Monitor.release(msc):
sleep(0.1)
self.assertEqual(msc.value, 1)
def test_release_contextmanager(self):
class TestedClass(Monitor):
def __init__(self, cqueue):
self.cqueue = cqueue
Monitor.__init__(self)
@Monitor.synchronized
def execute(self):
self.cqueue.put(1)
sleep(1)
self.cqueue.get()
class TesterThread(Thread):
def __init__(self, tc):
self.tc = tc
Thread.__init__(self)
def run(self):
self.tc.execute()
cq = Queue()
cq.put(1)
tc = TestedClass(cq)
tt = TesterThread(tc)
with Monitor.acquire(tc):
with Monitor.release(tc):
tt.start()
sleep(0.4)
self.assertEqual(cq.qsize(), 2)
def test_release_contextmanager_syntax(self):
class TestedClass(Monitor):
def __init__(self, cqueue):
self.cqueue = cqueue
Monitor.__init__(self)
@Monitor.synchronized
def execute(self):
self.cqueue.put(1)
sleep(1)
self.cqueue.get()
class TesterThread(Thread):
def __init__(self, tc):
self.tc = tc
Thread.__init__(self)
def run(self):
self.tc.execute()
cq = Queue()
cq.put(1)
tc = TestedClass(cq)
tt = TesterThread(tc)
with tc:
with Monitor.release(tc):
tt.start()
sleep(0.4)
self.assertEqual(cq.qsize(), 2)
def test_acquire_contextmanager(self):
class TestedClass(Monitor):
def __init__(self, cqueue):
self.cqueue = cqueue
Monitor.__init__(self)
@Monitor.synchronized
def execute(self):
self.cqueue.put(1)
sleep(1)
self.cqueue.get()
class TesterThread(Thread):
def __init__(self, tc):
self.tc = tc
Thread.__init__(self)
def run(self):
self.tc.execute()
cq = Queue()
cq.put(1)
tc = TestedClass(cq)
tt = TesterThread(tc)
with Monitor.acquire(tc):
tt.start()
sleep(0.4)
self.assertEqual(cq.qsize(), 1)
def test_monitoring(self):
class TestedClass(Monitor):
def __init__(self, cqueue):
self.cqueue = cqueue
Monitor.__init__(self)
@Monitor.synchronized
def execute(self):
self.cqueue.put(1)
sleep(1)
self.cqueue.get()
class TesterThread(Thread):
def __init__(self, tc):
self.tc = tc
Thread.__init__(self)
def run(self):
self.tc.execute()
q = Queue()
tc = TestedClass(q)
a, b = TesterThread(tc), TesterThread(tc)
a.start(), b.start()
while a.is_alive() or b.is_alive():
sleep(0.1)
self.assertNotEqual(q.qsize(), 2)
def test_monitoring_synchronize_on_attribute(self):
class TestedClass:
def __init__(self, cqueue):
self.cqueue = cqueue
self.monitor = Monitor()
@Monitor.synchronize_on_attribute('monitor')
def execute(self):
self.cqueue.put(1)
sleep(1)
self.cqueue.get()
class TesterThread(Thread):
def __init__(self, tc):
self.tc = tc
Thread.__init__(self)
def run(self):
self.tc.execute()
q = Queue()
tc = TestedClass(q)
a, b = TesterThread(tc), TesterThread(tc)
a.start(), b.start()
while a.is_alive() or b.is_alive():
sleep(0.1)
self.assertNotEqual(q.qsize(), 2)
|
from distutils.util import strtobool
from typing import Dict
from ..common.config import AppConfigReader
from ..common.icons import Icon, Icons
from ..pull_requests import PullRequestSort, PullRequestStatus
class GitlabMrsConstants(object):
MODULE = "gitlab_mrs"
TIMEOUT_MESSAGE = "Timeout while trying to connect to GitLab."
CONNECTION_MESSAGE = "Failed to connect to GitLab."
UNKNOWN_MESSAGE = "An unknown exception occurred while trying to fetch MRs."
NO_RESULTS = "There are no merge requests in GitLab."
class GitlabMrsConfig(object):
_config = AppConfigReader.read(GitlabMrsConstants.MODULE)
GITLAB_HOST = _config["preferences"].get("gitlab_host", "https://gitlab.com")
PRIVATE_TOKEN = _config["preferences"]["private_token"] # no default, crash
SORT_ON = PullRequestSort[_config["preferences"]["sort_on"].upper()]
ABBREVIATION_CHARACTERS = int(_config["preferences"].get("abbreviation_characters", "30"))
OMIT_REVIEWED_AND_APPROVED = strtobool(_config["preferences"].get("omit_reviewed_and_approved", "False"))
NOTIFICATIONS_ENABLED = strtobool(_config["preferences"].get("notifications_enabled", "False"))
GROUP_NAME = _config["preferences"]["group_name"]
EXCLUDE_MRS_WITH_LABELS = set(_config["preferences"].get("exclude_mrs_with_labels", "").split(","))
CACHE_FILE = _config["common"]["cache_path"]
class GitlabMrsIcons(object):
GITLAB_LOGO = Icon(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAOCAYAAAAmL5yKAAAAAXNSR0IArs4c6QAAAi5JREFUKBV1Us9rE1EQnpm3+bFpatMG0tJE6o9YF+JJaIoKRbOlh5YcPBvx7kE8qBQvXvwH/Ac86aV/gBTBm17sWSVNLUYrWg+i2Cab7L43zm6zREIdeDvz3vfN92ZmH8DA9lwn/9l1Hsb7//l2zVnfdy9MxzjFATNVEej+l9p8MT4b9buSSIj3uqwvx9hQgGB13KKcAVWLwVGv2CxlLcojwmqMRQLvrlaywLwSMAMQ1GNw1BNxXQsFGJY/Lp+ZCPFIYMwPFiQodwMDYGDpQ9XJjya3FssnjIaaJxwLcI48+1LIscLPVJXXrJ9MvmaW8qYzB/i0fc35hpG8XCi6dg4K6TEzK3S2FKKeMmvwGjaRN0q26dtb9AkrZh8AFUD/F8HvPQIRiyzsbKJoIDlpgLWUXZBCT3GLUsmLVtBLLxJhRZeE+0cmcQhItuGAELSGSIIIGG0DJpB9RjRKwlF0LvD0FWJDHbnpq7KFe1ZELGCSJhMZFjWpV1ZSYpWQZMFCjspEut8t4gNK3tp+29fosg+bKidSp6VywdPj4biPLJXlo3bmpMNJRPb5FRrtYqP5JhpT+maz2fZ7102fH0l/XSoipGwGknmEKy0V0Kw8sxnoGR8eY7JTx8bO+1A+qmVwUeT8Z+UVAnpCu+j82BJY5lZYYOQytAzyXevG9ot/+YMfNTxKNHZeegpdKJrnWXnx2RlAc5I3PBW4o8nDrGMiGR4drs/f7jw4f0diaeR4+wuHx8azyo51NwAAAABJRU5ErkJggg==")
MERGE_REQUEST = Icons.PULL_REQUEST
PR_STATUSES: Dict[PullRequestStatus, Icon] = {
PullRequestStatus.UNAPPROVED: Icons.GREY_CIRCLE,
PullRequestStatus.NEEDS_WORK: Icons.ORANGE_CIRCLE,
PullRequestStatus.APPROVED: Icons.GREEN_CIRCLE
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, request
import app_05_db
# -- Functions -- #
def shutdown_server():
""" Stop the server """
func = request.environ.get('werkzeug.server.shutdown')
if func is None:
raise RuntimeError('Not running with the Werkzeug Server')
func()
# -- Initialization: prepare the Flask app -- #
app = Flask(__name__)  # main application instance
# -- Routing -- #
@app.route('/status/led', methods=['GET'])
def status_led():
"""Get the LED state"""
running, led_values, button_values = app_05_db.load_app_05_status()
if not running:
return 'not running', 404
return str(led_values[0])
@app.route('/status/button', methods=['GET'])
def status_button():
"""Get the button state"""
running, led_values, button_values = app_05_db.load_app_05_status()
if not running:
return 'not running', 404
return str(button_values[0])
@app.route('/shutdown', methods=['GET'])
def shutdown():
""" Stop the Flask app and app_05
Access to app_05.db is not protected by any locking, so
do not shut down while the button state is being changed.
"""
running, led_values, button_values = app_05_db.load_app_05_status()
running = False
app_05_db.save_app_05_status(running, led_values, button_values)
shutdown_server()
return 'Server shutting down...'
if __name__ == '__main__':  # run only when this file is executed as a script
app.run(host='0.0.0.0')
|
import math
import sys
from PyQt5 import QtGui
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
class DrawPoints(QWidget):
def __init__(self):
super(DrawPoints, self).__init__()
self.init()
def init(self):
self.setGeometry(300, 300, 300, 300)
self.setWindowTitle('draw sin line')
def paintEvent(self, a0: QtGui.QPaintEvent) -> None:
painter = QPainter()
painter.begin(self)
painter.setPen(Qt.red)
size = self.size()
for i in range(1000):
x = 100 * (-1 + 2.0 * i / 1000) + size.width() / 2
y = -50 * math.cos((x - size.width() / 2.0) * math.pi / 50) + size.height() / 2.0
painter.drawPoint(x, y)
painter.end()
if __name__ == '__main__':
app = QApplication(sys.argv)
main = DrawPoints()
main.show()
sys.exit(app.exec_())
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import time
import datetime
from seir.wrapper import MultiPopWrapper
from seir.utils import plot_solution
# read calibration data
actual_hospitalisations = pd.read_excel('data/calibration.xlsx', sheet_name='Hospitalisations')
actual_hospitalisations['Date'] = [pd.to_datetime(x, ).date() for x in actual_hospitalisations['Date']]
# TODO: should check if file is downloaded: if not, download, then use the downloaded file
actual_infections = pd.read_csv(
'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_provincial_cumulative_timeline_confirmed.csv')
actual_infections.rename(columns={'date': 'Date', 'total': 'Cum. Confirmed'}, inplace=True)
actual_infections.index = pd.to_datetime(actual_infections['Date'], dayfirst=True)
actual_infections = actual_infections.resample('D').mean().ffill().reset_index()
actual_infections['Date'] = [pd.to_datetime(x, dayfirst=True).date() for x in actual_infections['Date']]
# TODO: should check if file is downloaded: if not, download, then use the downloaded file
reported_deaths = pd.read_csv(
'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_deaths.csv')
reported_deaths.rename(columns={'date': 'Date'}, inplace=True)
reported_deaths['Date'] = [pd.to_datetime(x, dayfirst=True).date() for x in reported_deaths['Date']]
actual_deaths = reported_deaths.groupby('Date').report_id.count().reset_index()
actual_deaths.rename(columns={'report_id': 'Daily deaths'}, inplace=True)
actual_deaths.index = pd.to_datetime(actual_deaths['Date'])
actual_deaths = actual_deaths.resample('D').mean().fillna(0).reset_index()
actual_deaths['Cum. Deaths'] = np.cumsum(actual_deaths['Daily deaths'])
# variable parameters for front-end
asymptomatic_prop = 0.75 # 0.2-0.8
asymp_rel_infectivity = 0.5 # 0.3 - 1
asymp_prop_imported = 0.0 # 0 - 0.8
r0 = 2.6 # 1.5 - 5.5
lockdown_ratio = 0.6 # 0.25 - 0.8
imported_scale = 2.5 # 0.5 - 2
lockdown_period = 35 # 35, 42, 49, 56, 63, 70
social_distancing_ratio = 0.75 # 0.5-1
period_asymp = 2.3 # 8-12
period_mild_infect = 2.3 # 2-4
period_severe_infect = 2.3 # 2-4
period_severe_isolate = 6 - period_severe_infect
period_hosp_if_not_icu = 10 # 6-10
period_hosp_if_icu = 8 # 6-10
period_icu_if_recover = 10 # 8-12
period_icu_if_die = 6 # 3-7
mort_loading = 1.0 # 0.5 - 1.5
prop_mild_detected = 0.3 # 0.2 - 0.8
hosp_to_icu = 0.2133 # 0.1 - 0.4 (0.21330242 = Ferguson)
descr = 'asymp_' + str(asymptomatic_prop) + '_R0_' + str(r0) + '_imported_scale_' + str(
imported_scale) + '_lockdown_' + str(lockdown_ratio) + '_postlockdown_' + str(
social_distancing_ratio) + '_ICU_' + str(hosp_to_icu) + '_mort_' + str(mort_loading) + '_asympinf_' + str(
asymp_rel_infectivity)
full_descr = f'Baseline R0: {r0:.1f}, asymptomatic proportion: {asymptomatic_prop:.0%}, asymptomatic relative ' \
f'infectiousness {asymp_rel_infectivity:.0%}, {prop_mild_detected:.0%} of mild cases detected \n '
full_descr += f'Imported scaling factor {imported_scale:.2f}, asymptomatic proportion imported {asymp_prop_imported:.0%}\n '
full_descr += f'Lockdown period: {lockdown_period:,.0f}, R0 relative to baseline {lockdown_ratio:.0%} in lockdown,' \
f'{social_distancing_ratio:.0%} post-lockdown \n '
full_descr += f'Infectious days pre-isolation: {period_asymp} asymptomatic, {period_mild_infect} mild, {period_severe_infect} severe; severe isolation days pre-hospitalisation: {period_severe_isolate} \n'
full_descr += f'Hospital days: {period_hosp_if_not_icu} not critical, {period_hosp_if_icu} critical plus {period_icu_if_recover} in ICU if recover/{period_icu_if_die} if die \n'
full_descr += f'Proportion of hospitalised cases ending in ICU: {hosp_to_icu:.2%}, mortality loading {mort_loading:.0%}'
# get s0 from file:
df = pd.read_csv('data/Startpop_2density_0comorbidity.csv') # , index_col=0)
df['density'] = df['density'].map({'High': 'high', 'Low': 'low'})
df['label'] = df['age'].str.lower() + '_' + df['sex'].str.lower() + '_' + df['density'].str.lower()
df_dict = df[['label', 'Population']].to_dict()
s_0 = {df_dict['label'][i]: df_dict['Population'][i] for i in df_dict['label'].keys()}
# Ferguson et al. parameterisation
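# each value is [P(hospitalised | symptomatic), P(ICU | hospitalised), P(death | symptomatic)] per age band
# (descriptive comment inferred from the column names assigned to ferguson_df below)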
ferguson = {'0-9': [0.001, 0.05, 0.00002],
'10-19': [0.003, 0.05, 0.00006],
'20-29': [0.012, 0.05, 0.0003],
'30-39': [0.032, 0.05, 0.0008],
'40-49': [0.049, 0.063, 0.0015],
'50-59': [0.102, 0.122, 0.006],
'60-69': [0.166, 0.274, 0.022],
'70-79': [0.243, 0.432, 0.051],
'80+': [0.273, 0.709, 0.093]}
# work out deaths as % of those entering ICU
for key in ferguson:
# TODO: add this calc to the df, not to the lists.
ferguson[key].append(ferguson[key][2] / ferguson[key][1] / ferguson[key][0])
# age profile - calculate ICU transition adjustment
age_profile = df.groupby('age').Population.sum().reset_index()
ferguson_df = pd.DataFrame(ferguson).T.reset_index()
ferguson_df.rename(columns={'index': 'age', 0: 'symp_to_hosp', 1: 'hosp_to_icu', 2: 'symp_to_dead', 3: 'icu_to_dead'},
inplace=True)
age_profile['Proportion'] = age_profile['Population'] / age_profile['Population'].sum()
age_profile = age_profile.merge(ferguson_df[['age', 'symp_to_hosp', 'hosp_to_icu']], on='age')
age_profile['hosp'] = age_profile['Proportion'] * age_profile['symp_to_hosp']
age_profile['prop_hosp'] = age_profile['hosp'] / age_profile['hosp'].sum()
age_profile['overall_hosp_to_icu'] = age_profile['prop_hosp'] * age_profile['hosp_to_icu']
overall_hosp_to_icu = age_profile['overall_hosp_to_icu'].sum()
icu_adjustment = hosp_to_icu / overall_hosp_to_icu # ~1 when hosp_to_icu is == ferguson number
# hard-coded parameters
# infectious_func gives the relative level of social contact over time: full contact before day 11,
# a linear taper down to the social-distancing level between days 11 and 22, the lockdown ratio for
# the duration of the lockdown period, and the post-lockdown social-distancing ratio thereafter.
infectious_func = lambda t: 1 if t < 11 else (
1 - (1 - social_distancing_ratio) / 11 * (t - 11)) if 11 <= t < 22 else lockdown_ratio if 22 <= t < (
22 + lockdown_period) else social_distancing_ratio
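# Quick sanity check of the schedule above (kept commented out so the timing run stays clean):
#   for t in (0, 11, 16.5, 22, 22 + lockdown_period):
#       print(t, infectious_func(t))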
c = 1
s = 0.06 # proportion of imported cases below 60 that are severe (1-s are mild)
# scale applied to the Ferguson severe-case ratios for older ages - using the inverse Ferguson value
# for 70-79 and 80+ means we assume 100% of imported cases in those age bands are severe
scale = {'60-69': 1,
'70-79': 1/ferguson['70-79'][0],
'80+': 1/ferguson['80+'][0]}
a = 0.25
l = asymp_prop_imported / (1 - asymp_prop_imported)
x = c * imported_scale
imported_func = lambda t: {'0-9_male_high': [0.0101 * x * l * np.exp(a * t), 0.0101 * x * (1 - s) * np.exp(a * t),
0.0101 * x * s * np.exp(a * t), 0, 0, 0],
'10-19_male_high': [0.0101 * x * l * np.exp(a * t), 0.0101 * x * (1 - s) * np.exp(a * t),
0.0101 * x * s * np.exp(a * t), 0, 0, 0],
'20-29_male_high': [0.0657 * x * l * np.exp(a * t), 0.0657 * x * (1 - s) * np.exp(a * t),
0.0657 * x * s * np.exp(a * t), 0, 0, 0],
'30-39_male_high': [0.1768 * x * l * np.exp(a * t), 0.1768 * x * (1 - s) * np.exp(a * t),
0.1768 * x * s * np.exp(a * t), 0, 0, 0],
'40-49_male_high': [0.0960 * x * l * np.exp(a * t), 0.0960 * x * (1 - s) * np.exp(a * t),
0.0960 * x * s * np.exp(a * t), 0, 0, 0],
'50-59_male_high': [0.1717 * x * l * np.exp(a * t), 0.1717 * x * (1 - ferguson['50-59'][0]) * np.exp(a * t),
0.1717 * x * ferguson['50-59'][0] * np.exp(a * t), 0, 0, 0],
'60-69_male_high': [0.0758 * x * l * np.exp(a * t), 0.0758 * x * (1 - scale['60-69'] * ferguson['60-69'][0]) * np.exp(a * t), 0.0758 * x * scale['60-69'] * ferguson['60-69'][0] * np.exp(a * t), 0, 0, 0],
'70-79_male_high': [0.0202 * x * l * np.exp(a * t), 0.0202 * x * (1 - scale['70-79'] * ferguson['70-79'][0]) * np.exp(a * t), 0.0202 * x * scale['70-79'] * ferguson['70-79'][0] * np.exp(a * t), 0, 0, 0],
'80+_male_high': [0.0051 * x * l * np.exp(a * t), 0.0051 * x * (1 - scale['80+'] * ferguson['80+'][0]) * np.exp(a * t), 0.0051 * x * scale['80+'] * ferguson['80+'][0] * np.exp(a * t), 0, 0, 0],
'0-9_female_high': [0.0000 * x * l * np.exp(a * t), 0.0000 * x * (1 - s) * np.exp(a * t),
0.0000 * x * s * np.exp(a * t), 0, 0, 0],
'10-19_female_high': [0.0101 * x * l * np.exp(a * t), 0.0101 * x * (1 - s) * np.exp(a * t),
0.0101 * x * s * np.exp(a * t), 0, 0, 0],
'20-29_female_high': [0.0606 * x * l * np.exp(a * t), 0.0606 * x * (1 - s) * np.exp(a * t),
0.0606 * x * s * np.exp(a * t), 0, 0, 0],
'30-39_female_high': [0.1111 * x * l * np.exp(a * t), 0.1111 * x * (1 - s) * np.exp(a * t),
0.1111 * x * s * np.exp(a * t), 0, 0, 0],
'40-49_female_high': [0.0556 * x * l * np.exp(a * t), 0.0556 * x * (1 - s) * np.exp(a * t),
0.0556 * x * s * np.exp(a * t), 0, 0, 0],
'50-59_female_high': [0.0657 * x * l * np.exp(a * t), 0.0657 * x * (1 - s) * np.exp(a * t),
0.0657 * x * s * np.exp(a * t), 0, 0, 0],
'60-69_female_high': [0.0152 * x * l * np.exp(a * t), 0.0152 * x * (1 - scale['60-69'] * ferguson['60-69'][0]) * np.exp(a * t), 0.0152 * x * scale['60-69'] * ferguson['60-69'][0] * np.exp(a * t), 0, 0,
0],
'70-79_female_high': [0.0303 * x * l * np.exp(a * t), 0.0303 * x * (1 - scale['70-79'] * ferguson['70-79'][0]) * np.exp(a * t), 0.0303 * x * scale['70-79'] * ferguson['70-79'][0] * np.exp(a * t), 0, 0,
0],
'80+_female_high': [0.0000 * x * l * np.exp(a * t), 0.0000 * x * (1 - scale['80+'] * ferguson['80+'][0]) * np.exp(a * t), 0.0000 * x * scale['80+'] * ferguson['80+'][0] * np.exp(a * t), 0, 0, 0]
} if t < 22 else 0
init_vectors = {
's_0': s_0,
'i_0': {'30-39_male_high': [0, 0, 0, 0, 0, 0]}
}
# begin timing loop
loops = 100
loop_times = []
start = time.time()
for i in range(loops):
loop_start = time.time()
model = MultiPopWrapper(
pop_categories={'age': ['0-9', '10-19', '20-29', '30-39', '40-49', '50-59', '60-69', '70-79', '80+'],
'sex': ['male', 'female'],
'density': ['high', 'low']
},
inf_labels=['AS', 'M', 'S', 'SI', 'H', 'ICU'],
alpha={'0-9': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['0-9'][0]),
(1 - asymptomatic_prop) * ferguson['0-9'][0], 0, 0, 0],
'10-19': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['10-19'][0]),
(1 - asymptomatic_prop) * ferguson['10-19'][0], 0, 0, 0],
'20-29': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['20-29'][0]),
(1 - asymptomatic_prop) * ferguson['20-29'][0], 0, 0, 0],
'30-39': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['30-39'][0]),
(1 - asymptomatic_prop) * ferguson['30-39'][0], 0, 0, 0],
'40-49': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['40-49'][0]),
(1 - asymptomatic_prop) * ferguson['40-49'][0], 0, 0, 0],
'50-59': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['50-59'][0]),
(1 - asymptomatic_prop) * ferguson['50-59'][0], 0, 0, 0],
'60-69': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['60-69'][0]),
(1 - asymptomatic_prop) * ferguson['60-69'][0], 0, 0, 0],
'70-79': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['70-79'][0]),
(1 - asymptomatic_prop) * ferguson['70-79'][0], 0, 0, 0],
'80+': [asymptomatic_prop, (1 - asymptomatic_prop) * (1 - ferguson['80+'][0]),
(1 - asymptomatic_prop) * ferguson['80+'][0], 0, 0, 0]},
t_inc=5.1,
q_se=[asymp_rel_infectivity, 1, 1, 0, 0, 0],
q_ii=[
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 1 / period_severe_infect, 0, 0, 0],
[0, 0, -1 / period_severe_infect, 1 / period_severe_isolate, 0, 0],
[0, 0, 0, -1 / period_severe_isolate, 1 / period_hosp_if_icu, 0],
[0, 0, 0, 0, -1 / period_hosp_if_icu, 0]
],
q_ir=[1 / period_asymp, 1 / period_mild_infect, 0, 0, 1 / period_hosp_if_not_icu, 1 / period_icu_if_recover],
q_id=[0, 0, 0, 0, 0, 1 / period_icu_if_die],
rho_delta={'0-9': [0, 0, 1, 1, ferguson['0-9'][1] * icu_adjustment, 0],
'10-19': [0, 0, 1, 1, ferguson['10-19'][1] * icu_adjustment, 0],
'20-29': [0, 0, 1, 1, ferguson['20-29'][1] * icu_adjustment, 0],
'30-39': [0, 0, 1, 1, ferguson['30-39'][1] * icu_adjustment, 0],
'40-49': [0, 0, 1, 1, ferguson['40-49'][1] * icu_adjustment, 0],
'50-59': [0, 0, 1, 1, ferguson['50-59'][1] * icu_adjustment, 0],
'60-69': [0, 0, 1, 1, ferguson['60-69'][1] * icu_adjustment, 0],
'70-79': [0, 0, 1, 1, ferguson['70-79'][1] * icu_adjustment, 0],
'80+': [0, 0, 1, 1, ferguson['80+'][1] * icu_adjustment, 0]},
rho_beta={'0-9': [0, 0, 0, 0, 0, ferguson['0-9'][3] * mort_loading],
'10-19': [0, 0, 0, 0, 0, ferguson['10-19'][3] * mort_loading],
'20-29': [0, 0, 0, 0, 0, ferguson['20-29'][3] * mort_loading],
'30-39': [0, 0, 0, 0, 0, ferguson['30-39'][3] * mort_loading],
'40-49': [0, 0, 0, 0, 0, ferguson['40-49'][3] * mort_loading],
'50-59': [0, 0, 0, 0, 0, ferguson['50-59'][3] * mort_loading],
'60-69': [0, 0, 0, 0, 0, ferguson['60-69'][3] * mort_loading],
'70-79': [0, 0, 0, 0, 0, ferguson['70-79'][3] * mort_loading],
'80+': [0, 0, 0, 0, 0, ferguson['80+'][3] * mort_loading]},
infectious_func=infectious_func,
imported_func=imported_func,
init_vectors=init_vectors,
extend_vars=True
)
periods_per_day = 5
t = np.linspace(0, 300, 300 * periods_per_day + 1)
model.q_se = model.q_se * r0 / model.r_0
solution = model.solve(t)
loop_end = time.time()
loop_times.append(loop_end - loop_start)
end = time.time()
print(f"Total time for {loops} loops: {end - start}")
print(f"Average per loop: {(end-start)/loops}")
print(f"Std of {loops} loops: {np.std(loop_times)}")
|
### Dependent packages ###
import time
import sys, os
import numpy as np
import scipy as sp
from scipy import optimize
from ..util.div import formattime, len_none
from ..util.linalg import jitchol, try_jitchol, triang_solve, mulinv_solve, chol_inv, traceprod, nearestPD
from ..util.stats import norm_cdf_int, norm_cdf_int_approx, normal_cdf_approx
class GPmodel():
""" GP model """
def __init__(self, kernel, likelihood = 1, mean = 0, constr_likelihood = 1E-6, verbatim = True):
### Prior model input ##################################
# GP parameters
self.kernel = kernel # Object containing kernel function and its derivatives
self.mean = mean # Constant mean function
self.likelihood = likelihood
# Design data
self.X_training = None
self.Y_training = None
### Cached data from intermediate calculations ###########
# Depending only on X
self.K_w = None # K_w = K_x_x + sigma^2*I
self.K_w_chol = None # Cholesky factor L s.t. L*L.T = K_w
# Depending only on Y
self.Y_centered = None
# Depending on (X, Y)
self.LLY = None # Only used in the unconstrained calculations
### For new observation ################################
self.x_new = None
self.y_new = None
self.new_X_training = None
self.new_Kw = None
self.new_K_w_chol = None
self.new_Y_centered = None
self.new_LLY = None
### Other ##############################################
self.verbatim = verbatim # Print info during execution
# Parameters that need calculation reset
@property
def X_training(self): return self.__X_training
@property
def Y_training(self): return self.__Y_training
@X_training.setter
def X_training(self, value):
self.K_w = None
self.K_w_chol = None
self.LLY = None
self.__X_training = value
@Y_training.setter
def Y_training(self, value):
self.Y_centered = None
self.LLY = None
self.__Y_training = value
def __str__(self):
""" What to show when the object is printed """
txt = '----- GP model ----- \n mean = {} \n likelihood = {} \n '.format(self.mean, self.likelihood)
txt += 'kernel: \n {} \n'.format(self.kernel.__str__())
txt += ' constraint: \n'
txt += '---------------------'
return txt
def reset(self):
""" Reset model. I.e. forget all older calculations """
self.K_w = None
self.K_w_chol = None
self.Y_centered = None
self.LLY = None
self.x_new = None
self.y_new = None
self.new_X_training = None
self.new_Kw = None
self.new_K_w_chol = None
self.new_Y_centered = None
self.new_LLY = None
def set_x_new(self, x_new, lik = None):
"""
Set new observation location x_new
"""
self._prep_K_w()
self.x_new = x_new
self.y_new = None
### Update GP matrices
# New training input
self.new_X_training = np.append(self.X_training, x_new.reshape(1, -1), axis = 0)
# Update Kw
new_Kw_col = self.kernel.K(self.X_training, x_new.reshape(1, -1)) # Define new column
self.new_Kw = np.append(self.K_w, new_Kw_col, axis = 1) # Add column
likelihood = lik if lik is not None else self.likelihood
self.new_Kw = np.append(self.new_Kw, np.append(new_Kw_col, self.kernel.K(x_new.reshape(1, -1), x_new.reshape(1, -1)) + likelihood, 0).T, axis = 0) # Add row
        # Update Cholesky factor (TODO: this full re-factorisation could be replaced by a cheaper incremental update)
self.new_K_w_chol = np.matrix(jitchol(self.new_Kw))
def set_y_new(self, y_new):
"""
Set new observation y_new
"""
self.y_new = y_new
self.new_Y_centered = np.append(self.Y_centered, y_new.reshape(-1, 1) - self.mean, axis=0)
self.new_LLY = mulinv_solve(self.new_K_w_chol, self.new_Y_centered, triang = True)
def calc_posterior(self, XS, full_cov = True):
"""
        Calculate predictive posterior distribution f* | Y
Returns: mean, cov (full or only diagonal)
"""
# Check input
self._check_XY_training()
assert len(XS.shape) == 2, 'Test data XS must be 2d array'
# Start timer
t0 = time.time()
# Run pre calcs
self._prep_Y_centered()
self._prep_K_w(verbatim = self.verbatim)
self._prep_K_w_factor(verbatim = self.verbatim)
self._prep_LLY()
if self.verbatim: print("..Calculating f* | Y ...", end = '')
# Kernel matrices needed
K_x_xs = np.matrix(self.kernel.K(self.X_training, XS))
v2 = triang_solve(self.K_w_chol, K_x_xs)
# Calculate mean
mean = self.mean + K_x_xs.T*self.LLY
# Calculate cov
if full_cov:
K_xs_xs = np.matrix(self.kernel.K(XS, XS))
cov = K_xs_xs - v2.T*v2
else:
K_xs_xs_diag = self.kernel.K_diag(XS)
cov = np.matrix(K_xs_xs_diag - np.square(v2).sum(0)).T
if self.verbatim: print(' DONE - Total time: {}'.format(formattime(time.time() - t0)))
if full_cov:
return mean, cov
else:
return np.array(mean).flatten(), np.array(cov).flatten()
def calc_posterior_new(self, XS):
"""
Mean and variance of GP at XS assuming new observation (self.x_new, self.y_new) was added
"""
# Check input
self._check_XY_training()
self._check_new()
assert len(XS.shape) == 2, 'Test data XS must be 2d array'
K_x_xs = np.matrix(self.kernel.K(self.new_X_training, XS))
v2 = triang_solve(self.new_K_w_chol, K_x_xs)
mean = self.mean + K_x_xs.T*self.new_LLY
K_xs_xs_diag = self.kernel.K_diag(XS)
cov = np.matrix(K_xs_xs_diag - np.square(v2).sum(0)).T
return np.array(mean).flatten(), np.array(cov).flatten()
def optimize(self, method = 'ML', fix_likelihood = False, bound_min = 1e-6):
"""
Optimize hyperparameters of unconstrained GP
method = 'ML' -> maximum marginal likelihood
method = 'CV' -> cross validation
fix_likelihood = False -> Don't optimize GP likelihood parameter self.likelihood
bound_min = minimum value in parameter bounds = (bound_min, ...)
"""
# Start timer
t0 = time.time()
if self.verbatim: print("..Running optimization for unconstrained GP ...", end = '')
# Run optimization
if method == 'ML':
res = self._optimize_ML(fix_likelihood, bound_min)
elif method == 'CV':
print('TODO...')
raise NotImplementedError
else:
raise NotImplementedError
# Save results
self.__setparams(res.x, not fix_likelihood)
if self.verbatim:
if res.success:
print(' DONE - Total time: {}'.format(formattime(time.time() - t0)))
else:
print('WARNING -- NO CONVERGENCE IN OPTIMIZATION -- Total time: {}'.format(formattime(time.time() - t0)))
def _optimize_ML(self, fix_likelihood = False, bound_min = 1e-6):
"""
Optimize hyperparameters of unconstrained GP using ML
fix_likelihood = False -> Don't optimize GP likelihood parameter self.likelihood
bound_min = minimum value in parameter bounds = (bound_min, ...)
"""
# Check input
self._check_XY_training()
# Define wrapper functions for optimization
def optfun(theta, fix_likelihood):
self.reset()
self.__setparams(theta, not fix_likelihood)
return -self._loglik()
def optfun_grad(theta, fix_likelihood):
self.reset()
self.__setparams(theta, not fix_likelihood)
grad = -np.array(self._loglik_grad())
if fix_likelihood:
return grad[1:]
else:
return grad
# Define bounds
num_params = self.kernel.dim + 2
if fix_likelihood: num_params -= 1
bounds = [(bound_min, None)]*num_params
# Initial guess
if fix_likelihood:
theta = np.array(self.kernel.get_params())
else:
theta = np.array([self.likelihood] + list(self.kernel.get_params()))
# Run optimizer
res = optimize.minimize(optfun, theta, args=fix_likelihood, jac = optfun_grad, bounds=bounds, method = 'L-BFGS-B')
return res
def _loglik(self):
"""
Calculates log marginal likelihood
I.e. log(P(Y_training | X_training))
"""
# Run pre calcs
self._prep_Y_centered()
self._prep_K_w(verbatim = False)
self._prep_K_w_factor(verbatim = False)
self._prep_LLY()
### Calculate log marginal likelihood ###
n = self.X_training.shape[0]
loglik = -0.5*self.Y_centered.T*self.LLY - np.log(np.diag(self.K_w_chol)).sum() - (n/2)*np.log(2*np.pi)
loglik = loglik[0,0]
return loglik
def _loglik_grad(self):
"""
Calculates gradient of log marginal likelihood w.r.t hyperparameters
"""
# Run pre calcs
self._prep_Y_centered()
self._prep_K_w(verbatim = False)
self._prep_K_w_factor(verbatim = False)
self._prep_LLY()
# Invert K_w using the Cholesky factor
K_w_inv = chol_inv(self.K_w_chol)
# Partial derivative of K_w w.r.t. likelihood
n = self.X_training.shape[0]
dKw_dlik = np.matrix(np.identity(n))
# Partial derivative of K_w w.r.t. kernel parameters
dK_dpar = self.kernel.K_gradients(self.X_training, self.X_training)
# Calculate gradient
alpha = K_w_inv*self.Y_centered
tmp = alpha*alpha.T - K_w_inv
Dloglik_lik = 0.5*traceprod(tmp, dKw_dlik) # W.r.t. GP likelihood parameter
Dloglik_ker = [0.5*traceprod(tmp, K) for K in dK_dpar] # W.r.t. kernel parameters
Dloglik = [Dloglik_lik] + Dloglik_ker
return Dloglik
def _check_XY_training(self):
"""
Check that X_training and Y_training are OK
"""
assert self.X_training is not None, 'Training data not found. Use model.X_training = ...'
assert len(self.X_training.shape) == 2, 'Training data X_training must be 2d array'
assert self.Y_training is not None, 'Training data not found. Use model.Y_training = ...'
assert len(self.Y_training.shape) == 1, 'Training data Y_training must be 1d array'
assert self.X_training.shape[0] == len(self.Y_training), 'Number of points in X_training and Y_training does not match'
def _check_new(self):
"""
Check that x_new, y_new has been set
"""
assert self.x_new is not None, 'x_new not found. Use model.set_x_new()'
        assert self.y_new is not None, 'y_new not found. Use model.set_y_new()'
def __setparams(self, theta, includes_likelihood):
"""
Set model parameters from single array theta
"""
if includes_likelihood:
self.likelihood = theta[0]
self.kernel.set_params(theta[1:])
else:
self.kernel.set_params(theta)
def _prep_K_w(self, verbatim = False):
"""
Calculate K_w = K_x_x + likelihood
*** Need to run this if one of the following arrays are changed : ***
- X_training
"""
if verbatim: print('..Running calculation of K_w ...', end = '')
if self.K_w is None:
# Start timer
t0 = time.time()
n = len(self.X_training)
if np.isscalar(self.likelihood):
self.K_w = np.matrix(self.kernel.K(self.X_training, self.X_training) + self.likelihood*np.identity(n))
else:
self.K_w = np.matrix(self.kernel.K(self.X_training, self.X_training) + np.diag(self.likelihood))
if verbatim: print(' DONE - time: {}'.format(formattime(time.time() - t0)))
else:
if verbatim: print(' SKIP - (cached)')
def _prep_K_w_factor(self, verbatim = False):
"""
Calculate matrix L s.t. L*L.T = K_w
*** Need to run this if one of the following arrays are changed : ***
- X_training
"""
if verbatim: print('..Running calculation of Cholesky factor for K_w ...', end = '')
if self.K_w_chol is None:
# Start timer
t0 = time.time()
# Cholesky
self.K_w_chol = np.matrix(jitchol(self.K_w))
if verbatim: print(' DONE - time: {}'.format(formattime(time.time() - t0)))
else:
if verbatim: print(' SKIP - (cached)')
def _prep_LLY(self):
"""
Calculate LLY = L.T \ L \ Y_centered
*** Need to run this if one of the following arrays are changed : ***
- X_training
- Y_training
"""
if self.LLY is None:
# Run calculation
self.LLY = mulinv_solve(self.K_w_chol, self.Y_centered, triang = True)
def _prep_Y_centered(self):
"""
Calculate Y_centered
"""
if self.Y_centered is None: self.Y_centered = self.Y_training.reshape(-1, 1) - self.mean
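# Minimal usage sketch (illustrative only; assumes `kernel` is an object exposing K, K_diag,
# K_gradients, get_params/set_params and a `dim` attribute, which is all GPmodel calls above):
#
#   model = GPmodel(kernel=kernel, likelihood=1e-2, mean=0.0)
#   model.X_training = X              # 2d array, shape (n, d)
#   model.Y_training = y              # 1d array, shape (n,)
#   model.optimize(method='ML')       # maximum marginal likelihood
#   mean, var = model.calc_posterior(XS, full_cov=False)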
|
from graper.spiders import *
from graper.utils import log
import js2py
import json
import datetime
import openpyxl
import tqdm
logger = log.get_logger(__file__)
class AppListSpider(Spider):
"""
    Collect the list of apps in the specified categories.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
        with open("encrypt.js", "r", encoding="utf8") as f:
            encrypt_js = f.read()
self.js_context = js2py.EvalJs()
self.js_context.execute(encrypt_js)
self.data = {}
self.downloader.proxy_enable = False
def before_stop(self, **kwargs):
with open("app_list.json", "w", encoding="utf8") as f:
json.dump(self.data, f)
return
def encrypt(self, text):
return self.js_context.encode(
"ZGFwcH" + "JhZGFy" + self.js_context.encode(text)
)
def make_request(self, cat, page):
params = f"page={page}&sgroup=max&featured=1&range=day&category={cat}&sort=user&order=desc&limit=26"
url = "https://dappradar.com/v2/api/dapps?params={}".format(
self.encrypt(params)
)
return Request(url, meta={"cat": cat, "page": page})
def start_requests(self):
category_list = [
"games",
"collectibles",
"gambling",
]
for cat in category_list:
yield self.make_request(cat, 1)
def parse(self, response: Response):
meta = response.request.meta
cat = meta["cat"]
j_response = response.response.json()
for item in j_response["dapps"]:
self.data[item["id"]] = item
page = j_response["page"]
print(cat, page, len(self.data))
if page < j_response["pageCount"]:
yield self.make_request(cat, page + 1)
return
class AppDetailSpider(Spider):
"""
    Collect app details.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.data = {}
self.downloader.proxy_enable = False
def before_stop(self, **kwargs):
with open("app_detail.json", "w", encoding="utf8") as f:
json.dump(self.data, f)
return
def start_requests(self):
with open("app_list.json", "r", encoding="utf8") as f:
for k, v in json.load(f).items():
yield Request(
f"https://dappradar.com/v2/api/dapp/{v['protocolSlug']}/{v['category']}/{v['slug']}",
meta=v,
)
def parse(self, response: Response):
try:
meta = response.request.meta
j_response = response.response.json()
self.data[meta["id"]] = {
"detail": j_response,
}
yield Request(
f"https://dappradar.com/v2/api/dapp/{meta['protocolSlug']}/{meta['category']}/{meta['slug']}/chart/all",
meta=meta,
callback=self.parse_chart,
)
except Exception as e:
logger.exception(e)
yield response.request
return
def parse_chart(self, response: Response):
try:
meta = response.request.meta
j_response = response.response.json()
self.data[meta["id"]].update(
{"chart": j_response,}
)
assert len(self.data[meta["id"]]) == 2
print(len(self.data))
except Exception as e:
logger.exception(e)
yield response.request
return
def export_data(self):
with open("app_detail.json", "r", encoding="utf8") as f:
data = json.load(f)
headers = [
"date",
"Users",
"Volume",
"Transactions",
"category",
"name",
"app_id",
]
values = []
for item in tqdm.tqdm(data.values(), desc="reading", total=len(data)):
chart = item["chart"]
detail = item["detail"]
user_list = volumn_list = trans_list = None
for x in chart["series"]:
if x["name"] == "Users":
user_list = x["data"]
elif x["name"] == "Volume":
volumn_list = x["data"]
elif x["name"] == "Transactions":
trans_list = x["data"]
_values = []
for date, user, volume, trans in zip(
chart["xaxis"], user_list, volumn_list, trans_list
):
date = datetime.datetime.fromtimestamp(date / 1000).strftime("%Y-%m-%d")
_values.append(
[
date,
user,
volume,
trans,
detail["category"],
detail["name"],
detail["id"],
]
)
_values.sort(key=lambda x: x[0])
_values.reverse()
if not _values:
_values.append(
["", 0, 0, 0, detail["category"], detail["name"], detail["id"]]
)
values.extend(_values)
print(len(values))
wb = openpyxl.Workbook(write_only=True)
sheet = wb.create_sheet()
sheet.append(headers)
for v in values:
sheet.append(v)
wb.save("dappradar.xlsx")
return
class IndustryOverViewSpider(Spider):
"""
    Collect industry-wide overview charts.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.data = {}
self.downloader.proxy_enable = False
def before_stop(self, **kwargs):
with open("industry_overview.json", "w", encoding="utf8") as f:
json.dump(self.data, f)
return
def start_requests(self):
for url in [
"https://dappradar.com/api/charts/users-activity/category/history/year?currency=USD",
"https://dappradar.com/api/charts/users-activity/protocol/history/year?currency=USD",
"https://dappradar.com/api/charts/transactions/category/history/year?currency=USD",
"https://dappradar.com/api/charts/transactions/protocol/history/year?currency=USD",
"https://dappradar.com/api/charts/volume/category/history/year?currency=USD",
"https://dappradar.com/api/charts/volume/protocol/history/year?currency=USD",
]:
yield Request(url)
def parse(self, response: Response):
try:
url = response.request.url
j_response = response.response.json()
self.data[url] = j_response
print(len(self.data))
except Exception as e:
logger.exception(e)
yield response.request
return
def export_data(self):
with open("industry_overview.json", "r", encoding="utf8") as f:
data = json.load(f)
headers = [
"date",
"name",
"value",
]
wb = openpyxl.Workbook(write_only=True)
for url, j_response in data.items():
title = url.replace("https://dappradar.com/api/charts/", "")
title = "-".join(title.split("/")[:2])
sheet = wb.create_sheet(title=title)
sheet.append(headers)
date_list = j_response["xaxis"]
for item in j_response["series"]:
name = item["name"]
for date, value in zip(date_list, item["data"]):
date = datetime.datetime.fromtimestamp(date / 1000).strftime(
"%Y-%m-%d"
)
sheet.append([date, name, value])
wb.save("dappradar_industry_overview.xlsx")
return
if __name__ == "__main__":
with IndustryOverViewSpider(pool_size=5) as my_spider:
my_spider.run()
|
"""
- What is Dogeon?
Dogeon is a simple, fast, complete, correct and extensible DSON <http://dogeon.org>
encoder and decoder for Python 2.5+ and Python 3.3+.
It is pure Python code with no dependencies.
The encoder can be specialized to provide serialization in any kind of situation,
without any special support by the objects to be serialized (somewhat like pickle).
This is best done with the default kwarg to dumps.
The decoder can handle incoming DSON strings of any specified encoding
(UTF-8 by default). It can also be specialized to post-process DSON objects with
the object_hook or object_pairs_hook kwargs.
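
A minimal usage sketch (assuming the package exposes dson.dumps / dson.loads in the same
way the json module does, which is what the description above implies):

    import dson
    text = dson.dumps({'wow': 'such data'})  # DSON-encoded string
    data = dson.loads(text)                  # back to a Python dict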
"""
from setuptools import setup
setup(
name="Dogeon",
version='1.0.1',
author="Ju Lin",
author_email="soasme@gmail.com",
description="Simple, fast, extensible DSON encoder/decoder for Python",
long_description=__doc__,
license="MIT License",
keywords="DSON dson Dogeon",
url="https://github.com/soasme/dogeon",
packages=['dson'],
classifiers=[
"Development Status :: 4 - Beta",
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
"License :: OSI Approved :: MIT License",
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from six.moves import xrange
class Communicator(object):
def communicate(self, visualizer, solution, visualizer_cb, solution_cb):
visualizer_cb(visualizer.readline(), flush=False) # maxPercentage
line = visualizer.readline() # H
visualizer_cb(line, flush=False)
for i in xrange(int(line)):
visualizer_cb(visualizer.readline(), flush=False) # worldMap[i]
visualizer_cb(visualizer.readline(), flush=True) # totalPopulation
# communicate solution <-> visualizer
while True:
line = solution.readline()
if line.strip() != '?':
break
solution_cb(line, flush=False)
solution_cb(solution.readline(), flush=True) # query
visualizer_cb(visualizer.readline(), flush=True) # answer
        # final answer: the first non-'?' line gives the number of answer lines that follow
        solution_cb(line, flush=False)
for i in xrange(int(line)):
solution_cb(solution.readline(), flush=False)
|
# Python3 code to delete the middle element of a stack
# without using an additional data structure.
# deleteMid removes the middle of a stack of size n;
# curr is the index of the current item being processed.
class Stack:
def __init__(self):
self.items = []
def isEmpty(self):
return self.items == []
def push(self, item):
self.items.append(item)
def pop(self):
return self.items.pop()
def peek(self):
return self.items[len(self.items)-1]
def size(self):
return len(self.items)
def deleteMid(st, n, curr) :
# If stack is empty or all items
# are traversed
if (st.isEmpty() or curr == n) :
return
# Remove current item
x = st.peek()
st.pop()
# Remove other items
deleteMid(st, n, curr+1)
# Put all items back except middle
if (curr != int(n/2)) :
st.push(x)
# Driver function to test above functions
st = Stack()
# push elements into the stack
st.push('1')
st.push('2')
st.push('3')
st.push('4')
st.push('5')
st.push('6')
st.push('7')
deleteMid(st, st.size(), 0)
# Printing stack after deletion
# of middle.
while (st.isEmpty() == False) :
p = st.peek()
st.pop()
print (str(p) + " ", end="")
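# Expected output: 7 6 5 3 2 1  (the middle element '4' has been removed)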
|
"""
Helper methods to demangle an ILIAS date.
"""
import datetime
import locale
import logging
import re
from typing import Optional
from ..logging import PrettyLogger
LOGGER = logging.getLogger(__name__)
PRETTY = PrettyLogger(LOGGER)
def demangle_date(date: str) -> Optional[datetime.datetime]:
"""
Demangle a given date in one of the following formats:
"Gestern, HH:MM"
"Heute, HH:MM"
"Morgen, HH:MM"
"dd. mon.yyyy, HH:MM
"""
saved = locale.setlocale(locale.LC_ALL)
try:
try:
locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')
except locale.Error:
PRETTY.warning(
"Could not set language to german. Assuming you use english everywhere."
)
date = re.sub(r"\s+", " ", date)
date = re.sub("Gestern|Yesterday", _yesterday().strftime("%d. %b %Y"), date, re.I)
date = re.sub("Heute|Today", datetime.date.today().strftime("%d. %b %Y"), date, re.I)
date = re.sub("Morgen|Tomorrow", _tomorrow().strftime("%d. %b %Y"), date, re.I)
return datetime.datetime.strptime(date, "%d. %b %Y, %H:%M")
except ValueError:
PRETTY.warning(f"Could not parse date {date!r}")
return None
finally:
locale.setlocale(locale.LC_ALL, saved)
def _yesterday() -> datetime.date:
return datetime.date.today() - datetime.timedelta(days=1)
def _tomorrow() -> datetime.date:
return datetime.date.today() + datetime.timedelta(days=1)
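# Example sketch of the accepted inputs (illustrative; exact results depend on the locale handling above):
#
#   demangle_date("Heute, 13:37")          -> today's date at 13:37
#   demangle_date("07. Mai 2020, 12:00")   -> datetime.datetime(2020, 5, 7, 12, 0)
#   demangle_date("not a date")            -> None (with a warning)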
|
# Generated by Django 3.0.3 on 2020-02-25 07:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('datahub', '0006_auto_20200224_1212'),
]
operations = [
migrations.AddField(
model_name='train',
name='trainCategory',
field=models.CharField(default='Cargo', max_length=20),
preserve_default=False,
),
]
|
# Generated by Django 2.2.1 on 2019-05-16 11:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('music', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='songs',
name='album',
field=models.CharField(default='New Album', max_length=255),
),
]
|
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import decomposition
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from scipy import linalg as LA
def run_pca_np(x):
    # center the data
    x -= np.mean(x, axis=0)
    # eigendecomposition of the covariance matrix
    cov = np.cov(x, rowvar=False)
    evals, evecs = LA.eigh(cov)
    # sort eigenvectors by decreasing eigenvalue
    idx = np.argsort(evals)[::-1]
    evecs = evecs[:, idx]
    evals = evals[idx]
    # project the data onto the principal axes
    a = np.dot(x, evecs)
return a
def run_pca(x, n_components=3):
scaler = StandardScaler(copy=True, with_mean=True, with_std=True)
# print("Before normalization")
# mean = np.mean(x)
# variance = np.var(x)
# print(mean)
# print(variance)
scaler.fit(x)
# print(scaler.mean_)
x_std = scaler.transform(x)
# print("After normalization")
# mean_std = np.mean(x_std)
# variance_std = np.var(x_std)
# print(mean_std)
# print(variance_std)
# print(x_std)
pca = decomposition.PCA(n_components)
# https://stats.stackexchange.com/questions/235882/pca-in-numpy-and-sklearn-produces-different-results
# input the standarized data with mean 0 and var 1 to the PCA
result = pca.fit_transform(x_std)
# we can just return this
# but we can normalize
# the result too
# scaler.fit(result)
# print(scaler.mean_)
# result_std = scaler.transform(result)
return result
if __name__ == "__main__":
x = np.random.random_sample((18, 3209))
print(x)
print(x.shape)
result = run_pca(x)
print(result)
print(result.shape)
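    # For comparison, the covariance/eigendecomposition variant above could be run on the
    # same data (illustrative sketch; note that run_pca_np centers x in place, hence the copy):
    #   result_np = run_pca_np(x.copy())
    #   print(result_np.shape)  # (18, 3209): projection onto all principal axes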
|
"""
.. currentmodule:: flytekitplugins.sqlalchemy
This package contains things that are useful when extending Flytekit.
.. autosummary::
:template: custom.rst
:toctree: generated/
SQLAlchemyConfig
SQLAlchemyDefaultImages
SQLAlchemyTask
"""
from .task import SQLAlchemyConfig, SQLAlchemyDefaultImages, SQLAlchemyTask
|
'''
Created on 13.06.2016
@author: Fabian Reiber
@version: 1.0
'''
class NoSignedPartException(Exception):
def __str__(self, *args, **kwargs):
return Exception.__str__(self, *args, **kwargs)
|
import matplotlib.pyplot as plt
import numpy as np
from ezmodel.models.mean import SimpleMean
from ezmodel.models.svr import SVR
from ezmodel.util.sample_from_func import sine_function
svm = SVR()
# create some data to test this model on
X, y, _X, _y = sine_function(20, 200)
# let the model fit the data
svm.fit(X, y)
# predict the test data using the model
y_hat = svm.predict(_X)
# sort the test inputs so the prediction can be plotted as a line, then predict again
_X = _X[np.argsort(_X[:, 0])]
y_hat = svm.predict(_X)
plt.scatter(X, y, label="Data")
plt.plot(_X, y_hat, color="black", label="SVR")
plt.legend()
plt.show()
|
from datetime import datetime
from rich import print
def time_now():
dt = datetime.now()
dtime = dt.strftime("%X")
return f"[italic white]{dtime}[/] |"
class console():
def nanostyle(text): print(f'[magenta]{text}[/magenta]')
def log(text): print(f'{time_now()} [[dim]INFO[/]]:\t{text}')
def botlog(text): print(f'{time_now()} [[blue]DISCORD[/]]:\t{text}')
def system(text): print(f'{time_now()} {text}')
def warn(text): print(f'{time_now()} [[bold underline yellow]WARN[/]]:\t{text}')
def error(text): print(f'{time_now()} [[bold underline red]ERR![/]]:\t{text}')
def success(text): print(f'{time_now()} [[bold #32cd32]OK[/]]:\t{text}')
def notice(text): print(f'{time_now()} [[bold]NOTE[/]]:\t{text}')
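# Usage sketch:
#   console.log("loading configuration")
#   console.warn("cache miss, rebuilding")
#   console.error("failed to connect")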
|
# Support Python 2 and 3 API calls without importing
# a 3rd party library
try:
from urllib.request import Request as HTTPRequest
from urllib.parse import urlencode
except ImportError: # pragma: no cover
from urllib2 import Request as HTTPRequest # pragma: no cover
from urllib import urlencode # pragma: no cover
import requests
from .errors import (NotAuthorizedException, NotFoundException, InvalidRequestException,
RateLimitException, UnknownException)
class Client(object):
"""TypeForm API client"""
BASE_URL = 'https://api.typeform.com/forms'
def __init__(self, personal_token):
"""Constructor for TypeForm API client"""
self.personal_token = personal_token
        self._client = requests.Session()
self._client.headers = {
'User-Agent': 'python-typeform/0.2.1',
'Authorization': 'Bearer ' + self.personal_token
}
def _request(self, method, path, params=None):
"""Helper method to make requests to the TypeForm API"""
        # Make sure we always have a params dict to pass along
        if params is None:
            params = dict()
        # Get our full request URI, e.g. `/abc123` -> `https://api.typeform.com/forms/abc123`
url = "{}{}".format(self.BASE_URL, path)
# Make our API request
print("Making the API call: {} {}".format(method, url))
resp = self._client.request(method=method, url=url, params=params)
# On 500 error we don't get JSON, so no reason to even try
if resp.status_code == 500:
raise UnknownException('typeform client received 500 response from api')
        # A 204 means the form was successfully deleted, so just return True
        if resp.status_code == 204:
            return True
# Attempt to decode our JSON
# DEV: In every case (other than 500) we have gotten JSON back, but catch exception just in case
try:
data = resp.json()
except ValueError:
raise UnknownException('typeform client could not decode json from response')
# Good response, just return it
if resp.status_code == 200:
return data
# Handle any exceptions
message = data.get('message')
if resp.status_code == 404:
raise NotFoundException(message)
elif resp.status_code == 403:
raise NotAuthorizedException(message)
elif resp.status_code == 400:
raise InvalidRequestException(message)
elif resp.status_code == 429:
raise RateLimitException(message)
# Hmm, not sure how we got here, just raise hell
raise UnknownException(
'typeform client received unknown response status code {code!r}'.format(code=resp.status_code)
)
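# Usage sketch (hypothetical token and form id, shown for illustration only):
#
#   client = Client(personal_token='tfp_XXXXXXXX')
#   form = client._request('get', '/abc123')        # returns the decoded JSON on HTTP 200
#   deleted = client._request('delete', '/abc123')  # returns True on HTTP 204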
|
import random
import aiarena21.server.settings as settings
from aiarena21.server.item import Item1, Item2, Item3
from opensimplex import OpenSimplex
from aiarena21.server.logs import log, replay
class Game:
def __init__(self, players):
self.current_round = 0
self.total_rounds = settings.TOTAL_ROUNDS
self.map_size = settings.MAP_SIZE
self.map = settings.MAP
self.bike_length = settings.BIKE_LENGTH
self.players = players
self.items_map = [[[] for _ in range(settings.MAP_SIZE[1])] for _ in range(settings.MAP_SIZE[0])]
self.new_items = None
self._incoming_items = None
self.heatmap = [[0 for _ in range(self.map_size[1])] for _ in range(self.map_size[0])]
self.bike_cost = settings.BIKE_COST
self.portal_gun_cost = settings.PORTAL_GUN_COST
self.bike_turns = settings.BIKE_TURNS
self.portal_gun_turns = settings.PORTAL_GUN_TURNS
self.all_items = [Item1, Item2, Item3]
self.noise_gen = OpenSimplex()
self.random_spawn()
def cell_available(self, row, col):
return 0 <= row < self.map_size[0] and 0 <= col < self.map_size[1] and self.map[row][col] != '#'
def path_available(self, p1, p2, max_dist):
q = [(p1, 0)]
seen = [p1]
dx = [0, 1, 0, -1]
dy = [1, 0, -1, 0]
while len(q) > 0:
front, dist = q[0]
if dist >= max_dist:
return False
q = q[1:]
for d in range(4):
new_p = (front[0] + dx[d], front[1] + dy[d])
if new_p not in seen and self.cell_available(*new_p):
if new_p == p2:
return True
seen.append(new_p)
                    q.append((new_p, dist + 1))
        # Queue exhausted without reaching p2 within max_dist steps
        return False
def random_spawn(self):
spawn_locations = []
for row in range(self.map_size[0]):
for col in range(self.map_size[1]):
if self.map[row][col] in ['S', 's']:
spawn_locations.append((row, col))
while len(spawn_locations) < len(self.players):
row, col = (random.randint(0, self.map_size[i] - 1) for i in range(2))
while not self.cell_available(row, col) or (row, col) in spawn_locations:
row, col = (random.randint(0, self.map_size[i] - 1) for i in range(2))
spawn_locations.append((row, col))
random_bool = random.randint(0, 1)
if random_bool == 1:
spawn_locations[0], spawn_locations[1] = spawn_locations[1], spawn_locations[0]
for i in range(2):
self.players[i].location = spawn_locations[i]
def deploy_items(self):
"""
Generate new items and update items_map and new_items accordingly
if no new item -> self.new_items = None
if new items -> add to self.new_items
"""
# Do nothing outside of the spawning tick.
if self.current_round % settings.ITEM_SPAWN_PERIOD != 0: return
if self._incoming_items is not None:
# Time to spawn some items.
self.new_items = self._incoming_items
for x in range(settings.MAP_SIZE[0]):
for y in range(settings.MAP_SIZE[1]):
if settings.OVERLAP_ITEMS:
self.items_map[x][y].extend(self.new_items[x][y])
else:
if self.new_items[x][y]:
self.items_map[x][y] = self.new_items[x][y]
# Set the new incoming items.
grid_noise = [[-1 for __ in range(settings.MAP_SIZE[1])] for _ in range(settings.MAP_SIZE[0])]
self._incoming_items = [[[] for __ in range(settings.MAP_SIZE[1])] for _ in range(settings.MAP_SIZE[0])]
for item in self.all_items:
z = random.random() * 10000
for x in range(settings.MAP_SIZE[0]):
for y in range(settings.MAP_SIZE[1]):
if not self.cell_available(x, y):
continue
grid_noise[x][y] = self.noise_gen.noise3d(x * item.NOISE_MULT, y * item.NOISE_MULT, z)
for a, b in [(x+1, y), (x-1, y), (x, y+1), (x, y-1)]:
if grid_noise[x][y] < self.noise_gen.noise3d(a * item.NOISE_MULT, b * item.NOISE_MULT, z) + item.SPAWN_DIFF:
break
else:
if settings.OVERLAP_ITEMS:
self._incoming_items[x][y].append(item())
else:
self._incoming_items[x][y] = [item()]
def items_score_map(self):
res = [[0 for _ in range(self.map_size[1])] for __ in range(self.map_size[0])]
for row in range(self.map_size[0]):
for col in range(self.map_size[1]):
for item in self.items_map[row][col]:
res[row][col] += item.points
return res
def update_heatmap(self):
"""
Return a 2d list with the shape of the game map for heatmap
"""
self.heatmap = [[0 for __ in range(settings.MAP_SIZE[1])] for _ in range(settings.MAP_SIZE[0])]
target_distance = 1 + ((settings.ITEM_SPAWN_PERIOD - 1 - self.current_round % settings.ITEM_SPAWN_PERIOD) * settings.HEATMAP_LARGEST_DISTANCE) // settings.ITEM_SPAWN_PERIOD
# Target distance will start off as HEATMAP_LARGEST_DISTANCE, then taper off to 1.
for x in range(settings.MAP_SIZE[0]):
for y in range(settings.MAP_SIZE[1]):
if not self.cell_available(x, y):
continue
for a in range(settings.MAP_SIZE[0]):
for b in range(settings.MAP_SIZE[1]):
dist = abs(x-a) + abs(y-b)
if dist < target_distance:
self.heatmap[x][y] += (sum(item.points for item in self._incoming_items[a][b]) / (pow(dist + 1, 3)))
self.heatmap[x][y] = float("{:.2f}".format(self.heatmap[x][y]))
def transport_random(self, player):
row, col = random.randint(0, self.map_size[0] - 1), random.randint(0, self.map_size[1] - 1)
while not self.cell_available(row, col):
row, col = random.randint(0, self.map_size[0] - 1), random.randint(0, self.map_size[1] - 1)
player.update_location(row, col)
def finish_turn(self, wagers_obj=None):
if self.current_round >= 2:
import aiarena21.server.network as network
network.TIMEOUT_TIME = 2
recap = {
'type': 'tick',
'heatmap': self.heatmap,
'positions': [{
'new_pos': self.players[i].location,
'delta': self.players[i].last_move
} for i in range(2)],
'items': [[[y.short_name for y in position] for position in sublist] for sublist in self.items_map],
'bike': [
self.players[i].using_bike
for i in range(2)
],
'teleport': [
self.players[i].using_portal_gun
for i in range(2)
],
'scores': [
self.players[i].score
for i in range(2)
],
'remaining_rounds': self.total_rounds - self.current_round
}
if wagers_obj is not None:
recap.update({
'wagers': wagers_obj['wagers'],
'positions': [{
'new_pos': wagers_obj['before_positions'][i],
'delta': self.players[i].last_move
} for i in range(2)],
'wager_positions': [self.players[i].location for i in range(2)]
})
replay(recap)
self.current_round += 1
|
/usr/lib/python3.6/encodings/cp866.py
|
import torch
import numpy as np
import yaml
from collagen.data import FoldSplit
from collagen.core.utils import auto_detect_device
from collagen.strategies import Strategy
from collagen.data.utils.datasets import get_mnist
import random
from tensorboardX import SummaryWriter
from examples.autoencoder.utils import init_args, init_data_provider, init_callbacks
from examples.autoencoder.models import AutoEncoder
from examples.autoencoder.sampler import VisualizationSampler
device = auto_detect_device()
if __name__ == "__main__":
# parse the arguments
args = init_args()
# detect device
device = auto_detect_device()
# summary writer for tensorboard
summary_writer = SummaryWriter(log_dir=args.log_dir, comment=args.comment)
torch.manual_seed(args.seed)
np.random.seed(args.seed)
random.seed(args.seed)
with open("settings.yml", "r") as f:
        sampling_config = yaml.load(f, Loader=yaml.FullLoader)
train_ds, classes = get_mnist(data_folder=args.save_data, train=True)
splitter = FoldSplit(train_ds, n_folds=5, target_col="target")
test_ds, classes = get_mnist(data_folder=args.save_data, train=False)
# Get data of the first fold
df_train, df_val = next(splitter)
item_loaders = dict()
data_provider = init_data_provider(args, df_train, df_val, item_loaders, test_ds)
model = AutoEncoder(16).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, betas=(args.beta1, 0.999))
criterion = torch.nn.MSELoss().to(device)
viz_sampler = VisualizationSampler(viz_loader=item_loaders['mnist_viz'],
device=device, bs=args.bs, ae=model)
callbacks = init_callbacks(args, summary_writer, model, viz_sampler)
strategy = Strategy(data_provider=data_provider,
train_loader_names=tuple(sampling_config['train']['data_provider'].keys()),
val_loader_names=tuple(sampling_config['eval']['data_provider'].keys()),
data_sampling_config=sampling_config,
loss=criterion,
model=model,
n_epochs=args.n_epochs,
optimizer=optimizer,
train_callbacks=callbacks['train'],
val_callbacks=callbacks['eval'],
device=device)
strategy.run()
|