blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5a7031394d020167401d415b01b1c26641cdcea8
|
3428fc46ae76d03220da813944421bbdf9631df5
|
/linkedin_bot.py
|
fd3cd922e259baeee79cb835bfd1b8a2fe379bfa
|
[] |
no_license
|
shakirshakeelzargar/tkinter
|
1d321c1c4153b22d61750f06f0eb20a7d4eabed7
|
0990071a2f18afc3853f04611bb3f3c31ccb0a26
|
refs/heads/master
| 2022-11-14T18:42:14.801119
| 2020-06-15T12:57:51
| 2020-06-15T12:57:51
| 272,438,329
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21,856
|
py
|
from selenium import webdriver
import time
import os
from urllib.parse import quote
import configparser
from random import randint
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
import tkinter as tk
from tkinter import *
from tkinter.ttk import *
from tkinter import scrolledtext,messagebox
import threading
import webbrowser
from bs4 import BeautifulSoup
# Chrome runs headless (no visible browser window) with driver log noise suppressed.
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument('--log-level=3')
chrome_options.add_experimental_option('excludeSwitches', ['enable-logging'])
# country="india"
# f=open("login.txt")
# login_details=[]
# for x in f.readlines():
# login_details.append(x)
# driver = webdriver.Chrome("chromedriver.exe")
def login():
    """Log into LinkedIn using the module-level username_text/password_text.

    Returns True on apparent success, False when the page title still
    contains "Login" (i.e. wrong credentials).
    NOTE(review): absolute XPaths break whenever LinkedIn changes its markup.
    """
    add_logs("Opening Linkedin")
    driver.get("https://www.linkedin.com/login")
    add_logs("Trying to Login")
    # Wait up to 20s for the username field to render before typing.
    element = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div/main/div/form/div[1]/input")))
    driver.find_element_by_xpath("/html/body/div/main/div/form/div[1]/input").send_keys(username_text)
    driver.find_element_by_xpath("/html/body/div/main/div/form/div[2]/input").send_keys(password_text)
    driver.find_element_by_xpath("/html/body/div/main/div/form/div[3]/button").click()
    if "Login" in driver.title:
        print("\n##############################################")
        print("#######LOGIN FAILED WRONG EMAIL OR PASS#######")
        print("##############################################")
        return False
    else:
        print("Login Successful")
        return True
def goto_connections():
    """Navigate directly to the "My Network -> Connections" page.

    Direct URL navigation replaces the older menu-clicking flow.
    """
    add_logs("Opening Connections")
    driver.get("https://www.linkedin.com/mynetwork/invite-connect/connections/")
def goto_filters():
    """From the connections page, open the search view and its "All filters" dialog.

    NOTE(review): both absolute XPaths are brittle; expect them to break on
    any LinkedIn layout change.
    """
    add_logs("Opening Filters")
    search_with_filters = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[5]/div[5]/div[3]/div/div/div/div/div/div/div/div/section/div/div[2]/a")))
    # JS click avoids "element not interactable" errors on overlaid elements.
    driver.execute_script("arguments[0].click();", search_with_filters)
    all_filters = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[5]/div[5]/div[3]/div/div[1]/header/div/div/div[2]/button/span")))
    driver.execute_script("arguments[0].click();", all_filters)
def _apply_text_filter(xpath, value, label):
    """Type *value* into the filter input at *xpath* and accept the first autocomplete suggestion."""
    field = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, xpath)))
    field.send_keys(value)
    time.sleep(5)  # wait for the autocomplete dropdown to populate
    field.send_keys(Keys.DOWN)
    field.send_keys(Keys.RETURN)
    add_logs("Applied " + label + " Filter--" + str(value))


def apply_filters():
    """Fill LinkedIn's "All filters" dialog from the saved module-level filter values.

    Each non-empty filter (region, industries, company, school) is typed into
    its input and the first suggestion accepted, then the dialog is applied.
    The four near-identical blocks of the original are factored into
    _apply_text_filter; behavior and log messages are unchanged.
    """
    add_logs("Applying Filters")
    if len(region_text) > 0:
        _apply_text_filter("/html/body/div[4]/div/div/div[2]/div/div[1]/ul/li[3]/form/div/fieldset/ol/li[1]/div/div/input",
                           region_text, "Region")
    if len(industries_text) > 0:
        # industries_text is a list -- apply each selected industry in turn.
        for industry in industries_text:
            _apply_text_filter("/html/body/div[4]/div/div/div[2]/div/div[1]/ul/li[6]/form/div/fieldset/ol/li[1]/div/div/input",
                               industry, "Industry")
    if len(current_companies_text) > 0:
        _apply_text_filter("/html/body/div[4]/div/div/div[2]/div/div[1]/ul/li[4]/form/div/fieldset/ol/li[1]/div/div/input",
                           current_companies_text, "Company")
    if len(schools_text) > 0:
        _apply_text_filter("/html/body/div[4]/div/div/div[2]/div/div[1]/ul/li[8]/form/div/fieldset/ol/li[1]/div/div/input",
                           schools_text, "School")
    # Press the dialog's "Apply" button via JS click.
    apply = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[4]/div/div/div[1]/div/div[2]/button[2]/span")))
    driver.execute_script("arguments[0].click();", apply)
    driver.implicitly_wait(7)
def scroll():
    """Scroll the page (third, bottom, half) to force lazy-loaded results to render."""
    for script in (
        "window.scrollTo(0, document.body.scrollHeight / 3);",
        "window.scrollTo(0, document.body.scrollHeight);",
        "window.scrollTo(0, document.body.scrollHeight / 2);",
    ):
        driver.execute_script(script)
        time.sleep(3)
def send_message():
    """Page through the filtered search results and send message_text to each person.

    Relies on module globals driver and message_text.  Keeps paging until no
    enabled "next" button is found in the pagination footer.
    NOTE(review): the file's indentation was lost upstream; the placement of
    the n increments around the fallback except-branch is a best-effort
    reconstruction -- confirm against a known-good copy.
    """
    scroll()
    nextt=True
    page=1
    while nextt==True:
        add_logs("Going to page "+ str(page))
        if page>1:
            next_page = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[5]/div[5]/div[3]/div/div[2]/div/div[2]/div/div/div/div/div[1]/artdeco-pagination/button[2]")))
            driver.execute_script("arguments[0].click();", next_page)
            scroll()
            scroll()
        # WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[5]/div[5]/div[3]/div/div[2]/div/div[2]/div/div/div/div")))
        # driver.execute_script("window.scrollTo(0, document.body.scrollHeight / 2);")
        # driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        n=0
        # One "Message" button per search result on the current page.
        people_list = driver.find_elements_by_xpath('//button[@class="message-anywhere-button search-result__actions--primary artdeco-button artdeco-button--default artdeco-button--2 artdeco-button--secondary"]')
        lenth_of_people=len(people_list)
        while n<lenth_of_people:
            # add_logs("sending message to page "+str(page)+" person" + str(n))
            # Re-query each iteration: the DOM changes after every message popup.
            people_list = driver.find_elements_by_xpath('//button[@class="message-anywhere-button search-result__actions--primary artdeco-button artdeco-button--default artdeco-button--2 artdeco-button--secondary"]')
            # print(len(people_list))
            one_person=people_list[n]
            # try:
            # name = driver.find_elements_by_xpath('.//span[@class="artdeco-pill__text"]')
            # # name=driver.find_element_by_css_selector('.artdeco-pill__text').text
            # name=name[0].text
            # print(name)
            # # print("Sending Message to "+str(name))
            # except Exception as ex:
            # print(ex)
            # pass
            driver.execute_script("arguments[0].click();", one_person)
            try:
                time.sleep(2)
                # Scrape the recipient's name out of the message popup for the log.
                html = driver.page_source
                page_soup=BeautifulSoup(html,"lxml")
                namee=page_soup.find("span","artdeco-pill__text")
                name_textt=str(namee.text).strip()
                add_logs("Sending message to "+"'"+name_textt+"'")
            except Exception as ex:
                print(ex)
            try:
                type_message = WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[5]/div[5]/aside/div[2]/div[1]/form/div[2]/div/div[1]/div[1]")))
                type_message.send_keys(message_text)
            except:
                # Fallback: locate the compose box by its CSS class instead.
                try:
                    type_message=driver.find_element_by_xpath("//div[contains(@class, 'msg-form__contenteditable t-14 t-black--light t-normal flex-grow-1 notranslate')]")
                    type_message.send_keys(message_text)
                except:
                    pass
                n+=1
                pass
            close = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[5]/div[5]/aside/div[2]/header/section[2]/button[2]")))
            driver.execute_script("arguments[0].click();", close)
            n+=1
        page+=1
        # nextt=check_exists_by_xpath("/html/body/div[5]/div[5]/div[3]/div/div[2]/div/div[2]/div/div/div/div/div[1]/artdeco-pagination/button[2]")
        try:
            # Parse the page source to see whether an enabled "next" button exists.
            html = driver.page_source
            page_soup=BeautifulSoup(html,"lxml")
            nxt=page_soup.find("button","artdeco-pagination__button artdeco-pagination__button--next artdeco-button artdeco-button--muted artdeco-button--icon-right artdeco-button--1 artdeco-button--tertiary ember-view")
            if nxt is not None:
                add_logs("Next page found")
                nextt=True
            else:
                add_logs("Next page not found. This is the last page.\n The program will stop")
                nextt=False
        except Exception as e:
            add_logs("Next page not found. This is the last page.\n The program will stop")
            nextt=False
def check_exists_by_xpath(xpath):
    """Return True when an element matching *xpath* exists on the current page."""
    try:
        driver.find_element_by_xpath(xpath)
        return True
    except NoSuchElementException:
        return False
def run():
    """Top-level bot workflow: login, open connections, filter, message everyone.

    Creates the global selenium driver (headless when the "Invisible" checkbox
    is ticked), then runs each stage with fixed 3s pauses between them.  Any
    exception re-enables the Start button and is written to the execution log.
    """
    try:
        start_popup.configure(text="Process Started.\n Check Execution logs!",fg='green')
        button_start_process.config(state="disabled")
        add_logs("********Process Started********")
        global driver
        if CheckVar1.get()==1:
            # "Invisible" checked: headless Chrome.
            driver = webdriver.Chrome(executable_path="chromedriver.exe",options=chrome_options)
        else:
            driver = webdriver.Chrome(executable_path="chromedriver.exe")
        try_login=login()
        time.sleep(3)
        if try_login==True:
            add_logs("Login Successful...")
            time.sleep(3)
            goto_connections()
            time.sleep(3)
            goto_filters()
            time.sleep(3)
            apply_filters()
            time.sleep(3)
            scroll()
            time.sleep(3)
            send_message()
            time.sleep(3)
            button_start_process.config(state="active")
        else:
            button_start_process.config(state="active")
            add_logs("Login Failed. Check your Credentials")
    except Exception as ex:
        button_start_process.config(state="active")
        add_logs("Error Occured. The erros is: \n"+str(ex))
# run()
def save_message():
    """GUI callback: validate and store the text typed into message_entry.

    Saves into the module-level message_text used later by send_message().
    """
    # ScrolledText.get always includes a trailing newline, so length <= 1 means empty.
    if len(message_entry.get(1.0, END))<=1:
        message_popup.configure(text="Enter Valid Message!",fg='red')
    else:
        global message_text
        message_text=str(message_entry.get(1.0, END))
        message_popup.configure(text="Successfully saved!",fg='green')
        add_logs("Message Saved")
def save_credentials():
    """GUI callback: store the username/password entries into module-level globals.

    Rejects the save (red popup) when either field is empty.
    """
    if len(username_entry.get())==0 or len(password_entry.get())==0:
        login_popup.configure(text="Enter Valid Credentials!",fg='red')
    else:
        global username_text
        global password_text
        username_text=str(username_entry.get())
        password_text=str(password_entry.get())
        add_logs("Login Credentials Saved")
        login_popup.configure(text="Successfully saved!",fg='green')
def save_filters():
    """GUI callback: copy any non-empty filter entries into the module-level filter strings."""
    if len(str(region_entry.get()))>0:
        global region_text
        region_text=str(region_entry.get())
    if len(str(schools_entry.get()))>0:
        global schools_text
        schools_text=str(schools_entry.get())
    if len(str(current_companies_entry.get()))>0:
        global current_companies_text
        current_companies_text=str(current_companies_entry.get())
    # NOTE(review): the success popup shows even when every entry was left empty.
    filter_popup.configure(text="Successfully Saved!",fg='green')
    add_logs("Filters Saved")
# def run():
# start_popup.configure(text="Process Started.\n Check Execution logs!",fg='green')
# button_start_process.config(state="disabled")
# add_logs("********Process Started********")
def add_logs(message):
    """Append *message* to the read-only execution-log widget and scroll to it."""
    # Temporarily enable the widget so text can be inserted, then lock it again.
    sctext.configure(state='normal')
    sctext.insert(tk.END, message + '\n')
    sctext.configure(state='disabled')
    # Autoscroll to the bottom
    sctext.yview(tk.END)
def submit():
    """Worker-thread entry point: disable the Start button, pause, then run the bot."""
    button_start_process.config(state="disabled")
    time.sleep(5) # put your stuff here
    run()
def start_submit_thread(event):
    """Launch submit() on a daemon thread so the Tk main loop stays responsive.

    *event* is unused; it exists so the function can double as a Tk event handler.
    """
    global submit_thread
    submit_thread = threading.Thread(target=submit)
    submit_thread.daemon = True
    # progressbar.start()
    submit_thread.start()
    # Poll the worker thread from the Tk event loop.
    root.after(20, check_submit_thread)
def check_submit_thread():
    """Re-schedule itself every 20ms while the worker thread is still running."""
    if not submit_thread.is_alive():
        return
    root.after(20, check_submit_thread)
def save_industries():
    """Menu-checkbutton callback: sync industries_text with the currently checked choices."""
    for name, var in choices.items():
        if var.get()==1:
            if name not in industries_text:
                industries_text.append(name)
        else:
            if name in industries_text:
                industries_text.remove(name)
    print(industries_text)
def callback(url):
    """Open *url* in the default web browser (used by the social-link buttons)."""
    webbrowser.open_new(url)
# t = threading.Thread(target=add_logs)
# t.start()
# Module-level state shared between the GUI callbacks and the bot workflow.
username_text=""
password_text=""
region_text=""
message_text=""
current_companies_text=""
schools_text=""
industries_text=[]
choices = {}
# Load the selectable industry names; the with-statement guarantees the file
# is closed even if readlines() raises (original used open/readlines/close).
with open("industries.txt","r") as f:
    x=f.readlines()
# ---- Window setup --------------------------------------------------------
# NOTE(review): a Toplevel is created without an explicit Tk() root; this
# presumably relies on a default root existing elsewhere -- confirm.
root = tk.Toplevel()
root.iconbitmap('icon.ico')
root.withdraw()
window = Toplevel(root)
window.protocol("WM_DELETE_WINDOW", root.destroy)
window.geometry('900x570')
window.title("LinkedIn Marketing Tool")
window.resizable(0,0)
window.iconbitmap('icon.ico')
# ---- Frames (green-bordered panels) --------------------------------------
header_frame=tk.Frame(window,bg="white",borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=60,width=890)
footer_frame=tk.Frame(window,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=20,width=890)
login_frame=tk.Frame(window,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=223,width=218)
message_frame=tk.Frame(window,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=223,width=218)
filter_frame=tk.Frame(window,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=223,width=440)
final_submit_frame=tk.Frame(window,bg="white",borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=60,width=445)
log_frame=tk.Frame(window,bg="white",borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=385,width=445)
log_title_frame=tk.Frame(log_frame,bg="white",borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=60,width=433)
# ---- Execution-log panel -------------------------------------------------
sctext=scrolledtext.ScrolledText(log_frame,bg='black',width=51,height=19,fg='white',state='disabled')
log_title_label=tk.Label(log_title_frame,text="Execution Log", font=("Arial Bold",20),bg='white',fg='green')
# ---- Login and message panels --------------------------------------------
message_title_frame=tk.Frame(message_frame,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=40,width=206)
login_title_frame=tk.Frame(login_frame,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=40,width=206)
message_title_label=tk.Label(message_title_frame,text="Enter Message", font=("Arial Bold",13),bg='white',fg='green')
login_title_label=tk.Label(login_title_frame,text="Login Credentials", font=("Arial Bold",13),bg='white',fg='green')
username_label=tk.Label(login_frame,text="LinkedIn Username:", font=("Arial ",10),bg='white',fg='green')
password_label=tk.Label(login_frame,text="LinkedIn Password:", font=("Arial ",10),bg='white',fg='green')
username_entry=tk.Entry(login_frame,width=30,bg='black',fg='white',insertbackground='white')
password_entry=tk.Entry(login_frame,show="*",width=30,bg='black',fg='white',insertbackground='white')
message_entry=scrolledtext.ScrolledText(message_frame,bg='black',width=23,height=7,fg='white',insertbackground='white')
button_login = tk.Button(login_frame, text=" Save ", fg="black", activebackground = "green",command=save_credentials)
button_message = tk.Button(message_frame, text=" Save ", fg="black", activebackground = "green",command=save_message)
message_popup=tk.Label(message_frame,text="", font=("Arial",8),bg='white')
login_popup=tk.Label(login_frame,text="", font=("Arial",8),bg='white')
# ---- Filter panel --------------------------------------------------------
filter_title_frame=tk.Frame(filter_frame,bg='white',borderwidth = 1,highlightcolor="green",highlightbackground="green",highlightthickness=2,height=40,width=425)
filter_title_label=tk.Label(filter_title_frame,text="Enter Search Filters", font=("Arial Bold",15),bg='white',fg='green')
region_label=tk.Label(filter_frame,text="Region:", font=("Arial ",10),bg='white',fg='green')
region_entry=tk.Entry(filter_frame,width=30,bg='black',fg='white',insertbackground='white')
button_filter = tk.Button(filter_frame, text=" Save ", fg="black", activebackground = "green",command=save_filters)
filter_popup=tk.Label(filter_frame,text="", font=("Arial",8),bg='white')
button_start_process = tk.Button(final_submit_frame, text=" Start Process ", fg="black", activebackground = "green",command=lambda:start_submit_thread(None))
start_popup=tk.Label(final_submit_frame,text="", font=("Arial",8),bg='white')
current_companies_label=tk.Label(filter_frame,text="Company:", font=("Arial ",10),bg='white',fg='green')
current_companies_entry=tk.Entry(filter_frame,width=30,bg='black',fg='white',insertbackground='white')
schools_label=tk.Label(filter_frame,text="School:", font=("Arial ",10),bg='white',fg='green')
schools_entry=tk.Entry(filter_frame,width=30,bg='black',fg='white',insertbackground='white')
# ---- Header: title and social-link buttons -------------------------------
title_label=tk.Label(header_frame,text="LinkedIn Messaging Bot", font=("Arial ",20),bg='white',fg='green')
title_label2=tk.Label(header_frame,text="By Shakir Shakeel", font=("Arial ",10),bg='white',fg='green')
# title_label3=tk.Label(header_frame,text="http://instagram.com/sshakirzargar", font=("Arial ",10),bg='white',fg='blue')
facebook=PhotoImage(file="facebook.png")
insta=PhotoImage(file="insta.png")
github=PhotoImage(file="github.png")
facebook_button=tk.Button(header_frame, text = 'Click Me !', image = facebook,bg='white')
insta_button=tk.Button(header_frame, text = 'Click Me !', image = insta,bg='white')
github_button=tk.Button(header_frame, text = 'Click Me !', image = github,bg='white')
# "Invisible" checkbox toggles headless Chrome in run().
CheckVar1 = IntVar()
check_button_invisible = tk.Checkbutton(final_submit_frame, text = "Invisible", variable = CheckVar1,onvalue = 1, offvalue = 0, height=2,width = 10,bg='white')
# Industry multi-select implemented as a Menubutton of checkbuttons.
industry_dropdown = tk.Menubutton(filter_frame, text="Choose Industries", indicatoron=True, borderwidth=1, relief="raised")
menu = tk.Menu(industry_dropdown, tearoff=False)
industry_dropdown.configure(menu=menu)
# ---- Menu bar (File/Edit entries are placeholders with no commands) ------
menubar = Menu(window)
window.config(menu=menubar)
# Create a menu button labeled "File" that brings up a menu
filemenu = Menu(menubar)
menubar.add_cascade(label='File', menu=filemenu)
filemenu.add_command(label='Print')
filemenu.add_command(label='Save')
filemenu.add_separator( )
filemenu.add_command(label='Quit' )
editmenu = Menu(menubar)
menubar.add_cascade(label='Edit', menu=editmenu)
editmenu.add_command(label='Undo')
editmenu.add_command(label='Redo')
editmenu.add_command(label='Clear' )
# ---- Absolute-position layout of every widget ----------------------------
header_frame.place(x=5,y=5)
footer_frame.place(x=5,y=525)
login_frame.place(x=5,y=70)
message_frame.place(x=227,y=70)
filter_frame.place(x=5,y=297)
final_submit_frame.place(x=450,y=460)
log_frame.place(x=450,y=70)
# log_title_frame.place(x=455,y=75)
log_title_frame.place(x=3,y=5)
sctext.place(x=5,y=70)
log_title_label.place(x=125,y=7)
message_title_frame.place(x=3,y=5)
login_title_frame.place(x=3,y=5)
message_title_label.place(x=40,y=6)
login_title_label.place(x=27,y=6)
message_entry.place(x=4,y=50)
username_label.place(x=45,y=60)
password_label.place(x=45,y=110)
username_entry.place(x=15,y=85)
password_entry.place(x=15,y=135)
button_login.place(x=75,y=165)
button_message.place(x=75,y=172)
message_popup.place(x=10,y=197)
login_popup.place(x=10,y=197)
filter_title_frame.place(x=5,y=5)
filter_title_label.place(x=110,y=3)
region_label.place(x=5,y=70)
region_entry.place(x=5,y=95)
button_filter.place(x=5,y=190)
filter_popup.place(x=320,y=195)
button_start_process.place(x=160,y=14)
check_button_invisible.place(x=50,y=11)
industry_dropdown.place(x=270,y=70)
# Populate the industry dropdown: one checkbutton per line of industries.txt.
for choice in x:
    choice=choice.replace("\n","")
    choices[choice] = tk.IntVar(value=0)
    menu.add_checkbutton(label=choice, variable=choices[choice], onvalue=1, offvalue=0, command=save_industries)
current_companies_label.place(x=5,y=130)
current_companies_entry.place(x=5,y=155)
schools_label.place(x=245,y=130)
schools_entry.place(x=245,y=155)
title_label.place(x=280,y=8)
title_label2.place(x=750,y=1)
# title_label3.place(x=660,y=30)
# title_label3.bind("<Button-1>", lambda e: callback("http://instagram.com/sshakirzargar"))
facebook_button.place(x=750,y=25)
facebook_button.bind("<Button-1>", lambda e: callback("http://facebook.com/sshakirshakeel"))
insta_button.place(x=790,y=25)
insta_button.bind("<Button-1>", lambda e: callback("http://instagram.com/sshakirzargar"))
github_button.place(x=830,y=25)
github_button.bind("<Button-1>", lambda e: callback("http://github.com/shakirshakeelzargar"))
# Enter the Tk event loop; blocks until the window is closed.
window.mainloop()
|
[
"shakir.s@softcrylic.co.in"
] |
shakir.s@softcrylic.co.in
|
ec11bb3617b1bf1e5798aba01cf98d6ef1902aaa
|
559dad409652172c4052749354dcb1f2f178f9c3
|
/day01/my_regression.py
|
9b5a932f63517fcefbb3ec5bd3ec318aac4604b8
|
[] |
no_license
|
panghu96/tensorflow
|
5bb138e18212f2e69703b81bd91e0b8efa08f5cb
|
5ffdc28012f95c6b12d3045f506fad27e4cbb15d
|
refs/heads/master
| 2023-07-08T04:01:48.810876
| 2021-08-10T08:13:49
| 2021-08-10T08:13:49
| 394,577,385
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,580
|
py
|
"""
自定义实现线性回归
"""
import tensorflow as tf
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
def regression():
    """Train y = w*x + b by gradient descent on synthetic data (TF 1.x graph API).

    Builds the graph inside named variable scopes (for TensorBoard grouping),
    restores w/b from a checkpoint, runs 500 gradient-descent steps while
    writing summaries, then saves the model again.  Comments translated from
    the original Chinese.
    """
    # Create a named variable scope.
    with tf.variable_scope("data"):
        # 1. Generate random data; x and y must both be matrices.
        x = tf.random_normal([100, 1], mean=1.70, stddev=0.50, name='x_data')
        # Matrix multiply to build the target: y = wx + b
        y = tf.matmul(x, [[0.7]]) + 0.9
    with tf.variable_scope("train"):
        # 2. Randomly initialize w and b.  Both must be Variables; trainable
        #    defaults to True, i.e. they are updated by gradient descent.
        w = tf.Variable(tf.random_normal([1, 1]), name='w', trainable=True)
        b = tf.Variable(tf.random_normal([1]), name='b')
        # Op that explicitly initializes all variables.
        init_op = tf.global_variables_initializer()
    with tf.variable_scope("loss"):
        # 3. Mean-squared-error loss.
        y_predict = tf.matmul(x, w) + b
        sqr = tf.square(y - y_predict)
        loss = tf.reduce_mean(sqr, name='loss')
    with tf.variable_scope("optimizer"):
        # 4. Gradient descent to minimize the loss; learning_rate is usually in (0, 1).
        train_op = tf.train.GradientDescentOptimizer(learning_rate=0.05).minimize(loss)
    # Collect tensors for TensorBoard.
    tf.summary.scalar("losses", loss)    # collect a scalar
    tf.summary.histogram("weight", w)    # collect a higher-dimensional variable
    # Merge all summaries into one op.
    merged = tf.summary.merge_all()
    # Saver for checkpointing; argument lists the variables to save/restore.
    saver = tf.train.Saver([w, b])
    # Run the graph in a session.
    with tf.Session() as sess:
        # Run the initialization op.
        sess.run(init_op)
        # Restore the variables.  NOTE(review): assumes a checkpoint already
        # exists at this hard-coded path; restore fails otherwise.
        saver.restore(sess, '/home/sun/py_code/py36/deep_learning/day01/tmp/ckpt/model')
        # Event-file writer for TensorBoard.
        filewriter = tf.summary.FileWriter('/home/sun/py_code/py36/deep_learning/day01/tmp/summary/test', graph=sess.graph)
        print('初始权重为:%f,偏置为:%f,损失为:%f' % (w.eval(), b.eval(), loss.eval()))
        # Iterate to drive the loss down.
        for i in range(500):
            sess.run(train_op)
            # Run the merged summary op and write the event file entry.
            summary = sess.run(merged)
            # i is the global step for this summary.
            filewriter.add_summary(summary, i)
            print('第%d次梯度下降后权重为:%f,偏置为:%f,损失为:%f' % (i, w.eval(), b.eval(), loss.eval()))
        # Save the trained model.
        saver.save(sess, '/home/sun/py_code/py36/deep_learning/day01/tmp/ckpt/model')
    return None


if __name__ == '__main__':
    regression()
|
[
"1062571616@qq.com"
] |
1062571616@qq.com
|
b82426fd99ef70095177e8a4947758a3ae5c3025
|
08bbb0f4f072c85665a14a573b405a2e40e01221
|
/baekjoon/0329/12967.py
|
5bab03ea632d1cfc8367b2fc18a3f83ee86ee567
|
[] |
no_license
|
Gyutae-Jo/algorithm
|
db8b905a08bc8dddaaab6a143e5fefd477981536
|
25e9d4bf5a1ec1f84e5f8066169dc792da0ab845
|
refs/heads/master
| 2023-05-24T15:04:45.839566
| 2021-06-09T05:06:56
| 2021-06-09T05:06:56
| 332,739,387
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,181
|
py
|
# # 12976 - pqr
# import sys
# N_K = sys.stdin.readline().split()
# N = int(N_K[0])
# K = int(N_K[1])
# # N, K = map(int, input().split())
# # arr = list(map(int, input().split()))
# arr = sys.stdin.readline().split()
# cnt = 0
# total = 1
# for i in range(N):
# total *= int(arr[i])
# if total % K == 0:
# for p in range(N-2):
# if int(arr[p]) % K == 0:
# cnt += (N - (p+1)) * (N - (p+1) - 1) // 2
# continue
# for q in range(p+1, N-1):
# if (int(arr[p]) * int(arr[q])) % K == 0:
# cnt += (N - (q+1))
# continue
# for r in range(q+1, N):
# if (int(arr[p])*int(arr[q])*int(arr[r])) % K == 0:
# cnt += 1
# print(cnt)
# else:
# print(0)
# Read N (array length) and K (target divisor), then the N array elements.
N, K = map(int, input().split())
A = list(map(int, input().split()))
# Prime-factorize K (comment translated from Korean).
K_clone = K
# Prime sieve (reference: https://ko.wikipedia.org/wiki/%EC%97%90%EB%9D%BC%ED%86%A0%EC%8A%A4%ED%85%8C%EB%84%A4%EC%8A%A4%EC%9D%98_%EC%B2%B4)
def prime_list(n):
    """Return all primes <= n via the sieve of Eratosthenes."""
    # Every index starts out assumed prime; composites get knocked out below.
    is_prime = [True] * (n + 1)
    # Only candidates up to sqrt(n) need checking, since any composite <= n
    # has a factor no larger than sqrt(n).
    limit = int(n ** 0.5)
    for candidate in range(2, limit + 1):
        if not is_prime[candidate]:
            continue
        # Mark every multiple of this prime (starting at 2*candidate) composite.
        for multiple in range(candidate * 2, n + 1, candidate):
            is_prime[multiple] = False
    # Gather the surviving indices from 2 upward.
    return [p for p in range(2, n + 1) if is_prime[p]]
# Trial-divide K_clone by each prime to build {prime: exponent} for K.
K_dict = {}
for i in prime_list(K_clone):
    while True:
        if K_clone >= i and K_clone % i == 0:
            K_clone //= i
            K_dict[i] = K_dict.get(i, 0) + 1
        else:
            break
    # Fully factorized -- nothing left to divide.
    if K_clone == 1:
        break
# print(prime_list(K))
# print(K_dict)
# For each element of A, count how many of K's prime factors it contains
# (summed over all primes, with multiplicity).  Comment translated from Korean.
A_list = []
# Hoisted out of the loops: the original rebuilt list(K_dict.keys()) on every
# single access inside the innermost while-loop.
primes = list(K_dict.keys())
for a in A:
    a_list = [0] * len(primes)
    for i in range(len(primes)):
        # Divide out primes[i] repeatedly, counting the multiplicity.
        while a >= primes[i] and a % primes[i] == 0:
            a //= primes[i]
            a_list[i] += 1
    A_list.append(sum(a_list))
cnt = 0
# print(A_list)
# Pair each element with its prime-factor count, then sort by that count
# descending so factor-rich elements come first.
New_A = []
for i in range(N):
    New_A.append((A[i], A_list[i]))
# print(New_A)
A_New = sorted(New_A, key = lambda x : (-x[1]))
# print(A_New)
##############################
# Count index triples p<q<r whose product is divisible by K, short-circuiting:
# if a prefix product is already divisible, all completions count at once.
cnt = 0
total = 1
for i in range(N):
    total *= A_New[i][0]
# If even the full product isn't divisible by K, no triple can be.
if total % K == 0:
    for p in range(N-2):
        if A_New[p][0] % K == 0:
            # Any (q, r) completes this p: C(N-p-1, 2) triples.
            cnt += (N - (p+1)) * (N - (p+1) - 1) // 2
            continue
        for q in range(p+1, N-1):
            if (A_New[p][0] * A_New[q][0]) % K == 0:
                # Any r completes this (p, q).
                cnt += (N - (q+1))
                continue
            for r in range(q+1, N):
                if (A_New[p][0]*A_New[q][0]*A_New[r][0]) % K == 0:
                    cnt += 1
    print(cnt)
else:
    print(0)
##############################
# A_list = sorted(A_list, key=lambda x: [-x[i] for i in range(len(A_list[0]))])
# K_list = list(K_dict.values())
# # print(K_list)
# # 만족하는지 함수 정의
# def my_func(a, b):
# for i in range(len(a)):
# if a[i] < b[i]:
# return False
# else:
# return True
# # 리스트 합 구하기
# def list_sum(a, b):
# temp = []
# for i in range(len(a)):
# temp.append(a[i] + b[i])
# return temp
# # 중간에 만족하면 멈추는걸 해야할거같음.
# for p in range(N-2):
# temp = A_list[p]
# if my_func(temp, K_list):
# cnt += (N-1-p)*(N-1-p-1)//2
# else:
# for q in range(p+1, N-1):
# temp1 = list_sum(temp, A_list[q])
# if my_func(temp1, K_list):
# cnt += N-1-q
# else:
# for r in range(q+1, N):
# temp2 = list_sum(temp1, A_list[r])
# if my_func(temp2, K_list):
# cnt += 1
# print(cnt)
|
[
"ckt94@khu.ac.kr"
] |
ckt94@khu.ac.kr
|
cd035662a44e51d9cae8ad9af22afc792b3220dc
|
9db3423c0c0b460eed51421857e207f5accad2a6
|
/scraper.py
|
9cc9ce0c4675389d961d2fc4dd41f45ac090e4c9
|
[] |
no_license
|
nathanleiby/bayes-thorn
|
29322cf827b8faadf96c97a1cbd80c298919596d
|
b7d565c61009aaf442dbee389b57d328b84c52fa
|
refs/heads/master
| 2020-12-25T19:15:00.012537
| 2014-11-16T19:37:13
| 2014-11-16T19:37:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,408
|
py
|
import os
import pandas as pd
from urlparse import urlparse
import urllib2
import extract_prices
from bs4 import BeautifulSoup
os.chdir("/Users/danfrankj/src/bayes-thorn")
df = pd.read_csv('escort_all_sampled.csv')
df['postAge'] = df['postAge'].convert_objects(convert_numeric=True)
def parse_backpage_url(url, seen_urls=None):
    """Recursively crawl all posts by the user behind *url*.

    Returns a dict of parallel lists: url_cities, post_ages, post_texts,
    post_links, post_prices.  *seen_urls* accumulates visited URLs to stop
    cycles.  Fix: the original declared ``seen_urls=[]`` -- a mutable default
    shared across independent top-level calls, so a second crawl silently
    skipped every URL the first crawl had seen.  (Python 2 code: urllib2,
    urlparse, dict.iteritems.)
    """
    if seen_urls is None:
        seen_urls = []
    url_parsed = urlparse(url)
    # Subdomain encodes the city, e.g. "boston" in boston.backpage.com.
    url_city = url_parsed.netloc.split('.')[0]
    html = urllib2.urlopen(url)
    soup = BeautifulSoup(html)
    post_text = soup.find("div", {"class": "postingBody"}).get_text().strip()
    # Age appears after a colon in the meta-info paragraph.
    post_age = float(soup.find('p', {'class': 'metaInfoDisplay'}).get_text().split(':')[1].strip())
    post_links = soup.find('div', id='OtherAdsByThisUser').findAll('a', {'class': ''})
    post_links = [link['href'] for link in post_links]
    prices = extract_prices.get_prices(post_text)
    seen_urls.append(url)
    stats = {'url_cities': [url_city],
             'post_ages': [post_age],
             'post_texts': [post_text],
             'post_links': post_links,
             'post_prices': prices}
    # Recurse into every unseen linked post and merge its lists into ours.
    for post_link in post_links:
        if post_link in seen_urls:
            continue
        new_stats = parse_backpage_url(post_link, seen_urls=seen_urls)
        seen_urls.append(post_link)
        for k, v in new_stats.iteritems():
            stats[k] = stats[k] + new_stats[k]
    return stats
|
[
"danfrankj@gmail.com"
] |
danfrankj@gmail.com
|
8d47ee8cce3c52d924a9a410f1ad5f3b69c62326
|
11ef95494e2ca425a6ffb4cf389dd4e78d1ba494
|
/game_engine/types/size.py
|
16413d3c575204f9ba2d84104e36209c6f57b276
|
[] |
no_license
|
dungeontiger/gutlicsArena
|
011c66887982af5ae04a97cf45a0d75c83671d98
|
ebf2a584cd87a3418639388425f4f65032c4265f
|
refs/heads/master
| 2020-12-09T23:09:10.760397
| 2020-03-01T16:31:56
| 2020-03-01T16:31:56
| 233,442,712
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 80
|
py
|
import enum


class Size(enum.Enum):
    """Discrete size categories, ordered smallest to largest by value."""
    SMALL = 0
    MEDIUM = 1
    LARGE = 2
|
[
"Stephen.d.gibson@gmail.com"
] |
Stephen.d.gibson@gmail.com
|
6412d5edf654881f4c03fb542c80c4d66fd3c408
|
d1a85e7907e43f13c9f28142671b7268e9d6b418
|
/context_processors.py
|
b3914385312ec4766a018b475e9803bb63c5cede
|
[] |
no_license
|
ezl/squareone
|
b288cd9922006365f4c25ff824fbe403d76b7469
|
5a59f9845859a975e2fee2fb9e51b3c0746ab62d
|
refs/heads/master
| 2020-05-30T16:58:29.373604
| 2011-04-04T01:15:42
| 2011-04-04T01:15:42
| 1,335,525
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 122
|
py
|
from django.contrib.sites.models import RequestSite
def request_site(request):
    """Django context processor: expose the current site to templates as ``site``."""
    site = RequestSite(request)
    return dict(site=site)
|
[
"ericzliu@gmail.com"
] |
ericzliu@gmail.com
|
4a01c4de0b7f502cd3a3b7636430d6c45cbdb9b6
|
ad7debb0ecd87597dbcd5083d0fe94fc8d88f7fa
|
/noobnews/tests.py
|
dd1a60d6150e279fd0c2fa5ede4622d22a851d1b
|
[] |
no_license
|
EndaMcVey/NoobNews
|
0bc6525080457251c6667b724f313bf99d543a5a
|
f4cccfead7926add4d2eeb55f6b2b6bdadc57bdc
|
refs/heads/master
| 2022-12-11T01:34:17.886959
| 2019-05-24T12:21:59
| 2019-05-24T12:21:59
| 188,415,101
| 0
| 1
| null | 2022-12-08T01:46:09
| 2019-05-24T12:05:20
|
CSS
|
UTF-8
|
Python
| false
| false
| 5,680
|
py
|
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.files.storage import default_storage
from django.core.urlresolvers import reverse
from noobnews.models import VideoGame, VideoGameList, Genre, ratingValue, User, UserProfile, Review
import noobnews.test_utils as test_utils
import populate_noobnews
import datetime
import os
# Check if both arrays are equal
# Check if both arrays are equal
def checkEqualArray(arr1, arr2):
    """Return True when both sequences contain the same items, ignoring order."""
    if len(arr1) != len(arr2):
        return False
    return sorted(arr1) == sorted(arr2)
class PopulationScriptTests(TestCase):
# Check if all the data is populated in the database
    def test_right_amount_data(self):
        """Populate the database and sanity-check the row count of each model.

        NOTE(review): assertGreaterEqual(a, b) asserts a >= b, so as written
        these check the counts do NOT exceed the literals (e.g. 55 >= count).
        Confirm that direction is intended -- the name suggests "at least".
        """
        # Populate database
        populate_noobnews.populate()
        # Get the number of videogames from the database
        number_videogames = VideoGame.objects.count()
        self.assertGreaterEqual(55, number_videogames)
        # Get the number of genres from the database
        number_genres = Genre.objects.count()
        self.assertGreaterEqual(5, number_genres)
        # Get the number of rating values from the database
        number_rating_values = ratingValue.objects.count()
        self.assertEqual(5, number_rating_values)
        # Get the number of user profiles from the database
        number_user_profiles = UserProfile.objects.count()
        self.assertGreaterEqual(7, number_user_profiles)
        # Get the number of reviews from the database
        number_reviews = Review.objects.count()
        self.assertGreaterEqual(64, number_reviews)
def test_right_values_data(self):
# Populate database
populate_noobnews.populate()
# Get a videogame to check if it has the correct values
videogame = VideoGame.objects.get(name="Uncharted 4: A Thief's End")
self.assertEqual(videogame.id, 8)
self.assertEqual(videogame.genre.genre_id, 2000)
self.assertEqual(videogame.rating, 5)
self.assertEqual(videogame.release, datetime.date(2016, 5, 10))
self.assertEqual(videogame.developer, "Naughty Dog")
self.assertEqual(videogame.publisher, "Sony Computer Entertainment")
self.assertEqual(
videogame.image, "/static/videogameImages/uncharted4.jpg")
# Get the rating values to check if they are correct
rating_values = ratingValue.objects.all().values_list(flat=True)
self.assertEqual(checkEqualArray(rating_values, [1, 2, 3, 4, 5]), True)
def test_data_insertion(self):
# Create a user
user, user_profile = test_utils.create_user()
# Check there is only the saved user and its profile in the database
all_users = User.objects.all()
self.assertEquals(len(all_users), 1)
all_profiles = UserProfile.objects.all()
self.assertEquals(len(all_profiles), 1)
# Check profile fields were saved correctly
all_profiles[0].user = user
all_profiles[0].player_tag = user_profile.player_tag
# Create a videogame
videogame = test_utils.create_videogame()
# Check there is only the saved videogame
all_videogames = VideoGame.objects.all()
self.assertEquals(len(all_videogames), 1)
# Check if the videogames library is created successfully
videogame_list = test_utils.create_videogames_library(
all_profiles[0], videogame)
# Check there is only one videogame in the videogames library
user_videogames = videogame_list.userLibrary.all()
self.assertEquals(len(user_videogames), 1)
#Check if the review is created successfully
review=test_utils.create_review(all_profiles[0], videogame)
#Check the is only one review for the videogame
reviews = Review.objects.all()
self.assertEquals(len(reviews), 1)
def test_upload_image(self):
# Create fake user and image to upload to register user
image = SimpleUploadedFile(
"testvideogame.jpg", b"file_content", content_type="image/jpeg")
videogame = test_utils.create_videogame(image)
videogame = VideoGame.objects.get(id=56)
path_to_image = './media/testvideogame.jpg'
# Check file was saved properly
self.assertTrue(os.path.isfile(path_to_image))
# Delete fake file created
os.remove(path_to_image)
class NavigationTests(TestCase):
    """Integration tests for login redirect and videogame detail rendering."""

    def test_login_redirects_to_profile(self):
        """POSTing valid credentials to the login view redirects to profile."""
        # Create a user with a videogame library so the profile page renders.
        user, user_profile = test_utils.create_user()
        videogame = test_utils.create_videogame()
        videogame_list = test_utils.create_videogames_library(
            user_profile, videogame)
        # Call the view directly: the original wrapped this in
        # try/except Exception + assertTrue(False), which swallowed the real
        # traceback and reported every failure as a bare AssertionError.
        # Letting exceptions propagate gives the full diagnostic on failure.
        response = self.client.post(
            reverse('login'),
            {'mail': 'testuser@testuser.com', 'password': 'test1234'},
            follow=True)
        # Check it redirects to profile
        self.assertRedirects(response, reverse('profile'))

    def test_home_redirects_to_videogame(self):
        """The videogame detail page shows the expected description text."""
        # Populate database
        populate_noobnews.populate()
        # Access videogame page via GET with videogame data (see note above
        # about not masking exceptions).
        response = self.client.get(
            reverse('show_videogame',
                    kwargs={'videogame_name_slug': 'spider-man'}))
        # Check it get the correct data for the videogame Spider-Man
        self.assertIn('the super-human crime lord Mr. Negative orchestrates a plot to seize control of New York City'.lower(),
                      response.content.decode('ascii').lower())
|
[
"endamcvey@hotmail.com"
] |
endamcvey@hotmail.com
|
0e6f8a6a31f748ba3519f11636a59f0ac76ce07d
|
6970ad9dfec3a2b3d00675b12dcd73b19e2d6f8f
|
/notebooks/model.py
|
eef4f6e208b1355e69bfdc5081b5b72ccbdd16b1
|
[] |
no_license
|
seanjparker/knowledge-distillation
|
e0ac709e312948f7fd0f86928efcfe0813d4411b
|
63812cc9efe6ccf3d234bc8592945779757afe59
|
refs/heads/master
| 2023-02-21T18:33:44.661764
| 2021-01-16T02:32:12
| 2021-01-16T02:32:12
| 323,975,006
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,221
|
py
|
import torch
from torch import nn
from torch.nn import functional as F
class BasicBlock(nn.Module):
    """Two-conv residual block (ResNet-18/34 style), expansion factor 1.

    Output channels = planes * expansion; the first conv may downsample
    spatially via ``stride``.
    """

    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        # First 3x3 conv: may change both channel count and resolution.
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        # Second 3x3 conv: preserves shape.
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # Identity shortcut, replaced by a 1x1 projection whenever the
        # residual path changes resolution or channel count.
        needs_projection = stride != 1 or in_planes != self.expansion * planes
        if needs_projection:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes),
            )
        else:
            self.shortcut = nn.Sequential()

    def forward(self, x):
        """Apply conv-bn-relu, conv-bn, add the shortcut, and ReLU again."""
        residual = self.shortcut(x)
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        return F.relu(out + residual)
class ResNet(nn.Module):
    """Generic ResNet backbone built from a residual ``block`` class.

    ``num_blocks`` gives the block count for each of the four stages;
    stages 2-4 halve the spatial resolution. Designed for 32x32 inputs
    (CIFAR-style): the final 4x4 average pool reduces to 1x1.
    """

    def __init__(self, block, num_blocks, num_classes=10, in_dims=3):
        super(ResNet, self).__init__()
        self.in_planes = 64
        # Stem: 3x3 conv keeping the input resolution.
        self.conv1 = nn.Conv2d(in_dims, 64, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        # Four residual stages with increasing width.
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512 * block.expansion, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        """Build one stage: the first block applies ``stride``, the rest 1."""
        layers = []
        for s in [stride] + [1] * (num_blocks - 1):
            layers.append(block(self.in_planes, planes, s))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return class logits for a batch of images."""
        out = F.relu(self.bn1(self.conv1(x)))
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            out = stage(out)
        out = F.avg_pool2d(out, 4)
        return self.linear(out.view(out.size(0), -1))
class Student(nn.Module):
    """Small CNN student for knowledge distillation (32x32 RGB inputs).

    forward() returns a pair of log-softmax outputs:
    (temperature-scaled "soft" targets, unscaled "hard" targets).
    """

    def __init__(self, temperature):
        self.temperature = temperature  # softening temperature for distillation
        super(Student, self).__init__()
        self.temperature = temperature
        self.cnn1 = nn.Conv2d(3, 32, kernel_size=(3, 3))
        self.bn1 = nn.BatchNorm2d(32)
        self.mp1 = nn.MaxPool2d(kernel_size=(2, 2))
        self.cnn2 = nn.Conv2d(32, 64, kernel_size=(3, 3))
        self.bn2 = nn.BatchNorm2d(64)
        self.mp2 = nn.MaxPool2d(kernel_size=(2, 2))
        self.fc1 = nn.Linear(2304, 128)  # 2304 = 64 channels * 6 * 6 for 32x32 input
        self.fc2 = nn.Linear(128, 64)
        self.fc3 = nn.Linear(64, 10)

    def forward(self, x):
        """Return (soft_target, hard_target) log-probabilities for batch x."""
        x = self.mp1(F.relu(self.bn1(self.cnn1(x))))
        x = self.mp2(F.relu(self.bn2(self.cnn2(x))))
        x = x.view(x.size(0), -1)  # Flatten
        # BUGFIX: functional F.dropout defaults to training=True, so dropout
        # stayed active even after model.eval(), making inference
        # non-deterministic. Pass training=self.training to respect the
        # module's train/eval mode.
        x = F.dropout(F.relu(self.fc1(x)), p=0.2, training=self.training)
        x = F.dropout(F.relu(self.fc2(x)), p=0.2, training=self.training)
        x = self.fc3(x)
        soft_target = F.log_softmax(x / self.temperature, dim=1)
        hard_target = F.log_softmax(x, dim=1)
        return soft_target, hard_target
class Assistant(nn.Module):
    """Mid-sized CNN teaching assistant for distillation (32x32 inputs).

    forward() returns (temperature-scaled soft targets, hard targets),
    both as log-softmax outputs.
    """

    def __init__(self, temperature, in_dims):
        super(Assistant, self).__init__()
        self.temperature = temperature  # softening temperature for distillation
        self.cnn1 = nn.Conv2d(in_dims, 32, kernel_size=(3, 3))
        self.bn1 = nn.BatchNorm2d(32)
        self.cnn2 = nn.Conv2d(32, 64, kernel_size=(3, 3))
        self.bn2 = nn.BatchNorm2d(64)
        self.mp2 = nn.MaxPool2d(kernel_size=(2, 2))
        self.cnn3 = nn.Conv2d(64, 128, kernel_size=(3, 3))
        self.bn3 = nn.BatchNorm2d(128)
        self.mp3 = nn.MaxPool2d(kernel_size=(2, 2))
        self.cnn4 = nn.Conv2d(128, 256, kernel_size=(3, 3))
        self.bn4 = nn.BatchNorm2d(256)
        self.mp4 = nn.MaxPool2d(kernel_size=(2, 2))
        self.fc1 = nn.Linear(1024, 512)  # 1024 = 256 channels * 2 * 2 for 32x32 input
        self.fc2 = nn.Linear(512, 64)
        self.fc3 = nn.Linear(64, 10)

    def forward(self, x):
        """Return (soft_target, hard_target) log-probabilities for batch x."""
        x = F.relu(self.bn1(self.cnn1(x)))
        x = self.mp2(F.relu(self.bn2(self.cnn2(x))))
        x = self.mp3(F.relu(self.bn3(self.cnn3(x))))
        x = self.mp4(F.relu(self.bn4(self.cnn4(x))))
        x = x.view(x.size(0), -1)  # Flatten
        # BUGFIX: functional F.dropout defaults to training=True, leaving
        # dropout active in eval mode; pass training=self.training so
        # inference is deterministic after model.eval().
        x = F.dropout(F.relu(self.fc1(x)), p=0.2, training=self.training)
        x = F.dropout(F.relu(self.fc2(x)), p=0.2, training=self.training)
        x = self.fc3(x)
        soft_target = F.log_softmax(x / self.temperature, dim=1)
        hard_target = F.log_softmax(x, dim=1)
        return soft_target, hard_target
# Softmax with temperature
# -- Adapted from PyTorch Softmax layer
# -- See: https://pytorch.org/docs/stable/_modules/torch/nn/modules/activation.html#Softmax
class SoftmaxT(nn.Module):
    """Log-softmax layer that divides its input by a fixed temperature."""

    def __init__(self, temperature, dim=1) -> None:
        super(SoftmaxT, self).__init__()
        self.temperature = temperature  # divisor applied before log-softmax
        self.dim = dim                  # axis the softmax normalises over

    def __setstate__(self, state):
        # Restore pickled state; older pickles may lack ``dim``.
        self.__dict__.update(state)
        if not hasattr(self, 'dim'):
            self.dim = None

    def forward(self, in_data):
        """Return log-softmax of ``in_data / temperature`` along ``dim``."""
        scaled = in_data / self.temperature
        return F.log_softmax(scaled, self.dim)

    def extra_repr(self) -> str:
        return 'dim={dim}'.format(dim=self.dim)
def create_student(device, temperature):
    """Build a Student model with the given distillation temperature on *device*."""
    return Student(temperature).to(device)
def create_teacher(device, temperature, teacher_state_dict_path=None, in_dims=3):
    """Build the teacher: a 4-stage ResNet of BasicBlocks followed by a
    temperature-scaled log-softmax head.

    If ``teacher_state_dict_path`` is given, pretrained weights are loaded
    into the backbone (mapped onto *device*) before wrapping.
    """
    teacher_model = ResNet(BasicBlock, [2, 2, 2, 2], in_dims=in_dims).to(device)
    if teacher_state_dict_path is not None:
        teacher_model.load_state_dict(torch.load(teacher_state_dict_path, map_location=device))
    return torch.nn.Sequential(
        teacher_model,
        SoftmaxT(temperature)
    ).to(device)
def create_assistant(device, temperature, state_dict_path=None, in_dims=3):
    """Build an Assistant model, optionally loading pretrained weights
    from ``state_dict_path`` (mapped onto *device*)."""
    assistant_model = Assistant(temperature, in_dims).to(device)
    if state_dict_path is not None:
        assistant_model.load_state_dict(torch.load(state_dict_path, map_location=device))
    return assistant_model
|
[
"sean.parker067@gmail.com"
] |
sean.parker067@gmail.com
|
90bbd64df5c967ac7d128d777a73aca4f34eb441
|
b0cf0e020a097382700721db025463554eb4742f
|
/logger.py
|
6932d8dfcc465faff48981c678ea0d50282cc101
|
[] |
no_license
|
oslab-ewha/blkanal
|
82887eb5d9f8f4d541daa35aa0539b30ebd11356
|
10126802030efd9281b56f4752fe73e0b73f09ca
|
refs/heads/master
| 2023-02-18T03:17:47.302585
| 2020-12-23T04:12:42
| 2020-12-24T02:50:16
| 309,553,189
| 6
| 1
| null | 2021-01-06T02:11:50
| 2020-11-03T02:44:52
|
Python
|
UTF-8
|
Python
| false
| false
| 730
|
py
|
import logging
def init(prog):
    """Create the module-wide logger named *prog* at INFO level.

    Logs go to stderr via a StreamHandler; set ``log_path`` to a file
    path to log to disk instead.
    """
    global logger
    log_path = None  # set to a filename to switch to file logging
    logger = logging.getLogger(prog)
    if log_path is None:
        handler = logging.StreamHandler()
    else:
        handler = logging.FileHandler(filename=log_path)
    fmt = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s: %(message)s',
                            datefmt="%m/%d %H:%M:%S")
    handler.setFormatter(fmt)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
def info(str):
    """Log *str* at INFO level on the module logger (init() must run first)."""
    global logger
    logger.info(str)
def debug(str):
    """Log *str* at DEBUG level on the module logger (init() must run first)."""
    global logger
    logger.debug(str)
def warn(str):
    """Log *str* at WARNING level on the module logger (init() must run first).

    Note: the parameter shadows the builtin ``str``, matching the sibling
    wrappers' signature.
    """
    global logger
    # Logger.warn() is a deprecated alias (since Python 3.3); warning() is
    # the supported spelling and behaves identically.
    logger.warning(str)
def error(str):
    """Log *str* at ERROR level on the module logger (init() must run first)."""
    global logger
    logger.error(str)
|
[
"cezanne@codemayo.com"
] |
cezanne@codemayo.com
|
38009bd31863e446cc553fda7789b52f3f9980fd
|
17e9dac1b09a99fbe1d2dc006e58675f583ce0fd
|
/venv/Scripts/easy_install-script.py
|
39ad86993f538851e236e54fc740f78720b68a99
|
[] |
no_license
|
Vc346/PirateTwitterBack
|
bee179be63722dfc765512156509f7d22d21b05b
|
ceb50f328f65a54647e597b8a45bc3ae52d68085
|
refs/heads/master
| 2020-08-04T13:12:27.379279
| 2019-10-01T16:42:48
| 2019-10-01T16:42:48
| 212,147,811
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 462
|
py
|
#!C:\Users\nyara\WebstormProjects\PirateTwitter\back\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
# NOTE: auto-generated entry-point wrapper created by setuptools when the
# virtualenv was built — do not edit by hand; it is regenerated on install.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py"/".exe" suffix so argv[0] looks like the command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
    )
|
[
"A01336434@itesm.mx"
] |
A01336434@itesm.mx
|
ebb236e44d2e50bfe6d2ca849747552c0f0f91ed
|
20c1f61d4f3bdfec8bbe47e2b6bfa8d36e5456ea
|
/23.3/models.py
|
f3092c672fd38f269a536caa1da4f820e9b7f007
|
[] |
no_license
|
SNSTRUTHERS/springboard
|
de910db081838480a5bdc5275a85f4aff50f1570
|
ebeb94906be8774c2f4e2b0e8eaefd6c1ce10959
|
refs/heads/master
| 2023-04-23T08:46:13.541922
| 2021-05-18T17:53:46
| 2021-05-18T17:53:46
| 287,401,487
| 0
| 1
| null | 2020-08-14T02:03:43
| 2020-08-13T23:45:48
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,233
|
py
|
"""Models for Blogly."""
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy_serializer import SerializerMixin
db = SQLAlchemy()
def connect_db(app: Flask) -> None:
    """Connects a Flask application to the database.

    Parameters
    ==========
    app `Flask`
        The Flask application to connect.
    """
    # Bind the app on the shared SQLAlchemy instance so models can be used
    # outside a request context, then register the extension with the app.
    db.app = app
    db.init_app(app)
class User(db.Model, SerializerMixin):
    """A Blogly user."""

    # Fields exposed when the model is serialized (SerializerMixin).
    serialize_only = ('id', 'first_name', 'last_name', 'image_url')

    __tablename__ = "users"

    # Auto-incrementing surrogate primary key.
    id = db.Column(
        db.Integer,
        primary_key = True,
        autoincrement = True
    )
    first_name = db.Column(
        db.String(64),
        nullable = False
    )
    last_name = db.Column(
        db.String(64),
        nullable = False
    )
    # Avatar URL; falls back to a bundled placeholder image.
    image_url = db.Column(
        db.Text(),
        default = "/static/default0.png",
        nullable = False
    )
    # Deleting a user deletes their posts (cascade on the ORM side).
    posts = db.relationship('Post', cascade="all,delete")

    @property
    def full_name(self):
        """Return ``"first last"`` for display."""
        return f"{self.first_name} {self.last_name}"

    @classmethod
    def get_sorted(cls):
        """Retrieves a list of Users sorted in ascending order of last_name, first_name."""
        return cls.query.order_by(cls.last_name.asc(), cls.first_name.asc()).all()

    def __repr__(self):
        """Returns a string representation of the current User object."""
        return f"<User id={self.id} name={self.full_name} image_url={self.image_url}>"
class Post(db.Model, SerializerMixin):
    """A Blogly post."""

    # Fields exposed when the model is serialized (SerializerMixin).
    serialize_only = ('id', 'title', 'content', 'created_at', 'updated_at', 'user_id')

    __tablename__ = "posts"

    id = db.Column(
        db.Integer,
        primary_key = True,
        autoincrement = True
    )
    title = db.Column(
        db.Text,
        nullable = False
    )
    content = db.Column(
        db.Text,
        nullable = False
    )
    created_at = db.Column(
        db.DateTime,
        nullable = False
    )
    updated_at = db.Column(
        db.DateTime,
        nullable = False
    )
    # Author of the post.
    user_id = db.Column(
        db.Integer,
        db.ForeignKey('users.id'),
        nullable = False
    )

    user = db.relationship('User')

    @property
    def created_timestamp(self):
        """Retrieve the timestamp of when a post was first created as a string."""
        # BUGFIX: the original format "%d %a %Y" used %a (abbreviated
        # WEEKDAY), yielding e.g. "18 Tue 2021" with no month at all;
        # %b (abbreviated month) gives the intended "18 May 2021".
        return self.created_at.strftime("%d %b %Y at %H:%M:%S UTC")

    @property
    def updated_timestamp(self):
        """Retrieve the timestamp of when a post was last updated as a string."""
        return self.updated_at.strftime("%d %b %Y at %H:%M:%S UTC")
class PostTag(db.Model):
    """Tag on a Blogly post (association table between posts and tags)."""

    __tablename__ = "post_tags"

    # Composite primary key: one row per (post, tag) pair.
    post_id = db.Column(db.Integer, db.ForeignKey('posts.id'), primary_key=True)
    tag_id = db.Column(db.Integer, db.ForeignKey('tags.id'), primary_key=True)
class Tag(db.Model, SerializerMixin):
    """Blogly tag."""

    __tablename__ = "tags"

    serialize_only = ('id', 'name')

    id = db.Column(db.Integer, primary_key = True)
    # Tag names must be unique.
    name = db.Column(db.Text, nullable = False, unique = True)

    # Many-to-many to posts through the post_tags association table;
    # the backref also exposes ``post.tags``.
    posts = db.relationship('Post',
        secondary="post_tags",
        backref="tags"
    )
|
[
"snstruthers@gmail.com"
] |
snstruthers@gmail.com
|
c8ddfdccfaf6d9c4816507811c5039f504cfcc99
|
b95a665b40a61e975ad0583767cb0356c39cab8c
|
/pythonOOPfeatures/5SpecialMethod.py
|
2f19ddc4e8fbf543b1f9e310198423255a428d1f
|
[] |
no_license
|
ConyYang/TrickyPackages_Python
|
616d964df504507d40fc6d97ad579c998ab84602
|
7beda22107c8718cd96134b5714a3632910d4d52
|
refs/heads/main
| 2023-02-28T06:07:36.980701
| 2021-02-04T14:09:54
| 2021-02-04T14:09:54
| 309,988,066
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,078
|
py
|
class Employee(object):
    """An employee with pay-raise support, demonstrating special methods.

    Class attributes track the raise multiplier and a running count of
    instances ever created.
    """

    raise_amount = 1.04   # multiplier used by apply_raise()
    num_of_employ = 0     # incremented once per constructed Employee

    def __init__(self, first, last, pay):
        self.first = first
        self.last = last
        self.pay = pay
        # Email is first+last with no separator (original behaviour).
        self.email = first + last + '@company.co'
        Employee.num_of_employ += 1

    def fullname(self):
        """Return last name followed immediately by first name (no space)."""
        return '{}{}'.format(self.last, self.first)

    def apply_raise(self):
        """Multiply pay by raise_amount, truncating to an int."""
        self.pay = int(self.pay * self.raise_amount)

    def __add__(self, other):
        """Adding two employees yields the sum of their pay."""
        return self.pay + other.pay

    def __len__(self):
        """len(employee) is the length of fullname()."""
        return len(self.fullname())

    def __repr__(self):
        return "Employees('{}','{}','{}')".format(self.first, self.last, self.pay)

    def __str__(self):
        return '{}-{}'.format(self.fullname(), self.email)
def test_1():
    """Demonstrate __repr__ vs __str__ output for an Employee."""
    dev_2 = Employee('Nancy', 'Lee', 5000)
    print(dev_2.__repr__())
    print(str(dev_2))
def test_2():
    """Demonstrate __add__: prints the summed pay of two employees."""
    dev_1 = Employee('Nancy', 'Lee', 5000)
    dev_2 = Employee('Liang', 'Lee', 5000)
    print(dev_2 + dev_1)
def test_3():
    """Demonstrate __len__: prints the length of the employee's fullname."""
    dev_1 = Employee('Nancy', 'Lee', 5000)
    print(len(dev_1))

# Only test_3 runs when the script is executed.
test_3()
|
[
"yangyubei0218@gmail.com"
] |
yangyubei0218@gmail.com
|
59abafbe78f6016e4f518df08d9a9076bc3bc6f1
|
621dfccf1ace31bcf48cd1e7cc80b8dbd556b21b
|
/01_Jump_to_python/3_control/4_exer/3_codingdojang/2_ Tap_to_WhiteSpace.py
|
720c8bf9281e57b760b5b143a6847695baf87b57
|
[] |
no_license
|
hansangwoo1969/iot_python2019
|
35d90997b442845d1f5fa5a6119d5352a8dbe46f
|
e1ea46bb4afcc33e1fc28b43e845759d8688098b
|
refs/heads/master
| 2020-06-14T22:19:47.647686
| 2019-09-19T07:40:16
| 2019-09-19T07:40:16
| 195,142,246
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,448
|
py
|
# Exercise: a program that replaces tab characters in source code with
# four spaces (coding-dojang challenge); this file explores str.replace.
# === replace(old, new, [count]) -> replace("value to find", "replacement", [max replacements]) ===
text = '123,456,789,999'
replaceAll = text.replace(",", "")        # all commas removed
replace_t1 = text.replace(",", "", 1)     # only the first (leftmost) comma
replace_t2 = text.replace(",", "", 2)     # first two commas
replace_t3 = text.replace(",", "", 3)     # first three commas
print("결과 :")
print(replaceAll)
print(replace_t1)
print(replace_t2)
print(replace_t3)
# === Replace occurrences starting from the right ===
def replaceRight(original, old, new, count_right):
    """Replace up to ``count_right`` occurrences of ``old`` in ``original``
    with ``new``, scanning from the right end of the string.

    Mirrors str.replace(old, new, count) but right-to-left; replacing more
    occurrences than exist simply replaces them all.
    """
    text = original
    count_find = original.count(old)
    # Never attempt more replacements than there are occurrences.
    repeat = min(count_find, count_right)
    for _ in range(repeat):
        find_index = text.rfind(old)  # rightmost remaining occurrence
        # BUGFIX: skip len(old) characters, not 1, so multi-character
        # needles are removed in full (unchanged for 1-char `old`).
        text = text[:find_index] + new + text[find_index + len(old):]
    return text
text = '123,456,789,999'
# text.replace(",", "", -1); print(text)  # (does not work: count=-1 means "replace all")
# text = replaceRight(text, ",", "", 2)
print("결과 :")
print(replaceRight(text, ",", "", 0))  # no change
print(replaceRight(text, ",", "", 1))  # rightmost comma removed
print(replaceRight(text, ",", "", 2))
print(replaceRight(text, ",", "", 3))
print(replaceRight(text, ",", "", 4))  # more than exist: all removed
|
[
"you@example.com"
] |
you@example.com
|
be4abfce4f2e0523a1b2af14d786546ccf6ac7c0
|
aee1f0cff0690b3796092c061d0ace80884d586e
|
/415_Add_Strings.py
|
42ae18d5a871e606ed7bbbf8effd62bfc8853f85
|
[] |
no_license
|
nickciaravella/leetcode
|
486baaf3ce3bc9d3d3ec61195e611ad4742ee25d
|
1bba7aadabd5d234a9482a661da84a6829adfb77
|
refs/heads/master
| 2020-03-18T00:54:52.426951
| 2018-05-25T05:58:11
| 2018-05-25T05:58:11
| 134,118,899
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 605
|
py
|
# https://leetcode.com/problems/add-strings/description/
# Easy
from itertools import zip_longest
class Solution:
    """Schoolbook addition of two non-negative numbers given as strings."""

    def addStrings(self, num1, num2):
        """
        :type num1: str
        :type num2: str
        :rtype: str
        """
        digits = []
        carry = 0
        # Walk both numbers from least-significant digit, padding the
        # shorter one with '0'.
        for a, b in zip_longest(reversed(num1), reversed(num2), fillvalue='0'):
            carry, d = divmod(int(a) + int(b) + carry, 10)
            digits.append(str(d))
        if carry:
            digits.append(str(carry))
        # Digits were collected least-significant first.
        return ''.join(reversed(digits))
|
[
"nick.ciaravella@live.com"
] |
nick.ciaravella@live.com
|
9128ef94f59ce5b466db8f96c63f17bf7f301496
|
67ce544a9767af0c2a44fc6783260c2492183d02
|
/Test/test_rotation.py
|
eadc7f6862c77bfa99185f616d80d81c213b84db
|
[] |
no_license
|
givemealung/LemonTea
|
cb8a02dbd2f28fd568b852b3b86adc37f74e1dda
|
d4ee807166436d2c610ecff418f1321a5b9ace44
|
refs/heads/master
| 2020-04-09T18:14:04.886505
| 2018-12-05T11:05:08
| 2018-12-05T11:05:08
| 160,496,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 982
|
py
|
import cv2 as cv
import numpy as np
def rotate_bound(self, image, angle):
    """Rotate *image* by *angle* degrees, expanding the canvas so no
    pixels are clipped.

    NOTE(review): this is a module-level function yet takes an unused
    ``self`` parameter — it looks copy-pasted from a class; callers must
    pass a dummy first argument. Confirm and drop ``self`` if unneeded.
    """
    # grab the dimensions of the image and then determine the
    # center
    (h, w) = image.shape[:2]
    (cX, cY) = (w // 2, h // 2)
    # grab the rotation matrix (applying the negative of the
    # angle to rotate clockwise), then grab the sine and cosine
    # (i.e., the rotation components of the matrix)
    M = cv.getRotationMatrix2D((cX, cY), -angle, 1.0)
    cos = np.abs(M[0, 0])
    sin = np.abs(M[0, 1])
    # compute the new bounding dimensions of the image
    nW = int((h * sin) + (w * cos))
    nH = int((h * cos) + (w * sin))
    # adjust the rotation matrix to take into account translation
    M[0, 2] += (nW / 2) - cX
    M[1, 2] += (nH / 2) - cY
    # perform the actual rotation and return the image
    return cv.warpAffine(image, M, (nW, nH))
# Quick manual check: load a test image and print the 2x3 affine matrix
# for a -90 degree rotation about its centre (rotate_bound is not called).
img = cv.imread('test.bmp')
h, w = img.shape[:2]
cX, cY = w // 2, h // 2
M = cv.getRotationMatrix2D((cX, cY), -90, 1.0)
print(M)
|
[
"givemealung@users.noreply.github.com"
] |
givemealung@users.noreply.github.com
|
323a59b8047c88211c419f0f35612ab000bfb532
|
2589ba31d1efccc0413f3c8e037adb20de70004a
|
/python/astro_metadata_translator/translators/helpers.py
|
4236aeea46bc6e146822cfa66ab31cd82b2158f7
|
[] |
permissive
|
HyperSuprime-Cam/astro_metadata_translator
|
0acf51b6f03c3427e4c5dcae4151037b0779f27d
|
b976306e3e6fb85232cc838a145475ae8f16ca31
|
refs/heads/master
| 2020-05-14T18:52:50.234657
| 2020-02-01T01:55:06
| 2020-02-01T01:55:06
| 181,917,815
| 0
| 0
|
BSD-3-Clause
| 2019-04-17T15:19:23
| 2019-04-17T15:19:23
| null |
UTF-8
|
Python
| false
| false
| 5,920
|
py
|
# This file is part of astro_metadata_translator.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.
"""Generically useful translation helpers which translation classes
can use.
They are written as free functions. Some of them are written
as if they are methods of `MetadataTranslator`, allowing them to be attached
to translator classes that need them. These methods have full access to
the translator methods.
Other functions are pure helpers that can be imported and used to help
translation classes without using `MetadataTranslator` properties.
"""
__all__ = ("to_location_via_telescope_name",
"is_non_science",
"tracking_from_degree_headers",
"altitude_from_zenith_distance")
import logging
from astropy.coordinates import EarthLocation, SkyCoord, AltAz
import astropy.units as u
log = logging.getLogger(__name__)
def to_location_via_telescope_name(self):
    """Calculate the observatory location via the telescope name.

    Returns
    -------
    loc : `astropy.coordinates.EarthLocation`
        Location of the observatory.
    """
    telescope = self.to_telescope()
    return EarthLocation.of_site(telescope)
def is_non_science(self):
    """Raise an exception if this is a science observation.

    Raises
    ------
    KeyError
        Is a science observation.
    """
    # Non-science frames (bias, flat, ...) may fall back to defaults.
    if self.to_observation_type() != "science":
        return
    raise KeyError("Header represents science observation and can not default")
def altitude_from_zenith_distance(zd):
    """Convert zenith distance to altitude

    Parameters
    ----------
    zd : `astropy.units.Quantity`
        Zenith distance as an angle.

    Returns
    -------
    alt : `astropy.units.Quantity`
        Altitude.
    """
    # Altitude and zenith distance are complementary angles.
    zenith = 90. * u.deg
    return zenith - zd
def tracking_from_degree_headers(self, radecsys, radecpairs, unit=u.deg):
    """Calculate the tracking coordinates from lists of headers.

    Parameters
    ----------
    radecsys : `list` or `tuple`
        Header keywords to try corresponding to the tracking system. If none
        match ICRS will be assumed.
    radecpairs : `tuple` of `tuple` of pairs of `str`
        Pairs of keywords specifying the RA/Dec in units of ``unit``.
    unit : `astropy.unit.BaseUnit` or `tuple`
        Unit definition suitable for the `~astropy.coordinate.SkyCoord`
        constructor.

    Returns
    -------
    radec = `astropy.coordinates.SkyCoord`
        The RA/Dec coordinates. None if this is a moving target or a
        non-science observation without any RA/Dec definition.

    Raises
    ------
    KeyError
        No RA/Dec keywords were found and this observation is a science
        observation.
    """
    used = []
    # First pass: find the coordinate frame from the first usable
    # tracking-system keyword.
    for k in radecsys:
        if self.is_key_ok(k):
            frame = self._header[k].strip().lower()
            used.append(k)
            if frame == "gappt":
                self._used_these_cards(*used)
                # Moving target
                return None
            # Stop at the first usable keyword (skips the for/else below).
            break
    else:
        # for/else: no tracking-system keyword was usable; assume ICRS.
        frame = "icrs"
    # Second pass: first usable RA/Dec keyword pair wins.
    for ra_key, dec_key in radecpairs:
        if self.are_keys_ok([ra_key, dec_key]):
            radec = SkyCoord(self._header[ra_key], self._header[dec_key],
                             frame=frame, unit=unit, obstime=self.to_datetime_begin(),
                             location=self.to_location())
            self._used_these_cards(ra_key, dec_key, *used)
            return radec
    # No coordinates found: fatal only for science observations.
    if self.to_observation_type() == "science":
        raise KeyError("Unable to determine tracking RA/Dec of science observation")
    return None
def altaz_from_degree_headers(self, altazpairs, obstime, is_zd=None):
    """Calculate the altitude/azimuth coordinates from lists of headers.

    If the altitude is found but is greater than 90 deg, it will be returned
    fixed at 90 deg.

    If the altitude or azimuth are negative and this is a calibration
    observation, `None` will be returned.

    Parameters
    ----------
    altazpairs : `tuple` of `str`
        Pairs of keywords specifying Alt/Az in degrees. Each pair is tried
        in turn.
    obstime : `astropy.time.Time`
        Reference time to use for these coordinates.
    is_zd : `set`, optional
        Contains keywords that correspond to zenith distances rather than
        altitude.

    Returns
    -------
    altaz = `astropy.coordinates.AltAz`
        The AltAz coordinates associated with the telescope location
        and provided time. Returns `None` if this observation is not
        a science observation and no AltAz keys were located.

    Raises
    ------
    KeyError
        No AltAz keywords were found and this observation is a science
        observation.
    """
    # First usable (az, alt) keyword pair wins.
    for alt_key, az_key in altazpairs:
        if self.are_keys_ok([az_key, alt_key]):
            az = self._header[az_key]
            alt = self._header[alt_key]
            # Check for zenith distance
            if is_zd and alt_key in is_zd:
                alt = altitude_from_zenith_distance(alt * u.deg).value
            if az < -360.0 or alt < 0.0:
                # Break out of loop since we have found values but
                # they are bad.
                break
            if alt > 90.0:
                log.warning("Clipping altitude (%f) at 90 degrees", alt)
                alt = 90.0
            altaz = AltAz(az * u.deg, alt * u.deg,
                          obstime=obstime, location=self.to_location())
            self._used_these_cards(az_key, alt_key)
            return altaz
    # Missing (or broken-out bad) values: fatal only for science frames.
    if self.to_observation_type() == "science":
        raise KeyError("Unable to determine AltAz of science observation")
    return None
|
[
"tjenness@lsst.org"
] |
tjenness@lsst.org
|
19d90aabbffa1ff627828091e33fba3d914b4dbd
|
b22fe6a78705084904a68e187d28136c13ab7760
|
/apps/user_operation/apps.py
|
8b260e734bb14bcc4b0dc536c614d4ea7f1868d5
|
[] |
no_license
|
testomg/DRFDemo
|
5d43db66305f533f7c4fcb5d66104f0dbb2410f2
|
4241481d8324381a3b8fc8fab747b6eb0a87423f
|
refs/heads/master
| 2022-11-29T13:31:54.766992
| 2020-08-15T16:02:42
| 2020-08-15T16:02:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 161
|
py
|
from django.apps import AppConfig
class UserOperationConfig(AppConfig):
    """Django app config for the user_operation app."""
    name = 'user_operation'

    def ready(self):
        # Import inside ready() so signal receivers are registered exactly
        # once, after the app registry is fully loaded.
        import user_operation.signals
|
[
"ljh_132s@qq.com"
] |
ljh_132s@qq.com
|
0cd9c31eea6953e2e2fc3f5b66f096fae2d66ff7
|
3effac4a67f2bd6e644262f9e6d61962aa37c905
|
/WorkbookBS/01_basics/ex32.py
|
5d4cf0278fdd9c0665e7fab5bc2953bb02df8e98
|
[] |
no_license
|
WitoldKaczor/py_repo
|
06b3080c057079b4c93fd110a66839b6b2af5eb7
|
a95cd2ba5068c556a63cefc0fa2ff42dc688a1ef
|
refs/heads/master
| 2022-11-15T06:27:07.351992
| 2020-07-09T11:02:18
| 2020-07-09T11:02:18
| 275,154,829
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 279
|
py
|
# Read three integers from the user and print them in ascending order.
int1 = int(input('First integer: '))
int2 = int(input('Second integer: '))
int3 = int(input('Third integer: '))

# sorted() gives min/middle/max directly (replaces the sum-minus trick).
int_min, int_mid, int_max = sorted((int1, int2, int3))
print('Sorted numbers:', int_min, int_mid, int_max)
|
[
"63341757+WitoldKaczor@users.noreply.github.com"
] |
63341757+WitoldKaczor@users.noreply.github.com
|
f1245fd5f0a11cd259b2d46738a5fab7b90f5370
|
2f6405caec09b1dd184f260f112efce530c6a143
|
/mysite/mysite/settings.py
|
a1d437040d2dfc0c8e1aff694978f9a1843ef98a
|
[] |
no_license
|
bjorksus/my-first-blog
|
a6f6b4ed49b97087c5d0e1c609e727b81dcc188e
|
cb823868eca26bd06cc0e16b2ef36a4b6b5e8803
|
refs/heads/master
| 2020-06-20T22:08:53.903330
| 2016-11-26T16:15:12
| 2016-11-26T16:15:12
| 74,820,565
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,167
|
py
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.9.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control — rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY = 'ey+u&6-)4jo_(r(f3k$o(jx8fy8$j)706=anw)&c-+s%ozt%y@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list is only valid while DEBUG=True; set real hostnames for production.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
[
"asa_lindgren@yahoo.com"
] |
asa_lindgren@yahoo.com
|
486e795c0000a52d94070003291791381d950f24
|
af962fc2c12aa158daa408e51e010e9a450241ea
|
/kbengine/assets/scripts/base/interfaces/GameObject.py
|
548891b5dbb7cb9bae918b56ab2008608b907ea4
|
[] |
no_license
|
ruifly2012/ShiSanZhang
|
c93685501cf4dba94367cd091066d51bf1850210
|
b9085046ac03e63087118b7555ab0b4f7f29c43f
|
refs/heads/master
| 2020-05-14T13:18:24.114010
| 2019-04-16T08:31:34
| 2019-04-16T08:31:34
| 181,809,959
| 0
| 1
| null | 2019-04-17T03:23:53
| 2019-04-17T03:23:52
| null |
UTF-8
|
Python
| false
| false
| 1,127
|
py
|
# -*- coding: utf-8 -*-
import KBEngine
from KBEDebug import *
class GameObject:
    """
    Base interface class for server-side game objects (KBEngine base entity mixin).
    """
    def __init__(self):
        pass

    def onEnterWorld(self):
        """
        KBEngine method.
        This entity has entered the world.
        """
        pass

    def onLeaveWorld(self):
        """
        KBEngine method.
        This entity is about to leave the world.
        """
        pass

    def getScriptName(self):
        # Name of the concrete entity script class, used in log messages.
        return self.__class__.__name__

    def destroySelf(self):
        """
        virtual method

        Tears down this entity; the cell part (if any) must be destroyed first.
        """
        if self.cell is not None:
            # Destroy the cell entity first; onLoseCell will then call back
            # into destroySelf to destroy the base part.
            self.destroyCellEntity()
            return

        # Destroy the base entity.
        self.destroy()

    def onGetCell(self):
        """
        KBEngine method.
        The cell part of this entity was created successfully.
        """
        DEBUG_MSG("%s::onGetCell: %i" % (self.getScriptName(), self.id))
        pass

    def onLoseCell(self):
        """
        KBEngine method.
        The cell part of this entity was lost.
        """
        DEBUG_MSG("%s::onLoseCell: %i" % (self.getScriptName(), self.id))
        self.destroySelf()

    def onRestore(self):
        """
        KBEngine method.
        The cell part of this entity was restored successfully.
        """
        DEBUG_MSG("%s::onRestore: %s" % (self.getScriptName(), self.cell))
|
[
"693749857@qq.com"
] |
693749857@qq.com
|
3048750031b8a35363b103cc6375537337a7f8e7
|
c8272c29fb80c2f097f1a850151c5a8faa993ccb
|
/src/pyelliptic/cipher.py
|
fb8d0d46144c0c4764638babf1360e4828384a95
|
[
"GPL-3.0-only",
"GPL-1.0-or-later",
"MIT"
] |
permissive
|
wmantly/PyBitmessage
|
5aa7cf25ef5aa668c0998db528729256ca669638
|
11accf973a2b6229a94613b352fd5cbe2c862729
|
refs/heads/master
| 2020-12-25T00:19:49.727559
| 2018-09-26T18:14:56
| 2018-09-26T18:14:56
| 30,033,550
| 0
| 1
|
MIT
| 2018-09-26T18:14:58
| 2015-01-29T18:02:28
|
Python
|
UTF-8
|
Python
| false
| false
| 2,675
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.
from pyelliptic.openssl import OpenSSL
class Cipher:
    """
    Symmetric encryption

    import pyelliptic
    iv = pyelliptic.Cipher.gen_IV('aes-256-cfb')
    ctx = pyelliptic.Cipher("secretkey", iv, 1, ciphername='aes-256-cfb')
    ciphertext = ctx.update('test1')
    ciphertext += ctx.update('test2')
    ciphertext += ctx.final()
    ctx2 = pyelliptic.Cipher("secretkey", iv, 0, ciphername='aes-256-cfb')
    print ctx2.ciphering(ciphertext)

    Thin ctypes wrapper around the OpenSSL EVP cipher API; buffers are
    allocated via the project's OpenSSL.malloc helper.
    """
    def __init__(self, key, iv, do, ciphername='aes-256-cbc'):
        """
        do == 1 => Encrypt; do == 0 => Decrypt

        key/iv are raw byte strings; any other value of `do` is rejected.
        """
        self.cipher = OpenSSL.get_cipher(ciphername)
        self.ctx = OpenSSL.EVP_CIPHER_CTX_new()
        if do == 1 or do == 0:
            k = OpenSSL.malloc(key, len(key))
            IV = OpenSSL.malloc(iv, len(iv))
            # Last argument selects encrypt (1) / decrypt (0) mode.
            OpenSSL.EVP_CipherInit_ex(
                self.ctx, self.cipher.get_pointer(), 0, k, IV, do)
        else:
            raise Exception("RTFM ...")
    @staticmethod
    def get_all_cipher():
        """
        static method, returns all ciphers available
        """
        return OpenSSL.cipher_algo.keys()
    @staticmethod
    def get_blocksize(ciphername):
        # Block size in bytes of the named cipher.
        cipher = OpenSSL.get_cipher(ciphername)
        return cipher.get_blocksize()
    @staticmethod
    def gen_IV(ciphername):
        # Random IV sized to the cipher's block size.
        cipher = OpenSSL.get_cipher(ciphername)
        return OpenSSL.rand(cipher.get_blocksize())
    def update(self, input):
        # Feed more data; output buffer is oversized by one block because
        # EVP_CipherUpdate may emit up to input_len + block_size - 1 bytes.
        i = OpenSSL.c_int(0)
        buffer = OpenSSL.malloc(b"", len(input) + self.cipher.get_blocksize())
        inp = OpenSSL.malloc(input, len(input))
        if OpenSSL.EVP_CipherUpdate(self.ctx, OpenSSL.byref(buffer),
                                    OpenSSL.byref(i), inp, len(input)) == 0:
            raise Exception("[OpenSSL] EVP_CipherUpdate FAIL ...")
        return buffer.raw[0:i.value]
    def final(self):
        # Flush the last (possibly padded) block.
        i = OpenSSL.c_int(0)
        buffer = OpenSSL.malloc(b"", self.cipher.get_blocksize())
        if (OpenSSL.EVP_CipherFinal_ex(self.ctx, OpenSSL.byref(buffer),
                                       OpenSSL.byref(i))) == 0:
            raise Exception("[OpenSSL] EVP_CipherFinal_ex FAIL ...")
        return buffer.raw[0:i.value]
    def ciphering(self, input):
        """
        Do update and final in one method
        """
        buff = self.update(input)
        return buff + self.final()
    def __del__(self):
        # NOTE(review): relies on the OpenSSL module still being alive at
        # interpreter shutdown; an explicit close() would be safer.
        OpenSSL.EVP_CIPHER_CTX_cleanup(self.ctx)
        OpenSSL.EVP_CIPHER_CTX_free(self.ctx)
|
[
"git@jonwarren.org"
] |
git@jonwarren.org
|
9844ffeb251ce2d3f012663c169bb3696ebed044
|
e8ec436ebcabfa494e2f59499009333d45eda9a1
|
/P3/P3_Query_DB.py
|
e8b276f7aff03fd465c8e14dbfc26ccb9b2d8279
|
[] |
no_license
|
petetheat/DAND_Udacity
|
ec32ee72c0202398f475638b0a34161f7e07f2f7
|
12dfa187e8eddd031f085a71955d083af8085222
|
refs/heads/master
| 2021-01-14T10:30:39.898688
| 2017-04-14T20:37:13
| 2017-04-14T20:37:13
| 82,033,021
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,832
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 02 17:30:30 2016
@author: Peter Eisenschmidt
"""
import sqlite3
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
#%% Open database
# NOTE: Python 2 script (print statements). The connection is never closed;
# acceptable for a one-shot analysis notebook-style script.
db = sqlite3.connect("P3_Bremen.db")
c = db.cursor()
#%% Database queries
# Number of unique users
Q = '''
SELECT COUNT(*)
FROM (SELECT uid FROM nodes GROUP BY uid UNION ALL SELECT uid FROM ways GROUP BY uid);
'''
c.execute(Q)
rows = c.fetchall()
no_uid = rows[0][0]
print "Number of unique users: ", no_uid
# Top 10 users
Q = '''
SELECT user, uid, count(uid)
FROM nodes GROUP BY uid UNION ALL SELECT user, uid, count(uid) FROM ways GROUP BY uid
ORDER BY count(uid) DESC LIMIT 10;
'''
c.execute(Q)
rows = c.fetchall()
df = pd.DataFrame(rows)
print df
# Total number of ways
Q = '''
SELECT COUNT(*) FROM ways;
'''
c.execute(Q)
rows = c.fetchall()
no_ways = rows[0][0]
print "Number of ways: ", no_ways
# Number of nodes
Q = '''
SELECT COUNT(*) FROM nodes;
'''
c.execute(Q)
rows = c.fetchall()
no_nodes = rows[0][0]
print "Number of nodes: ", no_nodes
# Number of postcodes
q = '''
SELECT count(*)
FROM (SELECT value FROM nodes_tags WHERE key=='postcode' GROUP BY value);
'''
c.execute(q)
rows = c.fetchall()
no_pc = rows[0][0]
print "Number of postcodes: ", no_pc
#%% Investigate bus stops
# select all nodes
QUERY = "SELECT lat, lon FROM nodes;"
# select only nodes where the key is public_transport
Q1 = "SELECT nodes.lat, nodes.lon FROM nodes, nodes_tags WHERE nodes_tags.key == 'public_transport' AND nodes.id==nodes_tags.id;"
# select only nodes where the value is bus_stop
Q2 = "SELECT nodes.lat, nodes.lon FROM nodes, nodes_tags WHERE nodes_tags.value == 'bus_stop' AND nodes.id==nodes_tags.id;"
c.execute(QUERY)
rows = c.fetchall()
df = pd.DataFrame(rows)
c.execute(Q1)
rows = c.fetchall()
df1 = pd.DataFrame(rows)
c.execute(Q2)
rows = c.fetchall()
df2 = pd.DataFrame(rows)
# This query extracts the boundary of the federal state. This is used in the map plot later.
q_boundary = '''
SELECT ways_tags.id, nodes.lat, nodes.lon FROM nodes, ways_tags, ways_nodes
WHERE ways_tags.key == 'admin_level' AND ways_tags.value==4
AND ways_tags.id = ways_nodes.id
AND nodes.id == ways_nodes.node_id;
'''
c.execute(q_boundary)
rows = c.fetchall()
df_bd = pd.DataFrame(rows, columns=['id','lat','lon'])
#%% Plot map data and bus stop/public transport nodes
# color map for plot
cmap = matplotlib.cm.get_cmap('afmhot_r')
sns.set(palette="afmhot_r", rc={'axes.facecolor': cmap(.95)})
plt.figure(1, figsize=(20,40))
plt.plot(df[1],df[0],'.', alpha = .2, color = cmap(.4)) # plot map data (all nodes)
plt.plot(df1[1],df1[0],'o', alpha = .2, color = 'red') # plot public_transport nodes
plt.plot(df2[1],df2[0],'s', alpha = .2, color = '#0b8c18') # plot bus_stop nodes
for i in np.unique(df_bd['id']): # iteration through individual state boundary ways
    df_tmp = df_bd.loc[df_bd['id']== i]
    plt.plot(df_tmp.lon,df_tmp.lat, color='#f2f0c9', linewidth=2.5) # plot state boundaries
plt.ylim([min(df[0]), max(df[0])])
plt.xlim([min(df[1]), max(df[1])])
plt.xlabel('Longitude / deg')
plt.ylabel('Latitude / deg')
plt.show()
#%% Identify number of incorrectly tagged nodes
# Bus stops should carry a public_transport tag; count those that do.
q1 = '''
SELECT count(*) FROM nodes_tags as a, nodes_tags as b
WHERE a.id == b.id AND a.key == 'public_transport'
AND b.value=='bus_stop';
'''
c.execute(q1)
no_bus_stops_pt = c.fetchall()[0][0]
q2 = "SELECT count(*) FROM nodes_tags WHERE nodes_tags.value=='bus_stop';"
c.execute(q2)
no_bus_stops = c.fetchall()[0][0]
print "Number of bus stops: ", no_bus_stops
print "Number of bus stops including public_transport tag: ", no_bus_stops_pt
print "Number of bus stops without public_transport tag: ", no_bus_stops - no_bus_stops_pt
|
[
"p.eisenschmidt@gmail.com"
] |
p.eisenschmidt@gmail.com
|
32da7c7dcdd60fe08e91d3e9259fdbf78987fc31
|
e823989ba8d2b536e0d87516b6b96dbd3bcae14d
|
/1definitionfunction.py
|
dfd26a3c93b14d2f774061ce6bdb832a47ec8480
|
[] |
no_license
|
AlexMendozaa/Python_Crash_Course
|
39503f0479afc39c9ca5139d3b2ac0e34fd98e1f
|
13b5e2379aa95175b0f2a1cb01e91a295a557e0b
|
refs/heads/master
| 2020-05-31T13:26:14.534238
| 2019-06-05T02:02:13
| 2019-06-05T02:02:13
| 190,303,371
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 69
|
py
|
def hello_world():
    """Print the fixed greeting "Hello World" to stdout."""
    greeting = "Hello World"
    print(greeting)
|
[
"angel17mendoza@gmail.com"
] |
angel17mendoza@gmail.com
|
f445ab234342c95f1dfb2f9267ecdd52dfa63bb5
|
ba14dedcc99297ac424580d52c276cc2862bcaeb
|
/___Python/RomanS/untitled/p07_file_io/m01_count_files.py
|
75c674366a64344109584583d834c3b6e1e012ba
|
[
"Apache-2.0"
] |
permissive
|
uvenil/PythonKurs201806
|
ac8ddf7255f7c420f5e7c2c1d1f28d4d2c83e410
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
refs/heads/master
| 2020-03-22T00:06:04.163119
| 2018-06-30T22:17:14
| 2018-06-30T22:17:14
| 139,224,682
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 402
|
py
|
from pathlib import Path
# Zähle die Anzahl Ordner in einem Ordner (incl. aller Unterordner)
def count_dirs(path):
    """Recursively count directories under *path*, including *path* itself."""
    # 1 for this directory plus the recursive totals of each child directory.
    return 1 + sum(
        count_dirs(child) for child in path.iterdir() if child.is_dir()
    )
# NOTE(review): machine-specific Windows path — adjust before running elsewhere.
count= count_dirs(Path("O:\Spielwiese"))
# count= count_dirs(Path("C:\Program Files"))
print(count)
|
[
"uvenil@web.de"
] |
uvenil@web.de
|
3804875fccfdcddc332d90909984baaf01922606
|
c1aa240e35a7a8355a7ca9dcd546bdb6446b8509
|
/Trie_Tree.py
|
f0f8f73acacee9fa36b47a3f9c5172469f20b7d6
|
[] |
no_license
|
LGSW/Algorithms
|
a713685c737b7f7578704cd9c859e98a411e0a59
|
1cdf9caf74299c36735069fef41e8e95f4ed68fc
|
refs/heads/master
| 2020-03-24T22:21:58.025620
| 2018-08-12T03:23:28
| 2018-08-12T03:23:28
| 143,058,806
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 987
|
py
|
class Trie:
    """Prefix tree (trie) storing whole words as nested dicts.

    Each node is a dict mapping a single character to its child node; the
    sentinel key '' marks that a complete word terminates at that node.
    """
    def __init__(self):
        self.trie = {}  # root node
        self.size = 0   # kept for interface compatibility (not maintained by add)
    def add(self, word):
        """Insert *word*; surrounding whitespace is stripped, '' is ignored."""
        p = self.trie
        word = word.strip()
        for c in word:
            if c not in p:
                p[c] = {}
            p = p[c]
        if word != '':
            p[''] = ''
    def search(self, word):
        """Return True iff *word* (whitespace-stripped) was previously added.

        Bug fix: normalize with strip() to mirror add(); the original used
        lstrip(), so a query with trailing whitespace failed to find a word
        that add() had stored in stripped form.
        """
        p = self.trie
        word = word.strip()
        for c in word:
            if c not in p:
                return False
            p = p[c]
        return '' in p
    def output(self):
        """Print the raw nested-dict representation (debugging aid)."""
        print('{')
        print(self.trie)
        print('}')
if __name__ == '__main__':
    trie_obj = Trie()
    trie_obj.add('hello')
    trie_obj.add('help')
    trie_obj.add('world')
    trie_obj.add('abc')
    # print the constructed trie
    trie_obj.output()
    # look up words
    if trie_obj.search('hello'): print('Yes')
    else: print('No')
    if trie_obj.search('China'): print('Yes')
    else:print ('No')
|
[
"41303602+LGSW@users.noreply.github.com"
] |
41303602+LGSW@users.noreply.github.com
|
6db4e149d5148b1e14e2fd4f50023e8be42d556d
|
d053ffce6fb82d63f0c88d6c5d39a7cdf561a70f
|
/picammon.py
|
887ccb922d87660ead99166643ab4dbb6bddeb83
|
[] |
no_license
|
theapu/picamscripts
|
55eedc94515f4c0e2d7c13a9b2f32e8d3f37b165
|
bd3ffbded065c2ec29fe41f19f28eeb3b96902e4
|
refs/heads/master
| 2021-01-19T04:52:15.215337
| 2013-09-16T14:37:27
| 2013-09-16T14:37:27
| 12,869,490
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,880
|
py
|
#!/usr/bin/python
import sys
import time
import os
# Packages for sending emails and attachments
import smtplib
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
# Directory monitored for new motion-capture images.
watchdir = '/home/pi/picam/'
contents = os.listdir(watchdir)
# NOTE(review): this is the length of the path STRING, not the number of
# files, and `count` is never used afterwards — dead code.
count = len(watchdir)
dirmtime = os.stat(watchdir).st_mtime
# email settings
# NOTE(review): credentials are hard-coded in source — move to config/env.
emailFrom = 'raspiofme@gmail.com'
emailFromPwd = 'password'
emailTo = 'tome@gmail.com'
emailSubject = 'MOTION DETECTED!!!';
# Send an email with a picture attached
def sendEmail(emailTo, filenames):
    # Build a multipart message, attach each image that can be opened,
    # and send it via Gmail SMTP (Python 2 syntax).
    # Create the container (outer) email message
    msg = MIMEMultipart()
    msg['Subject'] = emailSubject
    msg['From'] = emailFrom
    msg['To'] = emailTo
    # Open the files in binary mode and let the MIMEImage class automatically
    # guess the specific image type
    for file_path in filenames:
        try:
            with open(file_path, 'rb') as fp:
                part = MIMEImage(fp.read(), name=os.path.basename(file_path))
                # redundant: the `with` block already closes the file
                fp.close()
            msg.attach(part)
        # except IOError:
        # NOTE(review): bare except hides all errors, not just I/O failures
        except :
            print "error: Can't open the file %s"%file_path
            pass
    # Send the email via the Gmail SMTP server
    smtp = smtplib.SMTP('smtp.gmail.com:587')
    smtp.starttls()
    smtp.login(emailFrom, emailFromPwd)
    smtp.sendmail(emailFrom, emailTo, msg.as_string())
    smtp.quit()
# Poll the watch directory every 30 s; when its mtime changes, email any
# newly appeared files.
while True:
    newmtime = os.stat(watchdir).st_mtime
    if newmtime != dirmtime:
        dirmtime = newmtime
        newcontents = os.listdir(watchdir)
        # files present now but not at the previous scan
        added = set(newcontents).difference(contents)
        if added:
            print "Files added: %s" %(" ".join(added))
            filenames = [ watchdir + s for s in added ]
            print filenames
            try:
                sendEmail(emailTo, filenames)
            # best-effort: keep monitoring even if the email fails
            except :
                print "error! but passed."
                pass
            contents = newcontents
    time.sleep(30)
|
[
"theapu@gmail.com"
] |
theapu@gmail.com
|
329820e4fb0bdc8a6f6066a7fd3f7ef27e2680a6
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/cZ6zaRiKn7dfvJhnF_16.py
|
64c5a53e5ff83398fdf70ff7162ad41d08b62c4b
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 398
|
py
|
"""
Creates a function that takes a string and returns the concatenated first and
last character.
### Examples
first_last("ganesh") ➞ "gh"
first_last("kali") ➞ "ki"
first_last("shiva") ➞ "sa"
first_last("vishnu") ➞ "vu"
first_last("durga") ➞ "da"
### Notes
There is no empty string.
"""
def first_last(name):
return name[0] + name[-1]
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
b3dae15c899ddfcd56e004fb6d731077460a5bbb
|
26076d82841fa3376aa1ae571e1e24fd6edc04f1
|
/coupon-codes.py
|
e107c438fceae862b907c57122171c174cb4e4f5
|
[] |
no_license
|
grnwood/sfcc-stuff
|
a6f9c427010549fac8911ef4cd718abf2ee5a72b
|
a604c9673b908c257ddbef7ed2c3bdd931ccbb37
|
refs/heads/main
| 2023-08-14T17:07:51.344909
| 2021-10-06T20:04:03
| 2021-10-06T20:04:03
| 414,349,993
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 757
|
py
|
#! python3w
import xml.etree.ElementTree as ET
# One-off filter: keep only 'Home Delivery' coupons, prefix their ids with
# "HomeDelivery_", drop everything else, and write the result to a new file.
# NOTE(review): hard-coded user-specific temp paths — rerun requires editing.
tree = ET.parse("C:/Users/jogreenw/AppData/Local/Temp/7zO0D747F01/coupons.xml")
root = tree.getroot()
# findall() returns a list, so removing children from root while looping is safe.
for coupon in root.findall('coupon'):
    desc = coupon.find('description')
    if desc is not None:
        if (desc.text) == 'Home Delivery':
            cid = coupon.attrib['coupon-id']
            # avoid double-prefixing ids that were already renamed
            if not cid.startswith("HomeDelivery_"):
                cid = 'HomeDelivery_'+cid
            print("keeping: "+cid+':'+desc.text)
            coupon.attrib['coupon-id'] = cid
        else:
            root.remove(coupon)
    else:
        root.remove(coupon)
# strip all coupon-code lists as well
for cc in root.findall('coupon-codes'):
    root.remove(cc)
tree.write("C:/Users/jogreenw/AppData/Local/Temp/7zO0D747F01/coupons-hd.xml")
|
[
"grnwood@gmail.com"
] |
grnwood@gmail.com
|
1e2b5755554832e80defe145dcc51fac19a9392b
|
97a019b52a56cfd16cd7c4dbb730e83c581d2b3e
|
/Archived/Research/PredictiveMouseTracking/process.py
|
dd72d04a9559bbe7dc2fd92a30b70b0dc624f85c
|
[] |
no_license
|
nalinraut/high-level-Motion-Planning
|
f0263dcfa344b914159b0486370bc544552ac360
|
08cd792124defba63583ba6ae6394b20329d38c0
|
refs/heads/master
| 2020-03-09T04:20:12.846801
| 2018-04-09T06:11:29
| 2018-04-09T06:11:29
| 128,585,159
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,138
|
py
|
import sys
import csv
import re
import copy
import math
import random
from database import Database
from timeseriesdb import TimeSeriesDatabase
def do_reach_default(db,obsfile,transfile,initfile,scalefile):
    """Process 'reach' trials: derive relative-target/velocity features,
    augment by x/y mirroring, z-score everything, and write observation,
    initial-condition, scale and transition CSVs. (Python 2 code.)"""
    # columns dropped from each trial after feature derivation
    drop = ['iteration','time','target x','target y','widget x','widget y']
    # derived features; {name}[-1] refers to the previous row's value
    process = ["target rel x = {target x}-{widget x}",
               "target rel y = {target y}-{widget y}",
               "widget dx = {widget x}-{widget x}[-1]",
               "widget dy = {widget y}-{widget y}[-1]",
               ]
    # sign flips used for data augmentation
    pflipx = ["target rel x = -{target rel x}",
              "widget dx = -{widget dx}"]
    pflipy = ["target rel y = -{target rel y}",
              "widget dy = -{widget dy}"]
    historyvars = ["widget dx","widget dy"]
    db.process(process)
    trials = db.split("trial")
    print len(trials),"trials"
    # insert a per-trial 'speed' column just before 'widget dx'
    newkeys = db.keys[:]
    newkeys = newkeys[:db.keys.index("widget dx")]+["speed"]+newkeys[db.keys.index("widget dx"):]
    for trial in trials:
        dt = float(trial.get(-1,"time"))-float(trial.get(0,"time"))
        dx = float(trial.get(0,"target rel x"))
        dy = float(trial.get(0,"target rel y"))
        # straight-line distance to the target edge, clamped at zero
        dist = max(math.sqrt(dx*dx+dy*dy)-float(trial.get(0,"target r")),0.0)
        trial.process("speed = %g"%(dist / dt,))
        trial.shuffle_keys(newkeys)
        trial.delete_key(*drop)
    #construct new trials by flipping x and y
    newtrials = trials[:]
    for trial in trials:
        flipx = Database(trial)
        flipx.process(pflipx)
        newtrials.append(flipx)
        flipy = Database(trial)
        flipy.process(pflipy)
        newtrials.append(flipy)
        flipxy = Database(flipy)
        flipxy.process(pflipx)
        newtrials.append(flipxy)
    trials = newtrials
    # rebuild the database from the augmented trial set
    db.keys = []
    db.entries = []
    db.addTrials(trials)
    mean = db.mean()
    stdev = db.stdev()
    dbscale = Database()
    dbscale.keys = ["key","mean","stdev"]
    dbscale.entries = [[k,m,s] for (k,m,s) in zip(db.keys,mean,stdev)]
    print "Writing scale to %s"%(scalefile,)
    dbscale.writeCSV(scalefile)
    #shift and scale the db
    for i in xrange(1,len(db.keys)):
        for e in db.entries:
            if stdev[i] != 0.0:
                e[i] = (float(e[i])-mean[i])/stdev[i]
    #shift and scale the trials
    for trial in trials:
        for i in xrange(1,len(trial.keys)):
            for e in trial.entries:
                if stdev[i] != 0.0:
                    e[i] = (float(e[i])-mean[i])/stdev[i]
    print len(db.entries),"entries processed"
    print "Writing observations to",obsfile
    db.writeCSV(obsfile)
    print "Writing initial conditions to",initfile
    initdb = Database()
    initdb.keys = db.keys
    initdb.entries = [trial.entries[0] for trial in trials]
    initdb.writeCSV(initfile)
    # pad each trial by repeating its first/last rows before building
    # (state, next-state) transition pairs
    duplicateStart = 10
    duplicateEnd = 20
    #numbetween = len(trials)/2
    dbtrans = TimeSeriesDatabase()
    #print "Adding",len(db.entries)-len(trials)+(duplicateEnd+duplicateStart)*len(trials),"within-trial transitions"
    if duplicateStart+duplicateEnd > 0: print "Duplicating",duplicateStart,"start steps and",duplicateEnd,"terminal steps"
    for trial in trials:
        prefix = [trial.entries[0][:] for i in xrange(duplicateStart)]
        suffix = [trial.entries[-1][:] for i in xrange(duplicateEnd)]
        trial.entries = prefix + trial.entries + suffix
        trialnext = Database()
        trialnext.keys = trial.keys[:]
        trialnext.entries = copy.deepcopy(trial.entries[1:])
        trialnext.delete_key(*historyvars)
        trialnext.keys = [k+" next" for k in trialnext.keys if k !="trial"]
        trialtrans = Database(trial)
        trialtrans.entries.pop(-1)
        trialtrans.join(trialnext)
        dbtrans.addTrial(trialtrans)
    # between-trial transition generation deliberately disabled (string literal
    # used as a block comment by the original author)
    """
    print "Adding",numbetween,"between-trial transitions"
    for iter in xrange(numbetween):
        trial1 = random.choice(trials)
        trial2 = random.choice(trials)
        dbtrans.entries.append(trial1.entries[-1][:])
        next = Database()
        next.keys = trial1.keys[:]
        next.entries = [trial2.entries[0][:]]
        next.delete_key(*historyvars)
        dbtrans.entries[-1].extend(next.entries[0])
    dbtrans.delete_key('trial')
    """
    print "Writing transitions to %s"%(transfile)
    dbtrans.writeCSV(transfile)
def do_traj_default(db,obsfile,transfile,initfile,meanscalefile):
    """Process 'traj' (tracking) trials: derive relative-target/velocity
    features, augment by x/y flips AND x<->y exchange (8x total), z-score,
    and write observation/initial/scale/transition CSVs. (Python 2 code.)"""
    drop = ['iteration','time','slider x','slider y','pattern','height','width','speed','slider param','slider velocity x','slider velocity y','widget x','widget y']
    process = ["target rel x = {slider x}-{widget x}",
               "target rel y = {slider y}-{widget y}",
               "target dx = {slider velocity x}",
               "target dy = {slider velocity y}",
               "widget dx = {widget x}-{widget x}[-1]",
               "widget dy = {widget y}-{widget y}[-1]",
               ]
    pflipx = ["target rel x = -{target rel x}",
              "target dx = -{target dx}",
              "widget dx = -{widget dx}"]
    pflipy = ["target rel y = -{target rel y}",
              "target dy = -{target dy}",
              "widget dy = -{widget dy}"]
    # exchange x and y coordinates (isometry of the task)
    exchxy = ["target rel y = {target rel x}",
              "target rel x = {target rel y}",
              "target dx = {target dy}",
              "target dy = {target dx}",
              "widget dy = {widget dx}",
              "widget dx = {widget dy}"]
    historyvars = ["widget dx","widget dy"]
    db.process(process)
    trials = db.split("trial")
    print len(trials),"trials"
    for trial in trials:
        trial.delete_key(*drop)
    #construct new trials by flipping x and y
    newtrials = trials[:]
    for trial in trials:
        flipx = Database(trial)
        flipx.process(pflipx)
        newtrials.append(flipx)
        flipy = Database(trial)
        flipy.process(pflipy)
        newtrials.append(flipy)
        flipxy = Database(flipy)
        flipxy.process(pflipx)
        newtrials.append(flipxy)
        #swap the x-y coordinates too to be isometric
        exch = Database(trial)
        exch.process(exchxy)
        newtrials.append(exch)
        eflipx = Database(flipx)
        eflipx.process(exchxy)
        newtrials.append(eflipx)
        eflipy = Database(flipy)
        eflipy.process(exchxy)
        newtrials.append(eflipy)
        eflipxy = Database(flipxy)
        eflipxy.process(exchxy)
        newtrials.append(eflipxy)
    trials = newtrials
    # debug: spot-check one augmented trial
    print trials[6].keys
    print trials[6].entries[0]
    db.keys = []
    db.entries = []
    db.addTrials(trials)
    mean = db.mean()
    stdev = db.stdev()
    dbscale = Database()
    dbscale.keys = ["key","mean","stdev"]
    dbscale.entries = [[k,m,s] for (k,m,s) in zip(db.keys,mean,stdev)]
    print "Writing scale to",meanscalefile
    dbscale.writeCSV(meanscalefile)
    #shift and scale the db
    for i in xrange(1,len(db.keys)):
        for e in db.entries:
            if stdev[i] != 0.0:
                e[i] = (float(e[i])-mean[i])/stdev[i]
    #shift and scale the trials
    for trial in trials:
        for i in xrange(1,len(trial.keys)):
            for e in trial.entries:
                if stdev[i] != 0.0:
                    e[i] = (float(e[i])-mean[i])/stdev[i]
    print len(db.entries),"entries processed"
    print "After scaling"
    print trials[6].keys
    print trials[6].entries[0]
    print "Writing observations to",obsfile
    db.writeCSV(obsfile)
    print "Writing initial conditions to",initfile
    initdb = Database()
    initdb.keys = db.keys
    initdb.entries = [trial.entries[0] for trial in trials]
    initdb.writeCSV(initfile)
    duplicate = 0
    numbetween = len(trials)*2
    dbtrans = TimeSeriesDatabase()
    print "Adding",len(db.entries)-len(trials)+duplicate*len(trials),"within-trial transitions"
    if duplicate > 0: print "Duplicating",duplicate,"terminal steps"
    for trial in trials:
        for i in xrange(duplicate):
            trial.entries.append(trial.entries[-1][:])
        trialnext = Database()
        trialnext.keys = trial.keys[:]
        trialnext.entries = copy.deepcopy(trial.entries[1:])
        trialnext.delete_key(*historyvars)
        trialnext.keys = [k+" next" for k in trialnext.keys if k != "trial"]
        trialtrans = Database(trial)
        trialtrans.entries.pop(-1)
        trialtrans.join(trialnext)
        dbtrans.addTrial(trialtrans)
    # randomly pair trial endpoints to synthesize between-trial transitions
    print "Adding",numbetween,"between-trial transitions"
    for iter in xrange(numbetween):
        trial1 = random.choice(trials)
        trial2 = random.choice(trials)
        dbtrans.entries.append(trial1.entries[-1][:])
        next = Database()
        next.keys = trial1.keys[:]
        next.entries = [trial2.entries[0][:]]
        next.delete_key(*historyvars)
        dbtrans.entries[-1].extend(next.entries[0])
    print "Writing transitions to "+transfile
    dbtrans.writeCSV(transfile)
# Script entry: with one argument ('reach' or 'traj') run the canned
# pipeline; otherwise treat argv as in.csv out.csv [commands] and eval each
# command as a method call on the database.
db = TimeSeriesDatabase()
if len(sys.argv) == 2:
    if sys.argv[1]=='reach':
        db.readCSV('processed_data/reach.csv')
        print len(db.entries),"entries read"
        do_reach_default(db,'processed_data/reach_scaled.csv',
                         'processed_data/reach_trans_scaled.csv',
                         'processed_data/reach_init_scaled.csv',
                         'processed_data/reach-mean-scale.csv')
    elif sys.argv[1]=='traj':
        db.readCSV('processed_data/traj.csv')
        print len(db.entries),"entries read"
        do_traj_default(db,'processed_data/traj_scaled.csv',
                        'processed_data/traj_trans_scaled.csv',
                        'processed_data/traj_init_scaled.csv',
                        'processed_data/traj-mean-scale.csv')
    else:
        print "Usage: %s {reach,traj}"%(sys.argv[0],)
        exit(0)
else:
    if len(sys.argv) < 3:
        print "Usage: %s in.csv out.csv [commands]"%(sys.argv[0],)
        exit(0)
    infile = sys.argv[1]
    outfile = sys.argv[2]
    db.readCSV(infile)
    print len(db.entries),"entries read"
    for cmd in sys.argv[3:]:
        func,arg=cmd.split(" ",1)
        # NOTE(review): eval of command-line input — fine for a personal
        # research script, unsafe for untrusted callers.
        eval("db."+func+"("+arg+")")
    print len(db.entries),"entries processed"
    db.writeCSV(outfile)
|
[
"rautnalin@gmail.com"
] |
rautnalin@gmail.com
|
bb15f603b4ce5286f3877fbe5c9f8d23abda767f
|
7e334b54b3a92d642af688139edaa952135f1026
|
/pythMatrix/PythagorasMatrix.py
|
a1018123597b004bb31fe28e9a8b680198970593
|
[] |
no_license
|
chinook82/calcMatrix.numero.py
|
db46dcf031bee2768788d56e49ee441cab363bd1
|
b4e5b763aa3724dda3a954b9d15af12f9c9badf0
|
refs/heads/master
| 2021-01-01T16:52:45.911307
| 2017-08-04T10:14:06
| 2017-08-04T10:14:06
| 97,938,852
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 99
|
py
|
""" Class Pyphagoras Matrix """
class PythagorasMatrix:
pass
# end PythagorasMatrix class
|
[
"cheguevaros@mail.ru"
] |
cheguevaros@mail.ru
|
cdb75a85f07982575bcab34275dda5cb319f027c
|
eb649fdf454118f54247501605b7ebcbf3f53dca
|
/drafts/amp_uvdist.py
|
c5ac63177627c0d9f26a703d1250bf505589a233
|
[] |
no_license
|
abulatek/knac
|
c20cdd9f1a79edb3cf6cc338107ab988181f33ca
|
44421624d1873c112b7f665914311610450f817d
|
refs/heads/master
| 2018-09-22T04:45:19.779829
| 2018-06-06T17:34:55
| 2018-06-06T17:34:55
| 93,764,736
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,598
|
py
|
from astropy.io import fits
import matplotlib.pyplot as plt
import numpy as np
def amp_uvdist(sbfile, lbfile, binsize):
    """Plot binned visibility amplitude versus uv-distance for two datasets.

    Parameters
    ----------
    sbfile, lbfile : str
        Paths to UVFITS files (e.g. short- and long-baseline observations).
    binsize : float
        Width of each uv-distance bin, in kilolambda.

    Side effect: draws a matplotlib figure and calls plt.show().
    """
    plt.clf()
    filenames = [sbfile, lbfile]
    for name in filenames:
        image = fits.open(name)
        u = image[0].data['UU']
        v = image[0].data['VV']
        # scale baseline coordinates by the reference frequency -> kilolambda
        u *= image[0].header['crval4']/1e3
        v *= image[0].header['crval4']/1e3
        vis = image[0].data['data']
        # average the two polarisations, then average over channels
        real = (vis[:,0,0,0,:,0,0] + vis[:,0,0,0,:,1,0])/2.
        real = np.mean(real, axis=1)
        imag = (vis[:,0,0,0,:,0,1] + vis[:,0,0,0,:,1,1])/2.
        imag = np.mean(imag, axis=1)
        uvdist = np.sqrt(u**2 + v**2)
        # binned quantities
        newuvdist = []
        newreal = []
        newimag = []
        amperrs = []
        rangemin = np.amin(uvdist)
        rangemax = np.amax(uvdist)
        # BUG FIX: range() rejects the float bounds returned by np.amin/np.amax
        # (TypeError); np.arange handles fractional edges and step sizes.
        for minimum in np.arange(rangemin, rangemax, binsize):
            maximum = minimum + binsize
            uvsubarray = (uvdist > minimum) & (uvdist < maximum)
            uvdist2 = uvdist[uvsubarray]
            # median uv-distance represents the bin
            uvrep = np.median(uvdist2)
            newuvdist.append(uvrep)
            # drop exact-zero (flagged/missing) visibilities before averaging
            realsubarray = (real[uvsubarray] != 0)
            real2 = real[uvsubarray][realsubarray]
            realav = np.mean(real2)
            newreal.append(realav)
            imagsubarray = (imag[uvsubarray] != 0)
            imag2 = imag[uvsubarray][imagsubarray]
            imagav = np.mean(imag2)
            newimag.append(imagav)
            # propagate real/imag scatter into an amplitude uncertainty
            rstdev = np.std(real2)
            istdev = np.std(imag2)
            rN = len(real2)
            iN = len(imag2)
            amperr = np.sqrt((rstdev**2/rN)*(realav**2/(realav**2+imagav**2))+(istdev**2/iN)*(imagav**2/(realav**2+imagav**2)))
            amperrs.append(amperr)
        # empty bins produce NaNs (median/mean of empty slice) — drop them
        newuvdist = np.asarray(newuvdist)
        newuvdist = newuvdist[~np.isnan(newuvdist)]
        r = np.asarray(newreal)
        r = r[~np.isnan(r)]
        i = np.asarray(newimag)
        i = i[~np.isnan(i)]
        newamp = np.sqrt(r**2 + i**2)
        amperrs = np.asarray(amperrs)
        amperrs = amperrs[~np.isnan(amperrs)]
        plt.plot(newuvdist, newamp, '.')
        plt.errorbar(newuvdist, newamp, yerr=amperrs, fmt='.')
    plt.xlabel('Distance from center of uv-plane (klambda)')
    plt.ylabel('Amplitude (Jy)')
    plt.title('Amplitude versus uv-distance')
    plt.grid(True)
    plt.show()
|
[
"noreply@github.com"
] |
abulatek.noreply@github.com
|
33ef53497f24fd895f37e3502f3660d462331246
|
7bd96167a0e28b67d23d588536eb4eb9d3f30fc2
|
/sstcam_sandbox/d181105_sim_telarray_cfg/pixel_mapping.py
|
bc9517e247a68b260a0ae22506a7107d01fc575a
|
[
"BSD-3-Clause"
] |
permissive
|
watsonjj/sstcam-sandbox
|
18bc6ca6ae4a0f09cdc1ae70396b79fc9d54a8a6
|
91330d3a6f510a392f635bd7f4abd2f77871322c
|
refs/heads/master
| 2023-07-12T22:03:37.668204
| 2021-08-17T14:19:58
| 2021-08-17T14:19:58
| 128,373,226
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,343
|
py
|
from sstcam_sandbox import get_data
from CHECLabPy.utils.files import create_directory
import numpy as np
import pandas as pd
import os
from target_calib import Mapping
def create_new_mapping(input_path, output_path):
    """
    Create a TargetCalib Mapping file with the latest pixel positions,
    but the prod3 pixel ordering
    """
    create_directory(os.path.dirname(output_path))
    df = pd.read_csv(input_path, sep='\t')
    # renumber pixels row-major (by row, then column)
    df_new = df.sort_values(['row', 'col'])
    df_new['pixel'] = np.arange(2048)
    # relabel superpixels in order of their lowest contained pixel id
    sp_ordering = df_new.groupby('superpixel').min().sort_values('pixel').index.values
    lookup = np.argsort(sp_ordering)
    df_new['superpixel'] = lookup[df_new['superpixel'].values]
    # same relabelling for module slots
    slot_ordering = df_new.groupby('slot').min().sort_values('pixel').index.values
    lookup = np.argsort(slot_ordering)
    df_new['slot'] = lookup[df_new['slot'].values]
    # renumber the within-module pixel index (tmpix) per slot
    for itm in range(32):
        lookup = np.argsort(df_new.loc[df_new['slot'] == itm]['pixel'].values)
        df_new.loc[df_new['slot'] == itm, 'tmpix'] = lookup
    df_new.to_csv(output_path, sep='\t', float_format='%.7f', index=False)
def create_new_camera_cfg(tc_cfg_path, output_path):
    """
    Create a new sim_telarray camera cfg file using a TargetCalib mapping file
    """
    create_directory(os.path.dirname(output_path))
    mapping = Mapping(tc_cfg_path)
    mappingsp = mapping.GetMappingSP()
    with open(output_path, 'w') as f:
        # header + pixel table + trigger groups; the "original" (overlapping)
        # trigger-group variant is intentionally disabled in favour of unique pairs
        write_PixType(f)
        write_pixel_positions(f, mapping)
        # write_trigger_groups_original(f, mapping, mappingsp)
        write_trigger_groups_unique(f, mapping, mappingsp)
def write_PixType(file):
    # Write the fixed sim_telarray PixType header (the string content is the
    # cfg-file format documentation and must be emitted verbatim).
    # NOTE(review): parameter `file` shadows a builtin name (Python 2 era).
    pixtype = """# PixType format:
# Par. 1: pixel type (here always 1)
# 2: PMT type (must be 0)
# 3: cathode shape type
# 4: visible cathode diameter [cm]
# 5: funnel shape type (see above)
# 6: funnel diameter (flat-to-flat for hex.) [cm]
# 7: depth of funnel [cm]
# 8: a) funnel efficiency "filename", b) funnel plate transparency
# 9: a) optional wavelength "filename", b) funnel wall reflectivity
# In case a) in column 8, columns 3+7 are not used. If in case a) the
# optional file name for the wavelength dependence is provided, the
# overall scale in the file provided as parameter 8 is ignored because
# it is rescaled such that the average over all mirrors is equal to
# the given wavelength dependent value.
#
# Shape types: 0: circ., 1: hex(flat x), 2: sq., 3: hex(flat y)
#
#Angular Dep currently all set to one for checks
# Note that pixel size is scale from the actual 6.125 mm at the focal length
# planned with GATE telescopes (228.3 cm) to the focal length of ASTRI (215 cm).
# Similarly scaled are the pixel positions.
PixType 1 0 2 0.620 2 0.620 0.0 "funnel_perfect.dat"
# Pixel format:
# Par. 1: pixel number (starting at 0)
# 2: pixel type (must be 1)
# 3: x position [cm]
# 4: y position [cm]
# 5: drawer/module number
# 6: board number in module
# 7: channel number n board
# 8: board Id number ('0x....')
# 9: pixel on (is on if parameter is missing)
# 10: relative QE or PDE (1 if unused)
# 11: relative gain (1 if unused)
"""
    file.write(pixtype)
def write_pixel_positions(file, mapping):
    # Emit one "Pixel ..." cfg line per camera pixel, attaching per-pixel
    # relative QE and gain loaded from .npy files in the working directory.
    qe_arr = np.load('checs_qe_variation.npy')
    gain_arr = np.load('checs_gain_variation.npy')
    for i in range(mapping.GetNPixels()):
        ipix = mapping.GetPixel(i)
        # positions converted from metres to centimetres (* 10**2)
        xpix = mapping.GetXPix(i) * 10 ** 2
        ypix = mapping.GetYPix(i) * 10 ** 2
        imod = mapping.GetSlot(i)
        ichan = mapping.GetTMPixel(i)
        l = "Pixel\t{} 1\t {:.2f}\t{:.2f}\t{} 0 {}\t0x00 1\t{:.5f}\t{:.5f}\n"
        lf = l.format(ipix, xpix, ypix, imod, ichan, qe_arr[i], gain_arr[i])
        file.write(lf)
    file.write('\n')
def write_trigger_groups_original(file, mapping, mappingsp):
    # For every superpixel, write a MajorityTrigger line covering it and all
    # of its neighbours (groups overlap between superpixels).
    for i in range(mappingsp.GetNSuperPixels()):
        nei = mappingsp.GetNeighbours(i, True)
        # sort neighbours by (column, row) for deterministic output
        neisort = []
        for inei in nei:
            neirow = mappingsp.GetRow(inei)
            neicol = mappingsp.GetColumn(inei)
            neisort.append((neicol, neirow))
        sort = sorted(range(len(neisort)), key=neisort.__getitem__)
        nei = [nei[isort] for isort in sort]
        file.write("MajorityTrigger * of ")
        for isp in [i, *nei]:
            con = list(mappingsp.GetContainedPixels(isp))
            rows = [mapping.GetRow(j) for j in con]
            cols = [mapping.GetColumn(j) for j in con]
            # pick the bottom-left pixel of the superpixel as the group anchor
            min_r = np.min(rows)
            min_c = np.min(cols)
            con_bl = con[np.where((rows == min_r) & (cols == min_c))[0][0]]
            con.remove(con_bl)
            file.write('{}[{},{},{}] '.format(con_bl, *con))
        file.write('\n')
def write_trigger_groups_unique(file, mapping, mappingsp):
    # Like write_trigger_groups_original, but emits each neighbouring
    # superpixel PAIR exactly once (deduplicated via sorted pairs).
    pairs = []
    for i in range(mappingsp.GetNSuperPixels()):
        nei = mappingsp.GetNeighbours(i, True)
        # sort neighbours by (column, row) for deterministic pair ordering
        neisort = []
        for inei in nei:
            neirow = mappingsp.GetRow(inei)
            neicol = mappingsp.GetColumn(inei)
            neisort.append((neicol, neirow))
        sort = sorted(range(len(neisort)), key=neisort.__getitem__)
        nei = [nei[isort] for isort in sort]
        for n in nei:
            pair = sorted((i, n))
            if pair not in pairs:
                pairs.append(pair)
    for p in pairs:
        file.write("MajorityTrigger * of ")
        for isp in p:
            con = list(mappingsp.GetContainedPixels(isp))
            rows = [mapping.GetRow(j) for j in con]
            cols = [mapping.GetColumn(j) for j in con]
            # anchor each superpixel's group on its bottom-left pixel
            min_r = np.min(rows)
            min_c = np.min(cols)
            con_bl = con[np.where((rows == min_r) & (cols == min_c))[0][0]]
            con.remove(con_bl)
            file.write('{}[{},{},{}] '.format(con_bl, *con))
        file.write('\n')
def main():
    """Generate the sim_telarray configuration files for the CHEC-S camera.

    Step 1 converts the TargetCalib mapping into a new TC mapping file;
    step 2 derives the sim_telarray pixel-mapping file from that output.
    """
    tc_input = "/Users/Jason/Software/TargetCalib/source/dev/mapping_checs_V1-1-0.cfg"
    tc_output = get_data("d181105_sim_telarray_cfg/tc_mapping.cfg")
    create_new_mapping(tc_input, tc_output)

    cam_input = get_data("d181105_sim_telarray_cfg/tc_mapping.cfg")
    cam_output = get_data("d181105_sim_telarray_cfg/pixel_mapping.dat")
    create_new_camera_cfg(cam_input, cam_output)


if __name__ == '__main__':
    main()
|
[
"jason.jw@live.co.uk"
] |
jason.jw@live.co.uk
|
b572b186134842cde318ac3176c3a537bc2fd2a2
|
2ce474218a76137c28da34100b76732b71774514
|
/noxfile.py
|
45ac6e92aba53fe8374f7b5f0d0f5e76f96cf01d
|
[
"ISC"
] |
permissive
|
connectedcars/nb-clean
|
1c75d6541cad346e404fa62487d9ee53287dbf03
|
214481fe33f3cb0e4ef62a7e53f739b1ccab9460
|
refs/heads/master
| 2020-05-07T09:10:55.641838
| 2019-04-09T15:05:46
| 2019-04-09T15:05:46
| 180,366,143
| 0
| 0
| null | 2019-04-09T12:52:37
| 2019-04-09T12:52:37
| null |
UTF-8
|
Python
| false
| false
| 1,088
|
py
|
"""Nox configuration."""
import nox
# Paths checked by every lint/format session below.
SOURCES = ["noxfile.py", "src"]
def install_dependencies(session):
    """Install Poetry into the session, then let it install the project."""
    tool = "poetry"
    session.install(tool)
    session.run(tool, "install")
@nox.session
def mypy(session):
    """Run mypy in strict mode over the source tree."""
    install_dependencies(session)
    command = ["poetry", "run", "mypy", "--strict", "src"]
    session.run(*command)
@nox.session
def flake8(session):
    """Run the Flake8 linter over all source paths."""
    install_dependencies(session)
    command = ["poetry", "run", "flake8"] + SOURCES
    session.run(*command)
@nox.session
def pylint(session):
    """Run Pylint over all source paths."""
    install_dependencies(session)
    command = ["poetry", "run", "pylint"] + SOURCES
    session.run(*command)
@nox.session
def isort(session):
    """Verify import ordering with isort (check-only, no rewriting)."""
    install_dependencies(session)
    command = ["poetry", "run", "isort", "--check-only", "--recursive"] + SOURCES
    session.run(*command)
@nox.session
def black(session):
    """Verify formatting with black without modifying files."""
    install_dependencies(session)
    command = ["poetry", "run", "black", "--check"] + SOURCES
    session.run(*command)
|
[
"scott@stevenson.io"
] |
scott@stevenson.io
|
36809d5546b14a81bb05f8539e7eee6d5ce6c676
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_165/ch84_2019_06_06_16_35_44_977490.py
|
4474d83ec8994225edbf0e29de51fccb478e8ee1
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 266
|
py
|
def inverte_dicionario(dicionario_1):
    """Invert a dict: map each original value to the list of keys holding it.

    Keys sharing the same value are grouped in iteration order.
    Fixes three defects in the original: iterating the dict without
    .items() (only keys are yielded), referencing an undefined name
    (`valor` vs `valores`), and calling .append on the dict instead of
    the per-value list.
    """
    dicionario_novo = {}
    for nome, valor in dicionario_1.items():
        if valor not in dicionario_novo:
            dicionario_novo[valor] = []
        dicionario_novo[valor].append(nome)
    return dicionario_novo
|
[
"you@example.com"
] |
you@example.com
|
0ef874fe8275ef5798b65edcde8202115ad19c98
|
10b0b7a9adc6e293501488d91eac3b6bd8546b99
|
/src/ros_deep_vision/data_monster.py
|
d1a7cc8e1b599a0e9a4e2cdef3de359e19eb51f2
|
[
"MIT"
] |
permissive
|
goolygu/ros-deep-vision
|
9e0c552b6901cb07cfb3210caa53238b145c54b7
|
9328dfe6c13f5e3dd3e3a3f816cbbfbca15aaf3b
|
refs/heads/master
| 2020-04-06T04:57:22.761956
| 2017-04-03T17:23:38
| 2017-04-03T17:23:38
| 46,363,009
| 9
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 65,673
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8
import roslib
import rospy
import sys
import os
import cv2
import numpy as np
import time
import StringIO
from threading import Lock
from misc import WithTimer
from numpy_cache import FIFOLimitedArrayCache
from app_base import BaseApp
# from core import CodependentThread
from image_misc import *
from time import gmtime, strftime
from utils import DescriptorHandler, Descriptor
import settings
import caffe
from collections import namedtuple
import yaml
from data_collector import Data
from data_ploter import *
from distribution import *
import pcl
from ros_deep_vision.srv import String2
from visualization_msgs.msg import Marker
from geometry_msgs.msg import Point
import copy
import scipy
from scipy.special import expit
from data_util import *
from data_analyzer import *
from input_manager import *
from visualizer import *
class DataMonster:
    def __init__(self, settings, data_settings):
        """Build the Caffe classifier, mean image and visualisation helpers.

        settings: caffevis settings (model paths, mean file, GPU flag).
        data_settings: experiment settings, applied via set_data_settings().
        """
        print 'initialize'
        self.settings = settings
        # Mean image subtracted from every input before the forward pass.
        self._data_mean = np.load(settings.caffevis_data_mean)
        # Crop center region (e.g. 227x227) if mean is larger (e.g. 256x256)
        excess_h = self._data_mean.shape[1] - self.settings.caffevis_data_hw[0]
        excess_w = self._data_mean.shape[2] - self.settings.caffevis_data_hw[1]
        assert excess_h >= 0 and excess_w >= 0, 'mean should be at least as large as %s' % repr(self.settings.caffevis_data_hw)
        self._data_mean = self._data_mean[:, excess_h:(excess_h+self.settings.caffevis_data_hw[0]),
                          excess_w:(excess_w+self.settings.caffevis_data_hw[1])]
        # Caffe expects BGR channel order; the inverse swap restores RGB.
        self._net_channel_swap = (2,1,0)
        self._net_channel_swap_inv = tuple([self._net_channel_swap.index(ii) for ii in range(len(self._net_channel_swap))])
        self._range_scale = 1.0  # not needed; image comes in [0,255]
        self.available_layer = ['conv1', 'pool1', 'norm1', 'conv2', 'pool2', 'norm2', 'conv3', 'conv4', 'conv5', 'pool5', 'fc6', 'fc7', 'fc8', 'prob']
        # RViz marker publishing in the R2 head camera's optical frame.
        self.visualizer = Visualizer()
        self.visualizer.set_frame("/r2/head/asus_depth_optical_frame")
        self.visualizer.set_topics(['grasp_distribution', 'feature', "grasp_target"])
        GPU_ID = 0
        if settings.caffevis_mode_gpu:
            caffe.set_mode_gpu()
            print 'CaffeVisApp mode: GPU'
            caffe.set_device(GPU_ID)
        else:
            caffe.set_mode_cpu()
            print 'CaffeVisApp mode: CPU'
        self.net = caffe.Classifier(
            settings.caffevis_deploy_prototxt,
            settings.caffevis_network_weights,
            mean = self._data_mean,
            channel_swap = self._net_channel_swap,
            raw_scale = self._range_scale,
        )
        self.input_dims = self.net.blobs['data'].data.shape[2:4] # e.g. (227,227)
        # Thresholds, back-prop mode, averaging grid and input manager are
        # all derived from the data settings in one place.
        self.set_data_settings(data_settings)
        self.visualize = True
        self.show_backprop = True
def set_data_settings(self, data_settings):
self.ds = data_settings
self.threshold = {}
self.threshold['conv5'] = self.ds.thres_conv5
self.threshold['conv4'] = self.ds.thres_conv4
self.threshold['conv3'] = self.ds.thres_conv3
self.threshold['conv2'] = self.ds.thres_conv2
self.threshold['conv1'] = 0
self.back_mode = self.ds.back_prop_mode
w = self.ds.avg_pointcloud_width
self.average_grid = np.mgrid[-w:w,-w:w]
self.input_manager = InputManager(self.ds, self.input_dims)
    def set_frame(self, frame):
        """Set the TF frame in which all RViz markers are published."""
        self.visualizer.set_frame(frame)
    def set_box(self, min_max_box, margin_ratio):
        """Restrict input processing to a bounding box grown by margin_ratio."""
        self.input_manager.set_box(min_max_box, margin_ratio)
    def set_train_path(self, path):
        """Remember the directory that training data is loaded from."""
        self.train_path = path
def train_each_case(self, tbp, data_name_list):
data_dic = self.input_manager.get_data_dic(self.train_path)
dist_dic = {}
for case in data_dic:
print "train case", case
if tbp:
dist_dic[case] = self.train(data_dic[case])
else:
dist_dic[case] = self.train_without_tbp(data_dic[case])
return dist_dic
def train_all(self):
data_list = self.input_manager.get_data_all(self.train_path)
return self.train(data_list)
    def train(self, data_list):
        """Learn a feature hierarchy with taking-backprop (tbp).

        Starting from the most consistent conv5 filters, each level
        back-propagates a chosen filter and searches for consistent child
        filters in the layer below (conv4 -> conv3 -> conv2), recording
        every filter's position relative to the robot frames.

        Returns the populated Distribution.
        """
        distribution = Distribution()
        conv5_list = self.load_conv5(data_list)
        filter_idx_list_conv5 = self.find_consistent_filters(conv5_list, self.threshold["conv5"], self.ds.n_conv5_f)
        print "consistent filter conv5", filter_idx_list_conv5
        # for each consistent conv-5 filter look for top N conv-4 filters
        for filter_idx_5 in filter_idx_list_conv5:
            print "handling filter", filter_idx_5, "layer conv5"
            # img_src is the abs diff back-propagated to the image layer
            conv4_diff_list, img_src_5_list = self.load_layer_fix_filter_list("conv4", "conv5", conv5_list, data_list, filter_idx_5)
            rel_pos_5 = self.get_relative_pos(filter_idx_5, data_list, conv5_list, img_src_5_list, self.ds.frame_list_conv5, self.threshold["conv5"])
            self.set_distribution(distribution, self.ds.frame_list_conv5, filter_idx_5, rel_pos_5, [])
            filter_idx_list_conv4 = self.find_consistent_filters(conv4_diff_list, self.threshold["conv4"], self.ds.n_conv4_f)
            print "consistent filter conv4", filter_idx_list_conv4
            for i, filter_idx_4 in enumerate(filter_idx_list_conv4):
                print "handling filter", filter_idx_4, "layer conv4"
                conv3_diff_list, img_src_4_list = self.load_layer_fix_filter_list("conv3", "conv4", conv4_diff_list, data_list, filter_idx_4)
                rel_pos_4 = self.get_relative_pos(filter_idx_4, data_list, conv4_diff_list, img_src_4_list, self.ds.frame_list_conv4, self.threshold["conv4"])
                self.set_distribution(distribution, self.ds.frame_list_conv4, filter_idx_4, rel_pos_4, [filter_idx_5])
                filter_idx_list_conv3 = self.find_consistent_filters(conv3_diff_list, self.threshold["conv3"], self.ds.n_conv3_f)
                print "consistent filter conv3", filter_idx_list_conv3
                for j, filter_idx_3 in enumerate(filter_idx_list_conv3):
                    print "handling filter", filter_idx_3, "layer conv3"
                    conv2_diff_list, img_src_3_list = self.load_layer_fix_filter_list("conv2", "conv3", conv3_diff_list, data_list, filter_idx_3)
                    rel_pos_3 = self.get_relative_pos(filter_idx_3, data_list, conv3_diff_list, img_src_3_list, self.ds.frame_list_conv3, self.threshold["conv3"])
                    self.set_distribution(distribution, self.ds.frame_list_conv3, filter_idx_3, rel_pos_3, [filter_idx_5, filter_idx_4])
                    filter_idx_list_conv2 = self.find_consistent_filters(conv2_diff_list, self.threshold["conv2"], self.ds.n_conv2_f)
                    print "consistent filter conv2", filter_idx_list_conv2
                    for k, filter_idx_2 in enumerate(filter_idx_list_conv2):
                        print "handling filter", filter_idx_2, "layer conv2"
                        conv1_diff_list, img_src_2_list = self.load_layer_fix_filter_list("conv1", "conv2", conv2_diff_list, data_list, filter_idx_2)
                        rel_pos_2 = self.get_relative_pos(filter_idx_2, data_list, conv2_diff_list, img_src_2_list, self.ds.frame_list_conv2, self.threshold["conv2"])
                        self.set_distribution(distribution, self.ds.frame_list_conv2, filter_idx_2, rel_pos_2, [filter_idx_5, filter_idx_4, filter_idx_3])
        return distribution
    def train_without_tbp(self, data_list):
        """Train a distribution without taking-backprop conditioning.

        Consistent filters are found independently per layer from forward
        activations; signatures use -1 placeholders instead of real parent
        filter indices.
        """
        distribution = Distribution()
        conv5_list = self.load_layer(data_list, 'conv5')
        conv4_list = self.load_layer(data_list, 'conv4')
        conv3_list = self.load_layer(data_list, 'conv3')
        conv2_list = self.load_layer(data_list, 'conv2')
        filter_idx_list_conv5 = self.find_consistent_filters(conv5_list, self.threshold["conv5"], self.ds.n_conv5_f)
        print "consistent filter conv5", filter_idx_list_conv5
        filter_idx_list_conv4 = self.find_consistent_filters(conv4_list, self.threshold["conv4"], self.ds.n_conv4_f)
        print "consistent filter conv4", filter_idx_list_conv4
        filter_idx_list_conv3 = self.find_consistent_filters(conv3_list, self.threshold["conv3"], self.ds.n_conv3_f)
        print "consistent filter conv3", filter_idx_list_conv3
        filter_idx_list_conv2 = self.find_consistent_filters(conv2_list, self.threshold["conv2"], self.ds.n_conv2_f)
        print "consistent filter conv2", filter_idx_list_conv2
        # Placeholder signature for the three upper levels.
        distribution.set_tree_sig([-1,-1,-1])
        for i, filter_idx_4 in enumerate(filter_idx_list_conv4):
            if filter_idx_4 == -1:
                continue
            print "handling filter", filter_idx_4, "layer conv4"
            # Backprop is still used, but only to localise the filter in the
            # image (img_src); consistency comes from the forward pass.
            conv3_diff_list, img_src_4_list = self.load_layer_fix_filter_list("conv3", "conv4", conv4_list, data_list, filter_idx_4)
            rel_pos_4 = self.get_relative_pos(filter_idx_4, data_list, conv4_list, img_src_4_list, self.ds.frame_list_conv4, self.threshold["conv4"])
            self.set_distribution(distribution, self.ds.frame_list_conv4, filter_idx_4, rel_pos_4, [-1])
            # NOTE(review): the conv3/conv2 passes below do not depend on
            # filter_idx_4, so this nesting repeats identical work per conv4
            # filter -- confirm the intended structure.
            for j, filter_idx_3 in enumerate(filter_idx_list_conv3):
                print "handling filter", filter_idx_3, "layer conv3"
                conv2_diff_list, img_src_3_list = self.load_layer_fix_filter_list("conv2", "conv3", conv3_list, data_list, filter_idx_3)
                rel_pos_3 = self.get_relative_pos(filter_idx_3, data_list, conv3_list, img_src_3_list, self.ds.frame_list_conv3, self.threshold["conv3"])
                self.set_distribution(distribution, self.ds.frame_list_conv3, filter_idx_3, rel_pos_3, [-1, -1])
                for k, filter_idx_2 in enumerate(filter_idx_list_conv2):
                    print "handling filter", filter_idx_2, "layer conv2"
                    conv1_diff_list, img_src_2_list = self.load_layer_fix_filter_list("conv1", "conv2", conv2_list, data_list, filter_idx_2)
                    rel_pos_2 = self.get_relative_pos(filter_idx_2, data_list, conv2_list, img_src_2_list, self.ds.frame_list_conv2, self.threshold["conv2"])
                    self.set_distribution(distribution, self.ds.frame_list_conv2, filter_idx_2, rel_pos_2, [-1, -1, -1])
        return distribution
    def test_clutter(self, path, data_path, name, tbp, case_list, single_test):
        """Evaluate saved per-case distributions on test scenes.

        path: experiment root containing distribution/ and result/.
        data_path: directory of test scenes.
        name: distribution file suffix.
        tbp: use taking-backprop localisation if True.
        case_list: 'action:target' cases whose distributions are loaded.
        single_test: scene index to test alone, or None for all scenes.
        Results are grouped by case and object and dumped to a yaml file.
        """
        dist_path = path + '/distribution/'
        result_path = path + '/result/'
        result_xyz = {}
        result = {}
        fail_count = {}
        distribution_dic = {}
        if single_test == None:
            data_list = self.input_manager.get_data_all(data_path)
        else:
            data_list = self.input_manager.get_data_all(data_path, [single_test])
        # Load and prune one distribution per case up front.
        for case in case_list:
            distribution = Distribution()
            distribution.load(dist_path, "[" + case + "]" + name)
            distribution_dic[case] = self.filter_distribution(distribution, self.ds.filter_low_n)
        for i, data in enumerate(data_list):
            if self.visualize:
                cv2.destroyAllWindows()
                cv2.imshow("img", data.img)
                cv2.waitKey(100)
            # The scene's own action/target selects which distribution to test.
            case = data.action + ':' + data.target_type
            if not case in result:
                result[case] = {}
                result_xyz[case] = {}
                fail_count[case] = {}
            obj = data.name
            result_xyz[case][obj], result[case][obj], fail_count[case][obj] = self.test_accuracy(distribution_dic[case], [data], tbp)
            if self.visualize:
                # Interactive mode: typing 'n' aborts the remaining scenes.
                print "continue?"
                key = raw_input()
                if key == 'n':
                    break
        test_name = self.ds.get_test_name()
        run_analysis(result)
        check_fail(fail_count)
        with open(result_path + "/" + name + "_" + test_name + '.yaml', 'w') as f:
            yaml.dump(result, f, default_flow_style=False)
    def cross_validation(self, path, name, train, tbp, data_name_list):
        """Leave-one-object-out cross validation per case.

        path: experiment root containing distribution/ and result/.
        name: distribution file suffix.
        train: retrain leave-one-out distributions if True; otherwise load.
        tbp: use taking-backprop training/testing if True.
        data_name_list: object names to include.
        Writes result, xyz-error and failure-count yaml files.
        """
        self.visualize = False
        data_name_dic = self.input_manager.get_data_name_dic(self.train_path, data_name_list)
        dist_dic = {}
        result_xyz = {}
        result = {}
        fail_count = {}
        dist_path = path + '/distribution/'
        result_path = path + '/result/'
        # NOTE(review): condition hard-coded to True; original guard kept as
        # comment -- confirm whether it should be self.ds.evaluate == 'full'.
        if True:#self.ds.evaluate == 'full':
            # Full (all-object) distribution per case, used when merging
            # other cases' distributions during evaluation below.
            full_distribution = {}
            for case in data_name_dic:
                if train == False and os.path.isfile(dist_path + "[" + case + "]" + name + ".yaml"):
                    full_distribution[case] = Distribution()
                    full_distribution[case].load(dist_path, "[" + case + "]" + name)
                else:
                    print "full training", case
                    train_data_list_full = []
                    for j, train_object in enumerate(data_name_dic[case]):
                        train_data_list_full = train_data_list_full + data_name_dic[case][train_object]
                    if tbp:
                        full_distribution[case] = self.train(train_data_list_full)
                    else:
                        full_distribution[case] = self.train_without_tbp(train_data_list_full)
                    full_distribution[case].save(dist_path, "[" + case + "]" + name)
        train_data_list = {}
        test_data_list = {}
        # create train data list and test data list (leave-one-object-out)
        for k, case in enumerate(data_name_dic):
            train_data_list[case] = {}
            test_data_list[case] = {}
            for i, test_object in enumerate(data_name_dic[case]):
                train_data_list[case][test_object] = []
                test_data_list[case][test_object] = []
                for j, train_object in enumerate(data_name_dic[case]):
                    if j != i:
                        train_data_list[case][test_object] += data_name_dic[case][train_object]
                    else:
                        test_data_list[case][test_object] += data_name_dic[case][train_object]
        # train and save one distribution per held-out object
        if train:
            print "start training"
            for k, case in enumerate(data_name_dic):
                print "train case", case
                for i, test_object in enumerate(data_name_dic[case]):
                    print "leave", i, test_object, "out"
                    if tbp:
                        distribution = self.train(train_data_list[case][test_object])
                    else:
                        distribution = self.train_without_tbp(train_data_list[case][test_object])
                    distribution.save(dist_path + '/cross_validation/', "[" + case + "][leave_" + test_object + "]" + name)
        print "start testing"
        # test each held-out object against its leave-one-out distribution
        for k, case in enumerate(data_name_dic):
            result[case] = {}
            result_xyz[case] = {}
            fail_count[case] = {}
            print "test case", case
            for i, test_object in enumerate(data_name_dic[case]):
                print "test", i, test_object
                distribution = Distribution()
                distribution.load(dist_path + '/cross_validation/', "[" + case + "][leave_" + test_object + "]" + name)
                if self.ds.evaluate == 'full':
                    # Merge the other cases' full distributions for evaluation.
                    for other_case in full_distribution:
                        if other_case != case:
                            distribution.merge(full_distribution[other_case])
                distribution = self.filter_distribution(distribution, self.ds.filter_low_n)
                result_xyz[case][test_object], result[case][test_object], fail_count[case][test_object] = self.test_accuracy(distribution, test_data_list[case][test_object], tbp)
        test_name = self.ds.get_test_name()
        run_analysis(result)
        check_fail(fail_count)
        with open(result_path + "/cross_validation_" + name + "_" + test_name + '.yaml', 'w') as f:
            yaml.dump(result, f, default_flow_style=False)
        with open(result_path + "/cross_validation_xyz_" + name + "_" + test_name + '.yaml', 'w') as f:
            yaml.dump(result_xyz, f, default_flow_style=False)
        with open(result_path + "/cross_validation_fail_" + name + "_" + test_name + '.yaml', 'w') as f:
            yaml.dump(fail_count, f, default_flow_style=False)
    def test_accuracy(self, distribution, data_list, tbp):
        """Measure grasp-point prediction error against ground truth.

        For each scene, features are localised in 3D, the distribution is
        translated to the camera frame and fused into one predicted point
        per robot frame; the absolute error against the recorded frame
        position is accumulated.

        Returns (per-frame mean |dx,dy,dz|, per-frame mean distance,
        per-frame count of failed (NaN) predictions).
        """
        diff_sum_dic = {}
        diff_count = {}
        diff_fail = {}
        for idx, data in enumerate(data_list):
            print data.name
            if tbp:
                filter_xyz_dict, filter_resp_dict = self.get_all_filter_xyz(data, distribution)
            else:
                filter_xyz_dict, filter_resp_dict = self.get_all_filter_xyz_notbp(data, distribution)
            distribution_cf = self.get_distribution_cameraframe(distribution, filter_xyz_dict)
            avg_dic = self.model_distribution(distribution_cf, filter_resp_dict)
            if self.visualize:
                self.show_feature(filter_xyz_dict)
                self.show_point_cloud(data.name)
                self.show_distribution(distribution_cf)
            frame_gt_xyz = {}
            for frame in avg_dic:
                # Ground-truth frame location recorded with the scene.
                frame_gt_xyz[frame] = np.array(self.get_frame_xyz(data, frame))
                if not frame in diff_sum_dic:
                    diff_sum_dic[frame] = np.array([0.,0.,0.])
                    diff_count[frame] = 0
                    diff_fail[frame] = 0
                # NaN in the prediction means the fusion failed for this frame.
                if not np.isnan(avg_dic[frame][0]):
                    diff_sum_dic[frame] += np.absolute(frame_gt_xyz[frame] - avg_dic[frame])
                    diff_count[frame] += 1
                else:
                    diff_fail[frame] += 1
            if self.visualize:
                self.show_frames(frame_gt_xyz, "gt", None)
        diff_avg_dic = {}
        diff_dist_dic = {}
        for frame in diff_sum_dic:
            # NOTE(review): diff_count can be zero if every scene failed for
            # a frame -- this would divide by zero; confirm acceptable.
            avg_xyz = diff_sum_dic[frame]/diff_count[frame]
            diff_avg_dic[frame] = avg_xyz.tolist()
            diff_dist_dic[frame] = (np.linalg.norm(avg_xyz)).tolist()
        return diff_avg_dic, diff_dist_dic, diff_fail
def filter_distribution(self, dist, low_n):
if low_n < 0:
return dist
if self.ds.filter_same_parent:
return self.filter_distribution_same_parent(dist, low_n)
else:
return self.filter_distribution_variance(dist, low_n)
    # filter out high variance features
    def filter_distribution_variance(self,dist, low_n):
        """Keep, per robot frame, the low_n features whose relative-position
        samples have the lowest variance.

        low_n < 1 is interpreted as a fraction of the available features.
        Returns a new pruned Distribution.
        """
        new_dist = Distribution()
        data_dict = dist.data_dict
        var_dic = Dic2()
        frame_dic = {}
        for sig in data_dict:
            for frame in data_dict[sig]:
                frame_dic[frame] = 1
                point_list = data_dict[sig][frame]
                # Scalar spread measure: norm of the per-axis variances.
                var = np.linalg.norm(np.nanvar(point_list, axis = 0))
                var_dic.add(frame, sig, var)
        print "low variance filters"
        for frame in frame_dic:
            var_list = var_dic.get_sublist(frame)
            var_list = sorted(var_list, key=lambda var_tuple: var_tuple[1])
            print ""
            print frame
            # NOTE(review): low_n is rebound to a count derived from the
            # FIRST frame's list length; later frames reuse that count
            # (the < 1 branch no longer fires) -- confirm intended.
            if low_n < 1:
                low_n = int(math.floor(len(var_list) * low_n))
            for i in range(min(low_n, len(var_list))):
                sig = var_list[i][0]
                point_list = data_dict[sig][frame]
                new_dist.set(sig, frame, point_list)
                new_dist.set_tree_sig(sig)
                print sig,
        return new_dist
    # find low variance features that has the same parent filter
    def filter_distribution_same_parent(self, dist, low_n):
        """Keep the low-variance features that share the single best parent
        (top-level) filter.

        For every parent, the low_n lowest-variance child features per frame
        are collected; the parent whose kept features have the lowest mean
        variance wins, and its sub-distribution is returned.
        """
        data_dict = dist.data_dict
        var_dic = {}
        frame_dic = {}
        for sig in data_dict:
            # sig[0] is the top-level (conv5) parent filter index.
            parent = sig[0]
            if not parent in var_dic:
                var_dic[parent] = Dic2()
            for frame in data_dict[sig]:
                frame_dic[frame] = 1
                point_list = data_dict[sig][frame]
                # Scalar spread measure: norm of the per-axis variances.
                var = np.linalg.norm(np.nanvar(point_list, axis = 0))
                var_dic[parent].add(frame, sig, var)
        dist_dic = {}
        min_average = float('inf')
        min_parent = -1
        parent_var_list = []
        print "low variance filters"
        for parent in var_dic:
            print "parent", parent
            dist_dic[parent] = Distribution()
            var_sum = 0
            count = 0
            for frame in frame_dic:
                var_list = var_dic[parent].get_sublist(frame)
                var_list = sorted(var_list, key=lambda var_tuple: var_tuple[1])
                print ""
                print frame
                for i in range(min(low_n, len(var_list))):
                    var_sum += var_list[i][1]
                    count += 1
                    sig = var_list[i][0]
                    point_list = data_dict[sig][frame]
                    dist_dic[parent].set(sig, frame, point_list)
                    dist_dic[parent].set_tree_sig(sig)
                    print sig,
            # NOTE(review): count is zero if a parent has no features in any
            # frame -- this would divide by zero; confirm inputs preclude it.
            var_average = var_sum / count
            if var_average < min_average:
                min_average = var_average
                min_parent = parent
            parent_var_list.append((parent, var_average))
        return dist_dic[min_parent]
def show_frames(self, frames_xyz, name, color_map):
if color_map == None:
color_map = {}
color_map["r2/left_palm"] = (0.5,0.5,0)
color_map["r2/left_thumb_tip"] = (0.5,0.5,0)
color_map["r2/left_index_tip"] = (0.5,0.5,0)
if self.visualize:
for frame in frames_xyz:
ns = name + frame
self.visualizer.publish_point_array([frames_xyz[frame]], 0, ns, "grasp_target", color_map[frame], Marker.SPHERE_LIST, 1, 0.04 )
    def model_distribution(self, dist_cf, resp_dict):
        """Fuse per-feature camera-frame distributions into one grasp point
        per robot frame (palm, thumb tip, index tip).

        dist_cf: {sig: {frame: Nx3 points}} in camera coordinates.
        resp_dict: {sig: max filter response}, used for weighting.
        The fusion strategy is chosen by self.ds.dist_to_grasp_point.
        Returns {frame: xyz} and publishes the result as markers.
        """
        dist_list = {}
        dist_list["r2/left_palm"] = np.array([]).reshape([0,3])
        dist_list["r2/left_thumb_tip"] = np.array([]).reshape([0,3])
        dist_list["r2/left_index_tip"] = np.array([]).reshape([0,3])
        w_list = {}
        w_list["r2/left_palm"] = np.array([])
        w_list["r2/left_thumb_tip"] = np.array([])
        w_list["r2/left_index_tip"] = np.array([])
        print resp_dict
        # concatenate all points in camera frame of same robot joint
        for sig in dist_cf:
            for frame in dist_cf[sig]:
                # Drop rows containing NaN before weighting.
                dist = dist_cf[sig][frame]
                nan_mask = np.any(np.isnan(dist), axis=1)
                dist = dist[~nan_mask]
                if self.ds.dist_to_grasp_point == "weightmean" or self.ds.dist_to_grasp_point == "weightdensepoint":
                    weight = np.ones(dist.shape[0]) * resp_dict[sig]
                else:
                    # Normalise by sample count so each feature contributes
                    # equally regardless of how many points it has.
                    weight = np.ones(dist.shape[0]) * (resp_dict[sig]/dist.shape[0])
                dist_list[frame] = np.concatenate((dist_list[frame], dist), axis=0)
                w_list[frame] = np.concatenate((w_list[frame], weight), axis = 0)
        avg_dic = {}
        for frame in dist_list:
            if self.ds.dist_to_grasp_point == "mean":
                avg_dic[frame] = np.nanmean(dist_list[frame], axis=0)
            elif self.ds.dist_to_grasp_point == "density":
                avg_dic[frame] = find_max_density(dist_list[frame])
            elif self.ds.dist_to_grasp_point == "densepoint":
                avg_dic[frame] = find_max_density_point(dist_list[frame])
            elif self.ds.dist_to_grasp_point == "weightmean" or self.ds.dist_to_grasp_point == "filterweightmean":
                if sum(w_list[frame]) == 0:
                    # Degenerate weights: fall back to an unweighted mean.
                    print "weights sum to zero", dist_list[frame]
                    avg_dic[frame] = np.nanmean(dist_list[frame], axis=0)
                else:
                    avg_dic[frame] = np.average(dist_list[frame], axis=0, weights=w_list[frame])
            elif self.ds.dist_to_grasp_point == "weightdensepoint":
                if sum(w_list[frame]) == 0:
                    print "weights sum to zero"
                    avg_dic[frame] = np.nanmean(dist_list[frame], axis=0)
                else:
                    avg_dic[frame] = find_weighted_max_density_point(dist_list[frame], w_list[frame])
        color_map = {}
        color_map["r2/left_palm"] = (0.5,0,0)
        color_map["r2/left_thumb_tip"] = (0,0.5,0)
        color_map["r2/left_index_tip"] = (0,0,0.5)
        self.show_frames(avg_dic, "", color_map)
        return avg_dic
def set_distribution(self, distribution, frame_list, filter_idx, rel_pos_list, parent_filters):
for j, frame in enumerate(frame_list):
distribution.set(parent_filters + [filter_idx], frame, rel_pos_list[j,:,:])
distribution.set_tree(parent_filters, filter_idx)
def show_distribution(self, dist_cf):
color_map = {}
color_map["r2/left_palm"] = (1,0,0)
color_map["r2/left_thumb_tip"] = (0,1,0)
color_map["r2/left_index_tip"] = (0,0,1)
idx = 0
for sig in dist_cf:
for frame in dist_cf[sig]:
ns = "/" + "/".join([str(c) for c in sig]) + "-" + frame
self.visualizer.publish_point_array(dist_cf[sig][frame], idx, ns, 'grasp_distribution', color_map[frame], Marker.POINTS, 0.4, 0.01 )
    def show_feature(self, filter_xyz_dict, ns_prefix=""):
        """Publish each localised feature as a point marker.

        Colour encodes the signature depth (hierarchy level); ns_prefix is
        prepended to the marker namespace.
        """
        # Colour keyed by signature length, i.e. hierarchy depth.
        color_map = {}
        color_map[1] = (1,1,0)
        color_map[2] = (0,1,1)
        color_map[3] = (1,0,1)
        color_map[4] = (1,0.5,0.5)
        for sig in filter_xyz_dict:
            print sig, filter_xyz_dict[sig]
        idx = 0
        for sig in filter_xyz_dict:
            ns = ns_prefix + "/" + "/".join([str(c) for c in sig])
            self.visualizer.publish_point_array([filter_xyz_dict[sig]], idx, ns, 'feature', color_map[len(sig)], Marker.POINTS, 0.9, 0.01 )
# add offset from feature xyz to grasp_points
def get_distribution_cameraframe(self, dist, filter_xyz_dict):
dist_cf = copy.deepcopy(dist.data_dict)
for sig in dist.data_dict:
for frame in dist.data_dict[sig]:
if sig in filter_xyz_dict:
dist_cf[sig][frame] = np.array(dist_cf[sig][frame]) + np.array(filter_xyz_dict[sig])
# dist_cf[sig][frame] += filter_xyz_dict[sig]
else:
if sig in dist_cf:
dist_cf.pop(sig)
return dist_cf
def get_filter_xyz(self, layer_data, pc, threshold):
resize_ratio = float(pc.shape[0]) / float(layer_data.shape[0])
if True:
filter_xy = self.get_filter_avg_xy(layer_data, threshold)
else:
filter_xy = self.get_filter_max_xy(layer_data, threshold)
orig_xy = self.get_orig_xy(filter_xy, resize_ratio)
if self.ds.xy_to_cloud_xyz == "avg":
filter_xyz = self.get_average_xyz_from_point_cloud(pc, orig_xy, self.average_grid)
elif self.ds.xy_to_cloud_xyz == "closest":
filter_xyz = self.get_closest_xyz_from_point_cloud(pc, orig_xy, max_width = self.ds.avg_pointcloud_width)
return filter_xyz, filter_xy
    def get_all_filter_xyz_notbp(self, data, dist):
        """Locate every filter of the distribution's tree in 3D without
        taking-backprop conditioning.

        Real conv5 signatures are processed first; the -1 placeholder
        subtree (produced by train_without_tbp) is then handled level by
        level. Returns (xyz_dict, response_dict) keyed by signature.
        """
        xyz_dict = {}
        response_dict = {}
        self.net_proc_forward_layer(data.img, data.mask)
        conv5_data = copy.deepcopy(self.net.blobs['conv5'].data[0])
        conv4_data = copy.deepcopy(self.net.blobs['conv4'].data[0])
        conv3_data = copy.deepcopy(self.net.blobs['conv3'].data[0])
        conv2_data = copy.deepcopy(self.net.blobs['conv2'].data[0])
        filter_idx_5_list = []
        if self.ds.filter_test == 'top':
            filter_idx_5_list = self.get_top_filters_in_list(conv5_data, dist.filter_tree, self.ds.conv5_top)
        elif self.ds.filter_test == 'all':
            filter_idx_5_list = dist.filter_tree
        for filter_idx_5 in filter_idx_5_list:
            if filter_idx_5 == -1:
                # -1 marks the placeholder subtree handled after this loop.
                continue
            print 'conv5', filter_idx_5
            layer = 'conv5'
            if not self.filter_response_pass_threshold(conv5_data[filter_idx_5], self.ds.thres_conv5_test):
                continue
            # NOTE(review): this overwrites the forward-pass conv4_data with
            # the last conv5 filter's backprop diff even though only
            # img_src_5 appears to be needed -- confirm intended.
            conv4_data, img_src_5 = self.load_layer_fix_filter('conv4', 'conv5', conv5_data, data, filter_idx_5)
            xyz_dict[(filter_idx_5,)], max_xy = self.get_filter_xyz(img_src_5, data.pc, 0)
            response_dict[(filter_idx_5,)] = self.get_max_filter_response(conv5_data[filter_idx_5])
            self.show_gradient(str((filter_idx_5)), self.net.blobs['data'], max_xy, 0)
        filter_idx_4_list = []
        if not -1 in dist.filter_tree:
            # NOTE(review): this guard is ineffective -- the if/elif below
            # still runs and would KeyError on dist.filter_tree[-1]; confirm.
            filter_idx_4_list = []
        if self.ds.filter_test == 'top':
            filter_idx_4_list = self.get_top_filters_in_list(conv4_data, dist.filter_tree[-1], self.ds.conv4_top)
        elif self.ds.filter_test == 'all':
            filter_idx_4_list = dist.filter_tree[-1]
        for filter_idx_4 in filter_idx_4_list:
            if filter_idx_4 == -1:
                continue
            print 'conv4', filter_idx_4
            layer = 'conv4'
            if not self.filter_response_pass_threshold(conv4_data[filter_idx_4], self.ds.thres_conv4_test):
                continue
            conv3_data, img_src_4 = self.load_layer_fix_filter('conv3', 'conv4', conv4_data, data, filter_idx_4)
            xyz_dict[(-1, filter_idx_4)], max_xy = self.get_filter_xyz(img_src_4, data.pc, 0)
            response_dict[(-1, filter_idx_4)] = self.get_max_filter_response(conv4_data[filter_idx_4])
            self.show_gradient(str((-1, filter_idx_4)), self.net.blobs['data'], max_xy, 0)
        filter_idx_3_list = []
        if not -1 in dist.filter_tree[-1]:
            filter_idx_3_list = []
        elif self.ds.filter_test == 'top':
            filter_idx_3_list = self.get_top_filters_in_list(conv3_data, dist.filter_tree[-1][-1], self.ds.conv3_top)
        elif self.ds.filter_test == 'all':
            filter_idx_3_list = dist.filter_tree[-1][-1]
        for filter_idx_3 in filter_idx_3_list:
            print 'conv3', filter_idx_3
            layer = 'conv3'
            if not self.filter_response_pass_threshold(conv3_data[filter_idx_3], self.ds.thres_conv3_test):
                continue
            conv2_data, img_src_3 = self.load_layer_fix_filter('conv2', 'conv3', conv3_data, data, filter_idx_3)
            xyz_dict[(-1, -1, filter_idx_3)], max_xy = self.get_filter_xyz(img_src_3, data.pc, 0)
            response_dict[(-1, -1, filter_idx_3)] = self.get_max_filter_response(conv3_data[filter_idx_3])
            self.show_gradient(str((-1, -1, filter_idx_3)), self.net.blobs['data'], max_xy, 0)
        print "dist", dist.filter_tree
        filter_idx_2_list = []
        if not -1 in dist.filter_tree[-1][-1]:
            filter_idx_2_list = []
        elif self.ds.filter_test == 'top':
            filter_idx_2_list = self.get_top_filters_in_list(conv2_data, dist.filter_tree[-1][-1][-1], self.ds.conv2_top)
        elif self.ds.filter_test == 'all':
            filter_idx_2_list = dist.filter_tree[-1][-1][-1]
        for filter_idx_2 in filter_idx_2_list:
            print 'conv2', filter_idx_2
            layer = 'conv2'
            # NOTE(review): conv3_data here looks like a copy-paste slip --
            # conv2_data[filter_idx_2] was presumably intended (every other
            # level thresholds against its own layer's data); confirm.
            if not self.filter_response_pass_threshold(conv3_data[filter_idx_2], self.ds.thres_conv2_test):
                continue
            conv1_data, img_src_2 = self.load_layer_fix_filter('conv1', 'conv2', conv2_data, data, filter_idx_2)
            xyz_dict[(-1, -1, -1, filter_idx_2)], max_xy = self.get_filter_xyz(img_src_2, data.pc, 0)
            response_dict[(-1, -1, -1, filter_idx_2)] = self.get_max_filter_response(conv2_data[filter_idx_2])
            self.show_gradient(str((-1, -1, -1, filter_idx_2)), self.net.blobs['data'], max_xy, 0)
        return xyz_dict, response_dict
def get_all_filter_xyz(self, data, dist):
    """Traverse dist.filter_tree top-down (conv5 -> conv4 -> conv3 -> conv2),
    backpropagating each visited filter toward the image, and record for
    every filter path its 3-D location (looked up in the point cloud) and
    its peak activation.

    data: object carrying .img, .mask and .pc (registered point cloud).
    dist: distribution whose .filter_tree is a nested dict keyed by filter
          indices at each conv level.

    Returns (xyz_dict, response_dict), both keyed by filter-index tuples
    such as (f5,), (f5, f4), (f5, f4, f3), (f5, f4, f3, f2).
    """
    xyz_dict = {}
    response_dict = {}
    self.net_proc_forward_layer(data.img, data.mask)
    # Snapshot forward activations now; the backprop calls below mutate the blobs.
    conv5_data = copy.deepcopy(self.net.blobs['conv5'].data[0,:])
    conv4_data = copy.deepcopy(self.net.blobs['conv4'].data[0])
    conv3_data = copy.deepcopy(self.net.blobs['conv3'].data[0])
    conv2_data = copy.deepcopy(self.net.blobs['conv2'].data[0])
    # --- conv5 level: pick which top-level filters to visit ---
    filter_idx_5_list = []
    if self.ds.filter_test == 'top':
        filter_idx_5_list = self.get_top_filters_in_list(conv5_data, dist.filter_tree, self.ds.conv5_top)
    elif self.ds.filter_test == 'all':
        filter_idx_5_list = dist.filter_tree
    for filter_idx_5 in filter_idx_5_list:
        print filter_idx_5
        if not self.filter_response_pass_threshold(conv5_data[filter_idx_5], self.ds.thres_conv5_test):
            continue
        # Backprop conv5/filter_idx_5 down one layer; img_src_5 is the
        # image-space gradient magnitude used for localization.
        conv4_diff, img_src_5 = self.load_layer_fix_filter('conv4', 'conv5', conv5_data, data, filter_idx_5)
        # Without targeted backprop ("tbp") fall back to the raw forward
        # activations of the lower layer instead of the backpropagated diff.
        if not self.ds.tbp_test:
            conv4_diff = conv4_data
        xyz_dict[(filter_idx_5,)], max_xy = self.get_filter_xyz(img_src_5, data.pc, 0)
        response_dict[(filter_idx_5,)] = self.get_max_filter_response(conv5_data[filter_idx_5])
        self.show_gradient(str((filter_idx_5)), self.net.blobs['data'], max_xy, 0)
        # self.show_depth(str((filter_idx_5))+'depth', self.net.blobs['data'].data, pc_array)
        # --- conv4 level (children of filter_idx_5 in the tree) ---
        filter_idx_4_list = []
        if self.ds.filter_test == 'top':
            filter_idx_4_list = self.get_top_filters_in_list(conv4_diff, dist.filter_tree[filter_idx_5], self.ds.conv4_top)
        elif self.ds.filter_test == 'all':
            filter_idx_4_list = dist.filter_tree[filter_idx_5]
        for filter_idx_4 in filter_idx_4_list:
            print filter_idx_5, filter_idx_4
            if not self.filter_response_pass_threshold(conv4_diff[filter_idx_4], self.ds.thres_conv4_test):
                continue
            conv3_diff, img_src_4 = self.load_layer_fix_filter('conv3', 'conv4', conv4_diff, data, filter_idx_4)
            if not self.ds.tbp_test:
                conv3_diff = conv3_data
            xyz_dict[(filter_idx_5, filter_idx_4)], max_xy = self.get_filter_xyz(img_src_4, data.pc, 0)
            response_dict[(filter_idx_5, filter_idx_4)] = self.get_max_filter_response(conv4_diff[filter_idx_4])
            self.show_gradient(str((filter_idx_5, filter_idx_4)), self.net.blobs['data'], max_xy, 0)
            # --- conv3 level ---
            filter_idx_3_list = []
            if self.ds.filter_test == 'top':
                filter_idx_3_list = self.get_top_filters_in_list(conv3_diff, dist.filter_tree[filter_idx_5][filter_idx_4], self.ds.conv3_top)
            elif self.ds.filter_test == 'all':
                filter_idx_3_list = dist.filter_tree[filter_idx_5][filter_idx_4]
            for filter_idx_3 in filter_idx_3_list:
                print filter_idx_5, filter_idx_4, filter_idx_3
                if not self.filter_response_pass_threshold(conv3_diff[filter_idx_3], self.ds.thres_conv3_test):
                    continue
                conv2_diff, img_src_3 = self.load_layer_fix_filter('conv2', 'conv3', conv3_diff, data, filter_idx_3)
                if not self.ds.tbp_test:
                    conv2_diff = conv2_data
                xyz_dict[(filter_idx_5, filter_idx_4, filter_idx_3)], max_xy = self.get_filter_xyz(img_src_3, data.pc, 0)
                response_dict[(filter_idx_5, filter_idx_4, filter_idx_3)] = self.get_max_filter_response(conv3_diff[filter_idx_3])
                self.show_gradient(str((filter_idx_5, filter_idx_4, filter_idx_3)), self.net.blobs['data'], max_xy, 0)
                # --- conv2 level (deepest recorded level) ---
                filter_idx_2_list = []
                if self.ds.filter_test == 'top':
                    filter_idx_2_list = self.get_top_filters_in_list(conv2_diff, dist.filter_tree[filter_idx_5][filter_idx_4][filter_idx_3], self.ds.conv2_top)
                elif self.ds.filter_test == 'all':
                    filter_idx_2_list = dist.filter_tree[filter_idx_5][filter_idx_4][filter_idx_3]
                for filter_idx_2 in filter_idx_2_list:
                    print filter_idx_5, filter_idx_4, filter_idx_3, filter_idx_2
                    if not self.filter_response_pass_threshold(conv2_diff[filter_idx_2], self.ds.thres_conv2_test):
                        continue
                    conv1_data, img_src_2 = self.load_layer_fix_filter('conv1', 'conv2', conv2_diff, data, filter_idx_2)
                    xyz_dict[(filter_idx_5, filter_idx_4, filter_idx_3, filter_idx_2)], max_xy = self.get_filter_xyz(img_src_2, data.pc, 0)
                    response_dict[(filter_idx_5, filter_idx_4, filter_idx_3, filter_idx_2)] = self.get_max_filter_response(conv2_diff[filter_idx_2])
                    self.show_gradient(str((filter_idx_5, filter_idx_4, filter_idx_3, filter_idx_2)), self.net.blobs['data'], max_xy, 0)
    return xyz_dict, response_dict
# dist is expected features
def get_state(self, dist, data):
    """Compute the feature "state" of one observation: 3-D locations and
    peak responses of filters down to conv3.

    dist: expected-feature distribution; when given, its filter_tree fixes
          which filters to visit; when None, filters are chosen from the
          current activations per self.ds.filters ('top' or 'spread').
    data: object carrying .img, .mask and .pc.

    Returns (xyz_dict, response_dict), keyed by filter-index tuples
    (f5,), (f5, f4), (f5, f4, f3).
    """
    xyz_dict = {}
    response_dict = {}
    self.net_proc_forward_layer(data.img, data.mask)
    # Copy because the backprop below overwrites the blob contents.
    conv5_data = copy.deepcopy(self.net.blobs['conv5'].data[0])
    # self.show_depth('depth', self.net.blobs['data'].data, pc_array)
    if not dist == None:
        filter_idx_5_list = dist.filter_tree
    else:
        if self.ds.filters == 'top':
            filter_idx_5_list = self.get_top_filters(conv5_data, self.ds.conv5_top)
        elif self.ds.filters == 'spread':
            filter_idx_5_list = self.get_spread_filters(conv5_data, self.ds.conv5_top)
    for filter_idx_5 in filter_idx_5_list:
        print filter_idx_5
        conv4_data, img_src_5 = self.load_layer_fix_filter('conv4', 'conv5', conv5_data, data, filter_idx_5)
        xyz_dict[(filter_idx_5,)], max_xy = self.get_filter_xyz(img_src_5, data.pc, 0)
        response_dict[(filter_idx_5,)] = self.get_max_filter_response(conv5_data[filter_idx_5])
        self.show_gradient(str((filter_idx_5)), self.net.blobs['data'], max_xy, 0)
        if not dist == None:
            filter_idx_4_list = dist.filter_tree[filter_idx_5]
        else:
            if self.ds.filters == 'top':
                filter_idx_4_list = self.get_top_filters(conv4_data, self.ds.conv4_top)
            elif self.ds.filters == 'spread':
                filter_idx_4_list = self.get_spread_filters(conv4_data, self.ds.conv4_top)
        for filter_idx_4 in filter_idx_4_list:
            print filter_idx_5, filter_idx_4
            conv3_data, img_src_4 = self.load_layer_fix_filter('conv3', 'conv4', conv4_data, data, filter_idx_4)
            xyz_dict[(filter_idx_5, filter_idx_4)], max_xy = self.get_filter_xyz(img_src_4, data.pc, 0)
            response_dict[(filter_idx_5, filter_idx_4)] = self.get_max_filter_response(conv4_data[filter_idx_4])
            self.show_gradient(str((filter_idx_5, filter_idx_4)), self.net.blobs['data'], max_xy, 0)
            if not dist == None:
                filter_idx_3_list = dist.filter_tree[filter_idx_5][filter_idx_4]
            else:
                if self.ds.filters == 'top':
                    filter_idx_3_list = self.get_top_filters(conv3_data, self.ds.conv3_top)
                elif self.ds.filters == 'spread':
                    filter_idx_3_list = self.get_spread_filters(conv3_data, self.ds.conv3_top)
            for filter_idx_3 in filter_idx_3_list:
                print filter_idx_5, filter_idx_4, filter_idx_3
                conv2_data, img_src_3 = self.load_layer_fix_filter('conv2', 'conv3', conv3_data, data, filter_idx_3)
                xyz_dict[(filter_idx_5, filter_idx_4, filter_idx_3)], max_xy = self.get_filter_xyz(img_src_3, data.pc, 0)
                response_dict[(filter_idx_5, filter_idx_4, filter_idx_3)] = self.get_max_filter_response(conv3_data[filter_idx_3])
                self.show_gradient(str((filter_idx_5, filter_idx_4, filter_idx_3)), self.net.blobs['data'], max_xy, 0)
    return xyz_dict, response_dict
def show_depth(self, name, layer_data, pc):
    """Visualize which pixels of the (resized) network input have no valid
    depth: paints red every pixel whose corresponding point-cloud entry is
    NaN, then shows the image in an OpenCV window.

    layer_data: data blob (batch, channels, h, w); only element 0 is used.
    pc: point cloud aligned with the original (unresized) image.
    """
    img = layer_data[0]
    img = img.transpose((1,2,0))
    img = norm01c(img, 0)
    img_size = layer_data[0].shape[1]
    # Scale factor from network-input coordinates back to pc coordinates.
    resize_ratio = float(pc.shape[0]) / float(img_size)
    for x in range(0, img_size):
        for y in range(0, img_size):
            orig_xy = self.get_orig_xy([x,y], resize_ratio)
            # m_idx = np.array([[round(orig_xy[0])],[round(orig_xy[1])]])
            # print "m idx", m_idx
            # xy_index = np.ravel_multi_index(m_idx.astype(int),(480,640))
            # print "xy", xy_index
            # NOTE(review): np.isnan on pc[x, y] assumes that entry is a
            # scalar (or that truth-testing it is valid) -- TODO confirm
            # the pc layout here.
            if np.isnan(pc[orig_xy[0],orig_xy[1]]):
                img[x,y] = [1,0,0]
            # else:
            #     img[x,y] = [0,0,pc_array[xy_index[0]][2]]
    img = norm01c(img, 0)
    cv2.imshow(name, img)
    cv2.waitKey(100)
def show_gradient(self, name, data_layer, xy_dot=(0,0), threshold=0):
    """Display the backpropagated gradient at the data layer in an OpenCV
    window, with a red square marking xy_dot (the localized response).

    No-op unless both self.visualize and self.show_backprop are set.

    data_layer: the net's data blob (uses .diff for the gradient image and
        .data for the input image, though only the gradient is shown).
    xy_dot: (row, col) to mark; skipped when either coordinate is NaN.
    threshold: unused in the active code path (kept for the commented-out
        centroid experiments below).
    """
    if not self.visualize or not self.show_backprop:
        return
    grad_blob = data_layer.diff
    grad_blob = grad_blob[0]                    # bc01 -> c01
    grad_blob = grad_blob.transpose((1,2,0))    # c01 -> 01c
    grad_img = grad_blob[:, :, (2,1,0)]         # e.g. BGR -> RGB
    img_blob = data_layer.data
    img = img_blob[0].transpose((1,2,0))        # c01 -> 01c
    # grad_img2 = cv2.GaussianBlur(grad_img2, (5,5), 0)
    # grad_img = cv2.bilateralFilter(grad_img,9,75,75)
    # grad_img2 = np.absolute(grad_img).mean(axis=2)
    # xy_dot2 = self.get_filter_avg_xy(grad_img2, threshold) #
    # print xy_dot2
    # max_idx = np.argmax(grad_img.mean(axis=2))
    # xy_dot2 = np.unravel_index(max_idx, grad_img.mean(axis=2).shape)
    # xy_dot2 = self.get_filter_avg_xy(np.absolute(grad_img).mean(axis=2), threshold) #
    # print xy_dot2
    # xy_dot2 = xy_dot2.astype(int)
    # Mode-specific processing: only 'raw' is selected; the other branches
    # are alternatives kept for experimentation.
    back_filt_mode = 'raw'#'norm'#
    if back_filt_mode == 'raw':
        grad_img = norm01c(grad_img, 0)
    elif back_filt_mode == 'gray':
        grad_img = grad_img.mean(axis=2)
        grad_img = norm01c(grad_img, 0)
    elif back_filt_mode == 'norm':
        grad_img = np.linalg.norm(grad_img, axis=2)
        grad_img = norm01(grad_img)
    else:
        grad_img = np.linalg.norm(grad_img, axis=2)
        cv2.GaussianBlur(grad_img, (0,0), self.settings.caffevis_grad_norm_blur_radius, grad_img)
        grad_img = norm01(grad_img)
    # If necessary, re-promote from grayscale to color
    if len(grad_img.shape) == 2:
        grad_img = np.tile(grad_img[:,:,np.newaxis], 3)
    # Draw a 16x16 red marker centered on xy_dot.
    if not np.isnan(xy_dot[0]) and not np.isnan(xy_dot[1]):
        for i in range(-8,8):
            for j in range(-8,8):
                grad_img[i+xy_dot[0],j+xy_dot[1]] = [1,0,0]
    # if not np.isnan(xy_dot2[0]) and not np.isnan(xy_dot2[1]):
    #     for i in range(-3,3):
    #         for j in range(-3,3):
    #             grad_img[i+xy_dot2[0],j+xy_dot2[1]] = [1,0,1]
    cv2.imshow(name, grad_img)
    # cv2.imwrite(self.path + "visualize/" + name + "_grad.png", norm0255(grad_img))
    cv2.waitKey(100)
def find_consistent_filters(self, conv_list, threshold, number):
    """Rank filters by how consistently they fire across a set of
    observations and return up to `number` of the best filter indices.

    conv_list: array (num_observations, num_filters, h, w).
    threshold: activation level a filter must exceed to count as firing.
    number: maximum number of filter indices to return; truncated early
        when the ranking statistic drops to zero.

    Ranking statistic depends on self.ds.top_filter:
      'above'  - count of observations where the filter fires,
      'max'    - sum over observations of sigmoid(peak response),
      'maxlog' - sum over observations of log(10*peak + 1).
    """
    if number <= 0:
        return []
    hist = np.zeros(conv_list.shape[1])
    max_hist = np.zeros(conv_list.shape[1])
    max_sum = np.zeros(conv_list.shape[1])
    for idx, conv in enumerate(conv_list):
        # print idx
        bin_data, max_data = self.binarize(conv, threshold)
        hist = hist + bin_data
        # Element-wise running max of per-filter peaks (computed but only
        # used in the commented-out diagnostics below).
        max_hist = np.amax(np.concatenate((max_hist[np.newaxis,...],max_data[np.newaxis,...]),axis=0),axis=0)
        if self.ds.top_filter == 'max':
            max_data = scipy.special.expit(max_data)
        elif self.ds.top_filter == 'maxlog':
            max_data = np.log(10*max_data+1)
        max_sum = max_data + max_sum
    # print "hist", hist
    # print "max hist", max_hist
    if self.ds.top_filter == 'above':
        filter_idx_list = np.argsort(hist)[::-1]
        # Cut the list at the first filter that never fired.
        for i in range(number+1):
            if hist[filter_idx_list[i]] == 0:
                number = i
                break
    elif self.ds.top_filter == 'max' or self.ds.top_filter == 'maxlog':
        filter_idx_list = np.argsort(max_sum)[::-1]
        for i in range(number+1):
            if max_sum[filter_idx_list[i]] <= 0:
                number = i
                break
    # print "top filters counts", hist[filter_idx_list[0:number+10]]
    print "top filters", filter_idx_list[0:number+10]
    print "max sum", max_sum[filter_idx_list[0:number+10]]
    return filter_idx_list[0:number]
def get_filter_avg_xy(self, filter_response, threshold):
    """Return the response-weighted centroid (x, y) of a 2-D filter map.

    filter_response: 2-D activation map.
    threshold: the centroid is only computed when the map's maximum is
        strictly above this value.

    Returns a rounded int array [x, y], or an array of two NaNs when the
    peak is at or below threshold, or when the map sums to zero (centroid
    undefined).
    """
    # print "max", np.amax(filter_response)
    assert filter_response.ndim == 2, "filter size incorrect"
    max_value = np.amax(filter_response)
    if max_value <= threshold:
        return np.array([float('nan'),float('nan')])
    # Bug fix: the second axis previously used shape[0] as well, which is
    # only correct for square maps; use shape[1] so non-square maps get a
    # correct column grid.
    xy_grid = np.mgrid[0:filter_response.shape[0], 0:filter_response.shape[1]]
    if np.sum(filter_response) == 0:
        return np.array([float('nan'),float('nan')])
    filter_response_norm = filter_response / float(np.sum(filter_response))
    avg_x = np.sum(xy_grid[0] * filter_response_norm)
    avg_y = np.sum(xy_grid[1] * filter_response_norm)
    return np.around(np.array([avg_x, avg_y])).astype(int)
def get_filter_max_xy(self, filter_response, threshold):
    """Locate the argmax of a 2-D filter map.

    Returns the (row, col) of the largest value, or an array of two NaNs
    when the peak is at or below threshold.
    """
    assert filter_response.ndim == 2, "filter size incorrect"
    peak = filter_response.max()
    if peak <= threshold:
        return np.array([float('nan'), float('nan')])
    flat_idx = filter_response.argmax()
    return np.unravel_index(flat_idx, filter_response.shape)
def get_max_filter_response(self, filter_response):
    """Largest activation in the map as a plain Python scalar, ignoring NaNs."""
    peak = np.nanmax(filter_response)
    return peak.item()
def filter_response_pass_threshold(self, filter_response, threshold):
    """True when the map's NaN-ignoring maximum is strictly above threshold;
    otherwise print a diagnostic and return False."""
    peak = np.nanmax(filter_response)
    # print "max", peak
    if peak > threshold:
        return True
    print("failed threshold %s" % peak)
    return False
def get_top_filters_in_list(self, layer_response, filter_tree, number):
    """Rank the filters named by filter_tree's keys by their peak activation
    in layer_response, and return the ids of the strongest `number` of them
    (strongest first)."""
    candidate_ids = list(filter_tree.keys())
    peaks = np.array([np.amax(layer_response[fid]) for fid in candidate_ids])
    best_positions = np.argsort(peaks)[::-1][0:number]
    return [candidate_ids[pos] for pos in best_positions]
def get_top_filters(self, layer_response, number):
    """Ids of the `number` filters with the largest peak activation,
    truncated at the first filter whose peak is <= 0 (only positively
    responding filters are kept)."""
    peaks = np.array([np.amax(fmap) for fmap in layer_response])
    ranked = np.argsort(peaks)[::-1]
    keep = number
    for pos, fid in enumerate(ranked[0:number]):
        if peaks[fid] <= 0:
            keep = pos
            break
    return ranked[0:keep].tolist()
def get_spread_filters(self, layer_response, number):
    """Greedily pick `number` filter ids by global peak response while
    suppressing responses near each chosen peak, so successive picks are
    spatially spread out across the layer.

    layer_response: (num_filters, h, w) activation block (left unmodified;
        suppression happens on a deep copy).
    Returns a list of `number` filter ids, strongest first.
    """
    response = copy.deepcopy(layer_response)
    chosen = np.zeros(number).astype(int)
    height = response.shape[1]
    width = response.shape[2]
    for pick in range(number):
        fid, px, py = np.unravel_index(np.argmax(response), response.shape)
        chosen[pick] = int(fid)
        # A chosen filter is never selected again.
        response[fid] *= 0
        # Attenuate every filter's response in an 11x11 window around the
        # peak; attenuation decays with squared distance (d2/(50+d2)).
        for dx in range(-5, 6):
            for dy in range(-5, 6):
                nx = px + dx
                ny = py + dy
                if nx < 0 or nx >= height or ny < 0 or ny >= width:
                    continue
                d2 = float(dx ** 2 + dy ** 2)
                response[:, nx, ny] *= d2 / (50.0 + d2)
    return chosen.tolist()
def get_relative_pos(self, filter_idx, data_list, conv_list, img_src, frame_list, threshold):
    """For a single filter, compute the offset from each robot frame to the
    filter's localized 3-D feature point, across all observations.

    filter_idx: the filter to localize.
    data_list: observations (each with .pc and pose info).
    conv_list: per-observation layer activations, used to test threshold.
    img_src: per-observation image-space gradient maps for localization.
    frame_list: names of frames to measure from.
    threshold: minimum response for localization; below it the feature
        position is NaN.

    Returns array (num_frames, num_observations, 3) of frame_xyz - feature_xyz.
    """
    relative_pos = np.empty([len(frame_list),len(data_list),3])
    for idx, data in enumerate(data_list):
        print idx,
        sys.stdout.flush()
        if not self.filter_response_pass_threshold(conv_list[idx][filter_idx], threshold):
            # Feature too weak: record NaNs so downstream stats can skip it.
            feature_xyz = (float('nan'),float('nan'),float('nan'))
            # continue
        else:
            # Localize either in image space (backprop map) or directly on
            # the conv response, per settings.
            if self.ds.location_layer == "image":
                feature_xyz, filter_xy = self.get_filter_xyz(img_src[idx], data.pc, threshold)
            else:
                feature_xyz, filter_xy = self.get_filter_xyz(abs(conv_list[idx][filter_idx]), data.pc, threshold)
        for frame_idx, frame in enumerate(frame_list):
            frame_xyz = np.array(self.get_frame_xyz(data, frame))
            diff = frame_xyz - feature_xyz
            relative_pos[frame_idx,idx,:] = diff
    return relative_pos
# Returns a distribution array of shape (num_frames, num_filters, num_data, 3).
# Each entry is the offset from a robot frame's xyz to a feature's xyz.
# conv_list is used only to check whether each filter passes the threshold.
def get_relative_pos_list(self, filter_idx_list, data_list, conv_list, img_src_fid_array, frame_list, threshold):
    """Vector version of get_relative_pos: compute frame-to-feature offsets
    for a list of filters at once.

    filter_idx_list: filters to localize.
    data_list: observations (each with .pc and pose info).
    conv_list: per-observation activations, used for the threshold test.
    img_src_fid_array: [observation][filter-position] image-space maps.
    frame_list: names of frames to measure from.
    threshold: minimum response; failing filters get NaN positions.

    Returns array (num_frames, num_filters, num_observations, 3).
    """
    relative_pos_list = np.empty([len(frame_list),len(filter_idx_list),len(data_list),3])
    for idx, data in enumerate(data_list):
        print idx,
        sys.stdout.flush()
        xyz_list = []
        for i, filter_idx in enumerate(filter_idx_list):
            # cv2.imshow(layer + " " + str(idx)+" "+ str(filter_idx),norm01c(bp[i], 0))
            # cv2.waitKey(200)
            if not self.filter_response_pass_threshold(conv_list[idx][filter_idx], threshold):
                xyz_list.append((float('nan'),float('nan'),float('nan')))
                continue
            if self.ds.location_layer == "image":
                xyz, filter_xy = self.get_filter_xyz(img_src_fid_array[idx][i], data.pc, threshold)
            else:
                xyz, filter_xy = self.get_filter_xyz(abs(conv_list[idx][filter_idx]), data.pc, threshold)
            xyz_list.append(xyz)
        for frame_idx, frame in enumerate(frame_list):
            frame_xyz = np.array(self.get_frame_xyz(data, frame))
            diff_list = [frame_xyz - feature_xyz for feature_xyz in xyz_list]
            relative_pos_list[frame_idx,:,idx,:] = np.array(diff_list)
    return relative_pos_list
def net_preproc_forward(self, img):
    """Run a plain forward pass on one 227x227x3 image.

    Preprocesses via the net's transformer (mean subtraction / scaling)
    and returns the net's output dict.
    """
    assert img.shape == (227,227,3), 'img is wrong size'
    #resized = caffe.io.resize_image(img, net.image_dims) # e.g. (227, 227, 3)
    data_blob = self.net.transformer.preprocess('data', img) # e.g. (3, 227, 227), mean subtracted and scaled to [0,255]
    data_blob = data_blob[np.newaxis,:,:,:] # e.g. (1, 3, 227, 227)
    output = self.net.forward(data=data_blob)
    return output
def net_proc_forward_layer(self, img, mask):
    """Forward pass that zeroes masked-out spatial locations of conv
    activations as the data flows through the net.

    img: 227x227x3 input image.
    mask: region mask; interpretation is delegated to is_masked().

    Three hard-coded variants exist (selected by the local `mode`):
      0 - mask only conv1,
      1 - mask conv1..conv5 with explicit layer-by-layer calls,
      2 - same effect as 1, but iterating self.available_layer (active).
    Masking is applied only when self.ds.mask == "mask".
    """
    assert img.shape == (227,227,3), 'img is wrong size'
    data_blob = self.net.transformer.preprocess('data', img) # e.g. (3, 227, 227), mean subtracted and scaled to [0,255]
    data_blob = data_blob[np.newaxis,:,:,:] # e.g. (1, 3, 227, 227)
    mode = 2
    # only mask out conv1
    if mode == 0:
        self.net.blobs['data'].data[...] = data_blob
        self.net.forward_from_to(start='conv1',end='relu1')
        self.mask_out(self.net.blobs['conv1'].data, mask)
        self.net.forward_from_to(start='relu1',end='prob')
    # mask out all conv layers
    elif mode == 1:
        self.net.blobs['data'].data[...] = data_blob
        self.net.forward_from_to(start='conv1',end='relu1')
        self.mask_out(self.net.blobs['conv1'].data, mask)
        self.net.forward_from_to(start='relu1',end='conv2')
        self.net.forward_from_to(start='conv2',end='relu2')
        self.mask_out(self.net.blobs['conv2'].data, mask)
        self.net.forward_from_to(start='relu2',end='conv3')
        self.net.forward_from_to(start='conv3',end='relu3')
        self.mask_out(self.net.blobs['conv3'].data, mask)
        self.net.forward_from_to(start='relu3',end='conv4')
        self.net.forward_from_to(start='conv4',end='relu4')
        self.mask_out(self.net.blobs['conv4'].data, mask)
        self.net.forward_from_to(start='relu4',end='conv5')
        self.net.forward_from_to(start='conv5',end='relu5')
        self.mask_out(self.net.blobs['conv5'].data, mask)
        self.net.forward_from_to(start='relu5',end='pool5')
    # mask out all conv layers, identical to mode 1
    elif mode == 2:
        for idx in range(len(self.available_layer)-1):
            output = self.net.forward(data=data_blob,start=self.available_layer[idx],end=self.available_layer[idx+1])
            if self.ds.mask == "mask" and self.available_layer[idx].startswith("conv"):
                self.mask_out(self.net.blobs[self.available_layer[idx]].data, mask)
def net_proc_backward(self, filter_idx, backprop_layer):
    """Backprop (or deconv, per self.back_mode) from backprop_layer with
    every diff zeroed except filter_idx, which is seeded with its own
    forward activation."""
    seeded = self.net.blobs[backprop_layer].diff * 0
    seeded[0][filter_idx] = self.net.blobs[backprop_layer].data[0, filter_idx]
    assert self.back_mode in ('grad', 'deconv')
    if self.back_mode == 'grad':
        self.net.backward_from_layer(backprop_layer, seeded, zero_higher=True)
    else:
        self.net.deconv_from_layer(backprop_layer, seeded, zero_higher=True)
# Set the backprop layer of filter_idx to data and backprop
def net_proc_backward_with_data(self, filter_idx, data, backprop_layer):
    """Backprop (or deconv, per self.back_mode) from backprop_layer,
    seeding filter_idx with `data`: only its single strongest unit when
    self.ds.backprop_xy == 'sin', or the whole map when 'all'."""
    seeded = self.net.blobs[backprop_layer].diff * 0
    if self.ds.backprop_xy == 'sin':
        x, y = np.unravel_index(np.argmax(data[filter_idx]), data[filter_idx].shape)
        seeded[0][filter_idx][x][y] = data[filter_idx][x][y]
    elif self.ds.backprop_xy == 'all':
        seeded[0][filter_idx] = data[filter_idx]
    assert self.back_mode in ('grad', 'deconv')
    if self.back_mode == 'grad':
        self.net.backward_from_layer(backprop_layer, seeded, zero_higher=True)
    else:
        self.net.deconv_from_layer(backprop_layer, seeded, zero_higher=True)
def net_proc_deconv_with_data(self, filter_idx, data, backprop_layer):
    """Deconv from backprop_layer, seeding filter_idx with `data` (single
    strongest unit for 'sin', full map for 'all')."""
    seeded = self.net.blobs[backprop_layer].diff * 0
    if self.ds.backprop_xy == 'sin':
        x, y = np.unravel_index(np.argmax(data[filter_idx]), data[filter_idx].shape)
        seeded[0][filter_idx][x][y] = data[filter_idx][x][y]
    elif self.ds.backprop_xy == 'all':
        seeded[0][filter_idx] = data[filter_idx]
    # Mode sanity check kept from the grad/deconv sibling; deconv is used
    # here unconditionally regardless of self.back_mode.
    assert self.back_mode in ('grad', 'deconv')
    self.net.deconv_from_layer(backprop_layer, seeded, zero_higher=True)
def get_orig_xy(self, xy, resize_ratio):
    """Map a coordinate from resized-image space back to original-image
    space by scaling with resize_ratio; NaN input yields a NaN tuple."""
    if np.isnan(xy[0]) or np.isnan(xy[1]):
        return (float('nan'), float('nan'))
    scaled_x = int(round(xy[0] * resize_ratio))
    scaled_y = int(round(xy[1] * resize_ratio))
    return (scaled_x, scaled_y)
def get_frame_xyz(self, data, frame_name):
    """Translation component (element 0 of the pose) of the named frame
    in data.pose_dict."""
    pose = data.pose_dict[frame_name]
    return pose[0]
def gen_receptive_grid(self, receptive_field_size):
    """Index grid of shape (2, s, s) covering a square receptive field of
    size s (row indices in [0], column indices in [1])."""
    s = receptive_field_size
    return np.mgrid[0:s, 0:s]
def get_closest_xyz_from_point_cloud(self, pc, xy, max_width):
    # Thin wrapper over the module-level helper closest_pc_value_fast
    # (defined elsewhere in this file): look up the point-cloud value at
    # (x, y), searching up to max_width for a usable entry.
    return closest_pc_value_fast(pc,xy[0],xy[1],max_width)
def get_average_xyz_from_point_cloud(self, pc, xy, receptive_grid):
    """Average the point-cloud values over a receptive field anchored at
    xy, ignoring NaN points and indices that fall outside the cloud.

    pc: point cloud of shape (h, w, c).
    xy: top-left anchor (returns NaNs immediately if xy[0] is NaN).
    receptive_grid: (2, w, w) index grid from gen_receptive_grid().

    Returns [x, y, z] as a list; entries may be NaN when no valid points
    fall inside the field.
    """
    if np.isnan(xy[0]):
        return [float('nan'),float('nan'),float('nan')]
    # NOTE(review): this print fires unconditionally on every call --
    # looks like leftover debug output.
    print "filter response zero no max xy", xy
    pc_array = pc.reshape((pc.shape[0]*pc.shape[1], pc.shape[2]))
    # receptive grid has shape (2,w,w) that contains the grid x idx and y idx
    grid = np.zeros(receptive_grid.shape)
    grid[0] = xy[0] + receptive_grid[0]
    grid[1] = xy[1] + receptive_grid[1]
    # this step flattens to 2 arrays of x coordinates and y coordinates
    xy_receptive_list =np.reshape(grid, [2,-1])
    # remove out of bound index
    xy_receptive_list_filtered = np.array([]).reshape([2,0])
    for i in range(xy_receptive_list.shape[1]):
        x = xy_receptive_list[0,i]
        y = xy_receptive_list[1,i]
        if x < pc.shape[0] and x >= 0 and y < pc.shape[1] and y >= 0:
            xy_receptive_list_filtered = np.append(xy_receptive_list_filtered, xy_receptive_list[:,i].reshape([2,1]), axis=1)
    # Convert surviving (x, y) pairs to flat indices and average, skipping NaNs.
    idx_receptive_list = np.ravel_multi_index(xy_receptive_list_filtered.astype(int),pc.shape[0:2])
    avg = np.nanmean(pc_array[idx_receptive_list],axis=0)
    if np.isnan(avg[0]) or np.isnan(avg[1]) or np.isnan(avg[2]):
        print "nan found", xy
    return avg.tolist()
# show point cloud in rviz, input server needs to be running
def show_point_cloud(self, name):
    """Ask the external 'show_point_cloud' ROS service (rviz-side input
    server must be running) to display the named cloud.

    Returns the service's result string, or None if the call failed.
    """
    rospy.wait_for_service('show_point_cloud')
    try:
        show_point_cloud = rospy.ServiceProxy('show_point_cloud', String2)
        resp = show_point_cloud(name,'')
        return resp.result
    except rospy.ServiceException, e:
        print "Service call failed: %s"%e
def append_point_cloud(self, path, name, point_list):
    """Load <path><name>.pcd, append point_list to its points, and write
    the result to <path>/distribution/<name>_new.pcd.

    Returns the ORIGINAL (unmodified) cloud object, not the new one.
    """
    p = pcl.PointCloud()
    p.from_file(path + name + ".pcd")
    a = np.asarray(p)
    print type(a), a.shape
    print type(point_list), point_list.shape
    new_a = np.concatenate((a,point_list), axis=0)
    new_p = pcl.PointCloud(new_a.astype(np.float32) )
    # p.from_array(new_a.astype(float))
    new_p.to_file(path + '/distribution/' + name + '_new.pcd')
    return p
def mask_out(self, data, mask):
    """Zero every spatial location of a (batch, channels, h, w) blob that
    is masked out according to is_masked() (defined elsewhere in this
    file). Mutates `data` in place and also returns it."""
    # print "data shape", data.shape
    dim = data.shape
    for y in range(dim[2]):
        for x in range(dim[3]):
            if is_masked((dim[2],dim[3]),(x,y),mask):
                data[:,:,y,x] = 0
    return data
def load_conv5(self, data_list):#img_list, mask_list):
    """Forward each observation through the net (with masking) and stack
    the resulting conv5 blobs into one array (n, c, h, w)."""
    conv5_list = np.array([]).reshape([0] + list(self.net.blobs['conv5'].data.shape[1:]))
    for idx, data in enumerate(data_list):
        print idx,
        sys.stdout.flush()
        # print "img", img.shape
        self.net_proc_forward_layer(data.img, data.mask)
        # self.net_preproc_forward(img)
        conv5_list = np.append(conv5_list, self.net.blobs['conv5'].data, axis=0)
        # print "shape", self.net.blobs['conv5'].data.shape
    return conv5_list
def load_layer(self, data_list, layer):#img_list, mask_list):
    """Generalization of load_conv5: forward each observation and stack
    the named layer's blobs into one array (n, c, h, w)."""
    layer_list = np.array([]).reshape([0] + list(self.net.blobs[layer].data.shape[1:]))
    for idx, data in enumerate(data_list):
        print idx,
        sys.stdout.flush()
        # print "img", img.shape
        self.net_proc_forward_layer(data.img, data.mask)
        # self.net_preproc_forward(img)
        layer_list = np.append(layer_list, self.net.blobs[layer].data, axis=0)
        # print "shape", self.net.blobs['conv5'].data.shape
    return layer_list
def load_conv4_conv3(self, data_list):
    """Forward each observation once and collect BOTH conv4 and conv3
    blobs, stacked into two arrays (n, c, h, w) each."""
    conv4_list = np.array([]).reshape([0] + list(self.net.blobs['conv4'].data.shape[1:]))
    conv3_list = np.array([]).reshape([0] + list(self.net.blobs['conv3'].data.shape[1:]))
    for idx, data in enumerate(data_list):
        print idx,
        sys.stdout.flush()
        # print "img", img.shape
        self.net_proc_forward_layer(data.img, data.mask)
        # self.net_preproc_forward(img)
        conv4_list = np.append(conv4_list, self.net.blobs['conv4'].data, axis=0)
        conv3_list = np.append(conv3_list, self.net.blobs['conv3'].data, axis=0)
        # print "shape", self.net.blobs['conv5'].data.shape
    return conv4_list, conv3_list
def load_layer_fix_filter_list(self, load_layer, fix_layer, fix_layer_data_list, data_list, filter_idx):
    """Batch wrapper over load_layer_fix_filter: for each observation, run
    the forward pass then backprop fix_layer/filter_idx, collecting the
    load_layer diffs and the image-space source maps.

    Returns (layer_diff_list, img_src_list) stacked over observations.
    """
    layer_diff_list = np.zeros([len(data_list)] + list(self.net.blobs[load_layer].diff.shape[1:]))
    img_src_list = np.zeros([len(data_list)] + list(self.net.blobs['data'].data.shape[2:]))
    for idx, data in enumerate(data_list):
        print idx,
        sys.stdout.flush()
        self.net_proc_forward_layer(data.img, data.mask)
        layer_diff_list[idx,:], img_src_list[idx,:] = self.load_layer_fix_filter(load_layer, fix_layer, fix_layer_data_list[idx], data, filter_idx)
    return layer_diff_list, img_src_list
# perform forward path and backward path while zeroing out all filter response except for filter_idx
# return layer_diff_list which is the load layer diff and img_src_list which is the abs diff if back propagate to image layer
def load_layer_fix_filter(self, load_layer, fix_layer, fix_layer_diff, data, filter_idx):
    """Backprop from fix_layer with only filter_idx active (seeded from
    fix_layer_diff) and read back the gradient at load_layer plus an
    image-space source map.

    Assumes the forward pass for `data` has already been run by the caller.

    Returns (layer_diff, img_src):
      layer_diff - deep copy of load_layer's diff (c, h, w);
      img_src    - per-pixel gradient at the data layer, averaged over
                   channels, either |.| ('absolute') or max(., 0) ('relu')
                   per self.ds.img_src_loc.
    """
    #self.net_proc_forward_layer(data.img, data.mask)
    self.net_proc_backward_with_data(filter_idx, fix_layer_diff, fix_layer)
    layer_diff = self.net.blobs[load_layer].diff[0,:]
    # mean is to average over all filters
    if self.ds.img_src_loc == "absolute":
        img_src = np.absolute(self.net.blobs['data'].diff[0,:]).mean(axis=0)
    elif self.ds.img_src_loc == "relu":
        img_src = np.maximum(self.net.blobs['data'].diff[0,:],0).mean(axis=0)
    # make a copy (the blobs are reused by the next forward/backward pass)
    layer_diff = copy.deepcopy(layer_diff)
    img_src = copy.deepcopy(img_src)
    return layer_diff, img_src
# Binarizes such that the output is a 1-d array where each entry is whether a filter fires; also outputs the max value.
def binarize(self, data, threshold):
    """Per-filter summary of a (num_filters, h, w) activation block.

    Returns (bin_data, max_data): bin_data[i] is 1.0 when filter i's peak
    is strictly above threshold (else 0.0); max_data[i] is that peak
    clamped to be non-negative, recorded regardless of the threshold.
    """
    num_filters = data.shape[0]
    bin_data = np.zeros(num_filters)
    max_data = np.zeros(num_filters)
    for fid in range(num_filters):
        peak = np.amax(data[fid])
        if peak > threshold:
            bin_data[fid] = 1
        max_data[fid] = max(0., peak)
    return bin_data, max_data
if __name__ == '__main__':
    # Script entry point: build the data settings and construct the
    # DataMonster app (DataSettings, DataMonster and `settings` are
    # defined earlier in this file).
    ds = DataSettings()
    data_monster = DataMonster(settings, ds)
|
[
"lku@cs.umass.edu"
] |
lku@cs.umass.edu
|
3893f77b41dce8ad59233c0410f8f973797f485a
|
b228f1130e7b4797a7f8061ad0a3021648bdafce
|
/server/test.py
|
03e347db6761e5ef4d36ba463bde46adba54b3e1
|
[] |
no_license
|
flobe99/VerteilteSysteme
|
14c5202076800bc657c8762df629d91d113188b9
|
6ad1457c3d59e97fb0cd17d6fc84da07ca4eab17
|
refs/heads/main
| 2023-05-24T15:14:44.357181
| 2021-06-15T14:41:04
| 2021-06-15T14:41:04
| 366,633,388
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,598
|
py
|
#!/usr/bin/env python3
import unittest
import server
import database
import mongomock
import json
from urllib.parse import urlencode
from datetime import *
app = server.app.test_client()
def call(method, path, data):
    """Issue a request against the Flask test client, sending `data`
    url-encoded as the query string."""
    query = urlencode(data)
    return app.open(path, method=method, query_string=query)
# Wrapper for generating datetime.
# Microseconds are not stored in MongoDB. Cut off microseconds for testing
def now():
    """Current local datetime with microseconds dropped.

    MongoDB does not store microsecond precision, so tests compare
    against microsecond-free timestamps.
    """
    return datetime.now().replace(microsecond=0)
class MockDatabase( database.Database ):
    """In-memory stand-in for the real Database: swaps MongoClient for
    mongomock so tests never touch a live MongoDB instance."""
    def __init__(self):
        # Deliberately does not call the parent constructor, which would
        # presumably open a real connection -- TODO confirm.
        self.client = mongomock.MongoClient()
        self.db = self.client["blackboard"]
        self.collection = self.db["blackboard1"]
class TestBlackboardCreate(unittest.TestCase):
    """Tests for POST /blackboard/create: success, duplicate name (409),
    and missing/invalid parameters (400)."""
    def setUp(self):
        # Fresh in-memory DB per test, pre-seeded with one blackboard so
        # the duplicate-name case can be exercised.
        server.db = MockDatabase()
        server.db.collection.insert_many([
            { "name": "exists"}
        ])
    def test_create_blackboard_success(self):
        r = call( "POST", "/blackboard/create", {
            "name": "test",
            "validityTime": 5
        })
        self.assertEqual( r.status[:3], "200" )
    def test_create_blackboard_exists(self):
        # Creating an already-existing blackboard must be rejected.
        r = call( "POST", "/blackboard/create", {
            "name": "exists",
            "validityTime": 5
        })
        self.assertEqual( r.status[:3], "409" )
    def test_create_blackboard_invalid_param(self):
        # Non-numeric validityTime.
        r = call( "POST", "/blackboard/create", {
            "name": "test",
            "validityTime": "abc"
        })
        self.assertEqual( r.status[:3], "400" )
        # Missing validityTime.
        r = call( "POST", "/blackboard/create", {
            "name": "test",
        })
        self.assertEqual( r.status[:3], "400" )
        # Missing name.
        r = call( "POST", "/blackboard/create", {
            "validityTime": "abc"
        })
        self.assertEqual( r.status[:3], "400" )
        # No parameters at all.
        r = call( "POST", "/blackboard/create", {
        })
        self.assertEqual( r.status[:3], "400" )
class TestBlackboardDisplay(unittest.TestCase):
    """Tests for GET /blackboard/display: writing data to an existing
    blackboard, parameter validation (400), and unknown names (404)."""
    def setUp(self):
        server.db = MockDatabase()
        server.db.collection.insert_many([
            {"name": "test1", "content": "", "validityTime": 5, "timestamp": now() },
            {"name": "test2", "content": "old", "validityTime": 5, "timestamp": now() }
        ])
    # NOTE(review): method name has a typo ("sucess"); left as-is here
    # because renaming would be a code change.
    def test_display_blackboard_sucess(self):
        r = call( "GET", "/blackboard/display", {
            "name": "test1",
            "data": "test"
        })
        self.assertEqual( r.status[:3], "200" )
        r = call( "GET", "/blackboard/display", {
            "name": "test2",
            "data": "test"
        })
        self.assertEqual( r.status[:3], "200" )
    def test_display_blackboard_invalid_param(self):
        # Missing data.
        r = call( "GET", "/blackboard/display", {
            "name": "test1",
        })
        self.assertEqual( r.status[:3], "400" )
        # Missing name.
        r = call( "GET", "/blackboard/display", {
            "data": "test"
        })
        self.assertEqual( r.status[:3], "400" )
        # No parameters at all.
        r = call( "GET", "/blackboard/display", {
        })
        self.assertEqual( r.status[:3], "400" )
    def test_display_blackboard_not_found(self):
        r = call( "GET", "/blackboard/display", {
            "name": "not_exists",
            "data": "test"
        })
        self.assertEqual( r.status[:3], "404" )
class TestBlackboardClear(unittest.TestCase):
def setUp(self):
server.db = MockDatabase()
server.db.collection.insert_many([
{"name": "test1", "content": "", "validityTime": 5, "timestamp": now() },
{"name": "test2", "content": "old", "validityTime": 5, "timestamp": now() }
])
def test_clear_blackboard_success(self):
r = call( "GET", "/blackboard/clear", {
"name": "test1",
})
self.assertEqual( r.status[:3], "200" )
r = call( "GET", "/blackboard/clear", {
"name": "test2",
})
self.assertEqual( r.status[:3], "200" )
def test_clear_blackboard_success(self):
r = call( "GET", "/blackboard/clear", {
})
self.assertEqual( r.status[:3], "400" )
def test_clear_blackboard_success(self):
r = call( "GET", "/blackboard/clear", {
"name": "test3"
})
self.assertEqual( r.status[:3], "404" )
class TestBlackboardRead(unittest.TestCase):
    """Tests for GET /blackboard/read: valid/expired content, parameter
    validation (400), unknown names (404), and empty content (444)."""
    def setUp(self):
        # test2's timestamp is 10 days old, making its content expired;
        # test3 has empty content.
        self.data = [
            {"name": "test1", "content": "new", "validityTime": 5, "timestamp": now() },
            {"name": "test2", "content": "old", "validityTime": 5, "timestamp": (now() - timedelta(10)) },
            {"name": "test3", "content": "", "validityTime": 5, "timestamp": now() }
        ];
        server.db = MockDatabase()
        server.db.collection.insert_many(self.data)
    def test_read_blackboard_success(self):
        # Valid
        r = call( "GET", "/blackboard/read", {
            "name": "test1"
        })
        self.assertEqual( r.status[:3], "200" )
        result = json.loads( next(r.response).decode("ascii") )
        self.assertEqual( result, {"content": "new", "validity": True} )
        # Invalid
        r = call( "GET", "/blackboard/read", {
            "name": "test2"
        })
        self.assertEqual( r.status[:3], "200" )
        result = json.loads( next(r.response).decode("ascii") )
        self.assertEqual( result, {"content": "old", "validity": False} )
    def test_read_blackboard_invalid_param(self):
        r = call( "GET", "/blackboard/read", {
        })
        self.assertEqual( r.status[:3], "400" )
    def test_read_blackboard_not_found(self):
        r = call( "GET", "/blackboard/read", {
            "name": "test4"
        })
        self.assertEqual( r.status[:3], "404" )
    def test_read_blackboard_empty(self):
        # Empty content is reported with the server's custom 444 status.
        r = call( "GET", "/blackboard/read", {
            "name": "test3"
        })
        self.assertEqual( r.status[:3], "444" )
class TestBlackboardStatus(unittest.TestCase):
    """Tests for GET /blackboard/getStatus: timestamp/validity/emptiness
    reporting, parameter validation (400), and unknown names (404)."""
    def setUp(self):
        server.db = MockDatabase()
        # test1 is expired (10 days old), test2 is fresh, test3 is empty.
        self.data = [
            {"name": "test1", "content": "old", "validityTime": 5, "timestamp": (now() - timedelta(10)) },
            {"name": "test2", "content": "new", "validityTime": 5, "timestamp": now() },
            {"name": "test3", "content": "", "validityTime": 5, "timestamp": now() }
        ]
        server.db.collection.insert_many(self.data)
    def test_status_blackboard_success(self):
        # Valid
        r = call( "GET", "/blackboard/getStatus", {
            "name": "test2"
        })
        self.assertEqual( r.status[:3], "200" )
        result = json.loads( next(r.response).decode("ascii") )
        print( self.data[1]["timestamp"] )
        self.assertEqual( result, {"timestamp": self.data[1]["timestamp"].isoformat(), "validityTime": 5, "validity": True, "empty": False })
        # Invalid
        r = call( "GET", "/blackboard/getStatus", {
            "name": "test1"
        })
        self.assertEqual( r.status[:3], "200" )
        result = json.loads( next(r.response).decode("ascii") )
        self.assertEqual( result, {"timestamp": self.data[0]["timestamp"].isoformat(), "validityTime": 5, "validity": False, "empty": False })
        # Empty
        r = call( "GET", "/blackboard/getStatus", {
            "name": "test3"
        })
        self.assertEqual( r.status[:3], "200" )
        result = json.loads( next(r.response).decode("ascii") )
        self.assertEqual( result, {"timestamp": self.data[2]["timestamp"].isoformat(), "validityTime": 5, "validity": False, "empty": True })
    def test_status_blackboard_invalid_param(self):
        r = call( "GET", "/blackboard/getStatus", {
        })
        self.assertEqual( r.status[:3], "400" )
    def test_status_blackboard_not_found(self):
        r = call( "GET", "/blackboard/getStatus", {
            "name": "test4"
        })
        self.assertEqual( r.status[:3], "404" )
class TestBlackboardList(unittest.TestCase):
    """Tests for the GET /blackboard/list endpoint."""

    def setUp(self):
        server.db = MockDatabase()
        server.db.collection.insert_many([
            {"name": "test1", "content": "", "validityTime": 5, "timestamp": now()},
            {"name": "test2", "content": "old", "validityTime": 5, "timestamp": now()},
        ])

    def test_list_blackboard_success(self):
        """Listing returns the name of every stored blackboard."""
        response = call("GET", "/blackboard/list", {})
        self.assertEqual(response.status[:3], "200")
        body = json.loads(next(response.response).decode('ascii'))
        # Order is unspecified, so sort before comparing.
        self.assertEqual(sorted(body, key=lambda entry: entry["name"]),
                         [{"name": "test1"}, {"name": "test2"}])
class TestBlackboardDelete(unittest.TestCase):
    """Tests for the DELETE /blackboard/delete endpoint."""

    def setUp(self):
        server.db = MockDatabase()
        server.db.collection.insert_many([
            {"name": "test1", "content": "", "validityTime": 5, "timestamp": now()},
            {"name": "test2", "content": "old", "validityTime": 5, "timestamp": now()},
        ])

    def test_delete_blackboard_success(self):
        """Deleting an existing blackboard yields 200."""
        response = call("DELETE", "/blackboard/delete", {"name": "test1"})
        self.assertEqual(response.status[:3], "200")

    def test_delete_blackboard_invalid_param(self):
        """Missing the name parameter yields 400."""
        response = call("DELETE", "/blackboard/delete", {})
        self.assertEqual(response.status[:3], "400")

    def test_delete_blackboard_not_found(self):
        """Deleting an unknown blackboard yields 404."""
        response = call("DELETE", "/blackboard/delete", {"name": "test3"})
        self.assertEqual(response.status[:3], "404")
class TestBlackboardDeleteAll(unittest.TestCase):
    """Tests for the DELETE /blackboard/deleteAll endpoint."""

    def setUp(self):
        server.db = MockDatabase()
        server.db.collection.insert_many([
            {"name": "test1", "content": "", "validityTime": 5, "timestamp": now()},
            {"name": "test2", "content": "old", "validityTime": 5, "timestamp": now()},
        ])

    def test_deleteall_blackboard_success(self):
        """Wiping every blackboard yields 200."""
        response = call("DELETE", "/blackboard/deleteAll", {})
        self.assertEqual(response.status[:3], "200")
if __name__ == "__main__":
    # Discover and run every TestCase defined above.
    unittest.main()
|
[
"tobi-bungard@t-online.de"
] |
tobi-bungard@t-online.de
|
38661368c26aa1da318d78f95ed8e96aff94449e
|
68117ea00c89e2416531b8bbe9f3ded2d82dc7a6
|
/XGB_regressor.py
|
3c308373bd678d5fd24344f98333d1aab35e82e5
|
[] |
no_license
|
dmcglynn10/WindForecast
|
77f23b8abd6f6db6cfc6b6e62a9c0cafa168ae9b
|
dfa9ce367c8a9b0da96eae8cf8f5277f43594080
|
refs/heads/main
| 2023-06-17T23:00:48.469521
| 2021-07-13T18:31:45
| 2021-07-13T18:31:45
| 385,700,074
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 700
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 6 15:31:23 2020
@author: Daniel.McGlynn
"""
import xgboost as xgb
from sklearn.metrics import mean_squared_error
import pandas as pd
import numpy as np
def XGB_regressor(normalised_x, normalised_y, test_x=None):
    """Fit an XGBoost regressor on the normalised data and return predictions.

    Parameters
    ----------
    normalised_x : array-like, training features.
    normalised_y : array-like, training targets.
    test_x : array-like or None, optional
        Features to predict on; defaults to the training features, which
        keeps the original call signature working.

    Returns
    -------
    numpy array of predictions.
    """
    # Bug fix: the original fit/predict referenced undefined names
    # (X_train, y_train, X_test) and raised NameError; use the arguments.
    # The unused DMatrix construction was removed as dead code.
    xg_reg = xgb.XGBRegressor(objective='reg:linear', colsample_bytree=0.3, learning_rate=0.1,
                              max_depth=5, alpha=10, n_estimators=10)
    xg_reg.fit(normalised_x, normalised_y)
    preds = xg_reg.predict(normalised_x if test_x is None else test_x)
    return preds
if __name__ == '__main__':
    # Bug fix: the original called XGB_regressor() with no arguments,
    # which raised TypeError. Run a small random-data smoke test instead.
    rng = np.random.RandomState(0)
    demo_x = rng.rand(50, 4)
    demo_y = rng.rand(50)
    print(XGB_regressor(demo_x, demo_y))
|
[
"noreply@github.com"
] |
dmcglynn10.noreply@github.com
|
d15eaf4ac71e27cbe43d303f116879cdbc162b2b
|
6cae49b177f24440f6ad6f2331ce6747c23eafec
|
/occu-table/Mohabir-Jason/formMagic.py
|
03c904b1314086afa096922bba9749f6943e631d
|
[] |
no_license
|
JasonMohabir/Flask_SoftDev
|
37016b96bc7f835b5af22419e57b30273dae97bb
|
3669a0c69fa27f454eb886fd44c5e5bf22fc16ea
|
refs/heads/master
| 2021-06-11T18:49:19.934403
| 2016-10-11T17:30:26
| 2016-10-11T17:30:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,015
|
py
|
from flask import Flask, render_template, redirect
import random
app = Flask(__name__)

# Load the occupation CSV once at import time.
# Bug fix: the original `open(...).read()` leaked the file handle;
# a context manager closes it deterministically.
with open("static/occupations.csv") as csv_file:
    occL = csv_file.read()
occL = occL.split('\n')
lineZero = occL[0]  # header row, kept aside
del occL[0]
lineLast = occL[len(occL)-1]  # trailing row, kept aside
del occL[len(occL)-1]

# occDict maps cumulative percentage upper bound -> occupation name;
# occL2 holds [name, percentage-string] pairs used by pickOccupation().
occDict = {}
upperBoundL = []
currentUpperBound = 0
occL2 = []
for line in occL:
    occL2.append(line.rsplit(",", 1))
    occS = line.rsplit(",", 1)[0]
    currentUpperBound += float(line.rsplit(",", 1)[1])
    occDict[currentUpperBound] = occS
    upperBoundL.append(currentUpperBound)
def pickOccupation():
    """Return a random occupation weighted by its percentage column.

    Draws a threshold in [0, 99.8) and walks the cumulative distribution
    built from occL2 until it is exceeded.
    """
    threshold = random.random() * 99.8
    cumulative = 0
    for entry in occL2:
        cumulative += float(entry[1])
        if threshold < cumulative:
            return entry[0]
@app.route("/")
def redir():
    """Send visitors at the site root to the occupations table."""
    target = "/occupations"
    return redirect(target)
@app.route("/occupations")
def occTable():
    """Render the full occupation table plus one randomly picked occupation."""
    chosen = pickOccupation()
    return render_template("occTable.html", occList=occL2, occupation=chosen)
if __name__ == "__main__":
    # Debug mode enables auto-reload and in-browser tracebacks (dev only).
    app.debug = True
    app.run()
|
[
"jmohabir@stuy.edu"
] |
jmohabir@stuy.edu
|
2a7f86e7378adde57ac4d8825f6fda12c5641584
|
4a27c4bc014ad3c2342e2a548e265d8e693ebf25
|
/sacluster/lib/cls/delete/delete_class.py
|
1721c1c781162e3c8fa064cfe67b82a8eeff381c
|
[
"Apache-2.0"
] |
permissive
|
hpc-team2020/sacluster
|
bdd5baa2f67a3688bd2e8b1ca025a97dca41995c
|
b4374fb5ebf39b21c1e6a1ba0d72fbda2f8f4b29
|
refs/heads/master
| 2023-08-11T15:06:13.570369
| 2021-10-04T05:03:37
| 2021-10-04T05:03:37
| 364,796,660
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 30,784
|
py
|
import sys
import os
import datetime
import logging
from tqdm import tqdm
from concurrent import futures
import signal
logger = logging.getLogger("sacluster").getChild(os.path.basename(__file__))
path = "../../.."
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(path + "/lib/others")
from API_method import delete,get
from info_print import printout
import pprint
class delete_sacluster:
    """Tear down a Sakura Cloud cluster: servers, disks, switches, bridges, NFS.

    Driven by a ``cluster_info`` dict (``clusterparams`` ->
    ``server``/``switch``/``bridge``/``nfs``, keyed by zone). Construction
    validates which components are actually present; calling the instance
    performs the deletions zone by zone while drawing a tqdm progress bar.
    Deletion order matters: bridge/switch links are severed first, then
    peripheral zones, then the head zone.
    """
    def __init__(self, cluster_info, auth_res, max_workers,fp='', info_list=[1,0,0,0], api_index=True):
        """Capture the cluster description and pre-compute IDs and API URLs.

        cluster_info : dict describing the cluster to delete.
        auth_res     : credentials forwarded to the API helpers (get/delete).
        max_workers  : thread-pool size for parallel server/disk deletion.
        fp           : log file handle forwarded to printout().
        info_list    : output-routing flags forwarded to printout().
        api_index    : when False, no API calls are issued (dry run).

        NOTE(review): the mutable default ``info_list=[1,0,0,0]`` is shared
        across calls — do not mutate it in place.
        """
        # Abort cleanly (sys.exit) on Ctrl-C.
        signal.signal(signal.SIGINT, self.handler)
        self.cluster_info = cluster_info
        #pprint.pprint(self.cluster_info)
        #sys.exit()
        self.auth_res = auth_res
        self.fp = fp
        self.info_list = info_list
        self.api_index = api_index
        # Check the contents of cluster_info (which components exist).
        self.contents_bool = self.validate_params(self.cluster_info)
        #pprint.pprint(self.contents_bool)
        #sys.exit()
        #self.cluster_id = [k for k in self.cluster_info.keys()]
        #if "server" in self.cluster_info["clusterparams"].keys():
        if self.contents_bool["server"] == True:
            # Extract the zones in which servers exist.
            self.zone_list = list(zone_list for zone_list in self.cluster_info["clusterparams"]["server"].keys())
        else:
            # When only a bridge exists, default the zone to is1a.
            self.zone_list = ["is1a"]
        #If there is NFS, get NFS zone
        self.nfs_zones = []
        #if "nfs" in self.cluster_info["clusterparams"].keys():
        if self.contents_bool["nfs"] == True:
            for zone in self.zone_list:
                if self.cluster_info["clusterparams"]["nfs"][zone] != None:
                    #self.nfs_zones = list(self.cluster_info["clusterparams"]["nfs"].keys())
                    self.nfs_zones.append(zone)
        #else:
            #self.nfs_zones = []
        #else:
            #self.nfs_zones = []
        #Get head zone
        # NOTE(review): if servers exist but no zone contains a "head" entry,
        # self.head_zone is never assigned and later reads would raise
        # AttributeError — confirm upstream always provides a head node.
        if self.contents_bool["server"] == True:
            for zone in self.zone_list:
                if "head" in self.cluster_info["clusterparams"]["server"][zone].keys():
                    self.head_zone = zone
                    break
        else:
            self.head_zone = None
        #if "front" in self.cluster_info["clusterparams"]["bridge"].keys():
        if self.contents_bool["bridge"] == True:
            #if self.cluster_info["clusterparams"]["bridge"]["front"] != None:
            #if len(self.zone_list) >= 2:
            self.bridge_front_id = self.cluster_info["clusterparams"]["bridge"]["front"]["id"]
            if self.cluster_info["clusterparams"]["bridge"]["back"] != None:
            #if "back" in self.cluster_info["clusterparams"]["bridge"].keys():
                self.bridge_back_id = self.cluster_info["clusterparams"]["bridge"]["back"]["id"]
            else:
                self.bridge_back_id = None
        # Per-zone switch IDs for the front (public-facing) and back networks.
        self.front_switch_id_list = {}
        self.back_switch_id_list = {}
        #if "front" in self.cluster_info["clusterparams"]["switch"].keys():
        if self.contents_bool["switch"] == True:
            for zone in self.zone_list:
                self.front_switch_id_list[zone] = self.cluster_info["clusterparams"]["switch"][zone]["front"]["id"]
                if "back" in self.cluster_info["clusterparams"]["switch"][zone].keys():
                    if self.cluster_info["clusterparams"]["switch"][zone]["back"] != None:
                        self.back_switch_id_list[zone] = self.cluster_info["clusterparams"]["switch"][zone]["back"]["id"]
        #state check
        #self.validate_status(self.cluster_info)
        #Get URL
        # One API base URL per zone.
        self.url_list = {}
        for zone in self.zone_list:
            self.url_list[zone] = "https://secure.sakura.ad.jp/cloud/zone/"+ zone +"/api/cloud/1.1"
        if self.head_zone != None:
            self.head_url = "https://secure.sakura.ad.jp/cloud/zone/"+ self.head_zone +"/api/cloud/1.1"
        #Get sub URL
        # Resource-path suffixes; indexed positionally throughout this class
        # (0=server, 1=disk, 2=switch, 3=interface, 4=bridge, 6=appliance).
        self.sub_url = ["/server","/disk","/switch","/interface","/bridge","/tag","/appliance","/power"]
        self.date_modified = "Date modified:" + str(datetime.datetime.now().strftime("%Y_%m_%d"))
        self.max_workers = max_workers
    def __call__(self):
        """Execute the full deletion sequence with a progress bar."""
        # Configure the progress bar.
        self.bar = tqdm(total = 100)
        self.bar.set_description('Progress rate')
        self.progress_sum = 0
        # When both a bridge and switches exist, disconnect the bridge from
        # each zone's switches before anything can be deleted.
        #if len(self.zone_list) >= 2:
        if self.contents_bool["bridge"] == True and self.contents_bool["switch"] == True:
            #if self.cluster_info["clusterparams"]["bridge"]["front"] != None:
            for zone in self.zone_list:
                connect_bool = self.get_bridge_info(zone,self.bridge_front_id)
                if connect_bool == True:
                    printout("Disconnect bridge and switch : " + zone + " zone", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.disconnect_bridge_switch(zone,self.front_switch_id_list[zone])
                if self.bridge_back_id != None:
                    connect_bool = self.get_bridge_info(zone,self.bridge_back_id)
                    if connect_bool == True:
                        self.disconnect_bridge_switch(zone,self.back_switch_id_list[zone])
                self.progress_bar(int(20/len(self.zone_list)))
        if self.contents_bool["bridge"] == True:
            printout("Delete bridge: " + str(self.bridge_front_id) ,info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
            self.delete_bridge(self.zone_list[0],self.bridge_front_id)
            if self.bridge_back_id != None:
                printout("Delete bridge: " + str(self.bridge_back_id) ,info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                self.delete_bridge(self.zone_list[0],self.bridge_back_id)
            self.progress_bar(10)
        # Delete the peripheral (non-head) zones first, then the head zone.
        for zone in self.zone_list:
            if zone != self.head_zone:
                self.delete_peripheral_zone(zone)
        if self.head_zone != None:
            self.delete_head_zone(self.head_zone)
        # Top the bar up to exactly 100% regardless of rounding above.
        self.bar.update(100 - self.progress_sum)
        self.bar.close()
    def delete_head_zone(self,zone):
        """Delete everything in the head zone: compute nodes/disks, NFS,
        switches, the head node's NIC link, the head node and its disks."""
        logger.debug("Delete " + zone + " zone")
        if self.contents_bool["server"] == True:
            head_node_id = None
            head_disk_id = []
            compute_node_id_list = []
            disk_id_list = []
            if "head" in self.cluster_info["clusterparams"]["server"][zone].keys():
                head_node_id = self.cluster_info["clusterparams"]["server"][zone]["head"]["node"]["id"]
                logger.info("head node ID: " + str(head_node_id))
                #head_disk_id = []
                if "disk" in self.cluster_info["clusterparams"]["server"][zone]["head"].keys():
                    for i in self.cluster_info["clusterparams"]["server"][zone]["head"]["disk"].keys():
                        head_disk_id.append(self.cluster_info["clusterparams"]["server"][zone]["head"]["disk"][i]["id"])
                        logger.info("head node disk ID: " + str(head_disk_id))
            if "compute" in self.cluster_info["clusterparams"]["server"][zone].keys():
                #compute_node_id_list = []
                #disk_id_list = []
                for i in self.cluster_info["clusterparams"]["server"][zone]["compute"].keys():
                    compute_node_id_list.append(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["node"]["id"])
                    #print(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"].keys())
                    if "disk" in self.cluster_info["clusterparams"]["server"][zone]["compute"][i].keys():
                        for j in self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"].keys():
                            disk_id_list.append(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"][j]["id"])
                            logger.info("compute node disk ID: " + str(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"][j]["id"]))
            #delete compute node
            # Compute nodes are deleted in parallel; disks only after the
            # nodes they were attached to are gone.
            if compute_node_id_list != []:
                logger.info("Delete compute node")
                with futures.ThreadPoolExecutor(max_workers = self.max_workers, thread_name_prefix="thread") as executor:
                    for compute_node_id in compute_node_id_list:
                        #executor.submit(MulHelper(self, "build_one_compute_node"), kwargs={"zone": zone, "i": i})
                        printout("Delete head zone(" + zone + ") : compute node(" + str(compute_node_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite = True)
                        executor.submit(self.delete_server,zone,compute_node_id)
                self.progress_bar(int(10/1+len(compute_node_id_list)))
                '''
                for compute_node_id in compute_node_id_list:
                    printout("Delete head zone(" + zone + ") : compute node(" + str(compute_node_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    logger.debug('Previous: ' + str(compute_node_id))
                    self.delete_server(zone,compute_node_id)
                    logger.debug('After: ' + str(compute_node_id))
                    self.progress_bar(10/1+len(compute_node_id_list))
                logger.debug('finish deleting computenode in head zone')
                '''
            #delete compute disk
            if disk_id_list != []:
                with futures.ThreadPoolExecutor(max_workers = self.max_workers, thread_name_prefix="thread") as executor:
                    for disk_id in disk_id_list:
                        #executor.submit(MulHelper(self, "build_one_compute_node"), kwargs={"zone": zone, "i": i})
                        printout("Delete head zone(" + zone + ") : compute disk(" + str(disk_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite = True)
                        executor.submit(self.delete_disk,zone,disk_id)
                self.progress_bar(int(10/1+len(disk_id_list)))
                '''
                for disk_id in disk_id_list:
                    printout("Delete head zone(" + zone + ") : compute disk(" + str(disk_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.delete_disk(zone,disk_id)
                    self.progress_bar(10/1+len(disk_id_list))'''
        #delete NFS
        if self.contents_bool["nfs"] == True:
            if self.nfs_zones != []:
                if zone in self.nfs_zones:
                    nfs_id = self.cluster_info["clusterparams"]["nfs"][zone]["id"]
                    printout("Delete head zone(" + zone + ") : NFS(" + str(nfs_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.delete_nfs(zone,nfs_id)
                    self.progress_bar(10)
        #if self.back_switch_id_list in locals():
        if self.contents_bool["switch"] == True:
            if self.back_switch_id_list != {}:
                if zone in self.back_switch_id_list.keys():
                    printout("Delete head zone(" + zone + ") : back switch(" + str(self.back_switch_id_list[zone]) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.delete_switch(zone,self.back_switch_id_list[zone])
            #disconnect head node and switch
            if "nic" in self.cluster_info["clusterparams"]["server"][zone]["head"].keys() and self.front_switch_id_list != {}:
                head_node_nic_front_id = self.cluster_info["clusterparams"]["server"][zone]["head"]["nic"]["front"]["id"]
                printout("Disconnect server and switch: " + str(head_node_nic_front_id) + " and " + str(self.front_switch_id_list[zone]), info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                self.disconnect_server_switch(zone,head_node_nic_front_id)
                self.progress_bar(5)
            #self.disconnect_server_switch(zone,head_node_id)
            #delete switch
            #if self.front_switch_id_list in locals():
            if self.front_switch_id_list != {}:
                printout("Delete head zone(" + zone + ") : front switch(" + str(self.front_switch_id_list[zone]) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                self.delete_switch(zone,self.front_switch_id_list[zone])
                self.progress_bar(5)
        logger.info("Delete head node")
        # NOTE(review): head_node_id is only assigned under the server branch
        # above; if contents_bool["server"] is False this log line would raise
        # NameError — confirm this method is only reached with servers present.
        logger.info("head node ID: " + str(head_node_id))
        if self.contents_bool["server"] == True:
            if head_node_id != None:
                printout("Delete head zone(" + zone + ") : head node(" + str(head_node_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                self.delete_server(zone,head_node_id)
                self.progress_bar(10)
            #delete head disk
            #if head_disk_id in locals():
            logger.info("Delete head node disk")
            if head_disk_id != []:
                for disk_id in head_disk_id:
                    printout("Delete head zone(" + zone + ") : head disk(" + str(disk_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.delete_disk(zone,disk_id)
                self.progress_bar(int(10/1+len(head_disk_id)))
    def delete_peripheral_zone(self,zone):
        """Delete a non-head zone: its compute nodes and disks, NFS, and
        back/front switches (in that order)."""
        logger.debug("Delete " + zone + " zone")
        if self.contents_bool["server"] == True:
            if "compute" in self.cluster_info["clusterparams"]["server"][zone].keys():
                compute_node_id_list = []
                disk_id_list = []
                for i in self.cluster_info["clusterparams"]["server"][zone]["compute"].keys():
                    compute_node_id_list.append(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["node"]["id"])
                    #print(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"].keys())
                    if "disk" in self.cluster_info["clusterparams"]["server"][zone]["compute"][i].keys():
                        for j in self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"].keys():
                            disk_id_list.append(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"][j]["id"])
                            #print(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["disk"][j]["id"])
                #delete compute node
                # Nodes in parallel first, then their disks.
                if compute_node_id_list != []:
                    with futures.ThreadPoolExecutor(max_workers = self.max_workers, thread_name_prefix="thread") as executor:
                        for compute_node_id in compute_node_id_list:
                            #executor.submit(MulHelper(self, "build_one_compute_node"), kwargs={"zone": zone, "i": i})
                            printout("Delete compute zone(" + zone + ") : compute node(" + str(compute_node_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite = True)
                            executor.submit(self.delete_server,zone,compute_node_id)
                    self.progress_bar(int(10/1+len(compute_node_id_list)))
                    '''
                    for compute_node_id in compute_node_id_list:
                        printout("Delete compute zone(" + zone + ") : compute node(" + str(compute_node_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                        self.delete_server(zone,compute_node_id)
                        self.progress_bar(10/1+len(compute_node_id_list))'''
                if disk_id_list != []:
                    with futures.ThreadPoolExecutor(max_workers = self.max_workers, thread_name_prefix="thread") as executor:
                        for disk_id in disk_id_list:
                            #executor.submit(MulHelper(self, "build_one_compute_node"), kwargs={"zone": zone, "i": i})
                            printout("Delete compute zone(" + zone + ") : compute disk(" + str(disk_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite = True)
                            executor.submit(self.delete_disk,zone,disk_id)
                    self.progress_bar(int(10/1+len(disk_id_list)))
                    '''
                    for disk_id in disk_id_list:
                        printout("Delete compute zone(" + zone + ") : compute disk(" + str(disk_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                        self.delete_disk(zone,disk_id)
                        self.progress_bar(10/1+len(disk_id_list))'''
        if self.contents_bool["nfs"] == True:
            if self.nfs_zones != []:
                if zone in self.nfs_zones:
                    nfs_id = self.cluster_info["clusterparams"]["nfs"][zone]["id"]
                    printout("Delete compute zone(" + zone + ") : NFS(" + str(nfs_id) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.delete_nfs(zone,nfs_id)
                    self.progress_bar(10)
        if self.contents_bool["switch"] == True:
            #if self.back_switch_id_list in locals():
            if self.back_switch_id_list != {}:
                if zone in self.back_switch_id_list.keys():
                    printout("Delete compute zone(" + zone + ") : back switch(" + str(self.back_switch_id_list[zone]) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                    self.delete_switch(zone,self.back_switch_id_list[zone])
            # if self.front_switch_id_list in locals():
            if self.front_switch_id_list != {}:
                printout("Delete compute zone(" + zone + ") : front switch(" + str(self.front_switch_id_list[zone]) + ")", info_type = 0, info_list = self.info_list, fp = self.fp, overwrite=True)
                self.delete_switch(zone,self.front_switch_id_list[zone])
                self.progress_bar(5)
    def delete_server(self,zone,node_id):
        """DELETE a server, retrying (via delete_error) until it succeeds."""
        if(self.api_index == True):
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[0] + "/" + str(node_id), self.auth_res)
                check = self.res_check(delete_res,"delete")
                if (check == True):
                    logger.debug("Delete this server:" + str(node_id))
                    break
                else:
                    self.delete_error()
        else:
            # Dry run: no API call issued.
            delete_res = "API is not used."
    def delete_disk(self,zone,disk_id):
        """DELETE a disk, retrying until it succeeds."""
        if(self.api_index == True):
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[1] + "/" + str(disk_id), self.auth_res)
                check = self.res_check(delete_res,"delete")
                if (check == True):
                    logger.debug("Delete this disk:" + str(disk_id))
                    break
                else:
                    self.delete_error()
        else:
            # Dry run: no API call issued.
            delete_res = "API is not used."
    def delete_switch(self,zone,switch_id):
        """DELETE a switch, retrying until it succeeds."""
        if(self.api_index == True):
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[2] + "/" + str(switch_id), self.auth_res)
                check = self.res_check(delete_res,"delete")
                if (check == True):
                    logger.debug("Delete this switch:" + str(switch_id))
                    break
                else:
                    self.delete_error()
        else:
            # Dry run: no API call issued.
            delete_res = "API is not used."
    def delete_bridge(self,zone,bridge_id):
        """DELETE a bridge, retrying until it succeeds."""
        if(self.api_index == True):
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[4] + "/" + str(bridge_id), self.auth_res)
                check = self.res_check(delete_res,"delete")
                if (check == True):
                    logger.debug("Delete this bridge:" + str(bridge_id))
                    break
                else:
                    self.delete_error()
        else:
            # Dry run: no API call issued.
            delete_res = "API is not used."
    def delete_nfs(self,zone,nfs_id):
        """DELETE an NFS appliance, retrying until it succeeds."""
        if(self.api_index == True):
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[6] + "/" + str(nfs_id), self.auth_res)
                check = self.res_check(delete_res,"delete")
                if (check == True):
                    logger.debug("Delete this NFS:" + str(nfs_id))
                    break
                else:
                    self.delete_error()
        else:
            # Dry run: no API call issued.
            delete_res = "API is not used."
    def disconnect_bridge_switch(self,zone,switch_id):
        """Detach a switch from its bridge (DELETE switch/<id>/to/bridge)."""
        if(self.api_index == True):
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[2] + "/" + str(switch_id) + "/to" + self.sub_url[4], self.auth_res)
                check = self.res_check(delete_res, "delete")
                if (check == True):
                    logger.debug("Disconnect biridge and switch: " + str(switch_id))
                    break
                else:
                    self.delete_error()
        else:
            # Dry run: no API call issued.
            stop_res = "API is not used."
    def disconnect_server_switch(self,zone,nic_id):
        """Detach a server NIC from its switch (DELETE interface/<id>/to/switch)."""
        if(self.api_index == True):
            '''
            while(True):
                server_info = get(self.url_list[zone] + self.sub_url[0] + "/" + str(node_id), self.auth_res)
                check = self.res_check(server_info, "get")
                if(check == True):
                    interface_info = server_info['Server']['Interfaces']
                    interface_id_list = []
                    for interface in interface_info:
                        if interface['Switch']['Scope'] == 'user':
                            interface_id_list.append(interface['ID'])
                    break
                else:
                    self.delete.error()
            '''
            while(True):
                delete_res = delete(self.url_list[zone] + self.sub_url[3] + "/" + str(nic_id) + "/to" + self.sub_url[2], self.auth_res)
                check = self.res_check(delete_res, "delete")
                if (check == True):
                    logger.debug("Disconnect server and switch(NIC ID): " + str(nic_id))
                    break
                else:
                    self.delete_error()
            '''
            if interface_id_list != []:
                for interface_id in interface_id_list:
                    while(True):
                        delete_res = delete(self.url_list[zone] + self.sub_url[1] + "/" + str(interface_id) + "/to" + self.sub_url[2], self.auth_res)
                        check = self.res_check(delete_res, "delete")
                        if (check == True):
                            logger.debug("Disconnect server and switch: " + str(node_id))
                            break
                        else:
                            self.delete_error()
            '''
        else:
            # Dry run: no API call issued.
            stop_res = "API is not used."
    def get_bridge_info(self,zone,bridge_id):
        """Return True when the bridge is still connected to at least one switch."""
        if(self.api_index == True):
            while(True):
                get_res = get(self.url_list[zone] + self.sub_url[4] + "/" + str(bridge_id), self.auth_res)
                check = self.res_check(get_res, "get")
                if (check == True):
                    logger.debug("Get bridge infomation(bridge ID): " + str(bridge_id))
                    # Connected switches are reported under Bridge.Info.Switches.
                    if get_res["Bridge"]["Info"] != None:
                        if "Switches" in get_res["Bridge"]["Info"].keys():
                            connect_bool = True
                        else:
                            connect_bool = False
                    else:
                        connect_bool = False
                    return connect_bool
                else:
                    self.delete_error()
        else:
            # Dry run: report "not connected" so callers skip the disconnect.
            connect_bool = False
            return connect_bool
    def progress_bar(self, up):
        """Advance the shared tqdm bar by *up* points, scaled for multi-zone runs."""
        if len(self.zone_list) == 1:
            self.bar.update(int(up))
            self.progress_sum += int(up)
        elif len(self.zone_list) >= 2:
            # Spread the same budget over all zones (+0.5 headroom).
            self.bar.update(int(up)/(len(self.zone_list) + 0.5))
            self.progress_sum += int(up)/(len(self.zone_list) + 0.5)
    #API response check
    def res_check(self, res, met):
        """Return True/False for an API response *res* of method *met*.

        GET/POST responses carry "is_ok"; PUT/DELETE carry "Success".
        NOTE(review): if the response has neither the expected key nor
        "is_fatal", this falls off the end and implicitly returns None
        (falsy) — confirm that is intended.
        """
        met_dict = {"get": "is_ok", "post": "is_ok", "put": "Success","delete": "Success"}
        index = met_dict[met]
        logger.debug("confirm API request(" + str(met) + ")")
        if (index in res.keys()):
            if res[index] == True:
                logger.debug("API processing succeeded")
                check = True
                return check
            else:
                logger.warning("API processing failed")
                printout("Error:",info_type = 0, info_list = self.info_list, fp = self.fp)
                check = False
                return check
        elif ("is_fatal" in res.keys()):
            logger.warning("API processing failed")
            printout("Status:" + res["status"],info_type = 0, info_list = self.info_list, fp = self.fp)
            printout("Error:" + res["error_msg"],info_type = 0, info_list = self.info_list, fp = self.fp)
            check = False
            return check
    # Handling when an API request fails: ask whether to retry.
    def delete_error(self):
        """Prompt the operator to retry the failed request; exit on "no"."""
        logger.debug("decision of repeating to request")
        while(True):
            conf = printout("Try again??(yes/no):",info_type = 2, info_list = self.info_list, fp = self.fp)
            if conf == "yes":
                break
            elif conf == "no":
                printout("Stop processing.",info_type = 0, info_list = self.info_list, fp = self.fp)
                sys.exit()
            else:
                printout("Please answer yes or no.",info_list = self.info_list,fp = self.fp)
    def handler(self, signal, frame):
        """SIGINT handler: announce the abort and terminate the process."""
        printout("Stop processing",info_type = 0, info_list = self.info_list, fp = self.fp)
        sys.exit()
    def validate_params(self,cluster_info):
        """Return {"server","switch","bridge","nfs"} -> bool for what the
        cluster_info dict actually contains (non-empty, non-None entries)."""
        contents = [k for k in cluster_info["clusterparams"].keys()]
        contents_bool = {}
        # NOTE(review): this zones list is unused (shadowed in the nfs branch).
        zones = ["tk1a","tk1b","is1a","is1b","tk1v"]
        if "server" in contents:
            if cluster_info["clusterparams"]["server"] != {}:
                #zone = next(iter(cluster_info["clusterparams"]["server"]))
                #print(zone)
                server_zones = [zone for zone in cluster_info["clusterparams"]["server"].keys()]
                for zone in server_zones:
                    # NOTE(review): `"head" or "compute" in ...` evaluates the
                    # truthy literal "head" first, so this condition is always
                    # True and its else branch below is unreachable; it was
                    # likely meant as `"head" in ... or "compute" in ...`.
                    if "head" or "compute" in cluster_info["clusterparams"]["server"][zone].keys():
                        if "head" in cluster_info["clusterparams"]["server"][zone].keys():
                            if cluster_info["clusterparams"]["server"][zone]["head"] != {}:
                                contents_bool["server"] = True
                                break
                            else:
                                contents_bool["server"] = False
                        else:
                            if "compute" in cluster_info["clusterparams"]["server"][zone]["compute"]:
                                if cluster_info["clusterparams"]["server"][zone]["compute"] != {}:
                                    contents_bool["server"] = True
                                    break
                                else:
                                    contents_bool["server"] = False
                            else:
                                contents_bool["server"] = False
                    else:
                        contents_bool["server"] = False
            else:
                contents_bool["server"] = False
        else:
            contents_bool["server"] = False
        if "switch" in contents:
            if cluster_info["clusterparams"]["switch"] != {}:
                # Only the first zone's switch entry is inspected.
                zone = next(iter(cluster_info["clusterparams"]["switch"]))
                if cluster_info["clusterparams"]["switch"][zone] != {}:
                    contents_bool["switch"] = True
                else:
                    contents_bool["switch"] = False
            else:
                contents_bool["switch"] = False
        else:
            contents_bool["switch"] = False
        if "bridge" in contents:
            if "front" in cluster_info["clusterparams"]["bridge"].keys():
                if cluster_info["clusterparams"]["bridge"]["front"] != None:
                    contents_bool["bridge"] = True
                else:
                    contents_bool["bridge"] = False
            else:
                contents_bool["bridge"] = False
        else:
            contents_bool["bridge"] = False
        if "nfs" in contents:
            if cluster_info["clusterparams"]["nfs"] != {}:
                zones = list(cluster_info["clusterparams"]["nfs"].keys())
                nfs_index_list =[]
                for zone in zones:
                    if cluster_info["clusterparams"]["nfs"][zone] != {}:
                        if cluster_info["clusterparams"]["nfs"][zone] != None:
                            #contents_bool["nfs"] = True
                            nfs_index_list.append(True)
                        else:
                            nfs_index_list.append(False)
                    else:
                        #contents_bool["nfs"] = False
                        nfs_index_list.append(False)
                # NFS is "present" if any zone has a real entry.
                if True in nfs_index_list:
                    contents_bool["nfs"] = True
                else:
                    contents_bool["nfs"] = False
            else:
                contents_bool["nfs"] = False
        else:
            contents_bool["nfs"] = False
        return contents_bool
|
[
"tsukiyama.sho675@mail.kyutech.jp"
] |
tsukiyama.sho675@mail.kyutech.jp
|
470c4c7bca3b8d12735c165a27f7358ffea8ad63
|
5bfd36d3ad0afe03aed0bacfe124cfc38b3b01cb
|
/__init__.py
|
fdb43eccc6ce141e20756a7e09baafba952f2788
|
[] |
no_license
|
rocailler/tele_bot
|
b53ab8988cba24290ffc9b1cd48e8604fb11628c
|
c8d5755be0c5acedcd737696cf3a1f55298514dd
|
refs/heads/main
| 2023-01-18T21:52:20.106961
| 2020-11-20T16:36:47
| 2020-11-20T16:36:47
| 314,608,220
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 337
|
py
|
import requests
import os
def send_message(text, token, group_id):
    """Post *text* to a Telegram group chat via the Bot API.

    Parameters
    ----------
    text : str, message body.
    token : str, bot API token.
    group_id : str or int, group chat id WITHOUT the leading '-' (the
        original code hard-coded the '-' prefix for group chats; kept).
    """
    os.environ['NO_PROXY'] = 'telegram.org'
    # Bug fix: the original interpolated the text straight into the URL,
    # which broke on '&', '#', '+' and spaces; let requests URL-encode it.
    # A timeout prevents the call from hanging forever if Telegram is down.
    requests.get(
        'https://api.telegram.org/bot{}/sendMessage'.format(token),
        params={'chat_id': '-{}'.format(group_id), 'text': text},
        timeout=10,
    )
    print('message sent: ', text)
if __name__ == '__main__':
    # Bug fix: the original passed undefined names (text/token/group_id)
    # and raised NameError. Read the credentials from the environment so
    # the script is runnable without editing the source.
    token = os.environ['TELEGRAM_BOT_TOKEN']
    group_id = os.environ['TELEGRAM_GROUP_ID']
    text = os.environ.get('TELEGRAM_TEXT', 'test message')
    send_message(text, token, group_id)
|
[
"noreply@github.com"
] |
rocailler.noreply@github.com
|
bdf97e5eac0773a85acc6dddcf93068b539e13f3
|
1bab2fc1968062c566e23b888b47de358d15213e
|
/leetcode/Binary Tree Maximum Path Sum/main.py
|
36ad4718a97290705457cf7656b74647fe538aaa
|
[] |
no_license
|
dalleng/Interview-Practice
|
7bad4493c5f8131f988708bc4652d56dc961ed90
|
b72229c50e87d1ff32d3538d13779953451b9daf
|
refs/heads/master
| 2022-11-27T04:02:20.446524
| 2022-11-10T12:02:44
| 2022-11-10T12:02:44
| 44,450,636
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 939
|
py
|
# Definition for a binary tree node.
class TreeNode:
    """A binary tree node holding a value and optional children."""
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right


class Solution:
    """Maximum path sum in a binary tree (a path may start and end anywhere)."""

    def maxPathSum(self, root: TreeNode) -> int:
        """Return the largest sum over all node-to-node paths in the tree."""
        self.max = None
        self.mps(root)
        return self.max

    def mps(self, root):
        # Post-order walk. Returns the best downward (single-branch) sum
        # starting at this node; as a side effect, folds the best path that
        # bends through this node into self.max.
        if root is None:
            return None
        left_best = self.mps(root.left)
        right_best = self.mps(root.right)
        # A child branch only helps when its best sum is positive;
        # `or 0` also maps a missing (None) child to zero gain.
        left_gain = max(left_best or 0, 0)
        right_gain = max(right_best or 0, 0)
        if self.max is None:
            self.max = root.val
        self.max = max(self.max, root.val + left_gain + right_gain)
        return root.val + max(left_gain, right_gain)
|
[
"diegoallen@gmail.com"
] |
diegoallen@gmail.com
|
5c82bdf13fb47780c93ec4b522b30db165482492
|
b91578b96ffe63639d3efc70d4737b92091cd0b1
|
/backend/unpp_api/apps/project/migrations/0071_auto_20180810_0753.py
|
c588c551d41e1036a9d7e44416026b07ae15c43f
|
[
"Apache-2.0"
] |
permissive
|
unicef/un-partner-portal
|
876b6ec394909ed2f72777493623413e9cecbfdc
|
73afa193a5f6d626928cae0025c72a17f0ef8f61
|
refs/heads/develop
| 2023-02-06T21:08:22.037975
| 2019-05-20T07:35:29
| 2019-05-20T07:35:29
| 96,332,233
| 6
| 1
|
Apache-2.0
| 2023-01-25T23:21:41
| 2017-07-05T15:07:44
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 952
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-08-10 07:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: renames Application decision fields so that
    # the partner's decision (partner_*) is distinguished from the agency's
    # decision (win_* -> agency_*). Do not edit the operations by hand.

    dependencies = [
        ('project', '0070_auto_20180810_0729'),
    ]

    operations = [
        # Partner-side decision metadata.
        migrations.RenameField(
            model_name='application',
            old_name='decision_date',
            new_name='partner_decision_date',
        ),
        migrations.RenameField(
            model_name='application',
            old_name='decision_maker',
            new_name='partner_decision_maker',
        ),
        # Agency-side decision metadata (formerly "win" terminology).
        migrations.RenameField(
            model_name='application',
            old_name='win_date',
            new_name='agency_decision_date',
        ),
        migrations.RenameField(
            model_name='application',
            old_name='win_decision_maker',
            new_name='agency_decision_maker',
        ),
    ]
|
[
"maciej.jaworski@tivix.com"
] |
maciej.jaworski@tivix.com
|
e15c255280888dca99479b6ba6e64b392083d8ac
|
29db018a8d18d99f0e7bf17102adc4fb12c0253a
|
/augment_images6.py
|
3af00f1e1e426cb29dfa92d03685e600c6a0c3ee
|
[] |
no_license
|
hafiz703/AI-Proj
|
2796fb47b67988c5353ed4a252abfbd28ef13d9a
|
9dce3f491a905b713d4472007c073ddcf8f57922
|
refs/heads/master
| 2021-01-01T06:09:01.998643
| 2017-08-07T19:40:41
| 2017-08-07T19:40:41
| 97,370,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,003
|
py
|
from PIL import Image
import numpy as np
import math
import os
from random import randint
from colorsys import rgb_to_hsv, hsv_to_rgb
def resize_image(img_name):
    """Load ``img_name + ".jpg"`` and resize it so the shorter side is 250 px.

    The aspect ratio is preserved. Returns a float32 H x W x 3 numpy array:
    grayscale inputs are replicated to three channels and any extra
    channels (e.g. alpha) are dropped.
    """
    shorterside = 250
    # Bug fix: the original leaked the file handle returned by Image.open;
    # the context manager closes it deterministically.
    with Image.open(img_name + ".jpg") as pilimg:
        w, h = pilimg.size
        if w > h:
            longerside = np.int32(math.floor(float(shorterside) * w / float(h)))
            neww = longerside
            newh = shorterside
        elif h > w:
            longerside = np.int32(math.floor(float(shorterside) * h / float(w)))
            newh = longerside
            neww = shorterside
        else:
            # Square image: both sides become 250 px.
            newh = shorterside
            neww = shorterside
        resimg = pilimg.resize((neww, newh))
    im = np.array(resimg, dtype=np.float32)
    if im.ndim < 3:
        # Grayscale: replicate the single channel to get an RGB array.
        im = np.expand_dims(im, 2)
        im = np.concatenate((im, im, im), 2)
    if im.shape[2] > 3:
        # Drop alpha / extra channels.
        im = im[:, :, 0:3]
    return im
def save_image(img, img_name):
    """Write the float image array *img* to disk as an 8-bit image at *img_name*."""
    Image.fromarray(img.astype('uint8')).save(img_name)
def horizontal_flip(img, img_name):
    """Mirror *img* left-right and save it as ``<img_name>_horizontal_flip.jpg``.

    Performance fix: the original copied pixels one at a time in a nested
    Python loop (O(h*w) interpreter iterations); a reversed numpy slice
    produces the identical array in a single C-level operation.
    """
    flipped = img[:, ::-1]
    save_image(flipped, img_name + "_horizontal_flip.jpg")
def vertical_flip(img, img_name):
flipped = np.zeros_like(img)
h = len(img)
w = len(img[0])
for i in range(len(img)):
flipped[h-i-1] = img[i]
save_image(flipped, img_name+"_vertical_flip.jpg")
def rotate(img):
h = len(img)
w = len(img[0])
rotated = np.zeros((w, h, 3))
for i in range(w):
for j in range(h):
rotated[i][j] = img[h-j-1][i]
return rotated
def rotate_full(img, img_name):
rotated = rotate(img)
save_image(rotated, img_name+"_rotated_90.jpg")
rotated = rotate(rotated)
save_image(rotated, img_name+"_rotated_180.jpg")
rotated = rotate(rotated)
save_image(rotated, img_name+"_rotated_270.jpg")
def random_brightness(img, img_name):
rounds = 3
for r in range(rounds):
brightness_multiplier = np.random.uniform() + 0.5
brightened = np.zeros_like(img)
for i in range(len(img)):
for j in range(len(img[i])):
(h,s,v) = rgb_to_hsv(img[i][j][0], img[i][j][1], img[i][j][2])
v *= brightness_multiplier
v = min(255, v)
brightened[i][j] = hsv_to_rgb(h,s,v)
save_image(brightened, img_name+"_brightened_"+str(r)+".jpg")
def augment_image(img_name):
img = resize_image(img_name)
# crops
stride_length = 2
stride_pixels = 249
h = len(img)
w = len(img[0])
if w > h:
w_stride = np.int32(math.floor(float(stride_pixels)*w/float(h)))
h_stride = stride_pixels
elif h > w:
h_stride = np.int32(math.floor(float(stride_pixels)*h/float(w)))
w_stride = stride_pixels
else:
h_stride = stride_pixels
w_stride = stride_pixels
h_step = (h-h_stride)/(stride_length-1)
w_step = (w-w_stride)/(stride_length-1)
for i in range(stride_length):
for j in range(stride_length):
cropped = img[np.int32(math.floor(i*h_step)):np.int32(math.floor(i*h_step+h_stride)), np.int32(math.floor(j*w_step)):np.int32(math.floor(j*w_step+w_stride))]
transform_image(cropped, img_name+"_"+str(i)+"_"+str(j))
def transform_image(img, img_name):
# transforms
save_image(img, img_name+".jpg")
if (randint(0,2) == 0):
horizontal_flip(img, img_name)
if (randint(0,2) == 0):
vertical_flip(img, img_name)
if (randint(0,2) == 0):
rotate_full(img, img_name)
if (randint(0,2) == 0):
random_brightness(img, img_name)
if __name__=='__main__':
# directories = ["images"]
directories = ["benign", "malignant"]
for img_dir in directories:
files = os.listdir(img_dir)
for filename in files:
if filename.endswith("_val.jpg"):
img_name = os.path.splitext(filename)[0]
augment_image(os.path.join(img_dir, img_name))
|
[
"jang93@gmail.com"
] |
jang93@gmail.com
|
97432421da5686a55155023b69fb9729d2989ab9
|
5b245d51b2084c89de6d2c9e41c11debf1f58433
|
/samples/basic/crud/ydk/models/infra/nc-create-config-infra-infra-clock-linux-20-ydk.py
|
4438007faca8e47f9b4a2169928b312fe480ab4b
|
[
"Apache-2.0"
] |
permissive
|
fmontoy/ydk-py-samples
|
161071afc47bf05e7ad25506b3065fa052130173
|
b46e37a87d73ded101a6c27988c99ef58dd3d344
|
refs/heads/master
| 2020-12-28T21:05:43.969348
| 2016-07-23T00:43:23
| 2016-07-23T00:43:23
| 63,990,521
| 0
| 0
| null | 2016-07-23T00:03:35
| 2016-07-23T00:03:35
| null |
UTF-8
|
Python
| false
| false
| 2,915
|
py
|
#!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Create config for model Cisco-IOS-XR-infra-infra-clock-linux-cfg.
usage: nc-create-config-infra-infra-clock-linux-20-ydk.py [-h] [-v] device
positional arguments:
device NETCONF device (ssh://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.infra import Cisco_IOS_XR_infra_infra_clock_linux_cfg \
as xr_infra_infra_clock_linux_cfg
import logging
def config_clock(clock):
"""Add config data to clock object."""
# time zone configuration
time_zone = clock.TimeZone()
time_zone.time_zone_name = "PST"
time_zone.area_name = "PST8PDT"
clock.time_zone = time_zone
if __name__ == "__main__":
"""Execute main program."""
parser = ArgumentParser()
parser.add_argument("-v", "--verbose", help="print debugging messages",
action="store_true")
parser.add_argument("device",
help="NETCONF device (ssh://user:password@host:port)")
args = parser.parse_args()
device = urlparse(args.device)
# log debug messages if verbose argument specified
if args.verbose:
logger = logging.getLogger("ydk")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter(("%(asctime)s - %(name)s - "
"%(levelname)s - %(message)s"))
handler.setFormatter(formatter)
logger.addHandler(handler)
# create NETCONF provider
provider = NetconfServiceProvider(address=device.hostname,
port=device.port,
username=device.username,
password=device.password,
protocol=device.scheme)
# create CRUD service
crud = CRUDService()
clock = xr_infra_infra_clock_linux_cfg.Clock() # create config object
config_clock(clock) # add object configuration
crud.create(provider, clock) # create object on NETCONF device
provider.close()
exit()
# End of script
|
[
"saalvare@cisco.com"
] |
saalvare@cisco.com
|
b1604e1939a42f45ed1da431900f5e65732c66e3
|
ae2a10fda58cede0fb69a00ba1cb6d9dd4ca7c0a
|
/evebot/__init__.py
|
b0ef723be1a3981b65384259496a9b2d49699305
|
[] |
no_license
|
pinkeen/evebot
|
c55763d8a76de17f1b7f621334b4a544957f4f5f
|
5c1e8b37bd2387b61202d7cc972fe92ddbe1174d
|
refs/heads/master
| 2021-05-30T01:41:45.497249
| 2015-11-17T14:34:38
| 2015-11-17T14:34:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25
|
py
|
__version__ = '0.2.0'
|
[
"pinkeen@gmail.com"
] |
pinkeen@gmail.com
|
36200ebc4b1b37c8c1e7c75209f118c1ba35d4a7
|
72f89a06dfb27c30b2c5cfc51535cf1337b9d697
|
/DB_thread&calcilator&filter&mssql.py
|
dff18f8019e94ecdf101bcbfdfe3fcb9e890b46e
|
[] |
no_license
|
DaLemon/stockyakult
|
5b34c86be1166b9ea3662b8b85c543f62190cac3
|
77a1f0af31b82cbd3c9e7f857c09c83c5e5176d0
|
refs/heads/main
| 2023-03-08T00:09:33.298549
| 2021-02-19T14:23:32
| 2021-02-19T14:23:32
| 328,613,943
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,659
|
py
|
import requests
import pymysql
import pyodbc
from bs4 import BeautifulSoup
from queue import Queue
import threading
import time
import datetime as dt
stock_info=[]
db_settings = {
"host": "**********",
"port": 3306,
"user": "root",
"password": "**********",
"db": "**********",
"charset": "utf8"
}
# 建立Connection物件
conn = pymysql.connect(**db_settings)
# 建立Cursor物件
cursor =conn.cursor()
mutex = threading.Lock()
def Catch_Data(all_stock_info,Stocknum_q):
#將陣列4等分
while Stocknum_q.qsize():
#進入目標網頁
url = 'http://pelements.money-link.com.tw/service/RQD2Service.ashx?systex=on&symid=%s'%Stocknum_q.get()
res = requests.get(url)
soup = BeautifulSoup(res.text , 'html.parser')
#soup處理分割,存入陣列
doc3 = str(soup).replace('"','')
doc3 = doc3.split(',',20)
stocknum = doc3[0].replace('{SID:','')
#stock_info.append(doc3[1].replace('Name:',''))
_date = '2020/%s'%doc3[3].replace('Date:','')
Open = doc3[6].replace('O:','')
high = doc3[7].replace('H:','')
low = doc3[8].replace('L:','')
close = doc3[9].replace('C:','')
volume = doc3[16].replace('Qt:','')
all_stock_info.append(['',stocknum,_date,Open,high,low,close,volume,None,None,None,None,None,None])
return all_stock_info
def multithreading (all_stock_info):
threads = []
#建立4個程序
for i in range(0,4):
t=threading.Thread(target=Catch_Data,args=(all_stock_info,Stocknum_q))
t.start()
print('Catch_Data - Thread%d...STRAT'%i)
threads.append(t)
for thread in threads:
thread.join()#等待線程結束後繼續
print('Catch_Data - ALL Thread is FINISH')
#result = []
return all_stock_info
def KD_Calculator(stock_kd,Stocknum_q,dateid,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult):
while Stocknum_q.empty() == False:
high_list = []
low_list = []
have_none = 0
db_settings = {
"host": "**********",
"port": 3306,
"user": "root",
"password": "**********",
"db": "**********",
"charset": "utf8"
}
# 建立Connection物件
conn = pymysql.connect(**db_settings)
# 建立Cursor物件
cursor =conn.cursor()
command = "SELECT DateID,stockinfo.StockID,_Date,Open,High,Low,Close,Volume,K,D,EMA12,EMA26,DIF,MACD9,StockName,Market,Industry FROM stock.stockinfo,stock.stockid WHERE stockinfo.StockID=stockid.StockID AND DateID <= '%s' AND stockinfo.StockID = '%s' ORDER BY DateID DESC LIMIT 9"%(dateid,Stocknum_q.get())
print(command)
cursor.execute(command)
for (DateID,StockID,_Date,Open,High,Low,Close,Volume,K,D,EMA12,EMA26,DIF,MACD9,StockName,Market,Industry) in cursor:
high_list.append(High)
low_list.append(Low)
if DateID == dateid-3 :
if K == None or D == None or EMA12 == None or EMA26 == None or MACD9 == None :
pass
else:
DIF_3day_before = float(DIF)
MACD9_3day_before = float(MACD9)
elif DateID == dateid-2 :
if K == None or D == None or EMA12 == None or EMA26 == None or MACD9 == None :
pass
else:
K_before_yesterday = float(K)
D_before_yesterday = float(D)
EMA12_before_yesterday = float(EMA12)
EMA26_before_yesterday = float(EMA26)
DIF_before_yesterday = float(DIF)
MACD9_before_yesterday = float(MACD9)
elif DateID == dateid-1 :
if K == None or D == None or EMA12 == None or EMA26 == None or MACD9 == None :
have_none = 1
else:
C_yesterday = float(Close)
K_yesterday = float(K)
D_yesterday = float(D)
EMA12_yesterday = float(EMA12)
EMA26_yesterday = float(EMA26)
DIF_yesterday = float(DIF)
MACD9_yesterday = float(MACD9)
elif DateID == dateid:
C_today = float(Close)
Sid = StockID
_D = _Date
O = Open
H = High
L = Low
V = Volume
Sname = StockName
Market = Market
Industry = Industry
H_9day = max(high_list)
L_9day = min(low_list)
#漲幅
percent = C_today/C_yesterday
if percent > 1:
PP = ('+'+str(round((percent-1)*100,2))+'%')
elif percent == 1:
PP = '0'
else:
PP = ('-'+str(round((1-percent)*100,2))+'%')
#跳過昨日空白
#1372(6289),1466(6598),1503(6732)
if have_none == 1 or Sid ==6289 or Sid ==6598 or Sid ==6732 :
stock_kd.append([dateid,Sid,_D,O,H,L,C_today,V,round(K,3),round(D,3),None,None,None,None])
continue
#RSV=(今日收盤價 - 最近九天的最低價)/(最近九天的最高價 - 最近九天最低價)
#K = 2/3 X (昨日K值) + 1/3 X (今日RSV)
#D = 2/3 X (昨日D值) + 1/3 X (今日K值)
if H_9day == C_today:
RSV = 100
elif L_9day == C_today:
RSV = 0
else:
RSV = 100*(C_today-L_9day)/(H_9day-L_9day)
K = (2/3*K_yesterday) + (1/3*RSV)
D = (2/3*D_yesterday) + (1/3*K)
#print(RSV,K,D)
#EMA12 = 昨日EMA12 * 11/13 + 今日收盤*2/13
#DIF = EMA12 -EMA26
#MACD9 = 昨日MACD * 8/10 +今日DIF *2/10
EMA12 = EMA12_yesterday *11/13 +C_today*2/13
EMA26 = EMA26_yesterday *25/27 +C_today*2/27
DIF = EMA12 - EMA26
MACD9 = MACD9_yesterday *8/10 + DIF*2/10
stock_kd.append([dateid,Sid,_D,O,H,L,C_today,V,round(K,3),round(D,3),round(EMA12,3),round(EMA26,3),round(DIF,3),round(MACD9,3)])
#Filter
KD_before_yesterday_dispatch = K_before_yesterday - D_before_yesterday
KD_yesterday_dispatch = K_yesterday - D_yesterday
KD_today_dispatch = K - D
OSC_3day_before = DIF_3day_before -MACD9_3day_before
OSC_before_yesterday = DIF_before_yesterday - MACD9_before_yesterday
OSC_yesterday = DIF_yesterday - MACD9_yesterday
OSC_today = DIF - MACD9
if KD_before_yesterday_dispatch > KD_yesterday_dispatch and KD_today_dispatch > KD_yesterday_dispatch and K_before_yesterday > K_yesterday and K > K_yesterday and KD_before_yesterday_dispatch > 0 and KD_yesterday_dispatch > 0 and KD_today_dispatch > 0 and OSC_before_yesterday > OSC_yesterday and OSC_today > OSC_yesterday and OSC_today > 0 and OSC_yesterday > 0 and OSC_before_yesterday >0 and V>500:
RedDuckyYakult.append([Sid,Sname,Market,Industry,C_today,PP])
elif KD_before_yesterday_dispatch > KD_yesterday_dispatch and KD_today_dispatch > KD_yesterday_dispatch and K_before_yesterday > K_yesterday and K > K_yesterday and KD_before_yesterday_dispatch > 0 and KD_yesterday_dispatch > 0 and KD_today_dispatch > 0 and V>500:
RedConfirmDucky.append([Sid,Sname,Market,Industry,C_today,PP])
elif OSC_before_yesterday > OSC_yesterday and OSC_today > OSC_yesterday and OSC_today > 0 and OSC_yesterday > 0 and OSC_before_yesterday >0 and V>500:
RedYakult.append([Sid,Sname,Market,Industry,C_today,PP])
elif OSC_3day_before < OSC_before_yesterday and OSC_before_yesterday > OSC_yesterday and OSC_yesterday > OSC_today and OSC_3day_before < 0 and OSC_before_yesterday < 0 and OSC_yesterday < 0 and OSC_today < 0 and V>500:
Green2Yakult.append([Sid,Sname,Market,Industry,C_today,PP])
conn.close()
return stock_kd,RedDuckyYakult,RedConfirmDucky,RedYakult
def multithreading2 (stock_kd,dateid,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult):
threads = []
#建立4個程序
for i in range(0,6):
t=threading.Thread(target=KD_Calculator,args=(stock_kd,Stocknum_q,dateid,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult))
t.start()
print('KD_Calculator - Thread%d...STRAT'%i)
threads.append(t)
for thread in threads:
thread.join()#等待線程結束後繼續
print('KD_Calculator - ALL Thread is FINISH')
#result = []
return stock_kd,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult
def RemoveSpace(all_stock_info):
for n in range(0,len(all_stock_info)):
#去marketday抓DateID
command = "SELECT DateID FROM marketday WHERE _Date = '%s'"%(all_stock_info[n][2])
cursor.execute(command)
dateid = cursor.fetchone()
all_stock_info[n][0] = dateid[0]
#判斷移除空格
if all_stock_info[n][3] == '--':
command = "SELECT * FROM stockinfo WHERE DateID = '%s' AND StockID = '%s'"%(all_stock_info[n][0]-1,all_stock_info[n][1])
cursor.execute(command)
a = cursor.fetchone()
all_stock_info[n][3] = a[6]
all_stock_info[n][4] = a[6]
all_stock_info[n][5] = a[6]
all_stock_info[n][6] = a[6]
all_stock_info[n][7] = 0
return all_stock_info
def WriteToDB(all_stock_info,dateid):
command = "DELETE FROM stock.stockinfo WHERE DateID = '%s'"%(dateid)
print(command)
cursor.execute(command)
conn.commit()
command = "INSERT INTO stockinfo(DateID,StockID,_Date,Open,High,Low,Close,Volume,K,D,EMA12,EMA26,DIF,MACD9)VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.executemany(command,all_stock_info)
conn.commit()
print('Finish....')
def UpLoadmssql(datetime,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult):
odb_settings = {
"DRIVER": "{SQL Server}",
"SERVER": "**********",
"UID": "**********",
"PWD": "**********",
"DATABASE": "**********",
"charset": "utf8"
}
O_cnxn = pyodbc.connect(**odb_settings)
O_cursor = O_cnxn.cursor()
#Detele old data
command = "DELETE FROM dbo.UpdateTime"
O_cursor.execute(command)
command = "DELETE FROM dbo.RedYakult"
O_cursor.execute(command)
command = "DELETE FROM dbo.RedDucky"
O_cursor.execute(command)
command = "DELETE FROM dbo.RedDuckyYakult"
O_cursor.execute(command)
command = "DELETE FROM dbo.Green2Yakult"
O_cursor.execute(command)
command = "INSERT INTO dbo.UpdateTime(UTime) Values(?)"
O_cursor.executemany(command,[datetime])
O_cnxn.commit()
if len(RedYakult)!=0:
command = "INSERT INTO dbo.RedYakult(StockID,StockName,Market,Industry,Price,PricePercent) Values(?,?,?,?,?,?)"
O_cursor.executemany(command,RedYakult)
O_cnxn.commit()
if len(RedConfirmDucky)!=0:
command = "INSERT INTO dbo.RedDucky(StockID,StockName,Market,Industry,Price,PricePercent) Values(?,?,?,?,?,?)"
O_cursor.executemany(command,RedConfirmDucky)
O_cnxn.commit()
if len(RedDuckyYakult)!=0:
command = "INSERT INTO dbo.RedDuckyYakult(StockID,StockName,Market,Industry,Price,PricePercent) Values(?,?,?,?,?,?)"
O_cursor.executemany(command,RedDuckyYakult)
O_cnxn.commit()
if len(Green2Yakult)!=0:
command = "INSERT INTO dbo.Green2Yakult(StockID,StockName,Market,Industry,Price,PricePercent) Values(?,?,?,?,?,?)"
O_cursor.executemany(command,Green2Yakult)
O_cnxn.commit()
def main():
all_stock_info = []
global stock_kd,DuckyYakult,ConfirmDucky,Green2Yakult,datetime
stock_kd = []
RedDuckyYakult = []
RedConfirmDucky = []
RedYakult = []
Green2Yakult =[]
datetime = []
global Stocknum_q,dateid
now = dt.datetime.now()
datetime.append(now.strftime("%H")+':'+now.strftime("%M"))
print(datetime)
Stocknum_q = Queue()
command = "SELECT StockID FROM stockid"
cursor.execute(command)
# 取出上市上櫃股票
for (StockID,) in cursor:
Stocknum_q.put(StockID)
print('Load DB Finish...')
all_stock_info = multithreading(all_stock_info)#爬資料多呈緒
all_stock_info = RemoveSpace(all_stock_info)
dateid = all_stock_info[0][0]
WriteToDB(all_stock_info,dateid)
Stocknum_q = Queue()
command = "SELECT StockID FROM stockid"
cursor.execute(command)
# 取出上市上櫃股票
for (StockID,) in cursor:
Stocknum_q.put(StockID)
multithreading2(stock_kd,dateid,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult)
#本機MYSQL
WriteToDB(stock_kd,dateid)
#雲端MSSQL
UpLoadmssql(datetime,RedDuckyYakult,RedConfirmDucky,RedYakult,Green2Yakult)
if __name__ == '__main__':
main()
|
[
"noreply@github.com"
] |
DaLemon.noreply@github.com
|
4e54bcac7ab0d1397ec7dea3927c9d7894f8ebd0
|
74082dce295f1db67c5bae83e8c5cf1268b8be23
|
/Python-django-DepOA/DepOA/settings.py
|
7aa62d86db28eae2a846fb49cf61ce97a18da1ed
|
[] |
no_license
|
chu888chu888/Chuguangming.Django.HtmlTemplateCRMDemo
|
a6ff7914b7cfc15bb4ded2bb441fa51afbfc9610
|
37e113d9a8ee46ebf5c9f39bcbc8208aa3cb8203
|
refs/heads/master
| 2021-01-18T14:44:55.707071
| 2013-05-20T08:06:37
| 2013-05-20T08:06:37
| 10,168,564
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,603
|
py
|
# Django settings for DepOA project.
import os
DIRNAME=os.path.abspath(os.path.join(os.path.dirname(__file__),".."))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('depsast', 'depsast@thudep.org'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': DIRNAME+'/database.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-cn'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = DIRNAME+'/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = STATIC_URL+"grappelli/"
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'njk!z47dfaj%dg8+izrexrkx0yyd=szuhqxkzx)3htdv!0blg-'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'DepOA.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'DepOA.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(DIRNAME, 'templates').replace('\\','/'),
)
AUTH_PROFILE_MODULE='account.UserProfile'
LOGIN_URL='/login/'
LOGIN_REDIRECT_URL='/'
LOGOUT_URL='/logout/'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'grappelli',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
#'django.contrib.admindocs',
'account',
'doc',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
[
"chu999chu999@gmail.com"
] |
chu999chu999@gmail.com
|
ff60447409f1f60b961cd873e62239504e08e8e6
|
fc4c94d041a1510099c622fe4af553148e4d8440
|
/Sim/Models/Simulation.py
|
ad164cab7e46cf71b61d4fef2876f1cd7fb81f35
|
[] |
no_license
|
zl326/RaceSim
|
135b33aef4c47a6ce5a0eaff45a7dc55cd0d5e57
|
1efed8a7e173f997eb12be62d9ce2fbf5eb8aa6f
|
refs/heads/master
| 2020-07-03T17:33:40.845910
| 2019-10-22T11:10:22
| 2019-10-22T11:10:22
| 201,987,181
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 52,082
|
py
|
import pandas as pd
import yaml
import datetime
from astral import Location
import dateutil
import math
import numpy as np
from scipy.interpolate import griddata
import os
import copy
import pymongo
class Simulation:
kph2ms = 1/3.6
ms2kph = 3.6
rad2deg = 180.0/math.pi
deg2rad = math.pi/180.0
g = 9.80665
Ra = 287.05
Rw = 461.495
C2K = 273.15
K2C = -273.15
rads2RPM = 60/(2*math.pi)
RPM2rads = 2*math.pi/60
def __init__(self, settingsPath):
# Load settings
self.settingsPath = settingsPath
with open(settingsPath, 'r') as stream:
try:
self.settings = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
# Initialise MongoDB connection
if self.settings['weather']['fromMongo'] :
self.mongClient = pymongo.MongoClient('localhost', 27017)
db = self.mongClient['CUER']
self.db_weather = db['weather']
self.initData()
def initData(self):
self.getRoute()
self.getWeatherData()
self.getSolarProjectedAreaData()
self.getElectricalData()
self.updateCol(self.data, 'simName', self.settings['meta']['name'])
# Add meta data to data table
self.splitStints()
def getRoute(self):
self.data = pd.read_csv(self.settings['route']['routeFile'])
# Set the speed constraints along the route
self.updateCol(self.data, 'speedMin', 0)
self.updateCol(self.data, 'speedMax', 999)
iConstraint = 0
# Loop through all mesh points
for i in range(0, len(self.data)):
# Calculate heading using central difference
i1 = i
i2 = i
if i == 0:
i1 = i
i2 = i+1
elif i == (len(self.data)-1):
i1 = i-1
i2 = i
else:
i1 = i-1
i2 = i+1
d_long = self.data['longitude'][i2] - self.data['longitude'][i1]
d_lat = self.data['latitude'][i2] - self.data['latitude'][i1]
heading = math.atan2(d_long, d_lat)*self.rad2deg
heading = heading + (heading<0)*360
self.data.at[i, 'heading'] = heading
# Apply speed constraints
if iConstraint >= len(self.settings['route']['speedConstraints']):
# No more constraints
break
else:
if self.data.distance[i] >= self.settings['route']['speedConstraints'][iConstraint]['startDistance']:
if iConstraint < len(self.settings['route']['speedConstraints'])-1:
# This is not the last constraint
if self.data.distance[i] >= self.settings['route']['speedConstraints'][iConstraint+1]['startDistance']:
iConstraint += 1
# At this point we have the correct constraint identified
self.data.at[i, 'speedMin'] = self.settings['route']['speedConstraints'][iConstraint]['speedMin']
self.data.at[i, 'speedMax'] = self.settings['route']['speedConstraints'][iConstraint]['speedMax']
# Convert the locations list to a more user friendly dict
locations = self.settings['route']['locations']
self.locations = {}
for iLocation in range(0, len(locations)):
thisLocation = locations[iLocation]
self.locations[thisLocation['name']] = {}
self.locations[thisLocation['name']]['distance'] = thisLocation['distance']
def getSolarProjectedAreaData(self):
self.solarprojectedAreaX = pd.read_csv(self.settings['solar']['projectedAreaXFilePath'])
self.solarprojectedAreaY = pd.read_csv(self.settings['solar']['projectedAreaYFilePath'])
yRotationPattern = self.solarprojectedAreaY.rotation.to_numpy()
yRotation = np.array([])
xRotation = np.array([])
areaRatio = np.array([])
# Compile the full grid
for iXRotationValue in range(0, len(self.solarprojectedAreaX.rotation)):
xRotationValue = self.solarprojectedAreaX.rotation[iXRotationValue]
yRotation = np.concatenate((yRotation, yRotationPattern))
xRotation = np.concatenate((xRotation, yRotationPattern*0+xRotationValue))
areaRatioAtThisXRotation = self.solarprojectedAreaY.areaRatio.to_numpy() * self.solarprojectedAreaX.areaRatio[iXRotationValue]
areaRatio = np.concatenate((areaRatio, areaRatioAtThisXRotation))
self.solarXRotation = xRotation
self.solarYRotation = yRotation
self.solarAreaRatio = areaRatio
def getElectricalData(self):
self.efficiencyMotorController = pd.read_csv(self.settings['powertrain']['waveSculptorEfficiencyFilePath'])
self.batteryCellDischargeCurve = pd.read_csv(self.settings['battery']['cellDischargeCurveFilePath'])
# Initialise battery SOC
self.updateCol(self.data, 'car__rSOC', 1)
def getWeatherData(self):
print('Loading weather data...'.format())
if self.settings['weather']['fromCsv']:
self.weatherData = pd.read_csv(self.settings['weather']['filePath'])
for i in range(0, len(self.weatherData)):
# Convert wind direction cardinals to degrees
if isinstance(self.weatherData.windDirection[0], str):
self.weatherData.at[i, 'windDirectionDeg'] = self.settings['compass']['cardinal2deg'][self.weatherData.windDirection[i]]
# Convert wind speed into northerly and easterly components
self.weatherData.at[i, 'windCompN'] = math.cos(self.weatherData['windDirectionDeg'][i]*self.deg2rad)
self.weatherData.at[i, 'windCompE'] = math.sin(self.weatherData['windDirectionDeg'][i]*self.deg2rad)
# Convert to datetime
self.weatherData.at[i, 'datetime'] = dateutil.parser.parse(self.weatherData.time[i], dayfirst=True)
# Assign a distance to each location
self.weatherData.at[i, 'distance'] = self.locations[self.weatherData.location[i]]['distance']
elif self.settings['weather']['fromMongo']:
print('Fetching from... {}'.format(self.settings['time']['days'][0]['start'].timestamp()))
print('Fetching to... {}'.format(self.settings['time']['days'][-1]['end'].timestamp()))
self.weatherCursor = self.db_weather.find({
"_docType": "hourly",
"time": {
"$gte": self.settings['time']['days'][0]['start'].timestamp() - 1.0*3600,
"$lte": self.settings['time']['days'][-1]['end'].timestamp() + 1.0*3600
}
})
# Prepare mongo data for interpolation
distance = np.array([])
time = np.array([])
airTemp = np.array([])
airPressure = np.array([])
humidity = np.array([])
windSpeed = np.array([])
windGust = np.array([])
windDirection = np.array([])
windHeading = np.array([])
# airDensity = np.array([])
cloudCover = np.array([])
precipProbability = np.array([])
precipIntensity = np.array([])
precipIntensity = np.array([])
dewPoint = np.array([])
for doc in self.weatherCursor :
distance = np.append(distance, doc['_distance'])
time = np.append(time, doc['time'] + 9.5*3600)
airTemp = np.append(airTemp, doc['temperature'])
airPressure = np.append(airPressure, doc['pressure'] * 1E2)
humidity = np.append(humidity, doc['humidity'])
windSpeed = np.append(windSpeed, doc['windSpeed'] * self.ms2kph)
windGust = np.append(windGust, doc['windGust'] * self.ms2kph)
windDirection = np.append(windDirection, doc['windBearing'] - 180)
windHeading = np.append(windHeading, doc['windBearing'])
# airDensity = np.append(airDensity, doc[''])
cloudCover = np.append(cloudCover, doc['cloudCover'])
precipProbability = np.append(precipProbability, doc['precipProbability'])
precipIntensity = np.append(precipIntensity, doc['precipIntensity'])
dewPoint = np.append(dewPoint, doc['dewPoint'])
self.weather = {}
self.weather['d_min'] = np.min(distance)
d_max = np.max(distance)
self.weather['d_range'] = d_max - self.weather['d_min']
self.weather['d_norm'] = (distance - self.weather['d_min']) / self.weather['d_range']
self.weather['t_min'] = np.min(time)
t_max = np.max(time)
self.weather['t_range'] = t_max - self.weather['t_min']
self.weather['t_norm'] = (time - self.weather['t_min']) / self.weather['t_range']
self.weather['airTemp'] = airTemp
self.weather['airPressure'] = airPressure
self.weather['humidity'] = humidity
self.weather['windSpeed'] = windSpeed
self.weather['windGust'] = windGust
self.weather['windDirection'] = windDirection
self.weather['windHeading'] = windHeading
self.weather['cloudCover'] = cloudCover
self.weather['precipProbability'] = precipProbability
self.weather['precipIntensity'] = precipIntensity
self.weather['dewPoint'] = dewPoint
print('Loading weather data... Complete'.format())
def splitStints(self):
    """Partition self.data into per-stint DataFrames and derive per-stint metadata.

    For each stint defined in the route settings this slices the route table,
    computes distance/elevation deltas and inclination angles, resolves the
    stint's start/end days against the daily driving windows, and derives the
    time available (driving windows minus control-stop time) plus an initial
    constant-speed profile that would exactly use that time.
    """
    # Set the stints
    self.stints = self.settings['route']['stints']
    self.NStints = len(self.stints)
    for iStint in range(0, self.NStints):
        stintNumber = iStint+1
        startDistance = -1
        endDistance = -1
        # Stint cannot start before the car's current position
        startDistance = max(self.settings['initialConditions']['distance'], self.locations[self.stints[iStint]['startLocation']]['distance'])
        endDistance = self.locations[self.stints[iStint]['endLocation']]['distance']
        # Meta data
        self.stints[iStint]['isSensitivities'] = False
        self.stints[iStint]['nStint'] = stintNumber
        self.stints[iStint]['startDistance'] = startDistance
        self.stints[iStint]['endDistance'] = endDistance
        # Find the section of the table that contains the current stint
        inStint = ((self.data.distance > startDistance) & (self.data.distance <= endDistance)).values.tolist()
        if True in inStint:
            # Include one row before the first in-stint row so delta columns have a predecessor.
            # NOTE(review): if the first table row is already in-stint, startIndex becomes -1
            # and the iloc slice below would be wrong — presumably prevented upstream; confirm.
            startIndex = inStint.index(True) - 1
            endIndex = len(inStint) - inStint[::-1].index(True) - 1
            # Get the data that belongs to this stint
            self.stints[iStint]['data'] = self.data.iloc[startIndex:endIndex+1, :]
            # Reset the index
            self.stints[iStint]['data'] = self.stints[iStint]['data'].reset_index(drop=True)
            # Add some meta data
            self.stints[iStint]['stintLength'] = endDistance - startDistance
            self.stints[iStint]['meshPoints'] = endIndex+1-startIndex
            # Calculate additional route properties
            self.stints[iStint]['data'].at[0, 'd_distance'] = 0
            self.stints[iStint]['data'].at[0, 'd_elevation'] = 0
            self.stints[iStint]['data'].at[0, 'inclination_angle'] = 0
            self.stints[iStint]['data'].at[0, 'inclination_angle_deg'] = 0
            for i in range(1, len(self.stints[iStint]['data'])):
                # Set the distance between mesh locationss
                self.stints[iStint]['data'].at[i, 'd_distance'] = self.stints[iStint]['data'].loc[i, 'distance'] - self.stints[iStint]['data'].loc[i-1, 'distance']
                # Calculate change in elevation
                self.stints[iStint]['data'].at[i, 'd_elevation'] = self.stints[iStint]['data'].loc[i, 'elevation'] - self.stints[iStint]['data'].loc[i-1, 'elevation']
                # d_distance is in km and elevation in m, hence the 1e3 factor before atan
                self.stints[iStint]['data'].at[i, 'inclination_angle'] = math.atan(self.stints[iStint]['data'].at[i, 'd_elevation']/(self.stints[iStint]['data'].at[i, 'd_distance']*1e3))
                self.stints[iStint]['data'].at[i, 'inclination_angle_deg'] = self.rad2deg*math.atan(self.stints[iStint]['data'].at[i, 'd_elevation']/(self.stints[iStint]['data'].at[i, 'd_distance']*1e3))
            # Set the stint number (starting from 1)
            self.stints[iStint]['data'].insert(len(self.stints[iStint]['data'].columns), 'stintNumber', stintNumber)
            # Define the stint start time
            if self.settings['initialConditions']['time'] > self.settings['time']['days'][self.stints[iStint]['startDayDefault']-1]['start']:
                self.stints[iStint]['startTime'] = self.settings['initialConditions']['time']
            else:
                self.stints[iStint]['startTime'] = self.settings['time']['days'][self.stints[iStint]['startDayDefault']-1]['start']
            self.stints[iStint]['startDay'] = self.stints[iStint]['startDayDefault']
            for iDay in range(0, len(self.settings['time']['days'])):
                # Find the start day
                if (self.stints[iStint]['startTime'] >= self.settings['time']['days'][iDay]['start']) & (self.stints[iStint]['startTime'] < self.settings['time']['days'][iDay]['end']) :
                    self.stints[iStint]['startDay'] = iDay+1
                # Find the end day
                if (self.stints[iStint]['arrivalTime'] > self.settings['time']['days'][iDay]['start']) & (self.stints[iStint]['arrivalTime'] <= self.settings['time']['days'][iDay]['end']) :
                    self.stints[iStint]['endDay'] = iDay+1
            # Control stops
            self.stints[iStint]['NControlStops'] = len(self.stints[iStint]['controlStops'])
            # Calculate the time available
            # Time available = Total allowable time on road - time for control stops
            self.stints[iStint]['availableTime'] = datetime.timedelta(0) # Initialise time available
            for iDay in range(self.stints[iStint]['startDay']-1, self.stints[iStint]['endDay']):
                if iDay == self.stints[iStint]['startDay']-1:
                    # First day of stint
                    if self.settings['time']['days'][iDay]['end'] > self.stints[iStint]['startTime']:
                        self.stints[iStint]['availableTime'] += self.settings['time']['days'][iDay]['end'] - self.stints[iStint]['startTime']
                elif iDay == self.stints[iStint]['endDay']-1:
                    # Last day of stint
                    if self.settings['time']['days'][iDay]['start'] > self.stints[iStint]['startTime']:
                        self.stints[iStint]['availableTime'] += self.stints[iStint]['arrivalTime'] - self.settings['time']['days'][iDay]['start']
                    else:
                        self.stints[iStint]['availableTime'] += self.stints[iStint]['arrivalTime'] - self.stints[iStint]['startTime']
                else:
                    # Day between start and end days (not applicable for 2019)
                    if self.settings['time']['days'][iDay]['start'] > self.stints[iStint]['startTime']:
                        self.stints[iStint]['availableTime'] += self.settings['time']['days'][iDay]['end'] - self.settings['time']['days'][iDay]['start']
                    else:
                        self.stints[iStint]['availableTime'] += self.settings['time']['days'][iDay]['end'] - self.stints[iStint]['startTime']
            self.stints[iStint]['availableTime'] -= self.stints[iStint]['NControlStops']*datetime.timedelta(minutes=self.settings['time']['controlStops']['duration'])
            # Initialise the speed
            # averageSpeed [km/h] = stint length [km] / available driving time [h]
            # NOTE(review): .seconds drops whole-day components of availableTime —
            # fine while a stint fits within one day; confirm for multi-day stints.
            self.stints[iStint]['averageSpeed'] = self.stints[iStint]['stintLength'] / (self.stints[iStint]['availableTime'].seconds/3600)
            self.updateCol(self.stints[iStint]['data'], 'speed', self.stints[iStint]['averageSpeed'])
def runModels(self, stint):
    """Run the full physics pipeline for one stint.

    The order matters: the aero/mech/elec models consume the weather columns,
    and the final energy bookkeeping consumes the outputs of all of them.
    """
    model_steps = (
        self.getWeather,
        self.calculateAero,
        self.calculateMech,
        self.calculateElec,
        self.calculateSolar,
        self.calculateEnergy,
    )
    for step in model_steps:
        step(stint)
def calculateTime(self, stint):
    """Integrate speed over distance to timestamp every mesh point of the stint.

    Adds stationary time for control stops and driver changes, rolls the clock
    across the overnight gap when a day's driving window ends, and evaluates
    the sun elevation/azimuth at every point (consumed later by the solar
    model). Finishes by refreshing the stint's arrival-time delta.
    """
    # Calculates the time using the car speed as input
    self.updateCol(stint['data'], 'time', stint['startTime'])
    self.updateCol(stint['data'], 'day', stint['startDay'])
    self.updateCol(stint['data'], 'time_unix', stint['startTime'].timestamp())
    self.updateCol(stint['data'], 'd_time', 0)
    self.updateCol(stint['data'], 'd_timeDriving', 0)
    self.updateCol(stint['data'], 'solar__sunElevationAngle', 0)
    self.updateCol(stint['data'], 'solar__sunAzimuthAngle', 0)
    for i in range(0, len(stint['data'])):
        if i > 0:
            # Trapezoidal speed across the segment; d_distance [km] / speed [km/h] -> hours
            averageSpeed = 0.5*stint['data'].speed[i] + 0.5*stint['data'].speed[i-1]
            d_time = datetime.timedelta(hours=stint['data'].d_distance[i]/averageSpeed)
            d_timeDriving = d_time
            # Account for controls stops
            for iControlStop in range(0,stint['NControlStops']):
                if (stint['data'].distance[i-1] <= self.locations[stint['controlStops'][iControlStop]]['distance']) & (stint['data'].distance[i] > self.locations[stint['controlStops'][iControlStop]]['distance']):
                    d_time += datetime.timedelta(minutes=self.settings['time']['controlStops']['duration'])
            # Account for driver changes
            for iDriverChange in range(0,len(stint['driverChanges'])):
                if (stint['data'].distance[i-1] <= stint['driverChanges'][iDriverChange]) & (stint['data'].distance[i] > stint['driverChanges'][iDriverChange]):
                    d_time += datetime.timedelta(minutes=self.settings['time']['driverChange']['duration'])
            # Account for end of day
            if stint['data'].day[i] < len(self.settings['time']['days']):
                if stint['data'].at[i-1, 'time'] + d_time > self.settings['time']['days'][stint['data'].day[i]-1]['end']:
                    # Jump to the next day's start and bump the day number for the rest of the stint
                    d_time += self.settings['time']['days'][stint['data'].day[i]]['start'] - self.settings['time']['days'][stint['data'].day[i]-1]['end']
                    stint['data'].at[i:len(stint['data']), 'day'] = stint['data'].day[i]+1
            # Final step, perform the addition of time
            stint['data'].at[i, 'time'] = stint['data'].at[i-1, 'time'] + d_time
            stint['data'].at[i, 'time_unix'] = stint['data'].at[i, 'time'].timestamp()
            stint['data'].at[i, 'd_time'] = d_time
            stint['data'].at[i, 'd_timeDriving'] = d_timeDriving
            # Backfill i=0
            if i == 1:
                stint['data'].at[0, 'time_unix'] = stint['data'].at[0, 'time'].timestamp()
        ### CALCULATE POSITION OF SUN ###
        # Create the location object
        # NOTE(review): Location appears to come from the astral package — confirm
        # the import at the top of the file.
        l = Location()
        l.name = ''
        l.region = ''
        l.latitude = stint['data']['latitude'][i]
        l.longitude = stint['data']['longitude'][i]
        l.timezone = self.settings['time']['timezone']['name']
        l.elevation = 0
        stint['data'].at[i, 'solar__sunElevationAngle'] = l.solar_elevation(stint['data']['time'][i].to_pydatetime())
        stint['data'].at[i, 'solar__sunAzimuthAngle'] = l.solar_azimuth(stint['data']['time'][i].to_pydatetime())
    self.calculateArrivalDelta(stint)
def getWeather(self, stint):
    """Populate weather columns for every mesh point of the stint.

    Runs only on the main optimisation pass (sensitivity re-runs reuse the
    columns already present) and only on every Nth iteration as configured by
    settings['weather']['iterationsPerEvaluation'], since the 2-D interpolation
    is expensive. Source is either pre-loaded MongoDB forecast data or a CSV
    table; air density is then derived from humidity, dew point, temperature
    and pressure.
    """
    # Fetch the weather conditions for every point on the route at the specified time
    # NOTE(review): the key 'interation' looks like a typo of 'iteration', but it must
    # match wherever the counter is written — do not rename here in isolation.
    if (not stint['isSensitivities']) & (stint['interation']%self.settings['weather']['iterationsPerEvaluation']==1):
        # Default values
        self.updateCol(stint['data'], 'weather__airTemp', 30)
        self.updateCol(stint['data'], 'weather__airPressure', 101250)
        self.updateCol(stint['data'], 'weather__humidity', 0)
        self.updateCol(stint['data'], 'weather__windSpeed', 0)
        self.updateCol(stint['data'], 'weather__windDirection', 0)
        self.updateCol(stint['data'], 'weather__windHeading', 180)
        self.updateCol(stint['data'], 'weather__airDensity', 1.225)
        self.updateCol(stint['data'], 'weather__cloudCover', 0.0)
        self.updateCol(stint['data'], 'weather__dewPoint', 6.0)
        if self.settings['weather']['fromMongo']:
            # Normalise query coordinates with the same scaling used when the
            # Mongo data was loaded (1E-9 converts ns epoch to seconds)
            d_query_norm = (stint['data'].distance.to_numpy() - self.weather['d_min'])/self.weather['d_range']
            t_query_norm = (stint['data'].time.astype(np.int64).to_numpy() * 1E-9 - self.weather['t_min'])/self.weather['t_range']
            self.updateCol(stint['data'], 'weather__d_query_norm', d_query_norm)
            self.updateCol(stint['data'], 'weather__t_query_norm', t_query_norm)
            print('Weather interp...')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['airTemp'], d_query_norm, t_query_norm, 'weather__airTemp')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['airPressure'], d_query_norm, t_query_norm, 'weather__airPressure')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['humidity'], d_query_norm, t_query_norm, 'weather__humidity')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['windSpeed'], d_query_norm, t_query_norm, 'weather__windSpeed')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['windGust'], d_query_norm, t_query_norm, 'weather__windGust')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['windDirection'], d_query_norm, t_query_norm, 'weather__windDirection')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['windHeading'], d_query_norm, t_query_norm, 'weather__windHeading')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['cloudCover'], d_query_norm, t_query_norm, 'weather__cloudCover')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['precipProbability'], d_query_norm, t_query_norm, 'weather__precipProbability')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['precipIntensity'], d_query_norm, t_query_norm, 'weather__precipIntensity')
            self.getWeather_interpolate(stint['data'], self.weather['d_norm'], self.weather['t_norm'], self.weather['dewPoint'], d_query_norm, t_query_norm, 'weather__dewPoint')
            print('Weather interp... Complete')
        elif self.settings['weather']['fromCsv']:
            # Normalise the distance and time so that the interpolation algorithm is well conditioned
            d = self.weatherData.distance.to_numpy()
            d_min = min(d)
            d_max = max(d)
            d_range = d_max - d_min
            d_norm = (d - d_min)/d_range
            t = self.weatherData.datetime.astype(np.int64).to_numpy()
            t_min = min(t)
            t_max = max(t)
            t_range = t_max - t_min
            t_norm = (t - t_min)/t_range
            d_query_norm = (stint['data'].distance.to_numpy() - d_min)/d_range
            t_query_norm = (stint['data'].time.astype(np.int64).to_numpy() - t_min)/t_range
            self.updateCol(stint['data'], 'weather__d_query_norm', d_query_norm)
            self.updateCol(stint['data'], 'weather__t_query_norm', t_query_norm)
            # Interpolate the data for each quantity of interest
            self.getWeather_interpolate(stint['data'], d_norm, t_norm, self.weatherData.airTemp.to_numpy(), d_query_norm, t_query_norm, 'weather__airTemp')
            self.getWeather_interpolate(stint['data'], d_norm, t_norm, self.weatherData.airPressure.to_numpy(), d_query_norm, t_query_norm, 'weather__airPressure')
            self.getWeather_interpolate(stint['data'], d_norm, t_norm, self.weatherData.humidity.to_numpy(), d_query_norm, t_query_norm, 'weather__humidity')
            self.getWeather_interpolate(stint['data'], d_norm, t_norm, self.weatherData.windSpeed.to_numpy(), d_query_norm, t_query_norm, 'weather__windSpeed')
            self.getWeather_interpolate(stint['data'], d_norm, t_norm, self.weatherData.windCompN.to_numpy(), d_query_norm, t_query_norm, 'weather__windCompN')
            self.getWeather_interpolate(stint['data'], d_norm, t_norm, self.weatherData.windCompE.to_numpy(), d_query_norm, t_query_norm, 'weather__windCompE')
            # Change limits of direction to 0-360
            windDirection = self.rad2deg*np.arctan2(stint['data']['weather__windCompE'].to_numpy(), stint['data']['weather__windCompN'].to_numpy())
            windDirectionClean = windDirection + (windDirection<0)*360
            windHeading = windDirection + 180
            self.updateCol(stint['data'], 'weather__windDirection', windDirectionClean )
            self.updateCol(stint['data'], 'weather__windHeading', windHeading )
            # Dew point
            # https://www.omnicalculator.com/physics/dew-point
            humidity = stint['data']['weather__humidity'].to_numpy()
            airTemp = stint['data']['weather__airTemp'].to_numpy()
            a = 17.62
            b = 243.12
            # Magnus formula; assumes humidity is relative humidity in 0-1 — TODO confirm
            alpha = np.log(humidity) + a*airTemp / (b + airTemp)
            dewPoint = (b * alpha) / (a - alpha)
            self.updateCol(stint['data'], 'weather__dewPoint', dewPoint)
        if self.settings['weather']['fromMongo'] | self.settings['weather']['fromCsv']:
            # Calculate air density
            # https://www.omnicalculator.com/physics/air-density
            humidity = stint['data']['weather__humidity'].to_numpy()
            dewPoint = stint['data']['weather__dewPoint'].to_numpy()
            airTemp = stint['data']['weather__airTemp'].to_numpy()
            # Saturation vapour pressure (Tetens-style), partial pressures, then
            # density as the sum of the dry-air and water-vapour components
            pressureVapourSaturation = 6.1078 * 10**(7.5*dewPoint /(dewPoint + 237.3))
            pressureVapourPartial = pressureVapourSaturation * humidity
            pressureDryPartial = stint['data']['weather__airPressure'].to_numpy()
            rhoVapour = pressureVapourPartial / (self.Rw * (airTemp + self.C2K))
            rhoDry = pressureDryPartial / (self.Ra * (airTemp + self.C2K))
            rho = rhoVapour + rhoDry
            self.updateCol(stint['data'], 'weather__airDensity', rho)
            self.updateCol(stint['data'], 'weather__airDensity_dry', rhoDry)
            self.updateCol(stint['data'], 'weather__airDensity_wet', rhoVapour)
def getWeather_interpolate(self, df, d, t, values, d_query, t_query, paramName):
    """Linearly interpolate one weather quantity over normalised (distance, time)
    sample points and write the result into df[paramName]."""
    interpolated = griddata((d, t), values, (d_query, t_query), method='linear')
    self.updateCol(df, paramName, interpolated)
def getSolarProjectedArea(self, xRotation, yRotation):
    """Return the solar-array projected-area ratio for the given rotation angles.

    Linearly interpolates the pre-computed (solarXRotation, solarYRotation) ->
    solarAreaRatio map. Bug fix: the interpolated value was computed but never
    returned, so the function always yielded None.
    """
    return griddata((self.solarXRotation, self.solarYRotation), self.solarAreaRatio, (xRotation, yRotation), method='linear')
def calculateAero(self, stint):
    """Compute aerodynamic drag force/power and integrate drag energy along the stint.

    Wind is decomposed into tail/cross components relative to the car heading;
    drag uses the forward airspeed (car speed minus tailwind) and the local air
    density. Energy is integrated with the trapezoidal rule over distance.
    """
    CdA = self.settings['aero']['CdA']
    # Calulate wind effect
    self.updateCol(stint['data'], 'aero__headingDeltaCarWind', stint['data']['weather__windHeading'] - stint['data']['heading'])
    self.updateCol(stint['data'], 'aero__vTailwind', (stint['data']['weather__windSpeed'].to_numpy() * np.cos(stint['data']['aero__headingDeltaCarWind'].to_numpy() * self.deg2rad) ) )
    self.updateCol(stint['data'], 'aero__vCrossWind', (stint['data']['weather__windSpeed'].to_numpy() * np.sin(stint['data']['aero__headingDeltaCarWind'].to_numpy() * self.deg2rad) ) )
    self.updateCol(stint['data'], 'aero__airSpeedForward', (stint['data']['speed'].to_numpy() - stint['data']['aero__vTailwind'].to_numpy() ) )
    # Calculate forces
    # Drag = CdA * 0.5 * rho * v^2 with v converted from km/h to m/s.
    # NOTE(review): squaring discards the sign when the tailwind exceeds the car
    # speed — drag would still come out positive; confirm this is intended.
    self.updateCol(stint['data'], 'aero__dragForce', CdA*0.5*stint['data']['weather__airDensity'].to_numpy()*(stint['data']['aero__airSpeedForward'].to_numpy()*self.kph2ms)**2)
    self.updateCol(stint['data'], 'aero__dragPower', stint['data'].aero__dragForce*stint['data'].speed*self.kph2ms)
    self.updateCol(stint['data'], 'aero__d_dragEnergy', 0)
    self.updateCol(stint['data'], 'aero__dragEnergy', 0)
    for i in range(1, len(stint['data'])):
        # Trapezoidal integration: average force x segment length (km -> m)
        averageDragForce = 0.5*stint['data'].aero__dragForce[i] + 0.5*stint['data'].aero__dragForce[i-1]
        stint['data'].at[i, 'aero__d_dragEnergy'] = averageDragForce*stint['data'].d_distance[i]*1000
        stint['data'].at[i, 'aero__dragEnergy'] = stint['data'].at[i-1, 'aero__dragEnergy'] + stint['data'].at[i, 'aero__d_dragEnergy']
def calculateMech(self, stint):
    """Compute mechanical resistance (tyre, chassis, gravity) forces/powers and
    integrate the corresponding energies along the stint (trapezoidal rule).

    Bug fix: the cumulative gravity energy was written to a misspelled column
    ('mech__gravitysRollingResistanceEnergy'), so the real
    'mech__gravityRollingResistanceEnergy' column never accumulated and the
    read side always saw zero.
    """
    ### TYRES ###
    Crr = self.settings['tyres']['Crr']
    carMass = self.settings['car']['mass']
    carWeight = carMass*self.g
    # Weight components normal/parallel to the local road slope
    self.updateCol(stint['data'], 'car__ForceNormal', carWeight*np.cos(stint['data'].inclination_angle.to_numpy()))
    self.updateCol(stint['data'], 'car__ForceLongitudinal', carWeight*np.sin(stint['data'].inclination_angle.to_numpy()))
    self.updateCol(stint['data'], 'mech__tyreRollingResistanceForce', Crr*stint['data'].car__ForceNormal)
    self.updateCol(stint['data'], 'mech__tyreRollingResistancePower', stint['data'].mech__tyreRollingResistanceForce*stint['data'].speed*self.kph2ms)
    self.updateCol(stint['data'], 'mech__d_tyreRollingResistanceEnergy', 0)
    self.updateCol(stint['data'], 'mech__tyreRollingResistanceEnergy', 0)
    ### CHASSIS ###
    # Chassis drag is currently modelled as zero (placeholder)
    self.updateCol(stint['data'], 'mech__chassisRollingResistanceForce', 0)
    self.updateCol(stint['data'], 'mech__chassisRollingResistancePower', stint['data'].mech__chassisRollingResistanceForce*stint['data'].speed*self.kph2ms)
    self.updateCol(stint['data'], 'mech__d_chassisRollingResistanceEnergy', 0)
    self.updateCol(stint['data'], 'mech__chassisRollingResistanceEnergy', 0)
    ### GRAVITY ###
    self.updateCol(stint['data'], 'mech__gravityResistanceForce', self.settings['car']['mass'] * self.g * np.sin(stint['data']['inclination_angle'].to_numpy()) )
    self.updateCol(stint['data'], 'mech__gravityRollingResistancePower', stint['data'].mech__gravityResistanceForce*stint['data'].speed*self.kph2ms)
    self.updateCol(stint['data'], 'mech__d_gravityRollingResistanceEnergy', 0)
    self.updateCol(stint['data'], 'mech__gravityRollingResistanceEnergy', 0)
    ### TOTAL ###
    self.updateCol(stint['data'], 'mech__totalResistiveForce', stint['data'].mech__tyreRollingResistanceForce + stint['data'].mech__chassisRollingResistanceForce + stint['data'].mech__gravityResistanceForce)
    self.updateCol(stint['data'], 'mech__totalResistivePower', stint['data'].mech__totalResistiveForce*stint['data'].speed*self.kph2ms)
    self.updateCol(stint['data'], 'mech__d_totalResistiveEnergy', 0)
    self.updateCol(stint['data'], 'mech__totalResistiveEnergy', 0)
    ### LOOP ###
    # Trapezoidal integration: average force over the segment x length (km -> m)
    for i in range(1, len(stint['data'])):
        mech__tyreRollingResistanceForce_avg = 0.5*stint['data'].mech__tyreRollingResistanceForce[i] + 0.5*stint['data'].mech__tyreRollingResistanceForce[i-1]
        stint['data'].at[i, 'mech__d_tyreRollingResistanceEnergy'] = mech__tyreRollingResistanceForce_avg*stint['data'].d_distance[i]*1000
        stint['data'].at[i, 'mech__tyreRollingResistanceEnergy'] = stint['data'].at[i-1, 'mech__tyreRollingResistanceEnergy'] + stint['data'].at[i, 'mech__d_tyreRollingResistanceEnergy']
        mech__chassisRollingResistanceForce_avg = 0.5*stint['data'].mech__chassisRollingResistanceForce[i] + 0.5*stint['data'].mech__chassisRollingResistanceForce[i-1]
        stint['data'].at[i, 'mech__d_chassisRollingResistanceEnergy'] = mech__chassisRollingResistanceForce_avg*stint['data'].d_distance[i]*1000
        stint['data'].at[i, 'mech__chassisRollingResistanceEnergy'] = stint['data'].at[i-1, 'mech__chassisRollingResistanceEnergy'] + stint['data'].at[i, 'mech__d_chassisRollingResistanceEnergy']
        mech__gravityResistanceForce_avg = 0.5*stint['data'].mech__gravityResistanceForce[i] + 0.5*stint['data'].mech__gravityResistanceForce[i-1]
        stint['data'].at[i, 'mech__d_gravityRollingResistanceEnergy'] = mech__gravityResistanceForce_avg*stint['data'].d_distance[i]*1000
        # Fixed: accumulate into the correctly-spelled column
        stint['data'].at[i, 'mech__gravityRollingResistanceEnergy'] = stint['data'].at[i-1, 'mech__gravityRollingResistanceEnergy'] + stint['data'].at[i, 'mech__d_gravityRollingResistanceEnergy']
        mech__totalResistiveForce_avg = 0.5*stint['data'].mech__totalResistiveForce[i] + 0.5*stint['data'].mech__totalResistiveForce[i-1]
        stint['data'].at[i, 'mech__d_totalResistiveEnergy'] = mech__totalResistiveForce_avg*stint['data'].d_distance[i]*1000
        stint['data'].at[i, 'mech__totalResistiveEnergy'] = stint['data'].at[i-1, 'mech__totalResistiveEnergy'] + stint['data'].at[i, 'mech__d_totalResistiveEnergy']
def calculateElec(self, stint):
    """Model the electrical drivetrain losses along the stint.

    Per mesh point:
      1. Motor torque/speed from the combined aero + mechanical resistive forces.
      2. Motor copper and eddy-current losses via a fixed-point iteration on the
         winding temperature (remanence, resistance and losses are coupled).
      3. Motor-controller losses from an interpolated efficiency map.
      4. Battery ohmic losses from the pack current at the SOC-dependent voltage.
    The per-point total electrical loss power is then integrated over driving time.

    Bug fix: the total loss power previously added the scalars Pc + Pe left over
    from the LAST row of the temperature loop to every row; it now uses the
    per-row 'elec__motorPowerLossTotal' column.
    """
    ### MOTOR ###
    # Torque and speed
    rollingRadius = self.settings['tyres']['rollingRadius']
    NMotors = self.settings['car']['NMotors']
    resistiveForcesCombined = stint['data']['aero__dragForce'].to_numpy() + stint['data']['mech__totalResistiveForce'].to_numpy()
    motorTorque = rollingRadius * resistiveForcesCombined / NMotors
    motorSpeed = stint['data']['speed'].to_numpy() * self.kph2ms / rollingRadius
    motorPowerTractive = motorTorque * motorSpeed
    self.updateCol(stint['data'], 'elec__motorTorque', motorTorque)
    self.updateCol(stint['data'], 'elec__motorSpeed', motorSpeed)
    self.updateCol(stint['data'], 'elec__motorSpeedRPM', motorSpeed*self.rads2RPM)
    self.updateCol(stint['data'], 'elec__motorPowerTractive', motorPowerTractive)
    # Temperature: fixed-point iteration per mesh point until the winding
    # temperature estimate settles to within 1 K
    for i in range(0, len(stint['data'])):
        Tw = 323 # Approx winding temp, initial condition [K]
        Tw_err = np.inf
        while Tw_err > 1 :
            Tm = 0.5*(stint['data']['weather__airTemp'][i]+self.C2K + Tw) # Magnet temp
            B = 1.32-1.2E-3 * (Tm - 293) # Magnet remanence
            i_rms = 0.561*B*stint['data']['elec__motorTorque'][i] # RMS per phase motor current
            R = 0.0575 * (1 + 0.0039*(Tw - 293)) # Per phase motor winding resistance
            Pc = 3*i_rms**2*R # Total motor winding i2R copper loss
            Pe = (9.602E-6 * (B*stint['data']['elec__motorSpeed'][i])**2) / R # Total motor eddy current loss
            Tw_new = 0.455*(Pc + Pe) + stint['data']['weather__airTemp'][i]+self.C2K # New estimate for motor winding temperature
            Tw_err = np.abs(Tw_new - Tw)
            Tw = Tw_new
        stint['data'].at[i, 'elec__motorTempWinding'] = Tw
        stint['data'].at[i, 'elec__motorTempMagnet'] = Tm
        stint['data'].at[i, 'elec__motorMagnetRemanence'] = B
        stint['data'].at[i, 'elec__motorCurrentPerPhase'] = i_rms
        stint['data'].at[i, 'elec__motorResistanceWinding'] = R
        stint['data'].at[i, 'elec__motorPowerWinding'] = Pc
        stint['data'].at[i, 'elec__motorPowerEddyCurrent'] = Pe
        stint['data'].at[i, 'elec__motorPowerLossTotal'] = Pc + Pe
    powerMotorTotal = motorPowerTractive + stint['data']['elec__motorPowerLossTotal'].to_numpy()
    self.updateCol(stint['data'], 'elec__motorPowerTotal', powerMotorTotal)
    ### MOTOR CONTROLLER ###
    efficiencyMotorController = griddata((self.efficiencyMotorController.motorSpeed, self.efficiencyMotorController.motorTorque), self.efficiencyMotorController.efficiency, (stint['data']['elec__motorSpeed'].to_numpy(), stint['data']['elec__motorTorque'].to_numpy()), method='linear')
    motorControllerPowerLoss = (1 / efficiencyMotorController - 1) * powerMotorTotal
    self.updateCol(stint['data'], 'elec__efficiencyMotorController', efficiencyMotorController)
    self.updateCol(stint['data'], 'elec__motorControllerPowerLoss', motorControllerPowerLoss)
    ### BATTERY ###
    powerDemand = powerMotorTotal + motorControllerPowerLoss
    cellVoltage = griddata(self.batteryCellDischargeCurve.rSOC, self.batteryCellDischargeCurve.voltage, stint['data']['car__rSOC'].to_numpy(), method='linear')
    packVoltage = cellVoltage * self.settings['battery']['NCellsSeries']
    cellResistance = self.settings['battery']['resistanceInternalCell']
    packResistance = cellResistance / self.settings['battery']['NCellsParallel'] * self.settings['battery']['NCellsSeries']
    packCurrent = powerDemand / packVoltage
    packPowerLoss = packCurrent**2 * packResistance
    self.updateCol(stint['data'], 'elec__batteryCellVoltage', cellVoltage)
    self.updateCol(stint['data'], 'elec__batteryPackVoltage', packVoltage)
    self.updateCol(stint['data'], 'elec__batteryPackCurrent', packCurrent)
    self.updateCol(stint['data'], 'elec__batteryPackPowerLoss', packPowerLoss)
    ### TOTAL ###
    # Per-row motor losses (NOT the loop-leftover scalars Pc/Pe) + controller + pack
    motorPowerLossTotal = stint['data']['elec__motorPowerLossTotal'].to_numpy()
    self.updateCol(stint['data'], 'elec__totalLossesPower', motorPowerLossTotal + motorControllerPowerLoss + packPowerLoss)
    self.updateCol(stint['data'], 'elec__d_totalLossesEnergy', 0)
    self.updateCol(stint['data'], 'elec__totalLossesEnergy', 0)
    for i in range(1, len(stint['data'])):
        # Trapezoidal integration of loss power over driving time [s]
        power_avg = 0.5*stint['data'].elec__totalLossesPower[i] + 0.5*stint['data'].elec__totalLossesPower[i-1]
        stint['data'].at[i, 'elec__d_totalLossesEnergy'] = power_avg*stint['data']['d_timeDriving'][i].seconds
        stint['data'].at[i, 'elec__totalLossesEnergy'] = stint['data'].at[i-1, 'elec__totalLossesEnergy'] + stint['data'].at[i, 'elec__d_totalLossesEnergy']
def calculateSolar(self, stint):
    """Compute the solar power captured by the array at every mesh point.

    Converts sun azimuth/elevation into array rotation angles relative to the
    car, interpolates the pre-computed projected-area map, derates nominal
    irradiance by a cubic cloud-cover factor, and integrates captured energy
    over elapsed time with the trapezoidal rule.
    """
    # Sun azimuth expressed relative to the car's heading [rad]
    sunAzimuthRelativeCar = (stint['data']['solar__sunAzimuthAngle'].to_numpy() - stint['data']['heading'].to_numpy()) * self.deg2rad
    # sunAzimuthRelativeCar[sunAzimuthRelativeCar<0] = sunAzimuthRelativeCar[sunAzimuthRelativeCar<0] + math.pi
    sunElevation = stint['data']['solar__sunElevationAngle'].to_numpy() * self.deg2rad
    # Map sun direction to equivalent array rotations about the car's X and Y axes [deg]
    temp = np.arctan( np.sin(sunElevation) / (np.cos(sunElevation) * np.sin(sunAzimuthRelativeCar)) ) * self.rad2deg
    rotationX = -np.sign(temp)*90 + temp
    temp2 = np.arctan( np.sin(sunElevation) / (np.cos(sunElevation) * np.cos(sunAzimuthRelativeCar)) ) * self.rad2deg
    rotationY = -np.sign(temp2)*90 + temp2
    self.updateCol(stint['data'], 'solar__sunAzimuthRelativeCar', sunAzimuthRelativeCar*self.rad2deg)
    self.updateCol(stint['data'], 'solar__rotationX', rotationX)
    self.updateCol(stint['data'], 'solar__rotationY', rotationY)
    # Projected area from the pre-computed rotation map (X rotation uses symmetry via abs)
    projectedAreaRatio = griddata((self.solarXRotation, self.solarYRotation), self.solarAreaRatio, (np.abs(rotationX), rotationY), method='linear')
    projectedArea = projectedAreaRatio * self.settings['solar']['NCells'] * self.settings['solar']['areaPerCell'] * self.settings['solar']['ratioProjectedFlat']
    self.updateCol(stint['data'], 'solar__projectedAreaRatio', projectedAreaRatio)
    self.updateCol(stint['data'], 'solar__projectedArea', projectedArea)
    cloudCover = stint['data']['weather__cloudCover'].to_numpy()
    irradianceNominal = self.settings['solar']['irradianceNominal']
    # Cloud derating factor (1 - 0.75*c^3) applied to nominal irradiance
    powerIncidentOnArray = irradianceNominal * (1 - 0.75*cloudCover**3) * projectedArea
    powerCapturedArray = powerIncidentOnArray * self.settings['solar']['efficiencyEncapsulation'] * self.settings['solar']['efficiencyCell']
    self.updateCol(stint['data'], 'solar__powerIncidentOnArray', powerIncidentOnArray)
    self.updateCol(stint['data'], 'solar__powerCapturedArray', powerCapturedArray)
    self.updateCol(stint['data'], 'solar__d_energyCapturedArray', 0)
    self.updateCol(stint['data'], 'solar__energyCapturedArray', 0)
    for i in range(1, len(stint['data'])):
        # Trapezoidal integration of captured power over elapsed (wall-clock) time
        power_avg = 0.5*stint['data'].solar__powerCapturedArray[i] + 0.5*stint['data'].solar__powerCapturedArray[i-1]
        delta_time = stint['data'].time[i] - stint['data'].time[i-1]
        stint['data'].at[i, 'solar__d_energyCapturedArray'] = power_avg*delta_time.seconds
        stint['data'].at[i, 'solar__energyCapturedArray'] = stint['data'].at[i-1, 'solar__energyCapturedArray'] + stint['data'].at[i, 'solar__d_energyCapturedArray']
def calculateEnergy(self, stint):
    """Aggregate the per-source power/energy columns into car totals and derive
    the battery state of charge (absolute and relative)."""
    df = stint['data']
    # Total demand = aero drag + mechanical resistance + electrical losses
    power_used = df.aero__dragPower + df.mech__totalResistivePower + df.elec__totalLossesPower
    d_energy_used = df.aero__d_dragEnergy + df.mech__d_totalResistiveEnergy + df.elec__d_totalLossesEnergy
    energy_used = df.aero__dragEnergy + df.mech__totalResistiveEnergy + df.elec__totalLossesEnergy
    self.updateCol(df, 'car__powerUsed', power_used)
    self.updateCol(df, 'car__d_energyUsed', d_energy_used)
    self.updateCol(df, 'car__energyUsed', energy_used)
    # Net solar gain relative to usage
    self.updateCol(df, 'car__SOC_Delta', df.solar__energyCapturedArray - df.car__energyUsed)
    self.updateCol(df, 'car__powerDelta', df.solar__powerCapturedArray - df.car__powerUsed)
    # Absolute SOC from the stint's starting charge, then normalised by capacity
    self.updateCol(df, 'car__SOC', stint['SOCInitial'] + df.car__SOC_Delta.to_numpy())
    self.updateCol(df, 'car__rSOC', df.car__SOC.to_numpy() / self.settings['battery']['capacity'])
def calculateSensitivities(self, stint):
    """Estimate d(power)/d(speed) and d(energy)/d(speed) per mesh point.

    Finite-difference approach: re-run the full model chain on a deep copy of
    the stint whose speed profile is bumped by a small perturbation, then store
    the per-km/h deltas in 'sens__powerPerKph' / 'sens__energyPerKph'.
    """
    dv = 0.1  # speed perturbation [km/h]
    perturbed = copy.deepcopy(stint)
    # Flag the copy so getWeather skips the expensive re-fetch on this pass
    perturbed['isSensitivities'] = True
    perturbed['data'].speed = perturbed['data'].speed + dv
    self.runModels(perturbed)
    # Finite-difference sensitivities per km/h of speed change
    self.updateCol(stint['data'], 'sens__powerPerKph', (perturbed['data'].car__powerUsed - stint['data'].car__powerUsed)/dv)
    self.updateCol(stint['data'], 'sens__energyPerKph', (perturbed['data'].car__d_energyUsed - stint['data'].car__d_energyUsed)/dv)
def adjustSpeed(self, stint):
    """One optimiser step: nudge the speed profile toward equal marginal power cost.

    Uses the power-per-kph sensitivities to decide where speed is cheapest to
    add (when arriving late) or most expensive to keep (when early or the
    sensitivities are unbalanced), weights those points, and moves speed by an
    adaptive step size while honouring the per-point speedMin/speedMax limits.

    Returns a short tag describing the change made: '+Speed', '-Speed' or ''.
    """
    changesMade = ''
    # Correct speeds if they violate constraint limit
    self.updateCol(stint['data'], 'speed', pd.DataFrame([stint['data'].speed, stint['data'].speedMin]).max())
    self.updateCol(stint['data'], 'speed', pd.DataFrame([stint['data'].speed, stint['data'].speedMax]).min())
    # Determine if speed is at the constraint limit
    self.updateCol(stint['data'], 'onSpeedMin', stint['data'].speed - stint['data'].speedMin <= 0)
    self.updateCol(stint['data'], 'onSpeedMax', stint['data'].speed - stint['data'].speedMax >= 0)
    # Sensitivity deltas relative to the extremes among still-adjustable points
    self.updateCol(stint['data'], 'sens_powerPerKphDeltaToMax', stint['data'].sens__powerPerKph - stint['data'].loc[~stint['data'].onSpeedMax, ['sens__powerPerKph']].max().to_list())
    self.updateCol(stint['data'], 'sens_powerPerKphDeltaToMin', stint['data'].sens__powerPerKph - stint['data'].loc[~stint['data'].onSpeedMin, ['sens__powerPerKph']].min().to_list())
    # Gate the power sensitivity to speed by whether it's still possible to change the speed there
    self.updateCol(stint['data'], 'sens_powerPerKphDeltaToMax_gated', stint['data'].sens_powerPerKphDeltaToMax * (~stint['data'].onSpeedMax).astype(int))
    self.updateCol(stint['data'], 'sens_powerPerKphDeltaToMin_gated', stint['data'].sens_powerPerKphDeltaToMin * (~stint['data'].onSpeedMin).astype(int))
    # Calculate weightings to use for deciding how much speed to add or subtract at each location
    if stint['data'].sens_powerPerKphDeltaToMax_gated.sum() != 0:
        weightAdd = stint['data'].sens_powerPerKphDeltaToMax_gated / stint['data'].sens_powerPerKphDeltaToMax_gated.sum()
        weightSubtract = stint['data'].sens_powerPerKphDeltaToMin_gated / stint['data'].sens_powerPerKphDeltaToMin_gated.sum()
    else:
        # Degenerate case: no gated sensitivity spread — distribute the change uniformly
        weightAdd = 1/len(stint['data']) + stint['data'].sens_powerPerKphDeltaToMax_gated*0
        weightSubtract = 1/len(stint['data']) + stint['data'].sens_powerPerKphDeltaToMax_gated*0
    # Adjust the weightings so that only the most weighted points get adjusted
    convAggro = self.settings['simulation']['convergenceAggressiveness']
    weightAddProcessed = weightAdd * (weightAdd >= (weightAdd.max() - (weightAdd.max()-weightAdd.min())*convAggro))
    weightSubtractProcessed = weightSubtract * (weightSubtract >= (weightSubtract.max() - (weightSubtract.max()-weightSubtract.min())*convAggro))
    print('weightAddProcessed: {} entries'.format((weightAddProcessed>0).sum()))
    print('weightSubtractProcessed: {} entries'.format((weightSubtractProcessed>0).sum()))
    self.updateCol(stint['data'], 'sens_powerPerKph_weightAdd', weightAddProcessed)
    self.updateCol(stint['data'], 'sens_powerPerKph_weightSubtract', weightSubtractProcessed)
    self.calculateArrivalDelta(stint)
    stepSize = np.nan
    # Check if we are too slow to achieve the arrival time
    if stint['arrivalTimeDelta'] > self.settings['simulation']['arrivalTimeTolerance'] :
        # Increase speed at cheap locations
        # Step size blends the sensitivity spread with (arrival delta)^2, floored at the configured minimum
        stepSize = max(min(50,0.6*stint['data'].sens_powerPerKphDeltaToMin_gated.max()), min(2, 0.001*stint['arrivalTimeDelta']**2), self.settings['simulation']['minStepSizeSpeedAdd'])
        # Set new speed
        self.updateCol(stint['data'], 'speed', stint['data'].speed + stepSize*stint['data'].sens_powerPerKph_weightAdd)
        # Apply speed constraints
        self.updateCol(stint['data'], 'speed', pd.DataFrame([stint['data'].speed, stint['data'].speedMin]).max())
        self.updateCol(stint['data'], 'speed', pd.DataFrame([stint['data'].speed, stint['data'].speedMax]).min())
        changesMade = '+Speed'
    elif (stint['arrivalTimeDelta'] < -self.settings['simulation']['arrivalTimeTolerance']) | (stint['data'].sens_powerPerKphDeltaToMin_gated.max() > self.settings['simulation']['powerSensitivityTolerance']):
        # Decrease speed at expensive locations
        stepSize = max(min(50,stint['data'].sens_powerPerKphDeltaToMin_gated.max()), min(2, 0.001*stint['arrivalTimeDelta']**2), self.settings['simulation']['minStepSizeSpeedSubtract'])
        # Set new speed
        if stint['arrivalTimeDelta'] > 0:
            self.updateCol(stint['data'], 'speed', stint['data'].speed + stepSize*stint['data'].sens_powerPerKph_weightAdd)
            changesMade = '+Speed'
        else:
            self.updateCol(stint['data'], 'speed', stint['data'].speed - stepSize*stint['data'].sens_powerPerKph_weightSubtract)
            changesMade = '-Speed'
        # Apply speed constraints
        self.updateCol(stint['data'], 'speed', pd.DataFrame([stint['data'].speed, stint['data'].speedMin]).max())
        self.updateCol(stint['data'], 'speed', pd.DataFrame([stint['data'].speed, stint['data'].speedMax]).min())
    print('stepSize: {}'.format(stepSize))
    return changesMade
def calculateArrivalDelta(self, stint):
    """Store the signed arrival-time error of a stint, in seconds.

    Positive means the simulated stint finishes after the scheduled
    ``stint['arrivalTime']``; negative means it finishes early.

    BUG FIX: the original used ``timedelta.seconds``, which silently
    drops the ``days`` component of the difference (being 25 h late was
    reported as 1 h).  ``total_seconds()`` measures the full, signed
    difference, which also removes the need for a separate early/late
    branch.
    """
    actual = stint['data'].time.iloc[-1]    # simulated arrival = last sample time
    scheduled = stint['arrivalTime']
    stint['arrivalTimeDelta'] = (actual - scheduled).total_seconds()
def combineStints(self):
    """Concatenate the per-stint result frames into ``self.data``.

    Stints that were never simulated (no ``'data'`` key) are skipped.
    The row index of the combined frame is rebuilt so it is contiguous.
    If no stint has data, ``self.data`` is left untouched (matching the
    original behaviour).

    FIX: ``DataFrame.append`` was deprecated in pandas 1.4 and removed
    in 2.0; a single ``pd.concat`` over the collected frames is the
    supported replacement and avoids the quadratic re-copying of
    appending one frame at a time.
    """
    frames = [self.stints[i]['data']
              for i in range(self.NStints)
              if 'data' in self.stints[i]]
    if frames:
        self.data = pd.concat(frames, ignore_index=True)
def writeOutput(self):
    """Write the combined result frame to ``<cwd>/../Cases/<name>.csv``.

    Creates the output folder on first use.

    FIX: paths are built with ``os.path.join`` instead of hard-coded
    Windows ``'\\'`` separators, so the export also works on POSIX
    systems.
    """
    output_folder = os.path.join(os.getcwd(), '..', 'Cases')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    out_file = os.path.join(output_folder, '{}.csv'.format(self.settings['meta']['name']))
    self.data.to_csv(out_file)
def updateCol(self, df, colName, colValues):
    """Create or overwrite column ``colName`` of ``df`` in place."""
    already_present = colName in df.columns
    if already_present:
        df[colName] = colValues
    else:
        # New column: append it after the existing ones.
        df.insert(len(df.columns), colName, colValues)
def unique(self, list1):
    """Return the sorted distinct values of ``list1``.

    Note: like the original implementation, ``list1`` is sorted in
    place as a side effect before deduplication.
    """
    list1.sort()
    deduped = []
    for value in list1:
        # After sorting, duplicates are adjacent, so comparing against
        # the last kept value is enough.
        if not deduped or deduped[-1] != value:
            deduped.append(value)
    return deduped
|
[
"tom1my2li3@gmail.com"
] |
tom1my2li3@gmail.com
|
c09b88a41324a9278190be87dd7ac601f2af61b0
|
a2dc244fe1c65dd3c30957513bf282bb23e20069
|
/ansible/my_modules/action/batfish_testfilter.py
|
6fed19d4d67aacc0d17529e20cbb516c38963f5e
|
[] |
no_license
|
igoqueen5/ansible-batfish
|
60e1d9906c799bb8002f9866e905be3881a3c04f
|
55add982f5adf88d482b964e9104db6c592f49ad
|
refs/heads/master
| 2020-06-25T12:03:11.834422
| 2019-08-12T10:57:51
| 2019-08-12T10:57:51
| 199,299,032
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,942
|
py
|
# -*- coding: utf-8 -*-
# pylint: disable=E0611,C0111
# E0611:No name 'urllib' in module '_MovedItems'
# C0111:Missing class docstring
# flake8: disable=E111,E114
# (c) 2018, Lisa Go (@lisago)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# python2でもpython3の機能を使えるようにする
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from pathlib import Path
import re
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
#from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.module_utils.network.common.config import NetworkConfig
from ansible.errors import AnsibleError
import os
import json
import sys
import re
import csv
#textfsmのパスをnotebookに渡す
sys.path.append('/Users/lisago/python/ansible/venv/lib/python3.7/site-packages')
import textfsm
from pybatfish.client.commands import *
from pybatfish.question.question import load_questions, list_questions
from pybatfish.question import bfq
from pybatfish.question import *
from pybatfish.datamodel import *
class ActionModule(_ActionModule):
    """Run Batfish ``testFilters`` for every row of a CSV test file.

    Each CSV row describes one ACL test case (``test_id``,
    ``intend_condition``, ``src``, ``dest``, ``node``, ``acl_name``,
    ``application``).  After initialising the Batfish snapshot the
    plugin queries testFilters per row and fails the task when any
    answer's Action differs from the intended condition.
    """

    def run(self, tmp=None, task_vars=None):
        del tmp
        # Run the wrapped module first; its result is intentionally
        # discarded (the original code overwrote it as well).
        super(ActionModule, self).run(task_vars=task_vars)

        snapshot_name = self._task.args.get('snapshot_name')
        snapshot_path = self._task.args.get('snapshot_path')
        network_name = self._task.args.get('network_name')
        csv_file_path = self._task.args.get('csv_file_path')

        PASS = "PASS"
        FAIL = "FAIL"
        result = {}
        result_list = []
        answer_list = []

        # FIX: the original named the loop variable ``csv``, shadowing
        # the csv module, and redundantly re-imported csv inside run().
        with open(csv_file_path) as f:
            test_rows = list(csv.DictReader(f))

        load_questions()
        bf_set_network(network_name)
        bf_init_snapshot(snapshot_path, name=snapshot_name, overwrite=True)

        for row in test_rows:
            intend_condition = row.get('intend_condition')
            test_id = row.get('test_id')
            src = row.get('src')
            dest = row.get('dest')
            acl = row.get('acl_name')
            node = row.get('node')
            application = row.get('application')

            # Mandatory-parameter validation (messages identical to the
            # original implementation).  FIX: ``is None`` instead of
            # ``== None``.
            if intend_condition == '' or intend_condition is None:
                result['failed'] = True
                result['msg'] = 'intend_condition parameter is required.'
                return result
            if test_id == '' or test_id is None:
                result['failed'] = True
                result['msg'] = 'test_id parameter is required.'
                return result
            # src is always required
            if src == '':
                result['failed'] = True
                result['msg'] = 'src parameter is required.'
                return result
            if node == '':
                result['failed'] = True
                result['msg'] = 'node parameter is required.'
                return result
            if acl == '':
                result['failed'] = True
                result['msg'] = 'acl parameter is required.'
                return result

            # A row with neither dest nor application is silently
            # skipped — this matches the original behaviour.
            if dest == '' and application == '':
                continue

            # Build the flow from whichever optional fields are present
            # (collapses the three near-identical branches of the
            # original into one).
            header_kwargs = {'srcIps': src}
            if dest != '':
                header_kwargs['dstIps'] = dest
            if application != '':
                header_kwargs['applications'] = application
            ip_flow = HeaderConstraints(**header_kwargs)

            answer = bfq.testFilters(headers=ip_flow,
                                     nodes=node,
                                     filters=acl).answer()
            json_answer = json.loads(answer.frame().to_json())
            json_answer["Test_id"] = test_id
            json_answer["Intend_condition"] = intend_condition
            answer_list.append(json_answer)

        result['batfish_result'] = answer_list

        # Compare each Batfish verdict against the intended condition.
        for answer in answer_list:
            action_num = answer['Action']["0"]
            condition = answer["Intend_condition"]
            test_id = answer["Test_id"]
            if action_num != condition.upper():
                result['failed'] = True
                result_list.append('{0}. test_id {1} test is failed.'.format(FAIL, test_id))
            else:
                # FIX: dropped the stray bare "PASS" entry the original
                # appended in addition to this message.
                result_list.append('{0}. {1} test is passed.'.format(PASS, test_id))
        result['msg'] = result_list
        return result
|
[
"oh.lisa@jp.fujitsu.com"
] |
oh.lisa@jp.fujitsu.com
|
f3b4af59fd5030bb5cdaad00e82843951dd20ba7
|
463979f26ecf5d7480913d0c5f4b3d4c6f8fddff
|
/digital_tuta.py
|
ba4f25dd1f9e31f605e7bf2da7e587ce23a29003
|
[] |
no_license
|
sjsakib/digital_tuta
|
301646c09f878603f524b37177dd215e741e1a20
|
c37be0325a4f43ba54acec56e38640427a84444d
|
refs/heads/master
| 2021-01-10T04:13:33.918787
| 2016-04-03T05:14:38
| 2016-04-03T05:14:38
| 55,329,679
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,465
|
py
|
#encoding:utf-8
from math import *
from functions import *
# Main chat loop of "Digital Tuta", a Bengali/English keyword-matching
# chatbot (Python 2: uses `print` statements and raw_input).  Each user
# line is scanned for keywords and dispatched to a helper from
# functions.py; unknown input falls through to err().
global p
p = '\nব্যাবহারকারীঃ---> '#prompt shown before each user input
s = None #user input string
banner()
# Any of these inputs (English or Bengali farewells) ends the session.
exit = ['exit','বিদায়','খোদা হাফেজ','বাই','bye']
while not s in exit:
    s = raw_input(p)
    # Trailing comma keeps the bot's reply on the same line.
    print "\nডিজিটাল তোতাঃ----> ",
    if s in exit:
        break
    # Math operations: square root / square / multiply / divide.
    if 'বর্গমূল' in s or 'square root' in s:
        root(s)
    elif 'বর্গ'in s or 'square' in s:
        square(s)
    elif 'গুণ' in s or 'multiply' in s or '*' in s:
        multiply(s)
    elif 'ভাগ' in s or 'divide' in s or '/' in s:
        divide(s)
    # Questions about who made the bot (and, optionally, how).
    elif 'তৈরী' in s or 'বানাইসে' in s or 'আবিষ্কারক' in s or 'আবিষ্কার' in s or "বানানো" in s or 'made' in s:
        maker()
        if "কিভাবে" in s:
            process()
    elif 'আসসালামুয়ালাইকুম' in s or 'আসসালামুআলাইকুম' in s:
        print "ওয়ালাইকুম-আসসালাম"
    elif 'যোগ' in s or 'add' in s or '+' in s:
        add(s)
    elif 'বিয়োগ' in s or '-' in s or 'minus' in s:
        minus(s)
    elif 'কেমন আছ' in s or 'how are you' in s:
        how_are_you()
    # Prime-number queries; num_err presumably extracts/validates the
    # number — TODO confirm against functions.py.
    elif 'মৌলিক' in s or 'prime' in s or 'প্রাইম' in s:
        if "বড়" in s:
            l = num_err(s)
        prime(s)
    elif 'গান' in s or ('play' in s and 'song' in s):
        play_song()
    elif 'hi' in s or 'হাই' in s or 'hello' in s or 'Hi' in s or 'হেলো' in s:
        chat_hi()
    elif 'fuck' in s or 'sex' in s:
        print "আমার বয়স মাত্র ২ দিন । দয়া করে খারাপ শব্দ ব্যবহার করবেন না "
    elif "কিভাবে" in s or ('how' in s and 'work' in s):
        process()
    # Trigonometric helpers.
    elif 'sin' in s or 'সাইন' in s:
        sin(s)
    elif 'cos' in s or 'কস' in s:
        cos(s)
    elif 'tan' in s or 'ট্যান' in s:
        tan(s)
    elif 'ত্রিভুজ' in s or'ত্রিভূজ' in s or 'triangle' in s:
        triangle(s)
    # Canned answers about Bangladesh national symbols.
    elif 'দেশের নাম' in s or ('country' in s and 'name' in s):
        print 'বাংলাদেশ'
    elif 'রাজধানী' in s or 'capital' in s:
        print 'ঢাকা'
    elif 'ফুল' in s or 'flower' in s:
        print 'শাপলা'
    elif 'ফল' in s or 'fruit' in s:
        print 'কাঁঠাল'
    elif 'গাছ' in s or 'tree' in s:
        print 'আমগাছ'
    elif 'পাখি' in s or 'bird' in s:
        print 'দোয়েল'
    elif 'মাছ' in s or 'fish' in s:
        print 'ইলিশ'
    elif 'ছেলে' in s and 'মেয়ে' in s:
        sex(s)
    elif 'থাক' in s or 'live' in s or 'বাড়ি' in s or 'home' in s:
        print "আমি যেখানে যাই সেখানেই আমার বাড়ী । আমি বেশিরভাগ সময় ই ঘুমিয়ে থাকি । যখন ডাক দেয়া হয় তখন ওঠি ।"
    elif 'doing' in s or 'করছ' in s:
        print 'আপনার সাথে চ্যাট'
    # NOTE(review): `'অত' in s in s` is a chained comparison
    # (equivalent to ('অত' in s) and (s in s)) — probably a typo for
    # just `'অত' in s`; left unchanged here.
    elif 'এত' in s or 'অত' in s in s:
        print "বস জানে"
    elif 'school' in s or 'class' in s or 'ক্লাসে' in s or 'college' in s or 'স্কুল' in s:
        print 'আমার বয়স মাত্র দুই দিন । স্কুল কলেজে পড়ি না । আমার বস আমাকে শেখায় । মাথার ভেতরে ঢুকে '
    elif 'অধিনায়ক' in s:
        print 'মুশফিকুর রহিম'
    elif 'নাম' in s or 'name' in s:
        print 'ডিজিটাল তোতা'
    elif 'খাও' in s or 'khao' in s or 'eat' in s:
        print 'এনার্জি'
    elif '?' in s:
        print "দুঃখিত । আমার জানা নেই । "
    else:
        # No keyword matched: generic error reply.
        err()
print "খোদা হাফেজ । আবার দেখা হবে । আপাতত ঘুমিয়ে পড়ছি । আবার যদি ইচ্ছা হয় অধমকে ডাকবেন ।"
|
[
"sjsakib.bd@gmail.com"
] |
sjsakib.bd@gmail.com
|
48072ce1f0ad677cab018047c999241ae7fbcb3f
|
bf24bb98588d310d1e7b4de076b6e05a4ea05647
|
/news/models.py
|
3c8568d839c4872469ef45c7736fedb0bfda7669
|
[
"MIT"
] |
permissive
|
freddy358/landing_page
|
e42fbb341a4ba5be28011c8e37166d433826a4a6
|
df46be01e58ece5d500c7d40547cbdba673eea05
|
refs/heads/master
| 2020-06-17T07:04:01.672053
| 2019-07-08T15:28:23
| 2019-07-08T15:28:23
| 195,839,526
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,738
|
py
|
from django.db import models
class MainName(models.Model):
    """A named link (display title + target URL) for the landing page."""
    title=models.CharField(max_length=255)
    url=models.CharField(max_length=200)
    def __str__(self):
        return self.title
class MainTitle(models.Model):
    """Singleton-style record holding the headings for each page section."""
    sitename=models.CharField(max_length=255)
    mainname=models.CharField(max_length=255)
    portfolioname=models.CharField(max_length=255)
    aboutname=models.CharField(max_length=255)
    contactname=models.CharField(max_length=255)
    footername=models.CharField(max_length=255)
class Menu(models.Model):
    """A navigation-menu entry (label + URL)."""
    title=models.CharField(max_length=255)
    url=models.CharField(max_length=200)
class Main(models.Model):
    """Hero-section item: an image with a short description."""
    img=models.ImageField(upload_to='photos')
    description=models.CharField(max_length=255)
class Portfolio(models.Model):
    """One portfolio showcase entry (image, title, description)."""
    img=models.ImageField(upload_to='photos')
    title = models.CharField(max_length=255, null=True)
    description = models.CharField(max_length=255, null=True)
class About(models.Model):
    """Free-form text for the About section."""
    description=models.TextField()
class Contact(models.Model):
    """A message submitted through the contact form."""
    name=models.CharField(max_length=255)
    email=models.EmailField()
    number=models.CharField(max_length=255)
    message=models.TextField()
    # Timestamp set automatically on creation.
    create_date=models.DateTimeField(auto_now_add=True, null=True, blank=True)
    def __str__(self):
        return f"{self.name}"
class Footer(models.Model):
    """A footer column; related Icon rows attach social links to it."""
    title = models.CharField(max_length=255)
    description = models.TextField(null=True,blank=True)
    def __str__(self):
        return f"{self.title}"
class Icon(models.Model):
    """An icon/link pair belonging to one Footer section."""
    # `icon` is presumably a CSS icon class name — TODO confirm in templates.
    icon=models.CharField(max_length=255)
    link=models.URLField()
    section=models.ForeignKey('Footer',on_delete=models.CASCADE)
    def __str__(self):
        return f"{self.icon}"
# Create your models here.
|
[
"farid.bakhishli@gmail.com"
] |
farid.bakhishli@gmail.com
|
c07f2619ac97efd5ccd2baee8b5067c853bc1158
|
63f930156e3ef8105346305916fbf8c9c438d5e4
|
/recom_sys_all_ranked/apriori/spmf_data_generator.py
|
3dffd1ada372282f3d120c81023cd4a6d4ca7db1
|
[] |
no_license
|
ACmagic/GAE
|
3ddafdcd45d9e003c8099c99021ed58d64f56bfd
|
00afdb9595dffee1a5e797a3e17d47baf5ba8b75
|
refs/heads/master
| 2020-03-29T21:19:23.467461
| 2018-07-19T14:19:51
| 2018-07-19T14:19:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,184
|
py
|
"""
generate txt files used by SPMF
"""
import pickle
import numpy
# Input pickle with parsed match data, and the three SPMF-formatted
# transaction files this script produces.
data_path = '../../input/dota.pickle'
win_team_csv_path = 'dota_win_team.txt'
lose_team_csv_path = 'dota_lose_team.txt'
oppo_team_csv_path = 'dota_oppo_team.txt'

with open(data_path, 'rb') as f:
    M_o, M_r_C, M_b_C, match_id2idx_dict, champion_id2idx_dict, Z, M = pickle.load(f)

# FIX: open the output files with context managers so they are flushed
# and closed even if an exception occurs mid-run (the original opened
# them bare and only closed them on the happy path).
with open(win_team_csv_path, 'w') as win_f, \
     open(lose_team_csv_path, 'w') as lose_f, \
     open(oppo_team_csv_path, 'w') as oppo_f:
    for i in range(Z):
        if i % 1000 == 0:
            print(i)  # progress indicator
        # M_o[i] == 1 means the M_r_C side won this match.
        if M_o[i] == 1:
            win_team = numpy.sort(M_r_C[i])
            lose_team = numpy.sort(M_b_C[i])
        else:
            win_team = numpy.sort(M_b_C[i])
            lose_team = numpy.sort(M_r_C[i])
        # Losing champions get +1000 ids so both teams share one id space.
        oppo_team = numpy.hstack((win_team, lose_team + 1000))
        win_f.write(' '.join(map(str, win_team)) + '\n')
        lose_f.write(' '.join(map(str, lose_team)) + '\n')
        oppo_f.write(' '.join(map(str, oppo_team)) + '\n')
|
[
"czxttkl@gmail.com"
] |
czxttkl@gmail.com
|
fee9e22265cc85ca4cd46fefc0d7645fe866ef8b
|
c8f358b3e908099618665e9218fabae7dc9c66fe
|
/Routes/Create.py
|
233ee1ea6df0b4a891b0e8cc3b4171f845e81ae1
|
[] |
no_license
|
twizzler/Houdini-Website
|
462cce522c2084ca6a276d07bbeef00287388fab
|
f23036abc22ea19301a0d35894cd4f3a496dbc2f
|
refs/heads/master
| 2021-08-10T16:55:03.652208
| 2020-04-13T09:26:15
| 2020-04-13T09:26:15
| 155,768,707
| 9
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,127
|
py
|
from flask import Blueprint
from flask import request, render_template
from Application import config, utils, database
import requests
import json
create = Blueprint('create', __name__, template_folder='templates')
class Create(object):
    """Account-creation endpoint backing the /create signup page.

    GET renders the signup form.  POST serves the page's AJAX field
    validations — ``request.form['on']`` names which field was edited —
    and the final ``form_submit__`` which registers the penguin.
    """
    @staticmethod
    @create.route('/create', methods=['GET', 'POST'])
    def create_index():
        # Exactly one of 'success'/'error' is filled and sent back.
        details = {'success': '', 'error': ''}
        if request.method == 'POST':
            on_click = request.form['on']
            # Single-field validations send the field content as 'value';
            # the final submit does not, hence the None fallback.
            post_value = request.form['value'] if 'value' in request.form else None
            if on_click == 'username_click_':
                # NOTE(review): the DB is queried even when the length
                # check below would reject the name anyway; also
                # len(post_value) raises if 'value' was omitted.
                username_exists = database.username_exists(post_value)
                if len(post_value) < 4 or len(post_value) > 13:
                    details['error'] = 'Username is too short or too long.'
                elif username_exists:
                    details['error'] = 'Username already in use.'
                elif not config["register"]["allowed_chars"].match(post_value):
                    details['error'] = 'Incorrect username, pick another one.'
                else:
                    details['success'] = 'Username available !'
            elif on_click == 'password_click_':
                if len(post_value) < 4 or len(post_value) > 32:
                    details['error'] = 'Password too short or too long.'
                else:
                    details['success'] = 'Password accepted !'
            elif on_click == 'email_click_':
                if len(post_value) < 5 or len(post_value) > 50:
                    details['error'] = 'Email too short or too long.'
                elif database.email_exists(post_value):
                    details['error'] = 'Email already in use.'
                else:
                    details['success'] = 'Email available !'
            elif on_click == 'form_submit__':
                username = request.form['value[name]']
                email = request.form['value[mail]']
                password = request.form['value[pass]']
                color = request.form['value[color]']
                captcha_response = request.form['value[recaptcha]']
                if not ReCaptcha(captcha_response).is_human():
                    details['error'] = 'You have not completed the recaptcha! <a href="#">Click here to refresh !</a>'
                    return utils.send_output(details)
                # Password is stored bcrypt-hashed, never in plain text.
                bcrypt_password = utils.generate_bcrypt_password(password)
                database.add_user(username, bcrypt_password, email, color)
                details['success'] = 'Penguin successfully created !'
            return utils.send_output(details)
        return render_template('create/index.html')
class ReCaptcha():
    """Thin wrapper around Google's reCAPTCHA server-side verification."""

    def __init__(self, response):
        # Token produced by the client-side reCAPTCHA widget.
        self.response = response

    def is_human(self):
        """Return True when Google confirms the captcha was solved."""
        payload = {
            'response': self.response,
            'secret': config['recaptcha']['recaptcha_secret_key'],
        }
        reply = requests.post("https://www.google.com/recaptcha/api/siteverify", payload)
        return json.loads(reply.text)['success']
|
[
"noreply@github.com"
] |
twizzler.noreply@github.com
|
b7f9f287433ff5b1cd142a9307281405f9dbbfd1
|
4bdb8e324a833c10380bb7b1f436d1e9629c873c
|
/Anachebe_Ikechukwu/Phase 1/Python Basic 1/Day 3/Qtn_3.py
|
395d4b5b4d5f60d8492e0ace25e108c9cb701e8c
|
[
"MIT"
] |
permissive
|
dreamchild7/python-challenge-solutions
|
e3831a57447f6132dd098be8b941cc27db92ace2
|
29e2ca780e86fc8a3e9d4def897c26bfa6d6493d
|
refs/heads/master
| 2022-11-08T17:23:57.763110
| 2020-06-19T08:38:20
| 2020-06-19T08:38:20
| 263,923,130
| 0
| 0
|
MIT
| 2020-05-14T13:29:33
| 2020-05-14T13:29:32
| null |
UTF-8
|
Python
| false
| false
| 79
|
py
|
# Demonstration of the \n (newline) and \t (tab) escape sequences.
message = "This is \n a multi line \n\t String. It is \n Formatted \n\t\t in tabs"
print(message)
|
[
"dreamchild7@outlook.com"
] |
dreamchild7@outlook.com
|
13a1f1028eb9fcbb96cecfbc3d02b7ee7cbc42f9
|
ff7b0cc72c3f1b2e2f289bc6055f5c3b17db2c38
|
/chinook/chinook/urls.py
|
bee58b57cbeefac37e24ce9d99c10a36e416eaee
|
[] |
no_license
|
Abhiintheweb/project_chinook
|
368b8b5ec71fac839e92a3179ae48be6fc10a8f0
|
19a313bf89c474febf42aa87098a2ec87308eb56
|
refs/heads/master
| 2022-04-22T16:58:53.686487
| 2020-04-24T09:53:30
| 2020-04-24T09:53:30
| 257,696,198
| 0
| 0
| null | 2020-04-24T09:23:11
| 2020-04-21T19:31:25
|
Python
|
UTF-8
|
Python
| false
| false
| 1,213
|
py
|
"""chinook URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import include
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
urlpatterns = [
    # Landing page: static template, no view logic needed.
    path('', TemplateView.as_view(template_name='home.html'), name='home'),
    # Session auth via Django's built-in login/logout views.
    path('login', auth_views.LoginView.as_view(template_name='login.html'), name='login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('admin/', admin.site.urls),
    # Browsable-API login/logout for Django REST framework.
    path('api-auth/', include('rest_framework.urls')),
    # Application API routes live in the local ``api`` app.
    path('api/', include('api.urls'))
]
|
[
"abhishek.singh@admins-MacBook-Pro.local"
] |
abhishek.singh@admins-MacBook-Pro.local
|
1f3c814e7e781395dd93313aae0293543a565a9f
|
5f0846828cf6788f9bb0ef3fe557cb777a6bf7d7
|
/An Introduction to Interactive Programming in Python/project6_blackjack.py
|
fedab16f58e6cf20afd197c012cf2c8b567119f2
|
[] |
no_license
|
nabeelfahmi12/Fundamentals-of-Computing-from-Rice-Unversity
|
c8065a2a884e34a002f6477b4221292dd7661a9e
|
438eb318221e30bf0ff24c1f0db0dd61939fdf49
|
refs/heads/main
| 2023-01-13T23:38:10.520031
| 2020-11-17T06:30:31
| 2020-11-17T06:30:31
| 308,280,161
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,953
|
py
|
# Blackjack
# http://www.codeskulptor.org/#user40_b06kFMCAFJiUp04.py
import simplegui
import random
# load card sprite - 936x384 - source: jfitz.com
CARD_SIZE = (72, 96)
CARD_CENTER = (36, 48)
card_images = simplegui.load_image("http://storage.googleapis.com/codeskulptor-assets/cards_jfitz.png")
CARD_BACK_SIZE = (72, 96)
CARD_BACK_CENTER = (36, 48)
card_back = simplegui.load_image("http://storage.googleapis.com/codeskulptor-assets/card_jfitz_back.png")
# initialize some useful global variables
in_play = False
outcome = "Hit or Stand?"
score = 0
# define globals for cards
SUITS = ('C', 'S', 'H', 'D')
RANKS = ('A', '2', '3', '4', '5', '6', '7', '8', '9', 'T', 'J', 'Q', 'K')
VALUES = {'A':1, '2':2, '3':3, '4':4, '5':5, '6':6, '7':7, '8':8, '9':9, 'T':10, 'J':10, 'Q':10, 'K':10}
# define card class
class Card:
    """One playing card; draws itself from the shared 13x4 sprite sheet."""
    def __init__(self, suit, rank):
        if (suit in SUITS) and (rank in RANKS):
            self.suit = suit
            self.rank = rank
        else:
            # Bad arguments: leave a null card and warn on the console.
            self.suit = None
            self.rank = None
            print "Invalid card: ", suit, rank
    def __str__(self):
        return self.suit + self.rank
    def get_suit(self):
        return self.suit
    def get_rank(self):
        return self.rank
    def draw(self, canvas, pos):
        # Locate this card's tile in the sprite sheet by rank/suit index.
        card_loc = (CARD_CENTER[0] + CARD_SIZE[0] * RANKS.index(self.rank),
                    CARD_CENTER[1] + CARD_SIZE[1] * SUITS.index(self.suit))
        canvas.draw_image(card_images, card_loc, CARD_SIZE, [pos[0] + CARD_CENTER[0], pos[1] + CARD_CENTER[1]], CARD_SIZE)
# define hand class
class Hand:
    """A blackjack hand: an ordered list of Cards plus value/draw helpers."""
    def __init__(self):
        self.card = []
    def __str__(self):
        s = ""
        for i in range(len(self.card)):
            s+= " "
            s+= str(self.card[i])
        return "Hand contains"+ s
    def add_card(self, card):
        self.card.append(card)
    def get_value(self):
        # Aces count as 1 by default; if the hand has at least one ace
        # and promoting one ace to 11 does not bust, use the higher total.
        hand_value = 0
        Ace = False
        for card in self.card:
            hand_value += VALUES[card.get_rank()]
            if card.get_rank() == "A":
                Ace = True
        if Ace and hand_value + 10 <= 21:
            return hand_value + 10
        else:
            return hand_value
    def draw(self, canvas, pos):
        # Draw the cards side by side, 100 px apart, starting at pos.
        i = 0
        for card in self.card:
            card_loc = (CARD_CENTER[0] + CARD_SIZE[0] * RANKS.index(card.get_rank()),
                        CARD_CENTER[1] + CARD_SIZE[1] * SUITS.index(card.get_suit()))
            canvas.draw_image(card_images, card_loc, CARD_SIZE, (pos[0] + 100 * i, pos[1]), CARD_SIZE)
            i += 1
# define deck class
class Deck:
    """A standard 52-card deck built from SUITS x RANKS."""
    def __init__(self):
        self.deck = []
        n = 0  # NOTE(review): unused; kept unchanged here
        for suit in SUITS:
            for rank in RANKS:
                self.deck.append(Card(suit, rank))
    def shuffle(self):
        # shuffle the deck in place
        random.shuffle(self.deck)
    def deal_card(self):
        # Deal from the end of the list (top of the deck).
        return self.deck.pop()
    def __str__(self):
        s = ""
        for card in self.deck:
            s += " "
            s += str(card)
        return "Deck contains" + s
#define event handlers for buttons
def deal():
    """Start a new round: fresh shuffled deck, two cards each.

    Clicking Deal while a hand is still in play forfeits that hand
    (score penalty of 1).
    """
    global new, outcome, dealer_hand, player_hand, in_play, canvas, score
    if in_play:
        outcome = "Last game interrupted. Hit or stand?"
        score -= 1
        print "score is now " + str(score)
    else:
        outcome = "Hit or Stand?"
    new = Deck()
    new.shuffle()
    print new
    player_hand = Hand()
    dealer_hand = Hand()
    # Alternate cards between player and dealer, two each.
    player_hand.add_card(new.deal_card())
    dealer_hand.add_card(new.deal_card())
    player_hand.add_card(new.deal_card())
    dealer_hand.add_card(new.deal_card())
    print player_hand
    print dealer_hand
    print outcome
    in_play = True
def hit():
    """Give the player one more card; busting ends the hand (-1 score)."""
    global in_play, score, hint, outcome
    # if the hand is in play, hit the player
    if in_play:
        player_hand.add_card(new.deal_card())
        print player_hand
        # test whether player busted (hand value over 21)
        if player_hand.get_value() > 21:
            outcome = "Player Busted. New deal?"
            in_play = False
            score -= 1
            print outcome
            print "score is now "+ str(score)
def stand():
    """Dealer draws up to 17+, then the round is scored.

    Dealer wins ties; score moves by +/-1 per round.
    """
    # if hand is in play, repeatedly hit dealer until his hand has value 17 or more
    global dealer_hand, player_hand, outcome, in_play, score
    if in_play:
        while dealer_hand.get_value() < 17:
            dealer_hand.add_card(new.deal_card())
        print dealer_hand
        if dealer_hand.get_value() > 21:
            outcome = "Dealer Busted."
            score += 1
            print "score is now "+ str(score)
        else:
            # Dealer wins ties (>= comparison).
            if dealer_hand.get_value() >= player_hand.get_value():
                outcome = "Dealer wins."
                score -= 1
                print "score is now "+ str(score)
            else:
                outcome = "Player wins."
                score += 1
                print "score is now "+ str(score)
        outcome += " New Deal?"
        in_play = False
        print outcome
# draw handler
def draw(canvas):
    """Frame draw handler: title, status line, score and both hands."""
    canvas.draw_text("Blackjack", (200, 50), 36, "Black")
    canvas.draw_text(outcome, (50, 125), 24, "Red")
    canvas.draw_text("Score: " + str(score), (450, 125), 24, "Red")
    canvas.draw_text("Dealer's hand", (50, 200), 24, "Black")
    canvas.draw_text("Player's hand", (50, 370), 24, "Black")
    dealer_hand.draw(canvas, [90, 260])
    player_hand.draw(canvas, [90, 430])
    # While the round is live, cover the dealer's hole card.
    if in_play:
        canvas.draw_image(card_back, CARD_BACK_CENTER, CARD_BACK_SIZE, (90, 260), CARD_BACK_SIZE)
# initialization frame (CodeSkulptor simplegui window)
frame = simplegui.create_frame("Blackjack", 600, 600)
frame.set_canvas_background("Green")
# create buttons and canvas callback
frame.add_button("Deal", deal, 200)
frame.add_button("Hit", hit, 200)
frame.add_button("Stand", stand, 200)
frame.set_draw_handler(draw)
# get things rolling: deal a first hand, then start the frame loop
deal()
frame.start()
|
[
"noreply@github.com"
] |
nabeelfahmi12.noreply@github.com
|
2ce9ac23cc7cdaa65e7d477ea29b58dfa9601295
|
607bc657720a3ddf66df32151d8d210ae1d6e2b9
|
/3- Using IP Webcam to perform task 1/Task-3.py
|
bbbcc789c007202e4b1e58ce99a20b1322c6f3f8
|
[] |
no_license
|
MuhammadAffanWahid/Computer_Vision
|
9149546b7e64d4ca51a2714de1a9271d4496e108
|
84056c89f4a2c45cd2e73a93d8a4da1fa43b446b
|
refs/heads/main
| 2023-07-23T11:24:56.287249
| 2021-08-31T10:31:25
| 2021-08-31T10:31:25
| 395,269,149
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,420
|
py
|
from urllib.request import urlopen
import cv2
import numpy as np
# Snapshot URL served by the IP Webcam Android app on the local network.
url='http://172.16.20.202:8080/shot.jpg'
#url='http://192.168.1.4:8080/shot.jpg'
i=0  # counter used to number saved captures
while True:
    # Fetch one JPEG frame and decode it into an OpenCV BGR image.
    imgResp = urlopen(url)
    imgNp=np.array(bytearray(imgResp.read()),dtype=np.uint8)
    img=cv2.imdecode(imgNp,-1)
    cv2.imshow('Image',cv2.resize(img,(600,300))) # displaying live video
    q=cv2.waitKey(1) # 1 milli second wait
    if(q==ord('c')): # if 'c' is pressed
        cv2.imwrite('./img_'+str(i)+'.jpg',img) # save the image
        img = cv2.resize(img, (600, 300)) # resizing the image
        b, g, r = cv2.split(img) # splitting the colored image into its channels
        cv2.imshow("Red Channel", r) # displaying red channel
        cv2.imshow("Green Channel", g) # displaying green channel
        cv2.imshow("Blue Channel", b) # displaying blue channel
        img_swapped = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # swapping red and blue color
        cv2.imshow("Swapped Image", img_swapped) # displaying the colored image
        i=i+1 # increment i for naming next image
    if q==ord('q'): # if 'q' is pressed
        break # quit program
cv2.destroyAllWindows() # closing all opened windows
|
[
"noreply@github.com"
] |
MuhammadAffanWahid.noreply@github.com
|
66943cf2f20d1afb0b5face9a0118bc83c5e4b83
|
81b52d4cfa08974119f9b0f4ecc04e9f65bfaa58
|
/main.py
|
6a6e8b202ee502eb5faf5a5f0921e0caa7214b56
|
[] |
no_license
|
vanajmoorthy/SudokuSolver
|
3d835a317b81e6106e815cd6a70e585c97b81c28
|
c3f6dc3e9268ae225015d1318841b01e70aade5e
|
refs/heads/master
| 2022-04-17T21:51:07.912107
| 2020-04-17T22:52:59
| 2020-04-17T22:52:59
| 256,626,036
| 1
| 0
| null | 2020-04-17T22:52:24
| 2020-04-17T22:52:23
| null |
UTF-8
|
Python
| false
| false
| 1,037
|
py
|
import numpy as np
# The Sudoku puzzle to solve: 9x9 board, 0 marks an empty cell.
grid = np.array([
    [0, 2, 0, 6, 5, 8, 0, 0, 9],
    [7, 0, 0, 0, 0, 1, 5, 0, 0],
    [5, 0, 0, 0, 0, 0, 0, 0, 2],
    [0, 0, 7, 0, 0, 2, 3, 4, 5],
    [0, 9, 0, 0, 1, 0, 0, 0, 6],
    [0, 0, 5, 0, 0, 7, 0, 9, 1],
    [0, 7, 0, 0, 0, 0, 9, 1, 0],
    [9, 0, 0, 3, 0, 0, 0, 0, 0],
    [6, 0, 0, 1, 0, 0, 0, 0, 0]
])
def possible(x, y, n, grid):
    """Return True when digit ``n`` may legally go at column x, row y.

    Checks the row, the column, and the enclosing 3x3 box.
    """
    # Row and column scan in a single pass.
    for k in range(9):
        if grid[y][k] == n or grid[k][x] == n:
            return False
    # Top-left corner of the 3x3 box containing (x, y).
    bx = (x // 3) * 3
    by = (y // 3) * 3
    for dy in range(3):
        for dx in range(3):
            if grid[by + dy][bx + dx] == n:
                return False
    return True
def solve(grid):
    """Backtracking solver that prints every completed board it finds.

    Mutates ``grid`` in place while searching; each tried cell is reset
    to 0 on backtrack, so the board is restored after the call returns.
    Pauses with input() after each solution so the user can ask for
    more solutions by pressing Enter.
    """
    for y in range(9):
        for x in range(9):
            if grid[y][x] == 0:
                for n in range(1,10):
                    if possible(x, y, n, grid):
                        grid[y][x] = n
                        solve(grid)      # recurse on the next empty cell
                        grid[y][x] = 0   # undo: try the next candidate
                return
    # No empty cell left: grid holds a full solution.
    print(grid)
    input("Find more solutions?")
|
[
"shivbhatia10@gmail.com"
] |
shivbhatia10@gmail.com
|
419594e3ef1ee1e3d70ffbcdd4ca235dad6d61df
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02665/s402491742.py
|
b987775b8cb3a6948b09b8c7f2f5df1fd116af00
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 667
|
py
|
# Competitive-programming solution: given a depth N and the number of
# leaves A[i] required at each depth i, maximise the total number of
# nodes of a binary tree (print -1 when no such tree exists).
N = int(input())
A = list(map(int, input().split()))
B = []  # B[i]: upper bound on how many nodes can exist at depth i
if N == 0:
    # A depth-0 tree is a single node, which must be the one leaf.
    if A[0] != 1:
        print(-1)
        exit()
    else:
        print(1)
        exit()
else:
    # The root cannot be a leaf when the tree is deeper than 0.
    if A[0] != 0:
        print(-1)
        exit()
    else:
        B.append(1)
# Top-down: every non-leaf node at depth i-1 yields at most 2 children.
for i in range(1, N + 1):
    B.append((B[i - 1] - A[i - 1]) * 2)
    # Infeasible if required leaves exceed capacity, or if every node at
    # an intermediate depth would have to be a leaf.
    if (A[i] > B[i] or (A[i] == B[i] and i != N)):
        print(-1)
        exit()
ans = 0
ans += A[N]
B[N] = A[N]
# Bottom-up: tighten each level's node count by what the level below
# can actually support (children + this level's leaves).
for i in range(N - 1, -1, -1):
    ans += min(B[i], B[i + 1] + A[i])
    B[i] = min(B[i], B[i + 1] + A[i])
print(ans)
exit()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
c13b1aae1a1a9059776ee92947cca4f9de0bd62b
|
423d89ae3c05db0944b45c421174e9a653551189
|
/dart/lib/collection/collection_sources.gypi
|
d7fcc266501687d679087af14d6e98472ef4a4af
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
kwalrath/bleeding_edge
|
37d14e50f9a13a624dbca0ea2a22cae4c4a34275
|
cef245f4c9e92398333da599d490cad40db49778
|
refs/heads/master
| 2020-12-24T15:32:07.816186
| 2012-10-29T08:38:15
| 2012-10-29T08:38:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
gypi
|
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
# This file contains all sources for the dart:collection library.
{
'sources': [
'arrays.dart',
'collections.dart',
'maps.dart',
],
}
|
[
"ajohnsen@google.com@260f80e4-7a28-3924-810f-c04153c831b5"
] |
ajohnsen@google.com@260f80e4-7a28-3924-810f-c04153c831b5
|
a9041e4264d06096ba86e38e3dfbcb50e9a85356
|
9f557f2ffc67e9f07478c7e2366daa7168ea56d5
|
/TestRestApi/product_catalog_api/serializers.py
|
b241286fbd68660e5bdae4583c3049f197902b43
|
[] |
no_license
|
kolodkinv/ProductRestApi
|
044904f7829ebbc1585cfe75c3fc8836d24fa1d5
|
94ce43cf2f6166b696d25f4f1e9c3919f6a42add
|
refs/heads/master
| 2020-04-08T06:54:22.353646
| 2018-11-27T04:46:14
| 2018-11-27T04:46:14
| 159,117,912
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,157
|
py
|
from rest_framework import serializers
from product_catalog_api.models import Product, ProductRegister
class ProductSerializer(serializers.ModelSerializer):
    """DRF serializer for Product with EAN-13 SKU validation."""
    class Meta(object):
        model = Product
        fields = ('url', 'title', 'SKU', 'date_creation', 'image', 'is_new')
    def validate(self, data):
        # SKU must have the exact EAN-13 length defined on the model.
        if len(data['SKU']) != Product.SKU_LENGTH:
            raise serializers.ValidationError(
                'Длина SKU не соответсвует стандарту EAN-13')  # "SKU length does not match EAN-13"
        # SKU must be numeric only.
        if not data['SKU'].isdigit():
            raise serializers.ValidationError(
                'SKU должно содержать только цифры')  # "SKU must contain digits only"
        return data
class ProductRegisterSerializer(serializers.ModelSerializer):
    """DRF serializer for stock-register operations on a Product."""
    class Meta(object):
        model = ProductRegister
        fields = (
            'url', 'date_operation', 'user_email', 'count', 'action', 'product'
        )
    def validate(self, data):
        # An operation must move at least one unit of product.
        if data['count'] < 1:
            raise serializers.ValidationError(
                'Количество товара в операции должно быть больше 0')  # "count must be greater than 0"
        return data
|
[
"vladimir.kolodkin@webpp.ru"
] |
vladimir.kolodkin@webpp.ru
|
fd69ba1ff167a6dbdabf8af06d4754e8b7bc7ce7
|
b3fbd3e92c02e78c6a6cd51dba0c3394cb7ca649
|
/DistanceMatrix/DistanceMatrix.py
|
259350927ec82bd34cbfe323c973508be003f620
|
[] |
no_license
|
KieranM279/Rosalind-Challenges
|
debfcc1020306d254c42289978f8eca29de7d682
|
5e0b8e7cddbbe037668c77acebf60984768304a1
|
refs/heads/master
| 2023-03-23T06:57:20.823925
| 2021-02-20T15:11:47
| 2021-02-20T15:11:47
| 304,687,835
| 0
| 0
| null | 2021-02-16T18:16:22
| 2020-10-16T16:56:10
|
Python
|
UTF-8
|
Python
| false
| false
| 1,626
|
py
|
from Bio import SeqIO
#### Step 1 #### Import the data ####
def Dictmaker(filename):
    """Parse a FASTA file and return a {record id: sequence string} dict."""
    # SeqIO.parse yields one record per FASTA entry; build the mapping
    # directly with a dict comprehension.
    return {rec.id: str(rec.seq) for rec in SeqIO.parse(filename, 'fasta')}
sequenceDict = Dictmaker('rosalind_pdst.txt')
#### Step 2 #### Calculate distance between two strings ####
def DistCalc(string1, string2):
    """Return the p-distance between two equal-length strings.

    The proportion of positions at which the strings differ, formatted
    to five decimal places (the format Rosalind expects).
    """
    mismatches = sum(a != b for a, b in zip(string1, string2))
    return format(mismatches / len(string1), '.5f')
#### Step 3 #### Distance Matrix printer ####
def DistMatrix(sequences):
    """Print the p-distance matrix in Rosalind's format (one space-joined row per ID)."""
    labels = list(sequences)
    # Every ordered pair is compared, including each sequence against itself
    # (which yields the zero diagonal).
    for row_id in labels:
        row = [DistCalc(sequences[row_id], sequences[col_id]) for col_id in labels]
        print(' '.join(row))
# Entry point: emit the full distance matrix to stdout.
DistMatrix(sequenceDict)
|
[
"70451332+KieranM279@users.noreply.github.com"
] |
70451332+KieranM279@users.noreply.github.com
|
8e59c0d21bc251a1bc557059bc21b25c8f6282a0
|
bc3e0035c60696bcd754991005b9596de0d29ec3
|
/blog/migrations/0001_initial.py
|
39aa477e0429f43b798a70c97eff56a5cb475cd5
|
[] |
no_license
|
bmonsueir/naturiere
|
6700e4a1fa999c4094b8cee2035bcc993b0f8a19
|
2eac6d1ffec25d53d70ea286b0f7ee03bb603a82
|
refs/heads/master
| 2021-01-19T23:50:07.030575
| 2017-05-14T19:55:54
| 2017-05-14T19:55:54
| 89,039,808
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,765
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-05-05 21:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the blog app: Post and Comment models.

    Auto-generated by Django's makemigrations (Django 1.9, 2017-05-05);
    avoid hand-editing once applied.
    """
    initial = True
    dependencies = [
        # Post.author targets the (possibly swapped) project user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('author', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                # Comments require moderation before display.
                ('approved_comment', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                # null published_date means the post is still a draft.
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Comment.post is added after both models exist.
        migrations.AddField(
            model_name='comment',
            name='post',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.Post'),
        ),
    ]
|
[
"bmonsueir@gmail.com"
] |
bmonsueir@gmail.com
|
0c2a744373f61ec8628bd435b7f2a4844ec4de24
|
621c49ed8e713103572ea2a1405429ac5835456d
|
/yumewatari/vendor/uart.py
|
5b1e55d2cc31e545644c2edee7f791c6ed6ea7fb
|
[
"0BSD"
] |
permissive
|
whitequark/Yumewatari
|
751c7ee2b4086cce46759dc8150121c6b9d987ce
|
0981d8c832850c72745808c022dc63944a7164bc
|
refs/heads/master
| 2020-04-05T06:36:25.863887
| 2019-04-02T17:47:53
| 2019-04-02T17:47:53
| 156,643,351
| 54
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,783
|
py
|
from migen import *
from migen.genlib.fsm import *
from migen.genlib.cdc import MultiReg
__all__ = ['UART', 'uart_bit_cyc']
class UARTBus(Module):
    """
    UART bus.
    Provides synchronization.

    Wraps the tristate ``pads``: ``rx_i`` is the receive line brought into
    the local clock domain, ``tx_o`` drives the transmit line. Each
    direction exists only if the corresponding pad attribute is present.
    """
    def __init__(self, pads):
        self.has_rx = hasattr(pads, "rx_t")
        if self.has_rx:
            self.rx_t = pads.rx_t
            self.rx_i = Signal()
        self.has_tx = hasattr(pads, "tx_t")
        if self.has_tx:
            self.tx_t = pads.tx_t
            self.tx_o = Signal(reset=1)  # UART line idles high
        ###
        if self.has_tx:
            self.comb += [
                # TX is always driven (output enable tied high).
                self.tx_t.oe.eq(1),
                self.tx_t.o.eq(self.tx_o)
            ]
        if self.has_rx:
            self.specials += [
                # Multi-register synchronizer for the asynchronous RX input;
                # resets to the idle (high) level so no spurious start bit is
                # seen right after reset.
                MultiReg(self.rx_t.i, self.rx_i, reset=1)
            ]
def uart_bit_cyc(clk_freq, baud_rate, max_deviation=50000):
    """
    Calculate bit time from clock frequency and baud rate.

    :param clk_freq:
        Input clock frequency, in Hz.
    :type clk_freq: int or float
    :param baud_rate:
        Baud rate, in bits per second.
    :type baud_rate: int or float
    :param max_deviation:
        Maximum deviation of actual baud rate from ``baud_rate``, in parts per million.
    :type max_deviation: int or float
    :returns: (int, int or float) -- bit time as a multiple of clock period, and actual baud rate
        as calculated based on bit time.
    :raises: ValueError -- if the baud rate is too high for the specified clock frequency,
        or if actual baud rate deviates from requested baud rate by more than a specified amount.
    """
    cycles_per_bit = round(clk_freq // baud_rate)
    if cycles_per_bit <= 0:
        raise ValueError("baud rate {} is too high for input clock frequency {}"
                         .format(baud_rate, clk_freq))
    achieved_baud = round(clk_freq // cycles_per_bit)
    ppm_error = round(1000000 * (achieved_baud - baud_rate) // baud_rate)
    if ppm_error > max_deviation:
        raise ValueError("baud rate {} deviation from {} ({} ppm) is higher than {} ppm"
                         .format(achieved_baud, baud_rate, ppm_error, max_deviation))
    return cycles_per_bit + 1, achieved_baud
class UART(Module):
    """
    Asynchronous serial receiver-transmitter.
    Any number of data bits, any parity, and 1 stop bit are supported.
    :param bit_cyc:
        Bit time expressed as a multiple of system clock periods. Use :func:`uart_bit_cyc`
        to calculate bit time from system clock frequency and baud rate.
    :type bit_cyc: int
    :param data_bits:
        Data bit count.
    :type data_bits: int
    :param parity:
        Parity, one of ``"none"`` (default), ``"zero"``, ``"one"``, ``"even"``, ``"odd"``.
    :type parity: str
    :attr rx_data:
        Received data. Valid when ``rx_rdy`` is active.
    :attr rx_rdy:
        Receive ready flag. Becomes active after a stop bit of a valid frame is received.
    :attr rx_ack:
        Receive acknowledgement. If active when ``rx_rdy`` is active, ``rx_rdy`` is reset,
        and the receive state machine becomes ready for another frame.
    :attr rx_ferr:
        Receive frame error flag. Active for one cycle when a frame error is detected.
    :attr rx_ovf:
        Receive overflow flag. Active for one cycle when a new frame is started while ``rx_rdy``
        is still active. Afterwards, the receive state machine is reset and starts receiving
        the new frame.
    :attr rx_err:
        Receive error flag. Logical OR of all other error flags.
    :attr tx_data:
        Data to transmit. Sampled when ``tx_rdy`` is active.
    :attr tx_rdy:
        Transmit ready flag. Active while the transmit state machine is idle, and can accept
        data to transmit.
    :attr tx_ack:
        Transmit acknowledgement. If active when ``tx_rdy`` is active, ``tx_rdy`` is reset,
        ``tx_data`` is sampled, and the transmit state machine starts transmitting a frame.
    """
    def __init__(self, pads, bit_cyc, data_bits=8, parity="none"):
        self.rx_data = Signal(data_bits)
        self.rx_rdy = Signal()
        self.rx_ack = Signal()
        self.rx_ferr = Signal()
        self.rx_ovf = Signal()
        self.rx_perr = Signal()
        self.rx_err = Signal()
        self.tx_data = Signal(data_bits)
        self.tx_rdy = Signal()
        self.tx_ack = Signal()
        self.submodules.bus = bus = UARTBus(pads)
        ###
        bit_cyc = int(bit_cyc)
        def calc_parity(sig, kind):
            # Elaboration-time helper: returns a 1-bit expression carrying the
            # parity of ``sig`` according to ``kind``.
            if kind in ("zero", "none"):
                return C(0, 1)
            elif kind == "one":
                return C(1, 1)
            else:
                bits, _ = value_bits_sign(sig)
                even_parity = sum([sig[b] for b in range(bits)]) & 1
                if kind == "odd":
                    return ~even_parity
                elif kind == "even":
                    return even_parity
                else:
                    assert False
        if bus.has_rx:
            rx_timer = Signal(max=bit_cyc)
            rx_stb = Signal()
            rx_shreg = Signal(data_bits)
            rx_bitno = Signal(max=rx_shreg.nbits)
            self.comb += self.rx_err.eq(self.rx_ferr | self.rx_ovf | self.rx_perr)
            self.sync += [
                If(rx_timer == 0,
                    rx_timer.eq(bit_cyc - 1)
                ).Else(
                    rx_timer.eq(rx_timer - 1)
                )
            ]
            self.comb += rx_stb.eq(rx_timer == 0)
            self.submodules.rx_fsm = FSM(reset_state="IDLE")
            self.rx_fsm.act("IDLE",
                NextValue(self.rx_rdy, 0),
                If(~bus.rx_i,
                    # Preload half a bit time so every subsequent rx_stb lands
                    # in the middle of a bit, where the line is stable.
                    NextValue(rx_timer, bit_cyc // 2),
                    NextState("START")
                )
            )
            self.rx_fsm.act("START",
                If(rx_stb,
                    NextState("DATA")
                )
            )
            self.rx_fsm.act("DATA",
                If(rx_stb,
                    # Shift LSB-first: new bit enters at the MSB end.
                    # BUG FIX: was Cat(rx_shreg[1:8], bus.rx_i); the hardcoded
                    # upper bound 8 broke data_bits != 8 (identical for 8).
                    NextValue(rx_shreg, Cat(rx_shreg[1:], bus.rx_i)),
                    NextValue(rx_bitno, rx_bitno + 1),
                    If(rx_bitno == rx_shreg.nbits - 1,
                        If(parity == "none",
                            NextState("STOP")
                        ).Else(
                            NextState("PARITY")
                        )
                    )
                )
            )
            self.rx_fsm.act("PARITY",
                If(rx_stb,
                    If(bus.rx_i == calc_parity(rx_shreg, parity),
                        NextState("STOP")
                    ).Else(
                        self.rx_perr.eq(1),
                        NextState("IDLE")
                    )
                )
            )
            self.rx_fsm.act("STOP",
                If(rx_stb,
                    # Stop bit must be high; a low line here is a frame error.
                    If(~bus.rx_i,
                        self.rx_ferr.eq(1),
                        NextState("IDLE")
                    ).Else(
                        NextValue(self.rx_data, rx_shreg),
                        NextState("READY")
                    )
                )
            )
            self.rx_fsm.act("READY",
                NextValue(self.rx_rdy, 1),
                If(self.rx_ack,
                    NextState("IDLE")
                ).Elif(~bus.rx_i,
                    # A new start bit arrived before the consumer acknowledged.
                    self.rx_ovf.eq(1),
                    NextState("IDLE")
                )
            )
        ###
        if bus.has_tx:
            tx_timer = Signal(max=bit_cyc)
            tx_stb = Signal()
            tx_shreg = Signal(data_bits)
            tx_bitno = Signal(max=tx_shreg.nbits)
            tx_parity = Signal()
            self.sync += [
                If(tx_timer == 0,
                    tx_timer.eq(bit_cyc - 1)
                ).Else(
                    tx_timer.eq(tx_timer - 1)
                )
            ]
            self.comb += tx_stb.eq(tx_timer == 0)
            self.submodules.tx_fsm = FSM(reset_state="IDLE")
            self.tx_fsm.act("IDLE",
                self.tx_rdy.eq(1),
                If(self.tx_ack,
                    NextValue(tx_shreg, self.tx_data),
                    # ``parity`` is a Python constant, so this condition is
                    # resolved at elaboration time.
                    If(parity != "none",
                        NextValue(tx_parity, calc_parity(self.tx_data, parity))
                    ),
                    NextValue(tx_timer, bit_cyc - 1),
                    NextValue(bus.tx_o, 0),
                    NextState("START")
                ).Else(
                    NextValue(bus.tx_o, 1)
                )
            )
            self.tx_fsm.act("START",
                If(tx_stb,
                    NextValue(bus.tx_o, tx_shreg[0]),
                    # BUG FIX: was Cat(tx_shreg[1:8], 0); hardcoded width broke
                    # data_bits != 8 (identical for 8).
                    NextValue(tx_shreg, Cat(tx_shreg[1:], 0)),
                    NextState("DATA")
                )
            )
            self.tx_fsm.act("DATA",
                If(tx_stb,
                    NextValue(tx_bitno, tx_bitno + 1),
                    If(tx_bitno != tx_shreg.nbits - 1,
                        NextValue(bus.tx_o, tx_shreg[0]),
                        # BUG FIX: was Cat(tx_shreg[1:8], 0), see above.
                        NextValue(tx_shreg, Cat(tx_shreg[1:], 0)),
                    ).Else(
                        If(parity == "none",
                            NextValue(bus.tx_o, 1),
                            NextState("STOP")
                        ).Else(
                            NextValue(bus.tx_o, tx_parity),
                            NextState("PARITY")
                        )
                    )
                )
            )
            self.tx_fsm.act("PARITY",
                If(tx_stb,
                    NextValue(bus.tx_o, 1),
                    NextState("STOP")
                )
            )
            self.tx_fsm.act("STOP",
                If(tx_stb,
                    NextState("IDLE")
                )
            )
|
[
"whitequark@whitequark.org"
] |
whitequark@whitequark.org
|
c107c7173288ef97e293a3795997983fde1a851b
|
b4e704f67214028cd0ba8b7a641a2267faf606e4
|
/projectmf/data/initial_mealplan_data.py
|
325fe23a1b305b251761047df5ba70f0e9a6ca6e
|
[] |
no_license
|
matthias4366/mf-2
|
d21b32aae9ae617d78b4b1541edc196f6f822e4e
|
73e2110c031c8c17596e27af1143b557b1062b3c
|
refs/heads/master
| 2022-10-23T05:54:32.889983
| 2020-06-11T15:40:04
| 2020-06-11T15:40:04
| 190,999,715
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 776
|
py
|
# Seed data: initial meal plan fixtures consumed elsewhere in the project.
# Each entry names a plan, its nutrient profile key, and one
# full-day-of-eating name per day; repeated names reuse the same day plan
# on consecutive days (22 days in total here).
mealplan_initial_data = [
    {
        'name': 'Mealplan Sandor',
        'nutrient_profile':
            'sandor',
        'fulldayofeating_list': [
            'Pasta', # check
            'Pasta',
            'Pasta',
            'Chili mexican',
            'Chili mexican',
            'Chili mexican',
            'Chili mexican',
            'Fried Rice',
            'Fried Rice',
            'Fried Rice',
            'Fried Rice',
            'Pasta',
            'Pasta',
            'Pasta',
            'Chili mushrooms',
            'Chili mushrooms',
            'Chili mushrooms',
            'Chili mushrooms',
            'Asian vegetables',
            'Asian vegetables',
            'Asian vegetables',
            'Asian vegetables',
        ]
    }
]
|
[
"spam.matthias.h.schulz@gmail.com"
] |
spam.matthias.h.schulz@gmail.com
|
efe2450ed674337d07ec6f4bd09c73dff689dd14
|
ba66da3901361854b9bb621586f1e49ad0121ee0
|
/正式开班/Scrapy爬虫/yiwugou/yiwugou/settings.py
|
0a401720e94bb79bd1cb4bc31ddf6545738731e8
|
[] |
no_license
|
luobodage/PythonBasis
|
c4739920055afbda03774d90151ab183a83583f8
|
ea65536e759fec221a70d7647ae86120277d5459
|
refs/heads/master
| 2023-05-14T15:51:56.213282
| 2021-05-31T00:57:56
| 2021-05-31T00:57:56
| 322,145,745
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,245
|
py
|
# Scrapy settings for yiwugou project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'yiwugou'
SPIDER_MODULES = ['yiwugou.spiders']
NEWSPIDER_MODULE = 'yiwugou.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'yiwugou (+http://www.yourdomain.com)'
# Obey robots.txt rules
# NOTE: robots.txt is deliberately ignored for this crawl.
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
# CONCURRENT_REQUESTS_PER_DOMAIN = 16
# CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
# COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
# TELNETCONSOLE_ENABLED = False
# Only warnings and errors reach the console (suppresses per-request INFO noise).
LOG_LEVEL = 'WARNING'
# Override the default request headers:
# A desktop Chrome 88 User-Agent is pinned here (instead of the USER_AGENT
# setting above) so requests look like a regular browser.
DEFAULT_REQUEST_HEADERS = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'en',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36',
}
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
#    'yiwugou.middlewares.YiwugouSpiderMiddleware': 543,
# }
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
#    'yiwugou.middlewares.YiwugouDownloaderMiddleware': 543,
# }
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
# }
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# All scraped items are routed through the project's single pipeline.
ITEM_PIPELINES = {
    'yiwugou.pipelines.YiwugouPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# HTTPCACHE_ENABLED = True
# HTTPCACHE_EXPIRATION_SECS = 0
# HTTPCACHE_DIR = 'httpcache'
# HTTPCACHE_IGNORE_HTTP_CODES = []
# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
|
[
"fuyu16032001@gmail.com"
] |
fuyu16032001@gmail.com
|
36f215554963e89e34152fa1be8ade2695aa7c85
|
12273ce7234d7e34df2968e7b254441d7819f136
|
/CS4442_a2/3_f.py
|
d2a110ea993f0da42892d36bef9929dca52b5b3c
|
[] |
no_license
|
mattySKR/CS4442
|
de287577fb726536baa5a7f07d8b5924ca35540f
|
aabacaad39312670200820423899365ad3c54753
|
refs/heads/main
| 2023-07-15T12:38:32.154586
| 2021-09-03T21:52:11
| 2021-09-03T21:52:11
| 402,229,205
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,892
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
# Extracting the data (assumes one flattened 64x64 face image per row -- TODO confirm)
vals = np.loadtxt('faces.dat.txt')
# Creating our PCA object instance with 400 components
pca = PCA(400)
# Fitting pca over the given data
pca.fit(vals)
# Computing our Principal components or Eigenvectors
comp = pca.components_
# Computing our Explained Variance or Eigenvalues
var = pca.explained_variance_
# Sorting the eigenvalues in descending order
# It looks like pca tool has taken care of it, but
# will sort just in case.
# NOTE(review): sklearn already returns components/variances sorted in
# descending order, so this re-sort should be a no-op.
descend_index = np.argsort(var)[::-1]
descend_eigenvalues = var[descend_index]
# Can also sort eigenvectors based on the descending order
# of eigenvalues above. This will arrange the principal
# components in descending order of their variability
sort_eigenvectors = comp[descend_index,:]
# Printing out the eigenvalues in descending order to the screen
eigen_tuples = [(np.abs(descend_eigenvalues[i]), sort_eigenvectors[i,:]) for i in range(len(descend_eigenvalues))]
print("Eigenvalues in descending order:")
for i in eigen_tuples:
    print(i[0])
# ----------------------- The above is just a copy of 3_c.py ------------------------
# Displaying our top 5 eigenfaces; swapaxes transposes the 64x64 image so it
# displays in the expected orientation.
eigenface_1 = plt.figure(1)
plt.title("First Best")
plt.imshow(np.swapaxes(sort_eigenvectors[0].reshape(64, 64), 0, 1), cmap='bone')
eigenface_2 = plt.figure(2)
plt.title("Second Best")
plt.imshow(np.swapaxes(sort_eigenvectors[1].reshape(64, 64), 0, 1), cmap='bone')
eigenface_3 = plt.figure(3)
plt.title("Third Best")
plt.imshow(np.swapaxes(sort_eigenvectors[2].reshape(64, 64), 0, 1), cmap='bone')
eigenface_4 = plt.figure(4)
plt.title("Fourth Best")
plt.imshow(np.swapaxes(sort_eigenvectors[3].reshape(64, 64), 0, 1), cmap='bone')
eigenface_5 = plt.figure(5)
plt.title("Fifth Best")
plt.imshow(np.swapaxes(sort_eigenvectors[4].reshape(64, 64), 0, 1), cmap='bone')
plt.show()
|
[
"noreply@github.com"
] |
mattySKR.noreply@github.com
|
07afebad1d7801ecc79c60b77e84ee033a7a3457
|
5e8cd8bd77086d06ff915c49ffe3c85f37e3ee66
|
/Class6-7-8-list/project_travis_security_guard.py
|
4725e341c8dead9ef550773690607d398995295f
|
[] |
no_license
|
QAMilestoneAcademy/python_basic
|
05a7f88e570c39d71c15467bbed00e5783bbe5fa
|
20fd6949df62515a20d1812301a02511e28410ec
|
refs/heads/master
| 2023-01-05T22:19:27.184397
| 2020-11-07T09:05:33
| 2020-11-07T09:05:33
| 294,338,569
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 802
|
py
|
# Interactive exercise: greet a known user and optionally remove them
# from the access list.
known_users = ["Shrek", "Donkey", "Fiona", "Patrick", "Bob", "Joe"]
print("Hi my name Travis")
name = input("What is your name? ")
# Normalize the input so "  shrek " matches "Shrek".
name=name.strip().capitalize()
print(name)
if name in known_users:
    print("Hello {}.How are you".format(name))
    stay=input("Would you like to stay in the list-Yes/No: ")
    stay=stay.strip().capitalize()
    print(stay)
    if stay=="No":
        known_users.remove(name)
        print(known_users)
# NOTE(review): the branch below is commented out, so unknown users currently
# get no response at all (and cannot be added to the list).
#
# elif name not in known_users:
#     print("Sorry {} you are not in the list ".format(name))
#     enter = input("Would you like to enter the system? ")
#     enter=enter.strip().capitalize()
#
#     if enter == "Yes":
#         known_users.append(name)
#         print("Welcome to the system! \t")
#         print(known_users)
#     else:
#         print("have a good day \t")
|
[
"48306511+QAMilestoneAcademy@users.noreply.github.com"
] |
48306511+QAMilestoneAcademy@users.noreply.github.com
|
5e7e8e6706206b0150b41af07a8abe66751ac96a
|
488578a753f8cf51d4b50f61304b3a92fee0a79f
|
/Transformaciones_geométricas.py
|
6b7a023628cb13f6ac3d289a4ff7e325e3da71e7
|
[] |
no_license
|
dairon20jb/Taller4
|
c738f49b3fe937d126e574fafa889314af104b75
|
23a6b69aa7401702400c9517fb1442a0eb2bc47d
|
refs/heads/main
| 2022-12-26T10:31:07.697141
| 2020-10-09T00:29:41
| 2020-10-09T00:29:47
| 302,481,991
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,487
|
py
|
import cv2
import os
import numpy as np
click = 0
refPt = list()
def click_event(event, x, y, flags, pafram): # On each click, store the mouse coordinates
    """OpenCV mouse callback: record up to 3 clicked points in the global refPt list.

    After the third click, the counter resets and all OpenCV windows are closed.
    """
    global click
    if click < 3 : # While fewer than 3 clicks have been taken, keep collecting points
        if event == cv2.EVENT_LBUTTONDOWN:
            click = click + 1
            print(x, ",", y) # Echo the stored coordinates
            refPt.append([x, y])
            print(len(refPt))
    else:
        # Third point already taken: reset and close the image windows.
        click = 0
        cv2.destroyAllWindows()
if __name__ == '__main__':
    # Ask for the folder and the two image names (prompts are in Spanish).
    print("Digite la ruta de las imagenes:") # Prompt for path, then the names of images I1 and I2
    path1 = input()#'C:\PRUEBA'
    print("Digite nombre de la imagen I1:")
    name1 = input()#'lena(1).png'
    path_file1 = os.path.join(path1, name1)
    I1 = cv2.imread(path_file1, 1)
    print("Digite nombre de la imagen I2:")
    name2 = input()#'lena_warped.png'
    path_file2 = os.path.join(path1, name2)
    I2 = cv2.imread(path_file2, 1)
    cont = 0
    if cont == 0 : # Collect 3 clicked points on the first image (callback stops at 3)
        cv2.imshow("I1", I1)
        cv2.setMouseCallback("I1", click_event)
        cont=cont+1
        cv2.waitKey(0)
    if cont == 1 : # Collect the 3 corresponding points on the second image
        cv2.imshow("I2", I2)
        cv2.setMouseCallback("I2", click_event)
        cv2.waitKey(0)
    pts1 = np.float32(refPt[0:3]) # Points clicked on the first image
    pts2 = np.float32(refPt[3:6]) # Points clicked on the second image
    M_affine = cv2.getAffineTransform(pts1, pts2) # Affine transform from the point pairs
    image_affine = cv2.warpAffine(I1, M_affine, I1.shape[:2]) # Apply the affine transform to the image
    # Parameters for the similarity transform recovered from the affine matrix
    s0 = np.sqrt(((M_affine[0,0])**2) +((M_affine[1,0])**2)) # Scale in x
    s1 = np.sqrt(((M_affine[0,1])**2) +((M_affine[1,1])**2)) # Scale in y
    # NOTE(review): np.arctan loses the quadrant; arctan2 would be safer here.
    theta =np.arctan((M_affine[1,0]) / (M_affine[0,0])) # Rotation (radians)
    theta_grad= theta*180 / np.pi # Rotation in degrees
    # NOTE(review): np.cos/np.sin below are fed theta_grad (degrees), but they
    # expect radians -- confirm whether theta was intended instead.
    x0= (((M_affine[0,2])*np.cos(theta_grad))-((M_affine[1,2])*np.sin(theta_grad)))/s0 # Translation in x
    x1= (((M_affine[0,2])*np.cos(theta_grad))-((M_affine[1,2])*np.sin(theta_grad)))/s1 # Translation in y
    M_sim = np.float32([[s0 * np.cos(theta), -np.sin(theta), x0],[np.sin(theta), s1 * np.cos(theta), x1]]) # Similarity matrix
    image_similarity = cv2.warpAffine(I1, M_sim, I1.shape[:2]) # Apply the similarity transform to the image
    # Error
    # To apply the transform to the points, homogeneous coordinates are needed.
    vnorm = np.append(pts1.transpose(),np.array([[1,1,1]]), axis = 0)
    similitud_puntos= M_sim.dot(vnorm) # Similarity transform applied to the points
    Trans_similitud_puntos= similitud_puntos[:-1,:].transpose() # Reshape to match pts2 (3x2)
    error=np.linalg.norm(Trans_similitud_puntos-pts2,axis=1) # Norm of the error against the annotated I2 points
    print("La norma del error es :", error)
    cv2.imshow("Similar", image_similarity)
    cv2.imwrite(os.path.join(path1, 'Similar.png'), image_similarity)
    cv2.imshow("Affine", image_affine)
    cv2.imwrite(os.path.join(path1, 'Affine.png'), image_affine)
    cv2.imshow("Image2", I2)
    cv2.imwrite(os.path.join(path1, 'Image2.png'), I2)
    cv2.waitKey(0)
|
[
"dairon.barbosa@javeriana.edu.co"
] |
dairon.barbosa@javeriana.edu.co
|
9ebb3464ee77da1b477f86ec903b6e422aef4531
|
8486f822f9264e7fd98101e1e16c236b84a3e165
|
/game_playing_agents.py
|
53513336b11b611717f3943d7e0c3a719d2aa3e4
|
[] |
no_license
|
daunlee/mini_capstone
|
6e4926c2bb4e8bb7bfcce2e4bd6ae54ec7978229
|
cdc546bf4e296aa936466bc3ddbbebf53cae0a74
|
refs/heads/master
| 2020-05-30T01:35:28.560501
| 2019-06-12T22:24:06
| 2019-06-12T22:24:06
| 189,480,565
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,228
|
py
|
from lab2_algorithms import *
from time import time
from math import sqrt
from lab2_util_eval import all_fn_dicts, always_zero
from connectfour_gamestate import ConnectFourGameState
from tictactoe_gamestate import TicTacToeGameState
from roomba_gamestate import RoombaRaceGameState
from nim_gamestate import NimGameState
INF = float('inf')
# Accepted spellings for the interactive prompts below.
QUIT = ['q', 'Q', 'quit', 'Quit', 'QUIT']
YES = ['y', 'yes', 'Y', 'Yes', 'YES']
NO = ['n', 'no', 'N', 'No', 'NO']
# Spellings that mean "no limit" (mapped to INF by get_int_or_inf).
NO_LIMIT = ['inf', 'INF', "Inf", 'infinity', 'infinite', "Infinity", "Infinite",
            'INFINITY', 'INFINITE', 'none', "None", "NONE"]
def ask_yes_no(prompt):
    """Prompt until the user answers yes or no; a quit word exits the program."""
    while True:
        answer = input(prompt)
        if answer in QUIT:
            quit()
        if answer in YES:
            return True
        if answer in NO:
            return False
        print("Oops, please type either 'y[es]' or 'n[o]'.")
def pick_from_dict(prompt, d):
    """Prompt until the user picks a key of ``d``; return the mapped value.

    A quit word exits the program.
    """
    options_text = str(list(d.keys()))
    print("Options: {}.".format(options_text))
    while True:
        choice = input(prompt)
        if choice in QUIT:
            quit()
        if choice in d:
            return d[choice]
        print("Oops, please pick from the following options: {}.".format(options_text))
def get_int_or_inf(prompt):
    """Prompt until the user enters an int or a "no limit" word (mapped to INF)."""
    while True:
        raw = input(prompt)
        if raw in QUIT:
            quit()
        if raw in NO_LIMIT:
            return INF
        try:
            # input() always yields a str, so int() can only raise ValueError.
            return int(raw)
        except ValueError:
            print("Oops, please enter either an int or 'none'/'inf'.")
def get_int(prompt):
    """Prompt until the user enters a valid int; a quit word exits the program."""
    while True:
        raw = input(prompt)
        if raw in QUIT:
            quit()
        try:
            return int(raw)
        except ValueError:
            print("Oops, please enter an int.")
def get_float(prompt):
    """Prompt until the user enters a valid float; a quit word exits the program."""
    while True:
        raw = input(prompt)
        if raw in QUIT:
            quit()
        try:
            return float(raw)
        except ValueError:
            print("Oops, please enter a float.")
class GamePlayingAgent:
    """ An abstract class for Game Playing Agents, either human or AI.

    Subclasses must implement set_up() and choose_action().
    """
    def __init__(self, game_class, name = None):
        self.name = name
        # The GameState class this agent plays (used for action parsing etc.).
        self.game_class = game_class
    def set_up(self, **kwargs):
        """
        For instantiating settings, including name.
        Should prompt user (via command prompt).
        """
        raise NotImplementedError
    def choose_action(self, state, **kwargs):
        """
        Return an action for the state and its expected utility (from the perspective of the current player).
        Return None as action to forfeit the game.
        May return additional things!
        """
        raise NotImplementedError
class HumanTextInputAgent(GamePlayingAgent):
    """Agent driven by a human typing actions at the command prompt."""
    def __init__(self, game_class, name="Human Player"):
        super().__init__(game_class, name)
    def set_up(self, **kwargs):
        """
        For instantiating settings, including name.
        Should prompt user (via command prompt).
        Any setting passed as a keyword argument is taken as-is; missing
        settings are prompted for interactively.
        """
        for kw in kwargs:
            # BUG FIX: was `self.kw = kwargs[kw]`, which assigned every value
            # to the literal attribute name 'kw' instead of the keyword's name.
            setattr(self, kw, kwargs[kw])
        if 'name' not in kwargs:
            new_name = input("Name: >>> ")
            if new_name != "":
                self.name = new_name
        if 'verbose' not in kwargs:
            self.verbose = ask_yes_no("Be verbose? (Print time elapsed) >>> ")
        if 'GUI' in kwargs and kwargs['GUI']:
            # Humans pick moves themselves; nothing to visualize while "thinking".
            self.show_thinking = False
    def choose_action(self, state, **kwargs):
        """
        Return an action for the state and its expected utility (from the perspective of the current player).
        Return None as action to forfeit the game.
        A human has no utility estimate, so the second element is always None.
        """
        action = None
        search_start_time = time()
        while action is None:
            inp = input("Player {} ({}): Choose your action >>> ".format(state.get_current_player(), self.name))
            # Allow the player to quit gracefully
            if inp in QUIT:
                # Forfeit. BUG FIX: was `return None` (a bare value), which is
                # inconsistent with the (action, utility) pair returned
                # everywhere else (cf. ProgressiveDeepeningSearchAgent).
                return None, None
            try:
                action = self.game_class.str_to_action(inp)
                if action not in state.get_all_actions():
                    # FIX: added the missing space after the period.
                    print("Invalid action '{}'. Your valid actions include: {}".format(self.game_class.action_to_str(action),
                        str([self.game_class.action_to_str(x) for x in state.get_all_actions()])))
                    action = None
                    continue
            except Exception:
                # str_to_action failed to parse the input.
                print("Invalid input '{}'. Your valid actions include: {}".format(inp,
                    str([self.game_class.action_to_str(x) for x in state.get_all_actions()])))
                continue
        elapsed_time = time() - search_start_time
        if self.verbose:
            print("Total elapsed time: {:.4f}".format(elapsed_time))
        return action, None
class ClassicSearchAgent(GamePlayingAgent):
    """Agent that picks moves with a classic fixed-depth adversarial search
    algorithm (minimax, expectimax, alpha-beta, or random choice)."""
    def __init__(self, game_class, search_alg, name="Classic Search Player"):
        self.search_alg = search_alg
        super().__init__(game_class, name)
    def set_up(self, **kwargs):
        """
        For instantiating settings, including name.
        Should prompt user (via command prompt).
        Any setting passed as a keyword argument is taken as-is; missing
        settings are prompted for interactively.
        """
        for kw in kwargs:
            # BUG FIX: was `self.kw = kwargs[kw]`, which assigned every value
            # to the literal attribute name 'kw' instead of the keyword's name.
            setattr(self, kw, kwargs[kw])
        if 'name' not in kwargs:
            new_name = input("Name: >>> ")
            if new_name != "":
                self.name = new_name
        if 'util_fn' not in kwargs:
            self.util_fn = pick_from_dict("Pick endgame utility func: >>> ",
                all_fn_dicts[self.game_class]['endgame_util_fn_dict'])
        if 'cutoff' not in kwargs:
            self.cutoff = get_int_or_inf("Cutoff (Max Search Depth): >>> ")
        if 'eval_fn' not in kwargs:
            if self.cutoff == INF:
                # Unbounded search never cuts off, so the heuristic is unused.
                self.eval_fn = always_zero
            else:
                self.eval_fn = pick_from_dict("Pick heuristic cutoff evaluation func: >>> ",
                    all_fn_dicts[self.game_class]['heuristic_eval_fn_dict'])
        if self.search_alg != RandChoice:
            if 'random_move_order' not in kwargs:
                self.random_move_order = ask_yes_no("Random move order? >>> ")
            if 'transposition_table' not in kwargs:
                self.transposition_table = ask_yes_no("Use a transposition table? >>> ")
        else:
            # Random choice ignores move ordering and memoization.
            self.random_move_order = False
            self.transposition_table = False
        if 'verbose' not in kwargs:
            self.verbose = ask_yes_no("Be verbose? >>> ")
        if 'GUI' in kwargs and kwargs['GUI']:
            self.show_thinking = ask_yes_no("Show thinking? (slower) >>> ")
    def choose_action(self, state, **kwargs):
        """
        Return an action for the state and its expected utility (from the perspective of the current player).
        Return None as action to forfeit the game.
        kwargs (optional keyword arguments)
        may include 'state_callback_fn', 'counter'
        """
        if 'state_callback_fn' not in kwargs:
            # Default: no-op visualization hook.
            kwargs['state_callback_fn'] = lambda s, v: False
        if 'counter' not in kwargs:
            kwargs['counter'] = {'num_nodes_seen': 0, 'num_endgame_evals': 0, 'num_heuristic_evals': 0}
        search_start_time = time()
        action, leaf_node, exp_util, terminated = self.search_alg(
            initial_state = state,
            util_fn = self.util_fn,
            eval_fn = self.eval_fn,
            cutoff = self.cutoff,
            state_callback_fn = kwargs['state_callback_fn'],
            counter = kwargs['counter'],
            random_move_order = self.random_move_order,
            transposition_table = self.transposition_table
        )
        elapsed_time = time() - search_start_time
        if self.verbose:
            print("{} values this state at utility {:.4f}".format(self.name, exp_util))
            print("{} nodes seen, {} endgame evals, {} heuristic evals ".format(kwargs['counter']['num_nodes_seen'], kwargs['counter']['num_endgame_evals'],kwargs['counter']['num_heuristic_evals']))
            print("Total elapsed time: {:.4f}".format(elapsed_time))
        return action, exp_util
class RandChoiceAgent(ClassicSearchAgent) :
    # Picks a uniformly random legal action (search_alg = RandChoice).
    def __init__(self, game_class, name="Random Player"):
        super().__init__(game_class, search_alg = RandChoice, name = name)
class MaximizingDFSAgent(ClassicSearchAgent) :
    # NOTE(review): despite the name, this passes ExpectimaxSearch as the
    # search algorithm (same as ExpectimaxSearchAgent) -- confirm whether a
    # dedicated MaximizingDFS algorithm was intended here.
    def __init__(self, game_class, name="MaximizingDFS (Optimistic) Player"):
        super().__init__(game_class, search_alg = ExpectimaxSearch, name = name)
class MinimaxSearchAgent(ClassicSearchAgent) :
    # Assumes an optimal (adversarial) opponent.
    def __init__(self, game_class, name="Minimax (Pessimistic) Player"):
        super().__init__(game_class, search_alg = MinimaxSearch, name = name)
class ExpectimaxSearchAgent(ClassicSearchAgent) :
    # Models the opponent as choosing uniformly among its actions.
    def __init__(self, game_class, name="Expectimax (Cautiously Optimistic) Player"):
        super().__init__(game_class, search_alg = ExpectimaxSearch, name = name)
class MinimaxAlphaBetaSearchAgent(ClassicSearchAgent) :
    # Minimax with alpha-beta pruning: same result, fewer nodes explored.
    def __init__(self, game_class, name="Minimax w/ Alpha-Beta (Pessimistic Pruning) Player"):
        super().__init__(game_class, search_alg = MinimaxAlphaBetaSearch, name = name)
class ProgressiveDeepeningSearchAgent(GamePlayingAgent):
    """Agent that runs iterative-deepening search under a wall-clock budget,
    returning the best action found at the deepest completed cutoff."""
    def __init__(self, game_class, name="Progressive Deepening Player"):
        self.search_alg = ProgressiveDeepening
        super().__init__(game_class, name)
    def set_up(self, **kwargs):
        """
        For instantiating settings, including name.
        Should prompt user (via command prompt).
        Any setting passed as a keyword argument is taken as-is; missing
        settings are prompted for interactively.
        """
        for kw in kwargs:
            # BUG FIX: was `self.kw = kwargs[kw]`, which assigned every value
            # to the literal attribute name 'kw' instead of the keyword's name.
            setattr(self, kw, kwargs[kw])
        if 'name' not in kwargs:
            new_name = input("Name: >>> ")
            if new_name != "":
                self.name = new_name
        if 'util_fn' not in kwargs:
            self.util_fn = pick_from_dict("Pick endgame utility func: >>> ",
                all_fn_dicts[self.game_class]['endgame_util_fn_dict'])
        if 'eval_fn' not in kwargs:
            self.eval_fn = pick_from_dict("Pick heuristic cutoff evaluation func: >>> ",
                all_fn_dicts[self.game_class]['heuristic_eval_fn_dict'])
        if 'time_limit' not in kwargs:
            self.time_limit = get_float("Time Limit (seconds): >>> ")
        if 'random_move_order' not in kwargs:
            self.random_move_order = ask_yes_no("Random move order? >>> ")
        if 'transposition_table' not in kwargs:
            self.transposition_table = ask_yes_no("Use a transposition table? >>> ")
        if 'verbose' not in kwargs:
            self.verbose = ask_yes_no("Be verbose? >>> ")
            if self.verbose:
                self.super_verbose = ask_yes_no("Be SUPER verbose? >>> ")
        if 'GUI' in kwargs and kwargs['GUI']:
            self.show_thinking = ask_yes_no("Show thinking? (slower) >>> ")
    def choose_action(self, state, **kwargs):
        """
        Return an action for the state and its expected utility (from the perspective of the current player).
        Return None as action to forfeit the game.
        kwargs (optional keyword arguments)
        may include 'state_callback_fn', 'counter'
        """
        if 'state_callback_fn' not in kwargs:
            # Default: no-op visualization hook.
            kwargs['state_callback_fn'] = lambda s, v: False
        if 'counter' not in kwargs:
            # Index 0 accumulates totals; one entry is appended per cutoff.
            kwargs['counter'] = {'num_nodes_seen': [0], 'num_endgame_evals': [0], 'num_heuristic_evals': [0], }
        search_start_time = time()
        best_actions, best_leaf_nodes, best_exp_utils, max_cutoff = self.search_alg(
            initial_state = state,
            util_fn = self.util_fn,
            eval_fn = self.eval_fn,
            time_limit = self.time_limit,
            state_callback_fn = kwargs['state_callback_fn'],
            counter = kwargs['counter'],
            random_move_order = self.random_move_order,
            transposition_table = self.transposition_table
        )
        elapsed_time = time() - search_start_time
        if self.verbose:
            if self.super_verbose:
                for c in range(1,max_cutoff+1):
                    print("Cutoff {}: Best action is {} at exp value {:.4f}.\n{} nodes seen, {} endgame evals, {} cutoff evals".format(
                        c, best_actions[c-1], best_exp_utils[c-1],
                        kwargs['counter']['num_nodes_seen'][c], kwargs['counter']['num_endgame_evals'][c], kwargs['counter']['num_heuristic_evals'][c]
                        ))
            else:
                print("After max cutoff {}: Best action is {} at exp value {:.4f}.".format(
                    max_cutoff, best_actions[-1], best_exp_utils[-1],
                    ))
            print("Total:\n Nodes seen: {} | Endgame evals: {} | Cutoff evals: {}".format(kwargs['counter']['num_nodes_seen'][0],kwargs['counter']['num_endgame_evals'][0],kwargs['counter']['num_heuristic_evals'][0]))
            print("Total elapsed time: {:.4f}".format(elapsed_time))
        if max_cutoff > 0:
            return best_actions[-1], best_exp_utils[-1]
        else:
            # No cutoff completed within the time limit: forfeit.
            return None, None
class MonteCarloTreeSearchAgent(GamePlayingAgent):
    """Game-playing agent that selects moves via Monte Carlo Tree Search."""

    def __init__(self, game_class, name="Monte Carlo Tree Search Player"):
        # The search algorithm this agent delegates move selection to.
        self.search_alg = MonteCarloTreeSearch
        super().__init__(game_class, name)

    def set_up(self, **kwargs):
        """
        For instantiating settings, including name.
        Prompts the user (via command prompt) for any setting not
        already supplied through kwargs.
        """
        # Adopt any pre-supplied settings under their own names.
        # BUGFIX: the original `self.kw = kwargs[kw]` stored every value
        # under the literal attribute name 'kw', so passed-in settings
        # (e.g. 'verbose') were silently dropped.
        for kw in kwargs:
            setattr(self, kw, kwargs[kw])
        if 'name' not in kwargs:
            new_name = input("Name: >>> ")
            if new_name != "":
                self.name = new_name
        if 'util_fn' not in kwargs:
            self.util_fn = pick_from_dict("Pick endgame utility func: >>> ",
                                          all_fn_dicts[self.game_class]['endgame_util_fn_dict'])
        if 'exploration_bias' not in kwargs:
            self.exploration_bias = get_float("Exploration Bias x sqrt(2) (recommended = 1000 [~max utility]): >>> ")
        if 'time_limit' not in kwargs:
            self.time_limit = get_float("Time Limit (seconds): >>> ")
        if 'verbose' not in kwargs:
            self.verbose = ask_yes_no("Be verbose? >>> ")
        if 'GUI' in kwargs and kwargs['GUI']:
            self.show_thinking = ask_yes_no("Show thinking? (slower) >>> ")

    def choose_action(self, state, **kwargs):
        """
        Return an action for the state and its expected utility (from the perspective of the current player).
        Return None as action to forfeit the game.
        kwargs (optional keyword arguments)
        may include 'state_callback_fn', 'counter'
        """
        if 'state_callback_fn' not in kwargs:
            kwargs['state_callback_fn'] = lambda s, v: False
        if 'counter' not in kwargs:
            kwargs['counter'] = {'num_simulations': 0}
        search_start_time = time()
        # Delegate to the algorithm chosen in __init__ (consistent with the
        # other agents) instead of hard-coding MonteCarloTreeSearch here.
        best_action, best_leaf_state, best_exp_util, num_simulations = self.search_alg(
            initial_state=state,
            util_fn=self.util_fn,
            exploration_bias=self.exploration_bias * sqrt(2),
            time_limit=self.time_limit,
            state_callback_fn=kwargs['state_callback_fn'],
            counter=kwargs['counter']
        )
        elapsed_time = time() - search_start_time
        if self.verbose:
            print("After {} simulations, best action is {} at exp value {:.4f}.".format(
                num_simulations, best_action, best_exp_util,
            ))
            print("Total elapsed time: {:.4f}".format(elapsed_time))
        return best_action, best_exp_util
|
[
"noreply@github.com"
] |
daunlee.noreply@github.com
|
f5837d589c9b72efb9bb9da96d52c1503d7136ad
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/7vN8ZRw43yuWNoy3Y_5.py
|
fc5744478874764cd33eda091a690328a1cfa51a
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 864
|
py
|
"""
Create a function which takes in an encoded string and returns a dictionary
according to the following example:
### Examples
parse_code("John000Doe000123") ➞ {
"first_name": "John",
"last_name": "Doe",
"id": "123"
}
parse_code("michael0smith004331") ➞ {
"first_name": "michael",
"last_name": "smith",
"id": "4331"
}
parse_code("Thomas00LEE0000043") ➞ {
"first_name": "Thomas",
"last_name": "LEE",
"id": "43"
}
### Notes
* The string will always be in the same format: first name, last name and id with zeros between them.
* `id` numbers will not contain any zeros.
* **Bonus:** Try solving this **using** RegEx.
"""
def parse_code(txt):
    """Decode a 'first0...0last0...0id' string into a dict.

    The three fields are separated by runs of '0'; discarding the empty
    pieces produced by consecutive zeros leaves exactly three parts.
    """
    parts = [piece for piece in txt.split('0') if piece]
    first, last, ident = parts
    return {'first_name': first, 'last_name': last, 'id': ident}
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
9ade747b227c3c62d7615d4baa5558f857f3ca45
|
fd5f5a92c0de46ffcb039a43bc7fc04163581332
|
/Seharali.py
|
1bba983b58966f512814f66059e1204087a13d55
|
[] |
no_license
|
Paridoll786510/Seharali
|
b1e128107d9d48263c061654b50e4ffa6a49b765
|
8eb628661936bbfc819f507c1a99e4a2323e84cd
|
refs/heads/main
| 2023-07-09T11:55:09.647501
| 2021-08-12T12:52:06
| 2021-08-12T12:52:06
| 395,316,892
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 244,182
|
py
|
# ECRYPT BY Sohni kuri
# Subscribe Cok Chanel YouTube Gua Anjing
# Dan Jangan Lupa Follow Github Gua
exec((lambda _____, ______ : ______(eval((lambda ____,__,_ : ____.join([_(___) for ___ in __]))('',[95, 95, 105, 109, 112, 111, 114, 116, 95, 95, 40, 34, 122, 108, 105, 98, 34, 41, 46, 100, 101, 99, 111, 109, 112, 114, 101, 115, 115],chr))(_____),"<haMzah>","exec"))(b'x\x9cT}y_\xd6\xcc\xd2\xf4\xff\xef\xa7\x00\x15\x04E\xcdd\x9d\x887\xb2\xba\xa2\xb8\xe1FP\x92I\x02(\x8b"\xe0\x82\xf8\xd9\xdf\xab\xaa+p\x9e\xf3;\xde"\\d\x99\xe9\xb5\xba\xba\xe7\xea\xd8\xca\xd2\xab\x0f/\xde\x8c-~\x18[<\xfc=\xf6\xa8\xde\xffS?\xfa\x7fW\xc7^\x9f4?\xc2\xd1n\xd3\x8d-\x1d~\x1d[\xda\xa9\x0f\xba\xbd\xb1\x0f\x87\'oNF\xdfzxR\x8f-\x1c|\xd9=\xd8\x1e}r\xb9>\x18{R\x1fl\x8f\xfeZ=\xf9V\x8f=8\xdc\xdb;\xfc9\xf6p\xf7x\xe7\xa4\xc1G\xff_\xf7\xab\x0bSS{\xf5~\xd3\xd6c\x9f\xf1\xbf\x19\xfb\xeb\xf3\xd8]}1\xd5\x9d\xd6{\xff\xe733\xf8\xbf~~\xfb\xcb\xe1\xee\xc1\xd4\xc6\xe7\xa9\xd1\xd7\xd3c\xfd\xe1\x11\xbe;\xb6{0\xfakszz\xea\xfa\xf5\x99\x8d2\x9b\x19\xc3\x1f\x17\xf1?\xe5\xe8?.\xc6\x7f\x1c\xfe\x93\xe2?\xb9}\x02\x7f\xd2hf,\xc17c|&\xf2\xfa\xbd\xd2\xdb\xb7\xd3\xd1/\xa59\xbe\x19\xe1?\xa3\x7f\x95\xe5p\xad\xff\xb9t\xaa\x1f:\x97\xf1?\x9b3a\xe7h\xf4<|\xa3\xe9\x99+\xf7v\xeag\x7f\xea\x9d\xb9+3W\xb0\x04WF?j\xae\xff\xaa~\x95\xe1\xcd\xf9\xeeF\xf5\xab\xcdV\xab_MQ\xfd\xea\xd3\xd1\x1f\xfc]\xbc\xab~\xd5\xd1\x95\x1b\xd5/\x9f\x8c\xbe\xcaG\xff\xe9F\xdfw\xf9\xf2\xf7\xd1\xafu+\xa3/\xddB\xf5+\x8aG\xff\xf2\xcf\xef\x8e>\x16\x8f\xbe\x8c<\xbf\x98\x1a]\xac\x1f]\xa4\x1d]\xb9]\x1f\xfdz\x83oW\xbfB_\x8e\xbe\x83\xcb\xbb\xd1\xdf\xf9\xe8\xf3\xf8z\xf4w\xe8F\xff\x8eF\xbf5\xfa\x8d\x90\x7f\x1c}\x91\xbd\xfd<\xbaEs6\xfay\x18\xfd\x89\xecj]=\xfa\xd1\xe8bu6\xfa{t\r\x9f\xe1\xd9/\xbf\xe6\x1d\x13\xfc{{t!\xfcB\xf7a\xf4]<<\xae\xd2o\xe2\x9a\xa3/\x9ao\xf6\x9db\xf4\x94\xcd\xe8\xfe~\xf4\x1cm37\xfa"\xfc7\xfa\x8a\x9f\x9d\xc3\xa3}\xb3\x87oG\xcf\x1d5\xb3\xc1\xfe\xd5\x8f\xfe\x84\xe2)\xfe3\xfa\xc6\xe8\xe2\xae\xff\xef\xbf\x03<\xf9\xe8\xcb\x0eW>\xc7\xe5F\x7f:|x\xb48\xfd\xe8\xb9\xbb\x14\x1f\xee\x0f\xf1\xf3\xd1\xf25\tV\xf9\xc4\x96\x1ck\x82+w\xbc\x98=z;Z\xcb2\xb1w*\xf5\x87\xef\xd7\x875
\xdb,?<}\xf7\xdf\x9c\xdd\xa3\xc7\xef\xc6\x8fw\x0e\xed\xdem\xd8==\xb7\xab\xfb\xb0?\xba|l\x1f\xa9G\xbf\xd4\x8d.U\xebVX>|\x1f\x8f\x82\x8b\x96x\x14\xf7N\x0f\x99`\x9d\xe6F\xff\xf5\xad}\x0c\xd7\xeb\xf0"X>\xbego\xf7\xc3\xd64%\x1e\xea\xf7\x0f\xbbZ\xd7o\x04{\xec\xbe\x1b\xffw\xd7\x9e\xb7\xa1\x08\xd4_\xf5>\xa3\xef\xd4x\xbf\xa68X\xc53\xad\xdbz\xe3\xd7\x9d\xc7/\x8e~\x03r\xe5\xf5\xf0\xe5\x03|\xf1\xdd\xd9\x8f\xb0d\x10\x8fr\xf4#\x97j\tJ{?\x87\x9fq\x19\x1f@.Bu\xfc\xcf\xae\xd9%\xc9K\\\t\x0f\xb8n\xe2\xd4c\xbd]{>mK\x02\t\r\x89I4.\xe9J{Z,Y=Zi7\xfaSc{\xb3\xfb\xa3\xeb\x17\xcf\xf1\xd5S\xe8\x08\x96\xb8\xdb\x1e\xadzSC\xfaG\xaf\xec SQo\xcb\xd4y{c\xeezfo\x8a\xb7\xef\xf0\xbd\xec\x03\x96\xfc\x04Oe\xb2\xde\xa7\xeb3\xa3\xe7p\xfa\xac\xb3\xfbC#\xb0\x17\x94\x0e<G\xfa\x05\xcf\x80\x1b\xff\xdb\x84 \xadU\xd5[\xac\x08^wt\xa3\xeb\xf8\xb5\x87\xa6\x93u\xb7f\xbf\x04\xcd- \xe4\xbe\xc5\xa2\xd8-\xae\xbf\x94\xb0\xe2\xd2-\x7f\x93Z\x84\x87\xee\xd7\xdf\xd9*7\xa9I=\x95\xa7\xb1\xc7\x8a\xa8@\xa7{\xa3\xdf\x8e\xfb\xcc\xae\xd1\xc6\xf8\xcd\xb57\x10P\xdb+(\x07\x16\x11\xea\xee\xfb\x7fX\xae\xd1c9\x88g-\xb5K\xeczP\xc8N_\x97\xc5\x83\xfb\xf6\x0c\xd8\x90\xd1\x15\x0f\xf0*;\xa3\x1f\xd4\x87\xd3P\xf13J?\x94"\x82\x8d1\x0b\x16\xf0\x14\xb09\xb5\xfd.\xee\x02\x19\x1dmZ\x05\xfb\x021~`\xcb\xe6\xeas\xd3\tWn\xdacC\x88qkZ\x8e\xf4\xb3\xdd\xff\xa1$\xc1\xdbK\xe0f\xae\xfd\xef}\xf9\x07\xbaj+M\x8b\x00\r\xea:\xd3\x8c.\xb2\xcdl\xdd\xc9\x15\xdb8\xda\x84\x9e\xa2\x16\x06\x8d\x8b\xcd\x1ayl\x07\xdf>\x8c.\xe7a\xe7\xf0\xd0\xe9\xc9,\x16{\x07\x0f\x11\x1e\xda\x1ex\xff\xc0\xf6\xb3\xf3\xff}\xb2\xa5\x8a\xb8\xc0\xa3\x05\xe9\xcb\xff 
}O\xf0\xf8\xd3f\xf0\x1a<N?R\xb8(\xb3\xa7\x80\xa1m\xdd\xfa\xc3w\x92\xa8\xd8\xee\x85m\xf5\xcd\xb1\x89k\x17J\xb8\x08\xa7g\x82Y\xcd\xde\xdb\xf6C\xa9\xa0!\x9d\xbf*\xc3\x0c\t)\xd6\xef\xe3\xc6od\x14\xbc\x943\xb5\xcd\x18}\xe4\xfa\',\xf8\xb4md\x8d\xc5\xed\x83\x16\x0e\xef\xd2\xd6\xa6\x8a\xb8\x9dke\xf5\x1b[\x1b,b\x14la}\xf2\x19+\xfb\xdb41\xd4\x9f\xf0\xf0w\xeeoA\xa3?\r\xcb(\xdd\x82\x05\x83\xf5p9\xd6\x10J6\xfaf\xa9\x9b\xc0F\x84xs\xe4d\xa2\\V\xac4\xcd\x80\x0c\xd7\xc9h\x1d\xbb\x12\n|\x1bKt\xae\xed\x1a\xc423\x9b\x0cK\\B\x0f a\xf8\x1b\x8a\x0b\x95\xed\xca\x1d\xda\xc4\x19\xd9r\xfc8\xda=\x9f\xb2\x17\x83\xb3\xc02\x06\x1a\xf0[\x90\x8c\x1eb\xd8\xae\x996@_<\x16\xa6\xc1\xcb&\xddh\xe3\xbb\xe65.\xf5\xe3D\x86\xb3\x80~\xe2\x9d\x9ax\xe5\xf0&\xec\xcaO3\xb05~\xcf\x7f\xda\x80\x83\x0f\x9b\xa6\x8d\x10\xa7\x86\xd6&\x98<P\x86\xb1\xef\xfd\xf3Bj\x03\xc1\n\x9f\xcc\xb0\xb5nv\tW\xba{6\x0e\xa1=\xc0\x9dG7n\xc2\xc1WS\xfd\x8e&@\x9a\xd6RXO\xcdux\xbc^q\xbca\x0f\x87\x17)a\xcb\xe9\x86\n\xfc\xd2\xaa6\xba\xefa\x19\x9d\xbcB\xb8T\x9d\xce\x0f2\xda\x9am\xeadg\x06\x0f\x05\x91h\xf3\xfd\xf4\xbe^\xae\x90\x9c\xb7O\x0b\xfb\x8e\xcb\xec\xceQ7\x92P_\xce\xc0\x7f\xd3\x87\\?\xcd\xcd\x9a\xc3\x1f\xe3\xfd\xf1\xc4#e\xbc\xa6(\xa2\xb5\xab\x85\xe8\x94k\x8d\xf5z-\xd1\x83( 
\x84\xa8S\xec\x11\x02\x80\xb6\xb93m\xc2SC&i\xf5\xb4\xfe\xae\xfe\x93\xfc\xb1-\xc4\xb7)\xdfnXc\xdc\xeb\x83\xd9Vj\xe4\xe8CQbB\x0e\x81\xc5\xa3bY|\x04\x05,\x11<\x1dC\xb3\xfe\x9a\xf2\xd3S\x8d~-4fJ\xe1\xf6\xb8\x8a\x85Lm\xf4\xf3\xeb!\xde\x1a\xcf\ni\xee\xe08`\xd2}i\xff\xe8\xfcs\x05QIu\x84\xb7\xda\x95\xa6y\xb3#]77G\t9>\xdb3\xc9\r\xf9m\xbd\x8c\xd7\xcb\xf4wd\x03\xca\xb5\x91D\xb8\x08\x963\x98xQ\xb4:\xb3u\x11\xe2\x88P\x8e<\xbb\x8b\xe1\xff\xc3\x16\xec\xf3\xe1\xa5\xe3\xee\xb39{\xf4\xd0\xd3\xb1<\x9bV\xc0\x80\x9d,\x1f\xc3\x8b\xdf0\xa9\x88\x14\xae\x84x\xe3\xf5\xcfGP\xd47\xf2\\\xf8\x81{\xf2N[n\xd1Hef\r\x0b2Z\xe3cl\xf4{h\xec\xe1)\x9e\xe7\x86\x19\xf9\xbe\x90\xfda$)\xcd\x88\x86H\xd4\x99*\xe0\xfda\x9b\xca\xc8\xf6\xb0\x95\xbd\x86\xf63RI\x96?\x99-\xea\xda\x07\xd0\xc3r\xe5\x97]>$\xf7G2\x10\x82\xb9!\xb8?\x86$\xd8\xb7\xf8\xfe\xe8ij\x98\xcd\xf8\x9f\xdc?\xe4\x85\x9e\x1f\xcbW>^@L\xbc\x88\xef\xdf\xb6\xf7\xaf\x15+ \xf6h\xf5\xcc\x8e\x12\xbc`\xe1s\x9d.\xda\x93\x97\xb9I\x07\xaeV\xd6\xcf\x1f\xbaO0\x9f\xcd\xe7\xb3){bn$\xa2\x99\xc8D;*\xee\xd9#60u\xd0\x8c\xa8xm\xeaK\xcd\xee\x86\xeb_\xba\xc7\x10\xdb\xcf]~\xc7\x1e\xdf)6\xf7\x0c\xfb\xfe\xe1\x01b\x0b9\x11\x1cD\xd9)\xe4\x86\x81v\xb9#\x0f\x9b\xc9\x92@\x03\x9bd\x12;\xfa\x0b\xbb6\x05\xf7\xe0dI\xe3G\x8a\x9d\x1b3\x96uG\x15\xaf\xa5\xab\xb8S\xe4\'q\x85}^\xee\xc0^\xdd\x02\xe4}\x88U~\xba\x8a\xf8\xac\xc1\xde\x86\xfe->\x01\x1b\x827\x81\xeaP\x9d"s\x80PO(\xc8\xc8\xfeW\xb6\x94X\xf4\xa6;\xd4\x8fz\xb3\x94\xb0\x14ern[\xc9\x94\xa0\xb6\xc0}dX\x8e\xb7ma\xb0h\xc1\xdd\xb1\xefw=\\c\xf4n\xd7\x84\xb2\xedO\xa1\xf1\xf7`\xff\xb1\xd8\xf1\xacD\x80A\xeb\x99\x04#\xda\x1b\xac\xac\xdc\x16\xfd\xe2\x11\xd4x_F\x82\x9a\xffn\x0f.\xe3\xbd=\r\xfd5,U\xb1!\xbf\xc3\xa0g\xd6\x1cW\xa7(\xac\xee\xb1\xe1\xe9\xe7_f\x0b\x83\xb7\x04\xad\xed\x9f\x8e\x14\xb5\x8cM2|\xf1\xd2^\xb5i\xa3\xe2{-\xfb\xd6X\xac\x04\x93\r\x05)\xa5,\r\xf2\xbf \xaf\xdde\xd8\xbd\xee\x1f\x16u\x8e\xa6\xfd\x08\xd1\x83\xbfO\x93\xf3\xc2\xfc+\x16\x12\xe6+\x04\xe8r:\xb3p:\xbe 
\xb1\xcd\xa1\x96\x1e\xa6\xd4l\xf2D~P]\xa4quw\xcbn\xddD\xd7p\x7fd\x9b\xe1\x8a\t^H\xb7\xcf\xec\xd9\xb0\x06P\x92\xa8\xbbk6\xb4I\x9e\xe1\x8b\x7f\xb6\x9e>\xcdL\x08\xb1\xd2Q\xffl\xfe/v\xf8\xe1)\xe2\xbb\xb9\x9do/\x14\xdf\xe5\xe7\x7f\xbeXD\xe0\xb1\xf8\xfd\xd6\xc2\xbdg0\x1f_\x07K\x96K \x90\x81\xf8{\xc8\xa5\xfb\xfa\xb1\x85\x1c\xb0\x84A\x16\x1dV\x93\xca\x18\xce\x13\x18\xad\xa4\xbf\xf9\x07\xd6\x07\xb1\x16\x83\xbcX\xd6\xb2\x93\x80\xc0\xacd_-\xe9\x8e\x9a\x17k\x8f\xbe\xdf\xb5\x07G\x84I\xa9Mm\xbf\xe1?\x98\x08\x04H\xf1\xb9\xfe\xe1\x9e\xbf\xfb\x84\xa5j?\xc1\x95YR\x8c\xb5\xa3\xcb\xa0\xf6\xe3\xc1\xf3\xfd\xf8#4\r\xaa\xdc\xee[\x8c\xe0`\xd8j\x85\x85\x19\xf6\r;\xdew\xaf\x9eZ2\x14)\x16\xf4\xe5.\xbe\xf80}i\xf2\x83\x87#\xe8\xe6\xe1\x9d>\x1f(\xb7I~\x9av1u\xa3\x13b\xa6\x86\xfd/_IOk\xff\x1dK\xb6h\xeb\xe0\xd3\xc3\x93\xc1\x11\x1f<\x1b\xff\xf8\x0e\x0b\x7f`\xc2\\\xb7cT\xa8\xea\xc9\xf4\xcb)\xb9\x8d\xc2,l\x14\x9a\xfb\xb6\xd0|\xd9\xdc\xec9mnc\xdb\xe7\xfd-\x85"\xa5=A\xdb\xe9]\x1a\xd3R\xdf\xed\xe3\xd37\xecM\x9d[TZE\xdf\'\xf7\xc9\xa8\xd0\xa7\xb7M\x01]\xfe~\xfe\x96\xc9\xbc\xf7\xe7yg!EG=\xd9~\xaf\xa5)\xa8!\xc7\x1f\xe1\xf1s\xbc\t\xf6\n\n\xc6\xe4\x9d\xb9\xea]\x8b\n\xa2tnmw\x0f\x12V\xcc\xc2\\^3\xeb\x82\xcf\xc25\xc3\xfe\x12:\x19\xed\xde\xf5\xdaV\x12*\x87x\x912\x0f\x19-\xa1\xd9\xed\t\xecW$\x1bJ\xa7\xd3i_c{\xa2N\t-,!\xed\xb8\xb7\x15\xf6\xe9\r\xe1\r\xde"!\xdf\xde\xffh_\x95\xfdG/\xb7\xd3\xd1\x85\xad\x99\xd2\xc1\n9z\x80\x17\xe6<\xfb!r\xcbJ\x13\xa8\xde\xff\xc4\xc7\x96\x7fI\xe3a\xbd\xfbb\xe6\x11\x96\xf59\x9e\xfd@\xd9vj\xf6\n/\xe3\x8aW\x16M\rOH\x90$\x83\x97\xf0\x7fi\x8b*st\x9eyP\xa1g\xc8\x01+\xe4wo{\xa8\xcb\x91=\x0f\x8cE(\xde\xcd\xad\x99\xa9\xa2=\x1d 
\x83\x12\xa1\x01$\x05j\xe2\x8a\x9f\xd0\x93\x05\xfby\xd3\xe6\xb3\xb8\x89O\xf7p\x8dl\x0f\x9e\x00\xf1\x86\xcf\xf1;\xf15E\x17)"\xa6\xfe\xb69\xbb2\xccb!\'\x1f\xbd]F6O\xbf_(kh\xcd\xf0u\x8c\x0c\xd6\xef\xe1\xce^!J\x9d\xff=5)m\x81\xac\x84\xe8\xb5\xadB\x04\x17\xc6D&\xb2\xc7\xe4\x87\xfbc\x98\t<O:\xb9n\x9b\xc0t\xab6\xdb\\\x97oe\x18\xda\xdd!^~ljX\x86#\xdb^\xe7\xd6\xfbw\x07\x9f\xe5(\x0bD\x10\xe1\x89\x19Y_\xbe39\x1b\xc9\xde\xc1\xab\x1ffR\xa0D\xf0U\x88\x1a\xbd\xa0)\\\xb0I\x8f\xa0\xdcg\xa6E\x08\xfd\x19\xc9&z\x19Z\xf3\x8d\x9d13Z.\x7f\x88w\x0f\xe3pd\xff\xbe\xe03\xcbR+\xbc^\n\xd7P\x96/\xe5\x90RX\x9f\x0bY\xf5\x0e\xa1Z\x89\xcf\xbb\x95\xea\x02R\xeb\x03\xf3\x93}{\xe2\x10\xdf\xfbyS\x96\xa77\xbb\x00y\xc2\xca\xfb\xe8\xc6\x9d\xbd;c\xa7r$=L\x8e\x9b\xb6\x7f\xc1\x043\x82u/p\xb1\xdd\x1d\xc8\xee\xfd\x05\xa8j}\xc7$/\xe0\xe5i\xac\x9c\xf2>:?d\xf5\xe9C\x01\x18q\x03q\xa9\x95\xfb7\xc9\xbc\x82\x03\x87\xc0=\xec\x99e\x82\x942\xf9qW\xcb\xaf77\xce>\x9a=\xe8\xdc\xe1\xef\x1b3\'x\xf6{\x16\xd1RL\x1b\x93*\xbc\x10P\xad\x01\xe4\x8c\x04\x8c\xb9\xec\x81\x99\x9c^\xd0W-/\x8cX\x87P\\\x0c\x93\x16?77\xc4K\xe5\x90\xaf\xf65n\xf0\xcc\x1e\xa9f\xdc0\xa1\xdc\xd0\xfdw\xc7\xcc#\xe54{e\xc98\xcc\x01\xfd`*\xe4\x8b8@\xb6o\x8b\x81\x9f 
\x81\x83Wr\x8a\xc1\x19\xae$\xf6a\x1f\xcd\x9b\xb9\xc5u\x9bB\xe8\xd0\xf0\xa0\x88\xb8\x1a\x84&e8\\<\xbbf\xa2\x83_\x87+\x8c\xbae\xb3dmv\x80g\xfe\xcf\xfd6\t\xec"(P\xed^wJ\x88}\xb2\xbf\x86=<\xc4\x85\xee\x00\xba\xf6\x8b[\xf8\xbd\xcfn\xdf\xde\xa6U\x00C/\xe1\xf7_\xbf3\x03\xdf)\x9ded\x94\xbf\xb4\'-\x14\xe7\xe1\xb3\xc4\xc0\xdd7-\t\x969\xbbe\x12\x8d\'\x85\xa6\xf9\x0c\x91\xab;\xb3\xbdE,\xec\xcb\x0f\xd9\x142t\xaa~\xf3\x08WC&\xd9\xdc\xb1mm\xc3[h\xe1;%\xd0\x99\x12\x0c\x86Ox\x99\\\xf9K\x82\xed*>\xdb~\xb7\x0c\xbb\xda\x19!tJ\xdcki\x1d\r\x1a\\\x91{k\xea\xc8\x00A\x80/\xe5[!n\xd9>P\x12\x90ZL\xd0"\xe6\xa3aO\xcd\xf5\xb6\xe5\x9a\x00m\x9a\x91\xfdg\xeb&\x1d\xa5\xa0\xd5\xd1\x15\x8f\xb1\xfc\xb0oD\xeeq\xff\x91\x94^\x7f\xf4\x1b\x8e\xeb|\xdfb\xcd>\xdc2\xcf\xe6\xca=\xcb\x19\xe8A\x06\\\xa3\xe8\x8b\xabJ\'S\xa1\xc0\xb0\xc4XF\x18J\nvd\x89\x02\x93\xafr\x05\x8ax\xe3\xf93K\xba\xbbRYX\x17\x96\xed7\xda\xe6\x13D\xe9\x19\x1c\xc2\x8c\t\xb7w0y\xbdCT\xd6n\xe3#\x1bx\xec#\xc2\xff[\xa6VM\xf9\xd76\xaeS\x11\x03\xd2^\xc3+\xe1\xd6L\xe7{3\xa4\xd8\xd9.\xee\x1e\xc9\x83@x\x8a3\x8b\xf7Bss&\xae*\xe0-\x10\x15,H\xd3\x7f\x96\x1d(}=\ts\xd6\x01\x88\xad\xbb\x04:\x12\xee\xdaz;A\x9b\xb0T\x00\x0c]\xb7V\x9f\xc0P\x8c\x0b\xd2,m\x93\xa8L\x99\xa9#\xd2\x92(\xfbq\xfe#\x15\xdc\x93\x15\x92\x9b\xd26\x89\xbe\xb7O\xc6l;!\xd9Mf\x97\xea\xdd{\xb3\x88\xad\x8bO\x85\xae%\'\xd2\xdfX\x99K/\x18\x82\xd5\x16f\xea\x04\x18\xc6 
\xb7\xdb\xb6\x14\xd8\xfa\xba\x84{\xe0\xde\xfa\xb7\xe6\xa6#lA\xd7\x9a\xbc2m\x89\xcc/`m\x89\x8f\xfa%\x05\xa2\x85\xb9\xad@\xac\x1e\xea\x00\x13\xee\x11[\x94\xe5Vm\x0f\xd0\x87S\x01v\x00c\x88\xab\x15\xd8\xb1\xe2\x8a\xfd\x1c\xa2\xcd8\xbf`\xe2r\xfc\xe1\xc4^\xaeQT\xdeD_\xdf~zo\xb2\x17\x15IbK\xde\xc6\xbf\xe0\xe3\xc2\xc2\x0b\xac\xcf&<\x11^\x1fJEL(\x16\xea^\x9eq_\xa1\xbb\x13\xa8\x05\xb1\xbc\x03\xb0\x97\xfbT\xcebS6f\xa0\\0E\xc9ju\xd0\xd9B2T\xf6\x12\xf9\xc6\xf6\x0c\xef\n!\x8d\x92\x19\x86\xfdop\xa3\x83\xdb\xf9&\xb6\xff\xe6\x15\x8b\xcf\x08\x952\xdf~\xf7\x04\xfe#\xde\x9b\xcc\xec\x12\\\xd3`\xa1\xadO6\xcf\xbe<\xbda\xfb\x13\xc5\x8f\x0f\x88\xe3P\x81\x95\xc1b\xc9\xfa\xe8\xa6\x05\x050\xcf\xd0\x85P+\xe0ll\xaf\xfaz\xf3l\xc7\xae\x8e5\xa3\x1b\x84\x1c\xa1\x04\xd7\xd7_L\xe4\xca\xf8\xcd\xc1\xb4\xd9N\xcaJj[\x8b;C\xff\xb0\x12!\xd9\xfc1\x03-\xd8.\x06\xf8\x80oq\xdd\\\xb0oo\x1d#!\xc0O\xca\xfe\xd9\x87\xbf\xb3\xfbv\r<K\x9f\x17o\xdf\n\xccH\x07\xec\x0cbRn\xe3\xdd\x8a\xafJ\xdf\xa0\x80\xc9=\xe8;\x92\x9c\xf4\xe7.\x0c!d#_2\xd5)\x93\xf5I\xbc\xe6\xc4\xfe\x90\xa4\xef\xc2\x16\x84\xf8\x93\xae\xe0\x06\xc0\xe5\xb19\xd4(E\xd8\x1d}5I\xac\x81x\xc09wB\xcc\xe0>\xf1r#\xd3~\xb3S8\xc3\x14s\xb9\xde>\xdbU\x0eK\x03\x04]\x01\xd4^\xa8\xe8\xd98IzdZ\xe7\xa3\x7f\x0f\xed\x9a>\x01\xdc\x96\xfd\xb5;\x95\x82M\xca\xee\x8b\xbd<M|xa\xbfY\xfa\x03@\x15\xb5\xe0dbb0\xb2\xc8\x93\xb0\xa1Q\xf2K\xf8T+\xcf\r\xdb\x99}\x81\x94\xd0j\x8c\x99\xee1\xe7\xaae\x1d(C\xdb\xfe\xf6\xad\x19\xd8\xdbU<\xd5g\xac\xc6\x84\x19,\x84\xa3\x88\x06\x9a\xf8O\xfe\xc1\xac\x1eW\xaa\xdfjL;\xe1\xf5[s\xbc\xe1\xa3y\x1c\xca#\x82\xe1ZXgG\xefy\xf4dJX\x9cP$\x97\xc1\xfa\x16s\xd3B\xf3\x82-@\xdb\x96\x8f\x96r[N\x9a\x8c\xf4fc_\x85\xbe\x9fT\x9cU\xc2\xfdt\x7f\x9f\xbd\xbc&p%\xfa!\xf84\xd1\x0bF\xf2}\x8d\x8a\x04\xb8#\x8c*\x8d6\xeb\xcf\xbf\x01\xafe@\xd3\xb3!&\xbf\x83+M\xbd8\xbc\x82\x85\xb6\x1b!\xb3\xaf\xdf\xfe2\xe3\x12\xea\xd7[\xb63\r\xc2LF\xb5\x99\x89>\xc2A&\t\xce\xf2I\x02&\xce4\xb4\xa9w\x1fc\x1d\xfd\x80J\x14\x16\xccw\xe5\xf8\xef\xb1ma\xfe\xe9\xf9\x9a\xc0\x99\xa0:\x87\x8f\xff\xb3\xf8\xfe"\xce\
x0f\xdf\xbf\xdc\x116\xefM\x94\xfa\x1e\xf1X\xff/^\xb0\x94\x90~\x1bV\xab\xfc\x80jV\xb4\xb8A\x7fr\xddT\xd0\xcb\xa5\xf9\xec\xbf\xd8n\xe6d\x88\xdb\xb6\x8b~\xfe\xb6\xdf\xa5\xff)\x11D\xc5?L\x9aBxu\x9c\x7f\xdf\xb2 \xdaw\x13\x82\xc1\xea\x9b0r\x1d0\x81\xec\x87\x05\xe9=j\x0e\xa1\x9c\xb6\xcc\xaael\xb1\xf9\x0b\xd7?6\x9bBk\x90\xb9\xd5)\xbd\x92\xc2\xcd&\x9f\x1a\xff3c\x0e\t\xa1 \xd1S7\xb1H;S=3M\xedr\xe8e\xb7\xf5\x99\xbfs\x84|\xba\x9b\xbe\xa7\xfc\xb9\xb3[b\xed\xf1\x12A\xf7\xf22\xe1\xf4f\xb0\xd4\xde_\x14"\xee\xdb3w\xdd\xad\xbf\xcaP!\xef]\xfe\x1c\x1av\xf7?\x7fe\xd6\xe2w\xe4\x89\xdd\xd6\xb6E\xf5u\xb6\xbb\xb7j\xf7\x83\x11\x0f,\xcb~yoXM\x88\xe6.qa\'&\x03\x83\x01\xd7\x9f\xba\xbf/\xcc\x03\x02`\xa9\xcb\xdb\x97\xaa0\xbc\xf5H@\x0f\x84 \x12z\x8f\xe4l\x0b\xb3Xe8\x10\xda\x02\xf1\xb8 \n\x94\xd2;\xb9g\xbc[Wo\xfe\xb17*sa\x10\r\x03\xdb\r3\r\x9d\xca"\xc6\xedx;&!`:\xb2\xcb\xd0|\xfa\xc8^\xb7P\x81\x1bkIXE\x11=\xdd\x06-\xe2\x14\\bo\xaf\xdb\xf4\x80\x81F1b\xd5\xd8\x03u\xc9c{Z\xbcs\xd7\xbd\x06\x86\x15\xea\xfb\xf8\xfd\x17\xab\xf2J\xf8\xbdl\xf2\x1f\x9c\xf8\xa4\xdd\x00/\xdd\xc8\xf9\xc0\xf4\xc1S6*\xdc\xb2\xc2QT\x15\xde\x0fa\x1e\x02\xbf\xbe\x98R\xb0\x10^\n0K\xde\xcc\xbc2{\x1d%S\x16\x16@\xfa=t\x8f@}\x0cQ\x8e\xafL|2#\x86\xcbG\xd9U\x86f\xa9\x12\xac\xd8\xbc\\\x88\xb7P\xfa*\xc6O\xa2\xf39\x8b\x05\xa1\xa2\x84\xac\x89W\x8e\xe9% \x015y/\xc5>\xae\xf8\x07\x06\xaf7\x01`hha\x1c\x00\x99\x08\xd5cT\'q\xdf\x90 \x02s/\xa5\x1d\x9dD\x19\xe1J$;\x0b\x13AQ\xa3<\xcb\xee\x10\xf3K\xbf\x8d\xef\xde0k\x1e\xdc\xa6\x19\x9d&[3\x0b\xd4\x0b\x80\r\xe9:\x9c\x84B\xe0\x90\xc4\xb6\xbe]{\xb5:&\xea\xd5\xaf\x8f\x7fQ 
\xde\xbd\x95\xe1UI\x89@{m\x12\xc2\x08\x8a\x91\xfdO\xcbsh\xfa\xbaf\xf9\xd7U3S\x84%\xb2\'\xc2\x9f\xa8\x1b\xb1-\xb8\x1f\xd0[\x15\xff\xbd"\\\xec7t\x00u\xb7V\x98w\x19\x8fI\xa0\xfb\xff\xa9\xff\x86d\x0br\xf3={$[\x97|\x9d\xff\x1a,\x03w\xd9\xa6\xc9)\x9c\x8e\x05\xc8\x8f\x15\xbe\xe3\xfe\xd1,\x98\rDV\x11D\x84w\xe6oF\xe1\xcc\x81\xfd\xfeH,\x0e\xde\t!\xcb-\x0bm\xe3\xc3\xab\xf07\xedSK\x92z\x88\x0e~\xabU\x81\xdb\x83\xad\xe0\x9b\'\x93\xaf\x11)\x94\x0f\x07\x19d\\\xbd?-\xc01\x83"\x01\x1b\xf6\xed<n\x90[\\\xd5\xb0\x8c\xb7\xb6\x7fn\xae\xa3`\x82\xbf\x80\xe5E\xd9\xd4u[\x87o\xbf\xf3\xd1\x8e7\xaa\xa3{\xa8F\xb6\xbf\xc96\x80\xbdo6\x81x1f/\x14\xaf\xf1\x9a\xf9\xe2\xf8\xb9\xd0\x0e\xc4\x8de\xfcx\x05\xff\xdc\x87\xd5\xfa& \x1f`I\xd3 \x0f\x06D\xd5\x02\x8f\r\xf5\xbbKo\xce\xea\x14\xe3\xb9\x13[-b\x17\xf1=\x11`\xe8\xbf\xda\x87\x1f\xb1YWeb\xb98\'?\xee\x9a\xacE\x1e\xd7m\xc6\xae\xe0\x99g\xcd\xe2QZ\x81JF\xdb\xb9mf\x14\xaf \xec\x0f\xd7lq\x9dJ\xe0\xf4\xc5N\x91e9\xbb\xf1\x18\xb1\xca\xcb{\xe7H\x95[m\x1f\x89\x02\xf9\xd8\xff}\\\xc6\xf3\xa4hL\xac}7!\x85q\x19R\xcc(\x9eE\xe9,J\xdd\x87\xa9\x01a\xfa\xcfV\xa4\xe5\xed\xe7\rDh\x142\xfb\xf8h\xa1WE\xbfk_\x99\xfe"\xfc)\xcb_\xdf\xc2\xf4s\xd5\x8f\x99\xee$\xbdY,\xd8\xfdN\xbe;b\x82X\xafB\x827M\xf8\xa1.L\xd7\x90\xc7\xd1\xef\xc1_\x82\x875R\xa2#\x99\x90\xd8\xc4\x9e\xc5\xa2\xfa\xcb;\xd5Vp\xdfl\xc6[\xe8\x11\xf16\xd3\xaa\xf1\xc6\xd5\xc1\x07\x1a\xc1\n\xcf\xa7=\x1c\n\x9d\x0c8c\x930G\xd8\x08[\xd3\xfd\xfe\xa6|\x10\x06|d\x88!\x15\xbf\x10e\xd6\xaf\x87\xa77\xbc\x85\xab\x91\x9b\x1d&\x92\xd4\xbc\x840\xf7/\x8ea\xe5\xdd\x92=l)\xb8\x98D\t\xff\xcb\xe2"&\xa1\xd1\xac\xdd\xb7w\xcfmK{\xc0OM\xaa\xca\x03s\xd3`\x9e\xa3m\xe6m\xd7X\x7f*^\xd8C3X\x81\xf1\x08\x07\x87\xaf\x1e\x99\xd1\n\n\x16\xe9o\x1b\x15y\xdb2\xad.\xb0\xef\x10/!\xa7o\xe7\xcc@\xb7n\xd62\x17\x08O\xd1\xcfBY\xde\x9b1\xc2\xde\xfbR^\xcdM\xdco\xbf\xfd\xda\x86\x85\xbfe\x8b\xce\xf8\xbaS\xa4\x94\x89\xa0\x88\xb8} 
)`_\xcbX\xc0~\xe8\xc6m\xe7\xc8/\x12|\x1d\xba\xf7\xe6\x00\x88CE*\x1f3#\xc05\x00\xb9\xb1\x84\x02\xdb\xec\xb4\xc3\xd82.[\xf4\xa9\xba\xa0\x115\xfd\x17\x13\xe9\x1a\x1b\xc6t\xa2\x9c\xfc*\xdb\xdcz{\x97Z\x08\r\xd6\xcde3\xf6\x800\x8b\xa3\x80\xff\xba\x92\x0c\xa8W\x1d\xdd\x10\xd4\xd6\x98\xaf\x81\x9e0\xd5+\xef\xce\xd9\xaf\x17b\x164*A\x91\xee\x90\n\x7fc5\xad},-\xe8\xfeb\xebf\xcd\x92F|\x81Y{\x0bR\'\x9c\x12\xe8D\x02\x97\xb7\x86\xa3@R\xbd\x90\xd6\x88\xf5\xf6[\xc0\xa9\xfb\x07\xfa1\xf2l\xaf\xa8\xb5\x14\x8cQ*\xa4v\xe1\x00/ud\xd2\xd7\x81\xda@\x06M\xff\xa4\xbdao\xd3\x973\x8dy\x13\x9a\xfd\xc8\xee\xe3P\xf9l\xea\xdb\xdf\xbb\x17BS\t\x17\xfd\xb5\xadu=HX\x0cr\xf1Y\x14\x17Xbv\xcd[sP\x01\xb1 \xc3\xf1\xe4\xb5\xe5\xe6\x03!\x89\xc9\x87H3\xd4\xe2l\xfc\x81y\xe8\xb6~\nM\xaa\xed\xa2\xa1\xbf\x8fl\x89\xc9\x1fI+\'\xa3\x7fV\xf9\xbf\x9f\xca\xaer\x95\xb5\xa1\xe5\x90\x01n\xd9`w\xb1\x05\x8ex\xe8\xf4\xd2/\xbdP\xbc%\x88\xb9\xb5\xdb\xd1\xc7\x0e\x8c\x85N\xf2\x93^Z\xd6R\x848\xa0\x11\x17t=o7\xf3\xf53e0\xf0\xf7\xb9\xae\xd6$\xf0\xad\x05v\xa3\xb9e\x92\x1d\xa4\x97\x03I\xa4nWL\xa5#\xdd\xa9\x1e\xc5R/-RlE\x13\xa5\x12\x85u<\xd1\xb2\xb0\x82F\x10\x86\x8b^J\xaf:R\xb9\xaa\x83\xfbv\xa9\xd1\x12\x1dO`e\x06\xbc(_\xb3\x8d \x8f\x0bK\xd34\xe3X\xf8\xb9{n\x93\x05|T\x0bX5\x04\x11\xa0\xce\'c\x85\xc0]t\xa9%M&t\x91\xb5\xc2\x1a\xd8\'J\x95\xadrU\x96\x03!g\xee\xab@\xc8tFq\xba\xfbA\xf6$\x88W$\x96\xda#\x8e\x14\xf5\xfb\xcf\xb84=w\xfd\xd6\xa1|p<}\xaal*\xc5\x9e;U&\x11|\xfb\xf4`\xd9v\xbc\x13\x8e\xc0\xacU\x89\xe1\xc0w%C\xb4|xol\xf1\xdcl\x1fb:\xa4,to\x82\x91\x18H\t\xbbe\xe5\x8f\xbc\xb3o\x07\xd5\xc1\x92\xe9\x0fcXU>\xe8k\xeb\x1ff)"\xac\x1d\x11\xdc!\x1bd\t~\x0e\xfb\x0b\xd0\x9ct\t\xac%\x12\xd7 
y\xaa\xc5\xcc\xebs\x02\x14\\\'\xc0\x86\xf13z\xcb\xe3\xe7\xc2\xcea\xae\x98s\xd6\xe5\x96=X\x9b\xbe\xff`\xefV\xaa8\xd9\xa6_-\x9e\xc6\xe2\x12\x17O\xcc\xf0:1\x8b\xea\xbe\x07e\xb1|\x85\x7f\x9e\xee\x9a\x17j:%1\x00\xd9\x9c\xc3\xcb\xb8\x1d3!\x91C\xb5\x12\x96\xd6\xf9\xe7\xef\x15L\x11\x8f!\xcc\x9e}\x04s\xa3\x1e\xc8\xa8\xacaN\xca\x00\xf9\x15h\x191\xd6T`u\xcd\xe0a\xd7\xaeP\x87[\xc7\xe6\xe3CD\xff\x06\x1d\xeb\x90^\';\xdbf\x03\xa8\xed\x89\xed\n\xe1\xc3\x08f\tuK\x02nA\xfb\x03\xe3\xc8\x92h\xf88\xfdZ\xd0\xad7k\x02\xdbm\xe8\x04\xed\x93\xea\xbcm\xfe\xca\xccg\xad\xb2\xbd\xcb\xc0\xecs\x03\xd3\x13D\xaa\xa8X\x85,\x92\xac\xf5\xd6\x9e\xbf\xee~[\x90\x11\xd2;&\x16\xac]\x84\x17I\xf4\xeb\xef\x84\xf2\x89\x069CC\x94\x91\xa6x\xe7\xbe\xaa~\xc6b{7g\x9bA\xd8N\x0f\xda\xe6\xdf\xa5\xd5X-b\xa4I\xb2}j;\x10 +\xdc\xdb\xc4\x16\x04O2\xd2\xb3\xca6\x96\xc1$\xf9\x80?\xcdP4\xcd\xa1\x199K\xd4\xaf\xc8J\xaa\x8c\xdc\xe7\xc8\xd1k\xb0eZ\x12\xf4\x189\x12o\x95\xfdf\x1d\xbe\x9d\xb2\xd0\xaa\x0e\x13\xe7J{k\xa9n\xfb\xf5\xf2\xc6!I_\xad\xfcC\xac\xc5Jy8\x852\x97\xd9\xc95$\x89\xab\tQ\x96\xa5\x92\x0c\xb8\xd6r<\x92\t\x05\xf5\x8f\xe2\xb3\x83\xf3\xcc\x16\xc7\x8b\xbe\xc5\xf07\xbf\xf7\x01&\xd9+\x97a\xd6\x06\xef\xd0\xbf\xc7\x9b\x01\x86\xc9\xa6\x1fn\xdfz\xb8f\xc6\xc0\xe70E\xfdO\xb3\xa4\xe4\xe2\xb9{\xef`\xe5\xb0&,^\x0b;\x82\xe3"\x118[2\xe7\xe2af\x89.\xc9\xb0\xf7\xc9<h\x87\x81\x846\xd6\x04\x00\x19\xb4\xf3\x8f$j\xfd\xc6\x13\xac\xdf\xf7\xab;\x9fV`=\xbe\xcbA5v\x13\xa6~Cl\xc2\x10&\xcc\xde-\x06>gl\xcfK\xf5&U:\x15$\xdc?\x81\x07kY\xbd\x82\x1b\xf6\xebP\x9aS[q"\x90\xb8\x07\xf8\xc5\xb5R\xb7\x86!\xfd\xb7\x15\x08Q3\xa0\xde\x10sO[\t\x9b\x9d\xdd\xf9\xcf\xccU)CZ\x13]\xf5\xe6\xf6J\x01\x0c]\xfa\xd4v\xa6\x06\x16\xd4\xa2\\\xdc\x913\\L\xed\x9a\x9a1\x02\x1f\xb2e\xc2\xbc\xbb\xcb\x03\xda\xeb\x84Y@\x1f\x98a\x10\x19s\xc9\xf8L\xcd\xfb\x1e} 
\xbav\xdd\xc2\x82:#\xcdR$\x8e2y\xfcuuwI\x8a\x84\xfa]\rsI\xac\x86\xb4\x93\xe7C&\xb7*\xf8\x95u\xc0\x80\xab\x11\x84,\xd9J\xf0\x82\xdc\x92\xa6\xff\xf8Q\xe0\x93\xe8w]\xfb\xf6\xc0\xbe\xd3&\x07r\x86\xb1\x19\xf9>\xbf\xc5GJ\x80\xd3\x04\xa4n\xdc;\xe6\xcc\x0f)\x9b\x07v\x8d\x9eQ\x1f\xd8P%\x9c"HPm|\xf5\xcb-{\xb0\x86\x8c\xad\xd4\xa2z\x9fu\x7f\xf6`_\xf7\xcc\xd9\xd7\xady2\xa3\xa0\xdc\xb7\xc4\xc7E\xcb\xf6H\xac\x19\xc2\x95\x03;C\xa0\x10\xf8Z]}r\xe5\xfd\x1avF\x01\xbd#n{S\x81c\xac\x94-\x89\x92\xc3\xef\xb8bPm\x94\x98G;Ftv]\x90Dv\x19>\xb5*\xf5\x10\xcb\xcd\xcd\x051\'kL\xe9J\xb8\x1a\xc2\xc5\x11\xean\xc5\x7f\xc2:T\xaa\xf4b\xdc\xd4\xf5\x8a)(\xd3p\xb2N>\n\xc3\xca\x1f\x18\x03\xfa\xc0\x84\xba,\xff]\x83\xf5A\t\x12\xc6\xae\x14\x1f\x19\x0f@\x97W_!\xdb\xe9\xf0\xd9\x18\xf4\xac\x19h\x0f\xe4\x8d!1\xe6\x92\x90"\x0eO\xd0#\xee\x06 J Ge\xf6\xde\xafb-\xf7\xff\xad|x\xaa\x94%\xb3\xd7\xe3\xa5\xca\xcf\xbb\n\x07\xe2\xcf\xf1\x03!&\xc4>`\xa8\xe3\xa7\xb2\xc9\xf1e\xa6\\\x8a\xfd\x89\xd5\xeb\xebo\x16\xca3\xa0\x19j\xd2\x8d)r)\xdbN\\P\x7f7b\x1a\xb6\xf1\x93\x84\xfb\x00c\xe1_n\x98Z0$ \xedf~\xcfV\x811_\xf1l\x88\x82\xdeZ \x1e\xb1\x1a2V\x98\xb5\n\xac-\xbc\xa9m\x03\xeb\xa1\x8e/\xa8)\xf2(?\xd4\xf7O\xa7\xef\xdd\xb7%,\x89"\x824\x07\xf8\x81!hS]\x90n\xb1\xd9eO\x94\xf8\xc6\x81isPp\x039a"(\xba\x80K\xb8\xb4\xfd\xb8D\xa8 \xaa\xe2\xa2?\x16\x17\xe1c\xf0\xe4\xac*F\xeb\x92\xf7X\xc1i\xfd\xf0\xb06\xb3\x16\xc5\x07\xb0\xc1\xa8\xac\xf7{7\xcd\x8d\xd4\xd9\x86E<\xf4\xba\xcdsE\x19\xf1".\xf7\xcbD\x80\x0c\xaaR\xeb\xcb\xd0\xf1\x9f%z$\xbb\x14\xa86\xd5-\xca\xda(\x190a,n3c|\xca\x98p~Q\x00\xa9\xa0\xdc\x96\xa4\xe9\x15{E\x17=0a.U\xads\x92\x80\xae_@\xb1\xc2m@\x04\xcb\xf1\xa5G\xb0\xb3\x1b\x95x\xe07\xb6\xf1\x18\xb0C\xe9\xb2\x9ci\x7fb\xde\x8c\xa9\x11\x13\x98\x9b\xa6U\xadP\x12\xe4\xf9\x91\xfbc&\xa0\x06\xe9\xcd7?\x87\x12\x11\x11\x94\'7?\xd8\xe31+g\x18\xf7\xc6\x8cj\x1f\x14\xa8\xb4\x07\xef\xefB\x12\xe6\xab\x8b\n>\xaf\x1d\xee\xcc\x91\x05\x84f#\x84\xf0\x17\xf5\xe2\xf4\xcc\xf2_\xa8e]\xcec\xbfZB\x99\xad\t\x9d\xf3\xab\xe6.\xc8\xf4\xc4\x86\xb7 
\x95\xb9Y\x94\x08\xebY\\\xfe\x18Bx\xa6\x80\x9c\xb4\xf3\xf7\xa8\xfb\xe5+JO\x9a5uj\xb4t\xdaA9\xb6\x7f\x04\xc0\x01\xbe\x83PM\xd6\xbd\xb6w\xadE\xff\x1d\xa5\xac\x07\xb2M\xf9\xde\x026\xf1\xd1\xfc\x1d\xcbfC\x82\xcf\xf5\x9f\xa8%\xc7\xc2a\x933\x90\x9eXf\xe8m\x11\x87\xfc\x16\x02\xd2\xa2]$t\xa7\xa6\xa2\x0c?\xa2}\x93;\x92\x87\xc2\xe2,\xc3\x10\xb1-{\xe3\xb3\x182\x1d>\xee)B\xce\x94\xf8\x17\xce6\xc0\xe9c>>{i\xcb\xd4K?\x08l0\xf9Z7\x1d\x8c\xc2\x9bY\x89R\xaf\x10\xa9\xb8\xcb\x00\xf0\t\x16w\xf7\xbb\x85c\xad!!\xa6\x8b\x04\x05\xeaO\x87`\xbf6\xbf+\xd1\x94\xcc\xc2x\xf2azS\\v\x9d\x15\x1f\xf5\x089=\x9cb\xc6Tx\x06\xf3l\xa1\x89a\xd0*\xd7\xdd\xbcc\xcf\x89_*\xd3D\xb1\x0eC\x91\\p\x06\xdd\xcd\x1b\xc5\xbb\x88\xdd\xa3\xd4\x0b\xbd\x89\xcc\x06\x95\x85\x9e\x9ePC\xb6-\x08\xca\xb7\xc9_-%\xc9i\xf7\x95\xb1\xf7\xc2:\xe8g\x94\xc5\xf5\x00K"\xd1b\xcat\xee\x11\x1a\x14\x92\xab\xd5\xf5\xab\xf4El|\xfb\x8f\x0e\xfc\x14;\x0cL\xaf\x17\xf7\xaai\x11\xbbe@^-\xc1\xbfDTh\x86{{.\xeeH+\xe3\x1a\xcc41\xcb\xc0\xef\x84\xd5J\x95\xab\xbf\xe6<k\xbf\xf3\xdb\xbeE\x93\x16V_H\xfc\x13e\xee\x8a\xc5\x88R\xd5B\x0c\xb9\x1d\x85 \xe8>{\x7f> !k\n\xc3{\xf3\x00\xad\x1a\xa0\x9c,c\x9b\xc2\xcb\x16\xa8\x87& \xb05\xe7\xdfMc\xc86.\xcc]\xb1\xe31\xb1\xe8\xa1Q\r\xda\xa7B\xdcJO(\xa6\xdek\xd6m\x19\xe1\xf8K~\x8c\x11\x00*\x8b\xd9\x1b\xdb\xb1RB;R\xfb\xca\x1ct\x88\x9f?\xba\xb6xm\xe5\xc1[\xbb\xads\xaf$\xc9Nq\x9c\xd8k.\xdf\xdc\x82b\x95\xfd\xda<\x9e\xee\x95\x15\x9e\xa1\xac\xafw\xed\xa3xd\xfa\x1c\x98-\xe4\x86\\M 
\x18\xbe\x11\xe8\xcfX/7\xbdo\x04\xf83\x14\xf7\x07\x06K\x94\xd1\x1a\x1d\xb1m^\xed\xf2\x9f,\x8c/Av\x9e\xc3x\xdd\x13\x14\x06\x17ScS\x11\xae\xb4\xfe\xd6\x15\xf9D\xa4\xb5\x96\x98\xd8\x07]\xbc\x83Z\x10z\x82Z7l\x1a\x15\x04\x94B\x08\x127\x9f9\x15\x96\xb9\x01\x85\xa1\x1c\xbf\xf6\xe6\xe5%\xce\xc0\x82c\xc1J@a\x167\x8aa\xa4Q\xc5$4\xe5\x0e\xb1\x8b!\x86c\x0e\xd3\xe6\xf7\x1b\xf1/B/\n\x0c\xab\xd8n\xf7\x97\xbdh=z\xc8\xeb\x16\xf8\x91\'\xe6\xed\xeb\xe0v\xce\xec\x03E\x7f\xfe\x12\n\x07\x06l\xfd\xea\xc7\xb9\xa9Rg\xe1pn~\x9f\r+l\xd6\x14l9\x14\x8a\xfary\x13\xca\xb9\x04\t\xdb\xbeaV\x87~=\x9b\x17bNd\xf2\x8d\xc5#\xdd`Hp\xa9\xf2D<\xc8RHk;dd\xdd\x9e\xedi\x99\xdb\x0f\xc9\x1c"\xb8\x9d\xa9\xc2Q\x9b\xfb%\x99$\xa6t\x99L\x86\xa1\xae\x96\xc86\x15\x8f\xab\x83\xbf\xa6\x06N\x18\xb6k.\xe5a\xb4\xeaG\xd33\x1b/L\xcd\xcb\xf2@\x98f\xb4=e\xe2IZ\n\xd1\x8a{X\x98\x93\x1b\xa6-x\x01\xdf\x83A\xd1\xfc\xbd\x06\xe6[@\xc5\x9c)c\xf1D\x81;"\x9b\x9eY\x9c\xdcM\x88\xc7\xe7v,3\xeb\xc5L,\x07(X\xf4\x1d\xafBJ\x18<n\xb3\xad\x08\x87(+\x9b\xe1q\xb5\xfaMjQ0a$\x16\xe4\x18u\xff\xc0\xa5b{t\x16\xf0\xfa\xe6##\x0bT\xa8\xca\xab\n\xf1k[\xa42\x9c-\x1d\x1f+?!\xfb%\x18jB&Fdr\xc9\x9ed\x06\x04\x1b\nb\x1cb\xb5\x94\xdd]\x8dmC\x04\xdaMh~*\x1f\xe7\x15\xc8X=\t\xa6$Q\xbac\xef\xd9\xc7\xe1\xb7\xa2^\xaf\x9c\xb1L\xb7\x85\xc7\xaa\x9b(\xf8W04\xd1\x9b\xf9W\xb0]\x8f\x84N\xe0\xb5\xbb\'$P\x17k\xb6($\x80e3_\xee\xbf 1\x9c\x85\x83E\x14r\xb21\xd3\x0b\xeamf!\x19\xb1\x86\xc6\xe4\xa5/\xef\x90\x11\xc4\x12\xef\x8e\xa5\x0eew\xcfR\x05\x16\xc4\xe4c}\xf6\xe4\x01C\xec\xeb2h\xe2\xc9\x8d\xe4\xac"\xbc\x9d<1#S\x8a\xec\xd5\xf4\xa0\xea4/&*k/\xaf\xdb[\x0fL\xeeKQ\xc3\xda\xce\x03H\xf5\xcb\xf6XV@\xadD\x9e7\x07\xccf\xe9v\x17 
E9\xaf\xe86\xb2e\xb3\xce\xa3\xa0gnU\\\xed\x9a\xc7\x7f\xec\xe1\xc8U\xa7[\xbai\xd7!C\x01\x05A\x825\xfd\x8c\xf0\x03\xc6\xda\'\xa2\x081\x0cF\x1d\x8fpka\x0f\xc4\x16\xf5N^\xa7M?I\xe8\x11\xfb\xe7\xa6\xfc\x84\x85\x18\xe1\xfd\xfcl7cg\xa3\x8aQ\xf4B\xb1-\xb2\x11\xc1U8\x80\x91G/>\x19A\xc8&\xfd\x87\xe5\xbd\x13%\xd4\x9dp\xb5\xd6\xf5\xf0\xa6\xec\x0e\xc0\xce\xb7\xd7$@\x8d%\x04eY\x80Z\x94\xfe\x9a\xbfg6\xa2S\x00R\xba1\xa5\x90P/@3\\]4\x01\x91Q\x1c,\xfc#$\x82\x07\xf0J\x02\t\xb4g\n\xf5;\xe1\x97\xb0\xbe\x864\xdb[\x07\xc3\x8f\xf0LWW\xcc\x82@q:\xa5Z\x91*\x97\xe5(\x9b\x1f\x85\xbaS\\\xe7*S\x8c\xd9\x84%\x0b\xff\xfbz\xd3\xe4\xb7$\xc5\xfb\x1b\xb2\xa6\x18I\xb2\xbb\xa9\x08\xcd\xa8t\\l\xd2\xac\x91a0\x8b\x8d\x9es\xa1a\xd0\xfdc\xc1\x9b\xc2\xe7\rT\x9e\xb5\x88\x8a.R\xf5\x90n(\x16\xd1\xbe\xe2\xbd\xcb\xcf\xc4}\x0e\xec\t|\x8f\x17\xaa\xe7\x97\x84\x08\x88\x03\xc7`K\xa6(J\xff\xd0)\xc2\x19\xd5\x1b\xc4\xc7\xc1\xc3\xabcbT7\xd0,\x92\x1f\x81\xefV\xca\xb8\xb7\xc5.\xa5\xfd:\xd0\x16L\x1b`\x0fY:\xa6XR\xbc\xb6\x08\xfaP\'\x1b\xe6R)J1\xbcx\xffF\xb9\xaf\xd8\x06\x96\xc6\x9a\xa1\xa2\xf9\xcf\x84Y4\xd5\x05\x91\x91\x18}\x7f\x9ao\x1fZ\xb0\x1e\xb1\xa2\xd1\xfc\x990\xfd)Q\x99\x1f\x19\xe2\x03p\x98\x9fV\xd7\x1bKN\xd91A\xa7\xf2\xd1$\xb5\xae\x1f\xddV\xbc\xd3\x9b\xa4\xc1\x10\xfa|]\xe6\xb6>\xbb\x8c\x8f;\xbdJ\xd3l\xcfR\xb7\x8e\xef\x98{k\xd9\xcb\x16\x9bp\xc0\xf5\x93\x01\x1d\xbf\xd9\x9e\x84|>\x1e\xab\xaa\xc5\x9b\xa6\xfd\xb4\xf5\xc2D\x82\xf6\xa9\xf1 
\x1b4\xedK\xbcx\xfe\xe6\x8f\xadY\x99\xddV(\xeb^\x9d\xc9\xd12J\xccv\xb0\xb2\xef-\x9an\x0b\xb3\xa1\x0cq\xf1^\xc5\x93\x94[\xb0\xa2\x90\xb03Aa\x89\x89\xce\xa7\xbf\xf5\x02\x0b\xfb\xe0iU\xddT\xfe\x94<\x92\xc3\xcc*\r\x9cx\xa5+&\xb6\x1f\xbe\x10-\xc0\xd5K\xb7\xc6\xcd\xdb\xb2%\x80\xb6(G.\x86\xa9\x05L)\x19\x93\xde\x16\x80\x06=\xa9\x7fMo\xcf[NY\x02\x01g\x82\xa3\x9ax#\x9b\xd3\xaa\xd4L\\Qq\x03\xdd\xbb\xf3/m\xcb\ts\xb7\x86~\xd0\x91\xc3\xdeu\xddM\x05\xb0\x8d\x05\r\\\xfb\xf6\xf7\x869\xd6\xda\xb3L\x7fr\x86\xfe\x9a~\xec\xa7J\xb8\xe4/B\xc1C1.\x9c\x8bd\xb3\x9bg\x8a\n\x866\t\xa6\xc1IuA\xa9\'D\xd7\x0fH3L\xb2;\x9f\xf8lO\xcb\xd0\xb2\xa5& ~E\xe7t\x08Go\r0\t \xcc\x90,\x8e\xa8\xac\xfc\xf0\xef\xc5%\xf2\xd5\xc8\xb8\xb2U\x99\xddu\x8e\x9d\xcc\xe5\n\x98\xec\xa1{FC\x08\x07X\xfe\xfed\xca>\x00\xe1l\xf3\xcd\x19u\x00\'\xa0ej\xaf\x82\x04\x9f\x91.\xb8UN*p\x8eL)\x98t7\x93\xb4oG\xd8\xbe\x89\xca\x88\xc0\x81\xc6\xe9\x1d\xca\r\x05\xc5\xe7\x8d%\x05L\xe8b\x8b\xc2m \xc2\xbc\xb2\x7fr1\x80?\x80~N\xd8\'\x93c$\x03\xfa\xd4\xc4\xbb\x8d\xd9}\xd8K3\xe0\xabP\x87\xaf\xa3_\x04i\xcb\x85\x97fx;\xea\xcc:\x91\x9d\x9bf\xfa`\x93k\x14[H\x16\xaf\xe33\xcb\x80\x8f\xaa\x833\xedL\xa3\xdcW\xb4\xaa\xd1/\x1d\xedTj!\xfb\xa4\xebj\xd7H\x1b\xa1\xeb\xf9\xad\x9c\xcb\xdf\x1c\x03L\x8al"0"hl_\xf1\xf8M\xb1\r\xd5H 
\x06H\x04Q\xa6\xe9\x11*B\xc5K\xd1^\x18\xd83\x0bwW^$\xafO\xef\xff3\xa7M\xad\xccm_;cBV\xb2n\xe4\x10\xa3\x80\xc0\x14\x0bD\xfb\x9e\x9d\xab\xe9\xc9\x87\xear\xda\x0cC\xe6\x87\xc41\xc3\xae\x82kg"\xee\xc5\x96q\xfd\xad\xa7?\xec6\xac\x12"t+\xbe\x9d`\xd1\xf3E\xc5\xafq\xa5\x06ZT\xf2K=P\x046\xde\x00n\x95`\x94Q$\x9c\xc2\x12(F\xf9\xf4/X>1\xc2\xdb\xf0\x84\xfc\x8fx\xdan\xdb\xa4\x93\x7f\xb1\xd1\xd7V\xec"Q\x92>\x11\x8c\x0b?\xe4k\x13\x0cD\x98\xc5\x9b3\xc9*\n\x8eV\xbe\x14\xae\x16\xb1\xfe\xd3\xbcz\r\xbd\xcf\x0f\x95>\xf7\xaa\x8f\x13\xd8M\xe7g\xafr\xa5\x8f\xc7\xde@\x8f\xc8\x18R\xad\x8b\xcd\xb1\xfd\xc6"^q\xf9\x87P\xb4\\\xeb\x0e\xcf\xc6\xeaK\xba\x81`s\xac\xba\xe0\xbb;\x95\xd2Y"(\xd6\x96-\xb1g\xa9\x13\xb2\x83\xc6?fw~\x96\xe6@[F\x93\xdd"\xf1\x02\x01\xca\xa3\x0c\xd0\xa67^\x98H\xb0\x1b\xbe\xab\xbf\xe7\x97.\xd8\xab&\xde+\x7fD\xe4\xc1F\xc8\xee\x8c\x94Z\x087\xeb/\xda5\xd7\xe3=\xcbE\xe8P8\\\xc0\x96\x17\x16\xcc\x97\xca\xfd\xcb\xe4\x87B,hw\xf4`\xb2\x12}\xff\xf9\xdc\x02l\xe6\'{\x96\x0e\xa0\x7f\x93+\x82\xf2\xe5O\xf9\x05\x8e\x01\x8aN`*1\x1d(\x14\xb7\xdb\xb9\xdc\x0c8\x1bg\xc2=3m%\nB\xb52yN\x08(Y\xdf\xf6\xd98\xf2a0\x18\xf8>}V\x16?8\xa8\xc4\xbd\x07\\\xd7<\xc0mA\x00m\x9e\x1d\xdbg\xa0\xaeQ\x1e\xd0\xe1\x8a2<\xb9&\xb5\x05\xb2\x0e3\xc6\xa88\x03I\x10\xca\xd4\x8d]\x15\x18[\x08\x1a@{\\C\x86\x08H\x94\xfd\xe1-A\x9a\x90\x910v\xed\x95\xacL-`\x13AZ\xf8w\xf4?\x08]K\xea\x14\xad\xdd\x17\x0b#\xd8[\x10d\x1e\x14\xad\xd6\xa2&\x91P\x94_\xc6\xf5M`!J\xd8o#-k\xf5\xc9\x92\xdd\x10\xe9\xbb\x7fF(G\x03?\x1bp\x8b\xb5\xc7[\xf0%\xe5\x91\xec\x8dj\x89\x03\x1f\x16\xe2\xe2\x00\x8b\xb7\xed\x1c\x9ai\xc2\x89\xc9\x17y\xcb\xc5eh\xc2\x9a\x87j\xc7\xb4\xa1\xe5\xc6\xa2\xe2\xa3N\xbc\x0f\xea\x93\x9bW|P<\xab\x0e\x145\x13A\x9a\xfd\xb0i\xef\xd6\xab\xee\xe1\xda\xc7\xb7\xabJ,B\\\x92L\x98B\xa2\xd8a\x0cJ\xc7>\x010X[\xa1\xdc\x8cC\xc1-k\xca+\xe6!\x19\xd2\r\xa4\x8d\xe4\xe8\x8b\x1e\xbd\x1ct\xcf\xecc\x879 
d\x17\x17\xb7Vo]\xf9\xae\xdc\x16&*\x1e\xcc[\xa0\xc5?\x82y]\x9c\\6\xbb\xe6\x87,Y\x99|#\xe4\x93\x8c\xe3\x06\xc5\xf2\xf2\xf6CA&\xd1\xbfU\xb3\xd6\xc4S\x13\x16\xec\x7f\x997\xe8\xf4+-XX\xc1\xdd\xd3\x1ef\xf6w\xcf\x97*\xafQ\r\xd1\x83\x19\x1f\xcc\xdbN\xe3\xb9\x08\xf0g\xd0\xf6z\xa2\xd2D\x9d\tS\x16_\x88~\x139C\x9c\xd8\x9b\xd6=W\xb6\x11\xee\xdd{\xfaT\x80@9\xec[\xad\x00\xc5\xaa\xfc7\xec>\x9c\xa1\x16\x7f\x85\xdd\x1cP\xf8 ^\xb2\xb1\x00&\x7f\n\xfe"\r\xed?\xe6\xcc\xd5\x06\xc8k \xf2\xb0^\x8a\\\xae\xf9\xf2\xed\x1arlL<\xe05;\x01\xc9m/j\x14\x93\xa4\xce\x92$g(m\x85\x8f\xdc\x13D]\xa6\xdf\x98\xe5\xa6\xe4g DB\xd3X\xcd\xcac\x9d\x9f]\xab\x8eV\x84\x14&\xb6\xd3%\x1f\xc7\x18WP\x140\xc4\xea\xd9q\xcbH\x18\xfa4\xb7\x8f-\xef?V\nL\xa2\xb0\xc5s\xd7\x95$tH*\xd8?\xeaMpj\x82\xce\x1b\x9f-Ta\xc5\x89tAnfl>\xc3)\xf2\x0b\xf9g\x13\xe9\x91\xb2\x1c\xb8\xa7\nL\xfao\x16T\xb3\xa2\xcf,\x9c\x94^v{{\x95\xfc\x86\x1a\xa4\xca\xc5\x11I\xe4\x00\x7f\xfd\xb4\xb9\xb0 &\x1a\xdbiR~\xea\xfa\xc6[]4\xfb\x8e\x94\x062\x16T\xa6\r\x02\xc5\x86\x924S\xc9\x02\xce\x97\xc0\x0ch\x83m<vB036c\xc0TLU\xbe\xb2o\'\r\x95"\xa4\xc7z\xcd\xce\xbb\xed\xb5\xd5M{\xdeZ4\x82 .\x1d\xfd\r>\x99\xac<2t\xdd\x01Xa\xbe.\xa8\xcb\xfb\\\xb5 \xf2\xcb\xa2wo,bfl\xcc!N\xb9=r\xa4\xb29;\xa2\x94\xf44\xba\x10\t+h\xc0fM5\x9e\x13\x01\xd0\xa1#\xb8\xed\x8e/\xe3\x85\x81\xcb\xd6\x90[A\xe5\xed\x04\xdc4D\xa1~\xab\xdd\x92\xa0ZnJ?L\xb6\x1b\xea\xefM?K2\x03\x96\xbd\xdd\x07\xe4\xee\x9e\x8dG/\x0f\xec\x82>\xbc\xa7\xb9\xbbn\xd6;p&b\xae\xb5(\x1e\xdd\x84\xe0\xad[\x1a\xe9e\x14\xa3l`\x824\xaa\xc4v\xf2\xb4\xa5\x90\n\xe2\x00\x80\xe6IliNmkjUH\xfa\x82}\xd1It\xf7\xdb\x94PeV\x17\x84\xf5\x07\x94q[\xb18\xb0\xad\xb8\x13#\x0c\xe53m\x8cZcT\x90R0\xb3"P\x85d\xc1\xce\xb4\x90y$+Dcf\x87\xdb\xa0\x00\xbdF\xaa\t\xc1\xe5\xac\xb4D9\n\xbef\x81\x90\xc6\xd3\xedn\x9a>r\xb2\x12\xf2mK`H\xb0u\x1b\xbd\xbd\x05m?Br\xa6\xc4\x8c\xdc\x94l0s\x12rE|!\xd5\xcb\x07\xff\xcdV\xb9\xf6w\xcc-5\xdd\xe3\xf8\'6}K>\xb0\xd3\n\xb2\x15\r;\x06\xe2 
3U|\xd3-\xae,>\xb6\x1d\xa7\xf1\xc1\xeb\xb5\x9dF\xd9\xb5}"_\xcd6\x13R\xc9\xd6\xcc\x90\rk\x12\xcad\xc6\xa4\x8b-nPQ\x8cJ\x1c\x9a\xf5\xb8&\xb1\xb7\xe8\xa1-o\xfe0\xab\x13\xb2\xf5\x1fx\xd4\xcd\xd9\xc9\xdb\x07\x16n\x8d.z\x84\nw\xfa\xe8\xbd=\'\xd4\x1e/\xc4\xe1a\x16\x86W\xffY@\xc1,F\x05\xed2\xdd4\xa9\xe1\x14\xb5ba\xdar\xc8\x0e\xd5\x81\xd0n\xbf\xbfF\xc9\x12\xe0\xc3\x94\x84\xbc]$\xfe\xc5yu\xfc\xd6\xf4)"\x05\xef?\xf3\x1e6\x85\x0f6\x04\xdd\xceNA<\x87\x8d\xe6\x91\xa9j\x99,\xe3\xbbo\x0e\xc7gV\xc7\xd2\xc9\x1dTK\xfa\xab\xe8H\x05\x03\x99\xa2\xdd\xda\xaft\xf9\xd1=\x03P;\xbfd\x1e\xf5Xv";\x03\xd6\x91\\\xb5}e\xa6\xe5MN\x9b\xf4U0\x1b\xc5\x06\xa6\x80\x91`\xe9\xe2\r\xd9\xa0\xc2\x1c\xb5\xeb\x8f\xff\xc2\xfb\xecP\x89_\xea\r!\xd1\xfe\xe7!\x83@\xbf\xf6>\xfc\xe9/\x91\x86a:Y\x8b\xf8\x83\xe3+\xca\xe2\xbd\xa0\xc7"\xde\xb6\x07\xe6\xf8T\x17\xc3\xbf;\xb3\xa7V\x1e\xc6\xec#\x02^\xf8\x87\xfbw~l\x0b\xd5\x8a\\Ml\xa7\x94\xcb\xc2\xd7\x91\xb2\x99(\xbd\xa5\x9d\xca_\xafL\xc9ti\x95\xd9\x89\xd3.~\xfe\xb7l\xce\x13\xc1M\x99\xfc\xad4\x1b\xf0\xbf}{8\xe3\xec\x98.\xb3`#`\x81J\xd3VG\x7f\x1f\xdeC~\xd8\xa1\xf7\xbe\x06\'7\xf4\xf2U\x0811\xe2\xa1W\x19\x95s\x0e\xea\xea\xe0=#\xf5\xe3U3b\xf4\x8b\x8e\n.\xa3H\xdf\x1f3\x92\x05A\xdf\xca\x9d\x8dEJu\xe6\x87\xf4\xe5ju\xd9\xbf\xeb\x0f\xff\xb3Md\xbe\x18\xaf~P\x08\xc2:g\xa9\x80h 
\xba"\xca!#\n\x96\xac\x1d\xaaaN>\x89\x83\xd30\xab\x88\xf8^\xf4\xde\xd6\x8f\xe5\xac\xd6\x9e\x94C8"{F\xf2j\xba;&\xe5\xb5\xb2\x84R\x91\x04l!\x1d\x08\xf4\x8f\x1c\x85\x10\xbd\xb8\x05o\xdc\xff\xc3\xb50\xae\xa4.\xed\xc5\x9a\x82\xa9\x0ff\xd9\x8d"\xbb\xe3\xa5\xcf\xfbf9\xba|\xaf\xa0~0\x9a\x8b\x1f\xb8G\xe6\xa0Cvu\xd7\xec\x18S\x96\x0c\x040\xb2\x9d\xdc:g\xb3\xb4r5\xe2\x88\xf6\xc5}L\x1c\xa8\xc7\xcdKu\xa2\xc2:\xff\xe0IuA\x7f\xe3\x88\xcb2?\x81m%\xd5\xf9\xa3\xbd$\x07ft\xf5mSU?`\xd5\xc9/@\x959r4\xcf>/A\xebM?\xf8\xc8\'\xa7$E~\xba\x96*\x00L-\xd7`*#\x8eo\x9f>\xd6#\n\xf7&\xc5\x05\x1dm\xac\xf1\x16\xf6h\x04\x81z[\xda\xbay\x7fvco\xba\xb6\x9f\xb0\xd7\xba\x9e\xdb7\xaf\xe9\xb57\xa5\xf0\xfbV\x80\x19\x8dZnZHi\xeef\xde]S\x82\xc2u\x12\x80\xcd\xda]c/Xvx2&yC\xad\x9az\xf0\xdanA\xf0\xa6y\xa3k(\xfa\x0c~\xca\x1ez\xb4\xef\xc7S\xcb\x90\xb4\x04\x9dI\xd1\xa4\xf6$\xff\xa3\x0f\x8f4\x00$\xcf\xbex\xb9fY\x16K\x92$\xa7\xcfT\xd7{\xc1\xd1\x03\x99\xd2\xd9\xa317\x16$\xca\xa28\x03\x85\x17\xd0z4\xe4&\x1fL{\x18+\xd4\xb6\x02LNQ\xdbp\xcd\xea ,\x7f\xcdL0xF9\xd7\xe6y\x14\xbf\x84\xe5\xb5\x9f\x7f\xd8jX\xd1\x0b\xa4\x8f~F\xe5\xd5\xbe\x9d@cd\x93\xce\x9a\x13\xa3\x99\x8a\x1fy\x0b\x00\x1dgo\x96yfn\x93d\xfdX\x9a\xc3R\xf7\xab?\x95\x18\x99\xcaC\x134\xa4\x01<\x8cX\x04\x0e\xa8\x8e7\xe9\nP5\x0cL\xb0\xe8\xf2\xfe\x1d\xb9\xe0\xf0\xda\x0b\xf2 \xc2\xf3\xf9\xe3\xf9_\x0c\x03k\xe6\xff\x1d\xdc\x85q\xb8\x83RF\xbet\xcd\xde\xad\x0e\xb7\xe7\xbf\x99o%\x0f\x03\xeb_\xb3\xf1\xa6U\xe4\xdb\xa2\xc2M\x96gfY"k[*%Z/\x1a\xa0\xb7\x80u\xf5\x9b#\xd3\x05BQz\xe7\x89\xed!\xcb\xf2)\xb8w\x0e= e\xb7\xfd{]\xbej\x14o\x1f\x03\x10N7E\x01"C=\xb5m\x89R\xab\xaa\xda\xf8\x02\xb3}\xa3\x85\xdd\xee\xff\x08\x7f%s 
z~>\x9bi\x95"\x05\x97\x99=U\x14\x85y\xd9\x9f!5d=\xf3\x13\x04\xd6\x17\xcf~4\x88$\xa2\xd9\xf6a%\xd6\x16*\'~\xdcb\xfa>^\x1a\x04=\x06\x9d\x02C\x1f\xd9y\x9b\x7f\xfdh\x91\x06\x9d\xf0\xd0\x17\x85\x7f\'\x98\x0e\xd9\xf9mp\xcc\xc1\xa4q"\xcdF\xf9\xda;A\x08ZD\x17\xddM\xcd\x14\xbb\xf4\xa7r!\xa4\xc8\x9c0\xc7\xa4\xa7C\xee\xc9\xc9\x85\xf5\xec+Ym\xc5\xb7,\xcc%+R_\x95\xd1K\x91\xc1:\xf2\xd2\xc2\xf8\xdc\xa1\ty\x94\xec|S\xa2\xc5\xc8G \x1e\xdb\xab\xa0\xf5\xf5\xd6\x9dC-_nk?2\x10\x18\xa5\x1d\xe2\xadI\x93\xf4\xd1j\x1c=\x95+3\xb6\xd1\xc1\xe3\xf7[\xb7\xc0\xe6*\xc6&,\xc7&d\xda\x9b\xa03\xc2\xc4\xecI:f\x98:\x07\xdc/\xf9\x02\x03\xf5\xb7:H\xaa\x8b\xb9,Q8\xc6\xf8\xbe\x0e<@\xa2e~M\xc9O[]\x8c\x06\xf21\xc0\xe6\xe4\xec\xcf\x8bNQ\xaeB\xbd\xbe=@\x16\xeb\x85\xaf:\xe5O\xf8T\x1b~\xac\x9b\xa1hBtD\xe2D^]\x0e\x1b\x0e\x94a\xd4\xbejM&\xa5\x07"\x1dzA\x84\x80\xbc\x1az\xa1F\xf6\x12z\r\xfa0\xccO\xa9\xaa\xb85nM[D\xc6\x92;\xe6\xdeS\xa5\xe0\x0b;Un\\\xf9\xb70A6\x96\xd4\xfe\xec\xca\xbf\x99\xc1=\xe1J\x13\xb2^`\xe2\xb3\x8b\xbf\x11^\xc3\xceo\xf6q\xa4\xff\x19>A\xf8"-\xc5\xcb\x8c\xd2\x03\x05$\xadmO\x8b\xc6G\x88d\xc4t"\x87 
F/O\xcd\xfds\x92J\xa3\xac\x82O\xa2^\x85a\xbc\x05QG\x91\x01|t\xf7\xbc\xba\x18}\x1dD-\xaf\xebG\x08\x838(\x90\xdc\xd9_\xe6\x13F\x11\x0e\'\xd8/J\xe3{\xb3\xfdQ\x984\xcf\xd3\xd6\xaf\xdfT\x9a\xca@E\xbf\x8e\xf1\x97lo\xec\xcc\\\xb3z\x827q\x9fn\xfd\x81\xe8\xb0B\x8b\xedr\xec\xc5e\x9d\xf5\xf3\x132\xbc&l\xf7\x99\x95\x11wcG5\x9c\x08J\xcb\xecA*\x8b\xce\xe4\xd2%c\xff\x1e\xc3\xe3%\x8bKJ_]\xa5\x19\x02\xbb\xd5\x05\xeb\x98F"\xdb~$\xc1\x03@\x10\xdf\xf9\x8e\x97\x7f\xf3\xdc\xdcj\xc4J\xf8\x03\x13S\x1f\xd7&\xf2\xa5\xb2\n2\xf5\x19\xf0\x9e+\xd3r&\x88Q4C1\x1b\x92a\xfa4PZ[\x11\xa5X\xbcD\xda\xd2}~$\x8d\xe7H\x0c\xe2\x1b\x0e\xa4\xd1d\x873HBu1~\xaeFS\xb8g\x8d\xa4_\xdaR*\x18>M\xd8-\x9b\x9a\x13\xfc\xdbA\x0e\xf1\xce\xdd\xdcb\xa4\xb8\x8e\xb2xlV\xb2\xeb\xbe\xc6\x1b\x9fY\x8d>5\xf7\xd9\t.\xec\xd8\xdc\x83\x016\x9c\x11C\x08\x82\x91g\xa4X\x1b|F/\xec\xb1\x93\x13$\xf5\xa4T\x0e\xc7\x95}+847\xef\xc6!4=x\xe9\xf9\xc2\xf7Y\xdb\x07\x9a\x94\x14\xb0\x19&o\x97\xcd\xbc\x1a&<^\x92=5\x99\x89$]A\xfe\xa5\xba\x98\x9eKp_\x0e\x86\xbc\xc6\xf2\xcd\xd5\xb1\x97\x137g\xa60\x8d2\xdb\x98\xad.\xa7\x87\x0c\x95a_\x9c%\xe6\xeb\x18\xe0\xa5\xae\x12\x83\x12=#\xcd\x94R\x17\x91%\xdb(\xbbc\xf6\xda\xb3\xe7\x1e^\x84\x83*r\xa5\xa2\xc4\xf1\x9e\x1d!\x0c\xcez\xe9d\xf8j\x06\x85\x94\x16b\x86\xfd\x133[\xccN\xf1"\x18\x95\xc7\xc6\x87d\xe2\xb7\x00\xa3\xd4\xe4\xa3W|\xec@\xc0\xed\xf2\x1fbU 
\x17\xaeci}\x8d\xa9\x11\xd4HV\xe5\x1bK6k\xb4\x0cFXp\x87\xc2\x18\x11\xea\x01\xa5JI(\xf8l\x1a\x81\xeaw\x84\xe1\xf5\xa3\xd8Xq\x10\xae^\\Y\xfe\xf3}Y\x0e\x06\x0f\x9aO\xdc\x835\xab\x11\xbd\x02\x8d\xb4\xbe\xa8\xc4\xd6c\x94\xc5.\x8c\x9bm`\xcdK\xe4\x0c\xe6$\x85\x99\xa02\x9e\xb3%\x1b\x0e\x1f`\xbbE\'\x0c\xae\xb4T\x8c\xb4\xc7\xf0\x1c-N^|\xf1:\xb91.\t\xe8\xde\x93\xb1L\xc0\xe2\xafi^\x10G\xce+\n\xe9\x95\xc2\xb5\xfde\xec\x19\x92w\x12\xb5:\xc3\x10\xfa\xbe\xff\xf2\xfaY\xa6U\x04Y\xce:\nd\x9d\x85\x03\x96\xc4JV,m\xaa\x99"cby\x179Egu\x84L"\xe6\x18\x98\xec\x12\xd9G\xd0\x01y\tQ\xff\xf8\xd46\x930i\xa2\xf1J\x05\xa6\x94s\x02\\\xb0\xf5\xa1\xe7\xc0\xfc@\x9aBo{J&\x0b\xe4\xa4T\xf4\xedT\x1fo\x8a\xbd\xe9\x05\xd4c\x98Kt\xb6}\xc6!e\x9dM}\xed\r\x07\xf7\x18S{N\x80\x04\xac\xa9\xff\xa3%Bp\x9f}\x9c\xb7\x1c\xaf\x8f\x96?U\xc7Ln\x91\x01\xd6\x0bw\xd1\xdc\x1e\xbd"{\xf5_u1\xa1\x91\xe5\x88\xc6\xecp\x84j\xac\xcd\xd0\xfe\xa1\xc8c\xa8\xe9\xe3\x10\x8b>\x9b\xb8\xff?\xbb\x10a\xee!f;\x97\xe9\x89\x85\xfe\xcc\xa5\xb8\xfb(\x1a&\xeb\x8f`=\xa3\xa3\xfb2\xca\xf5\x8f\x1aqP\xfa\x1d6\xa0\xdb\xdbxD\xa6t"\x83HX\'\xd8\xab\x0c\xe3\x9cHT\x81,\xf7b>w\xaa\xb36.\xdfF\xb1;?\xc0"\x87\xc5\x13\x93\x9c\xc6~\xef\xd8$\x95\xeda\x8d\x8c\x0b\xe0&J\x19y\xce\xbd\x0c~\x10\xfeTX\xf4\xe5\xfb\xbd\xcf\x8f\xceLU\x89q\xe4\x02\x95\xbaK\\\x8d\x85=\xc2\x97\x18c\xdb\x94\xf5?\x13}\xf6C%\xb6\r\xcc\x8b\xf3K\x0fA\xc6T\xf9u\xb2\xaa\x80*E\xaf\xde*\xb7\xe3\xaf\xbd\x14\xbc\xe2\xcd\xd0\xbad\xcdn\xd2e\xbbX\xb4 
\x83\\>G\xf6\xdf\xe0\x87=p<\x18`\xb2\x8a\xd3-9\xe8\x8c/\x7f\xa0,\xbe\xb4\x98\x85\x19\x96`6\xeb\xc7\x001\x0f\xd3\x97\x1b0\x93j\x1c0C\xe0\x8b\xa7\xa7\x94\xf6\'\xa4\xc8\xb1\xd1r\xdfa|J\'&hpQ\xbe,%\xf7B\x18\x85\x02\xc1`\xd8\xc8\x99E\xc8\xd2\x99\x14\'\xb9\x84\xe9\xd8rW\xdc\xfaf\x1e\xc1c\xf0\x89M]\xfd\x8e\xc7\xbfU\x1d\xa1\n\x95\x8e\xf5&\x815c\xfb+\x9c\x82\x90L~\x15\xdc@&\xc5\x97JCj^\x9c+\x17\x8a\x94\'\xc4\xe6+\x82\xc3\xe8d\xb6^\x967\x85\xdf\xa2M)\xe4Wn\\\xb3\x88=\x02\xcdb\x18\xa23Z\x08\xe6{\x89\x10\xce\xfa\x86\x92}\xa2\xfc\x10\xad\xf4\xfe\x98\xe9\xb2\xc1?\xaf\xef\tgd$\xfa\xcc\xe4\xa4!?\x06\xe5\xbb\x0b\xfaZ\\\xa9K\xa9P\xa0@[\xb4m\xb9@\xe9P?/^O\xef\xaf\x98)\xef\x91\x90\x958\x8d\xc0\x91\xcc\xfa\x0e\nq\xf5\xa3IE\xe4\xff\xce\xd9&v\xed\x1d\xb6\x1c\x05\x8bLx\xd7LK\xcc\x9bj\xd0i\x07\xca1\xa1gQ\xc6\xdb\x9c\xee\xfc\xef\x98\xedK\x93M\xa2\x84\x86h\xb2+\x1eW6\xc3\xd69\xcc\xa9\xe9\xb5\xa1\xa1[\x94\xf6@\x9bj1\x12]\xc2^\xa9\xf6/\x8c\x1ey\r@\xef|w\x81\xe4{3\x84\xc38\x97V\x9c\xf2\x06\x9c\xaa\x91\x86\x1e \xb2\x0c/\xcf\xb8\x0c\xc7\x9f\xaa\x03\xee\xb2Gp\xd7F\xcd\xd9-SE\x8eU\xc4\x9e\xbb7\xfb\xa6\xd8\xfd0\x00\x93c\xdaYVc\xa5\xa5]\xd8A\x85\xb7)\xdc3kA\x83\x1e\x7f~\x82L\x04+\xca\x00\x86y\xec{\x01zEu1\xd1\x85l\xd3x\xad1O\xd5v3\xde\x86VR\x9ca|\xdao\n\xa4s\x19\xd9N\xe1\x19\xd4)\xfb\xf38\xfb\xcf\x94\x8f,Iip\xa8??\x97vx\x8e\x11\x80\xd5\xcc7\xaf\xc1\xed#r\xa2UCO\x9c\xa1\x06xx`\x08pu\x9c\x0f\xd7\x8dt\xf8\xfa\xd4}KQ\x82\xf0\x8eN\xd5O\x9fN\xfcxc\x0fO\xbe6\xc5`\xd9~\x1e\xa9NY\xbb\xa7\xf7a\x07\x1c\x8c^\xdd\x1c\x81wP\xcea\xdb\x19u\xf6&\x17uk;=\x00>\x8d\x1b\xb7K\xf4\x85\xca9\x9c\xad\x9c>\x99n\xab\x8b\xa9`t-\xfd\xbf%3\x08.\xffWi\xb6;\x04\xa5[\xb5\xdf\t\xf2\x97\xad"\xbb\xd1/^_5\x97L(\xb2\x80{\xf07\xfb\x97\xb0\x1d\xf8,\xee\xca\xf0\x1a&\xa7\x06\xb1\xc4X\x88H\x8a\xb1\xc2,\xb2\x08\x8f\xaaaw[\xe5\x96}\x8eFg\xffl\x8a\xa21E\xbc\x0f\xd1\x0c9)\x0e\xeb\xceV\x9eT1\x0b#n"w\xf1ms\xeb\xa8\x85\xb1~\xe6\x19\x0e\xa0\xad\x9f\x96\xce+\x0f\x1b\n\xfdmqMc\xe1\xb37\xee\x06&\xd6s\xee\x8c\xd1g\x8f\x14&r\xc4c\xbc\xa3X\x80\xf6o\x0b\xdb\x
c3z\xceK\x0b\xc2F\xe1\xc11\x86\x18G\xc84IG\xabQ w\xe7\xe6U\xc9\xe8\xeeMi\x03\xb0\x8b.\xefl\xf5\x99\xa5\xa7O\x1e\xf4{\xef\xb53\xa5\xd2-\xb2\r\x8e\x7f\x98a\x8d|\x9acjZ\xbdc\x0e\x90\x15+\xcc\xfb\x19f\r4\xd1\x07\x894\xf6\xbfU@\xea\xdb\xef\xb6\xa2\xa5ja!\xdfj\xaf\xa6\xb6H\xf0\xcd\xac3A\t\xe2\xfd#\x84\x10\xc7\xca\xca[\xff\xbe}\xf2\xe6;\x0f\xd3@\x08\x91\xe7V3aU\x19\xfd\x81\x1e\xd4\x15v\xe4\xda0\xac\x85\x05[-\x0e\x00\xd6\xcdH\xdbi\xcc\x0f\xd0\xaa\x921\xe1\xcd\xe6\x968\xbe\' e\xc7\xae\xba\x16\xbdOD\x96\xf4D\xf8,Q\xc8\xdc\x1c\xf90,\xce\xc7\xa7\x8fU?g\xfe\xecm\x85\x1b\xd4\xa9\x99V\x10F\xd3a6Q\x98\xb0\xab\xd4%\xcfk90+\xd4)\x91\xe04\xe1P\xec|\xb4\xb7\xa0\xf9 \x9d\xf3\xbf\xef\xa7&\xacD\xb6\xe8\x80^\xc08\xcfI%[D\xc8\r\xce\x15!C>gQa\x1d\x8a\x8b \x8ce\x9ev\xee\xa8\xba\x18\xb6F\x98\x01\x97)\x17-\x0c!\x07\xbc)D\xbfh\xcdg\t\xcc\x0c*\x88\xb2\xa1\xc9\x0cSeV\x93\x94\x19\xf4\x18\xf4\x18\x02\xc0X#Y\x97\xce6\xbbo\x16\xb8rG\xe3\xf6&!Z\xbb\x86\xce\xec\xe8c\x0f6\x05\xc8LC\xe3\xadA\x91=B\xf4V\x1e\x8d%\xcd\x91@\x1e\xc8\xb5\x93&s|_\xd8>y\xbb\x0f\xcd]\xb3\xfd\xb4S\xe3\x0b\xbb\xb2\xe2\xeab\x0c\xa5S\\\xd3a\xfe\xaf\x8b\xc0\xfaH\xa0M8K\'\xc4\xad\xedJC~oj\xae\x983\xfc\xca!s\xb1\xf7d\x1aN\x16\xc7?\x8b?p\xd9:\xe7\xc9s3W\x14\xb4\t\xf4\xe40\xa6\xa0Gs\xe6S 3\x01\xe8\x03\xcb\xcf\xe0\xe1\xb6\xe2^\x11\xd1tG\xe6\xdbk\xb2\x99B?>~\xf0\xf6\xe7\xb3\x17\n\xf4b3\x1f5\xc1\x06\xa4\xef\xfe\xc5\xd2o\x83\xf6jo,\x06\xd6V\x1e*\xdaf&y\xb5\xba\xa0\xea0=\x89\x96\xec~=\xfc@)\xbc\x9e\xe1\x9c\xe3\x8cF\xe5\x08$\x8e*\xbfi\x95\x86\x07\x95\x8e\xa9M\x91|\x8f\x8f\xdfa\x050\x19\xa9\x8d\x9f\xbe\xb3,\xab\xc5\x80\xb4\xb2I\xe1\xcc\xe4\xbcX\xff\x0c\xa6ee\xf3][W\xcf[\xc64\xccVnc6t\xd6\xb0`AA6I\x13\xa1\xb9\xf6\xe2&\x89C\xf0\xc3\r\x90@\x873\x1aF\xf1\xe0\x01\xe6\xfb!\x91\xa2\xbb\xc2\x11\tQ|\xfe\xd9Bq\x9e\x08P`\xc2q\xa9]$V\xe1;\xd4\xb8\x89/\x96W\x90\x0e2\x11\xa3!\x80 
\xe7cC|_\xeb$\x1c\xaal\xc6NF\x163\xe9\x8fNz3;>\xb3a\xde\x08o\xc2\xdd\xdd\x0f\xd5p\xa6\x90\xdeX\xb0\x91\xd7\xd7\x14\xb4n\x8f\xadg~\xf9\\\x1f\xe4\xbab\xf0\x85\xdb&\xa1\xb60\x83[\xa2\x11\xe4bt\n9h\xaf\xa7\xcc\xc8\x18\xd7 \xb8c3\x06\xc3X\xcf:\x97\xec\x96\xf1Y-\x10\x8e\x1b\xb9l\n^\x8buG\x86:$\x90\x9c2\xf7\xc6\x84\x96J\x8f\xc9F\x91R\x96&<\xbe\t7\xeb\x9f\x98\x85\xea\x11\xc9v\xed\xda\x12\xe4\xe3\tN\x93\xa1\x00\xa1\x82Qs\x10J\xa24\n\x9a\xd2m\x0c\xedq\xb5[\x04\xc2\x90\xde\xc1\x82\xa2y\xb9\x95f1\xae\x8d\xb4\x88tX\xad\xd9\xbc\xde\xebT\x9c\xae}y|\xa5.\x1f\x9a\xdd\x8bzU\\\xa3\xae}\xf2j\xe92\x9f\xe2<\x167w\xd6\xd8\xaf\x13"LE\nf\xfe\x94o\xd8O\\=g*\xd0PJq$\x03B!_\xec\x99\xf0\xb2\xa5(\xb6\x04\xc2\xf5\x0b\x97X<\xff.6-\xd4\xaa\xeb\xe9\xe5\xeab\xe0Z\xdbf+\xdfm\x05\x19\xe2$\x99\xad|\xe7\xb0x\xc4[\x07zc\x86\x15)6\xec\t9V+=&\xb3u\x13\x15\x13\xc6X\xd8\x87\xf4f\x07\xed\xc9\xa7\xab\x81\x97k\xadB\xf0\xe5\xad8x5\xb2\xe1\x08\xc28\xcc\xd8t5\x0f\x86x\x0e\x8ac\xfbl\xc9\xd4\x8dS\x15X\x15Un\xc6V\x01g!\x10\xdb8\x99k\xbd\xb5\xb5r\xd9\x8a\xf9\xb80\x94\xb2\xb2\x016P\xcf\\#,\x9d\x95F\\\xa2\x99\x87\xde#\x0c`\xb9n\xe8\x8cq\xc2b\x1b\x9cfh\xa3\x99\x92\xef\xf8\xe0K\xb9\xbe\xf6\xdb\x82\xa9\x97\xe5\x80\x16\x19\xf1As{j\n}d\xd10\x87\x08\xb4(\xd1\xf8me\xcf\x05\xf0\x10\x97,T\x17\x83a`\xbe"\x91\xce\xb8\x19\x8aPY`\x14\xb7\x96\xc7{\xd5\xca\x12\x9d\xb2ZR) \x92\xd8Q\x9e\xc8\xda\x9e\xe0\xa7\x18\x9f\xe3W_\xbd7%\xac\xeb\xd3m\xbc\xff\xfb\xccnU\x0f\xe4\x14\x84\x7f\x18pY\xeb\xa1\r\xc0Fa0\xdcY8\xae.\x9a9"d\xc3\xc1!\x93\xa1\xceb5\xcb\xb3-\xcc\x0e\x8a\x0fQ\x0fI\xcf\x95\xdd\xe7\xb6\xc6\x8d2d\xeb\xffC\xab`=ibL\xaeM\xcc0J\x86%\x80\xf0\x1b\x8a\xd7\x1bbK\x10vc\x92\xfe\xd7\xf6\xba\x16!\xa2\x0f\x87\x1b\x8a.\xa9\x8a\xe4\x02\x10=J\x06i\x81`\x90\xc7\x19=\x10TZ\xa2]?\xc2\xc4d&\xb4\n\x04\xfbzk\xc6T%\n\x8f\xf7\xab\x0bN=\xfdD.(\x00;\x98\xc9}\xf7H\xfd\xba+\x16\xe858\xe5\xb7Sy\x803\xe2\xdd"H\x1f\xed\xa1\xc2\xa1\xa1\x8a\x19\xa4j\xb9BQ\x8e\x80j\xdf\x01\x11\xc1\xe9>Qvu\x07\x0c\xa8\xfe\x81\x85h\xb5?\xf9a!\x8a\xb9\x9f;\x8a\x01\xe4\x85C31\xa5\xe8 
\xb6\xc0\x99\x83\x1dJ\x93\x9dZ\xcbN\x9e\x17\xca\x1d4%\x91\xa4\xd4\xd9^\xb0\xf2\x03p\xa2\x06\xb1\x95-\xbb\x188M\x0e\x16\x04\xb2\xab\xef,S\xf1\x0f\xbeV\xd7\xef\xdf\x99?3\x91\xe7)\x8e\xa9Y\x04V\x9d\xc2\x9f\x7f[[v\xdf\x91*\x1d\x1c\x7fL-\xb0a\xad\xcfN\xeaM\xcc\x1c\x91\x1f\x08\xf5\x1e\x86!\xf58C\xb6S\xda\xc3\x00\xa3V\xd4\xa42`\x944\xe6\xc8\xbd"\x10\xc3\xea\xafZ\x92D\x95f0\xb7\xfbd\xf1l8&$\xb0\xc8\xc98\xbbT\x89\x90\x97\xed\x90\xbfd\xc1b@\xc2\x80\xe87\xe8\xa21\xe0C\xd1\xad\xe3\xb5/J\xf2\\\xa5\x13u\xf7\x17?\x80\x1f\xc4\x88\t\x87\xe6!$\x89<\x9aV\xd2\t)i\xfbh\xe2\xa3=n\x04B\xc8PZ\xa7Vu\x95\x8e\xbf\x94\x85\x8d\xdc\x13K\x18\x1c\xf0\xc7\xa1.\xc7\x91e8\x1fhd^\x8e\xb2\xeab\xbc\\\x99O~|\x89\x7f\x9e\xe2\x04X6\x04\xf1\xfe8\xbe\x99\xc8\x8e\xaa\n\x0e.w\xb0U\xe4\xfd\xf9\x0f\xb6\xe4\xdc\xf0\xd4<-\xcfU\xc2\x9dZN\xa0g\xdf\xc1\xbei/s\xf3\xa0\\C\x8d7\xbdp\x18\x0e\x04\x8d\xb5d1\xcd?\xa2\xba\xe2\xe8[uq\xa8F$f\x95cCNj\x8a\xcbI\x91\xaa\x0fu\x98\xbc\xd5K*z\xe1|\xac\xf1a\x19\x87\x83\xf7\xa8\xdd\x85p\x1d\xacK\xbe\xb1\x80\xc4\x90yM\x98\xc6\x91\x89\xf9\x99\xe9Q7\xf0,:3\x8b\xec\xa2\xcfa\xc61\x02\xafg\xfb,\xe4%\xfc\xdc9\xb0\xb0\x8dX8\xffXk\xc3\x9d72%YbZ\xd4FO\x9f\xdb\xe6\x04 \xba5\x88[\xce\xb3i#^E\xd7\x19\xe7\x9f\x16k\x18\x9aC\xd0\x9d\x1b}f\xee\xb6,y\x06\xd6\x9e\xa5q\xbdpw\xa2\xb0Fp\xc0?\xc6\xa9\xa2\xc7\xd5\xd1\xa9-\x04}n\xc4N\xbf\x14g\xaeq\x00\x01\xdbhsn\x1fP1d\xf3\x11Nf#;&X\xc8\xc1\x11v8\xf0\xac\xf10\x1e}>g\xd6\xab\xe1\x13\xe2\xe0\xacv\xf5\\\x19,N\xcf\xb1\xec\x8e(\x94\xf5 \xd9vq*1\xeb\xa6C\x96\x93(\xd4bN\x01\x0f\n.\xb6\xe5\x7frEp\x80\xf1\xcf\xf3\xfb9\xce\x84\xe1\xaa\xd7hn\xa8\xb3_\'\xb6\xb3m\xbfz\x0eS\x9f^\xbbfR\xcc\xea\xba?\xbb\x89\x8a(M`\xa2`\xbe\xa7lT(-2\x0b\xc5\x9c\xaf 
\xa1\x1d\xc6R\xb0E\xdeU\x1ag\xb3\xf5[0zD\xd2!*\xd4\x1c\x9aU\xa6S\xaf\x05\xca\xe7r\x95\x01\xeb\x96\t\xc7f\xb3\xbd<t)\xfa\x12\xfc%Oda24\xf5\xde\xf6\x97\x15vg\x99\x8do\x97\xed\xa3e4\x85\x85O6\xdfak\\\xa5\t2sf\xa8\\\xfd\x12V\xca\xe1\x14N\xffx\xec\xe1\xca\xe4\xa9\xc5\xa5\xbeY\x99W\xd4\x16\x04\xf6\x88:G\x9a\xe0\x80\x86wC\x0c\x83"q)Rl#\xd4\xce\x1a\\\xf0\x8a\x1eMfvb\x1ep\xd7\x0eQC\xbc\x01^\x86CI\xbb\x91\xa6{\x9c\xc1\xcec\x86\xcb\xa5\xfd\xfd\x03\x95uK\xb7\xdf\xde]\xfaW\x1do*\x0e\x07\xeb\x8a\x9e,\x1dp\x98\xe1\xd4H\x97\x9f\xfd\x00\xd3\x0fs\x86\x08a4\x92\td\xee\xd1\xe2\x9e=\x9f\xcb&\x94W\xaa<>\xb8w\xce\xb4\xc1\xb1Q%Z\xd5\xc9:@\xbc\x18\xba\xef\xdeL\x92\xc3\x14Y\n[\xfb\x13\xd3q\xd8\xf3\xd8\x81\x1fI\xcam-X\x82h\xf9\x8d1\xc5\x00\xb5\xbd\x86\xcfoV\x17#\xae\xeax\x03\x8c\x0e\xce\x92\xc7\xac\xc5\xc6\xade\x91"e\x98\xf8\xd8Ak\xc0\xbfi\xf2\tC\x8f9\xae\x85A\x0c\xb6:\xbd\x8a\x80\x8f\x103\xe0h\xb6\x87z\xf0m\xea\xf6~e\x8d\xb4\xecmW\x95\x88\x0fA\xb7\xfc\xd6\x84\xb0\x07r\xc7\xcb\xc1Vub\xc1X\xbe\xf3{\xccP\xfe2\xb9fWa\xe7e\xf6la\xdaaY\xd8\xeb\x12\xb2+\x10\xcb\x9f_\xb4G\xbe\x12\xb6o\x192+`\xb5\x89\x80\x8d\x8bS\x16\x96\x8e)"\x10\x1c\xc9d\x12\x7fg\x1f\xee\x85\xb5\x85\xdb\x97\x10h\xa3J\x10{\xe3\xfc\xb5\xbf\xb6\xc6\xa4\x92\xa3\xb3\xdb\x85)P\xf0\xba\xfa-8\x07^\xb5;\x8e\x91io}P*\x19W\x17\'X\x10C\xc0\tj\xa1F#B\x8bU\x1f\x0eK 
\x8d\x12\x17F\x06\x14j4uS\xc2\xfd^\xa5\xd9\xab9\xf2\xc2"(f\x8b:\xd5\xab\x89\xf8\xcaW61\xce\x0b&\xb0\xc9\x83\xcb\xc4\xcd\xe9r?5\xa5/\x95pu\xc2\xd7=\xda\x96\x1c\x0e\x12d\x1d\x83\xf3\xa7\x12\x19B\xe3\x93Y1\x94\x92\teK\x17?T\x17\'\xcd;\xc1n.\xbf%\xe5,/\x15\xb5\x05\x93\x9b\xf6.U{\xb0\'\x1301-\xe5%s\xb3#\x910\x85\x9es#\xb0\xef\x0e\xb4?2&0T\xcb;L\xdbh\xc4z\xf5\xe5\x10\x1e[\x93y\xfc\xc9<\x91\x1d\xfeg\x11\x1f\x8b\xae\x85\x94\xc7\xdb>S\xd5\xfc4f\xa31\xc2\xcb\xde\xcf\x99\xf3\xf3^\xf7o\xfc\x8b\xf5=\xe1>\x03a\r\xdc\x19\xcdN\xb5i\x18\n\x05\x18\xdf\xe6\xf6o\x8e\xdc\xe2@\x84!c\xcaX\x11\xc39\xc9\xa1\xbc\x01\x87\x8ec\xd6<gu\x82\x14\xcf\x81\xb8\x9dB:\x96P\xc1IhP\xd1\xe5>\x02\xc1\rb31\x90\xa3\xf2\xd4\xbf\x14\x11\x90\x1c\x89\xc8\x86\xe9HTiX [\rY^\xf5\xf7,T$\xc9S)U\x83\xaa\t\x974\xffm\xd9^\x04\x86\x18\xe25"\xca\xaa>2\xec\xc9>*\xc2\x14\xb3\xccR\x01\x9c\xa9\xd9}\xbdb\xaf\x86g\xbeZ\xd4\x8a\x9c\xa1o\xcc\x04\x81\x17\x97wMr\x1a\x108\x9bn3\xb3\x97\xe9x\xc8Ll\xcbI\xcb\xdfl\xf0p\xcd\xc6\xd2K\x1f\x1cf\xb5\xd2\x93G\x0f>?\x16\xc0\x91I\x0fQF`?M\n\xe8:\xea\x7f\x11j\x81\xca\x80\x80@\xe6\x12\xa0\xf0\xa6\xe3\xac\xf7\x7f[\x12\x04\xbe\xf9+{\xc7\x16G\xb2\xe1<,\xd3;\x0e\x01\xc8\xecw9\xaa#\xb17\x8e\xb4$us\xa6\xe2\x8b\xf1v\xb4\'\xc16\xbcq\xbb\xbf+5"\x98pu\xf1\xc2M\xa1\x0bR\xb1:y\xfck\x80\x07+\x9d\xfd\x01\xc2^\x01vF\x8c\xb9\xa8\x1e\r\x0bN<72k0M#R\xca\xcbA\x86\xb1\xfd\x80\x8cx\xa6\x18\x13\xeb\xf6D}\xd1~\x00\xe3\xa7\xff\xa00\xd9+\xd2O+\xcdE\xddz\xf5]\x80C\xae,\x9c\xe5\xc2\xcf_\x15\x06\xe4\x1b?\xa5\xf9\x91\xc0\x1e\xf1\xd3\x06\x92q\xdf\xde\xbd\xcbR\x00\xabcHv\x91yr\x1eE\xf1\x004\xf9\xe1,\xcf\x80\xb0\x893\x0f"\xe5\xd2\t\xbe\xc8\xf9\xa57u\'\x89\x83e\xd3\xc6\xc0\xb0\xd0\x89\x84\xe9\xe1\x13\x1amp+\xd6e\t\xc2,C\xa2\x1c\x1c\xe0\xeee\xfb\xb8@;]P\x8a\xd6\x8e\x12\xbd\xe3uI\x07$\xb7\xe8t~S\x88^)>\xcaG\x8a\xf8\x87\xc2\xc9\x99\x8b\xbfY\xce\xae\xf7x\x92\xca\xb2\xf8\xb6>\xfc2g\x13\xdaL<@^\xae\xe1AEO\x95\x86\x13q\x14d\xc5\xd3mR{\x8a 
\xd4B\xadi|\xfd\xec*\x12F\xb4I\xb8\x1e\x1cn\x1e\xf5\x02A\xeb|z\xeb\xc5\x97\x1b\xa7\xabg\xbf\x7fI\x07C;\xb5\x99\x9b\xcd\x81w$j\xd5\xee\xd8\xd6\xb3=\'\x9c\xc3_\x81]2\xf0\x11\x86\xc1K>,<\xbe\xc2*"\x0f2\xf6C\xcc\x93T\xeaW\xb1A4\xe6|B\'\x86\x1e\xf1"\xcf\x86\xfdFt$\x12GY\x94\x02\xef\xbc+v\xc6\x849\x96\n%\xfb\x82=\x06@\xcaH\xe6\x9e$\x10\xcf\xb7\xbf)\x8f\x11\x8f1@\x9d\xdb\x00\xb3 \x1b\xfbai#\xf9p\xe8\xde\xa7\xe6v\x02x:\xdb\xe0HH\x84\x17lO\x13\x9cn\xbf\xbbs*c\xdeX*\xd8"/(\x01M9!\xc4<r\xbc\xabDu\x07\xf0\nB \r\xbe\x80\x18;\x87\xef\x9e\x12\xcb\xac\xba\xa0\xb8EC\t\xc0\xdf?{\xbe\xb3[\n\x0f\x05\xf30\xf27\x96\x05\xf1\x14f9\xec\x04\xb7U{W\xf6>\xa2\xd0\xcd\n<\xce\xc65\x18Ao\x1a\x96\x8f\x9e~\xc2\xaa\xffWU\x00\x92\xe9vx.(v\x00\x14\n\xdfa\x8e>\xfcx\xc7$\x1a\xbb\x12\xbf\xfch\x8aK_\x88\xa5\xce@R\xca\x95+\xf3\x00\x10E\xbc=Fj\x95\xe9\x92t\x86\x95\x84\xb2[\xc1f\xdf\xb6\x85\xf5$\xa8b\x82\x9c?\xaa\x8ck\x9eV\x07\xff\xcc\xf2\xd4 \xa11Z\xa4/\x9c\xd0\rP\'\x0c\x1e ONp\x1c=\x1f]\xd4\x9d\xd9\xd3\xd8Qu\x98\xef\xe4\x06\xb4\x1e\xa7\xc5\xb4\xe0\xf9y\x86\xf5\xa4\xa6\xf5\xd5\xc5\xa8O\x8e\xc1H\x95.\x89\r7\xa4\xceQr/39\xe5\xb8\x8d\x084\x00\xe2\x9c\xfd?\x95\xbeI]Jl\x97\xda\xf2\x93}\x93c\xd7\x1c}\x99\xdbRP\x8c\xfdnq\xaeE\xdb]\xc3\x02\xb2h\x8c&\xaa\x90\x81\xd3\xe4\x1e@tnU\x17#J9-\x081\xf90\x19\xbe\x19N@!\x8c\xd6\x99%\x8b\xb2s\x01S\x11\x1a\x05\xd8O\xdc\x99\x80\x0f\x1d4m9\xb5g\xb2\xd0\xa8B\xdd\x96K\xc8\x9e\xc2\xabo\xbf\xf1\x8fe\x1d^\xcb\xd2/\x9a\xa8\x8c\xf5\xb88q\x86\x1aC\xbc\x02\x83\x0ca\xcbw\xcc$\xd3\xbe3\xd2\x01]\xda\r\xa1UQi\xdc\xef\xb3RV\x9d\xa4\xbc\xd99\x8c\x14\x02x\xddg\x0c\xba\xde\xdd1\x03\xcc\xb8\x8a\xea\xbd\xf3\xf2\xef\xc9\xe7\xef\xcf\x0e\x7fB\x8b\xae\xd0\x88\xcd|\xee~\x98\xa8\xb2)6\xc6|\x05\x9e\xdf\xe1L\x96\x88\x8a\xe6Bz\xd8%\x85\x16S\xa7t\xba\x91c\xf7\xf2\x8f\xc4\xacR\x13\x90(\x1d\x97\xb29\xdbcN$\xc8\xec\x92M\x87X\x0b\xddh\xd6\xff\xab\xbc\xb0\xb3U-\x87\x16j\x82\xa08\x8c\x89X\t\x07M\xc0\xa3FC\xa4\xc9\x8a&\xa6V\xe3\xb0\x15\x0eu%V\xbbn\x82Uc\xfaG\xd7\xef\xd8*\x11{\xadm\x9b\xdbx\xfd\x8d\xe5\x1d\x1cR\x81\x
f36[\x9c\xeb<\x8c\xf9\xe6 \x1a\xc5z<n\x97|\xa6\x99\xf5\x19{VVG\ty/\xe1\x1f_\xed\x836\xd6\xed\xe3\xe1\xb9\xd9`\x12\xe9\xf3\xdf\xed\x8d\x7f\x93\x8f\xb1\x13\xcdU\xf67F38\xb3,(\xb9\xe3\x1c\xa5\xa6\xba8\xd4\x9b\x9d\xe5\xb4\x19$%@\x8ci\x96\xf8\'(3\xc107\x063X/\xa6\t\xf0<\xc9\x9d\xbb\xe4^/\x99\xef\xe2P\x84\xd4\x9c\x1d\x89\x9f\xd0\x8d\xe4\xa1\x8cF\xbc\xee\xa7\x97_ 9HP\x1e\xf08\x07\xc5\xe7\xe0\xf8y\xb28\xea\x9fOg\xcd>\x19\xc1\x14K\xf8\xf9\xcc\xf6\x8e\xc1DgV\xc4\xf7\x0bS\xb6\xabe\xce*\xde7\xbbB\x94)\x9f\x1c\x88\xde=\xc6b{\x12\xf1`\x9dZ\x9cvkCB\xc0KOv\xd7\'l;\xa3\xfc\xbd\x0cl&\xe4\xb2\xb78eh<(\xd3\xe9O\x8b\xaf\x83\x94\xbdP|\xe3\x04p\xc40\xe5IuQ\x10\xc5\xa9"cZ\x104\xb3\x92\xa2$:\x91y\xa0\xea\xa2\x18\xda\x06l\xaf\xc7\xe0\x11DS\x1e\x93\xfa\x1d\x93\x11\x02\xe5\x93\xfa\xedl\xc8\xbf0.)\xff\xfe\x81CX\xe3/\xf3\xf2\x07d\x89fo\x05UR\n_[\x88\xc6\xfeh\x1ch\xe0Qk\x0b\x82\x8a\x1ap\xf6\x87\xd6H\x07\xd4\xd4\x0e0\xfab\x96\x89\xa1\x19\xb4\xa1\xfe\\]\x10\x19Z\xcc\x9c\x8d\xe2\x0fk\x9f-_-qV,G\x8a\xb3+\x9e\x01\x02\xbc\x90\x13\x8f\x01\xaf\xc3~\xad\xe4ely|\r\xa5"Y\x0eS6\xda\x08P\x05\xc6\x0b\x10z\xcb\x95s\xa5K\x0b\xcafR\xcc\xeb%V\x19\xe7%J\x1c8p\xdb\xfbm3\xf0ev\xf5\xaei\x1c\xc1\xf8\xb2\xd2\xe9\x89@d]\xad|\xc1\x99\x1deLW\xcb\x1b\xc7c\xef*\x1d\x02\x89Z\x88_\xdc\x9cR\xa2\x96\xce\x9b\xb5\xf7\xc5\xa3J\x84\x17\xfbC\xcb\x90\x0b\t$\xaf,\x92l\xc3\xde6_V\xe4\xef\xe0^\xa3\xfe\x14rt\x156\x18-6\x1dgI\xf681\x91\xbf\xd5\x9eb\x84#"\xc2\x9a\x93>ylG\xa7\x18\xaf\xaf4\xf3`PQN\x84\xf6\x96(\x1b\xefl\xee\x8f\xec\x07\xd4\xab\x9b\x16\xd8\x04\x1a\x11k\xeb\x98\x1c\xdd\x89\xa1\x19\x18\xe4\xc0\t\x15S\x8f\xcc\xaa\xfbzr\xdc\xf2/\xd6-a\xed\x92)\x08DB\xc4l\x13+\x88e\xf4_,9\xb3YF\xc9\x1f[\x03\xd7n\x90\xfe\xf4\xe4\xefU<\xd6\x15\xc4Mw\x10:\xba\xe6\x0e\xb0x\x80!\x8d\x8a\x12,\xa6E$\xc5\xd5\xd5\xc5\x118]\xfcA\xb5\x8a\xfa\xa0\x9e:\x07\xf3=,V6\x91\xabU\xe9\xcb\x0e\xaa\x13\xf8\xd4\x86\xbd\x9dc\xdb\xdf(\xa3\xab\xff"\xcb\xaf\x8c\xd8\x9aP\xbf\x88\xcf\xd5\xf6\x93+\xb6$\xc3\xf8J\x92\xfd\x95\x1c\xf7\x80\xb9\x9cx\xb54o\xfd\xb3\xad\xa7h\x85\x8c\xd0g\
x8dWd\xd1\x03z\xda\xbd\xb3\xeb\xd7\xa8v\xda \xa4\x7fQu1_\xf8\xa2\x97$3\xdbd\xe5\xa1\x06\xfd\x9a\xe5\xd2j\xc9\xc8gB\x07\xd95\xc5\xd9\xb3\xc3\xea\xe287\x87F\xf1\xd6O\xe2QV\xedez\xc5}V~\x1d\xd7%\x91z\xe1Y\xda\xf0\xa7\xd2\x8c\xf2\xfbr)Aa\x04\xc9N\x7fM>\xac3\x9b41\x992\x06\x0cD\x93\xe3\xcf)\x13\x91HL\x98\x88\xdd\x0e\x11\xc1E\x90\xa4\xea\xf7\xd4\x98J\x07z_\x9c\xa3\xc9#\xb8B\x02\xfa\x99\xffi\xc6\x95\xfd\x04\xe8k\xa4\x19\xebd\x98\\\xa5\xb1\x96\xe6\x04j.\x11\x81\x88\xfc\xdd\xfb\x1d3\x9a\xc4\x1e\xd9v\x00i\xc5\x11\x14\x1c\x8e\t\x87\x80\xf9Q!{\r\xa3\xf6\xee\x9f\xa58\x8c\x1e\xda\xb9U\xb3\xf4M\xc9BE\'r1\xfd=\xae\x1e\xc5Ir\xf0\xdc\x12\xda.\xbfUU:\xcc\xaf#I\x98\x1d(\x916\xd2\xdf@,\t2\xe3p|\x1e\x13\x1a\xa4\x11\x9c\xe0\xe1\xcb\xd3\xe3o\x16b\x90\x16\xd526=\xb2\xeb\x91u\xa7\x80\xa3\xb3\x7f_G\xea\t@\x8c\xa0\x8d\x82$\xd6\x8b0W\xa2\x19\xfa$\x8a\xc5\xea\xb8\xa1\x8f9\xe7\x9d\xd8A\xc4\xca\r\x08\x19\x9d\x88v\x9c\xb0\x1a\xcc\x9f\xd7\xed\xcf5\xbb@Y\xfe]x*W\x94s\x8aCj9~S\x7fG\xf2\xe0X\xb7\xdf2\x1bA\x94L\xe4$\x96Fpi\x8e\x0b\xaay\x14B\xa7\xf0\x93\xda\xef+M\xe9\xab4\x87\xbe\xba\x98\x83\xce\x93g\x88`\x9f\xdfE\xa7%N`&\x93:\x9d\xdf\x9e8\xbdQ]\x1c\xc0\x11\xdc\x13\x18\x06\xf7\xfd\x9e@\x85\xcc"\xd0\x0e\xb5\x06\x16\xa0\x04\xae\x05\x15\xb2HT\x06\xcc\xd0\x88\x84N\xfcL\x8f\x1b\xc0\x86\xae\xa3\x05,\xdbS\xa5\xdc\xeevu\xfd\xf4\x1cd\xc2~\x05;yK\xc0\x7fd\x16\xc5\xc5g<A\xc8\xf0\x90\xf5\xea\x08E\xa0b\xef\x01V\x0b\x94\x9bly\xd9^\xb1\x0b(\x7f\xc2\xde7`v1\xecCH\xd9\x97TnH!\xf0m\xa6\xdfnl\xdf,1\xf3\xc8\xf2\xae\xd9\x01\x90\xd6\xcb\x02\x03\x12\xda\xdf\xb0{\xec\xf3,\xae}7O\x1e\xe4l\xc8\x0e\x05\xa4\xe0q2Q\x985\xb9h:\x8cX \x99\x18\x8d\xcc-\x81\x8bDY%\xcb_Eu1\xdb\x002\xc3\xa9\x93\x01\x07\xcf\x96\xf1\x9e\xc5\xb2\xa3<\x1f7/\x8f\xbf(\xbeU\x81\xc0\x06-Y\xab\x8ab\xb2\xf4\x87\xc9H\x8d\xa9\x82\x8d\xf16\x17d\x15id\x88\x1b\xe0\x84=O\\\x12\xd6\xa5\xd7\xf6\xf1\xc0\x0b1&\xd9\xc8\xe8\xd3\xdd\xa7&R5=\xa2\xae\xd1\xd7\xf5\xe1\x9c\xc9\x13\x9b\x88RC"\xeab\x155\xd3\xf8\xd6\x8b- 
\xd1\xa5\x98J\x908R\x06\xe5\x98\x1a\xc0\x8b44\x11/\x89AH\xac\xef\x86\xb9\x9f0\xbd\xb7,\xea2\x9e\xed\x9b\xcf\x8c\x13\xaa\xe3\xf7+\xb6:v*\x81%2x^;(\xae\xd2\xfce\xdc)\x08\xdc\x86\x1b\xea\xdc\xc3\xdf\xd0\xb3x\x95\xa5\xb7e{\xe0\xb6\xf9\xb3\x7f(\xe0\x93\xd3-\x18\x19\xa5\xa5=_[\xbe\xad\xec\xe4\'_]\x0e\xa0\n&\xa7^\x10[?T\xd7\x92\xc1M\x8f\xdb\x82\x10\xd8\xe6\x8e\xee\x81\xd7=\xda\x90\x03\x8ba\xd9\xfdZ\xff\xa70\x9e\n\x04,%0\x97\xcb\xa4\xf1\xb5A%\x16\xd5tO\xd0\x83\x1f\xc4\xa2\xe2\xd4\xb2\xe1t\xb5\xe1\xdcx\xf6\xa0\x85\xf7\x91,Z/3\xdb\x8a\xf5F\xf6\x0c\x81\x8bS\xa1\x89^\xe87N\x0c\xe80\xa5\x89]\xe6*i\x05C\x9c\x8e\xbf-\xef)UA^\x13\xb2\xe8*\xb4\x0e\xf3\xe9\x81\x82\xb9\x01\x87\xc9\x86\xd0\xfdo:>Y]\x8e\xc3!\xc5\xb6\xb8]\xe9\xc8\xda\xd7\xa4\x02\xdf\xfeOnlt\xfb\xca\xb6\xcc\x0f\x04\xebf\x7f\xf7\x96\xb2L\xfc\xe9\x1fN~\xa3\xe7\xc9\xcd\xad5n\x1f>kS\xf9\x8b\xaa\x89mY\xdf\xc5\x89\xae\xe5\x0bgv\xc6\x8e\x8e\x1c?\xfb6\xc1\xc1b\x9dD\x00\xec\xe1\xc8\x188\xa5\x85\x175{"\xb1V\x05\x0e\x8e\xe1h\x81^\xbep8\x01\x0eA\x16Mm\xba~\xa2\\2\x7f|O\xa8(\x8e\x03/q\x0eL-\x10\x91\x01\x9b\x04\x90\x88\x86\xaf.\x86\xe6\x958\x1f\xb2\x1b\x99W\x0c\x8f*9\xb1\x01G3\xb5\xe9\xd6m \x8a3J\x01U\x1fdk\x9c\x82\x1b\xa3\xa6\xbfy\xb7\x19W\x03X\x8dO\xe1\xf8\x8a\x12($\xf4\x87M\xc2\xc8\xd0\x8a/\xcc\xf5\x04Y\xb2!%(\xb5%Q\xc7\x8f\xdd\xac\x8ep\x92\x1d\xfb3\xa2J\'\xdeT\x17m\xb8\xc3\xec\xdc(\x1d\x85\x08\xd7\xc71q\xd7\xad\xdf\xa6\x01\xba\x0b;[\x0c<\x82Z\xee\xad>z\xeb\x05~F\x1f-[rbKy\x87@/lW\x07oa\xa8\x0e\xf3=yUo\x9e\x88yR\xfa\xd6\x82\xaeR\xaf[\x92 
+\xcaT\xd7\xa3\xe2\x0c$\x96\x1c3Q\xc5\xfa\x8e\x98\x88\x15\xc1\xaf\xbf\xb0\x8b\xd9\xd1\x12\xeb\xc8O\xf2\x92\x93kf\x05\x96\xe1\xb1\xc8\xc9\x8d\xa9n\x9f\x9e\xcb=6\x12w\xd6\xcfnZ@E#V(\xf9kmK\xfb\xe4\x11\xe0\xbc\xe6\x83\x192\xe4\xa4\x84\x8d\x85\x03\xb0\x83=\x11\xb4_\xdbsEho\xe3T\xb5\x9a\xc6\xe3xQ\x97\xc7\xce\xd3\xf3\xe4\xbfo\xd9\x967\x80A|sE\x96\xdf\x8c\x87>Ub(z\xa07\xbd\xf2\xc3d\x8cC/\xa3^J\xd6\xa2\xb7\x91]\xb5\xde\x82\xc8\x812\xc0:_a\x069\xb4\xe7\xa4J\xad\x0b\x1ap&\xd3C\xa5\xa8\xe4\xcc7\xba\x88\x85\xaaz\xb0k\x17"\xf57\\5\xc7\x13\x85c\x1ev\xf6f)1\xdbW\xf4\xba\xb6SJ\xde\x0bCu\xe11^\x15\'\x0c\xb2\xd2\\\x0e\x96\xc7^\x88\xd1<\x0cV\t\x14$R\x06\xd5\x16W\x9el\x7f\x93\x18\xc0\xbf7\x98\xae\xd0`\xaeI\x9b\xe2$\xe1\x06\xcd8<y\xb5\xd9\xbb\x86EU\x8d\xbd\x15\xb8\xc30\x15\xc7,\xf48\xc6\xab\x95:\xd2\x97A\xb4k\xd2FQp\xc1\xdc\xd5N\xc4\xdf.t\xe9K\xd3\xf9.\xb9\xf9\x00\x05e\xce\xaa\xa3\x05|\xf4\x96?\xd1\x11\xc8N\xc4\xe2\x8e\xe7+7?\x91\xa8\xb7/\x80\xac\xf2X\xd6lr\xcd,/AE\x9c\x8aGVs#(\xa7\x90\x173XfFU\x91.\xdb\xecv\x14\x88\xf7(\x8e\xd1\x8de\xe6\x9a\x08r#\'p\xcd\xf8\xba=\x03\x8f$M\xa7^\xcc\xdbc\xf2u)\x82\x83\xc9\x10\xd5aP\xc0>\x86_\x89\x0e-\xaa\xa8\x01\xaer\xbae\xab"i\x13p\xb0\x9a\xcf\x05-\x9bT\xb4\xabWMWx\x8e\x0b\x91Y0\x8d\xc9\xe6\xc1\xc1\tvzF\xa5\xbe\xf0\xfcn\x0f\xfaD7\xffw\xd3D?t\xcb\x0f&D\x10fSk\xad\xc2b\xd7N\x02\x02N\x1f/C\xcc\xefL_[\x9d\xa8\xac\xfb0\n\x03\x86\x8b\xd7\x1b\xb7\xe7\xf3\xd1\xc7\xe3\x9fh\x10\x07\xdf\xbe)\xdb?[\xb4\r\x88\xfds\x04\x07\xf1\xb2"U\x1e\xcf\x80\xd8\x940\x87[\x98\xb00\x88g.QUO\x84\x1e\x90\\\xf8\x02cP\xe9\xaf\n\xee\xee\xf5\xc9WwL\x1e:\x06\x9fx\xb1\xf2\x9a)n\xc0\xb9\xe9\xbe\xf91\x89_I\xc0&\xc6\\r6*\xe68\xc0\xaa\xe6\xc1z\xb9\xbb\xff\xba:\xfao\x02\xcb\x83\xa9\x95\x8d9nN\xa5\xb8\x033\xdb\xdc^\x13\xa8\x13\xdbC\x0e\x95\xa1@\x90\xe6\xe6\x9f\xda\xbcg\xcb\x91\xd9\xf0\x86\x8co\xb3\xe5Fi\xa8\xc8\x00M\x8e\x91v\xbd\xcabv\xde\x14\xbd\xccmS\xb4\x0et\x87\x062\xde\xa9\x92i\xad\xe6\x88!\xd3qT\x95C\x8d\x19(\xbd\xe8\xb1\xf5p\xf0_4\x10\xf2i(\xb0\x8f\x88%\x83\x91r\xae\xef\t1\x80O\x05\x8a\xe
5P>eR\x14l\x95\t\xee\xa4\xe6(C\x89\xfa\xbf\xd5h\xb6>\xbd\xc4\x1a}\xd7\xaaz\xfb\xa4\xab\xd7\x87\xc0a\xa0\x81\xcaC\x91\x9e\xc6\xa2\xec\xf6G\x13\xb3(l\xcd*\xcdnWU\xd7\x82\xbe2\x98\x0e\x95*3\xe62I\x80\xa4\xb6C\xa3\xd2u\xe3j!L6\xfbm\xc6\x99\x03.p<\r;\xac`\xce\xa3\x06\xd3-y\x84\xe7\x90K\xa6\xb6>\xccR\xe2\x87\x82\x06y\xf2@d\xb1\x05\xb9#\xf5&\xdb}o\xb2\xcc\x7fh\xe6\x86\xe97\x8es\xa9\x95[\xd5\xd1;{\xf50\x94\xa4\xb3J\x84\x9e/\x02(}<\x01\xf2\x98\xe4\x82\xed+\xec\x86lv^O[\xbc\xc8!wn\xbf\xd66\xd7\x9b,\x92\xb1\xeb\xa4,\x07\xc2e\xb9a\xe6\xb1\xe8\xc7\x9eb\x99\xbb\x12\xa5\x82\xe1$)NMm\xd7>]\x1av"\xe6pc\xa8\xbf\x97V\x8e\xa8>X\xc8\xd1\xaa2\xda\x85\xbb\x8d\x92\x89\xc6\x9c\x1a\xf3\x02\xcc\xd1`rO>\xe5\xa6\xe9\x1bG\xce\x80\xa0\xd7z\xb4\xb7\x93\x05Sv$\xe6\x7f\x1f\xaf\x8e~\xa3\xde\xe30\x12\x98\x19\xaepw2\xfa\xdd\xfe\x9fS\xf3|m\xbe`"\xd5\x85[\xccd\x11\x08\xb3\xd6-U\x88x\x84w\xaf\xbc\x15{\xd9o\xbc\x83\xe4\xed!\xd9\x8a~\x9ah\xb4\x9cO\x9a\x8d\xa3N\xd3\xa1\x05\x84\xa5d\x05MQ\xfe\xeb\xe0\x12\x15$\x13\xc7\n\xad\x00\x95Z\xa2?KR\x8b\xa1&\xd7\xb3\x8d\x81\xd9\x06\xee\x00g\xcd\x10\x04\x01\xa4\x7f>\x94h\xb0H$Z\xe5v\xe9:e\t\x0bf(\xc4\x11j\xfb\xe4p\x92\xc7\xfa\x05@l\xc21\x1e\xf7.\xc1K;\xe9\xf9\xe7O\x987\x0c6\x18)\xfd\xc1\xa9t\xa2\xfc\xc0\xb1d\x0c\x7f\xe0P1\xacg$q\x07xN\x9c\xe2\xca\xe3@\xd0\xacK8\xabT\xe8\x1a+(\x85\x8f\xac\x15u\xf3`\xfa\x1e\x87\xed\x90\xa7\xc3\xce\x1a\x1b\x91\xbc\x8e\xc9\x18#-<Z\x12\xf2\x02\x18\xab\x07$Dh\x87h\xca#L\xd7a\xbfR\xfc.Wm\x90l\xe9\xfe\xb7\xc9\xccH\xb1\x0e~\xd9\xeeBo\x82\x80\xf2\xa2\xff<\xb7\x82\xedDQ\x90\xb1(\xdbj\xb8r\x1f\xcd\xe2\x87B\x86\x89\xbeM\x91\x02\x9b\x8a\x88\xb6\xe21\xfc\xd85\xc56\xf8(\xdf\xa8\xc7\xb1\xb7-\xa9\xedsfR\xfbvI!~W\xdcx\r\xdb\xc4rBf\x8b\xcc\xde|o\x9b\xd6c\x14\xed0\xe2\xdb\xb9\xf3k\x04b\xa5g\x8eS\x01\x1b\x92\xc0\xde\xc3\x00\xcc\xbf\x15\x8f\xd8[\xb4w\x80\x13\xebl\x96\xf0\x8a\xd6\xba\xa8\xa7\xec]\xd8_A\x83\x18U\x17,\x07;\x11\xf1!\xceqb\xd8\x9c\xfd\x00\x06[\xfc\xad\xaa\xcdJ\xe3\xa5z=\xa0\xfdk\xfb\xc84\x8fp\x02+G(\xef\x00L\xad\xb3\x07c\xb2\xa7\x17\x1e\xe2\x9f=m\x
e3\x1fu\x03\xf5\xa8\xb34"\xf4\x1f\xcf\xb5\xa4\xf9\xfc\xee\xc4\xf4O\x0b\x95K1\x16xpH\xbbr\xe7}e\xed\xae$q\x11\xb3\xc3}\x13vH\x94\x17\x18\x1ft\x01g&\xb4\x84\xe9:\xdbu\x86\xac\x98q\x13Af\xa8\xd61\x8e+\xaf\x89\x87\x8d\xd6*\x7f<)\xe4\x96\xed \xf4`\xac\xa5\x1fX\xbe\xd4\xa5,\x8e\x90\xe5\xa0H\xcb\xe3\x88U{\x8d\xf6\xcb\xebO\x8a\r\x9d\xc2\xb0\xfc23\xaa\xcb\xe9Of\xe2K\x9e\x93\xc8\xc0\xfe\x05\x96\xee\x99\xd2J\x9cvG\x00\xa2\xf8\xa0\x8c5\xb1\xc0?JlKI\xe9\xe9\x9f,\x99\x11&\xa9\x106\xad\xf8\xfa5\\37\xcb\'bqiF\xb1,\xf6){\xb9j\xf2\xad\xa3\x16\x8e\x1e,\x9e\x9a\xeb\x8cP\x16a\x96ct`#"v\x12]\xd6q\xaeN\xde\xd5F$\xcbO\xed9j\x0e\xc6\x8d-Vh$\xa6\x91J\xa04\xb3\xe2\xd5u \xb6\xb0A\x193\x80\x86X\x91\x82\x8d&\xa8\x8b\x83j\xf8l*\x1a\xf8\xa1\xbe\xa2\xe0\xb8\xf58=\xa1\xaeM!\xc9\x12\xef-\xa8fCZ\xf6\xf4\xe5_sZ\x1c\x98\xc2\x83\xb5\xf1\x148O\x86"\x83\x93=\x18\xcd+Zh\x95\x80\xf4\xe2\rX\xa3\x17\x86\x99\x97\xa0t\xd5\xe8{\xe5A\xd0\xd8\xc7\x02\x10\x18\xb5\xa7Q\xc0\x16\xa3\x00\xc1f\xb6\x8b\x8c\xcc>\x1fx\x14\xa9o~\xef0}\xf9\x85\xd5h\xc4\xdd\xc8\xc4\x8b\xec\xd8@\xed\x17\xdfp\x06\xf1\xc8\xf1\x7f\\\xde\xb5\x9f\xf8\xe6\xe9\xcc\xb5\xea\xa2d\xd3g*\xbeF\x18q\xc7\xa3\xe0\x99\xea\x0e\xebKrV\xc7JVT]\x1c\x03\xe4\xb3\xeff=\xc9\xf2\xe9eVil\xdc\xeeM)\xad\xe2\x06F^=\x87\xdc\xef\xcb\x97\xe6\x18\xe3\x18\xd2\xe8\xd1+\xdb{\x16\x92\x92Ow+u\x84L^bcv\x04-\x07\x08\x0c\x1c|\xf2kkS\xb4\xbeS\x9e\x1e\xcbj\xc9\x8a\x92ME\xf0\x8e\x93\x04\xaf\x18\xea\xc0\xce\xfc\x80\x11\xdfM\xcb#\xcb\'\xed\xfe\xd6\xd9\xfeR\x80\x01I\n\xd0)D\x92\x11\x9b\x0c\xe4\xf9J\x15\x0fz\x9c\xd0I\x8a5c\r\xbfU\xe9T\xc7\xcb\xe0\x91\x07\x1e\x93\x93\xc1\x18\xab}\xc6\xd4+\xa9\x8e\xc6\x14\x12\x13\x0e\x15\x11\xb6%\xa8\x82\x1b\xd3\xf0\xe3\x1d\xfd\xcd\x8f\xda$a=\x175\xb6t\xc8\xab1\xd6\x18y\n\xb3vJ#\xa6Nw\xbb\x0f\x19\x95\x1d7g\xd5\xf1{\x05\r\xc5\x02\xfa\xb5[\x01\xea\xec\x8e\xc7\xa5:\x1c\x01P\xd7\xe2c\xf1\xac\x8dD\x94\x12Z1\xef1\x97\t\xd3I#V\xb5\xb8\x17\x1f\xa7\r\xbe\xa1\r\x14\xb5\x9c\xa3\xc3\x90\xa2\x07\x94\x168\xeb\x0c/\x81 
\x82\x12\x1b\xde\xae\x94\x82\np\\L\x13m.\x1d\x15\x97A9E\xd7-\x80\xd0\x819\x90\xa1E\xe4^\x9f.\xdb\xdb\xbax\xee\xb6\x19uW\x9c/\xf8\xe7\xf8%\x0c\x01\xab\xa3\x87\xaf\xbfZ\xd4V\xfb\x13\x90k\xd8\x8e=\x80\xeaN\x86\x978\x0c\x89*\x8d\xed^\'\x9dd\xffMb~(*\xd4\x1f"g\xc5\xce\xcb|_\xa9\x9dj"F\xd1{ \xaf\x9cc\xa9l\xda\xea\x15\xbb-\xd9~4\xc8K\x8b\xb6I4\xcb\xf8\x13\xfdY9\x16-\x9f\xcd\xf6\xd1\x11N\xde\x89D\xa7r\xca\xfa\xa2\x88\x18\x80\xfa j\x94\x96\x86\xc1z\xc3\xf4y\x06\x12\x98\x13B\x9eF\x82P\x8e\x95;pEH\x10c\x8d|\x0e\x90|\x87T\xd4c\x02\x92\xcd\x14A6\x9f\xdc\xbd\x02/\xf5m\x89\x08\x9ajy\xb0\xb9\x04\x86\xdd\xb2\x17F\xd2\x0bp\xc3;`H\x10\x87R\t\x92\x8a\xd0F\xd5\xc5/\xe1e\x02\xc6\x83\x94\xda[\xebj\x84\x15:6p\x8d\x8d\x84\xa2mp(\t\xd9\xdaS`?cHy\x88\x19\xd5}4%`_Y\xae7\xa5\xe0\xaf\x1b\xea\xc5N"\xd8\xc1\x0c\x19@D\xb3#\xd3\xdf4\xfb\x98R\x15`\xed\x88\xae\xba\x85\xd9J\x07j\xfe\xfa\xaax\x94\x07\xec\x08\x89\xea2\xfegS\t\x95\x08@.\xde\xdaxs\x88\xd0\x95\xc3m\xc1\x8c\x08\xdd\x1e\xd0\xc9"\x0b\xb2,\x1c\x1a\x95\xe1\xc7\xc5\xb5E\x0b\x01k\x19"\x16\xf1\xe2\xfc\x06\x88\x90\xcdql\xbe\x90%?\xc6a\xc2\x1b\x1c\xc8\x05\x9c\xb5A\xb8\x1c\x12\x8cH\xc0\xae\xdc\xc4\xc3\x18z\xbc\xc4\xea\xa4\xd2\x8c\xda\x9e\xdeX\xeb\xde\x0cP\x83\xe3\x9bIhM\x1f\xbd\xac\x86j\xebux!L\xf4\x1a\xaan\x03\x05\x83\xa7H\xe1\xd8\xd6\x88V\xac\xb1]\xe0\x8c\x1eg\x81\x7f\xc9x^\xe4;K\xd5;px\x1aqx:\x1c\x8d\xedT\xa7p(z\xd3\xe2\xb7\x97Q:\xc7\x9b\xe6\xf2\xe3mu1\xf37\xca\xb7\xaaa\xea\x07\x1e\x90D@|\xdfM\xbf4\x8b\xea\xc4\xb5a\xf3=\xab\n\xe0\xcf\xd77\x06/\xd7\xc3\xaf\x07L\xe2b\x19\x1b.\xa5Y\x01\xd9\xb3]D\xcd\x97l\xb3L\tk\xa3"\x0ck\xd1$\x8c0m\xc1\xc9-\x8c/\xfd\xab\x01\x94\x15L\xd4\x9c\x01\x81\xe9\xe7?\xbd\xb5\xc8\xadC\xefRms\xe79W\x8d\x00\xc0\x0c\n\x02\xc97a\xd9\xbd=\xaf+\xc6\x9a\xfb\xe6\xae\x08\x1a\x86\xea\x82\xf3\xdd7\xb7n\x9c\xc9\xfe8fk"\xc2G\xdd#\xcc\x80\xaf# 
\x98^hE^\xe9\xf8s\x1e\xe3\xd0\xe2De\xf7\xd8\xd97\xc9c&1\xe6\x0c\xd21\xa9\xe8\xc8_y\x80#\xd4\xd2\xbd^Y\xa0*\xe4N4\x89\x92\xd3\xb0\xba\x89I\xdb\xe2\x002U\x99\x08F"\x10\xea3\x0c*p\xf1\x8ciQ\x99cL\xb7O\xb7\xee\x08\t\xa1\xcbR\xed\x9d&\xccK\xb9S\xd6\xce\xb0\x1dF\xef\xab\x86\xc3\x8f\xad\x85G\xec\xecaF/\xc9\x85%!K\xdc%\xba[]L\x99\x8dp\xccf-\xbe6y\xa2\xc2+8\xa2\'1\x97GV\xf2\xff\xf0?\xfa\x01\xd2\x87Yo\x1f)\xfb \x05o\r\x9d\x1e\x0c\x92\x84\xf1\x13fUh\xd9\xfb_c\xc2g\x90\\\xf9\xfa?\x16Zp|uP\xc0\xcbq%r\xe1\x84\rK\x05/\x98(m\xe3\xdb\x86\xd8\xb5\xd9:\xb0W-Q\xa2\xa8\xc3\x93\xfb\xaf\x19\xb2\xa2S\x8a\x08\x1c\xce\x9cs\xa1\x7f6_W\x97c\xdd\xfb\xfb\x80^\xda\xa3\x1f\xc8\xb6\x9dP\x0c\xdaFg\xfa\xea\xa2y\xc1\x94\xe4*GKb\xf9\x8c\x92\xa8\x07\x16\xfb\x04d\xda\x84@\xc3UDR\xa8\xa3\xb2\x978\xdd\xdd^\x96q\xc5O\x917v\xed*\xde4}7n\xc0@\x07\x10\xcf*`\xb6\xc6\xe8\x91C\xa1\x9e\xb8ak\xa6\xa6.o\\\x15\xf6&\x05\xa6\xfa+\xb6,\x15R\xf5\xf17y-\x18lHl\x8fYB\xe4,\x90"\xf8\xf1\xaa\xf4LEt\xfbe\xc4\xbd\x1d\xb8\x9e^\xe0E\x8d)\xdeV\xb4\xdf\x14\xbe\xcb\x16\xc8fn\xfd\xf7\xb0\xcf\x13\xad\x85\xd2\x17\xd3L\xc2\xce\x1d\x01\xa9\x1d\xfa;\xdbD\xf9$\x9d\x02l|L\x87\xc9\xa2\t\xf3\xe8O\x95\xa8mO*\x9d\xaa\x01^\x07\x08/%\x1d\x88\xa2\xfd:\xfcz\xf0\xc1V\x88\xf4\xbf"F\x82\xc6\xe1\x80\x0c\xeaV\xdf\x10i\xd2Q\xaeM\xf6\xea1\xbcM9\xb3D\xe92\x03O\x0em\x81q,\xb4\xb9*\x03\xb0G~\xc8\\\xa0\x18\x9d\x8a\xa7\r`-v\x00\x96\nf\xc9\xe6\t\xf6\xf1P>\xbf\xb1X\x1d]\x95\x86$\xda\x8ffb\xbcR\x15{L\x11nio\x104\x1f\xca8_I*\xa0+<\xe5\x04\xf9^\xf9\xa3M7D\x08\xd7\xc7\xd1?\x8ch\x1e\xce\x0c,\xfd\xb2_\xc0\x8e#\xcan\xe6\xefB\xda\x91\xd1&\xf7\xdc\xdf\x15\xbb\'\x9b\xd4\xb0\x0b\xb5\x10\xea\x16=y\x1c\x8f\x02\xb7\xd2No+O\x86\x04\xdaiZCvs\xcd\xb4\x91\xa7X\xe5\xe6\x8e\x99K\xf4*x\x17&<\xb5\xe2\xcaa\xda\xa9\xf7\x8f\xd1\x1d\xd7ts+C^,\x1b\xec\x7f\xb6\xdbf\x1d\x86\t\x16\x9c<\n\x95+6\xb7g\xaa\x81Q_Y<M\x00\xc1?\xb2\xe7\xf3\x02\xf8\x1b\x9e\xc7\xce\xd5D\xbe\x1b\xc6L\xc8Xro\x18\xea"\xf0t\xaa\x80\xd4\x00\x8a\x9a\xe6\r\xcc\x08\x0e[\xaaS\xbd\xa1A)\xf7,\x1af\xa2\xd7\xa9\x12\xc4Io\xa0\x1c\x
96\x02\xf9\x1a\x1e\xac\xe7\xaa\x8b~\xb5N&\xb3U\x84\xd4cjV\x97\xb5\x16x\xda\x1c\x05\xec\xf3Sg>\xc4G\xd3\x7f/\xbd\xd00?\xcf\x9aX\xc6\x11"\x01\xa2\xc2\x84\xec\x8b\xc3\xc0\x19^\xbeU\xe8\xe0\x15\xdf\x95-*\xb6\xf9s\xd3n\xdf\xff\x94G*l9\xd8\x87\x1a\x14)\x87\xfb0\xa3\xcd\x17\x9cB7\x8a!\x8fg\xde\xcf\xca^\xc2v\xd4B\xebI\xff\x15\'\x90\x93\x08\xdba\xd5\xe3\x08\xfet\x980\xcd\x06\x9a\xe6\xd3\n^\x00\xb7\xc6T/\xd6g\xfe\x97\xf6\xd2\xca\x86\x17\x16\xa8\x97d\x1c#q\x04\xb0\xdc\x8b\x9f\x13\x92c{\x19\xd6\xf3\x11\xb4\xf2\x18\x1e,Y\xfc\xcc|W\x93\xa9\x01\x82p\xbe\xca\xba\x84\xa1\xe0\x15r\x95D\x08\xe4\xd4\xb6B\xa18d\x8a\xf8[\x8a\xae*\x93\x03\xb6\xc5\x99\xab6T\x15\xb8lzmOB-\x97\xc7j\x15\xe8\xd0<"\x9e|\xce}\x13\x84\xb2\xfc \xd9O\xbf\x1c\xda\xbeDb\x8d\xb0\x8e\xe6\xf2\xf5\xb3\x15!\xb9*\xe4S\xfa;3\x89\xd8\n\xcf\x88LAI?\xd4\x11I?z\x05=\xb8I\xb04Y\x1cc\xa5E\x08\x07%\n\x9f\xc1\xa4\x9d\xe15#\x9cC\xc5Yk\xb0\x0f\xf0\x0bm6ko0P\x03Z\x8c{\xf3\xc82\xc0\xd9\xad.\xa6\xd8\xd1\xcab\xf9\xeb\xbf/*5BJ\xcc\x12\x1c\x9eK\xa0\x15U\xf8\xaeF\xd6\x1c\xef\xb5\xd4(\x9dZ\xdb\xf9\xf1xK\x9a\xe7\x84\'\xd4\x97\x89Z\x13\xfd\xcex\xab\n\x92\xf4\xd3\x96\x88\x96\x9c\xa9\xd5\xb3\x05[\xed\xa6\xc7\xe5\x90\xea6\xe80d?#J0\xbd;\xd0p*\x0e0\xcb^?\xb7\xc5\xa3\xa7\xea\xe6\x14\xea\xd6\xb63%Zl\xc9\x01\xca\x0530\x9c\xdf\xab\xae\xbf{\x06\xae.\x8a2MD\xba\xe6\xf4\x07\xcbeX\xbe\x10\xa7i$\xe4D`\xb6"\x05@J\x81\xb9\x12\xedB\xa5\x86\xd5\xab\xf6\xa2#\xc3tD\x8a\xc82\xde\xe0\x1bb\xca\xd5\xdfH\xedp\xfa!{b\x85i6\xee\xe0\xbf{O\xa6?>\x99\x17\xce\xdfT\xd7\xfb\xa93q@\xfb\xe2\xc5`=\x05\x0bp\x9a",~\xe0P\xeb\\\xd1>[\x87#KQ\x82*\xf2\xc4\xd91@\xa4\xeb\xffn\xbdx\xfd\xe3\xcc\x14\x94C\x123d\x118\x8a\xad\x14\x90\xe3\xc2\xd6s\xba\xb3\xe3[\x877\xf0\x81\xee\xe5\x1fs6\xa4F\xb4r\x99\xd9\xa5\xa4\xf7b\xce6\x1cE\xc99Se\x04\xfd\xf4Z!\xb2\xa1\xcab\xe9\xcb&lp\xd6\xbd\xb6\xfd 
\xb75\x1d7\x07\x00\xc9fQ\xaa\x90#\x80X\x12\x8aWU\x8d#\x87\xf2\xf2\x8a\x8a\r\x0cY\x94\x96\x97\xf1\x80b\xd9\xaf\x10U\x01;\xa6\xce\xc1\xcc\x0b\x16@r\xe86pZ\xa8=IL\x89=\xe2p\xe6V\x8f\x1ao\x99\x93a\xb1\xc63\x90\xac\xce\x0c66\x06\x97\x96\xfe\xfb\xe3U\x8b\xcbln\xab\xa2\xc5\xee\xdf\xb6-\x82\x1d\x99\x00\xc1\xf1\xb2\x8b%\xe6\x80\x18u\xe0\xc3\x96Ll\x83\xd1!\xae\xbe\x86V\x93\xf0\xc0$\x91\xf8P\xf1\r\x93(\xfbx\xe7\x15\xca\x96\x98YR\xe2 \xf6\xc1a\xb1\xc3EK\xcd)C\xb9\xe0\x0b\x02[\\\x8ao\xd5\xc5x\xc6\x0035\x18&\x9fo\x1c5\xd5E\xf7^\x94\xddU\xb0\x19\xac~Qc*TH7\xeec\xcd\xb2\xf7\'[\x0f\xcc+\xf2\x0c\x1cD\x134\xd0\xb1y!c\x92\xbe\xc3\xb6\xad\n;*\xed\xda\xde\xcf+R\xa1\xe1h\x13\x16\xa7\xc6-\xccu-FO\x84\x9b\xdfe\xc4\xe3\xa7\x9c\xf7\xd9\xa9F \xd6\xc2@#\xe6J%\x16\'\x90\xdfY\x9a9h\xd1\xd5O\x87\x0c\xf1\xaf\x97y\x008\x16\x07s\xb1#\xfa\xcf\xdet\x8a\x80\xa4\x92\xeb \xb8\x99)*m\x02-\xd2\x9a\xa5\x9d>\xda\xdd\xd5PM&#,\x94\xa3\'0\xbbF\x1bp\x0f\xd8=x\xf0\x16K\xf3\x97\xaa\xea\xfa\xe1\xfd\xea\xa25\xb5M\xff=\'L\x8a\x17\xcd\x97\xef)\x1e-\xed])\xdf^\x9b\x05\xfe\x86\xc3\x1c\x08\x92\xcd=\x07\xd9B\x9e\xb1\xf7LB\x08?#6Ko)\xf9\xea\xcc~\x13$"\'f\xcd\xb6\x8d\xe5Z\x07\xd2"\xb3SA]\x8d\x1c1\x89\xbcN\xceT\xb1h\r\x18\xa2\xe4<\x08\x8bP\x8f\x15O\xe3=\xd0\x93\xc2\xb3U\xfd\\Pb\x8a\xc3\xb3|` ^\xfe8\xc5\xfa\xfc2\xbde\x9c\xec\xaa\x8b\xb9{\xe87\xb4\x01\'\x057>\xff\x03%\xcbh\xd9w\xec,\x90#\xd3\xb8!)r\xc69\xb0\xee\x0efT\x9d\t%ge\x13c[4\xa1\x1f\xc6>\x95\xa0\xd2w\xea\xa4\xf0\xd1c\x9356\x99\x14\xd5p\\\xca1\xbe\xb3\xf6\x9f\xed\xb5\x13+\x87\x93\xf7\x04\xa7p\xaa\x0c\xe9d\x0b\x16W\x10\x0b\xf28\xad2\xb8)\x81\x82]\xaa\xea\x07\xd3\x18\xa4\xca<\xb2\xbd\xb61\x81\x10\x16\xf8(2@A\x85\x8f\xfe?UW\xdaP\xb5\xb6,\xff\x8a 
\xa0\xe2\x94\x95dg\x10\x14\x19\x04\x9c\x15Q\x06\xe3\xc1\xac\xb5\x12gT@DA\x7f\xfb\xa3\xaa+\xdb\xfb>\xdc{\xceQ\xd8;YC\x0f\xd5\xd5\xd5y\xa2\xf3\x1b\xc1\xb0\xe8Fs\x1b(/3JF\x02\xe70\xb1\x84\xf7\x06\xa1\x07Z\xc8}7\x9e\xb1\x96\x9a\x8d\xebXW\xb9\xbb5\x8fa\x88V\xb5\xbbo\xf7\xc1%\xf7&\xed$\x9cG-\xcf\xf1\x98\xa7\xba\x9e\xc0K\\n\x06\xbf\xce9\xb9\xfa\xa10"^\x1c\x9c\x11\xf0\x86\xf9\xd5\xa9\x0ea\t\xdc\x9d\x1d\x88$\xed\xe1$\xa2\xae\x13\x00Ua\x96\xc3i#mI\x01\x10d\x1a(\xbd\x08C\x9e\x17\x9a\xf1\x00\xc5\x80iH\xa1\x9c\xb3\xab\x96\x14\xcb\xd7\x15]\xf4\n\xa3\xcaf\\\x82\xa9\x84\xcdR\x85\xa3\xd3A\x80\xc1\xa4\xf0\x0f,UY\x1b\xa8b\xbd\x03,\xa6&\xe6\x9cz\xd9?\x03\xdc\xec\xcfB;\t\x1f\x07_\xe1\x03\xaa\xfe\x11\x8d\xed-\x7f\x0c\xbdw\t\xc3\xcfN\xd1t\x8d1\xc5\xa4*F\xa5gC\xfd\xb3\xb7#\xe8\xe2\x9aE\xf5<\xd2\x9d\x1d\x86Z\x1c/G\x19\x9bN\t\x0c)38JP\xc8\xe0(\xbe\x96\xf4\x88\x01\x10Kl\xc7\xe8\xa1\x0b|\x1d\xf8\xfd\x9c\xfb\x05#\xe0\xee|\xb0\xe0\x8c\x81\x05\ta\x1bx\xbe\xdb|\xe7K\x10\xe8\xa8\x8b\x98\x1f+\xc15-\x11"\\\x0f\xd6?a?qX0\xb6\x81>Y0RLXp\xbdh\xa1\xa2CD\xc0\n\x19\xc9\x80\xc5\'T\x83\xc8\x8d\x8c\xab\xcdx\xe4}Gi\x82n\x1dv\x83S\xcfXs|\xd5\x8c\xfbAI\x8bpf\xe9\xc8\x89)\xed\xcbz\x8c:`A-\xb5\xa31\xe82\x13`\x1b\x81\x84\xe8Ap@\xa7\x05\xe5\xecZ\xf3\x11=\xa4]\x02\x13^t-u\xeeN\xaf\x0f\xc6g\xc4\x19\xc0\xa4\x98\xa3\xc1\xdc\x18k\x1c\x10\x84\xb4\xdf\xf1\xee\xfd\x87\xd4\x8e\r\xb3gW<z\xfb\xe4\xf2\xc4=\xf8*\xa0!A\xc1\x02i\xda\x89E\xa7|\xf2h\xff3\xadq\x8dl\xab{Ma9\xffi\xc9\x17\x92\xec\xc6\xe1n\xc9\xbf`\x98\xc9|/\xd7\x16AM\x0e\xd5\xc5c\xfb;\x8e\xe9\xc2ka*;A\x8dZ\xd8\x91WNB\x9e\x0b,f\xfb\x19\xaa4\x18\xe3\x15Q\xe4\xed\xd2\xeb\x16l\xf5\xaa\x86S;\xdf}0\x9b\xd3bN\x8fM*\x13Td\x14]v}\xfc\xfcl7\x81\x0c\xfe\xac\xde\x89\x96\xec\x84\xfc\xc3ce\x99"\xaf\xb6\xe5q%@\x16\x87\x80J\xad\xa0\xf7s\xc4\x1c\xf8\x13\x9c\x19\xc3\xa4\xec\xd0>\x92\n\x06\xe0\xbf\x05\xb9Lp*n?\xb3#\x9b\x8c\xbe\xbc\x012\xd3\xd1c\x91\xe6\x14\xec\xf5mj\tbfva\x92\xb7Z\xd9\x92\x0f\x83*BvYLY\x1c1\xcc\xbc\xeeA1\xb6\xd1\\\xf6\x80\xec=\x86\xc8\xc90\xc3\xd5\xa4_\xce\xa3\xe1}\x05\xe
8\xee9;\xd6s[\x92^\x8e\x91\xb1\xc4\xe8\xbe\x1cm\xa5\x90\x9c.\xef\xd8bwN\x99\x19\xc1>\xc54\xb7\x97\xa9e\xcaX\x0c\xc6\xfd\xc2\xa8\xd7n\xa09y[\x1b\xda\x9d\xfc\x88\xb13\xf8\xdd\xbd\xc0\xc8\xbe\x0f\x87\xfa\xbdD\x05d\x06\xfaA\xa99I\x9b\x07X\xe8[\x02\xb5\xaai\xa5\xc6\x10Zw\xa8\xfc\xd6\xee_\xec\xe0\xe3\x01\xbb\x84\x1f\x9c\xdd\x10\x90\x8eW\xaa2\xfd\xb6\xa3\x8fR$\x1f\xc2\x05\xa8\xab\xb7\x15\x18\xf0\x113)\xa8_R\x9c\xfd\x1e\xb0\xa0y[=F\xce\xd1\xc2$\xeaAs"\x18\x19P\x95\xf2\xd2\x92\xd0\'\xe9\x1bW^\t\x0c\xad\xed\xca8\x80%\xd50V\xd5Qq\xa0\xdb1\xbb9\x88;\x11q!-\xfc\xec\x97r\xea\xd0\x8c\x07\x1dThsp\xd9+\xfb\xcch\xc5\x03\xc8A\xa6\xfbS\x02r\xc40\x8b\xc5\xfc\xd6\xbb\xed=\x0e\xdd]\x93\xa7\xa9\x9a\xb1F^T\xe6I\xbe\xb3j\xe9A\xf8,\xf3=\xe8\xd0\xb2Fg\xf7~\xf7\x9a\xad\xac\xaf\xdf\xcd\\Mm\xe5\xce\xb7\xea@\x10&\x0e\x15\xba]X\xa5L\'1\xfa\xa2\xda\xb6\x98\x83\x11\x15\x13|\x98\x86\xbc\x13s\xed\xfc\xf9\xa1\x8cFQi\x02V\x81\xd2\xe6T&\xf4\xe9\xebf\xdc\xe6\xc5\x1e\xa9\xc4\xf2\x03\xc2[U#\x90{Q7\x12+\x97\xdc^;{\xf1g\x95\xa8\x08V\x068RU\xfc\xba\xaaza\xdbK\xc4\x9c\xd8R\xb8\xf2\x166\t\xf9X\xb1l>\xac\x1a\rq\x87\xd2\xc4"\xbe\xf2\xb6\x1c8T\xa4\xd39\xc5\xb5\xd5\x9b=;\x9ba(\xb1E\xea\xe4\n\xbdg\x0b\x05Y1{\xcd\xb8\xfd\xab\x1e\x81_\\\xdd\xda\xc6s\xa0\xb9\xd4{1\x9b\xd8\xa5\x96\xe8\x7fQ\xb6\xb6\xb4\xaf\xe4`\xea\xe1\x13\xac\xe4\xb3\xdf%\xcb\xbb\x8a\x10\xb0\xb2\x14\xd2\x13\x0f\x878M\xa1\xd0yd\x1fG\x96!b\x93Z\xa0\x12\x91\xdd\xf4\xd7W;\x07\xb5\xfbr}\xd6.\xa9\xcffo<\x9b\xc1\x8d\xc5\xde{\x81\xf6\xf6\x05Td!\xc1\xf5\x9d\x0c|\xdf\xc3\xcd\xf8\x95w\xa8\x1f\xb6\xd8\xe4\x8e\x8c\x94S\xb3\xfat\x9e\xd9\xd6\xc9\x1d\xc5\xd7\xa9\x90\xa7`Oj\xca\xab\xc6\xbb8\x8f\x93\x96\xb0c\xb0\x0fa\xc6z\xc8\xdbF2\x00_\x97\x00\xb3\xd5\x90\xbbF\x03\xaa\x91\xf9\x90\xe7\xfd\xe6\x0f\x92\xc9\x81!<\xf1\x19\x1a\xc3\xdaE\xbb*lI\x8e\x98|\xe8\x8b\x95\x0b;\x06\xb0x\x05\xf3\xec\xfc\x0f\xf6gm\xfbp 
d\x94\xcd\x98\xebT\xeb\xdd\xfb\x04\x12H\x94k\x87p\x10;&\xe1\xbbs\x92\xc6D\xe9I\xaa\xff4\xf8\x81\xa7R\xfb\xc2:_a\x18&\xe6\x8d\x9e\x9b\xefK#\xe5\x1d<\x82\xeflA\x92\xfe\x15\xa3`\xd7h\xfa\xd8\x01\xc4DzX\xb4\x90\xaf\xbc\x9a\x17v\xd4\x9a\xe1\xf7\x9c\x17\xd5\r\xb7\n\xde-;\xc3$\xf3(\xfe.\xa9\x1b\xf9W{\x1e\xce\x07\x11\xc7%\xaa&CO\xc1\x8d\x94\x8b\xc4\xb4HV\x1d\xc9\xd7D\x07|\xbf\xa1\x82:y6\x94\xc5\xadm\xa3\xaa\x81#Vz\x94y\xe2&.\xc3\xb2\x05\x14\x8e\x13\xc6[\x0f\x9f]\xca\x02r8\x08\x1e;g\x87\xd6\x93\xb7v6:1\n\x88fv\xff\x8eZ\x85B\x18\xf3\xe6\xf8\r\x15\xa2\xd1\xad\xe7\xcdx\xf8O\xebO\x1f\x93\x9f\x0b\x19\xb0\xc8\xe9\xc8\x9d\x96\x01\xcb\\>[\x02\x85\x02\x1a\xa4\xac\xec\x17v\x0f\x87i\x8b\xe7q\x1a,\xca4T\x990\xe5o\x10\x01#\xd9\xb5_\xb2\xfd\x1a\xe0\x1f\x8a\xe7\x86\x17l\xeci\xd1@\x9b\x80GTi\x87\x1d\'\xf7\xe2\x0c\xd5\x10\x8c\xa4\xe8b\xfeE\xf7\x17\xc6\xb7_\x85\xfdHf\x9bq;\x17U\x89\xe0\xf6\x92C\x98b\x7fZ\x99i\x0bPng\x97\xbe\x889u\xb7\xfbN6q\xa8\x99\xf9[\x9c\xba\xf2B\x96\xdd+\x19\xc5\xd0b\x1f\x9f\x7f\xff\xf9\xf5\xf5e\xe4f\x1dD\xb1Z\x12\x07\x11\x01\xa4\xe9\x8do\x9c\xef\x91Y\x80v\xfe\x14pm\xf5\xc3e\xbb\x8fF\xc6\xbc\x0e\x13\x83V\x06W)\x1c\xb5a\xeb\xcboPv\xf3g3\xca\x0cF\xf0 
L\xf2+\xad\xc3h{\xf99\x12\xeb\xb8\x86\x83\xcc6\x97\xectAk\xaa\n#\xc1\x14\x1a\xb6\x97x\xc0\xd9\x85\xc5g7\x9e+\xd2\xe8\xb5\xb1\xc3\xa8\xe1\x9e\xc9\xce\xe8-z\x19\xdb\xfa\xa7\xd9i\xef\xd6\xcc\xe2\x92\x83\xe1\x04\xddW\xb6\x96A\x95\xfb\x1e\xa7\xb0\xa3\x94\xcb\x88x\x9b~6\xb8\xfc\x0b\xb2\xbd\x1erj\xd4\r-\xef\x98\xa3r@\x01\x98\xe7\xa5\xb0\n\xf5u@\xce\x80\r\x07mG.Z6\xb9\xa4@1A=&\x07\xda\xc8\xd1&#;\xb4U\xb1v\x11\x86\x06\xaa\x05\x18\xb3\xc5\xf1\xc0\xe7\x9f\xba\xff\xb4\x197j\xc3\xe2\xb0/\x1c3\xb0<Z\xb8\x06u\xb9\x1a4\x94\x98l}\xd6VXD{\xcd\xeev\x1ch\x95N\xd7A\xa1\x1f\xe1\xcch\xc9\xcf\xb9Q:j$\xea^\x08\x90`0\x8e\xc2z\xfd\xee\xc8\xd2\x90\xba=\xfe\x1f\xbb\xe0l\x7f\xaa\xdc\xd9g\xd8X\xe7\xec\xb1\x1d\x02\xe9\x9e\xb6\x80\xbfj\xce*\x1d\x084\xfd\xa1R:\xf3\xf9Vh\xc3\x91\xee^7\x92\xab\xd2C\'_\xa9\x9eC-\xd7\x8ee\xc0|\xd7N\xceP&\xa5\x98\x16r6\xda\xd7\x88j\x1dH\xa5\xae\xc4^\xfb\x10\x16\xec\x92\xb5"\xf0t\xfa\xda\x8a\x08\x04E\xaf\t\xa9_\xb1[\xe2\x041\x86\xf4\xc9=8]\x02\xabJ\xc1\xbbZ5\xa2\xb6\xfb*\xc7L\x18\xbc3\xb9J\x94\xdb\xdbOv\xab\x98\x07\x18\x95\xccJ:X\xc9x\x80\xed\x05\xfd*\x01R\r\xee1\xdb\x99Z\xb3,\x9c\xb4[\xaa\xf2\x83\xbb\xed7\xb9}\xb3\x82:TI\x92\x9a\xca%9\xe9\xca\xb6\x82\x12\xaeN\x10ok\x16\xb1S\x16\xe8F\xaf\xf5-\xe9\xec\xee\xab\xd9\x81\xae\xa4H\x83\xb1y)L\xdac\x88I\xb2/\xdc\x14M\xee\x9c\xbc]\x98\xfd\xeb\x0b\xe2\x88\x8b\xe2\xaf\xb0\x7f\xd6\rW\xe0\xfa\x81\x00\xb0\xe4\xde\xec1]0\xa7|\xa0\xf7\xc61T<\xb6\xf3\xe7\xb3G\'\x1f\x00l\xbb\xe2\xa7\xb2\xec\xd8\xbeJ_\xc8\x0b\xe5\xa7C\xda\xf0\xc8\xfc\n\xd1\xb7\xe4%D1\x13\xa5\xb6=\xa6\x19\xb0\xbf \xade\xbd*\x0b`\xa8_\xe5\xda\xdbk\xf6i\x15yj\x15\xb5\x9a\xde*\xe3HvQ\x9a\x08\x02\x1c;\xd5\xd0\x18\xcd\xd2\xc3qk\x1f\xdd\xb3S\x93 ~OF\xab\x96P\xb0\'\x0e\xe2g$\xdf\xf6\x9b(K\xa6\x07\n\x9f\xdb\xf0h\xd2\x02\xed\xa1\xeec\xa2\xbcfb\t;ys^a\xf8\xfb\x0c\xc7\x06\xdc=|KK&\r;.1\xc9$\x01\x1d\x9a!\xd8\xc0PJ\xe4\xb5q\x0b\x91\x16&a\xd3P\xab\xa3\x150\xc5Q\xd2"\x0f\x8f\x15\xdf^[\x93\x08ce\xd4\xb9d\xdbH\xd2\n\x02\xa0~\xd8\xfa\xc2\x02D\x12\x1e\x98)\xcdvb 
9\xe8\x13`\xbd\xa0\xf8\xdd\x05\xecI\xbdh\xd6\x98\xa8b;\x08@\xca0\xf5\xe5\x00\xd0\xe0P\x1f\xcc\x9a\xe9\xa2(g\xd5\x9e\xa1\x9e\x0f%K\xcft\x14\x9a\xa8.n/Y\xdc\xc8\xcc\xb4\x0b\xeb7?~\x10\x00\x0bC\xd5k\x08cl7l\xc3(\x80\xd6\xb9\xf7\xb6e\x89hT\xac\xe3\xb2(\x9f(\xc7\x19\xbal\x88\xd9\xf0\x82\xec\xdbn\xb7\x88\xd2\x93\xe2\xd3\xddC\xae\xeb%=3"\'\x08}\xb1\x83\x03\xe2`\xec\xa3\xc8\r\xec"\xd7\x13\x0e\x08\x13\x04\xfa\xfc\x99!\xa8-\x1a+|n\xdc\x95Ej\x1c{\x05\xebH\xaa\xa2*\xe7A4goP\xda\xbe-X\xa2\xc2_%\xc2k\xd2]Gg\x0e\xef\x14+^\x1f\x1f=\x9f\xfe\xd4\x1c\xfc\xd4\x96\x08\xcaM\x80R8dB\x14]jQ1\xcc^\xd8B\x04\x8eH#\xd7\xbd%6\n+\x9bL\xec\xdc\x7f\xb5\xa7L\x9a\x1fLz\xe4\xd2\xbb\x016m\x8f\x9c\x19\xd46\xbf\xf2\xd2\xe2h\x1b\xc5\x86\x17\xf2\xb0@\x14\xda\x10\xee\xcb+\x08\xe2g@rF\x918"\x8d8Z\xf96\xa6\xb0&W\xedn\x86\xf8\x1b\xf5%\x86e\xed\xc3ME\xec\xfd\x9e\x9d\x86\xa0\x88d\x90W%\xe6\x88\r\xed\xd2\xcc\xdc\xc6\xa0\xc0\xd3\xe5\x8f\x85\x95\xc0\xf2:\xb1\xa8\x88\xd8\x9e\xc7]\xfb\x02\'<\x9a\xf0\xd3w\xbfQ\x8b"\xe3f@\xf3a\xd5\xc2W9\xaf\xa1\x06\xe6x\x85Y2\xbd`;\xd6\xba\x89=\x01Jm\xf6EN\x08\xb8\t\xe4\x85\xa2 \xb6Di]T\xce\x1e+4\x95\xd3 \x83\xcf\x1e\xddK;\x895\x8b\xa9,\x8e\xb5\xc2\x1e\x98\xb2\xa6Gv\xd8H\xa8\xc7\xc0<\xfc\xa1j\xdeI\x98\x98\xb0\xb3\xdcG\xc2.\xc1\x028\x9f\xe8_\x88\xfeW\x8d\x86>\x99\xe3un`\xb5C\xfb\xca\xf7\xec4$\xf52\xa1x\x883_\xcd\x7f\x8ev\xdf6\xfb\x0fq\x02\xf2\xbf\x02\x05F\xb0\xce\x86\x95%V\xb9`\xe8\xd8\x91\xe1\r\xe5\xcf\x02$\x02:\xf5na\xea\xaa\xbd\x06y\x11\xb4\x1e\x82\x8d\xd9\x1a\xe0\x94V$\xc2?X\xdb\x88\xc2\x8fk\x01.\xb5=5\xdb\xeas\xc6PGO\xf7\x82~\x00\xaf\xd6.l\x9a\xc1\xab!\xecZ\x07.Q\xe8\x86\x1c<\x80!n\xb2j\xcdX,\xb4\xcb\x10o\xf6\x98\xd1\xe2\x1d\x08\x1f) 
\xe7\xfa\xba\x99\xfb\xf3{\xd1\\7\x8b\xe01=\xa2V:\xcd\x1c\x19\xb6\xbf\xbb\xac\x01Ec\xf1\x91d\xda\xb2\xb9\xae{\xd7\x9aQc@\x00.=y\x03\xd6\x08\xf9\xc96\x97\xedT\xfd\x82\x9d\xa9\xba\xbc}\xa2\x8c\xacn\xc6S\x8b\xc9A\xc5W\xf5\x9c$\xf2\xfa\x89\x0c/\x83\x86\xa7\xdf\x110\xe0\xde\xa4"3\xd4\xd43\xcdX\xdbZ}\xed\xb4\xe1Es\xf0CxK\xa9\x04\xd2\xbfx*\xd8*\xfb\xa4\xbc$]\xb0\xc3\xe4\xd9\x97\xd2\xbf\x02\xe1+\xf9h\x8f\xc2<d$(\x93\x15e\xc8\x1d\x0eJ|\t\xa0/\x1a\x99n\xe9\x8b\x9e\x9d\xc4:\x94)\xc0\xce\xaf\xcb\xc7\xd0\x01\xa4R\x8d\xe2\x0e\x1fw\x12Z\x05P\xd4\xdap\xc3V\x84\xb9\xfah\x1f&\xcd\x7f\x18\x901\x1c\xb6\x19;\xb7\x9ca\xd2O\x9e"\xb9*T\x9b\x89=\x8f\xdc\xd4?3U\xb3Z\x80\xe8*)+\xdb \x9fB\xff\x0f\x11"u\xc2D\x07\xe8T\x86&?\xc9}?^\xc36\xb9\xb5gX\xbe\x8b\x1e\xed\xef\xe9E\x1c\xc9uE\x08D\xcd\xb0\xcc\xd5\xad\r\xfc\xd63\xd9\x9fn\x0b\x19\x97\xff\x02\xc6\xc4X?\xaa\x98\xc7\xc9\xe3(#\xba\x83B\xa8\xd1\x987\xe7,\xf3p\x00\xe2}G\x93\xdc+(o\xdb?\xcdxD)\xf9\xe9\xd5GR;\x97\x05|D\x1e\xf5\xc6b\x86\x844G`\x1dN\xd5\x0cv\x04\x08\r\'\t#\xb2\xcf\xab\xb8\x07\x91\xac\xa1bA\xe2u\x0c;\xb7\x9b\xa1\x83w\x8c\xc7\rC\x96b\\\x9d\xb1s\xdf\xd6\x1f\x01\xf4C1\x85\xc2\x8e,\xd4\x93I%\x0c\x84\x10o{\xd3\xbc\t\xeb\xfb\xca$:Q?\x19{\xe0\xd4\xfb\x17]vx\xd3\x1e\xd2\xd4\xc4\x7f\xc9J%2F\x9a=B\xba\xc5[\xc3\xdbzA\x9cT\xcb-\x9f\x1c\xf7\xfdS\x1d@\xe0Z\x1e\x88\xfe0\x14\x88e\x1d\x95s\xba\xfa\xf6)\xb8\x07`n\xb2\xcf\xb35[e\xe8\xe55e\xb5\xf1"\xfaE\x1c\xfe\xb3\\\x043\x9c\xb2_\xa3F\xadf\x1bJr*\xbd%\xbd\xe4 \xe6\xcbn\xb3\xd2\x96\x10\x0e#\xb6g{\xff\x00L\xaa^\xf5\xfd\xd4\xb6\x90`\xbc\x7f{\xeb\xb8\x91x\x89vjdF\x86\xda\x82\xcai\xad\xec\x8a\xe6\x0e(\xd1\xb1h\x0e\xa7\x91\xad\x17\xefoF\xc2\xf8\xbf\xd1\x0e\x00l#\xe4\x83\xd3\xb4@\x8f)\xb1\xea\x80ld\xaf9\x9a%r\xc5z\xb3\xea\xdc\xdc\\x\\\xa5,\\L(\xfe\xbe[\xa7\x87\xe9\xf6n.\xca\xe0\'_a\x0c\x8a\xef\x12\xce\xef\xea\x8fx\xba\xe2\xc8\xd0\x9c\xf3\xbc\x0fS\xe0\x18\xce1D\x9cD%\x99F!\xaf\xaf?\xb6{gb\x9d\x1f\x9b\xb14\x87\x11\xfaa 
\xbc\xdd\xac\x84\x03Pi_\x7f\xcbW\x8e\x89I\xc7\xb6\x1f\xc9\xe8\xd7\xe6We\xd6\xed\xa1}GR*\xb9\xc7\xe5\x1caRNO\xdd\x10\xc6`#;\x12\x1c\xe3Q\x19\xeb\xc5P\xbb\x04EXF\x83\x1eS\x81\xba\xea2>\xb7\xdd;\xdc\x92c\xc2\xd3\xa1\xbf\'\xe6\x0b\x9c\x99<\xfa>ko[\x97K\xcf\xf1iO\x9fYr\xc4\xea\x1b\xbb\x02N\x0f\xe7\x84\x8b\x88\xa6\xc3\xa0#\x1a\xd6G\xad\x99\x9c\x07y\xff\x13\xdeD\xa4<>\xba\xe2\x82\xaa\x08\xef\xef\xdbi\xa9\xa0\xea\xdd\xa9\xe2\xc0\x88 W\xe7~\xe2\xd7\xe8\xd7R;H!#\x1e\xb8\xf8D\x0eWu\x8d\x84\x95H\xa5ACw\x0b\xcd\x81\xb7\x90\xa0v\x0b\xe4J>\x02\xaf\xa2s \xb8To\xa0\x19\x13\xb3\xfbg\xcd\x98\xd9\xcbJ \x0f\xff\xeeey\xe7\x8e\xd9\xa5\x97W\xef\x95Q\xb1\x99\xb9d\x08\xbbh>\x9e\xf5Wb\xf6\xac\xf4=\xb8+\xb0#@\xa2!I\x9e#\xb1#\xdb\xe1\xdc\x8f\xfdj\x0e.\x03b\x19\xe1\xc8\x96\xf7d\x1d{\xfb \xca[c\xe4\x95\xe9\xc8\xe1:\xc0l\x16\x1f\xef\xb3\x8c\xca9E\xa3\x07\xa7\x17\x85\xe7\xf1+\x181\xfd\xb2`\xdf\xa9\x10\\\xe9H\xb1NU\xbc\x02414\x07\x92\xf7\x97\xcf\x03I\xa2aI\xb6\x05\x85\xe4\xca\xdc\xc3\xcd\x0f\xca\xffq\x82\xc2-gK\xc3\x86#\x86.\xab\xf6\xcc\xbd\xc7 \xb6\x04c\xbaI\xd9\x08\xd5\xad\x0b\xb2\xdb\xa9\xfd\n\x85V\xcbIsW\xd4\xbf\xc2\x1b\x95(\x8f\x14\xf0\x87\xe5\x9f\x97\xd8\xbe)E\xc9\xf8-Sw\xd9B\xe6\xd1\x86;\x8dF\x84*\xf7*\xec\xb0\xb2\xed\xbe\xb2\xa8\xae\x16g\x88=JPAq\xc5G\xc12m\x99\x0b\x1c\x14E\x95\xd0g\x7f\xda\x8cI^\xacqb4c\xa5L\x88\x11\n\x91"\x15\xd7I\r\xc9\xacZC\xd2\x8d\x12*J1\x89\xbd\xd5\xa5r\x92N\xdcf*u "\x01a\x8a\x84\xc0B(\x10\xcb\xb7L\xa1\xef\xd8YbR6\x1a*\x16\xb3Je\xd1\xd5\xd3\xc6\x077 \xfe\xdc\x8a6IQ\xb7 #\xdc\x99\x91\xa9k\xb7\x01\x9dg\xf7\xc3N\xa7\x95K\xdf_\xa5>\rV\x19\xd2w\x1cQ\'P\xc4gJ\x06L r\xf1@[\x03\xd0>b\xde\x82\x8d\n\x96]\x0e\x16\x08\xd9\xfcm\xbbkl\xc1\xe0\x1f\xa6zp>\x12\xcc\x8c\xa7\xf85\xae\x86{\xf3\xc26(\x8a%1P\x97\x9c6\x93\xb2\x94\x88|\xc8\x067JE\x93\x9b\x11a\xc6\x93\xdb\xd9\xb1\xe2\x1fN>\x9a\xc1\x99\x9a\xf0\x9c\xb4\x86\xfaY\xc0\xe1m\xdb<\x11b\xed\xb6\t\xf9\x833\xe1\xaf\x01!\xa5\x96\x8c\xfc!\x0f& 
\x89\x0e\xbd\x1c\xad\xb8Hl\x8aj\x15~\xc1\xb8dr\xd6\x9d\xdd\xee\xbe\x9c86\x0bF\x89\xbd\xe4\x1e\n\x06\xe5\x02!d\xea\xf02\x91k\x992.\xd9\x82\xf3\xeeys\x92\xd4V\x81\xccz\x07\xe8\xbe\x03Q%\xa47\xfe\x83\xd3I\x9e4c\xed\x0f\xf6"\'v\n\x18m\xb87\xe8\x8d$\xc0\xa4\x88\x9c\xcc\'L\xcb\xa3\x02c\xfc\xf6d\xfdd\xc7\xa2\x0e\xf6\xf2\'\xc7H\x0f\xd0\xc7\xc9\xb9 \x98\xf7\xdb\xd5\xdf\xefi>C\'h%\x96\xa8\xb1\x8e\xa6Q\x17$\x8eX\xd83\x90\xb2\xd4\xda]`\x8a\x02vu\x95_\xd8\xd5\xe3\xc0\xd0\x87\x87C\x08\xf3E&\xd9\xb7\x7f\xfe\xbe\xff\xce)\xb4{\xeb\xe6)*\x95\t\x88\xf8\x96W\xed\xbaA\xb2\x9e\xb1A\xad}\xa3\xa5\n\x9f\'n\xfcy(8M\xf0\x0f\xbb0\xd3\xb7\xcd\xa5I\xe4y\xe8\xb0\xabK\xbb[l\xed\x1c5juX~+\x9f^?\x06\xb0\x94`\x829\xa9\x08\x89\xfc_\x0b\xfc\x07\xa4Z\xa7"5\x05.\xd3f\\\x02\xad0\xea\xa9o\xd7\x06<\x1dR\x84\xf5\xe9\xc3S\xa1\x81\xe1`\x1a\xd1=\xe0Z\x1c\xb2\xd6\xdf\xb5C\xd4\xa5\xad\x82bxW\xa2@x\xb0\x1c|\x15\xd0\xca\xac\x88g\'\x9a\x0f\x845\x01\xd5\xd7\rz\xe55\'e\xe1&%?\xf0ZsT\xfd\x90\xd9>\xbf \x97n~~\xfdPY\x96\xe8\xa1\xa4\x87\xd7\x8d:\x9d\xf0\xb4+f\xba(\xd0\xd4Z\xce\xc4\xc4Y!\x0e\xedE\xcfG\x96%\xaa\xfa\x835\x8d\xe4\xa2\x12>A\xceiUa\x10\x11S[\xafr\xd7\x89\xfcf<\xcd\xa7\xd0\xcep\xc7\xef`\xc7\x0b\xc12-\xbc(soP;\x1c0\x90\xa8\xbb\xe5\xfd\xdaC\xa1\xf6"U\xc0^\x93\xda\x00|\xa9\x15\xfa\x9a\x84C\xedQ\n(\xdao\xda\xfdc\x13#\xcc\x04\x06\x8dq\xadF\xcdx\xa09\'D`\xa6F\xc4\xc0\xfa\xa1\x00\xdb\x13\xb2\x82\xe3\x832\xaf\x17\xb3\xca\x85lSi\xc40x;\xa8\x14\xcd:I\xb0 \x94\xec)B\x87\xdf\xf0\xf9`\xb68\x94\x18\xe8\xb1\x85\xa8\xc4\xe2J\xb3\xaf\xb9f\x95\x7f\x85k4\xb5*\x06`2\x02\xd1e\x04\xc4\t\xa3>[ 
\x05^u\xcf\x80)\x9f\xe7g\xbd1\xbf\xd1VS\xff\xea\x02A6\xd4\x18Q\xebk\xf6#\x9e\xac\xc0\x81(#\xc4\x19\x173!f\xd75\xe2\xc7\x9c\xe0\x198\xcd\xe7\x11f$t\n\x85C\xf5\xf3\xee\xb1\xbd\x97\x01\xdc\xbb\x9f\x0f\x9aK\x18\xc1<z\xfd\xf4\x9f/\xf5b\x88\x91@\xc09X\x03\'!l\xdf\xb28b\xe8=n\xd3\x89\x05A\xb3\xce+[*{\x80e\xf8\xaf\x00\xac\x8f\x93\xde[\xcc\x14\xef\x19.)%\xa9\xac3\x83H\xeb\x133\xe56\xee\x9c\xcde\x17\x9f\xda\x05caS^\x82L\xdf\xd1\x0c\xd3\xa1Y\x03\x90\x99\xb6\x8c\x86e@\xe53#\xeb\xfa\xa9\xcc[\xaf\xb8\xde\xa9\xa8\x89\xfbS\xf3\xbe\xe5\xcdX\xd1\x83\xdcb\xaa\x0e2zyg\x9b\x0e\xb8@e\x03\x9e7N\xcfH\x1f\x13\x17C)\x8cs\x9a\xbbF\xe3\xbf\xcd,FL\xf9f\xbe\x9a=P\xc0\x14\x1a\x15\x9b\x14\x86\x94\x87\xdb7\xecS\xf1,\xc4|\n\x1d\xfaDv\x16\xfe\x03\xdd-\xac!\xd5\xe2\x02\xfb\xe4\xe4?\xbb\x97\x1c\xa5\x01\xb9\\\x12\x9e\xe1\x87!kZc:H\x05\xe5vW\x7f\x93\x0bN\xcc\x8f\xb3\'\xbaj$\x15e\x86\x8c\x1c\xc3T\xf9ko?\x9bd\x98\xd0=P\xed}\xb8\xf0\xc2\xdelP\x88\xb7^\xbe\xea\'\xe9\xe5\xf9\xdd\x1f\xde\x02\xc4\x01E\x08\x95\x9bl\xc6\xac\x99\xaa\xbd\xbb\x19w&\xb0p\x8f\x0e\x85\rg/\x91-\xb4C\xed\x96\x1d\x92\xce\xee)\x07\xda\xd5\x96,r\x86\xa9{\x83\x8f\x8d\xdf7\xcd\xa7\'\xee\t.\x1cf\xab\x92\xea\x96_XV>D\xaa\xccMayx;\x92G"{{\xcbE;kl\xdc\xcfu\x04J;\xbd\x95.A\x1ddg\xade\xad\x19\x17\xf6*\xedER*\xce\t\xdd\x97\xbf\xf8\xfb6;D%%\xbe\xb8/t\xd5\x7f\x96\xd7\xf4\xdc\xd5\xa3u{\x9aZ8]dKj\xb5\tC\x08Z\x10\xd1\x89R\x91\xf0y<\x0bK\xe1\xbe\xd9! 
xY%\xe0_W\xcf\xec\x13\xe9\x97;pfj\xc80\x13\xc9\xc6\x80?6\x89Q\x14\x96\xc4Gl(x\xdb\xbe|v\xa1x\x0b\xd4\n\x98A\x02(\xa5\xce\x96d\tG\xf4\x16\x14\xe2\xd9\xba\x01le\xf4\xd86\xb1\x07\xfa\xc2\x04\xa5\xfc1o\xc7\x81+\x8bONV\xdf\x1f({`y-(\xe3B\x14\xca\xb0Y\xd4\x1a\x8baf\x84\x0f9\xb3%\xe7\x8bU\xea\xb3\x86\x9c8Q\xa5\x8f\x95\x99\xf2\t\xcd\x14\n\x1a\xf5\xcbO\xdf\xcc\xa6\x1b\xd1\xb0\x19\xf3\xc9cI\xc0\xee#\xc6\xcc\xb3\xea\xc3+\xb3\xf8Wv\x01\xf2\xff\x1e\x107\xf3^\xc6\xea\xde\x1e8&\xb3\xcb\xb2\xd3\xf5/\xfbz\xca\xab\x83\xb2IA\x19\xbe\x15\xe6Xu\xe9\xcdo\x16\xa1\x13\x8eH\x1a\xf5\xd5Nb\xa4G.\xd6v\xa5\x17c\xb9\x9b\xd9\xde<\x01I8\xce\xfc\xca\xca\x0b\xb3\xcb\x14\x19\x1f\xa6\xbe\x85dsJ\xa6&\xb3\x13\xcb\xb5\x15\xa2\xc7)k\xb0a\xe1\xc2\xd2\x11|\xd3A\xf9k\x06\xcb\xbe\xd9\x98\xf2\xd9\xd04|\xbe\xa4\x07\xb6t\x9c\n\xdd\x9a\xc1l\x91\t\xf6\xf9\xe4\xac\xbd\r\x19F\xa9]v8a\xa7\x93\\\xe2\xe3\x18 d\x97\x17\xdf)TdBAz\x1d.O\xfd\xf1\xd5\xcf\'\xf6\xd4\x01Y\x1bE\x0e*\xc5\xd3\xf0K\x14KBX\xc9\xd1\x1c\xe71a\xb3*\xd4\x06\xc5\xf6@\x10\xf3L\x08\xd7\xc8V\x99Z#\xc4\xa9/"jC\x1fR.\xf3\xc3i\xa0\xd0\x12\xa2\x83\xcf\x05\x14E\x88M\x83\x08\xc2\xe0\x8d\x10\x0c8\x92\xee\xa8\x19w^t\x02HH:H\x96\xe7\xf5\xe2p\xd2\xa9_\x81\xb5\x87Fl\x05\xbc\xb3\x0e\xeb\xe6\xd2\x12"\x8e\x93\xcf\xcd\xa9W\xd4\x9eM\xd0\xdaK\x86\xbe{\xbe\r~j\xfbK\xe4WV\xa9\xf3!\xf5\x98\x92I"\x19\xef\xd3\xa6\xd1\x9bN^\xdbe\xedy5\xebM1`\x83\x13\xe9\x98!\xec\x08\xa8J\xf9\x9f\xad)\xf1K\x98\xd9^,\xd0\xde\xff\xa5M\x00*\\\xbc\xd0\xa2$\xe80F\xba\xdc\x1b\xaa\xfe\xfb\xf1\x89Yx\x1a86\xff\x94__\x99\xa7 
aSl!fH\xfdO\x9c\x85\x1e\xb6\x07\x92\xa9D\xeb\x87z=\xf3\xe1\x13[E\xc6V0H6\xe5\xf0h@F\xe9A\xaf!\x12\xaf\x97\x96\xec\x8d\x99\x84{\x8e\xa0@;\x9b\x87Yb1B\xf5\x1b\xcb\x08\xbe\x1d+`\r\xdd\x8f\xffQtg\x87\x03r\x01\xf3\xbbO\xb7\x95e\x0eeXx\xe5\xf6\x9a\x05u\xe4\xe1\xb1\x06\xd9\xca\n\xd5\x7f\xe6\xec\xfdx"+}/CY\xb0\x9f\xf3\xb9;\x8d\xb4*\xdeYdiV\x057i\x82^\xc7+\x99\x11\xe9\xc3\xc1\xd0\x8d\'0\x8a\x17G\xed*\x8e\xc6(\x07de\xfe\xf6_\xccU\x81h(\xb3\xf5\x82\xbe\xf4\x89\xdd^\n\xa6\xc2\x86\xd0LT\xf6\x80\x84W\x99>\xbe8U\x12PY\x9a\xe8\xbb\x1f\x8f\xff\xb3\xe3W\x97\x8f.No\xe1\x9aV+\x17f\xecZ0r\xc7\xf0\xa5\x80\xa2\xc0\xf9z_\xc29! 7a\xa6&V\xc8\xd1G\xdf\xb0Hs\xb3r\xc4\xf9k\xb0j\xcaO\xb6\xbd6\xf5\xef:\xf8%a\x13\xf7\x0f\xd4m\x02\xa7\xd5\xa6r]\x03\xfc\xde~\xb3u\xaa\xc2\x9b\x19t\xff\xa6\xdd;\xe5\xf5u\xfdzN\xa7\x1cV\xda\x7f\xb2{\x1dK\xc8\xe2\xf2\xdfF\x82\xa2\x83nP\xdb\x88\xfceF\x16\xb1\x04o\xac\x17\xec\xcd8*@\xb2\x12\xcf\xde\tc%\xe2\xd0\xb2\xf2>m\x17\xbeMO\x10\'\xf6\n\x848L\x16\xdb\x1d\xbe]\xb8)`\x839\xe6\x95F\xd5\x80)\xbb<\x04[\x98\x01\xb3\r\xb3ShS)\x9c\xec\x98\x97\xc9\xa1\xab\x02M\x94\xd5\xff\xfa\xb3c.\x83\xa5\xf9\xdcn\x92W5\xc3\xb9M\xbbN\xb1\xd8j\xc6|\x8d\xb6^\x92\x15+NnM\xdc\xd4\xe1e\x8di\xcd2\x85:G#\r;S\xd0\x17\x14\x10\xcd\xfal\xe1\xef\xa6m\x98\x0f\xdf\xf1r]\x96\xa0\xad\x18rJlXH$bL\xa9\xe4H\xb0\x05=\xca\xe8?\xebQ\x9f\xe6-\xf0\x0f\xcd\xb3\xd2\xe7\xa3i\xbb\xa3B\x11\x0b\x7f\xfe\xce\xb2r\xc9\xce\x8e\xfeA3VZ\x88\xd0\x9bv\xf1\'8\x98\xa0\x81\xe3\xe0\x06q\x8b\x8c\xc8\xf3h\xfe\r\xf0\xbd\xee\xaeN\xf205\x0e3\xecL\x7f\x00\xd7\xb2}\xa1r#\xd14Qcz\x91hXD\x84\xeb\x87\xd0g5\xf0\x8dYU\xa3\xd6K~\xb3\x91\xd8\xd0\xa7_g\xc7\xf8h\x18CW\x86k_\xc4#q\xe5\x07{\xf9\x1eT\xac\xa4\xc2W\xb7\xef?4jY\xfflg\xa7\xed\xe40Yc\x03\xb8H\xf5\xf3hH\x00\x87\xb1c\xf0M/o\xd3YF}d\xabVW2\xa1,7q\xbe\x970r\xd2\x83\xc0\x1c\xa9F\x9bz\x13\x06\xea\xe1\x81\xc5\x8b\xeckho\x9eZp[\xfb\x89W\xefhr\x0e\xee\xd9Qs\xee\xcaw,\xc4\xfa\xbe\xd2\xe4\xce\x8el"\xf6\xc10\xeb~\xfc\xb0\xbd\xd9D\xce,\xf6(us\xfa\x11\x04O\xda\xfc2\xa4\xdc\xbcb\xe2\nS\xefi\xcb\xf2F\xed\x
9e\x17\xf1\x16\xec4\xf6\xb3;\xb7\xcc\xfb\xd4\xc9\x860$\xac\x0f\x9a,\xc8_G\x94\xd1\xce\xd8/V\xd9k;n\x0c\xef\xe1\xd7Y\x08\x1d\xfdb,\xfd\x12\xc1\x15\xcd \xa2\xc9\xf6m3\x9e\xbaT\x81h_W\xd8/\xea\xd0\x16\xb2\xbd\xee\xea{\xa5\x0c\xad\x94H5+\x80\x85\xed\x91E\x1dt\xb9\xa9>\x08\xed\xaf<}qI\xb9\xb4\xfbK\xac\x08\x8a7\xe8\xaa\xe9\xd1\xc1\xee2I\x80\xc5\xec\xf5\x8bF\xa4\xce\xb3\xb7\x82R\xdbib\xb22\xf2\xad\xbbN\xb23\xc3/8:t\x92\xf4\xc0]\xfc\x10#\x88\xc5\xda\x8b\x9dG\xbdQ\xd2\x15\xee\x9eL^\xd9\x1f\xce\xcd\x1f m^@\xd0\xb9\r\x83P\xca\xe8+\n\x83\xf5\xce\x1d\xdbK\x07\x05\xfb\x81\xce\xc6\xe0\xcf\xdb\xc1\t\xa8\xcf\xd4n\xd1\x8c#\xd3@Xt\xc71\xe2\xbb\xb6\x90\xac,\x98\x159\xfa\xbcz\xd4\x8c\xa5T}5\x1ek\xf4\x9e\xf7c\x01\x8b\r\x12,IK@\xc3l\xa2\xca{[DG\x1a5+~z\x94\x88\xd0\x9e\r\x9a\xd0\xf0/\xfb\t\xb9\xa4\x01\xffi\xaf\x9d\x99\x93\xaf\x8a\xbf`\xc12!S\xe8\xd5\x16\xfb\xcd\xff\x1b\'Fo\x01\x1e\x94\xe9$\xdae\xe7\xb4d\xb6\xba\xa6Jl\x08~b$\\\x10\rk\xa8!\xf2\xa4\x8a\\\xc1\x1e\x18\x92Y\xee\x93\x85\xbb\xbeg\xbfX\xb5\x9f\xee\x82\xc5\x96=S\xba\x891\x0c\xec\xb4\xc9\x05\xb7\x80\xd1\\\xf5/\x97h\xe9\xfe(\xf0\xf5\n(\x95\x80U\xaa\xbaC\xecb\xfeY\xbf\x93\x0b&!+(\xbd}\xe5\xbf=\xe5\xca\xe5qL\x14\x81\xd8\xcd\x83=J\x02\xfbr\x88\x9e\xdd[2\x94\x8f\x8d\xfd\x8c\xc1\xbf\x98\x8b\xc7\xafS9\xc8)p\xf3v\x08\x93\x91\x80\xf9RHSo\xab\x92\x14k\xbf\x14\x1dG\xb5\xd3\x90\xc0\x0fK\xd2N\xda\x96\xf4\x00|]\x91>\xd7\xc7\xf2\xfc\xac\x10\xa2\xa5Qz\xf8r\xfa>B\xd0pu\xe7:\xebr\x99\xe5Rd\x96\x8e,\x14\xa3~\x9c\xdb\xc9\xb1\x9e\x182\xe5D\xa3\t\xac\x8d\xb1#\xcfO)\xb7 
@2$\x88\x9d\xe1\x0el\x1c\xed\xbf\xe2\x96d(\xabQ"\xd87G\x0f~\xd9\xbbP\xf5\xac\xb0\xf7\'\xfe\xefe\xe8\xab\x8fo\xb9qH\x8e\xbao\xbf#\xc8\x1bd\xe3\xce*\xa3!\x85\xeaT\xdb.\xb8\xbe\r\x00=\xd3\'\xaf\'X=\x15\xb8\xce\xb9(\xa3\xcb[\xa9\xfd\xa6Q[\xbe\x92\x17r\xc5\xbc\x7fP\x19\xa4\xcd?*\x9b\x0fya\xb1\x1a\x7f\xb9\\\x11\xac\x8d\x0b7R\xc4\x17\x93\x95-<\xc3\xb5\x0b\n\xa3ix\xd1\x1d\x98J8\xc9C\x14\xac\x07\x97\x95\xc4\x8aNy\x98\xa8\x8b\xb5J\xf8T9\x1e\xdd]k.a\xbb\xb2\xd3m\x03~\x86\x89.n\x94a\xd2\x8fW\xdf0\xc5\x98\x9c\xedt\xeb.\xa2L\xd8\x9e\xde;\x01\x18\xdf!\xbe\xea\xc0\x10J\xa0\xec_E\x0eZ\x81#\xdb\x81K+\xb5\x14\x0cK)\xa1@\xf9\x00\x04\x996/\xf1\xce\x12\x12\x89\x81\x91\xda\xf7\xb5\xd8\x08\x81e\x0c\x12\xbe\xaf\x16\xb6P=\xd8<\xb4\xff\xf8{\x96`\x13\xacKE\xde3C\xed\x1f\xca6\x0b\xbc\xbfA\x85\xb7,`\x1a:\xe4\x89\x88z\xd4\xc6L5\xb3\x16\xb4\xc4\xf2\x15\\kU\xbe}\t;q\x11\xdeK\x80M]\xcf\x1bY\x12\x1b<\xfa\x8d\xfb\xd4r|T\xc8\x9e\xdc\xdeQ\x10\x8cS\xc8]l\xcd:\x87\xec\x03\xec \xb2\xef\xa04\xb6G\x1fgE@&\xb7\x1fl\xb3\xdb\xd9E\x94\x15*m\xb0\xcf{N\x80a4\x0cHu@\xf3`\x17\xba\xf0\xfe\x85\xae\xa6\xa8\x13\xb5 uVzIi\xaad6\x10\xb5e/\xf9-\x07\xbf\xf8\x11\xfb\xf0\xff\x10B\xa9\xc51%L\x0cu}\xaf\xc2L\x9bm4c\xa6\xae5\xcc<\xb7\x8f\xeb\x818\x0c\x92\x90\xf4\xeeJ`8\xdc41\'\x1d\xb5\xd9\xbez 
3\x9c>\x99\xa64xg\xffM\xc6r\x07p\xbbz\xb7\xdch\xd0\x9c\x01\x03^\xd9\x1a\xd9\r\t\xb4\x12\x9d\xea\xf8d\xfc\xa5\xb4\xbdD\xbco\xeaA\xaa\'\x8bv\xaf\xea\xf4\xba\xed&2Z\xce\xba\xae\xf5\xb0^\x16\xdfh\xc6\x00\x87F\xa0\xe9\x9bnSb\xbf\xc0\xde\x00\x86\xdf\xfeu\xa3\xee*\x90\xf4(\x8e\x00\xbb\x9c\xb1\x8e\xc0!=WU\x14a\xefP6\x00\xc0\xb7\'\x04\x1db\xccJ"\xd3\xdd\xa3\x91\xd7\xa5\xcb,\xc8\x91\xd4\xf3\xae\x19\x8f\x8c\xea\xcb\xfe\xacQ\xdb\xda]\xb0\xe90{{H2\xc9\xcc,\x0f\xbf\xa9F\x11\x1bi.\xce\xad\xfdT\xac\xcf\xe6}nb\x97\xde\\\xdcTL\x1dv\xa2!"l\x04B\x08\x10\xee\x1d\xe7\x9f\x9a\xf1Xv\x96W\xbb\xff\xc8\xda\x17\x1aI\x10\x9c\x1c\x86\x0b\x82\xef2\x0b.\xab\xb83-\xf4\xb1\xd38tz\x19D`\xec\xb7\xa8u\x81\xca\xd1\xa6\x98\x13\xe4\x8cy%\xafj\xfb\x1a&\xde\xfa\xd8\x8c\xf5<\xaa\xba\xfe-\x8c8\x13\x94W4\xd2\x91\xc4\xa1du\xa8&X\x83\xd4\xcaA\xe5\x19s\xa1\xca\xfe\xd7a3\x96K\xa4d\x1e\x8e\xc1\x88\xd5|2\xeeg.\x0b\nhG\x17\x86\xb0l\xd7\xee\xb7U\xc7\x9f\xef\xff\xd8\xa4\x85=0\xa7\xc2\x9c\x1b7( V\x86\xe1d9+k\x0e\x80\x86\x93\xacE&\n\xca\xf0\xa0\x97GVC\xfa\xbbdM2\x99\x98\xc6R`5\x93j\xff\xc7\x86m\xa25\x1f\x7f\xb4\xd5\x88\xd5<\x9a?\xcb-\xdb\x9b\xa0\xc2!\xcbF\xc8\xb7C\xd4\xb8\xb8\xa8r6;\xf99\xe5\xaa@\xb0\x98W\x16\x16Pc1\xfe0K\xe5\xab\x03\x01}y[V\xd7\x1b\xf5\xc0\x7f\xfc"\xb0\t\xc6\xcc\xff\xba\x01\x98\xb5\x9d\x90_\xf6\xe6\xb0]\xf0W\xcd|$9\x89Io-Kb\x9d\x03\xb7>{W\x9bm\xa6\xd1\xef\x9b\xf1\xb8\r\x96!\x89\xda~\x90\x19O\x12\x90\xe4\xd9p\xae\xfa\x1a\xc7\xaf#\xa3d\xcf\xacn\x10\x1ce\xaf\xf2_\x9f\xbd=\xc1\xd1\x01\xfd\x95\x87\x82Q\xcf\xd9+\x83\x1a\xdb\xee\xa7\xb0F\xa5\xc34\x9a,\x7fn\n,\x15\xde\xd9\x17\x1b\xcdx4\x03iF\x1cf\xcf\xb0q\xf9\x7f\x88g\x14\x1b\xc2\x03\x8e\x9e]\xbfH"\xe8\xd3\x87h\xf0\xe9\x9e\xaf\n\x0bj_ok-1\xe56b6*\x9d\xa6\n\x15\x1e\xc5\xf6N\x10\x1d%\xa0\x0b;%I\xff~\x97\xd3/\xee\xd8\x93y\x99M\xfeL\xd8\xc0W\x81Q[\xbfP\xed\xac\x1e}\x1c 
\xc7G\x16\x00\x90/Y\xac\xe9W\x88"\x9dG\'\x97\x14)U\xaf7\xdf\x99\x9f`\x0f\xbe\x98g\xc3\xc0FV\xcbH\xe6%\x06\xdb7\x1a\xd9\x94\xdb1c\xfc\x9bY\x14\x1e\xb47\xf0\x82<z\xc9\xd2\xf1\xb4\x02\xad\xf6\x8a\xfdL\x179\x8aV\xb3U;f\x8a\xc8\r\x13\x8c\x9a\x0e\xee\xc2\x0f}\x1c\xab\xa4t\xe3\x02\x879\xd2,\xfdo\nW\x07\xcd\xf2\x15\xce\x13!\xb3\xd4\xceTOBh1\xda\xbe\x80\xbaW6\x99\x81\xc0\x806x\xe2\xb5\xf9{e,Q\xe6\x043\xe0\x87I\x16\xae\xc5xs\xe0E!|\x83\x11\xae6o\xd2h\xe0\xf6V\x07\x1f\x00\xb2Q\x1f75\xbb\x15\x85\x12\x86dy\xf6\xf9\xca\x8f\x07\x9aW\xdcW{\xc0\r\xdc,\xa7\xf71\xf3\xe9\x84\xf5\x84\xd1M H\xd4\x9c\x80\x1fcN\x19\x04_\x13uM}\xd3<\x82\x86L\xc4\xa3z\xc5\xe8\x95@\\\x1bq\n\x15\x19\x8f:\xc3X\x1bOU%\x9b,.\x10\xd7C\xa8\xd7\x1b\xaddU\x81\x1b}\xfe\xfe\'h\xebD\x8aM\x93\x08\xf8n\xbd\x19\x97<\x83X\xa8\xfd\x00\x12\xe7\xcdX_e\xa8\x9f\x9d\x7f\xd4\xd1_X\xbe\x03\x8bD\xda\x11X\xc5,Z\x8a\x14\xe6\xc5\xb8\x86If$\x95eR\xb0&\x0b\xb8(\xed\x0cR\xe48\x9d\xfb\x89\xd8\x1d\x82V\x11\xc3\x8d\xfad\x1b\x0c\xdaTD\x88(>\xa6\x87)\x1c\x86\xc1u\x0314\x01j\xda&\xf3\x7fu\x81"\x8e3\x89\xe8\xdd\x85\xa3UsT\x1c\xc2\x0b\xbd\xde6\xbc\xd2\xb5I\x9b\x7f\xbag\x00\xfb\x08\xaa\x8f6\xc5b\x1b\xa4\xe5\x075VrBX\x1e}\xa4\xd3\x8d!1\xe7\xdf\xbf\xbfr\xdd\xfe \x11P\x10\xc5\xc9\xb1\xd2\x7f#\xcd\x87\xb7\x90\xd4\x8ba\xder\xaeH\xfd\x1a\x8bN\xae\xc8\x96\x94\xc2\xdd3%0i\x94\x07\x1d\xfd\xdb\x19\x9c4\xf2cP\x08\x1c\x06mP\x97K\x15\xc8J\'\xa0m\x07\xffC>\xc9\x89aG\x16\x02\x87f\xac\xa8\x1a\xe5.}\xb1c[Nm\r\xcc\x86\'\x92\xcbR=\xd8\x9c\xa6\x16\xcd\xd0\x01\x85\xb9z\x0f\xb8B;g?Ou?\xf8<\x97\xbe\x81M\x1d\xcd\x0e\xedD\xe8\xe2\xa0\xfe\x11H\xd3^\x1f\xcf\x91D\xe2ZR\xf3\xbb<\x95\xffF\x9f\xc2\xa0\x08\xc6/+\x07;z\x03/\xf2p\xe6@\x8e!\xb3\xd5&\xf3Lu^\xc24\xce\xfe\x9b\xb2\x05%\xa9%\x8a\x08JLpA\xbbQ\x82z\x03\xc3vl;Z\xc7L\xfb*\xe7\x90\xcb`\xab\xcbh\xbe\x16\xb6\x80S\x9cP!\xe0\xad\x9d\xaf\xaa{Fj\x06\xe9\x053\niJ3%A\x98\x99\xaf\xf6m\xf3\xbc\xf8\xc9!\xbf\xf7\x0b\xa8\x00\xd0\xc4\xba~\xb4\xb0\xf2\xd5\x02\xf3\x86 
\r\x0e)\xc4\x97|\xb9\x04\x85\xa4Temv\xde\xc4\xa6\xb9\xa3x\xb4\x7f\xf3w\xde\xdcd\x8c\x8a\x98\xd29!/\x0c\xa67\x9b\x7fSvp\xadF\xf7O\xedO\x08\x9b\xa5v\x1a\xb8\xa9\x89\xc2\x8c$\x7f\xb8%\xa7\x00\xe7\x05Ar\xa2\x90l\xbe^\xc6P\xc1*\x15\xbf\xd4(p\x80\xc9\xd0\x8e\xc2\x1c<\xd8\xd5\xe4\x93(\xdd\xf1\x1d"ZW\n\x9e\x19\x98n\x9c\xceU\xff{\x00R6\xb3F\x9d\xfe\xdds\x067*\x89F\x88\xb8\xd5\xb2=\x1c\x84\xda\x92\xe1\x02KT\xed\xae\x1a\xecdWe\xcdn@\x07\xd6H\xdf\x9e}\x87\x83Gwr\x9fb\xa4f\x8fVH\xcaN$\xdbSBB\xa0[G\x9e\x17\xe2fV\x18\x14$T\xa8\xa4\xb4\xd5\xea\xf3\x7f\xb93+}\xbdmb\x8cG\x02\x9c\xd27\xf33\x8b\x17\'\xe4\x02\xdaF\x84\x10\xc4$\xee1\xc6\xa6\x14\xacY\xe4?\x04\x02\x15\x96\xdd\x90\xde\xc6\x92\xc3\xb2-\x07\xbci\x05\xf7\xe5\xbb/\xf6\x98\xc3\xa3$l!\xeb,\xa7\r*\x9c\xb3\x9c\x88\x15\x8c\x13<o\x97\xe6\xcc\xc4T`\x8d\xb3\x8b\x93\xd8\xf8@<!R\xb7an\xd1\xd7\x08g3\xdc$x\xef\x90\xdc\x1c\x12\xe1\xe7\x16\x7fU\x98\xb5\x18\x91s\x90c\x86z=\x03\xb1\x12-#\xa5QJ\x80F\xd67\xcc(\x06\x90\xd7x\xec\x94\xe1\x0f\x9d\xa1\x94\x9a`"r\x93\x91\xc9o3\x04\xe7\x91\xfcA\xdb\x1c=\xb7X\x9c\xc2\xb2X\x19\xcc\xbfc\xe7[b\xe7\x95y\xb13\x138\x0cqgs\xb2\xd0\xd1J\xacS\x1b\xf5\x06\x13\xf9\xcd6\x93\x91%~\xa9\x94\xe0\x12\x932\x88j\x077\xf9\xdf:F\xc8\x12\xe1\x82\xe5MvX\x9e\x1e\xce\x9bW&\x99\xea\xbaW\nu\x18\x95\x96\xd7\xcdX\x98\x98\x166\xe8\xae\xbd\xb3c\xf3n\xf6T\x89kf\xe7\xb4\xa2\x83 
\xe2\xc4:\xd9\xafF\xa3\xda\xee\x10d\x10\xfe\xc5\x02O)D/\xd9\x7f\xb0\xc1[\xb0o\xaf\xdag\x13\xb7\x84\x81ev^\x9c\x98\x9b\xb5\x92\xf7>\xc7\xfc^j\xa1EH\x81t\xc2\xcc\x88\xa5\xb1oW\x84\xd9\xa8\xa2\xa9\x17\xb6\xd7\x95\xf9\xcf\xf2\x89\x9d0\xe7\xe2ol!\x90\x0c\x82l\x9d\x05\x07\xac{\xd6{;\x7f~\x9d\xac\x0e\xd0\x1c>\xb2]\xfck\x06\x9cqd-d\x81\xf3\xaf}s\xe9K3\xee\xa9\xe9\x06\xe8G\xc97O\x977\x93\xc4\xdb\x80/\x01)\xcbP-[\xcd\x1a\x9d\x8c\xfc@\xd2\xfd\xef\xd8\xfe\xf9z\xb7\x19k2\xfa:\xff0\xb7\x14\x9f\xa2\x10\x04@\xdf\x95\xc9\x92\x05\xdbd\xa5\xb4W0}\x82\xb2\xae\xe4V>o\xc6\xea\xbbln\xc4\x93\x80\xb4\xd3\x86gw\xec\x01\xd9/H\xf9\x8fr\xf6)\xde\x8b\x83\xfb\x82ri\x82\x8doQEaH\xc5\xa8+3\x0b\x96\xe4\xbfdJ\x9c\xfd\x93\x8cQ\x9aku\xcbRe\xbf\xb6\x1b\xcf\xb8\xb8\xb0\xb0\x07\xa3G\xceWw\xab\xd14*\xec\xe232\x86\xa6\x14A\xc0]\xa5\x93l\xcbK/\x083\xf0\x18lp\x9e\x8a\xac<[\xc1\xc3a#\x98\xbf\xe0q\x10\xab\xf6^\x87[|\xfc\x08V\x1f]Nj\x99!\x83\x1c\xccsj\x15!\x12\x81).\xf3>\x92o\x12\x8e\xdf\xffC\xfd\x08qW\x86\xe9X\x97\x96\xacrk\xbbG\x80^\t{_L>\xf8\x86\xd3\xf6CS\xa0\x9c\x96\xc1\x04\xab\xdf*\x9bi_\x98=%%\xbc6\x0bA\xff\x9d,\x98\x07\xa8\xca\x04\xe2\xdb-\xd0,\x11\x85\x8f\xe4\'\xd2f\xdc\xbf\x94\xd0\x93a\xd5y\x8c\x85\xdb\x92\x86\x01\xb6\xed\x00B\xf5\xaa::h\x97 \xd8(\xfb\x1fw9\x0f\x88c\x8c\xe0\xb7\xc0\xb8\xc3 \x9b\r\xa1\x9e\xac\xbb\xd6\xf6\x8e\xb4\xd1Y\xfc|\xff\x83\x9c\xb02\x18k<\x04Z\x83X\x894\xd4Z9\x80\x83\x7f\xead\xa7\x07\x19\x8e\xaa\x83\nTK\xca\\n\x7fE}>\x04\xa3\x10\xb6\xe2\x1f\x8c\x9aK7\xa1\xeb\x01x\xd9\xc6"\xe2i\xfe\xd8\x0e0nm\xff\xe3\xads\xff\x0e{=\x02L\x00\xb0\xaa\x16I\x86\x90\xd6\x88s\xd4)\xc1\ty\xac\xa0\x84\x96\xec\xba\xcaL\x04\xfdueG\x86\x03\x0e`\x81\xe2_\xed\x01&\x8a\x13*a\x06\r\raB\xd58\n\x0e\xfa\x04\x84\x18\x03\x10_\xba\xa4nZ\xb13\xce\x81\xdb\xfe\xc2)\xee\xa3\xbe\xdf\xb5\xad\xa1\xfd\xe6u\xaet\xafHza\xdcy\xfdX\xd1`.\xa8\xdb\x0b\x15\x0bfS(\xc0R\x92\xb6\xc5\x9b\x88\x07\x1aa\xf2E\xb7kIl \x807\xb0\x13\xd1\xc8\xc5\xcc/\x13\xdc\xdc_?1k\x8a\xa3\xcf\x04\x1e\xd8 
\x05\xfe\xc2\x19|\t\xe0\xb4$=C\x1b\x91\xd7\xb1>\xff\xce\x83\xcf\x7fp\x1b(\xde\x85\'\xef\xe6\x01\x95\x18\xd3\x13_\xf5Y\xa0\x91\xb7\xf7p\xe5\xe2\x96m\xd2 \xf3Ef3\xc7c\x8f0\xc9\xb4\xca\xaeb\x1d\xb0.\xe1\xef\x95-<\xdc\xcf\xb7\xcf\x94\x05&v6Y\xf5\xe0\xafos\x14\xce\xa1Le\xe6\xc0\x0bO\xd7gl5\xc8^\xac\x9fV\x0bfE\xfb0H\x9d\xbfD\xa1\xbfnD\xcf\\\x7f\x86f\x1f\xce\x06\xe9\xff%_CXQ\x035H(\xca@\xaa2\xd0\x14\xea)\xb8\xc3f\xac\x1eHnd\xfc\xf2Z\xa1Dm\x9e\xb7\xef/\xdfT\x84\xcb\xb3su\xee\xb0\x11G\x0c\xdb6\xf9C\xf6\x1b\xf6\xa3Z\xc3R \xe2o\x85\xa6s\xd0-\xd5wx\x16\xa6\x13\xb3*\xac5\x06\xc3\xea\x88\xb0\xc3\x90"\xab\xaaPE&Q\x90-\xca\x89\x13\xd4Zk\xe5\x87\x1a\x8f\xcd`\x13\x81\xbe\xeb\xf6O\xb6g\xed\xb03L\xaf\x9bKu\xa3q[{f\xb7LG\x88\nS\xe9\xa7-\x94\x12\x99\xd2\xe2\n\xd0[f\xa5]\xe8\xb6\xba\x07Y\x88\xf2\xf0=\x96\xb0L\xd6\xed=\xc8f\xcag\x9b\x83\r\x8bEH;\xa4e\x1a\x00\x9f\x81<\x8a3\x10X\xef\xed\xde\x1cc\xaf\x1e7Rj\xac\xecZ\xf8\x11\x9a5\x9cb:\xd6,\xd3?\xcf\xed\xfer\xe4\\7\xc3I\xb4ON~\xf2\x82 h\xf1\xf1Vk\xbbY\xe7+O\x925(\x85V\xb0\x1c\x11\xaa\x88q\xb4\xbc\x81\xb2C\x0fh\x99\xfc\xf4\xb4\x19w\x07:\x8c\x8e\xa3\xe8\x0c\xdb\xff\xb0.\x10\'e\x9c\x01\x8a\x88C=\x84B\xfa\x89B\xc3\xdc\xde\x82zH\xeaPcJD.\xd8k\\\xa9g\x0c/\xf6\xd7\x14/3\x03\x82\xbc4\xe4M\xe9\xf2\xfb\x85w\xdf\xcf\xecY\x08J\xd05\xa3\xe7\x84m;I~E\xc6\xbb=\xb2(\x87\xb3\xdb\xcbG\x13f\xc6\x87\xc1V,c\x8c\xde\xd9\xb5q\x90{dj9\xb8wj\xba\xb7\xb7\xb5\x01$\x05\x95\x8a\x81\x12;\x18D\x8b\x82m,i\xd6\x03\x9e\x06\x00+\x0cw\x87\x1d\x12N\xe6\xbe\x15\xf2T/LM\x1f1\xd0\xbf\xd4\n\xfc\xc6\xdaPL\xaf\x1f\xda\x0c\xd3f\xdc\xde\x9b`\xca\x01\xbf\xb7\xdb{\xaf&\xc8\xca\x928\xad\x9ajmL\x1eU\xe5\xb5\xa2\x80\xb7;\xd0\x8blfc\xfe\x92\x03\xb1o8\x97\x8a\x01\xd2m]\x81\xcc\x0c 
\'\xb9T\xf9\xfb\xff\x14\xf0\x8c\x08\xba|\xc3\xbf\xbd\xb33_i\xf9I\xda\x04\x9b\xd1\x17\xb7\x1aI&M`\xc5\x84\xc3\xb2\xbe\x92~\xd9\x1f@\xc5\x0b\x88\x94\xa8\x0e\xd6\tt!X\x1c\x07\xec\xdeN"\xc9HP\x1e\xe9r\xe1"\xc4tS\r\x94\xf6\xee\xe0_\x1a\x15\xd3\x9f\x7f\x15\x9e\xe3~t\xcf\x97,\x8c\xef\x02\xec\xba\xa3)R\x00r~~.\xa1\x10\xde]V\xa4\x8fJP/\xea(\x1b\x1b\x12(WrH@\xf5\xa9!\x06\xf5[S\xb7pL:\xe6D l\x14[\xcd\xfe\xaa>$\xfb!\xd3\x1e \x00JcD\xb0\xbf\xa2\x17\xdb\x7f1\x14\xd5 \xb3\xc3&\xf6Q\xf2\xc8\x0e \xbb\x02\xac\x90\xf2lK0\x13[\x18P\x1c\xac\x97\xf7\x1fw\xe8M\xea1\xe4\x9a\x8c5\xf4R\x10^\xc4\xe1E\x9f#)P \xb4\xc7\x82\xb3\xd0\x99\xfa\xd6\xb6i!\xfd\xb9\xd8\xecwvz\xc9a\xccm\x91Mi\xd10ONS\xecl#:\n\x828\xd2\xfa\x0e\x1f\xe0\xc3\xbf\x0bfj\xc9\xfa\xc0d\x03\x07t&$7\xe6\x05\xa0r\xbcreh\xc5\xb0\x81\x89\x82+>\x8dr.\xf6A\xe12&\x82\xb1\xda\xe4\x9b\xa5G\x14\x0b7{\xb2o\xcb\xcc\x9b\xc6\x89\xf3\xd1\xce\xb9\xcb\xbf\x83\x1dG\xbf\x9e\xdb\x81\x81Q7E,$\xde@(*+\x9e\xe0q\xbe\xea\xc0{=\x17\xcbp\x1e\xe3B@cpa\xe9\x18?\xea)\xf4\x88\xd4\xbe}\x8a\x13\x07T\xc2\xbbu7\xf7r\xd3.e\xa7\x9a$\xdf\xab\xb7\xf7l)\xc3\xe4\xee1\xf8\xc0j\x15\xd3\'\xc2}\x13\\|\xfa\xedn\xd2\xae$\xc9\xa7\xb2v\xb1R\x80\xcb\xd8\xbe\xda\xcf\xe5\xdf\x89\x96\xed\xa1\xa4\xc2\xcb\x98\xbcW\x8a^5\xea\xd5\x1e\x1e\xa2\xb2\xc5m\xd5\x17@I\xa2Jk\x94\x98\x7fj\x8b\xf55 e\xe8\x99f:\x89g\x18\xfd8\xe6\x89>\xb0g\x1aH\x1f\x03\xa6\x1b\x07\xa2\x98\xc8t\x8c\x03",~\xb9M\x95\x1cd\x9b`t\xf9\xf4\xe3O\x01a\x91\xf7\x89$\x8b\x05K\x06\x93<\x13o\x8b\x0c;r\x89\xe1\x04\x8a/\x05b\xa2\xd1\xb59\x81\xaa\x08\x0f\xc11\xa8\xba\xc5\x7f\xf0\x11}\\+\xa3\x9b?\xbb\xa1,5\x9bk\xf67\x04\xabX\xb9\x1a>\x85\xa3\xd2\x14\x10\x93\x9d\x88\xd9U\x1e\xcc\xc4J)\x05o \xce\xca\xd0a9\xf4>\xf6\x03\xa1\xd7\xc9(\xb7\x18\xc0C`\xcd\x7f$\x12_\xd8_\x0e\xd3-ZR.\xc8\xc0\xd8\x162\x0c\xd8jH:\x10\xbbW\xbaP|g\x7f\xfaSQ\x1dV)R\x1ehmd\xee 
\x06B\x9cP\xb3cwo\x8e\xf4\xb0\xdeT\xaa\xea/\x1e\xef}\xb0P/\x11\x14^\x87\xef\x85\xedK\x8b\x12\x7f\x18\x88\x02dvg\x0f\x1b\xa9\xb1x\x06^\xc0d\x18\xcb\x14\x8a}+{\xde.\x9f\xe1\xc5\xc6\xee\xf7;\x7f\xdf\xda\xc3v\xddc\x90\x9c\x03H)\xd9\xa4\x1d\x80NF\xbd\x15\x17\xdd\xe5wE\xb6\xeb\xc85\xc6\xde\xc4\xbb\x84\xba\x96\x1f\nX&\xd4\x06\x03?\xfa\xdb\xda\x89\xa8\xea\x93{\x96\t\xd4\xa3c\xfc\xd1\xe5\xf7\xd7\xb0\xd0Hp\xa0\x0b\x14b\xfc\xf1l\x12\x9f\'l\x85\x05\x00\xf8\x11fn\xec\x99\xd9\xb4\xe3\xef\x8a\xc5D~.o\xd4\x80fg\xa6\x8e\x17\xee\xee4\xe3\xc9<\x11\xf0D7\x02jQ\xa3\x9eG\x14,{k)#\xef\x80\xd3.\xe7\x8c\x88\xe1\x8a\xda\x89\x17\xca9q\x04\xbc\x0eFR\xfd:\xae\xdf\xdcj\xc6\x1a3\x89\xd8\x01\xc4\x96\xcaw\x9bH\xde\xda\x15{yb\x13a}\xda\x8ea\x9d`\x8c\xbd\xb5\x90ZL\xc3`_\x18%\xd3\xa6\xccnH\xad\xe8,\xe67,P\xf7\xe2\x1d\xe1(\xb1U\x07Q>C\x98\xd4\xfe\x19\x05\xf9S\x94^\xf9)\x19\x89\xad\x9d\xb4\xae~y\x8b\xc6T\x00\x08\xcba\xe7{~\xe1\xa5\xa2\x9fVk<z\xf0]\xe1s;.\x18\x9c\xbf\'\xceA\x0fg\xe91s\x84f\x8e\xa3\xca\xd9\xb0Y\xbfQ\x12\n-j\xaf\xa8\xb2\xce\x16\xc0\xfeHy\x99>\xbch\xc6\xaa\xba\x1ai\x04\xf5S8\xba\xa3{\xd2\xc1\xe3;\xaa,\xc2a\xcce\xf7\xe4\t\xa4\x94"\x86]Q\xd2EL\x90v(\xb1\xf7\xf6\x9b\xdd\x90\xc1\x915\xd7\xdf<}\xf7\xd3\x9e\x9f}\xfc\x05#ih\x8bx\xf1\xe8\xa8%\xd9\xdb7\xf6\xd9\x94V#yl\xaf\xd3\xb1\x01M\x9d(\xectC\x0f\x10"W\x02\x90#\xbc\x9b_BvP\x81E\xd0\x92@\xbe\xa3}Jt\xa72!\x94\xf8\x17&\xad\x98&\xcd\xac\xb4\xb0\xb4\x83\xc2\xc90\x0c\xd0\xf6\x18f\x01\xfb\xa1@\xd1j\x88S\x0b}PW}n\xfe)\xbd#ds\xeb\xba\xc6N)l\xf6\x9d\x89\xd7\x81\xe5\xf1\x0c9\xbc\x0e@\xd64\xd0\x8b\xadf\xae\xbe=\xfa`G\xae"\xf7\x021\x94\xe3\xe8T\xe5u\xb5\xdc,\x85d\x98\x98c\xe4E\xc0\xc8\xe36\x92/\x8a\xbbMj\xbb(\x17I\xae\x80n\x90\xd4\x0fi\xf5\x00\xdf\xc6\x1e\x1cV\xdf\x0e\xccY\x8d\xb1\x89\xcc\xb6*\x88\xc8\xe3 
nM\x07\x98~\x15\x145R\x1a\x9f\x1f+b-\x0f-;\xa0\xac\r\xc2\xeax\x87\x19J#"\xbfK\x11S\xb5\xb7\x15m\xb2:\xd6Z6K\x15F\x1a\x8a\xbd#\xb9\xe0\xce.9#\x0f\xf1\x0f\x88\x87\xd7?\xcf\xe3\xb5J\xe1H/L\x8d\xda\xfd\x98qY#ns$\xeb\x1d\x08\x06+\xed\xfb(E\x1a\xc1o\xe6}\xec\xe7\xc4{\xa6Y\xccu,R{\xfaP\xbe\xa7\x89g\xddf\xf5\xcb\t\xdaG\xdd\xd65\xc5]\xceB;\x96\x8f \x07\xc9\xb0\'\xa8\xca\xc9\xbdW\x08J\xcaLbG<Tw1\xd8\x9b\xfc\'h\xff\xf4J-*Xdz\x9c!\xcb\xc3\xd5\xabw\xa7\xccF\x10s\xab\xa4\xe3\xd1\x0b\xf0M\xc2\xc7\x86s\xcf\x0fp>8o\xa1\xb0o\xeeF\xbb_/8\xcb\x9f\xe8x\xbdB\x82v\xb9\xd0\xad\xc6\xe6\x14_\x1b\x1b@\x93\xdbU\xe8I\xed\x0fCI\x0c\xc51\'T\x88\xe02\xcbbG\xc2$\x94\x9b\x0fe\xdfzHVH1ynf\x8d\x92\xbc\xd9\x02\x00X\xb4\x1fT\xa2>\xd0\xd3\x06\x16\x12W\xe5-\x91\x0f%\xfe\x8bF\x93\xd3z\xd4\x0bv[X\x12N\r\xc4\xea\xdcFs\xf0\xea\xfe\x9a\xb2\xaa\xda\x8e\x1c\x9d|\xf7H\xb6\x91\xfc\x88\xdc\xa2\x13\xd6=Ty\xa9R\x1c\x01\x8c\xa0a\xa7S\x12vw\xcd\x94\x04j\r\x02Q\xb1f\xb7-["o,\xf6K\xcd?e\xac\xd4\xfei:\x11:\x88\xb8\xdc\x1d+<\xd8\x7f$\xcb\xceM\xdeRa\xb4\x13>\xca.\xe7\x1a|\xc5t\xb6\x11\xf2o\x0e:\x88w\xe4\xc5\xb2dF\xebt\x00\xdb\xf2\xbd9\x10\x0f\xd6\x8a\xef0a\xac\xea\x96\xed\r\xea\xeeC3\x969\xe6\xf4\xe2h5\xb3\x06\x8b\x8e\xca\x8c\xfb\xef\xc5\x99\x8c\x9b\x13F\t\xbf\t\x8er\xaf4\x95\x88uj/K\xd8\xbd\xb6\xc4`\xc0]\xe1\x8b\xaag\xd7\xec&\xb3&\xc3\xa1\x12\xd9\xa3F\x83.\xdf\xe9~\x05\x04\xbc\xccIbs\xc4\t[\xd0+\xaf\xb4r\x94V\xf5\x16\xeb\xf6\x1c\xcd\x88\xf6G\x0f\xcd\xd0\x90\nQ\x80ge,\x9b|\xbex\x11\x1fx\xdf\x99\xed\xad\x92\x9b\xaf\xb0\xfd\xaf>\xd1\x01\x99\xdd%SY\xf8C 
\xee\xd2\xae\x1d[d\xcf\xed*\xae\\\x98\xd4&\xb8j\x03\x16\x16=\x0f\xc4]sH\xebp(]!\xa3[\xdc\xfa\x8bp6\xfd\xc0\xe8\x1a?\xab\x16l\x16\xc5\xc3\xe7f\\Q\xaa\xb2]!\x9fA\xb5\xc5\x9e\xfc\xfb\xceCL4\xc1tf\x92\x1e<\n\xe9P\xae\xa0\x14nf1d\xa7\xc5&\x7fC\xec\xde\xb6\xd8\xb5\xaf\x19f\xe7\x91\x1ccv\xb3\x19\x0b\xe3\x134,\x85\x1f\xc0\xbf\xb1\xe9\x07\x89Tq\xdc\x88\x8f^>\xf8W\x05a\xfa\x85\xb8\xa1Pv\xe5\xc4\xbc\xabI\xe8\x0f\xe5\x0f\x9c\x96I\xbc.\xef\xe3\x97\xb9\xb9F|\x01\xbb,\x03\xccVcp\x15Q|\xdf\xef\x807I\xe4\xab\x02\x83\xdc\x88\xbb\x0f\xecX\x10?\x81\x8a\x1cv\xb9B\x9b\x10\xabE\x95\x1d\x01\xaa!\xe0I\x19W\xb5\xf6\xc4\xacAD\xdb\xc5\x90\xfe\xb0+@\xae\xd9\xc8\xfc\x99c\x1e/<\xc1\xc5\xb3\xcb\x02\xd1\xe5\xf7Xs\xc0\x0c\x89\xd8\xff\xbe+K\x05\xf8\xac\xcf\'\xf1`\x03\xd3\xb4^\xea\x07\x8f\x8d\xae\x1f\x80^\xbc=\x95=\x89\xe1"\xb1\x19\xcf\x93\xa6l+&Y\x87\xe4\x9a\xe0\xa6J\x06\xa1\xd7\x8e\x83Wg\x13\xd4\x85\'\x8b\x8bV\xa3\xd9\x83\x1aV\xb9\x19\xa6\xa0@\xd3\x0b\xbc\xe44>X\xdf\xea\xe0\x86\x90#N\xeeD?\x11\xc1\x1d\x18\x86\xfe\xd3?\x07]A\x1a\xf3\xdc\x82\x1f\xdc\x16\x07\xcfy\xb44\xb7\xed\xad\x17\xcd\xfeG\xf4\x9a#\x9f\xe9\x81\xd6t\xa8\x91\xd4\xf9_\xd6_iW\x9dY\x1aB\xe9,#\xa3\x83\x02\x8c\xae\xba\xbey\xfbWs\xe9+\xc0V\x1c\xdb\x0eI\x1f\xe6\x167\x0fm;\xa8#i\x05\x14\x85\'\x90\x07\xf7<\xe9N\x11\\\x909\x89q\xe6\xb8\x11u\xe1\x18\xd6\xd2F\xb6\xde$\x02\xff\xc6\xc2}B \xf0\x8a\xa3%B0\xbe\x19\xcbU\xfb\xf6\xe3s\xac"\xaa\x85e\xbfHs\x8bOM\x90\xf6\xf6:\xdc\xec\xb2J\x80#\x92`\x11\xae\x9a%\x89hd\xb5\x91>\xeb\x8f~\x89\xe87\xa8W\xd1~:\xcb)\x9c\x8e\xdb@\xb5f\xdb\x1bM?l\xf2\xa0\xd7h\x06|\x00\xb8\xfe\x07\x97\xe9\x14_\x15\xd7\xe1p\x0b\xb5\x19\xb3\x0e\x98\n\x86\xef\x18\x93b\xc2\x08\x01u\xc4\xa4\xe4\xe6\xb0\xf8\xfc\xd2n\xb6\xb1\x13\xe8x\x17\x157\xe7\xf6\x11\xa4U\x08\xacc(\x9b\xda\r\xa4&X|\xa8B\x1buY\xc4\xb6\xea1R\x85\x984\xeaFt\xc8\x95\xcc\x83[\x17 \xd0e\xdffpVV\xd7\xcc\xf83\xf1\xf0\xf6\xaeLk\xf2\x8d\xf7J\x99Jsn\xa4\x8aB\x06\x1cV\x91\xed\x1bX\xb8t\xb7\xbe\xc79\x9chP\xa8\x8f\xdf\x98\x9dw\xc9\xad-\xc5.\xe7.\xa7\xb4\xd0\x84\xb4\x0f\xae\x15 
\xc3\xc0\xd6\x7fv\xb9As\x90\x8c\x93\xc2v\x87c\xc0\t\x12L5\x1a ,\x18H\xd8\x1d\xd5\xe8\x12\xf1{H\x01\xad\xe4>\x8a\x8f\x9f.L+\x82\xa9$\x06C\xab\x83\xe0\xb8\x07\xf1+\xd4\x7f& =\xd2\xebo\xfa\xb8\xbb\xd3h\x06\x07\'\xc4\xe1]\xd9\x1eW^_\xb25%\x91\x02\x1e"\x16\xb5B\x08\x8ae;\xe1(\xee\xd1\x04\xf6\xa7\x18\x99\xa1\xe9\x85\xab\xb0j\x9f/q\xea\x9c8\x08\xe4\xc2\xf9\xa9\x97\x8b\xb6i&jv\xf8\xfc\xe1>\xcc_7\xf9\xf2\xc0\xfe\xacc?X\xba\xbd}Y&C9dPd\xcf\xec\xb8;\xde\xb1M\xf5\xfd\x90B\xd1\xc2\\|c\xd6\xbf\xea\xa7?\x9b\x8b\x19\x06\xb7\xd2\'t\xdfh\x04H\xf0!\xe0\xd77\x1a\x19\xbftK[\x1f^l\x92R\xe4lq\xa9\x88\xe3,^q\xc2C\x86q\xb8\xf8h?BH\xd7\x8a\xa3\xc3\x98\x85\x84:\xf4]\xa2\xcaN\xe8\xa0\x9a\xc8-\xb6cW5\xc8Z1\xff\xf0\xc9\xceH\xd2M!\xc9\xcb\x9e\x9d\x80\xc5\xd9\xae<\xb3K]y\x05\xce6\xf1\xf0\x1e>\xfb\xb0\xd1\x14^3?\xae\xff\xd9\x1c\xcc\xbf7\x1b\xde\xa1A+\xaa\xe0L\xf4&\x17s\x8bU\xa5RA\xf6H\xc1?\x0b\xeb\xb1\x918\xf87\xec\xe3\xe6\xbd\xd7\xb6}\x9d2\xd2~\x847Bw8A9Z\r\xb8\x93\xba\xdb\xa1\x08\x02\x19M\xfd\xcb\x87\xf6\x1b\xa1\xfen\'\x00\x06\x82f\x05\x94\x04\n`\xa5\xb7x\xa1\xa96\xfen3\x7fnf\xafJo[\xea\x14R\x15\xba\xdaQ\xaat\x85\x82\x05\xa3\xe2\x0e\x8a\xd4\xee\xd5\x88\xd3\xc4\x15\xb1\xd7\x02\xb4b\xd8}f_D\x00\x18\x08;O\n\xaexD\n\xe5\x7f~\xb4\xe0\xbbs\xd3\x8e<\x088!\xab\xa3\xe2q%\x97Jx\x02w\x8dmf4\x91\xa8\xfed[\xfae\x9c:\xa8\xd3%\x19\xccH\xfa\x9e\xb4\x80Gv\xc0\xd9\x91;P\x14l\xc2"\xb0\xda\xea\xdb\xf2\x12v^\x9e\xb1KV~\xda\xc1IF\x0b2x\x9e\xd6\x1e\xfb\x8c\x80\x83\xd3W\x98\x98!\xab\xaa\x15\xd6p4[i\x03d\xaf6\x92\x82xh\xfe\x98R\x03\x0c\xb2\xe1\xba@\xf3 
\x1dW\xc0\x11K;\xd6\'93\xf5\x070\x1e\xb1\xea\xb0%\xc05\x85\x9a@\xc8\x1e\x83a\xc4\x04\xca\x99\x87%x\xcb\xa2\xe1+\x18\xfbE\x1c\xc5\x1b\xe6\x80\xdd\xc0\xad\xee//7\x9c\xd9n\xbd\xcc\x8a\x0b\xc8\x9e\xaa\x15\xa8\'W\x14\xdc\x88\xc1A:\x83j(=8\xca\xd1;\xe88[+D\xa7\x98\x8aD\xb2\x1d\xc5&\xe2a\x92\xc6\x98\x13:\xdb6\xb3\xc8^\xba\xac\'s\xe5\xac\x19K\xb3x(\xb3\x93[\x14\xab\x85\xb9\xe6\xe8\x0b\xa2\xb6\xec\x0b\xf2\x80N\xf6cPD\x8f*`\xb1\xec?\xb2#JP\x90\x03\x07\xdca3\xa6t\x92H\xd6\x0b\x1bj\x7f\xdfi4A\xf9\xfe\x10\xabl\xc3\x97\xb5?m\xab\xe2\xf0Y\xac\xe0\x1a\xb7\xa7\xb1\x850\xcd\x9e\x8dFcS\xdd\x0cz\x12\x12\xe8\x851\xa9vT\xf2n\xfdkt\x93\xf0\x91\x00\x83\xf4\x86\xe2\xed\xbf\x9e\xd4\xf7\xe6vz\xab\xe2\xa3\xa0\xb0\xea\xe7\xd5\x05\xfb\x08\x8a\x99\xebX\xc4\xe2\xfa<J\xfd\x944\x8d\xf2\xb4\x1d\xa4{c\x0b\xb8\xa1\xfe\x88 \rr\x19\x9d\xb2Dfp\xa5\xb2\xc6l\xf7\x85\x05-u\xceB\xeb\x9d\x81\x86\xc8\xa2\xfc\x87\xcf\xeb\x93(\xa1F\x11\x92\xfax\xb7\x10h\t\xc8\x93\xe1\x88\xdb\xa0]A\x19\x0ce\xe1A\x1d)\xe4\xb8\xc5\xbe\xcf\xc7!\xfe\x81\x85\xad1\x9eE9\x0e"?\x95@$\xd6\x17\x02\xe7\xf74\x07D\xc8\x0e\xee(\x9e.U-`8\xe3\x15\xbe\x18\x95\xea\xfc\x07\x12\xfb\t\xd3\x89\xcb\x08q\x9c"\xb0\x1a\x18\x0e\xd4\xc9\x89o\x98E\x7f\xb57ku{H\xdb\xe9l\xb1\xab!^*\x9b#\xb4\xcb\x02Wc\xcf\x99j\xe8\xc3\xb88\xeax1?\x83\xf7rG+\x16\xce\xf6\xd9r\xa3\x11\x9a\x0f-\xfd\x82\x1bJ\xba\xebW\x7f\xd8\xf3\x85n&5\xb7\x10 \x0f\x98\x14w\xb6&\xe1\n\xe2\x02\x831\xe6k\xd7\xbf\x99Q\xad\x92\xdd!M\x99\x96\x98N\xdf=\xb8=\xc5AT\x1f\xef\xbf=\x96odvS0eC\xbb#\xe6\xa5:e\xf9\xb1\xa5\xf8\x12\x87\x15\xfe\xfd\xfb\xf9\xa7\xbc\x81J\xbe\xfd\x08\x87\xb9\x9d\xc6\xde\x7f\xb4(\x89\xc4L7\xdc\xce\xf9!bh\xa4\x9e\xb1S\x83\xcb\xc8\x83\xde\x0f\xdc\x1a\\\xd0xGh-,-\xb3\x10\x84>L 
2{]\xe6\xed\xa3\xf3\x08\x05h\xbe\xffa^\x8d)\x7f\xb4|"\x94\xe9\xc5Ma\xb9\xd8Q\xb4\x1c\x9f\xe7\x91\x90\x94o\x17\x90\x15\xb3\xbe\x9f\xd9?}\xfd\xfav\xa3Nh\xb3Q\x95\xd8\x1a\x96\x87\xbf\xb6\x83=V\xce\x8d\x18\x87\xce\x91\xf1\x10\xe7;_\x9a\xdd\xc7\xf3fu\xa2\xf0\xe6\xa8\x0c:\xc2\x01\x92\xebMH\xe3\xe6\x9a\xc5G!_\xfc\xbdG\xb6!r<\xd3\xb6\xbb\xf3!\n\x0c\x06\x19\x9d\xc0`fKO\x8f\xea\xcd\xa0p\xb6\x973\xec\x8ey3Z\x97b\xf2~[\xab5\x1a&\xd6r\x16v\xaa\xd0\x9e\xe1\x998G\xe4j\x8b&\xe1\xdc\xcf\x97*\xd8\x8aA\xd3\x8a\x05z\xee\xfbO\xecz\xf3\x9b0o\xcb\xab\xb6\xe68\xc7\xa5k\xc6=1\xa4\x13f\x9f\xbf\xadX\xb8\xc1"C\xb4/\xe9\xfaI\x92\xba9\xae\xd0\xe8\x96\xcd\x95c\xbf\x8e\x82L\xe40%\x85<\xec\x88$\xa4_\xdc\xb3J\xe1\x84\xbd0\xc2\x08*\xbd0+O\x1a\xb5:\x7fS~\x8b5\xcb6\x95\xf2\xe0uG\x87\x7fn\xd9]\xe9\xfd\xb5\xbf\xc2\xd0\x18\xa0\x7fk4\xe3\xe2\x8a\xc5e\x9e\x0cH?3e\xb6\x90\xfa\x16\\5\xa1\x84\x9d\xc7P\xc8\xda\x8b\xd2\xc7r[f\x06\xbfM\xbf\xdc7\xdf\xcf[\xeew\x85\x97\x13\xf7U]\xa9cY\x107\xca\x9f`\x13\xdd\xbd\xa7\xe6\x0b(O\xdf\x99E\xa6\xe2tq\xcd>\x99YT\x9d\xbc<\x81\xe5\ngO\x94\xf9\x95/\xe7F\x94\xca\x08\xef\x9b\xa3o\x8d\xa4Q\xe7&\xe7\x84\xe0w\x7f\xef\xd1\xe6\x91\xaf\xc3\xa3q\xdd.\x19\x89\x11\x1cR\xc2\x84\x04yh;\xf1\x8b6\xb3V\xe8\xab\xe3NXed\x97\xdcg\xd5m\xa1\x8b\x82\xd4\xe9W\xb3\xc5\xebv\xa7C\xb5\'\x94\x8eE\xec\xfd7\x16\x1aD\xf1-qP\xc9\x94\x0e2QQ\x80\x98k\xc6#k"\x84\xcd9\x139\x919\xe5\xac\x0b\x0euT\x01\xc7\xa3 
\xda\xa7\x07\t\x13&\xb8iV\xde\xaa\xfc\xbf\x1f\xaf_\xdb\x02\x87\xd1\\v\x1f\xc7\xb5;\x82B\x17\xa4\x7f]\xf1\xdf\xdf\x81\xf7Z\xec\x99=\x0e\x18\x15_g\x0fX+\xdc\xbd\xberx\xc1\xb6%\x14\x93\xe6MYL:\x7f\xa9#3g\xbci^\xc7\x97p\xe7\x9a\xd2\xc2X\x17\x87\x8d\xe46\xf0s\x12mgR\x19-\xe9d\x04_\xcf6\xaa\xe2\xcc7c\x8d%f\x0cU\xcb\xa7\xc3\xdd\xae\xdf\xe8FD\xdb\xd1\x84]\x07\xe6p\xe0V\x10\x95\xb1\xbe\xd9\xdb\xb5\xf5\xf1\x81\x85;\x0e\xca5u\xcf\x92$\xd8p\xec\xd1\xe8DE\x83\x90|\x1b0!\xa7\xae)A\xea\xec\xd9:0\x02zL&\xaf\xd1!\xdb\x01\x12\x8a\xf1\xd1\xd6\x06N\xb1\xecN%\xb6p\xdb\xdf\xc3\xe3<\x06\xdc\x96\xbe"\x0e\xfa\x8a\xd0\x01w\xee\xb2\x9d\x12\x97=\xfe\xabWW\xfd\xb8\xea\xfe<~\x05\xd7{\xbfQk\x9a\x8a\x07\xd1LG,\xaf;\xcb\xbak\xf9,c\xc5\xf3\xac\x1d\x1d0\x17V\t\x07oQ\xa5\xa7\x9b:Z\xbe\xfd\xcb\x92^\xfb`\xca\x9e\x14z;\x10\xb8o\xaf\xdba1\x15\x04\xf8\xf7\x0e\xf8k\xa5\x80\x91\x99\t^\x86!f\x15\xa0BOh\xcao/\x1e\xdb\xaa\xb3\xdb\x90d\xb63{\x13N\xafD\xf0\xcf\xd8\xa9n\x0e&P~\x07\x89\xb5\x8d\x8a:XuxI!\xc5\xea\x16\xb8\xf5H>L\x84\xf0\xf4\xe83\xa1\xaao\'f|\xa9\x11\xe2\x9a1\x87\xd3\x88\xd8K\xf6\x97\xfc\xde\x88\xca\xb4\xff\x8dT\x14\xd9\x8fW\xcd2\xba\xb3\xafv\xf1\x06\xbe\xb0\x93\x13\xe90<\xa1.\x9c\x08\n\xc6\xf0(.7\xe3v\xe1A\xf9)\x14\xcf`\xfd\xa1\x8d\x03\xe6]O\x1fP\xb6{\x8a\xa5Z\\*\xdf\xbd\xf8\xf4\xca@\x99\x96\'\xe0\xd9\x9c|\x1a\xd3\xd1\xa2\xb9\xa4\x94\x89d5\x8e\x18\x1c\xfcQ}\xc5\xac\xa4\x83\xe2\x8972\xe0E\x8b\x84\x82\x9b\x90\xd7M\',\x1b\xa0A\xc0\xcebLv`YA\xe5\x18\xa6\x97\xaa?&\xc8\xd9j\xd5\x80\xf9<\xd4\xe2\x97G\xe2\xf8\xd6n\x16\x94\xce\x14\xc9E\xeb\xe6\x1e3BLm9\xebB\xa5!\x1a\xe1\x01\x96-\x04\x1a\x14\x93,\xc2\x034\xeb\xe2\x11\xee\x16\xb4\x83\xc8\x01\xc8\xdf\xff\xf8\x81\x06\x90\xfa\x97\xba\xc0q\xbe[!#N\xc41\x87\xc9"\x07\xf6g,\xf9\t\x15b\x02O\xe0tN&\x03\xe6\xc2\xc4\xe9\xbd\x1976HU/-\xcc&\xbf\xea<\x1a<\xb2\xe3\xcf\xeap\xf1E\xfe:\x00\r\xe7\x1f\x0f\xb8^g\x87\x9b\xec\'V1A\xcc\xac\xd1WIj*\x04%H,"\x08<\x02\xf2f\xe2a\x0b\xcak\n;4\xec\xa9\xec\x0e\xbf^_\xbcc7\x88\x13%\x92/\xd3\xaf-\x99\xa7\x8f\x83\x96g\xd7\x011\x1cf:T.\x99]\xc1k\x
ad\r\xf2-\xe2+\x11x\x11\'\'\xc1x\xf2^|\xcf\x04I\x16Y\x889\x0b\x04\x17\x05{(\x9a\t\x0c\x86\xdb\xcdk\x8b\x97\xed\xca$\xe5\x8a3{]\xbb\xdf\x82\x8bh\xe4H\x17\xdc\x02\x91\x8f\xdd\xc1~uNoC\x9c\xf6\x87\x19\x86\xb6~DQY\\\xa4\xe4\xe9\xca\x85[\xf6t>\xff|\xdcB\xba\x1f\\\x97V\xe9\x1d\x85\x1aux=\n\x97LlZ\xce\x84.\xbe\xc8Ly\x0b\xb8\xea\xf8f\xd4\x0cS\xdbh\xb2\xbc\xd2\x97\x90\x08\xb7e1<}fV\xd3fg\xc0\xe0l\xe0[\x9f|\xb1\x8fj+Tk\xd3\x17\x96\x8b\x0c\xd5y<H\xd5N \x18\xa6jC/H\x9bH\xfdGK$\xc9n\xf7\xee\xf5\xc4\x9d\x05\xbb"\xe7\xafx\xb0\xbfX\xdf\xdf\xb0H\xb4&\xb4\xbf\xfa\xf2\x9b\xa5\x12\xfc}\x84\xd8\x98\xdc\xc9\xca\xbfr\xffJ\xa5=\x17naM_\xd8\xc9\xa2\xfe\x0e\xdf5\xeb\x85\x14\xc6\xe7\xacl\x89\xa4\x99\x90\xf1\xdbq\x83\xf2\xe3\xa7\x8a\x04\x8a\x9f?\x145\x12\xc4\x02#$\x1d5\x92\x04\xb4\x07\xe0\x8cNg\xd9\\R\xf4f\xf8\xacm\xb1\xb1Id\xbe\xbd\xfc\xdd\xb6\x97k\xa2\xeb\xe1\xc3K;?AXY\x9b\x7f\xdeWn4\xf8\x12"\xc7I3V\xb2\x0fqOQ\x06R]b\x17,\xe8\x00#\xca~oc\xdf\xe2\x1c6\xa7\xff\xbb\xa2UKV\xb1P\xd9\xdf\x93\xaf\xca\xb5q\xc6\xcb\xddCN.b\xd1g\xdf\x1ey(z\x07\xff_\x06_\x04\xd5\xa2^\xc9a\x92\xb1<\xe9\x1b\xb176\xec\x00Vn\x03\x9c\xd9\xd1\xb5/\xbb_\xbe<\xb6mp\xee\x99\x0e\\\xca\x05\xcd\xed\x93;\xf6\tF\x8b<x.y\xdave\xcb(\xde\xde\x7f_\xc5\xfd\xf9iYG\x10\xbf\x8dQY\xf7\xe1\xa9\xf2\tr\x90\x96\x9f\xeb\x8ewOwe\x92\xd3\xedO\xe9\x93\xeb\xf20\xf0\x8f\x88tY\xaa\xac\xb4W\xa2\xc2\xb8\xe26\xd8\x12\x119\xe3\xe8\xb9-=\x0b\xf4y#\xf1D\x9c\x8f\xe9f\\k\xf5"H\xc6\x0224\xcc\x96\xa03A"ar\xe32\xa4\xbb\xa1\x88]\x95\xa2\xfdR\xb3\xa9\x1a\xa8<\xbc\x86\\l\xc8\x15@\x1b\xa9e\xafc\xb0S\x9fTq\xd6V\xd5fm\xe962\xedQ\xc1\x81\x89\xcbH\xf0\x9e\x03\xa8\x17\xdb\xebs\x8a\xe3\x9c\xc0\xa3\xb2\x19\x8f\x12r\xdd\xc1\xd2\xbf\xfb\xc5\xf1J\xa3\x0b\x1fIZb\xc3\xae\n\xdd>_\xfe\xa6\xd35\x9a>\xb33Y\xab\xf8\x8d\x84D\xea\xe2\xa5P5\x9e\xc4\xdb\x16r1\x84\x05\xeb\x82@S\x89+\x9f\xf4\xc7Wf\xff\xce\xda\x8au\xa4\x9ar\xb2f\xbc\xac\x9cN\x9f\xcc"\xb3wC\xc2\xb5h\x8f\xdc\xe2f\x91\xd1\x9fB-\x16R\x02\x1dgf\xe8n\xb5\xc5/h\x86\xd4\xaa\x91\xe3\x03\xcb~{\xda\xfe\xad\xcb\xde\x1f\xdb\xad`\nr\x1eV
O\xdbcr\xc6ow\xef\xd8\x0eD\xa5\x82{b->\xa5\x85\xa8d\xb5&\xb6\x17\xc4\x00Ye\xc96?*\x94"\x07C\x85\xdb\xce\xea\xd9\xb0\xa3\xf9\r\x96\xfb\xde\xda\xb1"w\x0bv\xa4z~\x95\xd4\xc4\xa7\x16=0\xe0\x13\x19\xc7\x15\xcf\xaeQd\x88\xc9j\xb5\xf1\xe0\xe2\x0f\xdd\x06:\xbdv\no\x93\xaf\xe0\\?V>\x8eOf\xef\xa4\x17r]I\xbb\x8cW\xb2\x9a~\x0e^P\xc2a5W\xe5F\xd8\xb0\xa0\xc2T \x12\xd4\xdb\xa3\xb0\x9c(\x9c\xbb\x8d\x7fD\xc9\xa8\x91\xd3\'\x93\xf7\x85S\x15\x9c:{\xd9\x8e \xf5\x0f\xbb\x9b\x7f-\x1dE>\xdf)\xe0o\x8b\x97\xc8\x96\xf2\xbd\xe9\xa6Y7\xf7\x1e\xb3\x19\x95\xd6\xf0_\xe3\x99\xe6\x9d\xd0J\\c\x0e\x8a\xa8\xde\xc2Ef\x8b.y\xf2\x1f\xa0\x8a\xe2\xb6\xad\x12\x1d\xbe_G\xee\t\xb92\x12\x1cK\x0bc\xaa\x88#\x91\xd0\x0b\xdc\xb7\x9c\x8b6\x1bW\xd5\xbf\x94E\x10\x81\x9d\xe5\xed\xbe\x91\xc0\xf0\x1c\x9c3\xd3\xd8\xfe\xa2\x1d\x10\xd7/0\xb1yI#\xde\xdai%%_\xd4BWs\x9ao\xf6\xef\xe61HKn\x88\x802\x9e_?\xba8g\xe1\x86\x87RB\xd5\x0e\xce\xd9\x9cc\xa5\xc2\x0b\xf1\xb1\x11\x9a]\x19\x11\x8c\x1e\x98I\xabe\x14\xcfC\x9c\x03H\x1b\xbb\xe7\xcdx>F\x10\xd1\x8cLJ\xa6N\xe0\xb7\xc7\x12\x02\x7f\xd5\x0f\xdbDf\xa2\xac\x15\xd8\xb9\xf3\xe5\xdb\xe6`\xe1\xd1\x13]/:\x1f\xbe\xd8\xc3\xfbSBo\xe5*\xe8\xc2\xfb\xc3+\xab\xdb\xcbJa\xdb\xdf\'J\xcdc\xba\xf3\t\x0f\xfa\xf6F\'\xb6\x80\x0bw\xd0\xd7\x026\x04\xe5^}\xff\xdb\x826\x9e\xc6\xf2\x9b\x0c\'~\xda\x98\x0b\xc9\xb1\x05\xef\x9d\x00\xd4\x1e8$\xa9K\xbc\xd2\xed?\xbbW+\xced\xe1\x1ew+\x9f\xbbe\x0bN\xac&\xfb]\r\xe19\x16\x9e\x85\xc2%[\xbcNg\xd3\xc7+\x17\xe7\xd7\xb7\xd6\x14\xb3v\xdb\xc3\x85G\xb9\x02\xfa\x8b]\xab\xbe\xaf\xd0\x87C\x8b\'\x0c8f!\xa8\x9fy\xd8\xd9!\x8b\xdd0`\x04\xc1\x0f\xb9G]8\xb3\xfd\xae\x8b\xb3O4\xe7\x1d\xa7n\xde\xfdi\x16\xa3\x076\x17\x0bP~c\xbdg\xc9\xaaUQ\xcf\x04\x8a\xc8\xf4{\xc8%\xf0\x08\xa22B\xd0\xc77\x1a\xf7\xf1~\xe6\xa2\xc5\xd9\x11\xf2?l\x15\xe2m%S\xe1?\x11"\x86\xf1\x0e\x83(?\xa9\\\xed\xf3\x87\x9f\xee\xcf\xd0\xfa\x1d\x99\x11nU\x8a\xedxD\ns\xc5\xf4\xd5\x03a)1\x7f\xc4\x8ef\x12\x9e\x9f\xd9\xc9H\xa00\xcf6\xdfr\x00B\xef\xe3\xebp=\xa9\xec\x94\xde\xfak\x079\xc9\x7f\x13\xd9\xd7\xc5&\xd3\x0c\x1d\x06vb\xfb\x87\xf6\x
c6$7\x8c\xe6Wi\xc9>\xf0\xae\x1e\\\x96\x8ffn\xe7\x16\xe6\xfe\'\x05\xea\xcd\xb7\xd6\xd9\xcf#\x8b:cWm\xd8\xb6\x87\xe2\xbb\xf9\'\xf24\xfb\xe3\xa0_\xc9N\x94\xddF\x85"\xbe\xcfRC)\x07ni\xf4\xa8\x0fwlnn\xe9m\xca\xa6\x91\x96\xf5\xd0\xec\xcf\xfd\x12}$\xd6\x17\xed\x18\x11\xbcV0@\x0f\xc5\xaa\xe4m\x99\x1dr\x9c.,\xd9\xc2\xc4\xea\xde0\x827\xfb\\\x90\x0c\x9b\xcbij\x8d\x13\xa4E5X\x12\x95\xb0o\x0e\xaa\xa1v-\xb7\xdc\xa8\xb5:x\xee\xbf7\xf29\x88\x1f\xe3\x9b)Aw6J[1g\xa2b\t\xbea(=\x9d/\x11\x80\x81\x92\x10\t\x96\x0c:&1_\x9e\x86\x81z\xf0>\x17\xb4C\x92\xd4]H)Qe\'{\x03/\x98!\xd2Mr\xf2\xc7~\xffh${\xf3\x1a:\x1a\xec\xba\xccu\xfd\xb1.}}\xed\xe8\xd3\x10@6\x07?g\r\xb1\x1df\xaa9\xa5\xcc\x9ds\xa4\x1ao\xeaT\xb6\xab\xb6zT\x13%\x93\nhy\xee\xbf\xe3\xff\xd7\xc4\x94\xe4\x18\x8e|\xebT\x87\x14\xde\xa0\xba\xf8h\x03\x86\xf6.\xeda3\xc9\xe7\xbf\x03#\xd0=}\xac\x0b!\x9fO\xa1l\xbf\xf7\\\x89\x1d\x8b\x0f\x88\x10\x879\x926\x87\x83cd\xba\xd7\xf3\xbf\x91!\xf5_\x9f\x86\x9d\xe6\x12\xd0\x11\x1eap\xc6\xc0U\xb6\xf8\x12)M\x89\x84\x1eS-\x02v\xb0wW\xed\xd3\xd96R(,\xc9\xcf\x1a\r[)\x05\x1c\x93\xc4R\xdb\xa5\xe0\xe4\x01t\xc8D\x08\x7f#0\xad\xc5~d\xf3\x97\xcae!\x9d^2\x93\x10\xcb\xff@\xaen\xaf\xcf\xcc\x92K\x8f\xd1\x94\xdc\xe4\xa4\x19\x0b\x88\xb9n\xe3\xd6\xe1\xa1\x9d\x00\x92h)\x0e\xeb\x95\xaet\xbf\r\xbfc\xb9\xa1\x1a5b\xa4\xb9\x07\xb6^5\xf9!\x95|\xd5h\x1b\xa4\x1c\x0cB\xc25p\xdd\xf2\xdd\x91|<\xde\x95\xea\x15\x086{\xbc$\x1d\x84v\x86\x11Xj\xa7\x93\x8e\x1a\xff\xed\xeeXF\xed\xf3\xdf\xd5\xe9[\xe4\x80\xc5\xac]o\x9fm\xe3\xc5\xcfR9%\x11\xb5\xbd\x9f8\\iD\xb4\xc0\x85\xc7\x80l\xca\t\xb0\x1c\x00\xfa&\xb4\xf6I\'\xc7<q\xa2\x8eLQ^\xd9\x9f\xb2i\r\xcb\xdd)"\xe8\xfd>\xe4,@Ug\xa3+\xcbv\x95\xbdpGO\x8a\t\xcf!\x99y\xbec\'\xdb\xc4\x9fn(M\xc7\x1d$C\xbb\xddz\xb4\xac\xf2An\xdf\x1b\x0c\xfc8\xba\xa8l\xa2\xda\x07l\xd0^\xc45{$d\x80\xc0\xd6\xc1"\x9e\x16\x7f\xe9\xe6\xef\xdbSu5v\xcb\xdf|\xf8\xe7\xb5\xc2\xf8Z\xb9C\x86zf\x8dV\x17\x1azx\x18J\x81\xa6O\x97`\x80\xb0N\x989\x92T\'\xa2\xa7&B\xe5+\x0bs\xad\xed\xc45\xcd\x85\'\xcf\xfe\xe1\xcdT6R\xd5\x98\xd2\x15qX\x81\xb5\xcfp?\xcc\
xd9\xab\x15\x1c\x96\x96\xa1h\xa6\x14K\xff\xec\xd0\xdao\x9dc\xa9]\x8e\xe8\x7f54\xcem3\xa6\xa1\x07\x91\x8a"3\xee2Z\xe8Ci\x96Nh=\xa7Sp.\x0eZ\x03\xabz\xe2\n\x0f\xa9\x06\xb3\xd34T\xc2\x12\x12\xb4\xa5G\x05\xd1t\x12\x89\xe5j-}\x01\x93\xebUY<\xb6\x06\x8c\xccJ\x13+\xf67\x96\x94Z\x12y\xec\x1e\xd9\x02Q\x07\x01\x9a+\xce\x87o\x90c!\x81@\xf43\xe6\x1c\xe2K\xc5\x923S\xc3\xfeE[\xb4V,\xad\xb1\x96\x1b\x06\xdev\x02\xdc\xd8\nY\t\x86\xf4\xf3\x00\xcf\x90\x0b2\x7f\xe3T\xe6\x12\xc3`I3\x80\xca\xae\xeb_\xc9\xa9\xe2\xf7X<+v\xef`\xeaj\xf1\xe4\x95\x1d\x1a\x1e\x058\x8d\xb0\xac\xdc\x8c\xdc\xf55X\xefJ\xf6\x8b\xa3\xf7\xbc\xe8\xee>\xdf\xba\xdc\x8c\'\xca\x115\xeb\x14\x16B\xa0\x85\xff\x81\x02\xb2\xf7\xa0\xd9\x06\x85\xb2\xc4L\xc5\x82\xe1t\xaf"^<0g\xe2\xb2\x96<\thpR\x03!\xb5\x8b\xc5\x96\x07\\\x88\xf4\xf1\xb3\'\xb6\xd8=\xd9\xbc\x83xU"\x13\x1c\xb5[\x0c_Gr\xde\xbd\x8e\x9f\xb3p\x9aR\xe0\xb1\x19\x93\x9d0\x8b\xf9>\x1e\xef)F\x92\x80\x8d<h\xf9\x04\xfff_!x\xae%\xc2M\xae\xfcK\x8e{\xc5\xd1\xceiu\xee\xda^1\xac\xc3\x1f\x18\x10\xbc\xfd\xd4\xb69\x91\x85j\x01\x7f\xba\xea\x97<U\xe5\x91\xec\x80\x1fI\x9f\x85cL\xbc\x0b@ 
5\x10(:T7c\xc6\xda0\t\x8f\x9a\x10\x0c\\/cS\xf9\xfe\x10\x7f\xa8G\x1bS[\xba\x9f\xc1\xde\x8dx\\j\x0b\xe9\xc2+\x96\x8e\x08\xa6\xdc\xbb=%\x97M"\xe9\x96\xfd\x181\xb6!\x15\xf5w\x0e\xff\xfb\xac\xab\xae\xb4\x99Y/\xbe=l\xad2\xbe>\x1eY\xea\xc1>\x98R\x9e\x1a<\x18\x1f\x97wqx\x8e\xedY\x06!E\'f\x92w[#3\xe5\xc4[\xb1\x8b\xe1\xdb\xafy\xadW6\xb9\xaac\x95\x9d\xdb\x85Kv\x11\x87\xe3E>\x87\xd2\xc0\n\\\x146\r\x89\x14\xd0#i\xef\x83\xb8\x9em\xaa\xd36\xba\x11\x9e\xd8\x1e\xf4\xf5\xe3-\x9cA\xdc$\x13)\xbd/g\x80X\xc2\xcfm\xd9{t\xb9\xa0]\xd3\xc6\xd3)l\x9bKk\n\xbb\x89\n\xeai\xbc,TT3\x07\x1f\xa6\xe2L\x06\x8d3\xe7\\\xbc\xf2i\xd7\\\xbapu]\xcf\x861\x8b\xbc\xe0:\xa6\xa5\x02\x13\xcel\xcb\x95.\xc4ih\xe7\x15\x17ea\x8bf<\xd1\xbc\x04\x8f\xbbu"\x19\xdb\x18\xce\xe4\xef\x04\x95\x00\xa1IV\xfc\xb1\x93Pu;vZ\xba\x1c\x93\x9ei\xce\xaaO\xe6\xa4y\xee\xa2\xc5\x9d\x14\xdfK\xed\xb3q\xday3\x14\x87S\xf2D\x1cEN\x96\xc4\xf0\xa3\xae\x9e\xda0\x7f_\x95\'\x9b\xb6lT\xa5\x1f\x9d*;\xc2(\x02\xb2}\x85\xe0\xf6\xf1\xcePf\rf\x02B\x87\x94\xafO\xef\x995`}\x19zK\x01\x03\x03q^J\xf4qE$\xc2\xd6\xa1\x84\xd3>?aO\\\x0f\xd0\x1d\x88\xb3\x9e\x11N\xfe\x03N\x02\x15\xd2V\xa0\x0b\x15R|\xda\xde\x98V\xd8\xc7\xa8\xfaO#\xa9\xf6f,\xf9\xe2\x94\x0c1\xc2\x89\x1a\xf9\xd9\xb7\x8br!\xb1Q\x87\xa2\x02q\xf6\xef\xb1\xe28\xaf\xaa\x19\x0eH\x7f\xf6\x98\x9f|Ii\x8a805H\xe3\xe4\xd3fvj\x86\xf2\x1b\x7fQ\xa0j\x92\xdeY\x15v\xa3\xd5!\x19l\xf4nB\xe7\xc4\xdb!\xe8\xeb[\x13\xe0\x82\xb3\x1ae\xea\xc3v<\xdb\xc1Td\xb6.m\xb0e8\x0ff\x892&\xba\xbf-0\x80b\xe3J\xc3\xc9+Gv\xfc\xc9\xfb\xeb\xe7mC\x19\x18\xf9\xbb\xcf9\xec\xfc\xb2*\'X\xb8\n5\xd2\xd1\x7f\xa6\xe7h\xf5\x96d_\xb9r\xd1hp2\xd4\xc3\xeb\xb0{\x02r\x1f\xa9\xeaC\x0b4\x99&\x98\xacJ\xe566`\xean0\xea\x88\xbf\x16\xed\ri=}|W\xa8\xc6\x83\x15\xe3\xc2\x13\x06/uU\n-\x1d\xe3\xc7\xb3?\x96\x91\x11\xd7#\x936\xc9/\x82\x82\x85\xb1A}A\xae\x1d\xe0\r\xce\x02`\xce\xf1\xed\xb5\xc08\'\xf3\x180}\x9cD\xdbH,\x03\xd2\x10\x19\xb8\nq\x96Z\xab\xcb_\xed\xe9\xd8,\x8e\x83\x92\xa1\x1f\x9eY<>%\xb9s`\xd18\x89\xfa.\xc3\xb8\x9d\x9e*U\xba\xb8$0\xc0\xd0\xc1\xb9\r7\xcaDg\xaeN*_\
x89\xf1\xae\xf2d\x0c\xb4\xec\xdax\x1d\x11\x90\xbb\xb0w\xc1\xa2*vW\xd4\xcd\xb8,F*H\xff\n_H\x8d\x1do\x16\x98I=\x86!\xf2B\xe1>r\xb1\x0b!i\x9c3\xd55"a\xe1\xd0l\xea\xc0\xe1c1\xb4\x9c#\xa8\x0b!\x86xT\xa8\xe9\xb0\xfb>M\xcd\xb9\x12\x9ev\xe6\xec\xa9\xe2S6\xd2\x7f\x83\x1d|\xc5\x93\x87\xde\x91\x96%\x91\xde\x96-x\xe4\xb6\xc8\xfb":\x91\x02\xc4\x82\xd9\xf6 \x9a^R|\x02\xcf\xb9\xf8\x15\xe5-\x88\x12\xcc\xf0\xaa\xef7\xe3^\xbf\x1aF\xc1\xe1`v\tR;\x0c\xe7\t\xc9\xcb\xa7\x82RY\xe6~\xc5${\xe8\x8c\xf3a\x7fa{\xf8e\xc5T\xed\xf4O\xcbb\xc9\xeb\x94i\x0c\xe4\x881\x1e+\x9a\xa3Z\xc8{\\\x11\x92\xa5\x9aI/\xc7P\x0b\x15"r= \x9a\xa9\x1d\n**\xb7\x16P\xd5\x8a%\x83\x9d\xd9\xa6Q\x01\x1aw\x0f!z\xb4:\xe1~6\x0c>\xfa\xban\xc6\xb9\xe6P\xb8\x80\xa1\x86\xee\x94\x90;\xbd\xe1\xc8\xe6\x8b^\xb2\x83P\x8bwe~u\xc1v\xec<\xf7\xc8^tf\x1cH\xf2\xe9N\xff\xc5\x9a5\xd2\xcb\x84\xf30m)\x85\x9f;\x10z\xa9\x01\xc8\xb9@\xec\x89\xa0\xdf\x11e\xc6\x03\xab\xecE\xf1\xe5\xe4[x=\x96\xdd\xbc\xecA\x8fb%\x9de!\xf0\x15\x03\t\x86i%No\x98p\xbc\xf8\x88\xc3d\xb2?\x9f\xcd\x08\x90[\x956\x8d\xa8Muq\xf2[\xcb\x99\xfe\x95{\xa7\xed\xac5\x1d\xa2\xa2\xa6\x81\r\x08o\xa4\\Nu\x89n\x96\x82\x90b\x87\xba\xf6\xf4\xd1+\xf3\x18-(\x0e\xf0\xceIv\r8l\x9b-M\x0b\xdbc\x02R\x8f@\xd0\xf6\xd3+\x16 
\xda\x8c\x1b\xf6\xf2\x93\xa2\xb5\x8c\xa5\xbd;\xda\xd0Fg\x82\x10S\xbb\xb2>{\xba\xba(\x14\x06\xbd\xb2\x95\xbf\xd7\x8cG\xe6\xb6\xe9\x9a\xdd\x15W\x9e\xfc\x89\x9f\x10W\x83\xa6bm\xb1\x85\xbb\x0fr\x10\xba\xed\xce\x9f\xe8""\x0e\xd3\xee\x90?`\x82\x88\xd5\x93?\x0b\xc8;{A\xc6T\xd4\xa9\xd8$\x96}\x11(\xe7\x9bK\xa0#V\xf7\xecG8g\x11~:\xbf\xd3\xa8}\x9dTn\x91\xadz\x81\x8f\xbd8\xe2\xc4\x8b\xe8\x80\x114V;`p\xa0\xbf\xb8\x0e\xb7\xbebO\x17\x87\xecN\xf1;[\xf3(l\x85\xf6\xfd\x84\xef\x0e\xc1\xbc\x96\xa1\x03\xcb\xef\xba;\xac\x02\xa4\xa2\x9b\xb7\xc9\x9f+\xf6^\xbe{|\xd3.P\x10\xf1\'\x94\xb6\x90U\xb1\x8e\x11#n\xb6-\xfe\xc5xu\xfcs\xf3^0\xb3\x0b\xbf\x1c\xdb\x1f\xb2\xb0$\xb1\xfa\xdf\xaf.\x98Oc3\rK\xa08\xdb\xad\x9d0\xca+\xc0\x90\x8c\xa46\xe8y\x9b\xdac\xfb\x02\x93\xf7\xdfb\x93T\xc9\x87\xc2\x19\x15\x8d\x9c\xb8^.\xb0;hC\xc2\x1a\x9df\xf3\x0cfsiqy[\x81\x81\xbe\x97\xd2t\x98aX\xf9\xd7\x00W\xda\xb3_z\xff\xee\x8d\xfd\x14\xcf}\x8f\xeeZ\xc6[\xb0\xdf\xe9\xee\xee]\x9a\x8d\xa3\xcb\x8b\'\x7f\xecm:\xbf\x9b\xed\xc9\x96\xb8F\xd3k,~\x0b\xed\x9f\xe6\xe0*\xe5\xe5YO\x7f\xd7\x1cQ\x14\x14\x02\nlf\xf6r\x1fxn\x14*lt\x1c\x8a\xdbP\xc5\xecZ\xa4\re?\x9c\xeb\x88N\x8c\xf3sr\xa4\x98\xe7\xfc\xd7\x8ef\x04h\xb1\x089\xcc"\x02O8\xeaj#\x86O\x14\xe1w*a$\xd9\xc1-{\xca\x04\x92\x89\xbe\x98\x9d\xcf\x14\xc5%\xed*\x14\x13\xc3\xc4\xa4\xf2\x1a\x07\xc1\x1e\xaf\\\xcf\xc9\xb3\x10\t\xe9\xcc\\\xf6\x02Jcn\x16\x04\x87T\x19]7\xc2\xc9o\xefs\x0b\x19\xd18F\xfa\x00\xdeJ\xb1D\x1c\x12p\x9b\x07-\xa2t\x9b\xad\xe7b\x86Q\xe1\tz5\x04\x168\x14\x0b>,\x0c\x14\xd4R.\x97\x984\xea\x08\xf5\xcc$\xe7^\xe6\x02\x80\xf8\xbc\xd9\xef\x80"n\xaf\xb9}\xed\x00(\xc7i.\xdc\xbe}-\xc1WAf\x14\xd4\xc2\x02\xf2\xe53\xc8A\xa0\xacn\xb3G\x875l\x8cX@T\xad:[\xb6\xd7\t\xee\xd5\x10>$\xc9+\x8cf\xe0\x03zK\xe8(\xde\xeblc\xea\xf4\n6\xbaB\xcf\n;tS{^6z\xc2"\x90\x84\xe5\xcc\x16\x10*K\x04\xe8t\xaa_\'\xd9\xd7\x19\xdb\x9b\nS\x87j\xbaH|{N\t\xc2w\x8a\xdf\x12a\x0b\xf5\x8f\x17\xf6\xed=\x08x\xd6\x1bg\xe9\x1aG=b\xf3\xfc\xcc\x8ae2\x0c|\xb5\x1f\x15:\x9e\xc8\xd5\x03[\x8ev}d\xf7!\x8a8\xc22Y\x8b\x10\xbbf#go\xb6\xb1\x16
\x81\x15o\xc6dh\xc0\x9bT\xb6%#\xd3\x06S\x82N\x97G\xf3\xce\t\xf8x\xbcB\xad \x16$\xf7\xec\x97\xa8\x84b:\xaf\x03B\xbc=\xfc\xd7h49b\xc7\x98qJ\xf3\xf6\xbf:tm\x92\xd5"\x05\xba\xf0\x06Y\x12$\x99\x88Bd\xf60\xc3]\xa4\xce\\\xb0U\x88\xf9\x9a\x7f\x9f\x83\x05\x90\xacL)\xb4(a\xc4\x8a\xa3\xab??\xd9\x95\xa0\x0f\xa6\xb5\xf4\xe8rH\x17\x86A\xdd|\xd6\x11\x80X\xb2\xc1G+\xd7N\x85<S\xc2K\xe5\x11\x02$\xd1<\xabuJ7\x9a\xc1\xfb\xfe\xfa4\xac\xf6\xa1\xe5\xae]m9\x12\xe01\xe0\xa8\xfd\xaf\xaf\xbf\xed\xb7I\x0b\xa5\x84\x15\x0eM-\x8e\x18Y\x97\x88\x02\x92\xfd\x0bS\x8dQ\xf7\x9d\xca\x04\x84\xe3\xd3\x96\xbc-\\\xde\x04B\xf0}\x8f]\xae\xa7\xee\xda\xed\xa9\xa9\xa8\xd1\xff\xfe\xac !\xff\x97v\xd7\x0e$+\x9f&v\xb6\xfb\x1a\xbb8zbi-\xcd1\x90uv\xf2V\x8b\xcdX?\x9c\xccziI\xee7\x12\x1b0k\x1e,\xa5|\xfd\x14\x15\xb4n\xfd\xad\xe5H\x84\x1b\xc9\x88\xc6E(\xce\xd0z\xd1q\xd6gF\x0e\xe1\x8e\x19[\xf2\xaf\xa1\x92c\x1c\xf93K8\xe0J\xacT\xc5`\t\x12\x1c\xf4\x87m\xa3\xeak\x87\xf1\xa0\x01\xc2\r\xe4\x94G-Kn\xe1s\xe5\x7f\xcd\x89\xa5j\xda\xbd\xc1\xae\xb7\xc7\x1d\xab\xb2\xe2\xce/L\xf1\xea\x06\x80at\xb0v\xdb\xbc\x07\x9b\xfa2\xdbS\x12\xd1\xf3\x01\xae\x8bv\xf9b\xb9c\x87\xcd%\xf1\xba\x1e\xda\xbaC\xec\x103\x0b\x1a5b\x01\x1e\x87St.8\x05n6L\xd1\xdb\x7f\xf4\xe2\x07rz\x8e\x08\xe6\x14T\x13\x96\x96d\x12E\xc1\x1d\xa0DC\xa9L\x08\x83_\xfa0\x8f\xd4\x9by\xb7k\xc6\xc2\x11\xac\x14\x86!\xec\x84\xady1\xdf4\xb7\xf0\xea\x10\xd03\xd8tR\xe8.\xa2\x81D,sv\xb4\xc2t!Ym\x87\x94&3W\xca\xbc\xa5\xfekA\x7fM\x1a\x16\xf18\xdd9\xa7\xf23\x02\xa2\x1e"\xeelC+\x13;RdC\rt\xb5|\xc0\xe8pD\'\xf0\x0cO\x16\x84f\xe4\xb2sb\x14\xd48\x85\xad\xbb\xbah\xeb*$\xdc\xfe=\x9a\x11%nR\xc9PaD9\xa3\x93\xec\xba\x1cAl\x8e\x9e\xd9/Q\xd0\xb6\xb4#D\xb3\x8c\xa3Ym\xa3\xaeo\xec\x18\x8buk-\x9a\x83\x81\x8d\x85\x9a\xe3\xdb\x0e\xd0\xa8\x13/\x9d\x97\xbc\x1cN\x13\x06\xc7`f\xb9\x1b-b\x85\xc5\x86$\xa71*\xf7\xc3\xa1\xaa0\x85=\x87\xa4\x0b\x15"TJf[t\xbe\xb4\xab\x05\xcc,\xf1aWJ\xfa\xf6\xbbY\xbb\xd8\xad 
N\xc4m\x8e"40\xa7\xaa\x9e\xd8W\xb0\xeb\x1f\x17\x82]-\xf8\x98\xf2tS\xf0\xda`*\xb2\xb5\x97;\xf6\x92\t\x1ci\xa2\xdd\xa26\x07\xaa_&\xbe\xce:3^.y\xf7\xda\x96\xd9\xb1\xc00\x04\xca\xe4v\xbc\x0c\xff\xfd\xbd\xf2\x9f\xb0\x91B\xc8\x8e\x91\xc5\xf7\x0f~_\x95\xdf\xc0UI\xff|9\xe2\x95Y\xd5*\xc2\x1b\xb11{T\x9f\x9a\x97\xe4eF<@\xbc\x81M\xef\x18\x0f\xdb\n\xa6\xa1\x0eP\x01\xbe\\\xa7\x1b\xc3\xa41cFa\x99\xe4\xf9\x1d\xdaP\x16Q^\xd9\xd9\x85!\xcb\xa4\xc6\xca\x9c;3\xc3\xdcw\'\x162\x10\xb6\xa9u\xef:\n\x89\xf9]A\xb0\xa5\x1d\xf5*\x9a\xc8\x18\x82\x97+\xcdXu\xc9\x9aY\x84\xb7 l\x89\xd7\xcc@\xd1\xc9\xe5\ny1\x05\xa0\x95\xbf\xe6\xc2ez\xe8nH\xdc\x06\x82\xa8\x9f\xb3\xcf\xed\xc4\x7fvJgC\xbd\x86Q\xd7\t\xba9\x9cnE\xa2\x84\x82-\xdb\xb9\x02\x89:[\xc6\x07\xd7G\x0f\xd61\x81\xa1/\xc5\x16\xe4Pt\xc8=T\xe8V\xe2[\xd9\xe7\x1c\xa1\xf36z\xe5\xdea\xb4"4K\xb1n\x1f\x1f\xbc\x9c\xbaaF\x8fx9Og\xb4\'(U\x1a\xef\xd1\x96S\x15\xc0\x82\xab\xa9\x97\xdfW\xcc\xdc\x91\x13\x94\xdaO\xf7\x03\xa5\x0b\x08U\xc2*\xf9\xe9=`\x86#\x151\xeb\xf8\xf7\xa6\xac\r\xed*\x8b^\x0f\xb1\xda\xefp\x14\xb3\x19|\xdb\xaa\x81(u\xfeeI`\x8c\xc7\xec\x98:\xccn\x9a\xad\xac\xdbc\xbf\x8cC\xb9%\xcb\x93\xec\xcd*\x85\x06\xdf\xc8\xc7o\xf8\xfa\xfe\xc5\x07;\xb8\x15\xe6\xa8\x11\xbbO\xce\x06\'\x880\xe8\r\xec[=\x11\xbe\xfc\xb0\xcbm\xd3Lq\xfa{\x8b-\x8dR6\x82\xe5\xc4\xa8\xa2\xaaZ\x1c5\x97\xbeaK>\xec\t\xd7\xad\xed\x07\xc9\x1e\xd0\x85dn\x94\xcbfy\xcbk\x92R\x14k\xcf\x96[\x19$\xafB\x1b{\x06\xfc\xfd\xd3\xec\x06\xd4-{a\xc2.\xbe\xdf\xb5K\xc9\xb1\xc5\xfd\xca\xe3k%\xaf\xea\xa5}\xd7HC\xb0\x94\xb6\xd0\xd0\x9f\\\xd5s\xe6qI(\x10\xe1\x8fh\x05V\xa4{!\x95_D\xfe\x89\x97:\xd9\x11\xb6\xa6k\xc6\x13\x9b\x9d\xb0\x98\x08}\x10\x07H6\x0c\xb8pn\xf7\xd3aj,\t\x7f\xf8\x83\xfaL\xd7\xd1\xb2N\x08\x86z\x8c\xde\xa9\xa1;\xc3\xe7(\xfe\xcc\xe0\xb5\x1e\xd9Z%\x19G\xa2<[\x81\x81\xe3\xc1\xa0~ra\x9b\xd5\x8d\xd6\x91!\x15\xd9\xe5\'\xf6\x1d]r\x94=\xfc\xfe\xeeX\'+;\x10\xa1\x88\xb3\x9b\xc2\xe95\x10\xa0\xfc\x87\xcc\x9e\x92e\xbcza\xae\xfe2+\xc3\xd1\xad?\xb1\xc0\x94\x1a\x01\xb8KP\xa3f\xb0\x8f\x11K=kZ\x05\x97d\xffl\xce\xce\xa6K>
""\xafQ\x17\xa7tD\xa7<\x8d\x18\xe7\xa4\x8c\xe8PSE2\x97\xed[d\xd2\xe2\x84\xd5\x1e\xad\x16\x81\xd3\xab^\xdaw\xd9|\x97\xdbb\xbd9\xb7\x8c\x93\x8cL\xd2j\xae\x17\xcd%P\xf1\xbf4\x97\x1fF$\xf7\x01\xcbC\xe2H\x0e\xd6\xc8\x82\xf7\xae\xb5(\xb1\x8b\xc0\x03`0z\x10\x00\x06\xa7?h3\xf8\xf8\xe4@I\x1f\x8f|\x18b\x88\xf7\x16\xce\r\x8d:\xae\xde\x01?\xde\xd7\xd3\x16\xec\x10S\x80\xc9\xea\xbf\xed\x9f4c\x8e_\x075\x94\x80\x89\xa2\xbd*\xe6\xbd*\xcaC\xd9\xba\xcd`\xd6\xdc\xcaw\xfc\xd1\xee^\xa6\x97\x19\xd2j\xecY}e\xc9l1}>d\xf4\\\xfe\xd2\x9c\x88i%]AE\xc9\xad\x90\xd7\x13\x96p-T\x9e\xb3\xbb\x82q-dK\xd53\xb3\x06%\x91`\x19\x8e\x99\xa6\xd7\x8a6S\x05\x0b\xc9\xf2{\x8bf\xea\xd1\xcb\x7fP1\t\xac\xf5\xb5Sv\x83.\x7f\xb0\xa3m\x88\xd8p\x86\xb1(\x93\n\x07\xa9J\xe4\xec!\x1c\xd2\x10\x83\x97\xa04BBV\xb0\x15\x1f\x1a3\x98.u[v\xf3\xa8[2\xa4\\ps%\xa8M#\x12\x90\xdf\xfe\xc3\x18\xa9)\xcfZ\x08\x98X\x05:\xa5\xaa\x1e\xb0\x84\xc7\xc8N\xca\xd6 \x89\xa2\xd8\x0f\xf4\x19\xcemb\x83\x8d\xeb \xe0\x9f\xf8\x17\xfa\x19\xdc\xd0j\xfa\xd1\xefu\xc0.$\xf7\x07\xd9\x19\x92\x9d\n5E\x8a\x9f\xc8\x8a\xe3@\xa7t\xc4P\xb0\xe7W\xfd6|h&\x99H\x17\xd5\xf5J\x7f\xdc+\xd6&~C\x8e\xfd\x88\x15\xfe_6\xa5\x08xY\x82P\xb8\x95\tKT\x0c\x0eH0\x18\x07\xfcOi\x86<#\x96(\xd6\xed\xbb\xfaj\xe5\x85\xa5d\x8c!\xaa=\x0e\x92\xd4\xect\x12\xa1U9\x8b\xe5\xad\x83\xe9\x8d\xc7vah\xae\xe0\xb6\xaao\xe6\x90\x12\xc3\x91\x10\xd0`\xecO\x08\x0f\x82)\xc9}\xd0D\x1b*"\xe3\xe1\xdb;\x08\x9b\xd8*V\xda3q\x8cy\xb4\x08\x81$\x84\xde\xe29\x16#\xdco\xe1\xde\xcc\x14n\x11DA\xef"\xfb\x88\xf0\xc6\x18\x92W\x9b\xb0x\xb1\xb0R\x08Wl\xf5\xd0\xb9\xfdd?\x02\xf4\x94l"P\x86\\\xc0\xf9\xfb6\nZ\xf0\xb7\x10\x0c\xa9D\xfb\xac\xf8(\xacW}U\xd4\x12\xa6.\xebp\xe3\xac9\x80l52SN\xf7ej\n\x1b\xd3^\xbc\x8c\x00e4\xf3\xfc\x10\x0fq\xa1\x19k\x8b\xb1}\x12\xb7\x19\xf5\xaa*<\xb3\x13o\x12$\xb8\x1e\xef\xae\x01\x11\xf7\x18O\x12l\xe8"\x8e\x90\xf8q\x110\xb4i\xe2\xe9\xc6\xe7f\xda\x18\x1a\xb7\xd7\xaaF\xb25\xcd\xbf\xfaSmG&\xf4\x8a=\xa9>\x8c0\x11\x1d\x7f\xdc\x05\xdci\xb6>\xb7\xeb\xee\x16\xa7\x07\xbe\xd0&\xa0\xaa5\xd4\xdc\xd9^\xc0\xd4\xec\x85@\x
15$JN\xa7\x83\x0f\x85\xc9@\x86\xb0\xa8\x1e\xc5\x0b\xd3\x9a\x81jkpPj\x95\x85\xbb\xf6\xe7\xedf\xacq\xc3\nDeW#\x9e\xc7f\x97\xeew\xb6&]5\xb4\xcf\x92*\xf3\x12\xb5\xac|\xfa\xc4v \xa0\x11\x8cah\xfb\x12O\xf0\xfb\x9fEd\xee\x9a\xd8\x17\xf7\xec\x15\xa6\x8a\x86\xb6\x95\xd08\xb43BR<z\xd7\x8c%\x0b\x07\xadO\xbe&\xe3\xc7uBE\xe0M\x85\x9f8\xf6\xc8Z3\xe2\x9d\x80\xf9\xd0~\xe5\x99!\x93\x03\x14\xecN{\x01OT\xff5i\xc5\xcb\x8d\xaao\x02O\xd9\xb8\xd1\xd2\xdbD\xd0\x059;\xbd\x84\xb9\xa5D.\xb0\xdc\xea\xbf\xbbJ\xd0\xbb\xef\xc7\x0c0\xff(\xd6\xca\x98T,\x9e\xdd\xbb\xb7\xa5\x02[\x9b>!:=\xeb\x9aK\xdf\x11\xc8"\xc3L\xaeL5\xe3I\xc8\xc8\xa7\xc9&\x1a\xca\xe0,\x07\x1fX\x00\x11Je+%\x9b\xaay\xd3>)=\xf7v\xb3\xaa\xcc\xa68\x9d\xd8\xc1\xa0D9\xe8\xc2\x84F\xf3\x0fDS\x8f-\xa7\xee\xd1\x90\xe0\xc3\xdd-m\xbf\xb7um\xb5B\xac\xa8\x95\x8a\xcfG\x14\xbb{k\xdbI\xb5u\xa2\x1c\x08g\xd2G\xdf_|\xf9\xdaHV\xfd\xd1\x8e={Hn(\xe1\xf0\n~G\x0f\x91A\xe72P\x01\x1dZC\x7fO\x97\xaa\xea\xe9\xe3\xf4\\3\x16`\xab\xab\xa3\xc3\x01mD\xa08\xff\xc6NGT\x1d.\xb2\x0c\xca~\xf3\xfe\x85\x1d%VYD#\xe5\xe1(\xb5\xc7\x0e\x04\x0c\xd3\x1e\xc2\xca\xec[BN\xf8\x1b!\x04\xff\xb6P\xb0P)\x8b\xc3\xd7@\\\x9c1\x14\xc6\'$\xe1\xc2\n\xf9\x00h\x184;\xf4\xd4\xf2\x14\xa2\xfa\xda!W\x9c\tr,\x1a\r\x98\xb2\xfd\xed!$\xd8\'\xe6i\x9c\x90=\x9bonG\xcezf\x11P\xb1\xf3\xa6z\xba\xcd:\t\xa0y\xb6\xa9`&\x19\x97\x89\r\x16\x13;W\x1e\xcdZ\xe4\xc7\x96\x85\x92\x91\xfc\x03\x80\x84)\xc4\xf2\xba\x81\xe7\x835)\xee\xfc\xb1\xfb\x19\xa0\xe8\xc0:nyd\xe0\xb6\x8f{\x15\x85\x14J\xa5nL\x04NI\x89\xc2\xf4\x04\xb7!\x8bH_\xd8\tF8\x0f\xae\xbf\x1f5\xa2\x82 \x80s\x18\xe4\xc6\xf0\x04\xed\r8|\x1d!C\xd8\x03\xbfj_U\xa5<G"D\xf1\xee\x8f\xbe\xa0\xfcF\x19%,Yr\xfa\xd6\xb6\xa6\xce\xd8y\xbb\x7fo\xda\xd3\'\x81\xf3\xd5\xa3\xf4\x96\x88\xc5[\xd5\xb7\x1f\xda\x7f\x99EO\'\xd7\xc1i\x84xr\x97\xfc2\x03\x16\xday{\x1f\x8eLi1\xda\x9au\x93\x8c\xd4(l~}\x8bz\xe08\x18\xf9\xbc\x1dz"\x12\xed\xde\xa1]\xc4\x8a#\xd9\xb1\xb6\x01$Q\x96\x8a\x19\xc1}C\xe8\xea\x14 
V\x9f\xd0\x9b\x87V\xb4\x83!\x95\xea\x1a5\xec\xa23&\xc0\xe1\xd7\x0f\x0e)\x0c\xa5\xddO\xca\x88.\xa1\x04~\xdb\xd7\xab\xf8\xd7\xfe\x8d,=q\xfaM;\x1d\xbcA\xbd\x05\xb5,\xc7y\x0b\xd8#\xbb\xe6\xfbl\x10\xa4"\xa4\x10\x0e\xb7)[E\xa7\xb0,\xc3\xdd\xdbZ\xf7\xed/\xbbJ\x0c\xe3@M\x8e\xe8\xfa\xa2\xadJ\xf7\xfe\x85|\xe4\xee\x04H\xfd\xb1U\xbb3\xcb\xd4\'\x8fn).\xc2\xd1EpO\xf6\x8b\xa0\xd9$\xdb\xbbk\x0fN\xda#(\xa2\x98$\xc91b\xc1r\xa9V\xf6\xfe<\xe6\xc1\xe0\xb4\x01\xef\xb2&\x044M\x06\xc1\xc9\xb1\xbd\x1a6\xf0)\x00=\xaav\x97\xef}\x840\x1335M\xaefm\x9e\x91\xdb\x819iv\x98"\xda\x05\xaf\xaaU\xb09\x8c\xfc\xa9\xfa\xcd\xdfB\xdd*\xa4\x19\x01\xb4y\xeeOr_\xdd*5\xa7\x1e%\x1bz\x05\x8eg\x809\xed\xd3\xc2\xd6\x8f\x95Al@\xbd\x9a\xd9\n\x90\x1a\xcc\xce,f\xe0\x1a\x8d\xec\xa1v\x15TU\xab\xe3\xe5\xfd\xebvF\x89K\xd4\xff\xd2]\x9b\xe6C\x88\xff\x8fB[O\x04\xce\xcb\xbe\xa5\xbf\x95|\x0b\xfd\x1f\x06\xa5\xf3\xe8\x8b\xf0`\xda\xe5\xd7\x9e\xeb \x8a\xb8\xc5]Q\xf5\xdf\x0b\t\xa4(:5\x01)ol\xf5\xd7\xce\x9c\'\xa5fz;\xf7\x14\xa1\x11+\xd4\xc9\xa6\xb1\xe8\x85\xfa*E\xd0\x92\xf5\x1b\xcac\x12\xd0s\xbagv0\x99jp`4?\xed\x0e\x8a\xea\xff\xa6L\xc0\x00}7SK\xd5\x90\xce2\x01\xc2\xfa$E-\xdbc\x93\x10Uk\x8f\t\xf63\xba\x19nh \xf1<\x17\xf8\xc0\x1eHa`\xbd\xd0I\xc6\xdf\xfd\x00\xe2\xdd\xa8/\xaf\x9f\xac\xd8\xf2\x9e\x07_\xf0\xa2\xc4\x87\xc3\xc2\x9e\xb9\x08\xaa\x8b\xf4qn]\xdd(\x91\xedj\n**h\xd7\xf6\x18L\x9cd_\xa6\x05\xff\xe5\xcd\x18;\x0f\xee\x0cqt\xfc\x0e\xe01Y\x86\xbf^\xc1\x9e(\xea\x8aVx\xe8\x1a\xa33u\xb6\xa0\xad\xd5\'\x97p\xfe\xe3\x1f[\x12\xd2\xb7K%\xf11\x1c)\xbd\r\x14\x10=\x8f\xe0-\xd8\xa6\xa3\xd0\x96\xb0DH9;\x0c3e:CHc\xe5)yX\x17\xf7\x18\xcb\xaec?\x81\xecR\x03\xb3\xcd\xf7\x1b\x89\xdb\xb7\xa3\xd9?\xbc7G\x94k\xed,4\xed\x0c\xa1F\xf8\xd0}2K\xefU\xb8\xa6pLuw\xa5\x11\xa9\xeb\xd5\xb70\x8f`\xbb\xf8! 
\xc1\xbf\xdd\x92k\xaa-\xf9aZ\xd0\x0b\'a\xf7\'n:D)\x06D\x93P\x02\xacR\xec\xc0\xb6\xa4J)K\x87\xe0RY\xfb\x92y,\x86CT\xa0\xe9D\xc6o1]\xc9\x0bZ\xed\x92\xf9\xc7\x95\x0cM\xd4.%W\xb7\xb6^\t9\xe60+g[\xd6s\xfc\x02Xd}\xfd\xf9\x03\xe3\xda\xf5\r\\\xea\xbb\xdf\xf1\xa9\'\xbaH\x03\xf5>\x00\x9b\xc9\x166\xf9@\xa8\xb6uz!\xe2\xfd\xb0\x89\xfe\x91]\x0b\xf2\xc4P\x8e$(\xeb\x84P\xe7v\x18Y\xee\x18Y\xccR\xf6s\xe3\xd0\x12$\xfb\x87s\xb6\x82\xb4Ul\x14\x89\xaf\x9aq\xf7\xdb\xf9\x85\x87\x93\xce\xdf\xfd\x98\xc2BM6\xff\xa6\x0c\xa2\xaeN\x0c\x12\xeb\x0bEH\x82\xac4\xe57\xb10\xcf\x08\x1dP\x18\xe2\xa3\xd0z\xf8\x8ep\x01:.\x19\xd0\xe6\n\xddMm"\xb2\x02s}\xbd\xf8P"\xe6\x94Xv\xd1\xb3\x1c\xb1\x81W\x99}\xb07\xa9$)\x9d\x99Y\x00(\x056\x0f\x1d\x7f?\x077\xe7.\xe8\xfe`2\xb2\x8b\xbf,\xe9\xf4\x83\x12e\xb0\x92\x06\x00\x8d\xb8\xfd\xe1\xca\x91\xe2\\\xc6\x83\xf5\xddq\x14kS\x03\nF!\xa0-\xa3p\x00\x0bG`\xc3\x86\'\t\xb6\x85\xc5\x15\xab\xe4|S^*\x88\xb3\xc2\x86\r^U\x03\n\x19C\xf0\xe6\xe8\xcdK\xaa\x05Vym\xb0\xb4\x99\xd6|\xba\x19k\x84\x96=\xed\xdc\x19\xb9\x94\xf00\x98\xd9\xcev\x16]\x7f\xa7\x1a\xed\xa0\x88\xc8\x89\x05#\xc5\xd6\xbc\xac\xc5;(;\xd6\xb5\xd8\xf4\xd50\x19;\xb4`G@-\xdb\xda\xb9P{\xaf\x9f\xa36LW\xcfq\xd1\xed\x800\xcb\x1dxe\xd4A\xf9\x1c\'s\xd5\x07[\x9c\x84\x8c\x8b\x0b\xad@vX\xe3,T\x8az\x19\xa9\xd4fX\x98\xb9"0A\xebl,\xd0\xea6\xfa\xfc\xddN)\xab\xb2\xf9\xe6+\x01\x17U\xfe\x00\xfd!4\xc2\xd4\xfc\xca\x1b\t\xb0A\xa3\xdeX\x85\x007r\xc1r}q\xbd\x19S\xc1:\x8e4K_\x0b\xe35=\xb4u\x0b\xc3\x1c\x81\xc79\xbb\xd7^\x95\xf9\x81\xc8\xc5\xe6\x15% 
\x01\xdd\xbbN\x0e\xaa\xf76\xecfR\xc1\x91g|\x92~\x1c]~\xc3\xc3s\xa4\xeb\x9a\xe8\xca%\x8a\xdc\x82E&^>\xb1\x8b\xd3\xd7\xec\xb0\xf1\x08\xba\xab\x0c\xca\xa0\x9eH\'\xd3)\x92\xc7k\xd4GSO\x80Z\x9c\x9b\xc6\xfdm\xf3\xf2\x81\xa0\xc4\xc3=;V\xadB*+\xbb\xba\x11\x07\xb7\x94v\xd2#@t\xfce2\xba\xb1\xb5\x03\x1b\xb3d;\xd1w\xb7\x06\xf8\xc3>\xb3\x1d\xa14\xca\xc3\x13\xfb;\xdb\xf6\xb81\x7f\x05\xcaO\xd8\x9dzk\xe1\x1bQF"\x95\xdf,\x98w\xa3\x13\xd2\x82\xb1]\xe1\xd4rY\xca{U\xc9o\xe5K\xcaa\xe3h\x83u\xd9\xd5\x1f\x7f\x95iu\x8a\x00[\xc5\xdbQe\xd1\x91v\x19\xe3\x1b\x82[zAW\x85\x1b\x90\xee\xffgv\x94\x98*j#\x1d\xfb\x06\xdc\xdb\xdbo\xef\x7f\xc2\xbc\xe4\xf6\xd3\x89\x99\n\x92_\x83}8\xeeYl\xaf\xe1/\x8aF-\xb8\xbf?\t8\xc0\x12\xc0@\x86\xfa\x9a\xbcEfw\xde)\xc1%\x80\xe4\xdaE\xa6\xb9\xc5\x91\xb9If\x9b\x048D\xbb!\xa6\x954c\xa147\x14\xf0\xc9\xd0_\xb2\x93\xd4\xb5\xc5\xd4)\x12\xdd\xf8l@\x90\x82\xa8X]\xfe\xe4N3\xee\x17\xa5\x18\x96\x7f|W\x06\xdd\xff\xc6\x92\xb4\xf7\x10\xd3a\x8cp\x0b\x04\x88\n\xa8D+6\x94+\xc0\x00\x04T<\xba7\x17\xce\xcc@\xf8\xc8\xae\x14\x81?\xac\\\xfb\xb5\x9d\x9d%\xdd\xcf\xc1Oe\xfa\xda\xce\xcec/\xe3P\xe9\xc5<\x18{\x1e\xa8\n\xd52h\xdb&\xee\x1e7\xa6\xfc\x0c\xe4\x81x/b\xa5\xfe9q\xd6\x153^,\xb4\xe4\x8c\xaa\xae\x99?\xea,\x89h`gP\x89e\x12\xc1Z\x93H\x19mz\xb8\x8a#+\xdeJ$1\x92dU:\x86\xd72x\xb5\xfdj\xa2\x1c9\xa1-U\xb2M07\xfd\xf2\x1c\xd9\x17\xdct\xd2)\x99\x0c\n\x87\x1c\x86GE\x11\t\x08\xe7&\x8a\xdf\x82>\x86\x04\xa2\xa11\xbfK\xa6\xb0Is(V\xc7\xc3\x97\xb6i|\x8a\xca\x0cS09L\xc4{\xfe\xc9G\x0b\xb48&G\x1c\xbb\x81\x8bL\xc9\x08!\xceIqS\xc5\x9b\xd0\x88rt\xf3\x1f\xee\x1f\xacF\xdb\xd8\x91t\xe9\x1a\x04\x80\x13\xa2\x87\xd5.\x96\xe7/\xf6\x1b\xd2\x99\xf5\xd3E\x10&\xd1\x9e_C-\xad\xebP\xc2\xa0\x0b$\xfc\xbde;\xe1\x8a\xa9{\x8a"\x9c\x8a\x83q\xe02\xb0\xc5\x0b\xa2\xbc.\x99}]\xbc\xb3+R\xd7\x7fQ3+\xdc\xe4/3c5\'\x7f2\x18\xb8\xaa\x0c\x9a\xb9\xcd\xf4\xba\xaa\x05\x99=\xb6\x8d$\xfdzdP\x0b9G8\x985\xa2<\x9a\xa4\xa1(\x0c|\x92\xa14\xf5\x9b\xf2k\xcf\x93\xc3-\xb92\x1c\x8a\xf6\x9b\xb0 
*/ui\xbdm\xc1@o\xc8\xec\xbe\xc0\xf7\xe4#\xbbS:\x14\xb8A\xfd\xac\x92\xc1A\x88x\xe8P\x16\xec\x158R\xbcCl\xc5\x16\x15\x1a\x18\xe3:\xfbqW?\x8bSW\xcd4\xe3\x19\xc858\xdb}9>\xb7V\xaai\xcdv8\xf0\x95-\x83\x06\xbc\xefwt72;\xd2\x88\x10\x18Jv~i\xcf. %\xf4G\xca\xb1\xf4\x89&\xc4n\x91\x97ww\xef\xe9T\xe67V\x05\x9b\xb4\x8dF\xbb7\xd6\xf4\xc9\xbc\x88\xcf\xefl\x8d\x99\x19\xe3\xacBf\xcbC\xebo\x10\x03\xe031\x1fMW\xcd\xa8\xb6\xe5E\xec\xfa\xd1\xb4\xdd\x04\x92F\x9c\xf2\x16\xe6\xac\xe7fn_\xa5\x0f\x01\xee-8{\xc3dC\xd3\x81\x9b\x82k]\xb0\xbd\xac\xd8|\x16U\xe9\xe2\x07\xfa\x9b\xf6\xe9u\x89\xe7\xce\xc0\x10c\x94\x8d\x12w\x0c\x8f \x19MX\xc4\xa1hL7\x997c\xf6\x19\xbb\xbc\xda\xcbT\x14?\xb4\'\xe0j\xa2\xab\x9f[\'f\x16\xdb\xe9\xc1;k\x93;8\x12h\xf8\xa3\xf4p\xf1R\xaf\xda\xfd\x0b\xb3\xc8\xbfP\xc9\x84z"\xa9YEN\xd2"\x16\xd0\xda"t\xa8\xddU\xd5\xbd)\xddE:\x80[\xaa\x1d\xaa:\xebs$\x18h\xe7>7}\xfb\xe6)\xc3hcIZ\x87}\xf8M\xfb\x0f\x8d|?9/t\x0e\x06\xba\x7f\x0eY\xecV>\x8eI^/\xcf0j\xc6\xe5x\x85\x19\x16;T\xc2\x8cX_\xc4\x18\xbf\xe4\xca\x89\xe5\x10\xa1\xfc\x8b 
$\x99\x87\xd8"\x81\xfb\xc2p\xc3\n\xfa\xe6\x84ga\xf7\xa0\xf9\xdf\'s\xb3\xf6\x18\xc4e\nY\xfc\xeeU3\xeeEa\xbd\xabV\xc0\x98\x0e}\x85\xe5\x03\x92c\xe5\xebY\xe9@\xcc\x07\x91`cz\x98!``+\xec\x90\xfcW\x15P\xeaB\x96\x82)\x1d&\x87\xb0+\xa6%\x06|eU\xc6S0W+v\x1e\xb9K\xa9\x8a{]y\xbcuK@\xa92\x19&\x02\xca`\xa2\xc8\xf8&\xcc\xc5f\x82\xe4Zb\xee\x81\x84Q\x94\x9ebyw\xce"\xbc\x1a-r\x0e3\x9f\xa8\x8bQ\xa0R\x99\xef\xa2\xc5\x10B\xfel\xaaC\x81\xaf\x1b=}(\x04<C)\xa6\xb6:\xb0\xd9m\xecN,\x10\x1f\xf9\x9fvpY|\xec\xbd)\xcd\xd8)\xf2l\x89\x02\x8b\x91\xc1L\xd8R8Y\xe1+i\x98r\xc6\xbd\x93\xf2\xc1\xa9\xa2\xaa\xf6\xab\x85\x16,zB\xda>\xf6\xdb\x8f,\x90\xf1*\xe8z\xf9\x92$\xfbm\xf7\x80\x1dEP7\xeb\xd9\xe5\xd2>\x98X\xb6\xadgB\x8ah8rl`4$\xc2\x83D\xc2\xf8"\x9f\xdf\x9d5\xef]\x85\x1f\xcbf`b\xc2\xa9\x02\xe4k\x1f\xda\xf1L(\x0c\x1d\x95`T\xcd\xb8\xc3\xbb\x17\x15\xa4\x12\xa5\xf2\xfc\x88\x1c\x88\xe8BA\xe7T\xc7mp\x94Y3&HS\xa3\x19Z\x0bu\xf7\x0eQ\x9f?}z\x1f\xbb|\xc1\x8e6\xe7\x03S\x1f;S\x95\x88u\xd1\xfcPw\xb02\xec\x9d<\x86\xcc\x9c\xb6\xf3?PkI\x1e<\xb6\xad\xa6\ru\xca\x02j{h\'\x12\xf2y\xd2\xfdD\xb9Hi\xf6\xb7F\xa1\xd6\x95\x8f\xd1[\x97\xc1\x01B\xc1\x93\xb4\x18\xca\x9e\':\xa5\xa9\xfd\x06A\xbf\xb4\x19O%\xec\xba\x0bo\x8c\x8f\xb1\n!\xea(\xa8\x89\x84d%<U\x8f\x11\x0c\xa3{/\xfc\x16\xc97k\xf3\'\xfd)\xf7\xeeH\xd6?\x01x\xd2_\x98y\xf0^\x19x\xd7w\xf7\x11KP,\x95\xd5\xd5\xbf\x16P%Hj\xa2\xe8\xc3\xbd\xb8\x8emz\xf1\xf4\xed\x81\xd2[\x92\x08\xeb\x87H\xf8\xd0\xe5\xe1\x01\xeeXe\x92\x86l\x9f\x04\xebq\xcb\x0c\xbe\xab:fD\xf9p\x0fuc\xf4\x8d\xb6\xd9\x83e3+\xac\xa0 \xfcf\x0eS.\x83\xe4\x96\xa7?\xe1\x94\xbe\xc8\xc7\xa3Q\xac\xce;[%\x1b\x02\x8a\x97\x01LR\xc8\xe3\x99\xa0\xea5\xb3t\xad\xfb} 
,\x82\x80\x0fnoq\xc1\x8e2\xdb\xb4\x00\x1b\x9c\x1b\x97\xa39\xbb\x17\xbe\xfd\xb00\x05]\xc4\xea\x14)\x06A\xc5B\xc9\x03\xd6\xc1o\x7f\xd2\x99\xa2\x0b\xbbjC\xa2\xed\x0bL\xbb\xdb\xa9$\xc5\xb97\xfd\x90h.\xda\xde&\xc9M\x10S\xd9d\x9b+\x84\x87Q\x19\xfd5t\xa5\xaf\xde\xcb\x98\n\x8dK\xc8\xad\xaa\xaf^h\x8cwK)\xb2Df\xce[fC\xfc\x0c\x8f\x83\xe1\xa2\x03\xc6d\xc8\x0bZy\x12\xe8\x9dR\xec\xbb\xff<\xd3\x88\x99hg\xa7F@\xdf\x03!\xab\xa0\x86\xd6\xa3\x9dY\x03>\x0fd\x84\xff\x8f\xa9\xeb`\xabb[\x96\x7f\x050\xa1(\xce\x9a\xbc\xcc\x04\x91c@\x05T\x94!LZ&@D@\x11\xe1\xb7_\xaa\xbafs\xdf\xf7\x9d\xfb\xceQ\xd8{f\x85\x0e\xd5\xd5\xd5v\xdb.>{\x12?\xf9Aw]\x90\x971\xdb&.?\xd4aZ\xd50\x9c\xa8F\xca\xcd\x11\x0cl.(-\xc8`\xa5\x94T\x8e}\xe5\xf5\xe8\xf3\n\xd1G1d:9\xe8\xae>\xad4\xec\x1c\xeb>Y\x89\xfa\xf9G9u!\x9c\x0b\xd9Ws\\\xddhl#"%\x1a\xcc\xcc\xdc\x8fs\x05dH\xa4\xfa\x8f\xdf\xcc\x00R\xe8\xa6\x7f\xf0|\xd2V\x9f e,(\x85\xf4`U!\xe9\x96#\x9d1\xea\x9db\xb4\x1c\xf9\xd7\xaa\x07u\x85\x9f\xbdu\xca\x08\xe0\xca\xe4\xa7\xaf\x16\x89\x05\x08\x87plTa\xdf>\xd4l9*%\x9e\xb3\xac\xda\xc5\xca\xc8;\xe1&|\xcc\xdd3T\xd1\n\xb0\xd4\xc8\x7fC\xe0\x1d\x85\xe7\xf7\xcd\x14y\x02P,\x15C>\xb9m>\xc8\x9a_\x04\x92\x07\xd7C\xa5\xd9\xa9r\xf2\xe4\xa3\\\xc3.]\xe7\xf8S\x1c\xd5\xb0\xf4\xca\xfe\xb6\x15\xc4\xed\x11\xc3\x10\xb1T\x1d\x90I/\xe1\xa3\xdf\xec\xec\xc9\xae0\xc7\x08\x93_*\x8d\xa0^6\x03\x1d\xc2\x9b\xd3\xb9#\n \xdc\'\x0fm|\xb2\xd2\x04\x16[ \xf2\xddr\x19B\xf1D\xd9~Q\xdb\xd9&_\xa1\x9b]9U\x84\xe5\xecH\xb3m\xa9\x90\xeb)\xc1\x84\xe4,K\x96\xb3\x8e,\xee F\x9f\xc8\xf8\xab\xf6\xc9\x8c%5\xbf?(\xe4t\xf9\xed\xe3\x1e\x13t\xd2\x8f\xf4\xd88\xa5\xe5\n\x06DS\xac\xa8=}\xb6}\x1c&\x10\xc6\x9a\xcc[\x7f\xbe\xb2\xbd\x89\xbd\x9f\x94iOy\xb3oT#\xd6)\xbc\x91\xa5\xea\xcb\xfc\x83C\x0b\x92\x19\xf0\x84\xaaJ\xab\x11\xe5\x9b\xec\xf7\xfe6#\xc3Is6\x8d\xdb[\xb2\xbb\xe2\x88\xdee\xff 
\xf0I\x9d\x82|YY\x83jh|e\xbf\x06\xaf\x8d\xc8\x96\xf7\xc4\x17\xde^Y-{x\xcd=\xa5\x88\x90\xd1\xed\x15m\xb5`a\xb4\xa2\xe2\xf4m>\x91\x9a\x99\xa7\x11\x83\xc5\xa5bN\xfcp\xb5\xd28\xe7V\xe1\'1jR\x08\x91\xf9w\xcf\xec0PA\x85\xa88\xf8y\xe1\x96]\x18\x8eT\x02_\xb8\x13`\xde\x97h\xd1\xab\x05Nx]\xa1F\xc1\xba\r\xb6B\xa1$=\x10\xa4\x87#\n\x18\x96\xaa2vc5\x01<*\x90x\xa2\xff\x92m8\xee\xb6=H\x89,\x9e\x08\\\xd7}\xb5\x8f\x0f\xa0\x17\xb1\xd2\x83O\xec\xce\x11\x02\x13\x11\xc6\xdb\xfbG\xc3\xb4\xe8!\xec\x0c:\'\xa4\xdb0\xce8xR\x8d\xda$\x1bT\x9c]\xf6\xf8/\xcc@}~\xf6\xe6\xcb\x14\xee\x9d\xd3<GR\x85#y\xa9H\xb9fj\xc7\xdbC+\xb3f\xebZSI\xb4\xf3\x94\xde\xa0\x9c\xfdoG\xdb\xd9^Az\x10=\xad4\xcdu\xc3\xae\x023\xd5\xfa%\x10Mxf\xce3\xca\x11\x177$\xad\x97\x98,\xd9\xe4\xab\xff,\xafs\xec\xf2\xf5\x95\xe4]n\xdb\xa3\xb30\x1a\xdbr\x93\xf3\x8d\x0f\x01\xd3=\x8a8Nmg|\t&h\xac\x1a\x89\x0e\xb49z&rbB\x9b\xff\xcc<w\xe5\tl\xfb8\x13|\x12\xeb\x9f\x98\xb1\xf0\xc9;\xc4\x0c(u3\x95\x00\xcb\xb8\x8f\xaf\xed\xdb\xbeF\xe2\xb7\x0eO\xd5\xd6W\x8eq\xdd.\x02\x8d\xdfv\x8f\xa8A\x821HTD\x03\x0e6\xf0\xa2\xdd\x10\xc2%\'\xb3\xab\xcc\xa0W~\xed\xebt\xa3I\x8e\t\xb0\x1fOg\tw\x12\xf6\x01\xd8\x07a7\xd75{w\xe7-T\xa5\x88\x1c\x1b\xd5#{&\xda\x9d\xda>\x9b}L\xed\x87\xcf:\x85\xa4\xfb\xe9\xaa\xb1\x13\x1d:\x0f\xe5@pbR\x02|\xb8~l\x9f\xd0\xe4\xef\x1f\xdb\x867\x0e\xdc\xe1^\xd8I\xc9\xc6\xc8\xf0R\xef\x0e\xe7\x87A\x86\x0c\x1f\x1d\xf8\xf8$\xe5\xb3d\x13)C\xc7\xe7\x93\x90\x9bW#ILFz\x0cL\xf7\xb4\xa7\x80\x00\x90a\xd2N$\x1c\xc8\t\x98%\xad\x97\xe0\x9d\x1d\xecS\xb8\x02u\xba\xe2\xd7k{`\xce\xbf\x83\x8a\x1b\x8b\xfa\xec\xb0`\x12zU\x01Qi\x06\xd5(\xa2\xab\x16\x87y8\xf4~\x98]i\xe2,\xbfq\x86\x17\xcdN2\x1dMP1(v\x14\xacx\x8680\x86\x01\xd5>*\xac\x87W\xbf\xcc\xd4\xb1w"\x9aBSD\x07\x90\xbd{\xff\xc4\xbe\x95\n\xac\xd1\xd0\x98\x10=X\xb4\xb8\xdd\rH\x19\x18\x93apW\xd85\xd6\x8a\xa9\x99M\x81\xf5\xfa\x9a\xa3y\xbe\x01fd\xbf\xe4-D\x8d\xdc\xbd\x9fO\xd7\xccc\xd5\xf1\x14\x8b\x9cA\xc96\xc7\xedB\x90\xbd\x8e\xff\x01\x93K\x16\x85\x00#\xe6K5g\xb2\xe4\x18\x15\x07\xa5\xfd\x81\x8bR\x0f\xc6\xae1\xe3Bm\x0c\x92ZI
\x1eF2\x03i[\xeeZkQ\x08\x01\x18\x14\xf1Y\xdcf0\xd3\xde\xb5\x94\x81\xc1)\xea\xe8\xae\x9ex\x8b\x87\t\xabKv\xf8Ie\xecV\xf0 a\xb5\xd2`5\x01FT\xf0h\x9f\xdd[\xaaD\x83D\xf9\x05\xa5\x03\x92B\x99\xbd\x89=\xc3\x93=\xfcY\xbf\xf1Z\x80A\xa18\tE\xe3\xd2;\xb0^\xd0\x00\x00\x9b|\xe1\xdf\xa0\xd8\x18\xa0\x9fMl2\x9d\xba\n%"\xf0\r9\x1f\xce\xdb\xd2\xb0js\x11\x12\x8f\xefX\x1e\x17:w\xa4\xe88\xfe\xf4\x0b\'\xc2\xeb\xdc%\xaa\xb9\x84\xb5%\xf6Y\xb2\xd8\x1c\xdc\x8f\x95\xb1u\xc4\x01\x05\xeb\x95\xf7\x07\xbc\xf2\xfa7\xe5\xee\x8d\xbe\x0b\x15\x02\xb2\x84T_j\x88\x7f#\xcc-\xc5\x95ah\xdd\x1b\x88\xcf\xf5)>?\x19b\x81\n\x13\x0f\xa9\xf7W\xfeD"\xda6?\x90\x05\xb3\xee\xd7Z\xa5\x81\xb5\n\xb0\xf0\x90?\xf13\x83\xc5\x19\x94{\x1b\xc6\xb8PL\xc1\xd9\x0fu\x02\xba\xd8\x82\xd8v\xf2\x9c\x9dj6<\xa3\xb8<\x80\xff\xd4\xdf}\xf0\xc1\xbe\xc7\x84\xcd\x1f_\x95\r\xf47\xbfX\x8cLUV\x18\xed\x8b_<<\x86\xb9\xbe\xf2\xe8\xc9\xd4]\x81C\xcd\x03\xdc\x121\x8aX\x0fh\x9eK\x85\x80\x8d\xe2\xc5\xf1\xfc\x13\xf3\t6\xd8R\x90p\x94\xfdZ\xc0}\xc0d\xaa(\xda\xc7\x99c\x9bER/pr^\xad\xd7j\x04\xaf\xbbf^\xd7:R\x81\x8b\xedZ\x8aq\x99\x98\xe7?\x8f\x04f\xc5\xd5h\xe4a\x1b-\xff\xb3t\x9d\xf1A\xf1\xd5\xeaU>=;\xc6\x19\ng\xfb\xc8\x08\xdc\xc5\x11\xbeQ]N\xa3q[kO\xe1\xad(\x8f8d_V7<\x94\x19j\xaaK9l\x82\xe0\xe0\xa7\x97\x8b\xc2\xc8\xfd\x00\xa3\xf6\xbb\x84\x08a\x01\xda\xb1\xe7\xd5H\xcc**w&\x1bP\xfe\xd9\xf9\x90\xe2\xdb\xd9\n\x9a\xc0l\x10y\x83A\x0b\x7f\xec\xdb\\1fW\xd6\xc5\xf3\xba\xee<\x90\xeb|\xeaDV_\x0b\xc1\xaa\x04\xf4K\\JD\xcc\t\xf7@LB\x84\xdd\xcb\x80\x97v\xfa\xe0\x19\x06\xf5\xefa\x8eX\x1f~1\xdb\xaf\xcc\xf82#m\x9f\x8bU\xd4)\xc8\xb6z\x8b]\xe2\xa0t\x8e\xc2n\xd9\x80\xb2A,\x81\x89\x1e\x03O\xeaw\x03\xc6$\xd3U\xdc|\x0f\r\xf1N5F\x967\x99\t\xf6\xb7\x8ej\xfbd\xdfX\xe4\xd2\x08\x96d\x0bv\xaa(\xa6\xe4\xef\x01\xfa\xef\x1f\x0eQ\x9cEJ\x04?\x8a\x17%\xfbA\xd2K\x8bF\x06\xca\x88\xd0s\xf7\x1f\x0bK\x1e\x87\xa1\x06\x17\x15"\xfd\x1c\x16\xd4\x90\x08#@\xd5+^\xe5\xf8\x83Bo\n\xfa:\xc9\xc0\xed\xc1\xbd\x99\xe5M|\x04\xe0w\x9f\x7f~oo\xd44\xa0\xde\x83\xc9A\x05\xc7DU\x17\x07L\xad\x9d|nN\x8e\xa8\xd0\xd0\x1c\
x90)b\x87\x16\xc2PB \x07\xa24c\xde\xb9\x17\x16\x19p\xba\x02\xfe\t\xeb\xbf\x8f\xedgB\xb9b\xa1\x84\x8f\xef\xad\xbd\xc3u\xed>=\xc5rm"zu\x0bo\xa5n\x14\x0c\xc1\x95\xfc\x0e\xedR\xf6\xca\xfe%*\x8eo\xff\xb3\x03\xc5\x95&U\x84\xff\xb1q\xeb\xaa=j[\xec=#\xd5\x84\x05\xcc\x1c\x83\xf5\xca\xf4\xa7\x02\x1e\xb68\xb6k8\x19K\x0f6\x15\xd0!\xbb\x08y\x1f=}@\xe3\xb9\xa7\x12j\xbc?\xa7(\xb7\xa9\xd4\xb38!\xab\xd54\x88\xe2\x9a_\xc8\xf9\xd2\x9b/\xec\x84\x1a\xb5`\xc2\x8e)\xf7\x85c\xf7\xca\xf4Ztp\xdf\xb2U\xabH`\xfb\x0f\xc9\xa3\xc7\xe9GhD\x18\x02\xf6\xa3\xd1\xc5\xeds\x15s{\x95T\xc9\xfbi,\xaf\xe8u\xb2kLR\xf5\xcdo\xbc\x1b\xf0Cr\xca\x93\xe9\xcf/\xfe\xb0\xa2UT\x87\xbd\xfd"\x97Iw\x8a\x9bT\x9b%f\x8b\x00I\xe1B\xf7\xd8\xa4\xa0Wa\x87--\xc5\x1e\xed\xf8\x8dc\x81-5.n\xb8\xab\x9b\x96\xcf\x1d]7\xc3O\xe7\x81\xe3\x80V\x1a_\xfc\x9dQ4\xed\xa6Y\xdb<Q\x1eM\xce\xcd\xd1{<\xf3\xb6\xd0\xbe\xf6\x8e\x19\xb0\x08\xb9\x8a\x1fr1\x9699 \xa6\xf3O~"0\x0c\x10\xfa\xe7\xd8\x0fe\xfa8\xe9\x1a\xe7l=u\xd8\xb7\xf0Wl\x86\x86\x9cpo\x17\xb3+\xde\xa23\x00\x88H\x9d=~j\xc6\x88U\xa2L\x06\x86\x08\xfd\x90OS\xc5\xb2\xb5\xa52]\xcb\xb6\xbf\xff\xb9\x12o\x9biR\x03G\xeed\x17j(\x0b\xd66U\x8f}\x8fp\x06\xee\xefss\xc76\xa0n3\xb2\xa7\xa58\xee\xc0\xc6\xb3\xc8\xeb\xc2\xfe\xa5\x8a\xd2\xd1\xea\xd6\xaa\x98Cn\xac\xbf\x8acV\x9c<aH\x86\xba\x84\x07\x04\x18\r\xfc\x13?\xaf\x04/S\x15\x04\xd9l\x13\x1f>Q\xca\x8d\xe8\xaf\xae\xb5\xdd\xa5:\xb0\x9cB\xc1\xbe;X7\xd7\x0c\xa7S\xf6\xac\xe30\xa7\xcb\xed\x82SiM\xcdN\xd6Fb/\xc1\xce\x8c\xf4\xeb\xed\xc6nK\x0f\xd1\xdd\xb2\xdd\xfa\xae\x80\r\x16>\x88\x95\xe9k\x8c:f7\xf2Ho\x87\xa1\xc8\x89\xdc:I\x1eN\x11a\x8d\xb0!h*\x07\x13\xc2D\x9b\xab\xe8\x80\x11x\xfa\xea\xb7\xe2\x05\x18n\x90\xeb\xea(\x9a\xff\xbc\xact\xc6\xa9PT\xda\xbf\xfb\x18"IL\x92\xf3G\x16D)K\xe2*\xc2vR8\xa4\x01Q\x85\xac\xcb\xfe\xdb\x8b_\xeb1\xe5\xf0O\xb0\xb1o_\tg\xca+\xb5;\xe1w\xbe\x98\xd3\xae\x078:`DQ\xce\x89\xdb[\xb2\xff\xb5=Q\x90\x07rH\x8bz\xb7\xbdph\xfe\x9a\x94p\x0e\x0e\x0b\xca\xe6\x93jD\x05\x0e\xfe\xc7;]\xf9\xec\x15\x8d\x1a\x13;\xaf\xe0E(!M\xbf\nzCoE\xa3\xe3\xd5w\xaf\xed\x19F#\x
dfp\xa3\xd9\xfb\xde\xdd\xab\x0ef?\xbf\xa8m\xa1[}\x10\t\x8d\xd8\xda\xfc\xd9\xdd}I\x9cP*\xa5\xb3\x9c\x86\xe5,\xaf\xb8\xc6tR\xbf-]\xc3\xcf\x8f\x7f\x90\x1d\xc19\xcaO\xf6\x95\xf5\x94\xf6\xfe\xec_\xc7f\xa2\xfc\xd4+\xa3ewpViJ*\xb4\xcf\xb1\x12D\x0e\xb2G\xc8\xec-XE\xd1#_\x18\xc7\x1a\xedW\xe2\xa8\xe1\xa9@\x13e\xa9\xbf\xb7\xe3\x19\xa5\xab\xd1\xaf\x85_\xf6"\x83\xc1l\x15q\x12\xf3t\x1f\xed\xa0\x19\x99\x1e\x0fp\x1d_\xb0\xaa4\xaaD\xc7M\xd9/\xfc\xa6I\x84xh\xafJ\xea@\xf7c\x08\x8d\x9eJ/\x00\xc6\xb9\xf3\x13A\xf1\xaa\xa13\xcep\xcf\xc6\xef\x0b?cu\x06]4u\x9b@\xe0\x8f40\xd1~h\x90\x81z\xbah\xe3\xe3\xdfj\xef\xfd\xd3\x05\xdb\xf9Fw\x848/<\xb8\xdf\x10\xfa\xdbM\xcc\xff\xd1\x89\x1b\x98\x05\xf8\xd9\x0c\x03^\xfc\xd8\xbcEr]\xf9ye\xc56\x8f\xdd\x1d\xb5\xd69\xad\xa4\xad\x8b\x15}eV\xc8eS\x8f\xce\xfe}\xbd\xbeUi\xa2\xfa\xac\xf2\xae\xccN\x0c\xf90\x8e\xbc\r_\x8dd\x9f\x9a\xb6\x1a\xc9\x87\xd6\xc9\x03+\xce\xb1\xff\x8e\xc8\x00\x9a\xfe0\xd1\xd8\xf2`\x86\x850\xf9\x8e\xbe\x1a-\xbc\xd6S\xdc\t\xc7\xab\xcd\xb5\xc02\x90`\xd4\x8d\xc9\xa5\xe6\xe3BZ(8\x16\xee(\x8bd\x01\xf1\n-\xe9\x01\x8e\xd2\xf7u$\xd9\xee\xbf\x7f\xcaK\x12\xfbb&N\x90\xe1-y\x05\xf0\xe6\xad\x1a\xac\xbbv\xfcg\xf5g\x18\x0e9\xf4P\xb3\x16\x94\xaf}V\xfc\xe6?\xac\x0c\x1a\x9d\xb4\xd7\xe5Z\x9e\xda\x0fG\x83\xe3\x19\xe8o0\x15<\x01\xed\x86*\x03\xe4.\xf7gK\xf6j\xbe\x84M\xed\xec+)\xfeC\xdb\xca\x82\xf6\x8a\xae`\xce8\xf8]\xbf\xf5\x06\x17ys\xbb\xdc=\xffN\xec\x10w0\x9a~P]\xea\xc2\xe0\xe4\xb1\xbf43\xd3\xc26\x02>&\x14\x91\xc3\x93i\xdd>\xe5t\xd6\r\x06\x06rv\xde\xd9\x1a\xb3\xaf$\xe8\xe5\x92iA\xc5*R7\xa8\x01\xb1:U\xa8\x0c=\x00\x8d\xa6\x86\xb1!\xe3\x06\x9b\xc5&Lw\x7f\xff\xcbG\xfc\xcd\x9cm\x15\x0fa\x98\xc14Z\xc0l6\xd9\x88\x11\x1d&\xb18Ah\x8c\x16Z\x0bg\x89\xd7%\xc0\x11\x08\xae%\xf6\xe9,\xe8\xb0l\x00SA{+\x1a\n\x83\xf1\xde\xac#\x03\xb4\x0eu4\\pJ\x90\xd7\xd7\x9f\x98c*\x9bH,I>\x17j\xb2\xbd<D\x97?yY\t\\\xbdO\xdf\x8c\x12\x12$49\xcc-\x1a\xd7\'\x17\x0bX\x1a\xb8FF`a-\x18\xb6\xd1\x98Ys\x1d\xea\xbc\xf9\xfa0b&1\x84\x8ae\xbb\x1a\n^\xbcI\xe9\xbfg\x7f\x947e\x96\r\x11\xab\x8a\xedD\x98\'\x03!<\xc6\
xd4\x19\x16\x7f\xcb\x1c\x8d\xccl\x1c\x07A\xa8!H\xda+wN\xae\xfa\'\xf6Z\xa4!aN\x1b\x0f\x1c8\xacD\xcd\x1a\xf3\xfeTI\x08\xed\xac\x18VLS\xe8\xe9\xb3\x9d\xf5M\xfay\x82\xd1\x7f\x10,\x902\xd8\x92\xd7\x17\xed+pI\xedF\x05\xc2\xd58p\x18\xbe@\x8aD\xc1\xd1]\xdd\xfb\xb5\xeapkyR\xa0j\xda\x89\x91\xcc\x91\x02-\xc1\x8e\x03\xfe\xc2\x81\xb2e\xf8\x16w\xed\x03~\xfe\x84A4\xc86\xe9s\xdd\x906}\x8b\x05\x8b\x00\xc3\xa1T\xda*/\xe2\x89\x81\xeb\x1e\x84\xfa]2\x83\x03q]\xe1@;57f\xdb\x13 f\xc7"X\xffh^|o\xef\xd6\xe7\x14\xcd\x90\xc0\xfb\xaa\x12\xb7\xa4\x92R\xcf%V\x16\x88A\xc5\xc4cm\x9b\xea\xc1\x81`\xad)\xb5\x95>x`\x97"\xb0\xa1\xa1a\xed\x15\xe6h\xd8z\x16\xc9J\xb3\xd2\x0cG0\xca\x97$-D:lZ!\xad\xea\x9b\xfd8\xcbSaO\x19\ns\xcb}]\x90\xd6\xb6\x81\x83\x071.\x89]\xa7\xd4I"\x8b\x01G\x94T\x82\xd2B\x01\'\xdb0\x08\x83t\xf2\xe6\x8e>%\xb5\xdbNm\x9c\xee\xbf\x19Xn_\xce\x80\xbf\xe9\xf2\xd5#@X]\x84\x1c,$\xfe\x9e}n4pd\x08a\x049\xb7\xd6\x92\x18\xde\xbc\xaeR\x95\x13\xb9\xeaq%e\xbd}3\x86\xa1\xbf\x84G<\xd2\x1b\x9cH~f\xc9r%\x04q\xad\xf7s\xfa\xa1<\xb6*\x1d<\xc2\xf2\x7f%\x118\x140\x0bT\xbfr\xd2+^O\x0cV\xd0\xbe\x86r%\xc8\xdc\xfb<\xdd\xaf\x0eNg\xb1p7\xb1\xa4\xc4t \x9aA\x155g\x1e\xa4\x1fj\xd3\xee\xcb\xf4N%\xaa\xa2\x9d\x86\x12\x1c\xea\xc6=[\x07k\x01\xfa\xbd5\x87\xc1\xfb\xe5\xaf\x0b\xb2\x98\xd4\xb9d\x96\x16\xd3\xf9\xc0\xe2\xee\xda\xb1\xf4\xe5\xdd;\x16\xa5\x91\x9a\x8fW\xcd\x9c]{\x96\xb7\xf2\x1d\xbb\xc3\x11\xf4WK_\x7f\xad-\xaf\xf5\n\xafHH\xc3\x145\xfe\xc7\xe0g\x0b;\x89\x8dJ\xde\xad\xd0\xa3\x16\xdfH\x95\x15\xf1\xd4X\xd2km\xed\x9b\xfc! 
\xd8@\xde{{f[V\xb3\xbc\xd5\xb4vb+\x1ba\xdaBk\x87\xae\x9f\x11\x11*\x02@C\xcbH\xf4U\xf6?x\xb1\xc4\x03\x06\x05\x11\xc9\x14}\x82U\x9d\xa2\x12\x85\xae\xba\x14\x10J\xecTX\x11miZ\x1eG\x15j7\x84\x86x\x08wVI\xb5\xe4\xcem\x8b\x1f\xe9\x832\xd0\x83\xc3\xd9\x871\x83K\xd8\xa1\x02(\xc6\x1b\xe4r \xb2*\x99q\x90Ru\x8a\xfa\xe8U\x8aO\xef \xf8F\xa9\'\xdaAF\x1em\xa3\xe4\xb1\x97;\xeb\xb3\xc1\xf7\xea\x99\x18[/\xb1\x94t\x84\x95\xdb\xb4\xfd!|\x1al[\xda\xac\xb8\xbbS\x8d\x18\xf0\x83\x89\xea\xd05N\xfa\t\xf2Z\x97\x1f\xac\xbf\xb4m\xeb\xc5~\xaa\xdb_\x0b\xa0L4\xf0L\xf1\xdf\xb5\xf5pS\xb6\x12&\xde_\xb5\'\xe8\x13\xd4~\xb3O\x8a\xef3\x1d\x1e\x18\x01\xa4\xc9\xac\n\xe7\xb6\x9bDY9\xbd\xb5\xdd\xe2\x91,x\xc5\x9e\xc9v\xe0\x83\xd8\xc0\xdb\xef\x9e>\xfeW\x8d\x04t|k\xf7\x80\xb3N \x86@^\x15.C\xb2\x0e\xaa\x18X\xa5\x17\x1f{O\x10Z\xb3hW\x9c\'.\x98\x19\xe8#ANT_\x08\xb6\'\x94r\x8c\xa7s\x05\xd3\xdd\xcc\x84\x9d\x0c\x12\tqd\xdb\xff\x84*$\x83\xd5\xa4\x8a_n\xcb\xdb\x17O\x96\x15\xcc2*}t\t\xe3\xb5\xf5<\xacc\xbf\xf1K@\xaa\xbb\x86\xceF\x87a|,\x13\xc4\x95&\xe1\x95u&\xbb\xd3\xa9\x06\xc9$O\x043r\xbb0\xa6\x96\xc0>\xdb1x1\xd1$F\xea7\x1a$\\-8\x90\xd0\x1e\xec\x10\n\xc7\xa4\x9b\xe0p!\xcc`\xb3`-\x94\xe1"x\xb9q\xdb\x8e\xa7o\xd3\xbb\x0c\x18;[\x83\xda\x86?\xa4*\xcc\xa0\x11\xdc\xa4r\xcc\xd2t\xdd!\xb2\xb3\xfe\xf4*\x94G\x05\xb3\x1a\xf72\x7f\x08\xe5\xb1\x9cU\x8f3K\xef\x88\xc9$\xf6\xc2\x1c\xc2\xd0\xc0\xaaE\xf1\xc7\xcb\x93[\x8a\xd7A\xbcB|\xc4(\xfd\xb0\xf5OI":\xd6)U\x85\xfeu\x9c\xa4\x12i\x05{\x06J\xa5\x17\x01N\xb9\x8b\x14\xac\xb2\xa8+\x9c\x9bg\x1c\xbc\xfbR\xe0\x9b\r\xb5\x98Q\x05\x81\xf4cp\xc0(\xcf\x8e8\x12\xfc\xaf\x96#_\xeb\xe7\x99\xa6?\xb5\x14\x8bl\xa2Y\x0bG\xca\xcc\x0b\xb7\xe04M2Z\x11\xfe I\xb6f\x8b\xb9J\xc4`e\x17\x04^\xa0^\x17\x92\x16\x84\xf3\xfa|O&\x80\x95\xbf\xad\xc9\xff&\xed\x9bLw\t\xc7U\xd7\x9bj\'\x88\xa8\xe8{#u\xbb\xb0w(\xb3C\xecdc\x87!\xa4\x94\xb6uP\xc7\x01\xfc\xbd\x7f\xe9=\xa2\xe4\xe5z5j:\xf5b\x08\x93\x9d\x1a\xc9<\xd2h(\x04 
\x97\xa9\xe8>O\xdf6\x1b\x1a\xc5\xde>\xcb%b\x8bs\xb0\x01*\xc3\x1ct\xcdG\x03\x9b\xb9\xe6\xccia\x86|B\x83A@\xf9\xea\x04J\x96\x82\xb1"\x9a\xaeR.\x19\x17\xf0"0?P\xaeW\xde\xc1I\xca\xfeSZT,O\xc0FL\x9b\xdf\x19\xc0\xbf\x10\x9d)\xb1\t\xba!\xb0\xc9\xed\x81\x9d\x8e\x088\xa1\xc7\xef\x90\x8cQ\x12\xed\xbaqRI\xa8\xab\x1a\xcdCg\x87pi\xb0\xc2\x9e\xf9\xc1\xda\xa9\x02K\xa1\xbd\x16A\x9b{\xad\xe0\x1a\xee\x01|\xc7\x08l\x0f\xc2\xcd\xecN\xf6\x02\xa8S;\xec\x84h\xcb\xab\xdfl+\xeb\xffC\xea\x8c\xa0\x84\\\x96\x05\xeaR\xb93\xfe\x96\xa2r\x9d\xecI_\xed\xd5\x00\xa2\xc9\x9e!_\xfe\xa5\xa1{\x84\xc6\xb8\xb0\xd1\x0b\x8b\'\xfc\x80\xcfu\xf7\xff\xe9\x12\xb1\xce\xd0\xa8\xae?t\xdcq}\xea\xef\x85b%\x98U\xa8?G\x83}\xcd\xf0\xa8\xe5\xd6\xfbJ\n\xa3\xf6*\x03}\xd3\xab\x0cJ\x03\x97\xeb\x15Y\x9d\xba\x851\x03\xc8\x93\xa8\xd9\x97\xfd\xe06\x8aT\xdaQ\xb7\x9f\x15f\x8fF"\x0f\xd5\xf8\xb6\xdb1\x0fN\xd7W\xff\xb5\xf5\x0b\xc2\x0e\xdav\xf7=\x81\xa5y\x8b=\xc8\x86\xd7\xd1w\xc5\xcb\xd9\x8fK\xb6qM\xf1\xfb\xca\xfdo<\x90{\xa4;\x93\x99s\x13_\xf7Bykf\xf1\xac\xd5s\x80\xeaC\x9e)@i\x87J\xb7\xf8k`\xfc\xdd\xa0\xa5\xceT\xae#?\xea\xd9\xb1\x05i\xbd8\x94\x04A\x95\xbf\x91\x10FD\xb9\x98}\x88{\x06\xcdN\xe6Oyu\xf0\x07\x8c9Nh\x1bnt\xf2\xf8\xe3\x16:A\xda!\xe4\xc02aD\x13\x07\xc2AZ\xaan\xe9DgY\xdc\xf9S\xa9]\xbe\xb2\xf9\x08\\\xf0N\x99\x01y\xb4\xa7S\xa5\x05M=BX\x17\xad\xbd\x937ct\xfbjEI(\xcdL\xac\xafi\xb6\x94z\xe4o6O~\x99E\xa4\x04H\xf4\xc6\x80\x8cH\xb5U+*\xdaE(\x05|t\xee\xeem4)\x94\xcf\x19}O\xda\xc6\x907\xd2\xa8JL\xfb\xe0,\x87\xb5\xa1\xc5\x95\x9a1,\x00\xa8\x03\x1a\xcb\x9dP}\'\x00\xd7\xd5\'?\x94\x86\x92\xf3\xf8\xb1\x92\xae 
\xfe\x92\xd4\xb3!\xed\xe6\xda\x95\xd5h\x04\x08\t}8\x92\x10\xac\xa8\xd3\xcc\xbc/\x8f\xb9\x9c@\x8d\xe6v*\xaerB!\x12A*\xea\xf5go\x10\xcc\x16s}%m\xfbkfmM|N\x04Bf\x1d\x04A\xff{\xf4\xce\xd2\x82\x90\xde\xb1o)\xf1\x1b&J}\xaa8\xba[\xf9\xfcB\x19\x99%\xf0\xdd\x1a,\x00\x1agJ\x82\xf5r\xb1\x1d\xc9\x80(x\xb4V\xa6:\x10\xbaW\x0cK\x858\x91\x00v\xcb1\x9b\xe9\x00\x04q\xe0\xe3u\x9a1..{\xbcP\xba(\x05sp\x8eX\xbe\xf5\xd4\x0e\x19\x01\x10o\xf6\xd6%\xb7\xb5$\xdd\xd9\xb5\x97\x8f\xb1\xd8\xa7\xe3va\xc8\x89dFZ\xdd\xb8\xf2\xd9~j\xe0(\xd6\xee\xe5OK\x86\x8d\xf7\x1d_\xc3\x9cy\xef\xce\x1e\xdama\xe2#?\xc8\xd0%<B\x18\x08\x9a;\x9f\xa7\x18\xbeT\xd9$?\x13\xec,\x023\x1d\xcb\xe4\xc4\xb6\x80@{LI\x8e\xdc\xf1\x1fs\xf0$\xa7%\x8b\xcf\xec\x04\xd7XM\x96\xfb8\x1a\x82\xc8WG]\x84\xfe+\xcc|\xf8\xb2\x8c\xed\xc3\xfa\xa17\x92\xc4\xe2\xb8\xdeOt,)\xadO\x82\x0bN\x94\xa9\xed\x9bu@\x80\xd8u\xa7v\xc5I\xa2\xcc\xf4.\x89]\xb8Z\x95\x1doL\xa5\xc3/B\x13\xb2J\xf3\xd2\x10\xaa\x94bKX(\xca\xd2\xc6\xcc\x83\xf3w\x02\t\xb3\xa7_^)X\xc8\xaf\\1W\xcb\'\xc4<Irs\x0bCE\x9a\xee|\x8f\t\xec-%\xa8p\\\xa8\xfc\xf2N\xe5f\xb4\xc8YCgk\xa30\x83\x13\xa4\x9cx\x88,\xb3\xb9c$\xb6\xcd\x1d|\x04a\xe4B\xc0;})\x90\xb2\xfe\x1e\xc3\xe7\x97\xe6Z\xea\x1a\xc5\x06\xc8"\x91\xb2\xdc\xda\x0epRM\xae\xebD[\xff\xef\xafe\x0f\x83\xb23\xc1\xde\xbe\x1au\xafDh_h\x1cg\xf3\x11!\xdb\x84YfB\xc5\xe2\xe1\xe9\x96<n.\x0c\xa3\xb1\xfd7\xe3~\xf7\x81\x923\xb6~\x06%>Q\xff\xf3\xb6\x9c\'\x9e\xa9\x06^\x17w\xe8c\xc1\x8c\xdb\xb6X"\t\xac\xb0\x0fu\x1d\'~\xb0\x1e\xfd\x8c\xbe\x0c\xff\x13o\x1cYjK,\x86!\xd6m.*\xb5Zz\x93JZ\xb1x\xb2\x14"\xdc\xb9\x9f/\xe7?\xdaz\x12ql\xffT#I\x89:\x1a\x10f\xc2\x0b\xe5~t\\\xfexi\x06?\x8a\xa7\xd7\xfeT\xa3\xb1\x0c\xadJ\x10\x8d\xf27N\xb6CI\x81\xfd\x89\x9d\x0e5.I\xfd\xde\xecq\xa9N\t*C\x0c\x99J\xc6\xe2Q\xb3\xbb*I\xd9\x81\xdf\xe7\xd1a\xd56\xfffu>"\xb9\x8e\xe2\xf9/3Yl\xa3f\xbb\x1e\xcep2\xf1~\xe7\x9a\x9dJj\x99$\xec?\xbe\xff\xb3\xd2\xec\xbc%\xb3C\xba\x9e\x96p\x957\x8b\xc3\xbb\xb60]y\x7f\xeb\xf9O3\xbd\xfc9]\x04N\xeec\xfe\xcc\x18\x1e\x83\xf9\xea\x08\xf3B\xa3\xff\xd6\x16~\xfe\xed\xd0BP\xd4\
xb7\x8f\xaf!\xb6.H\xb7X\xb7\x15p!z\xba\xb8\xf8@\x97\xdb+\rb\x8d\x84\x9a\x11\x9c\x13$\xa88\x14\x10\x1b\xb4\xea$~\xbd_\xc4\xb6\x061\x8f)m\x99T\x12\xe3\x15f\x8a\xb6\xb36:\xf9\x8a&af\xc4\xc0\x8c\xf8Mp\xa5\xfd)\xe2\xa7\xe8wucN\x9e\xae\xb6u*\x8b\x1d\xb4\xabw\x824zl\r\x99\xfe\xc2\xdd\x02\xf1\x88>\xdd\xb4\x9d""\x94\x08A\x12\xca\xdc\xe6[\x18@\xe4\x05t8\xb4\xf7\x86\xb4\xd5E\x8d\xd6\x10Ol\xda\xdb\x10\x9eG\x073\xc7Tp\xe6\xb1\x9c\x98O\xef \x80\x8c|\xfb\xfack.\x979\xb1\x8eM\x9b\x9d\xdd\x97\xa3d\x94\x04\x0c\x04\xca\xdc\x83\xaeR[\xbe\xc1:[\x067\xael\x8b\xcc\xd7S\xf7\xc7\x82\xd1\xd2V\xee\x00\xeb=\x867$\xab\x8b\xb6a\xa01\xb3\xc1I\x11w\x8d\x84\xac\xec\xbe=\xb3}\xaa\x1b\x9b\x89\x85\xed\xbdN\xdaq3\xff\x10\xf6\xb65\xe2\x97!\x16\x18E\xd1\rI6\xd6a\x98\xae\xcdZ@\xadd\xc1++\x8c\xd1\x1f\x10?\\D\xe1\x0c:\x00lgW\x8eC\xc8\xbd\xb5cV\xf7\xa0\xa9\xc5_\xd8\xf9\x12A\xdb\xaa\xa7bI\x9b\xee\x97\x93\x82\x87\xc8\xf7\xf9i\x8bF\xff\xeam7J\xb8Q\x12@";4\xad#k:\xa2\xf4D\xf2\xef\x93\\\x81\xb7\x10\xaaQ\x14I1uJ)\xfam\x06\xb5\x93\xc2\xe9YQ\xc5 \xc3\x92\xad\x00cr\xf9\x08\xc6\x92\xaf\xdf\xd6\x91\xbf\xbb\x9b\x86\xabX\xbb\xe13E\\:\xef\\\xe6z\x9e\xa4\x8b\x9cf\xbf\x92\xd7M\tAt7\xc7v\xed\x91\x9824J/\xd39X\xa0l\xee\x1f"\xf6\x084\xb0\xa6\xff6#\x0b\x92\x9b1\'\xf7\xa54\x9f\x13\xca\xf6\xa1\x9dj\x96\x90\x9cY<N?\xcc\x1e\xa3w\xb7\xb9#\xe3\x17\x92\xdeV\xdbf\x16\x9e\xe0\x19\x9e%\x00\x91.\xf6h\xef\xf5u\xb3\x8b>\xe2\xa8P\xb0\xdb\x9aG\xcaL\xe1E\xd1\x8e\xea\xa2\x03\x15o\xd9I\x11\xce\xbe\rk\xfa`eV\xde\x83\xec\xd0\t[\x02\x1e"\xd2\x93\xb6\xab\x91Jj\xa9\xa0\x9f<\xb8B\xe1j\xe8\x80\xc5\xdb5?\xb5\xbb\x1e\x86b\x99. 
\x8fkk\xc6\x97\xdc1\x84C\xf9\xbdG\x13\xe6I\x19\x97#6\xa4\xb2+\x19X/,^\n\xf6!l\xaa\xbc\x97++\xc1\xa1K7\xc6\x8ew\xad\x8d\xe6\x8fY\x8f\xa8\xfdI\x88\x07\xd7\x94s\x17`QJ0\xdf\xd2e!x\xf5\x85#\xd8\xbbW\x89\xf5i\xde\x9b\x1e\xbc{g\xcfL\x12J\xbc\xa3\x92\x1a\x07\xf6\x01.(D\x14r\xaa)\x11W\x86Px\xc9\x96l\xd2\x9b\xfa\x9fHd\xd0\x1e\xecH\xb1n\x10\x04\xb3\x03\x05\x14\xf9\x06\x1e\xa6\xee\xae2+B\xcb-\x159\xc8\x02\x98}z\xef\x95-C-oX_l\xe9\x033l\x8c\xadK\xcbu:\xcc\xd4\xc2*\xb2\xe6\x87\x1c\x98\xe9/\xdd\xb7z\xb3\xcaxyJ\xeaaM\xbf\x83\x00\xa3KNT\xd6\xe8\xb2\xc5/:z\x10\xb5a\xd3\\)\x7f(\xe2o\x93\x8f\xdb\x9e\x06\x0e\xf0(\xe4\x00J\xf3{\x01\xd0\r\xd3\x05\xec|\xb2jg\x8ed\xde\xc8\xb6.\xa8\xffb\x18B\xe2\xcb\x7fs\x8d\xfdr\x19\x94?\x89\xfd\x10\xc2\xca8\x81\xb6BW\x01\x9b\x9b^[ QS\x08%)\xa8$h\xb7\xb7\xbb\xab:\x92\x10-=H\x85K\xc2\x06\xe4\xff\xed\xd9\xee\xb4\xd9\xcd\xf9O\xd8\xbf1\x0b\x8f(\xd8@x\xc8&\x975\xce\xca\x17\x87\x1fm\xf7}7{\x8cRQ\x9a\xdfZ\xc33o+\xd9\x8dI\x97\xcb\xec\xb2P\xf6\x80\x92T\x01\xbe\xab\xf3[\xdd\xeb\xaa\xca,\xa6a\xc2\x07\xb6\xb1g;t&\x94\xb5\xa8\x0e~T\xa3>\xc7&\x12b\xdf\x82&\xee\xb5LL\x06\xeao\n\xd9\xdaj4\xa1\xad\x0b\xd8K6K\xcc\xe8O\x19\'\x1f\xdb\xba\xb04\x99\x9b1b\xed\xd9U\x02\xcb_\x9b}\xe4\x90\xac\xdal\x1b\x91K\x8c6\xaf\xc5b\xe1<&\xb5\xbdPZC\x1egH\x0f\x03:^\x1a\x11\x13:\xd5\xd7M\xbe\xc3\x1e\x9d\'\x8d}\xe6h\x96\x0f\xdd\x84\xc8\x8b}\xa2\x16\rN\xcadU\xec\xbbNO\xff\x8eG\x1b\xdc\xdf03w\xfd\xddg\xfd\x06\x89\xab\x9b\xad=\x94Mk\xbf\xbbl\x07\x822R\xbd\xad\r\xfba\x93DQo\xfdL\x99\x8e\xc3\xb4\t\xb2#H\r;\xb3\x98\xc3\x8b\xf4\x18\xa8\x07\x84jx\x14\xdfR\xc3C\xe8\xb6O\xcd\xe7\xb5*\x1b4\x0c\xaf\x02Z\x08\xb2\xfc\xd1\xb2\x1d\xb0\xd2&\x14\x16\xff\t\x10\x0c\x96x\x06\xf4\xcd\xb8\xe4\xadbv\xa7 2;W\xd3\x17\x99\xf5\xfe>\\ 
\xaei\xdd\xb2\x976\xd3EWQ\x9c`(<=\xe5\xf2R\xcb\'\x98=\xf6\xb7\xed#\x08\xe3\xe9$\x0c\\)r\\\xd0/J\xb4(\xd5\x12\x85\x9f\x88\xdd\xfc\'b\xd3(\xf1cR|\x13\xff9\xc6\x11@\xdc\xd2f\xeb\x18\x9f\x8a\x11c\xec\xa8q<\xf8\xce?\xd9\xbc\xfe\xe3\xfb~5\x1a\x85H\x08\x10\x14\xa5\xc6?\xafF\x03\x07\xd8\x14\xde`\x13;\xe9j\x0bf\x8f\xe7\xed \xd6D\x0b#{\x876\xdb\xfatn\xb6\x94\xc6\xc9\xd9[\xb0\xe5\xe0\xffX\x1a\x17\x19\xff\xe3\x8f\x84oal\x92\xa8\x1a\xc9\x8a\xfa\xf8\xab\xec*\xedY\xba\xfd\xad\x1a5\x8e\x92\xc2\xd6\x1c\x1a\xfa\xc0\xa9\xeb\xfe\xee,\xb3\xf4\xba\xd2\xf8\xc5\x95\xdf\n\xf2\x14>\xb0@\xd1\t\xc8\xc3N\xb7\xaf\xc2u\x04\xa7\xd0\xebc#\x07\x11\xe8\xd5\x07\x96\x08E\xf9\xd8\xc2\xac\x990\x82\x8d\xd8\xb2z\xcf\x9e.(\xed\xa3\x94b\xbd\xc92\x0c\x82\x92\xe6\xc3\xd1\xc6\xe6\xd2\x94\xad\x16_\x14/\x9c\x1c\x82\x8fL5+\xe6\x1b\xf3\xb6\xf1\x9cM\xc9\xa8\xf3\xa3l\x15\x93\t\xf1\x8ck=9\xed\x1b\x01\x9a-\x99"\xb2\x12\xaa\xaa\xb4S\xc7s\xab\x90\xb3Lq\x11\xa0\x81\xe1\xbd\x08h\xde\x11B\xf0L\xfba\x93\xde\xc4\x95$\xe8\'\xcc\x1e\xf8\x81\xf8\x94\x00ii\xcbU\xf3;\x8c\xb2\x19\xf1\xfc\x98\xfc&\xef\xdc%\xb7\x94\n\x02\xbf+\xa10\xe6\x1bdo6Wr\x17T\r\xb4=\x12Tj\xcd/\x94\xed\xf3\x13\x04Q\xae\x17\xaf\x8c/\xc3\x87\xc7x\x8d\xfe\t\xad\xd3\xc1Y\xa5\x86ND\x85\xe8\xb4a\x06\x870 \xe5T\x1dtl7\x18z3$P\x01P\xaf\x83h\x8eM\x12B|\xf9\x821\xf4\'\x83K\x1a1\x87"p\xc9=\xd0>\xa2\xc7\x89\x99\xc6a4\x03\xff\x0c\xa5Y\x9f@5\x1cF\xc8\x0b\x96!\x17\xaf\xdb&\xd9\x96\xf8\xc3\xed\xc9\xab\xb6c\x1d&D\x12\xfc\x89\xde:\x94\x0b\xa8HX\xfc\xb6\xe0\x9d_j\xc5\xf8\xbd\xd9+\x11\xb4\xde\x9c\xce\n\xcb%\xe4\xd5|\x01\x04\xe1\xafoK\xfc\xb9\x8e\xee)\xea\xf1f\xd4\xd8Z\xd2\xa9\xc5\xaf\xfaT\x8d$:\xebr\xe1\x9f\xed\x0cE\x14\xe4\x8e)\x14\x8b\x82\'=P\x18R\x96\xa0\x93\x8f\xedGgd\x87\x06\x9f(\x89>^\x9f\xba\xf7]\x05]Z\xd0\x9d\xearfDj[A\x1aC\xc0\xee 
C\x82nl-\xb6\x8b\x0b+r<\xecM\xaeQ>\x87+\x1a>\x81Q\x83\xf2Ln!\'\xd4\xb6S\xac-\xff\xb3\xdbC\x10\r\xfc\x8c\xd2+\xde3Z\xdf{{\xa2\x12\x83\x05\x11\x03\xe3*t(F\x05x\xcc\xd1l"\xac\'\xd0y\xdf\x9e\xc2\x81\x12\xe4cU\xb8x6\x81Y\xed\xc8f\xfaru\xbf\x12\xadE\x063\xfelA\x04\xa7\xf0\xa4k\xf8\x9b\xfb\xe6-\x86\xca=N(%C\xd3Y\xdd\x93\xa8\xba\x14Xtv\x14\xc0\x0f[\x93iPU\xc0\x17=\xd2\x15n\xb4{<e\x06\xc5\x15\xd7\xae*\x07\xc3\xd2d\xf3\x98\x0c\x95\xd4vdK\xe0`\xf4\xe9\xb9`\xd2\xa0\x9aB\xb4\x98\xed\n\xc3\xe1\x14\x9f.\x1f\xff{\xc5v\xabL_\x80`\xd6\xf9\xf9[K\xb6\x88\x17\xa1\xd5\x9e~/\x0b\x02\xcc\xa3{\x8bv,\x1a*\xc4q?\x10\xf4\xd2\xab\xa4\xfa&\x95#\x07\xba\x17\xab\x90\x90\x8cCm\xb8k.\x83\n\xdf\xde\xfc+\xca\xfe@\xc6c\xfc\xa3\xc4\x84\x15c\xa8-\xd3\x08\n\x82k\xfa\xb9\x15\xac\xe3<\xce\xd6\xdc\xd7)e\x8d\xc9\xe3\xa5L\x85(WI\xda1\x10\xfa\x12*\xee\x05\'\x85\xe8\xcfg\xe6\xbcc\x93,\xc1\xe3;l\x9e\xcf\x03\xe5\x9a\xf2\xdf\xa6t\xffK\xbf\x05\xa2\x86\xc5\xc1v4Y\x94L\x1f\xbe\xb1`\x94\xd2\x9b\xac\x12\xe1\xf5\xb3\xb3\x1f\xe6\xbdj\x15\x90\xa8\xb7\xd4<\xdc\xb8\xbdS\xe0M\x80\xba\xb1\x02\x89\xf7M#3hM\xd8?\xb2\x0f"Y\xb5P\x11\x98\xf2\xa6\x88\xb5\xbaR\xa8\x10\x1c\x12\xaec\x99\xdc9\xbf\xa9X\x8e1\xddK\x0bdJ\x0e\xe0h\xb6n\x8e]\xfdb\xc7\xb1\x01>\x18\x94\xcb\xfa\xfc\xaa\xfd\x07GM\xc2\xd0\x94\xef7\xb0oc\x8c80\x10\xb6g\xd6\xdeW\x972\xa4,\x1c\xba\xb9\xadJj;MNn\xea)b\x8e\xf4\xae\x82cG\xbbM#\xb3m\xcbG\xf84\xbdyj\x8bD\x18"E\x07@9\x98\x15\xcc}dkcgo\xcaH?TR,\xdc;\xb4\xcd%n\xd0\x10\x82W\xd3\x1e\x01\n$\xa0\xf1\x03\xcb\x00B\x80\x83\x08`\x01a\x91J\xd0\xb7\xc9d\xa0\xdd\xfb\x88o\xf0\xe3\x03:o\x9e<\x84S\x84\x8c\xed\'\xbc\x1bt3\xd8=#\x1e\xf3\xc0\xba 
\xd0%\x9b~qM\xf7\x04\xf36\x80\xffm\xc2\x11\x1a\xb1\xd2W\xa7\x95\xbam\xff\xddD\xa7Ytsz\xeb\xca\xfb/\xf60\x9c\x19\xe4\xfd\xae\xadI\x03[\xde\x8b\xe2\xcf\xd8\xb8C\xeby\xb2\x00jE{O1Na>\x82\xce/\xb25\xe2dnV\x82H\xa1g"H\x9a)\xcc;OEyh\xdb\xc5<\xb2\xafF\xa3\x12\x9c\x056\x87\x08\xb3\xf7\x11\x93\xb6O\xf7\xee~]Wr@\xd5G,\x15\xe6(\x05\xae\xb5\xdb\x7f\x82\xdbsE\x8b\x19,\x98\xf7\xbe\xf8\xa1GO\xc6\xf6m}\xc8\xce\xcb\xab\xea\xa9\xecLr\xa7\xd2\xb8Ol\x8e\xea\x87\x1c3\x11\x0b6\x8fi\xb5n0\xc5\x8e^U\x87\x93:\x8d\xde^\xa7W,\xd8cg\xfb\xf6\xdd_sD\xac\xbf\xe1\xaf\x80\xfes\xea\x8e{\xfe3\x11\x88\x19\x84\xff\xbaS\x14\xe1\xb85\xe5M\xd4A}\xf3\x02\xa6\xe2\xab\xcaZ\xbd\xb3!\x0b\xb0HT\xac\x0ef\x92L\xf8\xf7\xfb\xb6@\xa3\xf0\x1b$\xafvF\xc7\x12!\x00:6\xc3`\xbe\x95\xaa\x90\xafX\xee\x82 \xdd\x089\xb0>\x18\x01?\x19W\xe2\xc0L\x0c\xa3q\xb2w>\xad\xbc~(\xc8/\xf7\x13p\x9cI\xae\xb4\x8a\x1c[\x92\x8f>\xfb1\xbb\xd9\xaeY\xdaQ\x12\xcc\xc0Vy\t\xa5"\x8aJ\r<\xc7\xd5!V\xc5\x81FX\xfe\x18\xdf\x7f\xddo\xde][\xb2\x13\xd9\x92\xc5\xdb\xae=\x93mj*\xc9\xa7co\xf1\t\x98\xae\xea\xa3\x05\xcdX\xf5\xd9\xc3\xf5?\x95M\xb7\xa0\xecB~G\x92:\x9c\xbd\x99\xc8\xb8\xe7\x9b\xdf\xec\xd0\xb8v\xe6\xb5\x854\xa4"d\xb3J\xafH\xcb\xae\xdf\xe0K~\r\x9d1L7\xc6\xdf\xdb\xc7\xd0\xa8\x02y\xdf\xdb[\xbc4\x08\x9c\xf5EJ\xe2\xdeB5\x9a \xed\xb4\xd1>\x9f\xc0\xca!\n\x04\x01\xfe"\xdb\xd9\xb3h\x9d\xfd/\xa4\xda\xa5r.\xb1\x19\x1fZl\xb2\x88k\xcf\xb2\xb0\t$\xe1OQ\xc9+\x85\xddu\x8a\xfcy\xf0\xc4\xe3\xb3\xf6\xbaE\xd5/\xeb\xf0\xfd\xeeU`g\x10.\x19t_\xea\xe8\xa0RY\xfdc5\xaai4\xc5_\xb6\xa4\x1c\x9b\xf3k\xc9\x8d\xcbe\xfe`\x8e\x80\xfd\xb2\x93\x0f\xc78\x95 
-\x05\nYk\x80\xca\x16MQ\x99|Y\xb4X\x90\x04\x9b\xf4\xce\x15\x0b\x11\x07n\x10E`T\xa2u=\xc2\xac\x1c\x8a\x1b\x10\xba\x8b\x92%\xe5t\x03\xaa\xea\xcdN\xb2\xd8P\xdf\xfen\x91jh\x11\x97r\x06zj\xd1?\xaba\xd1\xea6\xc6N7a\x82\xcdA\xc5\x83\xbb\xc7\xd8\x1af\xdb\xbf\xb1\xa0*\x826\xb25N\xf9B]\xe0\xbe\xc4\xf4\x9dw\xed\xb5|y\xa4\xb7\x19NE.\x1b\x85S\xd1lM\xdb\x9e1\x83o\x85\x1b\xa9.\xe2\x8am\xe5\xe9\x18z\xc1\x93Q\x10kB\xe4\x15?\xb7;c\x93<A3\xce\x00\x16Q\xc7\xa5\xad\x86\x9e\xb3\xc3\xd7\x02L\xd9\xcc\x9c\xfeXZ\xbbk\x9bMDD4\xc0\xa1\xe7\x83\xf5\xabHQLl\x07\xa0\xe7\xc6\xa5\x18],j\xb0\xb5\xa6\xc0\x1a\xe5d\x1c\xe2\xe06\xcbP\x19n\xde`\xcf@F\x8d\x1e\xbb=\x9dC\x15\x80\x9a\xfc\xe5\xcf\x0f\x92\x9d\xe88\x18\xbb\x11lE\x94\xa7\xf5d\xcfBU\x93\x1ahT\x9d*\xcc_\x932W\x9c\x9c$H9Q\xa1c3z\xb7!\xc7P\xda\x97\x94\xf9\x99\x80\xec\xbc=\xb4\t\xb4\x7fOV\xedi)\xfd\x86\xedi0#\xcb\xd0\x133_\x14\x84\x10.\xe9\xd1z\xe6\xd2\xcf\xf7\xf1\\`D\x14o\xd0w\x94\xe5\x1f\xee\n\xf4H-P\xa9\xfb\x19;\xfe\xac:\xb7\x95d\xda\x99\xda\xc7\xf9\xc9\xd9s\xfb|z\xaa\xf4\x8a\x99\x11\x96(\xb4\x1e\xc4"T\xc1f\xf5\xb2\xdd\xeb\xf7\xcfh\xba^,\xae`\x05\x16\x94\x01\x817\xdb`z\\\x9d\xff\xfdk% F\x9e\xf8%\xce\xb9b\xb1\xec\x88\xcc\xb2\x07\x18\x9e\xdb\x7f}j\xe7\x88]\x11}3\xf6\x8d{u\xe3+U\xd7\x19\xf9v\xe5\x9f\x973\xa7\xf2j\x81\x08tm\xaf\xc1 
-\xbbc"W\x88\x82zqD\xc9\xd0q/\xed\xf2x\x08\x10E\xea\xa7\xa0\x00\x88{\xb3cG\xdcC\xe8\x91\xe1&\x0f\xfe\x9f;\xf6\xa842\xf5\xdf/\x02\t[\xf3q-\x1d89\xa4\x8c\x9bn\xaf\xbf\xb1\xfd\xe0\xd4\x00\x920\xe6\xed<F\xe9\xc2\xdb!\x9a\x17\xa5\xafm\x8e\x90K7\x82\x04\x89\xd1\x87q;\xfa\x91\xe8\xb7\x84#r\x1e\xb4\x83\xbb\xfaW\xf7_%\x112\x1d\xac\xda_\xbd5\'\xaf\x90\xcbw%\xcf+\xc9\xe4,\xd8\x19\xab\xc5\x9e.u)\x9a\x98\xd8U$\xbc\t\xfd)>\x9d\xd3\x8e\xe3\t1\x12\x8a]\xb1\xcduE\xb3\xcd=\xc1\xe80U\xe8\x1e\xa2GQ\x94I\x98\x14g\xb8\x9c\xb6\xc5fbF\x18\xee\xcc\x16\xab\xce^r\xb5\x92K\xb87(\x16\xa7\xce\xa00\xaa:%v;}\xddv\x88\x9c\x93\\\xe1\x10\xd1<x\xa0\x04\x80/R3k\x9e\x04\xaa^\x8b\xe4c\xcd!\r\xe3\xb4\xaa\xd6\x01\x8f\xb2\x1f\xaa$u\x8d\x82\xd8\x0ed\xc0\x1a\x10\xd60\xd0\xd4w\x8b\xd5\x1e\xc6\xc0e\xcf\rB -\xd2\xd9e\xed\x15yw\xed\'s\x95\x9c8\x86x\xa7\x11%\x0b\x96\x9a\xdd\xe3A\xb0"\x8e"\x87\x99w\xd9\xeeg\xb9\xfa\xee\x9fBSXy\x8ak\xf3du\xca\xb5\xe1\xae\x92\xd9]\x9cP\xb8~\xa0\xb0\xd2\xe6\xe0\x02\xb0\xa4\xa0\x14\x97.\x93\xa6\xc8\xd7\x80$Mw\xf1D\x869[j\xde\xfdS\x84\x90\xd8y\xeb\x8b\x01\xdd\xde\x90}\xed\x07\x86\xd7 \xefB\xbe\xa5\x08@,qG\xe6\x11\xfb\x1c\xce\xc3\xc6N\xd9\x89\xa5\xdelw\xda\xfe\xa7\x80\xa9\x13\xba\xec\x19\xcd\xf2\xf4\x95\x82\x92E\xe7!\x1bE\xc0/\xebj\xfd/\xd4m\xf3\xe3N\xe8\x0c\xe0Zj\x07\xab\xa0\xe11\x85\xb3d\x1fI\xac\x90\xb2\xdb\xdd\xb5\x17\xf5\x84\tX\xc4Ao.\xd3}\x82\n\x08\x8c\xbbU[Q\xb6S\xc4\xf6\xeb%ea\xc0?\x8d\x84\xe0D\x83QH?*\xc8\xc2\xfa\xf8G\xf8\x9f#d\xab\xefW\xf1 
(\xe9\x90X\x83ZB\xd3\xa6O,v\x8b\xda\xb77\xcd\xc0Y\xfb\x0b\\\x1dk\xde\xcd\xb3\xc7\xd8z)\xbe0\xc8\x15=\x87%\x12\xc8\xeb\x07\xb2RIK\xcdnS\xdc4\xfb\x1c\xec\xa4wZJ\x9e\xd4R\xb9WD\x13\xb2g\x11\xcd@\x08f\x81\x0c\x96&\x06K8^\x9f\xfdeG\x853\x84\xd1H?\x84\xb0Q\xb2\xcam&\xbb\xb3\x1cB<\xdd\x17\xfc}^\x88VSz\xaa\x87PW\xe5\xa9<B\xaf\xae$\x07\xabE\xb2v\xa8\xf6\x90\x18\xa3\x1c\xc5\x8eZ\xd6\xac\xff\x1e\xd8\xcfsQ\x83\xdd\x18\n:\\\xec\xce\xde\x86b\xa8\xfe\xf2P1&/\x7f|\xb2#\xdf\x06\x0c\xbf+\x8d\x0b\x8e\x03\xbch&\x98\xf3>s\xd3\xf7\x82\r\xaeq\x8bR\x16\xf6|\xbcA\x1d\x87\xab\xe609{h\xf0,\x04\xb5~d\x98\x80@\xd8\xbd\xb3\xc0\xb3A\x04\xd0u\x8fG\xb5t,"Y\x99\x1f\x88\xb0/\x9a\xb7\xaf\xcb\xe5\xd5cE\x15\x8d\xc8\x1b\x9d\xff\x0f\xd4\xd7\xfe\xefE\xcas#exrP\xa9U\xf6\xd6K\xbb\r\xb5B\x0b\xd2\xeaZ\xee\xf5\x9a\x19W\xd6\x11z\xe4\xa6\xfd8\x10_\xa9\xa8\x92*\xd4\xbf5\xb4\x85\x87\xaf\x93[\xc4\xb7\xa3I9D\xeb\xdb\xd5!\xb40"\xa6\xea\xd0\xd7*\xe5\x19\xc8\xf5\x16\x8f\x88-\x01\x85\xdd\xfc\xd0O\xed\xbe\xd6\xc9\xf4\xe6{\x9c\xdf\xb6\xcb\xc9\xde |xL8M\xf3\xa3L\x066\xb9j\xbf\x14 
ZY\xc7\xf8\xa6\x0e\xed\xc1\xa0Zq\xaa\t\x19\x90\xdb:t\xde\xbe\x9c\xc8Ul\xc9\xb5\xcb\xff\xde\\e\xf2\xac\xed\xe6X&\xacU\xf3o\xc0\xafVw\xbf\nK\x89\xc8\x84y\x7f\xef\x89\xc0%\xc2\x01\xbbvYZ\x88Z\xf3:w\xf2-\n,\x89\xcc\xc1[u\xb7\x88IL\xea\x8fk;.M8\xe1\xe4f\x98\\\x0fWo\x90<\xb6\xb5xt\xb4k\xd8c\xcb\xec\xd0\xc9hp\xd9\xd3\xdf\xdd,\x0e\xdaz%n*\x1c\xfc\xe6"\xf3&\x1d\xa9\xb6\x92\x90\xb6\xeb\xa7\xc0\x17\xa6\xe0\xef\x00\xe5\xb26\xaf\x83GU\xc3\xf8\x1e\x08\x1dIW\x8dx\xda\xde\x15S\xf6\x1b\xbc\xa3\xcc\xc9>`a\x9f\x10\x07\x06\xd7\x8d#\xa4\xc3\xf9\xa2}[#KOQy\xb2\xb8\xe6e\xd0\x06J\x97SN\xddU#~\x11\xdc\xaa\x1b\xdcI\xb0\xe3\xdb\xab\xdc\xe1\x01\x14\xb6:$\x8d\xeaSQ\xb8w\x89\xa9E\xc9,\xd4\xf2\x180\xc4Bh\x941\x0fLB_>\xa9F\x02\'\xe4\xa7`\xf2\x1dyc\x99e\xe2\x94\xb4o\xcc}v\xb8\x12F\x8a\xc2\x19=\xd8\xaen0\xdfxkO\x1d\\\xf6\xd8\xbe}\x80\xe2.\xdc\xe9!\x90\xec\xee\xe95\x99\xc3\xec\x84:.C\xed\xad\xe7x\x1b\x0c\xa5\xf1\xb9MR\x93x\xe80\xa9\x8c\xfd!\xe9\xc4\xe1\xac.\t\x8c#\xa720%I\xed\xb9.\xce\xc6\x03\xa6zt\xef\'\x8a\xbf\xe3J\x8d\xf2\x82pa\x9e\xfb\xaf\x16Uz\xcc\xcc\x19\xfa\xd0\xca\x12\x9a\xb35\x04>X\xc3#\x0c\x95\xd1\x89U\xdb\n\xd2\x9d\x1d\xd4\x16}{L\x9b\x8b5\x0b\nFHE\x813\xde\x1d\x9d\xd6Z\xfc\xa0\x9d\xe9\x85\xb2t\xb0\x8d\xd1\x94m`W\xdf\xbd*\x8b\x84\xf3\x82\xf9\x8d\xe4\x12\xb6v\xaf\xf1\x87}\xfa\x9flPv0\x84\x18\x00\xc0/\xbe\xec\xc6\x85\r\xde1\x13\x1a\xb5\xaf\xb5\x06\xb8\n\xe0pR\xb8\xb5\xbdj\x87\xa9\x07\xe0\xce0\x1b\x97\xda\x9f\xe2<A\x88\xb2\x81V\xb3u\'V\xaa:\x94\xd0\x9fr\x1d\x14\xb6yyY&y{\xcf\x16\xae\xb1z\xe0\xe1\xff\x99\x1d"\xe3?\xf1\xe9\xea5\xf1\x80\xe0\xc9\x8b`!\x1f\x89Q\x8a.bG\xd6\xda\x07\xe580\x9b\xa19Z\xc1\xf9\x83\xd4m\xba4\x83\xe4\x0c3oY\x1f\xad\xed\x04\xd5\x90H\xe9\xf1r\x0e\xc2\x85\xbd\xd8\xacm\x12\xbf\\2d\xa7)\xbf\xda\nG\xe9S\xcc\xa3k\xben\xd8j_\xc4\xdd\x95\xec\x08\x96\xd44\xef\xfb\x07&\xbcm\x87\xbb\xc1\x89\xab\xcb\xbb\xff\xfe"7\x03\xe5\xa4O\xf2\t\x15[{\xaf\xda\x08\x0e\n\xd9\xe5h\xe7\x8d\x8aA#\n:l%\xfd\x90\x9f\x81C?>x\xbc\xaf<\n\x0f\xdf\x89yQ+\x9d\x1c\xf8\xdce{\xf5\xb9\xfd\x98\x8b\xdf\x8e\x1d\xdb\x87\x
b5\x85\xa7\x1aCb{\xef\xc0\xad0\xa3\x88\x16\x0b&\xb5\xf8\xd5\xfe\xa8\xd28@\xf2\xcbpd\xf2\xf7\x0b\xf6\xb8\xec\x843TK\xa3w^\xcc\xff\x08JA0N\x8d\xb5\x0cP\xdc\xe8\xbfG\x85KeN\xbc\x1aZ+\x127\x02\x84w\x18\xaa\x0c\x11\x97pKfc\xad\xd9\x10\xc3\x8ad_#\xa7\x83\x8a\x1fl@\xa9\xed\xde\x9c\x08\xdf\x84\xa1\xefY\xf49\x93\x1f\xc4\x15\x18t\x15j\xa4\xb4\xa4\xe4\xd7\x82\xea\x8aj\xc4\x08\r\xc5\xb8\xfa\\B>4n\xe0\xcbz\x13\x95\xbau\xb0rnQ|\x10-\x8f\x08\x1a\xd4\xee\x88\xbb\x1ag\x06\x0c"ji\xb9\xf5}\x03\x11\xc88\x80Q\xcf\x7f\xdc\x11\x8a\x84?E\xfa\xc3\xe9c)\xc6:\x0c=\x9c\x8d\xaa\xba\rf\xff\x18\xc4x\xde|\xd6\xb8\x91\x8b\xf4v\xa62\x18\x9fC\xf0\xc4fc\xce\x13\x1d\xc2\xd6\x97\x1f\xd3\xc2\x10\x9c\xa8Ecu\xdc\xef\x8cW\xa3\x12u\xdd\xdf\xfci\xfb^\x0fq^\x8e\xe1-m\xcd\x899\xd9\xdc\xa4m\xcb\xc5JU\xf9\x7fW\xf8\x92\x15\xe0D\xe2i\x84rI\xf0]P\x9a\x82\xd7p\xab8_\xca<\x98\xce1\xe8d\xbd\xe4\x9a\xfd\x16\x0b\xf9\xe5\x1f;\xae\x8d\xb06fD"\xb1\x11\x9e\xcb\xc8\xc4D\xd5\x0f\x1c2j\x0e\x01!k\xf1B\x9d\xd2\xb7(yt\x05\xd2d\t\xa6\x7f\xf7\xc8B"\x05\xb4t\x93HvX\xea,\x1f\x01\xadL\xef\x0b\xa7\x19~\xbd\x1f\xa0v\xee\xc6\xa3J\x12,z0/\x80\xb2\xaeO\x07\xcc\x8e\x80\xf5\xfa\xf4\xa4\x01\xe0\xd4/`j6\xf3\xa2\x1a\x15\x94\xa8\xa3\xe3U\xe3\x0bn\xf6\xe7/\x0b\xf7#\x8a|\x90\x02_\xea\xe3j\x8b\xa9\x87 \xbcQ\r\xa9\x13\x1a\xd2\xd5\xd3\xce>\xd3\xfbC\xf3\xc7\x9c\x86W\x98\x0b\xa5oA\xd8\x11\xb9S\xbbB\xb0\xa3\x87\xbf\xed}:\x15\x0e\xadi\x82\xc4ug\xbe\xc8\x94\x07\xd7\xedm9\x1c\xad\xc54\xce\x9a\xa04\xb1;\xc4\xa7\xfdc\xdb{\x96{aAZ\xf6$7\x82^U\xaag\x85Y\xe9c)\xcc\xa9\x8b\x08\xca 
\xaaj\xe6\x04\x86\x0c0Ao\x1f\x110\xb0\x8d\xe5\xbdPI~\x15?\xc8\xaaZ\xa8FT\xf0&\xf9D\xc4\xa59\x10U\x8a\x8a\xe1tX\xc9\x00\x16(\xd7#X\x80YK%\xcd\x90\x88\x0f\\.\xa7\x08\xb1\xcbW\x07hw\x02\x0bNB\xdd\x19E\xe6\xc2\x1e\\/F_\x13jI\x1f}\xb0\x9da\xbb\x04\xdcw\xfa\x0b\rti\x07\x07\x93\xe1\xab\xbc\xbf\xf7\xde\xaes\x17\xeao\x0fW^|%\x90\x80\xec\xba\xd8\xfdc{8p|\x1aH\x90\x0f\xe7\xa7\xe6\x18\x08\xb1\'(uHp\xd1\xdb\xc1\x81\x1d-\xf3\xe7\xef*\xd3\xf1\xe4n\xc4\xa7v\x96\xc8\x8a\xed_#j\xe3\x00g.0\xfa\x99\x00\xaf\x06\xad\x85&\xea\xb5\x8b\x1f\x9e\x88\xf8`\x03\xf3`\xd9\xeeo\xda\x174\xe5\xd4\xce\xf4M\x9a\xf1\xbd\xe2!\xaa{\xdd\x922\xe9^\xa6\\|\x842\xde\xb2\x0b\x11h\xa6\xf2\x97\xf6\xc7&\x80\x115\xab\xaf\xf5k\xa9\xbc\xbe\'H\x03_\x8e\xf6\xb1\x9a\xcd\xda\x14r\xd6\x02x\x91k\xac\xbd\x1b,\x0b\x8eV\x1b\xe8\xcb\xcdau\xf8M\xc3w:\xe1\xdb$u\xa8\xe6\xd8)\x10"m#\xdf\xb8\x82\x97:\xb0P)\x948"\xd0\xaf\xe5x\xf7x\xdd\x90l\x1bA\xa2\\\x8a2B\x91.R\xb2f\xdb\xc0IU~YY\xbc\xf0GgQ\xed\x1eF\xb8\xa0\xdb\xbf\x8e7>\xd8\xd1\xe4\x06\xe1\x0c\xc5\x87v\xb4{\xe5\xd3l\xb6\xf7\xedeY\xf6\x1a\xd7!\xb2L1\x1aXW 5t1+2\x9fVyb\xc4\x7fd\xbaA\x83RW#\xa93\xc2_\xaa\xf5\xd1\xe9\xc6\x0b\x8f-Ep\xc2\xb6X\x97\x0b\xe6\x17\xf9\xd4b\xae\xe2\xb7Kq$\t\xbe\x95\xdbcv\xa1{`\x11\xa4\x0c\xb3\xcbB\xa9n\x99\xaf7\xe7K\x16\\\x13\xb7\xef\xb8\x1a{\xda\x8c\xd2\xe0\xe22\x9c\xc0R8@Z\xfe\x9d\x85\xe1\r\xfa\x88}y\xfe\x7f\xc5\xeb\x06|\xa0\x1a=\xbf5D\x81\x89\xc3\x91\\\xb7\xf7\x16\xd772\xf4\xaf\x8d\x90@D\xfd\xbcEV!{\x89\xe6c\xa4\xc1u\\\xbe\xb9\t\xb6*\x04\xc9\xe9\\\xc5ls\xe8\xafd\x81\xb7]\xdc}\xa8D\x13\x914\xf3\x95 \xd4\x0c\x06&\x81\xc0J<m\xbf\xde\xcb\xb7Dnq\xb5\x96\x05\x12\x91\x8c\xefY\x90d\xf6 \xa9F\xb5\x0e\xaf\xf6\xc6@ecx\xd2\xf0\xfe-V\xeb\xeb\xb89KN\t"+\xae>4\xa0\x82c\xa4\xc3\x00\xf9\n\xdffR\xf9\xc9\xae\xa7\xaf\xef\xdb\xefR$\n\xde1[\xbfF\xbce\xacR\xff&\xdc\xd0\xfbq\xbb\xf7Q\x8b\xd0\xa9 g\xf8\x93\x00\xa0\x0b\x0f\x8b\xf2.\xa6\xedt\xb1 
\xb2<Z\xfe(\xbf\xaa$\x8dS\xb4\xc3\x8bu\x94\xb1\x9bM\xfb\xbe\x81\xf3\x1a\xd1b\xb7?\x94\xeaw:\xc4\xb5\xbc\x8fH\tu\xfc\xee\'l\xe1U\x192e\xcc\\\x88\xf8\xba8\x1f5\x87!\x0f\xe9~l\xdf\xdf\xb6\x0f\xa6\xaaQ\x8b\x9ckft;0\x9e\xdb\x17\xb3\xd5\xe1?\x13\x11\'\xab\xd2\xd3\xf0\x1f\xdayo\xac|\xfd\x87\xd0#>\xa0\x00\xacA\xa0\xb9\x99x6\xd4\t\xedM;\xd5\xc2\x9bA\xa1\x8d\xda\x14\x8c\x97\xd2W\x08K\x93\xaf8\xe4\x83\xff\x1a\xf4\x9d\x19\xdc!z\xee\xcb\x0f\x94l\xe38\x8d\x8c\x1fH\xa5\xe5\xd8\x0e+\xa9\x1e\x9e\xcc\x7f\xe6\xf6\xb0\x84\x90\xb90\x91\xa6\xbe\x85Xq2v\x98)vJ,|$\x0b7\xdb\xb8\x8e\x17\x8d\xc7\xafNm\xdb\r4\xe3\x00\xdf\x89\xe5d\xd1?B%\x86RQ$\x97\xe4\x96\t2\xab\x1f,\x01\xf3\xc6\xa7p\x81\xed\xf1]\x0b\x19|\\\x8dT\xe5\xbd\xa06\x9f\x9d\xd9\x02\xf4\xf0\x8c\xc41;\xfdt1;\x04\xfb\xb8j\x9f\xec?\xa8L\xcd2\'$<Q73\xf9\r\xd5c8V)\x9d\xfbmK\x16\x01\xde2\r\x8a\t\xb3\x0cX\x13\xdc\xc0\x86\x84 l\xa0\xa5\xd5Bp\xc92\xc9\'\'\xben\x12\xd7\xc05\xad\xef]\x9b\x17\xb8\x9d[\xe0\xde\xa0\x05\x97\xc4L,G#t\xaf\xe3\x0c+ \x12\x01\x08\xf6 \xcc\xcdL5\xbbV\xa9L\xa1\x07\x14g\xca\'\xeb\xefms\xc8\xe1`\xc8\x8d\x80\'2z\xea\x9e\x90\x8dX\x91\x06\x8d \xcf\xf7\xdf!\x80\xfd\xa9\x9896#\xcd\x0eL\xd6\x02\xfe\x82\x12\x08\xfe=\xe7K\x0e\xf0x\xac\x7fo\x952\xa4\x16$x\xd88j(\x90\x0e~\xcd\xacH\x17\xed\xfd\xb5\xe7\xa5\xbe\r\xb1\x19\xb7pTI8y\xe1\xe8?\xfb2\xb2J\x13E\x00\xb5\x9d\x06.\x9d_z\xdfCG\x82i\x8a\x88Ju\xbd\xaa\x089z\xaf<\x88b\xba\xad\x0em\x8b.\xe6\xa8\xfcaS0\x06 
\xcen\x7f\x11v\xc5\x14\xa8!\x92uq\x92n0\xb8\xe64\x94-[\x81\xbe\xb9b)W\xa9\xc0\xb3Q\x91\x8e\xad\xde-\x9dde\xa7\x9e\r\xee\xa9`q\x84\xbd\x00xi\xe9\xbd\x10\xfa\x0e\x9dv}\xfc\xc6\x0eS{\xf1b\xe0\xa1D\xff\x9eT#\x86\x1bY<Yd\x97\x01&$R)\x85\x9c\x08\x14\x95k\x15MJ\xcc\x17\xc4\xc9\xed\x03\x88\x0f\x18\xaf\xc9\xbc6A\xcfB\xf1\xcc\xa2Z\x0f\x85\xb4\xc8\xbd\xab$\xbe\x01\x92EX\xd4\xd7\xa4C\x9e\x11\xcc\x97\xf4\xf2\xafm\xd3\xfexfn\xb6)8\xa9\xe8\xe9\xc3J\x94\x9c\xe3\xdf\xf6\x8bA\x08%}si\x1e\x92v\x93*?\xe9\xc4\xcc\x00\x08\xc3\x94@\xef\xde\xe5\xb7\x14h\x0c\x92\x89\x17y[\xa5\xea8\xd6\x12\xb3;\xfb\x1c\xf1\x13\xe8U\x1d\xc6d\xb8\xe6\xbdL\x95\xdd4\xebn\x11\xe4\xdf\xb2r\xcf\xa8=\xd65\xf6\xfa\x02\x14\x18<\xea\xfc>E2\xdb\\\x15\xf6\x13U7\x18\xa2\xa2\xa7\x08\xb5\x9f2\x1f\xaa\x80\xe9\xf3%\xc6\xb5\x87\x16\x1d\xb2;\x04\xdf\t\x83\x1b\x81\xab\xd1@\x93\xbeT\x15\xb8\xd4\xb1\x89\x86\x14\x85\xaa\'\xee\x13^\x8c\x84\xd5\xdel Y\x9c\xa5\xedc/\x1bv\xe1\xba\xbe_:\'\x86\xa1\xee\xd7\x1b\xa2\x97U\xb5S\x89\xfbx_\xc0|\xbd\x7f~G\x86Q!\x08\'\x8d\x0e,\x8c\xe23\x90\x9av\xa8\xd3cC1\x83\x9c\x08^3\x80P+8\x18\x8b\xf3\x97H\x01\xa9\x00\xfe\xf3\xcce@\x18\xd2\x83e{d\xf6\xaa\x17\xc3Q\xb2\xd5\x1f\x8e\x1fk\x9d\x10>\xe8\xb3rH\x99w\xf1Ydv\x18\r\xfa\xc0V\xa8C\xcb~\xe9\xc6\xaa\x1bJ\xa7\x9bv\xef?m\xbdj\x88\x143\x91\x7f\xae\xc3?}\x05\xaf\xed\x1c\xca-\xe4z\xf4\x0b\x1f\xcd \xd3o\x14(E\xb0\xbdl L4J\x0e9\xc8\xa2.\x87\xfdW\x94\x87\xda\x7f\x13\xaf\xe9\x9axsHla\x10\xc8\x1d\xd2\xebSp\xa4\xf8Z\x9b\x19u\\\x89.\x8e\x1f\xde{L1\xbc\xa9\x87\xdbv\xec"\xc5C\x9c\xf1\xc2\xb2qkF\xafl_\x9f\x83e_\xefkV+\x15<\x08A>\xfb-\xc8\x02\x14\xd9\x08c\xd6\xd8\xed\xe2\xda\x7f 
\xa9\x80\xf83\xd0\xfa\x1a\x05\xe5%\x081<\x1c\xbd\xd9\x85\xae\xde\x7f~\x07\x02\xdf\xe9\xb6\xb7\xec\xabs\x9b_S{\xad69\\]\x83\x12\x92\x1f\x8a\xc4"\xe7\xb1\xccW\xdf\xec\xabQ\xd5\x96\xd5L\xf4\x02:\xff\x95\x9a\x17\xbaI\x0e\xa4\n\xd2\xdcS=\x81\xaf\xa4\xa3\xb1;\x84\xe1\xa4\x86\x9fl\xac\xd1\x97\xbf\x02\xc6\xe2\xef\x9f\xad\x99\r.\x9b\xd5\xff\xcc.\x91"\x91\x9a\x972R\xcd{\xe5\xae~\xcb\x8e\x08!\x84D\xb8\x16\x19\x92\x08\x9c\x98Y\xea\xa2\xb0\x94\x84\x87`(\x9d,\xbe\xbav\xdfL\t\xa1\xe5v\xff>\xdar\xc9\x16\'\xd6\xd9\xb4\xebh\xd2\xaf\xaf\xd9\xb7\xb6,\xa7\xd3 \xb0c\x9b<\xa17\x9f\x90\x7f\x16\xd3\x9bW\x0e\x0b{0\nX\x14f]\xbdx*]4E\'A\x8a\xf3\xb1\x80\x0ch\xbd6d\x16\x05a\x88m\xf4J5\'\x97~G\xfe\x1c\x9fl\x9d\xaf_\xd6\x8d\x87\xf2FM\xbf\x82\xbb\xd0\xafX\xae\xc7|?\x1bR\xfb\r\x0b\x86)\xaa\xa6\xcbK\xb1a\xbfd\x16\xb5QN\x13\xe5s\x0b\x10\x9eA\xbc\x88\x98\xa2&W\x111\x07F\x908\xcc\xb6\xab1B\xa1\xee\xe2\xeb\xb2\\1\xb2\xd2\xf6\x85}03kQ\xb7\xd8\xba\xcc9\xad\x1e\xe4\xa3\xa8\x7fl\x92=?g\xaba\xdc\xbdUX\xf0\xb3\x90\xb0\xaa\x91\x9dY/7>\x06\x1d\xa7m\xf6\x0c\xa1e\xdd\x9c\xff=\xeb\xed\x906\xd1M\xe4\xa8\x905i\x8a\xf7p"\x9f\xed\x1c\x90l\x19?\xfd\x0c\xe9\xf5\xf8\xda\xfa33M\xa4\'\x0e1+\xcb\x9b\x8f@\x00E\xee\xc3A\x03\xc9\xd3\t\x01\xa0n\'\x92\xb1\xc0\x071\x10 
\xeb\xa0SB\xa8\xf4\x93\xad\x94i\xb3&~-w\x02\x19\x8cdF\x90\xa4\x1dZ\xa4\x12\x19\xcf\x03_\x7f\xe3\x9cO\x02\xdeV\xde\xfd#?n\xdaP\xc6N9@\xd7\xdf\xdee\xe9h\xcbNd\xdb*\xe6\x80:\x07\xb5w\xc2\x81\x05>\x1c\xba`\x82\xebY5\x1a1\xc1\xfa\x92\xc0\xb5\x1e\\z\xb2\x13\x13\xc5M\x8c\xd7\xce\xcc\xd2\xe2e\xebzYa1q\xfcT1!\xdak\xda\x14\x086\xa1PoI{]\xdc\x99\x94\xc3\xe6\x81zz\x17?~\xbd\xd2\xd8jt\x88@g\x90]e\xdd\xafj4\xce\x88)N\\^\xdd\xb1\x9fd\xcb;:q\x99,\x0f\xb5\x17\xa7\xe8\x8d\x94\xde\xe6%!\xe4\xbd=\xc1t-P"F\xc6\xc1\xde\x92\x99\x1d\xeb\x00\xdb\xab\x82\xd22[\'\xe7\'9\x8a9\xfa\xa90\x864\x0b\xc6\x8b\x9d\xad\xb4S.N\xa7\x06\xfd\xbc\x0e\x9dN\x1e\x13\x86\x87\x8eb\x96B\x83}\x1d\xdb\x8c\xd8\'p\xbf\x12Q\xa4b\xcdj(\x82\x95=P0\x16aX\xba\x01\xc7\x80\xf2}\x8d\xde(3;\xdf\x1au\x85~\xc0c\xc8e\x87\x9e\x87"\x0c\x81\x19d\xa1\xe2\x15\x99\x05\xd4\xbf\xc8\xe1Q`\xdc\xc4\'\xb67m\x82\xf9\xa1=\x14\x16H\xc9n\xccJt\xe5\xcd\xf1{\x9b\x96gG\xe1\xc7\xba] \x1a?\x0c\x80\xe3\x88\x00V\'\x01\xd0\xb0\xaf\x19\x99\x14\xe3\xe8\xe4\xcb\'\x8dsa9\xd1\xd9\x17{hNp\xea\x04\xeb\x86t\x91\xc8\xe3\xc3\x9f\xb1_ooa\x84h\xfeu^\xc8\xa1\xeb\xd1\xa1T\x8eO\x1c[LWvS\x88\x0e\xdd\xad\xffdf\xcaj\xd4\xcafw\x1a\xee\x8bu4\xe2\x85\x88\xafI\xaa\xe1|\xbaBq#\x8eH\xd9l|\x12\xdaC\x0f\xa3\xbcc`\xe28\x04\xdaT\xb3\xef\x94Z"\xd1\x8e\x02b-\xecCD\xd6b\xf9k\xde\xaed+\xd6\x0b\xabX\x85B\xa3Z\x18[m\xbb\xe4e\xb9\xca\xf0|sXs\x1ey\x0c\xf1\xe9\x9a\xdd;\x95D\xd3\x1e>UF\x95\x9a\xb9#\xdfA\xd4\x18\xb6\x7f{\x01\x93CP\x9a(\xcc+u\xe1=5\x14\xdf,\x83\xab\r\xcd~o\xd3b\xb1\xf6\xa8D\xb9\x84\xac\x9c\x99j\xa4\xb2?\x18z\xde\xe2b\x80bm\x97\x88\xb6`\x10!\xf5\xafX\xdb\xc7&\x03\xfa\x0b\xa8\x166\x801h\xba\xa2?\xfb\xe3\xf6\x99\x9c\xef\xe6>\xcc\xd8\xed\x8e\xf4\xac\x1d\xbe\x92\x9d\xaa\xb5\xf9w\xaaM\xa8$\xd7u\x7f\xef*\x05.lOa\xb5mB\xf4?|\x17&J\x04\x7f\x05\x06$\x88g1@\x16\x0cd\x82}5\xfb{\xe1\xf4\xbc\xaabd\x04\x04E<\x8ct\xb1\xef\xc9\xe9\xa6-w\xdb\xcfZ\xc8i\xb3x\xde\xdb\xe3F\x99\xa2\x9e\xd2M\xbe\xc4\xab!\xf1h\xe3\xab\x93\xe65\xcb\x14\xf5\xbav\x15\x1cW4\\S\x1a"U\x14\x0bFf\xd9l\n\xdb`\x05\xb7]\xb8\xfe
n\xe9\xbb\xa5\xbc\x8d\xdbR:\xd4M\x8e\x0b\x81N*\x8d$\xb8\xb99\x8a*o\xfc\x18\xb3=\xb3\xbe|\xd6\x98_\x90,\xbf\xb4\xfd\x16\x91\xe1M3\xb4N\xc4\x13\x93\xa6\xf7\xcc\x16\xa3wLS\x9e\xcb\xc7\x83G\xeb\xa2\xc5gg\xb0-\xd0\xe4(\xdb\xdb\xa6\xc2h/I\xa2\x06\xb3\xcf\xefJ\xc6\xbaq[<Jt\xc6\x8bC\xf1\'\xc8&\xd4\xc90\xf2\xc2\xc2#\xcc\xfa#\x88\xc5\xd1\x08\xc5D\x85j:\x19}\x84\xda0U\x95\x8d\x9d\xbcm\xbfA\xf3\x8c@\x0eKo/W\x97\xe5\xebp\x19\x96\x97*\xd6\xd5\xdd\xc3\x07L*\x15h\xfb\xec\xea\xcc\x98\x1dI6\xcc\xd4lOH7\x04\xe3\xf4rv\x02\xb8CR\x9eS\xb3\xd7\xd9+\xf2Yq\x1a2mc\x1d\xff\x13\xb8K\x9ek"gT\xaf\x1d\xbc\xd9\x85\x94d\xccN\xed);\xb0\x17\x81\xca\xa1\xb7\xf4}\xe8\xd5\xc5o\xb0\xe2\x11^\xfc\x118*\xcc\x9ff\x1e\xcbS\x82V\x10C_\xd2\x01T%\x88\x8e"\x03\x1b\xeb\x88h\x18\xe3\xe6\xd0lY/\xa3\xdc\xfb\x03;\xe6>\xfe\xab\x98U6\xcd\xa39\x90\x8d\x93\xb9\xad\x99K?\xe0\x8a73\xab\xaa\xadpS\x8f\xcd[\xb2R(\n\x08\xbb\xc7\xe0\xee\xd8\xfd\x1d\xfd6\xd9\x0e\x0b=h\xe8U\x8d\r\xfe\xeb\xb6\x00\xbc\xd2\xf6\xacV6\xdd\r\x1c\x85\xcc~\xb3\xec\xbe\xac\xecN+\x81\x8c\xcc\xef\xb5\x828\xd8\'\x05\xed&\xd3\xf2\xfci\x0e\x84\xb8\xa1\x0eQ\xa3\xda\xcbhBno\x97\x90\xe3\x1a8\x83\xd8}?\xd5\x8f1i\xd6\xad\'t!\x9aJ\x80\xd8L\xd7ob\x0e+3L\xd9\xdb\xd0L\x1fZ\x82\xd9\xc6Rb\xe7\xf4\xa6l\x03\x9e&?:\xac4\xb9\x08\xe6\xf7\x83\x19\xbcNL\x17*\x913\xed\x8d\xcc%\x97\xbd\xba\xec\xe9\x85\x93#\xd9Mf\xdb\xe2\xf9\x96\xf1\x0b\xdd\xee\xe2\x0e\xe1\x98\xb77\xed~P\xd1\x88\xdb7\x81\xd4\xb1\xff\x89o\x85`h\xd3\xc5\x93f7\xf9Oj.\xd3u\xef\xed\x12]\x9c\xb6\x1b\x1cX\xa4K\xe4\xa8\x1f-6J\x946\xfaqW\x8d\xe4$\xec1V+\x8dX\xb3\xb8\xcb)\xcc\xef=\xa79\x1f\n\xca\x01X\xd5\x02\x8d(\xbbW\x10/-\xeb%\x16p\xb6\xe7n\xc6\xa8\xeb@\xe4\x93\xcd{\xa5\x05#\x9d\xee#\x19D\x03;\xc8\xd8\xe5\x95\xa5\xbe\xe4p\x12U\x9d{\xfd\xd1\x1e9X\t\xfeF\xa5\xf9\xe6\xd8\x95\x7f\x16\\\x8d\x84\xe6\x94>\xd1\xa35\x94\xc4\x074\xd3\xbc\xa9\x0e\xf6\xe6\xbf\xdf\x1e\xa7\x14,\xf6/{{\t\x9b\x86\xfe\xa7\xad-So,\x1c(F\xc3@\xf9 
\xde+\x11\xaa\xfa\xff\xb6\xbe\xe5/\xdd0SQ*\xaem\xa2\xf1\xc9\xf7\x90\xc0hV\x15\xa8\xb6\xeb\xba\x05q0S\xdc\xeb\xb27\xaa\xea\x1b"T\x99J\xc6\xb0\x10\xdc\x0clU\xfc\xe8\xad\x93A\x17\x13\xcdd\xb2fc])\xa7l\xad\x17b\x05\xf5\xccN\xa9\xdb\xd0\t\xc1\xc6\xa1\x04\x16\xb6>\xfc\xfc\xfe\\\xe6\xa4\xb6\'\xf4\xca\xad]v\xe7\xe3\x9e\xadri\x8fZ\xd9)m\x19\x9b\xcb\x16\x1a\'\t\xa4\xa9N\x9cJ\x962\xb4r$#1E\xd8\xbe\x84_\x9d\xe0\xbaP\xfc\x94\x99\xcd-P\xf2n\x83#|\xd7)\xbc\x85P%\xbf\xfdI\t1X\xe8\xf5\x80\x0b\xc54\x91{\xf2\xfe0\xd6\x84\xec\xca\xf5j$\xa4\xe5\xfc\xad\xc5u[7\x9e\x13\xec\x16\xc5,\xe1\xab\xea\xec\xd1\x13;pN\x05|<\xac3\x1c\xff\xa6e[Da\xfc\xe5\x05#}\t]\x96\xf4\x13\t\xd4sL\xb5\xc7>\xa4\x13\x05\x84X@\x82\xe0\xf8\x85\xbd\x14=\x16\r\xe2\xf3\xab\xac\xbd\x0c#\x8b|<av\xc1#>#\xc7+\xae\x95na\x19\x83\x90\xe3\xd6\xdf5\x1f\xc2\xd7\x0f\xd4\x87\xa2\x05\xd9\xabD\'\xb1\xdb\\\xcb\x8c!\xba\x88&\xb7X\x99@\x079\x8d\xa9\xb2f\x0e\x97f\x04\x97\n\xb1\xce\xf8\x94\x87@\x99\xd1yM=\x08\xd2\x191\xe2\xb5\xc9\xb6\xcd\x06\xf8\xfe?\x03\x99k\xd2\x0b\xf2G\x1c\xc1\x91Y6\xc6\xd9\xf5\x18:\xdb\xab\xc3\x80jDL\x03\xa9r\xd3\xc8~4vi\x08q\xc6\xf6\\=m\xb3\xe2l\x0e\xfe\xc5\x19\x81\x90n\x8f\x81\xf2$\x14gpP\xfd"\xd1\x85O\xe6ih\x89\xf1\xd1\xc5\x99\xdd\x9b2\x95\xbc}\x17\xb7\xe7s@\xa8@\xc6\xc7\xd8NR\xc9Y\xb4\x9c\xaeF*\\\xac3\xe0|\xe1\xae\xf5\xe5\xab\xc3\x13\x10f\x92\x05Q\x1a\x06m\xa1\xba\x7fj\x93\xee0\xb6\x952\x08\x85\xf2}\x15O(\x8a\xd6|\xb1\x032\xfcC-\x99b\xc5\xbc\xb8#b2$\x01J\xe0\x1d;\xcdl\xf2\x1b^\x1f\xd9<\xe5\x03\xd2\xf1or\xeb\x8d\xeeR\xa0\xb9\xdd\xbbj\xcf\xcb$\x9eE\x98\xabvW\x9b\xf4\xd0\xf2\x0f\x86\xed\x8a1\xaci\xee\xb1\xbe\xb7\xe5\xc2\xecu\xda?\xe6\x0e?\xdf\xc1\x08\xceX\x96\xe8X\x88\x84\xe8\xc60O\x84Z\x16eU1\xc1~L\xbc\xbeV\x02*\xd2\xc90\xbd\xc3;;\xa3M2uzE\x98 \xba=\xeb\xf4\xcd\xdb\x7fv\x9a\xb4\xc3\x15\xf6\xb3\rk\xb7\x97\x9f\xac\np\x83k\xa6\x901\xa8\xaed\x9f\xa8b\x1f\xc0\x8bk\xc22\xcbg\xfd\xb6R\x84\xa8\x12\xe5\x08\x9f7nG\xb7I1\xa5\xc8S\xd5jh\xd8\xa8\x8bc 
N\\FT\xf1\x87\x89\xb0\xcc9\x13\xc5z\xa5\x02\x0c\xe4\xee\xce\xa8><\xb7\x95\xd9B\xa6\x06\x89\xf2\xfb^\xf17\xeeN1\xf3\x99\xc7\x8f:N\xbb\'\xd3\xb7\xef4\xf6\r\x94"\xac\x8f++\x0f\x96\x0c\x17\xc9P\xfd\xef\xe47\x17\xe6Pn\x02k\x9c?Z\xab\xc4Q\xc2\xa7!\xe0\x02 \xdb\xb2-\x9e\xd2M\xb9A\xb3l\xa7\xc9\x84z\xd42\xd0MvKU\xa0R@rT\xec\xa0\xdf\xad\xbf\xfe\xc3\xcei\xafjC\x8bzX\r\x8d\xbe\xbe+\xdeC\x11\xa7]\xfe"\x13\x01\xf3\x96O\x80\xb2\x94S@\xfc\x9do_/\x1e\x9eo\xcb.\xe2\xf9q\xd4;(\xfe\xb3MPuo\xd2\t\r\xbc6\x1c\xb26\x1b\xd9\xe4_\xae\xdeR\xba\x11\xbeo\xac\x1a\x00@j_i\xb6?*\xc1Qey+\xaf\x0e\xaeT\xa3\xcap-\x989\xea\x15-\xfad\x02\xefZ\x02\x98\x1b\xa6%5\x94\xcb\xa6Z\xd4`s\xebx\xff\x03uEO6\xa2I\xb3\xf3\x8c"q\x94zPU\xe8\x81/\xdc\xc9\xc55\\G)\x87\x82\t\x03\xb7!3;_g\x86d\xf4\x18\xa7[7\xff\x90\x80\x97? iKf\x95j\xda\xc34#"D\xf9\xe2\t\x8e\x94r\x162\xb8\xb1W`\xb03c\x84\xf3.62\x01\x9b\x1e\xecs\xe2\xd3\xb9\x85\x15u\x89\x02rv\x8e\xa3\x0f\x01|\xa6\xd4\xb5\xed\xe8Hp\xca\xbf~\n\xd3\xb3c\x8f@\x1f/BP\x93Y\xa3<\xa2\xf1c\xa8\xe8\x87hZ\xf1J\xf1B\xc8\x11\x8e7F\xd7\xb7\x81CU?}9\xbc7\xaeW\xbe\xf8mX3\xe4\x08\xbe\\9\xc2f|\x14J\xaa\x027I$\xf9\x17\xd6\x97\xbe\xe1F\xbb\xef\x16\xe81\xc4)\xffc\x849nF\x80\n\x8a\xc9\x80L\x9d[\x82A\xdc\x8a\xbamA&L\x07\x9f\x9d~\xc5}\x86\x9b;\xf2\x04X\xbd\xa2\xbf\x8c\xaf\x18k\xf4\xb2\xaf\xbd\xbd\x0bAL\xb4A\xba\xf6\xee\xd7\xe7\x1f\x1e[`\xcd\xf2N\xf3_\xb5\xf7\xc2\xd6\xb0V\t\xa7\x85\xf08\x0b\x9e\x18\x94g\x13)\xafai\xc9H\xb8\xf7\xeazk\'\xa0\xce8Yfm\x19\xf4GJ\xd7\xb0\xd2\xe0\x97m\xce\xde\xbe\xb2%4\xf9\x93\x93\xcf\xe8\x01\xcb\x9e-Tj\xfe\x18\x132\xef\x16\xc8C\x01\xe2\x11\xbd\xc2/o[t\xe0\x1bRX\xf6\x80\xc9\x13\xbe\x91\x07\x88\x14[\xb7C\x93S\xa1)E&&\x0e\x12R\xb1\xfd\xa85\x04c`75(\n05\x14e\xcd\xb3\xfb\x8d\xfc\x07\xdf\x1c\xaf \xdd\xcaoV\x9aM\x06 \xde\'\x7f\xd0\x9fR\x0b\xad$#*\xb2\xa7` 
\x96\xe9\xcdX\x9b=\xb3\xc8\x9e\xe5\x7f&\x82\xc9\xde\x1a\x8e\xf9\xbc\x85\xa8\x1d\xa6\xdau\xcd%8\xc2\xe2"\xa3\xa5\xbe\x1a\xb5\xcc6\xe0\xd24\xfd\xb7\xa9\xe7[s\x8f\x06\x9c\xe7\xd9\x15y\x1a^\x8c}\xb0=;\xc5\x1a]\x01JC\xba\xfd\xea\xeczu)\xc2\xa82\x1b\xc3\xaf\xf4\xeb\xe3m\xf3i\xb42\x03NL\xf2\n\x1e\x19#\xaf;Q1ysp\x02\xbaE\x98\x89\xfa\xe8\x91\x99y"c\xa26\xd8\x98"\xbb\xd1>y\\Iu\x1b2\xce\xe13.R?\xb7\xad\xc0<y!\xfc9\xad\x0e\xbeU\x87\x0f\xb8W\x87\xc7\xf7\xedC|x\x94\xae\xac*\xfde\x07\x0b\xc0)L\xa6!f\xeb \x02\xc1\xc2\x19\xde\x97_2,;8w\xa4\x94uf\xd0\xd9\x15\x9e\xd9I \x91\x06\xc3_\x1dr\xb0\x86\xb3f\tga~;ex\x84Cq\x02Yb.\xc2\xa6\xf5\xe0\xbf\xfd\xf4\xfd!`&\xd77\x11>\x85\xd9K\x94\x10\xc2o4\xe7\x95j\x8fd\x17d\xeb\x954\\v\xaa=JF<\x1b\x8c(O\xdd\x1b;A\xbc\x90\xa8\xca\x05!l\xc3\xd0\x12\x13\xf2\xc0~\xa1\xd5\x05.\xad\x85\xa0\x12\xa3\xe3\xe6\xc8|\x00\xa7\x0b\x97\x8a\xe8\x9cR+8\xb7\x92\x03\xc0U\xc0(\xdb\xf1b\xf7\xf1\xf2C\xfdNmV\xe3\xc2\xb5\x1e\x1eI\x0f\xce\xfd\xd0\xb1\x05\x898\x00E\xea\x8a\xfb\xe7g\xdaR\x02#\xbb\x9f_\xc2(\x15\'g\xef\xaa\x91`\x82U\xc2w\xed_\xd89*\xd0\x94\x146\x95\xdf\x08\x0e\xba\x15m\x84\x90\x1b\x9eTla\xcf\x02[v\x8f\xcb\x06\xdeK\xf6|lyVn\xd9\xdb\x115e\x05\x18\x80\x05\xbb\xf0\x9c\xea\x01%o\xe6\x910\xb2\xfd\xed7\xd8\xf1[\x96\x1fz\xd4\xc9[\x19\xae\xe0\xfe\xa1\xfe\xee\xe7p\xec\xce\xec\x07B\t=\x81^\x86\x94\x89Hs\xf8\xb2\x92\x9a\x17x[\xf1\x13\xe5\xca\xf5\xad\x17\xd5H\xdb\x97]N\x17\x0b\xf7\xca6\xaf\x01\x83}h\xf4 O4wc\xe6l\x89\xed\x81b\xd6\x16\x9f\r\xe1a\xdb\'sx\xb2k\xb4\xcd}\xb4|\xc52.\xeb\xf0T\xda\x8d1\x12\x0c\x18\xa1Y\x10)\xa5 \x97"\xa1\xc9*m\x13\x9c\xccz\x83\xb3J\xbatY\x8dT\x84X\xe8a\x8fq\x7f\r\x8c\x83\xf8\xcd\xad\x87WQ\xa2)6\x14\xa8\xb3:\xe6\xfe\x9e\xc3{B\x04\n\\\xf7:\xfd;\xf6m\xe7\xbc\x1a\x89\xfbz\xedip\x1b\x08a\xf2\xa9\xff,6s\xaa\x02\xf5\xe9op\x03Qw\xeb\x9a\xb1\xf7\x8ai\xe2wv\xfc)\x1c\x96\xd8\xc9o\x01b\xd6\xf5\xb5\xab\xb8t\xe1\xc3+e\xe6\xa5\x0cS\xa48\x94e"/\xb3\xdbQ\xaa[|\x99\x0b\x7f\x7fx\x9f\x1a\x1d\xe8Ii{\xe9-\x90\xd9 
\xe8\x84\x05Kf\xb2\xc2\x1d\xfa\x98\x08K~]C\xa5\xa3\xe8\xe5O\xb3\x1c\x94\x1eC\xdd\x85\xd4@\xee\xe2\xb1\x1d:\r\x10B4\x85\xc5\xc4\x1d)\x02\xccv\trY\x1d\'\xb2\x8a\xe4\xb4\xe6\x07 \xe5\x11g)\xe6l\xe5\xa9\xcc\x8fc\x06KHV{\xaf\x9c\xc9\xd9\x19\xea\xd3?\xd5h\x90\x12\xc3U\xb7\x15\xcb\x0e\xe7=\x90\xee\x18\xde\x16=\xfe\x11\xa6\xdb\x97`c\xb2\xc6\xd4\x98o$\x83\xbaS>Mf\xea\'\x0b\x1a\xa3\xf0\xfb\xdb\xcd\x13\xf3>\x91J\xeb\x8c\x02s\x0b\x97jE\xe5-I\xf8"\xa51\xe9PoR#\xabO\x8b\xce\x08tG\xa3\xc5Z\xb3l\x9c\x05Q\x03\x0c#\xc7"\xa1Ky\x86\x1at\x1b\xc2\xaf\rX\xbcOr\xb1-\xf1\x9f\x97\xf5\xa3m;\xc0\x14\x98\xc7\x95\x85z2\x02\xc1f`\xa0\x90\x01(\xf4\xd9)\x8di\x13\x8d\x02a\xa5\xe7\xbe\x02\x17\xee\x8f*w\x14-*~\xbd@\xb8\x86&D\x93\x7f8\xb2\xd3\xed\x14\xffv\xfe\xad]\xfaNM1]>\xa6g\x17\x04Twh\x01\xc4\x00\xaa\x06\xd3\x8a\xd8\x9d\x99U\x97\xfd\xc3J\x19Z\x81\x11$\xd3\xa7\x86\xd8\x1f\x9f\x9b\xef\xe8d\xfc\x98b\x14\x9f\xc0cI_|5\xab\x1a\xc8\xa4\x820\x16\xafK\x7fs0\xd4G8\x83x\x86~\x075B\xa8\xe3^\xa4N\xdbLW8\x0e\x8b\x99\xc2\'\xbb\x1dC\x1b \xcdW\x18UzNlk{\x00\x80\x1d\x03\xf1\xfbfJ\x88g\xa7\xe9\xe9\xf6\x0c\xe6\x10\x94\x9f\xd0\xa8XtJ5l\x18)\xaa&\x8d\xe8\xb1\xac\xc9B\x07\x8d\ti99\xb7\x03\x8b\xca\xbc\n\xad\xd6\x9c\xaa\x1a\xb1\xe9\x92\xaeu|\x19\xb4\xb2\x0b\xf3\x0b\xdc\'\x85\xdeY\xe0\xb8\xfa\xe6\x99 B^j\xacmvg\xf7\x9d\xdd\x8a\x0e\x05\xbc&E\xe6\xe6\xa0BU\xb7?~\xd8F\x84\xfc\xc92x\xca\xcd\xa4e^\x84\x94RTh\xb2{*y\xd7\xaa\xa2\x17a\xfeAP\xea\x0c\xf6\xd4\xc5\x11\x80\x00[\xd7\xf4[\xeb\xb6e6\xf0\xe5\xbb\x00\x93\x82<\x83\x0eS\x19\xe9Q#\x8e\xf8\x83/\x8c\xbf\xcd<b\xf2\x13\xa9j)\x0e!g[\xe0\x08\xd46[\x0b\x9d\x1b\x1ccFH[X\x01\xa3z\xe2{\xca\xd9\xd9\xf8\xc4I>\xc2\xc7/,q59a\xce\xa5l!\xbc\x9c\x1c\x1d_S\x08\xe6l\x1fm\xf2\xd1\x13\x9dF\xd1fHEif^\x08\xa9\xf1\xf1\x8c\x99\xaaa^\xc8\xd0\x84G\xbd+\xd8O 
^\x9c#\xe7\x92k\xb7\xf5\xe5]5\xe8\xffUfq\xdatn\xd3\x82\x8b\xa1\xd3\xa6l\xb7@\xf2\xc7\xe4\xc0\x90,\xff\x84#[\x937\x85\x8e)V/\xa8\xc7\xd3N\xf7\xe4\xe5\xf7\xf2\x1c\x15v\xf4\\\xd8C\xd1l\xd0)\xc3\x95\xe3\xd4\xd7`\x91U\x0f\x0c\x84\x80`c\xf9<\x83\x07\x05+^)(\x1b\xd3k\x89Q\x91\xfeO\x08Af\xb4O\xdf\x98\x81n\x10\xd24\xd9"\xbe|w\xcbN\x01_I\xf6\xa0-\x0ev\x1e\x98\xcdc\r\x91d\x9d\x1cU\xf0\x12\xa2\x81\x87W\x19\xc6\xdf\x90\xd7`\x05\x8bu\x1a\x08\xe4$\xdf_W\xa3\x01\xf5\xb6>k\x16~\x10R\xa3\x1d\xa8+M_\\Y\xfa\xfa\x0c\t\x17+\xa6\xa5BF\xa6VSr{\xa2S\x91\x14\xabr_\x84\xb7\x8c\x04\x949\x94@\xc8\x02\x83\xe3\xb2\x16-\xbe\x0cZ\x14\x03i\x15N\xa6\x90\xc5\x91=\x8b\xe5\xbc\x02*\x1c\xf4H|\xab\x16\x84m\x1b\xda\x8aC\x90\xa1\x9f)\xdb\xf8\xb0\xb0\xa5T\xb1\xde[\xc5/>\xff \x840\xb1\xf3\xd1\xd7W\x95\xc0\xf8\xff\xec\xdb#\xd1\xd6\xa8*\xd0\xba\x07\xb6\xb0<\x04\xa4p\x00\xe4\x00\r\x98\x17\x1c\x9f\x90\xdf\x03\xab\x03\x92\xaf\xce\xaa\xe2\x07\xe7v\xff\xfa\xf2\x99\xb2^\x10 ;(\x8b\x91\xb9\xd9\xd8#\x90G\'\xa3\xc3:)S(\xd0)\xa2\x92\x02\xf1\xb5I\x19F\x02\xed\x18>\x01\xab&\xe4\x8b\xf5\x10p\x8b\xa5\xa0$\xbeB\x06V\xc2\xc5\x8de/y\xb8\xbc3\xecZ\xf0 
6\r\xccD\xea\x13\x90\xee\xdc\xff\x9d\t\x86\xe8i\x89Y2\x9a\xb8\xa2\xf4\x9e\xf7\x15\xd2\x93\x17\x7fwxn\x1f\xc0N\xb0\xc4<\x16\xfbz\xcb)\x0e@\xc8\xff\xfe\x02\xef\xa2\x0b\x9f+\xe3T\x94\xfd\xd4\x94=\xf5\xe0\xb0\x87Ni\x1b\x12\xfdU\xd1}/\x04\x9a\xc5\x8aY\\\x8etbe\xc8\xb0\xfe[\x10$\x1d\xbf\x1e\xca[\x94g\xcd.\xedB\xd4\xce\xeb\x066o\xe7hR*\xa2\xda\x8d\xdd\x13\xd3r9\x7fm\xb6\x98\x8b%\xc6C\x04\xf0\xa8\xf3\x188\x06O\xe5\xd0\x92k%IP\x89\x1c\xb1\xf1\x92\x95\xbd\xec&\xaa\xba\x80\xaf[\x95\xafy\x99E\x8b\xbb\xb8\xae{k\x0fa\xa2I\xd9\xa9\x0f`y\x0b\xa5\x1d,S\xc4J\xaap\xa90\x99\xa0\x13O\x87D\\\xccM!\x07\x1e\x0f\x9c\xe6\xf3\xdc\xc1\n?\xd5\xdc\x92-h\xa0\x90\x8d\x16]\xc8t\xf3\xa4\xed)\x13\x1b\x18\x03\x08\xa5\x105u\xc9\x0f\x12%p\xf5\xca\xa7\nf\xc5(\xf5\t\xde\xb5\x1c\xa9\xcf\x06\x0bNz\x16)H{\xc91\xf8\x04aV\x83F\x802}`\xa7\xc2\x06o?\x16\xd0Y\xd8\x8dt6\x8f\xf2\xed3f~\xc5\x06\n\x14\x98\x83P\xd3\xf8\xd1\xd6\xfd4+\xc5\xe2N\xad\xa1\xbad\x8a\xe0\xf8\xa0y\x8b\xd6\xa1\xb7\x90\x9a(\xba\xd8D\x1c\x9eF\x97\x06BQ\xa1b@\x97\x95\xd5H\x9ev\xd0\xaa/\xe3\xb5\xbf\xa0#\xd2\xe0\xb0\xe0\x0b+\x99]\xc1\x8f\xb5\xab\xd2a\xf4J\xaa\xc8:-\xd8%6\xd9\xf3\nb5\x0b\x18\xbet\xfa\x9d\x9dg2\xf1"1,\xea\x84\xc3\x1a\xff`:&\xef`\xfe\xa2\x1a\x91{I\n\xef\xaa\x91\xbeL\x8dV[\xb8\xb6\x08o^\xab*`\x9c\xeem;.\x84\x0b\x07;\xd4\xad\x9e\x1d\x98K\xa0\x1dHy\xe7\x11]\xa1:\xdb\x8a\xcc\x13\x89:?\xcc\xc3f\x1a\x9b\xcc\xdf\xaeFU1\x17\xb1\xa6\r\xb2\xb5\x13\'\x9c\xd9/:;\x18c\xca,\x93\xa5\xeb\xa0\xba\x7fa\xd5\xaa\xc9-\xa5\xcea\xd5\x9c\xb8\xbd\x01:\xa9\xa8\x03\xc3a\xcd\xd1\x03{\\V\xfek\xdb\x1bbt\xd9\xf4\xc2\xcc\x12\xa6\x9b6 
\x1d\x90\xaa\x1c[<\x10\xf5s\xe8\xd3\xef:\xe4\x06\x8dl\x1d\x993x\xd4\xf6\xbd0\xbd\xc26\xbff\xb8\x8fY\x02%\xc4\x8a\x06\xee\x1a\xf3\\\x88r\xd5\x10hk\x87\xa2\xb4j\x8dQPM\x93-h\xd0\xb4\xa3a\xc9\xcb\x87\xcfu\xa2\xd3\xef?\x0e\xe5\xa1\x8a\xa1\x0cU\\\x9a\xdf\xa8\xff\xc6\xe8\x08\x17\x00\xd2\xfeur\xa2,\x0cM\xf6\x83\xecs/\xb8\xb0\xcbIX\xbak\xeb\xdf\x16\x0f6\x15G\x92W\xb6%\x93\xa6\x95a\x10E\\\x15\xb1\x1b\xe7e\xe0\x89\xcb\xfd-mc\xa2\xe5p~a~\x1e\xd6\x8aP\xc63\xb3$4\xf1\xde"\xae\xee"\x86\xfd^\x8d&*\x91mLj\x1aJ\x85\x1c\x0f\x93\xe9\xcd\xa8NL\xbe\xe7\x84\\\x06\x93\x86\xae\x11q\xb6\x95\xb3\xf0*\x15R \xa4N\xdf\xcc\xdb\x1e\x90\x91\x93W\x9a\x0e\x86\xa3m\x98\xba\xb1\x0b"\xbb\xae\xce\xef\t\xf1-8{\xb0\x7f\x88\x8e\xa6F.\xb2\xeb\xa0K\x9e\xbe\xc5\xe9{+\x94\x949hj\xf1\x1eV\xa8\x08w\xbe\xd8\xe7\xb1\x84\x08\x9d8\x06\xd7T\xe6K\x84\xffpn1N\x9d\x8bVtd\x00\x1ct\x08\x1a\xc85M\xca\x8d\xa6\xd2\xe4*s\xb9\xad\x85\xb7c\xd6={q\xbd\xd9\xa1\xf5\x14qK|\xcf\\\x00\xddV\xf7\xdd\xec\xae\x8b9\xce\x06\xb4\xc3dI\xacx\x84\x9f]\x7fm\xeb\xaf7\xd3\xc8\x8a\\\xf1\xd4\x9c\xb6\x13{\x82]0\xf9\xdc>\xb6fL\xb4\x92\x08\x04\x80(\xbbn.\xccF\xce\x023cghCcA\x1c\x96\xa3\x99Q^$=!\xd5q,\xccJ\x10\\f\x105\xf1\xdc\xbc\x1d\xa79;3\xed\xdc\xd9\xd4\x8e\x1c\x9b\x98\xdd\xb3k\x13J\xb5U1Fo\xd5\xc5\xff\xfd\x96\x87\xb6:\xd9\xae\x9d\xff\xb2\x17\xd7\x87H_\xff\xe4\x16g+\xcc\xac\xc2\x80\xf8c\xfc\x0f\xe8\xcah\xf9\xe3t\x18\x92\x137\x1f\n\xb0r\x8f\xdf\xd8\xbb\xd3\xbf`a\xcbO\xb6\xbb\x1d\xe7\x18\xd4f\xd5X\x03k\xed\xd8F\xd9\x8e-U\xdd\x7f\x06LQ\xc0\x1c\x95\xec\xe8\xedE 
mQf,1[\xc4\xb1\x7f\xdbW\x07\x0b\xb2\xbd\xe2%D\t\xeb\tO\x15\x80\rl\x05\x9c\xaa\xe2X\x89\x07ER\x88\xf9;\x83\x05:\x01\x10\xe4\xa8\xb2\xa5A\xd6\x10\xb1\x1e\xb1!\xb6:3\xef\xc5|[S\xcc\xdeF\xc8\x87,\x89BbledSz\xa2\x93\x19\xc6\xecx\x94\xd1\xc4\xca\xc9\x8c\xe2Do\xa8w\x1d\xa3\x81\x94\xf5\xc5\x91\xc6x\xbeTU\xdf\xd1\x05\x9d\xee\xd87\xb7\xe5\xa4>\x92`\x83Xg\x9e\xaar\xa9]\x13\n\xcb\xb2z\x85F\x10"]\xc8=\t\xac\xb7\xef\x1e\x9b\xbb`z\x04k\xc1\xa9\xf1\xa9y\x9a\xa02\x07\x19\x1ex3\x90\x16x\x9b\xb0\xce\x17\xa6]\xc35\xa3^d^\x9e\xecQ\xd7Hc\x089Q\xb8^\x87\xb2u\xf3:\x07|"\xec\x12\x80\x04\xea\xce\x04\xf9\xa1\xec2=r\x14\x08\xb4\x8bE}\xe6\x9d\x9fS\xf6S\x83\xe2)y\xaa\xceNH\x10K\x85\xa6\t\t!\xc8\xd2>\xc7\x19\xf2\xb7l\xb7Y{\x89\'6\x1e\x9a\x8f\x8f\xca[xoe\x10%) <\x88\x1bf\xeb\xbbn\xe2\x16\xb6jL\xc9jh\xc6\'\xcc\x81t\xf9\xa3\xc7\xb0\xe2\xbf\x15\x83\xe3x\x04\x0e\x00E\x98\x9a}\xf4\xf3\x0b\x0b\xb9\xb9#\x96&y\x81\x01^w\xec>\x8b\x85p\x96og\re\xe3\x0eQSU\xa5\xba\xa8\x00\xe7\x9b\xc8\r.@67g\x1bR\'_.\xdcx\xa5(5\x08#\xcbl\xd7<\x02<\xa3\xf5\xcdb\x0f\x9d\x9c8\x9a\xcb:?\x9d/O\xa0O\xaf\x8e\xa1\xef\x98m\x1f-\x0b\x96\x81\xa1\xeb\x1e*\xe2n\xe5!\xcb\xed+\xb1\xfchq\xe5\xfe\x0bH\xfe\xb4\xca\\\x07-S\n\r\xc0\xd6\xd6\xc0S"mr#\xc6[\x04Z\t\xc7l\xd3k>a\x07V7\x8d\x0593\xd7C\x8ahr\'\x08\x9b\xe8Umb+Cxc\xc7\xa7\x01g\xc1\x88\xd5\xa7\xb6\xcdT\xe9om\xeb\xbd\x1c\x18u\x038\x15\x9c\x8c 
\xd2\x1b:\xddZ\xc2\x01\x0c}P\xf0\x08a\x1a\xbd\x15\xe9\xd9\xac%\xf24~\x9f\xaa\n\xb3K\xd9\x1e\xca\xf5zG\xd3x\xb8\xc3\xdd\xab\xa6\xabK\xaeE\xfeMwW\xbc*\xb2%\xbd]3\xbaQ\x11\x7fL\xa4\x7f\x1e\x7f0\x07\xbb\xd6\xd7\xe0.\xa1\xc9\xb4-\xb7\x97uOp\x90\x03*e\xdd;\xa8\xa8\xb3\xbc\x86X4Z^[^\xb7,\r\xa7\x96\x1b\x84\xd6\xeb\xce\xa3\xcaJ\xf9\x8c\xfa\xdb\x11\xc9#hK\xe2&aE\xd8\x9c\x03\xc9\x17R\x9d\x9aW\xf6q\x9e\xbc\xb0\xf6\xe8\xbb\x99[c\xccc\xbaW\xfb\xee\x042\xcc\xd9\xb2?x\x8a\xcf\xfaj\xe9\x0bm\x04$\xca=\x9a\na\x05\x0b0\xed[p\x7f\x9a\xd4>\x9c.\x9f\x90\x02\xa0\xf9\xf6\xde_{\x1b\x92\xdfU\x16\xab!G\xd4\xc4\xfd\xe4\xf3J\xa3Up\xcf\xf2_\x07f]\x9c\xc8Z\x0cKz\xb3\xf5E\xd8a^\xd4*i\xc9Az\x0c2$\xc4Rs\x10P 1\xd1\xa9\xe5\xd0\xd5\x188\xcfZ*i\xed\xcf\x8f\xccy\xf2F\xe2g\xd2s\xb38\xec\xa9\xe9P@\x0e\x83\xbdh\xf4\x1e&\xca>\x0bF\x04Ltk0\x07\xfe\xe2\xe3\x07\x81P\xc5l\x0e\x02l\xc7I\x163B|\x83m3y(\xedeR\xcb\xf83\xe8#x$&\xb0\xdf\xf4\xab\xd3\xd1\x97\x88\xef|\xb088\xc5i\x1c\xaf\x1e\xca\x8b\xc9\x9b+\x1b\xf6hu\xbf|\r\xa0o\xb3\xf8\xf5t\xf3\'|\xec\x87\xa78\x8b\xd7\x05\x88\x96v\xff\x83\xa0_\x12(\x92\xdbo-\xd0\t\x03\xe9\x97\x00X\x93\x02\x01\x8d\xdf~\xb4\xb3\xdd\xab\x90j\x01\x0f\x8c\x0f0^\x9aQ\xddl\xd7l\xfd\xd8U\xe8\x827\x8a\xaf\xa0\n\x94<\xa2\xb9g\x95I\x9e\x8e\xb16s\xc6Ha\x84S\xa1\x8f\xf2\x17\x04Q\xee {r\x9c,#\xac\xd1\xc7\x90\xcdN\xef\x99\xafi\xac\xfd7\xaeFe6\xce\xb4"\xcdm\xb3:\x1c\xac\x01\xf9\x97\xdd\xc7\x05\xb8\xca\xdd\x1f\xa7\xf6\xdd\x0c\xaa#\xb3\x19\x8c(\xbc\xfeQ\xf5\xa9\x8f^p\x97\'\xec\x8f\xbb(Ez\x12m\x8c\xc3z\r\xa4\'\x9d<\x8bU\x97D\x05!U\x94\xd9mc\x0b\xdd\xf8\xf0\xdb\x1e/b\xc1\x14\xf3\x94\xdc\xd0\xa6\x83?!S\xa0Q\xed\xb6\xd1\xe1\xa2\xfe\x81\nJ]\x7f\xdf\xbc(\xa1\xcc~\xa1\x1a\x06\x0c^|"4\x13 
\x04\xd0t\xa7\xbf\xd0\xb1\x93\xbd\xb3W\xbe\xc8\x1b\xc7Vd*\x8b\x02\x01\x02\x95xJ\xdd\x8e\xf2\xc9k\xd9\xeaz\x1c\xa7\xfa\xc3\xe1\x9c!\x13\rl\n\xeb\x13\x8dr|\xa4k\xa5+\xfe|\xb5kZ\xe7\xf1c$\xd6\xe5\xd2\x9e-]_?4\xc3\xdd\xa2t\xce\x92Xc\x86\xdb\xe6\xe5<yTY\x0f&\x8d*\xe6/u\xa2\xbf\x12-t;\x82\xd3\x15j3n\x14\xd9\xa6t\xe7\xb6\ner\xf3\xb6\xea\xf2\xc3,]\xdauh\xe0\xb1\x87WQ\x84G\x83{\x93r\x0czp\xdf\x1e\xc8U\xab H\x9a\x11\xa0/g\xe9\x1fF\xabB\x97\x94\xcd!\xce\xce<\xc5\xbd\x10\xb8\xf4Kw\xf4%\xe2\xdb\xf4\xf5\xcc\xd9\x83-{\x1e\xb2%\xfdj\xa5\xf9\x04\xb8\x13\xee\x1e\xc6\x0e\xd3\x9cG\xb7O\x8f\x97\xde\xd8A\x1f\x18\xe3M\x0b\x95\xcbV\xb5\xe1\xd0\x8f\x99\xe1\xb9\xb0\xb8+\x16Axp;\xe9\x97b\xb4\x1d\x10A\xed8\xc0\x8d\xa1\x9f\xb3k\xd7\t\x04\x1e\xaaH>\xfcu\x7f\x15\x9b\xb0\xd2\xb1[\x8dF\x8c3\xf4\xab\xcd\xe5Pg[$Q\xca\x8c)\xe5\xa2\xf6\x98\x96\xa7)\xb4\x16\x0c\xbf\x00\xc9\x04\xd8\xbdP\xfe\x00\xdf\xb9\x9eS(\x17(\x9f\x93?:\xb27h8\xf6\x00\x97\r\xc9T\x07\x9d\x19\xda@\xf1Y\xd8\xf4\xdf\x84\xef\xb8\xd8\xe7\xb5\xed\x14c\x01T\xb1\x1a\xc6\xab\xbf\xbe\xabG\xd4E\xebX\xe63\x05~\x9e4\xc8\x97\xc4\xcc`\x8d@^f\x80\x10\xb4\xad\x8d\x1d\xd96\x9b9\xb0\x0fh\x1b\xd5\x18\xc9\xa0#\xbe9e\xf92\x89^\x996\x94\xacLt\xd5\xd88@\x0e\nNv\x15r\xc2\x0c\xc1\xac\xb5\xcc\xc2\x9d\xad\xab\x03\x03\x9fM4\xe4\xa3\xbc\xb5\xf7\x88\xfa7\xbf\r@\x1b=\x16+?\x10*o*IW:;AL\x00\xb0p\xc0;{\xcc\xc0\x0c\x10@\xe6\x90XF\x8b7\x15?\x8a\xc3\x1c\xe2\xfb\xe3:h\x83\xde\xec\x10W\x1b\xe4\x84/<F\xc96\xe4O\xf7\xd0\xf6\xd7Qu\xbb&HR+\x9cr\x16\xe3\x95\xe2O\xd3\xee\x87\xb5\x9b\xb6v\ru\xc6"\x8c\x90\xf3\n\x04\xc9\xd8\x16\x9d\x12\xc7\x84OR>\x18H+\x03\x8d[\xf4\x1e\xf2\x87\xd1\xea\xd3\x85;\xb6r$\xfd"&\xcb\xdf\xdb\x82P\xae3\\\\\xe3\xc3\xf9p\r=\xd5\xf9\xa4r\x7f\xf2IE\x9a\xe2\xf1\xc1\xe9\x83 
\x0e\xf2y$\xd96\xa6\x19\xd0P2\xfe\x7f4\x03\x18\xd2\xb2\xcc&|5\xea\x81"\xfa\xd7\x1a\x9cIT\x08\xf1\x8e\xc7@xv\x02B\xe3\xd37\xaf~\xdf1\x1b4\x00\xbd\r\x14\x0fH\xbb\xb3\xa6$-v\xa2\xa4\x1a\x96\xa4s\xfb\xe6\rK\x88\x0f\xf7\xdaSf\xafD\xc0\x9e>\xd3\xe9A\xbe\x94\xac\x1e\x1d\xcb\xa1\xf7\xb3\xb7\xccHYD\x08\xa5L`\xdd6\xf1\x98\xf1\xaaB\xab\x88\xac\xadW\x8a=\rE\xdb#\x86\n0\xb9\x8b\x0fn\x9dR\x14\xe7\xe9\xf8\xd8\x88f\xbeP\xa9R#\xb5Wf\xd6\xed\xadE\xdbA\xa7}\xe9\xd8\xbf\x82p\xa0\x9d\xff`?\xc8F\xc6\xd66\xd7\x8b\x9a\xd0\xeb\xc8\xb7\xe5\x18\xf9U\x91\xb2\xdaz\xfc\x95\xfd@\xb0\x8a@\xf5\xb2\x1a\xa9\xfcZ\xac\xdc\x1d~\xc7=\x8b\xf6e\xbe[R?\t \xe1\x00\xb8\xeb\x99\xdd\xd2\x9e\x83@D8\xf0\xddk6\xe9\x81N\x95\x908\x83,7\xe7w\xbc\x99\xbc\xf4~m\x98\xe3\x183TS\t`x\xb3\x1a>E\xfc\x10\x8b\xb9D\xdf\x18\x94%\xc1\xaaRL\x0b\xf9c\xb9\xf6\x99f\xa36wG\xa9\xc7Vl\xc8\xae\x19\x124\xfbm\xdfA\xb6\x8e\xedz\xb9\xcevX\xff\xc2\xbbp\xe3\x85\x82\xf1P\x1d\xea~R3=\x91\x9d\xf46z\xd0\x96\xc5g\x0f\x0em1C\xf6\xde\x9c[\xdb\x7f\x06\x18\x95`\xacN\xcf\x99\x06\x18\xd1\xce\x06\xe7v\x8a\xec\x96|S\x88\\\xaa\x039\xb4\x8b\x98\x14\xe8;\x05b0\x8e\x98\x11\xd7\x0e\xd6\x015\xca\x9e\tqg\xb1\x829\x9ey\x99\xfa\xd6fk#$b\xaf\xba\xc7\xac\xa9\x90\x1fK\xc3[\xf2E7^\xdb=n\x02\xb1\xb1r\xcd>n(z\x10b\xa8\xcb\x01\xf9\xc4X\x8d\xe6\x91\xfc\x0b3F\x94\x04\xdb\xf6\x9b\x9d\xaa\xbe\x7f57c\xcf\xd2\x8b>\xd7\xaa\x0c\xdb\x8bv\xe2\xb9\xdb\xe0t\xb4\xc5\xda\x14q\x1c\xe8k`\xde<{\xc9|\xf4\x115\x82r\x82q\xf6\rHQ\x94\x98\x95\xe1`^\x88I\xb8\xcf\x1b\xfa\x8a\xbc\xda{\x8e\xbc\xb3\x0e\xaf~\xd9\x9e\xb2\xc0\xedu_\xb3\xd6\x96\xf2\xc2\xe9\xd8\xf9d\xf8\xd4\xf4\xd3J)\x05\n\x99,\x92R.jYe\xd5^y\x0fP\x1a02\x1e\x91v\xe9\x90\x8a\xa4\x08T\xd8,\xa67,U8j\xe5\x86\x86|\x9d%.\xc3\x18oT\x12@\x9d4\xc3\xc0!\xe9\x1e\xf5@\x86I\x99\x99\xd3\xde\xcd\x8ci|,\xdbQ[\x0cm\x06V\xcd.qQ\xdb\x82\x80.\xfcB\x19\xa6`\x1d0\xe3\xa6\x94?\xef\x06~1B+H\xd5t\xf1\xf2oC\x04\xd8\x9dXV\xea#:\x16\xe9\xa5K\xebJ\xc2^\x87\xcf-\x97!w\xa9\xb3\x1f\x1e\x08\xaf\x1c(\xd5T\x07\xd8\x1e\xd6\x192\x8fC\x1a\xf1\xd5\xb0\xc5\xa0\x1dG\xf9S\xbbJ]
24&\xf5\x16\xef\xfb\xe2\xf02m`\xca\xca\x90A\xf1\x01\xe9\xffP\xb7+\xdb\xb7\xe0 \xe02\xe0\x1c\xb3\xa3\x97\xd4\x02T\xfa\xdb\xf3\xc5rI\x98z/+\x1c\xeb\x02\x929\xf6A\xe1\r\r\x05i\x00{_\xccS\x1as\xcb\xb6\x8c\xf2R\x94*\xc2\x8e\xe0 D\xd9\xaf\'\n\xc92\x8bE\xa2\xf2\xc3t5\xe2*\x92\x1b\x18\xab\xa8\x14\xdbqv\x98=\xc6\x99\xa9\xbd\xd9\x8cZ\xf1e\x97<\xb1\x1e\xae\x0ffL\xcd9\x12P{t\xdd\xf6\x9b\x9d\x9f\xad\x85\xa1\x1c\x12\xd8T\xaa<\xcb\x1f\x93\xfcu;a\xa8\xb7-N\x14\xc5F\x05i\xb1nM\x8a\x8c\xa6\xb6\xb1\xb8\x18\xddzbNM\xcc\xff\xaa\x1a\t\xa8\x04Q\xa2zn\xf8C;rm\xfd\xda\xe0\x93\xdeO\xab\x9e\xdfA\xfe\x83-\x14\x98\x86\x19\xa0\x91\xc1\xb4\n\x16\xbb\xf6\xdb\xd0\x81\xaa\x0f\xe6,_\xb4\xfe^\xac;\x8eDtb\xdbIZ\x0e\xe7\x94gGO\xa0#\xe9\x91L\xf7\xf9\xfb?\x16:\xf0|b|.\x8c]\xe9\xaf\xca\x93*\xc7\xf0\xc5\xfd\x85\'\xf6!\xa1X\xb2\xe0\x97\x05,\xa7Iu\xa4%d\xa8\xf7\xa2\x16Z+\xf1\xf2\x02\xbb\xcb\x18\xef\x11\xa8A{d\xfb\xd5a$\xa1\x8dH\xc4\x9f\xceW\xa3\xfe\x08\x9aj\xe5\x19V\x97\x8b\xec\xb8\xf3\x98\xe6\xb6\xc2,\xb1\x80\xa0\x18\x1c3\xdco<\x87\xbf\xec\x9e\xb0p\x84\xe5\xb90\x87w-x\x0f\xc5\xb8\xb9\x0e\x92I\x84\x83\x04\xd1\x96\xea\xfc4X,6\x04P\xceM\xdd\xf9n\xc7\x8e8C\xf7\xf7\x96\xc5\x03];\xb1\xbc6i\x1f\xd3\x82\x92\xe69\x13\x00m6]\xedf\x15\x86\xf9\xdd}[7\x12\x12\xd1U\x89\x91\xdb;\x97\x0e\x9b\x8b\xd3+\xaa\xef\x14X\xc25\x14\x1f\xa7\x14O\xe2\'9*\xa1>\xb9\xff\xd2NS\xd9B\xfb\xa2x\xa5W\xa9\xc1Jl\xa1\x99\xe9\x94\x92\xd1\xcaq\xda\x96\x17\xd0"\xd8\x86M\xd7\xb0\'\xe9\xca\xca\x90\xfd\xbf|aQ\x12"\xd7Z\xa0\x8b\xcb\x00+\xb9\xa3\xb5v\xc8\xba\xa7\x1fai;\xde\x1a\x0c\xe8\x18\xd8Ol\xba\x15\xaf\xa0K4v9\xc0\xe0\x972\xbf]\x96N\xcb\xd5`Vq\xe9\xd7a\xe9\x0b\x14\x06\xdb\xf9\xa0\x14\x80\x0f\xbdn\x1f\xc5a!h\x17c\xe9\xb7\xb8\rg\x9d\x9f\x9b\xb3\xbc\xb8\xce\xe2\x1a3\xc3i\xe4\xf6\x85\xa1\x11\xacb-d\xd3L4ok\xad?\xf4gK\x93W\x94\xf0(\xd6\xd6\xfb\x08\x1bO\x9e\xed|\xb1\xcd\x1e\x86e\xf8\x06s\xd7k\xd0\xaa)\xc2\xe6\xcc8\xf8\x8e$\x9b\xfc\xd0n-w\xb2\xae4`g\xe5>\xaaT\xec\xce\xe9\xdf,\x9b\xbd\xba8\xdc{W@1!\x94\x94\xae\x8a\xeb\xc2>\x02\xe3\xe8\x819\x0fE\xacN\x9c\x0cs\
'Ou$\xc8\x81\x08\x80\xba\x89{g\x8a\x0c\xe0\xc4\xdc\xc7c\xac\xca\xf3\xb7\x95\x11\x17\x0c[\x91\x97\xe1,\r\x17Y\xc4M\xa0\x14\xd2TQ{\xf0R\xb7\xc0)\xaat:\x8b\xe1\xc8"kJO\xe1@B\xff\x80w$5\xbbUv\xd3?\xaa\x11\x19\xb9\x86d^\x1f=\x86\x84\x0f\xf5C3[\xcaZ\xf7\xb7\x15\xebd\x18\x17\xcc\xba}x\xbao\xb6\xbbngmM\x1dOT\xba\xf9op\xc3\xa2:\x04\x0c\x10\xf51\x1b\xaf\x9c\xbf\t\x12X\xdb\x1c\xc7w#\x0bwj\xff\x1f\xa2\xa5V\xde\x88\x88,Be\x7fo\xed\xd92\x828\xd4\xd6jT\x04\x87\x81\x0c\xbe\x9c\xf8\xad\x9c\x1a){\xc1.\x00R\x07\x1e\x08\xac\xe8\xdf`Mgd1iAr\xfb}\x96b\xc5{\x0e\xaa\xab7jn\xe9\xfb\t;T\xb5\xfb\xf9JK\xeb\xbf\x83=\xc1\x06\x1b\xecD<\xb0\x96\xfc^\xa5\xae\x82=\x036\xec\x92\xb3\xa3\xc0\xd9\x92\x11T\x8c\x17\xab=\xc0\x86=X\xad\x14\x96q\xb4\xc9\x00\x05\xf2\xef\xc0t\xfa\xad\x8f+\x1f\xcc\xd4\xf5\x96#\xdc\xba\xf3BQL\x1fP\xc7iN\x89\xd7\xefq2\\*\x9c\xa8g\xe2t\xc3\xce\xd9\xa0p\xca\xf3\x9a\xd8\x0e\x11\x16B\xca\xe6\xdfT{l\x9b\xef\xb5\xaf\x9c\xffG\xe0\xf4\xb7\xe5 \r\xe06\x16qA:d\xafT\\IC\xdaN\x1eq\xfaX\xfe\x10F\xb6\x11\x0c\xc1f\xadB}R\x14\xdd\xa7\x85j\xbe\xdb\x96P\xd95\x93\x93\x88\xcd\x7f\x1b\xdb\xa5\x92\xb2\xd6\xc06D{\r\x05\xfc\xd0\xee\xcd<1\xaf\xd4\x10p\x13\xf9\x0b\x8a\xdaN\xc6\xb6Vm\xbc\xad\xd7\x85\xd5Dv\x04\xfb\xe4\xe4\x95<\xb8)Eb)nV\x87\x0b\xf6\xed\x0c;SV;\xb9r\xf0\xd5\xc1\xda\x16,\xb2\x80\xde]\x1d\xfd\xb2\x9bS\x86+o\xc6\xa7\xa7\xde]\xc29\x1d\xc9}\xaa\xd4\xd5\xf1\xc7\x9f\xe6PHo`\x18\xf6\x9fmA]"H\x86\xeew\xff\xff\xf0\x1a\xe6\xdd6\xe8\xc6\x8d\xc4\x07c~\xdc\xd9\xde\xd7\xd4\xe1*!\xc0Gf^-\xbf\xaf4\x9f\x12\xd3\xc4F\xe4\x12}\xbfa\x11\x19\xe3Z\x966\xf6?Y\xe4\xe7\x85\xf8RJ.(\xfd\x858\x92\x95\x88\xcc\xbf\x97*:\xfa\xecw5\xeao\xb6iDH\xcf\x8f?\xe2D,+\x98\xcb\x04\x87\xa5\xc4\x84~\nk\xca\xcfX\x148\xafFs\xab\xa2xy\xcen5\xae1#\xb8\xc6\x0e\x11\x07\xe4vJ`\x1a\xf1\x87\xa8;\x81S\x01\xbd\x0bNCu~\xef\xf9\xf2uRXf\xaf\xc2z\xa3~\x1bSE\xbb\xa5c\xf9\x8d\xe8\x81e\xbaqz\xa1\x9b\x8aa\x83]\xd0\xb2\xbf\xbbW\x8d\x94\xcf\xf0\xbe\x9c\xe4W[\xf4[\xfb\x17v\xacM\x04\xf6\xbb\x02\xc2X\x99\xc9\x80E\n\x12/e\xc0\x99\x9ca{At\t"\xce\x87N\x1d\xf7,]r\
x86O\xf4\rP9\xbf\'{V\x8d\xba<)\xb4\x16\xbd\xf0?\xec\x17\xe9\x12\x12[\x83\xa8\xe0u:\xd4\x13\xc6v\x90#\xfd\x13\x1aT~\xc2\xfe\x02\xba\x11\xfb\xf5\'\x13\xbcd\x87\x07\x1fpj\x99\x99>\xb0m\xac\xb3\xf7\xdf5\xde\x04+\xca^\xe4b\xec\xdd\xca\xef\xdb\xffU\x97\x1a\x192x]\xf4`\xde\x16a\x08\xfb\xd8Q\x99\xde\xf9\x82\xa1$\xe0Jc\xe8\x86\xab\xe7\xec\xa1\xe8\x83`\xff\x91\x192\xdcR\x1a\xd5E\x1c\xf0t\xfa\x01\x0f7\xf9Ny,\t\xd1.\x17o0\xb0\xae\xd8\x1a\xc4\xc3\xfb\x9c\xcaU\x10\xd3\x98\xaan\x9c\xe0\xcf\x8bu\xa8\xa4\x81\x1dM\xfaxv<\xc12\x0e~\xed\x9b\xafFJL\xae\x17`Oe\xc6\xaex\xfe\x0b\xfe\x1cN%\x01u\xce\xdd\xbc\x0c\xe1=\xb8+\xac\x16\xfbt\xc1\xb6\x80\xe32\\t\xff\xf1\xa6\xa0\x9f\x0ed\xee\xe8\t6:\x7f\xf9_5\xaa\x05\xb1\xa9\xa8\xb1{^\xc77e\xd0\x88\xbcW{\x8f\xed2\x84V\xc8\x8dO\xbf\xbf\xb2x\xd6\x83\xa4[\x97\x0f\xae\xea\xce\xf5\x16\x0f91N}\xf1F\x11~c\xff\xbfd,\x8f3\xd8\xe9\xdcE\x90\xdc,\x950:!HC\x95\x8at\xcb@i\x9c\xd3j$4\x04\x0b\xe6\x87\xe4\xba\x9b\xde\'\xa2\xe1\x19\xc2\xdeR\xf9\xbe\x8c^\xdd\xd5\xe7\xb7\xfbo\xaa\xcb\xd1x\xe4\x9b\xdd\x16\xca\xec\xf5\xc2\xf4\xd9\x1dw\x18\xb21M\xf9V\xae?\x9d\xde\x9f\xfav\r<\xb5\x96\x02?\x87\xb6\xa9\xc4M\xb06`\xd3Q\x1a\\\x17\x87\xcaU)\xe2C3t\x0b\xd5H4\xb3\xb4>\x81j\x87$\xc3wJ]Q\xf8fI\x93e"\xc4d\x1d\xa2\x18\x88\xf0\x90\\\x19\x9d\x80\xcf\x1b?\xb6D\x8e-\x0b\x94]\xc9+\x8d\xb5\xfb\xd5<\xaeF\xb3\n<"\x00&q$z>\xb3e\xea3\x16\xa4\x06\xf3\xc8\xa6\xe3\xfe\x8et\xb0{o\xaf\xd4\x04\xe4/\x03=\x97\xae!3g\xd6\xf7:\xbee\x8d\xce\xb7>l\x10\xcfN\x08\xfe`.q\x94\x1d\xbdP\x88D\xd22\x02D\x8e\xf6\x0b\xe0$G\x9f\xbf\xa9\xcc\xe8\x84\x11x\xa1O\xa5{y\xd7^#\x1257\x82;\xf4\x08\xa8B\xbd\xfd~\x17\x11V\x81\xe6a\xb0j\xc8\xaf\xf6\xd5h\xdc\x1d;\xbe\xda\xb9c\xfb\x04\xd2\xccqA8c8\xdd\xbbcx\x91\xf7O\x9e\xea\xdc\xc89\xe2c\xa2\xe4\xf9\x8c\x19AVE\xf1\xaed4\x08\xe9c\x18\x11W\x92\'\xb3\xc3\xdc\xf5\xd5Hs\xa7\x06\xc9\x97\x99<\xd4\\Y\xec\xee\xbe\x19\xe9V\x1c\x85\x0e\xa1k\xa7X\xdcfA\xda\xf9\xf0\xc0\x06\x1cm2\xfe\xa4\xbe\xbe\xa3S,\xc6I\x8fi\r\xdc\x00\xc7\x9bv\x88\xebp\x8eY\xc4\x1dAs\xdd\xa6\x1e}\x84Q~\x84\x00\x96"aZ\x0f\xeb@B\
x99\xd8\xed\xc0|BA\x91\x93\rP\x82\xbe8\xb0\xd5\x9c9,\x12#1\x89\xc1\xc1\xb3\xb0\xbb\xb9\x08\x08\xf1\xb3\x93k\x0f ;[\xae\x8c\x05;2\x8d\x1bB"\xa4\x1f\xa9P\x00\xe77\x0b\xcb\x1b\xa2\xf2\'\xe2\x81\x1831\x9anL\xc7\x92A\x8c$\xeeM|\x08\xcc\x9fv\xab\xb1\xef\'\x96\xdb\x03\x8aM@@\x0c\xe3\xc0\xads@\x83}\xbe\x888\x8bi\'\x1cc6\xcfT\t\xbcr\x90\xf6\xa99\xa0\x17eI\xbdUf3\x10\xb0B\xf4\xba\x1a\x95\xc38\xe1.\xb1M#J\x82\xc4,\x02\xe7!\x08\x9e\xeb\xb3\xbfS\xf2\xd4\xbdp\xb2\xfc\n;\x872\xe5\xeb\xc5g\x04[\xdd\xfe\x0ef\x0fyL`\xae\x15\xf4Rj\xb1S*\x1f\xa6\xd5TYPPlCA>"\xafn\xa7\xb6\xcb\xc4\xdeA\xf4\x861}\xc2\x13x\xf0\xea;\xfdW\'RmP\xd8\xd1\xaa\x0eI\x91\x06\x16\xb9&\xf0\xd2\xbf\xc7\x89g\xadm\x98\xb3\xef\x00\xa6\xb0\xa1<\xf7\x9bJ\x10\x19\xd6\\QZ\x99Zp\xdfF\x87\xcc\x87\xc1\xa7\xc8\x7fj\x1bm\xd8\xe0\xa6\x19\'\x12>2[\xb1\xda\xca\x16\xeb\xb2\xc4X\xe9h\xefD\x11`\xf2P\xa9<\x07\xb4\xe1B_\xec"\xaaBae\xd1\xdc\xa7KgO\xce-3\xa7\xe4\r*\xea\xad\x88t\x14\x88\'\'\xfb\x08g\x12\xa4C6\x9b\xbb\xcd[7\xa7\'gmm\x89\x19\xa6\x12\xf3\n\x8a\x10ZuAPC@I\x11\xab\xbb\xacML}aV\x8bJ>\x87\xbe`$\xad\xd5\xc2\xc2\xd9\xc7\xc9w\'\xb6#\x0c\xce \xfd\x1f\x05\x00\x0f\xe57\x14:J\xb1\ti^\xc8\xf6\xf8f\x87\x9b\xe5-\xdc\xe7\xec\xca\x16m%\xf02\x7fbw\xd5\x9c\xe3u\xdc\xd3/\xd5\xa8\x99\x1a\x06\xc4*\xad\xd8\x84s{rB\x13C^\xd1\xd8\x9fy\x98N\x03\x1cN\xef\x95\xbf\x87/R\x0e\x8a[\x17\x8b-\xd7\x8bK\xed\xea\xc7f\xd0IX\xae\xbf\xf8\xe1[\xcc4Q\xb2\xd1}\x9c<\xb2\x05\xa5\x92\t\xd9\x12\xa7Ws\xc5#\r\xfb@\x17\xafB$c\x90X\x19\x14CY\xdb\xf2h\xd1\xf3J\x0c\x07*0ex\x904\x96o;\x9eb\x04;\xe5<\x077\xbd\xc1o\x95\xdf,\x9f\'\x19v\xa8}v\xb8\x10\xc0\xea\xcb/\xf8:\xe4\x18Q`~\x89\x05 p\x9a\xbez\xa1\xd0R\xec\x0ev=\xb1{/{\xf9\xca\x0ero\ro\x9f,h\xe3}`\x18\x8f\x04\xd7\x7f\xbb\xfdy\xfb\xdf-R\xa7r[v\x02\xadx\xf3|\x00\xcc\xf1Ea\xea\xd7\xce*f_\x94\xd7\xbcee\xac\x81\xf7\x9a\x18\xc5\x90\x10\xf0\x1bS;\xd6\x94\x8f\x95\x02`\t\xdc\xba@\x98\x81/WZvS\x96\xd3\xb8i\xe1\x17\xd0\xb3\xdc[S\x9c\xf9P\xb2\xff\xdc\xfbWW\xc1\xe6\x01C\x9da\x8b\x13\xec\x9b\x99\xd1 [ 
vJ\x9e\x82\x10U7\'\x19N|\x1d}\x18\xe2\xeb\xbaGE\x17CNY\x8d\xeb\x18t\xee\xa9;\x92\xc2L\xf53\x81\x9b\x11\xe6=\xf1\x9e\xe4\xb7\xd1\xb5\xe0\x1a\x8a\xf3\xaaJ\xdb\x02\xc1\xec/\xd2\xca\x83\x81\x88\xcbbRj;\xc8\xb3W\x0b{S\x12[\x8b\xfa\xde\x95\xeal\xa4\xf1-_\xebMa\x01\x13\xca$\x0fx[c\xb9-3\x01Lr\xe88p>zf\x07\xb7\xe50\x00<\x0e\xefcy\xfb\xb3\xca\xa5>\xa1\xd2\xda\xee\xbb\xd9I1\xa2\xca\x01\x8af\xb2x0-\x1c\xc4\xdb)\x8b\x04\rElU\xa91\xda\x8b\x9d,^qd[\xe2nG\xedG|\xc8\x03\x98\xe5x\x1aU\xe9\xbe\xd6\xecvN\xc2\xfd?7\xd1\x01\xf6\xed1\xd1\xb9M\xc7-+\xe4I\x86jN\xd3\xed=\xb7\xa43\x90\xf2{\xd7\xd6\xa4\x14\xa1\xd4\'\x02\xc1\xdb.}\xb8\x1f+H+\x95\xdb\xab\x92\x877\x7f~\xf2V\x01\xc5\xc5yy\x11\xd1t\x93\xa5\xfa\xc5\xceF\x89\xb9\x96\x0cTk\xb3\xb6A\xe5b\x1eMvJp\x96\xaa\xa2\xf5\x12\x16iD\xd90Ew\x1d0\x0c9gJ\\\xfc\x14RB\x16\xd2\xb96\x93X\xde0%\xdbXn\x87\x8a\xaaU\xd3\xa7\x84kA>\xea=\n\x845\xed-b?\xc1v\x85\x88\xbbB\xfb2\xfb\x03\xcc7~\xf9\x1a\xa3\xec\xa8\xba\xc3\xce\xbd\xe6\x0b(\r\xdd\xe2\x8f-\xfb\xcc\xda\t,\xc7r\x81\x9b\xca\xea\x13\xaaDM\xb1V\xddX\x15\xaa\x80$\xb6\x7fo\xe9\xc4\x10\xdb\xd0\xcd\x96\xc2K\x89Zh\x89;\x91R\x89C3\x19\xbe}\x94/Z$\x18\x15\xbf\xb7\xde\n\xa6Mp6\xc2\x12\xa8K\xed?\x8b\xba\x9bd\x0c-6\xe0\xcb\r\xfc\x7f\xb6\nA\xce\x89\xd7C\x06\x8drrx0p6\xa2\xfedL\x17,Y\xc2_B\xab"\xa4\x99E\xe56\xf9\xf6\xc1\xb0L\xb7\x1f\xcaf\xc0\xc5u\x88\x939vJ\xa6\x8cZ\x1a8-5plp\x189\xe5\x854\xdb\xf4\xae\xf2T\xfa\x8c~\xb5-v\x9f~\xb3\x18%\n[\xe3JB:\xbb%\x9cV\xe9\xa8\x97!\xda\xab\xcb\xa6\xaaQ\xdd\x88\xa3\xb0p\x9e\x19y\x84j$\xc5\xd3\x89\xbe\xcb\xf4\xa3\x18\xeaV\xadr\xf5\xdc~\xdb\x06\x06\x00Cp\xff\x84\xac\x12\xfaG\xcd\xa4\x8d\x96QU\xac\xafo*O\x1e&6vb\xfe1\\\x8d\xcc2\x92\xbe\x9b\xdb76\xf1\xc6/s<A(\x16qb\xd2\xcd>\xd8\xb9\xa8\x95\x01\x87\xf6\xef\x9f\xdfo\xed\x07\xa5"~\xb1X\xd3\xf6\xe6}\x8e\x16kRr<\x10\xfc\xf0\n\xa4N\xec(Cyg\xf6\xc2\x15\x87P\xc5\xed\x12\xbc\x1e\xef?\xfd\xc0;\xe5[\xcc=Z3\x7f\xce\xa0\xc1h\x1bc\x938HE\x81\x0b\x13\xbe\xec\x15\xe5m\xcd&\xb1\x06\xa1\xd2\xb8C\xd3\x1fk\xc0\x8d\x9dYr?\xcb\xed[\n)P\xd8\x8d\xe2\xa7\xff\xec0
\xf7\xf5\xc3\'\xca\xb0\x94\x8av\xc9\xfe\x11\xf1:\xc8ue\x0f\xab\x111\xd6z%H5\xfb\xc8\xb4yf\xeb\xca\x15\x05y\xb0\xbdhZ\xa2"tl\x07\x903\x8da\x96\xda\xfb\x8a\x97(C\x00\x1b\x17\x8e\x0b[\xc1\x1a\x12\x9f\x9e\x98\xc2];\x89\xe4=@\xe7\x0cX\x85kO_\xbe\x8b\xab\x91\xc6y\x9bm\x1d\xe3\xfec\xae\xf5\x00\x0b\x90c\x0b\x9f\x9f\x95n\xc3B\xec\x12\x8c\x0b\x92Q\x9d\x0e\x11\xe9\xa0\x8d\xe2\x91p\xd5\xecu\x93==\xad\xd4h\xbc\x83D\x1b\x05\x17\xd61\xf2uX\xda$\xfap\x15\x0f~&t\x15P%\xb7>\xde\xaf\x0e\x8e\xed\xcc\xdb,\xd8Y\xb3\xe3\x8d\xb0\xc5\x0e\xe1\x16\xafW\x7f\xb0"\xf4\x85$\x01D\x82\x85S<\x80\x83\x055M\xa3;T\xd2\xc2\n\xdd\x9d\xfb\x8ar\x86\xac/oU\x8e-\xfd\x1d<\x161\x91\xc7v5\x1c\xa6\xd79\x02K\x99\xf6A\xb1\xc6\xa0\xea\xca\x8e_JV\x11\x16\xd4\xc4\x8f!t\x1d&\x07\xfal\x19Y\r\xa4\t\x82\xc8\xecM\x99\x11\x8ez\xb3.\x07\x1b\xdeZ\xe6\xcai\xc0M\x81B\x9c\x1b\x7f|O\x96\xb4\'\x1f\x92\xa5\xa8)\xa1Q\x98\xe9\xd3#^\xb8\x08p\xe6p\x9f\x10\x19\xc3\x113\x8f+7\xc6\x8f\x84\x0b\xd1\x0c5\xe6U\xbb\xa1\xd5\x8e>\x86\x0c}0\x8a\xea\xc3\xafV>"t]\x0e\xc3\xa5xJj\xb34Ff_4k\xe9\x92\xa9-\x0b)LD\xc0\x0ey[\x9f\xab\xeaEQ\x90\xdcLD\xc3*\x0c*\xc6\xa4\xc0\x03\xf3\x08\xfd\x8c]\xafR\xe5j\xce\x18`\x97ro\xcba\x95\xc0H\x19\x9a0\x1cX\x0bF\xc8\xe2\xbfQ}\x14\x1a\x9a,P\x88\x11n>k\xc6\xde2\x08\xd5\xefT"7\t\xaaO\x86hS\xdd\x11\xa0iW\xec\x99?\xec\xe2\xbb\x824\x1b\x0b\xa3\x02\x1bx\xa2/:\xd2\xd8\xd2\x18s \x98<\xb5f_\x88\xc5gO:\x91\xf3\xfaL!\x82\xac\xa1\r\xca\x93U\x1c\xd8\x92,\xe3\x88NH[\x1c\x85\x833]O\x92\x82\xda\xf0\xba6\xebJ\xc6\x00\xdfZ\xf4*\xa2\xa9|\xb1^ 
wS]\xaa\x0b\xfa\xcb\x944r\x1fU\x15\x88\xb7\xccXP\xa74\x91%#\xbd\xf0\x95.P\xb7\xb6ev\xd1\xe4m\x88\x03\xc7C\x1c\x9c\xec\xbc\xb9S\xee_\xa1\x82\x1f\x16\xcf2\x87c\x8b\\H\xc0\x8d\x1fNV\xa21\xce\t\x1eW\xb8\xd9\xf8\x9f\xf7\x05K\xd7\xca\xd19;\xa3T\xa4_\x1fT#\t\x93\x806-\xacU\x97\xbd>\xb2\xaf\tq\xadzV\xdd\xa2\x0e\nq$\x04Y!P\x94\x161t\x0e]\xd2\x1em\xc4\xd4\x0e\x87\xb8z\'\xfe6\x93\xd1\xfe\xef\xd9d5\xb4\xf1\\\xec\xe8\xb4=X\xad\xbak\xc79\xc8dZ\xe3\xfbp\x08\xdbr\xe9D\x11`\\\x89\xa1x\xa7\x14\xcc\x83\xe3\x85\xe8\xc5e\xef\xfe)\x0fD\x1f\x00\xcb\xff\xa9r\x1cO\xb2q$d3jw\x95\x190\xab=\xc5\x0f\xfdm\x04)\x85+\x82N\x1a;MN\xf6\xa1M_\x1f\x0b\xe0\xf3S\xa8\xef\'\xff\xab\xeaJ\x1b\xa2\xd6\xb6\xe5_\x11g\x145\xe9t\x92\x1dAF\x07\xc0#\xe2\xc4\xa0\x11\xcd\xceN\x14Adr\x00\x94\xdf\xfe\xa8Z\xb5\x9b\xfb>x\xef9\x1e\xe8N\xf6\xb0\x86Z\xb5j\x1d\xbfbt6\xd8\xd1\xce\xf7\xdb\x04\xa3\xdc\xc3\xf2\xf5\x98\xfc\x07\xa3\xbdu\x99\xcb\xb2\xb40\x8b\x05\x92fL\x07\xa3\x8a\xcc\xd2F\xdc\xad*y(\x93Ym\xdb\x0b\x93.\xde\tO\xe9f\xf0y\xe0t4\xfeCs\xe5\xa5\x05\x16U\x0c\xc8 \x98\xdc`I\x1d\t\xf8\xa1\xbc\xbdc\xdf\xd3+Zm\x98G\xf7\xf6\x92\xed`\xf6\xc6<\xd6aJe\'z\x97;\xf8\xdaq\xdb\xd7\x08\xf3\x87\x8bH\xfbX8\\&0\xaa\xd0\x9e\xf4\x9fe\ns\xbb\xa8\x95\x1apZL\xc7\xf0\xbav\xa4\xe8`RQ\x8a\xd9\x10\xa4\x99!\x0e`\xd3\x0e\xbe\x1e\xcc\xa8[\xc8p\xfd\x83\xf9\x98\xb3-\x99\xd1\xe2T{\xeb^J\xec\x1b\x12\x9dVN\x14LQ\xe2\xee\xe0\x18\x12\xac;f\x83\x93\xad\x8c\x8f\x88\xc1)\xdbS0\x88\x95eRo\x17\x9dQJ8\xba\xfd\xe4\xa3\xa2\x0c\xa1\xbb\x1e\xecE\x12w\x14\x836*r\xa6A\xc6\x1f\xb6\x07\x94\xef$\x81\x0c8CU\xe4\xcc\xdc\xb1\x80v/\xcaO!\x1ck1\xb6\x85\x1c>1PLI\xf2\x95y\xc40\xfc<\xa3\xac\xad\x13\xd4\xca0\xf5\xd1\xe3z48\xb5\x8ce\xd6.\xa6\x19\x033\'\xa6\xf1\xcd\x0c\x0e\xc8\x10\x00\xa9J\x1e0@\xed\xa6\x93\xed\xb7\x08\x1e$\xed\x01\x14h{Q\x9am\xfa\xcc\x92Y\x99\x0b\xa3\xb6g\xc7\x82\xa9\x06\x9a\x8f\xcd\xef\xa9\xd2\xc5\xb1g\x839\x10\xbe .\xd6Z\xe9b\xff\x85=p\n\x103\xf6\x1f\x9b\x84\xfa\xde\x89\xe8Yd\xc4\xa6q\x9b\x04m\x041 
\xac\x16\xabq\xb6\x01\xdc\x08\xce\x9b\x0c\xe6N(\xeb\xa1\\\x97\xe2\xa2\xec\xe3\x8d\xe5\xe4\x812\xbcj\x1e\xcf\x833\x95\x1e\x7fzi_\xc6\xaa)\xee\x08\xf9\x80\n\xbe\x93\xa4\xb2\xcfdTS\xd8wx\xc5&d\r\xa5\xca%:\xc5\xcem\xadQ\xa7\xbb\xba\x84i-]\x1b\xecT\xc0,\x86FU\xf2\x9e\xb8Z\xd2\xff\x89L["\x15CD\x8b\x85NL\x93b\x86\t\x0e\x9d\x8f\x04\x98\x81\x9c\x7f\x84\xfa;EwM\xac\xe9\x0c\xb4\xf4I=\x9a\\\xd6\xe6\x0b\x16EU\xed]\xdc\x1f\x05\xfd\t\xa6\xf0`\x19\xab\x04\xe3\xbfb\xa4@\xda4\xbb%-\xd4\xc31\x1aj\x99\x01>\x82\xb0\x19\x86\xb8<\x9df.E\xed\x1f\x17\x04\xe6\x13tE1\x88\x82UN\x95a\xe7\x8e\xc1\xefL\x96Y@\xbf\xbdEKY\xcb\xc6\xc7\x02O\xcf1\xab\xac\x12>Qf!Z\xc0\xadz\xc4\xc4q\xfd\x89-/\xeb\xbaIF\xb4\x1d6\xa5\xcb\x9e\xe3v\xa0\xacWq\xf0\x11\x18\x13\x88\x11=[\xe7}\xad\x067\xc2s?^8;\x8c\x1d\x9a\xddmD$\xce|E\xd9#\xc4\x12\x9e\x92R\xfe\xcd`\x01p:\xa9>\x1d\xe1S\x07J\\\xfa\x1e\xafP\xbe\xfdP\x8f\x06l\xf2\xacP\x1d\xa0-\xcf\xd7\x8e\x84p\x85Z\xedJ\x945\x9a\xabG88k\x90\x15,iyn.\x81A\x01\xaec\xf7\'/\xaf\xa9\xbe\xc3v\x82Bw\x85\xfe\xbeC\xeb\xdbh\xb0"\x9e+]\xfd\x8a\xe5n\x89\xbf\\5\xaf\x96\xbaY\xdc\x0f\xe8Hu\xa4\xa1\xf6fu\x13&K\xcah\x99T\xe2\xce\xf7\xb0\xe2Q\xff\x9b\xf9U\xfeM\xf1\x01\xaeC\xb5\xbe\x8e\x9b\x17\xdb\xeb\x87\x7f_\xd9\xc6\xb1$\xd5\x9cd\xf7\xa8"Lyw(\xd3T\xe93\x1d\x0e\xb49\xf6~\xe3>\xea\x9cEw\xe7\xfe?\x01\x12\xa2\x932\xcah\xecb\xb3\xaf\x9eSSK[2o\xcdW,cU\x11T\x9a\xbf\x04\x98m8\x18R\x8a\xa4;C\x03\xac+\xaf\xf6\x87\x1f\xce\xec\x98\x84\xf6\xf0\x9d\x963\xb7\x87\xf1\xd5\xac\x12\x0b\xd1\xfdL\xcbn\xc7\x16\x91v(\xb5\xab\xcf:a\xfa\r\xa2\xef\xadP\xba\xca\xbdR%\x9c\xa18\\\x0c\x19\xa8\x02)\xdb\x8bsZOI3\x9f:yH\x1b\xa0\xca\xd4u\xd7o\xd4\xa3A\x08\xa4\xaf\xe4\'wu\xf6D\xac\x0c\xd0\x99!\xcf\x82a-2)\x0c\xc4\xe1\xd9-7f\xce\xecF\xf3\x08\x0e\xc8B! 
\xbc\xfa\xef;C\x9f\x01\x12q\x14,,]\x99UD\xa90\xde\xce\xcd\xceS\xd8\x1148\xb1\xb4\xa7\xbbP5\x93\xc8kq\x08\x1d\x8f\xcc+\x05}\xd9Gs\xf8I\x81\x0b\xd9lO=R\xaa\x88A\x898\xeb\x84C\xb0{\x1e\x8an!l\x9e\xd9\xca5\xd9"\xc9\xa9\x13\xf6\x82\xb4\xe6\xbe>\xa4?\xd5RuC\xe1\x9cU\x0bY\x8e2\xc3=(5L\x9b\x8d\xf5\x91\x9bPr\x82P9\xe2c\xac\xcbZ\xa3n\xe3\x15\xe5\xb3k7oN{{\xfd`\xa3q\x07\xe6@q\xd4K\xd9GJ\xe7\xbaZrEX\xd1\x03E\xd7\xc3\xfaRW\x10AeQR\x14\x00\xb5y\x92)HW\xde2\xc3D]\x1a\x05\xfd\xbc&\xd0^1F*\xdbS\x8b2]\x02\xcd\xc8B\xec9\x8bL\xaa*_\xdb\x936x(;\xf4\xa9\x01,\x8e\x04q*\x805*\xc93\n\x00\xdf\xdc\xa4\xea[\xb0\r\xd0\xe8@\xa7\xa3r;\x89\xa0\x15\xf0j6\xcb\x0c\xcd\xee\xd2\x1a4f1+\x81\xe2=G\x8f\xb2\xb9\xab\xa8\xd5\x94\x0f\xfa\x0fKi%\xfa\xce\x86\xef\x96\x93\xe7\xba\x8c\xad\xa5\x95dq\xa5\xcc\xc8n\xd5\xa3\xb9U\t:\x9b\x83C_k~\xe3\xcc\xe2\xfcf\xb8(\x04\xc5\x9be\x08h\xd2p\xa9JU\x17\xafp\x88J\xb7W\x12\x9c\x10\xbc\xec\x86cW\xec\x9e\xb0D\x98\x080\xec\xdc\xd9\xce\xd6f-\xc24\x9a\xd6\x89 w\xdbB\x982A\x92 ]\xb39\xa3\xfcm\x97\x9f]\x05\x89\x05\xc3\x14H\x83\xc1*>`\xf3pA\x87\xbf\xd7juU\x1f\x08TM,4H\x95B3\xa4\xef\x04,r\xa0U&;\x97\x11\x8a\xb4\xc4\xc3\xb8\xd1\x1bv\xa9\xaa\xfc\xef)=\xf4\xb3\x05\x9de\x155Z\x01\xd5i\xe1\'\xc0\x16nr\xd5N\x98\x01\xb2\x80\xb98\xfb\xfc\xc3\x81=n\xd7\x89oC(p`\x91\xb7\xc3u\xec\x89\x0b\x8a2\xdbW\xff\x10(\xe6\x87\x96%\xbbb\xa9V\xc3\xe0\xd8\xb4\x88\x18\xd9\x94y8&\xa3\xa2\xd4\xb3\x07\x00\xdb\x0e\x15h\xc74\x08\n\x17\x8d\xca\xae\xec\ri7\x9e\xc9\xb0(\x0c\xaa\xd2]\x88\xea\xf0<\xe0O\xc6\xa8*y\xaa(%\x06K\n\x7fH\xd7T\xe9\xb9S\xb0XAb\x81\xda\x91\xdd\xcd\xb9-3y$\x8c:a\\\x9d\x9e\x1e\xf6\x9e\xbd\xceD\xc9\x1f\xcad\x95\xeb\xef\xa6\xda\xf5\x9b\xf5h\x8cfT\xfc\xe5\xab\xa5\xe7,\xe1W\x1e\xa0R\xb3\x0f\xf7\xbdr\x17\x1b\x1c\xae\xec*J#\x8e\xb8m+\x17\xfb\xab\xa8\xa4_\x99\x87\xedd\xe4\xabb\xdf\x02\x1bJ\xc8\xb9\x17\xc7\x16 
2O\xc8\x84\xdd\x83>f\xed\xc5\xdb\xb5:z\x91\xed\xb0\xfeF\xb9\x94\x8c,\x0e\xc2t\xd0u\xe8\x8e\xe4W1\xa0\xb3j.\x8f.m#\x17\xe0\xf6\x12.7\x18L\x1c\x1b/\xcf\x83"\xeeq4P\xe7\xf6]-\xc7\x8f\xf4\x18\xa5\xc8\x98(\x03-\x00\xedD\x84\x14\xb0\x0b\xd0\x04ds\xad\xfe\x10\x10j\xe5\xb4\x8a\x86\xa4\xaem\xe1<\x0cN\x7fn\x9e\x1e\xfa\x15\xdb/v\xfa\xbb-{.V\x1e0/"a\x1e\xe8\x92w/gdt\x92\xfa\xf0\x08\xefGhq\xd7L3E\xb2p)\xc0\xfad\xfft?\x8d\xb9%M\x95|\xbf\x82C\x0e\xb3\x95\xa9\xda\x99\x92\'\xaa\xde<VAr\xe5\xbaT\x7f\xcd\x80\xbd\xa2\x94\xdb\xeb\xe9\xf9\xad\xe5/\xaa\xf0W_f[]H\x08\x9a\x87\xee\x95\xdd\xdd&\xa0\xe5\xb8i\x16\xf1ha~\x15\x16\x0c\xce"\x9d6\x83NY\xb2\x1e\xaa\xe5\xad\xdb\x02\x93 \xfc>\xb5G\xa7\xf0$n\x17\x0b\xc3\xfd\xfe\x7f&\x93\x8f\xa7\xd9\x100T\xd5#uc\x0eu\xc4A\x82\x8e\x84\x17\xdb$8$\x17\xb9,\x18l\x8fw7^\xfc\x03)\xa6\xfb\x8c\xeaX\xaerOU\xfck\x14\x95\x03Z\xa8\x8e\xf7\xae\xb7\xb7\xe5@[\n\xd2\xa5\xba\x1b\xb2ni\xbe}h\x0bA\xbd\xae\xc2\x12M& |\xb0m\x01iA\xee\x96\x9a}\x91?\xd7|\x9d\x13\x00G\xfc\x01\xcdRa_axUKmEP\x13\rY\xb1\x00\x0f\xc0\xc8\xafc\x99\x18\xad\x04\xe5\xd5\xb1ZZ>\xcb\x93\xb6\xd3iY\x99\xab\xec\x84\x88\xb4P\xba\x0f\xdd\x95\xb7\x07\xf4\xcf\xcf\xec\x0c\xb0\x9b\x82\x10T\xff\xfd\x8e\xbd\x81\x8d\x94\x06\xd2Tn\x8a\x8cE5\xe60\x04\x81\xa6\xc0\xb0kb\xad\xadb\xb0!\x8f\xfa\xe1Y\xadiKS\xe0,\x0e7\xcd\x99\xb5\xe8\x18\xf0\x0e\x84\xb1|\xf7\xa6\xe0\x18/#\xe7\xd3\xf1\xf3\xff\xd6?\xe9\x0e\'1\x08;\xff\xb3g\xeb\x98\xe6\xcb\xcfwo\x1f~\xb2\xadu1\xa7)\xed\xc23A#Y\xe5\x85r:\xb8\xca\xc0n\xb9j\xff\xb9\xadz\xaf2<\x99\xaf\x00\xd1\xfb\x98l2\x1bf\xa3m#\x9bU\x9a\x01a+\x0e\x9cdP\xc8\xd9\x89\xae\xdc\xb1\x80\xa0\nu\xab\x93\x14\\\x14\xf3\xfdj\xeb\x1c\xab\x84\x1d\xbf\xf9\x91\x00\xd2\x02;\xd1\x83\x81\xca\xe2\x15\xbc\x18\x05\x9da\xa3\xc82(\xcd\xc0\xb6"a\x90/\xc8\x81\x7fA\xb4V\x9fm\xef\xdc\xf8\xae\x80\x80\xc3d:k\x15\xc2V^\xfc3B(\x06"\x18r\xebZ,{\x11YW\xcdg,\xd7\xa3z$\xf9B\xa4F\xe5\x91\xb4aO\xde\xfa\x97M\xc1\xedN\xe9\xf90\xfb\xb4B\xe5\x9d\x8f\x006A\xc9n\xaa\xe9\x9f\xe3X\xa9\x8d\x13\xf3\xbc>\x7f,D)\x87\xe9\xf1\xef\xee|\x9e\x94\x97A\
x03\x0f\xab\xcc\xaaywh\x0b\x8c]\xe5i\xb8\x03\xfa\xfd`-\xcb"Fn&\xafG\xcb\x981S0O\x9106\x8a\x11T\xdd\xe6&\xb42&\x00\xae\xfb\xf7P\xab\x08\xe2S\xf5\x18\xd9\xd9\x88\x9b\xd1\xa2\xad\xdd\x949>\x1c\xac\xfc\xdd\x1d)7\xe2&A\x8f\x85\xfaH\xe1\xe5\xdb\x87\x16H\x99J\xe0{=\t\xae\xd2p/&\xe6W\xebQ\xaf}\x87P\xddww\xecD0}cx\xd9\x9c\x91\xee\x02D,#\x03k\x8bu\x99\xff\xd6\xb6\xa0\x06\x8b\x1aE"\xef\x1c\xaau;\x85<=\x84\xdfo<\xcb\xe4\xa6\x05\xbc\xb3\xe7Ut\x08\xc2\x89E-u\xa3f\xfby\xa3\xafL\xeaQ\xbd\x86DbZg\x96Lv\x8e\x04\xafq\x06\x0e\'\xc7\xa3\xfb3\x8d\x85\x82\xf4\xe1?\x85\xf5E}\x0b\xbd$\x90\xf8\x8b`(\x0b5\x99\xd2\xb7\xe2\xef\xf1}\xbbqM2\xfb\x1ev\x82D\xb0\x8b\xa5\xd1\xa0i\xd2\xa3|r\xc8!z\xe9\xdc\xbc@0\xd1\xd0\x9b\xb0iv2QP\x9f\xb8\xb6\x7f{WD\x12\xd2\xa0\x81\xe0\xb0\xe3\x10\xf7=\x1b\xb8\xe2\xe5\x1cn\xda\xeb\xbf\xb6\x9a\xd8\x8d.\xfe|e\x7f\xba\xf4\xdd\x99\xcc"R\x1d_(\xe8p\xd48\xa5\x8e\xc8P2!\xd4\xad\xa9\xfcOg\x1e*%I\xee\xc3\x06\xec\x10\'\xea\xa4\xd7\n\x04\x92\xbe\xdd\xdb\x12\x10\xd2Y<\xcd`\x83I\xc6$\xf5\xd1{\xa1iN\x84\xa1\x0bSwh\xa6\x9c\x005\xa2\xb5\x90\xb3\xd0w\xb6\xf8\xc0\x96\xd9\xa40\xaf\xd4\xeag;0k\xce\xb6\xbe\xeao\xadR\xcc\xae\x1d-GgP\xbe\x158\x88\xb8$\x1d\x1e\x8e+\xf3\x83KL\x05\x96\xb5\xed\x16\xb4\x9b8$!\x7f\xd2\x9aEM\x90\xe21\xe0\x15\x9aIh\x9dV\nN}\xf0l\xc6\xde=\x10P\xfb\xb4\x01\x0b\xb5\xf4\x0bg\xf5\xf3K\xf9J\xa2:\xf32M,\xc85\xd3\xdf^\x0c\xccC\xf8\x8a\xfc\xd9F\x87\xcc\xc5Q\xf7\xf9\x93z\xd4\xba\xcb\x16\x82\xf6*@\xfc|\xf77|Ln\x87\x89UJ\xe2\xf9\xcf5\x1c\xdbu\xd7\xed>\xf4\x01?\xee\x1f\xfd\xb4S\x12\xc7\x07t9\x1d-y\xd1\x18;\xc9\xfeI\xb7\x84\x95\xf1\xdf&&~=\xa8%|\xfeXdg\x87\x0b\xe6\xe5D\xaa\xf6D\xaa\xd7\xca2j\xbb\xdd65{i\xa3\x1e\xe9\x9b$\xed\xaa\xb9\x10\xfaH\xb9\xbe\xd8\x1fFm\xf4\xfed\xcd\x16\xb2\xc4F\xb2T\xa4%!\xf4\x0c\x0b\xc8\xb1\xaf\x89\xddI\x1cd\xe2j\xc8\xbf\x00R\x10\x7f\x13\x0e\x17\x80#V\xf2\x9aQ\xc0\xb2J\xb6\xed\x185\xecu\x03\xf6]M\xd8\xc5\xad\xf2lY\xc1\xcc\xf0\x89B?&;\xfb\xd8\x82\xe7\x9fp*\xdb\xc9\x1f\xec\xe4\xcd\xde*\xea`\xbf/V\xd6\xbd\xab\xa5\xda\x82\x05w\xaf\x97^\xc3\xdb\xa8bd\rC\xdb\xe7
\xb3\x8f\xcc\xea&\x85>\xdb\xcd?\xf8k\xebfc\r\x07v:\xd8X\xd3\xdc\x9c\xaaGe\xc0\x94\x8d\xc1\xcc\xfa\t\x9e\x81}\x91l\xac\xbf8@"\r\xb5k\xd6\xa5\xb1F%O\xaab\x1f\x8e\xbf(\x84\xd5\x0e\xfe\xc7\xe8\x95vn\x13\xd9\xb0d\x90\xea{\xc8\xf0\xfd.w\x9ag\x87\x8a\xcd\x88C\xd8\xf0\x1b\x84J\x1c\x1b!\x17\x1a\xf5X\xb9\xee\x99F\xb8\x13\xef\xd5\xba7\xc0hiL\xfa7fe\x1a\x8f\x9c\xdd\xc7\x02\xe4p]UtVq\x83\xa0\xd8\x8b\x83\xb4\xc9\x9a\x00\x16\xbe9\xfa\xb0h\xa9l*\x04\xc4\r\x8f,vh"\x83]\x159lj$\xb0\xb3A\xcc\xd9\x85!\x07\x10\x178{-\xb7\xc5\xd5D\xcc\x9a\x1e\xc3dZA\xfb\x90\x9e]\xc8\x17I9\x89@\xa2\\a7\x83\xb0\xfdz\xa4\x8d\x17\xdc\xf2\xa6"\nb\x8f\xc3\x97\n\xe5\xb5\xa6M\x8b\xd2u\xb2\x13m\xf5\x17-\x0f\x92\x8cvp$\xf7\x03f\x1b;\xd3\x1b-)\xaf2\xe9\xae \xc3\xe6g\x13\xeb|\xb2fU\xfeBuXn\xa7\xbf7\xa7\xa8\xb7z\xc4\xdc$P?\x8c\xbb\xces~rP\x82tFt\x7f\x18\x80\xc1\xa9\xecJ\xc6\x01\xda/ZFY\xa4B\xbb\xa9\'v7\x13q\xea|\xb5\xf2}\xfe\xe6O\xf0K\xda\x8c\xb6\x1d<\x83\xaa\x9b\xdc\x96\x83\xc7\x98\xbfdX\xeavW\x16#\x10A\xf2J\xa4z\xc5\x87\xa2\x8c\xfa\xa1\x12\x07\x96\x19\xdbz4\x95\x95\x82\x7fAQ1\xc7\xa3\xb1\xdb3QF$\x9e[:\x9c\x8e\t\x1f\x1ei\x1e\xddo\xc9\x8f\'v\xa8\x89\xc8\xb6+\xf5h\x80N\xd5\xee+V\x80Aq_A\xf3fa\x8d\xa6\x0c\x11a\x9c\x85\x81}N\xda\xb7\x08\xfe\xff\xd8\xbfQ:\r\xff\xc5\xb1\'u\x1f\x17\xf4\xdc\xfe\x8a,\xf7bA\xe9\x92S\x18\xd3\xdb\xd7$\x83\xf3\xd3\x13{\xf0\xc8l\xe5\x88\x93\xe6\xbe9\xf2\x16\xbc.g\xf3\xaba_\xc9}\x80\x1fb\xbf>\x86\xbd\xc61\xb0\xa9\xdb>\xc2\xa2\x02$\xb9\xb8\xf7\xc0\xaa\n\xa5\x03$!\xcbQY\x175n\x0cgx\x01\xff3\xa1\x1e|\xd7o;\xed-\x88\xba\xbc@\xdd\xd2}\x8b\xc0\xfbB\xb0#-|g\x1b\xc2I\xf1\xe2.\x136\xf2\x1b\x8f\xa9wB5\x05\xf7\x9c$j\xa4\xb2\xd0RJ\xc9~\x16\x1b\xa4\x01W\x99}\xe9i\xad\x1a\xdd{\xfb\xfc\x1em{U\xbf\x80\xff\xd9E\xe0\x10\t\xa6=\xc9\\2X6V\xc7\xb6\xadO\xd0\xd4\xcf\x8e\xa3L\x99/\x7f,\xb7;\xe9\x99L\x14\x16\x18W\xd0\x80\xa2\xdat\xcb\xdb{,\xcf.\x1c=\xc5\x14\x1a6B\xb0\x0c\x99\xda\xcf%*}\xda\xe8\xce5\xb9\r\xa4\xed\xe1\x95\x1d\xfc\xb6\xfa\xae\xbd\xcb\xae\x83\x0c\x02\xcc\xde\r?\xaf]==z\xb4\xf2\xe3*B\xf4\xf4\xbeY\x01|:\x0b\xd4
\x8e~Bt!\x0fIL\x87O!p\xec\xd2\xcf;\xb3\x1f%\xef\x1arNX\x17t\xd3\xc5\xe4\xc1+\xdf\xeb,\xa4\tT\t`e\x8fx\xea\xf6l\xad\xc9\tkf9x7\xd2#\x19*\x16\xe3\x07\xe68<\xa6\x08I\xc8\x1f\xda\x9e\xed\x04\xe9\xbbw\xee\xd9\xa9\x89\xa2\xbe\x1e\xe1\x01[\x96\xb0\xd5\xc5\x9b\x17\x8a\x1d\xbb{\x7f\x14n4\xf2\xbd\xc1<G\x10x\xc6 \x8d\x94\'\x04\x93\x05\xc0\x03\x10v)\x9cQ(\xccgUY\xa5\xa5\xc4\xd9\xae\x07Yi\x02;\x14\x9dd\x91\xb5\xb3\x8c\xd6\xca\xdd\xe3\xbfv\x96maI\xf6\xc4.\rO\xb0r\xbe\xbcws[G>\xd5\xb3\x0c\xcd\x03\x919]\x9a7o\xf3\xa8\xee\\^\xba=\x0fm6\xc3a\xb7\xe6\x95[\xe2A\x07[7\xee\nB\xc0o\x0c\xca\xd5\xc5Z#/\xc7\xb8r\xaa\x8d\x13\x83\x1d\xd4\x9a]\xba|\xdb~\xa7\xeb\xc6j5\xdd\x95\xe4\xef\x06\xd5S;q\xa6\x18x\x04\xf3 \x1c\x90\xd1\xbf\x9f\xb2\xd5h\xf3\xaf\xba\xcen\xf9\xe3\'[\xe1\x9e\x93\xb6\x99\xb1\xda\xd8\x94\xa5\x9b\x8f\x91\x1e\xac-\xdaQd\x15b\x08@\x11T\xc8\xc0\x89\xb3\x8cI?\xff\xb0,\x82\x11\x06>\xa4\xd1\x91j\xe0A\x1a\xb1\x8e(2\x13~\xcb:\x82\x18rKf\x13\xfdI\xdc\xe6\xc6~\xaa\xf5\x8b\xfa\x0b&\xc9\xbf\x8e\xc7\xd1\xf9\x19Nu\xd20\xf7=\x0c\n\xbbhiu\xf6Q\x1b\xccq?\xa1\x1e\xe9\x1cZ\xe2\xa4[B;1\xbc\xbb\xce\x0c\xe2\xd8\x0c\\\x8b\xa2E_M\n=,\xcd=\x06xF\xac`\xc9\x06_\x18\x8bp\xdeL\x9b\xc9g4\xd8\xea\x88\xf5\x98X\x1f\xb5\\yY/\xe2\xfdc3\x14.;\xfab\xe1h\xdbl\xe0#\xf6\xbfb\x92c\xfbQ\x01z\xf5\x83\x0f\xf5\xe6\xf7e\xa9\xc1\x8b\x95@m\x99\xce\xef\xf6`\xdb\xfb\xdb\xd7\xb76\xfe\xd5\x87\x9f\xe3\xe5\xdb\xd5\x1d\x12o2\x94/\xef\xd9e!f\r7\x9b\x9e\x90]wSf5y6a\xff1\x15\x94\x18D\x9ab\x0f\xad\xb3\x1bra\xaf\x8e\xed\x88\xa5T\xbc\x13\x01\x92\x0c\xc9~\xdfv\xa3\x83F\x9b\r^yu`\x8f\x1c\xd4\xf3\xc2!\x9fJ\x03\x1a\xde{4\xb92Z\xf0?7.\x15\xadZs\x06\xbd\xd6\x9f\xe1\xf5x-}\xa6\x8ec&\\=\x12\xfca4R,\x00\x83\x0c\xc5+{\xf8^h\x17a4XwL\xa4\xe91\x9a\x9a\xe8\xa4\x12\xfc\xa4\x1dO\x85\x861!\x9dz\x8d\xf7\xb8\xb9\xf1\xe7\xa7]\x10:\x7f\xf4Mq0\xf7\xa0\xbel\xe6\xc7\x083Zb\x9c\xf1\xbe\xbck\x97\x9cj_\x86\x13BS\x80a 
\x1b\x1c\xcb\xe5\x83\xfar\xb4\r\xeeS\x7fm\x1am4U$\x0bVn\xc6N\xf6h49\xfb\x87\xc4pu\xd9\xc2\xf3\xb1?X\xdf\xc2\xcc]\xdfL\xd9\xc2Q\x7f\xa75\x13B\xf70X\xc5\xa3\x0e\xd3\xad\xbf\x13f\xc2\xea#aN\x95\xbd\x07\x81\x8a\xf4\xb3\x9dd\x92\x1c\xb9\x00\x0e\xd1@I\x12\'\xe0\xcdbB\xa7\x98\x81\xbd\x08\x86T"\x17O7T\xaf\xf6\x04 \x85\xfe\xfd\x1d;#\xe4\xf8\xe4\xda\x1eO|\xfc\xb6\xd9Q\x9b%mqA2\xfc\xb4f\xafA\xc1\x18\xb0\xc5;\x05\xe8\x1c\x04,\xe0-\xea\xe2\xa5\x98\x0f\x9a\xf6\xd2\x0c\xb2\x89<\x8a\xc3\x873\x02#\xe1UA\n`\xcb\x06\xc1\xcc\x9c\xab\xc4\xfd\xe8\x17q%m\x82\xe2\xa2\x05\xd9\\\xe5V\xe8F\xe1\x1eo\x11X]\x16h\xc8\xa8\x96\xa3J \xc9\x15y\x01n\x08\x9afF.Wg\xde\x88\xf0\x7fbG\xa1\x85\xcdc;~a\x16\x8f\xcb\xa7\xa4\x867\x045\x1fcc\xe2hm\xff\x100\x92\x99\x15l\xab5pSK\xe2\xfe\xe5\x89Z\xed\xbb\xfc7\xfe;D\xd7\xdcE\x0cq\xcb\xd66\xaam\xb2\x81\xcba\xac2+wH\x8e\xdb\xe2\x8f\x05S\xa9\xc0 \x06N\x18xC\x01\x1a\xd2J\x112\xe7\xab\x7f\x85"d\xb2\xc4\xb8s8\xd7>\xd1,&\xe7\x81\x1e\xfb\xea\xbfUs\xfbI\xc6a\x9bo\xef"9\xca7)\xfa\xcd\xb1g\x98H\xea\xa1g\xc6\xa21mN&\x9b\x08#>x\xa6l\x8f\x0bs\xc3l\x01W\x05F\xd5\xfa\x12\xeaQ\xeb k2\xd1M@\xd4\xe2b\x17\x0fmU\x198d\x83x]\xf1A{v\xfc[H\xf2rz\x04\xa6%\xb6\xa8;\x90\x99\xca\xf2\xd7\xbe\xee\x9c\x8a\x8b^L8vG\x05\x0c\xdda\x89\xb8\xfa\xb5\x97\xca\xd9y%\x03\x8a98\x9dih\xf7\xcd\x8b\xf5\x00\x80)`\x88 \xc3\x1b\x8c\xc6\xf4`x\x92N\x85\xc9\xc81\xf5\xaf\x00\xf4\xd8\xf0\xe7c\xb9\xcdF\xb1<V\xa3<\xb2\xc3[\x19\xe9\xe6\xf8\xca\xcc\r\x0b\'\t.\t\xe5f\xf1\t\xe2\x8et\x83C\xe5\xa8A\x0f\xcb\xe2\xf4\xbb\xcb\xcfd\x04\xd3\xdb\xde7\xf8-\'\xe6?\xc9d*\xddx\xff\x0b\x15\xbe l\x88\n\xc1\x1cO\x82\xbeR\xb2tY1{d\x07\xa1\x17\x0f\x813\x06\xf8; _1d\xc2K@B,T\xd0gN\x8a\x99],*\x1a*0,\xc5\xa1\xc1\x98|\xaeD~\x18w\xa4\x95\x9a"i\xcd\xb8\xf4\xe1\xbd=%[\xc9\xd2l\xb7\x8e\x03\xbe\xecp\xf2m\xd7l\xc7\xd9\xfc\x96\xc4$nGAK\xc6\xc2\xd0\x8bma\xc1\xfd\xdbG\xb8\xe7h\xf9l\xd3\xd7\x971++B^\x88HL\x86\x1a;A\xa1\xe5\xac\x0e\x12LZH_8\xe8EQ\xde\xbb{F\xbf\x93\xbd\xbc\' 
\x83\xc4\x85\xfe[-\x9a\xb7\xaez\xb6T\x8f\x8a3$0\xcbw\x99d\xc2\x89-V\xdf\xbf\xf9\x87&7\x88\xec\x12\xe2\t\xf6CM\xba\xcfvUH!\xb2\x97\xaf\xf8\xfc\xbb\x8es\x03\xf7$s\x17\xb5)Y\xe6hk\xa9\x97\xdab\x12\xcd@\xc5,\xc9\xba\x87\xaa\x05\xfb\x9b\xf3"\xfc\x93;\x13NO\x1ec\x15W\xbbd\xf2\xd9L"X\xa9\xa9Gz/\xad\x18\x04\xa9\x88\xa31\x94\t\x1d\xa5C2y\x12\x8et\x8a\nD6J\xe5\xa1R"A\x82>o>\x91;*\xb23\x97%\xb3\x88\x98\x07\x18\x89\x1bX\xb4D\xa5Q\x85c\xd3$\xab\x166Q\x15\xeb}\xac\xdb\x85\xeb\xe6>|\x9c\x84m*Dt\xb88|5\x96\x8f\xba\xf1\x84\x01\x92\xfa\xd6\x0f\xf0\xb7S\x88w\x07\x15\xe4|\xf9o(@-[c\xbdJ\x15\x1e\xce\x04o\x15\xf43\xa1o\x0f\xbfq|f+x\x85\xf5J,\x1fT\x13\xc8\x97\x1el\xdc\x9dQ\xc6A(\xe3\x86\xe5\x12\x89\xf1\x98l\x8d\xfd_\xfb\xec\xa8\xd0\x97\xb2\x9d\x9d\xc4_e\xc5\x9c\x92\xc2r\x0e\xc9t\xcf\xbd\xfd\x15%1Qem\xb2\xe2\xe8\xb3|u\xec\x86b\xb1\xd0@\xa4_\xcf\xea\xcb)\x91\xae\xec> \xcaW\x1b6\x90\x95\xea9^\xbf\xc7\xa9g\xd3\xb8*\x12Q:\xfb\xffi\xd0e\xe6ty\xaf*\x9e\xbc\xba^8jm7Z\xcc\x8bJ\xab9E\x84D\xa8DGO\x87\xb3h\x1f\xe9^\xa1\x87 \xa4w\x87/\xeacx\x9a\xe2\x99a\x83\xde\x8a\'w\x15*6\x1fV\xb4*\x1c\x17\x0b?\x1d\xd8\xfdJ\xb8z\\ydgF\x875\x87\x848\xe9u\xa5\x07\xe9\xfd\xaf\x82\xca\xd2w\xe8N\x85}-\xfb\x1f\x16\x8bPG6\xe7\xb1\x92bR\xd4)\xf2Z\xf9\xc0\xe6\xec\xdc\x16\x90cy2-\x8c\x17?\x8f0aR\x8f\x8a\xb6\xc6I_0\xebD=\x9c\xd4.>"\x19Z\xc7R"!\r\xc0\x0c\xa6\xc6\xc35\x1d[o&\xd5\xcbE\xd3d\x82\xca\xe8 \x82\xd5*\xedbL\xda\t\xa0\xf7V|\xa4\x0c\tX[\xa9v\xbeGp^\xf5\xa7\xcaH\xbbr\xdd,\x0e\xd5\x93\xbd9\xb2 
T\x8c\xee\x14\xcbX}\xb6k\x97\xa2\xe0e\xddI\xeb\xd3\xa4\xa9\xd3}\xf3q\xc8L\xfd\xfat\xba\x13F\xe3\xf2\xc5\xc9\xa5\x88\x8fo\xe0\x82\xdf>f\xf7<\x06&\x8d\x04\x07\x9b\x07u\x8d\xa17\xec\x1av\xd3v\xfd\x9dc\xe9\xff\xefn=bf1;\xcao\xcf\xaf<\x90e%7\xdf\xd7#\x99>\xa7g%\x16"\xbe}\x9f\x08\xce\xb4I\xae\xdb2L\x8a\x0bXATHO\x17\xc2q\xc7\x18,\x1e\xbb\x90\x03\x98\xbe\x1dz\x85\xa8\nQ|\xb7Mb\x17=\xb4\x98\xc9Gn\x94\x95\r\xedN\\x\xbc\x03\x84\xa7\x83?;v\xd2(\xd6*~\xae\x13\xe9\xb3\xec\xbfC\x07\xb5UL\xd0\x02c\x8es\xd0\xc9XVu\x96%\xee\xe0\x00x\xa7\xa8#XW\x85\xd0\x1b\x120[3Dl\x00t\xa6n\x88j\x8b\x1f\x9cO]\xec\x0f:\xd0SE\x8c\x1e-l\x1e\xecM\xd6\xba\xf0gpGpg\xff\xd4\x9c\x06\x87\xfa`rM\x8aN\x91\x84\xcd\xacC\xb5\x06\xa6\x18dM\xa1\x8b\\\xf1\x15K\x00\xa0<t1H\xf5\x1b,Lo\x94\xba\x0f\x95C\x11\x06\xf1\x02\x1b\xa6\x1b;f\xecmA\xcfT\xa7\xab\xcf\xff7\xd5:\xfb\xaf\xa5L{\x14\x18\xeaE)m\x8bG8\'\x88MIu\x1c\x08\x08s_\x04\x8cx\x0bW\xbc\n\x03\x04\xe5\xc8b\xccV \xbd\xc2\xa9\x88\x99~1}\xb4\xf9\xc0\x82\x08N}H\x07K\xe7\xe6p\x08e\xb6v\xe6:\xe5{\xa1pWP\xa2\xf5G\xdd\x87M\x16&\x87te\xc7$\xb8\xe2\x07\xf2qE\xcc]ywqqUg\r\x05\x1a\xf6K\x8a\x04\x15\x02\x14z\xd1\x1c\xca\xa3\xe7eD\x8b\x99\xb1\xbb\xba\xa3\xe5G\x80*\xc3/B7\x87,\xc0\xb3\xccg<P0;\x8bG\x1b:\x9d4\xd2B\xcd\\\xa2!\xcfQA\xcc\xde?\x0c\xbe\xd8\xcf\x98:\xc4\xd9\xbc\xf9bO\x0c\x12\x0e\x02\xa38\xe8`-\x1e\xed\xebQ\x9f6)\xb9$!\xa4B\xcb\x00\x06\xf9\xf5\xe7\xcb8z\x9b\xb5\xc6\x9a.6+\x7f\xcc\x9e1$\x8b<\x8a\x06\x01\xd4p\x1c\xd7\x1d\xb5\xd2J\xc1e\nOor6\xbf\x84|a\xbb\x07\xd4\xb1G\xbbH\xf9\xc0\xcc\x1feNH}\xf8z\xb4j7\xba\x15\'\xd3\xf9\x99z\x0f\x08x\xc8\xa6Q\xb9\x8f\xa3<I\x8eH\xbea\xa70[\xb7\x81u\xa2@\xbdovd\xbc\x19\t\x80`\x9a?\xb4\x8bq\xf1\x99\xb5\xea\xeaU:q\xfa\xc4>\x89\xad\xe9\xe9\xd5\xc3\xa0\x935\xb4\xd5\xa0^X\xae\xfd\xa4\x9dE\xbd\x11\x88\x16\xd3\x1e\xc6\xc7\xd5\xfb]\xf96yf\xc2\x12d\xce\xdc=S^\x84\xaa\xa6\xe1Bc}\x0b\xc9\x80jx\x9b\xea\xc6\x9a 
Lg\xd9\xdb\xcb\x9ad\xcc\xfbW\xd7,\xe4n\x92\x1f\x983Q\xe4\xb5\x84R\x81X\xbaC\xd2\r\x14\xe5\x9a\x8e\xed\x1c\xa6\xd2\xb9\x01\xc4\xe4+r\xe4>\xda\xb9&\xbc\x05\xa6\x84\x1b\xac_\xc3V\xdc\xb4\xdb\xe2\x89\x05\xce\xcb\x96\xc1\xdc\x87\xf4\xd9\xf9\xfb\x97S[\xffi\xe7:m\xcc\xe0\xed\xf2\xde\xa6\x9d\x9e\xb4z\x82\xad\xfd\x07o\x99\xbd\xa1\x18#Y\x8d?k\xf5y\xd9\xbd\xbf\x88\x91\xce\xebQ\xfd\xb2J5\xf8\x9a\x01h\xb5\x0ej\x18\xf5lR9\x9d\xce\xb6\xc5\xaey\xe5\xbe#~\xce\xfe\xf0*x%\xc0\xbdY j\x8e\xa6\xb5:\xe8_\x813\xd2N?\xfdQ\x8f\xc4\x0c\x89\xa3\xe3\xe1\x914\xf5A\tr\xdb\xbf\xaa\x0fq\xa8)\xff\xe4\x1e?5\xc3\x9c(\xe8e\xd8\x159\x8d\xd5\x0bE\xc6\xb8\x00\x85\x06\xce\xd2\xb4\xf7v\xc5\x08\x8f\xe2s\xd0$W5\xd7d\x9b\xc3\xf75A\xb0q\xfaE\x03Rt\xd7}<|V\x8f\xb8\xa3\xcc\xbbp]\xd0f@\xfa:\xc7\x86\xe5\xb6\xffU\xb7\x86\xf2\x1a\xd6\x94\xc1t\x98\xdbPx\x82\xab\x92\x86\x99\xb3d\x82\x1ag\x08r\xd3\xf9\x89\x13%=\xa4\x08\x9a\xa8X-N\x1d(\x1a\x1d55eM\x83\x1b\xb30\x85\xe9\nnB\x89\xa1\xba}u\x13\xa7\x08B}`_7*kW\xc8\xa3\xc8V\xcd\xcd\x05zh\xabRR!WN"\x90\x8f\xf20l#A8\x9fsp\xdc\x87\xef\xca\xd5\xbc\xednc\x8ew\xaf\x1e\xd1\xad\xc8\xae\xeao_\x84\x03\xbf?\xfd\xd8<Y\xb0\xdd#\xb8TD\xe8@v\x03\xad\xa1\t\xe4\xda\x88\xdd\x90:\xe3h{0\xe6\x95>tX\x8f\xe4\xaa\xbc\xcagU\xb9u\xfb\x7f0\xa1\xf6\xba\\\x81(\x1b\xb1\xe6nB\x02wm\xebCu\x15\xa7\x1d\xf7\xa8\x83\xd6<%\x1cK3qI\x87\xc6\xf5\x04\x00t\x93\xa9\xcf\x14\xe7\x9bdm\x0c\xb9\xc65+\x85Px\x9d\xda\xb49Y\xfef\x9fM\x7f\x0b\xe6\x1a\xeb\xac\x05)\x96\xe7W\xed_m\x04\xc8\x86}\x11yy\x82-=$\xab\x18\x11\xb2\x8cp\xe8pw\xe3\xfc\xeb\x866\x8e\x91\xcc\x91\x99J\xea*p\xf5\xce\xed-\x19"\x16\xe0\xf8\xa5s\x96\xd7\xda\x1d\xaa\xbe\xd4\xaa\xd0YT\x15\x99\xc9\x1eD/"\x1fi\x91|\x05\xa72\xff\xf9`\xe7\x04\xdf18x\x10c\xfaUK\xc0*\xb1\x9ab\x8b 
+l\xe1\x14"&D/\x9c]\x02\x87\xe91\xa9"3\x87>I\x92\xe6\xbdl\x0c*\xaa\x8d\xb0\x08\'\x18\xcc\xa57_\x12\xf7\xff\'$\xa9M\x1e\xdb\xd6\xb51\xe8\xe5Di\x1b\x07H\x15\xaf\xddJ\x90\x90\xfbcA\x8ey\xb8\x9d\xb8\x90"\xaav\xe0\x94U\xf9\xb7\xc7K\x16\xe1\xb69\x0e\x07HmU\xfa\xf1\xec]\x8c)\x0f\xcc\xae\xfa\xe6\xdauEh\xaa\xc1u\xf9\xe6R=\x1aI\xd6 \xd6 \x9a\x93\xbe\xc4\xe4c\x96\x01\xb0\x8e\t\x88xT\xb1fg\xc7\x99\xfdT\xe5\xe6\x10\xd9\x05\xd3\x7f\xa3\x8f\xc5\xd0\x8f\xc8\x0b\r-"\x97R\xe3dX\x06F\xbc\x00C\xcaH=[5;A\x04\xbcTN\xdb\xc9\xf8dv\xab\x08i\x94\nW\x90\xddU* \xf6\x83~}:JGuf+\xcc\xea\xbdx~\xc7BZ\x02\xf7\n\xd4yo{\xdd*\xb5#\xb0\xb6\x11\xb3y\xf9I\xa6\x85\xb0\x05\xe5?\xc8<\x87\xfe\x19\xc7\xc1~4\x0bQ\x8a2\xe4"{\xb6\xfc\x02\x83\x83(\xb1\xa9\xce7\xae\x1c\xe8z\x11\xc5\xc5]\x18.\x93\xe1\x05\xb4<\xf9X\x1f\xff5\xd7\xd6\xa2$b\xd3\xdc\xae\x9c\xda\'\x1a\xfb\x15u\xd8\xfe\xf9\xd1\xd1\xf5\x17x\xb2)\x9a\x98[@`Bt\xdd\x03<\x04\x0e"\xb9#\xa2\x14qC\x9d]\xbd\x8em\x06\x9c\xf8#~E\'$<Tg\xeb/u?cs:])FS\xf8\xf0\xae!\x0c\xcc\x00\xe4\xe1\xce\xbd3{\xaeF\xc5\xbcDmY\xc9`AXC\x86\xe2m\xffv\n_\xf0\xc4\x1b\xe6A\x81F\xea\xe3\xa6b52VO\xae\xadYxU\xa57\x18e\xe3;\xb2\'\xd8\xddk\'\xffY\x18\x80\xcfd\x80\xd9\t\x88\x1c\xbe\xdf\xacG\x13\xa3{A\xdf&\xbc\xa0\xec\x06\xd1\x1eX?l\x86f\x91\xa5\xde[\x929\xf7\x02\xc1Hc\x02{\x9d\x8dWf\x08\xf81h\xbc\x1bL\xdae\xba\xb8\x1a\xa8\xa79\rYn\xd0@\xc2\n\xb4\x806\xca\x90+m\xed\x12\x88hx\x8c\xc7\xe1,\xfa\xd4B\x8aTAc\x05\xe0\x9e\xed\xb8\xf8=\xf4\xf9V\x83O\xe7\xd8B\xf6`\xd16"\xe3\xc8\x08\x83@\x05Q\xea7\x9a&\\\xff?\xb9\xb1V\xe6\xd7\x01-\xa6\x93%\x8b\x9de\x95t\x11L]\xf4Hq\xaaX!\xdc\x90V\x0f\x18\xac\xcd\xda=5oO\x9a\xfe@&\x8ayS@\xb1\xa4\xaaTS\x8b\xcah\x1cmDT\x81\xc8\x1a\xa7\x95\x8c\xe9\x96b\x1b\xfa\xd3\xaf\xa5\x9d7\x0c\x8do\x08\x01\x16\x16\x06\xa56\x1f\xc4\x9e\x93\xce\x85\xf2el\xe5\x8c\xc1\x80\xfbs\x0f5\xc5\xe6\xb1\x8cDo\xc7\x91L\xf1\xa1aV,y\xc2\xa5\x0e\x9e(\x88CWo/\x0f\xcf\x86f\x95\x1e\xd8\x03\x01\xe9\'\xcf\x9f\xe8T\xfb\x894-\x96\xd7\xa6\xed\xb9I9b\x84h\xadk2\xf4\xde\xa1\xa6\x1b\x0b\xc5t|\x11\x98\xeb\x90\xb9V\xc3W89
\x0eB\xa5\xac\x16\xc1\xdff\xca\xe6\xdb.?#\x03\x0f\xac\x93^\xd5\xfb\x8b\xa3\x00\xb5\xd2\xd4\x98\xf6\x87\xf6\x9a\x89x\x99\x8c\xbb\xc3\xa3O\x02yd\x95\x08\xa5\x14\xe6\x00X\x14j\x7f\x9d\xff\'\x90o\xb0\x80~s\xcb/\xbb\xefp\x19\x90\x89\xef\x14KplmN\xc6\xc9_\x8f0+\xc5\xd46V\xf9\x00(\x08\x04$\xdb\xa9\xb1\xd3I)\xd4\xec#\xc3\x16\xba\xf0 \xecV[\x17z\xd1}h\xdd/.\xf2U\xe5ndL\xfd\x10\x1a\xa2\x920_\'\x01M\xcaS\xc3\x8fH\xdfJ=\xd2\xd7\xb0\x11R\x85\xc1\x19}v\xf4I\x19H;\x07\xdfZ\xde\xab5\xa1\xe4\xa4\x1e5\xe9P\xfc\xbb\x97C\xc5)\x1cr\x18\xfcU\x190\x88,0\x8a\xc4\x94\x04b\xbb\xbc7xQ\xce\x04)\xcc\xbe\xa7\xe5\xf5=\ro\xee\x04\xc8&1\xd9@J\xd6\xa2L\xcb3\x90)\xa0*\xe9\x04\xea9\xc5x\x95\xed\x04q\xb3\xd4V\xc7\x04\x0bf\xec\xd1\xa25\xa64.\xcc\x1a\xa6\x998\xe6\xa3\x11\x0c$"-n^\x19+\xf8\xcd\xf5\x03d\n9\x8aQI\xf2\xf3\x9b}]\xa22D\xcay\x94\xaa\x8d\xb5h\xea\xea\x95\x1cS\xc3\x155S\x16[\x92z\xd4^\xd1\xf4\xe7?\x95\x1d\xb0\xee\x80\xc0\x1b-\xef\x17O\x7f\xfc\xc7n4\xab\xd6\x83\xeb\xe0\xb1\x14\xd7\x04\x835\x88\x05\xb1\x1c\xe4\x05\x82\xfe\x9ab\xc6\x9e\xcb\xdf\xac?\xb8\xafh#\xbdkYW\x02=\r\xf6/\x88\xfce\x15\xc2})%\xf8\x1c\xc0\x9d?R\xc8%\x9eh\xdf\x8fw\x97\xce\xa7\xed\xa7\xec\x0c\xbb\xe4\xcbS\x84c{\xfc\xc9\x1aw\x0f\xba8\xdc\x06\x7f\xfa\xc4v\xcf\xf4]&\xac\x8f\xb06\xefO\xb4\xde)Y\xc12g\xfb\xb3 \x83\xd0`\xe0\xc6\x96`)&\x83\x1b\xdf^\x82\x1a\x88\n\xa8\x97a\xeeD&\xadD\xca$/\x95\xe0\xbf\xe4\xe4<\xba\x199)\xa0\xb8\x7f\r?\xf7]\xb9Ni\x87\x84\xdc \xe4\xe1\x9c\xa8Y%n\x03\xbc\x83\x8c\xe9\xf6P\xf1\x15\x11\xbe\x8b\xd8\xb69Ad\x88\x8di\xa9n\n7\x9fU\'\xe8\xb1\xeat3p\x12\xa9\xb6\x9f\x8bm\xc8\x81p\x94q\x82\xd3f\x87k\xa37\x1b^\xb7\xab\\\xf6d]\xdc\x19\xab/\tfr\x95\xa9{s\x7fK&a 
$\xab\xbd\xb9j\xfe\x8a\xaedP\x9e\xed\xd5\x12HB\xc3\xc7`m}\x7f]\x07-\x9f\xb4\xec\x8a-P\xff6\x94V%\xb2eh6#\xbf4\xb7\xe5\x84E\x11U\xcb\xb8\x81\xe9\xcd\xe1wM\xb2\xf70\xd7\x94c\xc7X3r{\x1d\x8a5\r\xd5\xd9\x05\xf0t\xaa\xdc\xb3\xaa\xe9\xed\x82\x81=\xe1\xff\xe0\x06W\xb5\x00Q\x8c\x1b\x87\xee\xb8\xeb6\xeen\xd9+\x930\x97Z\xe4G\xbaY\nf&T\xcdM\xbd\xfe\x1f\x11s\x06\xf3\x88b\x9a\x99z\xa4\x9d\xc5Wj\x02\x12\xb2F4\x95dx\x9d\xca\xa7\x8d\x05\t\xb1p\xdc\t\xbd\xebZ*:c\x8e\x11!4\xda\xc2M\x80\xa2\x18#\xe7hu\x1fn)\xa2\x16*\xca\xd9V8\xf9\xedW0z\xcad\xd3\xd6\x8dPt\xf5\xb01\xbb\x96\xf8+7\xeaQ\xe7{\x92\x9c\xae\x98_\xea\xcb\x82\xc32\xff\xd8\x15\xa3\xa7\'\xc5\xec\x1dg\xfa\xa2\xf0\x86\xe1\xb0d\xb3\xb6\xabK"Y\xb1L\x06\x05g\x9b\xeb\xb6b\x9e\xdb\xf4\x0f\xdf\\\xd6\x8ciL\x87 &3\x9f\x86\x92\x1aw\x8f\x81\xcf\xd8\xb1\xfe\xa9\xbc\xc4\xd8H\xb1\xeb\xaf}S\x1d`\xa0\xcft\x00\x06X@\x1e\x08\x05\xc9\xed<\xd0\xbe%\xf7\xc2k\xdb\xab\xde\x9f\xda\xea\x13jQ\xa0\x16\xe7\xc5\xf8|\x0c_\x06\xfdB\x90\xd0l\x98\xc1\x1f\xd9J\xe0\x7f\x8c\xf6\x9c=\x145KYU\xf8\xa43\x15\x84w\xb3\'\x13+\x94\xa03\xaa\xadV1=\x8b\x95X\xf3\\\xd7\xb13_>-\x1d\xe36\x9dYJ!vj\xed\xedv\xf5\xe9W\xe2\xa01+\xa0n\x07\xd9(0\x7f}q\xef\xb3\x96\xd0\x99\x93\xa8\x92]1\xb2\xd8:Fx\xd1\xd9\xf1b\xbc\xd2?\xc20\r\x06&\xe5\x8f\xd9\x8b\xe8hO\x1b\xd6\xd9M\xedt\x1a\xe8l\x10E\x93\x17\xa6\xe8\x92\x0c8\xb2V\xdf\xe8\x8e\xd1\x0f\xadL\x8c\x0b`\xc8\x04\x0es5\xde\xbd1{J\x06\x1e\xf9\xae\x02\x90\xc8\x01\xf5\xef\x0e-\npb"9&\xe10\xe3\xd9\x15\xdbS:\xbd\x81\x05\x8b=\xf4\x86i)\r\xb6\x83\xc9\x1d\x08\xbc@\x181\x18Cc\'\x19\xae\xa9\xbdA%\x1a`/_E\x1a\x04p\x146\xdf\xa5\x06\xed\xd7\xc0\x02\xfc\xf0\xe7\x1b\xf40V\xc2\xb0\xfaD3\xc0\x18\x08\x97\xeb?\xeb\x11\xf7\xa3b\xa9\x00\xba\xaa\x1c\xe2CnUf\xe7\xac\xe3l\x16\xd7N\xd6\x87\xf3\x96yS\xd1\xb7\x19\xcc\xfe\xd3\x19q\x82\xde\x88~aA"2d\x9a\x10\xe2\xcf\\\x18\xd8\xdf\x97\x1e\x96!\x81\xe2"\x1b\x8d\x80\xd9:\r\'\xbd\xc1KY\x1eef\xd5\xe8\xcc\xf31,\xb6\xe7w\xf9\xdcl-\xe2\xeby-m#\x0bd\x99<\xb0\xc7\x16\x8b\x81f\x05\x1c\xe7\xbe\xe5\x04\xa4\xfb\xcaFY\xdf\x9c\xbfDZ\xd9\xccT\
xac\xbc\xb4\x1c\x91\xe5\xb6H&@\xf7g\x1a&\x9cB\x8f\xcaN\x96\x17\x01\x8b\xf3\xdd\x83v\x94\xc8\xcb#M\xc1n\xc2\'\xf0V\xbbpy\xc1)\x0e\xd7-\xfeA \xd2\xa7g\x9b\xb0\xa9\xcf\xb2:\x96\xb9o)JKK\xca]\xaf\xc9\x13\xe1h\x80l\x14\xfc\xc2\x01<[\x7f\xb6m\x7f\xeb\xdcg-\x9f\xd31\x8a>\x9aW\x14m\xad\xa6}\x9f)\x85nc\x17~\xd3\xbd\x9e\x14RD\xea\x08\x03\xb8\xe9\x8b\x8c\xe2x\xd4T\xaf\xbe\xad\xdaF"\xb2\x07~h\xaf\xc8V\xd8\xc4n;\xbbaZ*|\xc6\xe8\xd5\x01`\x1e.\xdb1\xb5\t\xc0\xf5\x88\xfbB\xb9eC\xd5\xd0\xc8\xaf_\x01\xdd2\x96=y\xc1\xbbi\x99\xadV\\\xbf(\xfa\x9a\x88(\xc7\xde\x85\xc6\xeep\x94Mb\x98G\xaev]/\xac\xbcj\x0c\x01\xe3\x85\xe1Q\x9a\x95\xd5$\x1a\xbfC\xba;\x94a3\xb5.{!O\x0c*\x10\xcd\xc5\xac>N\x01c\t\xb77?H64k\xe6\xe3\xf6C4\x12\xa0\xfa\xe0\x08\xa6B\xc0\x1a\x85\x19>D\xd0\x99\xc5\xb4\x01J\x95\xd9\xd3\xfa\xf8\xc2\x1e\xef\xe5\xbf\xbf+\xad\xca1\x07-i\x97\x93\x88\x05~\x95\x19i\xce\xd6\x9f\xc2\x92\x80hl,E\xb3-D>\xdd\x87z\xd4\xd7\x1b\xf5\x0b\x99\xce\xb0\xb2\x8b\xc7\xec\xdf\xce\x99\xdf\xb0\xb1\x0bA\xa5\x8b\xa6\x9c\x07\x0c\x8f\xc0\x87E\xb5\xee\x91\x04*S\x1d\xe9\xa8\xb9\xc9\x8c\x17\x9d\xdaF,\x1b\xce\xda\xb6\x04\x8c\x05\xafpqY\xdc\x81\xbezH\x96\xa9\xfa\xd1]\xde2\x02\x82\xd8\xa8j\xf9\xc9L}9\x93c`\'\x92C\x0f+\xc16\xdc\xa6bY\x97\x00A\x8d\x0f\xc8\x82\xdd\xf2WL\x9d\x8e\x94\xf9\xc0\x81*\xed?\xd4+\x8a\x15d=^\x80\x1a\xa1\xff\xe21\xf6\xf2w-\t\xb0\x1f\xf8\xa5\x0f:\x19\xc4\xe6\xca\xfb\x1fn<V\x0c\x8f_\x1c<\xc6\xcb\xac\x08\x05\x10i$\xf6E\xf0\x16\x0c\xec\xddi\xf6\x11\xe7\x0c\xe7\xcc\xba\xc4\x81\'}9\xeb\xec&\xf2\t\x82\xa5]!\xab\x9e@\x04\xad\x83\x8eP\x13Ca\x1e\xd8}\xbbZ\x1e$\xd0\x06\x11sEnqX\xc7\xd3\xa2\xfb\xa5\x8f\xabl\x8d\xff\xefOT\xc9.\xbc\x82\xee\xee\xbarn\x92U\x9d\x9e\xc3\xd9\x17yY5\xc2\x02-h\x0b\x0cY\x15:\xc6\x81\x15\xf4\xff0\x9c\xd92\xc1\xd9B\xf0\x0e4(\x1b\x7f09e\xbf\xd2q\xa8\xa6\xb3\x13\x9d\xb4\xa7\xb6\t\x1d\x90s\x9f\xdcT\xbaY\xda-H\xbak\xb8\xa3\xddM[4\'X\xaa\xad\xae@\x8a\xa1mn\xac\xe3\xca~\x15\xbaS\xfa\xa9\xc7J<\xfav\xf5\xfe\xb5k\x8f\xf1:\xbf\x91e\xb7\xcb\x02]\xd3\xb3o\xd3\x9f\x81\xf3aN\x9c\xf7\x13\x88\'\x12@D\xa3\xc
e\xd6\x94\x15\x08LE\xe8H5\xe8P1N\x1f\x18\x0eE\x11P\xb7o!]*\xa7\xef\xe2M\xe6mn\xeaQK\xed\xc5:\xde2[\xd5\xea\xf6\x06\x04s\x8d\xa0\x11\x16\x9c\xdc\xbc\xed\x00\x89\xb3b\x06\xf5\xe2<\xb0\x90\x91\xa1\xc9\x87\xdf\xc0d\xf1y}k\xf51\x06\xa2\xd1A\xf6\x86\x03\xf4Dq\xaa\'dAtu\x1c \ne\x06\x90\xbc\x18G!p\xc0\xfa\x12XQ\xb2KaK\x86\xbc\x05\xb8}\x9cS;\xb8W\x8f\xca\x05=\xb48\x93l\x06\r7Pte\r\x83h\xee\x97z\xa4[\xc1t\x1d\x86\xa1\xd8\xd9\xaa\xa3(A\xad#J\x90xr\xde\x8cK\x1b\xcb+\x82GZ\xe8\xf6\xb1\x83\x087:\xb1\x85`\xeb"\xc2\x06\x8c>\xa1\x0eU\xa2\xe2Jw\x15\xc5\xe3\xe4\xf4\xd1\x07;\xe0\x8d\xea\xe2!?;\xb6\x1d\xe1\x06\x90}\xda\xd4\x12b#\xed*\xeem{\tJ[\xe7\xe8\x82Y\x15J\x9b\xa4/\xec\xf7l\xee\xe8K\x1c\x0f\n\xfa\x96sw\xder\xf7\x8e\xc9V\xf3\xec\x1e\x99\xb6\xedK\xd233hi\x01\xba!(\xf9|\x07\xdczJ)\r\xf6\xb1\x858\xdb^\xb4\xc5\x80&\xa3\xaa\x9c\xac\xd5M7%H\x1d\x07*\xc0\xce\xc4h\x8d\x0b\x81\xe3\x95~\xaf\xd5\xcfo\x966\xa0{\xc0uO\xd0]\xc2r\x03\x1e+\x0f\xf8\xa0\xf2\xf3\x8a]\xdf\xa4\x05;c(p\xc3\x0b\xe3n\xd3IY\xf7X\x17\xef\x1f.\xda\x9d"{\xb8\x19\x9b\xc1>b&\x14\xd37%U\xdd\xf0%\xe7\x87\xd0&$C\x81M\x89\xa2\xfb`\x11>\xb8\x07\xc3w\xacb\x8d\xd7\x1a\xed\x03\xdc\xa3\x99WP\xad+\x00\x93A.\x05\xb65\x80\x95\xd3\xdfWn\xdf\x0b\xdej\xf2\xcd#{-\x92\xd8\x13\xfe\xf2-\xb3\x82\xa8\xc9\x1c/(\xfe\t\x16\x9e0zPHN-#\xda\xef\x1d^>\xcf\xf9u\xc5\x8b\xbb\xfa\xf5\xb0*\xeb\x93\x98\xa3\xb1\x11t\xb5d\xc4?6\xfb\x13\xb5\x14\x8e\xb1\x90\xeb\x03\xfb)\x17\x0e,(\xa8\xca\x1b\x9f\x88\xb9\xb5]\xcb\xf9\xa4?\xecs\x89j\x14\xfd\xd5_\x07p5;$\xf4\xc6\xdaUr\xffa}|U>\xc3\r\x0fe=\x18\xa8\xb1\xe6_\xd4\xa3\xe9\xaf\x1e\xa2,\xdc\xe4\\\x00\x01\xfcB~j\xcb\xc1\x0c4\xd59*\xed\xb8\xa6\xa8\x92\xb7\xc9\xeb\xb1\xf7\xf3\nF\x9ce\xd2^\xf4\x96P*\xd6$\xe3\xb1\xb7m%\xe3\xa3\xb7\xec!\xf1c\xb2\xa0\xc4\xe2\xc4#M\xbcx\xe0V\x14\xde\xb7%\xf3\xe194d\xfa\'\xa2\xdb\x11\xdf\x80\x0eA\xa4\xb9pVo6\xc1\xdby\x8c\xff\xe3\x1d\xcam\x95\x92\x1c\x15\xebnO\x07\x10\xe3}(V\x130\x9b\x8eoLFX\xc1f\xf8%\xcb\x0f\xfa\xf0\x9b\xba\x1fP\xef\xa2\xd8\x987\xcf9\x12\xb1D\x04\x93\xcd\xfeg\x9fI\rL\x15I\x98]\x08FN\x
ba\x7f\n8\x06f\xb5\xda\xe6\xf4\xbd\xd2\xd9bF\x12\x1fDK\xa2\xc4V\xab\x9a_K\x89\xbdJ[c,\xbc\x80\x127\xe99\x1c]\xc4\xf3W\xc8\xb1+Jm\x9c\xf6\xaf0\x13\xd7)\x86c\xf4\x8a\x04y\xb0W\xd7\x93\x16|yV\xb0\x8a\x1c\xfa\x80\xfd\xa1\xbd\x1e\x91Ny\x7fW%\xeb\xe8I\x02\xd5\x97T\x19J\xa9:3\xc1\x14\x0eA\xfe\x91\xff\xac\xc5\xdb\xc3rM.\xacV[\xbf\xcd\x02\x93\xa4XZ\xea\x1a*\xf6\r\x9c\xdbB25\xc5\x84v\xca\xb2\x91o\x84\xddF\x04\xcep\xb5\x14FgS_\xdf\xdb/\x91}\xa7jF\xd5=\xbd\xf6p\xf3\x83\xf2K\x00\x8d-B\x8d\x84\x9d\xa3x\xef\x84\xa8Sb/\x94Z\xb1\xc1Bn:%\xd5\x03*\x15\x1el|(\x06\xa4\xb0\x8e\x94\x7f\xc3\xb1\x03\x84C-\xe5\xec\xba1\x98\x8f\xea[\x87C\xda%\xc0\x9d\x83\xdf\xf7kM\xbe\x16\xe2\x88\xbe)\x16"\xd3Z\xcd/\x10\xc2\x1a\xa0$\x97\xe4\x11\x13\xc5\x0bM\xfe\xb2\xb4\xc0C\xa5\xc9T\xac\xeaQ\xa91@\xf9\x87\x95\x06\x9a\xe7\xfd\x97\xb5\x1a\xf1n\xd4*7\xfeFx\x05\xfd\xf3\xa4\x1fg\xf2\x83kY\xbc\x1e\xc7W<\xda\xdf\xd2\xf5A~K\xddF\xd3\xc6\xc3gMUvH8!\x8di\xedY}k\xfa2\xd0\xef\xfc~mhz\xf0\xcb\x8f\xc7\xec-\x8c_\xf7&5\xfb\xd4\x93\xe4\xe1\x0eY\xb61~/\x16\xfc\xdb\xfe\x1d\xac\x0b"\xffr\xd1Nap\x83\xe7\x96G\xc4\x105(pd\xa5\xa3\x81\xb41\x08r\x1c|\x9b\xde\x83E\xae\x94\x91\x12d\xf5\x87\xc6\xad\xb1\x98\x98]Z\xc5\x7ffD\x1b\xa8\xe1R^\x90\xe4\xb1\xf2\x1aZsm\xbe\x97\xf65[\xb8\xf1\x16N\x03$\x80\xea\xe8\xbfy\x03\xc4\xa2\x866\x07\x0e\x82\x80\xc6\x84\x89\x9aRQ\xb4\x8e\xb1B\xa7cSY\xf4F\xfb\xd9\xde\x9ah\x7f|\xdf\xdf\xde\xed\xc6\xc7\xff\x0fV\xc6\xdc\x7f',compile))
|
[
"noreply@github.com"
] |
Paridoll786510.noreply@github.com
|
2dada9662680945a673aa0d872099c21de1fbd90
|
d2c164e0b828496af0b9fe105c89eacd441d2e5f
|
/synonyms/synonyms_tester.py
|
1d57e3b3ba623f6f1a20300a2f943f853c2a5fda
|
[
"MIT"
] |
permissive
|
JLefebvre55/ESC180-Labs
|
abfae38640967c1c6d5293c636b19becb0251bf8
|
e34afaf235fabac6b5d284bddfd0b61d276ad50d
|
refs/heads/main
| 2023-02-01T07:03:33.822607
| 2020-12-18T05:01:42
| 2020-12-18T05:01:42
| 315,552,358
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,039
|
py
|
# No need to thank me but if you really want to express your gratitude
# feel free to get me some BBT once covid has passed :)
# Also don't even try looking at this code it's f****** disgusting
import socket, threading, json, contextlib, io, time
from random import *
synonyms = __import__("synonyms") #put your filename here (pls for the love of god run this shit in the same folder as your file (and for the love of jesus do not pyzo this))
HEADER = 16
DELAY = 0.0 #hehehehe
PORT = 5555
FORMAT = 'utf-8'
HOST_IP = '172.105.7.203' #hackers. Challenge accepted aight?
class Network:
    """TCP client wrapper for the remote synonyms test server.

    Connects on construction, identifies itself as a 'controller', and
    exposes one request/response helper per server command. The wire
    protocol is: send ``"command:data"``, receive a 4-byte ASCII length
    prefix, then receive that many bytes of payload.
    """

    def __init__(self):
        self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.host = HOST_IP
        self.addr = (self.host, PORT)
        # Server greeting string. Fix: connect() previously had no return
        # statement, so self.id was always None.
        self.id = self.connect()

    def connect(self):
        """Open the connection, announce ourselves, and return the greeting."""
        self.client.connect(self.addr)
        self.client.send(str.encode('controller'))
        received_message = self.client.recv(2048).decode(FORMAT)
        print(received_message)
        return received_message

    def send(self, function, data=""):
        """Send ``function:data`` and return the server's full reply.

        :param function: server command name, e.g. ``"get_dict"``.
        :param data: optional payload appended after the colon.
        :return: decoded reply string, or False on a socket error.
        """
        try:
            self.client.send(str.encode(function + ":" + str(data)))
            # The 4-byte length prefix may arrive as an empty/partial read;
            # retry until it parses. Fix: was a bare ``except`` that also
            # hid socket errors — narrow to the int() parse failure only.
            while True:
                try:
                    msg_length = int(self.client.recv(4).decode(FORMAT))
                    break
                except ValueError:
                    continue
            # Accumulate until the announced payload length has arrived.
            temp = ""
            while len(temp) < msg_length:
                temp += self.client.recv(4096).decode(FORMAT)
            return temp
        except socket.error as e:
            print(str(e))
            return False

    def get_sentences(self):
        """Raw sentence corpus used to build semantic descriptors."""
        return self.send("get_sentences")

    def get_dict(self):
        """Reference semantic-descriptor dict, decoded from JSON."""
        temp = self.send("get_dict")
        return json.loads(temp)

    def get_cos(self):
        """Reference cosine-similarity table, decoded from JSON."""
        return json.loads(self.send("get_cos"))

    def get_tests(self):
        """Raw test-file contents for run_similarity_test."""
        return self.send("get_tests")
class client():
    """Interactive driver comparing the local ``synonyms`` module against
    the remote reference implementation served over the network."""

    def __init__(self):
        self.network = Network()

    def run(self):
        """Main menu loop; dispatches to the individual test modes."""
        not_ended = True
        print("Hello! Welcome to mrmandarin's synonyms testing program!")
        while not_ended:
            print("Here are your options:")
            print("1 - Check Build Semantic Descriptors (Subpart b + c)")
            print("2 - Continuously Check Build Semantic Descriptors")
            print("3 - Continuously Test Cosine Similarity (Subpart a)")
            print("4 - Continuously Run Similarity Test (All subparts)")
            print("5 - Exit")
            s = input()
            if s == '5':
                not_ended = False
            elif s == '1':
                self.check()
            elif s == '2':
                self.continuous_check()
            elif s == '3':
                self.continuous_cosine()
            elif s == '4':
                self.continuous_run()
            else:
                print("Dafuq you entered boii")

    def _compare_descriptors(self, mandarin_dict, user_dict, outer):
        """Compare every [word][value] reachable from ``outer``'s keys
        between the reference and user dicts.

        Fix: this logic was duplicated inline twice in check(), and the
        second copy printed the user's dict under the "Mandarin's Dict:"
        label. The helper always labels both sides correctly.

        :param outer: whichever dict's key set drives the walk.
        :return: True when all shared values match, False on first mismatch.
        """
        for word in outer.keys():
            for value in outer[word].keys():
                try:
                    if mandarin_dict[word][value] != user_dict[word][value]:
                        print("VALUES NOT MATCHING!")
                        print("WORD BEING INDEXED: ", word)
                        print("WORD NOT MATCHING: ", value)
                        print('\n')
                        print("Mandarin's Dict: ", mandarin_dict[word])
                        print("Your Dict: ", user_dict[word])
                        print('\n')
                        return False
                except KeyError:
                    # A word/value present on one side only (was a bare except).
                    print("Something went wrong!")
                    print(f"An error occured when trying to index [{word}][{value}]")
                    print('\n')
                    return False
        return True

    def check(self):
        """Fetch one test case, build the user's descriptors from it, and
        compare against the reference dict in both directions (so keys
        missing on either side are caught). Returns True on full match."""
        print('\n')
        sentences = self.network.get_sentences()
        mandarin_dict = self.network.get_dict()
        with open("sample_case.txt", "w", encoding="latin1") as f:
            f.write(sentences)
        user_dict = synonyms.build_semantic_descriptors_from_files(["sample_case.txt"])
        good = (self._compare_descriptors(mandarin_dict, user_dict, mandarin_dict)
                and self._compare_descriptors(mandarin_dict, user_dict, user_dict))
        if good:
            print("ALL GOOD!")
        else:
            print("Here's the sentences:")
            print(sentences)
            print("Here's Mrmandarin's dict:")
            print(mandarin_dict)
            print('\n')
            print("Here's YOUR dict:")
            print(user_dict)
        return good

    def continuous_check(self):
        """Run check() repeatedly until a case fails, counting passes."""
        print("To exit, just quit the program. Fuck user usability.")
        cnt = 0
        while True:
            if self.check():
                cnt += 1
                print("Number of test cases passed: ", cnt)
            else:
                break
        print(f"Well, you passed {cnt} cases...")

    def continuous_cosine(self):
        """Repeatedly compare the user's cosine_similarity against the
        reference table (rounded to 5 places) until a mismatch occurs."""
        cnt = 0
        good = True
        while good:
            # get_sentences() is still called to keep the server's
            # request sequence intact, even though the text is unused here.
            sentences = self.network.get_sentences()
            mandarin_dict = self.network.get_dict()
            mandarin_cos = self.network.get_cos()
            words = mandarin_dict.keys()
            for word1 in words:
                if good:
                    for word2 in words:
                        if word2 in mandarin_cos[word1].keys():
                            user_cos = synonyms.cosine_similarity(
                                mandarin_dict[word1], mandarin_dict[word2])
                            if round(mandarin_cos[word1][word2], 5) != round(user_cos, 5):
                                print("SOMETHING DOESN'T MATCH!")
                                print(f"Cosine Similarity for {word1} and {word2} don't match!")
                                print('\n')
                                print("MRMANDARIN'S VALUE:", mandarin_cos[word1][word2])
                                print("YOUR VALUE:", user_cos)
                                print('\n')
                                print("DICT1: ", mandarin_dict[word1])
                                # Fix: label was misspelled "DICK2".
                                print("DICT2: ", mandarin_dict[word2])
                                good = False
                                break
                        else:
                            # Reference table has no entry for this pair;
                            # move on to the next word1 (original behavior).
                            break
            if good:
                cnt += 1
                print(f"Successful matches: {cnt}")

    def continuous_run(self):
        """Repeatedly run the full similarity test (all subparts) until a
        run scores below 100%."""
        good = True
        cnt = 0
        print('\n')
        while good:
            sentences = self.network.get_sentences()
            with open("sample_case.txt", "w", encoding="latin1") as f:
                f.write(sentences)
            mandarin_dict = self.network.get_dict()
            get_tests = self.network.get_tests()
            with open("sample_test.txt", "w", encoding="latin1") as f:
                f.write(get_tests)
            result = synonyms.run_similarity_test(
                "sample_test.txt",
                synonyms.build_semantic_descriptors_from_files(["sample_case.txt"]),
                synonyms.cosine_similarity)
            if int(result) != 100:
                print("HMMM SOMETHING SEEMS TO BE OFF")
                print("Can't really pinpoint why tbh but I'll provide you with the debugging info tho...")
                print('\n')
                print("Checkout sample_case.txt made in your folder for the sentences used")
                print("Checkout sample_test.txt for the testing of run_similarity_test (also in your folder now)")
                print('\n')
                print("Here's my dict if that helps: ")
                print(mandarin_dict)
                good = False
            else:
                cnt += 1
                print("Cases passed: ", cnt)
if __name__ == "__main__":
    # Only start the interactive client when run as a script; previously
    # importing this module opened a network connection as a side effect.
    root = client()
    root.run()
|
[
"jayden.lefebvre55@gmail.com"
] |
jayden.lefebvre55@gmail.com
|
d4c1f69239d68c94a286d903669854b493ab1ae4
|
0d4391537a80b405f15853fafc3746b8b710cd3b
|
/src/pjBallot/ballotTree.py
|
093b2951df7433847e58726d2ab18ae59827ab3c
|
[] |
no_license
|
simbara/Old-Voting
|
54afa41de4684de10520bd19be4f1e89eaa869bd
|
a238167fd71242d9ff4c12e884a8a01204810266
|
refs/heads/master
| 2021-01-23T06:34:30.250664
| 2012-03-02T00:18:55
| 2012-03-02T00:18:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 582
|
py
|
class Race(object):
    """A ballot race: a named office together with its list of selections
    and the voter-facing instructions for that race."""

    def __init__(self, name, selectionList, instructions):
        self.name, self.selectionList = name, selectionList
        self.instructions = instructions
        # Fixed marker value; purpose not evident from this file — TODO confirm.
        self.works = "worker"

    def __repr__(self):
        return '<pjBallotObj>'
class Contest(object):
    """A ballot contest: the candidates offered and the voter's selections.

    Fix: the original used mutable default arguments (``selectionList=[]``,
    ``userSelection=[]``), so every Contest created without explicit lists
    shared the same two list objects — appending to one contest's lists
    silently changed all the others.
    """

    def __init__(self, name, selectionList=None, userSelection=None):
        self.name = name
        # candidates; a fresh list per instance when none is supplied
        self.selectionList = [] if selectionList is None else selectionList
        self.userSelection = [] if userSelection is None else userSelection
class Candidate(object):
    """A single named candidate appearing on the ballot."""

    def __init__(self, name):
        self.name = name
|
[
"rahul.rajan@gmail.com"
] |
rahul.rajan@gmail.com
|
8771d6eb8a5cf3ab0bb828212ec5ff42613a25f7
|
23fbc3d634b5c8251b847a8d248edb200df802d4
|
/loopchain/tools/signature_helper.py
|
18258ea3e9358e49faef30057dda48161a301184
|
[
"Apache-2.0"
] |
permissive
|
ahastudio/loopchain
|
2d942b46602ae61bcadc4a3c7f586cd9f3792e0b
|
88b76956c069fedc1a0a2d239f47c3866493ad0f
|
refs/heads/master
| 2021-05-05T03:22:03.633035
| 2018-10-23T05:26:44
| 2018-10-23T05:26:44
| 157,025,637
| 0
| 0
|
Apache-2.0
| 2018-11-10T22:05:26
| 2018-11-10T22:05:26
| null |
UTF-8
|
Python
| false
| false
| 11,745
|
py
|
# Copyright 2018 ICON Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Signature Helper for Tx, Vote, Block Signature verify"""
import hashlib
import logging
import binascii
from cryptography import x509
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import ec, utils, rsa, padding
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
from cryptography.x509 import Certificate
from secp256k1 import PrivateKey, PublicKey
from loopchain import configure as conf
class PublicVerifier:
"""provide signature verify function using public key"""
# KEY OPTION JSON NAME
LOAD_CERT = "load_cert"
CONSENSUS_CERT_USE = "consensus_cert_use"
TX_CERT_USE = "tx_cert_use"
PUBLIC_PATH = "public_path"
PRIVATE_PATH = "private_path"
PRIVATE_PASSWORD = "private_password"
KEY_LOAD_TYPE = "key_load_type"
KEY_ID = "key_id"
def __init__(self, channel):
    """Initialize members to None and select the verifier-loading
    functions for this channel.

    Per channel configuration, transaction and consensus signatures are
    each verified against either a DER certificate or a bare DER public
    key; the matching loader is bound here.

    :param channel: name of the channel whose options to use.
    """
    self._public_key: EllipticCurvePublicKey = None
    self._cert: Certificate = None
    self._public_der: bytes = None
    self._cert_der: bytes = None
    self._channel = channel
    self._channel_option = conf.CHANNEL_OPTION[self._channel]

    # Certificate-based loading when the option is set, otherwise a
    # bare public key — chosen independently for each signature kind.
    self._consensus_verifier_load_function = (
        self._load_cert_from_der
        if self._channel_option[self.CONSENSUS_CERT_USE]
        else self._load_public_from_der
    )
    self._tx_verifier_load_function = (
        self._load_cert_from_der
        if self._channel_option[self.TX_CERT_USE]
        else self._load_public_from_der
    )
def load_public_for_tx_verify(self, public):
"""load public for tx signature verify
:param public: der format public key or der format cert
:return:
"""
self._tx_verifier_load_function(public)
def load_public_for_peer_verify(self, public):
"""load public for peer signature verify
:param public: der format public key or der format cert
:return:
"""
self._consensus_verifier_load_function(public)
@property
def public_der(self):
if self._public_der is None:
self._public_der = self._public_key.public_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
return self._public_der
@property
def cert_der(self):
if self._cert_der is None:
self._cert_der = self._cert.public_bytes(
encoding=serialization.Encoding.DER
)
return self._cert_der
@property
def tx_cert(self):
if self._channel_option[self.TX_CERT_USE]:
return self.cert_der
return self.public_der
@property
def peer_cert(self):
if self._channel_option[self.TX_CERT_USE]:
return self.cert_der
return self.public_der
def _load_public_from_der(self, public_der: bytes):
"""load public key using der format public key
:param public_der: der format public key
:raise ValueError: public_der format is wrong
"""
self._public_key = serialization.load_der_public_key(
public_der,
backend=default_backend()
)
def _load_public_from_object(self, public: EllipticCurvePublicKey):
"""load public key using public object
:param public: der format public key
:raise ValueError: public type is not EllipticCurvePublicKey
"""
if isinstance(public, EllipticCurvePublicKey):
self._public_key = public
else:
raise ValueError("public must EllipticCurvePublicKey Object")
def _load_public_from_pem(self, public_pem: bytes):
"""load public key using pem format public key
:param public_pem: der format public key
:raise ValueError: public_der format is wrong
"""
self._public_key = serialization.load_pem_public_key(
public_pem,
backend=default_backend()
)
def _load_cert_from_der(self, cert_der):
cert: Certificate = x509.load_der_x509_certificate(cert_der, default_backend())
self._cert = cert
self._public_key = cert.public_key()
def _load_cert_from_pem(self, cert_pem):
cert: Certificate = x509.load_pem_x509_certificate(cert_pem, default_backend())
self._cert = cert
self._public_key = cert.public_key()
def verify_data(self, data, signature) -> bool:
"""개인키로 서명한 데이터 검증
:param data: 서명 대상 원문
:param signature: 서명 데이터
:return: 서명 검증 결과(True/False)
"""
pub_key = self._public_key
return self.verify_data_with_publickey(public_key=pub_key, data=data, signature=signature)
def verify_hash(self, digest, signature) -> bool:
"""개인키로 서명한 해시 검증
:param digest: 서명 대상 해시
:param signature: 서명 데이터
:return: 서명 검증 결과(True/False)
"""
# if hex string
if isinstance(digest, str):
try:
digest = binascii.unhexlify(digest)
except Exception as e:
logging.warning(f"verify hash must hex or bytes {e}")
return False
return self.verify_data_with_publickey(public_key=self._public_key,
data=digest,
signature=signature,
is_hash=True)
@staticmethod
def verify_data_with_publickey(public_key, data: bytes, signature: bytes, is_hash: bool=False) -> bool:
"""서명한 DATA 검증
:param public_key: 검증용 공개키
:param data: 서명 대상 원문
:param signature: 서명 데이터
:param is_hash: 사전 hashed 여부(True/False
:return: 서명 검증 결과(True/False)
"""
hash_algorithm = hashes.SHA256()
if is_hash:
hash_algorithm = utils.Prehashed(hash_algorithm)
if isinstance(public_key, ec.EllipticCurvePublicKeyWithSerialization):
try:
public_key.verify(
signature=signature,
data=data,
signature_algorithm=ec.ECDSA(hash_algorithm)
)
return True
except InvalidSignature:
logging.debug("InvalidSignatureException_ECDSA")
else:
logging.debug("Invalid PublicKey Type : %s", type(public_key))
return False
@staticmethod
def verify_data_with_publickey_rsa(public_key, data: bytes, signature: bytes, is_hash: bool=False) -> bool:
"""서명한 DATA 검증
:param public_key: 검증용 공개키
:param data: 서명 대상 원문
:param signature: 서명 데이터
:param is_hash: 사전 hashed 여부(True/False
:return: 서명 검증 결과(True/False)
"""
hash_algorithm = hashes.SHA256()
if is_hash:
hash_algorithm = utils.Prehashed(hash_algorithm)
if isinstance(public_key, rsa.RSAPublicKeyWithSerialization):
try:
public_key.verify(
signature,
data,
padding.PKCS1v15(),
hash_algorithm
)
return True
except InvalidSignature:
logging.debug("InvalidSignatureException_RSA")
else:
logging.debug("Unknown PublicKey Type : %s", type(public_key))
return False
class IcxVerifier:
    """Verify ICX-style recoverable ECDSA signatures and derive hx addresses."""

    _pri = PrivateKey()

    def __init__(self):
        self._address: str = None
        self._serialize_pubkey: bytes = None

    @property
    def address(self):
        return self._address

    @property
    def peer_cert(self):
        return self._serialize_pubkey

    def _init_using_pub(self, pubkey: bytes):
        # hx address = "hx" + last 40 hex chars of sha3-256 over the
        # uncompressed public key without its leading prefix byte.
        self._serialize_pubkey = pubkey
        digest = hashlib.sha3_256(pubkey[1:]).hexdigest()
        self._address = "hx" + digest[-40:]

    def init_and_verify_address(self, pubkey: bytes, address: str):
        """Derive the address from *pubkey* and fail unless it matches *address*."""
        self._init_using_pub(pubkey)
        if address != self._address:
            raise ValueError(f"Invalid Address : {address}")

    def verify_data(self, origin_data: bytes, signature: bytes):
        return self.__verify_signature(origin_data, signature, False)

    def verify_hash(self, origin_data, signature):
        return self.__verify_signature(origin_data, signature, True)

    def __verify_signature(self, origin_data: bytes, signature: bytes, is_hash):
        """Recover the signer's public key and compare it with the loaded one."""
        try:
            if is_hash:
                origin_data = binascii.unhexlify(origin_data)
            # Last signature byte is the recovery id.
            raw_sig, recovery_id = signature[:-1], signature[-1]
            recoverable = self._pri.ecdsa_recoverable_deserialize(raw_sig, recovery_id)
            recovered = self._pri.ecdsa_recover(origin_data,
                                                recover_sig=recoverable,
                                                raw=is_hash,
                                                digest=hashlib.sha3_256)
            return self._serialize_pubkey == PublicKey(recovered).serialize(compressed=False)
        except Exception:
            logging.debug(f"signature verify fail : {origin_data} {signature}")
            return False
class PublicVerifierContainer:
    """Cache of PublicVerifier instances for frequently used public keys.

    Keyed first by channel, then by the DER-serialized public key; building a
    PublicVerifier deserializes key material, so each one is created once and
    reused.
    """

    __public_verifier = {}  # {channel: {serialized_public: PublicVerifier}}

    @classmethod
    def get_public_verifier(cls, channel, serialized_public: bytes) -> PublicVerifier:
        """Return the cached verifier, creating and caching it on first use."""
        # setdefault replaces the original nested try/except KeyError dance
        # and avoids the unused exception variables.
        channel_cache = cls.__public_verifier.setdefault(channel, {})
        try:
            return channel_cache[serialized_public]
        except KeyError:
            return cls.__create_public_verifier(channel, serialized_public)

    @classmethod
    def __create_public_verifier(cls, channel, serialized_public: bytes) -> PublicVerifier:
        """Create a PublicVerifier from a DER-serialized public key and cache it.

        :param serialized_public: der public key
        :return: PublicVerifier
        """
        public_verifier = PublicVerifier(channel)
        public_verifier.load_public_for_tx_verify(serialized_public)

        cls.__public_verifier[channel][serialized_public] = public_verifier
        return public_verifier
|
[
"winDy@windystudio.com"
] |
winDy@windystudio.com
|
d6db02dcdcedad9af1fa0a7ffc7f9c8d89a09914
|
a4191cc76c1d733c58bbb6692a75b0885bb74e13
|
/VISION-master/차선/ENet-SAD/enetsad_node_ver0809.py
|
a21f0b7cda28f7110cd39d5ba22af0bdbb34abd0
|
[] |
no_license
|
js7850/sonjunseong
|
69173d67e34ce2085d2e0617fbefa02cbc6676b5
|
0d8bb7c87fac07634abd4b002f1111108b42e939
|
refs/heads/main
| 2023-07-16T15:28:00.459349
| 2021-08-28T13:08:19
| 2021-08-28T13:08:19
| 400,545,944
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25,686
|
py
|
import argparse
import cv2
import torch
# import the necessary packages
from scipy.spatial import distance as dist
from scipy.linalg import norm
from collections import OrderedDict
torch.cuda.set_device(0)
from model import SCNN
from model_ENET_SAD import ENet_SAD
from utils.prob2lines import getLane
from utils.transforms import *
import numpy as np
import rclpy
import time
from multiprocessing import Process, JoinableQueue, SimpleQueue
from threading import Lock
from std_msgs.msg import Float32MultiArray, Int16MultiArray, MultiArrayDimension
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from visualization_msgs.msg import Marker
from visualization_msgs.msg import MarkerArray
from geometry_msgs.msg import Point, Vector3
# --- Model / preprocessing configuration -------------------------------------
# Network input resolution (width, height) expected by the lane model.
img_size = (800, 288)
#net = SCNN(input_size=(800, 288), pretrained=False)
# ENet-SAD lane segmentation network (SAD distillation disabled at inference).
net = ENet_SAD(img_size, sad=False)
# CULane mean, std
mean=(0.3598, 0.3653, 0.3662)
std=(0.2573, 0.2663, 0.2756)
# Imagenet mean, std
# mean=(0.485, 0.456, 0.406)
# std=(0.229, 0.224, 0.225)
# Resize-only transform for raw frames, plus the to-tensor + normalize
# pipeline fed to the network.
transform_img = Resize(img_size)
transform_to_net = Compose(ToTensor(), Normalize(mean=mean, std=std))
# When True, main() starts the multiprocessing producer/consumer pipeline
# instead of relying solely on the ROS callback path.
pipeline = False
# --- Mutable state shared across image_callback invocations ------------------
centerPt = [(0, 0)]        # latest centre-line points in the warped top view
prev_steer = []            # recent steering samples used for outlier rejection
prev_centerPt = [(0, 0)]   # centre points kept from the previous frame
prev_steering = 0          # steering angle from the previous frame
prepoints = 1 # this for count lane...
bridge = CvBridge()        # ROS <-> OpenCV image converter
previs = False
# color_choice = ["N", "N", "N", "N"]
# Per-lane colour codes (0 none, 1 white, 2 yellow, 3 unknown) for the four
# candidate lanes: leftleft, left, right, rightright.
color_choice = [0, 0, 0, 0]
num_of_my_lane = 0         # current ego-lane estimate (see my_lane())
frame_100 = 0              # frame counter; colours re-evaluated every 30 frames
# parameters
# rectangle patch half width and half height
PATCH_WIDTH = 4
PATCH_HIEHGT = 4
PATCH_RADIUS = 6
LEFT = 20                  # horizontal offset (px) to ColorLabeler's comparison pixel
class ColorLabeler:
    """Classify a lane pixel as yellow (2), white (1) or unknown (0).

    Operates on an HSV image supplied by the caller: the sampled pixel is
    compared against a yellow reference colour, and against the pixel LEFT
    pixels to its left to detect bright paint on darker asphalt.
    """

    def __init__(self):
        # Yellow reference in HSV plus the two decision thresholds.
        self.yellow = (15, 100, 0)
        self.yellow_threshold = 40
        self.diff_threshold = 15

    def label(self, image, centerXY, vis=False):
        """Return (colour_code, centre_pixel) for the point centerXY = (x, y)."""
        col, row = centerXY
        center = image[row][col]
        left = image[row][col - LEFT]
        diff = center.astype("int32") - left.astype("int32")

        # Weighted distance to the yellow reference: hue counts double,
        # brightness (third channel) is ignored entirely.
        yellow_dist = dist.euclidean(self.yellow, center, w=[2, 1, 0])

        if yellow_dist < self.yellow_threshold:
            color = 2  # yellow
        elif norm(diff[1:]) > self.diff_threshold and diff[2] > 0:
            # Large saturation/value jump versus the pixel to the left, with
            # the centre being brighter -> white lane marking.
            color = 1  # white
        else:
            color = 0  # nothing recognisable

        if vis:
            print(color)
            print("center lab : " + str(center))
            print("yellow distance: " + str((int)(yellow_dist)))
            print("diff: " + str(diff))
            print("diff norm: " + str(norm(diff[1:])))
            print()

        return color, center
def lane_classification_f(image, patches_centerXY, vis=False):
    """Classify the lane colour at each sampled patch centre of *image*.

    Converts the frame to HSV once, labels each point with ColorLabeler and,
    when *vis* is set, draws the sampled circles and shows the frame.
    """
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    labeler = ColorLabeler()

    for center_xy in patches_centerXY:
        color, mean = labeler.label(hsv, center_xy, vis)
        if vis:
            cv2.circle(image, center_xy, PATCH_RADIUS, (0, 0, 255), 2)
            cv2.putText(image, color, center_xy,
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)

    if vis:
        cv2.imshow("Image", image)
        cv2.waitKey(1)
class rosPub:
    """Thin ROS2 wrapper: subscribes to camera frames and publishes results.

    Publishes lane info (steering + lane colours), a small annotated preview
    image, and the warped centre-line points produced by image_callback.
    """

    def __init__(self):
        rclpy.init()
        self.node = rclpy.create_node("enetsad")
        # Incoming camera frames drive the whole pipeline via image_callback.
        self.sub = self.node.create_subscription(Image, '/image', image_callback)
        self.infoPub = self.node.create_publisher(Float32MultiArray, 'enetsad/info')
        self.imagePub = self.node.create_publisher(Image, 'enetsad/image')
        #self.lanePub = self.node.create_publisher(Marker, 'enetsad/centerPt')
        self.lanePub = self.node.create_publisher(Float32MultiArray, 'enetsad/centerPt')
        # Reusable message buffer: [steering, my_lane, colour x4].
        self.floatmsg = Float32MultiArray()
        self.floatmsg.data = [0.0]*6
    def data_pub(self, data1, data2, data3):
        """Publish steering, ego-lane number and the four lane colour codes."""
        # data1 is steering value
        # data2 is number of my lane
        # range of data2 is 2, 1, 0, -1 (1, 2 means that my lane is 1, 2 lane,respectively. -1 means that my lane is reverse lane. 0 means that my lane is unknown)
        # data3 is each color of lanes(leftleft, left, right, rightright)
        # range of data3 is 0, 1, 2, 3 (0: None, 1: white, 2: yellow, 3: unknown(detect lanes but color cannot be detected))
        self.floatmsg.data[0] = data1
        self.floatmsg.data[1] = data2
        self.floatmsg.data[2] = data3[0]
        self.floatmsg.data[3] = data3[1]
        self.floatmsg.data[4] = data3[2]
        self.floatmsg.data[5] = data3[3]
        self.infoPub.publish(self.floatmsg)
    def centerPub(self, msg):
        """Publish every 150th centre-line point as a flat [x..., y...] array.

        msg is expected to be a (rows, cols) index pair as produced by
        np.where; note below that msg[1] values fill the first half of the
        data array and msg[0] values the second half.
        """
        marker = Float32MultiArray()
        # marker.header.frame_id = 'enetCenterPt'
        #marker.type = Marker.LINE_STRIP
        #marker.action = Marker.ADD
        '''
        for item in msg:
            marker.points.append(Point(x=float(item[0]), y=float(item[1])))
        self.lanePub.publish(marker)
        '''
        # Describe the 2 x N layout (points subsampled by stride 150).
        marker.layout.dim.append(MultiArrayDimension())
        marker.layout.dim.append(MultiArrayDimension())
        marker.layout.dim[0].label = "x"
        marker.layout.dim[1].label = "y"
        marker.layout.dim[0].size = len(msg[0][::150])
        marker.layout.dim[1].size = len(msg[0][::150])
        marker.layout.dim[0].stride = 2*len(msg[0][::150])
        marker.layout.dim[1].stride = 2
        marker.layout.data_offset = 0
        marker.data = [0.0]*2*len(msg[0][::150])
        for idx in range(len(msg[0][::150])):
            marker.data[idx] = msg[1][::150][idx]
            marker.data[len(msg[0][::150])+idx] = msg[0][::150][idx]
        self.lanePub.publish(marker)
def parse_args(argv=None):
    """Parse command-line options for the lane-detection node.

    :param argv: optional list of argument strings; None (the default) keeps
        the original behaviour of reading sys.argv[1:]. Added so the parser
        can be exercised without touching the process arguments.
    :return: argparse.Namespace with video_path, weight_path, camera, visualize

    NOTE(review): --camera uses type=str with default False, so any value
    passed on the command line becomes a truthy string — confirm intent.
    """
    parser = argparse.ArgumentParser()
    #parser.add_argument("--video_path", '-i', type=str, default="/home/dgist/Desktop/hoyeong0.mp4", help="Path to demo video")
    parser.add_argument("--video_path", '-i', type=str, default="/home/dgist/Desktop/data/원내주행영상/밤/ioniq_night1.webm", help="Path to demo video")
    parser.add_argument("--weight_path", '-w', type=str, default="experiments/culane_ours_aug/culane_ours_aug_best.pth", help="Path to model weights")
    # parser.add_argument("--video_path", '-i', type=str, default="/home/dgist/Desktop/data/원내주행영상/낮/0626/E16todorm.webm", help="Path to demo video")
    parser.add_argument("--camera", '-c', type=str, default=False, help="using camera or not")
    parser.add_argument("--visualize", '-v', action="store_true", default=True, help="Visualize the result")
    args = parser.parse_args(argv)
    return args
def network(net, img):
    """Run the lane model on *img* (moved to the GPU) and return CPU tensors."""
    outputs = net(img.cuda())
    seg_pred = outputs[0].detach().cpu()
    exist_pred = outputs[1].detach().cpu()
    return seg_pred, exist_pred
def visualize(img, seg_pred, exist_pred):
    """Blend per-lane colour masks onto *img* for lanes with existence > 0.5."""
    # img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
    overlay = np.zeros_like(img)
    palette = np.array([[255, 125, 0], [0, 255, 0], [0, 0, 255], [0, 255, 255]], dtype='uint8')
    lane_ids = np.argmax(seg_pred, axis=0)
    for lane in range(0, 4):
        if exist_pred[0, lane] > 0.5:
            # Class 0 is background, so lane masks start at id 1.
            overlay[np.where(lane_ids == (lane + 1))] = palette[lane]
    return cv2.addWeighted(src1=overlay, alpha=0.8, src2=img, beta=1., gamma=0.)
def pre_processor(arg):
    """Capture frames from camera index 1, normalize them and feed img_queue.

    Runs in its own process. Blocks on img_queue.join() after each put, so at
    most one frame is in flight until the consumer calls task_done.

    :param arg: tuple (img_queue, video_path); video_path is currently unused
        because the capture device index is hard-coded to 1.
    """
    img_queue, video_path = arg
    cap = cv2.VideoCapture(1)
    while cap.isOpened():
        if img_queue.empty():
            ret, frame = cap.read()
            if ret:
                #frame = cv2.rotate(frame, cv2.ROTATE_90_COUNTERCLOCKWISE)
                # Resize, BGR->RGB, then to normalized tensor with batch dim.
                frame = transform_img({'img': frame})['img']
                img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                x = transform_to_net({'img': img})['img']
                x.unsqueeze_(0)
                img_queue.put(x)
                img_queue.join()
            else:
                break
def post_processor(arg):
    """Consume (input, seg, exist) results from img_queue and display them.

    Busy-polls the queue in its own process; prints the lane existence flags
    and extracted lane points, and optionally shows input/overlay windows
    until 'q' is pressed.

    :param arg: tuple (img_queue, visualize_flag)
    """
    img_queue, arg_visualize = arg
    while True:
        if not img_queue.empty():
            x, seg_pred, exist_pred = img_queue.get()
            seg_pred = seg_pred.numpy()[0]
            exist_pred = exist_pred.numpy()
            # Lane i exists when its confidence exceeds 0.5.
            exist = [1 if exist_pred[0, i] > 0.5 else 0 for i in range(4)]
            print(exist)
            for i in getLane.prob2lines_CULane(seg_pred, exist):
                print(i)

            if arg_visualize:
                # CHW tensor -> HWC image for display.
                frame = x.squeeze().permute(1, 2, 0).numpy()
                img = visualize(frame, seg_pred, exist_pred)
                cv2.imshow('input_video', frame)
                cv2.imshow("output_video", img)
                if cv2.waitKey(1) & 0xFF == ord('q'):
                    break
        else:
            pass
# Signal colour re-evaluation when the detected lane count changes.
def re_eval_sign(points):
    """Return True when the number of detected lanes changed since last call.

    The previous count lives in the module-global `prepoints`. Once the
    count drops to 0 the function latches and keeps returning False — this
    mirrors the original behaviour exactly.
    """
    global prepoints
    if prepoints == 0:
        return False
    detected = sum(1 for i in range(4) if points[i] != [None])
    changed = detected != prepoints
    prepoints = detected
    return changed
def centerline_visualize(img, seg_pred, exist_pred):
    """Vote a colour code for each of the four lanes from random mask samples.

    For every lane with existence > 0.5 and at least 3 mask pixels, samples 7
    random pixels, classifies a 6x6 patch around each with
    detect_white/detect_yellow + decision(), then aggregates via voting().

    :param img: frame to sample patches from (callers pass an HSV frame)
    :param seg_pred: (C, H, W) per-lane probability maps
    :param exist_pred: lane existence confidences
    :return: list of 4 colour codes (0 none, 1 white, 2 yellow, 3 unknown)

    NOTE(review): patches are sliced as img[x-3:x+3, y-3:y+3] where x is the
    column index and y the row index from np.where — the axes look swapped;
    confirm the intended sampling.
    NOTE(review): relies on `random`, which is not imported explicitly in
    this file — presumably re-exported by `from utils.transforms import *`;
    verify.
    """
    mask_img = np.zeros((6,6))
    coord_mask = np.argmax(seg_pred, axis=0)
    color_choice = []
    for i in range(0, 4):
        voting_list = []
        if exist_pred[0][i] > 0.5:
            y_list, x_list = np.where(coord_mask == i+1)
            if len(x_list) >=3:
                for j in range(7):
                    idx = random.randint(0, len(x_list)-1)
                    x, y = x_list[idx], y_list[idx]
                    mask_img = img[x-3:x+3, y-3:y+3]
                    voting_list.append(decision(detect_white(mask_img), detect_yellow(mask_img)))
                # cv2.circle(img, (x,y), 10,(255,0,0) , -1)
                # cv2.imshow("cir",img)
                color_choice.insert(i, voting(voting_list))
            else:
                #color_choice.insert(i, "N")
                color_choice.insert(i, 0)
        else:
            #color_choice.insert(i, "N")
            color_choice.insert(i, 0)
    return color_choice
def lane_clss(image, points, vis=False):
    """Classify the colour of each lane from its sampled points.

    Converts the frame to HSV, labels every sampled point with ColorLabeler
    and stores the vote result into the module-global color_choice list.

    :param image: BGR frame
    :param points: per-lane lists of (x, y) sample points; [None] = no lane
    :param vis: forwarded to ColorLabeler.label for debug output
    :return: the (module-global) color_choice list

    NOTE(review): this mutates the global color_choice via insert(), which
    grows the list beyond 4 entries across repeated calls; confirm whether
    per-index assignment was intended.
    """
    for i, pts in enumerate(points):
        voting_list = []
        # HSV conversion is (re)done per lane; the labeler works in HSV.
        lab = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        # initialize the color labeler
        cl = ColorLabeler()
        if pts != [None]:
            for centerXY in pts:
                color, mean = cl.label(lab, centerXY, vis)
                voting_list.append(color)
            color_choice.insert(i, voting(voting_list))
    return color_choice
def detect_white(mask_img):
    """Count white-ish pixels (all channels in [200, 255]) in the patch.

    Returns 0 on any failure (empty or invalid patch) so the caller's voting
    loop keeps going — the broad except is deliberate best-effort.
    """
    white_lower = np.array([200, 200, 200])
    white_upper = np.array([255, 255, 255])
    try:
        white_mask = cv2.inRange(mask_img, white_lower, white_upper)
        return len(np.where(white_mask != 0)[0])
    except:
        return 0
def detect_yellow(mask_img):
    """Count yellow-ish pixels in the HSV patch *mask_img*.

    Hue in [10, 40], saturation and value in [100, 255]. The caller
    (centerline_visualize) slices patches from an already HSV-converted
    frame, so no conversion is performed here.

    BUGFIX: the original called cv2.inRange(hsv, ...) where `hsv` was never
    defined (the conversion line was commented out); the resulting NameError
    was swallowed by the bare except, so this function always returned 0 and
    yellow could never be detected through this path.

    Returns 0 on any failure (empty or invalid patch), matching
    detect_white's best-effort behaviour.
    """
    yellow_lower = np.array([10, 100, 100])
    yellow_upper = np.array([40, 255, 255])
    try:
        yellow_mask = cv2.inRange(mask_img, yellow_lower, yellow_upper)
        return len(np.where(yellow_mask != 0)[0])
    except:
        return 0
def decision(white, yellow):
    """Map (white_count, yellow_count) to a colour code: 0 none, 1 white, 2 yellow."""
    if white == 0 and yellow == 0:
        return 0
    return 1 if white > yellow else 2
def voting(voting_list):
    """Aggregate per-patch colour votes: 2 yellow, 1 white, 3 detected-but-unknown.

    Yellow wins whenever present, unless white votes outnumber it by more
    than a factor of two.
    """
    yellows = voting_list.count(2)
    whites = voting_list.count(1)
    if yellows:
        return 1 if whites > 2 * yellows else 2
    if whites:
        return 1
    return 3
def my_lane(color_list):
    """Infer the ego-lane number from the four lane colour codes.

    Codes: 2 yellow, 1 white. Returns 2 or 1 for lane numbers, -1 for a
    reverse (oncoming) lane, 0 when nothing can be inferred. This flattens
    the original nested try/except/.index() logic into explicit position
    checks with identical results.
    """
    yellow_positions = [i for i, c in enumerate(color_list) if c == 2]

    if not yellow_positions:
        # No yellow line at all: any white lane -> lane 1, otherwise unknown.
        return 1 if color_list.count(1) != 0 else 0

    first = yellow_positions[0]
    if first >= 2:
        # Yellow on the right side: driving against traffic.
        return -1
    if first == 1:
        # Yellow immediately left: a second yellow further right -> reverse.
        return -1 if len(yellow_positions) > 1 else 1

    # first == 0: yellow at the far-left position.
    if len(yellow_positions) == 1:
        return 2
    if yellow_positions[1] == 1:
        # Double yellow on the left; any further yellow means reverse lane.
        return -1 if len(yellow_positions) > 2 else 1
    return -1
def draw_my_lane(img, num_of_my_lane):
    """Draw a small white box with the current lane number near the top-right corner."""
    box_tl, box_br = (730, 2), (795, 15)
    white, black = (255, 255, 255), (0, 0, 0)
    img = cv2.rectangle(img, box_tl, box_br, white, -1)
    label = "lane: " + str(num_of_my_lane)
    img = cv2.putText(img, label, (730, 10), 0, 0.4, black)
    return img
def for_degree(x1, x2, y1, y2):
    """Angle in degrees of the vector (x1-x2, y1-y2), measured from the +y axis."""
    delta_x = x1 - x2
    delta_y = y1 - y2
    return (np.arctan2(delta_x, delta_y) * 180) / np.pi
def using_degree(x_list, y_list, idx):
    """Filter lane points by the slope they make with the last detected point.

    Keeps only points whose angle to (x_list[-1], y_list[-1]) falls inside
    the per-lane degree window for lane *idx*, then re-appends the anchor
    point so the result is never empty.
    """
    degree_windows = [[-85, -75], [-70, -50], [50, 70], [75, 85]]
    # Reference start points kept from the original (unused; the anchor is
    # the last detected point): [[0, 220], [180, 286], [580, 287], [798, 240]]
    anchor_x, anchor_y = x_list[-1], y_list[-1]
    slopes = (np.arctan2(anchor_x - x_list, anchor_y - y_list) * 180) / np.pi
    lo, hi = degree_windows[idx]
    keep = np.where((lo < slopes) & (slopes < hi))
    x_new = np.append(x_list[keep], [anchor_x])
    y_new = np.append(y_list[keep], [anchor_y])
    return x_new, y_new
def draw_polynomial_regression_lane(x_list, y_list, img,lane_img, color_choice):
    """Fit a cubic polynomial x = f(y) to lane points and draw the polyline.

    The fitted line is drawn into lane_img (green / white / yellow depending
    on the colour code) and the overlay is blended into img. Any fitting or
    drawing failure is silently ignored, leaving the overlay unchanged.

    :param x_list: lane point x coordinates
    :param y_list: lane point y coordinates
    :param img: frame to blend the overlay into
    :param lane_img: overlay canvas (modified in place)
    :param color_choice: colour code for this lane (1 white, 2 yellow, 3 unknown)
    :return: (poly1d fit, overlay, blended frame)

    NOTE(review): if the try block fails before `f1` is assigned, the final
    return raises NameError — callers appear to guarantee >= 4 points before
    calling; confirm.
    """
    # lane_img = np.zeros_like(img)
    try:
        # polynomial regression
        fp1 = np.polyfit(np.array(y_list), np.array(x_list) , 3)
        f1 = np.poly1d(fp1)
        #y_list = np.array(list(range(144, 274, 10)))
        # Re-evaluate the fit on the sample rows to get smoothed x values.
        x_list = np.polyval(f1, y_list)
        draw_poly = np.array([list(zip(x_list, y_list))], np.int32)
        if color_choice == 3:
            lane_img=cv2.polylines(lane_img, np.asarray(draw_poly), False, (0, 255, 0), 2)
        elif color_choice == 1:
            lane_img=cv2.polylines(lane_img, np.asarray(draw_poly), False, (255, 255, 255), 2)
        elif color_choice == 2:
            lane_img=cv2.polylines(lane_img, np.asarray(draw_poly), False, (0, 255, 255), 2)
    except:
        pass
    img = cv2.addWeighted(src1=lane_img, alpha=0.8, src2=img, beta=1., gamma=0.)
    return f1, lane_img, img
def affine_trasform(lane_img):
    """Perspective-warp the lane overlay into a 300x300 bird's-eye view."""
    # Source trapezoid corners: top-left, bottom-left, top-right, bottom-right.
    #pts1 = np.float32([[265,165],[0, 240],[535,165],[800, 240]])
    src_corners = np.float32([[270, 144], [0, 203], [530, 144], [800, 203]])
    # Matching destination corners of the square top view.
    dst_corners = np.float32([[0, 0], [0, 300], [300, 0], [300, 300]])
    warp_matrix = cv2.getPerspectiveTransform(src_corners, dst_corners)
    return cv2.warpPerspective(lane_img, warp_matrix, (300, 300))
def overwrap(lane_img, img):
    """Paste a 70x70 thumbnail of *lane_img* onto *img* at offset (10, 10).

    A binary mask ensures only the non-black lane pixels overwrite the frame;
    the surrounding region of interest keeps the original frame content.
    """
    thumb = cv2.resize(lane_img, dsize=(70, 70))
    rows, cols, channels = thumb.shape
    roi = img[10:rows + 10, 10:cols + 10]
    gray = cv2.cvtColor(thumb, cv2.COLOR_RGB2GRAY)
    ret, mask = cv2.threshold(gray, 10, 255, cv2.THRESH_BINARY)
    mask_inv = cv2.bitwise_not(mask)
    background = cv2.bitwise_and(roi, roi, mask=mask_inv)
    foreground = cv2.bitwise_and(thumb, thumb, mask=mask)
    img[10:rows + 10, 10:cols + 10] = cv2.add(background, foreground)
    return img
def make_centerPt(img, f_center):
    """Rasterize the fitted centre-line polynomial and warp it to the top view."""
    canvas = np.zeros_like(img)
    ys = np.array(list(range(144, 274, 10)))
    xs = np.polyval(f_center, ys)
    polyline = np.array([list(zip(xs, ys))], np.int32)
    canvas = cv2.polylines(canvas, np.asarray(polyline), False, (0, 100, 255), 1)
    # cv2.imshow("center", canvas)
    return affine_trasform(canvas)
def image_callback(msg : Image):
    """Per-frame pipeline: run the lane network, fit lanes, estimate steering.

    Subscribed to /image. Updates the module-global state (lane colours,
    ego-lane number, centre-line points, steering history) and publishes the
    results through rosPubClass.

    BUGFIX: the global declaration previously read `prev_centetPt` (typo),
    so the assignment near the end of this function created a function-local
    `prev_centerPt` instead of updating the module-global one used for the
    fallback smoothing path.
    """
    global rosPubClass
    global frame_100
    global num_of_my_lane
    global color_choice
    global centerPt
    global prev_centerPt
    global prev_steer
    global prev_steering
    global previs
    img = bridge.imgmsg_to_cv2(msg, "bgr8")
    img = cv2.resize(img, dsize= (800,288))
    # cv2.imshow("org", img)
    cv2.waitKey(1)
    frame_100 += 1
    loop_start = time.time()
    # img = transform_img({'img': frame})['img']
    x = transform_to_net({'img': img})['img']
    x.unsqueeze_(0)
    gpu_start = time.time()
    seg_pred, exist_pred = network(net, x)
    gpu_end = time.time()
    seg_pred = seg_pred.numpy()[0]
    exist_pred = exist_pred.numpy()
    exist = [1 if exist_pred[0, i] > 0.5 else 0 for i in range(4)]
    points = getLane.prob2lines_CULane_make(seg_pred, exist, pts=30)
    # Re-classify lane colours when the lane count changed or every 30 frames.
    if re_eval_sign(points) == True or frame_100 == 30:
        frame_100 = 0
        #hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
        #hsv = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)
        #color_choice = centerline_visualize(hsv, seg_pred, exist_pred)
        #num_of_my_lane = my_lane(color_choice)
        color_choice = lane_clss(img, points)
        num_of_my_lane = my_lane(color_choice)
    # img = draw_my_lane(img, num_of_my_lane)
    f_left = None
    f_right = None
    x_left = []
    x_right = []
    y_left = []
    y_right = []
    aff_left = np.zeros((300, 300))
    aff_right = np.zeros((300, 300))
    coord_mask = np.argmax(seg_pred, axis=0)
    # Steering anchor: bottom centre of the 300x300 top view.
    x1, y1 = 150,300
    lane_img2 = np.zeros((300, 300))
    steering = 0
    for i in range(0, 4):
        lane_img = np.zeros_like(img)
        if exist_pred[0][i] > 0.5:
            y_list, x_list = np.where(coord_mask == i+1)
            # y_list = y_list[np.where(y_list <144)]
            # x_list = x_list[np.where(y_list <144)]
            # print(i+1,"th: ", len(x_list))
            if len(x_list) >= 4:
                # Subsample every 20th mask pixel before fitting.
                x_list = x_list[::20]
                y_list = y_list[::20]
                # x_list, y_list = using_degree(x_list, y_list, i)
                # print("x: ", x_list)
                if len(x_list) >= 4:
                    f1, lane_img, img = draw_polynomial_regression_lane(x_list, y_list, img,lane_img, color_choice[i])
                    lane_img3 = affine_trasform(lane_img)
                    # cv2.imshow("ddddddddd", lane_img3)
                    img = overwrap(lane_img3, img)
                    # Lanes 1/2 are the ego-adjacent left/right lanes.
                    if i==1:
                        f_left = f1
                        aff_left = lane_img3
                        x_left, y_left = x_list, y_list
                    elif i==2:
                        f_right = f1
                        aff_right = lane_img3
                        x_right, y_right = x_list, y_list
    # Centre line: average of both lane fits, or a shifted single lane.
    if f_left != None and f_right != None and abs(len(x_right)-len(x_left)) < 1500:
        lane_img2 = make_centerPt(img, (f_left+f_right)/2)
        centerPt = np.where(lane_img2 !=0)
    elif f_left != None:
        centerPt = (np.where(aff_left !=0)[0], np.where(aff_left !=0)[1]+50)
    elif f_right != None:
        centerPt =(np.where(aff_right !=0)[0], np.where(aff_right !=0)[1]-50)
    try:
        # offset = np.mean(centerPt[1])-150
        # Re-centre horizontally on the anchor column, then derive the
        # steering angle from the farthest centre-line point.
        offset = centerPt[1][-1] -150
        centerPt = (centerPt[0], centerPt[1]-offset)
        x2, y2 = centerPt[1][0], centerPt[0][0]
        steering = -for_degree(x1,x2,y1,y2)
    except:
        print("hd_map")
    # Outlier rejection against recent steering history.
    # NOTE(review): prev_steer only grows when it is already non-empty, so
    # this smoothing never activates from the initial empty list — confirm
    # whether the first condition should also accept len(prev_steer) == 0.
    if steering !=0 and len(prev_steer) < 6:
        if len(prev_steer) != 0 and abs(prev_steer[-1]-steering) < 50:
            prev_steer.append(steering)
        elif len(prev_steer) != 0 and (abs(prev_steer[-1]-steering) > 50 or abs(sum(prev_steer)/len(prev_steer) - steering) > 20):
            centerPt = prev_centerPt
            steering = prev_steering
        elif len(prev_steer) ==5:
            prev_steer = []
    prev_centerPt = centerPt
    prev_steering = steering
    loop_end = time.time()
    '''
    if args.visualize:
        # cv2.imshow('input_video', frame)
        cv2.imshow("output_video", img)
    '''
    # img = visualize(img, seg_pred, exist_pred)
    img = cv2.resize(img, dsize=(200,72))
    imgmsg = bridge.cv2_to_imgmsg(img, "bgr8")
    rosPubClass.data_pub(steering, num_of_my_lane, color_choice)
    rosPubClass.imagePub.publish(imgmsg)
    rosPubClass.centerPub(centerPt)
    print("steering: ", steering)
    # print("gpu_runtime:", gpu_end - gpu_start, "FPS:", int(1 / (gpu_end - gpu_start)))
    print("total_runtime:", loop_end - loop_start, "FPS:", int(1 / (loop_end - loop_start)))
def main():
    """Entry point: load model weights and spin the ROS node (or pipeline mode)."""
    global rosPubClass
    global frame_100
    global num_of_my_lane
    global color_choice
    global prepoints
    args = parse_args()
    video_path = args.video_path
    weight_path = args.weight_path
    rosPubClass = rosPub()
    if pipeline:
        # Optional multiprocessing mode: frame producer + result consumer
        # around the network (disabled by default via the pipeline flag).
        input_queue = JoinableQueue()
        pre_process = Process(target=pre_processor, args=((input_queue, video_path),))
        pre_process.start()
        output_queue = SimpleQueue()
        post_process = Process(target=post_processor, args=((output_queue, args.visualize),))
        post_process.start()
    # Load weights on CPU first, then move the model to the GPU for inference.
    save_dict = torch.load(weight_path, map_location='cpu')
    #save_dict['net']['fc.0.weight'] = save_dict['net']['fc.0.weight'].view(128,4400)
    #print(save_dict['net']['fc.0.weight'].view(128,4400))
    net.load_state_dict(save_dict['net'])
    net.eval()
    net.cuda()
    # No-op attribute access kept as-is; the subscription created in rosPub
    # is already active.
    rosPubClass.sub
    rclpy.spin(rosPubClass.node)
    rosPubClass.node.destroy_node()
    rclpy.shutdown()
    #cv2.destroyAllWindows()
|
[
"noreply@github.com"
] |
js7850.noreply@github.com
|
4ab0bf842e869160e1880046207d6b03ddea7f2d
|
f669ff5e74fe3eea7f3c41fa21520ed77d0f02dc
|
/AirTecFugas/WebApiApp/views/EstatusFugaViews.py
|
751149b1d7db4ed0f32e00c782fcc9bfac709a9a
|
[] |
no_license
|
codingrebelsmx/airtec-fugas
|
e6fbfbb1784ef415b6be0e27ff3aa4cef69920ed
|
4a466281a880bd8493f0ae22378dacd4bdd602e7
|
refs/heads/master
| 2023-01-03T23:00:33.634880
| 2019-08-29T02:31:47
| 2019-08-29T02:31:47
| 173,841,299
| 1
| 1
| null | 2022-12-27T15:34:00
| 2019-03-05T00:02:04
|
HTML
|
UTF-8
|
Python
| false
| false
| 701
|
py
|
# -*- coding: utf-8 -*-
from rest_framework.response import Response
from rest_framework import viewsets
from ModelsApp.models import Fuga
from WebApiApp.serializers.EstatusFugaSerializers import EstatusFugaSelectSerializer
class EstatusFugaListSelectView(viewsets.ModelViewSet):
    """Expose Fuga.estatus_fuga_dict serialized for a Select control."""
    serializer_class = EstatusFugaSelectSerializer

    def get_queryset(self):
        # The "queryset" is the static status mapping on the Fuga model.
        return Fuga.estatus_fuga_dict

    def list(self, request, *args, **kwargs):
        """Serialize every status entry and return them in a single response."""
        serialized = EstatusFugaSelectSerializer(
            instance=Fuga.estatus_fuga_dict, many=True
        )
        return Response(serialized.data)
|
[
"mmorales.dev@outlook.com"
] |
mmorales.dev@outlook.com
|
a75936de127b8aa0fd921938ecc10135d746592f
|
c4cca3f61155859de797e3236bf0838f20568c6b
|
/vector/v.stream.netid/v.stream.netid.py
|
d98b835f46567fe1591ef59da84efa9c9e430975
|
[] |
no_license
|
johnDorian/grass_addons
|
ac57123ab5760be513875cafd8c5cf23ef5af6a2
|
e1529d2735d7000cd110a1d6cb4d8e2372c68550
|
refs/heads/master
| 2021-01-22T14:25:31.924198
| 2015-05-27T20:22:15
| 2015-05-27T20:22:15
| 33,500,997
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,371
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
############################################################################
#
# MODULE: v.streams.netid.py
# AUTHOR(S): Jason Lessels
# PURPOSE: Determine and assign network ids to each stream reach.
#
# COPYRIGHT:
#
# This program is free software under the GNU General Public
# License (>=v2). Read the file COPYING that comes with GRASS
# for details.
#
#############################################################################
#%module
#% description: Determines contributing area and statistics for multiple points along a stream network.
#% keyword: vector
#%end
#%option G_OPT_V_MAP
#% key: input
#% description: Name of vector map of edges.
#% required: yes
#% multiple: no
#%end
#%option G_OPT_M_DIR
#% key: directory
#% description: Location of the folder to save the network ID files to.
#% required: yes
#% multiple: no
#%end
#%option G_OPT_V_FIELD
#% key: layer
#% description: Vector layer number to add catchment statistics
#% answer: 1
#% guisection: Settings
#%end
#%flag
#% key:c
#% description: Ignore complex influences
#%end
"""
Created on 12 March 2015
@author: Jason Lessels <jlessels gmail.com>
"""
import sys
import os
import csv
import grass.script as grass
from grass.pygrass import vector
from pygrass.modules import Module
from pygrass.modules import stdout2dict
from pygrass import raster
from grass.exceptions import CalledModuleError
# Bail out early when not running inside a GRASS session: the grass.* modules
# imported above only function with a GRASS environment.
# BUGFIX: os.environ.has_key() is Python-2 only (removed in Python 3);
# the membership test works identically on both versions.
if "GISBASE" not in os.environ:
    grass.message("You must be in GRASS GIS to run this program.")
    sys.exit(1)
def map_exists(map_name, type="raster"):
    """Abort via grass.fatal unless *map_name* exists as the given map type.

    :param map_name: name of the GRASS map to check
    :param type: "raster" (checked as element 'cell') or anything else
        (checked as a vector map)
    """
    if type == "raster":
        found = grass.find_file(name=map_name, element='cell')
        if not found['file']:
            grass.fatal("Raster map <%s> not found" % map_name)
    else:
        found = grass.find_file(name=map_name, element='vector')
        if not found['file']:
            grass.fatal("Vector map <%s> not found" % map_name)
def get_coords(map_name,layer):
## Load the vector with the points in it.
data = vector.VectorTopo(map_name) # Create a VectorTopo object
data.open('r', layer = int(layer)) # Open this object for reading
coords = []
for i in range(len(data)):
coords.append(data.read(i+1).coords()) # gives a tuple
data.close()
return coords
def get_table_name(map_name, layer):
### Test to make sure the vector has the correct layer - and get the table name from it
try:
table_name = grass.vector_layer_db(map_name, layer)['name']
except:
grass.fatal("Map <%s> does not have layer number %s" % (map_name,layer))
return table_name
def add_netID_to_edges(vect_name,table_name, layer, netIDs, ridIDs):
## Create the new column.
Module('v.db.addcolumn', map=vect_name, columns='netID',layer=layer)
## This has to be good to run for a list of lists.
for i in range(len(netIDs)):
for j in range(len(netIDs[i])):
query = "UPDATE " + table_name + " SET " + 'netID' + "=" + str(netIDs[i][j]) + " WHERE rid = " + str(ridIDs[i][j])
grass.run_command('db.execute', sql=str(query))
def get_column_ints(vect_name, layer, col_name):
raw = grass.read_command("v.db.select", map=vect_name, layer=layer, col=col_name, flags='c')
raw = raw.split("\n")
return map(int, raw[0:len(raw)-1])
def check_for_columns(table_name, map_name, stop_with_complex):
# Get the column names in the table
raw = grass.read_command("db.columns", table=table_name)
raw = raw.split("\n")
required_cols = ['rid', 'prev_str01', 'prev_str02', 'nxt_str']
# Check that the required columns are in the table
for i in range(len(required_cols)):
if ((required_cols[i] in raw) == False):
grass.fatal("Map <%s> does not have column named %s" % (map_name,required_cols[i]))
# Check to make sure that there are no complex influences.
if 'prev_str03' in raw:
if stop_with_complex:
grass.message("A column containing complex influences was found. Module is proceeding as -c flag is set." )
else:
grass.fatal("A column containing complex influences was found. Check the map and use -c flag to continue.")
def main():
## Load the inputs from the user
streams = options['input']
## get the layer number to obtain the table from.
layer = options['layer']
## get the directory where to save everything.
directory = options['directory']
ignore_complex = flags['c']
# For testing only
# streams = "stream_order"
# layer = "1"
# directory = "/Users/jasonlessels/Desktop/testing/"
## Check to make sure the vector map exists
map_exists(streams, "vector")
## Check to make sure the layer exists and return the table name of that layer
table_name = get_table_name(streams, layer)
check_for_columns(table_name, streams, ignore_complex)
rid = get_column_ints(streams, layer, 'rid')
prev_str1 = get_column_ints(streams, layer, 'prev_str01')
prev_str2 = get_column_ints(streams, layer, 'prev_str02')
nxt_str = get_column_ints(streams, layer, 'nxt_str')
## Create a binaryID list. - with a stream index to for splitting up later on
binaryID = [''] * len(nxt_str)
netID_number = 1
for i in range(len(nxt_str)):
if nxt_str[i] == -1:
binaryID[i] = str(netID_number) + '_1'
netID_number = netID_number + 1
else:
binaryID[i] = ''
for j in range(len(nxt_str)):
for i in range(len(nxt_str)):
if(len(binaryID[i])>0):
if(prev_str1[i] > 0):
binaryID[rid.index(prev_str1[i])] = binaryID[i] + "0"
if(prev_str2[i] > 0):
binaryID[rid.index(prev_str2[i])] = binaryID[i] + "1"
#TODO: split the binaryID list based on the network id and write out csv files with rid,binaryID.
netID = [''] * len(nxt_str)
for i in range(len(nxt_str)):
netID[i] = binaryID[i].split("_")[0]
binaryID[i] = binaryID[i].split("_")[1]
## Seperate all of the networks
total_networks = sum([i==-1 for i in nxt_str])
rid_list = []
binaryID_list = []
netID_list = []
for i in range(total_networks):
binaryID_list.append([])
rid_list.append([])
netID_list.append([])
for j in range(len(binaryID)):
if netID[j] == str(i+1):
rid_list[i].append(rid[j])
netID_list[i].append(str(i+1))
binaryID_list[i].append(binaryID[j])
## fix up the directory string
if directory[-1] == '/':
directory = directory[0:-1]
for i in range(len(rid_list)):
file_name = directory + '/netID' + str(netID_list[i][0]) + '.dat'
with open(file_name, 'wb') as outcsv:
writer = csv.writer(outcsv, delimiter=',')
writer.writerow(['rid','binaryID'])
for row in range(len(binaryID_list[i])):
writer.writerow([rid_list[i][row],binaryID_list[i][row]])
## Now update the columns in the stream order vector.
add_netID_to_edges(streams,table_name, layer, netID_list, rid_list)
if __name__ == "__main__":
options, flags = grass.parser()
main()
|
[
"jlessels@gmail.com"
] |
jlessels@gmail.com
|
36639507a244224213c81caf578caf5019bf4ff3
|
85fb09279630a06213c6334c660f5f0d053b972b
|
/mysql-diff.py
|
56e1a5494fd1964007650f515d4cb48d49256463
|
[] |
no_license
|
gessnerfl/mysql-diff
|
180bfc5ff2837715658a1cb1a944eff29fc135bf
|
91a1c4075c7171c869854ff82c7d47c921d19b0d
|
refs/heads/main
| 2023-07-30T12:47:39.178164
| 2021-09-24T06:58:09
| 2021-09-24T06:58:25
| 402,091,912
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,384
|
py
|
import argparse
import datetime
from pathlib import Path
from config.reader import *
from metadata.meta_data_provider import *
from diff.main import *
import csv
def write_meta_data(file_path: str, meta: Dict[str, Schema]):
with open(file_path) as f:
f.write(meta.__str__())
def write_output(file_path: str, db_diff: DatabaseDiffs):
file = Path(file_path)
file.touch()
with open(file, mode='w') as f:
wr = csv.writer(f, delimiter=";")
wr.writerow(['asset_type', 'asset_name', 'diff'])
for d in db_diff.diffs:
wr.writerow([d.asset_type, d.asset_name, d.diff])
def get_output_file_path(param, config: Configuration) -> str:
if param is not None:
param
now = datetime.datetime.now()
datetime_str = now.strftime("%Y%m%d_%H%M%S")
return "{}_diff_{}-{}.csv".format(datetime_str, config.left.name, config.right.name)
def run_application():
args_parser = argparse.ArgumentParser(allow_abbrev=False, prog='mysql-diff',
description='Determine structural differences between two MySQL database')
args_parser.add_argument('-c', '--config', help='the yaml configuration file required for the execution',
type=str, required=True)
args_parser.add_argument('-o', '--out', help='the file path of the output file', type=str, required=False)
args_parser.add_argument('--left-out-path', help='the file path to store the meta data of the left side', type=str,
required=False)
args_parser.add_argument('--right-out-path', help='the file path to store the meta data of the right side',
type=str, required=False)
args = args_parser.parse_args()
config_file = args.config
config = read_configuration(config_file)
with collect_meta_data(config.left) as p:
left = p.provide()
if args.left_out_path is not None:
write_meta_data(args.left_out_path, left)
with collect_meta_data(config.right) as p:
right = p.provide()
if args.right_out_path is not None:
write_meta_data(args.right_out_path, right)
diffs = diff(left, right, config.exclusions, config.schema_mappings)
out_file_path = get_output_file_path(args.out, config)
write_output(out_file_path, diffs)
run_application()
|
[
"florian.gessner@tis.biz"
] |
florian.gessner@tis.biz
|
a8d49053c2ac05b12cc81312ae3d74cc220b5792
|
7c0e18d21cbcfacc253c49eb286f314a9b28c78a
|
/user/decorators.py
|
8fb415d5a8bdb2e4f78a0e459c9a2404485bd859
|
[] |
no_license
|
geoffrey-wen/TA
|
abd92d179fe79d9690fa362f8a462ff4b3f77c20
|
620fe067396e35dfaa1ac094f31a29454bbda1d2
|
refs/heads/master
| 2023-01-30T19:04:42.692716
| 2020-12-17T05:23:50
| 2020-12-17T05:23:50
| 294,910,270
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 920
|
py
|
from django.http import HttpResponse
from django.shortcuts import redirect
def unauthenticated_user(view_func):
def wrapper_func(request, *args, **kwargs):
if request.user.is_authenticated:
return redirect('home')
else:
return view_func(request, *args, **kwargs)
return wrapper_func
def allowed_users(allowed_roles=[]):
def decorator(view_func):
def wrapper_func(request, *args, **kwargs):
group = None
if request.user.groups.exists():
group = request.user.groups.all()[0].name
if group in allowed_roles:
return view_func(request, *args, **kwargs)
else:
return HttpResponse('You are not authorized to view this page')
return wrapper_func
return decorator
#put decorator like
#@allowed_users(allowed_roles=['groupname'])
#on the top of view function
|
[
"giangervase@gmail.com"
] |
giangervase@gmail.com
|
37cae764334ae3d2d3130e53268545121a4e46f6
|
431dd18482549fb50070b70f324732e1beb1b6e2
|
/app/image_app/views.py
|
1111c7bcda38fdf35940b16e2742b14e9f364da7
|
[] |
no_license
|
OscarGibson/test_task_python
|
2af843b3b4845aa093d2d429222b38863531e880
|
c5353cca7139a3e209a5f16dc8290c13afc1e2ad
|
refs/heads/master
| 2022-12-09T13:53:47.581636
| 2019-03-14T15:48:09
| 2019-03-14T15:48:09
| 175,651,873
| 0
| 0
| null | 2022-12-08T04:52:14
| 2019-03-14T15:42:15
|
Python
|
UTF-8
|
Python
| false
| false
| 1,724
|
py
|
from django.shortcuts import render
from .flicker_api import FlickerAPI
flicker_api = FlickerAPI()
def image_search_get(request):
text = request.GET.get('text', '')
sort_type = request.GET.get('sort_type', '')
page = request.GET.get('page', '')
if text != "":
data = flicker_api.find(text, sort_type, page)
return render(
request,
'image_app/index.html',
{
'images': data["photos"]["photo"],
'page_number': data["photos"]["page"],
'pages': data["photos"]["pages"],
'sort_types': flicker_api.FLICKER_SORT_TYPES
}
)
else:
return render(
request,
'image_app/index.html',
{
'images': [],
'page_number': 1,
'pages': 1,
'sort_types': flicker_api.FLICKER_SORT_TYPES
}
)
def image_search_post(request):
text = request.POST['text']
sort_type = request.POST['sort_type']
data = flicker_api.find(text, sort_type)
print(data)
return render(
request,
'image_app/index.html',
{
'images': data["photos"]["photo"],
'page_number': data["photos"]["page"],
'pages': data["photos"]["pages"],
'sort_types': flicker_api.FLICKER_SORT_TYPES
}
)
def image_search(request):
if request.method == 'GET':
return image_search_get(request)
elif request.method == 'POST':
return image_search_post(request)
else:
return render(
request,
'base/method_not_alowed.html',
{}
)
|
[
"dev@incorainc.com"
] |
dev@incorainc.com
|
15c83e79553ebe4c8a62abc3fa2c8f9649833fe0
|
d7c5189ce11ff291f93871110d74cd7e509f5925
|
/the-print-function-I-single-argument.py
|
57eddfd38c5aad63c17e4de858efd4387fd583bc
|
[] |
no_license
|
ihuomagold/Python_Progress
|
2db955802503f47c2861c580d88019050e44221f
|
79368c409c4d5ddc43e7b190204e75a8039a796d
|
refs/heads/master
| 2022-12-12T15:16:07.674937
| 2020-08-11T12:43:37
| 2020-08-11T12:43:37
| 286,729,651
| 0
| 0
| null | 2020-08-11T12:43:39
| 2020-08-11T11:44:49
| null |
UTF-8
|
Python
| false
| false
| 179
|
py
|
print("I had spaghetti for breakfast")
print("I also ate one boiled egg")
print("I am working with Python")
print(8)
print(8 + 5)
print(32 - 18)
print("Hello" + "Python")
|
[
"noreply@github.com"
] |
ihuomagold.noreply@github.com
|
2288be423442e1f739d09472a412009598f7f402
|
8419eaa22e58a2efbb7bdf1bccfc66a9e3288d75
|
/tensorflow/contrib/bayesflow/python/kernel_tests/layers_dense_variational_test.py
|
4e9f1193511c35beead85914ca988fde69b3afde
|
[
"Apache-2.0"
] |
permissive
|
PipelineAI/tensorflow
|
f539227fd5d3f304b4f246877e35303dbd388a0c
|
5d8e69768230ea8765a7c78cf1fa22c3ab2a4757
|
refs/heads/master
| 2021-05-05T21:54:02.830548
| 2018-01-15T04:30:05
| 2018-01-15T04:30:05
| 115,791,564
| 0
| 1
|
Apache-2.0
| 2018-01-15T05:38:46
| 2017-12-30T11:08:37
|
C++
|
UTF-8
|
Python
| false
| false
| 16,514
|
py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dense Bayesian layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.bayesflow.python.ops import layers_dense_variational_impl as prob_layers_lib
from tensorflow.contrib.bayesflow.python.ops import layers_util as prob_layers_util
from tensorflow.contrib.distributions.python.ops import independent as independent_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.platform import test
class Counter(object):
"""Helper class to manage incrementing a counting `int`."""
def __init__(self):
self._value = -1
@property
def value(self):
return self._value
def __call__(self):
self._value += 1
return self._value
class MockDistribution(independent_lib.Independent):
"""Monitors layer calls to the underlying distribution."""
def __init__(self, result_sample, result_log_prob, loc=None, scale=None):
self.result_sample = result_sample
self.result_log_prob = result_log_prob
self.result_loc = loc
self.result_scale = scale
self.result_distribution = normal_lib.Normal(loc=0.0, scale=1.0)
if loc is not None and scale is not None:
self.result_distribution = normal_lib.Normal(loc=self.result_loc,
scale=self.result_scale)
self.called_log_prob = Counter()
self.called_sample = Counter()
self.called_loc = Counter()
self.called_scale = Counter()
def log_prob(self, *args, **kwargs):
self.called_log_prob()
return self.result_log_prob
def sample(self, *args, **kwargs):
self.called_sample()
return self.result_sample
@property
def distribution(self): # for dummy check on Independent(Normal)
return self.result_distribution
@property
def loc(self):
self.called_loc()
return self.result_loc
@property
def scale(self):
self.called_scale()
return self.result_scale
class MockKLDivergence(object):
"""Monitors layer calls to the divergence implementation."""
def __init__(self, result):
self.result = result
self.args = []
self.called = Counter()
def __call__(self, *args, **kwargs):
self.called()
self.args.append(args)
return self.result
class DenseVariational(test.TestCase):
def _testKLPenaltyKernel(self, layer_class):
with self.test_session():
layer = layer_class(units=2)
inputs = random_ops.random_uniform([2, 3], seed=1)
# No keys.
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(losses), 0)
self.assertListEqual(layer.losses, losses)
_ = layer(inputs)
# Yes keys.
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(losses), 1)
self.assertListEqual(layer.losses, losses)
def _testKLPenaltyBoth(self, layer_class):
def _make_normal(dtype, *args): # pylint: disable=unused-argument
return normal_lib.Normal(
loc=dtype.as_numpy_dtype(0.), scale=dtype.as_numpy_dtype(1.))
with self.test_session():
layer = layer_class(
units=2,
bias_posterior_fn=prob_layers_util.default_mean_field_normal_fn(),
bias_prior_fn=_make_normal)
inputs = random_ops.random_uniform([2, 3], seed=1)
# No keys.
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(losses), 0)
self.assertListEqual(layer.losses, losses)
_ = layer(inputs)
# Yes keys.
losses = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(losses), 2)
self.assertListEqual(layer.losses, losses)
def _testDenseSetUp(self, layer_class, batch_size, in_size, out_size,
**kwargs):
seed = Counter()
inputs = random_ops.random_uniform([batch_size, in_size], seed=seed())
kernel_size = [in_size, out_size]
kernel_posterior = MockDistribution(
loc=random_ops.random_uniform(kernel_size, seed=seed()),
scale=random_ops.random_uniform(kernel_size, seed=seed()),
result_log_prob=random_ops.random_uniform(kernel_size, seed=seed()),
result_sample=random_ops.random_uniform(kernel_size, seed=seed()))
kernel_prior = MockDistribution(
result_log_prob=random_ops.random_uniform(kernel_size, seed=seed()),
result_sample=random_ops.random_uniform(kernel_size, seed=seed()))
kernel_divergence = MockKLDivergence(
result=random_ops.random_uniform(kernel_size, seed=seed()))
bias_size = [out_size]
bias_posterior = MockDistribution(
result_log_prob=random_ops.random_uniform(bias_size, seed=seed()),
result_sample=random_ops.random_uniform(bias_size, seed=seed()))
bias_prior = MockDistribution(
result_log_prob=random_ops.random_uniform(bias_size, seed=seed()),
result_sample=random_ops.random_uniform(bias_size, seed=seed()))
bias_divergence = MockKLDivergence(
result=random_ops.random_uniform(bias_size, seed=seed()))
layer = layer_class(
units=out_size,
kernel_posterior_fn=lambda *args: kernel_posterior,
kernel_posterior_tensor_fn=lambda d: d.sample(seed=42),
kernel_prior_fn=lambda *args: kernel_prior,
kernel_divergence_fn=kernel_divergence,
bias_posterior_fn=lambda *args: bias_posterior,
bias_posterior_tensor_fn=lambda d: d.sample(seed=43),
bias_prior_fn=lambda *args: bias_prior,
bias_divergence_fn=bias_divergence,
**kwargs)
outputs = layer(inputs)
kl_penalty = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
return (kernel_posterior, kernel_prior, kernel_divergence,
bias_posterior, bias_prior, bias_divergence,
layer, inputs, outputs, kl_penalty)
def testKLPenaltyKernelReparameterization(self):
self._testKLPenaltyKernel(prob_layers_lib.DenseReparameterization)
def testKLPenaltyKernelLocalReparameterization(self):
self._testKLPenaltyKernel(prob_layers_lib.DenseLocalReparameterization)
def testKLPenaltyKernelFlipout(self):
self._testKLPenaltyKernel(prob_layers_lib.DenseFlipout)
def testKLPenaltyBothReparameterization(self):
self._testKLPenaltyBoth(prob_layers_lib.DenseReparameterization)
def testKLPenaltyBothLocalReparameterization(self):
self._testKLPenaltyBoth(prob_layers_lib.DenseLocalReparameterization)
def testKLPenaltyBothFlipout(self):
self._testKLPenaltyBoth(prob_layers_lib.DenseFlipout)
def testDenseReparameterization(self):
batch_size, in_size, out_size = 2, 3, 4
with self.test_session() as sess:
(kernel_posterior, kernel_prior, kernel_divergence,
bias_posterior, bias_prior, bias_divergence, layer, inputs,
outputs, kl_penalty) = self._testDenseSetUp(
prob_layers_lib.DenseReparameterization,
batch_size, in_size, out_size)
expected_outputs = (
math_ops.matmul(inputs, kernel_posterior.result_sample) +
bias_posterior.result_sample)
[
expected_outputs_, actual_outputs_,
expected_kernel_, actual_kernel_,
expected_kernel_divergence_, actual_kernel_divergence_,
expected_bias_, actual_bias_,
expected_bias_divergence_, actual_bias_divergence_,
] = sess.run([
expected_outputs, outputs,
kernel_posterior.result_sample, layer.kernel_posterior_tensor,
kernel_divergence.result, kl_penalty[0],
bias_posterior.result_sample, layer.bias_posterior_tensor,
bias_divergence.result, kl_penalty[1],
])
self.assertAllClose(
expected_kernel_, actual_kernel_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_, actual_bias_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_outputs_, actual_outputs_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_kernel_divergence_, actual_kernel_divergence_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_divergence_, actual_bias_divergence_,
rtol=1e-6, atol=0.)
self.assertAllEqual(
[[kernel_posterior.distribution,
kernel_prior.distribution,
kernel_posterior.result_sample]],
kernel_divergence.args)
self.assertAllEqual(
[[bias_posterior.distribution,
bias_prior.distribution,
bias_posterior.result_sample]],
bias_divergence.args)
def testDenseLocalReparameterization(self):
batch_size, in_size, out_size = 2, 3, 4
with self.test_session() as sess:
(kernel_posterior, kernel_prior, kernel_divergence,
bias_posterior, bias_prior, bias_divergence, layer, inputs,
outputs, kl_penalty) = self._testDenseSetUp(
prob_layers_lib.DenseLocalReparameterization,
batch_size, in_size, out_size)
expected_kernel_posterior_affine = normal_lib.Normal(
loc=math_ops.matmul(inputs, kernel_posterior.result_loc),
scale=math_ops.matmul(
inputs**2., kernel_posterior.result_scale**2)**0.5)
expected_kernel_posterior_affine_tensor = (
expected_kernel_posterior_affine.sample(seed=42))
expected_outputs = (expected_kernel_posterior_affine_tensor +
bias_posterior.result_sample)
[
expected_outputs_, actual_outputs_,
expected_kernel_divergence_, actual_kernel_divergence_,
expected_bias_, actual_bias_,
expected_bias_divergence_, actual_bias_divergence_,
] = sess.run([
expected_outputs, outputs,
kernel_divergence.result, kl_penalty[0],
bias_posterior.result_sample, layer.bias_posterior_tensor,
bias_divergence.result, kl_penalty[1],
])
self.assertAllClose(
expected_bias_, actual_bias_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_outputs_, actual_outputs_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_kernel_divergence_, actual_kernel_divergence_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_divergence_, actual_bias_divergence_,
rtol=1e-6, atol=0.)
self.assertAllEqual(
[[kernel_posterior.distribution,
kernel_prior.distribution,
None]],
kernel_divergence.args)
self.assertAllEqual(
[[bias_posterior.distribution,
bias_prior.distribution,
bias_posterior.result_sample]],
bias_divergence.args)
def testDenseFlipout(self):
batch_size, in_size, out_size = 2, 3, 4
with self.test_session() as sess:
(kernel_posterior, kernel_prior, kernel_divergence,
bias_posterior, bias_prior, bias_divergence, layer, inputs,
outputs, kl_penalty) = self._testDenseSetUp(
prob_layers_lib.DenseFlipout,
batch_size, in_size, out_size, seed=44)
expected_kernel_posterior_affine = normal_lib.Normal(
loc=array_ops.zeros_like(kernel_posterior.result_loc),
scale=kernel_posterior.result_scale)
expected_kernel_posterior_affine_tensor = (
expected_kernel_posterior_affine.sample(seed=42))
sign_input = random_ops.random_uniform(
[batch_size, in_size],
minval=0,
maxval=2,
dtype=dtypes.int32,
seed=layer.seed)
sign_input = math_ops.cast(2 * sign_input - 1, inputs.dtype)
sign_output = random_ops.random_uniform(
[batch_size, out_size],
minval=0,
maxval=2,
dtype=dtypes.int32,
seed=distribution_util.gen_new_seed(
layer.seed, salt="dense_flipout"))
sign_output = math_ops.cast(2 * sign_output - 1, inputs.dtype)
perturbed_inputs = math_ops.matmul(
inputs * sign_input, expected_kernel_posterior_affine_tensor)
perturbed_inputs *= sign_output
expected_outputs = math_ops.matmul(inputs, kernel_posterior.result_loc)
expected_outputs += perturbed_inputs
expected_outputs += bias_posterior.result_sample
[
expected_outputs_, actual_outputs_,
expected_kernel_divergence_, actual_kernel_divergence_,
expected_bias_, actual_bias_,
expected_bias_divergence_, actual_bias_divergence_,
] = sess.run([
expected_outputs, outputs,
kernel_divergence.result, kl_penalty[0],
bias_posterior.result_sample, layer.bias_posterior_tensor,
bias_divergence.result, kl_penalty[1],
])
self.assertAllClose(
expected_bias_, actual_bias_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_outputs_, actual_outputs_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_kernel_divergence_, actual_kernel_divergence_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_divergence_, actual_bias_divergence_,
rtol=1e-6, atol=0.)
self.assertAllEqual(
[[kernel_posterior.distribution, kernel_prior.distribution, None]],
kernel_divergence.args)
self.assertAllEqual(
[[bias_posterior.distribution,
bias_prior.distribution,
bias_posterior.result_sample]],
bias_divergence.args)
def testRandomDenseFlipout(self):
batch_size, in_size, out_size = 2, 3, 4
with self.test_session() as sess:
seed = Counter()
inputs = random_ops.random_uniform([batch_size, in_size], seed=seed())
kernel_posterior = MockDistribution(
loc=random_ops.random_uniform(
[in_size, out_size], seed=seed()),
scale=random_ops.random_uniform(
[in_size, out_size], seed=seed()),
result_log_prob=random_ops.random_uniform(
[in_size, out_size], seed=seed()),
result_sample=random_ops.random_uniform(
[in_size, out_size], seed=seed()))
bias_posterior = MockDistribution(
loc=random_ops.random_uniform(
[out_size], seed=seed()),
scale=random_ops.random_uniform(
[out_size], seed=seed()),
result_log_prob=random_ops.random_uniform(
[out_size], seed=seed()),
result_sample=random_ops.random_uniform(
[out_size], seed=seed()))
layer_one = prob_layers_lib.DenseFlipout(
units=out_size,
kernel_posterior_fn=lambda *args: kernel_posterior,
kernel_posterior_tensor_fn=lambda d: d.sample(seed=42),
bias_posterior_fn=lambda *args: bias_posterior,
bias_posterior_tensor_fn=lambda d: d.sample(seed=43),
seed=44)
layer_two = prob_layers_lib.DenseFlipout(
units=out_size,
kernel_posterior_fn=lambda *args: kernel_posterior,
kernel_posterior_tensor_fn=lambda d: d.sample(seed=42),
bias_posterior_fn=lambda *args: bias_posterior,
bias_posterior_tensor_fn=lambda d: d.sample(seed=43),
seed=45)
outputs_one = layer_one(inputs)
outputs_two = layer_two(inputs)
outputs_one_, outputs_two_ = sess.run([
outputs_one, outputs_two])
self.assertLess(np.sum(np.isclose(outputs_one_, outputs_two_)), out_size)
if __name__ == "__main__":
test.main()
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
f5f4b54315810f228e8eea7df2b7e93ca321ec0f
|
87ace37c0914e55687d04c7f7dc61d87f35bd895
|
/python/unit 5/project.py
|
027dc4b01bbbe6c259aff2a0004a0ffc2713114a
|
[] |
no_license
|
MrMyerscough/Coding-Course-Stuff
|
e6495434e07ccc62bf68ab7bc8b6290a478124fe
|
f9b3705d7c098b6abb961b4f6ab39768f38b2896
|
refs/heads/master
| 2020-04-11T13:48:40.952406
| 2019-11-15T10:03:02
| 2019-11-15T10:03:02
| 161,830,629
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 167
|
py
|
# books? - library
# contacts ? - phone book
# songs - playlist
# employees - work directory?
# students - school directory?
# items in a shop
# videogames in a shop
|
[
"isaiahwkahler@gmail.com"
] |
isaiahwkahler@gmail.com
|
e2d4f25dd72c294071243e7676c6a1470b5b5c46
|
33a634fcfe842c27fc4b2df3f49a77e319a0b348
|
/31_truncate/truncate.py
|
9c4e8a166262a92f1f45aed0e2552efe17797163
|
[] |
no_license
|
jasparvb/python-exercises
|
50f5c30e0c930acdaebc3b6e003703cf85353b77
|
4631821aed29f8aa5ef770eeb1ea12dd49e90710
|
refs/heads/master
| 2021-05-21T23:44:30.213437
| 2020-04-04T22:09:03
| 2020-04-04T22:09:03
| 252,864,398
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 814
|
py
|
def truncate(phrase, n):
"""Return truncated-at-n-chars version of phrase.
If the phrase is longer than n, make sure it ends with '...' and is no
longer than n.
>>> truncate("Hello World", 6)
'Hel...'
>>> truncate("Problem solving is the best!", 10)
'Problem...'
>>> truncate("Yo", 100)
'Yo'
The smallest legal value of n is 3; if less, return a message:
>>> truncate('Cool', 1)
'Truncation must be at least 3 characters.'
>>> truncate("Woah", 4)
'W...'
>>> truncate("Woah", 3)
'...'
"""
if len(phrase) >= n and n >= 3:
return phrase[:n-3] + "..."
elif n < 3:
return "Truncation must be at least 3 characters."
return phrase
|
[
"jaspar@UMICH.EDU"
] |
jaspar@UMICH.EDU
|
4986665d7e1a7eaf3f41852e2f37ea4c038c51a7
|
5a281cb78335e06c631181720546f6876005d4e5
|
/cinder-14.0.0/cinder/volume/flows/api/create_volume.py
|
12569f59c100af228098fe7f2b1bab7149b575b1
|
[
"Apache-2.0"
] |
permissive
|
scottwedge/OpenStack-Stein
|
d25b2a5bb54a714fc23f0ff0c11fb1fdacad85e8
|
7077d1f602031dace92916f14e36b124f474de15
|
refs/heads/master
| 2021-03-22T16:07:19.561504
| 2020-03-15T01:31:10
| 2020-03-15T01:31:10
| 247,380,811
| 0
| 0
|
Apache-2.0
| 2020-03-15T01:24:15
| 2020-03-15T01:24:15
| null |
UTF-8
|
Python
| false
| false
| 37,033
|
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import six
from oslo_config import cfg
from oslo_log import log as logging
import taskflow.engines
from taskflow.patterns import linear_flow
from taskflow.types import failure as ft
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _
from cinder import objects
from cinder.objects import fields
from cinder.policies import volumes as policy
from cinder import quota
from cinder import quota_utils
from cinder import utils
from cinder.volume.flows import common
from cinder.volume import utils as vol_utils
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
ACTION = 'volume:create'
CONF = cfg.CONF
QUOTAS = quota.QUOTAS
# Only in these 'sources' status can we attempt to create a volume from a
# source volume or a source snapshot, other status states we can not create
# from, 'error' being the common example.
SNAPSHOT_PROCEED_STATUS = (fields.SnapshotStatus.AVAILABLE,)
SRC_VOL_PROCEED_STATUS = ('available', 'in-use',)
REPLICA_PROCEED_STATUS = ('active', 'active-stopped',)
CG_PROCEED_STATUS = ('available', 'creating',)
CGSNAPSHOT_PROCEED_STATUS = ('available',)
GROUP_PROCEED_STATUS = ('available', 'creating',)
BACKUP_PROCEED_STATUS = (fields.BackupStatus.AVAILABLE,)
class ExtractVolumeRequestTask(flow_utils.CinderTask):
"""Processes an api request values into a validated set of values.
This tasks responsibility is to take in a set of inputs that will form
a potential volume request and validates those values against a set of
conditions and/or translates those values into a valid set and then returns
the validated/translated values for use by other tasks.
Reversion strategy: N/A
"""
# This task will produce the following outputs (said outputs can be
# saved to durable storage in the future so that the flow can be
# reconstructed elsewhere and continued).
default_provides = set(['size', 'snapshot_id',
'source_volid', 'volume_type', 'volume_type_id',
'encryption_key_id', 'consistencygroup_id',
'cgsnapshot_id', 'qos_specs', 'group_id',
'refresh_az', 'backup_id', 'availability_zones',
'multiattach'])
def __init__(self, image_service, availability_zones, **kwargs):
super(ExtractVolumeRequestTask, self).__init__(addons=[ACTION],
**kwargs)
self.image_service = image_service
self.availability_zones = availability_zones
@staticmethod
def _extract_resource(resource, allowed_vals, exc, resource_name,
props=('status',)):
"""Extracts the resource id from the provided resource.
This method validates the input resource dict and checks that the
properties which names are passed in `props` argument match
corresponding lists in `allowed` argument. In case of mismatch
exception of type exc is raised.
:param resource: Resource dict.
:param allowed_vals: Tuple of allowed values lists.
:param exc: Exception type to raise.
:param resource_name: Name of resource - used to construct log message.
:param props: Tuple of resource properties names to validate.
:return: Id of a resource.
"""
resource_id = None
if resource:
for prop, allowed_states in zip(props, allowed_vals):
if resource[prop] not in allowed_states:
msg = _("Originating %(res)s %(prop)s must be one of "
"'%(vals)s' values")
msg = msg % {'res': resource_name,
'prop': prop,
'vals': ', '.join(allowed_states)}
# TODO(harlowja): what happens if the status changes after
# this initial resource status check occurs??? Seems like
# someone could delete the resource after this check passes
# but before the volume is officially created?
raise exc(reason=msg)
resource_id = resource['id']
return resource_id
def _extract_consistencygroup(self, consistencygroup):
return self._extract_resource(consistencygroup, (CG_PROCEED_STATUS,),
exception.InvalidConsistencyGroup,
'consistencygroup')
def _extract_group(self, group):
return self._extract_resource(group, (GROUP_PROCEED_STATUS,),
exception.InvalidGroup,
'group')
def _extract_cgsnapshot(self, cgsnapshot):
return self._extract_resource(cgsnapshot, (CGSNAPSHOT_PROCEED_STATUS,),
exception.InvalidCgSnapshot,
'CGSNAPSHOT')
def _extract_snapshot(self, snapshot):
return self._extract_resource(snapshot, (SNAPSHOT_PROCEED_STATUS,),
exception.InvalidSnapshot, 'snapshot')
def _extract_source_volume(self, source_volume):
return self._extract_resource(source_volume, (SRC_VOL_PROCEED_STATUS,),
exception.InvalidVolume, 'source volume')
def _extract_backup(self, backup):
return self._extract_resource(backup, (BACKUP_PROCEED_STATUS,),
exception.InvalidBackup,
'backup')
    @staticmethod
    def _extract_size(size, source_volume, snapshot, backup):
        """Extracts and validates the volume size.

        This function will validate or when not provided fill in the provided
        size variable from the source_volume or snapshot and then does
        validation on the size that is found and returns said validated size.

        :param size: requested size in GB; falsy means "inherit from source"
        :param source_volume: optional source volume dict being cloned
        :param snapshot: optional source snapshot object
        :param backup: optional source backup dict
        :returns: the validated size as an int
        :raises InvalidInput: when the size is not a positive integer or is
            smaller than the selected source object's size
        """
        # Each nested validator raises InvalidInput on failure; which ones
        # run depends on the source the new volume is created from.
        def validate_snap_size(size):
            if snapshot and size < snapshot.volume_size:
                msg = _("Volume size '%(size)s'GB cannot be smaller than"
                        " the snapshot size %(snap_size)sGB. "
                        "They must be >= original snapshot size.")
                msg = msg % {'size': size,
                             'snap_size': snapshot.volume_size}
                raise exception.InvalidInput(reason=msg)
        def validate_source_size(size):
            if source_volume and size < source_volume['size']:
                msg = _("Volume size '%(size)s'GB cannot be smaller than "
                        "original volume size %(source_size)sGB. "
                        "They must be >= original volume size.")
                msg = msg % {'size': size,
                             'source_size': source_volume['size']}
                raise exception.InvalidInput(reason=msg)
        def validate_backup_size(size):
            if backup and size < backup['size']:
                msg = _("Volume size %(size)sGB cannot be smaller than "
                        "the backup size %(backup_size)sGB. "
                        "It must be >= backup size.")
                msg = msg % {'size': size,
                             'backup_size': backup['size']}
                raise exception.InvalidInput(reason=msg)
        def validate_int(size):
            if not isinstance(size, six.integer_types) or size <= 0:
                msg = _("Volume size '%(size)s' must be an integer and"
                        " greater than 0") % {'size': size}
                raise exception.InvalidInput(reason=msg)
        # Figure out which validation functions we should be applying
        # on the size value that we extract.  Note the sources are mutually
        # exclusive here (elif): volume wins over snapshot over backup.
        validator_functors = [validate_int]
        if source_volume:
            validator_functors.append(validate_source_size)
        elif snapshot:
            validator_functors.append(validate_snap_size)
        elif backup:
            validator_functors.append(validate_backup_size)
        # If the size is not provided then try to provide it.
        if not size and source_volume:
            size = source_volume['size']
        elif not size and snapshot:
            size = snapshot.volume_size
        elif not size and backup:
            size = backup['size']
        size = utils.as_int(size)
        LOG.debug("Validating volume size '%(size)s' using %(functors)s",
                  {'size': size,
                   'functors': ", ".join([common.make_pretty_name(func)
                                          for func in validator_functors])})
        for func in validator_functors:
            func(size)
        return size
def _get_image_metadata(self, context, image_id, size):
"""Checks image existence and validates the image metadata.
Returns: image metadata or None
"""
# Check image existence
if image_id is None:
return None
# NOTE(harlowja): this should raise an error if the image does not
# exist, this is expected as it signals that the image_id is missing.
image_meta = self.image_service.show(context, image_id)
vol_utils.check_image_metadata(image_meta, size)
return image_meta
    def _extract_availability_zones(self, availability_zone, snapshot,
                                    source_volume, group, volume_type=None):
        """Extracts and returns a validated availability zone list.

        This function will extract the availability zone (if not provided) from
        the snapshot or source_volume and then performs a set of validation
        checks on the provided or extracted availability zone and then returns
        the validated availability zone.

        :returns: a two-tuple ``(candidate_az_list, refresh_az)`` where
            ``refresh_az`` is True when the requested AZ was not found in the
            cached AZ list (so the caller should refresh that cache).
        """
        refresh_az = False
        type_azs = vol_utils.extract_availability_zones_from_volume_type(
            volume_type)
        type_az_configured = type_azs is not None
        if type_az_configured:
            # Only zones both configured on the volume type and actually
            # known to this service are usable.
            safe_azs = list(
                set(type_azs).intersection(self.availability_zones))
            if not safe_azs:
                raise exception.InvalidTypeAvailabilityZones(az=type_azs)
        else:
            safe_azs = self.availability_zones
        # If the volume will be created in a group, it should be placed in
        # in same availability zone as the group.
        if group:
            try:
                availability_zone = group['availability_zone']
            except (TypeError, KeyError):
                pass
        # Try to extract the availability zone from the corresponding snapshot
        # or source volume if either is valid so that we can be in the same
        # availability zone as the source.
        if availability_zone is None:
            if snapshot:
                try:
                    availability_zone = snapshot['volume']['availability_zone']
                except (TypeError, KeyError):
                    pass
            if source_volume and availability_zone is None:
                try:
                    availability_zone = source_volume['availability_zone']
                except (TypeError, KeyError):
                    pass
        # Config-based defaults only apply when the volume type did not
        # constrain the AZ choice.
        if availability_zone is None and not type_az_configured:
            if CONF.default_availability_zone:
                availability_zone = CONF.default_availability_zone
            else:
                # For backwards compatibility use the storage_availability_zone
                availability_zone = CONF.storage_availability_zone
        if availability_zone and availability_zone not in safe_azs:
            refresh_az = True
            if CONF.allow_availability_zone_fallback:
                original_az = availability_zone
                availability_zone = (
                    CONF.default_availability_zone or
                    CONF.storage_availability_zone)
                LOG.warning("Availability zone '%(s_az)s' "
                            "not found, falling back to "
                            "'%(s_fallback_az)s'.",
                            {'s_az': original_az,
                             's_fallback_az': availability_zone})
            else:
                raise exception.InvalidAvailabilityZone(az=availability_zone)
        # If the configuration only allows cloning to the same availability
        # zone then we need to enforce that.
        if availability_zone and CONF.cloned_volume_same_az:
            snap_az = None
            try:
                snap_az = snapshot['volume']['availability_zone']
            except (TypeError, KeyError):
                pass
            if snap_az and snap_az != availability_zone:
                msg = _("Volume must be in the same "
                        "availability zone as the snapshot")
                raise exception.InvalidInput(reason=msg)
            source_vol_az = None
            try:
                source_vol_az = source_volume['availability_zone']
            except (TypeError, KeyError):
                pass
            if source_vol_az and source_vol_az != availability_zone:
                msg = _("Volume must be in the same "
                        "availability zone as the source volume")
                raise exception.InvalidInput(reason=msg)
        if availability_zone:
            return [availability_zone], refresh_az
        else:
            return safe_azs, refresh_az
    def _get_encryption_key_id(self, key_manager, context, volume_type_id,
                               snapshot, source_volume,
                               image_metadata):
        """Return the encryption key id for the new volume, or None.

        For encrypted volume types the key is inherited from the snapshot,
        source volume or image (and cloned so each volume owns its own key);
        otherwise a brand new key is created.  Unencrypted types yield None.
        """
        encryption_key_id = None
        if volume_types.is_encrypted(context, volume_type_id):
            if snapshot is not None:  # creating from snapshot
                encryption_key_id = snapshot['encryption_key_id']
            elif source_volume is not None:  # cloning volume
                encryption_key_id = source_volume['encryption_key_id']
            elif image_metadata is not None:
                # creating from image
                encryption_key_id = image_metadata.get(
                    'cinder_encryption_key_id')
            # NOTE(joel-coffman): References to the encryption key should *not*
            # be copied because the key is deleted when the volume is deleted.
            # Clone the existing key and associate a separate -- but
            # identical -- key with each volume.
            if encryption_key_id is not None:
                encryption_key_id = vol_utils.clone_encryption_key(
                    context,
                    key_manager,
                    encryption_key_id)
            else:
                encryption_key_id = vol_utils.create_encryption_key(
                    context,
                    key_manager,
                    volume_type_id)
        return encryption_key_id
def _get_volume_type(self, context, volume_type,
source_volume, snapshot, image_volume_type_id):
if volume_type:
return volume_type
identifier = collections.defaultdict(str)
try:
if source_volume:
identifier = {'source': 'volume',
'id': source_volume['volume_type_id']}
elif snapshot:
identifier = {'source': 'snapshot',
'id': snapshot['volume_type_id']}
elif image_volume_type_id:
identifier = {'source': 'image',
'id': image_volume_type_id}
elif CONF.default_volume_type:
identifier = {'source': 'default volume type config',
'id': CONF.default_volume_type}
if identifier:
return objects.VolumeType.get_by_name_or_id(
context, identifier['id'])
except (exception.VolumeTypeNotFound,
exception.VolumeTypeNotFoundByName,
exception.InvalidVolumeType):
LOG.exception("Failed to find volume type from "
"source %(source)s, identifier %(id)s", identifier)
return None
    def execute(self, context, size, snapshot, image_id, source_volume,
                availability_zone, volume_type, metadata, key_manager,
                consistencygroup, cgsnapshot, group, group_snapshot, backup,
                multiattach=False):
        """Validate/normalize the raw request inputs into creation values.

        Runs the _extract_*/_get_* helpers in order (sources, size, groups,
        image, type, AZs, encryption) and returns the dict of values declared
        in ``default_provides`` for downstream tasks.
        """
        # A volume can come from at most one of these sources.
        utils.check_exclusive_options(snapshot=snapshot,
                                      imageRef=image_id,
                                      source_volume=source_volume,
                                      backup=backup)
        context.authorize(policy.CREATE_POLICY)
        # TODO(harlowja): what guarantee is there that the snapshot or source
        # volume will remain available after we do this initial verification??
        snapshot_id = self._extract_snapshot(snapshot)
        source_volid = self._extract_source_volume(source_volume)
        backup_id = self._extract_backup(backup)
        size = self._extract_size(size, source_volume, snapshot, backup)
        consistencygroup_id = self._extract_consistencygroup(consistencygroup)
        cgsnapshot_id = self._extract_cgsnapshot(cgsnapshot)
        group_id = self._extract_group(group)
        image_meta = self._get_image_metadata(context,
                                              image_id,
                                              size)
        # The image may itself declare which volume type to use.
        image_properties = image_meta.get(
            'properties', {}) if image_meta else {}
        image_volume_type = image_properties.get(
            'cinder_img_volume_type', None) if image_properties else None
        volume_type = self._get_volume_type(
            context, volume_type, source_volume, snapshot, image_volume_type)
        volume_type_id = volume_type.get('id') if volume_type else None
        availability_zones, refresh_az = self._extract_availability_zones(
            availability_zone, snapshot, source_volume, group,
            volume_type=volume_type)
        encryption_key_id = self._get_encryption_key_id(
            key_manager,
            context,
            volume_type_id,
            snapshot,
            source_volume,
            image_meta)
        if volume_type_id:
            volume_type = objects.VolumeType.get_by_name_or_id(
                context, volume_type_id)
            extra_specs = volume_type.get('extra_specs', {})
            # NOTE(tommylikehu): Although the parameter `multiattach` from
            # create volume API is deprecated now, we still need to consider
            # it when multiattach is not enabled in volume type.
            multiattach = (extra_specs.get(
                'multiattach', '') == '<is> True' or multiattach)
        if multiattach and encryption_key_id:
            msg = _('Multiattach cannot be used with encrypted volumes.')
            raise exception.InvalidVolume(reason=msg)
        if multiattach:
            context.authorize(policy.MULTIATTACH_POLICY)
        specs = {}
        if volume_type_id:
            qos_specs = volume_types.get_volume_type_qos_specs(volume_type_id)
            if qos_specs['qos_specs']:
                specs = qos_specs['qos_specs'].get('specs', {})
            # Determine default replication status
            extra_specs = volume_types.get_volume_type_extra_specs(
                volume_type_id)
        if not specs:
            # to make sure we don't pass empty dict
            specs = None
            extra_specs = None
        if vol_utils.is_replicated_spec(extra_specs):
            replication_status = fields.ReplicationStatus.ENABLED
        else:
            replication_status = fields.ReplicationStatus.DISABLED
        return {
            'size': size,
            'snapshot_id': snapshot_id,
            'source_volid': source_volid,
            'volume_type': volume_type,
            'volume_type_id': volume_type_id,
            'encryption_key_id': encryption_key_id,
            'qos_specs': specs,
            'consistencygroup_id': consistencygroup_id,
            'cgsnapshot_id': cgsnapshot_id,
            'group_id': group_id,
            'replication_status': replication_status,
            'refresh_az': refresh_az,
            'backup_id': backup_id,
            'multiattach': multiattach,
            'availability_zones': availability_zones
        }
class EntryCreateTask(flow_utils.CinderTask):
    """Creates an entry for the given volume creation in the database.

    Reversion strategy: remove the volume_id created from the database.
    """
    default_provides = set(['volume_properties', 'volume_id', 'volume'])
    def __init__(self):
        requires = ['description', 'metadata',
                    'name', 'reservations', 'size', 'snapshot_id',
                    'source_volid', 'volume_type_id', 'encryption_key_id',
                    'consistencygroup_id', 'cgsnapshot_id', 'multiattach',
                    'qos_specs', 'group_id', 'availability_zones']
        super(EntryCreateTask, self).__init__(addons=[ACTION],
                                              requires=requires)
    def execute(self, context, optional_args, **kwargs):
        """Creates a database entry for the given inputs and returns details.

        Accesses the database and creates a new entry for the to be created
        volume using the given volume properties which are extracted from the
        input kwargs (and associated requirements this task needs). These
        requirements should be previously satisfied and validated by a
        pre-cursor task.
        """
        # A volume cloned from a bootable volume (or from a snapshot whose
        # origin volume is bootable) is itself marked bootable.
        src_volid = kwargs.get('source_volid')
        src_vol = None
        if src_volid is not None:
            src_vol = objects.Volume.get_by_id(context, src_volid)
        bootable = False
        if src_vol is not None:
            bootable = src_vol.bootable
        elif kwargs.get('snapshot_id'):
            snapshot = objects.Snapshot.get_by_id(context,
                                                  kwargs.get('snapshot_id'))
            volume_id = snapshot.volume_id
            snp_vol = objects.Volume.get_by_id(context, volume_id)
            if snp_vol is not None:
                bootable = snp_vol.bootable
        availability_zones = kwargs.pop('availability_zones')
        volume_properties = {
            'size': kwargs.pop('size'),
            'user_id': context.user_id,
            'project_id': context.project_id,
            'status': 'creating',
            'attach_status': fields.VolumeAttachStatus.DETACHED,
            'encryption_key_id': kwargs.pop('encryption_key_id'),
            # Rename these to the internal name.
            'display_description': kwargs.pop('description'),
            'display_name': kwargs.pop('name'),
            'multiattach': kwargs.pop('multiattach'),
            'bootable': bootable,
        }
        # Only pin the AZ now when there is exactly one candidate; otherwise
        # the scheduler gets to pick from the list.
        if len(availability_zones) == 1:
            volume_properties['availability_zone'] = availability_zones[0]
        # Merge in the other required arguments which should provide the rest
        # of the volume property fields (if applicable).
        volume_properties.update(kwargs)
        volume = objects.Volume(context=context, **volume_properties)
        volume.create()
        # FIXME(dulek): We're passing this volume_properties dict through RPC
        # in request_spec. This shouldn't be needed, most data is replicated
        # in both volume and other places. We should make Newton read data
        # from just one correct place and leave just compatibility code.
        #
        # Right now - let's move it to versioned objects to be able to make
        # non-backward compatible changes.
        volume_properties = objects.VolumeProperties(**volume_properties)
        return {
            'volume_id': volume['id'],
            'volume_properties': volume_properties,
            # NOTE(harlowja): it appears like further usage of this volume
            # result actually depend on it being a sqlalchemy object and not
            # just a plain dictionary so that's why we are storing this here.
            #
            # In the future where this task results can be serialized and
            # restored automatically for continued running we will need to
            # resolve the serialization & recreation of this object since raw
            # sqlalchemy objects can't be serialized.
            'volume': volume,
        }
    def revert(self, context, result, optional_args, **kwargs):
        """Destroy the DB entry, unless quota was already committed."""
        if isinstance(result, ft.Failure):
            # We never produced a result and therefore can't destroy anything.
            return
        if optional_args['is_quota_committed']:
            # If quota got committed we shouldn't rollback as the volume has
            # already been created and the quota has already been absorbed.
            return
        volume = result['volume']
        try:
            volume.destroy()
        except exception.CinderException:
            # We are already reverting, therefore we should silence this
            # exception since a second exception being active will be bad.
            #
            # NOTE(harlowja): Being unable to destroy a volume is pretty
            # bad though!!
            LOG.exception("Failed destroying volume entry %s", volume.id)
class QuotaReserveTask(flow_utils.CinderTask):
    """Reserves a single volume with the given size & the given volume type.

    Reversion strategy: rollback the quota reservation.

    Warning Warning: if the process that is running this reserve and commit
    process fails (or is killed before the quota is rolled back or committed
    it does appear like the quota will never be rolled back). This makes
    software upgrades hard (inflight operations will need to be stopped or
    allowed to complete before the upgrade can occur). *In the future* when
    taskflow has persistence built-in this should be easier to correct via
    an automated or manual process.
    """
    default_provides = set(['reservations'])
    def __init__(self):
        super(QuotaReserveTask, self).__init__(addons=[ACTION])
    def execute(self, context, size, volume_type_id, group_snapshot,
                optional_args):
        """Reserve quota for one volume of ``size`` GB; provides reservations."""
        try:
            # Enforce the per-volume size cap before reserving anything.
            values = {'per_volume_gigabytes': size}
            QUOTAS.limit_check(context, project_id=context.project_id,
                               **values)
        except exception.OverQuota as e:
            quotas = e.kwargs['quotas']
            raise exception.VolumeSizeExceedsLimit(
                size=size, limit=quotas['per_volume_gigabytes'])
        try:
            # Creating from a group snapshot only consumes a volume slot;
            # the gigabytes were accounted for by the original group.
            if group_snapshot:
                reserve_opts = {'volumes': 1}
            else:
                reserve_opts = {'volumes': 1, 'gigabytes': size}
            if ('update_size' in optional_args
                    and optional_args['update_size']):
                # Only the size is changing, so don't reserve another slot.
                reserve_opts.pop('volumes', None)
            QUOTAS.add_volume_type_opts(context, reserve_opts, volume_type_id)
            reservations = QUOTAS.reserve(context, **reserve_opts)
            return {
                'reservations': reservations,
            }
        except exception.OverQuota as e:
            quota_utils.process_reserve_over_quota(context, e,
                                                   resource='volumes',
                                                   size=size)
    def revert(self, context, result, optional_args, **kwargs):
        """Roll back the reservation unless it was already committed."""
        # We never produced a result and therefore can't destroy anything.
        if isinstance(result, ft.Failure):
            return
        if optional_args['is_quota_committed']:
            # The reservations have already been committed and can not be
            # rolled back at this point.
            return
        # We actually produced an output that we can revert so lets attempt
        # to use said output to rollback the reservation.
        reservations = result['reservations']
        try:
            QUOTAS.rollback(context, reservations)
        except exception.CinderException:
            # We are already reverting, therefore we should silence this
            # exception since a second exception being active will be bad.
            LOG.exception("Failed rolling back quota for"
                          " %s reservations", reservations)
class QuotaCommitTask(flow_utils.CinderTask):
    """Commits the reservation.

    Reversion strategy: N/A (the rollback will be handled by the task that did
    the initial reservation (see: QuotaReserveTask).

    Warning Warning: if the process that is running this reserve and commit
    process fails (or is killed before the quota is rolled back or committed
    it does appear like the quota will never be rolled back). This makes
    software upgrades hard (inflight operations will need to be stopped or
    allowed to complete before the upgrade can occur). *In the future* when
    taskflow has persistence built-in this should be easier to correct via
    an automated or manual process.
    """
    def __init__(self):
        super(QuotaCommitTask, self).__init__(addons=[ACTION])
    def execute(self, context, reservations, volume_properties,
                optional_args):
        """Commit the reservations and flag them committed for later reverts."""
        QUOTAS.commit(context, reservations)
        # updating is_quota_committed attribute of optional_args dictionary
        # (shared state read by earlier tasks' revert() methods to know that
        # rollback of the reservation is no longer possible).
        optional_args['is_quota_committed'] = True
        return {'volume_properties': volume_properties}
    def revert(self, context, result, **kwargs):
        """Give committed quota back by reserving/committing negative deltas."""
        # We never produced a result and therefore can't destroy anything.
        if isinstance(result, ft.Failure):
            return
        volume = result['volume_properties']
        try:
            reserve_opts = {'volumes': -1, 'gigabytes': -volume['size']}
            QUOTAS.add_volume_type_opts(context,
                                        reserve_opts,
                                        volume['volume_type_id'])
            reservations = QUOTAS.reserve(context,
                                          project_id=context.project_id,
                                          **reserve_opts)
            if reservations:
                QUOTAS.commit(context, reservations,
                              project_id=context.project_id)
        except Exception:
            # Best-effort: a failed un-commit is logged, not raised, since we
            # are already in the middle of reverting a larger flow.
            LOG.exception("Failed to update quota for deleting "
                          "volume: %s", volume['id'])
class VolumeCastTask(flow_utils.CinderTask):
    """Performs a volume create cast to the scheduler or to the volume manager.

    This will signal a transition of the api workflow to another child and/or
    related workflow on another component.

    Reversion strategy: rollback source volume status and error out newly
    created volume.
    """
    def __init__(self, scheduler_rpcapi, volume_rpcapi, db):
        requires = ['image_id', 'scheduler_hints', 'snapshot_id',
                    'source_volid', 'volume_id', 'volume', 'volume_type',
                    'volume_properties', 'consistencygroup_id',
                    'cgsnapshot_id', 'group_id', 'backup_id',
                    'availability_zones']
        super(VolumeCastTask, self).__init__(addons=[ACTION],
                                             requires=requires)
        self.volume_rpcapi = volume_rpcapi
        self.scheduler_rpcapi = scheduler_rpcapi
        self.db = db
    def _cast_create_volume(self, context, request_spec, filter_properties):
        """Attach a backend placement hint (CG/group/snapshot/source volume)
        to the request spec and cast it to the scheduler service.
        """
        source_volid = request_spec['source_volid']
        volume = request_spec['volume']
        snapshot_id = request_spec['snapshot_id']
        image_id = request_spec['image_id']
        cgroup_id = request_spec['consistencygroup_id']
        group_id = request_spec['group_id']
        backup_id = request_spec['backup_id']
        if cgroup_id:
            # If cgroup_id existed, we should cast volume to the scheduler
            # to choose a proper pool whose backend is same as CG's backend.
            cgroup = objects.ConsistencyGroup.get_by_id(context, cgroup_id)
            request_spec['resource_backend'] = vol_utils.extract_host(
                cgroup.resource_backend)
        elif group_id:
            # If group_id exists, we should cast volume to the scheduler
            # to choose a proper pool whose backend is same as group's backend.
            group = objects.Group.get_by_id(context, group_id)
            request_spec['resource_backend'] = vol_utils.extract_host(
                group.resource_backend)
        elif snapshot_id and CONF.snapshot_same_host:
            # NOTE(Rongze Zhu): A simple solution for bug 1008866.
            #
            # If snapshot_id is set and CONF.snapshot_same_host is True, make
            # the call create volume directly to the volume host where the
            # snapshot resides instead of passing it through the scheduler, so
            # snapshot can be copied to the new volume.
            # NOTE(tommylikehu): In order to check the backend's capacity
            # before creating volume, we schedule this request to scheduler
            # service with the desired backend information.
            snapshot = objects.Snapshot.get_by_id(context, snapshot_id)
            request_spec['resource_backend'] = snapshot.volume.resource_backend
        elif source_volid:
            source_volume_ref = objects.Volume.get_by_id(context, source_volid)
            request_spec['resource_backend'] = (
                source_volume_ref.resource_backend)
        self.scheduler_rpcapi.create_volume(
            context,
            volume,
            snapshot_id=snapshot_id,
            image_id=image_id,
            request_spec=request_spec,
            filter_properties=filter_properties,
            backup_id=backup_id)
    def execute(self, context, **kwargs):
        """Build a RequestSpec object from the inputs and cast it onwards."""
        scheduler_hints = kwargs.pop('scheduler_hints', None)
        db_vt = kwargs.pop('volume_type')
        kwargs['volume_type'] = None
        if db_vt:
            # Convert the raw DB volume type into a versioned object so it
            # can travel inside the RequestSpec over RPC.
            kwargs['volume_type'] = objects.VolumeType()
            objects.VolumeType()._from_db_object(context,
                                                 kwargs['volume_type'], db_vt)
        request_spec = objects.RequestSpec(**kwargs)
        filter_properties = {}
        if scheduler_hints:
            filter_properties['scheduler_hints'] = scheduler_hints
        self._cast_create_volume(context, request_spec, filter_properties)
    def revert(self, context, result, flow_failures, volume, **kwargs):
        """Restore the source volume's status and mark this volume errored."""
        if isinstance(result, ft.Failure):
            return
        # Restore the source volume status and set the volume to error status.
        common.restore_source_status(context, self.db, kwargs)
        common.error_out(volume)
        LOG.error("Volume %s: create failed", volume.id)
        exc_info = False
        # Only attach traceback info when the last failure carries a full,
        # non-empty exc_info triple.
        if all(flow_failures[-1].exc_info):
            exc_info = flow_failures[-1].exc_info
        LOG.error('Unexpected build error:', exc_info=exc_info)
def get_flow(db_api, image_service_api, availability_zones, create_what,
             scheduler_rpcapi=None, volume_rpcapi=None):
    """Constructs and returns the api entrypoint flow.

    This flow will do the following:

    1. Inject keys & values for dependent tasks.
    2. Extracts and validates the input keys & values.
    3. Reserves the quota (reverts quota on any failures).
    4. Creates the database entry.
    5. Commits the quota.
    6. Casts to volume manager or scheduler for further processing.
    """
    flow_name = ACTION.replace(":", "_") + "_api"
    api_flow = linear_flow.Flow(flow_name)
    # The 'raw_*' rebinds let the request task consume the caller-supplied
    # raw values while providing the validated names to later tasks.
    api_flow.add(ExtractVolumeRequestTask(
        image_service_api,
        availability_zones,
        rebind={'size': 'raw_size',
                'availability_zone': 'raw_availability_zone',
                'volume_type': 'raw_volume_type',
                'multiattach': 'raw_multiattach'}))
    api_flow.add(QuotaReserveTask(),
                 EntryCreateTask(),
                 QuotaCommitTask())
    if scheduler_rpcapi and volume_rpcapi:
        # This will cast it out to either the scheduler or volume manager via
        # the rpc apis provided.
        api_flow.add(VolumeCastTask(scheduler_rpcapi, volume_rpcapi, db_api))
    # Now load (but do not run) the flow using the provided initial data.
    return taskflow.engines.load(api_flow, store=create_what)
|
[
"Wayne Gong@minbgong-winvm.cisco.com"
] |
Wayne Gong@minbgong-winvm.cisco.com
|
9e38870ae3f2a1e81468c8ad0816a0ddb58d274b
|
93cef55e254d64a23e28a4228e743a35188786c8
|
/paciente/migrations/0004_preguntarespuesta_pregunta_historia.py
|
eea45754325c19dc7f97e8b160105213f47cd5de
|
[] |
no_license
|
USBeHealthProject/eHealth
|
25f5cf03c6e3677c0c43b4a3e1448a18055b7995
|
c995150403a96ef221b8ba77beb9950eb00776e2
|
refs/heads/master
| 2021-01-13T16:28:50.642932
| 2017-03-31T15:17:02
| 2017-03-31T15:17:02
| 79,489,372
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 435
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable text field ``pregunta_historia`` to PreguntaRespuesta."""
    dependencies = [
        ('paciente', '0003_auto_20170226_2048'),
    ]
    operations = [
        migrations.AddField(
            model_name='preguntarespuesta',
            name='pregunta_historia',
            # null=True keeps the migration safe for pre-existing rows.
            field=models.CharField(max_length=200, null=True),
        ),
    ]
|
[
"marisela.delvalle93@gmail.com"
] |
marisela.delvalle93@gmail.com
|
d25d228bda99746095522c9c4e52fb45ffbbdb12
|
f21adfcc4e7e0863a015f878e2ff3ed9d4119eb3
|
/part_a/Franke.py
|
a2c7fea284fd0f0f45ff824de38baf381e6c0f74
|
[] |
no_license
|
arnlaugs/Fys-Stk-Project-1
|
8c7dbaa4ad0818cee0ec8f75dd9fe1ae070f179b
|
9685239fe752434c56e624e3997830670b1805e9
|
refs/heads/master
| 2020-03-28T14:08:57.067791
| 2018-10-08T17:47:53
| 2018-10-08T17:47:53
| 148,460,960
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 973
|
py
|
# Importing functions from folder with common functions for project 1
import sys
sys.path.append('../functions')
from functions import *
from regression import OLS
import numpy as np
# Number of sample points per axis (the meshes below are N x N).
N = 1000
# Sorted uniform samples in [0, 1] used as the evaluation grid axes.
x = np.sort(np.random.uniform(0, 1, N))
y = np.sort(np.random.uniform(0, 1, N))
x_mesh_, y_mesh_ = np.meshgrid(x,y)
z = FrankeFunction(x_mesh_, y_mesh_)
# Add noise
z_noise = z + np.random.normal(scale = 1, size = (N,N))
# Plot Franke
fig, ax, surf = plot_surface(x_mesh_, y_mesh_, z, figsize = (2.64429, 1.98322))
fig.savefig("Franke.png")
# Create best-fit matrix for plotting
x_r = np.linspace(0,1,N)
y_r = np.linspace(0,1,N)
# NOTE(review): x_r/y_r are built but never used -- the meshgrid below reuses
# the random x, y.  This looks like it was meant to be np.meshgrid(x_r, y_r);
# confirm before relying on the plotting grid being regular.
x_mesh, y_mesh = np.meshgrid(x,y)
model = OLS()
# Plot for higher order as well
for m in [2, 5, 10]:
    # Polynomial design matrices of order m: X for fitting on the noisy
    # data, X_r for evaluating the fitted surface on the plotting grid.
    X = create_X(x_mesh_, y_mesh_, n=m)
    X_r = create_X(x_mesh, y_mesh, n=m)
    model.fit(X, z_noise)
    fig, ax, surf = plot_surface(x_mesh, y_mesh, (model.predict(X_r)).reshape((N,N)), figsize = (2.64429, 1.98322))
    fig.savefig("FrankeOrder%i.png" %m)
|
[
"markus@asprusten.com"
] |
markus@asprusten.com
|
3884a03b732ddce02f8fc79170ebb93b2dac2c48
|
209a7a4023a9a79693ec1f6e8045646496d1ea71
|
/COMP0016_2020_21_Team12-datasetsExperimentsAna/pwa/FADapp/pythonScripts/venv/Lib/site-packages/pandas/tests/test_errors.py
|
9e887c00103f2f58623c9fbf4362f2812a1b757a
|
[
"MIT"
] |
permissive
|
anzhao920/MicrosoftProject15_Invictus
|
5e2347015411bbffbdf0ceb059df854661fb240c
|
15f44eebb09561acbbe7b6730dfadf141e4c166d
|
refs/heads/main
| 2023-04-16T13:24:39.332492
| 2021-04-27T00:47:13
| 2021-04-27T00:47:13
| 361,913,170
| 0
| 0
|
MIT
| 2021-04-26T22:41:56
| 2021-04-26T22:41:55
| null |
UTF-8
|
Python
| false
| false
| 1,740
|
py
|
import pytest
from pandas.errors import AbstractMethodError
import pandas as pd
@pytest.mark.parametrize(
    "exc",
    [
        "UnsupportedFunctionCall",
        "UnsortedIndexError",
        "OutOfBoundsDatetime",
        "ParserError",
        "PerformanceWarning",
        "DtypeWarning",
        "EmptyDataError",
        "ParserWarning",
        "MergeError",
        "OptionError",
        "NumbaUtilError",
    ],
)
def test_exception_importable(exc):
    """Each public exception is reachable via pandas.errors and raisable."""
    from pandas import errors
    exc_class = getattr(errors, exc)
    assert exc_class is not None
    # A no-argument instance must raise and match an empty message.
    with pytest.raises(exc_class, match="^$"):
        raise exc_class()
def test_catch_oob():
    """Timestamps before the nanosecond-representable range raise OOB."""
    from pandas import errors
    expected = "Out of bounds nanosecond timestamp: 1500-01-01 00:00:00"
    with pytest.raises(errors.OutOfBoundsDatetime, match=expected):
        pd.Timestamp("15000101")
class Foo:
    """Fixture whose members each raise AbstractMethodError with the matching
    ``methodtype`` so the error messages can be asserted on.
    """
    @classmethod
    def classmethod(cls):  # deliberately named after the methodtype
        raise AbstractMethodError(cls, methodtype="classmethod")
    @property
    def property(self):
        raise AbstractMethodError(self, methodtype="property")
    def method(self):
        raise AbstractMethodError(self)
def test_AbstractMethodError_classmethod():
    """AbstractMethodError messages name the method kind and concrete class."""
    cases = [
        ("This classmethod must be defined in the concrete class Foo",
         lambda: Foo.classmethod()),
        ("This property must be defined in the concrete class Foo",
         lambda: Foo().property),
        ("This method must be defined in the concrete class Foo",
         lambda: Foo().method()),
    ]
    for xpr, trigger in cases:
        with pytest.raises(AbstractMethodError, match=xpr):
            trigger()
|
[
"ana.kapros@yahoo.ro"
] |
ana.kapros@yahoo.ro
|
7899cad4d30def6631efa09ea7835c4e2598f2f1
|
3705360ad17f0c6821cf1c683982c50413e23633
|
/audioread-2.1.8/setup.py
|
bd90605d1910c452598043d29addb8e6bd2c5f3a
|
[
"MIT"
] |
permissive
|
sleekEagle/audio_CNN
|
17725d7cc6da315c4b7a3a55d697b4451dbeebcd
|
33e63fa37c04c9b85873f09449cf7a8ffc31e330
|
refs/heads/master
| 2020-06-06T01:03:46.349390
| 2019-07-02T16:22:07
| 2019-07-02T16:22:07
| 192,596,089
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,941
|
py
|
# This file is part of audioread.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
from setuptools import setup
import imp
# NOTE(review): ``imp`` is deprecated (removed in Python 3.12); consider
# importlib.util.spec_from_file_location -- confirm the supported Python
# range before changing.  Loads the version module without importing the
# (not-yet-installed) package.
version = imp.load_source('audioread.version', 'audioread/version.py')
def _read(fn):
path = os.path.join(os.path.dirname(__file__), fn)
return open(path).read()
# Package metadata; long_description is pulled from README.rst at build time.
setup(name='audioread',
      version=version.version,
      description='multi-library, cross-platform audio decoding',
      author='Adrian Sampson',
      author_email='adrian@radbox.org',
      url='https://github.com/sampsyo/audioread',
      license='MIT',
      platforms='ALL',
      long_description=_read('README.rst'),
      packages=['audioread'],
      # Test tooling only; not a runtime requirement.
      setup_requires=[
          'pytest-runner'
      ],
      tests_require=[
          'pytest'
      ],
      classifiers=[
          'Topic :: Multimedia :: Sound/Audio :: Conversion',
          'Intended Audience :: Developers',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.2',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
      ],
      )
|
[
"lahirunuwanisme@gmail.com"
] |
lahirunuwanisme@gmail.com
|
18860f487bd6c5951eccbcd21cb4cfef1e90fc37
|
c8554d0aaa45cb087e5c36334477f10047d57394
|
/gradio/demo.py
|
2ad81ef24cdb3e645331aacae729fd20cec78082
|
[
"Apache-2.0"
] |
permissive
|
eyaler/U-2-Net
|
bd6ef847ee96cc1a2f375db6cc732ea6caa6aa32
|
2f54e75e1f05f5ca25e75480f275dc1eeadafdc4
|
refs/heads/master
| 2023-08-12T04:09:15.122278
| 2021-10-03T21:55:23
| 2021-10-03T21:55:23
| 328,661,996
| 1
| 0
|
Apache-2.0
| 2021-10-03T21:55:23
| 2021-01-11T12:47:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,360
|
py
|
import cv2
import paddlehub as hub
import gradio as gr
import torch
# Images
# Fetch two sample images so the demo's example gallery has local files.
torch.hub.download_url_to_file('https://cdn.pixabay.com/photo/2018/08/12/16/59/ara-3601194_1280.jpg', 'parrot.jpg')
torch.hub.download_url_to_file('https://cdn.pixabay.com/photo/2016/10/21/14/46/fox-1758183_1280.jpg', 'fox.jpg')
# Pretrained U^2-Net salient-object segmentation module from PaddleHub.
model = hub.Module(name='U2Net')
def infer(img):
    """Segment the salient object in the uploaded image file.

    Returns a (front, mask) pair: the cut-out foreground with channel order
    reversed ([:, :, ::-1] -- presumably BGR->RGB since the input comes from
    cv2.imread; confirm against the PaddleHub output format) and the mask.
    """
    result = model.Segmentation(
        images=[cv2.imread(img.name)],
        paths=None,
        batch_size=1,
        input_size=320,
        output_dir='output',
        visualization=True)
    return result[0]['front'][:,:,::-1], result[0]['mask']
# Gradio UI wiring: one image upload in, the foreground and mask out.
inputs = gr.inputs.Image(type='file', label="Original Image")
outputs = [
    gr.outputs.Image(type="numpy",label="Front"),
    gr.outputs.Image(type="numpy",label="Mask")
]
title = "U^2-Net"
description = "demo for U^2-Net. To use it, simply upload your image, or click one of the examples to load them. Read more at the links below."
article = "<p style='text-align: center'><a href='https://arxiv.org/abs/2005.09007'>U^2-Net: Going Deeper with Nested U-Structure for Salient Object Detection</a> | <a href='https://github.com/xuebinqin/U-2-Net'>Github Repo</a></p>"
examples = [
    ['fox.jpg'],
    ['parrot.jpg']
]
gr.Interface(infer, inputs, outputs, title=title, description=description, article=article, examples=examples).launch()
|
[
"ahsengradio@gmail.com"
] |
ahsengradio@gmail.com
|
c86e8c832fe8478204873265025171e0b55c9257
|
6f75a44565c7ddaffa37d4c0dc88d643d596043d
|
/fundamental_view/preprocesser.py
|
46816daf1720ef85a9ed1ac6023e8c7c45e758b4
|
[] |
no_license
|
SangHyung-Jung/stock_prediction_boaz
|
2ee5c064506cc2c19f9b2305b0688734c0a54774
|
d2a3b34dbcc7991c7e1f48fb39cec64bf9ab973f
|
refs/heads/master
| 2022-12-22T20:21:56.899231
| 2020-10-06T06:53:24
| 2020-10-06T06:53:24
| 288,638,022
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,452
|
py
|
import pandas as pd
import numpy as np
from datetime import datetime
class Preprocesser:
def __init__(self):
pass
##0815 함수 이름 수정함.
# 재무 데이터 컬럼 이름 설정
def set_fn_data_col_names(self, df):
col_list = ['Date', 'asset(1000)', 'debt(1000)', 'sales(1000)',
'adjusted_price', 'profit_rate', 'volumne(mean_5d)(week)',
'volumne(week)','profit(1000)',
'term_profit(1000)', 'gross_margin(1000)','adjusted_BPS', 'adjusted_EPS']
df.columns = col_list
return df
def add_feature_name(self, feature_name_list, file_name):
dotpos = file_name.find('.')
file_name = file_name[:dotpos]
new_col = ['Date']
for i in range(feature_name_list)-1:
new_name = file_name+feature_name_list[i+1]
new_col += new_name
return new_col
###0815 함수내부에서 RESET_INDEX까지 시키도록 수정함.
# 인덱스 설정하고 원하는 연도부터 프레임 설정(재무)
def set_funda_data(self, data, start_date):
"""
set_funda_data(df,'2010-01-05')
"""
data = data.iloc[5:]
data.Date = pd.to_datetime(data.Date)
data.set_index(['Date'],inplace=True)
data = data.loc[start_date:]
data = data.reset_index()
return data
###0815 함수내부에서 RESET_INDEX까지 시키도록 수정함.
# 인덱스 설정하고 원하는 연도부터 프레임 설정(피쳐)
def set_feature_data(self, data, start_date):
data.Date = pd.to_datetime(data.Date)
data.set_index(['Date'],inplace=True)
data = data.loc[start_date:]
data = data.reset_index()
return data
##0815 정만오빠 파일이랑 살짝 다름. 두 함수를 합침
def replace_date_form(self,data):
data['Date'] = data['Date'].str.replace('년 ','-')
data['Date'] = data['Date'].str.replace('월 ','-')
data['Date'] = data['Date'].str.replace('일','')
for i in data.columns[1:]:
#print(i)
data[i]
col_name = '{}'.format(i)
data[i] = data[i].astype('str')
data[i] = data[i].str.replace('K','')
data[i] = data[i].str.replace('%','')
data.replace('-', np.nan, inplace=True)
data.sort_values(by='Date', ascending=True, inplace=True)
return data
#아래는 쓰이지 않은 것들.
"""
#화장품 fundamental data 내 존재하는 monthly data를 날짜별로 만들기 ( 현재 datetime으로 되어 있고, 모든 날짜에 값 부여 필요 )
def mkdatetime_mth(self, data):
data2 = data[data['년월']['년월'].str.contains("월")]
index = data2.index
for i, mth in enumerate(data2['년월']['년월']):
m = int(mth[1:-1])
idx = index[i]
idx = idx - m
y = int(data['년월']['년월'][idx][:-1])
print(y, m)
data2.iat[i, 0] = datetime(y, m, 1)
return data2
# #데이터 불러와서 datetime으로 만들고, 각 컬럼 모두 합치기, 수연이한테 각 컬럼명이 무엇을 뜻하는 지 물어볼 것
# def read_mth_data(self, data, domain):
# domain = "화장품"
# os.chdir(r"G:\공유 드라이브\Boad ADV Stock\{}\fundamental".format(domain))
# data = pd.read_excel("330112_오렌지유.xls", skiprows = [0, 1], header=[0,1])
# pd.read_excel("330113_레몬유.xls", skiprows = [0, 1], header=[0,1])
# 특정 연도 사이 휴일 리스트
def check_holiday(self, start_year, last_year):
"""
# check_hodiday(2010,2020)
"""
total_holiday = list()
for year in range(start_year, last_year):
hlist = list()
krholiday = [d.strftime('%Y-%m-%d') for d in pytimekr.holidays(year)]
week_sat = [d.strftime('%Y-%m-%d') for d in pd.date_range('{}-01-01'.format(year),'{}-12-31'.format(year),freq='W-SAT')]
week_sun = [d.strftime('%Y-%m-%d') for d in pd.date_range('{}-01-01'.format(year),'{}-12-31'.format(year),freq='W-SUN')]
hlist.extend(krholiday)
hlist.extend(week_sat)
hlist.extend(week_sun)
total_holiday.extend(pd.unique(hlist))
sorted(total_holiday)
return total_holiday
# 특정 연도 사이 휴일 x 리스트
def check_workday(self, start_year, last_year):
"""
# check_workday(2010,2020)
"""
workday = list()
for d in pd.date_range('{}-01-01'.format(start_year),'{}-12-31'.format(last_year)):
if d.strftime('%Y-%m-%d') not in check_holiday(start_year, last_year):
workday.append(d.strftime('%Y-%m-%d'))
return workday
# 데이터 프레임에서 휴일 아닌 날만 뽑기
def set_index_workday(self, df,start_year,last_year):
work = pd.DataFrame(check_workday(start_year, last_year),columns=['Date'])
work.Date = pd.to_datetime(work.Date)
df.reset_index(inplace=True)
work_df = pd.merge(df,work, on ='Date',how='inner')
work_df.set_index(['Date'],inplace=True)
# work_df = pd.concat([df,work], join_axes=['Date'],join='inner',axis=0)
return work_df
# 회시별 재무 데이터 테이블
def individual_feauture(self, stockname):
df = load_funda_data(stockname)
df = set_col_names(df)
df = set_funda_data(df)
return df
# 산업 공통 피쳐
def common_feature(self,featurename):
df = load_funda_data(featurename)
df = feature_col_name(df)
df = set_feature_data(df)
return df
# 산업 공통 피쳐 테이블
def common_feature_table(self):
os.chdir(r'G:\공유 드라이브\Boad ADV Stock\2차전지\funda_data')
filelist = os.listdir(os.getcwd())
# filelist
feature_df = pd.DataFrame()
for feature in filelist:
add_df = common_feature(feature)
feature_df = pd.merge(feature_df, add_df, on='Date', how ='left')
return
"""
|
[
"cukirhkwk@hanyang.ac.kr"
] |
cukirhkwk@hanyang.ac.kr
|
e34582f98f0645eb530e767a8736ad3fdbaec08f
|
e37894e5502f18ffcc688e88e30eb6f54afcabb7
|
/django-tutorial/test/ModelTest.py
|
c11d2e43ed636c4f3bc983918c3827baeba2f7ed
|
[] |
no_license
|
dangq/test_django
|
2764e87b9509b02564aa94c52c713594d74cd379
|
30f39fb310a3e228778cfa070b82495968078c82
|
refs/heads/master
| 2021-01-17T18:22:44.437647
| 2016-09-27T01:19:31
| 2016-09-27T01:19:31
| 59,747,566
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 618
|
py
|
from src.model.CandidateMovementModel import CandidateMovementModel
from src.predict.CandidateMovementPrediction import CandidateMovementPrediction
import src.const.TemplateData as templData
# Create Model
loc_training_data = '../data/trainingdata/dataset_07_Apr.csv'
can = CandidateMovementModel(loc_training_data)
model = can.createCandidateMovementModel()
#print model
#print type(model)
# test
test_data = "../data/test/sample_test_2.csv"
file_save= "../data/result_prediction.csv"
predict = CandidateMovementPrediction(model,test_data,file_save)
data = predict.calProbMovementPrediction()
#print type(data)
|
[
"an.dang@jvn.edu.vn"
] |
an.dang@jvn.edu.vn
|
ad33576265fdfba0ce89d23f8f9daa5399a47c72
|
79710c763de75f2b8aaaebb6e9e69fa01edfa451
|
/varaston_hallinta/asgi.py
|
76af613bc6ee0c9fc31fa89e2ad171e0dc0bc210
|
[] |
no_license
|
mika1407/varasto
|
0617b9bf1f5c3838dc8da198ea012831b1cba1f3
|
ad7f3158e10e79523c073f8062e487fb57e49c6b
|
refs/heads/main
| 2023-04-12T20:21:27.399207
| 2021-05-04T13:53:03
| 2021-05-04T13:53:03
| 363,675,570
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 411
|
py
|
"""
ASGI config for varaston_hallinta project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'varaston_hallinta.settings')
application = get_asgi_application()
|
[
"koneautomaatio@hotmail.com"
] |
koneautomaatio@hotmail.com
|
f2398be14fed7aa56b8340c3607d6ee35234bdd7
|
77bfeb1655a39a794dae79aea73aecaef2eaf8bb
|
/console.py
|
72705250870dec1a2ac35d5e97accbe6dfe63f2f
|
[] |
no_license
|
AJThomas145/week4_DB_music_lab
|
098f2b147091ab28c013091b2ddfe8100ad43860
|
981bcb08c145a4388541e97941b41007a23e0c25
|
refs/heads/main
| 2023-08-30T18:02:15.945826
| 2021-09-29T08:59:07
| 2021-09-29T08:59:07
| 411,318,011
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 766
|
py
|
import pdb
from models.artist import Artist
from models.album import Album
import repositories.artist_repository as artist_repository
import repositories.album_repository as album_repository
album_repository.delete_all()
artist_repository.delete_all()
artist_1 = Artist("Meatloaf")
artist_repository.save(artist_1)
artist_2 = Artist("Madonna")
artist_repository.save(artist_2)
album_1 = Album("Bat Out of Hell", "rock", artist_1)
album_repository.save(album_1)
album_2 = Album("Dead Ringer", "rock", artist_1)
album_repository.save(album_2)
album_3 = Album("True Blue", "pop", artist_2)
album_repository.save(album_3)
artist_1.name = "Queen"
artist_repository.update(artist_1)
album_1.title = "Greatest Hits"
album_repository.update(album_1)
pdb.set_trace()
|
[
"ajthomas145@gmail.com"
] |
ajthomas145@gmail.com
|
9792053cdb68495e95c61ba2c6b1bbe745ac5578
|
acdc2595746bc79787fae9a53320963355602498
|
/minimize_options/matyas_function_options.py
|
7c0218fbecb4cfc43f1203246f4c19697625fb93
|
[] |
no_license
|
pansershrek/Diplom
|
05f7d6b8d47dd5cc3aa1ff132b77dba7f26dc964
|
d21cddd72ca2b405c7c85e9efcc21bfbaea4ec50
|
refs/heads/master
| 2023-07-22T15:05:35.318511
| 2021-06-11T01:29:50
| 2021-06-11T01:29:50
| 299,112,693
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,432
|
py
|
import tensorflow as tf
from utils.utils import convert_variables
"""
Minimize options
"""
matyas_function_options = [
{
"x": convert_variables([10, 10]),
"opt": tf.keras.optimizers.Adam(learning_rate=0.7),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([10, 10]),
"opt": tf.keras.optimizers.Adam(learning_rate=0.5),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([10, 10]),
"opt": tf.keras.optimizers.Adam(learning_rate=0.3),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([10, 10]),
"opt": tf.keras.optimizers.Adam(learning_rate=0.1),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([-3, -3]),
"opt": tf.keras.optimizers.SGD(learning_rate=0.7),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([-3, -3]),
"opt": tf.keras.optimizers.SGD(learning_rate=0.5),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([-3, -3]),
"opt": tf.keras.optimizers.SGD(learning_rate=0.3),
"eps": 0.0001,
"max_steps": 100,
},
{
"x": convert_variables([-3, -3]),
"opt": tf.keras.optimizers.SGD(learning_rate=0.1),
"eps": 0.0001,
"max_steps": 100,
},
]
|
[
"g.skiba@corp.mail.ru"
] |
g.skiba@corp.mail.ru
|
482b4d53a3c7ea3bafc52b262c2e05d8fb2d3acf
|
21d0e2660780a96fdd2580a73e814119200eb139
|
/get_wenku_img/wenku.py
|
05d131534ff0294d98952986700cd42f878d638e
|
[] |
no_license
|
kifast/spider
|
0aa7d3a6abfb5785902b3b9ffe0817482a45b3cf
|
195e2444c335567e8bfc220964d05f746c2b17b5
|
refs/heads/master
| 2023-01-21T08:45:59.263208
| 2020-12-03T11:42:05
| 2020-12-03T11:42:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,236
|
py
|
# 多线程爬取 轻小说文库的插图
# -*- coding:utf-8 -*-
import requests
import re
import os
import io
import sys
import threading
from lxml import etree
from queue import Queue
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
}
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')
def get_input():
return input("输入插图链接:\n>>>")
class Wenku(threading.Thread):
def __init__(self, name, queue, path_name):
threading.Thread.__init__(self)
self.name = name
self.queue = queue
self.path_name = path_name
def run(self):
print(self.name + " start……")
while not self.queue.empty():
value = self.queue.get()
save_img(value, path_name=self.path_name)
print(self.name + ": " + value + " download")
print(self.name + " end……")
def parse_page(url: str):
response = requests.get(url, headers=headers)
response.encoding = response.apparent_encoding
html = etree.HTML(response.text)
title = html.xpath("//div[@id='title']/text()")[0]
image_links = html.xpath('//div[@id="content"]//a/@href')
return image_links, title
def save_img(link, path_name):
re_obj = re.match(r'.*?/pictures/\d+/\d+/\d+/(.*?jpg)', link)
name = re_obj.group(1)
r = requests.get(link, headers=headers)
base_path = r'D://book//images'
# 有中文名的话会出现乱码,所以需要重新解码编码
file_path = os.path.join(base_path, path_name)
file_path = base_path + '//' + path_name
if not os.path.exists(file_path):
os.mkdir(file_path)
file_path = os.path.join(file_path, name)
with open(file_path, "wb+") as f:
f.write(r.content)
def main():
queue = Queue()
url = "https://www.wenku8.net/novel/1/1009/38898.htm"
urls, title = parse_page(url)
for i in urls:
queue.put(i)
threads = []
thread_list = ['thread-1', 'thread-2', 'thread-3']
for i in thread_list:
thread = Wenku(i, queue, title)
thread.start()
threads.append(thread)
for t in threads:
t.join()
if __name__ == '__main__':
main()
|
[
"jinl1874@foxmail.com"
] |
jinl1874@foxmail.com
|
55bdb4c228a78bde95ee20f0ef7443da88fed7d1
|
7685231de16d819e51fd5976e135b98e56f1d729
|
/SessionState.py
|
037f6d7f4dc5b63d1d21c5b83b8f65899f764a88
|
[] |
no_license
|
manika-lamba/Topic-Modelling-Open-Source-Tool
|
f8a8be4f2d00441b43ccdb8f0f4e3c6b8c300a8b
|
47f443e7979b18e66507422bb5bcaba56dcd96c3
|
refs/heads/master
| 2023-01-21T00:45:20.825226
| 2020-11-10T16:59:42
| 2020-11-10T16:59:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,429
|
py
|
import streamlit.ReportThread as ReportThread
from streamlit.server.Server import Server
class SessionState(object):
def __init__(self, **kwargs):
for key, val in kwargs.items():
setattr(self, key, val)
def get(**kwargs):
# Hack to get the session object from Streamlit.
ctx = ReportThread.get_report_ctx()
this_session = None
current_server = Server.get_current()
if hasattr(current_server, '_session_infos'):
# Streamlit < 0.56
session_infos = Server.get_current()._session_infos.values()
else:
session_infos = Server.get_current()._session_info_by_id.values()
for session_info in session_infos:
s = session_info.session
if (
# Streamlit < 0.54.0
(hasattr(s, '_main_dg') and s._main_dg == ctx.main_dg)
or
# Streamlit >= 0.54.0
(not hasattr(s, '_main_dg') and s.enqueue == ctx.enqueue)
):
this_session = s
if this_session is None:
raise RuntimeError(
"Oh noes. Couldn't get your Streamlit Session object"
'Are you doing something fancy with threads?')
# Got the session object! Now let's attach some state into it.
if not hasattr(this_session, '_custom_session_state'):
this_session._custom_session_state = SessionState(**kwargs)
return this_session._custom_session_state
|
[
"bamigbadeopeyemi@gmail.com"
] |
bamigbadeopeyemi@gmail.com
|
95e2a54236f8e4004b2184729defd8d7cbed2b96
|
0741d7325a656244205f1ad1037a0ecf8e40917c
|
/xpyvideos/youpy.py
|
bc7c425fee042b970650f94de78f19922e08d318
|
[] |
no_license
|
prodigeni/xpyvideos
|
8d9ec85c12a43f609c90a59351c4623d68b2fa71
|
682335eb85a385273bf64f26ee47aa16fcedd220
|
refs/heads/master
| 2020-12-25T04:38:25.374958
| 2014-07-05T00:34:07
| 2014-07-05T00:34:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,831
|
py
|
#!/usr/bin/env python2
# Imports
from __future__ import print_function
import subprocess
import re
from sys import exit
from os import remove
from os.path import abspath
from os.path import join as path_join
from .packages.pytube.api import YouTube
from .packages.pytube.utils import print_status
from .packages.pytube.exceptions import MultipleObjectsReturned, YouTubeError
# Author and licensing
__Author__ = "bmcg"
__Email__ = "bmcg0890@gmail.com"
__License__ = """
Copyright (C) 2013-2015 bmcg <bmcg0890@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
def menu(lst):
"""
Prints out available video types
in a menu form
lst : list of strings to be printed out as a menu
"""
# Assign an empty list for mapping user input to list item
item_count = 1
# Print out items in original list to screen
for i in xrange(len(lst)):
print('{0:>2}) {1:<30}'.format(item_count, lst[i]))
# After each item in the list is printed, append it with the item_count number to new_lst
lst[i] = lst[i].replace(lst[i], ''.join((str(item_count), lst[i])))
item_count += 1
# Loop until the user's input corresponds to an item in new_lst
while True:
try:
user_in = raw_input('\n> ')
# Loop the items in new_lst and check the beginning for matching user input to item in new_lst
for i in xrange(len(lst)):
if user_in in lst[i][:len(user_in)]:
return lst[int(user_in) - 1][len(user_in):]
# If the user's input doesn't correspond to an item in new_lst print an invalid selection message
else:
print('\nInvalid selection. Try again.\n')
# Break and quit program if keyboard interrupt signal is given
except KeyboardInterrupt:
print('\n\nInterrrupt signal given.\nQuitting...\n\n')
exit(1)
def download_youtube(args, url, f_name):
"""
Downloads youtube video
args : parsed command line arguments
url : url of video grabbed from main() for loop
"""
# Call the YoutTube Function
youtube = YouTube()
# Set the video url
try:
youtube.url = url
# Catch invalid YouTube URLs
except YouTubeError:
print("\n\nError: Failed on ('{}').\nCheck for valid YouTube URL.\n\n".format(url))
exit(2)
# Create menu of video format/resolution options
video_option_selection = menu(map(str, youtube.videos))
# Extract video types into a list of a single string
video_format = re.findall(r'\(.(\w{3,4})\)', video_option_selection)[0]
video_resolution = re.findall(r'-\s*(\w{3,4})', video_option_selection)[0]
# Set filename if -f/--filename option is given
if f_name is not None:
youtube.filename = f_name
# Set the video format
try:
set_video_format_res = youtube.get(video_format, video_resolution)
# Catch multiple videos returned error
except MultipleObjectsReturned:
print('\n\nError: More than one video returned.\n\n')
exit(1)
# Download video
set_video_format_res.download(args['dir'],on_progress=print_status, on_finish=video_to_mp3 if args['c'] else None)
# Delete original video file if -do/--delete and -c/--convert is given
if args['c'] and args['do']:
# Call remove_original
remove_original(youtube.filename, args['dir'], video_format)
def check_for_mp3_codec(filename):
"""
Uses a subprocess call to ffprobe
to get audio codec output
on flv videos.
filename : filename of video to check codecs
"""
# Set subprocess to be called
ffprobe_cmd = 'ffprobe -show_streams'.split()
# Append filename to subprocess cmd
ffprobe_cmd.append(filename)
# Call and return the output of the subprocess
return subprocess.check_output(ffprobe_cmd)
def video_to_mp3(filename):
"""
Uses a subprocess call to FFMPEG
for converting the video file to mp3 format
filename : filename of video to be converted
"""
# Split beginning of ffmpeg_cmd for subprocess calling and append filename
ffmpeg_cmd = 'ffmpeg -i'.split()
ffmpeg_cmd.append(filename)
# Checking for mp4 format
if 'mp4' in filename:
# Replace original extension with mp3
ffmpeg_cmd.append(filename.replace('.mp4', '.mp3'))
# Check for flv format
elif 'flv' in filename:
# Check if flv video already has an mp3 audio codec
try:
# Use a regular expression to search output of ffprobe for mp3 codec
if 'mp3' in re.findall(r'(codec_name=mp3)', check_for_mp3_codec(filename))[0]:
# Extend ffmpeg list with conversion instructions
ffmpeg_cmd.extend('-acodec copy -ac 2 -ab 128k -vn -n'.split())
# Append output filename with .mp3 extension
ffmpeg_cmd.append(filename.replace('.flv', '.mp3'))
# If mp3 isn't found in ffprobe's output use regular conversion options
except IndexError:
ffmpeg_cmd.append(filename.replace('.flv', '.mp3'))
# Check for 3gp format
elif '3gp' in filename:
ffmpeg_cmd.append(filename.replace('.3gp', '.mp3'))
else:
# Otherwise should be .webm format
ffmpeg_cmd.append(filename.replace('.webm', '.mp3'))
# Call ffmpeg subprocess
try:
subprocess.call(ffmpeg_cmd)
# Catch non-existant file and if ffmpeg is not installed
except OSError:
print('\n\nError: Check whether video file exists or that ffmpeg is installed.\n\n')
exit(1)
def remove_original(filename, arg_dir, form):
"""
Removes original video file if both -c/--convert
and -do/--delete options are given.
filename : filename of video to be deleted
arg_dir : directory containing video file
form : format extension of video file
"""
# Join the directory and filename with extension
pathname = abspath(path_join(arg_dir, '{0}.{1}'.format(filename, form)))
# Remove original video
remove(pathname)
|
[
"darthoring@gmail.com"
] |
darthoring@gmail.com
|
5609f638481b61ed4a5b8c081b48fdbea9637e7c
|
1316ec3dadf5614eec01391352a01690f86fceba
|
/mha.py
|
f58b3f1912160753be934a3671236ca384840fcd
|
[] |
no_license
|
ljarabek/medslike
|
76bb21a5bd79230ee711f55befbc0e4d72524db5
|
e90afdf52c233b94832db559ef30b9dd4587fb7a
|
refs/heads/master
| 2021-01-24T09:46:15.584554
| 2018-04-30T16:53:34
| 2018-04-30T16:53:34
| 123,027,274
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,166
|
py
|
"""
This class reads and writes mha files (images or vector fields)
Author: Paolo Zaffino (p.zaffino@unicz.it)
Rev 19
NOT TESTED ON PYTHON 3
"""
import numpy as np
class new():
"""
PUBLIC PARAMETERS:
data=3D/4D matrix
size=3D/4D matrix size
spacing=voxel size
offset=spatial offset of data data
data_type='short', 'float' or 'uchar'
direction_cosines=direction cosines of the raw image/vf
CONSTRUCTOR OVERLOADING:
img=mha.new() # All the public parameters will be set to None
img=mha.new(input_file='img.mha')
img=mha.new(data=matrix, size=[512, 512, 80], spacing=[0.9, 0.9, 5], offset=[-240, -240, -160], data_type='short', direction_cosines=[1, 0, 0, 0, 1, 0, 0, 0, 1])
PUBLIC METHODS:
img.read_mha('file_name.mha')
img.write_mha('file_name.mha')
"""
data=None
size=None
spacing=None
offset=None
data_type=None
direction_cosines=None
######################## CONSTRUCTOR - START - #########################
def __init__ (self, input_file=None, data=None, size=None, spacing=None, offset=None, data_type=None, direction_cosines=None):
if input_file!=None and data==None and size==None and spacing==None and offset==None and data_type==None and direction_cosines==None:
self.read_mha(input_file)
elif input_file==None and data!=None and size!=None and spacing!=None and offset!=None and data_type!=None and direction_cosines!=None:
self.data=data
self.size=size
self.spacing=spacing
self.offset=offset
self.data_type=data_type
self.direction_cosines=direction_cosines
elif input_file==None and data==None and size==None and spacing==None and offset==None and data_type==None and direction_cosines==None:
pass
######################## CONSTRUCTOR - END - ###########################
######################## READ_MHA - START - ############################
def read_mha(self, fn):
"""
This method reads a mha file and assigns the data to the object parameters
INPUT PARAMETER:
fn=file name
"""
if fn.endswith('.mha'): ## Check if the file extension is ".mha"
f = open(fn,'rb')
data='img' ## On default the matrix is considered to be an image
## Read mha header
for r in range(20):
row=f.readline()
if row.startswith('TransformMatrix ='):
row=row.split('=')[1].strip()
self.direction_cosines=self._cast2int(map(float, row.split()))
elif row.startswith('Offset ='):
row=row.split('=')[1].strip()
self.offset=self._cast2int(map(float, row.split()))
elif row.startswith('ElementSpacing ='):
row=row.split('=')[1].strip()
self.spacing=self._cast2int(map(float, row.split()))
elif row.startswith('DimSize ='):
row=row.split('=')[1].strip()
self.size=map(int, row.split())
elif row.startswith('ElementNumberOfChannels = 3'):
data='vf' ## The matrix is a vf
self.size.append(3)
elif row.startswith('ElementType ='):
data_type=row.split('=')[1].strip()
elif row.startswith('ElementDataFile ='):
break
## Read raw data
self.data=''.join(f.readlines())
f.close()
## Raw data from string to array
if data_type == 'MET_SHORT':
self.data=np.fromstring(self.data, dtype=np.int16)
self.data_type = 'short'
elif data_type == 'MET_FLOAT':
self.data=np.fromstring(self.data, dtype=np.float32)
self.data_type = 'float'
elif data_type == 'MET_UCHAR':
self.data=np.fromstring(self.data, dtype=np.uint8)
self.data_type = 'uchar'
## Reshape array
if data == 'img':
self.data=self.data.reshape(self.size[2],self.size[1],self.size[0]).T
elif data == 'vf':
self.data=self.data.reshape(self.size[2],self.size[1],self.size[0],3)
self.data=self._shiftdim(self.data, 3).T
elif not fn.endswith('.mha'): ## Extension file is not ".mha". It returns all null values
raise NameError('The input file is not a mha file!')
######################### READ_MHA - END - #############################
######################## WRITE_MHA - START - ###########################
def write_mha (self,fn):
"""
This method writes the object parameters in a mha file
INPUT PARAMETER:
fn=file name
"""
if fn.endswith('.mha'): ## Check if the file extension is ".mha"
## Order the matrix in the proper way
self.data = np.array(self.data, order = "F")
## Check if the input matrix is an image or a vf
if self.data.ndim == 3:
data='img'
elif self.data.ndim == 4:
data='vf'
f=open(fn, 'wb')
## Write mha header
f.write('ObjectType = Image\n')
f.write('NDims = 3\n')
f.write('BinaryData = True\n')
f.write('BinaryDataByteOrderMSB = False\n')
f.write('CompressedData = False\n')
f.write('TransformMatrix = '+str(self.direction_cosines).strip('()[]').replace(',','')+'\n')
f.write('Offset = '+str(self.offset).strip('()[]').replace(',','')+'\n')
f.write('CenterOfRotation = 0 0 0\n')
f.write('AnatomicalOrientation = RAI\n')
f.write('ElementSpacing = '+str(self.spacing).strip('()[]').replace(',','')+'\n')
f.write('DimSize = '+str(self.size).strip('()[]').replace(',','')+'\n')
if data == 'vf':
f.write('ElementNumberOfChannels = 3\n')
self.data=self._shiftdim(self.data, 3) ## Shift dimensions if the input matrix is a vf
if self.data_type == 'short':
f.write('ElementType = MET_SHORT\n')
elif self.data_type == 'float':
f.write('ElementType = MET_FLOAT\n')
elif self.data_type == 'uchar':
f.write('ElementType = MET_UCHAR\n')
f.write('ElementDataFile = LOCAL\n')
## Write matrix
f.write(self.data)
f.close()
elif not fn.endswith('.mha'): ## File extension is not ".mha"
raise NameError('The input file name is not a mha file!')
######################## WRITE_MHA - END - #############################
############ UTILITY FUNCTIONS, NOT FOR PUBLIC USE - START - ###########
def _cast2int (self, l):
l_new=[]
for i in l:
if i.is_integer(): l_new.append(int(i))
else: l_new.append(i)
return l_new
_shiftdim = lambda self, x, n: x.transpose(np.roll(range(x.ndim), -n))
############# UTILITY FUNCTIONS, NOT FOR PUBLIC USE - END - ############
|
[
"leon.jarabek@rocketmail.com"
] |
leon.jarabek@rocketmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.