text stringlengths 8 6.05M |
|---|
def prime_factors(num):
    """Return the prime factorization of num as a list (with multiplicity).

    Trial division: repeatedly divide out the smallest divisor found.
    """
    remaining = int(num)
    trial = 2
    factors = []
    while remaining > 1:
        if remaining % trial == 0:
            factors.append(trial)
            remaining //= trial
        else:
            trial += 1
    return factors
def euler(num):
    """Euler's totient: count the integers in [1, num) coprime to num.

    The original tested i in [0, num-1) for shared prime factors with
    num; counting 0 (which has no prime factors) exactly compensated
    for the missing num-1 (always coprime), so results were correct
    only by accident.  This version iterates the intended range and
    uses gcd, which is also far faster than repeated factorization.
    """
    from math import gcd  # local import keeps the function self-contained
    n = int(num)
    return sum(1 for i in range(1, n) if gcd(i, n) == 1)
def main():
    # Demo driver: print Euler's totient for a few sample values.
    # NOTE(review): euler(27343) is the slowest call here; with the
    # original trial-division helper it can take noticeable time.
    a = euler(1000)
    print(a)
    b = euler(15)
    print(b)
    c = euler(27343)
    print(c)

main()
|
import sys
sys.path.append('Config/')
import config
import cls_DataBase
from cls_ParserDataSource import ParserDataSource
import cls_Keeper
#
class DataFactoryErp():
    """Factory wiring together the ERP data source, parser and keeper.

    All products are configured from the shared ``config`` module.
    """

    def __init__(self):
        # Keep a handle to the global configuration module.
        self.conf = config

    def create_data_source(self):
        # Database access object built from the 'database.erp' config section.
        return cls_DataBase.DatabaseErp(self.conf.config['database']['erp'])

    def create_parser(self):
        # Parser for the ERP data source.
        return ParserDataSource(self.conf)

    def create_keeper(self):
        # Storage/keeper component (presumably Google Sheets -- TODO confirm).
        return cls_Keeper.GSKeeper(self.conf)
|
import pylab as plt
import numpy as np
# def show_scatter(times, epochs, data):
# # scatter
# plt.figure(figsize=(8, 5))
# # 2-dimensions #
# # plt.scatter(epochs, data, 'o')
#
# # 3-dimensions #
# c = np.random.randint(0, 10, 100)
# plt.scatter(epochs, data, c=c, marker='o')
# plt.colorbar()
#
# plt.grid(True)
# plt.xlabel('epochs')
# plt.ylabel('data')
# plt.title('Scatter Plot')
# plt.show()
def pointgen(x, y):
    """Generate 16 random points scattered near (x, y).

    Uniform noise with fixed spreads (1.1 along x, 1.7 along y) is
    added to the anchor point.  Prints and returns the (xs, ys) arrays.
    """
    spread_x = 1.1
    spread_y = 1.7
    xs = x + spread_x * np.random.rand(16)
    ys = y + spread_y * np.random.rand(16)
    print(xs, ys)
    return xs, ys
if __name__ == '__main__':
    # Demo: scatter two random point clouds plus a convex blend of them.
    epochs = np.array(range(100))
    data = np.random.rand(100) #.reshape((100, 2))
    # c1 = [[0.9, 1.1, 1.2, 1.2],[1.4, 1.2,1.0, 1.1]]
    # c2 = [[2,3.2,3.3,2], [3.3, 2.2, 2, 3.1]]
    fig, ax = plt.subplots(figsize=(8, 5))
    # Two clusters centred at (2, 4.5) and (4, 1.5).
    x1, y1 = pointgen(2,4.5)
    x2, y2 = pointgen(4,1.5)
    # Blend: 75% class 1 + 25% class 2, pointwise.
    x3 = 0.75*x1 + 0.25*x2
    y3 = 0.75*y1 + 0.25*y2
    plt.scatter(x1, y1, c="red", marker='o', label="class 1")
    plt.scatter(x2, y2, c="green", marker='>', label="class 2")
    # Only the first six blended points are drawn.
    plt.scatter(x3[:6], y3[:6], c="blue", marker='o', label="blended class")
    # plt.ylim(0.5,3.5)
    # plt.xlim(0.5,3.5)
    plt.legend()
    # Hide tick labels for a cleaner illustration.
    plt.xticks([])
    plt.yticks([])
    plt.xlabel('x')
    plt.ylabel('y')
    plt.show()
    # Save after show: the figure object is still valid for savefig.
    fig.savefig('./scalars.eps', dpi=600, format='eps')
# # File holds the class to solve various problems classically (for example: using brute force methods)
import pandas as pd
import numpy as np
from itertools import combinations
from utils.data import parse_profit_dataframe
def binary_profit_optimizer(profit: list[float], cost: list[float], budget: float) -> tuple[list[int], float, float]:
    """Optimizes the profit problem classically using a binary formulation (AKA items can only be used once)

    Keyword arguments:
    profit -- list of per-item profits
    cost -- list of per-item costs (same length as profit)
    budget -- Float indicating your total budget

    Returns a tuple of
    - list of selected item indices (empty when nothing fits),
    - cost of the best selection found,
    - profit of the best selection found.

    Brute force over all subsets: O(2^n), intended for small instances.
    BUG FIX: the original returned the raw combinations() tuple, not the
    annotated list; it is now converted.  numpy is no longer needed.
    """
    number_of_products = len(profit)
    best_profit = 0.0
    best_cost = 0.0
    best_selection: list[int] = []
    for selection_size in range(1, number_of_products + 1):
        for selection in combinations(range(number_of_products), selection_size):
            running_cost = sum(cost[i] for i in selection)
            running_profit = sum(profit[i] for i in selection)
            if running_cost <= budget and running_profit > best_profit:
                best_profit = running_profit
                best_cost = running_cost
                best_selection = list(selection)
    return best_selection, best_cost, best_profit
def discrete_profit_optimizer(profit: list[float], cost: list[float], budget:float) -> tuple[list[int], float, float]:
    """Optimizes the profit problem classically using a discrete formulation (AKA: items can be used more than once)

    Keyword arguments:
    profit - list of floats
    cost - list of floats
    budget -- Float indicating your total budget

    Currently returns only the maximum achievable profit (a float);
    the chosen items and their total cost are not yet extracted.
    TODO: We need to extract the solution and cost of this method
    TODO: We need to add a bound to number of variable chosen
    """
    def _unbounded_knapsack(capacity, values, weights):
        # Classic unbounded-knapsack DP: best[c] is the maximum value
        # achievable with capacity c when every item may repeat.
        # (See https://www.geeksforgeeks.org/unbounded-knapsack-repetition-items-allowed/)
        best = [0] * (capacity + 1)
        for c in range(capacity + 1):
            for value, weight in zip(values, weights):
                if weight <= c and best[c - weight] + value > best[c]:
                    best[c] = best[c - weight] + value
        return best[capacity]

    # Scale floats to integers so they can serve as DP indices.
    scale = 100
    scaled_cost = [int(c * scale) for c in cost]
    scaled_profit = [int(p * scale) for p in profit]
    scaled_budget = int(budget * scale)
    return _unbounded_knapsack(scaled_budget, scaled_profit, scaled_cost) / scale
def binary_supplier_optimizer(inventory: list[int or str], supplier_inventory:list[set[int or str]]):
    """Greedy set cover: choose suppliers until the whole inventory is covered.

    Returns the chosen supplier inventories in selection order, or None
    when the suppliers together cannot stock every requested item.
    Greedy rule: repeatedly take the supplier covering the most
    still-uncovered items (the standard ln(n)-approximation).
    """
    universe = set(inventory)
    available = set()
    for stock in supplier_inventory:
        available |= stock
    # Bail out early when some requested item is stocked by nobody.
    if available != universe:
        return None
    chosen = []
    covered = set()
    while covered != available:
        best = max(supplier_inventory, key=lambda stock: len(stock - covered))
        chosen.append(best)
        covered |= best
    return chosen
def discrete_profit_optimizer_brute_force(profit: list[float], cost: list[float], budget:float) -> tuple[list[int], float, float]:
    """Optimizes the profit problem classically using a discrete formulation (AKA: items can be used more than once)

    Keyword arguments:
    profit - list of floats
    cost - list of floats
    budget -- Float indicating your total budget

    Returns
    Tuple of integers indicating the solution
    Maximum cost found
    Maximum weight found

    TODO: We need to extract the solution and cost of this method
    TODO: We need to add a bound to number of variable chosen
    """
    def knapsack(c,w,m,w_capacity):
        # Brute force: expand each item r into m[r] identical "buckets",
        # then try every subset of buckets.
        Total=np.sum(m) #total buckets item_i x qty_i
        N=len(c) #total items
        def sum(i_list, p):
            # NOTE(review): shadows the builtin sum() inside knapsack().
            sum_p=0
            for item in (i_list):
                sum_p+=p[item]
            return(int(sum_p))
        lc=np.zeros(Total)
        lw=np.zeros(Total)
        lm=np.zeros(Total)
        # create the long list of single items to work on and index
        i=0
        index_l=[]
        for r in range(N):
            for s in range(m[r]):
                lc[i]=c[r]
                lw[i]=w[r]
                index_l.append(r)
                i+=1
        c_max=0
        w_max=0
        max_list=[]
        for n in range(1,Total+1): # for groups of items from 1 to N
            for i_list in combinations(np.arange(Total), n): # allcombinations of n items
                if sum(list(i_list),lw)<=w_capacity: # if the weight of the current list of items is within the weight capacity
                    if sum(list(i_list),lc)>c_max: # if the cost of the current list of items is more than the max cost found so far
                        c_max=sum(list(i_list),lc) #c_max updated the cost of the current list of items
                        w_max=sum(list(i_list),lw) #w_max upated to the weight of the current items
                        max_list=list(i_list)
                        #print(list(i_list), sum(list(i_list),c), sum(list(i_list),w))
        i=0
        # Translate winning bucket indices back into per-item counts.
        bucket=np.zeros(N)
        for i in range(Total):
            if i in (max_list):
                bucket[index_l[i]]+=1
        return(bucket, c_max, w_max)
    # Need to do some hacky-ness to convert these to integers
    multiplier = 100
    cost_int = [int(c*multiplier) for c in cost]
    profit_int = [int(p*multiplier) for p in profit]
    budget_int = int(budget*multiplier)
    # NOTE(review): 100000 copies per item makes Total = 100000 * n, so the
    # combinations() enumeration above is combinatorially infeasible for any
    # non-trivial input -- this function is effectively unusable as written.
    hack_bounds = [100000 for _ in range(len(cost_int))]
    result_int, profit_max_int, cost_max_int = knapsack(profit_int, cost_int, hack_bounds, budget_int)
    # profit_solution_int = unboundedKnapsack(budget_int, len(profit), profit_int, cost_int)
    return [r/multiplier for r in result_int], cost_max_int / multiplier, profit_max_int / multiplier
if __name__ == "__main__":
    # Demo: run each classical optimizer against the small mock data set.
    from utils.data import read_profit_optimization_data, read_inventory_optimization_data
    from config import standard_mock_data

    # Total budget for the profit optimizers (units per the mock data).
    budget = 1000

    # Example usage of the classical profit optimizers
    profit, cost = read_profit_optimization_data(standard_mock_data['small'])
    binary_solution, binary_cost, binary_profit = binary_profit_optimizer(profit=profit, cost=cost, budget=budget)
    print('\n\nFound binary (crude) profit optimization solution', binary_solution, binary_cost, binary_profit)

    # TODO: fix the binary_profit_optimizer to yield solutions + costs
    discrete_profit = discrete_profit_optimizer(profit=profit, cost=cost, budget=budget)
    print('\n\nFound discrete (crude) profit optimization solution', discrete_profit)

    # Example usage of the classical supplier optimizer
    inventory, supplier_inventory = read_inventory_optimization_data(standard_mock_data['small'])
    cover = binary_supplier_optimizer(inventory, supplier_inventory)
    print('\n\nFound cover set solution: ', cover)
class Solution:
    def lengthOfLongestSubstring(self, s):
        """Length of the longest substring of s without repeating characters.

        Sliding window: `left` marks the window start and `window`
        holds the characters currently inside it.
        """
        window = set()
        left = 0
        longest = 0
        for right, ch in enumerate(s):
            if ch not in window:
                window.add(ch)
            else:
                # Shrink from the left until the earlier occurrence of
                # ch has been dropped; ch itself stays in the window.
                while s[left] != ch:
                    window.discard(s[left])
                    left += 1
                left += 1
            longest = max(longest, right + 1 - left)
        return longest
# Quick manual check of the sliding-window implementation.
s = Solution()
f = s.lengthOfLongestSubstring("abb")
print(f)  # expected: 2 ("ab")
"""
while 循环:根据缩进为一个代码块
基本语法
while 条件(判断、计数器、是否到达目标次数):
条件满足执行的语句
...
处理条件(计数器+1)
"""
def while_test(test):
    """Loop-control demo: print a message up to ten times.

    Does nothing when `test` is falsy.  On the 7th print an extra
    message fires and `continue` skips the rest of that iteration
    (a no-op here); on the 9th print an extra message fires and
    `break` stops the loop, so the 10th print never happens.
    """
    if not test:
        return
    for count in range(1, 11):
        print("i love you !")
        if count == 7:
            print("说了 7 遍了")
            continue
        if count == 9:
            print("说了9遍可,最后一遍不说了")
            break

while_test(False)
|
# -*- coding: utf-8 -*-
# 中國剩餘定理
# 求基本同餘式組的通解
from .NTLExceptions import DefinitionError
from .NTLUtilities import jsrange
from .NTLValidations import int_check, list_check, tuple_check
__all__ = ['CHNRemainderTheorem', 'solve', 'iterCalc', 'updateState']
nickname = 'crt'
'''Usage sample:
remainder = crt((3, [1,-1]), (5, [1,-1]), (7, [2,-2]))
print('x ≡ ±1 (mod 3)')
print('x ≡ ±1 (mod 5)')
print('x ≡ ±2 (mod 7)')
print('The solutions of the above equation set is\n\tx ≡', end=' ')
for rst in remainder:
print(rst, end=' ')
print('(mod 105)')
'''
def CHNRemainderTheorem(*args):
    """Chinese Remainder Theorem for congruence systems with multiple residues.

    Each argument is a tuple ``(modulus, [residues...])``.  Every
    combination of one residue per equation is solved, and the sorted
    list of all solutions modulo the product of the moduli is returned.
    """
    rmd = []
    mod = []
    for tpl in args:
        # Validate argument shape: (int modulus, list of int residues).
        tuple_check(tpl)
        if len(tpl) != 2:
            raise DefinitionError(
                'The arguments must be tuples of modulos and corresponding solutions (in a list).')
        int_check(tpl[0]); list_check(tpl[1])
        for num in tpl[1]: int_check(num)
        mod.append(tpl[0]); rmd.append(tpl[1])
    modulo = 1
    for tmpMod1 in mod:
        modulo *= tmpMod1          # M (overall modulus) = ∏ m_i
    bList = []
    for tmpMod2 in mod:
        M = modulo // tmpMod2      # M_i = M / m_i
        t = solve(M, tmpMod2)      # t_i * M_i ≡ 1 (mod m_i)
        bList.append(t * M)        # b_i = t_i * M_i
    remainder = iterCalc(rmd, bList, modulo)   # x_j = Σ(b_i * r_i) (mod M)
    return sorted(remainder)
# Find M_i^-1 (mod m_i)
def solve(variable, modulo):
    """Find the modular inverse of `variable` modulo `modulo` by search.

    Returns the smallest x in [0, modulo) with variable * x ≡ 1
    (mod modulo), or None when no inverse exists (not coprime).

    BUG FIX / cleanup: the original built a polynomial string and
    ran it through eval() for every candidate x, which was both slow
    and unsafe; the arithmetic is now done directly.
    """
    for x in range(modulo):
        if (variable * x - 1) % modulo == 0:
            return x
    return None
# Enumerate every combination of residues (one per level) in rmd and
# compute the corresponding CRT solution for each.
def iterCalc(ognList, coeList, modulo):
    """For each choice of one residue per level of ognList, accumulate
    Σ residue_i * coeList[i] (mod modulo); return the list of results.
    The pointer state is advanced by updateState() like an odometer."""
    ptrList = []   # current index into each level's residue list
    lvlList = []   # maximum index of each level
    for tmpList in ognList:
        ptrList.append(len(tmpList)-1)
        lvlList.append(len(tmpList)-1)
    flag = 1
    rstList = []
    while flag:
        ptrNum = 0
        rstNum = 0
        for ptr in ptrList:
            # Accumulate this combination's weighted sum.
            rstNum += ognList[ptrNum][ptr] * coeList[ptrNum]
            ptrNum += 1
        rstList.append(rstNum % modulo)
        # Advance the pointers; flag becomes 0 once all combinations are done.
        (ptrList, flag) = updateState(ptrList, lvlList)
    return rstList
# Advance ptrList's stored indices; report whether all combinations are done.
def updateState(ptrList, lvlList):
    """Advance the multi-digit pointer state like a countdown odometer.

    Each entry of ptrList counts down from its maximum in lvlList;
    when a digit hits zero it is reset and the carry moves to the next
    digit.  Returns (ptrList, 1) while combinations remain, or
    (ptrList, 0) once every combination has been visited (the last
    digit is left untouched in that case).
    """
    last = len(lvlList) - 1
    for pos in range(len(ptrList)):
        if ptrList[pos] > 0:
            # This digit can still move: decrement it and stop.
            ptrList[pos] -= 1
            return ptrList, 1
        if pos == last:
            # Every position is exhausted: signal the caller to stop.
            return ptrList, 0
        # Digit exhausted: reset it and carry into the next position.
        ptrList[pos] = lvlList[pos]
    return ptrList, 0
|
'''
Created on Dec 23, 2014
@author: desposito
'''
class HopsSchedule(object):
    '''
    A list of hops used for a recipe along with where they are used and what time they are used.
    '''

    def __init__(self):
        '''
        Creates a new empty list object for the schedule.
        '''
        self.clearAllHops()

    def addHops(self, hops, use, time):
        '''
        Allows addition of hops to the list.
        hops - Hops object containing name, quantity, alpha and style.
        use - When to use the hops. (First batch, boil, dry hop, etc).
        time - When the hops are added (e.g. minutes remaining in the boil).
        '''
        # BUG FIX: the original docstring was never closed, so this append
        # statement was swallowed into the string literal and never ran.
        self.entries.append(HopsScheduleEntry(hops, use, time))

    def findHopsEntryIndexByNameAndTime(self, name, time):
        '''Return the index of the entry matching name and time.

        Raises ValueError when no entry matches (for/else idiom).
        '''
        for hops_entry in self.entries:
            if hops_entry.hops.name == name and hops_entry.time == time:
                return self.entries.index(hops_entry)
        else:
            raise ValueError("No hops with that name and time combination found.")

    def removeHopsAtIndex(self, index):
        '''Delete the schedule entry at the given index.'''
        del self.entries[index]

    def removeHopsByNameAndTime(self, name, time):
        '''Remove the entry matching name and time (ValueError if absent).'''
        index = self.findHopsEntryIndexByNameAndTime(name, time)
        self.removeHopsAtIndex(index)

    def clearAllHops(self):
        '''Reset the schedule to an empty list.'''
        self.entries = list()


class HopsScheduleEntry(object):
    '''
    A single schedule line: the hops plus how (use) and when (time) they are added.
    '''

    def __init__(self, hops, use, time):
        '''
        Constructor
        '''
        self.hops = hops
        self.use = use
        self.time = time
#Project Euler Problem 12
#What is the value of the first triangle number to have over five hundred divisors?
import math

divisor = 500


def _num_divisors(n):
    # Count divisors by pairing d with n // d up to sqrt(n): O(sqrt(n))
    # instead of the original O(n) scan of every candidate divisor.
    count = 0
    for k in range(1, math.isqrt(n) + 1):
        if n % k == 0:
            count += 1 if k * k == n else 2
    return count


A = 0   # current triangle number
j = 1   # next natural number to add
i = 0   # divisor count of A
while i < divisor:
    A += j
    j += 1
    #print('New Number')
    i = _num_divisors(A)
    print(i)  # divisor count of this triangle number (as in the original)
    if i >= divisor:
        statement = 'the first triangle number with over ' + repr(divisor) + ' divisors is: ' + repr(A)
        print(statement)
        break
|
# Generated by Django 3.0.8 on 2020-11-26 08:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a status field (Resolved/Doubt, defaulting to Doubt) to lqueries."""

    dependencies = [
        ('learners', '0003_auto_20201029_2101'),
    ]

    operations = [
        migrations.AddField(
            model_name='lqueries',
            name='status',
            field=models.CharField(choices=[('Resolved', 'Resolved'), ('Doubt', 'Doubt')], default='Doubt', max_length=10),
        ),
    ]
|
from osv import fields, osv
class account_invoice_electronic_state(osv.Model):
    """Lookup table of electronic-invoice states (old OpenERP osv API)."""
    _name = "account.invoice.electronic.state"
    _columns = {
        # Electronic state label.
        'state_electronic' : fields.char('Estado electronico'),
        # Whether this state is currently active.
        'active' : fields.boolean('Activo'),
    }

# Old-style OpenERP model registration call.
account_invoice_electronic_state()


class account_invoice(osv.Model):
    """Extend account.invoice with a birth date and an electronic-state link."""
    _name = "account.invoice"
    _inherit = "account.invoice"
    _columns = {
        # Birth date ("Fecha de Nacimiento").
        'fecha_nac' : fields.date('Fecha de Nacimiento'),
        # Link to the electronic state table above.
        'state_electronic': fields.many2one('account.invoice.electronic.state', 'Tabla Relacionada'),
    }

account_invoice()
|
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage
class PrivateMediaStorage(S3Boto3Storage):
    """S3 storage backend for private uploads (private ACL, no overwrite)."""

    def __init__(self, *args, **kwargs):
        # Route all files to the dedicated private bucket from settings.
        kwargs['bucket_name'] = settings.AWS_PRIVATE_BUCKET
        super(PrivateMediaStorage, self).__init__(*args, **kwargs)

    # Store at the bucket root.
    location = ''
    # Objects are private by default.
    default_acl = 'private'
    # Never overwrite an existing file; a distinct name is generated instead.
    file_overwrite = False
    # Serve via the S3 endpoint rather than a CDN/custom domain.
    custom_domain = False
from rest_framework import serializers
from ..models import Cocktail
from django.contrib.auth import get_user_model
User = get_user_model()
class CocktailSerializer(serializers.ModelSerializer):
    """Serializes Cocktail instances; author is exposed as a read-only id."""
    author = serializers.ReadOnlyField(source='author.id')

    class Meta:
        model = Cocktail
        fields = ["id", "title", "fortress", "description", "recipe", "picture", "rating", "author", "created_at"]
        read_only_fields = ["created_at"]
class UserSerializer(serializers.ModelSerializer):
    """Minimal user representation: id, email and username only."""
    class Meta:
        model = User
        fields = ["id", "email", "username"]
|
import pickle
import socket
def digit_sum(number):
    """Return the sum of the decimal digits of `number` (0 for non-positive input)."""
    total = 0
    while number > 0:
        number, digit = divmod(number, 10)
        total += digit
    return total
# UDP demo server: receive one datagram on localhost:9999, add the digit
# sum of the sender's port number to the integer payload, and echo it back.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(('127.0.0.1', 9999))
data, addr = s.recvfrom(100)  # blocks until a datagram arrives
dig_s = digit_sum(addr[1]) + int(data.decode())
print(addr[1])  # the sender's ephemeral port
s.sendto(str(dig_s).encode(), addr)
|
# examples of using for loops
for num in range(0,3):
    print("Hello")

# print numbers between 18 and 22 (range stop is exclusive)
for num in range(18,23):
    print(num)

# for loop print by 2s (0, 2, ..., 20)
for num in range(0,21,2):
    print(num)

#for loop with variables
start_num = int(input("Enter a start value"))
stop_num = int(input("Enter a stop value"))
step = int(input("Enter a step value"))

# print numbers based on values entered by user
for num in range(start_num, stop_num, step):
    print(num)
|
import csv
import os
import math
import os.path
def formatUnc(unc):
    """Format an uncertainty value with three decimals (minimum width 4)."""
    return format(unc, "4.3f")
# Column/label scaffolding for the LaTeX systematics table.
# Each tableRows entry is [systematic-key, LaTeX label]; a bare ["line"]
# entry is rendered as an \hline separator by the writer loop below.
tableHeader=["","systematic"]
tableTotal=["","total uncertainty"]
tableRows=[
    ['stat', "statistical"],
    ["line"],
    #fitting
    ['fiterror', "ML-fit uncertainty"],
    ['diboson', "Diboson fraction"],
    ['dyjets', "Drell-Yan fraction"],
    ['schan', "s-channel fraction"],
    ['twchan', "tW fraction"],
    ['qcd_antiiso', "QCD shape"],
    ['qcd_yield', "QCD yield"],
    ["line"],
    #detector
    ['btag_bc', "b tagging"],
    ['btag_l', "mistagging"],
    ['jer', "JER"],
    ['jes', "JES"],
    ['met', "unclustered \\MET"],
    ['pu', "pileup"],
    ['lepton_id', "lepton ID"],
    ['lepton_iso', "lepton isolation"],
    ['lepton_trigger', "trigger efficiency"],
    ["line"],
    #add reweighting
    ['top_weight', "top \\pT reweighting"],
    ['wjets_pt_weight', "\\wjets W \\pT reweighting"],
    ['wjets_flavour_heavy', "\\wjets heavy flavor fraction"],
    ['wjets_flavour_light', "\\wjets light flavor fraction"],
    ['wjets_shape', "\\wjets shape reweighting"],
    ['bias', "unfolding bias"],
    ["line"],
    #theory
    ['generator', "generator model"],
    ['mass', "top quark mass"],
    #['tchan_scale', "$Q^{2}$ scale t-channel"],
    ['tchan_qscale_me_weight', "$Q^{2}$ scale t-channel"],
    ['ttjets_scale', "\\ttbar $Q^{2}$ scale"],
    #['ttjets_qscale_me_weight', "\\ttbar $Q^{2}$ scale"],
    ['ttjets_matching', "\\ttbar matching"],
    ['wzjets_scale', "\\wjets $Q^{2}$ scale"],
    #['wzjets_qscale_me_weight', "\\wjets $Q^{2}$ scale"],
    ['wzjets_matching', "\\wjets matching"],
    ['pdf', "PDF"],
    ["line"],
    ['mcstat', "limited MC"],
]
def readCSV(folder=os.getcwd(),match="mu_"):
    """Read every CSV in `folder` whose filename starts with `match`.

    Returns {systematic-name: first data row} keyed on the "syst" column.
    NOTE(review): Python 2 style -- csvFile.next() and binary 'rb' mode
    for csv; under Python 3 this would need next(csvFile) and newline=''.
    """
    sysDict={}
    for f in os.listdir(os.path.join(os.getcwd(),folder)):
        if f.startswith(match) and f.endswith(".csv"):
            inFile = open(os.path.join(os.getcwd(),folder,f),"rb")
            csvFile = csv.DictReader(inFile, dialect='excel', quoting=csv.QUOTE_NONNUMERIC)
            # Only the first data row of each matching file is kept.
            result = csvFile.next()
            sysDict[result["syst"]]=result
            inFile.close()
    return sysDict
def addColumn(header, sysDict):
    """Append one result column (labelled `header`) to the global table.

    For each systematic row, appends the shift "d" (x100, keeping its
    sign) from sysDict, "$<0.1$" for negligibly small shifts, or "-"
    when the systematic is absent; the quadrature sum of all shifts is
    appended to the total row.
    """
    tableHeader.append(header)
    totalSum2 = 0.0
    for row in range(len(tableRows)):
        sysName = tableRows[row][0]
        # BUG FIX: dict.has_key() is Python-2-only; `in` behaves the same
        # and works on both Python 2 and 3.
        if sysName in sysDict:
            value = math.fabs(sysDict[sysName]["d"])
            totalSum2 += value**2
            if value*100.0 < 0.05:
                tableRows[row].append("$<0.1$")
            else:
                # Display keeps the sign of the original shift.
                tableRows[row].append("$%3.1f$" % (sysDict[sysName]["d"]*100.0))
        else:
            tableRows[row].append("-")
    tableTotal.append("$%3.1f$" % (math.sqrt(totalSum2)*100.0))
# Fill one column per measurement channel.
addColumn("t",readCSV("histos/bdt_Jun22_final_top/2bin/0.45","mu_"))
addColumn("tbar",readCSV("histos/bdt_Jun22_final_antitop/2bin/0.45","mu_"))
addColumn("t+tbar",readCSV("histos/bdt_Jun22_final/2bin/0.45","mu_"))
'''
addColumn("muon",readCSV("histos/scan/2bin/0.45","mu_"))
addColumn("electron",readCSV("histos/scan/2bin/0.45","ele_"))
addColumn("combined",readCSV("histos/scan/2bin/0.45","combined_"))
'''
# Emit the filled table as LaTeX.
outFile = open("table.tex","w")
outFile.write("\\begin{tabular}[htc]{|r || r | r | r |}\n")
outFile.write("\\hline \n")
outFile.write(
'''
& \\parbox[t]{2.0cm}{\\centering$\\delta A_{l}^{\\mu}(t)\\cdot 10^{2}$}
& \\parbox[t]{2.0cm}{\\centering$\\delta A_{l}^{\\mu}(\\bar{t})\\cdot 10^{2}$}
& \\parbox[t]{3.0cm}{\\centering$\\delta A_{l}^{\\mu}(t+\\bar{t})\\cdot 10^{2}$}
\\\\\n
'''
)
outFile.write("\\hline \n")
outFile.write("\\hline \n")
# Body rows: "line" markers become \hline; others "label & v1 & v2 & v3 \\".
for row in range(len(tableRows)):
    if tableRows[row][0]=="line":
        outFile.write("\\hline\n")
        continue
    formattedRow=tableRows[row][1]
    for i in range(2,len(tableRows[row])):
        formattedRow+=" & "+tableRows[row][i]+ " \\hspace{0.1cm} " #.replace(".","$&$")
    formattedRow+=" \\\\ "#\\hline"
    outFile.write(formattedRow+"\n")
outFile.write("\\hline \n")
outFile.write("\\hline \n")
# Total (quadrature-sum) row.
formattedRow=tableTotal[1]
for i in range(2,len(tableTotal)):
    formattedRow+=" & "+tableTotal[i] + " \\hspace{0.1cm} "#.replace(".","$&$")
formattedRow+=" \\\\ "
outFile.write(formattedRow+"\n")
outFile.write("\\hline \n")
outFile.write("\\end{tabular}\n")
outFile.close()
|
from django.shortcuts import render,redirect
from django.contrib import messages,auth
from django.contrib.auth.models import User
# Create your views here.
def register(request):
    """Handle user sign-up: validate the form, create the user, log them in.

    GET renders the registration form; POST creates the account and
    redirects to the product listings on success.
    """
    if request.method == "POST":
        first_name = request.POST["fname"]
        last_name = request.POST["lname"]
        email = request.POST["email"]
        username = request.POST["username"]
        password = request.POST["password"]
        cpassword = request.POST["cpassword"]
        # Check that the password and its confirmation match.
        if password == cpassword:
            if User.objects.filter(email=email).exists():
                messages.error(request, "Email ID already registered")
                # BUG FIX: redirect back to this app's register view;
                # the original targeted 'productlist:register', which is
                # inconsistent with the password-mismatch path below.
                return redirect('accounts:register')
            else:
                user = User.objects.create_user(first_name=first_name, last_name=last_name, username=username, email=email, password=password)
                user.save()
                # Log the freshly created user straight in.
                auth.login(request, user)
                messages.success(request, "You are now Logged in")
                return redirect('productlist:listings')
        else:
            messages.error(request, "password does not match")
            return redirect('accounts:register')
    else:
        return render(request, "accounts/register.html")
def login(request):
    """Authenticate posted credentials and start a session; GET shows the form."""
    if request.method == "POST":
        # Default to False (falsy) when a field is missing from the POST.
        username = request.POST.get('username',False)
        password = request.POST.get("password",False)
        user = auth.authenticate(username=username,password=password)
        if user is not None:
            auth.login(request,user)
            messages.success(request,"Successfully logged in")
            return redirect("productlist:listings")
        else:
            messages.error(request,"Please Provide Valid Credentials")
            return redirect("accounts:login")
    else:
        return render(request,"accounts/login.html")
def logout(request):
    """End the current session and return to the product listings."""
    auth.logout(request)
    return redirect("productlist:listings")
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the roadsAndLibraries function below.
'''
Initially tried with union find but the algorithm is not efficient and also
there is some bug, later tried with DFS
class UF:
def __init__(self,N):
self.id = [i for i in range(N+1)]
self.sz = [0]*(N+1)
#print(self.id)
def _root(self, i):
while i != self.id[i]:
#self.id[i] = self.id[self.id[i]]
i = self.id[i]
return i
def union(self, p, q):
i = self._root(p)
j = self._root(q)
if i == j:
return
if self.sz[i] < self.sz[j]:
self.id[i] = j
self.sz[j] += self.sz[i]
else:
self.id[j] = i
self.sz[i] += self.sz[j]
def getRoots(self):
ts = set(self.id)
print("id: ",self.id)
print("ts: ",ts)
roots = []
for t in ts:
if t == 0:
continue
roots.append([t, self.id.count(t)])
return roots
def roadsAndLibraries(n, c_lib, c_road, cities):
uf = UF(n)
for c in cities:
print(c)
uf.union(c[0],c[1])
print(uf.id)
roots = uf.getRoots()
print(roots)
cost = 0
for r in roots:
tc = r[1]
if tc == 1:
cost += c_lib
continue
if tc * c_lib <= (tc -1) * c_road + c_lib:
cost += tc * c_lib
else:
cost += (tc -1) * c_road + c_lib
return cost
'''
from collections import defaultdict
class Graph:
    """Undirected graph; iterative DFS sizes components for cost pricing."""

    def __init__(self, v):
        # v: number of vertices, labelled 1..v.
        self.v = v
        self.adj = defaultdict(list)

    def addEdge(self, p,q):
        """Insert an undirected edge between p and q."""
        self.adj[p].append(q)
        self.adj[q].append(p)

    def DFS(self, visited, v):
        """Explore the component containing v iteratively; return its size."""
        size = 0
        pending = [v]
        while pending:
            node = pending.pop()
            if not visited[node]:
                visited[node] = True
                size += 1
                pending.extend(nb for nb in self.adj[node] if not visited[nb])
        return size

    def CC(self, cl, cr):
        """Total cost over all connected components.

        For a component of size s, the cheaper of building a library in
        every city (s * cl) or one library plus s-1 roads (cl + (s-1)*cr)
        is chosen; ties go to the all-libraries option.
        """
        visited = [False] * (self.v + 1)
        total = 0
        for start in range(1, self.v + 1):
            if not visited[start]:
                size = self.DFS(visited, start)
                if size == 1:
                    total += cl
                elif size * cl <= cr * (size - 1) + cl:
                    total += size * cl
                else:
                    total += cr * (size - 1) + cl
        return total
def roadsAndLibraries(n, c_lib, c_road, cities):
    """Minimum cost to give every one of n cities library access.

    cities is a list of [u, v] candidate roads; each connected
    component is priced by Graph.CC as either all-libraries or one
    library plus connecting roads.
    """
    graph = Graph(n)
    for u, v in cities:
        graph.addEdge(u, v)
    return graph.CC(c_lib, c_road)
if __name__ == '__main__':
    # HackerRank-style driver: read q queries from stdin; each query gives
    # n cities, m candidate roads, the two costs, then the road list.
    # One result per query is written to out.txt.
    fptr = open("out.txt", 'w')
    q = int(input())
    for q_itr in range(q):
        nmC_libC_road = input().split()
        n = int(nmC_libC_road[0])
        m = int(nmC_libC_road[1])
        c_lib = int(nmC_libC_road[2])
        c_road = int(nmC_libC_road[3])
        cities = []
        for _ in range(m):
            cities.append(list(map(int, input().rstrip().split())))
        result = roadsAndLibraries(n, c_lib, c_road, cities)
        fptr.write(str(result) + '\n')
    fptr.close()
# Generated by Django 2.1.4 on 2019-01-11 15:51
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the PsychologuesServicesofferts model to Competence."""

    dependencies = [
        ('clientele', '0005_clientele_services'),
        ('services', '0002_auto_20190109_1816'),
        ('psychologues', '0010_psychologues_linked_in'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='PsychologuesServicesofferts',
            new_name='Competence',
        ),
    ]
|
import sys
import csv
import numpy as np
import matplotlib.pyplot as plt
def perceptrona(w_init, X, Y):
    # PERCEPTRONA Find weights for linear discrimination problem.
    # PERCEPTRONA(w_init, X,Y) finds and returns the weights w as well as e, the number of
    # epochs it took to reach convergence to solve the linear discrimination problem described by
    # samples in X with corresponding labels in Y
    #
    # w_init is a vector (in numpy) of length 2 containing the initial guess for the weights of the
    # linear discriminants
    #
    # X and Y are vectors of datapoints specifying input (X) and output (Y)
    # of the classification to be learned. Class support for inputs X,Y:
    # float, double, single
    #
    # AUTHOR: Marc Gyongyosi
    #
    # NOTE(review): Python 2 source (print statement); will not run under
    # Python 3 unmodified.  The loop never terminates if the data are not
    # linearly separable -- TODO confirm callers guarantee separability.
    e = 0
    w = w_init
    solution = False
    total_count = len(Y)
    error_count = total_count
    while solution is not True:
        # One epoch: count misclassifications and apply perceptron updates.
        error_count = total_count
        for xk,yk in zip(X,Y):
            g_x = w[0]+w[1]*xk   # linear discriminant g(x) = w0 + w1*x
            h_x = 0
            if (g_x > 0):
                h_x = 1
            else:
                h_x = -1
            if (h_x == yk):
                error_count -= 1
            else:
                # Misclassified: nudge the weights toward the correct label.
                w = w + yk * np.array([1, xk])
        e += 1
        if (error_count == 0):
            solution = True
    print "Done!"
    return (w, e)
def main():
    # Read the CSV named on the command line and run the perceptron on the
    # first data set (X1, Y1); the X2/Y2 run below is disabled.
    # NOTE(review): Python 2 source (print statements, 'rb' csv mode).
    rfile = sys.argv[1]
    #read in csv file into np.arrays X1, X2, Y1, Y2
    csvfile = open(rfile, 'rb')
    dat = csv.reader(csvfile, delimiter=',')
    X1 = []
    Y1 = []
    X2 = []
    Y2 = []
    for i, row in enumerate(dat):
        if i > 0:  # skip the header row
            X1.append(float(row[0]))
            X2.append(float(row[1]))
            Y1.append(float(row[2]))
            Y2.append(float(row[3]))
    X1 = np.array(X1)
    X2 = np.array(X2)
    Y1 = np.array(Y1)
    Y2 = np.array(Y2)
    print "--------"
    print "Starting Perceptron on X1, Y1"
    w_init = np.array([0,0])# INTIALIZE W_INIT
    w, k = perceptrona(w_init, X1, Y1)
    vals = np.arange(X1.min(), X1.max(), (X1.max()+abs(X1.min()))/100)
    tests = np.zeros(len(vals))
    # NOTE(review): rebinding the loop variable does not modify `tests`;
    # the decision-line values are never actually stored.
    for t in tests:
        t =w[0]+ t * w[1]
    print "Convergence took " + str(k) + " epochs"
    print "Weights: w_0=" + str(w[0]) + " w_1=" + str(w[1])
    #plt.plot(X1,Y1,'ro', vals, tests, 'k')
    #plt.show()
    """
    print "--------"
    print "Starting Test 2"
    w_init = np.array([0,0])# INTIALIZE W_INIT
    w, k = perceptrona(w_init, X2, Y2)
    vals = np.arange(X2.min(), X2.max(), (X2.max()+abs(X2.min()))/100)
    tests = np.zeros(len(vals))
    for t in tests:
        t =w[0]+ t * w[1]
    print "Convergence took " + str(k) + " trials"
    print "Weights: w_0=" + str(w[0]) + " w_1=" + str(w[1])
    plt.plot(X2,Y2,'ro', vals, tests, 'k')
    plt.show()
    """

if __name__ == "__main__":
    main()
|
# Count the spaces in the sentence with str.count.
a = "Hello my world today we try to find the number of spaces"
print(a.count(" "))
|
# @Title: 最长重复子数组 (Maximum Length of Repeated Subarray)
# @Author: 2464512446@qq.com
# @Date: 2020-07-01 15:15:37
# @Runtime: 6484 ms
# @Memory: 37.9 MB
class Solution:
    def findLength(self, A: list, B: list) -> int:
        """Length of the longest common contiguous subarray of A and B.

        Bottom-up DP: dp[i][j] is the longest common subarray starting
        at A[i] and B[j]; O(n*m) time and space.

        BUG FIX: the original annotated the parameters with `List[int]`
        without importing it from `typing`, which raises NameError when
        the class body is evaluated; the builtin `list` is used instead.
        """
        n, m = len(A), len(B)
        dp = [[0] * (m + 1) for _ in range(n + 1)]
        ans = 0
        for i in range(n - 1, -1, -1):
            for j in range(m - 1, -1, -1):
                dp[i][j] = dp[i + 1][j + 1] + 1 if A[i] == B[j] else 0
                ans = max(ans, dp[i][j])
        return ans
|
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
import numpy as np
file = pd.read_csv("landslide_data3.csv")
#QUES1
print("\n******QUESTION-1*******")
def mean(x):
    """Arithmetic mean of x."""
    return np.mean(x)

def median(x):
    """Median of x."""
    return np.median(x)

def mode(x):
    """Most frequent value of x (scipy.stats ModeResult)."""
    return stats.mode(x)

def minimum(x):
    """Smallest element of x."""
    return min(x)

def maximum(x):
    """Largest element of x."""
    return max(x)

def stdDev(x):
    """Population standard deviation of x (numpy default, ddof=0)."""
    return np.std(x)
#printing mean,median,mode.minimum,maximum and standard deviation for all the 7 quantities
print("\n TEMPERATURE: ")
print(" Mean = ",mean(file["temperature"]))
print(" Median = ",median(file["temperature"]))
print(" Mode = ",mode(file["temperature"])[0])
print(" Minimum = ",minimum(file["temperature"]))
print(" Maximum = ",maximum(file["temperature"]))
print(" Standard Deviation = ",stdDev(file["temperature"]))
print("\n HUMIDITY: ")
print(" Mean = ",mean(file["humidity"]))
print(" Median = ",median(file["humidity"]))
print(" Mode = ",mode(file["humidity"])[0])
print(" Minimum = ",minimum(file["humidity"]))
print(" Maximum = ",maximum(file["humidity"]))
print(" Standard Deviation = ",stdDev(file["humidity"]))
print("\n PRESSURE: ")
print(" Mean = ",mean(file["pressure"]))
print(" Median = ",median(file["pressure"]))
print(" Mode = ",mode(file["pressure"])[0])
print(" Minimum = ",minimum(file["pressure"]))
print(" Maximum = ",maximum(file["pressure"]))
print(" Standard Deviation = ",stdDev(file["pressure"]))
print("\n RAIN: ")
print(" Mean = ",mean(file["rain"]))
print(" Median = ",median(file["rain"]))
print(" Mode = ",mode(file["rain"])[0])
print(" Minimum = ",minimum(file["rain"]))
print(" Maximum = ",maximum(file["rain"]))
print(" Standard Deviation = ",stdDev(file["rain"]))
print("\n LIGHTAVGW/o0: ")
print(" Mean = ",mean(file["lightavgw/o0"]))
print(" Median = ",median(file["lightavgw/o0"]))
print(" Mode = ",mode(file["lightavgw/o0"])[0])
print(" Minimum = ",minimum(file["lightavgw/o0"]))
print(" Maximum = ",maximum(file["lightavgw/o0"]))
print(" Standard Deviation = ",stdDev(file["lightavgw/o0"]))
print("\n LIGHTMAX: ")
print(" Mean = ",mean(file["lightmax"]))
print(" Median = ",median(file["lightmax"]))
print(" Mode = ",mode(file["lightmax"])[0])
print(" Minimum = ",minimum(file["lightmax"]))
print(" Maximum = ",maximum(file["lightmax"]))
print(" Standard Deviation = ",stdDev(file["lightmax"]))
print("\n MOISTURE: ")
print(" Mean = ",mean(file["moisture"]))
print(" Median = ",median(file["moisture"]))
print(" Mode = ",mode(file["moisture"])[0])
print(" Minimum = ",minimum(file["moisture"]))
print(" Maximum = ",maximum(file["moisture"]))
print(" Standard Deviation = ",stdDev(file["moisture"]))
#QUES2(a)
print("\n\n******QUESTION-2(a)*******")
X = file["rain"]
#plotting scatter plot for rain with all the other 6 quantities
plt.scatter(X,file["temperature"])
plt.xlabel("Rain")
plt.ylabel("Temperature")
plt.title("Plot between Rain and Temperature")
plt.show()
plt.scatter(X,file["humidity"])
plt.xlabel("Rain")
plt.ylabel("Humidity")
plt.title("Plot between Rain and Humidity")
plt.show()
plt.scatter(X,file["pressure"])
plt.xlabel("Rain")
plt.ylabel("Pressure")
plt.title("Plot between Rain and Pressure")
plt.show()
plt.scatter(X,file["lightavgw/o0"])
plt.xlabel("Rain")
plt.ylabel("Lightavgw/o0")
plt.title("Plot between Rain and Lightavgw/o0")
plt.show()
plt.scatter(X,file["lightmax"])
plt.xlabel("Rain")
plt.ylabel("Lightmax")
plt.title("Plot between Rain and Lightmax")
plt.show()
plt.scatter(X,file["moisture"])
plt.xlabel("Rain")
plt.ylabel("Moisture")
plt.title("Plot between Rain and Moisture")
plt.show()
#QUES2(b)
print("\n\n******QUESTION-2(b)*******")
X = file["temperature"]
#plotting scatter plot for temperature with all the other 6 quantities
plt.scatter(X,file["humidity"])
plt.xlabel("Temperature")
plt.ylabel("Humidity")
plt.title("Plot between Temperature and Humidity")
plt.show()
plt.scatter(X,file["pressure"])
plt.xlabel("Temperature")
plt.ylabel("Pressure")
plt.title("Plot between Temperature and Pressure")
plt.show()
plt.scatter(X,file["rain"])
plt.xlabel("Temperature")
plt.ylabel("Rain")
plt.title("Plot between Temperature and Rain")
plt.show()
plt.scatter(X,file["lightavgw/o0"])
plt.xlabel("Temperature")
plt.ylabel("Lightavgw/o0")
plt.title("Plot between Temperature and Lightavgw/o0")
plt.show()
plt.scatter(X,file["lightmax"])
plt.xlabel("Temperature")
plt.ylabel("Lightmax")
plt.title("Plot between Temperature and Lightmax")
plt.show()
plt.scatter(X,file["moisture"])
plt.xlabel("Temperature")
plt.ylabel("Moisture")
plt.title("Plot between Temperature and Moisture")
plt.show()
#QUES3(a)
print("\n\n******QUESTION-3(a)*******")
def corCoef(x, y):
    """Return the 2x2 Pearson correlation matrix of x and y (np.corrcoef)."""
    return np.corrcoef(x, y)
x = file["rain"]
#printing correlation coefficient between rain and other 6 quantities
print("\n Correlation Coefficient between Rain and: \n")
# [0][1] picks the off-diagonal entry of the 2x2 matrix, i.e. corr(x, y).
for col, label in [("temperature", "Temperature"), ("humidity", "Humidity"),
                   ("pressure", "Pressure"), ("lightavgw/o0", "Lightavgw/o0"),
                   ("lightmax", "Lightmax"), ("moisture", "Moisture")]:
    print(" %s = %.8f" % (label, corCoef(x, file[col])[0][1]))
#QUES3(b)
print("\n\n******QUESTION-3(b)*******")
def corCoef(x, y):
    """Return the 2x2 Pearson correlation matrix of x and y (np.corrcoef)."""
    return np.corrcoef(x, y)
x = file["temperature"]
#printing correlation coefficient between temperature and other 6 quantities
print("\n Correlation Coefficient between Temperature and: \n")
# [0][1] picks the off-diagonal entry of the 2x2 matrix, i.e. corr(x, y).
for col, label in [("rain", "Rain"), ("humidity", "Humidity"),
                   ("pressure", "Pressure"), ("lightavgw/o0", "Lightavgw/o0"),
                   ("lightmax", "Lightmax"), ("moisture", "Moisture")]:
    print(" %s = %.8f" % (label, corCoef(x, file[col])[0][1]))
#QUES4
print("\n\n******QUESTION-4*******")
rain = file["rain"]
moisture = file["moisture"]
# Frequency histograms for the rain and moisture series (Series.hist).
for series, axis_label, name in ((rain, "Rain(in mm)", "Rain"),
                                 (moisture, "Moisture(in %)", "Moisture")):
    series.hist()
    plt.xlabel(axis_label)
    plt.ylabel("Frequency")
    plt.title("Histogram for " + name)
    plt.show()
#QUES5
print("\n\n******QUESTION-5*******")
df = pd.DataFrame(file)
#grouping the data w.r.t. stationid's
# Group once and loop over the ten station ids; this replaces ten
# near-identical group/extract/plot blocks with a single histogram loop.
by_station = df.groupby("stationid")
for sid in ("t6", "t7", "t8", "t9", "t10",
            "t11", "t12", "t13", "t14", "t15"):
    #plotting histogram of rain for this station
    by_station.get_group(sid)["rain"].hist()
    plt.xlabel("Rain(in mm)")
    plt.ylabel("Frequency")
    plt.title("Histogram for Rain in station " + sid)
    plt.show()
#QUES6
print("\n\n******QUESTION-6*******")
# Rain box plot drawn at three vertical zoom levels -- full range, 0-5000
# and 0-50 -- so the heavily skewed distribution can be inspected properly.
for ylimits in (None, (0, 5000), (0, 50)):
    df.boxplot(column='rain')
    plt.title("BoxPlot for Rain")
    plt.ylabel("Rain(in mm)")
    if ylimits is not None:
        plt.ylim(*ylimits)
    plt.show()
#plotting boxplot of moisture
df.boxplot(column='moisture')
plt.ylabel("Moisture(in %)")
plt.title("BoxPlot for Moisture")
plt.show()
|
# if elif else statements
# hungry = False
# if hungry:
# print('Feed Me!')
# else:
# print('Im not hungry')
# loc = "Bank"
# if loc == "Auto Shop":
# print("Cars are nice")
# elif loc == "Bank":
# print("Money is nice")
# else:
# print("I dont know much")
# name = "Sammy"
# if name == "Frankie":
# print("Hey Frankie")
# elif name == "Sammy":
# print("Hey Sammy")
# else:
# print("Whats your name?") |
def quickSort(A):
    """Sort list A in place using quicksort (delegates to quick_sort2)."""
    quick_sort2(A, 0, len(A)-1)
def quick_sort2(A, low, hi):
    """Recursively quicksort the slice A[low..hi] in place.

    :param A: list being sorted (mutated in place)
    :param low: index of the first element of the slice
    :param hi: index of the last element of the slice
    """
    # Base case: a slice of fewer than two elements is already sorted.
    # Without this guard the original recursed unconditionally and never
    # terminated (and called partition on empty/inverted ranges).
    if low < hi:
        p = partition(A, low, hi)
        # low hi and p are indexes of the array
        quick_sort2(A, low, p-1)
        quick_sort2(A, p+1, hi)
def get_pivot(A, low, hi):
    """Pick a pivot index by median-of-three over A[low], A[mid], A[hi].

    Returns the index (low, mid or hi) whose value is the median of the
    three samples; hi is the fallback when it already holds the median.
    """
    mid = (hi + low) // 2
    first, middle, last = A[low], A[mid], A[hi]
    if first < middle:
        # first is the smallest of {first, middle}; median is whichever of
        # middle/last sits between the other two.
        return mid if middle < last else hi
    # middle <= first: the median is first unless last is even smaller.
    return low if first < last else hi
def partition(A, low, hi):
pivotIndex = get_pivot(A, low, hi)
pivotValue = A[pivotIndex]
A[pivotIndex], A[low] = A[low], A[pivotIndex]
border = low
|
from keras.callbacks import Callback
from CSVDataFrame import CSVDataFrame
import numpy as np
class EvaluateCallBack(Callback):
    """Keras callback (Python 2) that evaluates the model after each epoch
    via an external test function, early-stops after ``max_wait``
    non-improving epochs (judged by model approximation ratio), saves the
    model whenever it improves, and dumps per-epoch metrics to CSV when
    training ends.
    """
    def __init__(self,model,encoder,model_name,x_val,y_val,obj_val,test_fn,pool = None,max_wait=5):
        # Validation inputs/targets/objective handed to test_fn each epoch.
        self.x_val = x_val
        self.y_val = y_val
        self.obj_val = obj_val
        self.pool = pool
        self.test_func = test_fn
        # Per-epoch metric rows, written out in on_train_end.
        self.metrics = []
        # History of model approx ratios; seeded with a large sentinel so
        # the first epoch always counts as an improvement.
        self.w_metrics = [1000.0]
        # Number of consecutive epochs without improvement.
        self.wait = 0
        self.max_wait = max_wait
        self.model_name = model_name
        self.model = model
        self.encoder = encoder
        self.frame = CSVDataFrame()
        self.frame.setheader(['Epoch','% infeas problems', 'model accuracy', 'greedy accuracy', 'model_approx_ratio','greedy_approx_ratio'])
    # The triple-quoted block below is disabled code kept by the author
    # (would refresh a memory layer from the encoder every few batches).
    '''
    def on_batch_begin(self,batch,log):
        if bool != None:
            if batch // 5:
                m = np.zeros((100,32))
                m = self.encoder.predict(pool)
                self.model.get_layer('Memory').set_weights([m])
                self.model.get_layer('mem').set_weights([np.transpose(m)])
    '''
    def on_epoch_end(self,epoch,log):
        # test_func returns: infeasible fraction, model/greedy accuracies,
        # and model/greedy approximation ratios (lower ratio = better here).
        infeas, acc, acc_greedy, model_approx_ratio,greedy_approx_ratio = self.test_func(self.x_val,self.y_val,self.obj_val)
        if model_approx_ratio >= min(self.w_metrics):
            # No improvement over the best ratio seen so far.
            if self.wait >= self.max_wait:
                print 'training stopped' , self.wait , self.max_wait
                self.model.stop_training = True
                self.wait = 0
            else:
                self.wait+=1
            #self.metrics.append(infeas)
        else:
            # Improved: reset patience and checkpoint the model.
            self.wait = 0
            print 'model saved...'
            print model_approx_ratio , min(self.w_metrics)
            # NOTE(review): '.models/' looks like it may be intended as
            # './models/' or 'models/' -- confirm the directory name.
            self.model.save('.models/'+self.model_name+'.hdf5')
        self.metrics.append([epoch+1,infeas, acc, acc_greedy, model_approx_ratio,greedy_approx_ratio])
        self.w_metrics.append(model_approx_ratio)
        self.frame.PassDataFrame(self.metrics)
    def on_train_end(self,logs):
        # Persist the accumulated per-epoch metrics as CSV.
        self.frame.save('logs/test-logs/'+self.model_name+'.csv')
|
class Solution(object):
    def findOrder(self, numCourses, prerequisites):
        """Return one valid course order via Kahn's BFS topological sort,
        or [] when the prerequisite graph contains a cycle.
        """
        from collections import deque
        # pending[c]: prerequisites of c not yet taken; dependents[p]: the
        # courses that list p as a prerequisite.
        pending = [set() for _ in range(numCourses)]
        dependents = [set() for _ in range(numCourses)]
        for course, prereq in prerequisites:
            pending[course].add(prereq)
            dependents[prereq].add(course)
        # Start from every course with no prerequisites.
        ready = deque(c for c in range(numCourses) if not pending[c])
        order = []
        while ready:
            taken = ready.popleft()
            order.append(taken)
            for nxt in dependents[taken]:
                pending[nxt].remove(taken)
                if not pending[nxt]:
                    ready.append(nxt)
        # Fewer scheduled courses than numCourses means a cycle blocked some.
        return order if len(order) == numCourses else []
# DFS
class Solution(object):
    def findOrder(self, numCourses, prerequisites):
        """Return one valid course order via DFS post-order traversal,
        or [] when the prerequisite graph contains a cycle.
        """
        # adjacency: prereq -> courses unlocked by it
        adjacency = [set() for _ in range(numCourses)]
        for course, prereq in prerequisites:
            adjacency[prereq].add(course)
        state = [0] * numCourses  # 0 unvisited, -1 on stack, 1 finished
        postorder = []
        for node in range(numCourses):
            if not self.dfs(node, adjacency, state, postorder):
                return []
        # Reversed post-order of this edge direction is a topological order.
        return postorder[::-1]
    def dfs(self, i, graph, visit, res):
        """DFS from node i; return False iff a cycle is reachable."""
        if visit[i] == 1:
            return True
        if visit[i] == -1:
            # Node is already on the current stack: back edge -> cycle.
            return False
        visit[i] = -1
        for j in graph[i]:
            if not self.dfs(j, graph, visit, res):
                return False
        visit[i] = 1
        res.append(i)
        return True
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import messages
from django.core.mail import send_mail
from django.shortcuts import render
def home(request):
    """Landing-page view.

    GET: render the home template.
    POST: assemble the contact-form fields into a plain-text message,
    e-mail it to the site owner, then redirect back to '/'.
    """
    context = {}
    if request.method != 'POST':
        return render(request, 'home/index.html', context=context)
    # Contact-form fields; each defaults to '' so a partial form still sends.
    name = request.POST.get('name', '')
    phone = request.POST.get('phone', '')
    email = request.POST.get('email', '')
    msg = request.POST.get('message', '')
    body = ("Hi, I'm " + str(name)
            + "\n\t Phone : " + str(phone)
            + "\n\t Email : " + str(email)
            + "\n\nMessage:\n\n" + str(msg))
    sender = settings.EMAIL_HOST_USER
    recipients = ["rajiv97j@gmail.com"]
    # fail_silently=True: a mail backend failure must not break the redirect.
    send_mail('test mail', body, sender, recipients, fail_silently=True)
    return HttpResponseRedirect('/')
|
#performing all Ec2 instance operations
import boto3
# Default-profile session plus both API styles: the high-level resource
# interface and the low-level client interface.
aws_mgm_con = boto3.session.Session()
ec2_dashboard_res = aws_mgm_con.resource(service_name='ec2')
ec2_dashboard_client = aws_mgm_con.client(service_name='ec2')
#print(dir(ec2_dashboard_res.instances.all()))
#print(dir(ec2_dashboard_client.start_instances))
# instance_list = []
# for each in ec2_dashboard_res.instances.all():
#     instance_list.append(each.id)
#To Start EC2 instance
# ec2_dashboard_res.instances.start()
# waiter = ec2_dashboard_client.get_waiter('instance_running')
# waiter.wait(InstanceIds = instance_list )
# print("<<<<<<---You Instance is up and running now---->>>>>>>")
#print(instance_list)
#To Stop EC2 Instances
# ec2_dashboard_res.instances.stop()
# waiter = ec2_dashboard_client.get_waiter('instance_stopped')
# waiter.wait(InstanceIds = instance_list)
# print("<<<<<<---Your Instances has been stopped now---->>>>>>>")
# print(f"List of Instances id :{instance_list}" )
#resource object
print("-------------Resource-------------------")
test_server_ids = []
# Filter: only instances whose Name tag is exactly 'Testing-server'.
f1 = {"Name":"tag:Name","Values":['Testing-server']}
for each_ins in ec2_dashboard_res.instances.filter(Filters = [f1]):
    # NOTE(review): this appends the Instance *object*, not its id, despite
    # the list's name; the list is unused below -- confirm intent.
    test_server_ids.append(each_ins)
#Client object
test_server = []
print("----------------Client-----------------")
# Same filter through the client API: Reservations -> Instances -> ids.
for each_ins_cli in ec2_dashboard_client.describe_instances(Filters = [f1])['Reservations']:
    for each_in in each_ins_cli['Instances']:
        test_server.append(each_in['InstanceId'])
print(test_server)
#<<<<<<Starting test Server
# print("Starting instances with ids of ",test_server )
# ec2_dashboard_client.start_instances(InstanceIds = test_server)
# waiter = ec2_dashboard_client.get_waiter('instance_running')
# waiter.wait(InstanceIds = test_server )
# print("Test server has been started")
#<<<<Stopping test server>>>
# Stop the matched instances and block until they report 'stopped'.
print("Stopped instances with ids of ",test_server )
ec2_dashboard_client.stop_instances(InstanceIds = test_server)
waiter = ec2_dashboard_client.get_waiter('instance_stopped')
waiter.wait(InstanceIds = test_server )
print("Test server has been stopped")
from django.urls import include, path
from rest_framework import routers
from rest_framework_simplejwt.views import TokenObtainPairView, \
TokenRefreshView
from .views import DeleteReservation,ReservationsAll, RoomsAll, UserViewSet
router = routers.DefaultRouter()
router.register('users', UserViewSet, basename='users')
router.register('rooms', RoomsAll, basename='rooms')
router.register('reservations', ReservationsAll, basename='reservations')
# URL configuration: DRF router endpoints plus JWT auth and a
# reservation-delete view.  (The previous `urlpatterns = router.urls`
# assignment was dead code -- it was immediately overwritten below, and the
# router urls are already mounted via include(router.urls).)
urlpatterns = [
    path('', include(router.urls)),
    path('api/token/', TokenObtainPairView.as_view(), name='token_obtain_pair'),
    path('api/token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    path("reservation/delete/<int:reservation_id>/", DeleteReservation.as_view(), name='reservation-delete')
]
# Tiny smoke-test script: emit two fixed lines on stdout.
for greeting in ('hello', 'ahhh'):
    print(greeting)
|
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.preprocessing import (
StandardScaler as ScikitStandardScaler,
PolynomialFeatures as SkPolynomialFeatures
)
import numpy as np
from sktutor.utils import dict_with_default, dict_default, bitwise_operator
from scipy import stats
from patsy import dmatrix
import re
from collections import OrderedDict
def mode(x):
    """Return the most frequent value in ``x``. If two or more values are
    tied with the most occurrences, return the lowest such value.

    :param x: A data vector.
    :type x: pandas Series
    :rtype: The most frequent value in x, or None when x has no countable
        values (empty or all-NaN).
    """
    counts = x.value_counts()
    if len(counts) == 0:
        return None
    # Keep every value tied for the highest count, then break the tie by
    # taking the smallest.  This replaces the old reset_index()/'index'
    # column dance, which raises a KeyError on pandas >= 2.0 for named
    # Series (value_counts now names its index after the Series).
    tied = counts[counts == counts.max()].index
    return min(tied)
class GroupByImputer(BaseEstimator, TransformerMixin):
    """Imputes Missing Values by Group with specified function. If a ``group``
    parameter is given, it can be the name of any function which can be passed
    to the ``agg`` function of a pandas ``GroupBy`` object. If a ``group``
    parameter is not given, then only 'mean', 'median', and 'most_frequent'
    can be used.
    :param impute_type:
        The type of imputation to be performed.
    :type impute_type: string
    :param group:
        The column name or a list of column names to group the ``pandas
        DataFrame``.
    :type group: string or list of strings
    """
    def __init__(self, impute_type, group=None):
        self.group = group
        # 'most_frequent' maps to the module-level mode() so that ties
        # resolve to the lowest value; any other impute_type is passed
        # through (e.g. 'mean'/'median', or any agg-compatible name).
        if impute_type == 'most_frequent':
            self.impute_type = mode
        else:
            self.impute_type = impute_type
    def fit(self, X, y=None):
        """Fit the imputer on X
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        # self.mapper is {column: {group_key: fill_value}} when grouped,
        # otherwise a flat {column: fill_value} dict.
        if self.group:
            self.mapper = X.groupby(self.group).agg(self.impute_type).to_dict()
        elif self.impute_type == mode:
            # DataFrame.mode() row 0 holds each column's most frequent value.
            self.mapper = X.mode().iloc[0, :].to_dict()
        else:
            if self.impute_type == 'median':
                self.mapper = X.median().to_dict()
            elif self.impute_type == 'mean':
                self.mapper = X.mean().to_dict()
            else:
                raise ValueError(("Can only use 'most_frequent', 'median',"
                                  "or 'mean' impute_types without 'group'"
                                  "specified."))
        return self
    def _get_value_from_map(self, x, col):
        """get a value from the mapper, for a given column and a ``pandas
        Series`` representing a row of data.
        :param x: A row of data from a ``DataFrame``.
        :type x: pandas Series
        :param col: The name of the column to impute a missing value.
        :type col: string
        :rtype:
            The value from self.mapper dictionary if exists, np.nan otherwise.
        """
        try:
            key = x[self.group]
            # Multi-column groups are keyed by tuple in the mapper dict.
            if isinstance(key, pd.Series):
                key = tuple(key)
            return self.mapper[col][key]
        except KeyError:
            # Group value unseen during fit: leave the cell missing.
            return np.nan
    def transform(self, X):
        """Impute the eligible missing values in X
        :param X: The input data with missing values to be imputed.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with eligible missing values imputed.
        """
        X = X.copy()
        if self.group:
            # Row-wise lookup so each row uses its own group's fill value.
            for col in self.mapper.keys():
                X[col] = X[col].fillna(X.apply(
                    lambda x: self._get_value_from_map(x, col), axis=1))
        else:
            X = X.fillna(pd.Series(self.mapper))
        return X
class MissingValueFiller(BaseEstimator, TransformerMixin):
    """Replace every missing value with one constant.  Best used only on
    columns whose dtypes are compatible with that constant.

    :param value: the constant substituted for each missing entry
    """
    def __init__(self, value):
        self.value = value
    def fit(self, X, y=None):
        """No-op fit, present for scikit-learn pipeline compatibility.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        return self
    def transform(self, X):
        """Return X with every missing entry replaced by the constant.
        :param X: data possibly containing missing values
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with missing values filled.
        """
        return X.fillna(self.value)
class OverMissingThresholdDropper(BaseEstimator, TransformerMixin):
    """Drop columns whose share of missing data exceeds a threshold.

    :param threshold: maximum acceptable portion of missing data, within
        the interval [0,1]
    :type threshold: float
    """
    def __init__(self, threshold):
        if threshold > 1 or threshold < 0:
            raise ValueError("threshold must be within [0,1]")
        self.threshold = threshold
    def fit(self, X, y=None):
        """Record which columns exceed the missing-data threshold.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        # Strictly more NaNs than int(rows * threshold) marks a column.
        cutoff = int(len(X) * (self.threshold))
        missing = X.isnull().sum()
        self.cols_to_drop = missing[missing > cutoff].index.tolist()
        return self
    def transform(self, X):
        """Drop the columns flagged during fit.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with columns dropped.
        """
        return X.drop(self.cols_to_drop, axis=1)
class ValueReplacer(BaseEstimator, TransformerMixin):
    """Replaces Values in each column according to a nested dictionary.
    ``inverse_mapper`` is probably more intuitive for when one value replaces
    many values. Only one of ``inverse_mapper`` or ``mapper`` can be used.
    :param mapper: Nested dictionary with columns mapping to dictionaries
                   that map old values to new values.
    :type mapper: dictionary
    :param inverse_mapper: Nested dictionary with columns mapping to
                           dictionaries that map new values to a list of old
                           values
    :type inverse_mapper: dictionary
    ``mapper`` takes the form::
        {'column_name': {'old_value1': 'new_value1',
                         'old_value2': 'new_value1',
                         'old_value3': 'new_value2'}
        }
    while ``inverse_mapper`` takes the form::
        {'column_name': {'new_value1': ['old_value1', 'old_value2'],
                         'new_value2': ['old_value1']}
        }
    """
    def __init__(self, mapper=None, inverse_mapper=None):
        self.inverse_mapper = inverse_mapper
        if inverse_mapper and mapper:
            raise ValueError("Cannot use both a mapper and inverse_mapper.")
        elif inverse_mapper:
            # Invert {col: {new: [old, ...]}} into {col: {old: new}} so both
            # input styles share one internal representation.
            mapper = {}
            for k, d in inverse_mapper.items():
                map2 = {}
                for key, value in d.items():
                    for string in value:
                        map2[string] = key
                mapper[k] = map2
        elif not mapper:
            raise ValueError("Must initialize with either mapper or "
                             "inverse_mapper.")
        # dict_default (sktutor.utils) wraps each column dict; presumably it
        # yields unmapped values unchanged so Series.map leaves them intact,
        # matching the transform() docstring -- TODO confirm.
        mapper = {key: dict_default(value) for key, value in mapper.items()}
        self.mapper = mapper
    def fit(self, X, y=None):
        """Fit the value replacer on X. Checks that all columns in mapper are
        in present in X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        if len(set(self.mapper.keys()) - set(X.columns)) > 0:
            raise ValueError("Mapper contains columns not found in input"
                             "data: " +
                             ', '.join(set(self.mapper.keys())
                                       - set(X.columns)))
        return self
    def transform(self, X):
        """Replace the values in X with the values in the mapper. Values not
        accounted for in the mapper will be left untransformed.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with old values mapped to new values.
        """
        X = X.copy(deep=True)
        for col in self.mapper.keys():
            X[col] = X[col].map(self.mapper[col])
        return X
class FactorLimiter(BaseEstimator, TransformerMixin):
    """For each named column, it limits factors to a list of acceptable values.
    Non-conforming factors, including missing values, are replaced by a default
    value.
    :param factors_per_column: dictionary mapping column name keys to a
                               dictionary with a list of acceptable factor
                               values and a default factor value for
                               non-conforming values
    :type factors_per_column: dictionary
    ``factors_per_column`` takes the form::
        {'column_name': {'factors': ['value1', 'value2', 'value3'],
                         'default': 'value1'},
        }
    """
    def __init__(self, factors_per_column=None):
        self.factors_per_column = factors_per_column
        mapper = {}
        for col, specs in factors_per_column.items():
            # new_dict = dict_factory('new_dict', specs['default'])
            # Identity mapping for the allowed factors; dict_with_default
            # (sktutor.utils) presumably supplies specs['default'] for any
            # other key, including NaN -- TODO confirm.
            translation = {factor: factor for factor in specs['factors']}
            new_dict = dict_with_default(specs['default'], translation)
            # mapper[col] = new_dict(translation)
            mapper[col] = new_dict
        self.mapper = mapper
    def fit(self, X, y=None):
        """Fit the factor limiter on X. Checks that all columns in
        factors_per_column are in present in X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        if len(set(self.mapper.keys()) - set(X.columns)) > 0:
            raise ValueError("factors_per_column contains keys not found in "
                             "DataFrame columns:" ', '.join(
                                 set(self.mapper.keys()) - set(X.columns)))
        return self
    def transform(self, X):
        """Limit the factors in X with the values in the factor_per_column.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with factors limited to the specifications.
        """
        X = X.copy(deep=True)
        for col, val in self.mapper.items():
            X[col] = X[col].map(val)
        return X
class SingleValueAboveThresholdDropper(BaseEstimator, TransformerMixin):
    """Drop columns dominated by a single value: if any one value accounts
    for at least the threshold fraction of rows, the column is removed.

    :param threshold: fraction of rows a single value must reach for the
        column to be removed, within [0,1]
    :type threshold: float
    :param dropna: If True, do not consider NaN as a value
    :type dropna: boolean
    """
    def __init__(self, threshold=1, dropna=True):
        if threshold > 1 or threshold < 0:
            raise ValueError("threshold must be within [0,1]")
        self.threshold = threshold
        self.dropna = dropna
    def fit(self, X, y=None):
        """Record which columns have a dominating single value.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        cutoff = int(len(X) * (self.threshold))
        # value_counts sorts descending, so iloc[0] is the top count.
        top_counts = X.apply(
            lambda col: col.value_counts(dropna=self.dropna).iloc[0])
        self.cols_to_drop = top_counts[top_counts >= cutoff].index.tolist()
        return self
    def transform(self, X):
        """Drop the columns flagged during fit.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with columns dropped to the specifications.
        """
        return X.drop(self.cols_to_drop, axis=1)
class SingleValueDropper(BaseEstimator, TransformerMixin):
    """Drop columns holding only one unique value.

    :param dropna: If True, do not consider NaN as a value
    :type dropna: boolean
    """
    def __init__(self, dropna=True):
        self.dropna = dropna
    def _unique_values(self, x):
        # Count distinct values, optionally discarding missing ones.
        distinct = x.unique().tolist()
        if self.dropna and x.isnull().sum() > 0:
            if None in distinct:
                distinct.remove(None)
            # NaN != NaN, so this comprehension filters out float NaNs.
            distinct = [v for v in distinct if v == v]
        return len(distinct)
    def fit(self, X, y=None):
        """Record which columns carry at most one distinct value.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        counts = X.apply(self._unique_values, axis=0)
        self.cols_to_drop = counts[(counts <= 1)].index.tolist()
        return self
    def transform(self, X):
        """Drop the columns flagged during fit.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with columns dropped.
        """
        return X.drop(self.cols_to_drop, axis=1)
class ColumnExtractor(BaseEstimator, TransformerMixin):
    """Keep only a named list of columns from a ``DataFrame``.

    :param col: A list of columns to extract from the ``DataFrame``
    :type col: list of strings
    """
    def __init__(self, col):
        self.col = col
    def fit(self, X, y=None, **fit_params):
        """Verify that every requested column is present in X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        absent = set(self.col) - set(X.columns)
        if absent:
            raise ValueError("Column list contains columns not found in input"
                             "data: " + ', '.join(absent))
        return self
    def transform(self, X, **transform_params):
        """Return only the specified columns of X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with specified columns.
        """
        return pd.DataFrame(X[self.col])
class ColumnDropper(BaseEstimator, TransformerMixin):
    """Remove a named list of columns from a ``DataFrame``.

    :param col: A list of columns to drop from the ``DataFrame``
    :type col: list of strings
    """
    def __init__(self, col):
        self.col = col
    def fit(self, X, y=None, **fit_params):
        """Verify that every listed column is present in X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        absent = set(self.col) - set(X.columns)
        if absent:
            raise ValueError("Column list contains columns not found in input "
                             "data: " + ', '.join(absent))
        return self
    def transform(self, X, **transform_params):
        """Return X without the specified columns.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` without specified columns.
        """
        return X.drop(self.col, axis=1)
class DummyCreator(BaseEstimator, TransformerMixin):
    """One-hot encode categorical variables via ``pd.get_dummies``.

    All keyword arguments are forwarded to ``pd.get_dummies`` (e.g.
    ``dummy_na`` to add NaN indicator columns, ``drop_first`` for k-1
    encoding).
    """
    def __init__(self, **kwargs):
        self.kwargs = kwargs
    def _get_dummies(self, X, fit):
        # 'drop_first' only applies during fit; the recorded fit columns
        # already encode that choice, so it is stripped at transform time.
        if fit:
            return pd.get_dummies(X, **self.kwargs)
        transform_kwargs = {k: v for k, v in self.kwargs.items()
                            if k != 'drop_first'}
        return pd.get_dummies(X, **transform_kwargs)
    def _fit(self, X):
        # Remember the dummy columns produced by the fitting data.
        dummies = self._get_dummies(X, fit=True)
        self.columns = dummies.columns
        return dummies
    def fit(self, X, y=None, **fit_params):
        """Fit the dummy creator on X, recording the columns produced.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        self._fit(X)
        return self
    def fit_transform(self, X, y=None, **fit_params):
        """Fit the dummy creator on X and return the dummied X in one pass.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with dummy variables.
        """
        return self._fit(X)
    def transform(self, X, **transform_params):
        """Create dummies for X, aligned to the fit-time columns.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with dummy variables.
        """
        X = self._get_dummies(X, fit=False)
        fitted = set(self.columns)
        present = set(X.columns)
        if fitted != present:
            # Levels absent from the new data become all-zero columns,
            # then the fit-time column order is restored.
            for col in self.columns:
                if col not in present:
                    X[col] = 0
            X = X[self.columns]
        return X
class ColumnValidator(BaseEstimator, TransformerMixin):
    """Ensure a transformed dataset carries exactly the columns seen at fit
    time, returned in the fit-time order.  Useful as a pipeline bookend.
    """
    def fit(self, X, y=None, **fit_params):
        """Record the fit-time column set and order.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        self.columns = X.columns
        return self
    def transform(self, X, **transform_params):
        """Check X against the fit-time columns and reorder to match.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with specified columns.
        """
        absent = set(self.columns) - set(X.columns)
        if absent:
            raise ValueError("New data is missing columns from original data: "
                             + ', '.join(absent))
        extra = set(X.columns) - set(self.columns)
        if extra:
            raise ValueError("New data has columns not in the original data: "
                             + ', '.join(extra))
        return pd.DataFrame(X[self.columns], index=X.index)
class TextContainsDummyExtractor(BaseEstimator, TransformerMixin):
    """Extract one or more dummy variables based on whether one or more text
    columns contains one or more strings.
    :param mapper: a mapping of new columns to criteria to populate it as True
    :type mapper: dict
    ``mapper`` takes the form::
        {'old_column1':
            {'new_column1':
                [{'pattern': 'string1', 'kwargs': {'case': False}},
                 {'pattern': 'string2', 'kwargs': {'case': False}}
                 ],
             'new_column2':
                [{'pattern': 'string3', 'kwargs': {'case': False}},
                 {'pattern': 'string4', 'kwargs': {'case': False}}
                 ],
             },
         'old_column2':
            {'new_column3':
                [{'pattern': 'string5', 'kwargs': {'case': False}},
                 {'pattern': 'string6', 'kwargs': {'case': False}}
                 ],
             'new_column4':
                [{'pattern': 'string7', 'kwargs': {'case': False}},
                 {'pattern': 'string8', 'kwargs': {'case': False}}
                 ]
             }
        }
    """
    def __init__(self, mapper):
        self.mapper = mapper
    def fit(self, X, y=None):
        """Fit the extractor on X. Checks that all source columns in the
        mapper are present in X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        if len(set(self.mapper.keys()) - set(X.columns)) > 0:
            raise ValueError("Mapper contains columns not found in input"
                             "data: " +
                             ', '.join(set(self.mapper.keys())
                                       - set(X.columns)))
        return self
    def transform(self, X):
        """Add the dummy columns described by the mapper to X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with the new dummy columns added.
        """
        X = X.copy(deep=True)
        for old_col, val in self.mapper.items():
            for new_col, terms in val.items():
                # One boolean Series per pattern (str.contains with the
                # term's kwargs, e.g. case-insensitivity) ...
                series_list = []
                for term in terms:
                    series_list.append(
                        X[old_col].str.contains(term['pattern'],
                                                **term['kwargs'])
                    )
                # ... OR-combined via the project's bitwise_operator helper
                # and cast to a 0/1 int dummy.
                X[new_col] = bitwise_operator(
                    pd.DataFrame(series_list).transpose(), 'or').astype(int)
        return X
class BitwiseOperator(BaseEstimator, TransformerMixin):
    """Combine existing columns into new 0/1 columns with a bitwise ``&``
    or ``|``.

    :param operator: the name of the bitwise operator to apply;
        'and' and 'or' are the acceptable inputs
    :type operator: str
    :param mapper: mapping from each new column to the list of old columns
        combined to produce it
    :type mapper: dict
    ``mapper`` takes the form::
        {'new_column1': ['old_column1', 'old_column2', 'old_column3'],
         'new_column2': ['old_column2', 'old_column4', 'old_column5']
        }
    """
    def __init__(self, operator, mapper):
        self.mapper = mapper
        if operator in ['and', 'or']:
            self.operator = operator
        else:
            raise ValueError("parameter operator can only be 'and' or 'or'")
    def fit(self, X, y=None, **fit_params):
        """Verify every referenced source column exists in X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        needed = [col for cols in self.mapper.values() for col in cols]
        unknown = set(needed) - set(X.columns)
        if unknown:
            raise ValueError("Column list contains columns not found in input "
                             "data:" + ', '.join(unknown))
        return self
    def transform(self, X, **transform_params):
        """Append the combined columns described by the mapper to X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with the new combined columns added.
        """
        X = X.copy(deep=True)
        for target, sources in self.mapper.items():
            X[target] = bitwise_operator(X[sources], self.operator).astype(int)
        return X
class BoxCoxTransformer(BaseEstimator, TransformerMixin):
    """Apply a Box-Cox transformation to every column.

    :param adder: constant added to each column before the Box-Cox
        transformation (Box-Cox requires strictly positive data)
    :type adder: numeric
    """
    def __init__(self, adder=0):
        self.adder = adder
    def fit(self, X, y=None, **fit_params):
        """Estimate a Box-Cox lambda per column (maximum likelihood).
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        self.columns = X.columns
        self.lambdas = dict()
        for col in self.columns:
            # stats.boxcox returns (transformed, lmbda); only keep lmbda.
            _, lmbda = stats.boxcox(X[col] + self.adder)
            self.lambdas[col] = lmbda
        return self
    def fit_transform(self, X, y=None, **fit_params):
        """Estimate the lambdas and return the transformed data in one pass.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with Box-Cox transformed columns.
        """
        X = X.copy()
        self.columns = X.columns
        self.lambdas = dict()
        for col in self.columns:
            transformed, lmbda = stats.boxcox(X[col] + self.adder)
            X[col] = transformed
            self.lambdas[col] = lmbda
        return X
    def transform(self, X, **transform_params):
        """Apply the fitted per-column lambdas to X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with Box-Cox transformed columns.
        """
        for col in self.lambdas:
            X[col] = stats.boxcox(X[col] + self.adder, self.lambdas[col])
        return X
class InteractionCreator(BaseEstimator, TransformerMixin):
    """Add pairwise interaction columns between two lists of columns.

    :param columns1: first list of columns; each is crossed with every
        column of the second list
    :type columns1: list of strings
    :param columns2: second list of columns
    :type columns2: list of strings
    """
    def __init__(self, columns1, columns2):
        self.columns1 = columns1
        self.columns2 = columns2
    def fit(self, X, y=None, **fit_params):
        """Validate the columns and build the patsy interaction formula.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        requested = set(self.columns1) | set(self.columns2)
        unknown = requested - set(X.columns)
        if unknown:
            raise ValueError("Column lists contains columns not found in input"
                             " data: " + ', '.join(unknown))
        # '0' suppresses the intercept; each pair contributes an 'a:b' term.
        terms = ['0'] + [c1 + ':' + c2
                         for c1 in self.columns1 for c2 in self.columns2]
        self.formula = '+'.join(terms)
        return self
    def transform(self, X, **transform_params):
        """Append the interaction columns to X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with the interaction columns appended.
        """
        interactions = dmatrix(self.formula, data=X, return_type='dataframe')
        return pd.concat([X, interactions], axis=1)
class StandardScaler(ScikitStandardScaler):
    """Standardize features by removing mean and scaling to unit variance.

    DataFrame-aware wrapper over scikit-learn's StandardScaler: only
    ``columns`` are scaled, every other column passes through unchanged, and
    the original column order and row index are preserved.

    :param columns: columns to scale; defaults to all columns seen at fit time
    :type columns: list of strings or None
    """
    def __init__(self, columns=None, **kwargs):
        self.columns = columns
        super().__init__(**kwargs)
    def fit(self, X, y=None, **fit_params):
        """Fit the transformer on X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        # assign columns if not defined at init
        if self.columns is None:
            self.columns = X.columns
        super().fit(X[self.columns])
        return self
    def fit_transform(self, X, y=None, **fit_params):
        """Fit and transform the StandardScaler on X.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with scaled columns, original order preserved.
        """
        X = X.copy()
        # assign columns if not defined at init
        if self.columns is None:
            self.columns = X.columns
        super().fit(X[self.columns])
        # transform proper columns
        X_transform = super().transform(X[self.columns])
        # re-wrap the ndarray scikit-learn returns as a DataFrame, keeping the
        # caller's row index
        X_transform = pd.DataFrame(
            X_transform, columns=self.columns, index=X.index
        )
        # keep track of order and combine transform/non-transform columns
        cols_to_return = X.columns
        non_transformed_cols = [
            col for col in cols_to_return if col not in X_transform.columns
        ]
        X = pd.concat([X_transform, X[non_transformed_cols]], axis=1)
        # put columns back into original order
        X = X[cols_to_return]
        return X
    def transform(self, X, partial_cols=None, **transform_params):
        """Transform X with the standard scaling
        :param X: The input data.
        :type X: pandas DataFrame
        :param partial_cols: when specified, only return these columns
        :type partial_cols: list
        :rtype: A ``DataFrame`` with specified columns.
        """
        X = X.copy()
        # insert dummy columns into df if not provided, so the underlying
        # scaler always sees its full fitted column set; the dummies are
        # dropped again when only partial_cols is returned
        if partial_cols is not None:
            for col in self.columns:
                if col not in X.columns:
                    X[col] = 0
        # remember order of original df
        cols_to_return = X.columns
        # transform columns in self.columns
        X_transform = super().transform(X[self.columns])
        X_transform = pd.DataFrame(
            X_transform, columns=self.columns, index=X.index
        )
        # add columns that weren't defined to be transformed back in
        non_transformed_cols = [
            col for col in cols_to_return if col not in X_transform.columns
        ]
        X = pd.concat([X_transform, X[non_transformed_cols]], axis=1)
        # put columns back into original order
        X = X[cols_to_return]
        # return only specified columns
        if partial_cols is not None:
            X = X[partial_cols]
        return X
    def inverse_transform(self, X, partial_cols=None, **transform_params):
        """Inverse transform X with the standard scaling
        :param X: The input data.
        :type X: pandas DataFrame
        :param partial_cols: when specified, only return these columns
        :type partial_cols: list
        :rtype: A ``DataFrame`` with specified columns.
        """
        X = X.copy()
        # insert dummy columns into df if not provided (mirrors transform)
        if partial_cols is not None:
            for col in self.columns:
                if col not in X.columns:
                    X[col] = 0
        # remember order of original df
        cols_to_return = X.columns
        # transform columns in self.columns
        X_transform = super().inverse_transform(
            X[self.columns]
        )
        X_transform = pd.DataFrame(
            X_transform, columns=self.columns, index=X.index
        )
        # add columns that weren't defined to be transformed back in
        non_transformed_cols = [
            col for col in cols_to_return if col not in X_transform.columns
        ]
        X = pd.concat([X_transform, X[non_transformed_cols]], axis=1)
        # put columns back into original order
        X = X[cols_to_return]
        # return only specified columns
        if partial_cols is not None:
            X = X[partial_cols]
        return X
class ColumnNameCleaner(BaseEstimator, TransformerMixin):
    """Replaces spaces and formula symbols in column names that conflict with
    patsy formula interpretation.
    """
    def fit(self, X, y=None, **fit_params):
        """Compute the cleaned column names from X.

        '+', '*' and '/' are spelled out as words, then any remaining
        character that is not alphanumeric or underscore becomes '_'.

        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        matcher = re.compile(r'[^A-Z0-9_]', flags=re.IGNORECASE)
        # regex=False: '+', '*' and '/' must be treated literally -- as bare
        # patterns they are regex metacharacters and are misinterpreted under
        # pandas' regex=True default.  (Also removed a leftover debug print.)
        self.columns = (X.columns
                        .str.strip()
                        .str.replace('+', '_and_', regex=False)
                        .str.replace('*', '_by_', regex=False)
                        .str.replace('/', '_or_', regex=False)
                        .str.replace(matcher, '_', regex=True))
        return self

    def transform(self, X, **transform_params):
        """Transform X with clean column names for patsy.

        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with cleaned column names.
        """
        # ensure that columns are in same order as in fit
        X = X.copy()
        X.columns = self.columns
        return X
class PolynomialFeatures(BaseEstimator, TransformerMixin):
    """Creates polynomial features from inputs, keeping DataFrame column names.

    :param degree: The degree of the polynomial
    :param interaction_only: if true, only interaction features are produced:
        features that are products of at most degree distinct input features.
    """
    def __init__(self, degree=2, interaction_only=False):
        self.degree = degree
        self.interaction_only = interaction_only
        # Underlying scikit-learn transformer; include_bias=False so no
        # constant column is emitted.
        self.SkPolynomialFeatures = SkPolynomialFeatures(
            degree=self.degree,
            interaction_only=self.interaction_only,
            include_bias=False
        )
    def fit(self, X, y=None, **fit_params):
        """Fit the transformer on X and build the x<i> -> column-name map.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        self.columns = X.columns
        self.SkPolynomialFeatures.fit(X.values)
        # get polynomial feature names (generated as 'x0', 'x1', 'x0 x1', ...)
        self.poly_feat = [
            str(e) for e in self.SkPolynomialFeatures.get_feature_names_out()
            if 'x' in e
        ]
        # for each polynomial feature name (x0, x1, etc)
        # map to df column name
        self.name_dict = OrderedDict()
        for n in np.arange(0, self.SkPolynomialFeatures.n_features_in_):
            self.name_dict[self.poly_feat[n]] = [self.columns[n]]
        # reverse OrderedDict to avoid name issues
        # eg., x1 & x11 confusion in column_name_string.replace():
        # substituting the highest-numbered placeholders first prevents 'x1'
        # from matching inside 'x11'
        self.name_dict = OrderedDict(reversed(list(self.name_dict.items())))
        return self
    def transform(self, X, **transform_params):
        """Generate the polynomial features with readable column names.
        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` of polynomial features named like 'a*b'.
        """
        X = X.copy()[self.columns]
        X_transform = self.SkPolynomialFeatures.transform(X.values)
        # replace poly_feat names (x0, x1, etc.)
        # with actual column names and cleanup
        new_cols = self.poly_feat.copy()
        for poly_feat in self.name_dict.keys():
            for i, col in enumerate(new_cols):
                # scikit-learn separates factors with spaces; render as '*'
                new_cols[i] = (
                    new_cols[i]
                    .replace(' ', '*')
                    .replace(poly_feat, self.name_dict[poly_feat][0])
                )
        # return df with original names used
        X_transform = pd.DataFrame(
            X_transform,
            columns=new_cols,
            index=X.index
        )
        return X_transform
class ContinuousFeatureBinner(BaseEstimator, TransformerMixin):
    """Creates bins for continuous features.

    :param field: the continuous field for which to create bins
    :type field: string
    :param bins: The criteria to bin by.
    :type bins: array-like
    :param right_inclusive: interval should be right-inclusive or not
    :type right_inclusive: bool
    """
    def __init__(self, field, bins, right_inclusive=True):
        self.field = field
        self.bins = bins
        self.right_inclusive = right_inclusive

    def fit(self, X, y=None):
        """Fit the ContinuousFeatureBinner on X.

        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        :raises ValueError: if ``field`` is not a column of X.
        """
        if self.field not in X.columns:
            raise ValueError('field not in X.')
        return self

    def transform(self, X):
        """Transform X on ``field``, adding a new column with ``_GRP``
        appended.

        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` with the extra ``<field>_GRP`` column.
        """
        X = X.copy(deep=True)
        if self.field not in X.columns:
            raise ValueError('Field not found in dataframe.')
        # Compute the derived column name once instead of rebuilding it
        # four times.
        grp_col = str(self.field) + '_GRP'
        # Bin with pandas.cut(), then render interval labels as strings.
        X[grp_col] = pd.cut(
            x=X[self.field],
            bins=self.bins,
            right=self.right_inclusive
        ).astype('str')
        # Values outside every bin stringify to 'nan'; relabel them 'Other'.
        # np.nan (the np.NaN alias was removed in NumPy 2.0).
        X[grp_col] = X[grp_col].replace('nan', np.nan).fillna(value='Other')
        return X
class TypeExtractor(BaseEstimator, TransformerMixin):
    """Returns dataframe with only specified field type.

    :param type: desired type; either 'numeric' or 'categorical'
    :type type: string
    """
    def __init__(self, type):
        self.type = type

    def fit(self, df, **fit_params):
        """Record which columns match the requested kind.

        :param df: The input data.
        :type df: pandas DataFrame
        :rtype: Returns self.
        """
        numeric_cols = df.select_dtypes(include=[np.number]).columns
        if self.type == 'numeric':
            self.selected_fields = list(numeric_cols)
        elif self.type == 'categorical':
            # Categorical == everything that is not numeric.
            self.selected_fields = [
                col for col in df.columns if col not in numeric_cols
            ]
        print('Selected fields: ' + str(self.selected_fields))
        return self

    def transform(self, df, **transform_params):
        """Restrict df to the columns recorded during fit().

        :param df: The input data.
        :type df: pandas DataFrame
        :rtype: A ``DataFrame`` with extracted columns.
        """
        return df[self.selected_fields]
class GenericTransformer(BaseEstimator, TransformerMixin):
    """Apply a user-defined function as a pipeline transformer.

    The callable should be a pure transformation with no fitted state, and
    must be picklable — lambda functions are not supported.

    :param function: arbitrary function to use as a transformer
    :type function: callable
    :param params: dict with function parameter name as key and parameter
        value as value
    :type params: dict
    """
    def __init__(self, function, params=None):
        self.function = function
        self.params = params

    def fit(self, X, y=None, **fit_params):
        """No-op: nothing is learned from the data."""
        return self

    def transform(self, X, **transform_params):
        """Run the stored callable on X, forwarding params when given."""
        if self.params:
            return self.function(X, **self.params)
        return self.function(X)
class MissingColumnsReplacer(BaseEstimator, TransformerMixin):
    """Fill in missing columns to a DataFrame.

    :param cols: The expected list of columns.
    :param value: The value to fill the new columns with by default.
    """
    def __init__(self, cols, value):
        self.cols = cols
        self.value = value

    def fit(self, X, y=None):
        """No-op fit.

        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        return self

    def transform(self, X):
        """Return a copy of X with every expected-but-absent column added.

        :param X: The input data with missing columns to be added.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` containing all expected columns.
        """
        X = X.copy(deep=True)
        # Sorted for a deterministic column order regardless of set iteration.
        missing = sorted(set(self.cols) - set(X.columns))
        for col in missing:
            X[col] = np.nan
        X.loc[:, missing] = X[missing].fillna(self.value)
        return X
class SklearnPandasWrapper(BaseEstimator, TransformerMixin):
    """Wrap a scikit-learn Transformer with a pandas-friendly version that
    keeps columns and row indices in place. Will only work for Transformers
    that do not add or change the order of columns.

    :param transformer: The scikit-learn compatible Transformer object.
    :type transformer: sklearn Transformer
    """
    def __init__(self, transformer):
        self.transformer = transformer

    def fit(self, X, y=None):
        """Fit the wrapped transformer on X and remember X's columns.

        :param X: The input data.
        :type X: pandas DataFrame
        :rtype: Returns self.
        """
        self.columns = X.columns
        # (Removed a leftover debug print of self.columns.)
        self.transformer.fit(X, y)
        return self

    def transform(self, X):
        """Transform values in X, restoring column names and row index.

        :param X: The input data to be transformed.
        :type X: pandas DataFrame
        :rtype: A ``DataFrame`` transformed.
        """
        X_new = self.transformer.transform(X)
        return pd.DataFrame(X_new, columns=self.columns, index=X.index)
|
def validate_instructor_counts(df):
    '''Confirm that a post is either tagged as `instructor` or `student` but not both.
    Args:
        df: Pandas DataFrame
    Returns:
        tuple: Tuple representing the counts of `is_instructor` and `is_student` labeled posts.
    '''
    instructor_counts = df['is_instructor'].value_counts()
    student_counts = df['is_student'].value_counts()
    return instructor_counts, student_counts
def num_nested_dicts(d: dict, column: str):
    '''Yield one item per nested dict (including *d* itself) containing *column*.

    Intended to be used with ``len(list(...))`` to count how many nested
    dictionaries carry the key, e.g.::

        df['num_children'] = df['children'].apply(
            lambda x: len(list(num_nested_dicts(x[0], 'children'))) if len(x) > 0 else 0)

    Args:
        d: Outermost dictionary.
        column: Key upon which we wish to match.

    Fixes vs. the original: the recursive call was missing the ``column``
    argument (TypeError), the key test compared against the literal string
    ``'column'`` instead of the parameter, and the match yielded
    ``d['created']`` (KeyError when absent) instead of the matched value.
    '''
    if column in d:
        yield d[column]
    for key in d:
        # Recurse only into list values stored under the matching key.
        if key == column and isinstance(d[key], list):
            for child in d[key]:
                for match in num_nested_dicts(child, column):
                    yield match
# Restrict `from <module> import *` to the single public name `test`.
# (Fix: removed a stray trailing '|' artifact that made the line a syntax
# error; translated the original Korean comment.)
__all__ = ['test']
# Generated by Django 2.1.5 on 2019-02-08 08:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add borrow/store counter fields to the `booklend` model."""
    dependencies = [
        ('library', '0002_auto_20190208_0227'),
    ]
    operations = [
        # Counter of borrow events; presumably a running total -- confirm
        # against the model's usage.
        migrations.AddField(
            model_name='booklend',
            name='total_borrow',
            field=models.PositiveSmallIntegerField(default=0),
        ),
        # Counter of stored copies; exact semantics not visible here.
        migrations.AddField(
            model_name='booklend',
            name='total_store',
            field=models.PositiveSmallIntegerField(default=0),
        ),
    ]
|
def main():
    """Print each index/value pair of a small sample list.

    Fix: the original used Python 2 ``print i, l[i]`` statement syntax, which
    is a SyntaxError under Python 3; ``enumerate`` replaces the
    ``range(len(...))`` indexing idiom.
    """
    values = [1, 2, 3, 4, 5]
    for index, value in enumerate(values):
        print(index, value)


if __name__ == '__main__':
    main()
|
# Generated by Django 2.2 on 2019-04-16 01:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a `posi` CharField to the `service_data` model.

    NOTE(review): 'posi' presumably abbreviates position/location --
    confirm against the application model before relying on it.
    """
    dependencies = [
        ('assets', '0002_service_data_charge'),
    ]
    operations = [
        migrations.AddField(
            model_name='service_data',
            name='posi',
            field=models.CharField(default='', max_length=50),
        ),
    ]
|
from flask import Flask

app = Flask(__name__)
# Absolute path to the local SQLite database (four slashes: sqlite:/// + /path).
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////LOCAL/Data/github/python/python-flask-rest-api/database.db'
# Fix: the key is SQLALCHEMY_TRACK_MODIFICATIONS (plural). The misspelled
# singular key was silently ignored, so modification tracking stayed enabled.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
|
# Emit a fixed greeting on stdout.
msg = "Hello World. Huzzah!"
print(msg)
|
import random
def OptionZero():
    # NOTE(review): this list *calls* every Option function immediately (it
    # stores return values, not the callables), and OptionTwo through
    # OptionEigth are not defined anywhere in this file -- calling
    # OptionZero therefore raises NameError. The list is also never used.
    randFunc = [OptionOne(), OptionTwo(), OptionThree(), OptionFour(), OptionFive(), OptionSix(), OptionSeven(), OptionEigth()]
    pass
def OptionOne():
    """Announce a randomly chosen problem (1-10) from chapter 3.2.

    Fix: the original ``print(...)`` call was missing its closing
    parenthesis, which made the whole file a SyntaxError.
    """
    chapter = 3.2
    nums = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    randNums = random.choice(nums)
    print('\nDo problem #{} from chapter {}.'.format(randNums, chapter))
# Top-level CLI driver: greet the user, then prompt for a chapter until a
# recognized value (0 or 3.2) is entered.
print('\n\t\t\tWelcome to the "RANDOMIZED NUMBER CALCULATOR"!\n\n')
print('\nIn starting, please select a certin chapther or ENTER \'0\' to randomize a problem within all chapter.\n')
while True:
    # NOTE(review): float() raises ValueError on non-numeric input; there is
    # no guard around this prompt.
    userChoiceI = float(input('\nSo what\'s the chapter?: '))
    if userChoiceI == 0:
        print('\nYouv\'ve selected to randomized a problem within all chapters.\n')
        OptionZero()
        break
    elif userChoiceI == 3.2:
        print('\nYouv\'ve selected to randomized a problem within chapter {}.'.format(userChoiceI))
        OptionOne()
        break
    else:
        # Unrecognized chapter: re-prompt.
        print('\nPlease ENTER a proper chapter! >:|')
|
"""empty message
Revision ID: 9b12acd8f289
Revises:
Create Date: 2021-05-09 21:50:01.208225
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '9b12acd8f289'   # this migration's id
down_revision = None        # None: first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Drop the policy, calendar and finance tables (recreated by downgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('policy')
    op.drop_table('calendar')
    op.drop_table('finance')
    # ### end Alembic commands ###
def downgrade():
    """Recreate the finance, calendar and policy tables dropped by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Financial transactions keyed by a text transaction id.
    op.create_table('finance',
                    sa.Column('finance_transaction_id', sa.TEXT(), autoincrement=False, nullable=False),
                    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('policy_id', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('reason', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('premium', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('ipt', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.PrimaryKeyConstraint('finance_transaction_id', name='finance_pkey')
                    )
    # Date-dimension table keyed by calendar date.
    op.create_table('calendar',
                    sa.Column('date', sa.DATE(), autoincrement=False, nullable=False),
                    sa.Column('year', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('month_number', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('month_name', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('day_of_month', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('day_of_week', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('year_month', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.PrimaryKeyConstraint('date', name='calendar_pkey')
                    )
    # Insurance policies keyed by a text policy id.
    op.create_table('policy',
                    sa.Column('policy_id', sa.TEXT(), autoincrement=False, nullable=False),
                    sa.Column('user_id', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('subscription_id', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('policy_start_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('policy_end_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('underwriter', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.PrimaryKeyConstraint('policy_id', name='policy_pkey')
                    )
    # ### end Alembic commands ###
|
"""
文件操作的权限:
文件操作权限主要有:读数据(read),写数据(write),追加数据(append)
r:读取文件:如果文件存在,则可以读取文件,如果文件不存在,则直接宝座
如果权限只有r 的时候,是只能读,不能写。
r:文件的操作权限是只读
r+:文件的操作权限是可读可写
rb:按照只能的模式打开二进制文件,例如音频,视频,图片等
rb+:安扎可读可写的方式打开二进制数据
w:写入文件:如果文件存在,可以写入数据(但是会将之前文件的内容清空),如果文件不存在
如果权限只有w的时候,是只能写,不能读。
w:文件的操作权限是只写
w+:文件的操作权限是可读可写
wb:按照只写的模式打开二进制文件,例如音频,视频,图片等
wb+:按照可读可写的方式打开二进制数据
a:追加数据: 如果文件存在,则在文件末尾追加数据,如果文件不存在,则创建文件,然后写入数据
a:文件的操作权限是只能追加
a+:可读可写的追加文本数据
ab:按照只写的模式追加二进制文件,例如音频,视频,图片等
ab+:按照可读可写的方式追加二进制数据
"""
#1.写入文件
f = open("D:/test/test.jpg","wb")
f.write()
f.close()
#2.读取文件内容
f = open("D:/test/test.jpg","rb")
text = f.read()
print(text)
f.close()
#3.追加数据
f = open("D:/test/萤草.jpg","ab")
f.write()
f.close()
#4.读取追加过后的数据
f = open("D:/test/test.jpg","rb")
text = f.read()
print(text)
f.close()
|
from flask import render_template,Blueprint,jsonify,request
import json
import datetime
from slugify import slugify
from config import Setup
from templates import mongo
# Single blueprint carrying both the JSON API and the SPA catch-all routes.
api_bp = Blueprint('api',__name__)
@api_bp.route('/api_status')
def api_status():
    """Health-check endpoint: report that the server is up."""
    return jsonify({'status': 'Server Running'})
@api_bp.route('/api/createForm', methods=['POST'])
def create_form():
    """Persist a new form document and return its public URL."""
    data = request.json['data']
    # A microsecond timestamp suffix keeps slugs unique for duplicate titles.
    stamp = datetime.datetime.now().strftime("%d%b%Y%H%M%S%f")
    data['slug'] = slugify(data['title'] + stamp)
    data['url'] = Setup.url + 'form/' + data['slug']
    data['resUrl'] = Setup.url + 'responses/' + data['slug']
    data['responses'] = []
    mongo.db.forms.insert(data)
    # insert() added an '_id' to the dict in place; drop it again.
    data.pop('_id')
    return jsonify(data['url'])
@api_bp.route('/api/getForm', methods=['POST'])
def get_form():
    """Look up a form by slug; flag an error when it does not exist."""
    slug = request.json['formId']
    doc = mongo.db.forms.find_one({'slug': slug})
    if not doc:
        doc = {'error': True}
    else:
        doc.pop('_id', None)
        doc['error'] = False
    return jsonify({'formData': doc})
@api_bp.route('/api/getExplore', methods=['POST'])
def explore():
    """List every stored form with its links for the explore page."""
    summaries = [
        {
            'title': f['title'],
            'link': f['url'],
            'responses': f['resUrl'],
            'slug': f['slug'],
        }
        for f in mongo.db.forms.find({})
    ]
    print(summaries)
    return jsonify({'data': summaries, 'error': False})
@api_bp.route('/api/submitResponse', methods=['POST'])
def submit_response():
    """Append a submitted response to the form identified by its slug."""
    payload = request.json
    forms = mongo.db.forms
    doc = forms.find_one({'slug': payload['formSlug']})
    if not doc:
        return jsonify({'status': 'Submission Failed!'})
    doc['responses'] = doc['responses'] + [payload['formData']]
    # Rewrite the whole document with the extended responses list.
    forms.find_one_and_update({'slug': payload['formSlug']}, {'$set': doc}, upsert=False)
    return jsonify({'status': 'success'})
@api_bp.route('/api/getResponses', methods=['POST'])
def get_response():
    """Fetch a form document (including its responses) by slug."""
    slug = request.json['formId']
    doc = mongo.db.forms.find_one({'slug': slug})
    if not doc:
        doc = {'error': True}
    else:
        doc.pop('_id', None)
        doc['error'] = False
    return jsonify({'formData': doc})
@api_bp.route('/')
def home():
    """Serve the single-page-app shell at the site root."""
    return render_template("index.html")
@api_bp.route('/<path:path>')
def paths(path):
    """Catch-all: every other path also serves the SPA shell."""
    return render_template("index.html")
|
from BusinessLogicLayer.cluster.master import ActionMasterGeneral
class ActionWgCloud(ActionMasterGeneral):
    """Registration action for the wiougong.space site.

    Thin specialization of ActionMasterGeneral: fixes the register URL and
    passes site-specific settings (life_cycle=153; v2ray disabled,
    anti-slider enabled -- semantics defined by the parent class).
    """
    def __init__(self, register_url='https://www.wiougong.space/auth/register', silence=True):
        super(ActionWgCloud, self).__init__(register_url=register_url, silence=silence, life_cycle=153,
                                            hyper_params={'v2ray': False, 'anti_slider': True})
if __name__ == '__main__':
    # action_speed(ActionWgCloud, power=1, silence=True)
    # Run the action directly with silence disabled.
    ActionWgCloud(silence=False).run()
|
import multiprocessing as mp
def washer(dishes, output):
    """Announce each dish being washed, then hand it to the output queue."""
    for item in dishes:
        print('Washing', item, 'dish')
        output.put(item)
def dryer(input):
    """Consume dishes from *input* forever, marking each task done.

    Intended to run as a daemon worker; it never returns.
    """
    while True:
        item = input.get()
        print('Drying', item, 'dish')
        input.task_done()
if __name__ == '__main__':
    dish_queue = mp.JoinableQueue()
    dryer_proc = mp.Process(target=dryer, args=(dish_queue,))
    # Daemonize so the infinite dryer loop dies with the main process.
    dryer_proc.daemon = True
    dryer_proc.start()
    dishes = ['salad', 'bread', 'entree', 'dessert']
    washer(dishes, dish_queue)
    # Blocks until the dryer has task_done()'d every washed dish.
    dish_queue.join()
# An attempt has been made to start a new process before the
# current process has finished its bootstrapping phase.
# This probably means that you are not using fork to start your
# child processes and you have forgotten to use the proper idiom
# in the main module:
#
# if __name__ == '__main__':
# freeze_support()
# ...
#
# The "freeze_support()" line can be omitted if the program
# is not going to be frozen to produce an executable.
# %%
import os
import sys
import pickle
import numpy as np
import multiprocessing
import mne
import sklearn.manifold as manifold
from sklearn import svm
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) # noqa
import deploy
from local_tools import FileLoader, Enhancer
# %%
# Output directory for the per-fold t-SNE/Xdawn results.
results_dir = os.path.join('.', 'tsne_xdawn_x2')
# makedirs(exist_ok=True) replaces the bare try/except-pass, which silently
# swallowed *any* error (e.g. permissions), not just "already exists".
os.makedirs(results_dir, exist_ok=True)
assert(os.path.isdir(results_dir))
def prepare_epochs(epochs,
                   events=['1', '2', '4'],
                   baseline=(None, 0),
                   crop=(0.0, 0.8)):
    """Select event types, re-apply a baseline, and crop an Epochs object.

    NOTE(review): the mutable list default is shared across calls; it is
    never mutated here, so this is safe in practice.
    """
    selected = epochs[events]
    selected.apply_baseline(baseline)
    return selected.crop(crop[0], crop[1])
def relabel(events, sfreq=1200, T=0.5):
    """Re-label 2 -> 4 when a '2' event is near a '1' event.

    Works in place on *events* and also returns it.

    Arguments:
        events {array} -- The events array, [[idx], 0, [label]],
            assume the [idx] column has been sorted.
        sfreq {float} -- The sample frequency.
        T {float} -- Window length in seconds; "near" means within
            sfreq * T samples.

    Returns:
        {array} -- The re-labeled events array (same object).
    """
    window = sfreq * T
    # First collapse every '4' label into '2'.
    events[events[:, -1] == 4, -1] = 2
    # Pointer [j] advances monotonically across all events; it is shared
    # between successive '1' events.
    j = 0
    for anchor in events[events[:, -1] == 1]:
        while True:
            # Past the right edge of the window: move on to the next anchor.
            if events[j, 0] > anchor[0] + window:
                break
            # A '2' close enough to the anchor becomes a '4'.
            near = abs(events[j, 0] - anchor[0]) < window
            if events[j, -1] == 2 and near:
                events[j, -1] = 4
            j += 1
            # Ran off the end of the events array.
            if j == events.shape[0]:
                break
    return events
# with open(os.path.join(results_dir,
# f'{name}.json'), 'wb') as f:
# pickle.dump(predicts, f)
# %%
# Per-subject pipeline: leave-one-session-out cross-validation with Xdawn
# enhancement, then a joint 2-D t-SNE embedding of train+test features,
# pickled one file per (subject, fold).
for idx in range(1, 11):
    # Load epochs
    name = f'MEG_S{idx:02d}'
    loader = FileLoader(name)
    loader.load_epochs(recompute=False)
    print(loader.epochs_list)
    # Cross validation
    num_epochs = len(loader.epochs_list)
    for exclude in range(num_epochs):
        # Start on separate training and testing dataset
        print(f'---- {name}: {exclude} | {num_epochs} ----------------------')
        includes = [e for e in range(
            len(loader.epochs_list)) if not e == exclude]
        excludes = [exclude]
        train_epochs, test_epochs = loader.leave_one_session_out(includes,
                                                                 excludes)
        # Xdawn
        print('Xdawn --------------------------------')
        enhancer = Enhancer(train_epochs=train_epochs,
                            test_epochs=test_epochs)
        # train_epochs, test_epochs = enhancer.fit_apply()
        train_data, test_data = enhancer.fit_transform()
        # Get train/test x/y
        print('Get data -----------------------------')
        train_x = train_data.copy()
        # relabel() folds '4' into '2', then re-marks '2's near a '1' as '4'.
        train_y = relabel(train_epochs.events.copy())[:, -1]
        # Class-3 samples are dropped from both x and y.
        train_x = train_x[train_y != 3]
        train_y = train_y[train_y != 3]
        test_x = test_data.copy()
        test_y = relabel(test_epochs.events.copy())[:, -1]
        test_x = test_x[test_y != 3]
        test_y = test_y[test_y != 3]
        train_s = train_x.shape
        test_s = test_x.shape
        # Keep the first 6 entries of the middle axis (presumably Xdawn
        # components -- confirm with Enhancer) and flatten each trial.
        train_x = train_x[:, :6, :].reshape([train_s[0], 6*train_s[2]])
        test_x = test_x[:, :6, :].reshape([test_s[0], 6*test_s[2]])
        # TSNE
        print('TSNE ---------------------------------')
        tsne = manifold.TSNE(n_components=2)
        # x6 = np.concatenate([train_x, test_x], axis=0)
        # Train and test are embedded together so both share one 2-D space.
        x2 = tsne.fit_transform(np.concatenate([train_x, test_x], axis=0))
        # Save
        print('Save -------------------------------')
        data_name = f'{name}-{exclude}.pkl'
        tmpdata = dict(train_y=train_y,
                       test_y=test_y,
                       x2=x2)
        with open(os.path.join(results_dir, data_name), 'wb') as f:
            print(data_name)
            pickle.dump(tmpdata, f)
# %%
# if False:
# tmpdata = dict(train_y=train_y,
# test_y=test_y,
# x2=x2)
# with open('tmpdata.pkl', 'wb') as f:
# pickle.dump(tmpdata, f)
# with open('tmpdata.pkl', 'rb') as f:
# tmpdata = pickle.load(f)
# x2 = tmpdata['x2']
# train_y = tmpdata['train_y']
# test_y = tmpdata['test_y']
# print(x2.shape, train_y.shape, test_y.shape)
# train_x2 = x2[:len(train_y)]
# test_x2 = x2[len(train_y):]
# print(train_x2.shape, test_x2.shape)
# train_x2 = train_x2[train_y != 3]
# train_y = train_y[train_y != 3]
# test_x2 = test_x2[test_y != 3]
# test_y = test_y[test_y != 3]
# scaler = StandardScaler()
# scaler.fit(train_x2)
# train_x2 = scaler.transform(train_x2)
# test_x2 = scaler.transform(test_x2)
# new_test_x2 = test_x2.copy()
# # train_noise = train_x2[train_y == 3]
# # test_noise = test_x2[test_y == 3]
# # print(train_noise.shape, test_noise.shape)
# # a = np.mean(train_noise, axis=0)
# # b = np.mean(test_noise, axis=0)
# # print(a, b)
# # cos = np.dot(a, b) / np.linalg.norm(a) / np.linalg.norm(b)
# # sin = np.sqrt(1 - cos ** 2)
# # rotate = np.array([[cos, sin], [-sin, cos]])
# # ratio = np.linalg.norm(a) / np.linalg.norm(b)
# # new_test_x2 = np.dot(test_x2, rotate) * ratio
# print(new_test_x2.shape)
# # %%
# fig, axes = plt.subplots(2, 2, figsize=(16, 16))
# for j in [4, 3, 2, 1]:
# print(j)
# axes[0][0].scatter(train_x2[train_y == j, 0],
# train_x2[train_y == j, 1],
# label=j,
# alpha=0.5)
# axes[1][0].scatter(train_x2[train_y == j, 0],
# train_x2[train_y == j, 1],
# label=j,
# alpha=0.3)
# axes[1][0].scatter(new_test_x2[test_y == j, 0],
# new_test_x2[test_y == j, 1],
# label=10+j,
# alpha=1)
# axes[1][1].scatter(new_test_x2[test_y == j, 0],
# new_test_x2[test_y == j, 1],
# label=10+j,
# alpha=0.5)
# axes[0][0].legend()
# axes[1][0].legend()
# axes[1][1].legend()
# # %%
# train_xe = []
# selects = []
# for j in range(len(train_y)):
# if train_y[j] == 1:
# selects.append(j-2)
# selects.append(j-1)
# selects.append(j)
# selects.append(j+1)
# selects.append(j+2)
# d = train_x2[j-3:j+4]
# if not len(d) == 7:
# train_xe.append(np.zeros(14))
# continue
# train_xe.append(np.concatenate(d))
# train_xe = np.array(train_xe)
# _train_xe = train_xe[selects]
# _train_y = train_y[selects]
# _train_y[_train_y != 1] = 0
# print(_train_xe.shape, _train_y.shape)
# pred_y = test_y * 0
# test_xe = []
# for j in range(len(test_y)):
# d = new_test_x2[j-3:j+4]
# if not len(d) == 7:
# test_xe.append(np.zeros(14))
# continue
# test_xe.append(np.concatenate(d))
# _test_xe = np.array(test_xe)
# _test_y = test_y.copy()
# _test_y[_test_y != 1] = 0
# print(_test_xe.shape, test_y.shape)
# clf = make_pipeline(StandardScaler(),
# svm.SVC(gamma='scale',
# kernel='rbf',
# class_weight={0: 1, 1: 1},
# probability=True))
# clf.fit(_train_xe, _train_y)
# _pred_y = clf.predict(_test_xe)
# _prob_y = clf.predict_proba(test_xe)
# fig, ax = plt.subplots(1, 1, figsize=(12, 12))
# ax.plot(_test_y)
# ax.plot(-_pred_y)
# ax.plot(1.5-_prob_y[:, 1])
# print(sklearn.metrics.classification_report(y_pred=_pred_y, y_true=_test_y))
# # %%
# train_xe = []
# for j in range(len(train_y)):
# d = train_x2[j-3:j+4]
# if not len(d) == 7:
# train_xe.append(np.zeros(14))
# continue
# train_xe.append(np.concatenate(d))
# train_xe = np.array(train_xe)
# print(train_xe.shape, train_y.shape)
# pred_y = test_y * 0
# test_xe = []
# for j in range(len(test_y)):
# d = new_test_x2[j-3:j+4]
# if not len(d) == 7:
# test_xe.append(np.zeros(14))
# continue
# test_xe.append(np.concatenate(d))
# test_xe = np.array(test_xe)
# print(test_xe.shape, test_y.shape)
# y_prob = _prob_y[:, 1]
# target_xe_mean = np.mean(train_xe[train_y == 1], axis=0)
# subs = np.array([target_xe_mean-e for e in test_xe])
# # subs = np.dot(subs, np.diag([0.1, 0.1,
# # 0.2, 0.2,
# # 0.5, 0.5,
# # 1, 1,
# # 0.5, 0.5,
# # 0.2, 0.2,
# # 0.1, 0.1]))
# pred_y = np.linalg.norm(subs, axis=1)
# pred_y = 1 / pred_y
# pred_y = pred_y * y_prob
# # pred_y[1:-1] = (pred_y[1:-1] + pred_y[:-2] + pred_y[2:]) / 3
# y_true = test_y.copy()
# y_true[y_true != 1] = 0
# for j in range(len(pred_y)):
# try:
# if not pred_y[j] == np.max(pred_y[j-5:j+6]):
# pred_y[j] = 0
# except ValueError:
# pass
# y_pred = pred_y * 0
# y_pred[pred_y > 0.2] = 1
# fig, axes = plt.subplots(2, 1, figsize=(16, 16))
# axes[0].plot(test_y)
# axes[0].plot(pred_y)
# axes[1].plot(y_true)
# axes[1].plot(-y_pred)
# axes[1].plot(3 + y_true - y_pred)
# print(sklearn.metrics.classification_report(y_pred=y_pred, y_true=y_true))
# # %%
# pos_1 = np.where(y == 1)[0]
# pos_11 = np.where(y == 11)[0]
# pos_12 = np.where(y > 11)[0]
# fig, axes = plt.subplots(2, 2, figsize=(16, 16))
# axes = np.ravel(axes)
# print('1')
# for j, p in enumerate(pos_1):
# sub_y = [e for e in range(p-4, p+5)]
# axes[0].scatter(x2[sub_y, 0], x2[sub_y, 1], alpha=0.2)
# axes[0].scatter(x2[p, 0], x2[p, 1])
# print('11')
# for j, p in enumerate(pos_11):
# sub_y = [e for e in range(p-4, p+5)]
# axes[1].scatter(x2[sub_y, 0], x2[sub_y, 1], alpha=0.2)
# axes[1].scatter(x2[p, 0], x2[p, 1])
# print('12')
# for j, p in enumerate(pos_12):
# sub_y = [e for e in range(p-4, p+5)]
# try:
# axes[2].scatter(x2[sub_y, 0], x2[sub_y, 1], alpha=0.2)
# # axes[2].scatter(x2[p, 0], x2[p, 1])
# except IndexError:
# pass
# for ax in axes:
# ax.set_xlim((-60, 60))
# ax.set_ylim((-60, 60))
|
import math

# Entropy of a 50/50 split: -2 * 0.5 * log2(0.5) = 1 bit.
val = -0.5 * 2 * math.log(0.5, 2)
print(val)
# log2(1) is zero in any base.
print(math.log(1, 2))
# Information gain with a 0.75-weighted child entropy.
information_gain = 1 - .75 * val
print(information_gain)
|
# encoding: utf-8
from marrow.mongo.core import Document, Field
class Example(Document):
    """Minimal marrow.mongo document with two untyped fields for the cases below."""
    name = Field()
    age = Field()
# Positional construction -- presumably maps onto the declared field order
# (name, age); confirm against marrow.mongo's Document semantics.
EXAMPLE = Example("Alice", 27)
# Field comparisons that should evaluate truthy against EXAMPLE. The
# commented operator names below are a checklist of MongoDB query operators
# still to be covered.
TRUTHY_CASES = [
    # Comparison
    # $eq
    (EXAMPLE.name == "Alice"),
    (EXAMPLE.age == 27),
    # $gt
    (EXAMPLE.name > "Aa"),
    (EXAMPLE.age > 18),
    # $gte
    # $lt
    # $lte
    # $ne
    # $in
    # $nin
    # Logical
    # $or
    # $and
    # $not
    # $nor
    # Element
    # $exists
    # $type
    # Evaluation
    # $mod
    # $regex
    # $text
    # $where
    # Geospatial
    # $geoWithin
    # $geoIntersects
    # $near
    # $nearSphere
    # Array
    # $all
    # $elemMatch
    # $size
    # Bitwise
    # $bitsAllSet
    # $bitsAnySet
    # $bitsAllClear
    # $bitsAnyClear
    # Comments
    # $comment
]
# Cases expected to evaluate falsy; none written yet.
FALSY_CASES = [
]
|
import sys
import os
from collections import defaultdict
from text_preprocess import txt_preprocesser
import math
import json
"""KMAMIN 62182275 KRISHAN AMIN"""
class bayesian_classifier:
    """Naive-Bayes-style classifier over song lyrics, predicting the artist."""
    def trainNaiveBayes(self,train_list):
        """Count per-artist document and word statistics.

        :param train_list: list of dicts with 'artist' and 'lyrics' keys
        :return: [class_doc_counts, class_wc, word_counts, class_wtotals, classes]

        NOTE(review): preprocessing mode comes from sys.argv[1]; running
        without a CLI argument raises IndexError.
        """
        preprocesser = txt_preprocesser()
        class_doc_counts = defaultdict(int)               # songs per artist
        class_wc = defaultdict(lambda: defaultdict(int))  # word counts per artist
        word_counts = set()                               # vocabulary (a set, despite the name)
        classes = set()                                   # artist labels
        class_wtotals = defaultdict(int)                  # total tokens per artist
        for element in train_list:
            artist = element['artist']
            classes.add(artist)
            word_list = preprocesser.process(element['lyrics'],sys.argv[1])
            class_doc_counts[artist] += 1
            for word in word_list:
                class_wc[artist][word] += 1
                word_counts.add(word)
                class_wtotals[artist] += 1
        return [class_doc_counts,class_wc,word_counts,class_wtotals,classes]
    def testNaiveBayes(self):
        """Train on trainer.json, score tester.json, print recall@1/@5/@10."""
        preprocesser = txt_preprocesser() # declare preprocessor
        # lcount = 0
        # tcount = 0 used for accuracy readnigs
        # accuracy = 0
        finalcorpus = []
        # Flatten the {artist: [songs]} corpus into [{'artist', 'lyrics'}].
        with open('trainer.json') as corpus:
            corpus = json.loads(corpus.read().encode('latin-1'))
            for artist,songlist in corpus.items():
                for song in songlist:
                    d = {}
                    d['artist'] = artist
                    d['lyrics'] = song['lyrics']
                    finalcorpus.append(d)
        td = self.trainNaiveBayes(finalcorpus) # TRAIN
        cdc = td[0] # docs in true and lie
        cwc = td[1] # word counts in true | lie
        word_counts = td[2] # word counts overall
        cwt = td[3] # total words in true / false
        class_list = td[4]
        class_score = defaultdict(int)
        for el in class_list:
            class_score[el] = 0
        # RETURN ALL NECC COUNTS
        numdocs = sum(val for key,val in cdc.items())
        tester = {}
        with open('tester.json') as file:
            tester = json.loads(file.read().encode('latin-1'))
        totalnum = 0
        totalcorrect = 0
        accuracy = 0
        total5 = 0
        for aartist,songlist in tester.items():
            for song in songlist:
                # Reset all scores for each song.
                for el in class_list:
                    class_score[el] = 0
                wlist = preprocesser.process(song['lyrics'],sys.argv[1])
                for artist,score in class_score.items():
                    for word in wlist:
                        # Add-one-smoothed log word likelihood.
                        score += math.log((1 + cwc[artist][word]) / (cwt[artist]))
                    # NOTE(review): standard Naive Bayes *adds* log(prior);
                    # multiplying the summed log-likelihood by the prior
                    # rescales a negative quantity -- confirm intended.
                    score *= cdc[artist]/numdocs # add the P(lie)
                    class_score[artist] = score
                # Rank artists by score, best first; keep the top 10.
                sorted_results = sorted(class_score.items(), key=lambda kv: kv[1], reverse=True)
                sorted_results = sorted_results[:10]
                sorted_results = [e[0] for e in sorted_results]
                if aartist in sorted_results:
                    totalcorrect += 1
                if aartist == sorted_results[0]:
                    accuracy += 1
                if aartist in sorted_results[:5]:
                    total5 += 1
                totalnum += 1
        print('Recall@1,@5,@10 , total')
        print(accuracy/totalnum)
        print(total5/totalnum)
        print(totalcorrect/totalnum)
        print(totalnum)
def main():
    """Entry point: run the Naive Bayes artist classifier end to end."""
    bayesian_classifier().testNaiveBayes()

if __name__ == "__main__":
    main()
|
from db import dataBase as database
from validator import Validator as validator
validator = validator()
class carmodel:
    """Console helper for browsing and creating rows in dbo.Car_Model.

    Each ``search*`` method takes an open DB-API cursor, prompts the
    operator on stdin where needed, and prints a fixed-width table of the
    matching rows.  Errors are reported to the operator, not raised.
    """

    def __init__(self):
        # In-memory car-model record; not populated by the search methods,
        # kept for interface compatibility.
        self.carmodelid = 0
        self.carmodelname = ""
        self.carmodeltype = ""
        self.carmodelprice = 0
        self.camodelyear = 0  # original (misspelled) attribute name preserved
        self.mfid = 0

    def searchAllCarModel(self, cursor):
        """Print every car model joined with its manufacturer name."""
        try:
            cursor.execute('SELECT cm.car_model_id,cm.car_model_name,m.manufacturer_name,cm.car_model_price,cm.car_model_type,cm.car_model_year FROM dbo.Car_Model cm inner join Manufacturer m on cm.manufacturer_id=m.manufacturer_id')
            dash = '-' * 150
            print(dash)
            print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format("Id", "Car Model Name", "Car Manufacturer", "Price", "Car Type", "Year"))
            print(dash)
            for row in cursor:
                print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format(str(row[0]), row[1], row[2], str(row[3]), str(row[4]), str(row[5])))
        except Exception:
            # Was a bare ``except:``; narrowed so KeyboardInterrupt/SystemExit
            # still propagate.
            print("Something went wrong.!! Contact the administrator.!")

    def searchByName(self, cursor):
        """Prompt for a model name and print models matching it (LIKE)."""
        try:
            name = input("Enter name of model. !")
            args = ['%' + name + '%']
            cursor.execute('SELECT * FROM dbo.Car_Model where car_model_name like ?', args)
            dash = '-' * 150
            data = cursor.fetchall()
            if data:
                print(dash)
                print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}'.format("Id", "Car Model Name", "CarType", "Price", "Year"))
                print(dash)
                for row in data:
                    # Column 3 (manufacturer id) is deliberately skipped.
                    print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}'.format(str(row[0]), row[1], row[2], str(row[4]), str(row[5])))
            else:
                print("No Car model found with that name.!")
        except Exception:
            print("Something went wrong.!! Contact the administrator.!")

    def searchByMfg(self, cursor):
        """Prompt for a manufacturer name and print its models (LIKE)."""
        try:
            args = input("Enter Manufacturer's name.")
            args = ['%' + args + '%']
            cursor.execute(
                'SELECT cm.car_model_id,cm.car_model_name,m.manufacturer_name,cm.car_model_price,cm.car_model_type,cm.car_model_year FROM dbo.Car_Model cm inner join Manufacturer m on cm.manufacturer_id=m.manufacturer_id where m.manufacturer_name like ?', args)
            dash = '-' * 150
            print(dash)
            print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format("Id", "Car Model Name", "Car Manufacturer", "Price", "Car Type", "Year"))
            print(dash)
            for row in cursor:
                print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format(str(row[0]), row[1], row[2], str(row[3]), str(row[4]), str(row[5])))
        except Exception:
            print("Something went wrong.!! Contact the administrator.!")

    def searchByType(self, cursor):
        """Prompt for a car type and print models of that type (LIKE)."""
        try:
            args = input("Enter Car Type.")
            args = ['%' + args + '%']
            cursor.execute(
                'SELECT cm.car_model_id,cm.car_model_name,m.manufacturer_name,cm.car_model_price,cm.car_model_type,cm.car_model_year FROM dbo.Car_Model cm inner join Manufacturer m on cm.manufacturer_id=m.manufacturer_id where cm.car_model_type like ?',
                args)
            dash = '-' * 150
            print(dash)
            print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format("Id", "Car Model Name", "Car Manufacturer", "Price", "Car Type", "Year"))
            print(dash)
            for row in cursor:
                print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format(str(row[0]), row[1], row[2], str(row[3]), str(row[4]), str(row[5])))
        except Exception:
            print("Something went wrong.!! Contact the administrator.!")

    def searchByBudget(self, cursor):
        """Prompt for a budget and print models priced at or under it."""
        try:
            args = int(input("Enter Customer Budget Limit."))
            # BUG FIX: DB-API 2.0 requires the parameters argument to be a
            # sequence; the original passed a bare int.
            cursor.execute(
                'SELECT cm.car_model_id,cm.car_model_name,m.manufacturer_name,cm.car_model_price,cm.car_model_type,cm.car_model_year FROM dbo.Car_Model cm inner join Manufacturer m on cm.manufacturer_id=m.manufacturer_id where cm.car_model_price<= ?',
                [args])
            dash = '-' * 150
            print(dash)
            print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format("Id", "Car Model Name", "Car Manufacturer", "Price", "Car Type", "Year"))
            print(dash)
            for row in cursor:
                print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}{:>30s}'.format(str(row[0]), row[1], row[2], str(row[3]), str(row[4]), str(row[5])))
        except Exception:
            print("Something went wrong.!! Contact the administrator.!")

    def addCarModel(self, cursor):
        """List manufacturers matching a name, then interactively insert a
        new car model (validated prompts) via the project ``database`` helper."""
        try:
            name = input("Enter name of manufacturer. !")
            args = ['%' + name + '%']
            cursor.execute('SELECT * FROM dbo.Manufacturer where manufacturer_name like ?', args)
            dash = '-' * 150
            data = cursor.fetchall()
            if data:
                print(dash)
                print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}'.format("Id", "Name", "Location", "Email", "Phone-number"))
                print(dash)
                for row in data:
                    print('{:<5s}{:>30s}{:>30s}{:>30s}{:>30s}'.format(str(row[0]), row[1], row[2], row[3], row[4]))
                db = database()
                # Locals renamed from ``id``/``type`` to stop shadowing builtins.
                car_id = int(input("Enter car id from "))
                name = input("Enter car model name")
                while not validator.nameValidate(name):
                    name = input("Enter car model name")
                car_type = input("Enter car model type")
                while not validator.nameValidate(car_type):
                    car_type = input("Enter car model type")
                price = input("Enter car price")
                while not validator.numberValidate(price):
                    price = input("Enter car price")
                year = input("Enter car year")
                while not validator.numberValidate(year):
                    year = input("Enter car year")
                print("Enter car specification")
                color = input("Enter car color")
                engine = input("Enter engine number")
                fuel = input("Enter fuel type")
                hp = input("Enter horse power")
                zts = input("Enter zero to sixty")
                capacity = input("Enter seating capacity")
                ab = input("Airbags ? Yes or No")
                # Specification is stored as an XML fragment.
                variant = "<Variant><Color>" + color + "</Color><EngineNo>" + engine + "</EngineNo><Fuel>" + fuel + "</Fuel><Power>" + hp + "</Power><ZeroToSixty>" + zts + "</ZeroToSixty><SeatingCapacity>" + capacity + "</SeatingCapacity><Airbags>" + ab + "</Airbags></Variant>"
                db.addCarModel(car_id, name, car_type, price, year, variant)
                print("Car model entered successfully!")
            else:
                print("No manufacturer found with that name.!")
        except Exception:
            print("Something went wrong.!! Contact the administrator.!")
|
from ED6ScenarioHelper import *
def main():
    """Scenario data for map T3102 (Zeiss, craft-district map).

    Auto-generated from a decompiled Trails in the Sky scena file: declares
    the scenario header, string table, entry point, character sprite chips,
    NPC slots, trigger events, interactable actors and the script-function
    table.  The numeric fields are raw engine data -- do not hand-edit.
    """
    # Zeiss
    CreateScenaFile(
        FileName = 'T3102 ._SN',
        MapName = 'Zeiss',
        Location = 'T3102.x',
        MapIndex = 1,
        MapDefaultBGM = "ed60013",
        Flags = 0,
        EntryFunctionIndex = 0xFFFF,
        Reserved = 0,
        IncludedScenario = [
            '',
            '',
            '',
            '',
            '',
            '',
            '',
            ''
        ],
    )
    # String table: index 8 is the file name, 9-22 are NPC display names,
    # 23 is the map's display name (all runtime strings; left untranslated).
    BuildStringList(
        '@FileName', # 8
        '格斯塔夫维修长', # 9
        '吉拉尔', # 10
        '玛多克工房长', # 11
        '朵洛希', # 12
        '安东尼', # 13
        '凯诺娜上尉', # 14
        '鲁特尔', # 15
        '多杰', # 16
        '巴拉特', # 17
        '船', # 18
        '船影', # 19
        '士兵', # 20
        '士兵', # 21
        '士兵', # 22
        '蔡斯市·工房区', # 23
    )
    # Map entry point; the Unknown_* fields are undocumented engine values.
    DeclEntryPoint(
        Unknown_00 = 0,
        Unknown_04 = 0,
        Unknown_08 = 6000,
        Unknown_0C = 4,
        Unknown_0E = 0,
        Unknown_10 = 0,
        Unknown_14 = 9500,
        Unknown_18 = -10000,
        Unknown_1C = 0,
        Unknown_20 = 0,
        Unknown_24 = 0,
        Unknown_28 = 2800,
        Unknown_2C = 262,
        Unknown_30 = 45,
        Unknown_32 = 0,
        Unknown_34 = 360,
        Unknown_36 = 0,
        Unknown_38 = 0,
        Unknown_3A = 0,
        InitScenaIndex = 0,
        InitFunctionIndex = 0,
        EntryScenaIndex = 0,
        EntryFunctionIndex = 1,
    )
    # Sprite chip archives referenced by the NPC ChipIndex fields below.
    AddCharChip(
        'ED6_DT07/CH01290 ._CH', # 00
        'ED6_DT07/CH02440 ._CH', # 01
        'ED6_DT07/CH02620 ._CH', # 02
        'ED6_DT07/CH02070 ._CH', # 03
        'ED6_DT07/CH01700 ._CH', # 04
        'ED6_DT07/CH02100 ._CH', # 05
        'ED6_DT07/CH01020 ._CH', # 06
        'ED6_DT07/CH01140 ._CH', # 07
        'ED6_DT07/CH01450 ._CH', # 08
        'ED6_DT07/CH01640 ._CH', # 09
    )
    # Matching sprite pattern files, same index order as AddCharChip.
    AddCharChipPat(
        'ED6_DT07/CH01290P._CP', # 00
        'ED6_DT07/CH02440P._CP', # 01
        'ED6_DT07/CH02620P._CP', # 02
        'ED6_DT07/CH02070P._CP', # 03
        'ED6_DT07/CH01700P._CP', # 04
        'ED6_DT07/CH02100P._CP', # 05
        'ED6_DT07/CH01020P._CP', # 06
        'ED6_DT07/CH01140P._CP', # 07
        'ED6_DT07/CH01450P._CP', # 08
        'ED6_DT07/CH01640P._CP', # 09
    )
    # 15 NPC slots (string-table indices 9-23 in order).  Slots declared at
    # the origin with -1 indices are positioned/enabled at runtime by the
    # script functions later in this file.
    DeclNpc(
        X = -37000,
        Z = -3800,
        Y = 145500,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 1,
        ChipIndex = 0x1,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 8,
    )
    DeclNpc(
        X = -20110,
        Z = 8000,
        Y = 121830,
        Direction = 177,
        Unknown2 = 0,
        Unknown3 = 0,
        ChipIndex = 0x0,
        NpcIndex = 0x101,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 10,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 2,
        ChipIndex = 0x2,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 11,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 3,
        ChipIndex = 0x3,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 4,
        ChipIndex = 0x4,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 9,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 5,
        ChipIndex = 0x5,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 6,
        ChipIndex = 0x6,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 12,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 7,
        ChipIndex = 0x7,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 13,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 8,
        ChipIndex = 0x8,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 7,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 0,
        ChipIndex = 0x0,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 0,
        ChipIndex = 0x0,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 9,
        ChipIndex = 0x9,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 9,
        ChipIndex = 0x9,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 9,
        ChipIndex = 0x9,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    DeclNpc(
        X = -18770,
        Z = 8000,
        Y = 89560,
        Direction = 0,
        Unknown2 = 0,
        Unknown3 = 0,
        ChipIndex = 0x0,
        NpcIndex = 0xFF,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )
    # Map trigger regions; Unknown_1C appears to select the scena function
    # index run on trigger -- TODO confirm against the ScpFunction table.
    DeclEvent(
        X = -43700,
        Y = -4000,
        Z = 146000,
        Range = -41600,
        Unknown_10 = 0xFFFFF830,
        Unknown_14 = 0x22A4C,
        Unknown_18 = 0x0,
        Unknown_1C = 15,
    )
    DeclEvent(
        X = -43200,
        Y = -5000,
        Z = 145000,
        Range = -48600,
        Unknown_10 = 0xFFFFF830,
        Unknown_14 = 0x22B78,
        Unknown_18 = 0x0,
        Unknown_1C = 19,
    )
    DeclEvent(
        X = -15210,
        Y = 7000,
        Z = 100600,
        Range = -22980,
        Unknown_10 = 0x2710,
        Unknown_14 = 0x1938E,
        Unknown_18 = 0x0,
        Unknown_1C = 26,
    )
    # Interactable actors (talk points); TalkFunctionIndex refers to the
    # ScpFunction table below.
    DeclActor(
        TriggerX = -19980,
        TriggerZ = 8000,
        TriggerY = 119710,
        TriggerRange = 400,
        ActorX = -20110,
        ActorZ = 9500,
        ActorY = 121830,
        Flags = 0x7E,
        TalkScenaIndex = 0,
        TalkFunctionIndex = 14,
        Unknown_22 = 0,
    )
    DeclActor(
        TriggerX = -41010,
        TriggerZ = 8000,
        TriggerY = 122500,
        TriggerRange = 800,
        ActorX = -41010,
        ActorZ = 10200,
        ActorY = 122500,
        Flags = 0x7C,
        TalkScenaIndex = 0,
        TalkFunctionIndex = 24,
        Unknown_22 = 0,
    )
    DeclActor(
        TriggerX = -38900,
        TriggerZ = 8400,
        TriggerY = 122040,
        TriggerRange = 800,
        ActorX = -38900,
        ActorZ = 9900,
        ActorY = 122040,
        Flags = 0x7C,
        TalkScenaIndex = 0,
        TalkFunctionIndex = 25,
        Unknown_22 = 0,
    )
    # Script-function table: label names carry their byte offsets; order is
    # the function index used by the Decl* records above.
    ScpFunction(
        "Function_0_3A6",          # 00, 0
        "Function_1_661",          # 01, 1
        "Function_2_872",          # 02, 2
        "Function_3_9EF",          # 03, 3
        "Function_4_A13",          # 04, 4
        "Function_5_A37",          # 05, 5
        "Function_6_A5B",          # 06, 6
        "Function_7_A7F",          # 07, 7
        "Function_8_12E1",         # 08, 8
        "Function_9_1AF9",         # 09, 9
        "Function_10_1B32",        # 0A, 10
        "Function_11_40D6",        # 0B, 11
        "Function_12_4FD2",        # 0C, 12
        "Function_13_5356",        # 0D, 13
        "Function_14_543E",        # 0E, 14
        "Function_15_5443",        # 0F, 15
        "Function_16_5E62",        # 10, 16
        "Function_17_6896",        # 11, 17
        "Function_18_6ED9",        # 12, 18
        "Function_19_6F5A",        # 13, 19
        "Function_20_82EE",        # 14, 20
        "Function_21_8311",        # 15, 21
        "Function_22_8334",        # 16, 22
        "Function_23_8357",        # 17, 23
        "Function_24_837A",        # 18, 24
        "Function_25_8435",        # 19, 25
        "Function_26_84C3",        # 1A, 26
    )
def Function_0_3A6(): pass
label("Function_0_3A6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 2)), scpexpr(EXPR_END)), "loc_3BD")
OP_A3(0x3FA)
Event(0, 16)
OP_B1("T3102_1")
label("loc_3BD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3DF")
SetChrPos(0xA, -44860, -4000, 141600, 270)
ClearChrFlags(0xA, 0x80)
label("loc_3DF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 3)), scpexpr(EXPR_END)), "loc_41C")
ClearChrFlags(0xE, 0x80)
SetChrPos(0xE, -40990, 8000, 129460, 12)
OP_43(0xE, 0x0, 0x0, 0x2)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44660, 8000, 129500, 5)
Jump("loc_660")
label("loc_41C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 1)), scpexpr(EXPR_END)), "loc_459")
ClearChrFlags(0xE, 0x80)
SetChrPos(0xE, -40990, 8000, 129460, 12)
OP_43(0xE, 0x0, 0x0, 0x2)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44660, 8000, 129500, 5)
Jump("loc_660")
label("loc_459")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 5)), scpexpr(EXPR_END)), "loc_479")
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -40990, 8000, 122890, 180)
Jump("loc_660")
label("loc_479")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 1)), scpexpr(EXPR_END)), "loc_4E2")
ClearChrFlags(0xE, 0x80)
SetChrPos(0xE, -47500, -4000, 151780, 261)
ClearChrFlags(0xF, 0x80)
SetChrPos(0xF, -47500, -4000, 152840, 261)
ClearChrFlags(0xC, 0x80)
SetChrPos(0xC, -40130, 8000, 125930, 237)
OP_43(0xC, 0x0, 0x0, 0x4)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
Jump("loc_660")
label("loc_4E2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 0)), scpexpr(EXPR_END)), "loc_522")
ClearChrFlags(0x8, 0x80)
SetChrPos(0x8, -44530, -4000, 142000, 176)
SetChrFlags(0x8, 0x10)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44510, -4000, 140610, 21)
SetChrFlags(0x10, 0x10)
Jump("loc_660")
label("loc_522")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA6, 7)), scpexpr(EXPR_END)), "loc_55F")
ClearChrFlags(0x8, 0x80)
SetChrPos(0x8, -58040, 4000, 125930, 187)
OP_43(0x8, 0x0, 0x0, 0x6)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
Jump("loc_660")
label("loc_55F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA5, 0)), scpexpr(EXPR_END)), "loc_569")
Jump("loc_660")
label("loc_569")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 2)), scpexpr(EXPR_END)), "loc_5A6")
ClearChrFlags(0x8, 0x80)
SetChrPos(0x8, -49800, 8000, 117400, 3)
OP_43(0x8, 0x0, 0x0, 0x5)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
Jump("loc_660")
label("loc_5A6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 6)), scpexpr(EXPR_END)), "loc_5C6")
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44440, -4000, 153380, 90)
Jump("loc_660")
label("loc_5C6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 3)), scpexpr(EXPR_END)), "loc_5E6")
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
Jump("loc_660")
label("loc_5E6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 1)), scpexpr(EXPR_END)), "loc_606")
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
Jump("loc_660")
label("loc_606")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 6)), scpexpr(EXPR_END)), "loc_626")
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
Jump("loc_660")
label("loc_626")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_660")
ClearChrFlags(0xC, 0x80)
SetChrPos(0xC, -40130, 8000, 125930, 237)
OP_43(0xC, 0x0, 0x0, 0x4)
ClearChrFlags(0x10, 0x80)
SetChrPos(0x10, -44750, -4000, 146070, 81)
label("loc_660")
Return()
# Function_0_3A6 end
def Function_1_661(): pass
label("Function_1_661")
OP_16(0x2, 0xFA0, 0xFFFD6020, 0x4E20, 0x30053)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_6A3")
OP_B1("T3102_3")
OP_6F(0x0, 1001)
OP_71(0x4, 0x4)
OP_71(0x5, 0x4)
OP_6F(0x3, 100)
Jump("loc_871")
label("loc_6A3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAE, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_70C")
OP_B1("T3102_2")
OP_6F(0x4, 1)
OP_6F(0x3, 200)
OP_71(0x6, 0x4)
OP_6F(0x0, 1001)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x4)
SetChrPos(0x8, -43100, -3800, 144030, 270)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x64, 0x0)
Jump("loc_871")
label("loc_70C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_73C")
OP_B1("T3102_2")
OP_6F(0x0, 250)
OP_71(0x4, 0x4)
OP_71(0x5, 0x4)
OP_6F(0x3, 100)
Jump("loc_871")
label("loc_73C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 2)), scpexpr(EXPR_END)), "loc_767")
OP_B1("T3102_2")
OP_6F(0x0, 250)
OP_71(0x4, 0x4)
OP_71(0x5, 0x4)
OP_6F(0x3, 100)
Jump("loc_871")
label("loc_767")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 6)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_827")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 0)), scpexpr(EXPR_END)), "loc_7BB")
OP_B1("T3102_2")
OP_6F(0x4, 1)
OP_6F(0x3, 200)
OP_71(0x6, 0x4)
OP_6F(0x0, 1001)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x4)
SetChrPos(0x8, -42710, -3800, 144020, 270)
Jump("loc_824")
label("loc_7BB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 7)), scpexpr(EXPR_END)), "loc_803")
OP_B1("T3102_2")
OP_6F(0x4, 1)
OP_6F(0x3, 200)
OP_71(0x6, 0x4)
OP_6F(0x0, 1001)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x4)
SetChrPos(0x8, -36900, -3800, 140550, 90)
Jump("loc_824")
label("loc_803")
OP_B1("T3102_2")
OP_6F(0x0, 1001)
OP_71(0x4, 0x4)
OP_71(0x5, 0x4)
OP_6F(0x3, 100)
label("loc_824")
Jump("loc_871")
label("loc_827")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_850")
OP_B1("T3102_1")
OP_6F(0x4, 1)
OP_6F(0x3, 200)
OP_6F(0x0, 1001)
Jump("loc_871")
label("loc_850")
OP_B1("T3102_2")
OP_6F(0x0, 1001)
OP_71(0x4, 0x4)
OP_71(0x5, 0x4)
OP_6F(0x3, 100)
label("loc_871")
Return()
# Function_1_661 end
def Function_2_872(): pass
label("Function_2_872")
RunExpression(0x1, (scpexpr(EXPR_RAND), scpexpr(EXPR_PUSH_LONG, 0xE), scpexpr(EXPR_IMOD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_897")
OP_99(0xFE, 0x0, 0x7, 0x672)
Jump("loc_9D9")
label("loc_897")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_8B0")
OP_99(0xFE, 0x1, 0x7, 0x640)
Jump("loc_9D9")
label("loc_8B0")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_8C9")
OP_99(0xFE, 0x2, 0x7, 0x60E)
Jump("loc_9D9")
label("loc_8C9")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_8E2")
OP_99(0xFE, 0x3, 0x7, 0x5DC)
Jump("loc_9D9")
label("loc_8E2")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_8FB")
OP_99(0xFE, 0x4, 0x7, 0x5AA)
Jump("loc_9D9")
label("loc_8FB")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_914")
OP_99(0xFE, 0x5, 0x7, 0x578)
Jump("loc_9D9")
label("loc_914")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x6), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_92D")
OP_99(0xFE, 0x6, 0x7, 0x546)
Jump("loc_9D9")
label("loc_92D")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_946")
OP_99(0xFE, 0x0, 0x7, 0x677)
Jump("loc_9D9")
label("loc_946")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_95F")
OP_99(0xFE, 0x1, 0x7, 0x645)
Jump("loc_9D9")
label("loc_95F")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0x9), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_978")
OP_99(0xFE, 0x2, 0x7, 0x613)
Jump("loc_9D9")
label("loc_978")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0xA), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_991")
OP_99(0xFE, 0x3, 0x7, 0x5E1)
Jump("loc_9D9")
label("loc_991")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0xB), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_9AA")
OP_99(0xFE, 0x4, 0x7, 0x5AF)
Jump("loc_9D9")
label("loc_9AA")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0xC), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_9C3")
OP_99(0xFE, 0x5, 0x7, 0x57D)
Jump("loc_9D9")
label("loc_9C3")
Jc((scpexpr(EXPR_GET_RESULT, 0x1), scpexpr(EXPR_PUSH_LONG, 0xD), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_9D9")
OP_99(0xFE, 0x6, 0x7, 0x54B)
label("loc_9D9")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_9EE")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("loc_9D9")
label("loc_9EE")
Return()
# Function_2_872 end
def Function_3_9EF(): pass
label("Function_3_9EF")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_A12")
OP_8D(0xFE, -19390, 119560, -16690, 116060, 3000)
Jump("Function_3_9EF")
label("loc_A12")
Return()
# Function_3_9EF end
def Function_4_A13(): pass
label("Function_4_A13")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_A36")
OP_8D(0xFE, -35820, 123780, -43940, 129220, 3000)
Jump("Function_4_A13")
label("loc_A36")
Return()
# Function_4_A13 end
def Function_5_A37(): pass
label("Function_5_A37")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_A5A")
OP_8D(0xFE, -45240, 117320, -51970, 121500, 2000)
Jump("Function_5_A37")
label("loc_A5A")
Return()
# Function_5_A37 end
def Function_6_A5B(): pass
label("Function_6_A5B")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_A7E")
OP_8D(0xFE, -56420, 122640, -59470, 129340, 2000)
Jump("Function_6_A5B")
label("loc_A7E")
Return()
# Function_6_A5B end
def Function_7_A7F(): pass
label("Function_7_A7F")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 3)), scpexpr(EXPR_END)), "loc_B02")
ChrTalk(
0xFE,
(
"呼……\x01",
"都不通知一下就检查,\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"王国军真是的,\x01",
"实在太乱来了。\x02",
)
)
CloseMessageWindow()
Jump("loc_12DD")
label("loc_B02")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 1)), scpexpr(EXPR_END)), "loc_B6E")
ChrTalk(
0xFE,
(
"一会儿『赛希莉亚号』\x01",
"就要开过来了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"必须马上开始\x01",
"确认下拢岸的状况了。\x02",
)
)
CloseMessageWindow()
Jump("loc_12DD")
label("loc_B6E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 5)), scpexpr(EXPR_END)), "loc_C2E")
ChrTalk(
0xFE,
(
"工房船现在\x01",
"马上就要出航了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"不过,\x01",
"却比预定去要塞的时间提前了很多……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"那边发生了什么事吗?\x02",
)
CloseMessageWindow()
Jump("loc_12DD")
label("loc_C2E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 1)), scpexpr(EXPR_END)), "loc_C7E")
ChrTalk(
0xFE,
(
"好了,\x01",
"这样飞船起航就告一段落了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"总之,\x01",
"趁这段时间整理一下货物吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_12DD")
label("loc_C7E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 0)), scpexpr(EXPR_END)), "loc_D69")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_END)), "loc_D00")
ChrTalk(
0xFE,
(
"话说回来,这种时候\x01",
"真是羡慕雷曼那家伙啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"那家伙兼任驾驶员,\x01",
"飞行前为了调整身体状态,\x01",
"早早地就回家去了。\x02",
)
)
CloseMessageWindow()
Jump("loc_D66")
label("loc_D00")
OP_A2(0x5)
ChrTalk(
0xFE,
"呼,明天还是要去要塞啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"最近的工作\x01",
"好像很多啊。\x02",
)
)
CloseMessageWindow()
ClearChrFlags(0xFE, 0x10)
label("loc_D66")
Jump("loc_12DD")
label("loc_D69")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA6, 7)), scpexpr(EXPR_END)), "loc_E3E")
ChrTalk(
0xFE,
(
"中央工房的事件\x01",
"应该解决了吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"哎?\x01",
"犯人到现在都还没抓到?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"那还真是糟糕啊。\x01",
"下次不会来袭击工房船吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_12DD")
label("loc_E3E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 2)), scpexpr(EXPR_END)), "loc_1086")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_END)), "loc_F35")
ChrTalk(
0xFE,
(
"呼,都是因为那个公爵大人,\x01",
"搞得大家都对王家的印象变差了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"哼,很久以前\x01",
"那种快乐纯粹的女王诞辰庆典\x01",
"是很难再出现了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"该死,那个混账公爵。\x01",
"还我的诞辰庆典来!\x02",
)
)
CloseMessageWindow()
Jump("loc_1083")
label("loc_F35")
OP_A2(0x5)
ChrTalk(
0xFE,
(
"之前的休假\x01",
"我去参观了\x01",
"艾尔·雷登瀑布……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"竟遇到那个叫杜什么的公爵,\x01",
"那个王家的人微服出行。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"而且那个人\x01",
"还蛮横任性得要命。\x01",
"真是倒了大霉了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"唔,大家都没想到\x01",
"王家的人竟会是那个样子。\x01",
"真是失望透了。\x02",
)
)
CloseMessageWindow()
label("loc_1083")
Jump("loc_12DD")
label("loc_1086")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 3)), scpexpr(EXPR_END)), "loc_1137")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_10B6")
ChrTalk(
0xFE,
(
"嗯……\x01",
"差不多该到返航的时候了。\x02",
)
)
CloseMessageWindow()
Jump("loc_1134")
label("loc_10B6")
ChrTalk(
0xFE,
"怎么样?很漂亮吧。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"这可是中央工房引以为傲的\x01",
"『莱普尼兹号』啊。\x02",
)
)
CloseMessageWindow()
label("loc_1134")
Jump("loc_12DD")
label("loc_1137")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 1)), scpexpr(EXPR_END)), "loc_1299")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_END)), "loc_11C3")
ChrTalk(
0xFE,
(
"工房好像还没找出\x01",
"昨天那种现象的原因所在吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"不管怎样,\x01",
"希望不要再发生这种事情。\x02",
)
)
CloseMessageWindow()
Jump("loc_1296")
label("loc_11C3")
OP_A2(0x5)
ChrTalk(
0xFE,
"昨天晚上,导力供应停止了吧?\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"不过还好不是在白天发生,\x01",
"真是万幸呀。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"如果在飞艇上出现这种情况,\x01",
"真不知道会发生什么事。\x02",
)
)
CloseMessageWindow()
label("loc_1296")
Jump("loc_12DD")
label("loc_1299")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 6)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_12DD")
ChrTalk(
0xFE,
"好的,拢岸准备好了。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"接下来,\x01",
"要快点进行出发前的检查了。\x02",
)
)
CloseMessageWindow()
label("loc_12DD")
TalkEnd(0xFE)
Return()
# Function_7_A7F end
def Function_8_12E1(): pass
label("Function_8_12E1")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAE, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_14EB")
EventBegin(0x0)
ChrTalk(
0x8,
"这就出发去雷斯顿要塞吗?\x02",
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_4F(0x28, (scpexpr(EXPR_PUSH_LONG, 0x18), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Menu(
0,
10,
10,
0,
(
"【出发】\x01", # 0
"【整理装备】\x01", # 1
)
)
MenuEnd(0x0)
OP_5F(0x0)
OP_4F(0x28, (scpexpr(EXPR_PUSH_LONG, 0xFFFF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_56(0x0)
FadeToBright(300, 0)
Switch(
(scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_END)),
(0, "loc_137A"),
(1, "loc_14A3"),
(SWITCH_DEFAULT, "loc_14E8"),
)
label("loc_137A")
OP_4A(0xA, 255)
OP_8C(0xA, 315, 400)
ChrTalk(
0x8,
(
"#693F好!\x01",
"那么快上去吧!\x02\x03",
"工房船『莱普尼兹号』,\x01",
"向目的地雷斯顿要塞进发!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#803F#2P各位游击士……\x01",
"博士的事就拜托你们了。\x02\x03",
"#800F还有的是……\x01",
"麻烦你们好好保护提妲。\x02",
)
)
CloseMessageWindow()
def lambda_142B():
TurnDirection(0xFE, 0xA, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_142B)
def lambda_1439():
TurnDirection(0xFE, 0xA, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_1439)
def lambda_1447():
TurnDirection(0xFE, 0xA, 400)
ExitThread()
QueueWorkItem(0x106, 1, lambda_1447)
TurnDirection(0x107, 0xA, 400)
ChrTalk(
0x107,
"#560F工房长……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#006F嗯,都包在我们身上吧!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F那么我们走了。\x02",
)
CloseMessageWindow()
Sleep(100)
Call(0, 17)
Jump("loc_14E8")
label("loc_14A3")
OP_A2(0x572)
ChrTalk(
0x8,
(
"#691F明白了。\x01",
"准备好了就说一声。\x02",
)
)
CloseMessageWindow()
OP_8C(0x8, 270, 400)
OP_4B(0x8, 255)
EventEnd(0x1)
Return()
label("loc_14E8")
Jump("loc_1AF5")
label("loc_14EB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 0)), scpexpr(EXPR_END)), "loc_1582")
ChrTalk(
0xFE,
(
"#690F哦,稍微晚了些真是不好意思。\x02\x03",
"要塞那边又来要求我们出动了。\x01",
"我想今天之内\x01",
"就可以做好准备了。\x02\x03",
"嗯,希望和平时一样\x01",
"不要发生什么意外就行了。\x02",
)
)
CloseMessageWindow()
Jump("loc_1AF5")
label("loc_1582")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 0)), scpexpr(EXPR_END)), "loc_166C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_END)), "loc_15EB")
ChrTalk(
0xFE,
(
"#690F骚乱中没有人员伤亡\x01",
"就是不幸中的大幸了。\x02",
)
)
CloseMessageWindow()
Jump("loc_1669")
label("loc_15EB")
OP_A2(0x4)
ChrTalk(
0xFE,
(
"#690F不过,\x01",
"事情真是糟糕啊。\x02\x03",
"唔,骚乱中没有人员伤亡\x01",
"就是不幸中的大幸了。\x02",
)
)
CloseMessageWindow()
label("loc_1669")
Jump("loc_1AF5")
label("loc_166C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA6, 7)), scpexpr(EXPR_END)), "loc_17FD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_END)), "loc_16D5")
ChrTalk(
0xFE,
(
"#690F骚乱中没有人员伤亡\x01",
"就是不幸中的大幸了。\x02",
)
)
CloseMessageWindow()
Jump("loc_17FA")
label("loc_16D5")
OP_A2(0x4)
TurnDirection(0x8, 0x107, 400)
ChrTalk(
0xFE,
(
"#690F哦,是提妲丫头。\x02\x03",
"事情真是糟糕啊。\x01",
"没有受伤吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#063F…………………………\x02\x03",
"#064F啊……是、是的!\x02",
)
)
CloseMessageWindow()
OP_62(0x8, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
OP_22(0x26, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0xFE,
(
"#692F……发生什么事了?\x01",
"你一直在发呆啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#066F嗯、嗯。\x01",
"没事呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"#691F是吗,没受伤的话,\x01",
"那比什么都好。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
"#063F…………………………\x02",
)
CloseMessageWindow()
label("loc_17FA")
Jump("loc_1AF5")
label("loc_17FD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 2)), scpexpr(EXPR_END)), "loc_1A39")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_END)), "loc_182F")
ChrTalk(
0xFE,
(
"#690F哦,是提妲丫头。\x01",
"多多保重哦。\x02",
)
)
CloseMessageWindow()
Jump("loc_1A36")
label("loc_182F")
OP_A2(0x4)
ChrTalk(
0xFE,
(
"#690F哦,是提妲丫头。\x01",
"又成了拉赛尔老爷子的差使吗。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#060F啊,是呢。\x01",
"要到亚尔摩村去一趟。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"#692F亚尔摩村?\x01",
"喂喂,没问题吗?\x02\x03",
"之前在卡鲁迪亚隧道\x01",
"不是受到袭击了吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#061F嘿嘿,\x01",
"这次有两个游击士做我的护卫啊,\x01",
"所以怎么说都不要紧的啦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"#692F你们好像是上次\x01",
"来拿内燃引擎设备的……\x02\x03",
"#691F哎,你也是游击士啊。\x01",
"我还以为只是个盛气凌人的丫头呢。\x02\x03",
"那么就没什么问题了。\x01",
"路上小心点啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#560F嗯,\x01",
"那么我们就出发了。\x02",
)
)
CloseMessageWindow()
label("loc_1A36")
Jump("loc_1AF5")
label("loc_1A39")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 0)), scpexpr(EXPR_END)), "loc_1AF5")
ChrTalk(
0x8,
(
"#691F话说回来, \x01",
"这也真是个有趣的巧合啊。\x02\x03",
"那东西刚被军方还回来,\x01",
"马上就又被老爷子借走了。\x02\x03",
"那可是一般仓库都没有的\x01",
"十分稀有的物件啊。\x02",
)
)
CloseMessageWindow()
label("loc_1AF5")
TalkEnd(0xFE)
Return()
# Function_8_12E1 end
# Talk handler for a cat NPC: plays a meow SFX and a one-line "meow" bark.
# Which variant runs depends on two scenario flags; if neither flag is set
# the cat says nothing. NOTE: decompiled script — label names encode
# bytecode offsets, so the statement order must not be changed.
def Function_9_1AF9(): pass

label("Function_9_1AF9")

TalkBegin(0xFE)
# Flag 0xAB bit 1 set: first meow variant.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 1)), scpexpr(EXPR_END)), "loc_1B17")
OP_22(0x192, 0x0, 0x64)  # play meow sound effect
ChrTalk(
    0xFE,
    "喵-噢。\x02",
)
CloseMessageWindow()
Jump("loc_1B2E")

label("loc_1B17")

# Otherwise, flag 0xA0 bit 2 set: questioning meow variant.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_1B2E")
OP_22(0x192, 0x0, 0x64)  # play meow sound effect
ChrTalk(
    0xFE,
    "喵~噢?\x02",
)
CloseMessageWindow()

label("loc_1B2E")

TalkEnd(0xFE)
Return()

# Function_9_1AF9 end
# Talk handler for the landing-port receptionist (chr 0x9).  A chain of
# scenario-flag tests selects one of several outcomes: a full cutscene in
# which the party cancels its boarding tickets and a military patrol ship
# arrives (ends with a scene change to T3101), a ticket-purchase event,
# the arrival cutscene of the workshop ship "Leibniz", or short flag-gated
# small-talk lines.  NOTE: decompiled script — labels mirror bytecode
# offsets; statement order and all literals must stay exactly as emitted.
def Function_10_1B32(): pass

label("Function_10_1B32")

TalkBegin(0x9)
# Flags 0xC0.3 set and 0xC0.4 clear: run the ticket-cancel / military-ship event.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 3)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_27E4")
OP_A2(0x604)  # mark this event as seen (sets flag 0xC0.4)
EventBegin(0x0)
Fade(1000)
SetChrPos(0x101, -20510, 8000, 119230, 0)
SetChrPos(0x102, -18980, 8000, 119430, 0)

# Camera pan/zoom runs on chr 0x102's worker threads while OP_6D moves the target.
def lambda_1B73():
    OP_6C(0, 2000)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_1B73)

def lambda_1B83():
    OP_6B(2750, 2000)
    ExitThread()

QueueWorkItem(0x102, 2, lambda_1B83)
OP_6D(-20140, 8000, 120700, 2000)
ChrTalk(
    0x9,
    "啊,是你们啊。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "就像刚才我说的,\x01",
        "飞艇什么时候能来还不知道呢。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "实在抱歉,\x01",
        "你们在游击士协会等一会儿吧。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    (
        "#506F嗯~其实……\x01",
        "我们稍微改变了一下计划。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x102,
    (
        "#010F非常抱歉。\x01",
        "请问搭乘手续能取消吗?\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "这样啊……\x01",
        "唉,也是没办法的事。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "在定期船到来之前,\x01",
        "是不需要支付取消手续费的。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "把刚才的船票\x01",
        "还给我就可以了。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    "#000F嗯,明白了。\x02",
)
CloseMessageWindow()
# System message: the party returns the two tickets (item 0x370 removed below).
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
OP_22(0x11, 0x0, 0x64)
AnonymousTalk(
    (
        scpstr(SCPSTR_CODE_COLOR, 0x0),
        "把两张\x07\x02",
        "船票\x07\x00",
        "还了回去。\x02",
    )
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_3F(0x370, 2)  # remove 2x item 0x370 (the boarding tickets)
Sleep(500)
OP_22(0xE2, 0x0, 0x64)
OP_20(0x3E8)
# Surprise balloons over both party members as the patrol ship is heard.
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
OP_1D(0x56)
ChrTalk(
    0x9,
    (
        "哎呀……\x01",
        "好像是军用警备飞艇来了。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    "来得还真早啊。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x101,
    (
        "#004F那、那么我们赶快……!\x01",
        " \x02",
    )
)
CloseMessageWindow()
# Party turns and runs off toward the exit while the receptionist replies.
OP_8C(0x101, 180, 600)

def lambda_1E64():
    OP_8E(0xFE, 0xFFFFB12C, 0x1F40, 0x192A8, 0x1388, 0x0)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_1E64)
ChrTalk(
    0x102,
    (
        "#010F麻烦您了。\x01",
        "真是非常不好意思。\x02",
    )
)
CloseMessageWindow()
OP_8C(0x102, 180, 600)

def lambda_1EA9():
    OP_8E(0xFE, 0xFFFFB7F8, 0x1F40, 0x192A8, 0x1388, 0x0)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_1EA9)
ChrTalk(
    0x9,
    (
        "没什么。\x01",
        "欢迎两位下次再来乘坐。\x02",
    )
)
CloseMessageWindow()
Sleep(100)
# Set up the military airship (chr 0x11 hull + 0x12 shadow/second part) off-screen.
Fade(1000)
OP_6F(0x0, 1001)
OP_A1(0x11, 0x4)
OP_72(0x4, 0x4)
OP_72(0x4, 0x20)
SetChrPos(0x11, -34000, 17000, 180000, 0)
SetChrFlags(0x11, 0x4)
OP_A1(0x12, 0x5)
OP_72(0x5, 0x4)
OP_72(0xA, 0x4)
SetChrPos(0x12, -34000, -10000, 180000, 0)
SetChrFlags(0x12, 0x4)
OP_6F(0x3, 100)
OP_B0(0x4, 0x1E)
OP_6D(-34000, 17000, 170000, 0)
OP_67(0, 26070, -10000, 0)
OP_6B(3500, 0)
OP_6C(156000, 0)
OP_6E(239, 0)
StopSound(0x186A0, 0x3D090, 0x0)
OP_6F(0x4, 470)
OP_70(0x4, 0x24E)

# Fly-in: both airship parts approach while engine SFX volume ramps up.
def lambda_1FB8():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x29810, 0x1F40, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_1FB8)

def lambda_1FD3():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x29810, 0x1F40, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_1FD3)
OP_22(0x79, 0x1, 0x28)
Sleep(100)
OP_24(0x79, 0x3C)
Sleep(100)
OP_24(0x79, 0x41)
Sleep(100)
OP_24(0x79, 0x46)
Sleep(100)
OP_24(0x79, 0x4B)
Sleep(100)
OP_24(0x79, 0x50)
Sleep(100)
OP_24(0x79, 0x55)
Sleep(100)
OP_24(0x79, 0x5A)
Sleep(100)
OP_24(0x79, 0x5F)
Sleep(100)
OP_24(0x79, 0x64)
WaitChrThread(0x11, 0x1)
OP_66(0x0)
OP_6A(0x11)

# Descent: the ship steps down in altitude with progressively changing turn
# rates to fake a smooth deceleration (each pair = hull 0x11 / part 0x12).
def lambda_204F():
    OP_8C(0xFE, 180, 5)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_204F)

def lambda_205D():
    OP_8C(0xFE, 180, 5)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_205D)

def lambda_206B():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x1D4C, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_206B)

def lambda_2086():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x1D4C, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2086)
Sleep(200)

def lambda_20A6():
    OP_8C(0xFE, 180, 10)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_20A6)

def lambda_20B4():
    OP_8C(0xFE, 180, 10)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_20B4)

def lambda_20C2():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x1B58, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_20C2)

def lambda_20DD():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x1B58, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_20DD)
Sleep(200)

def lambda_20FD():
    OP_8C(0xFE, 180, 30)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_20FD)

def lambda_210B():
    OP_8C(0xFE, 180, 30)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_210B)

def lambda_2119():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x1964, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2119)

def lambda_2134():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x1964, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2134)
Sleep(200)

def lambda_2154():
    OP_8C(0xFE, 180, 50)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_2154)

def lambda_2162():
    OP_8C(0xFE, 180, 50)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_2162)

def lambda_2170():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x1770, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2170)

def lambda_218B():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x1770, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_218B)
Sleep(200)

def lambda_21AB():
    OP_8C(0xFE, 180, 60)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_21AB)

def lambda_21B9():
    OP_8C(0xFE, 180, 60)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_21B9)

def lambda_21C7():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x157C, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_21C7)

def lambda_21E2():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x157C, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_21E2)
Sleep(200)

def lambda_2202():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x1388, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2202)

def lambda_221D():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x1388, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_221D)
Sleep(200)

def lambda_223D():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x1194, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_223D)

def lambda_2258():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x1194, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2258)
Sleep(200)

def lambda_2278():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2278)

def lambda_2293():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2293)
Sleep(200)

def lambda_22B3():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0xDAC, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_22B3)

def lambda_22CE():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0xDAC, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_22CE)
Sleep(200)

def lambda_22EE():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_22EE)

def lambda_2309():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2309)
Sleep(200)

def lambda_2329():
    OP_8F(0xFE, 0xFFFF7B30, 0x4268, 0x26548, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2329)

def lambda_2344():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD8F0, 0x26548, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2344)
Sleep(800)

# Rotor spin-down: turn speed decreases 50 -> 10 in steps before touchdown.
def lambda_2364():
    OP_8C(0xFE, 180, 50)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_2364)

def lambda_2372():
    OP_8C(0xFE, 180, 50)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_2372)
Sleep(100)

def lambda_2385():
    OP_8C(0xFE, 180, 40)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_2385)

def lambda_2393():
    OP_8C(0xFE, 180, 40)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_2393)
Sleep(100)

def lambda_23A6():
    OP_8C(0xFE, 180, 30)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_23A6)

def lambda_23B4():
    OP_8C(0xFE, 180, 30)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_23B4)
Sleep(100)

def lambda_23C7():
    OP_8C(0xFE, 180, 20)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_23C7)

def lambda_23D5():
    OP_8C(0xFE, 180, 20)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_23D5)
Sleep(100)

def lambda_23E8():
    OP_8C(0xFE, 180, 10)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_23E8)

def lambda_23F6():
    OP_8C(0xFE, 180, 10)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_23F6)
OP_22(0xCC, 0x0, 0x64)
OP_6F(0x4, 590)
OP_70(0x4, 0x2B2)
WaitChrThread(0x11, 0x1)

# Final settle onto the landing pad (small vertical adjustments).
def lambda_241C():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD508, 0x23280, 0x514, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_241C)

def lambda_2437():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD8F0, 0x23280, 0x29A, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2437)
Sleep(100)

def lambda_2457():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD508, 0x23280, 0x618, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2457)

def lambda_2472():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD8F0, 0x23280, 0x320, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2472)
Sleep(100)
OP_6A(0x0)
ClearMapFlags(0x1)

# Long camera move while the ship finishes settling.
def lambda_249A():
    OP_67(-48240, 40960, 201970, 7000)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_249A)

def lambda_24B2():
    OP_6E(262, 7000)
    ExitThread()

QueueWorkItem(0x101, 2, lambda_24B2)

def lambda_24C2():
    OP_6D(-32150, -6000, 142270, 7000)
    ExitThread()

QueueWorkItem(0x101, 3, lambda_24C2)

def lambda_24DA():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD508, 0x23280, 0x79E, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_24DA)

def lambda_24F5():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD8F0, 0x23280, 0x3E8, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_24F5)
Sleep(100)

def lambda_2515():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD508, 0x23280, 0xA28, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2515)

def lambda_2530():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD8F0, 0x23280, 0x535, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2530)
Sleep(100)

def lambda_2550():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD508, 0x23280, 0xF3C, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2550)

def lambda_256B():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD8F0, 0x23280, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_256B)
Sleep(100)

def lambda_258B():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD508, 0x23280, 0x1450, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_258B)

def lambda_25A6():
    OP_8F(0xFE, 0xFFFF7CAC, 0xFFFFD8F0, 0x23280, 0x9C4, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_25A6)
Sleep(100)
WaitChrThread(0x11, 0x1)
OP_23(0x79)
OP_22(0xC8, 0x0, 0x64)
OP_7C(0x0, 0xC8, 0xBB8, 0x64)  # screen shake on touchdown
Sleep(600)
OP_22(0x6D, 0x0, 0x64)
OP_6F(0x4, 1)
OP_70(0x4, 0xF)
Sleep(300)
# Cut to the disembark shot: reposition ship and spawn crew walking out.
FadeToDark(1000, 0, -1)
OP_0D()
OP_66(0x1)
OP_44(0x101, 0xFF)
OP_6D(-44580, -3800, 144110, 0)
OP_67(0, 7580, -10000, 0)
OP_6B(3330, 0)
OP_6C(134000, 0)
OP_6E(262, 0)
SetChrPos(0x11, -33620, -11600, 144000, 180)
SetChrPos(0x12, -33620, -10000, 144000, 180)
OP_51(0x15, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x10), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_OR), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x14, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x10), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_OR), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x13, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x10), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_OR), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0xD, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x10), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_OR), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_43(0x15, 0x1, 0x0, 0x17)
OP_43(0x14, 0x1, 0x0, 0x16)
OP_43(0x13, 0x1, 0x0, 0x15)
OP_43(0xD, 0x1, 0x0, 0x14)
FadeToBright(1000, 0)

def lambda_26DB():
    OP_6D(-34960, -3480, 144150, 4000)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_26DB)

def lambda_26F3():
    OP_6B(3330, 4000)
    ExitThread()

QueueWorkItem(0x101, 2, lambda_26F3)
OP_6F(0x3, 100)
OP_70(0x3, 0xC4)
Sleep(500)
OP_22(0x78, 0x0, 0x64)
Sleep(3000)
Sleep(1000)
# Monologue by chr 0xD, then hand off to the next scene (T3101).
ChrTalk(
    0xD,
    (
        "#180F哼哼……\x01",
        "这段时间还真是忙啊。\x02\x03",
        "第一件事……\x01",
        "就是去拜会一下玛多克工房长。\x02\x03",
        "#188F不过,不愧是上校……\x01",
        "连这样的方法也能想得出来。\x02",
    )
)
CloseMessageWindow()
Sleep(100)
SetMapFlags(0x2000000)
OP_A2(0x3FB)
NewScene("ED6_DT01/T3101 ._SN", 100, 0, 0)
IdleLoop()
Jump("loc_40D2")

label("loc_27E4")

# Flag 0xC0.2: tickets already bought — waiting-for-ship reminder.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 2)), scpexpr(EXPR_END)), "loc_286A")
ChrTalk(
    0x9,
    (
        "再等一会儿\x01",
        "『赛希莉亚号』就会来了。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "定期船到了之后,\x01",
        "把这个出示给乘务员就可以了。\x02",
    )
)
CloseMessageWindow()
Jump("loc_40D2")

label("loc_286A")

# Flags 0xC0.1 set / 0xC0.2 clear: ticket-purchase mini-event (grants item 0x370).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_2A57")
OP_A2(0x602)
OP_28(0x54, 0x1, 0x2)  # update quest 0x54 progress
EventBegin(0x0)
OP_69(0x9, 0x3E8)
ChrTalk(
    0x9,
    "啊,你们好。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "我已经从雾香那里听说了。\x01",
        "现在就办理搭乘手续吗?\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    "#006F嗯,麻烦您了。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "那么,请你们在这张单子上\x01",
        "填写姓名和联络方式吧。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x102,
    "#010F好的。\x02",
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
SetMessageWindowPos(72, 320, 56, 3)
SetChrName("")
AnonymousTalk(
    (
        scpstr(SCPSTR_CODE_COLOR, 0x5),
        "艾丝蒂尔和约修亚办理了搭乘手续。\x01",
        " \x02",
    )
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
ChrTalk(
    0x9,
    (
        "好了,\x01",
        "这就是你们的船票。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "定期船到了之后,\x01",
        "向乘务员出示船票就可以了。\x02",
    )
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
OP_22(0x11, 0x0, 0x64)
AnonymousTalk(
    (
        scpstr(SCPSTR_CODE_COLOR, 0x0),
        "得到两张\x07\x02",
        "船票\x07\x00",
        "。\x02",
    )
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_3E(0x370, 2)  # add 2x item 0x370 (the boarding tickets)
EventEnd(0x1)
Jump("loc_40D2")

label("loc_2A57")

# Flags 0xA2.6 set / 0xA2.7 clear: workshop-ship "Leibniz" arrival cutscene.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 6)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_39D6")
EventBegin(0x0)
OP_A2(0x517)
EventBegin(0x0)
Fade(1000)
SetChrPos(0x101, -19410, 8000, 119800, 0)
SetChrPos(0x102, -20670, 8000, 119780, 0)

def lambda_2A97():
    OP_6C(315000, 2000)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_2A97)

def lambda_2AA7():
    OP_6B(3000, 2000)
    ExitThread()

QueueWorkItem(0x102, 2, lambda_2AA7)
OP_69(0x9, 0x7D0)
ChrTalk(
    0x9,
    "#2P哟!客人是来乘坐定期船的吗?\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P很不巧,\x01",
        "上一班定期船刚刚开走……\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    (
        "#006F唔,不是呢。\x01",
        "我们不是来坐定期船的。\x02\x03",
        "我们是有事来找\x01",
        "那位叫格斯塔夫的维修长的。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    "#2P怎么,要找大叔啊。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P不过很遗憾,\x01",
        "大叔他现在不在这里……\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    "#004F哎,出去了吗?\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P嗯,没错。\x01",
        "这两三天他去了雷斯顿要塞。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P好像是突然接到了\x01",
        "那边军用飞艇的维修委托。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    "#505F说到雷斯顿要塞……\x02",
)
CloseMessageWindow()
ChrTalk(
    0x102,
    (
        "#010F是位于瓦雷利亚湖畔的\x01",
        "王国军最大的军事基地。\x02\x03",
        "就在蔡斯地区的北面。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    (
        "#007F唔~这样的话,\x01",
        "看来维修长可没有那么快回来啊。\x02\x03",
        "那博士要的东西该怎么办啊?\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P虽然不知道你们有什么事,\x01",
        "不过我想他差不多也该回来了。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    "#2P刚刚有连络通信过来……\x02",
)
CloseMessageWindow()
OP_22(0xE2, 0x0, 0x64)
Sleep(1000)
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
ChrTalk(
    0x101,
    (
        "#004F咦……\x01",
        "下一班定期船已经来了?\x02",
    )
)
CloseMessageWindow()
OP_8C(0x9, 0, 400)
ChrTalk(
    0x9,
    "啊,就是这个传说中的飞艇。\x02",
)
CloseMessageWindow()
# Spawn the Leibniz (hull 0x11 + part 0x12) approaching from the north.
OP_A1(0x11, 0x4)
OP_72(0x4, 0x4)
OP_72(0x4, 0x20)
SetChrPos(0x11, -34000, 9000, 177000, 180)
SetChrFlags(0x11, 0x4)
OP_A1(0x12, 0x5)
OP_72(0x5, 0x4)
OP_72(0xA, 0x4)
SetChrPos(0x12, -34000, -11150, 177000, 180)
SetChrFlags(0x12, 0x4)
OP_66(0x0)

def lambda_2EBF():
    OP_67(2310, 43070, 99410, 5000)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_2EBF)

def lambda_2ED7():
    OP_6D(-32150, 15520, 142270, 5000)
    ExitThread()

QueueWorkItem(0x101, 2, lambda_2ED7)

def lambda_2EEF():
    OP_6B(900, 5000)
    ExitThread()

QueueWorkItem(0x101, 3, lambda_2EEF)
Sleep(2000)
OP_71(0x4, 0x20)
OP_6F(0x4, 161)
OP_70(0x4, 0x104)

def lambda_2F17():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_2F17)

def lambda_2F32():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x26548, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_2F32)
# Engine SFX fades in as the ship closes.
OP_22(0x79, 0x1, 0x28)
Sleep(100)
OP_24(0x79, 0x3C)
Sleep(100)
OP_24(0x79, 0x41)
Sleep(100)
OP_24(0x79, 0x46)
Sleep(100)
OP_24(0x79, 0x4B)
Sleep(100)
OP_24(0x79, 0x50)
Sleep(100)
OP_24(0x79, 0x55)
Sleep(100)
OP_24(0x79, 0x5A)
Sleep(100)
OP_24(0x79, 0x5F)
Sleep(100)
OP_24(0x79, 0x64)
Sleep(100)
Sleep(2000)
Sleep(100)

# Turn-in maneuver: gradually increasing turn rates while stepping altitude down.
def lambda_2FB2():
    OP_67(2310, 60560, 99410, 8000)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_2FB2)

def lambda_2FCA():
    OP_8C(0xFE, 0, 5)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_2FCA)

def lambda_2FD8():
    OP_8C(0xFE, 0, 5)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_2FD8)
Sleep(100)

def lambda_2FEB():
    OP_8C(0xFE, 0, 8)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_2FEB)

def lambda_2FF9():
    OP_8C(0xFE, 0, 8)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_2FF9)
Sleep(100)

def lambda_300C():
    OP_8C(0xFE, 0, 10)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_300C)

def lambda_301A():
    OP_8C(0xFE, 0, 10)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_301A)
Sleep(100)

def lambda_302D():
    OP_8C(0xFE, 0, 13)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_302D)

def lambda_303B():
    OP_8C(0xFE, 0, 13)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_303B)
Sleep(100)

def lambda_304E():
    OP_8C(0xFE, 0, 15)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_304E)

def lambda_305C():
    OP_8C(0xFE, 0, 15)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_305C)
Sleep(100)

def lambda_306F():
    OP_8C(0xFE, 0, 18)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_306F)

def lambda_307D():
    OP_8C(0xFE, 0, 18)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_307D)

def lambda_308B():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x1388, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_308B)
Sleep(85)

def lambda_30AB():
    OP_8C(0xFE, 0, 20)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_30AB)

def lambda_30B9():
    OP_8C(0xFE, 0, 20)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_30B9)
Sleep(85)

def lambda_30CC():
    OP_8C(0xFE, 0, 23)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_30CC)

def lambda_30DA():
    OP_8C(0xFE, 0, 23)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_30DA)

def lambda_30E8():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_30E8)
Sleep(85)

def lambda_3108():
    OP_8C(0xFE, 0, 25)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_3108)

def lambda_3116():
    OP_8C(0xFE, 0, 25)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_3116)
Sleep(85)

def lambda_3129():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x1194, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3129)

def lambda_3144():
    OP_8C(0xFE, 0, 28)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_3144)

def lambda_3152():
    OP_8C(0xFE, 0, 28)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_3152)
Sleep(85)

def lambda_3165():
    OP_8C(0xFE, 0, 30)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_3165)

def lambda_3173():
    OP_8C(0xFE, 0, 30)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_3173)
Sleep(85)

def lambda_3186():
    OP_8C(0xFE, 0, 33)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_3186)

def lambda_3194():
    OP_8C(0xFE, 0, 33)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_3194)

def lambda_31A2():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_31A2)
Sleep(85)
Sleep(85)

# Final descent of the hull only (part 0x12 already at pad height).
def lambda_31C7():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x9C4, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_31C7)
Sleep(170)

def lambda_31E7():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_31E7)
Sleep(170)

def lambda_3207():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x5DC, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3207)
Sleep(170)

def lambda_3227():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x3E8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3227)
Sleep(170)

def lambda_3247():
    OP_8F(0xFE, 0xFFFF7B30, 0x2328, 0x26548, 0x1F4, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3247)
Sleep(170)
WaitChrThread(0x11, 0x1)
Sleep(1900)
Sleep(200)

# Rotor spin-down before landing.
def lambda_3276():
    OP_8C(0xFE, 0, 25)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_3276)

def lambda_3284():
    OP_8C(0xFE, 0, 25)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_3284)
Sleep(200)

def lambda_3297():
    OP_8C(0xFE, 0, 20)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_3297)

def lambda_32A5():
    OP_8C(0xFE, 0, 20)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_32A5)
Sleep(200)

def lambda_32B8():
    OP_8C(0xFE, 0, 15)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_32B8)

def lambda_32C6():
    OP_8C(0xFE, 0, 15)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_32C6)
Sleep(200)

def lambda_32D9():
    OP_8C(0xFE, 0, 10)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_32D9)

def lambda_32E7():
    OP_8C(0xFE, 0, 10)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_32E7)
Sleep(200)

def lambda_32FA():
    OP_8C(0xFE, 0, 7)
    ExitThread()

QueueWorkItem(0x11, 2, lambda_32FA)

def lambda_3308():
    OP_8C(0xFE, 0, 7)
    ExitThread()

QueueWorkItem(0x12, 2, lambda_3308)
WaitChrThread(0x11, 0x2)
OP_72(0x4, 0x20)
OP_6F(0x4, 261)
OP_70(0x4, 0x19A)

# Vertical bobbing while hovering over the pad, then touchdown.
def lambda_332E():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x1F4, 0x0)
    ExitThread()

QueueWorkItem(0x12, 1, lambda_332E)

def lambda_3349():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x64, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3349)
Sleep(100)

def lambda_3369():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0xC8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3369)
Sleep(100)

def lambda_3389():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x12C, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3389)
Sleep(100)

def lambda_33A9():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x1F4, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_33A9)

def lambda_33C4():
    OP_6D(-32150, 3000, 135270, 6000)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_33C4)
Sleep(100)

def lambda_33E1():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x2BC, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_33E1)
Sleep(100)

def lambda_3401():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x3E8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3401)
Sleep(100)

def lambda_3421():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x4B0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3421)
Sleep(100)

def lambda_3441():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x5DC, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3441)
Sleep(100)

def lambda_3461():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3461)
Sleep(100)

def lambda_3481():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3481)
Sleep(100)

def lambda_34A1():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_34A1)
Sleep(100)

def lambda_34C1():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_34C1)
Sleep(4500)
Sleep(100)

def lambda_34E6():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0xFA0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_34E6)
Sleep(100)

def lambda_3506():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3506)
Sleep(100)

def lambda_3526():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x898, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3526)
Sleep(100)

def lambda_3546():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x5DC, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3546)
Sleep(100)

def lambda_3566():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x4B0, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3566)
Sleep(100)

def lambda_3586():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x3E8, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3586)
Sleep(100)

def lambda_35A6():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x2BC, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_35A6)
Sleep(100)

def lambda_35C6():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x258, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_35C6)
Sleep(100)

def lambda_35E6():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x1F4, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_35E6)
Sleep(100)

def lambda_3606():
    OP_8F(0xFE, 0xFFFF7B30, 0xFFFFD472, 0x24220, 0x190, 0x0)
    ExitThread()

QueueWorkItem(0x11, 1, lambda_3606)
OP_44(0x11, 0x1)
OP_23(0x79)
OP_22(0xC8, 0x0, 0x64)
OP_7C(0x0, 0x64, 0xBB8, 0x64)  # screen shake on touchdown
SetChrPos(0x11, -34000, -11150, 148000, 0)
Sleep(1000)
OP_22(0x76, 0x0, 0x46)
OP_72(0x4, 0x20)
OP_6F(0x4, 60)
OP_70(0x4, 0x1)
Sleep(1100)
OP_6F(0x3, 100)
OP_70(0x3, 0xC8)
Sleep(2500)
OP_44(0x101, 0x1)
# Cut back to the party; Gustav (chr 0x8) is placed near the ship.
Fade(1000)
OP_44(0x8, 0xFF)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x4)
SetChrPos(0x8, -36900, -3800, 140550, 90)
OP_66(0x1)
SetChrPos(0x101, -24600, 8000, 121410, 0)
SetChrPos(0x102, -23560, 8000, 121480, 0)
TurnDirection(0x9, 0x101, 0)
OP_44(0x101, 0xFF)
OP_6D(-23460, 8000, 121550, 0)
OP_67(0, 9450, -10000, 0)
OP_6B(2880, 0)
OP_6C(315000, 0)
OP_6E(262, 0)
OP_71(0x6, 0x4)
OP_0D()
ChrTalk(
    0x101,
    (
        "#004F橙色的定期船……\x02\x03",
        "咦咦。\x01",
        "有那样的定期船吗?\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x102,
    (
        "#010F不……\x01",
        "好像不是定期船。\x02\x03",
        "很多地方的形状都和定期船不同,\x01",
        "而且还带有作业用的扶手。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    "#505F啊,的确……\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P这是中央工房所属的工房船\x01",
        "『莱普尼兹号』。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P虽然和定期船是相同型号,\x01",
        "但追加了各种设备。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P主要是用于\x01",
        "大型设备的维护和制品的搬运。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    (
        "#001F嘿嘿~!\x01",
        "是在天上飞的工房啊。\x02\x03",
        "#006F工房船回来了,\x01",
        "那么维修长应该在飞艇里面吧。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    "#2P是的。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "#2P你们不是有事吗?\x01",
        "快点去找他吧。\x02",
    )
)
CloseMessageWindow()
TurnDirection(0x101, 0x9, 400)
ChrTalk(
    0x101,
    "#006F嗯,好的。\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x9, 400)
ChrTalk(
    0x102,
    "#010F那么我们先告辞了。\x02",
)
CloseMessageWindow()
EventEnd(0x0)
OP_43(0x8, 0x0, 0x0, 0x2)  # kick off Gustav's follow-up behavior thread
Jump("loc_40D2")

label("loc_39D6")

# Flag 0xC0.1: ticket already processed — reminder lines.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 1)), scpexpr(EXPR_END)), "loc_3A46")
ChrTalk(
    0x9,
    (
        "嗯?怎么了?\x01",
        "手续已经办好了哦。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "定期船到了之后,\x01",
        "凭刚才的票就可以乘坐了。\x02",
    )
)
CloseMessageWindow()
Jump("loc_40D2")

label("loc_3A46")

# Flag 0xAB.5: workshop ship on an urgent mission — busy chatter.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 5)), scpexpr(EXPR_END)), "loc_3AC5")
ChrTalk(
    0x9,
    "哟,你们也很忙呀。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "好像工房船\x01",
        "有很紧急的任务要执行。\x01",
        "这边也已经乱成一团了。\x02",
    )
)
CloseMessageWindow()
Jump("loc_40D2")

label("loc_3AC5")

# Flag 0xAB.1: the Cecilia has already departed — idle chatter.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 1)), scpexpr(EXPR_END)), "loc_3B0E")
ChrTalk(
    0x9,
    (
        "『赛希莉亚号』\x01",
        "已经按预定的时间出航了………\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "唔,就趁现在难得的空闲\x01",
        "集中精神看《利贝尔通讯》吧。\x02",
    )
)
CloseMessageWindow()
Jump("loc_40D2")

label("loc_3B0E")

# Flag 0xA6.7: Liberl News small talk; flag 0x0.0 tracks first/repeat visit.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA6, 7)), scpexpr(EXPR_END)), "loc_3CC0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_END)), "loc_3BD5")
ChrTalk(
    0x9,
    "嗯嗯,对了……\x02",
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "说到封面……\x01",
        "最近《利贝尔通讯》上面的照片\x01",
        "都变得好漂亮啊。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "嗯,一想到这个,\x01",
        "就很期待下期的封面啊。\x01",
        "……偷偷告诉你们啊。\x02",
    )
)
CloseMessageWindow()
Jump("loc_3CBD")

label("loc_3BD5")

OP_A2(0x0)  # mark first-visit line as seen
ChrTalk(
    0x9,
    (
        "中央工房的骚动\x01",
        "好像是起严重的事件。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "竟然敢袭击中央工房,\x01",
        "世上还有这样无法无天的家伙啊。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "唉,这样一来\x01",
        "下期《利贝尔通讯》的封面\x01",
        "就会是蔡斯了吧。\x02",
    )
)
CloseMessageWindow()

label("loc_3CBD")

Jump("loc_40D2")

label("loc_3CC0")

# Flag 0xA3.2: mayor-arrest news chatter.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 2)), scpexpr(EXPR_END)), "loc_3DA1")
ChrTalk(
    0x9,
    (
        "那个,你们看过\x01",
        "《利贝尔通讯》最新一期了吗。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "听说卢安的市长\x01",
        "是个无法无天的坏家伙,\x01",
        "已经被逮捕了。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "不过,空贼事件也好,\x01",
        "这个叫戴尔蒙的家伙也好……\x01",
        "最近这个世界真是不太平啊。\x02",
    )
)
CloseMessageWindow()
Jump("loc_40D2")

label("loc_3DA1")

# Flag 0xA2.3: lines depending on whether the Leibniz event has happened yet.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 3)), scpexpr(EXPR_END)), "loc_3EBE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3E18")
ChrTalk(
    0x9,
    (
        "现在西向航线的定期船\x01",
        "正按预定时间出发。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "希望定期船今天也能够\x01",
        "太平顺畅地运行啊。\x02",
    )
)
CloseMessageWindow()
Jump("loc_3EBB")

label("loc_3E18")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 7)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3E9D")
ChrTalk(
    0x9,
    (
        "我们老大应该就在\x01",
        "『莱普尼兹号』里面。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    "赶快去问问他吧。\x02",
)
CloseMessageWindow()
Jump("loc_3EBB")

label("loc_3E9D")

ChrTalk(
    0x9,
    "你们见到维修长了吗?\x02",
)
CloseMessageWindow()

label("loc_3EBB")

Jump("loc_40D2")

label("loc_3EBE")

# Flag 0xA2.1: sky-bandit incident aftermath chatter (first/repeat via 0x0.0).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 1)), scpexpr(EXPR_END)), "loc_4021")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_END)), "loc_3F50")
ChrTalk(
    0x9,
    (
        "听说,最后好像是游击士\x01",
        "解决了这次空贼事件。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "真是的,明明发生了这么严重的事情,\x01",
        "王国军却什么事也做不了。\x02",
    )
)
CloseMessageWindow()
Jump("loc_401E")

label("loc_3F50")

OP_A2(0x0)  # mark first-visit line as seen
ChrTalk(
    0x9,
    (
        "我读过利贝尔通讯了,\x01",
        "前段时间柏斯的空贼骚动\x01",
        "好像闹得很大啊。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "定期船停航了,\x01",
        "对我们接待员来说可真是噩梦啊。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "要把事情向客人解释清楚,\x01",
        "可是一件很难的事情。\x02",
    )
)
CloseMessageWindow()

label("loc_401E")

Jump("loc_40D2")

label("loc_4021")

# Flags 0xA1.6 or 0xA0.2: default boarding announcement.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 6)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_40D2")
ChrTalk(
    0x9,
    (
        "目前,西向航线的『赛希莉亚号』\x01",
        "正停靠在飞艇坪中。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x9,
    (
        "去往格兰赛尔的旅客,\x01",
        "请前往入口处准备登船。\x02",
    )
)
CloseMessageWindow()

label("loc_40D2")

TalkEnd(0x9)
Return()

# Function_10_1B32 end
def Function_11_40D6(): pass
label("Function_11_40D6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAE, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_417A")
TalkBegin(0xA)
ChrTalk(
0xA,
(
"#800F今天乘务员们都很忙,\x01",
"一会儿在飞船里是不能\x01",
"对导力器进行修理维护的。\x02\x03",
"你们最好趁现在到街上\x01",
"把自己的装备整理好。\x02",
)
)
CloseMessageWindow()
TalkEnd(0xA)
Jump("loc_4FD1")
label("loc_417A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAC, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_4E79")
EventBegin(0x0)
Fade(1000)
OP_4A(0xA, 255)
OP_4A(0x8, 255)
SetChrPos(0x101, -46160, -4000, 141480, 90)
SetChrPos(0x106, -44780, -4000, 140260, 0)
SetChrPos(0x107, -45700, -4000, 140390, 45)
SetChrPos(0x102, -45780, -4000, 142250, 135)
TurnDirection(0xA, 0x107, 0)
def lambda_41E6():
OP_6C(45000, 0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_41E6)
OP_6D(-45150, -4000, 141460, 0)
OP_6B(3000, 0)
OP_0D()
ChrTalk(
0xA,
(
"#800F哦哦,正等着你们呢。\x01",
"大家都准备好了吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x106,
"#051F啊啊,随时都能出发。\x02",
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#560F『莱普尼兹号』的准备也完成了吗?\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#801F啊,我们这次运气真好,\x01",
"刚好要塞那边急着要我们发货。\x02\x03",
"正好准备前往雷斯顿要塞。\x01",
" \x02\x03",
"随时都可以出发哦。\x02",
)
)
CloseMessageWindow()
OP_8C(0x101, 135, 400)
Sleep(200)
OP_8C(0x101, 0, 400)
Sleep(200)
TurnDirection(0x101, 0xA, 400)
Sleep(500)
ChrTalk(
0x101,
(
"#505F随时……\x02\x03",
"可是没看到那艘橙色的飞艇啊……\x01",
" \x02",
)
)
CloseMessageWindow()
OP_8C(0x102, 315, 400)
Sleep(500)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
OP_8E(0x102, 0xFFFF467E, 0xFFFFF060, 0x230F0, 0x7D0, 0x0)
ChrTalk(
0x102,
"#010F艾丝蒂尔,看下面。\x02",
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
def lambda_440C():
label("loc_440C")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_440C")
QueueWorkItem2(0xA, 2, lambda_440C)
def lambda_441D():
label("loc_441D")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_441D")
QueueWorkItem2(0x107, 2, lambda_441D)
def lambda_442E():
label("loc_442E")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_442E")
QueueWorkItem2(0x106, 2, lambda_442E)
def lambda_443F():
OP_6D(-48810, -4000, 144860, 2000)
ExitThread()
QueueWorkItem(0x101, 0, lambda_443F)
def lambda_4457():
OP_6C(314000, 4000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4457)
def lambda_4467():
OP_6B(3500, 4000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_4467)
Sleep(3000)
def lambda_447C():
OP_8E(0x101, 0xFFFF4688, 0xFFFFF060, 0x22CE0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 3, lambda_447C)
WaitChrThread(0x101, 0x2)
ChrTalk(
0x101,
(
"#501F#1P啊,在那里啊……\x02\x03",
"那我们也要到下面去吧?\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#061F#1P呵呵,姐姐。\x01",
"我们不用下去啦。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x107, 400)
ChrTalk(
0x101,
"#004F#1P咦……\x02",
)
CloseMessageWindow()
OP_22(0xA7, 0x1, 0x55)
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
OP_8C(0x101, 315, 400)
ChrTalk(
0x101,
"#004F#1P什、什么……!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#014F#2P飞艇的轨道……!\x02",
)
CloseMessageWindow()
ChrTalk(
0x106,
(
"#051F#2P怎么,你们连这都不知道吗?\x02\x03",
"这个城镇的飞艇坪是\x01",
"用超乎常识的方法来建造的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#509F#1P超、超乎常识?\x02",
)
CloseMessageWindow()
OP_24(0xA7, 0x64)
OP_B0(0x0, 0xF)
OP_6F(0x0, 250)
OP_70(0x0, 0x258)
def lambda_4620():
OP_6C(339000, 3000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4620)
OP_6D(-55390, -4000, 147110, 3000)
StopSound(0xC350, 0x3D090, 0xFA0)
Sleep(100)
OP_22(0x9A, 0x0, 0x64)
def lambda_4658():
OP_6B(2200, 3500)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4658)
OP_67(0, 21600, -10000, 3500)
OP_22(0x9A, 0x0, 0x64)
OP_7C(0x0, 0x190, 0xBB8, 0x64)
Sleep(500)
def lambda_4694():
OP_6B(3500, 6200)
ExitThread()
QueueWorkItem(0x101, 2, lambda_4694)
def lambda_46A4():
OP_6C(27000, 6100)
ExitThread()
QueueWorkItem(0x101, 1, lambda_46A4)
OP_6D(-36640, -4000, 148800, 6100)
OP_22(0x9A, 0x0, 0x64)
OP_7C(0x0, 0x190, 0xBB8, 0x64)
Sleep(100)
def lambda_46E0():
label("loc_46E0")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_46E0")
QueueWorkItem2(0x102, 0, lambda_46E0)
def lambda_46F1():
label("loc_46F1")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_46F1")
QueueWorkItem2(0x101, 0, lambda_46F1)
def lambda_4702():
OP_8E(0xFE, 0xFFFF4B60, 0xFFFFF060, 0x226C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 3, lambda_4702)
def lambda_471D():
OP_8E(0xFE, 0xFFFF4B38, 0xFFFFF060, 0x22B28, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 3, lambda_471D)
def lambda_4738():
TurnDirection(0xFE, 0x8, 400)
ExitThread()
QueueWorkItem(0xA, 2, lambda_4738)
def lambda_4746():
TurnDirection(0xFE, 0x8, 400)
ExitThread()
QueueWorkItem(0x107, 2, lambda_4746)
def lambda_4754():
TurnDirection(0xFE, 0x8, 400)
ExitThread()
QueueWorkItem(0x106, 2, lambda_4754)
def lambda_4762():
OP_6B(5500, 10000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_4762)
def lambda_4772():
OP_67(0, 4000, -10000, 11800)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4772)
OP_6C(90000, 9800)
OP_73(0x0)
OP_44(0x101, 0x1)
OP_23(0xA7)
OP_22(0x9A, 0x0, 0x64)
OP_7C(0x0, 0xC8, 0xBB8, 0x64)
Sleep(1000)
Fade(1000)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x64, 0x0)
OP_71(0x6, 0x4)
OP_A1(0x11, 0x4)
OP_72(0x4, 0x4)
OP_6F(0x4, 60)
SetChrPos(0x11, -34000, -11150, 148000, 0)
SetChrFlags(0x11, 0x4)
OP_6B(3500, 0)
OP_67(0, 11000, -10000, 0)
StopSound(0xC350, 0x1FBD0, 0x0)
OP_6D(-45210, -4000, 142090, 0)
OP_6F(0x0, 1001)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_0D()
ChrTalk(
0x101,
(
"#509F怎么说呢……\x01",
"我还以为已经对这种玩意习惯了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#019F哈哈,就是啊。\x01",
"没想到还有这么厉害的设施啊……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#803F顺带说一下,\x01",
"这个飞艇坪的构建理念也是……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#007F知道啦。\x01",
"又是拉赛尔博士的杰作对吧。\x02\x03",
"#008F提妲啊,\x01",
"你的爷爷还真是无所不能呢。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#067F#2P嘿嘿……\x01",
"我也有同感呢。\x02",
)
)
CloseMessageWindow()
SetChrFlags(0x8, 0x4)
ClearChrFlags(0x8, 0x80)
SetChrPos(0x8, -36460, -4000, 144380, 270)
OP_22(0x76, 0x0, 0x64)
OP_6F(0x4, 60)
OP_70(0x4, 0x1)
Sleep(1100)
OP_22(0x78, 0x0, 0x64)
OP_6F(0x3, 100)
OP_70(0x3, 0xC8)
OP_73(0x3)
ChrTalk(
0x8,
"#6P哟,久等了。\x02",
)
CloseMessageWindow()
OP_6D(-40270, -4000, 143040, 1000)
ChrTalk(
0x107,
"#560F啊,维修长叔叔!\x02",
)
CloseMessageWindow()
def lambda_4A15():
OP_8E(0xFE, 0xFFFF57A4, 0xFFFFF128, 0x2329E, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x8, 1, lambda_4A15)
def lambda_4A30():
label("loc_4A30")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_4A30")
QueueWorkItem2(0xA, 2, lambda_4A30)
def lambda_4A41():
OP_6D(-44110, -3800, 143890, 3000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4A41)
Sleep(100)
def lambda_4A5E():
OP_8E(0xFE, 0xFFFF4C0A, 0xFFFFF060, 0x2385C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 3, lambda_4A5E)
Sleep(100)
def lambda_4A7E():
OP_8E(0xFE, 0xFFFF4A8E, 0xFFFFF060, 0x2341A, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 3, lambda_4A7E)
Sleep(100)
def lambda_4A9E():
OP_8E(0xFE, 0xFFFF4AFC, 0xFFFFF060, 0x23082, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x107, 3, lambda_4A9E)
Sleep(100)
def lambda_4ABE():
OP_8E(0xFE, 0xFFFF4B56, 0xFFFFF060, 0x22CFE, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x106, 3, lambda_4ABE)
WaitChrThread(0x102, 0x3)
def lambda_4ADE():
label("loc_4ADE")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_4ADE")
QueueWorkItem2(0x102, 2, lambda_4ADE)
WaitChrThread(0x101, 0x3)
def lambda_4AF4():
label("loc_4AF4")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_4AF4")
QueueWorkItem2(0x101, 2, lambda_4AF4)
WaitChrThread(0x107, 0x3)
def lambda_4B0A():
label("loc_4B0A")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_4B0A")
QueueWorkItem2(0x107, 2, lambda_4B0A)
WaitChrThread(0x106, 0x3)
def lambda_4B20():
label("loc_4B20")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_4B20")
QueueWorkItem2(0x106, 2, lambda_4B20)
WaitChrThread(0x8, 0x1)
ChrTalk(
0x8,
(
"#690F提妲啊,\x01",
"详细情况我已经听工房长说了。\x02\x03",
"没想到老爷子会遇到那样的事。\x01",
" \x02\x03",
"#691F能帮上忙的话,\x01",
"我们全体维修员随时乐意效劳。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x107,
"#061F谢、谢谢!\x02",
)
CloseMessageWindow()
ChrTalk(
0x106,
"#051F抱歉,麻烦你们了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#691F不要客气。\x01",
"因为老爷子也是我的恩人啊。\x02\x03",
"好了。\x01",
"这边准备OK了。\x02\x03",
"这就出发去雷斯顿要塞吗?\x02",
)
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x107, 0xFF)
OP_44(0x106, 0xFF)
OP_44(0xA, 0xFF)
OP_8C(0xA, 315, 400)
FadeToDark(300, 0, 100)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_4F(0x28, (scpexpr(EXPR_PUSH_LONG, 0x18), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Menu(
0,
10,
10,
0,
(
"【出发】\x01", # 0
"【整理装备】\x01", # 1
)
)
MenuEnd(0x0)
OP_5F(0x0)
OP_4F(0x28, (scpexpr(EXPR_PUSH_LONG, 0xFFFF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_56(0x0)
FadeToBright(300, 0)
Switch(
(scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_END)),
(0, "loc_4CF9"),
(1, "loc_4E2D"),
(SWITCH_DEFAULT, "loc_4E76"),
)
label("loc_4CF9")
OP_A2(0x561)
OP_28(0x43, 0x1, 0x400)
OP_28(0x44, 0x4, 0x2)
OP_28(0x44, 0x4, 0x4)
ChrTalk(
0x8,
(
"#693F好!\x01",
"那么快上去吧!\x02\x03",
"工房船『莱普尼兹号』,\x01",
"向目的地雷斯顿要塞进发!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#803F#2P各位游击士……\x01",
"博士的事就拜托你们了。\x02\x03",
"#800F还有的是……\x01",
"麻烦你们好好保护提妲。\x02",
)
)
CloseMessageWindow()
def lambda_4DB5():
TurnDirection(0xFE, 0xA, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4DB5)
def lambda_4DC3():
TurnDirection(0xFE, 0xA, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_4DC3)
def lambda_4DD1():
TurnDirection(0xFE, 0xA, 400)
ExitThread()
QueueWorkItem(0x106, 1, lambda_4DD1)
TurnDirection(0x107, 0xA, 400)
ChrTalk(
0x107,
"#560F工房长……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#006F嗯,都包在我们身上吧!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F那么我们走了。\x02",
)
CloseMessageWindow()
Sleep(100)
Call(0, 17)
Jump("loc_4E76")
label("loc_4E2D")
OP_A2(0x572)
ChrTalk(
0x8,
(
"#691F明白了。\x01",
"准备好了就说一声。\x02",
)
)
CloseMessageWindow()
OP_4B(0xA, 255)
OP_4B(0x8, 255)
OP_43(0xA, 0x0, 0x0, 0x2)
EventEnd(0x0)
Return()
label("loc_4E76")
Jump("loc_4FD1")
label("loc_4E79")
TalkBegin(0xA)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_END)), "loc_4EF6")
ChrTalk(
0xA,
(
"#800F现在这边也正由格斯塔夫维修长\x01",
"指挥进行起飞前的准备呢。\x01",
" \x02\x03",
"如果你们准备好了,\x01",
"就再到这儿来找我吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_4FCE")
label("loc_4EF6")
OP_A2(0x3)
ChrTalk(
0xA,
(
"#800F哦哦,是你们啊。\x01",
"已经准备好了吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F非常抱歉,\x01",
"可能还要再费些时间。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#800F是吗。\x01",
"现在这边也正由格斯塔夫维修长\x01",
"指挥进行起飞前的准备呢。\x02\x03",
"如果你们准备好了,\x01",
"就再到这儿来找我吧。\x02",
)
)
CloseMessageWindow()
label("loc_4FCE")
TalkEnd(0xA)
label("loc_4FD1")
Return()
# Function_11_40D6 end
def Function_12_4FD2(): pass
label("Function_12_4FD2")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 3)), scpexpr(EXPR_END)), "loc_50B1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_END)), "loc_503A")
ChrTalk(
0xFE,
(
"看起来定期船\x01",
"好像会晚点很长时间啊……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我还是先回家\x01",
"再做打算吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_50AE")
label("loc_503A")
OP_A2(0x1)
ChrTalk(
0xFE,
(
"看起来定期船\x01",
"好像会晚点很长时间啊……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"听说军队要盘检,\x01",
"真是麻烦啊。\x02",
)
)
CloseMessageWindow()
label("loc_50AE")
Jump("loc_5352")
label("loc_50B1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 1)), scpexpr(EXPR_END)), "loc_51B5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_END)), "loc_50EA")
ChrTalk(
0xFE,
(
"说起来\x01",
"我是不是来得太早了?\x02",
)
)
CloseMessageWindow()
Jump("loc_51B2")
label("loc_50EA")
OP_A2(0x1)
ChrTalk(
0xFE,
(
"哦~早上好啊。\x01",
"你们也是要去王都吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我呀,\x01",
"是要去飞艇公社办些事情。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"而且还想赶快把工作搞定,\x01",
"顺便参观诞辰庆典……\x02",
)
)
CloseMessageWindow()
label("loc_51B2")
Jump("loc_5352")
label("loc_51B5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 1)), scpexpr(EXPR_END)), "loc_5352")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_END)), "loc_524F")
ChrTalk(
0xFE,
(
"飞艇的技术\x01",
"真是越来越进步了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"乘坐定期船\x01",
"到多杰的故乡\x01",
"也不再是遥远的梦想了。\x02",
)
)
CloseMessageWindow()
Jump("loc_5352")
label("loc_524F")
OP_A2(0x1)
ChrTalk(
0xFE,
(
"今天早上,\x01",
"偶然遇到了来自共和国的\x01",
"导力器商人多杰。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"因为他要在飞艇坪参观,\x01",
"我就热情地为他介绍了一下。\x02",
)
)
CloseMessageWindow()
OP_8C(0xE, 270, 400)
ChrTalk(
0xFE,
(
"看,多杰。\x01",
"那是器材的搬入口,\x01",
"造船设施就在那个地下哦。\x02",
)
)
CloseMessageWindow()
label("loc_5352")
TalkEnd(0xFE)
Return()
# Function_12_4FD2 end
def Function_13_5356(): pass
label("Function_13_5356")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAB, 1)), scpexpr(EXPR_END)), "loc_543A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_END)), "loc_53E2")
ChrTalk(
0xFE,
(
"我将来也要\x01",
"把飞艇作为商品……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"但在那之前,\x01",
"我的城镇得有个飞艇坪才行。\x02",
)
)
CloseMessageWindow()
Jump("loc_543A")
label("loc_53E2")
OP_A2(0x2)
ChrTalk(
0xFE,
(
"呼,\x01",
"现在只能感叹眼前的景象了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"实在是太棒了。\x02",
)
CloseMessageWindow()
label("loc_543A")
TalkEnd(0xFE)
Return()
# Function_13_5356 end
def Function_14_543E(): pass
label("Function_14_543E")
Call(0, 10)
Return()
# Function_14_543E end
def Function_15_5443(): pass
label("Function_15_5443")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA2, 7)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_5E61")
OP_A2(0x518)
OP_28(0x3F, 0x1, 0x800)
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x3F, 0x1, 0x1000)"), scpexpr(EXPR_END)), "loc_5469")
OP_28(0x3F, 0x1, 0x2000)
label("loc_5469")
ClearMapFlags(0x1)
EventBegin(0x0)
SetChrFlags(0x8, 0x4)
ClearChrFlags(0x8, 0x80)
OP_4A(0x8, 255)
TurnDirection(0x8, 0x0, 400)
NpcTalk(
0x8,
"年长的维修员",
(
"#690F唔……\x01",
"哎哟,小姑娘你们是……\x02",
)
)
CloseMessageWindow()
Fade(1000)
SetChrPos(0x0, -44000, -3800, 144340, 135)
SetChrPos(0x1, -44420, -3800, 143430, 90)
OP_6D(-40020, -3800, 143530, 0)
OP_67(0, 6510, -10000, 0)
OP_6B(3620, 0)
OP_6C(124000, 0)
OP_6E(262, 0)
OP_0D()
ChrTalk(
0x101,
"#004F#6P啊……\x02",
)
CloseMessageWindow()
NpcTalk(
0x8,
"年长的维修员",
(
"#690F这个『莱普尼兹号』上\x01",
"堆积着像山一样的各种器材。\x02\x03",
"随便靠近可是很危险的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#505F#6P啊,其实我们想找人……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F#6P我们有点事情,\x01",
"请问格斯塔夫维修长在里面吗……\x02",
)
)
CloseMessageWindow()
NpcTalk(
0x8,
"年长的维修员",
"#692F怎么,找我有事啊?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#501F#6P哎呀……\x01",
"原来大叔就是维修长啊。\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"艾丝蒂尔他们向格斯塔夫维修长\x01",
"说明了拉赛尔博士委托借用内燃引擎一事。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
ChrTalk(
0x8,
(
"#691F怎么。\x01",
"原来是拉赛尔老爷子啊。\x02\x03",
"要内燃引擎设备吗?\x01",
"你们来得还真是时候。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#004F#6P哎?\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"#690F稍等一下……\x02",
)
CloseMessageWindow()
def lambda_57CF():
label("loc_57CF")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_57CF")
QueueWorkItem2(0x101, 0, lambda_57CF)
def lambda_57E0():
label("loc_57E0")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_57E0")
QueueWorkItem2(0x102, 0, lambda_57E0)
def lambda_57F1():
OP_6D(-37020, -3800, 144870, 3000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_57F1)
OP_8E(0x8, 0xFFFF6E74, 0xFFFFF128, 0x23096, 0xBB8, 0x0)
OP_8E(0x8, 0xFFFF85EE, 0xFFFFF128, 0x24432, 0xBB8, 0x0)
Sleep(1000)
ChrTalk(
0x101,
(
"#501F#1P难道就放在工房船上?\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F#1P嗯,好像是这样呢。\x02",
)
CloseMessageWindow()
OP_8E(0x8, 0xFFFF74C8, 0xFFFFF128, 0x23794, 0xBB8, 0x0)
def lambda_58A4():
OP_6D(-42590, -3800, 143930, 3000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_58A4)
OP_8E(0x8, 0xFFFF592A, 0xFFFFF128, 0x23294, 0xBB8, 0x0)
WaitChrThread(0x101, 0x1)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
ChrTalk(
0x8,
(
"#691F来。\x01",
"很重的,小心哦。\x02",
)
)
CloseMessageWindow()
OP_92(0x8, 0x0, 0x2BC, 0x7D0, 0x0)
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
OP_22(0x11, 0x0, 0x64)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x0),
"得到了\x07\x02",
"内燃引擎设备\x07\x00",
"。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_3E(0x368, 1)
OP_8F(0x8, 0xFFFF592A, 0xFFFFF128, 0x23294, 0xBB8, 0x0)
OP_8C(0x8, 270, 400)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_5A7F")
ChrTalk(
0x101,
(
"#004F哇哇……\x01",
"的确是沉甸甸的啊。\x02\x03",
"#006F但也不是重到拿不动。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#692F嘿嘿,\x01",
"想不到小姑娘还挺能干的嘛!\x02\x03",
"#693F我很中意你哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#506F嘿嘿,过奖啦。\x02",
)
CloseMessageWindow()
Jump("loc_5B45")
label("loc_5A7F")
ChrTalk(
0x102,
(
"#010F确实是很重……\x01",
"不过也不至于重到拿不动就是了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#692F哦……\x01",
"小伙子好样的。\x01",
"现在的年轻人还是挺能干的嘛!\x02\x03",
"#693F我挺中意你哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#019F您过奖了。\x02",
)
CloseMessageWindow()
label("loc_5B45")
ChrTalk(
0x8,
(
"#691F话说回来,\x01",
"这也真是个有趣的巧合啊。\x02\x03",
"这东西刚从军方那里还回来,\x01",
"马上就被老爷子借走了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#004F哎……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#014F从军方那里还回来?\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#690F啊,没错啊。\x01",
"那个货样被王国军借走了一阵子。\x02\x03",
"说是什么研究要用。\x02\x03",
"一直用到今天,\x01",
"总算是还给我们工房了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#501F这样啊~\x01",
"的确是有趣的巧合呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#013F……………………………\x02",
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
"#004F约修亚,怎么了?\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x101, 400)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 1)), scpexpr(EXPR_END)), "loc_5D71")
ChrTalk(
0x102,
(
"#015F不……没什么。\x02\x03",
"#010F需要的东西都已经拿到了,\x01",
"我们快点回博士那里吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_5DD5")
label("loc_5D71")
ChrTalk(
0x102,
(
"#015F不……没什么。\x02\x03",
"#010F……剩下的就是汽油了。\x01",
"马上去中央工房的地下工场吧。\x02",
)
)
CloseMessageWindow()
label("loc_5DD5")
ChrTalk(
0x101,
"#006F嗯,知道了。\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x8, 400)
TurnDirection(0x101, 0x8, 400)
ChrTalk(
0x101,
"#001F维修长大叔,谢谢您!\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#691F别客气。\x01",
"顺便帮我向老爷子问好哦。\x02",
)
)
CloseMessageWindow()
OP_4B(0x8, 255)
EventEnd(0x0)
label("loc_5E61")
Return()
# Function_15_5443 end
def Function_16_5E62(): pass
label("Function_16_5E62")
EventBegin(0x0)
AddParty(0x6, 0xFF)
SetChrPos(0x108, -45670, -4000, 146000, 0)
SetChrPos(0x101, -46540, -4000, 147540, 0)
SetChrPos(0x102, -47220, -4000, 146840, 0)
SetChrPos(0x107, -47150, -4000, 145610, 0)
TurnDirection(0x101, 0x108, 0)
TurnDirection(0x102, 0x108, 0)
TurnDirection(0x107, 0x108, 0)
TurnDirection(0x108, 0x102, 0)
OP_6D(-45760, -4000, 146000, 0)
OP_67(0, 9090, -10000, 0)
OP_6B(3000, 0)
OP_6C(111000, 0)
OP_6E(262, 0)
OP_6F(0x4, 1)
OP_6F(0x3, 0)
OP_71(0x6, 0x4)
OP_6F(0x0, 1001)
OP_72(0x4, 0x4)
OP_72(0x5, 0x4)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x64, 0x0)
OP_A2(0x559)
SetChrFlags(0x8, 0x80)
SetChrFlags(0x10, 0x80)
FadeToBright(2000, 0)
OP_0D()
ChrTalk(
0x108,
(
"#070F……真是抱歉,\x01",
"要你们特地来为我送行。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#006F这是当然的啦。\x01",
"昨天真是受到你诸多关照了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F金先生,\x01",
"这就乘定期船直接去王都吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#072F啊啊……\x01",
"我还有要事必须去办。\x02\x03",
"要不是有事在身的话,\x01",
"我一定会留在这里帮你们\x01",
"调查绑架事件的……\x02",
)
)
CloseMessageWindow()
TurnDirection(0x108, 0x107, 400)
Sleep(400)
ChrTalk(
0x108,
"#075F抱歉了,小姑娘。\x02",
)
CloseMessageWindow()
ChrTalk(
0x107,
(
"#560F哪、哪儿的话呢。\x01",
"您已经帮了我们很多忙了……\x02\x03",
"金大哥哥,\x01",
"真的非常感谢您呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哈哈……\x01",
"你能这么说真是太好了。\x02",
)
)
CloseMessageWindow()
OP_22(0xA6, 0x0, 0x64)
Sleep(500)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("女性的声音")
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"开往王都的定期船\x01",
"『赛希莉亚号』马上就要起飞了。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"请尚未登机的乘客尽快登机。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
Sleep(500)
ChrTalk(
0x108,
(
"#070F哎呀……\x01",
"差不多要出发了。\x02",
)
)
CloseMessageWindow()
OP_51(0x108, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x10), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_OR), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
def lambda_6242():
OP_6D(-40990, -4000, 146200, 4000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_6242)
def lambda_625A():
OP_6B(3360, 4000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_625A)
def lambda_626A():
OP_6C(32000, 4000)
ExitThread()
QueueWorkItem(0x102, 2, lambda_626A)
OP_8E(0x108, 0xFFFF4B9C, 0xFFFFF060, 0x23294, 0xBB8, 0x0)
OP_8E(0x108, 0xFFFF5754, 0xFFFFF128, 0x2328A, 0xBB8, 0x0)
SetChrFlags(0x108, 0x4)
def lambda_62A7():
OP_8E(0xFE, 0xFFFF50B0, 0xFFFFF060, 0x23A50, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_62A7)
def lambda_62C2():
OP_8E(0xFE, 0xFFFF4BD8, 0xFFFFF060, 0x23898, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_62C2)
def lambda_62DD():
OP_8E(0xFE, 0xFFFF4BF6, 0xFFFFF060, 0x23532, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x107, 1, lambda_62DD)
OP_8E(0x108, 0xFFFF6FA0, 0xFFFFF128, 0x23294, 0xBB8, 0x0)
TurnDirection(0x108, 0x107, 400)
WaitChrThread(0x101, 0x2)
ChrTalk(
0x108,
(
"#070F那再见了。\x01",
"有机会我们再聚吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#501F啊,嗯!\x02\x03",
"想问一下,\x01",
"金先生会在王国呆多久呢?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#073F明确时间还不知道……\x01",
"我想会呆到女王诞辰庆典吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#001F啊,那样的话,\x01",
"说不定我们还会再见面哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F到时就请多关照了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
"#071F哈哈,彼此彼此。\x02",
)
CloseMessageWindow()
Sleep(100)
OP_22(0x78, 0x0, 0x64)
OP_6F(0x3, 0)
OP_70(0x3, 0x64)
OP_73(0x3)
Fade(2000)
OP_6D(-33750, -7050, 155120, 0)
OP_67(0, -600, -10000, 0)
OP_6B(3170, 0)
OP_6C(163000, 0)
OP_6E(536, 0)
SetChrFlags(0x101, 0x80)
SetChrFlags(0x102, 0x80)
SetChrFlags(0x107, 0x80)
SetChrFlags(0x108, 0x80)
Sleep(1000)
OP_A1(0x11, 0x4)
OP_72(0x4, 0x4)
OP_72(0x4, 0x20)
SetChrPos(0x11, -34000, -11150, 148000, 0)
SetChrFlags(0x11, 0x4)
OP_A1(0x12, 0x5)
OP_72(0x5, 0x4)
OP_72(0xA, 0x4)
SetChrPos(0x12, -34000, -11150, 148000, 0)
SetChrFlags(0x12, 0x4)
OP_22(0x76, 0x0, 0x46)
OP_6F(0x4, 0)
OP_70(0x4, 0x3C)
OP_73(0x4)
Sleep(1000)
OP_22(0x77, 0x1, 0x64)
OP_6F(0x4, 61)
OP_70(0x4, 0xA0)
OP_73(0x4)
OP_71(0x4, 0x20)
OP_6F(0x4, 161)
OP_70(0x4, 0x104)
def lambda_655F():
OP_6D(-33750, -5050, 155120, 6000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_655F)
def lambda_6577():
OP_67(0, 1800, -10000, 6000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_6577)
OP_91(0x11, 0x0, 0x12C, 0x0, 0x12C, 0x0)
OP_91(0x11, 0x0, 0x320, 0x0, 0x1F4, 0x0)
Sleep(2000)
def lambda_65BC():
OP_94(0x1, 0xFE, 0x0, 0x3E8, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_65BC)
OP_94(0x1, 0x11, 0x0, 0x3E8, 0x3E8, 0x0)
def lambda_65E1():
OP_94(0x1, 0xFE, 0x0, 0x4B0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_65E1)
OP_94(0x1, 0x11, 0x0, 0x4B0, 0x7D0, 0x0)
def lambda_6606():
OP_94(0x1, 0xFE, 0x0, 0x578, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6606)
OP_94(0x1, 0x11, 0x0, 0x578, 0xBB8, 0x0)
def lambda_662B():
OP_94(0x1, 0xFE, 0x0, 0x640, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_662B)
OP_94(0x1, 0x11, 0x0, 0x640, 0xFA0, 0x0)
def lambda_6650():
OP_94(0x1, 0xFE, 0x0, 0x708, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6650)
FadeToDark(2000, 0, -1)
OP_94(0x1, 0x11, 0x0, 0x708, 0x1388, 0x0)
def lambda_667F():
OP_94(0x1, 0xFE, 0x0, 0x7D0, 0x1770, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_667F)
OP_94(0x1, 0x11, 0x0, 0x7D0, 0x1770, 0x0)
def lambda_66A4():
OP_94(0x1, 0xFE, 0x0, 0x898, 0x1B58, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_66A4)
OP_94(0x1, 0x11, 0x0, 0x898, 0x1B58, 0x0)
def lambda_66C9():
OP_94(0x1, 0xFE, 0x0, 0xC350, 0x1F40, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_66C9)
def lambda_66DF():
OP_94(0x1, 0xFE, 0x0, 0xC350, 0x1F40, 0x0)
ExitThread()
QueueWorkItem(0x11, 1, lambda_66DF)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x5A, 0x0)
OP_24(0x77, 0x5A)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x50, 0x0)
OP_24(0x77, 0x50)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x46, 0x0)
OP_24(0x77, 0x46)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x3C, 0x0)
OP_24(0x77, 0x3C)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x32, 0x0)
OP_24(0x77, 0x32)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x28, 0x0)
OP_24(0x77, 0x28)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x1E, 0x0)
OP_24(0x77, 0x1E)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x14, 0x0)
OP_24(0x77, 0x14)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0xA, 0x0)
OP_24(0x77, 0xA)
Sleep(100)
SoundDistance(0x75, 0xFFFF7A4A, 0xFFFFF060, 0x23280, 0x2710, 0x9C40, 0x1, 0x0)
OP_23(0x77)
OP_0D()
OP_B8(0x7)
RemoveParty(0x7, 0xFF)
OP_31(0x0, 0xFE, 0x0)
OP_31(0x1, 0xFE, 0x0)
OP_31(0x2, 0xFE, 0x0)
OP_31(0x3, 0xFE, 0x0)
OP_31(0x4, 0xFE, 0x0)
OP_31(0x5, 0xFE, 0x0)
OP_31(0x6, 0xFE, 0x0)
OP_31(0x7, 0xFE, 0x0)
OP_A2(0x3FA)
NewScene("ED6_DT01/T3101 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_16_5E62 end
def Function_17_6896(): pass
label("Function_17_6896")
Sleep(100)
OP_20(0x3E8)
Fade(1000)
SetChrFlags(0x101, 0x80)
SetChrFlags(0x102, 0x80)
SetChrFlags(0x107, 0x80)
SetChrFlags(0x106, 0x80)
SetChrFlags(0x8, 0x80)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x107, 0xFF)
OP_44(0x106, 0xFF)
OP_44(0xA, 0xFF)
OP_8C(0xA, 45, 0)
SetChrPos(0xB, -45980, 0, 129680, 0)
ClearChrFlags(0xB, 0x80)
OP_23(0x75)
OP_22(0x75, 0x1, 0x64)
OP_6D(-36160, -4000, 150300, 0)
OP_67(0, 11000, -10000, 0)
OP_6B(3500, 0)
OP_6C(112000, 0)
OP_6E(415, 0)
OP_0D()
OP_1D(0x57)
OP_22(0x78, 0x0, 0x64)
OP_6F(0x3, 0)
OP_70(0x3, 0x64)
OP_73(0x3)
OP_72(0x4, 0x4)
OP_A1(0x11, 0x4)
OP_72(0x9, 0x4)
OP_72(0x9, 0x20)
SetChrPos(0x11, -34000, -11150, 148000, 0)
SetChrFlags(0x11, 0x4)
OP_A1(0x12, 0x5)
OP_72(0x5, 0x4)
OP_72(0xA, 0x4)
SetChrPos(0x12, -34000, -11150, 148000, 0)
SetChrFlags(0x12, 0x4)
def lambda_69A2():
OP_67(0, 7880, -10000, 12000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_69A2)
OP_22(0x76, 0x0, 0x64)
OP_6F(0x4, 1)
OP_70(0x4, 0x3C)
OP_73(0x4)
OP_22(0x77, 0x1, 0x64)
OP_6F(0x4, 61)
OP_70(0x4, 0xA0)
OP_73(0x4)
OP_71(0x4, 0x20)
OP_6F(0x4, 161)
OP_70(0x4, 0x104)
def lambda_69F9():
OP_6E(465, 8000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_69F9)
def lambda_6A09():
OP_6C(90000, 8000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_6A09)
OP_91(0x11, 0x0, 0x1F4, 0x0, 0x12C, 0x0)
OP_91(0x11, 0x0, 0x3E8, 0x0, 0x258, 0x0)
Sleep(500)
def lambda_6A46():
OP_94(0x1, 0xFE, 0x0, 0x1F4, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6A46)
OP_94(0x1, 0x11, 0x0, 0x1F4, 0x3E8, 0x0)
def lambda_6A6B():
OP_94(0x1, 0xFE, 0x0, 0x258, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6A6B)
OP_94(0x1, 0x11, 0x0, 0x258, 0x7D0, 0x0)
def lambda_6A90():
OP_94(0x1, 0xFE, 0x0, 0x2BC, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6A90)
OP_94(0x1, 0x11, 0x0, 0x2BC, 0xBB8, 0x0)
def lambda_6AB5():
OP_94(0x1, 0xFE, 0x0, 0x320, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6AB5)
OP_94(0x1, 0x11, 0x0, 0x320, 0xFA0, 0x0)
def lambda_6ADA():
OP_94(0x1, 0xFE, 0x0, 0x384, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6ADA)
OP_94(0x1, 0x11, 0x0, 0x384, 0x1388, 0x0)
def lambda_6AFF():
OP_94(0x1, 0xFE, 0x0, 0x3E8, 0x1770, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6AFF)
OP_94(0x1, 0x11, 0x0, 0x3E8, 0x1770, 0x0)
def lambda_6B24():
OP_94(0x1, 0xFE, 0x0, 0x44C, 0x1B58, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6B24)
OP_94(0x1, 0x11, 0x0, 0x44C, 0x1B58, 0x0)
def lambda_6B49():
OP_94(0x1, 0xFE, 0x0, 0xC350, 0x1F40, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_6B49)
def lambda_6B5F():
OP_94(0x1, 0xFE, 0x0, 0xC350, 0x1F40, 0x0)
ExitThread()
QueueWorkItem(0x11, 1, lambda_6B5F)
OP_43(0x11, 0x3, 0x0, 0x12)
OP_8C(0xA, 0, 400)
ChrTalk(
0xA,
"#800F#5P拜托你们了,各位游击士……\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
"#1P等、等一下~!\x02",
)
CloseMessageWindow()
def lambda_6BC7():
OP_6D(-44410, -4000, 143480, 2000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_6BC7)
def lambda_6BDF():
OP_6E(273, 2000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_6BDF)
def lambda_6BEF():
label("loc_6BEF")
TurnDirection(0xFE, 0xB, 400)
OP_48()
Jump("loc_6BEF")
QueueWorkItem2(0xA, 1, lambda_6BEF)
OP_8E(0xB, 0xFFFF4A52, 0xFFFFF060, 0x23348, 0x1388, 0x0)
ChrTalk(
0xB,
(
"#152F#1P哈啊哈啊……\x02\x03",
"啊啊~走掉了~……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#802F#2P啊……\x01",
"这不是朵洛希吗。\x02",
)
)
CloseMessageWindow()
OP_44(0xA, 0xFF)
OP_8E(0xA, 0xFFFF4A2A, 0xFFFFF060, 0x22AC4, 0x7D0, 0x0)
TurnDirection(0xA, 0xB, 0)
TurnDirection(0xB, 0xA, 400)
ChrTalk(
0xB,
(
"#152F#1P啊,工房长先生!\x02\x03",
"刚才飞走的那艘飞艇,\x01",
"是小艾和小约他们坐的吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#802F#2P是啊……\x01",
"你怎么知道的?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#152F#1P我刚刚去了协会,\x01",
"是那里的负责人告诉我的。\x02\x03",
"刚才我和编辑部联络的时候,\x01",
"知道了一件非常非常不得了的大事,\x01",
"不告诉他们不行啊~\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#805F#2P不得了的大事……?\x02\x03",
"#806F唔……以现在的状况,\x01",
"实在想不出还有什么更不得了的事……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#154F#1P这个嘛……\x01",
"虽然是非公开发表的~\x02\x03",
"女王陛下的王室亲卫队\x01",
"好像以谋反的罪名被逮捕了~\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
"#804F#2P#3S什、什么!?\x02",
)
OP_7C(0x0, 0xC8, 0xBB8, 0x64)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
OP_A2(0x3FA)
NewScene("ED6_DT01/E0002 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_17_6896 end
def Function_18_6ED9(): pass
label("Function_18_6ED9")
Sleep(1000)
OP_24(0x77, 0x5F)
OP_24(0x75, 0x5F)
Sleep(200)
OP_24(0x77, 0x5A)
OP_24(0x75, 0x5A)
Sleep(200)
OP_24(0x77, 0x55)
OP_24(0x75, 0x55)
Sleep(200)
OP_24(0x77, 0x50)
OP_24(0x75, 0x50)
Sleep(200)
OP_24(0x77, 0x4B)
OP_24(0x75, 0x4B)
Sleep(200)
OP_24(0x77, 0x46)
OP_24(0x75, 0x46)
Sleep(200)
OP_24(0x77, 0x41)
OP_24(0x75, 0x41)
Sleep(200)
OP_24(0x77, 0x3C)
OP_24(0x75, 0x3C)
Sleep(200)
OP_24(0x77, 0x32)
OP_24(0x75, 0x32)
Sleep(200)
OP_23(0x77)
OP_23(0x75)
Return()
# Function_18_6ED9 end
def Function_19_6F5A(): pass
label("Function_19_6F5A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_82ED")
EventBegin(0x0)
OP_A2(0x603)
OP_28(0x54, 0x1, 0x4)
OP_28(0x54, 0x1, 0x8)
SetChrPos(0xC, -46060, 0, 127820, 0)
ClearChrFlags(0xC, 0x80)
OP_22(0x192, 0x0, 0x64)
ChrTalk(
0xC,
"喵~呵。\x02",
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
def lambda_6FCE():
label("loc_6FCE")
TurnDirection(0xFE, 0xC, 400)
OP_48()
Jump("loc_6FCE")
QueueWorkItem2(0x101, 3, lambda_6FCE)
def lambda_6FDF():
label("loc_6FDF")
TurnDirection(0xFE, 0xC, 400)
OP_48()
Jump("loc_6FDF")
QueueWorkItem2(0x102, 3, lambda_6FDF)
def lambda_6FF0():
OP_6D(-46010, -1000, 131740, 2500)
ExitThread()
QueueWorkItem(0x0, 1, lambda_6FF0)
def lambda_7008():
OP_67(0, 7390, -10000, 4000)
ExitThread()
QueueWorkItem(0x0, 2, lambda_7008)
def lambda_7020():
OP_6B(3700, 4000)
ExitThread()
QueueWorkItem(0x1, 1, lambda_7020)
def lambda_7030():
OP_6C(158000, 4000)
ExitThread()
QueueWorkItem(0x1, 2, lambda_7030)
Sleep(3000)
SetChrPos(0x101, -45400, -4000, 140210, 0)
SetChrPos(0x102, -46640, -4000, 140440, 0)
def lambda_7067():
OP_8E(0xFE, 0xFFFF4DA4, 0xFFFFF060, 0x21D2C, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0xC, 2, lambda_7067)
def lambda_7082():
OP_6D(-45610, -4000, 139000, 3000)
ExitThread()
QueueWorkItem(0x0, 1, lambda_7082)
Sleep(3000)
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x2C, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_7234")
ChrTalk(
0x101,
"#004F啊,安东尼!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F哟,昨天辛苦你了啊。\x02",
)
CloseMessageWindow()
OP_22(0x192, 0x0, 0x64)
ChrTalk(
0xC,
"喵呜~\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#509F真是的,都是因为你,\x01",
"昨天害我吓了一大跳呢。\x02\x03",
"你是不是该反省一下呢,嗯?\x02",
)
)
CloseMessageWindow()
OP_22(0x192, 0x0, 0x64)
ChrTalk(
0xC,
"咪呜?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#007F都不听我说话啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#019F哈哈,说不定它是在装傻呢。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#006F唉,算了。\x01",
"总之承蒙你的关照啦。\x02\x03",
"多谢了,安东尼。\x02",
)
)
CloseMessageWindow()
OP_22(0x192, 0x0, 0x64)
ChrTalk(
0xC,
"咪~呜嗯。\x02",
)
CloseMessageWindow()
Jump("loc_73C9")
label("loc_7234")
ChrTalk(
0x101,
"#004F啊,那只猫是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F就是那个时候\x01",
"钻进集装箱里的那只猫吧。\x02\x03",
"我记得,\x01",
"好像是叫做安东尼。\x02",
)
)
CloseMessageWindow()
OP_22(0x192, 0x0, 0x64)
ChrTalk(
0xC,
"喵呜~\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#001F啊哈哈~真可爱。\x02\x03",
"#006F真是的,都是因为你,\x01",
"昨天害我吓了一大跳呢。\x02\x03",
"你是不是该反省一下呢,嗯?\x02",
)
)
CloseMessageWindow()
OP_22(0x192, 0x0, 0x64)
ChrTalk(
0xC,
"咪呜?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#007F都不听我说话啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#019F哈哈,说不定它是在装傻呢。\x01",
" \x02",
)
)
CloseMessageWindow()
label("loc_73C9")
SetChrPos(0x8, -47160, 0, 129750, 0)
ClearChrFlags(0x8, 0x80)
ChrTalk(
0x8,
"#3P哦,是你们啊!\x02",
)
CloseMessageWindow()
OP_4A(0x8, 255)
OP_62(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
def lambda_742E():
OP_6D(-46010, -4000, 137720, 2000)
ExitThread()
QueueWorkItem(0x0, 1, lambda_742E)
def lambda_7446():
label("loc_7446")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_7446")
QueueWorkItem2(0x101, 3, lambda_7446)
def lambda_7457():
label("loc_7457")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_7457")
QueueWorkItem2(0x102, 3, lambda_7457)
def lambda_7468():
OP_8E(0xFE, 0xFFFF4B92, 0xFFFFF060, 0x21E44, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x8, 1, lambda_7468)
Sleep(1000)
def lambda_7488():
label("loc_7488")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_7488")
QueueWorkItem2(0x101, 3, lambda_7488)
def lambda_7499():
label("loc_7499")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_7499")
QueueWorkItem2(0x102, 3, lambda_7499)
OP_8C(0xC, 192, 800)
def lambda_74B1():
OP_8F(0xFE, 0xFFFF4DA4, 0x0, 0x1F521, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_74B1)
WaitChrThread(0x8, 0x1)
ChrTalk(
0x101,
"#501F啊,维修长先生!\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#691F#2P工房长都告诉我了。\x01",
"博士救出作战干得真是漂亮啊。\x02\x03",
"博士对我们这些技术人员来说,\x01",
"算是师傅一样的人物了。\x02\x03",
"我也要好好感谢你们呢。\x02",
)
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
ChrTalk(
0x101,
(
"#008F嘿嘿……\x01",
"这也多亏了维修长你们的帮忙啊。\x02\x03",
"不过我真是被那孩子吓坏了呢。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F那个安东尼,\x01",
"果然是您故意放进去的吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#693F#2P啊哈哈,要想欺骗敌人,\x01",
"首先要瞒过伙伴才行啊。\x02\x03",
"#691F话说回来,\x01",
"你们来飞艇坪有什么事吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#006F嗯,受博士的委托,\x01",
"我们现在要赶往王都。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F要乘坐11点的定期船,\x01",
"看来好像来得早了点。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#692F#2P啊啊……\x01",
"好像要稍微晚到一会儿。\x02\x03",
"#691F因为还要花点时间卸货,\x01",
"你们到街上再转一会也没关系啦。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#505F嗯,这样啊……\x02",
)
CloseMessageWindow()
SetChrPos(0x9, -45980, 0, 128889, 0)
OP_4A(0x9, 255)
ChrTalk(
0x9,
"#3P喂,你们两位!\x02",
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
def lambda_78BA():
OP_8E(0x9, 0xFFFF4F8E, 0xFFFFF060, 0x21BC4, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x9, 1, lambda_78BA)
def lambda_78D5():
OP_6D(-46700, -2500, 134910, 1500)
ExitThread()
QueueWorkItem(0x0, 1, lambda_78D5)
def lambda_78ED():
label("loc_78ED")
TurnDirection(0xFE, 0x9, 400)
OP_48()
Jump("loc_78ED")
QueueWorkItem2(0x101, 2, lambda_78ED)
def lambda_78FE():
label("loc_78FE")
TurnDirection(0xFE, 0x9, 400)
OP_48()
Jump("loc_78FE")
QueueWorkItem2(0x102, 2, lambda_78FE)
def lambda_790F():
label("loc_790F")
TurnDirection(0xFE, 0x9, 400)
OP_48()
Jump("loc_790F")
QueueWorkItem2(0x8, 2, lambda_790F)
Sleep(1500)
def lambda_7925():
label("loc_7925")
TurnDirection(0xFE, 0x9, 0)
OP_48()
Jump("loc_7925")
QueueWorkItem2(0x101, 2, lambda_7925)
def lambda_7936():
label("loc_7936")
TurnDirection(0xFE, 0x9, 0)
OP_48()
Jump("loc_7936")
QueueWorkItem2(0x102, 2, lambda_7936)
def lambda_7947():
label("loc_7947")
TurnDirection(0xFE, 0x9, 0)
OP_48()
Jump("loc_7947")
QueueWorkItem2(0x8, 2, lambda_7947)
def lambda_7958():
OP_6D(-46010, -4000, 137720, 2000)
ExitThread()
QueueWorkItem(0x0, 1, lambda_7958)
WaitChrThread(0x9, 0x1)
TurnDirection(0x9, 0x102, 0)
ChrTalk(
0x8,
(
"#692F#2P什么啊,这不是吉拉尔吗。\x02\x03",
"怎么,发生了什么事吗?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x9, 0x8, 400)
ChrTalk(
0x9,
(
"正好,\x01",
"大叔您也在啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
"实际上,事情变得麻烦起来了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"#692F#2P你说什么,麻烦?\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"嗯,是啊……\x01",
"飞艇公社发来的联络说……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"定期船可能要\x01",
"晚几个小时才能到。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#004F哎……!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#012F#6P…………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#692F#2P喂喂……\x01",
"到底是怎么回事啊。\x02\x03",
"又有空贼作乱吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
"啊,说起来也差不多。\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"据说,有一伙打算妨碍\x01",
"女王陛下的诞辰庆典的恐怖分子\x01",
"可能在王国的某个地方潜伏着。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"为了调查这件事,\x01",
"所有的飞艇坪都被军队设下了哨卡。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#002F(那、那个是……)\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#015F#6P(大概是为了搜寻博士他们吧……)\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"所以,开往王都的定期船\x01",
"现在还滞留在卢安那里……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"取而代之的好像是\x01",
"雷斯顿要塞的军用警备飞艇。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#691F#2P原来如此,是这样啊。\x02\x03",
"不过这样一来,\x01",
"你不是就要很忙了?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x9, 0x8, 400)
ChrTalk(
0x9,
(
"是啊……\x01",
"不把这件事告诉旅客们不行啊。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x9, 0x101, 400)
ChrTalk(
0x9,
(
"就因为这样,\x01",
"你们也得再等一段时间了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"对了……\x01",
"如果你们愿意在游击士协会等的话,\x01",
"我去帮你们联系一下吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#505F嗯,好的……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F#6P真是麻烦您了。\x02",
)
CloseMessageWindow()
OP_8C(0x9, 190, 400)
OP_8F(0x9, 0xFFFF4C3C, 0x0, 0x1F982, 0xBB8, 0x0)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x8, 0xFF)
SetChrPos(0x9, -20110, 8000, 121830, 177)
OP_4B(0x9, 255)
OP_44(0x8, 0xFF)
TurnDirection(0x8, 0x101, 400)
ChrTalk(
0x8,
(
"#690F#2P……真是可疑啊。\x02\x03",
"如果军队那帮家伙这样干的话,\x01",
"莱普尼兹号肯定也会被检查的。\x02\x03",
"我这就去和工房长说这件事。\x02",
)
)
CloseMessageWindow()
def lambda_7FB8():
label("loc_7FB8")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_7FB8")
QueueWorkItem2(0x101, 2, lambda_7FB8)
def lambda_7FC9():
label("loc_7FC9")
TurnDirection(0xFE, 0x8, 400)
OP_48()
Jump("loc_7FC9")
QueueWorkItem2(0x102, 2, lambda_7FC9)
ChrTalk(
0x101,
(
"#002F对啊,要是查起昨天那件事的话,\x01",
"那中央工房就不好办了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#012F请一定要小心啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#691F#2P哈哈,我还没有不中用到\x01",
"让你们这些小孩子担心的份儿上呢。\x02\x03",
"#693F那么告辞了!\x02",
)
)
CloseMessageWindow()
OP_8C(0x8, 190, 600)
OP_8F(0x8, 0xFFFF4B92, 0x0, 0x1F8E2, 0xFA0, 0x0)
SetChrFlags(0x8, 0x80)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
def lambda_80FB():
OP_6D(-45920, -4000, 139870, 1000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_80FB)
TurnDirection(0x101, 0x102, 400)
WaitChrThread(0x101, 0x1)
ChrTalk(
0x101,
(
"#002F约修亚……\x01",
"这样不就很麻烦了吗?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#013F嗯……\x01",
"这样的话乘定期船就有点危险了。\x02\x03",
"#012F虽然要花点时间,\x01",
"不过还是走街道比较好吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#509F唔,还以为好不容易\x01",
"可以坐上久违的飞艇了呢。\x02\x03",
"我跟你没完,理查德上校!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#019F算了算了,\x01",
"当成是继续修行不也很好吗?\x02\x03",
"#010F那么,我们赶快去接待处那里\x01",
"把搭乘手续取消吧。\x02",
)
)
CloseMessageWindow()
SetChrFlags(0xC, 0x80)
EventEnd(0x0)
label("loc_82ED")
Return()
# Function_19_6F5A end
def Function_20_82EE(): pass
label("Function_20_82EE")
SetChrFlags(0xFE, 0x4)
ClearChrFlags(0xFE, 0x80)
SetChrPos(0xFE, -34090, -4000, 144010, 270)
OP_8C(0xFE, 270, 400)
Return()
# Function_20_82EE end
def Function_21_8311(): pass
label("Function_21_8311")
SetChrFlags(0xFE, 0x4)
ClearChrFlags(0xFE, 0x80)
SetChrPos(0xFE, -35750, -4000, 143010, 90)
OP_8C(0xFE, 90, 400)
Return()
# Function_21_8311 end
def Function_22_8334(): pass
label("Function_22_8334")
SetChrFlags(0xFE, 0x4)
ClearChrFlags(0xFE, 0x80)
SetChrPos(0xFE, -35770, -4000, 144120, 90)
OP_8C(0xFE, 90, 400)
Return()
# Function_22_8334 end
def Function_23_8357(): pass
label("Function_23_8357")
SetChrFlags(0xFE, 0x4)
ClearChrFlags(0xFE, 0x80)
SetChrPos(0xFE, -36170, -4000, 145050, 90)
OP_8C(0xFE, 90, 400)
Return()
# Function_23_8357 end
def Function_24_837A(): pass
label("Function_24_837A")
FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"定期船起降坪\x01",
"≡ 开往王都格兰赛尔\x01",
"≡ 开往卢安市\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"※请勿携带易燃物和危险品\x01",
" 利贝尔飞艇公社\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(72, 320, 56, 3)
TalkEnd(0xFF)
Return()
# Function_24_837A end
def Function_25_8435(): pass
label("Function_25_8435")
FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
" 飞艇坪塔台 \x01",
" ※无关人员禁止入内 \x01",
"『利贝尔飞艇公社』 \x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(72, 320, 56, 3)
TalkEnd(0xFF)
Return()
# Function_25_8435 end
def Function_26_84C3(): pass
label("Function_26_84C3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 3)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8578")
EventBegin(0x2)
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#010F首先去把搭乘手续\x01",
"去把票退掉吧。\x02\x03",
"还是过会儿\x01",
"然后我们再出发。\x02",
)
)
CloseMessageWindow()
OP_90(0x0, 0x0, 0x0, 0x5DC, 0xBB8, 0x0)
Sleep(50)
EventEnd(0x4)
label("loc_8578")
Return()
# Function_26_84C3 end
# Decompiler-emitted entry point: serialize the scenario back to file, then
# execute main() under the toolchain's Try() error guard.
SaveToFile()
Try(main)
|
def dog_years(age):
    """Convert an age in human years to "dog years" using the classic 1:7 rule.

    Args:
        age: age in human years (int).

    Returns:
        The age multiplied by 7.
    """
    return age * 7


def main():
    """Prompt for the dog's age and print it converted to dog years."""
    # int() raises ValueError on non-numeric input, matching the original.
    age = int(input("What is your dog age? "))
    print("Your dog age in dog years is", dog_years(age))


# Guarding the entry point keeps the prompt from firing on import; it also
# drops the stray " |" concatenation artifact that trailed the final line.
if __name__ == "__main__":
    main()
import gitwrapper
import solventwrapper
import shutil
import os
import unittest
import upseto
import osmosiswrapper
import tempfile
import subprocess
class Test(unittest.TestCase):
    def setUp(self):
        """Per-test fixture: clean environment, fresh osmosis pair, fresh repos.

        SOLVENT_* variables are scrubbed first so a developer's own solvent
        configuration cannot leak into the test run.
        """
        for key in list(os.environ.keys()):
            if 'SOLVENT' in key:
                del os.environ[key]
        # Local + official osmosis object-store servers used by every test.
        self.osmosisPair = osmosiswrapper.LocalAndOfficial()
        gitwrapper.setUp()
        self.fixture()
        self.cleanLocalClonesDir()
        self.resetFakeMount()
def resetFakeMount(self):
if os.path.exists("build/mount"):
os.unlink("build/mount")
mount = subprocess.check_output(["mount"])
with open("build/mount", "w") as f:
f.write("#!/bin/sh\ncat %s/build/mount.txt\n" % os.getcwd())
os.chmod("build/mount", 0755)
with open("build/mount.txt", "w") as f:
f.write(mount)
def cleanLocalClonesDir(self):
shutil.rmtree(gitwrapper.localClonesDir())
os.makedirs(gitwrapper.localClonesDir())
    def tearDown(self):
        """Tear down git fixtures and stop the osmosis pair if still running.

        osmosisPair is None when a test already shut the pair down itself
        (see test_SubmitBuildNotAllowedFromANonGitProject).
        """
        gitwrapper.tearDown()
        if self.osmosisPair is not None:
            self.osmosisPair.exit()
    def fixture(self):
        """Build the shared repo fixture.

        Creates project1 and project2, a requiringProject that requires both
        via upseto, and a recursiveProject that requires requiringProject,
        with manifests committed and pushed.
        """
        self.project1 = gitwrapper.GitHub("project1")
        self.project2 = gitwrapper.GitHub("project2")
        self.requiringProject = gitwrapper.GitHub("requiringProject")
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone2 = gitwrapper.LocalClone(self.project2)
        localRequiringProject = gitwrapper.LocalClone(self.requiringProject)
        self.assertEquals(self.project1.hash('master'), localClone1.hash())
        self.assertEquals(self.project2.hash('master'), localClone2.hash())
        # NOTE(review): hash() is called without 'master' here, unlike the two
        # lines above -- presumably it defaults to master; confirm in gitwrapper.
        self.assertEquals(self.requiringProject.hash(), localRequiringProject.hash())
        solventwrapper.upseto(localRequiringProject, "addRequirement project1")
        solventwrapper.upseto(localRequiringProject, "addRequirement project2")
        localRequiringProject.addCommitPushManifest()
        self.recursiveProject = gitwrapper.GitHub("recursiveProject")
        localRecursiveProject = gitwrapper.LocalClone(self.recursiveProject)
        solventwrapper.upseto(localRecursiveProject, "addRequirement requiringProject")
        localRecursiveProject.addCommitPushManifest()
    def test_Fixture(self):
        """Sanity-check the fixture itself.

        upseto/solvent must be imported from the source tree (not from an
        installed /usr copy), and the recursive project's requirements must
        resolve to four local file:// repositories.
        """
        self.assertNotIn('/usr', upseto.__file__)
        import solvent
        self.assertNotIn('/usr', solvent.__file__)
        localRecursiveProject = gitwrapper.LocalClone(self.recursiveProject)
        solventwrapper.upseto(localRecursiveProject, "fulfillRequirements")
        lines = solventwrapper.upseto(localRecursiveProject, "checkRequirements --show")
        self.assertEquals(len([l for l in lines.split("\n") if 'file:///' in l]), 4)
    def test_SubmitANonUpsetoedProject(self):
        """submitbuild publishes a __dirty label to both object stores,
        and checking that label out restores the build products."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        hash = localClone1.hash()
        self.assertFalse(localClone1.fileExists("build/product1"))
        localClone1.writeFile("build/product1", "product1 contents")
        self.assertTrue(localClone1.fileExists("build/product1"))
        solventwrapper.run(localClone1, "submitbuild")
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__project1__build__%s__dirty' % hash
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        self.assertEquals(self.osmosisPair.official.client().listLabels(), [label])
        # Checkout of the label must reproduce the submitted workspace.
        self.cleanLocalClonesDir()
        self.assertFalse(localClone1.fileExists("build/product1"))
        self.osmosisPair.local.client().checkout(path=gitwrapper.localClonesDir(), label=label)
        self.assertEquals(localClone1.hash(), hash)
        self.assertTrue(localClone1.fileExists("build/product1"))
    def test_SubmitANonUpsetoedProject_FailsIfWorkspaceIsSullied(self):
        """With SOLVENT_CLEAN=yes, submitbuild must refuse a sullied workspace.

        Cloning project2 alongside project1 is presumably what sullies the
        workspace here -- localClone2 is kept for that side effect only.
        """
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone2 = gitwrapper.LocalClone(self.project2)
        solventwrapper.runShouldFail(localClone1, "submitbuild", "sullied", env=dict(SOLVENT_CLEAN="yes"))
    def test_ConfigurationMissingOfficialOsmosis(self):
        """submitbuild must fail clearly on an empty configuration file, and
        again on one that lacks the official object-store entry."""
        configuration = tempfile.NamedTemporaryFile()
        solventwrapper.configurationFile = configuration.name
        localClone1 = gitwrapper.LocalClone(self.project1)
        solventwrapper.runShouldFail(localClone1, "submitbuild", "empty")
        # NOTE(review): writing str to a default NamedTemporaryFile is
        # Python-2 behavior; Python 3 would need mode="w" here.
        configuration.write("key: value\n")
        configuration.flush()
        solventwrapper.runShouldFail(localClone1, "submitbuild", "official")
    def test_SubmitBuildNotAllowedFromANonGitProject(self):
        """submitbuild must fail when the osmosis servers are down.

        NOTE(review): the method name mentions a non-git project, but the body
        exercises the osmosis-unreachable path -- consider renaming.
        """
        localClone1 = gitwrapper.LocalClone(self.project1)
        self.osmosisPair.exit()
        solventwrapper.runShouldFail(localClone1, "submitbuild", "osmosis")
        # Signal tearDown that the pair was already shut down.
        self.osmosisPair = None
    def test_SubmitANonUpsetoedProjectOfficialBuild(self):
        """When configured as official, submitbuild publishes an
        __officialcandidate label (rather than __dirty) to both stores."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        hash = localClone1.hash()
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localClone1, "submitbuild")
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__project1__build__%s__officialcandidate' % hash
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        self.assertEquals(self.osmosisPair.official.client().listLabels(), [label])
        self.cleanLocalClonesDir()
        self.osmosisPair.local.client().checkout(path=gitwrapper.localClonesDir(), label=label)
        self.assertEquals(localClone1.hash(), hash)
        self.assertTrue(localClone1.fileExists("build/product1"))
    def test_SubmitAndApprove(self):
        """Non-official submit + approve promotes the label to __clean,
        and the clean label restores the build products on checkout."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        hash = localClone1.hash()
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.run(localClone1, "submitbuild", env=dict(SOLVENT_CLEAN="yes"))
        solventwrapper.run(localClone1, "approve", env=dict(SOLVENT_CLEAN="yes"))
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__project1__build__%s__clean' % hash
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        self.assertEquals(self.osmosisPair.official.client().listLabels(), [label])
        self.cleanLocalClonesDir()
        self.assertFalse(localClone1.fileExists("build/product1"))
        self.osmosisPair.local.client().checkout(path=gitwrapper.localClonesDir(), label=label)
        self.assertEquals(localClone1.hash(), hash)
        self.assertTrue(localClone1.fileExists("build/product1"))
    def test_SubmitAndApprove_Official(self):
        """Official submit + approve promotes the label to __official,
        and the official label restores the build products on checkout."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        hash = localClone1.hash()
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localClone1, "submitbuild")
        solventwrapper.run(localClone1, "approve")
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__project1__build__%s__official' % hash
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        self.assertEquals(self.osmosisPair.official.client().listLabels(), [label])
        self.cleanLocalClonesDir()
        self.assertFalse(localClone1.fileExists("build/product1"))
        self.osmosisPair.local.client().checkout(path=gitwrapper.localClonesDir(), label=label)
        self.assertEquals(localClone1.hash(), hash)
        self.assertTrue(localClone1.fileExists("build/product1"))
    def test_FulfillUpsetoRequirements(self):
        """fulfillrequirements restores an official recursive build, and
        checkrequirements copes with the label missing from the local store
        but fails once it is gone from the official store too."""
        localRequiringProject = gitwrapper.LocalClone(self.requiringProject)
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.upseto(localRequiringProject, "fulfillRequirements")
        localRequiringProject.writeFile("build/product2", "product2 contents")
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localRequiringProject, "submitbuild")
        solventwrapper.run(localRequiringProject, "approve")
        solventwrapper.configureAsNonOfficial()
        self.cleanLocalClonesDir()
        localRecursiveProject = gitwrapper.LocalClone(self.recursiveProject)
        solventwrapper.run(localRecursiveProject, "checkrequirements")
        solventwrapper.run(localRecursiveProject, "fulfillrequirements")
        self.assertTrue(localClone1.fileExists("build/product1"))
        self.assertTrue(localRequiringProject.fileExists("build/product2"))
        solventwrapper.run(localRecursiveProject, "checkrequirements")
        labels = self.osmosisPair.local.client().listLabels()
        self.assertEquals(len(labels), 1)
        label = labels[0]
        # Local store loses the label: check must still pass via official.
        self.osmosisPair.local.client().eraseLabel(label)
        solventwrapper.run(localRecursiveProject, "checkrequirements")
        # Gone from both stores: check must now fail mentioning the label.
        self.osmosisPair.official.client().eraseLabel(label)
        solventwrapper.runShouldFail(localRecursiveProject, "checkrequirements", "label")
def test_NoRequirements_FulfillDoesNothing(self):
localClone1 = gitwrapper.LocalClone(self.project1)
solventwrapper.run(localClone1, "fulfillrequirements")
    def test_FulfillUpsetoRequirements_MoreThanOneProject(self):
        """fulfillrequirements restores build products from BOTH required
        projects after each was officially submitted and approved."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localClone1, "submitbuild")
        solventwrapper.run(localClone1, "approve")
        self.cleanLocalClonesDir()
        localClone2 = gitwrapper.LocalClone(self.project2)
        localClone2.writeFile("build/product2", "product2 contents")
        solventwrapper.run(localClone2, "submitbuild")
        solventwrapper.run(localClone2, "approve")
        solventwrapper.configureAsNonOfficial()
        self.cleanLocalClonesDir()
        localRequiringProject = gitwrapper.LocalClone(self.requiringProject)
        solventwrapper.run(localRequiringProject, "fulfillrequirements")
        self.assertTrue(localClone1.fileExists("build/product1"))
        self.assertTrue(localClone2.fileExists("build/product2"))
    def test_FulfillUpsetoRequirements_NoOfficialBuild(self):
        """fulfillrequirements must fail when one required project was
        submitted but never approved (no official build exists)."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.configureAsOfficial()
        # project1 is submitted but deliberately NOT approved.
        solventwrapper.run(localClone1, "submitbuild")
        self.cleanLocalClonesDir()
        localClone2 = gitwrapper.LocalClone(self.project2)
        localClone2.writeFile("build/product2", "product2 contents")
        solventwrapper.run(localClone2, "submitbuild")
        solventwrapper.run(localClone2, "approve")
        solventwrapper.configureAsNonOfficial()
        self.cleanLocalClonesDir()
        localRequiringProject = gitwrapper.LocalClone(self.requiringProject)
        solventwrapper.runShouldFail(localRequiringProject, "fulfillrequirements", "build")
    def createBuildProduct(self):
        """Helper: create a 'producer' repo, officially submit and approve a
        'theProductName' product from its build directory.

        Returns the producer's local clone; sets self.producer.
        """
        self.producer = gitwrapper.GitHub("producer")
        localProducer = gitwrapper.LocalClone(self.producer)
        localProducer.writeFile("build/theDirectory/theProduct", "the contents")
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localProducer, "submitproduct theProductName build")
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__producer__theProductName__%s__officialcandidate' % self.producer.hash()
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        solventwrapper.run(localProducer, "approve --product=theProductName")
        return localProducer
    def test_createBuildProduct(self):
        """An approved product carries an __official label, and `bring`
        materializes its directory tree at the given destination."""
        self.createBuildProduct()
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        self.cleanLocalClonesDir()
        solventwrapper.run(
            gitwrapper.localClonesDir(),
            "bring --repository=producer --product=theProductName --hash=%s --destination=%s" % (
                self.producer.hash(), gitwrapper.localClonesDir()))
        self.assertTrue(os.path.isdir(os.path.join(gitwrapper.localClonesDir(), "theDirectory")))
        self.assertTrue(os.path.exists(
            os.path.join(gitwrapper.localClonesDir(), "theDirectory", "theProduct")))
    def test_fulfillRequirementsLabelDoesNotExistInLocalOsmosis(self):
        """fulfillrequirements must fall back to the official object store
        when the label has been erased from the local one."""
        localRequiringProject = gitwrapper.LocalClone(self.requiringProject)
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.upseto(localRequiringProject, "fulfillRequirements")
        localRequiringProject.writeFile("build/product2", "product2 contents")
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localRequiringProject, "submitbuild")
        solventwrapper.run(localRequiringProject, "approve")
        solventwrapper.configureAsNonOfficial()
        labels = self.osmosisPair.local.client().listLabels()
        self.assertEquals(len(labels), 1)
        # Remove from local only; the official store still has the label.
        self.osmosisPair.local.client().eraseLabel(labels[0])
        self.cleanLocalClonesDir()
        localRecursiveProject = gitwrapper.LocalClone(self.recursiveProject)
        solventwrapper.run(localRecursiveProject, "fulfillrequirements")
        self.assertTrue(localClone1.fileExists("build/product1"))
        self.assertTrue(localRequiringProject.fileExists("build/product2"))
    def test_noOfficialObjectStoreConfigured(self):
        """The whole submit/approve/fulfill cycle must work with no official
        object store configured (local store only)."""
        localRequiringProject = gitwrapper.LocalClone(self.requiringProject)
        localClone1 = gitwrapper.LocalClone(self.project1)
        localClone1.writeFile("build/product1", "product1 contents")
        solventwrapper.upseto(localRequiringProject, "fulfillRequirements")
        localRequiringProject.writeFile("build/product2", "product2 contents")
        solventwrapper.configureNoOfficial()
        solventwrapper.configureAsOfficial()
        solventwrapper.run(localRequiringProject, "submitbuild")
        solventwrapper.run(localRequiringProject, "approve")
        self.cleanLocalClonesDir()
        localRecursiveProject = gitwrapper.LocalClone(self.recursiveProject)
        solventwrapper.run(localRecursiveProject, "fulfillrequirements")
        self.assertTrue(localClone1.fileExists("build/product1"))
        self.assertTrue(localRequiringProject.fileExists("build/product2"))
    def test_createBuildProduct_bringExactVersionFromManifestFile(self):
        """`bring` without --hash must take the exact version recorded by
        addrequirement in the solvent manifest."""
        self.createBuildProduct()
        self.cleanLocalClonesDir()
        localClone1 = gitwrapper.LocalClone(self.project1)
        solventwrapper.run(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            self.producer.url(), self.producer.hash()))
        solventwrapper.run(
            localClone1, "bring --repository=producer --product=theProductName --destination=%s" % (
                os.path.join(localClone1.directory(), "build", "theProductDir")))
        self.assertTrue(os.path.isdir(os.path.join(
            localClone1.directory(), "build", "theProductDir", "theDirectory")))
        self.assertTrue(os.path.exists(os.path.join(
            localClone1.directory(), "build", "theProductDir", "theDirectory", "theProduct")))
    def test_invalidInputForAddRequirementCommandLine(self):
        """addrequirement must reject a non-git URL and a truncated hash, and
        bring must fail when no requirement was successfully recorded."""
        self.createBuildProduct()
        self.cleanLocalClonesDir()
        localClone1 = gitwrapper.LocalClone(self.project1)
        solventwrapper.runShouldFail(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            "thisisnotagiturl", self.producer.hash()), "invalid")
        solventwrapper.runShouldFail(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            self.producer.url(), self.producer.hash()[: -2]), "invalid")
        solventwrapper.runShouldFail(
            localClone1, "bring --repository=producer --product=theProductName --destination=%s" % (
                os.path.join(localClone1.directory(), "build", "theProductDir")), "requirement")
    def test_updateRequirement(self):
        """addrequirement on an existing origin updates it in place (manifest
        size unchanged), and removerequirement makes bring fail again."""
        self.createBuildProduct()
        self.cleanLocalClonesDir()
        localClone1 = gitwrapper.LocalClone(self.project1)
        # Guard: the fudged hash below must actually differ from the real one.
        self.assertNotEquals(self.producer.hash()[-2:], "00")
        solventwrapper.run(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            self.producer.url(), self.producer.hash()[: -2] + "00"))
        previous = localClone1.readFile("solvent.manifest")
        solventwrapper.run(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            self.producer.url(), self.producer.hash()))
        # Same length => the entry was replaced, not appended.
        self.assertEquals(len(localClone1.readFile("solvent.manifest")), len(previous))
        solventwrapper.run(
            localClone1, "bring --repository=producer --product=theProductName --destination=%s" % (
                os.path.join(localClone1.directory(), "build", "theProductDir")))
        solventwrapper.run(
            localClone1, "removerequirement --originURLBasename=producer")
        solventwrapper.runShouldFail(
            localClone1, "bring --repository=producer --product=theProductName --destination=%s" % (
                os.path.join(localClone1.directory(), "build", "theProductDir")), "requirement")
    def test_workDirty(self):
        """A sullied workspace can still submitproduct without SOLVENT_CLEAN
        (yielding a __dirty label), and the cheating tool can force-promote
        that label straight to __official."""
        self.producer = gitwrapper.GitHub("producer")
        localProducer = gitwrapper.LocalClone(self.producer)
        localProducer.writeFile("build/theDirectory/theProduct", "the contents")
        # Dirty the project tree and the surrounding workspace.
        localProducer.writeFile("imaketheprojectdirty", "dirty dirty boy")
        localProducer.writeFile("../isullytheworkspace", "and my pants too")
        solventwrapper.runShouldFail(
            localProducer, "submitproduct theProductName build", "sullied",
            env=dict(SOLVENT_CLEAN="yes"))
        solventwrapper.run(localProducer, "submitproduct theProductName build")
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__producer__theProductName__%s__dirty' % self.producer.hash()
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
        # Invoke solvent.cheating directly to rename the state dirty->official.
        solventwrapper.runWhatever(
            localProducer.directory(),
            "python -m coverage run --parallel-mode -m solvent.cheating --configurationFile=%s "
            "changestate --fromState=dirty --toState=official --product=theProductName" %
            solventwrapper.configurationFile)
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
    def test_FetchObjectStoresConfiguration(self):
        """printobjectstores reports 'local+official' as host:port pairs
        joined by '+'."""
        localClone1 = gitwrapper.LocalClone(self.project1)
        output = solventwrapper.run(localClone1, "printobjectstores").strip()
        self.assertEquals(output, "localhost:%d+localhost:%d" % (
            self.osmosisPair.local.port(), self.osmosisPair.official.port()))
    def test_PrintDependantLabel(self):
        """printlabel resolves a recorded requirement to its __official
        osmosis label."""
        self.createBuildProduct()
        self.cleanLocalClonesDir()
        localClone1 = gitwrapper.LocalClone(self.project1)
        solventwrapper.run(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            self.producer.url(), self.producer.hash()))
        expectedLabel = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
        label = solventwrapper.run(
            localClone1, "printlabel --repositoryBasename=producer --product=theProductName").strip()
        self.assertEquals(label, expectedLabel)
    def createAllStates(self):
        """Helper: publish the same product in official, clean and dirty
        states, then return closures for querying/negating printlabel plus
        the three expected label strings.

        Returns a dict with getCleanLabel/getDirtyLabel (label fetchers),
        noCleanLabel/noDirtyLabel (expect-failure fetchers), localClone1,
        and officialLabel/cleanLabel/dirtyLabel.
        """
        localProducer = self.createBuildProduct()
        solventwrapper.configureAsNonOfficial()
        solventwrapper.run(
            localProducer, "submitproduct theProductName build", env=dict(SOLVENT_CLEAN="yes"))
        solventwrapper.run(
            localProducer, "approve --product=theProductName", env=dict(SOLVENT_CLEAN="yes"))
        solventwrapper.run(localProducer, "submitproduct theProductName build")
        self.cleanLocalClonesDir()
        localClone1 = gitwrapper.LocalClone(self.project1)
        solventwrapper.run(localClone1, "addrequirement --originURL=%s --hash=%s" % (
            self.producer.url(), self.producer.hash()))
        officialLabel = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
        cleanLabel = 'solvent__producer__theProductName__%s__clean' % self.producer.hash()
        dirtyLabel = 'solvent__producer__theProductName__%s__dirty' % self.producer.hash()

        def getCleanLabel():
            # printlabel restricted to clean-or-better states.
            return solventwrapper.run(
                localClone1, "printlabel --repositoryBasename=producer --product=theProductName",
                env=dict(SOLVENT_CLEAN="Yes")).strip()

        def getDirtyLabel():
            # printlabel with dirty labels allowed.
            return solventwrapper.run(
                localClone1, "printlabel --repositoryBasename=producer --product=theProductName").strip()

        def noCleanLabel():
            # Expect printlabel to fail (no acceptable clean label remains).
            return solventwrapper.runShouldFail(
                localClone1,
                "printlabel --repositoryBasename=producer --product=theProductName",
                "requirement",
                env=dict(SOLVENT_CLEAN="yes"))

        def noDirtyLabel():
            # Expect printlabel to fail (no label at all remains).
            return solventwrapper.runShouldFail(
                localClone1,
                "printlabel --repositoryBasename=producer --product=theProductName",
                "requirement")
        return dict(
            getCleanLabel=getCleanLabel, getDirtyLabel=getDirtyLabel,
            noCleanLabel=noCleanLabel, noDirtyLabel=noDirtyLabel,
            localClone1=localClone1, officialLabel=officialLabel,
            cleanLabel=cleanLabel, dirtyLabel=dirtyLabel)
    def test_priorityBetweenStates_OfficialBuild(self):
        """When configured official, only __official labels are acceptable:
        local store, then official store, then failure."""
        created = self.createAllStates()
        solventwrapper.configureAsOfficial()
        self.assertEquals(created['getCleanLabel'](), created['officialLabel'])
        self.osmosisPair.local.client().eraseLabel(created['officialLabel'])
        self.assertEquals(created['getCleanLabel'](), created['officialLabel'])
        self.osmosisPair.official.client().eraseLabel(created['officialLabel'])
        created['noCleanLabel']()
    def test_priorityBetweenStates_CleanBuild(self):
        """Non-official + SOLVENT_CLEAN: prefer official over clean within
        each store, and the local store over the official one."""
        created = self.createAllStates()
        solventwrapper.configureAsNonOfficial()
        self.assertEquals(created['getCleanLabel'](), created['officialLabel'])
        self.osmosisPair.local.client().eraseLabel(created['officialLabel'])
        self.assertEquals(created['getCleanLabel'](), created['cleanLabel'])
        self.osmosisPair.local.client().eraseLabel(created['cleanLabel'])
        self.assertEquals(created['getCleanLabel'](), created['officialLabel'])
        self.osmosisPair.official.client().eraseLabel(created['officialLabel'])
        self.assertEquals(created['getCleanLabel'](), created['cleanLabel'])
        self.osmosisPair.official.client().eraseLabel(created['cleanLabel'])
        created['noCleanLabel']()
    def test_priorityBetweenStates_DirtyBuild(self):
        """Non-official without SOLVENT_CLEAN: official > clean > dirty within
        each store, local store first, then the official store."""
        created = self.createAllStates()
        solventwrapper.configureAsNonOfficial()
        self.assertEquals(created['getDirtyLabel'](), created['officialLabel'])
        self.osmosisPair.local.client().eraseLabel(created['officialLabel'])
        self.assertEquals(created['getDirtyLabel'](), created['cleanLabel'])
        self.osmosisPair.local.client().eraseLabel(created['cleanLabel'])
        self.assertEquals(created['getDirtyLabel'](), created['dirtyLabel'])
        self.osmosisPair.local.client().eraseLabel(created['dirtyLabel'])
        self.assertEquals(created['getDirtyLabel'](), created['officialLabel'])
        self.osmosisPair.official.client().eraseLabel(created['officialLabel'])
        self.assertEquals(created['getDirtyLabel'](), created['cleanLabel'])
        self.osmosisPair.official.client().eraseLabel(created['cleanLabel'])
        self.assertEquals(created['getDirtyLabel'](), created['dirtyLabel'])
        self.osmosisPair.official.client().eraseLabel(created['dirtyLabel'])
        created['noDirtyLabel']()
    def test_solventCanBeConfiguredFromTheEnvironment(self):
        """SOLVENT_CONFIG (inline YAML in the environment) must behave like
        the per-variable SOLVENT_* configuration."""
        self.producer = gitwrapper.GitHub("producer")
        localProducer = gitwrapper.LocalClone(self.producer)
        localProducer.writeFile("build/theDirectory/theProduct", "the contents")
        localProducer.writeFile("imaketheprojectdirty", "dirty dirty boy")
        localProducer.writeFile("../isullytheworkspace", "and my pants too")
        solventwrapper.runShouldFail(
            localProducer, "submitproduct theProductName build", "sullied",
            env=dict(SOLVENT_CONFIG="CLEAN: yes"))
        solventwrapper.run(localProducer, "submitproduct theProductName build")
        self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
        label = 'solvent__producer__theProductName__%s__dirty' % self.producer.hash()
        self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
        self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
def test_localize(self):
    """'localize' copies a label into the local object store; it fails when
    the official object store is disabled via SOLVENT_CONFIG."""
    self.createBuildProduct()
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
    label = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
    self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
    # Remove the label locally so localize has real work to do.
    self.osmosisPair.local.client().eraseLabel(label)
    self.assertEquals(self.osmosisPair.local.client().listLabels(), [])
    solventwrapper.run(os.getcwd(), "localize --label=%s" % label)
    self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
    # Localizing an already-local label must not fail.
    solventwrapper.run(os.getcwd(), "localize --label=%s" % label)
    self.osmosisPair.local.client().eraseLabel(label)
    # Without the official object store the label cannot be fetched.
    solventwrapper.runShouldFail(
        os.getcwd(), "localize --label=%s" % label, "official",
        env=dict(SOLVENT_CONFIG="WITH_OFFICIAL_OBJECT_STORE: No"))
def test_createBuildProduct_bringLabel(self):
    """'bringlabel' checks out a previously submitted product label into the
    given destination directory."""
    self.createBuildProduct()
    self.cleanLocalClonesDir()
    label = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
    solventwrapper.run(
        gitwrapper.localClonesDir(), "bringlabel --label=%s --destination=%s" % (
            label, gitwrapper.localClonesDir()))
    # The product tree submitted by createBuildProduct must now exist on disk.
    self.assertTrue(os.path.isdir(os.path.join(
        gitwrapper.localClonesDir(), "theDirectory")))
    self.assertTrue(os.path.exists(os.path.join(
        gitwrapper.localClonesDir(), "theDirectory", "theProduct")))
def test_checkSolventRequirements_DependsOnSolvent__build__productName(self):
    """'checkrequirements' fails until a label exists under the 'build'
    product name for the required hash."""
    self.createBuildProduct()
    self.cleanLocalClonesDir()
    localClone1 = gitwrapper.LocalClone(self.project1)
    solventwrapper.run(localClone1, "addrequirement --originURL=%s --hash=%s" % (
        self.producer.url(), self.producer.hash()))
    label = 'solvent__producer__theProductName__%s__official' % self.producer.hash()
    buildLabel = 'solvent__producer__build__%s__official' % self.producer.hash()
    solventwrapper.runShouldFail(localClone1, "checkrequirements", "label")
    # Renaming the product label to the expected 'build' name satisfies it.
    self.osmosisPair.local.client().renameLabel(label, buildLabel)
    solventwrapper.run(localClone1, "checkrequirements")
def test_SubmitTwiceDoesNotWork_ForceWorks(self):
    """A second 'submitbuild' is rejected unless forced, either with --force
    or via the FORCE key in SOLVENT_CONFIG."""
    localClone1 = gitwrapper.LocalClone(self.project1)
    solventwrapper.run(localClone1, "submitbuild")
    solventwrapper.runShouldFail(localClone1, "submitbuild", "already")
    solventwrapper.run(localClone1, "submitbuild --force")
    solventwrapper.runShouldFail(localClone1, "submitbuild", "already")
    solventwrapper.run(localClone1, "submitbuild", env=dict(SOLVENT_CONFIG="FORCE: yes"))
def test_ProtectAgainstCommonMistakes_ProcMounted(self):
    """submitproduct refuses a rootfs that still has /proc mounted, unless
    --noCommonMistakesProtection is passed."""
    self.producer = gitwrapper.GitHub("producer")
    localProducer = gitwrapper.LocalClone(self.producer)
    localProducer.writeFile("build/rootfs/etc/config", "the contents")
    os.mkdir(os.path.join(localProducer.directory(), "proc"))
    # Fake a /proc mount entry inside the product rootfs; mount.txt is
    # presumably the fixture the protection logic reads -- TODO confirm.
    with open("build/mount.txt", "a") as f:
        f.write("proc on %s/proc type proc (rw,nosuid,nodev,noexec,relatime)\n" % (
            os.path.join(localProducer.directory(), 'build', 'rootfs', 'proc'), ))
    solventwrapper.runShouldFail(
        localProducer, "submitproduct rootfs build/rootfs", "mounted")
    solventwrapper.run(localProducer, "submitproduct rootfs build/rootfs --noCommonMistakesProtection")
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
    label = 'solvent__producer__rootfs__%s__dirty' % self.producer.hash()
    self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
def test_ApproveTwiceDoesNotWork_ForceDoesNothing(self):
    """A second 'approve' always fails -- even 'FORCE: yes' does not
    override it."""
    localClone1 = gitwrapper.LocalClone(self.project1)
    solventwrapper.configureAsOfficial()
    solventwrapper.run(localClone1, "submitbuild")
    solventwrapper.run(localClone1, "approve")
    solventwrapper.runShouldFail(localClone1, "approve", "already")
    solventwrapper.runShouldFail(localClone1, "approve", "already", env=dict(
        SOLVENT_CONFIG="FORCE: yes"))
def test_LabelExists(self):
    """'labelexists' matches only the exact label string -- any prefix,
    suffix or truncation must fail."""
    localClone1 = gitwrapper.LocalClone(self.project1)
    hash = localClone1.hash()
    localClone1.writeFile("build/product1", "product1 contents")
    solventwrapper.run(localClone1, "submitbuild")
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 1)
    label = 'solvent__project1__build__%s__dirty' % hash
    self.assertEquals(self.osmosisPair.local.client().listLabels(), [label])
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 1)
    self.assertEquals(self.osmosisPair.official.client().listLabels(), [label])
    solventwrapper.run(localClone1, 'labelexists --label=%s' % label)
    # Near-misses around the exact label must all be rejected.
    solventwrapper.runShouldFail(localClone1, 'labelexists --label=%sA' % label, "exist")
    solventwrapper.runShouldFail(localClone1, 'labelexists --label=A%s' % label, "exist")
    solventwrapper.runShouldFail(localClone1, 'labelexists --label=A', "exist")
    solventwrapper.runShouldFail(localClone1, 'labelexists --label=%s' % label[:-1], "exist")
def test_unsubmit_official(self):
    """'unsubmit' removes both 'officialcandidate' labels (build + product)
    from the local and official stores."""
    self.producer = gitwrapper.GitHub("producer")
    localProducer = gitwrapper.LocalClone(self.producer)
    localProducer.writeFile("build/theDirectory/theProduct", "the contents")
    solventwrapper.configureAsOfficial()
    solventwrapper.run(localProducer, "submitbuild")
    solventwrapper.run(localProducer, "submitproduct theProductName build")
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 2)
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 2)
    label = 'solvent__producer__theProductName__%s__officialcandidate' % self.producer.hash()
    self.assertIn(label, self.osmosisPair.local.client().listLabels())
    label = 'solvent__producer__build__%s__officialcandidate' % self.producer.hash()
    self.assertIn(label, self.osmosisPair.local.client().listLabels())
    solventwrapper.run(localProducer, "unsubmit")
    # Both stores must be empty after unsubmit.
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 0)
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 0)
def test_unsubmit_dirty(self):
    """'unsubmit' also removes 'dirty' labels (non-official mode) from both
    object stores."""
    self.producer = gitwrapper.GitHub("producer")
    localProducer = gitwrapper.LocalClone(self.producer)
    localProducer.writeFile("build/theDirectory/theProduct", "the contents")
    solventwrapper.run(localProducer, "submitbuild")
    solventwrapper.run(localProducer, "submitproduct theProductName build")
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 2)
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 2)
    label = 'solvent__producer__theProductName__%s__dirty' % self.producer.hash()
    self.assertIn(label, self.osmosisPair.local.client().listLabels())
    label = 'solvent__producer__build__%s__dirty' % self.producer.hash()
    self.assertIn(label, self.osmosisPair.local.client().listLabels())
    solventwrapper.run(localProducer, "unsubmit")
    # Both stores must be empty after unsubmit.
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 0)
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 0)
def test_unsubmit_empty(self):
    """'unsubmit' on a clone with nothing submitted is a harmless no-op."""
    self.producer = gitwrapper.GitHub("producer")
    localProducer = gitwrapper.LocalClone(self.producer)
    solventwrapper.run(localProducer, "unsubmit")
    self.assertEquals(len(self.osmosisPair.official.client().listLabels()), 0)
    self.assertEquals(len(self.osmosisPair.local.client().listLabels()), 0)
# indirect deep dep joined
# remove unosmosed files
# Run the full suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: utf-8 -*-
from django.utils.timezone import utc
import datetime
def utcnow():
    """Return the current time as a timezone-aware UTC datetime.

    Uses datetime.now(utc) instead of the deprecated
    datetime.utcnow().replace(tzinfo=utc) -- same result, but utcnow() is
    deprecated since Python 3.12 and produces a naive intermediate value.
    """
    return datetime.datetime.now(utc)
|
import pandas as pd
from datetime import datetime as dt
import re
from bokeh.plotting import figure, output_file, show
from bokeh.models import ColumnDataSource, FactorRange, Label
from bokeh.models.tools import HoverTool
from bokeh.transform import factor_cmap
from math import pi
from bokeh.layouts import layout, column,row
from bokeh.embed import file_html
from bokeh.resources import CDN
def _bdaycount(dstart,dend):
'''
Function to calculate the number of business days between dstart and dend
Keyword Arguments:
dstart, str or datetime, startdate
dend, str or datetime, endate
'''
if (pd.isna(dstart)) or (pd.isna(dend)):
return 999
else:
temp = pd.date_range(dstart,dend,freq = 'B')
return temp.shape[0]
def dataprep(CRpath,COpath):
    '''
    Load the raw CR and CO excel exports and append a 'Days Open' column
    to each.

    Keyword Arguments:
    CRpath -- str, path to the CR excel export
    COpath -- str, path to the CO excel export

    Returns [CRs, COs] as pandas DataFrames.
    '''
    CRs = pd.read_excel(CRpath,header = 0)
    COs = pd.read_excel(COpath,header = 0)
    # End date for each CO: its completion date, or today if still open
    # (row-wise min of 'Actual CO Complete' and today).
    tempdf = COs['Actual CO Complete'].copy().to_frame()
    tempdf['today'] = dt.today()
    tempdf = tempdf.min(axis = 1)
    # Completed CRs: elapsed time from start to arrival in the final state.
    CRs.loc[CRs['Workflow State'] == 'Complete','Days Open'] = CRs.loc[CRs['Workflow State'] == 'Complete',:].apply(lambda row:row['State Arrival Date'] - row['Start Date'],
                                                                                                                    axis = 1)
    # Open CRs: elapsed time from start until today.
    CRs.loc[CRs['Workflow State'] != 'Complete','Days Open'] = CRs['Start Date'].apply(lambda x: dt.today() - x)
    # Convert timedeltas to whole-day integers.
    CRs['Days Open'] = CRs['Days Open'].apply(lambda x:x.days)
    COs['Days Open'] = tempdf - COs['Actual Start']
    COs['Days Open'] = COs['Days Open'].apply(lambda x:(x.days))
    return [CRs,COs]
def CRprep(CRpath):
    '''
    Load the CR excel export and add 'Days Open' (calendar days) and
    'BDays Open' (business days) columns.

    Keyword Arguments:
    CRpath -- str, path to the CR excel export

    Returns the CRs DataFrame.
    '''
    CRs = pd.read_excel(CRpath, header=0)
    # Boolean mask of CRs whose workflow has finished.
    completetest = (CRs['Workflow State'] == 'Complete')

    def startselect(option1, option2):
        # Prefer option1 ('ECA Arrival Date'); fall back to option2 ('Start Date').
        if pd.isna(option1):
            return option2
        else:
            return option1

    CRs['StartDate2'] = CRs.apply(lambda row: startselect(row['ECA Arrival Date'], row['Start Date']), axis=1)
    # Completed CRs: measure from StartDate2 to the completion ('State Arrival') date.
    CRs.loc[completetest, 'Days Open'] = CRs.loc[completetest, :].apply(
        lambda row: row['State Arrival Date'] - row['StartDate2'], axis=1)
    CRs.loc[completetest, 'BDays Open'] = CRs.loc[completetest, :].apply(
        lambda row: _bdaycount(row['StartDate2'], row['State Arrival Date']), axis=1)
    # Open CRs: measure from StartDate2 until today.
    # BUG FIX: the original negated the mask with unary minus (-completetest);
    # numpy/pandas do not support '-' on boolean arrays and raise TypeError --
    # boolean masks are inverted with '~'.
    CRs.loc[~completetest, 'Days Open'] = CRs.loc[~completetest, 'StartDate2'].apply(lambda x: dt.today() - x)
    CRs.loc[~completetest, 'BDays Open'] = CRs.loc[~completetest, 'StartDate2'].apply(lambda x: _bdaycount(x, dt.today()))
    # Convert the timedeltas to whole-day integers.
    CRs['Days Open'] = CRs['Days Open'].apply(lambda x: x.days)
    return CRs
def COprep(COpath, CRpath=None):
    '''
    Load the CO excel export and add 'Maxdate', 'Days Open' (calendar days)
    and 'Bdays Open' (business days) columns.

    Keyword Arguments:
    COpath -- str, path to the CO excel export
    CRpath -- str or None; when a str, the CR export is prepared too

    Returns [COs, CRs] when CRpath is a string, otherwise just COs.
    (The original called CRprep(CRpath) twice, reading the excel file twice
    and discarding the first result; the redundant call is removed.)
    '''
    COs = pd.read_excel(COpath, header=0)
    # End date for each CO: its completion date, or today if still open
    # (row-wise min of 'Actual CO Complete' and today).
    tempdf = COs['Actual CO Complete'].copy().to_frame()
    tempdf['today'] = dt.today()
    tempdf = tempdf.min(axis=1)
    COs['Maxdate'] = tempdf
    # Calendar days open, then converted to whole-day integers.
    COs['Days Open'] = tempdf - COs['Actual Start']
    COs['Days Open'] = COs['Days Open'].apply(lambda x: (x.days))
    # Business days open via the shared helper.
    COs['Bdays Open'] = COs.apply(lambda row: _bdaycount(row['Actual Start'], row['Maxdate']), axis=1)
    if isinstance(CRpath, str):
        return [COs, CRprep(CRpath)]
    return COs
def COgannt(COpath,CRpath = None):
    '''
    Build the DataFrame behind the CO Gannt chart: market-filtered,
    market-sorted COs left-merged (on the 5-digit base number) with their
    completed CRs that closed within 14 days.

    Keyword Args:
    COpath - string, path to CO excel file
    CRpath - string, path to CR excel file
             (NOTE(review): must be a str here -- COprep's return value is
             unpacked as [COs, CRs], which fails if CRpath is None)
    '''
    [COs,CRs] = COprep(COpath,CRpath)
    # Preferred market ordering for the chart (ordered categorical).
    markets = ['Child','School Bus','Coach','WTOR','Truck','Defense','Fire','Ambulance','Other','Construction','UTV','Outdoor','Farm','Multiple']
    marketcat = pd.Categorical(markets,markets,ordered = True)
    # Bar color per market.
    colors = {'Child':'dodgerblue','School Bus':'orangered','Coach':'coral','WTOR':'orange','Truck':'limegreen','Defense':'olivedrab',
              'Fire':'red','Ambulance':'lightcoral','Other':'lightsalmon','Construction':'gold','UTV':'goldenrod',
              'Outdoor':'turquoise','Multiple':'black','Farm':'brown'}
    # Mask of COs belonging to one of the recognized markets.
    testtable = COs['Market'].apply(lambda x: x in markets)
    # Keep in-market COs whose Current State is not 'Active'.
    # NOTE(review): the original comment claimed the opposite ("only active
    # CO's") -- confirm which filter is actually intended.
    COs2 = COs.loc[testtable & (COs['Current State'] != 'Active'),:]
    # Drop parking-lot entries (case-insensitive match on the state name).
    parking = COs2['Current State'].apply(lambda x:'parking' not in x.lower())
    COs2 = COs2.loc[parking,:]
    # Change the market column to the ordered categorical type.
    COs2['Market'] = COs2['Market'].astype(marketcat)
    # Sort the markets in the preferred order (descending, newest start first).
    COs2 = COs2.sort_values(by = ['Market','Actual Start'],ascending = False)
    # Same market filter applied to the CRs.
    testtable = CRs['Market'].apply(lambda x: x in markets)
    CRstemp = CRs.loc[testtable,:].copy()
    # Keep only complete CRs.
    CRstemp = CRstemp[CRstemp['Workflow State'] == 'Complete']
    # Change the Market column to the ordered categorical type.
    CRstemp['Market'] = CRstemp['Market'].astype(marketcat)
    # Keep only the relevant columns and rename them.
    CRstemp = CRstemp[['Start Date','State Arrival Date','Number']]
    CRstemp.columns = ['CRStartDate','CRFinishDate','Number']
    # The 5-digit base number ties a CO to its originating CR.
    basenum = re.compile('[0-9]{5}')
    COs2['Base #'] = COs2['CO Number'].apply(lambda x: basenum.search(x)[0])
    CRstemp['Base #'] = CRstemp['Number'].apply(lambda x: basenum.search(x)[0])
    # Bar color per row, from the market palette.
    COs2['Color'] = COs2['Market'].apply(lambda x:colors[x])
    # Constant column used as the right edge of each open CO bar.
    COs2['Today'] = dt.today()
    # Days the CR was open; drop CRs that took longer than 14 days.
    CRstemp['CRDaysOpen'] = CRstemp['CRFinishDate'] - CRstemp['CRStartDate']
    CRstemp['CRDaysOpen'] = CRstemp['CRDaysOpen'].apply(lambda x:x.days)
    CRstemp = CRstemp.loc[CRstemp['CRDaysOpen'] <=14,:]
    # Space-free duplicates so bokeh hover tooltips can reference them.
    COs2['COName'] = COs2['CO Name']
    COs2['Champ'] = COs2['Project Champion']
    # Left-merge on the base number: every CO row, matched CR data where present.
    COs2 = pd.merge(COs2,CRstemp,left_on = 'Base #',right_on = 'Base #',how = 'left')
    COs2['DaysOpen'] = COs2['Actual Start'].apply(lambda x: dt.today() - x).apply(lambda x:x.days)
    return COs2
def CRgannt(CRpath):
    '''
    Build the DataFrame behind the CR Gannt chart: open (non-complete),
    non-parking-lot CRs from the recognized markets, sorted by market and
    start date, with color/today helper columns for plotting.

    Keyword Args:
    CRpath - string, path to CR excel file
    '''
    CRs = CRprep(CRpath)
    # Preferred market ordering for the chart (ordered categorical).
    markets = ['Child','School Bus','Coach','WTOR','Truck','Defense','Fire','Ambulance','Other','Construction','UTV','Outdoor','Farm','Multiple']
    marketcat = pd.Categorical(markets,markets,ordered = True)
    # Bar color per market.
    colors = {'Child':'dodgerblue','School Bus':'orangered','Coach':'coral','WTOR':'orange','Truck':'limegreen','Defense':'olivedrab',
              'Fire':'red','Ambulance':'lightcoral','Other':'lightsalmon','Construction':'gold','UTV':'goldenrod',
              'Outdoor':'turquoise','Multiple':'black','Farm':'brown'}
    # Mask of CRs belonging to one of the recognized markets.
    testtable = CRs['Market'].apply(lambda x: x in markets)
    # Keep only in-market CRs that are still open.
    CRs2 = CRs.loc[testtable & (CRs['Workflow State'] != 'Complete'),:].copy()
    # Drop parking-lot entries (case-insensitive match on the state name).
    parking = CRs2['Workflow State'].apply(lambda x:'parking' not in x.lower())
    CRs2 = CRs2.loc[parking,:]
    # Change the market column to the ordered categorical type.
    CRs2['Market'] = CRs2['Market'].astype(marketcat)
    # Sort the markets in the preferred order (descending, newest start first).
    CRs2 = CRs2.sort_values(by = ['Market','Start Date'],ascending = False)
    # Re-apply the market mask (already filtered above; kept as in original).
    testtable = CRs2['Market'].apply(lambda x: x in markets)
    CRs2 = CRs2.loc[testtable,:]
    # 5-digit base number extracted from the CR number.
    basenum = re.compile('[0-9]{5}')
    CRs2['Base #'] = CRs2['Number'].apply(lambda x: basenum.search(x)[0])
    # Bar color per row, from the market palette.
    CRs2['Color'] = CRs2['Market'].apply(lambda x:colors[x])
    # Constant column used as the right edge of each open CR bar.
    CRs2['Today'] = dt.today()
    # Space-free names so bokeh hover tooltips can reference the columns.
    CRs2 = CRs2.rename({'Project Champion':'ProjectChampion','Total Days':'TotalDays'},axis = 1)
    return CRs2
def CRshow(CRpath, render = False, outfile = None):
    '''
    Creates a bokeh figure rendering a Gannt chart of CRs in process: one
    horizontal bar per open CR from its start date to today, colored by market.

    Keyword Arguments:
    CRpath - string, path of excel CR file
    render - bool, if True show() the figure (returns None in that case)
    outfile - string or None, if given also write the chart as standalone HTML

    Returns the bokeh figure when render is False.
    '''
    CRs2 = CRgannt(CRpath)
    CRganntfig = figure(y_range = CRs2['Number'].tolist(), plot_width=1000, plot_height=1000, x_axis_type="datetime",title = 'Open CR\'s')
    # Single data source feeding bars and hover tool.
    source1 = ColumnDataSource(CRs2)
    # One bar per CR: start date to today, colored/legended by market.
    CRganntfig.hbar(y = 'Number', height = 0.5, left = 'Start Date', right= 'Today',color = 'Color',legend = 'Market',source = source1)
    # Hover tooltips reference the space-free columns prepared by CRgannt.
    TOOLTIPS = [
        ('Number', "@Number"),
        ('CR Name', "@Name"),
        ("Champ","@ProjectChampion"),
        ("Days Open","@TotalDays"),
        ('Market','@Market')]
    h = HoverTool(tooltips = TOOLTIPS)
    CRganntfig.add_tools(h)
    CRganntfig.legend.location = "top_left"
    CRganntfig.legend.label_text_font_size = "6pt"
    CRganntfig.legend.background_fill_alpha = 0.1
    CRganntfig.title.text_font_size = '18pt'
    CRganntfig.title.align = 'center'
    # Hide the toolbar/logo for clean dashboard embedding.
    CRganntfig.toolbar.logo = None
    CRganntfig.toolbar_location = None
    if outfile is not None:
        html = file_html(CRganntfig,CDN,"CR's")
        with open(outfile,'w') as file: #Use file to refer to the file object
            file.write(html)
    if render == True:
        show(CRganntfig)
    else:
        return CRganntfig
def COshow(CRpath,COpath,render = False, outfile = None):
    '''
    Creates a bokeh figure rendering a Gannt chart of COs in process: one
    filled bar per CO from its actual start to today, overlaid with outline
    bars for the associated completed CRs.

    NOTE(review): the parameter names look swapped -- the first argument is
    forwarded positionally as COgannt's COpath. The existing caller (COdash)
    already passes the CO path first, so the names are kept unchanged for
    interface stability.

    Keyword Arguments:
    CRpath - string, forwarded as COgannt's first (COpath) argument
    COpath - string, forwarded as COgannt's second (CRpath) argument
    render - bool, if True show() the figure and return None
    outfile - string or None, if given also write the chart as standalone HTML

    Returns the bokeh figure when render is False.
    '''
    COs2 = COgannt(CRpath,COpath)
    COs2['CONumber'] = COs2['CO Number'] #Space-free alias so the HoverTool can reference it
    COganntfig = figure(y_range = COs2['CO Number'].tolist(), plot_width=600, plot_height=1000, x_axis_type="datetime",title = 'Open CO\'s')
    # Full data source for CO bars; second source keeps only rows that
    # actually matched a CR in the merge.
    source1 = ColumnDataSource(COs2)
    CRcomp = COs2.dropna(subset = ['Number'])
    source2 = ColumnDataSource(CRcomp)
    # CO bars: actual start to today, colored by market.
    COganntfig.hbar(y = 'CO Number', height = 0.5, left = 'Actual Start',right = 'Today',color = 'Color',legend = 'Market',source = source1)
    # CR overlay bars: outline only, on the same rows.
    COganntfig.hbar(y = 'CO Number',height = 0.5, left = 'CRStartDate',right= 'CRFinishDate',color = None,line_color = 'Color',source = source2)
    # Hover tool. BUG FIX: the 'CO Number' entry previously used "CONumber"
    # without the '@' prefix, so the hover showed the literal text instead of
    # the column value.
    TOOLTIPS = [
        ('CO Number',"@CONumber"),
        ('CO Name', "@COName"),
        ("Champ","@Champ"),
        ("Days Open","@DaysOpen"),
        ('Market','@Market')
        ]
    h = HoverTool(tooltips = TOOLTIPS)
    COganntfig.add_tools(h)
    COganntfig.legend.location = "top_left"
    COganntfig.legend.label_text_font_size = "6pt"
    COganntfig.legend.background_fill_alpha = 0.1
    COganntfig.title.text_font_size = '18pt'
    COganntfig.title.align = 'center'
    # Hide the toolbar/logo for clean dashboard embedding.
    COganntfig.toolbar.logo = None
    COganntfig.toolbar_location = None
    # Optionally write the plot out as standalone HTML.
    if outfile is not None:
        html = file_html(COganntfig,CDN,"CR's")
        with open(outfile,'w') as file: #Use file to refer to the file object
            file.write(html)
    # Render the plot in bokeh, or hand the figure back to the caller.
    if render == True:
        show(COganntfig)
        return None
    else:
        return COganntfig
#Now Work on the Weekly Statistics
def CRweekly(CRpath,render = False):
    '''
    Bar chart of CRs opened vs closed per week (last 12 weeks), with a red
    line for the mean business-days-to-complete resampled weekly.

    Keyword Arguments:
    CRpath - string, path of excel CR file
    render - bool, if True show() the figure and return None

    Returns the bokeh figure when render is False.
    '''
    CRs = CRprep(CRpath)
    # Completed CRs only, with a calendar-days-to-complete column.
    completeCRs = CRs.loc[CRs['Workflow State'] == 'Complete',:].copy()
    completeCRs.loc[:,'Days'] = completeCRs['State Arrival Date'] - completeCRs['Start Date']
    completeCRs['Days'] = completeCRs['Days'].apply(lambda x:x.days)
    # Mean business days to complete a CR, resampled by completion week.
    meanCRs = pd.Series(data = completeCRs['BDays Open'].tolist(),index = completeCRs['State Arrival Date']).resample('W').mean()
    meanCRs = meanCRs.to_frame()
    # Series keyed by open date and close date respectively.
    CRsopen = pd.Series(data = CRs['Number'].tolist(),index = CRs['Start Date'])
    CRsclosed = pd.Series(data = completeCRs['Number'].tolist(),index = completeCRs['State Arrival Date'])
    # Weekly counts of CRs opened and closed.
    CRopencount = CRsopen.resample('W').count().to_frame()
    CRclosedcount = CRsclosed.resample('W').count().to_frame()
    # Outer-merge so weeks with activity on only one side are kept (filled with 0).
    CRcounts = pd.merge(CRopencount,CRclosedcount,left_index = True,right_index = True,how = 'outer').fillna(0)
    CRcounts = pd.merge(CRcounts,meanCRs,left_index = True,right_index = True,how = 'outer').fillna(0)
    CRcounts.columns = ['Open','Closed','Mean']
    # Keep the last 12 weeks and turn the week index into a string column
    # so bokeh can treat it as categorical.
    CRcounts = CRcounts.iloc[-12:,:]
    CRcounts = CRcounts.reset_index()
    CRcounts = CRcounts.rename({'index':'Week'},axis = 1)
    CRcounts['Week'] = CRcounts['Week'].astype(str)
    # Grouped-bar plot: (week, Open/Closed) factor pairs on the x axis.
    palette = ['lime','aqua']
    x = [(week1,status) for week1 in CRcounts['Week'] for status in ['Open','Closed']]
    counts = sum(zip(CRcounts['Open'], CRcounts['Closed']), ()) # like an hstack
    source = ColumnDataSource(data=dict(x=x, counts=counts))
    CRmetrics = figure(x_range=FactorRange(*x), plot_width = 600,plot_height = 400,title="CR History",
                       toolbar_location=None, tools="")
    CRmetrics.vbar(x='x', top='counts', width=0.8, source=source,
                   fill_color=factor_cmap('x',palette=palette,factors=['Open','Closed'],start = 1,end = 2))
    # Red trend line: mean business days to complete, per week.
    CRmetrics.line(x = CRcounts['Week'].tolist(),y = CRcounts['Mean'].tolist(),line_width = 3,color = 'red')
    CRmetrics.xaxis.major_label_orientation = 1
    CRmetrics.toolbar.logo = None
    CRmetrics.toolbar_location = None
    if render == True:
        show(CRmetrics)
        return None
    else:
        return CRmetrics
def COweekly(COPath,render = False):
    '''
    Bar chart of COs opened vs closed per week (last 15 weeks), with a red
    line for the mean business-days-to-complete resampled weekly.

    Keyword Arguments:
    COPath - string, path of excel CO file
    render - bool, if True show() the figure and return None

    Returns the bokeh figure when render is False.
    '''
    COs = COprep(COPath)
    # COs with a completion date are considered closed.
    completeCOs = COs.loc[COs['Actual CO Complete'].apply(lambda x: not pd.isnull(x)),:].copy()
    # Calendar days to complete each closed CO.
    completeCOs['Days to Complete'] = completeCOs['Actual CO Complete'] - completeCOs['Actual Start']
    completeCOs['Days to Complete'] = completeCOs['Days to Complete'].apply(lambda x: x.days)
    # Mean business days to complete a CO, resampled by completion week.
    meanCOs = pd.Series(data = completeCOs['Bdays Open'].tolist(),index = completeCOs['Actual CO Complete'].tolist()).resample('W').mean()
    meanCOs = meanCOs.to_frame()
    # Series keyed by open date and close date respectively (rows without a
    # start date are dropped first).
    tempCOs = COs.dropna(subset = ['Actual Start'])
    COsopen = pd.Series(data = tempCOs['CO Number'].tolist(),index = tempCOs['Actual Start']).sort_index()
    COsclosed = pd.Series(data = completeCOs['CO Number'].tolist(),index = completeCOs['Actual CO Complete'])
    # Weekly counts of COs opened and closed.
    COopencount = COsopen.resample('W').count().to_frame()
    COclosedcount = COsclosed.resample('W').count().to_frame()
    # Outer-merge so weeks with activity on only one side are kept (filled with 0).
    COcounts = pd.merge(COopencount,COclosedcount,left_index = True,right_index = True,how = 'outer').fillna(0)
    COcounts = pd.merge(COcounts,meanCOs,left_index = True,right_index = True,how = 'outer').fillna(0)
    COcounts.columns = ['Open','Closed','Mean']
    # Keep only the last 15 weeks for plotting.
    COcounts = COcounts.iloc[-15:,:]
    COcounts = COcounts.reset_index()
    COcounts = COcounts.rename({'index':'Week'},axis = 1)
    # Convert the weeks from datetime to string (minus the year prefix) so
    # bokeh can treat them as categorical.
    COcounts['Week'] = COcounts['Week'].astype(str)
    COcounts['Week'] = COcounts['Week'].apply(lambda x:x[5:])
    # Grouped-bar plot: (week, Open/Closed) factor pairs on the x axis.
    palette = ['lime','aqua']
    x = [(week1,status) for week1 in COcounts['Week'] for status in ['Open','Closed']]
    counts = sum(zip(COcounts['Open'], COcounts['Closed']), ()) # like an hstack
    source = ColumnDataSource(data=dict(x=x, counts=counts))
    COmetrics = figure(x_range=FactorRange(*x), plot_width = 600,plot_height = 400,title="CO History",
                       toolbar_location=None, tools="")
    COmetrics.vbar(x='x', top='counts', width=0.8, source=source,
                   fill_color=factor_cmap('x', palette=palette, factors=['Open','Closed'],start=1, end=2))
    # Red trend line: mean business days to complete, per week.
    COmetrics.line(x = COcounts['Week'].tolist(),y = COcounts['Mean'].tolist(),line_width = 3,color = 'red')
    COmetrics.xaxis.major_label_orientation = 1
    COmetrics.toolbar.logo = None
    COmetrics.toolbar_location = None
    if render == True:
        show(COmetrics)
        return None
    else:
        return COmetrics
def CRarc(CRPath, render=False):
    '''
    Donut ("target") chart of open CRs: green fraction = on time (< 30 days
    open), red fraction = late (>= 30 days). The open-CR count is printed in
    the center and the on-time percentage in the corner.

    Keyword Arguments:
    CRPath - string, path of excel CR file
    render - bool, if True show() the figure and return None

    Returns the bokeh figure when render is False.
    '''
    CRs = CRprep(CRPath)
    OpenCRs = CRs.loc[CRs['Workflow State'] != 'Complete', :]
    under30 = OpenCRs['Days Open'][OpenCRs['Days Open'] < 30].shape[0]
    over30 = OpenCRs['Days Open'][OpenCRs['Days Open'] >= 30].shape[0]
    # BUG FIX: guard against ZeroDivisionError when there are no open CRs;
    # with nothing open, report 100% on time.
    total = under30 + over30
    under30 = under30 / total if total else 1.0
    over30 = 1 - under30
    CRcount = str(OpenCRs.shape[0])
    CRarc1 = figure(plot_width=400, plot_height=400, x_range=[-2.5, 2.5], y_range=[-2.5, 2.5], title='CR Target',)
    # Green (on-time) wedge followed by red (late) wedge completing the ring.
    CRarc1.annular_wedge(x=0, y=0, inner_radius=1.2, outer_radius=2,
                         start_angle=0.0, end_angle=2*pi*under30, color="green", alpha=0.6)
    CRarc1.annular_wedge(x=0, y=0, inner_radius=1.2, outer_radius=2,
                         start_angle=2*pi*under30, end_angle=2*pi, color='red', alpha=0.6)
    centertext = CRcount
    openpct = 'On Time = {0:2.0f}%'.format(under30*100)
    mytext = Label(x=0, y=0, text=centertext, text_align='center', text_font_size='30pt')
    mytext2 = Label(x=2.4, y=1.9, text=openpct, text_font_size='20pt', text_align='right')
    CRarc1.add_layout(mytext)
    CRarc1.add_layout(mytext2)
    # Strip axes/grid/toolbar for clean dashboard embedding.
    CRarc1.xaxis.visible = False
    CRarc1.yaxis.visible = False
    CRarc1.xgrid.grid_line_color = None
    CRarc1.ygrid.grid_line_color = None
    CRarc1.title.text_font_size = '12pt'
    CRarc1.toolbar.logo = None
    CRarc1.toolbar_location = None
    if render == True:
        show(CRarc1)
        return None
    else:
        return CRarc1
def COarc(COPath, render=False):
    '''
    Donut ("target") chart of open COs: green fraction = on time (< 30 days
    open), red fraction = late (>= 30 days). The open-CO count is printed in
    the center and the on-time percentage in the corner.

    Keyword Arguments:
    COPath - string, path of excel CO file
    render - bool, if True show() the figure and return None

    Returns the bokeh figure when render is False.
    '''
    COs = COprep(COPath)
    OpenCOs = COs.loc[COs['Actual CO Complete'].apply(lambda x: pd.isnull(x)), :]
    under30 = OpenCOs['Days Open'][OpenCOs['Days Open'] < 30].shape[0]
    over30 = OpenCOs['Days Open'][OpenCOs['Days Open'] >= 30].shape[0]
    # BUG FIX: guard against ZeroDivisionError when there are no open COs;
    # with nothing open, report 100% on time.
    total = under30 + over30
    under30 = under30 / total if total else 1.0
    over30 = 1 - under30
    COcount = str(OpenCOs.shape[0])
    COarc1 = figure(plot_width=400, plot_height=400, x_range=[-2.5, 2.5], y_range=[-2.5, 2.5], title='CO Target',)
    # Green (on-time) wedge followed by red (late) wedge completing the ring.
    COarc1.annular_wedge(x=0, y=0, inner_radius=1.2, outer_radius=2,
                         start_angle=0.0, end_angle=2*pi*under30, color="green", alpha=0.6)
    COarc1.annular_wedge(x=0, y=0, inner_radius=1.2, outer_radius=2,
                         start_angle=2*pi*under30, end_angle=2*pi, color='red', alpha=0.6)
    centertext = COcount
    openpct = 'On Time = {0:2.0f}%'.format(under30*100)
    mytext = Label(x=0, y=0, text=centertext, text_align='center', text_font_size='30pt')
    mytext2 = Label(x=2.4, y=1.9, text=openpct, text_font_size='20pt', text_align='right')
    COarc1.add_layout(mytext)
    COarc1.add_layout(mytext2)
    # Strip axes/grid/toolbar for clean dashboard embedding.
    COarc1.xaxis.visible = False
    COarc1.yaxis.visible = False
    COarc1.xgrid.grid_line_color = None
    COarc1.ygrid.grid_line_color = None
    COarc1.title.text_font_size = '12pt'
    COarc1.toolbar.logo = None
    COarc1.toolbar_location = None
    if render == True:
        show(COarc1)
        return None
    else:
        return COarc1
def COdash(COPath,CRPath,outfile):
    '''
    Assemble the CO dashboard (weekly history + target donut on the left,
    Gannt chart on the right) and write it to outfile as standalone HTML.

    NOTE(review): COshow is declared as COshow(CRpath, COpath) but called
    here as COshow(COPath, CRPath); the flow still works because COshow
    forwards its arguments positionally to COgannt(COpath, CRpath) -- the
    parameter names along the chain are misleading. Verify before refactoring.
    '''
    leftside = column(COweekly(COPath),COarc(COPath))
    rightside = column(COshow(COPath,CRPath))
    codash = row(leftside,rightside)
    html = file_html(codash, CDN, "CO's")
    with open(outfile,'w') as file: # Use file to refer to the file object
        file.write(html)
def CRdash(CRpath, outfile):
    '''
    Assemble the CR dashboard (weekly history + target donut on the left,
    Gannt chart on the right) and write it to outfile as standalone HTML.
    '''
    metrics_column = column(CRweekly(CRpath), CRarc(CRpath))
    gannt_column = column(CRshow(CRpath))
    dashboard = row(metrics_column, gannt_column)
    rendered = file_html(dashboard, CDN, "CR's")
    with open(outfile, 'w') as handle:
        handle.write(rendered)
# Generated by Django 2.0.2 on 2018-03-10 16:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Repoint Table.galerie to a ManyToManyField on galerie_photo.Theme
    (auto-generated, 2018-03-10)."""

    # Must run after the previous auto-generated migration of this app.
    dependencies = [
        ('gestion_table', '0005_auto_20180310_1633'),
    ]

    operations = [
        migrations.AlterField(
            model_name='table',
            name='galerie',
            field=models.ManyToManyField(to='galerie_photo.Theme'),
        ),
    ]
|
from helpers.proxy_utils import deploy_proxy
from brownie import *
from helpers.constants import *
from helpers.registry import registry
from config.badger_config import sett_config
from dotmap import DotMap, pprint
from enum import Enum, auto
from rich.console import Console
# Shared rich console for deploy-time logging.
console = Console()
"""
Sett is a subsystem of badger.
Requires the BadgerDAO infrastructure & multisig to be deployed
"""
# Shortcuts into the shared on-chain address registry.
curve = registry.curve
tokens = registry.tokens
def deploy_sett(badger, token, controller, name, symbol, deployer):
    """
    Deploy a Sett vault instance behind an upgradeable proxy.

    The deployer account doubles as governance; the keeper and proxy admin
    come from the shared badger system object. Returns the proxy contract
    produced by deploy_proxy.
    """
    proxyAdmin = badger.devProxyAdmin
    governance = deployer
    keeper = badger.keeper
    return deploy_proxy(
        "Sett",
        Sett.abi,
        badger.logic.Sett.address,
        proxyAdmin.address,
        badger.logic.Sett.initialize.encode_input(
            token, controller, governance, keeper, name, symbol
        ),
        deployer,
    )
def deploy_strategy(
    badger,
    strategyName,
    controller,
    params,
    deployer,
    governance=None,
    strategist=None,
    keeper=None,
    guardian=None,
):
    """
    Deploy the named strategy contract behind an upgradeable proxy.

    governance/strategist default to the deployer; keeper/guardian default to
    the badger system's accounts. ``params`` supplies the strategy-specific
    configuration (want token, fee schedule, etc.).

    Returns the proxy produced by deploy_proxy, or None when strategyName is
    unknown (preserved historical behavior).

    The original implementation repeated a near-identical 20-line deploy_proxy
    call per strategy; the shared shape is factored into _deploy/_fees while
    each branch keeps its exact initializer arguments. Fee lists are built
    lazily inside _fees so params.keepCRV is only touched for the curve gauge
    strategies (params is a DotMap, which auto-creates keys on access).
    """
    if not governance:
        governance = deployer
    if not strategist:
        strategist = deployer
    if not keeper:
        keeper = badger.keeper
    if not guardian:
        guardian = badger.guardian
    proxyAdmin = badger.devProxyAdmin
    console.print(
        "Deploy Strategy " + strategyName, params
    )

    def _fees(withKeepCRV=False):
        # Common fee block; curve gauge strategies take an extra keepCRV slot.
        fees = [
            params.performanceFeeGovernance,
            params.performanceFeeStrategist,
            params.withdrawalFee,
        ]
        if withKeepCRV:
            fees.append(params.keepCRV)
        return fees

    def _deploy(contract, *initArgs):
        # Every strategy shares the same proxy/initializer shape:
        # initialize(governance, strategist, controller, keeper, guardian,
        #            *strategy-specific args).
        logic = getattr(badger.logic, strategyName)
        return deploy_proxy(
            strategyName,
            contract.abi,
            logic.address,
            proxyAdmin.address,
            logic.initialize.encode_input(
                governance, strategist, controller, keeper, guardian, *initArgs
            ),
            deployer,
        )

    if strategyName == "StrategyCurveGaugeRenBtcCrv":
        return _deploy(
            StrategyCurveGaugeRenBtcCrv,
            [params.want, params.gauge, params.minter, params.swap, params.lpComponent],
            _fees(withKeepCRV=True),
        )
    if strategyName == "StrategyCurveGaugeSbtcCrv":
        return _deploy(
            StrategyCurveGaugeSbtcCrv,
            [params.want, params.gauge, params.minter, params.swap, params.lpComponent],
            _fees(withKeepCRV=True),
        )
    if strategyName == "StrategyCurveGaugeTbtcCrv":
        return _deploy(
            StrategyCurveGaugeTbtcCrv,
            [params.want, params.gauge, params.minter, params.swap, params.lpComponent],
            _fees(withKeepCRV=True),
        )
    if strategyName == "StrategySushiLpOptimizer":
        return _deploy(
            StrategySushiLpOptimizer,
            [params.want, params.badgerTree],
            params.pid,
            _fees(),
        )
    if strategyName == "StrategySushiBadgerWbtc":
        return _deploy(
            StrategySushiBadgerWbtc,
            [params.want, params.geyser, params.badger, params.badgerTree],
            _fees(),
        )
    if strategyName == "StrategyPickleMetaFarm":
        return _deploy(
            StrategyPickleMetaFarm,
            [params.want, params.pickleJar, curve.pools.renCrv.swap, tokens.wbtc],
            params.pid,
            _fees(),
        )
    if strategyName == "StrategyHarvestMetaFarm":
        return _deploy(
            StrategyHarvestMetaFarm,
            [
                params.want,
                params.harvestVault,
                params.vaultFarm,
                params.metaFarm,
                params.badgerTree,
            ],
            _fees(),
        )
    if strategyName == "StrategyBadgerLpMetaFarm":
        return _deploy(
            StrategyBadgerLpMetaFarm,
            [params.want, params.geyser, badger.token],
            _fees(),
        )
    if strategyName == "StrategyBadgerRewards":
        return _deploy(
            StrategyBadgerRewards,
            [badger.token, params.geyser],
            _fees(),
        )
    # Unknown strategy name: fall through and return None (historical behavior).
    return None
def deploy_controller(badger, deployer):
    """Deploy a Controller proxy backed by the shared Controller logic.

    TODO: Change to prod config (governance/strategist are the deployer here).
    """
    proxy_admin = badger.devProxyAdmin
    init_data = badger.logic.Controller.initialize.encode_input(
        deployer,          # governance (dev config)
        deployer,          # strategist (dev config)
        badger.keeper,     # keeper
        badger.dao.agent,  # rewards
    )
    return deploy_proxy(
        "Controller",
        Controller.abi,
        badger.logic.Controller.address,
        proxy_admin.address,
        init_data,
        deployer,
    )
def deploy_sett_common_logic(deployer):
    """Deploy fresh logic (implementation) contracts shared by every Sett."""
    logic_contracts = {
        "Controller": Controller,
        "Sett": Sett,
        "StakingRewards": StakingRewards,
    }
    return DotMap(
        {name: contract.deploy({"from": deployer})
         for name, contract in logic_contracts.items()}
    )
def deploy_sett_logic(deployer):
    """Deploy a fresh copy of every logic (implementation) contract used by the
    sett system: all strategies plus Controller, Sett and StakingRewards."""
    logic_contracts = {
        "StrategyCurveGauge": StrategyCurveGauge,
        "StrategyPickleMetaFarm": StrategyPickleMetaFarm,
        "StrategyHarvestMetaFarm": StrategyHarvestMetaFarm,
        "StrategyBadgerLpMetaFarm": StrategyBadgerLpMetaFarm,
        "StrategyBadgerRewards": StrategyBadgerRewards,
        "Controller": Controller,
        "Sett": Sett,
        "StakingRewards": StakingRewards,
    }
    return DotMap(
        {name: contract.deploy({"from": deployer})
         for name, contract in logic_contracts.items()}
    )
def configure_sett(sett, deployer):
    """Wire a deployed sett: register the vault for the strategy's want token,
    then approve and activate the strategy on the controller."""
    want = sett.strategy.want()
    controller = sett.controller
    controller.setVault(want, sett.sett, {"from": deployer})
    controller.approveStrategy(want, sett.strategy, {"from": deployer})
    controller.setStrategy(want, sett.strategy, {"from": deployer})
def deploy_sett_native_badger(badger, deployer):
    """Deploy the native BADGER sett: a StakingRewards-based strategy on the
    BADGER token with its own controller, vault and rewards pool.

    Returns the populated sett DotMap.
    """
    badger.add_controller("native")
    badger.deploy_sett("native")
    sett = DotMap(logic=deploy_sett_common_logic(deployer))
    sett.logic.StrategyBadgerRewards = StrategyBadgerRewards.deploy({"from": deployer})
    # bug fix: deploy_controller takes (badger, deployer); the extra `sett`
    # positional argument would raise a TypeError at runtime
    sett.controller = deploy_controller(badger, deployer)
    sett.sett = deploy_sett(
        badger,
        sett,
        badger.token,
        sett.controller,
        "Badger Sett badger",
        "bBadger",
        deployer,
    )
    sett.rewards = deploy_proxy(
        "StakingRewards",
        StakingRewards.abi,
        sett.logic.StakingRewards.address,
        badger.devProxyAdmin.address,
        sett.logic.StakingRewards.initialize.encode_input(
            deployer, badger.token, badger.token
        ),
        deployer,
    )
    params = sett_config.native.badger.params
    params.want = badger.token
    params.geyser = sett.rewards
    sett.strategy = deploy_strategy(
        badger, sett, "StrategyBadgerRewards", sett.controller, params, deployer,
    )
    sett.want = interface.IERC20(sett.strategy.want())
    configure_sett(sett, deployer)
    # Approve Setts on specific rewards pool: the strategy may stake
    sett.rewards.grantRole(APPROVED_STAKER_ROLE, sett.strategy, {"from": deployer})
    return sett
def deploy_sett_native_renbtc(badger, deployer):
    """Deploy the native renCrv sett (Curve gauge strategy) and wire it up."""
    sett = DotMap(logic=deploy_sett_common_logic(deployer))
    sett.logic.StrategyCurveGauge = StrategyCurveGauge.deploy({"from": deployer})
    # bug fix: deploy_controller takes (badger, deployer), not (badger, sett, deployer)
    sett.controller = deploy_controller(badger, deployer)
    sett.sett = deploy_sett(
        badger,
        sett,
        sett_config.native.renCrv.params.want,
        sett.controller,
        "Badger Sett renCrv",
        "bRenCrv",
        deployer,
    )
    sett.strategy = deploy_strategy(
        badger,
        sett,
        "StrategyCurveGauge",
        sett.controller,
        sett_config.native.renCrv.params,
        deployer,
    )
    sett.want = interface.IERC20(sett.strategy.want())
    configure_sett(sett, deployer)
    return sett
def deploy_sett_native_sbtccrv(badger, deployer):
    """Deploy the native sbtcCrv sett (Curve gauge strategy) and wire it up."""
    sett = DotMap(logic=deploy_sett_common_logic(deployer))
    sett.logic.StrategyCurveGauge = StrategyCurveGauge.deploy({"from": deployer})
    # bug fix: deploy_controller takes (badger, deployer), not (badger, sett, deployer)
    sett.controller = deploy_controller(badger, deployer)
    sett.sett = deploy_sett(
        badger,
        sett,
        sett_config.native.sbtcCrv.params.want,
        sett.controller,
        "Badger Sett sbtcCrv",
        "bSbtcCrv",
        deployer,
    )
    sett.strategy = deploy_strategy(
        badger,
        sett,
        "StrategyCurveGauge",
        sett.controller,
        sett_config.native.sbtcCrv.params,
        deployer,
    )
    sett.want = interface.IERC20(sett.strategy.want())
    configure_sett(sett, deployer)
    return sett
def deploy_sett_native_tbtccrv(badger, deployer):
    """Deploy the native tbtcCrv sett (Curve gauge strategy) and wire it up."""
    sett = DotMap(logic=deploy_sett_common_logic(deployer))
    sett.logic.StrategyCurveGauge = StrategyCurveGauge.deploy({"from": deployer})
    # bug fix: deploy_controller takes (badger, deployer), not (badger, sett, deployer)
    sett.controller = deploy_controller(badger, deployer)
    sett.sett = deploy_sett(
        badger,
        sett,
        sett_config.native.tbtcCrv.params.want,
        sett.controller,
        "Badger Sett tbtcCrv",
        "bTbtcCrv",
        deployer,
    )
    sett.strategy = deploy_strategy(
        badger,
        sett,
        "StrategyCurveGauge",
        sett.controller,
        sett_config.native.tbtcCrv.params,
        deployer,
    )
    sett.want = interface.IERC20(sett.strategy.want())
    configure_sett(sett, deployer)
    return sett
def deploy_sett_harvest_renbtc(badger, deployer):
    """Deploy the Harvest renCrv super-sett (meta-farm strategy) and wire it up."""
    sett = DotMap(logic=deploy_sett_common_logic(deployer))
    sett.logic.StrategyHarvestMetaFarm = StrategyHarvestMetaFarm.deploy(
        {"from": deployer}
    )
    # bug fix: deploy_controller takes (badger, deployer), not (badger, sett, deployer)
    sett.controller = deploy_controller(badger, deployer)
    sett.sett = deploy_sett(
        badger,
        sett,
        sett_config.harvest.renCrv.params.want,
        sett.controller,
        "Badger SuperSett renCrv (Harvest)",
        "bSuperRenCrv (Harvest)",
        deployer,
    )
    # params is the same object as sett_config.harvest.renCrv.params, so the
    # escrow assignment below is visible to deploy_strategy
    params = sett_config.harvest.renCrv.params
    params.rewardsEscrow = badger.rewardsEscrow
    sett.strategy = deploy_strategy(
        badger,
        sett,
        "StrategyHarvestMetaFarm",
        sett.controller,
        sett_config.harvest.renCrv.params,
        deployer,
    )
    sett.want = interface.IERC20(sett.strategy.want())
    configure_sett(sett, deployer)
    return sett
def deploy_sett_pickle_renbtc(badger, deployer):
    """Deploy the Pickle renCrv super-sett (meta-farm strategy) and wire it up."""
    sett = DotMap(logic=deploy_sett_common_logic(deployer))
    sett.logic.StrategyPickleMetaFarm = StrategyPickleMetaFarm.deploy(
        {"from": deployer}
    )
    # bug fix: deploy_controller takes (badger, deployer), not (badger, sett, deployer)
    sett.controller = deploy_controller(badger, deployer)
    sett.sett = deploy_sett(
        badger,
        sett,
        sett_config.pickle.renCrv.params.want,
        sett.controller,
        "Badger SuperSett renCrv (Pickle)",
        "bSuperRenCrv (Pickle)",
        deployer,
    )
    params = sett_config.pickle.renCrv.params
    sett.strategy = deploy_strategy(
        badger,
        sett,
        "StrategyPickleMetaFarm",
        sett.controller,
        sett_config.pickle.renCrv.params,
        deployer,
    )
    sett.want = interface.IERC20(sett.strategy.want())
    configure_sett(sett, deployer)
    return sett
def deploy_sett_system(badger, deployer):
    """Deploy the complete sett system: logic contracts, one controller per
    group (native / pickle / harvest), every vault and strategy, and the
    controller wiring (setVault / approveStrategy / setStrategy).
    """
    proxyAdmin = badger.proxyAdmin  # NOTE(review): assigned but unused — confirm
    deployer = badger.deployer  # NOTE(review): shadows the parameter — confirm intent
    # Logic
    sett = DotMap(logic=deploy_sett_logic(deployer))
    # Controllers
    # bug fix: deploy_controller takes (badger, deployer); the extra `sett`
    # positional argument would raise a TypeError
    sett.native.controller = deploy_controller(badger, deployer)
    sett.pickle.controller = deploy_controller(badger, deployer)
    sett.harvest.controller = deploy_controller(badger, deployer)
    # Deploy each pair of vault and strategy
    """
    For each group of Setts (native, harvest, pickle) iterate through each vault
    entry and deploy the Sett and starting strategy
    """
    # Deploy Setts
    sett.native.badger = deploy_sett(
        badger,
        sett,
        badger.token,
        sett.native.controller,
        "Badger Sett badger",
        "bBadger",
        deployer,
    )
    sett.native.sbtcCrv = deploy_sett(
        badger,
        sett,
        sett_config.native.sbtcCrv.params.want,
        sett.native.controller,
        "Badger Sett sbtcCrv",
        "bSbtcCrv",
        deployer,
    )
    sett.native.renCrv = deploy_sett(
        badger,
        sett,
        sett_config.native.renCrv.params.want,
        sett.native.controller,
        "Badger Sett renCrv",
        "bRenCrv",
        deployer,
    )
    sett.native.tbtcCrv = deploy_sett(
        badger,
        sett,
        sett_config.native.tbtcCrv.params.want,
        sett.native.controller,
        "Badger Sett tbtcCrv",
        "bTbtcCrv",
        deployer,
    )
    sett.pickle.renCrv = deploy_sett(
        badger,
        sett,
        sett_config.pickle.renCrv.params.want,
        sett.pickle.controller,
        "Badger SuperSett renCrv (Pickle)",
        "bSuperRenCrv (Pickle)",
        deployer,
    )
    sett.harvest.renCrv = deploy_sett(
        badger,
        sett,
        sett_config.harvest.renCrv.params.want,
        sett.harvest.controller,
        "Badger SuperSett renCrv (Harvest)",
        "bSuperRenCrv (Harvest)",
        deployer,
    )
    # Deploy Strategy Staking Rewards
    sett.rewards = DotMap()
    sett.rewards.badger = deploy_proxy(
        "StakingRewards",
        StakingRewards.abi,
        sett.logic.StakingRewards.address,
        badger.devProxyAdmin.address,
        sett.logic.StakingRewards.initialize.encode_input(
            deployer, badger.token, badger.token
        ),
        deployer,
    )
    # Deploy Strategies
    params = sett_config.native.badger.params
    params.want = badger.token
    params.geyser = sett.rewards.badger
    sett.native.strategies.badger = deploy_strategy(
        badger, sett, "StrategyBadgerRewards", sett.native.controller, params, deployer,
    )
    sett.native.strategies.sbtcCrv = deploy_strategy(
        badger,
        sett,
        "StrategyCurveGauge",
        sett.native.controller,
        sett_config.native.sbtcCrv.params,
        deployer,
    )
    sett.native.strategies.renCrv = deploy_strategy(
        badger,
        sett,
        "StrategyCurveGauge",
        sett.native.controller,
        sett_config.native.renCrv.params,
        deployer,
    )
    sett.native.strategies.tbtcCrv = deploy_strategy(
        badger,
        sett,
        "StrategyCurveGauge",
        sett.native.controller,
        sett_config.native.tbtcCrv.params,
        deployer,
    )
    params = sett_config.pickle.renCrv.params
    sett.pickle.strategies.renCrv = deploy_strategy(
        badger,
        sett,
        "StrategyPickleMetaFarm",
        sett.pickle.controller,
        sett_config.pickle.renCrv.params,
        deployer,
    )
    params = sett_config.harvest.renCrv.params
    params.rewardsEscrow = badger.rewardsEscrow
    sett.harvest.strategies.renCrv = deploy_strategy(
        badger,
        sett,
        "StrategyHarvestMetaFarm",
        sett.harvest.controller,
        sett_config.harvest.renCrv.params,
        deployer,
    )
    # Set Vaults on Controller
    sett.native.controller.setVault(
        badger.token, sett.native.badger, {"from": deployer}
    )
    sett.native.controller.setVault(
        sett_config.native.sbtcCrv.params.want, sett.native.sbtcCrv, {"from": deployer}
    )
    sett.native.controller.setVault(
        sett_config.native.renCrv.params.want, sett.native.renCrv, {"from": deployer}
    )
    sett.native.controller.setVault(
        sett_config.native.tbtcCrv.params.want, sett.native.tbtcCrv, {"from": deployer}
    )
    sett.pickle.controller.setVault(
        sett_config.pickle.renCrv.params.want, sett.pickle.renCrv, {"from": deployer}
    )
    sett.harvest.controller.setVault(
        sett_config.harvest.renCrv.params.want, sett.harvest.renCrv, {"from": deployer}
    )
    # Approve Strategies by Controller
    sett.native.controller.approveStrategy(
        badger.token, sett.native.strategies.badger, {"from": deployer}
    )
    sett.native.controller.approveStrategy(
        sett_config.native.sbtcCrv.params.want,
        sett.native.strategies.sbtcCrv,
        {"from": deployer},
    )
    sett.native.controller.approveStrategy(
        sett_config.native.renCrv.params.want,
        sett.native.strategies.renCrv,
        {"from": deployer},
    )
    sett.native.controller.approveStrategy(
        sett_config.native.tbtcCrv.params.want,
        sett.native.strategies.tbtcCrv,
        {"from": deployer},
    )
    sett.pickle.controller.approveStrategy(
        sett_config.pickle.renCrv.params.want,
        sett.pickle.strategies.renCrv,
        {"from": deployer},
    )
    sett.harvest.controller.approveStrategy(
        sett_config.harvest.renCrv.params.want,
        sett.harvest.strategies.renCrv,
        {"from": deployer},
    )
    # Set strategies on Controller
    sett.native.controller.setStrategy(
        badger.token, sett.native.strategies.badger, {"from": deployer}
    )
    sett.native.controller.setStrategy(
        sett_config.native.sbtcCrv.params.want,
        sett.native.strategies.sbtcCrv,
        {"from": deployer},
    )
    sett.native.controller.setStrategy(
        sett_config.native.renCrv.params.want,
        sett.native.strategies.renCrv,
        {"from": deployer},
    )
    sett.native.controller.setStrategy(
        sett_config.native.tbtcCrv.params.want,
        sett.native.strategies.tbtcCrv,
        {"from": deployer},
    )
    sett.pickle.controller.setStrategy(
        sett_config.pickle.renCrv.params.want,
        sett.pickle.strategies.renCrv,
        {"from": deployer},
    )
    sett.harvest.controller.setStrategy(
        sett_config.harvest.renCrv.params.want,
        sett.harvest.strategies.renCrv,
        {"from": deployer},
    )
    # Approve Setts on specific rewards pool: the badger strategy may stake
    sett.rewards.badger.grantRole(
        APPROVED_STAKER_ROLE, sett.native.strategies.badger, {"from": deployer}
    )
    return sett
def deploy_lp_rewards(token):
    """
    Deploy LP rewards Strategy for given Badger<>X LP Token
    """
    # NOTE(review): stub — no body implemented yet; currently returns None.
|
# 实现 strStr() 函数。
#
# 给你两个字符串 haystack 和 needle ,请你在 haystack 字符串中找出 needle 字符串出现的第一个位置(下标从 0 开始)。如
# 果不存在,则返回 -1 。
#
#
#
# 说明:
#
# 当 needle 是空字符串时,我们应当返回什么值呢?这是一个在面试中很好的问题。
#
# 对于本题而言,当 needle 是空字符串时我们应当返回 0 。这与 C 语言的 strstr() 以及 Java 的 indexOf() 定义相符。
#
#
#
# 示例 1:
#
#
# 输入:haystack = "hello", needle = "ll"
# 输出:2
#
#
# 示例 2:
#
#
# 输入:haystack = "aaaaa", needle = "bba"
# 输出:-1
#
#
# 示例 3:
#
#
# 输入:haystack = "", needle = ""
# 输出:0
#
#
#
#
# 提示:
#
#
# 0 <= haystack.length, needle.length <= 5 * 104
# haystack 和 needle 仅由小写英文字符组成
#
# Related Topics 双指针 字符串
# 👍 881 👎 0
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
    def strStr(self, haystack: str, needle: str) -> int:
        """Return the index of the first occurrence of needle in haystack,
        or -1 if needle is not a substring.

        An empty needle returns 0, matching C strstr() and Java indexOf().
        """
        # str.find implements exactly this contract (including the
        # empty-needle case), replacing the fragile split()-based scan
        # and the unidiomatic __len__() calls.
        return haystack.find(needle)
# leetcode submit region end(Prohibit modification and deletion)
|
from django.shortcuts import render, get_object_or_404
from django.template import loader
from django.http import HttpResponseRedirect, HttpResponse
from django.urls import reverse
from login.models import User
from .models import Address
from os import sys
def index(request):
    """Render the main page for the current user."""
    context = {'user': request.user}
    return render(request, 'mainpage/index.html', context)
def my_account(request):
    """Render the account page for the current user."""
    context = {'user': request.user}
    return render(request, 'mainpage/account.html', context)
def complete_info(request):
    """Persist the logged-in user's contact details and, optionally, a new
    delivery address.

    Expects POST fields: province, city, county, street, consignee,
    consignphone, moren ('true' marks the address as default), email, phone.
    Returns HttpResponse(1) on success, HttpResponse(0) on any failure.
    """
    print(request.user.username + str(request.user.id))
    curr_user = User.objects.get(pk=request.user.id)
    try:
        province = request.POST['province']
        city = request.POST['city']
        county = request.POST['county']
        street = request.POST['street']
        consignee = request.POST['consignee']
        consignee_tel = request.POST['consignphone']
        # 'moren' flags whether this address should become the default one
        is_default = request.POST['moren'] == 'true'
        email = request.POST['email']
        cellphone = request.POST['phone']
        curr_user.cellphone = cellphone
        curr_user.email = email
        curr_user.save()
        # an empty province means the user did not submit an address at all
        if province != '':
            addr = Address(province=province, city=city, county=county,
                           street=street, consignee=consignee,
                           consignee_tel=consignee_tel, user_id=curr_user,
                           is_default=is_default)
            addr.save()
        return HttpResponse(1)
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        import traceback
        print(traceback.format_exc())
        return HttpResponse(0)
def location(request):
    """Render the location page for the current user."""
    context = {'user': request.user}
    return render(request, 'mainpage/location.html', context)
import sys
import psycopg2
def connect_database(dbname, user, pwd, host_ip, port="5439"):
    """
    connect to database on redshift cluster
    Args:
        dbname: database name to connect to server
        user: user name
        pwd: user password
        host_ip: ip for database server
        port: port for database server
    Returns:
        cur: cursor to the database
        conn: connection to the database
    """
    dsn = f"host={host_ip} port={port} dbname={dbname} user={user} password={pwd}"
    connection = psycopg2.connect(dsn)
    # autocommit so each statement is applied immediately
    connection.set_session(autocommit=True)
    return connection.cursor(), connection
def table_stats(cur, table):
    """
    output table stats in the database
    Args:
        cur: cursor to the database
        table: table to be queried
    Returns:
        None
    """
    # data-driven replacement for five copy-pasted execute/print blocks;
    # query strings and printed messages are unchanged
    stats = (
        (f"SELECT count(*) FROM {table};",
         'There are {} rows in the table'),
        (f"SELECT COUNT(DISTINCT(trip_id)) FROM {table};",
         'There are {} distinct trips'),
        (f"SELECT COUNT(DISTINCT(taxi_id)) FROM {table};",
         'There are {} distinct taxi'),
        (f"SELECT SUM(trip_mile) FROM {table};",
         'Sum of all trip distance is {}'),
        (f"SELECT SUM(trip_sec) FROM {table};",
         'Sum of all trip time is {}'),
    )
    for sql, template in stats:
        cur.execute(sql)
        for result in cur.fetchall():
            print(template.format(result[0]))
def close_connection(conn, cur):
    """Release the cursor first, then the connection."""
    for resource in (cur, conn):
        resource.close()
if __name__ == "__main__":
if len(sys.argv) == 7:
# parse command line inputs
dbname, user, pwd, host_ip, port, table = sys.argv[1:]
cur, conn = connect_database(dbname, user, pwd, host_ip, port)
print('******RedShift*****')
table_stats(cur, table)
print('*******************')
close_connection(conn, cur)
else:
print("Please check your inputs!")
|
import sqlite3
import hashlib
# Single module-wide connection; sqlite3.Row gives dict-style column access
# on every fetched row.
con = sqlite3.connect('racunalniske_igre.db')
con.row_factory = sqlite3.Row
def najdi_podjetje(ime):
    """Return the id of company `ime`, inserting a new row first if the
    company is not in the database yet."""
    sql = '''
    SELECT id
    FROM podjetja
    WHERE ime = ?
    '''
    # removed leftover debug print(ime); renamed `id` to avoid shadowing
    # the builtin
    row = con.execute(sql, [ime]).fetchone()
    if row is None:
        sql = '''insert into podjetja (ime) values (?)'''
        new_id = con.execute(sql, [ime]).lastrowid
        con.commit()
        return new_id
    return row[0]
def seznam_uporabnikov():
    """Return every user as a (id, username, password-hash) row."""
    sql = '''
    SELECT id, up_ime, geslo
    FROM uporabnik
    '''
    return list(con.execute(sql))
def vrni_imeUp(idUp):
    """Return the username of the user whose id equals idUp.

    Returns None when idUp is None; raises if no such user exists
    (fetchone() yields None and the subscript fails).
    """
    if idUp is None:
        return None
    sql = '''select up_ime from uporabnik where id = ?'''
    return con.execute(sql, [idUp]).fetchone()["up_ime"]
# NOTE(review): leftover debug call — runs (and prints) at import time.
print(vrni_imeUp(104))
def sez_iger():
    """Return every game as a (id, name, year, user, publisher, developer) row."""
    sql= '''
    SELECT id, ime, leto, uporabnik, zaloznik, razvijalec
    FROM igra
    '''
    return list(con.execute(sql))
def seznam_platform():
    """Return every platform as a (id, name) row."""
    sql = '''SELECT id, katera FROM platforma '''
    return list(con.execute(sql))
def seznam_zvrsti():
    """Return every genre as a (id, name) row."""
    sql = '''SELECT id, ime FROM zvrst'''
    return list(con.execute(sql))
def topDeset():
    """Return the 10 games with the best average rating as
    (id, name, avg rating) rows, best first."""
    sql = '''SELECT igra.id, igra.ime,
    avg(ocena.koliko) AS ocena
    FROM igra
    JOIN
    ocena ON igra.id = ocena.igra
    GROUP BY igra.ime
    ORDER BY ocena DESC
    LIMIT 10'''
    return list(con.execute(sql))
def IsciZBesedo(beseda):
    """Return all games whose name contains the substring `beseda`."""
    vzorec = '%{}%'.format(beseda)
    # bug fix: the placeholder was followed by a stray double quote
    # (`LIKE ?"`), which made the SQL statement invalid at runtime
    sql = '''SELECT igra.ime as ime
    FROM igra
    WHERE ime LIKE ?'''
    return list(con.execute(sql, [vzorec]))
def komentarjiIgre(igra):
    """For the game with id `igra`, return every comment together with the
    commenting user's name and the comment date."""
    sql = '''SELECT uporabnik.up_ime AS dodal,
    komentar.vsebina AS komentar,
    komentar.datum AS datum
    FROM komentar
    JOIN
    igra ON komentar.igra = igra.id
    JOIN
    uporabnik ON komentar.uporabnik = uporabnik.id
    WHERE igra.id = ?'''
    return list(con.execute(sql, [igra]))
def seznamPoizvedba(beseda):
    """Search games whose name, platform, developer, publisher, submitting
    user or genre contains the substring `beseda`; returns (name, id) rows."""
    vzorec = '%{}%'.format(beseda)
    sql ='''SELECT DISTINCT igra.ime AS ime, igra.id AS id
    FROM igra
    JOIN
    platforma_igra ON igra.id = platforma_igra.igra
    JOIN
    platforma ON platforma_igra.platformA = platforma.id
    JOIN
    podjetja AS zalozniki ON zalozniki.id = igra.zaloznik
    JOIN
    podjetja AS razvijalci ON razvijalci.id = igra.razvijalec
    JOIN
    uporabnik ON uporabnik.id = igra.uporabnik
    JOIN
    zvrst_igra ON igra.id = zvrst_igra.igra
    JOIN
    zvrst ON zvrst_igra.zvrst = zvrst.id
    WHERE (igra.ime LIKE ?) OR
    (platforma.katera LIKE ?) OR
    (razvijalci.ime LIKE ?) OR
    (zalozniki.ime LIKE ?) OR
    (uporabnik.up_ime LIKE ?) OR
    (zvrst.ime LIKE ?);'''
    # the same pattern is bound to all six LIKE placeholders
    return list(con.execute(sql,[vzorec, vzorec, vzorec, vzorec, vzorec, vzorec]))
def zvrstiIgra(zvrst):
    """Return the games belonging to the genre named `zvrst`."""
    sql = '''SELECT igra.ime AS igra
    FROM igra
    JOIN
    zvrst_igra ON igra.id = zvrst_igra.igra
    JOIN
    zvrst ON zvrst_igra.zvrst = zvrst.id
    WHERE zvrst.ime = ?'''
    # bug fix: the query has a ? placeholder but was executed without
    # parameters, which raises sqlite3.ProgrammingError
    return list(con.execute(sql, [zvrst]))
def igraPlatforme(igra):
    """Return the platforms of the game with id `igra`."""
    sql = '''SELECT platforma.katera AS platforma
    FROM platforma
    JOIN
    platforma_igra ON platforma.id = platforma_igra.platforma
    JOIN
    igra ON platforma_igra.igra = igra.id
    WHERE igra.id = ?'''
    return list(con.execute(sql, [igra]))
# NOTE: con.execute returns an iterator (a cursor), hence the list() wrappers.
def igraZvrsti(igra):
    """Return the genres of the game with id `igra`."""
    sql = '''SELECT zvrst.ime AS zvrst
    FROM zvrst
    JOIN
    zvrst_igra ON zvrst.id = zvrst_igra.zvrst
    JOIN
    igra ON zvrst_igra.igra = igra.id
    WHERE igra.id = ?'''
    return list(con.execute(sql, [igra]))
def podatkiOigri(igra):
    """Return the (name, year, publisher, submitting user) row of the game
    with id `igra`, or None if there is no such game."""
    sql = '''SELECT igra.ime AS ime,
    igra.leto AS leto,
    podjetja.ime AS zaloznik,
    uporabnik.up_ime AS uporabnik
    FROM igra
    JOIN
    podjetja ON igra.zaloznik = podjetja.id
    JOIN
    uporabnik ON igra.uporabnik = uporabnik.id
    WHERE igra.id = ?'''
    return con.execute(sql, [igra]).fetchone()
def razvijalecIgra(igra):
    """Return the developer row of the game with id `igra` (or None)."""
    sql = '''SELECT podjetja.ime AS razvijalec
    FROM podjetja
    JOIN
    igra ON igra.razvijalec = podjetja.id
    WHERE igra.id = ?'''
    return con.execute(sql, [igra]).fetchone()
def povprecna_ocena(igra):
    """Return the game's average rating rounded to two decimals, or a
    'no rating yet' message when the game has no ratings."""
    sql = '''SELECT AVG(koliko) AS povp FROM ocena WHERE igra = ?'''
    # run the query once (the original executed it twice: once for the
    # None-check and once more for the rounded value)
    povp = con.execute(sql, [igra]).fetchone()["povp"]
    if povp is not None:
        return round(povp, 2)
    return "Igra še nima ocene."
## From here on: authentication and user helpers
def kodirajGeslo(geslo):
    """Return the MD5 hex digest of the given password.

    WARNING: MD5 is unsuitable for password storage (fast, unsalted); it is
    kept only because existing rows in the DB are hashed this way.
    """
    digest = hashlib.md5(geslo.encode())
    return digest.hexdigest()
def prijava(up_ime,geslo):
    """Return the user's id when the username/password pair matches a row;
    implicitly returns None otherwise."""
    sql = '''
    select id
    from uporabnik
    where up_ime = ?
    and geslo = ?;
    '''
    # passwords are stored as MD5 digests, so hash before comparing
    oseba = con.execute(sql, [up_ime, kodirajGeslo(geslo)]).fetchone()
    if oseba:
        return oseba['id']
def aliVBazi(up_ime):
    """Return True when the username already exists in the database."""
    sql = '''select up_ime from uporabnik
    where up_ime == ?'''
    if con.execute(sql, [up_ime]).fetchone():
        return True
    return False
def aliOcenil(upId, igraId):
    """Return 1 when the user has already rated the game, 0 otherwise."""
    sql = '''SELECT count(*)
    FROM ocena
    WHERE uporabnik = ? AND
    igra = ?'''
    return con.execute(sql,[upId,igraId]).fetchone()[0]
# NOTE(review): leftover debug call — runs (and prints) at import time.
print(aliOcenil(14, 63))
### Adding rows to the database
def dodaj_uporabnik(up_ime,geslo):
    """Insert a new user with an MD5-hashed password.

    Silently does nothing when the username is already taken.
    """
    if not aliVBazi(up_ime):
        sql = '''
        insert into uporabnik
        (up_ime,geslo)
        values (?,?)
        '''
        con.execute(sql,[up_ime,kodirajGeslo(geslo)])
        con.commit()
def dodaj_komentar(vsebina, uporabnik, igra):
    """Insert a comment for a game, dated with the current date."""
    sql = '''INSERT INTO komentar (vsebina, uporabnik, igra, datum)
    VALUES (?,?,?, DATE('now'))'''
    con.execute(sql, [vsebina, uporabnik, igra])
    con.commit()
def dodaj_oceno(igra, uporabnik, koliko):
    """Insert a rating `koliko` for a game by a user."""
    sql = '''INSERT INTO ocena (igra, uporabnik, koliko)
    VALUES (?,?,?)'''
    con.execute(sql, [igra, uporabnik, koliko])
    con.commit()
def dodaj_igro_v_bazo(ime, leto, razvijalec, zaloznik, uporabnik, platforme, zvrsti):
    """Insert a game plus its platform and genre link rows.

    `razvijalec`/`zaloznik` are company names (created on demand via
    najdi_podjetje); `platforme` and `zvrsti` are iterables of platform/genre
    ids. Commits once at the end.
    """
    zid = najdi_podjetje(zaloznik)
    rid = najdi_podjetje(razvijalec)
    sql ='''INSERT INTO igra (ime, leto, razvijalec, zaloznik, uporabnik)
    VALUES (?,?,?,?,?)'''
    cur = con.execute(sql, [ime, leto, rid, zid, uporabnik])
    # id of the freshly inserted game row, used for the link tables below
    id = cur.lastrowid
    sql2 = '''insert into platforma_igra (igra, platforma) VALUES (?, ?)'''
    for pl in platforme:
        con.execute(sql2, [id, pl])
    sql3 = '''insert into zvrst_igra (igra, zvrst) VALUES (?, ?)'''
    for zv in zvrsti:
        con.execute(sql3, [id, zv])
    con.commit()
|
import numpy as np
from time import time
import os
from Model.model import Model
# Simplex model; print_obj toggles which diagnostics the solver emits.
m = Model(print_obj={
    # 'start_conf': True,
    # 'end_conf': True
    "pivot": True,
    # "timing": True,
    "save_tab": True
})
# Base path (no extension) of the MineLib instance to solve.
instance = 'examples/data/newman1'
def read_cpit(dataset):
    """Parse a MineLib ``<dataset>.cpit`` instance file.

    The file is a sequence of sections introduced by header tokens
    (NBLOCKS:, NPERIODS:, NRESOURCE_SIDE_CONSTRAINTS:, DISCOUNT_RATE:,
    RESOURCE_CONSTRAINT_LIMITS:, OBJECTIVE_FUNCTION:,
    RESOURCE_CONSTRAINT_COEFFICIENTS:) and terminated by EOF.

    Returns:
        (n_blocks, n_periods, n_res_constr, discount_rate,
         res_constr_limits, profit, res_constr_coeff)
    """
    current_parameter = "NULL"
    profit = []
    res_constr_coeff = []
    n_blocks = 0
    n_periods = 0
    res_constr_limits = []
    # `with` guarantees the file is closed even if parsing raises
    with open(dataset + ".cpit", 'r') as f:
        for line in f:
            split_line = line.split()
            if current_parameter == "RESOURCE_CONSTRAINT_COEFFICIENTS":
                if (split_line[0] != "EOF"):
                    # "<block> <constraint> <coefficient>"
                    res_constr_coeff[int(split_line[0])][int(split_line[1])] = float(split_line[2])
                else:
                    break
            if current_parameter == "OBJECTIVE_FUNCTION":
                if split_line[0] == "RESOURCE_CONSTRAINT_COEFFICIENTS:":
                    current_parameter = "RESOURCE_CONSTRAINT_COEFFICIENTS"
                    res_constr_coeff = [[0 for j in range(n_res_constr)] for i in range(n_blocks)]
                else:
                    # "<block> <profit>"
                    profit.append(float(split_line[1]))
            if current_parameter == "RESOURCE_CONSTRAINT_LIMITS":
                if split_line[0] == "OBJECTIVE_FUNCTION:":
                    current_parameter = "OBJECTIVE_FUNCTION"
                else:
                    # "<constraint> <period> <type> <limit>"
                    res_constr_limits[int(split_line[0])][int(split_line[1])] = float(split_line[3])
            if current_parameter == "NULL":
                if split_line[0] == "NBLOCKS:":
                    n_blocks = int(split_line[1])
                if split_line[0] == "NPERIODS:":
                    n_periods = int(split_line[1])
                if split_line[0] == "NRESOURCE_SIDE_CONSTRAINTS:":
                    n_res_constr = int(split_line[1])
                if split_line[0] == "DISCOUNT_RATE:":
                    discount_rate = float(split_line[1])
                if split_line[0] == "RESOURCE_CONSTRAINT_LIMITS:":
                    current_parameter = "RESOURCE_CONSTRAINT_LIMITS"
                    res_constr_limits = [[0 for a in range(n_periods)] for b in range(n_res_constr)]
    return n_blocks, n_periods, n_res_constr, discount_rate, res_constr_limits, profit, res_constr_coeff
def read_blocks(dataset):
    """Read block coordinates from ``<dataset>.blocks``.

    Each line is "<id> <x> <y> <z> ..."; returns three parallel lists
    (x_value, y_value, z_value).
    """
    x_value, y_value, z_value = [], [], []
    # `with` guarantees the file is closed even on error
    with open(dataset + ".blocks", 'r') as f:
        for line in f:
            fields = line.split()
            x_value.append(int(fields[1]))
            y_value.append(int(fields[2]))
            z_value.append(int(fields[3]))
    return x_value, y_value, z_value
def read_prec(dataset, n_blocks):
    """Read precedences from ``<dataset>.prec``.

    Each line is "<block_id> <n_pred> <pred_1> ... <pred_n>"; returns a list
    where pred[b] holds the predecessor block ids of block b.
    """
    pred = [[] for _ in range(n_blocks)]
    # `with` guarantees the file is closed even on error
    with open(dataset + ".prec", 'r') as f:
        for line in f:
            fields = [int(x) for x in line.split()]
            block_id, n_pred = fields[0], fields[1]
            pred[block_id].extend(fields[2:2 + n_pred])
    return pred
def get_by_key(arr, key):
    """Collect ``item[key]`` for every element of `arr` into a numpy array."""
    return np.array([item[key] for item in arr])
# Load the instance, build the LP relaxation of the open-pit problem and solve.
n_blocks, n_periods, n_res_constr, discount_rate, res_constr_limits, profit, res_constr_coeff = read_cpit(instance)
x_value, y_value, z_value = read_blocks(instance)
pred = read_prec(instance, n_blocks)
print("#blocks", n_blocks)
print("#periods", n_periods)
mean = np.mean(profit)
# profit /= mean
# m.multiplier = mean
# One continuous variable per block (LP relaxation of the 0/1 decision).
x = []
for i in range(n_blocks):
    x.append(m.add_var("real+", name=i))
x = np.array(x)
m.file_name = "examples/data/newman"
m.maximize(sum(profit*x))
# binary
for i in range(n_blocks):
    m.add_constraint(x[i] <= 1)
# cost
# m.add_constraint(sum(get_by_key(blocks,"c")*x) <= max_c)
# Precedence: a block can only be mined as far as all its predecessors are.
for i in range(n_blocks):
    if len(pred[i]) > 0:
        m.add_constraint(len(pred[i])*x[i]-sum(x[pred[i]]) <= 0)
print("all added")
t0 = time()
m.solve(revised=True)
# m.solve()
print("Solved first in %f" % (time()-t0))
print(m.get_solution_object())
|
# Generated by Django 2.1.7 on 2019-04-01 01:27
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the optional `suffix` CharField to Prof.
    # Do not hand-edit generated migrations beyond comments.
    dependencies = [
        ('main', '0009_auto_20190401_0031'),
    ]
    operations = [
        migrations.AddField(
            model_name='prof',
            name='suffix',
            field=models.CharField(blank=True, max_length=4),
        ),
    ]
|
# Degree Array
#http://rosalind.info/problems/deg/
# Parse the edge list: the first row is [n_vertices, n_edges]; the rest
# are 1-indexed undirected edges.
with open('rosalind_deg.txt', 'r') as contents:
    edges = [[int(i) for i in c.split(' ')] for c in contents.read().strip().split('\n')]
def getDegrees(edges):
    """Return the degree of each vertex for an undirected graph.

    `edges[0]` is the header [num_vertices, num_edges]; the remaining rows
    are 1-indexed edges. Unlike the original (which popped the header off
    the caller's list), this does not mutate `edges`.
    """
    num_verts, num_edges = edges[0]
    vert_degrees = [0] * num_verts
    # each edge contributes one degree to each endpoint
    for u, v in edges[1:1 + num_edges]:
        vert_degrees[u - 1] += 1
        vert_degrees[v - 1] += 1
    return vert_degrees
degrees = getDegrees(edges)
with open('deg_output.txt', 'w') as output:
    # one line of space-separated degrees, vertices 1..n in order
    output.write(' '.join([str(d) for d in degrees]))
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#pylint: disable=
"""
File : url_utils.py
Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
Description:
"""
from __future__ import print_function
# system modules
import os
import sys
import urllib
import urllib2
import httplib
import json
def get_key_cert():
    """
    Get user key/certificate.

    Resolution order:
      1. X509_HOST_CERT / X509_HOST_KEY environment (host certificate, rare)
      2. X509_USER_PROXY (the proxy file serves as both key and cert)
      3. X509_USER_CERT / X509_USER_KEY
      4. ~/.globus/userkey.pem + usercert.pem when present
      5. /tmp/x509up_u<uid> proxy as last resort
    Raises Exception when the resolved files do not exist on disk.
    """
    key = None
    cert = None
    globus_key = os.path.join(os.environ['HOME'], '.globus/userkey.pem')
    globus_cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem')
    if os.path.isfile(globus_key):
        key = globus_key
    if os.path.isfile(globus_cert):
        cert = globus_cert
    # First precedence to HOST Certificate, RARE
    if 'X509_HOST_CERT' in os.environ:
        cert = os.environ['X509_HOST_CERT']
        key = os.environ['X509_HOST_KEY']
    # Second preference to User Proxy, very common
    elif 'X509_USER_PROXY' in os.environ:
        cert = os.environ['X509_USER_PROXY']
        key = cert
    # Third preference to User Cert/Proxy combination
    elif 'X509_USER_CERT' in os.environ:
        cert = os.environ['X509_USER_CERT']
        key = os.environ['X509_USER_KEY']
    # Worst case, look for cert at default location /tmp/x509up_u$uid
    elif not key or not cert:
        uid = os.getuid()
        cert = '/tmp/x509up_u'+str(uid)
        key = cert
    if not os.path.exists(cert):
        # bug fix: this message previously interpolated `key` instead of `cert`
        raise Exception("Certificate PEM file %s not found" % cert)
    if not os.path.exists(key):
        raise Exception("Key PEM file %s not found" % key)
    return key, cert
def disable_urllib2Proxy():
    """
    Setup once and forever urllib2 proxy, see
    http://kember.net/articles/obscure-python-urllib2-proxy-gotcha
    """
    # NOTE: Python 2 only (urllib2). An empty ProxyHandler dict disables
    # any proxy picked up from the environment for all subsequent urlopen calls.
    proxy_support = urllib2.ProxyHandler({})
    opener = urllib2.build_opener(proxy_support)
    urllib2.install_opener(opener)
class HTTPSClientAuthHandler(urllib2.HTTPSHandler):
    """
    Simple HTTPS client authentication class based on provided
    key/ca information
    """
    # NOTE: Python 2 only (urllib2/httplib).
    def __init__(self, key=None, cert=None, level=0):
        # level > 1 turns on urllib2's HTTPS debug output
        if level > 1:
            urllib2.HTTPSHandler.__init__(self, debuglevel=1)
        else:
            urllib2.HTTPSHandler.__init__(self)
        self.key = key
        self.cert = cert
    def https_open(self, req):
        """Open request method"""
        #Rather than pass in a reference to a connection class, we pass in
        # a reference to a function which, for all intents and purposes,
        # will behave as a constructor
        return self.do_open(self.get_connection, req)
    def get_connection(self, host, timeout=300):
        """Connection method"""
        # use the client key/cert pair when provided, else a plain connection
        if self.key:
            return httplib.HTTPSConnection(host, key_file=self.key,
                cert_file=self.cert)
        return httplib.HTTPSConnection(host)
def getdata(url, params, headers=None, post=None, verbose=False, jsondecoder=True):
    """
    Invoke URL call and retrieve data from data-service based
    on provided URL and set of parameters. Use post=True to
    invoke POST request.
    """
    # NOTE: Python 2 only (urllib/urllib2, dict.iteritems). On errors a
    # JSON-encoded error dict is returned instead of the payload.
    encoded_data = urllib.urlencode(params)
    if not post:
        # GET: fold the parameters into the query string
        if encoded_data:
            url = url + '?' + encoded_data
    if not headers:
        headers = {}
    if verbose:
        print('+++ getdata, url=%s, headers=%s' % (url, headers))
    obj=sys.version_info
    if obj[0] == 2 and obj[1] == 7 and obj[2] >= 9:
        # disable SSL verification, since it is default in python 2.7.9
        # and many CMS services do not verify SSL cert.
        # https://www.python.org/dev/peps/pep-0476/
        import ssl
        ssl._create_default_https_context = ssl._create_unverified_context
    req = urllib2.Request(url)
    for key, val in headers.iteritems():
        req.add_header(key, val)
    if verbose > 1:
        # verbose acts as a level here: >1 also enables HTTP wire debugging
        handler = urllib2.HTTPHandler(debuglevel=1)
        opener = urllib2.build_opener(handler)
        urllib2.install_opener(opener)
    ckey, cert = get_key_cert()
    handler = HTTPSClientAuthHandler(ckey, cert, verbose)
    if verbose:
        print("handler", handler, handler.__dict__)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)
    try:
        if post:
            data = urllib2.urlopen(req, encoded_data)
        else:
            data = urllib2.urlopen(req)
        info = data.info()
        code = data.getcode()
        if verbose > 1:
            print("+++ response code:", code)
            print("+++ response info\n", info)
        if jsondecoder:
            data = json.load(data)
        else:
            data = data.read()
    except urllib2.HTTPError as httperror:
        msg = 'HTTPError, url=%s, args=%s, headers=%s' \
            % (url, params, headers)
        data = {'error': 'Unable to contact %s' % url , 'reason': msg}
        try:
            # extract_http_error is presumably defined elsewhere in this
            # module — TODO confirm; failure here falls back to None
            data.update({'httperror':extract_http_error(httperror.read())})
        except Exception as exp:
            data.update({'httperror': None})
        data = json.dumps(data)
    except Exception as exp:
        msg = 'HTTPError, url=%s, args=%s, headers=%s, error=%s' \
            % (url, params, headers, str(exp))
        data = {'error': 'Unable to contact %s' % url, 'reason': msg}
        data = json.dumps(data)
    return data
|
import math
# "Taxi" counting problem: N groups of 1-4 children, each taxi holds 4;
# groups may not be split. Greedy packing, largest groups first.
N = int(input())
Group_of_students = list(map(int, input().split()))
Group_of_students.sort(reverse=True)
groups = {1: 0, 2: 0, 3: 0, 4: 0}
taxis = 0
# tally how many groups there are of each size
for i in range(0, N):
    groups[Group_of_students[i]] += 1
# every 4-group fills a taxi by itself
taxis += groups[4]
groups[4] = 0
# pair each 3-group with a 1-group where possible
m = min(groups[1], groups[3])
taxis += m
groups[3] -= m
groups[1] -= m
# leftover 3-groups ride alone
taxis += groups[3]
groups[3] = 0
# two 2-groups share one taxi
taxis += groups[2] // 2
groups[2] %= 2
if groups[2] == 0:
    taxis += math.ceil(groups[1] / 4)
else:
    # one leftover 2-group takes up to two singles along
    taxis += 1
    groups[1] -= 2
    if groups[1] > 0:
        taxis += math.ceil(groups[1] / 4)
# bug fix: removed leftover debug prints (groups[2] and the groups dict)
# that corrupted the program's output
print(taxis)
|
# Three throwaway Monty Python strings, printed space-separated on one line.
a = 'dead'
b = 'parrot'
c = 'sketch'
print(' '.join((a, b, c)))
|
#!env python3
# -*- coding: utf-8 -*-
import csv
import trace
# Sample dataset: three Nobel laureates, used to exercise the CSV
# round-trip in main() below.
nobel_winners = [{
    'category': 'physics',
    'name': 'Albert Einstein',
    'nationality': 'Swiss',
    'sex': 'male',
    'year': 1921
}, {
    'category': 'physics',
    'name': 'Paul Dirac',
    'nationality': 'British',
    'sex': 'male',
    'year': 1933
}, {
    'category': 'chemistry',
    'name': 'Marle Curie',
    'nationality': 'Polish',
    'sex': 'female',
    'year': 1911
}]
def main():
    """Write nobel_winners to ../data/nobel_winners.csv (columns sorted
    alphabetically) and echo the file's lines back to stdout."""
    cols = sorted(nobel_winners[0].keys())
    # use the csv module (imported at the top but previously unused) instead
    # of hand-rolled joins, so values containing commas/quotes are escaped;
    # newline='' is required by the csv docs to avoid blank lines on Windows
    with open('../data/nobel_winners.csv', 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=cols)
        writer.writeheader()
        writer.writerows(nobel_winners)
    with open('../data/nobel_winners.csv') as f:
        for line in f.readlines():
            print(line)
# Run main() under the stdlib tracer and write per-function coverage
# results (".cover" files) into the current directory.
tracer = trace.Trace()
tracer.runfunc(main)
r = tracer.results(trace=0, count=0, countfuncs=1)
r.write_results(show_missing=True, coverdir=".")
|
#packing dictionary
def packer(**kwargs):
    """Demonstrate keyword-argument packing: print the collected dict, then
    a name formatted from its first_name/last_name entries."""
    print(kwargs)
    full_name = "{first_name} {last_name}".format(**kwargs)
    print(full_name)
#unpacking dictionary
def unpacker(first_name, last_name, job):
    """Print 'first last' when both names are truthy, otherwise the job."""
    if not (first_name and last_name):
        print(job)
        return
    print("{} {}".format(first_name, last_name))
# Demo: kwargs are packed into a dict ...
packer(first_name="karen", last_name="ku")
# ... and a dict is unpacked into named parameters.
unpacker(**{"first_name": "Karen", "last_name": "Ku", "job": "Engineer"})
# Thin launcher: all behavior lives in the project-local dashboard package.
import raspi_dashboard as rd
rd.start()  # presumably blocks while the dashboard runs — TODO confirm
|
import math
import vtk
from PythonMetricsCalculator import PerkEvaluatorMetric
# Adapted from: Hofstad et al., A study of psychomotor skills in minimally invasive surgery: what differentiates expert and nonexpert performance, Surgical Endoscopy, 2013.
class BimanualDexterity( PerkEvaluatorMetric ):
    """Pearson correlation (rho) between left- and right-tool speeds.

    Reports two tab-separated values: translational rho, then rotational
    rho, computed online from per-frame speeds of the two tracked tools.
    """

    # Static metric descriptors
    @staticmethod
    def GetMetricName():
        return "Bimanual Dexterity: Translational & Rotational"

    @staticmethod
    def GetMetricUnit():
        return "rho"

    @staticmethod
    def IsShared():
        return False

    @staticmethod
    def GetMetricShared():
        # Kept in addition to IsShared so callers of either name still work.
        return False

    @staticmethod
    def GetTransformRoles():
        return [ "LeftTool", "RightTool" ]

    # Instance methods
    def __init__( self ):
        # Previous (inverted) pose and timestamp per tool.
        self.prevLeftInverseMatrix = None
        self.prevRightInverseMatrix = None
        self.prevLeftTime = None
        self.prevRightTime = None
        # Most recent per-frame speeds per tool.
        self.currLeftRotationalSpeed = None
        self.currRightRotationalSpeed = None
        self.currLeftTranslationalSpeed = None
        self.currRightTranslationalSpeed = None
        # Online accumulators for means, variances and covariances.
        self.leftRotationalSumSquares = 0.0
        self.rightRotationalSumSquares = 0.0
        self.leftRotationalSum = 0.0
        self.rightRotationalSum = 0.0
        self.rotationalSumProducts = 0.0
        self.leftTranslationalSumSquares = 0.0
        self.rightTranslationalSumSquares = 0.0
        self.leftTranslationalSum = 0.0
        self.rightTranslationalSum = 0.0
        self.translationalSumProducts = 0.0
        self.leftCount = 0
        self.rightCount = 0
        self.totalCount = 0

    def AddAnatomyRole( self, role, node ):
        # This metric uses no anatomy.
        pass

    def AddTimestamp( self, time, matrix, point, role ):
        """Fold one tracked frame (for either tool role) into the accumulators."""
        prevInverseMatrix = None
        if ( role == "LeftTool" ):
            prevInverseMatrix = self.prevLeftInverseMatrix
        if ( role == "RightTool" ):
            prevInverseMatrix = self.prevRightInverseMatrix
        angle = None
        distance = None
        if ( prevInverseMatrix is not None ):
            changeTransform = vtk.vtkTransform()
            changeTransform.Concatenate( matrix )
            changeTransform.Concatenate( prevInverseMatrix ) # matrix * prevInverseMatrix
            # Rotation
            axisAngle = [ 0, 0, 0, 0 ]
            changeTransform.GetOrientationWXYZ( axisAngle ) # This is in degrees
            angle = axisAngle[ 0 ]
            if ( angle > 180 ):
                angle = angle - 360
            # Translation
            position = [ 0, 0, 0 ]
            changeTransform.GetPosition( position )
            distance = vtk.vtkMath.Norm( position )
        if ( role == "LeftTool" ):
            if ( self.prevLeftTime is not None and angle is not None and distance is not None ):
                # Speeds
                self.currLeftRotationalSpeed = abs( angle / ( time - self.prevLeftTime ) )
                self.currLeftTranslationalSpeed = abs( distance / ( time - self.prevLeftTime ) )
                # Update the sums
                self.leftRotationalSum += self.currLeftRotationalSpeed
                self.leftRotationalSumSquares += self.currLeftRotationalSpeed * self.currLeftRotationalSpeed
                self.leftTranslationalSum += self.currLeftTranslationalSpeed
                self.leftTranslationalSumSquares += self.currLeftTranslationalSpeed * self.currLeftTranslationalSpeed
                # Increase count
                self.leftCount += 1
            # Update previous
            self.prevLeftTime = time
            self.prevLeftInverseMatrix = vtk.vtkMatrix4x4()
            self.prevLeftInverseMatrix.DeepCopy( matrix )
            self.prevLeftInverseMatrix.Invert()
        if ( role == "RightTool" ):
            if ( self.prevRightTime is not None and angle is not None and distance is not None ):
                # Speeds
                self.currRightRotationalSpeed = abs( angle / ( time - self.prevRightTime ) )
                self.currRightTranslationalSpeed = abs( distance / ( time - self.prevRightTime ) )
                # Update the sums
                self.rightRotationalSum += self.currRightRotationalSpeed
                self.rightRotationalSumSquares += self.currRightRotationalSpeed * self.currRightRotationalSpeed
                self.rightTranslationalSum += self.currRightTranslationalSpeed
                self.rightTranslationalSumSquares += self.currRightTranslationalSpeed * self.currRightTranslationalSpeed
                # Increase count
                self.rightCount += 1
            # Update previous
            self.prevRightTime = time
            self.prevRightInverseMatrix = vtk.vtkMatrix4x4()
            self.prevRightInverseMatrix.DeepCopy( matrix )
            self.prevRightInverseMatrix.Invert()
        if ( self.currLeftRotationalSpeed is None or self.currRightRotationalSpeed is None or self.currLeftTranslationalSpeed is None or self.currRightTranslationalSpeed is None ):
            return
        # BUG FIX: the original accumulated
        #   currLeftRotationalSpeed * rotationalSumProducts
        # (multiplying by the accumulator itself), so the rotational
        # cross-product sum stayed 0 forever and the rotational rho was wrong.
        self.rotationalSumProducts += self.currLeftRotationalSpeed * self.currRightRotationalSpeed
        self.translationalSumProducts += self.currLeftTranslationalSpeed * self.currRightTranslationalSpeed
        self.totalCount += 1

    def GetMetric( self ):
        """Return 'translational_rho<TAB>rotational_rho' as a string, or 0 with no data."""
        if ( self.totalCount == 0 or self.leftCount == 0 or self.rightCount == 0 ):
            return 0
        leftRotationalMean = self.leftRotationalSum / self.leftCount
        leftTranslationalMean = self.leftTranslationalSum / self.leftCount
        rightRotationalMean = self.rightRotationalSum / self.rightCount
        rightTranslationalMean = self.rightTranslationalSum / self.rightCount
        # max( 0.0, ... ) clamps tiny negative variances from float round-off
        # so math.sqrt cannot raise.
        leftRotationalStdev = math.sqrt( max( 0.0, self.leftRotationalSumSquares / self.leftCount - leftRotationalMean * leftRotationalMean ) )
        leftTranslationalStdev = math.sqrt( max( 0.0, self.leftTranslationalSumSquares / self.leftCount - leftTranslationalMean * leftTranslationalMean ) )
        rightRotationalStdev = math.sqrt( max( 0.0, self.rightRotationalSumSquares / self.rightCount - rightRotationalMean * rightRotationalMean ) )
        rightTranslationalStdev = math.sqrt( max( 0.0, self.rightTranslationalSumSquares / self.rightCount - rightTranslationalMean * rightTranslationalMean ) )
        rotationalCovariance = self.rotationalSumProducts / self.totalCount - leftRotationalMean * rightRotationalMean
        translationalCovariance = self.translationalSumProducts / self.totalCount - leftTranslationalMean * rightTranslationalMean
        rotationalBimanualDexterity = rotationalCovariance / ( leftRotationalStdev * rightRotationalStdev )
        translationalBimanualDexterity = translationalCovariance / ( leftTranslationalStdev * rightTranslationalStdev )
        bimanualDexterity = [ translationalBimanualDexterity, rotationalBimanualDexterity ]
        separator = "\t"
        return separator.join( map( str, bimanualDexterity ) )
from flask import Flask, render_template
from datetime import date
app = Flask(__name__)
@app.route('/')
def home():
    """Render index.html; status is True only on the birthday (Nov 21)."""
    birth = date(2001, 11, 21)
    now = date.today()
    is_birthday = (now.month, now.day) == (birth.month, birth.day)
    return render_template('index.html', status=is_birthday)
if __name__ == "__main__":
    # Development server only — do not run with debug=True in production.
    app.run(debug=True)
#coding:utf-8
from dao.dao import Dao
class TaskSerialNumberDao(Dao):
    """DAO for the task_serial_number table.

    Serial numbers are allocated per project as COUNT(existing rows) + 1.
    """

    def __init__(self, db, id_, serial_no, project_id, task_id):
        self.db = db
        self.id_ = id_
        self.serial_no = serial_no
        self.project_id = project_id
        self.task_id = task_id

    def get(self):
        """Return every row of the table."""
        return self.db.execute_query("SELECT * FROM task_serial_number", [])

    def create(self):
        """Insert a row whose serial_no is the next number for this project."""
        sql = """
        INSERT INTO task_serial_number
        SELECT
        null,
        ((SELECT COUNT(*) FROM task_serial_number WHERE project_id = ?) + 1),
        ?,
        ?
        """
        self.db.execute(sql, [self.project_id, self.project_id, self.task_id])

    def update(self):
        """Not implemented."""
        pass

    def delete(self):
        """Not implemented."""
        pass
from tkinter import *
import math as m
#створення вікна
# Build the main window.
window = Tk()
window.title("olya`s calculator <3")
window.geometry('470x330')
# Single entry box used as both the input field and the result display.
box = Entry(window, width=150, bg='#edd4f0', fg="#fbfbfb", font=("Cambria Math", 20))
box.place(x=10, y=10, height=50, width=400)
# Button labels, laid out row by row.
buttons = ['=', '1', '2', '3', '+', '-', 'C',
           '4', '5', '6', '*', '/', 'DEL',
           '7', '8', '9', 'cos', 'sin', '%',
           '0', 'ln', 'log', 'ctg', 'tan', 'bin']
x = 410
y = 10
for i in buttons:
    # Bind the label via a default argument so each button keeps its own value
    # (a plain closure would capture the last loop value).
    a = lambda z=i: fun(z)
    Button(text=i, bg='#d3a6d8', fg="#f2f2f2", font=("Cambria Math", 20), command=a).place(x=x, y=y, height=50, width=50)
    x += 80
    if x > 450:
        # Wrap to the next row.
        x = 10
        y += 60
#опис команд кнопок
def fun(buttn):
    """Dispatch one calculator button press against the shared entry box.

    NOTE(review): '=' evaluates the raw box contents with eval(); acceptable
    for a local toy calculator, unsafe for any untrusted input.
    """
    if buttn == '=':
        box.insert(END, '=' + str(eval(box.get())))
    elif buttn == 'cos':
        box.insert(END, '=' + str(m.cos(int(box.get()))))
    elif buttn == 'sin':
        box.insert(END, '=' + str(m.sin(int(box.get()))))
    elif buttn == 'tan':
        box.insert(END, '=' + str(m.tan(int(box.get()))))
    elif buttn == 'ctg':
        # FIX: math has no ctg(); the original m.ctg(...) raised AttributeError.
        box.insert(END, '=' + str(1 / m.tan(int(box.get()))))
    elif buttn == 'log':
        # FIX: base-10 logarithm — the original used m.log (natural log) here.
        box.insert(END, '=' + str(m.log10(int(box.get()))))
    elif buttn == 'ln':
        # FIX: natural logarithm — the original used m.log10 (labels were swapped).
        box.insert(END, '=' + str(m.log(int(box.get()))))
    elif buttn == 'C':
        box.delete(0, END)
    elif buttn == 'DEL':
        box.delete(len(str(box.get())) - 1)
    elif buttn == "bin":
        box.insert(END, '=' + str(bin(int(box.get()))))
    else:
        # Digit/operator: start a fresh expression after a shown result.
        if '=' in box.get():
            box.delete(0, END)
        box.insert(END, buttn)

window.mainloop()
|
from tkinter import *
import time
root = Tk()
canv = Canvas(root, width=600, height=600)
canv.pack()
from math import *
import time
f = 0
R = 100  # radius scale of the figure
g = 0    # angle in degrees, advanced each frame
while True:
    g += 0.05
    r = g / 360 * 2 * pi  # degrees -> radians
    x = 1000              # frequency multiplier for the pattern
    # Both rectangle corners are identical, so each call draws a single point.
    canv.create_rectangle(300 + (R * sin(x * r)) * cos(r), 300 + (R * cos(x * r)) * sin(r), 300 + (R * sin(x * r)) * cos(r), 300 + (R * cos(x * r)) * sin(r))
    time.sleep(0.01)
    canv.update()
    # canv.delete ('all')
root.mainloop()  # NOTE(review): unreachable — the while True loop never exits
# Demo: print a one-entry dict literal.
print({"random": 3})
|
# python 2.7.3
import sys
import math
# Read m, d1, d2: total work of m units/day over d1 days must be
# redistributed evenly over d2 days (Python 2 script).
[m, d1, d2] = map(int, sys.stdin.readline().split())
workload = m * d1
# print 'workload is: %d' % workload
# Per-day quota n = ceil(workload / d2), via Python 2 integer division.
if workload % d2 == 0:
    n = workload / d2
else:
    n = workload / d2 + 1
for i in range(d2):
    # Emit n per day until the remainder runs short, then emit what's left.
    if workload >= n:
        print str(n),
    else:
        print str(workload),
    workload = max(workload - n, 0)
|
# write a program that prints out all the elements of the list that are less than 10.
def main():
    """Print the sample values that are below 10."""
    sample = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
    below_ten = list(filter(lambda value: value < 10, sample))
    print(below_ten)

if __name__ == "__main__":
    main()
from django import forms
from django.forms import ModelForm
from erp_app.models import Customers
class ExpenseForm(forms.Form):
    """Plain (non-model) form for recording a paid expense."""
    name = forms.CharField(max_length=150)
    # Free-text description rendered as a fixed-size textarea.
    description = forms.CharField(widget=forms.Textarea(
        attrs={'rows': 10, 'cols': 30}))
    date_paid = forms.DateField()
    amount_paid = forms.DecimalField()
class CustomerForm(ModelForm):
    """ModelForm over Customers exposing identity, contact and billing fields."""
    class Meta:
        model = Customers
        fields = ['title', 'first_name', 'middle_name', 'last_name',
                  'email', 'company', 'display_name', 'billing_street', 'billing_city',
                  'billing_state', 'billing_zip', 'billing_country', 'other_details']
|
import copy
import time
from collections import OrderedDict
from operator import itemgetter
import random
class node:
    """Game-tree node: a board snapshot plus search bookkeeping."""
    board = None

    def __init__(self, mat, level, val, x, y, cr, cc):
        self.board = mat       # current board state
        self.depth = level     # ply depth (even = MAX player to move)
        self.score = val       # [max_score, min_score] accumulated so far
        self.row, self.col = x, y
        self.cr, self.cc = cr, cc   # best child's coordinates (set by search)
def adjacent(no, x, y, ft, explored):
    """Recursive flood-fill: clear the region of tiles equal to ft at (x, y).

    Marks cleared cells '*' in-place, marks them visited in `explored`, and
    returns the number of cells cleared. Out-of-bounds (global n x n board),
    already-explored, or non-matching cells contribute 0.
    """
    count = 0
    if (x < n and x >= 0 and y < n and y >= 0 and explored[x][y] == 1):
        return 0
    elif (x < n and x >= 0 and y < n and y >= 0 and no[x][y] == ft):
        no[x][y] = '*'
        count += 1
        explored[x][y] = 1
    else:
        return 0
    # Recurse in all four directions and total the cleared cells.
    up = adjacent(no, x, y - 1, ft, explored)
    down = adjacent(no, x - 1, y, ft, explored)
    left = adjacent(no, x, y + 1, ft, explored)
    right = adjacent(no, x + 1, y, ft, explored)
    score = count + up + down + left + right
    return score
def gravity(mat):
    """Collapse each column downward in place: tiles fall into cleared ('*') cells.

    Scans each column bottom-up with two pointers: r is the lowest unsettled
    cell, i the candidate tile above it.
    """
    for c in range(n):
        r = n - 1
        i = n - 2
        while (r >= 0 and i >= 0):
            if (mat[r][c] == '*' and mat[i][c] == '*'):
                # Hole over hole: look further up for a tile.
                i = i - 1
            elif (mat[r][c] == '*' and mat[i][c] != '*'):
                # Drop the tile into the hole.
                mat[r][c] = mat[i][c]
                mat[i][c] = '*'
                r = r - 1
                i = i - 1
            elif (mat[r][c] != '*' and mat[i][c] != '*'):
                r = r - 1
                i = i - 1
            elif (mat[r][c] != '*' and mat[i][c] == '*'):
                r = r - 1
                i = i - 1
def terminal(no):
    """Return True when every cell of the global n x n board is cleared ('*')."""
    return all(no[r][c] == '*' for r in range(n) for c in range(n))
def create_successor(x, y, parent):
    """Apply the move at (x, y) to a copy of parent's board; return the child node.

    The mover gains (cleared cells)^2: even depths credit score[0] (MAX),
    odd depths score[1] (MIN). Increments the global expansion counter.
    """
    global count
    count = count + 1
    f = parent.board[x][y]
    no = copy.deepcopy(parent.board)
    sc = copy.deepcopy(parent.score)
    exp = [[0] * n for i in range(n)]
    point = adjacent(no, x, y, f, exp)  # clear the connected region, count its size
    gravity(no)                         # let the remaining tiles fall
    if (parent.depth % 2 == 0):
        sc[0] = sc[0] + (point ** 2)
    else:
        sc[1] = sc[1] + (point ** 2)
    # Store the move in output form: 1-based row number, letter column.
    x = x + 1
    y = chr(y + 65)
    kid = node(no, parent.depth + 1, sc, x, y, -1, -1)
    return kid
def successor(mat):
    """Map each distinct move (two-letter key) to its region size, largest first.

    NOTE: mutates `mat` — adjacent() clears every region it visits — so
    callers must pass a disposable deep copy.
    """
    d = {}
    explored = [[0] * n for i in range(n)]
    for i in range(0, n):
        for j in range(0, n):
            if (explored[i][j] == 0 and mat[i][j] != '*'):
                v = adjacent(mat, i, j, mat[i][j], explored)
                k = chr(i + 65) + chr(j + 65)  # e.g. row 0, col 2 -> "AC"
                d[k] = v
    # Biggest regions first, so alpha-beta tries the strongest moves early.
    d = OrderedDict(sorted(d.items(), key=itemgetter(1), reverse=True))
    return d
def min_val(nod, A, B):
    """Alpha-beta MIN node: return the bounded (MAX score - MIN score) value.

    Cuts off and returns the static score when the board is cleared or when
    fewer than 100 s of the 300 s budget remain.
    """
    if (terminal(nod.board) or (300 - (time.time() - start_time) <= 100)):
        return (nod.score[0]) - (nod.score[1])
    mat = copy.deepcopy(nod.board)
    children = successor(mat)  # deepcopy needed: successor mutates its argument
    for each in children:
        x = ord(each[0]) - 65
        y = ord(each[1]) - 65
        child = create_successor(x, y, nod)
        res = max_val(child, A, B)
        B = min(B, res)
        if (B <= A):
            # Alpha cut-off: MAX already has a better option elsewhere.
            return A
    return B
def max_val(nod, A, B):
    """Alpha-beta MAX node; also records the chosen move/board at the root.

    Side effects: sets globals movex/movey/result_matrix when nod is the
    root, and bf (branching factor) at depth 0.
    """
    global result_matrix
    global movex, movey
    if (terminal(nod.board) or (300 - (time.time() - start_time) <= 100)):
        # nod.utility=(nod.score[0])-(nod.score[1])
        return (nod.score[0]) - (nod.score[1])
    mat = copy.deepcopy(nod.board)
    children = successor(mat)  # deepcopy needed: successor mutates its argument
    global bf
    if (nod.depth == 0):
        bf = len(children)
    for each in children:
        x = ord(each[0]) - 65
        y = ord(each[1]) - 65
        child = create_successor(x, y, nod)
        res = min_val(child, A, B)
        if (res > A):
            A = res
            nod.cr = child.row
            nod.cc = child.col
            if (nod == root):
                # Remember the best move found so far at the root.
                movex = child.row
                movey = child.col
                result_matrix = child.board
        if (A >= B):
            # Beta cut-off; keep the root move bookkeeping consistent.
            if (nod == root):
                movex = child.row
                movey = child.col
                result_matrix = child.board
            return B
    return A
start_time = time.time()
n = 26  # board is n x n
# Random board of digit strings '1'..'9'.
a = [[''] * n for i in range(n)]
for i in range(0, n):
    for j in range(0, n):
        a[i][j] = str(random.randint(1, 9))
A = -float("inf")
B = float("inf")
initial = node(a, 0, [0, 0], -1, -1, -1, -1)
root = initial
count = 1  # global node counter, incremented by create_successor
res = max_val(initial, A, B)
# Nodes expanded per second — persisted for later search-time budgeting.
calib = count / (time.time() - start_time)
#print(count)
#print (bf)
#print("--- %s seconds ---" % (time.time() - start_time))
fo = open("calibration.txt", "w")
fo.write(str(calib))
fo.close()
|
from home.url_management.base import BaseUrlRule
from accounts.models import VenueType
class VenueTypesUrlRule(BaseUrlRule):
    """URL rule mapping venue-type names to/from pretty slugs.

    Slug encoding: ' & ' -> '__' and ' ' -> '_' (reversed when parsing).
    """
    @classmethod
    def create_url(cls, identifier):
        """Return (and cache) the URL for a venue-type name.

        If the name is unknown, returns whatever get_stored_url gave back
        (presumably a falsy value — TODO confirm).
        """
        url = cls.get_stored_url(identifier)
        if not url:
            try:
                venue_type = VenueType.active_types.get(name=identifier)
            except VenueType.DoesNotExist:
                pass
            else:
                venue_type_alias = venue_type.name.replace(' & ', '__').replace(' ', '_')
                url = '/%s/' % venue_type_alias
                cls.store_url(identifier, url)
        return url

    @classmethod
    def parse_url(cls, path):
        """Resolve a one-component path to the venues view, or None if no match."""
        path_components = cls.get_path_components(path)
        # first element is a venue type name
        if len(path_components) == 1:
            try:
                venue_type_name = path_components[0].replace('__', ' & ').replace('_', ' ')
                venue_type = VenueType.active_types.get(name=venue_type_name)
            except VenueType.DoesNotExist:
                pass
            else:
                callback = 'venues.views.venues'
                extra_params = {'venue_type': venue_type.name}
                return callback, [], {'extra_params': extra_params}
        return None
# 1. 입력이 빈 문자열인 경우, 빈 문자열을 반환합니다.
# 2. 문자열 w를 두 "균형잡힌 괄호 문자열" u, v로 분리합니다. 단, u는 "균형잡힌 괄호 문자열"로 더 이상 분리할 수 없어야 하며, v는 빈 문자열이 될 수 있습니다.
# 3. 문자열 u가 "올바른 괄호 문자열" 이라면 문자열 v에 대해 1단계부터 다시 수행합니다.
# 3-1. 수행한 결과 문자열을 u에 이어 붙인 후 반환합니다.
# 4. 문자열 u가 "올바른 괄호 문자열"이 아니라면 아래 과정을 수행합니다.
# 4-1. 빈 문자열에 첫 번째 문자로 '('를 붙입니다.
# 4-2. 문자열 v에 대해 1단계부터 재귀적으로 수행한 결과 문자열을 이어 붙입니다.
# 4-3. ')'를 다시 붙입니다.
# 4-4. u의 첫 번째와 마지막 문자를 제거하고, 나머지 문자열의 괄호 방향을 뒤집어서 뒤에 붙입니다.
# 4-5. 생성된 문자열을 반환합니다.
def check_right_u(u):
    """Return True if the bracket depth of u never goes negative.

    (Per the problem statement u is already balanced, so this is the
    'correct bracket string' test.)
    """
    depth = 0
    for ch in u:
        depth += 1 if ch == '(' else -1
        if depth < 0:
            return False
    return True
def get_uv(w):
    """Split w into (u, v) where u is the shortest non-empty balanced prefix.

    Returns ('', '') when the running depth never returns to zero.
    """
    depth = 0
    for i, ch in enumerate(w):
        depth += 1 if ch == '(' else -1
        if depth == 0:
            return w[:i + 1], w[i + 1:]
    return '', ''
def reverse_mark(u):
    """Flip every bracket in u: '(' becomes ')' and vice versa."""
    return ''.join('(' if ch == ')' else ')' for ch in u)
def solution(p):
    """Convert a balanced bracket string into a correct one (recursive rule).

    Correct prefixes are kept as-is; incorrect ones are wrapped in '(' ')'
    with their interior flipped, as the problem procedure prescribes.
    """
    if not p:
        return ''
    u, v = get_uv(p)
    if check_right_u(u):
        return u + solution(v)
    return '(' + solution(v) + ')' + reverse_mark(u[1:-1])
# Smoke run on the simplest already-correct input.
p = [
    "()",
]
for i in p:
    print(solution(i))
|
#! python3
# To write files and make a directory
import os
# To filter string
import re
# For sleeping
import time
# Googles Text to speech lib
from gtts import gTTS
# File info reader, in this case, it is for determening the length of a mp3
from mutagen.mp3 import MP3
def format_text(text: str):
    """Drop everything except ASCII letters and spaces, then space -> underscore."""
    letters_and_spaces = re.sub(r'[^a-zA-Z ]+', '', text)
    return letters_and_spaces.replace(' ', '_')
def create_dir(dir_name: str):
    """Create dir_name (including parents) if it does not already exist.

    Uses exist_ok=True so the check-then-create race of the original
    isdir()/makedirs() pair cannot raise FileExistsError.
    """
    os.makedirs(dir_name, exist_ok=True)
def create_spell_out(text: str):
    """Map every character of each word to its NATO word ('_' -> 'underscore')."""
    words_for_letters = ['Alfa', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot',
                         'Golf', 'Hotel', 'India', 'Juliett', 'Kilo', 'Lima',
                         'Mike', 'November', 'Oscar', 'Papa', 'Quebec', 'Romeo',
                         'Sierra', 'Tango', 'Uniform', 'Victor', 'Whiskey',
                         'X-ray', 'Yankee', 'Zulu', 'underscore']
    spelled = []
    for word in text.split(' '):
        # '{' is the character after 'z', so ord('{') - 97 == 26 == 'underscore'.
        for letter in word.replace('_', '{'):
            spelled.append(words_for_letters[ord(letter.lower()) - 97])
    return spelled
def save_audio(text: str, filename: str, dir: str):
    """
    Converts text to audio with gTTS and saves it under *dir*.

    Notes
    -----
    If the .mp3 file extension is missing in the filename, it will be added.
    If a file with the same name exists, it will not save, only notify the user.

    Returns
    -------
    path : str
        Where the file was (or would have been) written.
    """
    # Make the path to the folder
    path = '{0}/{1}'.format(dir, filename)
    if not filename.endswith('.mp3'):
        path += '.mp3'
    # Generates the audio up front; saving is skipped on a name collision.
    tts = gTTS(text=text, lang='en')
    if os.path.isfile(path):
        # FIX: message said "will not safe" in the original.
        print("File named {0} already exist, will not save".format(path))
    else:
        tts.save(path)
    return path
def play_audio(path: str):
    """Play the mp3 at *path* and block for roughly its duration.

    os.startfile is Windows-only; the OS default player handles playback,
    so we sleep for the track length (via mutagen) to approximate blocking.
    """
    os.startfile(os.getcwd() + path[1:])  # path[1:] strips the leading '.' of './audio/...'
    duration = MP3(path).info.length
    time.sleep(duration)
# ---------- MAIN-PROGRAM ----------
if __name__ == '__main__':
    print('Welcome!')
    output_dir = './audio'
    create_dir(output_dir)
    # Repeatedly: read a sentence, spell it in NATO words, speak it.
    while True:
        text = format_text(input('What is the sentence?'))
        # NOTE(review): no separator after 'incoming' — the spoken text runs
        # the first NATO word into it; likely wants a trailing space.
        output_words = 'Message incoming' + ', '.join(create_spell_out(text))
        path = save_audio(output_words, text, output_dir)
        play_audio(path)
        if (input('Continue? y/n') not in ('y', 'yes')):
            break
|
# Generated by Django 2.0 on 2018-10-03 08:25
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the Check model introduced by migration 0006_check."""
    dependencies = [
        ('mili', '0006_check'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Check',
        ),
    ]
|
# Adjacent string literals concatenate into one value at compile time.
a = (
    "ㅈ디ㅏㅓ기ㅏㄷ적"
    "wekljrlkwejrklewjr"
    "welkjrlkjerklwejr"
)
print(a)
# Generated by Django 3.1.5 on 2021-01-22 08:08
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Car.gear to a choice CharField defaulting to 'Manual'."""
    dependencies = [
        ('cars', '0003_car_photo'),
    ]
    operations = [
        migrations.AlterField(
            model_name='car',
            name='gear',
            # NOTE(review): choices are (stored value, label) pairs; here the
            # labels are the constant-like names — confirm that is intended.
            field=models.CharField(choices=[('Robot', 'ROBOT_GEAR'), ('Automatic', 'AUTOMATIC_GEAR'), ('Manual', 'MANUAL_GEAR')], default='Manual', max_length=255),
        ),
    ]
|
# question https://www.hackerrank.com/challenges/python-arithmetic-operators/problem
# solution
if __name__ == '__main__':
    a = int(input())
    b = int(input())
    # One line per operation: sum, difference, product.
    for result in (a + b, a - b, a * b):
        print(result)
from http.server import HTTPServer, CGIHTTPRequestHandler
def run(server_class=HTTPServer, handler_class=CGIHTTPRequestHandler):
    """Serve CGI-enabled HTTP on every interface, port 8000, forever (blocks)."""
    bind_address = ('', 8000)
    server_class(bind_address, handler_class).serve_forever()

if __name__ == '__main__':
    run()
|
from . import activity, vo2
from measurement.measures import Distance, Mass, Speed
from datetime import timedelta
def calculate_calories_burned(
    activity: activity.Activity,
    distance: Distance,
    bodyweight: Mass,
    elevation_gain: Distance,
    duration: timedelta
) -> int:
    """
    Calculates the number of calories (rounded to the nearest integer) burned during an activity.
    The number of calories burned is calculated by taking the VO2 in ml/min/kg, and converting that to
    L/min (multiply by athlete bodyweight in kg and divide by 1000), then multiplying that number by 5
    kCalories/min, and finally multiplying that by the number of minutes. The result is then rounded
    to the nearest integer.
    Args:
        activity (Activity):
        distance (Distance):
        bodyweight (Mass):
        elevation_gain (Distance):
        duration (timedelta):
    Returns:
        int: The number of calories burned performing the activity
    """
    # Guard both zero-distance and zero-duration inputs, which would
    # otherwise divide by zero below.
    if distance.ft == 0:
        return 0
    if duration.total_seconds() == 0:
        return 0
    speed = Speed(mph=distance.mi / (duration.total_seconds() / 3600))
    grade = elevation_gain.ft / distance.ft  # unitless rise/run fraction
    vo2_est = vo2.get_VO2(activity, speed, grade)
    # Calculate VO2 in Liters/minute
    vo2_l_min = (vo2_est.get_value() * bodyweight.kg) / 1000
    # 5 kcal burned per liter of oxygen per minute.
    return int(round(vo2_l_min * 5 * duration.total_seconds() / 60, 0))
|
#!/usr/bin/python3
from pyrob.api import *
@task(delay=0.01)
def task_9_3():
    """Fill every cell of the x-by-x field except the two diagonals.

    First pass measures the field width x by walking right to the wall,
    then returns to the start column before painting. Each cell is visited
    from the origin and the robot returns after filling, so moves stay
    relative to the top-left corner.
    """
    x = 1
    while not wall_is_on_the_right():
        move_right()
        x = x + 1
    move_left(x - 1)
    for i in range(x):
        for j in range(x):
            if not (i == j or i + j == x - 1):  # skip both diagonals
                if i:
                    move_down(i)
                if j:
                    move_right(j)
                fill_cell()
                if i:
                    move_up(i)
                if j:
                    move_left(j)
    move_down(x - 1)

if __name__ == '__main__':
    run_tasks()
|
s = "I am an NLPer"

def ngram(words, N=2):
    """Count the N-grams of *words* (any sliceable sequence) into a dict.

    Works on strings (character N-grams) and lists (word N-grams) alike;
    keys are tuples of N consecutive elements, values are counts.
    """
    counts = {}
    for start in range(len(words)):
        if start + N - 1 >= len(words):
            continue
        gram = tuple(words[start:start + N])
        counts[gram] = counts.get(gram, 0) + 1
    return counts

# Character bigrams of the sentence, then word bigrams.
print(ngram(s))
print(ngram(s.split(" ")))
|
# python 2.7.3
import sys
import math
# Python 2 script: tally each device type over 6 entries, tracking the
# cheapest price seen per device.
m_cnt = {}
m_price = {}
for i in range(6):
    name = raw_input()
    device = raw_input()
    price = input()
    if device in m_cnt:
        m_cnt[device] += 1
        if price < m_price[device]:
            m_price[device] = price
    else:
        m_cnt[device] = 1
        m_price[device] = price
# Pick the most frequent device; ties broken by the lower price.
opt_device, opt_cnt = m_cnt.popitem()
opt_price = m_price[opt_device]
while m_cnt:
    temp_device, temp_cnt = m_cnt.popitem()
    temp_price = m_price[temp_device]
    if temp_cnt > opt_cnt or (temp_cnt == opt_cnt and temp_price < opt_price):
        opt_device = temp_device
        opt_price = temp_price
        opt_cnt = temp_cnt
print opt_device
|
import email
from calendar import timegm
from email.message import EmailMessage
import imaplib
from pteromyini.lib.web.liteemail.liteemail import LiteEmail
from pteromyini.lib.web.liteemail.message import Message
from pteromyini.lib.web.liteemail.parser import EmailBodyParser
from pteromyini.lib.web.liteemail.validator import MessageValidator
import time
class IMAPEmail(LiteEmail):
    """LiteEmail implementation backed by a (non-SSL) IMAP4 connection."""

    def __init__(self, login, password, host, body_parser: EmailBodyParser = None):
        super().__init__(login, password, host, body_parser)
        self._server = imaplib.IMAP4(host)

    def open(self):
        """Log in with the credentials supplied at construction time."""
        self._server.login(self._login, self._password)

    def get_messages(self, since, sender: str = None, body_parser: EmailBodyParser = None,
                     validator: MessageValidator = None, wait_time_sec: int = 120) -> list:
        """Poll the inbox every 10 s until at least one matching message arrives.

        Raises a plain Exception once wait_time_sec elapses with no results.
        """
        if body_parser is None:
            body_parser = self._body_parser
        # recheck msg until the deadline
        start_time = time.time()
        while time.time() < start_time + wait_time_sec:
            result = self._check_messages(since, sender, body_parser, validator)
            if len(result) != 0:
                return result
            else:
                time.sleep(10)
        raise Exception('Timeout waiting email')

    def _check_messages(self, since, sender: str, body_parser: EmailBodyParser, validator: MessageValidator) -> list:
        """Fetch matching messages once, keeping those the validator accepts."""
        email_uid = self._search_uids(since, sender)
        text_messages = []
        for uid in email_uid:
            msg = self._parse_body(uid, body_parser)
            if validator is None or validator.validate(msg):
                text_messages.append(msg)
        return text_messages

    def _parse_body(self, uid, body_parser) -> Message:
        """Fetch one message by UID and convert it into a Message object."""
        result, data = self._server.uid('fetch', uid, '(RFC822)')
        msg = email.message_from_bytes(data[0][1], _class=EmailMessage)
        if body_parser is not None:
            text = body_parser.text_from_body(msg)
        else:
            text = msg.get_payload()
        # Convert the Date header to a UTC epoch timestamp: parsedate_tz's
        # element 9 is the timezone offset in seconds, subtracted out here.
        date_tz = email.utils.parsedate_tz(msg['Date'])
        timestamp = timegm(date_tz) - date_tz[9]
        msg_result = Message(msg['Subject'], timestamp, text)
        return msg_result

    def _search_uids(self, since, sender: str = None):
        """Return inbox UIDs sent on/after *since*, optionally filtered by sender."""
        server = self._server
        server.select("inbox")
        date = since.strftime("%d-%b-%Y")  # (datetime.date.today() - datetime.timedelta(5)).strftime("%d-%b-%Y")
        if sender is None:
            result, data = server.uid('search', None, "(SENTSINCE {date})".format(date=date))
        else:
            result, data = server.uid('search', None,
                                      '(SENTSINCE {date} FROM "{sender}")'.format(date=date, sender=sender))
        return data[0].split()

    def close(self):
        """Terminate the IMAP session."""
        self._server.logout()
|
#!/usr/bin/python
import argparse
# Command line: SOURCE is either the encoded string itself or, with -f,
# the path of a file whose lines are decoded one by one.
parser = argparse.ArgumentParser()
parser.add_argument("SOURCE", help="String to be decoded")
parser.add_argument("-v", "--vigenere", metavar='KEY', help="Decode with vigenere cipher")
parser.add_argument("-b", "--binary", metavar='BITS', type=int, help="Decode binary string")
parser.add_argument("-f", action="store_true", help="Source file location")
args = parser.parse_args()
def decode_ascii(binstr, bits):
    """Decode a bit-string into text, `bits` bits per character.

    A decoded chr(8) (backspace) deletes the previously decoded character.

    FIX: the original always padded with `bits - len % bits` zeros, which
    appended a spurious NUL character whenever the length was already an
    exact multiple of `bits`; padding is now applied only when needed.
    """
    remainder = len(binstr) % bits
    if remainder:
        binstr = binstr + '0' * (bits - remainder)
    chrarray = []
    while len(binstr) > 0:
        chrnum = int(binstr[:bits], 2)
        if chrnum != 8:
            chrarray.append(chr(chrnum))
        elif len(chrarray) > 0:
            # Backspace: drop the last decoded character.
            chrarray = chrarray[:-1]
        binstr = binstr[bits:]
    decoded = "".join(chrarray)
    return decoded
def decode_vigenere(sometext, key):
    """Reverse a Vigenere cipher.

    Each key letter contributes the complementary shift 26 - (letter - 'a'),
    case is preserved, and non-alphabetic characters pass through without
    consuming a key position.
    """
    normalized = "".join(key.lower().split())
    shifts = [26 - (ord(ch) - 97) for ch in normalized]
    pieces = []
    position = 0
    for ch in sometext:
        if ch.isalpha():
            # Upper- and lower-case letters are shifted within their own range.
            base = 65 if ch.isupper() else 97
            shifted = ((ord(ch) - base + shifts[position % len(shifts)]) % 26) + base
            pieces.append(chr(shifted))
            position += 1
        else:
            pieces.append(ch)
    return "".join(pieces)
# Dispatch on the chosen cipher (Python 2 script): -b wins over -v; with -f
# the SOURCE argument is read as a file, one line decoded at a time.
if args.binary:
    bits = args.binary
    if not args.f:
        print decode_ascii(args.SOURCE, bits)
    else:
        fh = open(args.SOURCE, 'r')
        for line in fh.readlines():
            print decode_ascii(line, bits)
elif args.vigenere:
    key = args.vigenere
    if not args.f:
        key = args.vigenere
        print decode_vigenere(args.SOURCE, key)
    else:
        # File input for vigenere is not implemented.
        pass
|
from pyspark import SparkContext, SparkConf
def parse_line(line):
    """Parse one CSV record into (age, friend_count).

    Age is deliberately kept as a string (it is used as a grouping key);
    the friend count is converted to int for arithmetic.
    """
    fields = line.split(",")
    return (fields[2], int(fields[3]))
if __name__ == "__main__":
    # Average friend count per age, computed with the classic
    # (value, 1) pair / reduceByKey pattern.
    conf = SparkConf().setMaster("local").setAppName("key_value")
    sc = SparkContext(conf=conf)
    lines = sc.textFile("fakefriends.csv")
    age_friends = lines.map(parse_line)
    counts = age_friends.mapValues(lambda x: (x, 1))
    totals = counts.reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1]))
    averages = totals.mapValues(lambda x: x[0] / x[1])
    results = averages.collect()
    # FIX: plain loop instead of a list comprehension used only for its
    # side effect (the comprehension built a throwaway list of Nones).
    for result in results:
        print(result)
"""
@File: operate_mongo.py
@CreateTime: 2019/12/10 上午10:09
@Desc: 使用MongoDB数据库
使用链接 https://www.cnblogs.com/aademeng/articles/9779271.html
"""
import pymongo
from bson.objectid import ObjectId
from operate_database.settings import URI, DB_NAME, DOC_NAME, USERNAMR, PASSWORD
class MongoAction(object):
    """Demo wrapper around pymongo CRUD operations.

    Connects to URI, selects database DB_NAME, authenticates, and operates
    on collection DOC_NAME (all taken from operate_database.settings).
    """
    def __init__(self):
        self.client = pymongo.MongoClient(URI)
        self.db = self.client[DB_NAME]
        # NOTE(review): Database.authenticate was removed in pymongo 4 —
        # confirm the pinned pymongo version still supports it.
        self.db.authenticate(USERNAMR, PASSWORD)
        self.table = self.db[DOC_NAME]

    def insert_one_data(self):
        """Insert a single hard-coded student document and print its id."""
        student = {
            "id": "20170101",
            "name": "jordan",
            "age": 40,
            "gender": "male"
        }
        result = self.table.insert_one(student)
        print(result)
        print(result.inserted_id)

    def insert_many_data(self):
        """Insert two hard-coded student documents in one call."""
        student_one = {
            "id": "20171124",
            "name": "john",
            "age": 20,
            "gender": "male"
        }
        student_two = {
            "id": "20191121",
            "name": "tom",
            "age": 36,
            "gender": "woman"
        }
        result = self.table.insert_many([student_one, student_two])
        print(result)
        print(result.inserted_ids)

    def search_from_data(self):
        """Demonstrate find_one/find with equality, ObjectId, range and regex filters."""
        result = self.table.find_one({"name": "john"})
        print(result)
        # Look a document up by its ObjectId.
        result = self.table.find_one({'_id': ObjectId('593278c115c2602667ec6bae')})
        print(result)
        results = self.table.find({"age": 50})
        for result in results:
            print(result)
        # Range query: age greater than 20 (regex queries work similarly).
        results = self.table.find({'age': {'$gt': 20}})
        # Names starting with M.
        results = self.table.find({'name': {'$regex': '^M.*'}})

    def update_data(self):
        """
        Demonstrate update_one and update_many.
        :return:
        """
        # Update a single document: read, modify, write back with $set.
        condition = {"name": "john"}
        student = self.table.find_one(condition)
        student['age'] = 26
        result = self.table.update_one(condition, {'$set': student})
        # Matched count vs actually-modified count.
        print(result.matched_count, result.modified_count)
        # Update many: every document with age > 15 gets age incremented by 1.
        condition = {"age": {"$gt": 15}}
        result = self.table.update_many(condition, {'$inc': {'age': 1}})
        print(result.matched_count, result.modified_count)

    def delete_data(self):
        """Delete one document by name, then every document with age < 25."""
        result = self.table.delete_one({"name": "john"})
        print(result.deleted_count)
        result = self.table.delete_many({"age": {"$lt": 25}})
        print(result.deleted_count)
if __name__ == '__main__':
    # Manual smoke run against the live database.
    ma = MongoAction()
    ma.update_data()
|
# -*- coding: utf-8 -*-
from chatterbot import ChatBot
# Create a new chat bot named Charlie
chatbot = ChatBot(
    'Charlie',
    # NOTE(review): the string `trainer` kwarg is the legacy (pre-1.0)
    # ChatterBot API — confirm the installed version still accepts it.
    trainer='chatterbot.trainers.ListTrainer'
)
# Seed conversation used as training data.
chatbot.train(
    [
        "Hello. How are you?",
        "I really like the new album of Shinedown",
        "I have already booked a ticket to Ireland",
        "Yep, I have visited MWC this year"
    ]
)
# Interactive loop: echo the bot's response to each prompt (Ctrl-C to exit).
while True:
    print(
        chatbot.get_response(
            input("What do you want to ask: ")
        )
    )
from util.db import DatabaseConnection
import ibm_db
class ItemModel():
    """Data-access helper for the DB2 ITEM table (columns: name, price)."""

    def __init__(self, name=None, price=None):
        self.name = name
        self.price = price

    def get_all_item(self):
        """Return all ITEM rows as a list of dicts; [] on error."""
        dbconn = DatabaseConnection()
        sql = "SELECT * from ITEM"
        lst = []
        conn = None
        try:
            conn = dbconn.getDbCon()
            stmt = ibm_db.exec_immediate(conn, sql)
            row = ibm_db.fetch_assoc(stmt)
            while row != False:
                print(row)
                lst.append(row)
                row = ibm_db.fetch_assoc(stmt)
            ibm_db.close(conn)
        except Exception as ex:
            # FIX: conn stayed unbound when getDbCon() itself failed,
            # making the original error path raise a second exception.
            if conn is not None:
                ibm_db.close(conn)
            print(ex)
        return lst

    def get_item(self, name):
        """Return ITEM rows matching *name*; [] on error.

        FIX: uses a parameterized prepared statement — the original
        concatenated `name` directly into the SQL string (SQL-injectable).
        """
        dbconn = DatabaseConnection()
        sql = "SELECT * from ITEM where name=?"
        lst = []
        conn = None
        try:
            conn = dbconn.getDbCon()
            stmt = ibm_db.prepare(conn, sql)
            ibm_db.bind_param(stmt, 1, name)
            ibm_db.execute(stmt)
            row = ibm_db.fetch_assoc(stmt)
            while row != False:
                print(row)
                lst.append(row)
                row = ibm_db.fetch_assoc(stmt)
            ibm_db.close(conn)
        except Exception as ex:
            if conn is not None:
                ibm_db.close(conn)
            print(ex)
        return lst

    def save_item(self, dict):
        """Insert one row; *dict* must carry 'name' and 'price'. Returns ''."""
        dbconn = DatabaseConnection()
        sql = "INSERT INTO ITEM values (?,?)"
        conn = None
        try:
            conn = dbconn.getDbCon()
            stmt = ibm_db.prepare(conn, sql)
            ibm_db.bind_param(stmt, 1, dict['name'])
            ibm_db.bind_param(stmt, 2, dict['price'])
            ibm_db.execute(stmt)
            ibm_db.close(conn)
        except Exception as ex:
            if conn is not None:
                ibm_db.close(conn)
            print(ex)
        return ""
from logger.models import Log
def log_cron(cron, action, data=''):
    """Persist a Log row attributed directly to *cron*."""
    Log(cron=cron, action=action, data=data).save()
def log_mop(mop, action, data=''):
    """Persist a Log row for *mop*, attributed to its player's cron."""
    owning_cron = mop.player.cron
    Log(cron=owning_cron, mop=mop, action=action, data=data).save()
"""
Analysis dashboards module.
"""
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
import copy
from datetime import datetime, timedelta
import json
import logging
import re
import numpy as np
import pandas as pd
from flask_login import login_required
from flask import render_template, request
from sqlalchemy import and_
from app.dashboards import blueprint
from cropcore import queries
from cropcore.utils import (
download_csv,
parse_date_range_argument,
query_result_to_array,
)
from cropcore.structure import SQLA as db
from cropcore.structure import (
SensorClass,
TypeClass,
ReadingsAegisIrrigationClass,
ReadingsEnergyClass,
ReadingsAranetCO2Class,
ReadingsAranetTRHClass,
ReadingsAranetAirVelocityClass,
)
from cropcore.constants import CONST_MAX_RECORDS, CONST_TIMESTAMP_FORMAT
# Temperature constants
# Per-zone temperature bin edges (presumably °C, matching the Aranet T&RH
# readings — TODO confirm units).
TEMP_BINS = {
    "Propagation": [0.0, 20.0, 23.0, 25.0, 144.0],
    "FrontFarm": [0.0, 18.0, 21.0, 25.0, 144.0],
    "Fridge": [0.0, 20.0, 23.0, 25.0, 144.0],
    "MidFarm": [0.0, 20.0, 23.0, 25.0, 144.0],
    "BackFarm": [0.0, 20.0, 25.0, 28.0, 144.0],
    "Tunnel": [0.0, 20.0, 25.0, 28.0, 144.0],
    "R&D": [0.0, 20.0, 23.0, 25.0, 144.0],
}
# TODO Read these from the database.
# Sensor id -> farm zone name.
# NOTE(review): constant name is misspelled ("CATEORIES") but may be
# imported elsewhere, so it is left unchanged.
SENSOR_CATEORIES = {
    18: "MidFarm",
    19: "Tunnel",
    20: "Propagation",
    21: "FrontFarm",
    22: "BackFarm",
    23: "MidFarm",
    24: "R&D",
    25: "R&D",
    26: "Fridge",
    27: "MidFarm",
    48: "Propagation",
    49: "R&D",
}
# Ventilation constants
CONST_SFP = 2.39  # specific fan power
CONST_VTOT = 20337.0  # total volume – m3
DEFAULT_SENSOR_TYPE = "Aranet T&RH"
# Some data that varies based on sensor type.
# DATA_TABLES_BY_SENSOR_TYPE names the class (or subquery) for the readings
# table; values are thunks so nothing touches the database at import time.
DATA_TABLES_BY_SENSOR_TYPE = {
    "Aranet T&RH": lambda: queries.trh_with_vpd(db.session).subquery().c,
    "Aranet CO2": lambda: ReadingsAranetCO2Class,
    "Aranet Air Velocity": lambda: ReadingsAranetAirVelocityClass,
    "Aegis II": lambda: ReadingsAegisIrrigationClass,
}
# DATA_COLUMNS_BY_SENSOR_TYPE names the columns of that table that we want to plot as
# data, and gives them human friendly names to display on the UI.
# TODO Could the below data be read from the database?
DATA_COLUMNS_BY_SENSOR_TYPE = {
    "Aranet T&RH": [
        {"column_name": "temperature", "ui_name": "Temperature (°C)"},
        {"column_name": "humidity", "ui_name": "Humidity (%)"},
        {"column_name": "vpd", "ui_name": "VPD (Pa)"},
    ],
    "Aranet CO2": [
        {"column_name": "co2", "ui_name": "CO2 (ppm)"},
    ],
    "Aranet Air Velocity": [
        {"column_name": "air_velocity", "ui_name": "Air velocity (m/s)"},
    ],
    "Aegis II": [
        {"column_name": "temperature", "ui_name": "Temperature (°C)"},
        {"column_name": "pH", "ui_name": "pH"},
        {"column_name": "dissolved_oxygen", "ui_name": "Dissolved oxygen (%)"},
        {"column_name": "conductivity", "ui_name": "Conductivity (μS)"},
        {"column_name": "turbidity", "ui_name": "Turbidity"},
        {"column_name": "peroxide", "ui_name": "Peroxide (ppm)"},
    ],
}
# The above constants are defined in terms of names of the sensor_types. The code
# operates in terms of ids rather than names, so we wrap the above dictionaries into
# functions.
def get_sensor_type_name(sensor_type_id):
    """Given a sensor type ID, get the name of the sensor type from the database.

    Returns None if no sensor type with that ID exists.
    """
    query = db.session.query(TypeClass.sensor_type).filter(
        TypeClass.id == sensor_type_id
    )
    row = db.session.execute(query).fetchone()
    # fetchone() yields a one-element row; unwrap it to the bare name.
    return row[0] if isinstance(row, Iterable) else row
def get_sensor_type_id(sensor_type_name):
    """Given a sensor type name, get the ID of the sensor type from the database.

    Returns None if no sensor type with that name exists.
    """
    query = db.session.query(TypeClass.id).filter(
        TypeClass.sensor_type == sensor_type_name
    )
    row = db.session.execute(query).fetchone()
    # fetchone() yields a one-element row; unwrap it to the bare ID.
    return row[0] if isinstance(row, Iterable) else row
def get_table_by_sensor_type(sensor_type_id):
    """Return the SQLAlchemy table/subquery corresponding to a given sensor type ID.

    Returns None for sensor types we have no table metadata for. The
    resolution (name lookup included) is cached in DATA_TABLES_BY_SENSOR_TYPE
    under the ID, so the database round trip happens at most once per type.
    """
    # Because of how global constants work in Flask, DATA_TABLES_BY_SENSOR_TYPE
    # holds functions that return the relevant table/subquery, rather than the
    # tables/subqueries themselves. Hence the trailing () call below.
    # (The original comment referred to DATA_COLUMNS_BY_SENSOR_TYPE; the
    # constant actually used here is DATA_TABLES_BY_SENSOR_TYPE.)
    global DATA_TABLES_BY_SENSOR_TYPE
    if sensor_type_id not in DATA_TABLES_BY_SENSOR_TYPE:
        sensor_type_name = get_sensor_type_name(sensor_type_id)
        # Fall back to a callable returning None for unknown types, so the
        # negative result is cached as well.
        value = DATA_TABLES_BY_SENSOR_TYPE.get(sensor_type_name, lambda: None)
        DATA_TABLES_BY_SENSOR_TYPE[sensor_type_id] = value
    return DATA_TABLES_BY_SENSOR_TYPE[sensor_type_id]()
def get_columns_by_sensor_type(sensor_type_id):
    """Return the names of the data columns in the table corresponding to a given sensor
    type ID.

    By "data columns" we mean the ones that depend on the sensor type and hold the
    actual data, e.g. temperature and humidity, but not timestamp. The return values are
    dictionaries with two keys, "column_name" for the name by which the database knows
    this column, and "ui_name" for nice human-readable name fit for a UI.

    Returns None for sensor types we have no column metadata for. Results
    (including the None for unknown types) are cached under the ID so the
    database name lookup happens at most once per type.
    """
    global DATA_COLUMNS_BY_SENSOR_TYPE
    if sensor_type_id not in DATA_COLUMNS_BY_SENSOR_TYPE:
        sensor_type_name = get_sensor_type_name(sensor_type_id)
        # .get returns None for unknown names, matching the previous fallback.
        DATA_COLUMNS_BY_SENSOR_TYPE[sensor_type_id] = DATA_COLUMNS_BY_SENSOR_TYPE.get(
            sensor_type_name
        )
    return DATA_COLUMNS_BY_SENSOR_TYPE[sensor_type_id]
def get_default_sensor_type():
    """Get the ID of the default sensor type.

    This is the database ID corresponding to the DEFAULT_SENSOR_TYPE name.
    """
    return get_sensor_type_id(DEFAULT_SENSOR_TYPE)
def is_valid_sensor_type(sensor_type_id):
    """Return True if we have the necessary metadata about the table and its columns
    needed for fetching and plotting data for the given sensor type, otherwise False.
    """
    table = get_table_by_sensor_type(sensor_type_id)
    columns = get_columns_by_sensor_type(sensor_type_id)
    return table is not None and columns is not None
# # # DONE WITH GLOBAL CONSTANTS AND SENSOR TYPE METADATA, BEGIN MAIN CONTENT # # #
def resample(df, bins, dt_from, dt_to):
    """
    Resamples (adds missing date/temperature bin combinations) to a dataframe.

    Arguments:
        df: dataframe with columns "date", "temp_bin" (str) and "temp_cnt"
        bins: temperature bins as a list of bin edges
        dt_from: date range from
        dt_to: date range to
    Returns:
        bins_list: a list of temperature bin labels, e.g. "(20.0, 23.0]"
        df_list: a list of dataframes (columns "date", "temp_cnt"), one per bin
    """
    bins_list = [
        "(%.1f, %.1f]" % (bins[i], bins[i + 1]) for i in range(len(bins) - 1)
    ]
    date_min = min(df["date"].min(), dt_from)
    date_max = max(df["date"].max(), dt_to)
    # Collect all missing (date, bin) rows and add them in a single concat:
    # DataFrame.append was removed in pandas 2.0, and appending row-by-row
    # inside the loop was quadratic anyway.
    existing = set(zip(df["date"], df["temp_bin"]))
    missing = []
    for n in range(int((date_max - date_min).days) + 1):
        day = date_min + timedelta(n)
        for temp_range in bins_list:
            if (day, temp_range) not in existing:
                missing.append({"date": day, "temp_bin": temp_range, "temp_cnt": 0})
    if missing:
        df = pd.concat([df, pd.DataFrame(missing)], ignore_index=True)
    df = df.sort_values(by=["date", "temp_bin"], ascending=True)
    df.reset_index(inplace=True, drop=True)
    df_list = []
    for bin_range in bins_list:
        # drop() returns a fresh frame, avoiding chained-deletion warnings on
        # the slice that `del df_bin["temp_bin"]` used to trigger.
        df_bin = df[df["temp_bin"] == bin_range].drop(columns=["temp_bin"])
        df_bin.reset_index(inplace=True, drop=True)
        df_list.append(df_bin)
    return bins_list, df_list
def lights_energy_use(dt_from_, dt_to_):
    """
    Energy use from Carpenter's place (with lights - called Clapham in the database).

    Estimates, by several independent heuristics, for how many hours per
    "energy day" the lights were on, and averages across the heuristics.

    Arguments:
        dt_from_: date range from
        dt_to_: date range to
    Returns:
        lights_results_df - a pandas dataframe with columns "date" and
        "mean_lights_on" (mean across heuristics of hours-on per energy day)
    """
    # An "energy date" runs mid-afternoon to mid-afternoon (see below), so the
    # query window is padded beyond the requested dates to cover whole days.
    dt_from = pd.to_datetime(dt_from_.date()) + timedelta(hours=14)
    dt_to = pd.to_datetime(dt_to_.date()) + timedelta(days=1, hours=15)
    d_from = pd.to_datetime(dt_from_.date())
    d_to = pd.to_datetime(dt_to_.date())
    col_ec = "electricity_consumption"
    sensor_device_id = "Clapham"
    lights_on_cols = []
    # getting energy data for the analysis
    query = db.session.query(
        ReadingsEnergyClass.timestamp,
        ReadingsEnergyClass.electricity_consumption,
    ).filter(
        and_(
            SensorClass.device_id == sensor_device_id,
            ReadingsEnergyClass.sensor_id == SensorClass.id,
            ReadingsEnergyClass.timestamp >= dt_from,
            ReadingsEnergyClass.timestamp <= dt_to,
        )
    )
    df = pd.read_sql(query.statement, query.session.bind)
    if df.empty:
        return pd.DataFrame({"date": [], "mean_lights_on": []})
    # Sort by time and reset the index. (Fix: the previous code called
    # .reset_index(inplace=True) on the temporary returned by sort_values,
    # which left df itself unchanged.)
    df = df.sort_values(by=["timestamp"], ascending=True).reset_index(drop=True)
    # grouping data by date-hour
    energy_hour = (
        df.groupby(
            by=[
                df["timestamp"].map(
                    lambda x: pd.to_datetime(
                        "%04d-%02d-%02d-%02d" % (x.year, x.month, x.day, x.hour),
                        format="%Y-%m-%d-%H",
                    )
                ),
            ]
        )["electricity_consumption"]
        .sum()
        .reset_index()
    )
    # Sorting and resetting index (same sort_values/reset_index fix as above).
    energy_hour = energy_hour.sort_values(
        by=["timestamp"], ascending=True
    ).reset_index(drop=True)
    # energy dates. Energy date starts from 4pm each day and lasts for 24 hours.
    # NOTE(review): the cutoff used below is hour 15 (3pm), not 4pm as this
    # comment claims -- confirm which is intended.
    energy_hour.loc[
        energy_hour["timestamp"].dt.hour < 15, "energy_date"
    ] = pd.to_datetime((energy_hour["timestamp"] + timedelta(days=-1)).dt.date)
    energy_hour.loc[
        energy_hour["timestamp"].dt.hour >= 15, "energy_date"
    ] = pd.to_datetime(energy_hour["timestamp"].dt.date)
    # Classification of lights being on
    # Lights ON 1: Lights turn on at 4pm and turn off at 9am, as scheduled.
    # NOTE(review): the code actually tests hour >= 17 or hour < 10 -- confirm
    # the schedule against the comment above.
    energy_hour["lights_on_1"] = energy_hour["timestamp"].apply(
        lambda x: 1 if (x.hour >= 17 or x.hour < 10) else 0
    )
    lights_on_cols.append("lights_on_1")
    # Lights ON 2: Lights are calculated by estimating the lighting use as between
    # the minima of two consecutive days. The lights are considered on when the
    # energy use is above the day's first quartile of lighting of this difference.
    # energy_hour['lights_on_2'] = 0
    # lights_on_cols.append('lights_on_2')
    # Lights ON 3: Lights are assumed to be on if the energy demand is over 30 kW
    # (max load of the extraction fan)
    energy_hour["lights_on_3"] = energy_hour[col_ec].apply(
        lambda x: 1 if (x > 30.0) else 0
    )
    lights_on_cols.append("lights_on_3")
    # Lights ON 4: Lights are assumed to turn on at the time of largest energy use
    # increase in the day, and turn off at the time of largest energy decrease of
    # the day.
    # estimating energy difference
    energy_hour["dE"] = energy_hour[col_ec] - energy_hour[col_ec].shift(1)
    energy_hour["dE"] = energy_hour["dE"].fillna(0.0)
    # finding max increase and min decrease
    energy_hour["dE_min"] = energy_hour.groupby("energy_date")["dE"].transform("min")
    energy_hour["dE_max"] = energy_hour.groupby("energy_date")["dE"].transform("max")
    energy_hour.loc[
        np.isclose(energy_hour["dE_max"], energy_hour["dE"]), "lights_on_4"
    ] = 1
    energy_hour.loc[
        np.isclose(energy_hour["dE_min"], energy_hour["dE"]), "lights_on_4"
    ] = 0
    # Forward-fill the on/off state between the switch points set above.
    prev_row_value = None
    for df_index in energy_hour.index:
        if df_index > 0:
            if np.isnan(energy_hour.loc[df_index, "lights_on_4"]) and not np.isnan(
                prev_row_value
            ):
                energy_hour.loc[df_index, "lights_on_4"] = prev_row_value
        prev_row_value = energy_hour.loc[df_index, "lights_on_4"]
    lights_on_cols.append("lights_on_4")
    # Lights ON 5: Lights are assumed on if the energy use is over 0.9
    # times the days' energy use mean, and the energy demand is over 30 kW.
    energy_hour["energy_date_mean"] = energy_hour.groupby("energy_date")[
        col_ec
    ].transform("mean")
    energy_hour["lights_on_5"] = np.where(
        (energy_hour[col_ec] > 30.0)
        & (energy_hour[col_ec] > 0.9 * energy_hour["energy_date_mean"]),
        1,
        0,
    )
    lights_on_cols.append("lights_on_5")
    # getting the mean value of lights on per day
    energy_date_df = energy_hour.loc[
        (energy_hour["energy_date"] >= d_from) & (energy_hour["energy_date"] <= d_to)
    ]
    energy_date_df = (
        energy_date_df.groupby(by=["energy_date"])[lights_on_cols].sum().reset_index()
    )
    energy_date_df["mean_lights_on"] = energy_date_df[lights_on_cols].sum(axis=1) / len(
        lights_on_cols
    )
    energy_date_df["date"] = energy_date_df["energy_date"].dt.strftime("%Y-%m-%d")
    lights_results_df = energy_date_df[["date", "mean_lights_on"]]
    return lights_results_df
def ventilation_energy_use(dt_from, dt_to):
    """
    In our data this is called Carpenter’s Place. This reading only counts energy use for
    the second extraction fan.

    Arguments:
        dt_from: date range from
        dt_to: date range to
    Returns:
        ventilation_results_df - a pandas dataframe with columns "timestamp"
        (hourly, as a string) and "ach" (air changes per hour)
    """
    sensor_device_id = "1a Carpenters Place"
    # getting energy data for the analysis
    query = db.session.query(
        ReadingsEnergyClass.timestamp,
        ReadingsEnergyClass.electricity_consumption,
    ).filter(
        and_(
            SensorClass.device_id == sensor_device_id,
            ReadingsEnergyClass.sensor_id == SensorClass.id,
            ReadingsEnergyClass.timestamp >= dt_from,
            ReadingsEnergyClass.timestamp <= dt_to,
        )
    )
    df = pd.read_sql(query.statement, query.session.bind)
    if df.empty:
        return pd.DataFrame({"timestamp": [], "ach": []})
    # Sort by time and reset the index. (Fix: the previous code called
    # .reset_index(inplace=True) on the temporary returned by sort_values,
    # which left df itself unchanged.)
    df = df.sort_values(by=["timestamp"], ascending=True).reset_index(drop=True)
    # grouping data by date-hour
    energy_hour = (
        df.groupby(
            by=[
                df["timestamp"].map(
                    lambda x: "%04d-%02d-%02d %02d:00"
                    % (x.year, x.month, x.day, x.hour)
                ),
            ]
        )["electricity_consumption"]
        .sum()
        .reset_index()
    )
    # Sorting and resetting index (same sort_values/reset_index fix as above).
    energy_hour = energy_hour.sort_values(
        by=["timestamp"], ascending=True
    ).reset_index(drop=True)
    # Calculating air exchange per hour. The volume is halved -- presumably
    # because this meter covers only one of two extraction fans (see the
    # docstring); confirm against the ventilation model.
    energy_hour["ach"] = (
        energy_hour["electricity_consumption"] / CONST_SFP * 3600.0 / (CONST_VTOT / 2.0)
    )
    ventilation_results_df = energy_hour[["timestamp", "ach"]]
    return ventilation_results_df
def aranet_trh_analysis(dt_from, dt_to):
    """
    Performs data analysis for Aranet Temperature+Relative Humidity sensors.

    Fetches all T&RH readings in the range, joined with the sensor names, and
    delegates to temperature_range_analysis.

    Arguments:
        dt_from: date range from
        dt_to: date range to
    Returns:
        sensor_names: a list of sensor names
        sensor_temp_ranges: json data with temperate ranges
    """
    logging.info(
        "Calling aranet_trh_analysis with parameters %s %s",
        dt_from.strftime(CONST_TIMESTAMP_FORMAT),
        dt_to.strftime(CONST_TIMESTAMP_FORMAT),
    )
    conditions = and_(
        ReadingsAranetTRHClass.sensor_id == SensorClass.id,
        ReadingsAranetTRHClass.timestamp >= dt_from,
        ReadingsAranetTRHClass.timestamp <= dt_to,
    )
    query = db.session.query(
        ReadingsAranetTRHClass.timestamp,
        ReadingsAranetTRHClass.sensor_id,
        SensorClass.name,
        ReadingsAranetTRHClass.temperature,
        ReadingsAranetTRHClass.humidity,
    ).filter(conditions)
    df = pd.read_sql(query.statement, query.session.bind)
    logging.info("Total number of records found: %d", len(df.index))
    return temperature_range_analysis(df, dt_from, dt_to)
def temperature_range_analysis(temp_df, dt_from, dt_to):
    """
    Performs temperature range analysis on a given pandas dataframe.

    Arguments:
        temp_df: dataframe with columns timestamp, sensor_id, name,
            temperature, humidity
        dt_from: date range from
        dt_to: date range to
    Returns:
        The number of sensors for which data was produced, and a JSON string
        mapping sensor id to {"name", "bins", "data"} for the dashboard.
    """
    df = copy.deepcopy(temp_df)
    df_unique_sensors = df[["sensor_id", "name"]].drop_duplicates(["sensor_id", "name"])
    sensor_ids = df_unique_sensors["sensor_id"].tolist()
    sensor_names = df_unique_sensors["name"].tolist()
    # extracting date from datetime
    df["date"] = pd.to_datetime(df["timestamp"].dt.date)
    # Sort by time and reset the index. (Fix: the previous code chained
    # reset_index(inplace=True) onto sort_values, which was a no-op on df.)
    df = df.sort_values(by=["timestamp"], ascending=True).reset_index(drop=True)
    data_by_sensor_id = {}
    for sensor_name, sensor_id in zip(sensor_names, sensor_ids):
        df_sensor = df[df["sensor_id"] == sensor_id]
        # grouping data by date-hour and sensor id
        sensor_grp = df_sensor.groupby(
            by=[
                df_sensor.timestamp.map(
                    lambda x: "%04d-%02d-%02d-%02d" % (x.year, x.month, x.day, x.hour)
                ),
                "date",
            ]
        )
        # estimating hourly temperature mean values
        sensor_grp_temp = sensor_grp["temperature"].mean().reset_index()
        try:
            bins = TEMP_BINS[SENSOR_CATEORIES[sensor_id]]
        except KeyError:
            logging.error(
                f"Don't know how to categorise or bin sensor {sensor_id} "
                "in the dashboard."
            )
            continue
        # binning temperature values
        sensor_grp_temp["temp_bin"] = pd.cut(sensor_grp_temp["temperature"], bins)
        # converting bins to str
        sensor_grp_temp["temp_bin"] = sensor_grp_temp["temp_bin"].astype(str)
        # get bin counts for each sensor-day combination
        sensor_grp_date = sensor_grp_temp.groupby(by=["date", "temp_bin"])
        sensor_cnt = sensor_grp_date["temperature"].count().reset_index()
        sensor_cnt.rename(columns={"temperature": "temp_cnt"}, inplace=True)
        # Adding missing date/temp_bin combos
        bins_list, df_list = resample(sensor_cnt, bins, dt_from, dt_to)
        data_by_sensor_id[sensor_id] = {
            "name": sensor_name,
            "bins": bins_list,
            "data": [
                # Loop variable renamed from "df" so it no longer shadows the
                # outer dataframe of the same name.
                {
                    "date": df_bin["date"].dt.strftime("%Y-%m-%d").to_list(),
                    "count": df_bin["temp_cnt"].to_list(),
                }
                for df_bin in df_list
            ],
        }
    return len(data_by_sensor_id.keys()), json.dumps(data_by_sensor_id)
def fetch_sensor_data(dt_from, dt_to, sensor_type, sensor_ids):
    """Fetch readings for the given sensors and date range from the database.

    Raises ValueError if we lack the table/column metadata for the sensor type.
    Returns a dataframe with timestamp, sensor_id, name, plus the data columns
    for this sensor type.
    """
    sensor_type_name = get_sensor_type_name(sensor_type)
    if not is_valid_sensor_type(sensor_type):
        raise ValueError(f"Don't know how to fetch data for sensor type {sensor_type}")
    data_table = get_table_by_sensor_type(sensor_type)
    columns = get_columns_by_sensor_type(sensor_type)
    data_table_columns = [getattr(data_table, c["column_name"]) for c in columns]
    conditions = and_(
        data_table.sensor_id == SensorClass.id,
        data_table.timestamp >= dt_from,
        data_table.timestamp <= dt_to,
        data_table.sensor_id.in_(sensor_ids),
    )
    query = db.session.query(
        data_table.timestamp,
        data_table.sensor_id,
        SensorClass.name,
        *data_table_columns,
    ).filter(conditions)
    df = pd.read_sql(query.statement, query.session.bind)
    if sensor_type_name == "Aranet T&RH":
        # Rounding to two decimal places, because our precision isn't infinite,
        # and long floats look really ugly on the front end.
        df.loc[:, "vpd"] = df.loc[:, "vpd"].round(2)
    return df
@blueprint.route("/aranet_trh_dashboard")
@login_required
def aranet_trh_dashboard():
    """Render the Aranet T&RH dashboard for the requested date range."""
    dt_from, dt_to = parse_date_range_argument(request.args.get("range"))
    num_sensors, bins_json = aranet_trh_analysis(dt_from, dt_to)
    date_format = "%B %d, %Y"
    return render_template(
        "aranet_trh_dashboard.html",
        num_sensors=num_sensors,
        temperature_bins_json=bins_json,
        dt_from=dt_from.strftime(date_format),
        dt_to=dt_to.strftime(date_format),
    )
@blueprint.route("/energy_dashboard")
@login_required
def energy_dashboard():
    """Render the energy dashboard: lights-on and ventilation analyses."""
    dt_from, dt_to = parse_date_range_argument(request.args.get("range"))
    # lights-on analysis
    lights_df = lights_energy_use(dt_from, dt_to)
    # ventilation analysis
    ventilation_df = ventilation_energy_use(dt_from, dt_to)
    # Join the two record sets into a single two-element JSON array string.
    combined_json = "[{},{}]".format(
        lights_df.to_json(orient="records"),
        ventilation_df.to_json(orient="records"),
    )
    energy_data = {"data": combined_json}
    return render_template(
        "energy_dashboard.html",
        energy_data=energy_data,
        dt_from=dt_from.strftime("%B %d, %Y"),
        dt_to=dt_to.strftime("%B %d, %Y"),
    )
# # # TIMESERIES DASHBOARD # # #
def add_mean_over_sensors(sensor_type, sensor_ids, df, roll_window_minutes=10):
    """Take the dataframe for timeseries, and add data for a new "sensor" that's the
    mean of all the ones in the data.

    Arguments:
        sensor_type: database ID of the sensor type, used to find data columns
        sensor_ids: IDs of the sensors in df (unused; kept for interface
            compatibility)
        df: dataframe with columns timestamp, sensor_id, name, plus the data
            columns for this sensor type
        roll_window_minutes: width of the rolling-mean smoothing window
    Returns:
        df with the rows of the "mean" pseudo-sensor prepended.
    """
    if len(df) == 0:
        return df
    column_names = [c["column_name"] for c in get_columns_by_sensor_type(sensor_type)]
    # Average only the data columns: "name" is a string column, and taking
    # .mean() over it raises TypeError on pandas >= 2.0.
    df_mean = df.groupby("timestamp")[column_names].mean()
    df_mean.loc[:, "sensor_id"] = "mean"
    df_mean.loc[:, "name"] = "mean"
    # The sensor data comes with a 10 minute frequency. However, the sensors may be
    # "phase shifted" with respect to each other, e.g. one may have data for 00 and 10,
    # while another may have 05 and 15. A 10 minute rolling mean smooths out these
    # differences.
    roll_window = timedelta(minutes=roll_window_minutes)
    for column_name in column_names:
        df_mean[column_name] = df_mean[column_name].rolling(roll_window).mean()
    df_mean = df_mean.reset_index()
    df = pd.concat((df_mean, df), axis=0)
    return df
def fetch_all_sensor_types():
    """Get all sensor types from the CROP database, for which we know how to render the
    timeseries dashboard.

    Returns:
        List of dictionaries with keys "id" (int) and "sensor_type" (str).
    """
    query = db.session.query(TypeClass.id, TypeClass.sensor_type)
    rows = query_result_to_array(db.session.execute(query).fetchall())
    # Keep only the types we have table/column metadata for.
    return [row for row in rows if is_valid_sensor_type(row["id"])]
def fetch_all_sensors(sensor_type):
    """Get all sensors of a given sensor type from the CROP database.

    Arguments:
        sensor_type: The database ID (primary key) of the sensor type.
    Returns:
        Dictionary keyed by sensor "id" (int), in ascending id order, whose
        values are dictionaries with keys "id", "aranet_code", and "name".
        (Note: despite earlier documentation, this is a dict, not a list.)
    """
    query = db.session.query(
        SensorClass.id,
        SensorClass.aranet_code,
        SensorClass.name,
    ).filter(SensorClass.type_id == sensor_type)
    sensors = db.session.execute(query).fetchall()
    sensors = query_result_to_array(sensors)
    # dicts preserve insertion order, so sorting before building the dict
    # yields ascending-id iteration order for the UI.
    sensors = {s["id"]: s for s in sorted(sensors, key=lambda x: x["id"])}
    return sensors
@blueprint.route("/timeseries_dashboard", methods=["GET", "POST"])
@login_required
def timeseries_dashboard():
    """Render the timeseries dashboard.

    GET renders plots and summary statistics for the selected sensors and date
    range; POST with the same query string downloads the data as CSV. When the
    query string lacks dates or sensor IDs (or the sensor type is unknown), a
    selector-only version of the page is rendered instead.
    """
    # Read query string
    dt_from = request.args.get("startDate")
    dt_to = request.args.get("endDate")
    sensor_ids = request.args.get("sensorIds")
    if sensor_ids is not None:
        # sensor_ids is passed as a comma-separated (or space or semicolon, although
        # those aren't currently used) string of ints, split it into a list of ints.
        sensor_ids = tuple(map(int, re.split(r"[ ;,]+", sensor_ids.rstrip(" ,;"))))
    sensor_type = request.args.get("sensorType")
    if sensor_type is None:
        sensor_type = get_default_sensor_type()
    else:
        sensor_type = int(sensor_type)
    # Get the data from the database that will be required in all scenarios for how the
    # page might be rendered.
    sensor_types = fetch_all_sensor_types()
    all_sensors = fetch_all_sensors(sensor_type)
    # If we don't have the information necessary to plot data for sensors, just render
    # the selector version of the page.
    if (
        dt_from is None
        or dt_to is None
        or sensor_ids is None
        or not is_valid_sensor_type(sensor_type)
    ):
        # Default the date pickers to the last week.
        today = datetime.today()
        dt_from = today - timedelta(days=7)
        dt_to = today
        return render_template(
            "timeseries_dashboard.html",
            sensor_type=sensor_type,
            sensor_types=sensor_types,
            all_sensors=all_sensors,
            sensor_ids=sensor_ids,
            dt_from=dt_from,
            dt_to=dt_to,
            data=dict(),
            summaries=dict(),
            data_columns=[],
        )
    # Convert datetime strings to objects and make dt_to run to the end of the day in
    # question.
    dt_from = datetime.strptime(dt_from, "%Y%m%d")
    dt_to = (
        datetime.strptime(dt_to, "%Y%m%d")
        + timedelta(days=1)
        + timedelta(milliseconds=-1)
    )
    df = fetch_sensor_data(dt_from, dt_to, sensor_type, sensor_ids)
    if request.method == "POST":
        # POST means "download this data as CSV" rather than "render the page".
        df = df.sort_values("timestamp")
        return download_csv(df, "timeseries")
    data_keys = list(sensor_ids)
    if len(sensor_ids) > 1:
        df = add_mean_over_sensors(sensor_type, sensor_ids, df)
        # Insert at start, to make "mean" be the first one displayed on the page.
        data_keys.insert(0, "mean")
    data_columns = get_columns_by_sensor_type(sensor_type)
    data_dict = dict()
    summary_dict = dict()
    for key in data_keys:
        df_key = (
            df[df["sensor_id"] == key]
            .drop(columns=["sensor_id", "name"])
            .sort_values("timestamp")
        )
        # You may wonder, why we first to_json, and then json.loads. That's just to have
        # the data in a nice nested dictionary that a final json.dumps can deal with.
        data_dict[key] = json.loads(df_key.to_json(orient="records", date_format="iso"))
        # Round the summary stats to two decimals, for nice front end presentation.
        summary_dict[key] = json.loads(df_key.describe().round(2).to_json())
    return render_template(
        "timeseries_dashboard.html",
        sensor_type=sensor_type,
        sensor_types=sensor_types,
        all_sensors=all_sensors,
        sensor_ids=sensor_ids,
        dt_from=dt_from,
        dt_to=dt_to,
        data=data_dict,
        summaries=summary_dict,
        data_columns=data_columns,
    )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.