text stringlengths 8 6.05M |
|---|
import random
from Card import *
# Build the base deck (BARALHO) as card strings:
# 4 colors x 13 contents, plus two copies of each black card (contents 13-14).
BARALHO = []
for cor in range(4):
    for content in range(13):
        # str(obj) is the idiomatic spelling of obj.__str__()
        BARALHO.append(str(Carta(CardColor(cor), CardContent(content))))
for black_content in range(13, 15):
    # Black/wild cards appear twice each in the base deck.
    BARALHO.append(str(Carta(CardColor(4), CardContent(black_content))))
    BARALHO.append(str(Carta(CardColor(4), CardContent(black_content))))
class Baralho:
    """A shuffled double deck built from the module-level BARALHO card list."""

    def __init__(self):
        self.baralho = self._embaralhar()

    @staticmethod
    def _embaralhar():
        """Return two copies of the base deck in random order."""
        embaralhado = 2 * BARALHO.copy()
        random.shuffle(embaralhado)
        return embaralhado

    def dividir(self, qtd_players):
        """Deal 7 cards to each of qtd_players players.

        Returns:
            (hands, resto): a list with each player's 7-card hand and the
            remaining draw pile.

        Bug fix: the draw pile used self.baralho[inicio:-1], which silently
        dropped the last card of the deck; [inicio:] keeps every card.
        """
        cartas_por_jogador = []
        inicio = 0
        qtd = 7
        for _ in range(qtd_players):
            cartas_por_jogador.append(self.baralho[inicio:inicio + qtd])
            inicio += qtd
        resto = self.baralho[inicio:]
        return cartas_por_jogador, resto
|
from activity.models import TodoList
from activity.serializers import TodoListSerializer
from rest_framework.generics import ListCreateAPIView
from rest_framework.generics import RetrieveUpdateDestroyAPIView
class TodoListAPIListCreateView(ListCreateAPIView):
    """GET: list every TodoList; POST: create one."""
    queryset = TodoList.objects.all()
    serializer_class = TodoListSerializer


class TodoListAPIDetailView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE a single TodoList selected by pk.

    Bug fix: DRF generic views require `queryset` (or get_queryset());
    the `model` attribute was removed in DRF 3 and made this view fail
    with an assertion error at request time.
    """
    queryset = TodoList.objects.all()
    serializer_class = TodoListSerializer


# Lowercase aliases referenced by the URLconf.
todolistapilistcreateview = TodoListAPIListCreateView.as_view()
todolistapidetailview = TodoListAPIDetailView.as_view()
|
# One-line shell version of this script (base64-encoded copy piped to python3):
# echo aW1wb3J0IHJlcXVlc3RzCmhvc3QgPSAnIGh0dHA6Ly8xMjcuMC4wLjEnCmZvciBpIGluIHJhbmdlKDIwMDAsMjUwMCk6CiAgICBhZGQgPSBob3N0Kyc6JytzdHIoaSkKICAgIHRyeToKICAgICAgICBzID0gcmVxdWVzdHMuZ2V0KGFkZCkKICAgICAgICBwcmludChpKQogICAgICAgIHByaW50KHMudGV4dCkKICAgICAgICBleGl0KDEpCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoaSkKICAgICAgICBwYXNzCg== | base64 -d | python3
import requests

# Probe local ports 2000-2499 and stop at the first one answering HTTP.
host = 'http://127.0.0.1'  # leading space removed: it made the URL invalid
for i in range(2000, 2500):
    add = host + ':' + str(i)
    try:
        s = requests.get(add)
        print(i)
        print(s.text)
        exit(1)
    except requests.RequestException:
        # Port closed / no HTTP service here; keep scanning.
        # NOTE: the original bare `except:` also caught the SystemExit raised
        # by exit(1), so the script never actually stopped on success.
        print(i)
|
# -*- coding: utf-8 -*-
import string
import collections
import porter
import pickle
# Translation table mapping every punctuation character (and newline) to a space.
replace_dictionary = str.maketrans(string.punctuation + '\n', ' ' * len(string.punctuation + '\n'))


def replace_punctuation(text):
    """Return text with all punctuation characters and newlines replaced by spaces."""
    # `global` removed: the table is only read here, never rebound.
    return text.translate(replace_dictionary)
def replace_chars(text, charstoreplace, replacer=' '):
    """Return text with every character in charstoreplace replaced by replacer.

    Bug fix: `string.maketrans` does not exist in Python 3 — the translation
    table must be built with `str.maketrans`.
    """
    rp = str.maketrans(charstoreplace, replacer * len(charstoreplace))
    return text.translate(rp)
def remove_multiple_space(text):
    """Collapse every run of whitespace to a single space, padding the
    result with exactly one leading and one trailing space."""
    return ' {} '.format(' '.join(text.split()))
def get_field(data, linestart):
    """Parse one field starting at data[linestart].

    The first line must look like '<FIELD> <content>'; following lines whose
    rstripped form begins with a space are continuation lines and are appended.

    Returns:
        (content, nline): the concatenated field content and the index of the
        first line that is NOT part of this field.

    Raises:
        ValueError: when the first line cannot be split into field + content.
    """
    nline = linestart
    line = data[nline]
    try:
        field, content = line.split(' ', 1)
    except ValueError:
        # Bug fix: the original passed sep='' to ValueError, which itself
        # raised a TypeError; build a plain message instead.
        raise ValueError('Not possible to extract the field from string: %r in line: %d'
                         % (line, nline + 1))
    buff = content
    nline += 1
    # Bug fix: guard the index — the original read data[nline] unconditionally
    # and raised IndexError when a field ran to the end of the file.
    while nline < len(data):
        stripedline = data[nline].rstrip()
        if stripedline == '' or stripedline[0] != ' ':
            break
        buff += stripedline
        nline += 1
    return buff, nline
def search(fields, data, linestart):
    """Advance from linestart to the first line whose leading token is in fields.

    NOTE(review): callers pass both lists (['AU']) and bare strings ('MJ');
    with a string argument `in` performs a substring test — behaviour kept.

    Returns the index of the matching line, or len(data) when none matches.
    """
    nline = linestart
    total = len(data)
    while nline < total:
        stripped = data[nline].rstrip()
        if stripped and stripped.split(' ', 1)[0] in fields:
            break
        nline += 1
    return nline
def read_docfile(fname):
    """Parse a CFC-style document file into {record_number: (rn, length, tf_dict)}.

    Each record starts with an 'RN' field; the AU/TI/MJ/MN fields and the
    abstract (AB or EX) are extracted, punctuation-stripped, lower-cased and
    Porter-stemmed before the term-frequency dictionary is built.
    """
    with open(fname, 'r', encoding='ISO-8859-1') as ftoindex:
        data = ftoindex.readlines()
    doc_dict = dict()
    nline = 0
    while nline < len(data):
        line = data[nline]
        if line.strip():
            try:
                field, content = line.split(' ', 1)
            except Exception:
                # Report the offending file/line before re-raising.
                print('File:', fname)
                print('Content:', line)
                raise
            if field == 'RN':
                try:
                    # Read Record Number 'RN'
                    rnstring, nline = get_field(data, nline)
                    # Read Author(s)
                    nline = search(['AU'], data, nline)
                    austring, nline = get_field(data, nline)
                    # Read Title
                    nline = search(['TI'], data, nline)
                    tistring, nline = get_field(data, nline)
                    # Read Major Subject
                    # NOTE(review): 'MJ'/'MN'/'PN' are passed as bare strings, so
                    # search()'s `in` becomes a substring test — confirm intended.
                    nline = search('MJ', data, nline)
                    mjstring, nline = get_field(data, nline)
                    # Read Minor Subject
                    nline = search('MN', data, nline)
                    mnstring, nline = get_field(data, nline)
                    # Read the Abstract (AB) or Extract (EX)
                    nline = search(['AB', 'EX'], data, nline)
                    abstring, nline = get_field(data, nline)
                    # Skip ahead to the next PN field (end of the record body).
                    nline = search('PN', data, nline)
                    rn = int(rnstring.strip())
                    # Normalize every field: strip punctuation, collapse spaces.
                    au = remove_multiple_space(replace_punctuation(austring)).strip()
                    ti = remove_multiple_space(replace_punctuation(tistring)).strip()
                    mn = remove_multiple_space(replace_punctuation(mnstring)).strip()
                    mj = remove_multiple_space(replace_punctuation(mjstring)).strip()
                    ab = remove_multiple_space(replace_punctuation(abstring)).strip()
                    doc_string = " ".join([au, ti, mn, mj, ab]).lower()
                    termslist = doc_string.split()
                    apply_stem(termslist)
                    # Value: (record number, token count, term-frequency dict).
                    doc_dict[rn] = (rn, len(doc_string.split()), calc_doctf(termslist))
                except Exception:
                    # Report where parsing failed before re-raising.
                    print('fname', fname, 'linha:(', nline, ')', line)
                    raise
        nline += 1
    return doc_dict
def read_queryfile(fname):
    """Parse a CFC query file into a list of (qn, query, nr, relevant_docs) tuples.

    Each query block is: QN (query number), QU (query text, possibly spanning
    several lines), NR (number of relevant documents), and the relevance list,
    which ends at the next QN field, a blank line, or end of file.
    """
    with open(fname, 'r') as ftoindex:
        data = ftoindex.readlines()
    queries = list()
    current_field = ''  # NOTE(review): never used after initialization
    nline = 0
    # NOTE(review): the outer loop only advances nline inside the QN branch;
    # a non-empty line that is not part of a QN block would loop forever —
    # presumably the input format guarantees back-to-back QN blocks.
    while nline < len(data):
        line = data[nline]
        if line.strip():
            field, content = line.split(' ', 1)
            # Read Record Number
            if field == 'QN':
                qn = int(content.strip())
                # Read Query String [QU]: accumulate until the NR field.
                nline += 1
                buff = ''
                line = data[nline]
                field, content = line.split(' ', 1)
                while field != 'NR':
                    buff += content
                    nline += 1
                    line = data[nline]
                    field, content = line.split(' ', 1)
                qu = remove_multiple_space(replace_punctuation(buff))
                # Read Number of relevant documents [NR]
                line = data[nline]
                field, content = line.split(' ', 1)
                nr = int(content.strip())
                # Read Relevant docs until the next QN (or EOF / empty line).
                nline += 1
                buff = ''
                line = data[nline]
                field, content = line.split(' ', 1)
                while field != 'QN':
                    buff += content
                    nline += 1
                    line = data[nline]
                    if line == '\n' or nline + 1 >= len(data):  # Fix EOF AND EMPTY LINE ERROR
                        nline += 1
                        break
                    field, content = line.split(' ', 1)
                rd_docstring = remove_multiple_space(replace_punctuation(buff))
                # The relevance list comes in (doc_id, score) pairs; keep the ids.
                rd = docstring_split(rd_docstring, 2, returned=lambda docS: int(docS[0]))
                termslist = qu.split()
                apply_stem(termslist)
                qu = ' '.join(termslist)
                queries.append((qn, qu.lower(), nr, rd,))
    return queries
def docstring_split(docstring, n, returned=lambda x: x):
    """Split docstring on whitespace and group the tokens into chunks of n,
    mapping each chunk through the `returned` callable."""
    tokens = docstring.split()
    return [returned(tokens[i:i + n]) for i in range(0, len(tokens), n)]
def print_idf(index):
    """For each term print 'term: <idf>' followed by its document list."""
    for term, entry in index.items():
        print('{}:'.format(term), entry.idf)
        print(entry.doclist)
def apply_stem(termslist):
    """Porter-stem every term in termslist, modifying the list in place."""
    termslist[:] = [porter.stem(term) for term in termslist]
#vocabulary = dict()
def print_doc_dict(doc_dict):
    """Print every document tuple stored in doc_dict, one per line."""
    for entry in doc_dict.values():
        print(entry)
def calc_doctf(doc_stringlist):
    """Build a term-frequency dict: term -> number of occurrences."""
    tf = {}
    for term in doc_stringlist:
        tf[term] = tf.get(term, 0) + 1
    return tf
def save_index(cfcindex, findexname):
    """Persist the index object to the file findexname with pickle (overwrites)."""
    with open(findexname, "wb") as out:
        pickle.dump(cfcindex, out)
def load_index(findexname):
    """Load and return a pickled index previously written by save_index."""
    with open(findexname, "rb") as src:
        return pickle.load(src)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 28 10:18:34 2018
@author: AnsonHsu
"""
'''Given a dictionary such as:'''
# Renamed from `dict`: shadowing the builtin breaks any later dict() call.
extensions = {'Python': '.py', 'C++': '.cpp', 'Java': '.java'}

'''save dictionary as csv file'''
import csv
# newline='' is what the csv docs require for writer files (prevents doubled
# row separators on Windows); `with` closes the handle the original leaked.
with open("output.csv", "w", newline='') as csv_file:
    w = csv.writer(csv_file)
    for key, val in extensions.items():
        w.writerow([key, val])

'''save dictionary to json file'''
import json
# Bug fix: the original rebound the name `json` to the dumped string,
# clobbering the module for the rest of the program.
json_text = json.dumps(extensions)
with open("dict.json", "w") as f:
    f.write(json_text)

'''save dictionary to text file (raw, .txt)'''
with open("dict.txt", "w") as f:
    f.write(str(extensions))

'''save dictionary to a pickle file (.pkl)'''
import pickle
with open("file.pkl", "wb") as f:
    pickle.dump(extensions, f)

# reload a file to a variable
with open('file.pkl', 'rb') as file:
    dict_pkl = pickle.load(file)  # extract with pickle
print('dict_pkl = ', dict_pkl)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from behaviordisc import cp_detection_KSWIN, tp_detection, cp_detection_PELT, subseqeuence_clustering
import re
from statsmodels.tsa.stattools import adfuller, acf
from scipy.fftpack import fft, fftfreq
from math import ceil
# Expected seasonal periods (in time steps) per time-window granularity —
# e.g. hourly data may repeat daily (24), weekly (168) or ~monthly (672).
# Later can be set by the user.
EXPECTED_PERIODS = {'1H': [24, 168, 672],
                    '8H': [3, 21, 84],
                    '1D': [7, 28, 352],
                    '7D': [4, 48]}
def make_timed_data(data, start_tp, time_window):
    """Convert a DataFrame into per-column lists of {'x': ms_timestamp, 'y': value}.

    :param data: DataFrame whose rows are evenly spaced observations
    :param start_tp: timestamp of the first row (anything pd.date_range accepts)
    :param time_window: pandas frequency string, e.g. '1H'
    :return: {column_name: [{'x': epoch_millis, 'y': value}, ...]}
    """
    stamps = pd.date_range(start=start_tp, periods=len(data), freq=time_window)
    millis = [ts.timestamp() * 1000 for ts in stamps]
    return {
        col: [{'x': x, 'y': y} for x, y in zip(millis, values)]
        for col, values in data.to_dict('list').items()
    }
# Test for multivariate ts
def stationary_test(data):  # df input
    """
    Tests every column for stationarity using the ADF test.

    Returns False as soon as one column fails (p-value > 0.05); True otherwise.
    Columns for which adfuller itself errors (e.g. a constant series) are
    skipped, preserving the original best-effort behaviour.

    :param data: df object, i.e. sd_log.data
    :return: bool
    """
    for feat in data.columns:
        try:
            result = adfuller(data[feat], autolag='AIC')
            if result[1] > 0.05:
                print(str(feat) + ' is not stationary')
                return False
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt and SystemExit.
            pass
    return True
def make_stationary(data, count=0):  # df input
    """
    Makes the data stationary by repeated differencing.

    :param data: df object, i.e. sd_log.data
    :param count: starting order of differencing
    :return: (stationary data as df, order of differencing)
    """
    current, order = data, count
    while not stationary_test(current):
        current = current.diff().dropna()
        order += 1
    return current, order
def get_period(tw, n_weeks):
    """Return the number of time steps covering n_weeks for time window tw.

    tw is one of '1H', '8H', '1D' (steps per week times n_weeks), or '7D'
    (fixed period of 4); any other value yields None.
    """
    steps_per_week = {'1H': 168, '8H': 21, '1D': 7}
    if tw == '7D':
        return 4
    if tw in steps_per_week:
        return n_weeks * steps_per_week[tw]
    return None
class Sdl:
    """Time-windowed system-dynamics log (sd_log) loaded from a CSV file.

    Derives the log's time window and aspect from the first column name,
    resolves aspect-specific column-name shortcuts, tests stationarity and
    estimates a seasonal period via FFT + ACF.
    """

    def __init__(self, path, start_tp=0):
        # Two independent reads so `data` can be processed while `raw_data`
        # stays pristine.
        self.data = pd.read_csv(path)
        self.raw_data = pd.read_csv(path)
        self.start_tp = start_tp
        self.series = self.data.to_numpy()
        self.columns = self.data.columns
        # Column names are assumed to end with '<n><unit>', e.g. 'x_1H'.
        self.tw = re.findall(r'\d+[A-Z]', self.columns[0])[0]  # time window of sd_log
        self.timed_data = make_timed_data(self.data, start_tp=start_tp, time_window=self.tw)
        self.aspect = self.columns[0].split('_')[0]  # column name indicates which aspect
        # variables as string (filled by load_data() depending on the aspect)
        self.arrival_rate = None
        self.finish_rate = None
        self.num_unique_resource = None
        self.process_active_time = None
        self.service_time = None
        # TODO
        self.time_in_process = None
        self.waiting_time = None
        self.num_in_process = None
        self.avg_arrival_rate = None
        self.avg_duration = None
        self.whole_duration = None
        self.avg_waiting = None
        self.whole_waiting = None
        self.waiting_events = None
        self.finished_events = None
        self.idle_time = None
        self.inprocess_events = None
        self.unique_resources = None
        self.engaged_resources = None
        self.load_data()
        self.isStationary = stationary_test(self.data)
        self.period = self.estimate_period()
        # NOTE(review): make_stationary returns a (data, order) tuple, so
        # data_diff holds that tuple, not a bare DataFrame — confirm intended.
        self.data_diff = make_stationary(self.data)
        # TODO
        self.relations = {}
        self.changepoints = {}
        self.turningpoints = {}
        # self.calc_turning_points()
        self.behavior = {}

    def load_data(self):
        """Resolve aspect-specific column names into the attribute shortcuts.

        Raises IndexError when an expected column is missing for the aspect.
        """
        aspect = self.aspect
        if aspect.lower() == 'general' or aspect.lower() == 'organizational':
            self.arrival_rate = [s for s in self.columns if "arrival" in s.lower()][0]
            self.finish_rate = [s for s in self.columns if "finish" in s.lower()][0]
            self.num_unique_resource = [s for s in self.columns if "resource" in s.lower()][0]
            self.process_active_time = [s for s in self.columns if "active" in s.lower()][0]
            self.service_time = [s for s in self.columns if "service" in s.lower()][0]
            # TODO: these two rely on a fixed column position, not a keyword.
            self.time_in_process = [self.columns[5]][0]
            self.waiting_time = [s for s in self.columns if "waiting" in s.lower()][0]
            self.num_in_process = [self.columns[7]][0]
        if aspect.lower() == 'act':
            self.avg_arrival_rate = [s for s in self.columns if "avg_arrival" in s.lower()][0]
            self.avg_duration = [s for s in self.columns if "avg_duration" in s.lower()][0]
            self.whole_duration = [s for s in self.columns if "whole_duration" in s.lower()][0]
            self.avg_waiting = [s for s in self.columns if "avgwaiting" in s.lower()][0]
            self.whole_waiting = [s for s in self.columns if "wholewaiting" in s.lower()][0]
            self.waiting_events = [s for s in self.columns if "waiting_events" in s.lower()][0]
            self.finished_events = [s for s in self.columns if "finished_events" in s.lower()][0]
            self.idle_time = [s for s in self.columns if "idle_time" in s.lower()][0]
            self.inprocess_events = [s for s in self.columns if "inprocess_events" in s.lower()][0]
            self.unique_resources = [s for s in self.columns if "unique_resources" in s.lower()][0]
            self.engaged_resources = [s for s in self.columns if "engaged_resources" in s.lower()][0]
        if aspect.lower() == 'res':
            # Same as 'act' but without unique/engaged resource columns.
            self.avg_arrival_rate = [s for s in self.columns if "avg_arrival" in s.lower()][0]
            self.avg_duration = [s for s in self.columns if "avg_duration" in s.lower()][0]
            self.whole_duration = [s for s in self.columns if "whole_duration" in s.lower()][0]
            self.avg_waiting = [s for s in self.columns if "avgwaiting" in s.lower()][0]
            self.whole_waiting = [s for s in self.columns if "wholewaiting" in s.lower()][0]
            self.waiting_events = [s for s in self.columns if "waiting_events" in s.lower()][0]
            self.finished_events = [s for s in self.columns if "finished_events" in s.lower()][0]
            self.idle_time = [s for s in self.columns if "idle_time" in s.lower()][0]
            self.inprocess_events = [s for s in self.columns if "inprocess_events" in s.lower()][0]

    def preprocess_rawData(self):
        # TODO, currently expecting Active (preprocessed) sdLog
        # NOTE(review): `self.rawData` is never defined (the attribute is
        # `self.raw_data`), so this method raises AttributeError if called.
        data = self.rawData
        # NOTE(review): fillna(method='pad') is deprecated in newer pandas;
        # presumably equivalent to .ffill() — verify before upgrading.
        data = data.fillna(method='pad')  # filling missing values with previous ones
        return data

    # returns points as numpy array
    def get_points(self, col):
        return np.array(self.data[col])

    # plots all aspects into one figure of stacked subplots
    def plot_all(self, title='All aspects plotted:', outputpath=None):
        self.data.plot(subplots=True, xlabel="index",
                       figsize=(5, 10), grid=True)
        if outputpath:
            plt.savefig(outputpath, bbox_inches='tight')
        plt.show()

    def plot_all_with_cp(self, outputpath=None):
        """Plot each column with PELT change points shaded red/green per segment."""
        ax = self.data.plot(subplots=True, xlabel="time steps",
                            title='Plot for all single aspects along with changepoints',
                            figsize=(5, 10), grid=True)
        for i, col in zip(ax, self.columns):
            # detected = cp_detection_KSWIN(self.get_points(col), period=self.tw)
            detected = cp_detection_PELT(self.get_points(col))
            if not detected:
                continue
            # Shade alternating segments between consecutive change points.
            i.axvspan(0, detected[0], label="Change Point", color="red", alpha=0.3)
            for s in range(0, len(detected) - 2, 2):
                i.axvspan(detected[s], detected[s + 1], label="Change Point", color="green", alpha=0.3)
                i.axvspan(detected[s + 1], detected[s + 2], label="Change Point", color="red", alpha=0.3)
            i.axvspan(detected[-1], len(self.data), label="Change Point", color="green", alpha=0.3)
        # plt.title('Plot for all single aspects along with changepoints')
        if outputpath:
            plt.savefig(outputpath, bbox_inches='tight', dpi=300)
        plt.show()

    def calc_turning_points(self):
        """Detect turning points for every column using the estimated period."""
        for feat in self.columns:
            series = self.data[feat]
            # period = get_period(self.tw, n_weeks=1)
            tps = tp_detection(series, period=self.period)
            self.turningpoints[feat] = tps

    def estimate_period(self):  # estimates period based on arrival rate in seasonal pattern
        """
        1) estimates period by computing FFT (periodogram) and find Time Periods within the Top 3 Highest Power
        2) compare period to expected ones, if they match compute acf to check significance
        """
        # Uses the first column only (assumed to be the arrival rate).
        series = self.get_points(self.columns[0])
        if not self.isStationary:
            series = np.diff(series)
        # get top 3 seasons
        no_of_seasons = 3
        series_fft = fft(series)
        power = np.abs(series_fft)
        sample_freq = fftfreq(series_fft.size)
        # Find the peak frequency: keep only positive frequencies.
        pos_mask = np.where(sample_freq > 0)
        freqs = sample_freq[pos_mask]
        powers = power[pos_mask]
        # find top frequencies and corresponding time periods for seasonal pattern
        top_powers = np.argpartition(powers, -no_of_seasons)[-no_of_seasons:]
        time_periods_from_fft = 1 / freqs[top_powers]
        time_periods = time_periods_from_fft.astype(int)
        print('Recommended time periods: ' + str(time_periods))
        time_lags_expected = EXPECTED_PERIODS[self.tw]
        # One of the seasonality returned from FFT should be within range of Expected time period
        for time_lag in time_lags_expected:
            nearest_time_lag = time_periods.flat[np.abs(time_periods - time_lag).argmin()]
            # Using 5% for range comparison
            # tmp = range(time_lag - ceil(0.05 * time_lag), time_lag + ceil(0.05 * time_lag))
            if nearest_time_lag in range(
                    time_lag - ceil(0.05 * time_lag),
                    time_lag + ceil(0.05 * time_lag)):
                # Check ACF value with lags identified from expected
                acf_score_exp = acf(series, nlags=time_lag)[-1]
                # Check ACF value with lags identified from fft
                acf_score_fft = acf(series, nlags=nearest_time_lag)[-1]
                # Check ACF is significant or not (2/sqrt(N) significance band).
                if acf_score_exp >= 2 / np.sqrt(len(series)):
                    # ACF is significant and FFT identifies seasonality
                    print('Metrics is seasonal by expected period ' + str(time_lag))
                    return time_lag
                elif acf_score_fft >= 2 / np.sqrt(len(series)):
                    # ACF is significant and FFT identifies seasonality
                    print('Metrics is seasonal by recommended period ' + str(nearest_time_lag))
                    return nearest_time_lag
                else:
                    print('ACF value of expected period is not significant')
            else:
                print('Seasonality could not be identified')
        return None
# def summary(self):
|
#!/usr/bin/env python
from distutils.core import setup
version = '0.7.3'

setup(name='Hillup',
      version=version,
      description='Retrieves and prepares digital elevation data for rendering as map tiles.',
      author='Michal Migurski',
      author_email='mike@stamen.com',
      url='https://github.com/migurski/DEM-Tools',
      requires=['ModestMaps', 'PIL', 'numpy'],
      packages=['Hillup', 'Hillup.data'],
      scripts=['hillup-seed.py'],
      # Bug fix: the string contained no %-placeholders, so the trailing
      # `% locals()` was a no-op that would crash the build if a literal %
      # were ever added to the URL.
      download_url='https://github.com/downloads/migurski',
      license='BSD')
|
# Start at square 1 (index 0) and repeatedly press the button: from square i
# you jump to square A[i]. Count the presses needed to reach square 2
# (index 1); print -1 if a square is revisited first (an endless loop).
N = int(input())
A = [int(input()) for _ in range(N)]
A = [a - 1 for a in A]            # convert destinations to 0-based indices
L = [0 for _ in range(N)]         # visited markers
L[0] = 1
cnt = 0
now = 0
while True:
    if now == 1:                  # reached square 2
        break
    now = A[now]
    if L[now] == 0:
        L[now] = 1
        cnt += 1
    else:
        # Already visited: we are cycling and will never reach square 2.
        cnt = -1
        break
print(cnt)
|
#!/usr/bin/python3
# -*- coding:utf8 -*-
# Author : Arthur Yan
# Date : 2019-02-16 17:00:03
# Description : 斐波那契数列
# F(n) = F(n-1) + F(n-2)
# 1, 1, 2, 3, 5, 8 ......
def fib(num):
    """Return the num-th Fibonacci number (1-indexed: fib(1) == fib(2) == 1).

    Rewritten iteratively: the original double recursion was exponential in
    num, and recursed forever for num < 1.

    Raises:
        ValueError: if num < 1.
    """
    if num < 1:
        raise ValueError('num must be >= 1')
    a, b = 1, 1
    for _ in range(num - 2):
        a, b = b, a + b
    return b


print(fib(20))
|
def f():
    # `a` is resolved at call time from the enclosing (module) scope, so this
    # prints whatever `a` holds when f() runs — here 1, not 0.
    print(a)


a = 0
a = 1
f()
|
# Viral Advertising (HackerRank): 5 people see the ad on day 1; each day
# floor(half) of those shown like it, and every liker shares it with 3 more.
def _cumulative_likes(days):
    """Return the cumulative number of likes after `days` days."""
    shared, liked = 5, 0
    for _ in range(days):
        likes = shared // 2    # half of the people shown like the ad
        liked += likes
        shared = likes * 3     # each liker shares with 3 friends
    return liked


# The original duplicated the whole update for day 0, although with ppl
# starting at 0 it is identical to the general step — folded into one loop.
n = int(input())
print(_cumulative_likes(n))
|
/home/ajitkumar/anaconda3/lib/python3.7/__future__.py |
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import BatchNumberGroup,BatchNumberOid
class ExamAdmin(admin.ModelAdmin):
    """Admin list configuration for BatchNumberGroup rows."""
    list_display = ('batch_number', 'group', 'created',)


admin.site.register(BatchNumberGroup, ExamAdmin)


class ExamPaperAdmin(admin.ModelAdmin):
    """Admin list configuration for BatchNumberOid rows."""
    list_display = ('batch_number', 'out_sid', 'number', 'group', 'created', 'status',)


admin.site.register(BatchNumberOid, ExamPaperAdmin)
|
"""
4. Реализовать возможность переустановки значения цены товара.
Необходимо, чтобы и родительский, и дочерний классы получили новое значение цены.
Следует проверить это, вызвав соответствующий метод родительского класса
и функцию дочернего (функция, отвечающая за отображение информации о товаре в одной строке).
"""
class ItemDiscount:
    """A product with a read-only name and a mutable price."""

    def __init__(self, name, price):
        self.__name = name
        self.__price = price

    @property
    def name(self):
        """The product name (read-only)."""
        return self.__name

    @property
    def price(self):
        """The current product price."""
        return self.__price

    def set_price(self, price):
        """Replace the stored price; subclasses see it via the `price` property."""
        self.__price = price
class ItemDiscountReport(ItemDiscount):
    """Child class that renders the product info on one line."""

    def get_parent_data(self):
        # Reads name/price through the inherited properties, so a price set via
        # the parent's set_price() is reflected here.
        return f'Товар {self.name}, цена {self.price}'
# Demo: set a new price through the parent's method and show that the child's
# one-line report sees the updated value.
item = ItemDiscountReport('Диван', 11000)
item.set_price(12000)
print(item.get_parent_data())
|
#from setuptools import setup, find_packages
from setuptools import *
from pymanager import version
description = 'A process manager in Python.'
# Bug fix: read the long description inside a context manager — the original
# open('README.rst').read() leaked the file handle.
with open('README.rst') as readme:
    long_description = readme.read()

setup(
    name='pymanager',
    version=version,
    description=description,
    long_description=long_description,
    url='https://github.com/baliame/pymanager',
    author='Baliame',
    author_email='akos.toth@cheppers.com',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Software Development :: Testing',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    keywords='process manager management',
    py_modules=["pymanager"],
    packages=find_packages(),
    install_requires=[
        'bottle', 'requests'
    ],
    entry_points={
        'console_scripts': [
            'pymanager = pymanager:main'
        ]
    },
)
|
#!/usr/bin/python3
"""module of from_json_string"""
import json
def from_json_string(my_str):
    """Deserialize a JSON document string into the matching Python object."""
    return json.loads(my_str)
|
from flask_admin.contrib.sqla import ModelView
from flask_login import current_user, login_required
from flask import url_for, redirect, flash, render_template, current_app
class _AdminAccessMixin:
    """Access control shared by admin-only views.

    Consolidates the is_accessible/inaccessible_callback pair that was
    copy-pasted verbatim into four view classes.
    """

    def is_accessible(self):
        return current_user.is_authenticated and current_user.is_admin()

    def inaccessible_callback(self, name):
        # Anonymous users go through the normal login flow.
        if not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()
        # Authenticated but unauthorized users get a 403 page.
        return render_template('errors/403.html')


class _DirectorAccessMixin:
    """Access control shared by Building-Director-level views.

    Consolidates the is_accessible/inaccessible_callback pair that was
    copy-pasted verbatim into four view classes.
    """

    def is_accessible(self):
        return current_user.is_authenticated and current_user.allowed('Building Director')

    def inaccessible_callback(self, name):
        # Anonymous users go through the normal login flow.
        if not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()
        # Authenticated but unauthorized users get a 403 page.
        return render_template('errors/403.html')


class EmployeeView(_AdminAccessMixin, ModelView):
    form_columns = ['hired_date', 'active', 'email',
                    'first_name', 'last_name', 'password', 'access', 'hall']
    column_searchable_list = ('first_name', 'last_name', 'email')
    form_choices = {
        'access': [
            ('DR', 'DR'),
            ('Building Director', 'Building Director'),
            ('Admin', 'Admin'),
        ]
    }


class StudentView(_DirectorAccessMixin, ModelView):
    form_columns = ['student_id', 'first_name', 'last_name', 'email',
                    'room_number', 'subscribed', 'phone_numbers', 'hall']
    column_searchable_list = ('first_name', 'last_name', 'email',
                              'room_number', 'student_id',)


class HallView(_AdminAccessMixin, ModelView):
    form_columns = ['name', 'building_code']
    column_searchable_list = ('name', 'building_code')


class PackageView(_DirectorAccessMixin, ModelView):
    form_columns = ['status', 'description', 'delivery_date',
                    'picked_up_date', 'perishable', 'owner', 'hall', 'inputted', 'removed']
    column_searchable_list = ('description', 'delivery_date', 'picked_up_date')


class MessageView(_AdminAccessMixin, ModelView):
    form_columns = ['content']


class SentMailView(_DirectorAccessMixin, ModelView):
    form_columns = ['sent_date', 'cc_recipients',
                    'employee', 'message', 'student']


class LoginView(_DirectorAccessMixin, ModelView):
    form_columns = ['login_date', 'logout_date', 'employee']


class PhoneView(_AdminAccessMixin, ModelView):
    form_columns = ['phone_number', 'assigned']
|
# -*- coding: utf-8 -*-
# @Author : 赵永健
# @Time : 2020/2/24 15:22
# 4. Switch back to the administrator, assign the test enterprise to the test
# account (edit the test account) -- user extended info
from time import sleep
import pykeyboard
from pymouse import PyMouse
from selenium.webdriver import ActionChains
from process.commonProc import commonProc
from public import excel
from util.webdr import webdr
# Shared helper singletons used by every procedure in this module.
wd = webdr()        # low-level webdriver helper
com = commonProc()  # common UI-interaction helpers
ex = excel          # xpath/config lookup module
class initialBaseProc(object):
    """Selenium procedures for step 4: switch back to the admin account and
    fill in the test account's extended enterprise info."""

    # Switch back to the administrator account
    def changeMaster(self, driver):
        com.tapWeb(driver)
        wd.clickByXpath(driver, ex.xpathCon('setInf'))
        wd.aboveByXpath(driver, ex.xpathCon('accountChange'))
        com.forclick(driver, ex.xpathCon('masterAc'))
        com.waitAmoment()

    # Log out of the current account
    def quitAcc(self, driver):
        com.tapWeb(driver)
        wd.clickByXpath(driver, '//*[@id="x-header-app"]/div[2]/div/ul/li[6]/div')
        wd.clickByXpath(driver, '//*[@id="li-sub"]')
        wd.clickByXpath(driver, '/html/body/div[2]/div/div[3]/button[2]')
        com.waitAmoment()

    # Open the org-structure console, click Edit, then click "modify info"
    def intoOrg(self, driver):
        driver.get('https://www.51safety.com.cn/enterprise/orgManage')
        com.waitAmoment()
        if com.findItem(driver, '控制台') == False:
            com.messageShow('未进入控制台!')
        else:
            wd.clickByXpath(driver, '/html/body/div/section/div/div/div[2]/section/section[2]/section/div[1]/div[3]/table/tbody/tr[1]/td[7]/div/div/a[3]')
            com.tapWeb(driver)
            # wd.clickByXpath(driver,'')
            com.clickOnText(driver, '修改信息')
            com.waitAmoment()
            if com.findItem(driver, '测试扩展信息') == False:
                com.messageShow('未进入测试扩展信息页面!')

    # Fill in the test extended-info form
    def teInfo(self, driver):
        # user type
        com.dropDownBox(driver, ex.xpathCon('customerType'), '企业用户')
        com.waitAmoment()
        # company name
        com.dropDownBox(driver, ex.xpathCon('unitName'), '测试企业')
        # department
        wd.enterByXpath(driver, ex.xpathCon('departments'), '产品部')
        # position
        wd.enterByXpath(driver, ex.xpathCon('station'), '测试组')
        # ID document type
        com.dropDownBox(driver, ex.xpathCon('idType'), '居民身份证')
        # ID number
        wd.enterByXpath(driver, ex.xpathCon('idNumber'), '110')
        # employee number
        wd.enterByXpath(driver, ex.xpathCon('staffNumber'), '110')
        # whether the employee is a special-operations worker
        wd.clickByXpath(driver, ex.xpathCon('specialOperator'))
        com.keyBoard()
        # registered residence
        wd.enterByXpath(driver, ex.xpathCon('domicileLocation'), '南京市')
        # graduated from
        wd.enterByXpath(driver, ex.xpathCon('school'), '南京大学')
        # e-learning system role
        com.dropDownBox(driver, ex.xpathCon('onlineLearn'), '学员')
        # click the confirm button
        wd.clickByXpath(driver, '//*[@id="J_body"]/div[2]/div[3]/section/footer/div/button[2]')
        sleep(5)
    #
    # # negative case (currently disabled): position left empty
    # def notNull(self,driver):
    #     wd.clearByXpath(driver, ex.xpathCon('station'))
    #     wd.clickByXpath(driver, '//*[@id="J_body"]/div[2]/div[3]/section/footer/div/button[2]')
    #     com.waitAmoment()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2019 shady <shady@MrRobot.local>
#
import logging
import time
import asyncio
from sanic import Blueprint
from sanic.response import json
from config.server import tracing
from models import *
# Blueprint mounted under /orders; seckill order endpoints.
seckill_bp = Blueprint("order", url_prefix="orders")


@seckill_bp.get("/", name="list_order")
async def list_order(request):
    """Return every row of the `order` table as JSON."""
    async with request.app.db.acquire(request) as cur:
        orders = await cur.fetchall(f"SELECT * FROM `order`;")
        return json(orders)
@seckill_bp.get("/<id:int>", name="get_order")
@tracing.trace()
async def get_order(request, id):
async with request.app.db.acquire(request) as cur:
order = await cur.fetchone(f"SELECT * FROM `order` WHERE id={id}")
return json(order)
|
from django.contrib.auth.admin import UserAdmin
from django.contrib.admin import register
from .forms import AccountCreationForm, AccountChangeForm
from .models import Account
@register(Account)
class AccountAdmin(UserAdmin):
    """Admin model for administration Account model. Configures the admin interface."""
    add_form = AccountCreationForm   # form used on the "add user" page
    form = AccountChangeForm         # form used on the change page
    model = Account
    # Columns shown in the user list and the available list filters.
    list_display = ('email', 'username', 'is_staff', 'is_active')
    list_filter = ('email', 'username', 'is_staff', 'is_active')
    # Field layout for the change page.
    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        ('Permissions', {'fields': ('is_staff', 'is_active')})
    )
    # Field layout for the add page (password entered twice).
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'username', 'password1', 'password2', 'is_staff', 'is_active')}
        ),
    )
    search_fields = ('email',)
    ordering = ('email',)
|
class Clasetotal:
    """Demo class exposing a simple counting generator."""

    def firstn(self, n):
        """Yield the integers 0, 1, ..., n - 1."""
        yield from range(n)
class Clase2:
    """Demo of a generator defined on a nested class."""

    class Clase3:
        def firstn(self, n):
            """Yield n**2, n**2 - 1, ..., counting down to just above n."""
            yield from range(n ** 2, n, -1)
def primera():
    # Dead-code demo: segunda is only defined, never called, so the inner
    # generator qsfn never runs.
    def segunda():
        def qsfn(n):
            #res = sum(Clasetotal.firstn(n))**2
            #print(res)
            num = 0
            while num < n:
                yield num
                num += 1
def generator():
    # One-shot generator that yields a single greeting.
    yield 'hola'


next(generator())
g = generator()
next(g)
# different ways of invoking a generator method
# 1st way:
next(Clasetotal().firstn(5))
# 2nd way:
clase = Clasetotal()
next(clase.firstn(5))
# 3rd way:
clase = Clasetotal()
generator = clase.firstn(5)  # NOTE: rebinds the name `generator` defined above
next(generator)
# 4th way
generator = Clasetotal().firstn(5)
next(generator)
###########################
# consuming the generator with a for loop
clase = Clasetotal()
for item in clase.firstn(5):
    print(item)
# calling the generator on the nested class
next(Clase2().Clase3().firstn(5))
nueva_clase = Clase2()
next(nueva_clase.Clase3().firstn(5))
nueva_clase3 = Clase2().Clase3().firstn(5)
next(nueva_clase3)


def f():
    # Plain function: calling it returns the string directly, NOT a generator.
    return 'hola'


generator = f()
print(generator)
|
import numpy as np
import pandas as pd
import tensorflow as tf
import tensorflow_addons as tfa
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings('ignore')
from sklearn.model_selection import train_test_split, StratifiedKFold
from sklearn.utils import shuffle
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.layers import Input, Dense, Conv2D, MaxPooling2D, Flatten, Dropout, BatchNormalization, Activation
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
from keras.optimizers import Adam
from tensorflow_addons.optimizers import RectifiedAdam, Lookahead
# 1. Data
# Resize-Image
image_size = [28, 28]            # original image size in the CSVs
resized_image_size = [256, 256]  # NOTE(review): appears unused — resizing below uses 64x64
tr_data = pd.read_csv('./dacon2/data/train.csv', index_col=0).values
ts_data = pd.read_csv("./dacon2/data/test.csv", index_col=0).values
print(tr_data.shape)
print(ts_data.shape)
# Train_data: column 0 is the label; columns 2+ are the 28x28 pixel values.
tr_X = tf.convert_to_tensor(tr_data[:, 2:], dtype=tf.float32)
tr_Y = tf.squeeze(tf.convert_to_tensor(tr_data[:, 0], dtype=tf.int32))
resize = tf.reshape(tr_X, (-1, 28, 28, 1))
resize_train = tf.keras.layers.experimental.preprocessing.Resizing(64, 64)(resize)
x_train = resize_train.numpy()
y_train = to_categorical(tr_Y.numpy())
print(x_train.shape)
print(y_train.shape)
# Test_Data: resized in 10 chunks to limit peak memory usage.
ts_X = tf.convert_to_tensor(ts_data[:int(len(ts_data) / 10)][:, 1:], dtype=tf.float32)
resize = tf.reshape(ts_X, (-1, 28, 28, 1))
resize_test = tf.keras.layers.experimental.preprocessing.Resizing(64, 64)(resize)
print(resize_test.shape)
print(type(resize_test))
print(type(resize_test.numpy()))
x_test = resize_test.numpy()
for i in range(1, 10):
    ts_X_ = tf.convert_to_tensor(ts_data[int(len(ts_data) / 10) * i:int(len(ts_data) / 10) * (i + 1)][:, 1:], dtype=tf.float32)
    resize = tf.reshape(ts_X_, (-1, 28, 28, 1))
    resize_test = tf.keras.layers.experimental.preprocessing.Resizing(64, 64)(resize)
    x_test = np.append(x_test, resize_test.numpy(), axis=0)
print(type(x_test))
print(x_test.shape)
# Light augmentation: shift images by at most 1 pixel in each direction.
datagen = ImageDataGenerator(
    width_shift_range=(-1, 1),
    height_shift_range=(-1, 1))
datagen2 = ImageDataGenerator()  # un-augmented generator
steps = 40
skfold = StratifiedKFold(n_splits=steps, random_state=42, shuffle=True)
def cnn_model(x_train):
    """Build the MNIST-style digit classifier (Keras functional API).

    x_train: training array; only its per-sample shape (x_train.shape[1:])
        is used to size the Input layer.
    Returns an uncompiled Model with a 10-way softmax output.
    """
    inputs = Input(shape=x_train.shape[1:])
    # Stacked Conv->BatchNorm blocks; Dropout/MaxPooling between stages.
    layer = Conv2D(16, 3, padding='same', strides=1, activation='relu')(inputs)
    layer = BatchNormalization()(layer)
    layer = Dropout(0.3)(layer)
    layer = Conv2D(32, 3, padding='same', strides=1, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = Conv2D(32, 5, padding='same', strides=1, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = Conv2D(32, 5, padding='same', strides=1, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = Conv2D(32, 5, padding='same', strides=1, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = MaxPooling2D(3)(layer)
    layer = Dropout(0.3)(layer)
    layer = Conv2D(64, 3, padding='same', strides=1, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = Conv2D(64, 5, padding='same', strides=1, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = MaxPooling2D(3)(layer)
    layer = Dropout(0.3)(layer)
    # Classification head.
    layer = Flatten()(layer)
    layer = Dense(128, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = Dropout(0.3)(layer)
    layer = Dense(64, activation='relu')(layer)
    layer = BatchNormalization()(layer)
    layer = Dropout(0.3)(layer)
    outputs = Dense(10, activation='softmax')(layer)
    model = Model(inputs=inputs, outputs=outputs)
    return model
def get_opt(init_lr = 3e-3):
    """Build the Ranger optimizer: RectifiedAdam wrapped in Lookahead.

    init_lr: initial learning rate for RAdam.
    """
    # BUG FIX: the original referenced `tfa.optimizers.*`, but this file only
    # imports RectifiedAdam/Lookahead directly (never `tensorflow_addons as
    # tfa`), so calling it raised NameError. Use the imported names.
    radam = RectifiedAdam(
        lr = init_lr, warmup_proportion = 0, min_lr = 1e-5, weight_decay = 1e-4)
    ranger = Lookahead(radam)
    return ranger
val_acc = []
for i, (train_idx, val_idx) in enumerate(skfold.split(x_train, y_train.argmax(1))):
x_train_, x_val_ = x_train[train_idx], x_train[val_idx]
y_train_, y_val_ = y_train[train_idx], y_train[val_idx]
model = cnn_model(x_train)
filepath = './dacon2/data/vision_model_{}.hdf5'.format(i)
es = EarlyStopping(monitor='val_loss', patience=160, mode='auto')
cp = ModelCheckpoint(filepath=filepath, monitor='val_loss', save_best_only=True, mode='auto')
lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=100)
model.compile(loss='categorical_crossentropy', optimizer=get_opt(), metrics=['accuracy'])
hist = model.fit_generator(datagen.flow(x_train_, y_train_, batch_size=32), epochs=2000,
validation_data=(datagen.flow(x_val_, y_val_)), verbose=2, callbacks=[es, cp, lr])
val_acc.append(max(hist.history['val_accuracy']))
print('{}\'s CV End'.format(i+1))
# 3. 예측
# best model select
print(val_acc)
i_max = np.argmax(val_acc)
print('Best Model is {}\'s'.format(i_max))
model = load_model('./dacon2/data/vision_model_{}.hdf5'.format(i_max))
submission = pd.read_csv('./dacon2/data/submission.csv', index_col=0, header=0)
submission['digit'] = np.argmax(model.predict(x_test), axis=1)
print(submission)
submission.to_csv('./dacon2/data/submission_model_best.csv')
# KFold 값 평균내기
submission2 = pd.read_csv('./dacon2/data/submission.csv', index_col=0, header=0)
result = 0
for i in range(steps):
model = load_model('./dacon2/data/vision_model_{}.hdf5'.format(i))
result += model.predict_generator(datagen2.flow(x_test, shuffle=False)) / steps
submission2['digit'] = result.argmax(1)
print(submission2)
submission2.to_csv('./dacon2/data/submission_model_mean.csv')
|
# coding: cp949
while True: num=int(input("홀수를 입력하세요(0<-종료): ")) point = 1 #별표 첫번d째 empty=int(num/2) if num == 0 : break elif num % 2 == 0 : continue else:
while point <= num:
print(" "*empty+"*"*point)
point=int(point+2)
empty=int(empty-1)
|
from rest_framework import serializers
from .models import *
from django.contrib import auth
from rest_framework.exceptions import AuthenticationFailed
# from .Scheduler import Schedules_operation
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework.response import Response
from Company_Details.models import User
class ChangePasswordSerializer(serializers.Serializer):
    """Validate the old/new password pair of a password-change request."""
    # NOTE(review): `model` is not a standard attribute of a plain
    # Serializer; DRF ignores it here — confirm it is read elsewhere.
    model = User
    old_password = serializers.CharField(required=True)
    new_password = serializers.CharField(required=True)
class Safety_FormSerializer(serializers.ModelSerializer):
    """Expose every field of Safety_observation_Form."""
    class Meta():
        model = Safety_observation_Form
        fields= '__all__'
class Safety_observation_FormSerializer(serializers.ModelSerializer):
    """Creation-facing serializer: hides reviewer/closure bookkeeping fields."""
    class Meta():
        model = Safety_observation_Form
        exclude = ('issue','observer','companyid','created_by','hod_name','due_date','remarks','closing_request','closing_descrption','closing_image','closed_date','verified_hod')
class Others_FormsSerializer(serializers.ModelSerializer):
    """Variant for 'other' forms: hides observation detail and closure fields."""
    class Meta():
        model = Safety_observation_Form
        exclude = ('issue','companyid','discussion_held','agreement','types','injury_potential','observation_status','hod_name','due_date','remarks','closing_request','closing_descrption','closing_image','closed_date','verified_hod')
class Safe_situationSerializer(serializers.ModelSerializer):
    """Only the fields needed to report a safe situation."""
    class Meta():
        model = Safety_observation_Form
        fields = ('total_hours','department','location','superviser_name','excution_department','safe_situation')
class Forward_issueSerializer(serializers.ModelSerializer):
    """Update only the HOD an issue is forwarded to."""
    class Meta():
        model = Safety_observation_Form
        fields = ('hod_name',)
class Get_Forward_issueSerializer(serializers.ModelSerializer):
    """Read serializer for forwarded issues (company id hidden)."""
    class Meta():
        model = Safety_observation_Form
        exclude = ('companyid',)
class Assign_due_dateSerializer(serializers.ModelSerializer):
    """Set due date, remarks and status on an observation."""
    class Meta():
        model = Safety_observation_Form
        fields = ('due_date','remarks','observation_status')
class Closing_RequestSerializer(serializers.ModelSerializer):
    """Fields a requester submits when asking for issue closure."""
    class Meta():
        model = Safety_observation_Form
        fields = ('closing_descrption','closing_image',)
class Get_Close_RequestSerializer(serializers.ModelSerializer):
    """Read-back of a closure request together with its ticket number."""
    class Meta():
        model = Safety_observation_Form
        fields = ('ticket_no','closing_descrption','closing_image',)
class CustomTokenObtainPairSerializer(TokenObtainPairSerializer):
    """JWT pair serializer that also exposes the user's email and role."""

    def validate(self, attrs):
        # Augment the standard token response payload with identity info.
        data = super().validate(attrs)
        data.update({'user': self.user.email, 'role': self.user.role})
        return data

    @classmethod
    def get_token(cls, user):
        # Also embed the role claim inside the token itself.
        token = super().get_token(user)
        token['role'] = user.role
        return token
class SchedulesSerializer(serializers.ModelSerializer):
    """All fields of a Schedules row."""
    class Meta():
        model = Schedules
        fields = '__all__'
class SchedulerSerializer(serializers.ModelSerializer):
    """Scheduler configuration: holidays, weekmask and observation quota."""
    class Meta():
        model = Scheduler
        fields = ('holidays','weekmask','holiday_types','observations_required',)
class OpenissuesSerializer(serializers.ModelSerializer):
    """All observation fields — used when listing open issues."""
    class Meta():
        model = Safety_observation_Form
        fields = '__all__'
class CloseissuesSerializer(serializers.ModelSerializer):
    """All observation fields — used when listing closed issues."""
    class Meta():
        model = Safety_observation_Form
        fields = '__all__'
class Close_RequestSerializer(serializers.ModelSerializer):
    """Update only the observation status (closing an issue)."""
    class Meta():
        model = Safety_observation_Form
        fields = ('observation_status',)
class Schedules_userSerializer(serializers.ModelSerializer):
    """All Schedules fields for per-user views."""
    class Meta():
        model = Schedules
        fields = '__all__'
class ReschedulerSerializer(serializers.ModelSerializer):
    """Fields needed to reschedule an observation date."""
    class Meta():
        model = Schedules
        fields = ('date','reschedule_date','reason_reschedule')
class Start_workSerializer(serializers.ModelSerializer):
    """Expose only the scheduled date."""
    class Meta():
        model = Schedules
        fields = ('date',)
class CountSerializer(serializers.ModelSerializer):
    """Expose the counter value."""
    class Meta():
        model = Count
        fields = ('count',)
class NotificationSerializer(serializers.ModelSerializer):
    """Expose the notification text."""
    class Meta():
        model = Notification
        fields = ('notification',)
|
#coding=utf-8
# The benefit of tuple immutability: data handed to an unfamiliar function or interface cannot be mutated out from under us, so the caller's data stays safe.
#tuple
def info(a):
    '''An "unfamiliar" function for the demo: it mutates its list argument in place.'''
    a[0] = 'haha'
a = [1, 2, 3]
info(a)
print(a)
#following will cause error, but as we expected.
#b = (1,2,3)
#info(b)
# Python's set, like in other languages, is an unordered collection of unique elements; its core uses are membership tests and removing duplicates.
# Set objects also support the mathematical operations union, intersection, difference and symmetric difference.
info = set(['a','b', 'c'])
#add only 1 element
info.add('def')
print(info)
#adds an element, update "adds" another iterable set, list or tuple
info.update('ef','gg')
print(info)
print('a' in info)
info.remove('a')
print('a' in info)
info.remove('b')
print(info)
# because sets cannot contain duplicates, a set() round-trip de-duplicates a list
listdup = [1,5,3,2,1,6,10,9]
print(listdup)
print(list(set(listdup)))
#交集,并集,差集 & | -
a1 = set('abc')
b1 = set('efg')
c1 = set('21a')
#cross
print(a1 & c1)
# symmetric difference (elements in exactly one of the two sets)
print(a1 ^ b1)
#diff
print(a1 -c1)
liststr = ['haha','gag','hehe','haha']
print(list(set(liststr)))
#remove duplicate element by list method
m = []
for i in liststr:
if i not in m:
m.append(i)
print(m)
#modify a = (1,2,3) to (5,2,3) with two method
#Method 1:modify in list then covernt to tuple
a = (1,2,3)
m = []
for i in a:
if i == 1:
m.append(5)
else:
m.append(i)
print(tuple(m))
#Method 2:merge two tuple together
b = (5,)+a[1:]
print(b)
#a = set(['a','b','c'])
a = set(['a','b','c'])
a.add('jay')
b = set(['b','e','f','g'])
print(a)
print(b)
#Method1: combine a and b with set operators
# union: every element that appears in a or b (or both)
print(a | b)
# symmetric difference: elements in a or b but not in both
print(a ^ b)
#Method2: merge a and b, include element which contain in both a and b
#use list to store
m = []
for i in a:
m.append(i)
for i in b:
m.append(i)
print(set(m))
|
import matplotlib.pyplot as plt
import euler_richardson
# Phase-space portrait (x vs v) of an oscillator integrated with the
# Euler-Richardson scheme: acceleration f = -k*v - w2*x.
plt.title("")
plt.xlabel("x")
plt.ylabel("v")
w2 = 5  # squared angular frequency (omega^2)
k = 0   # damping coefficient (0 -> undamped)
S = 30  # number of trajectories, one per initial velocity 0..S-1
for i in range(S):
    # assumes euler_richardson.simulate returns position/velocity arrays — TODO confirm
    X, V = euler_richardson.simulate(x=0,
                                     v=i,
                                     f=lambda _v, _x: -k * _v - w2 * _x)
    plt.plot(X, V, '.', color='black', linestyle='solid')
plt.show()
|
#!/usr/bin/env python
import json
import incapsula
import argparse
# Build the CLI: a single optional -s/--site argument holding the site id.
parser = argparse.ArgumentParser(description="Given a site_id, list status")
parser.add_argument("-s", "--site",dest='site_id', help='The site id to retrieve information for')
args = parser.parse_args()
# Fetch the site status from the Incapsula API and pretty-print it as JSON.
r = json.loads(incapsula.getSiteStatus(args.site_id))
print(json.dumps(r, indent=2, separators=(',', ': ')))
|
from django.apps import apps
from django.forms.models import modelform_factory
def normalize_model_name(model_name):
    """Capitalize an all-lowercase model name; pass mixed-case names through."""
    if model_name.lower() == model_name:
        return model_name.capitalize()
    return model_name
def get_model_form(model_name):
    """Return a ModelForm class for the installed model named `model_name`.

    Raises:
        LookupError: if no installed model has that class name. (LookupError
        subclasses Exception, so existing `except Exception` callers still
        match the original behavior.)
    """
    for model in apps.get_models():
        if model.__name__ == model_name:
            return modelform_factory(model, exclude=[])
    raise LookupError('Did not find the model %s' % model_name)
|
#install keras from https://github.com/kundajelab/keras/tree/keras_1
from __future__ import print_function
import keras
import numpy as np
from keras.optimizers import SGD
import math
import matplotlib.pyplot as plt
import sys
'''
Usage:
python 3_train_revcomp_CNN.py featureMat_directory TFID saveDir
'''
#Load training data
resDic = sys.argv[1]
TFID = sys.argv[2]
saveDir = sys.argv[3]
trainMatrix = np.load(resDic + TFID + "_trainMatrix.npy")
trainLabel = np.load(resDic + TFID + "_trainLabel.npy")
valMatrix = np.load(resDic + TFID + "_valMatrix.npy")
valLabel = np.load(resDic + TFID + "_valLabel.npy")
testMatrix = np.load(resDic + TFID + "_testMatrix.npy")
testLabel = np.load(resDic + TFID + "_testLabel.npy")
#build a sample model
model = keras.models.Sequential()
model.add(keras.layers.convolutional.RevCompConv1D(input_shape=(201,4),
nb_filter=32,
filter_length=11))
model.add(keras.layers.normalization.RevCompConv1DBatchNorm())
model.add(keras.layers.core.Activation("relu"))
#Layer
model.add(keras.layers.convolutional.RevCompConv1D(nb_filter=32,
filter_length=11))
model.add(keras.layers.normalization.RevCompConv1DBatchNorm())
model.add(keras.layers.core.Activation("relu"))
#Layer
model.add(keras.layers.convolutional.RevCompConv1D(nb_filter=32,
filter_length=11))
model.add(keras.layers.normalization.RevCompConv1DBatchNorm())
model.add(keras.layers.core.Activation("relu"))
model.add(keras.layers.pooling.MaxPooling1D(pool_length=32))
#Weighted sum
model.add(keras.layers.convolutional.WeightedSum1D(symmetric=False,
input_is_revcomp_conv=True,
bias=False,
init="he_normal"))
model.add(keras.layers.core.DenseAfterRevcompWeightedSum(output_dim=64,
W_regularizer=keras.regularizers.WeightRegularizer(l2=0)))
model.add(keras.layers.core.Activation("relu"))
#model.add(keras.layers.core.Dropout(0.5))
model.add(keras.layers.core.Dense(output_dim=64))
model.add(keras.layers.core.Activation("relu"))
model.add(keras.layers.core.Dense(output_dim=1))
model.add(keras.layers.core.Activation("sigmoid"))
#lrVec = np.arange(0.00004, 0.00011, 0.00001)
lr = 0.00004
#for lr in lrVec:
sgd = SGD(lr=lr)
model.compile(optimizer=sgd, loss="binary_crossentropy", metrics=['accuracy'])
history_callback = model.fit(x=trainMatrix, y=trainLabel, validation_data=(valMatrix, valLabel),
batch_size=128, nb_epoch=200)
model.save(filepath=sys.argv[3]+TFID+"_model.h5")
val_acc = np.array(history_callback.history["val_acc"])
train_acc = np.array(history_callback.history["acc"])
res = np.vstack((train_acc, val_acc)).T
fname = saveDir + TFID + ".txt"
header = ("train_acc", "val_acc")
np.savetxt(fname=fname,X=res,delimiter="\t",header="train_acc,val_acc")
f = plt.figure()
plt.plot(range(0,len(train_acc)), train_acc, 'b',range(0, len(val_acc)), val_acc, 'r')
plt.show()
f.savefig(saveDir + TFID + "_evaluation.pdf", bbox_inches='tight')
# PREDICTED_CLASSES = model.predict_classes(testMatrix, batch_size=128, verbose=1)
# PREDICTED_CLASSES = np.reshape(PREDICTED_CLASSES, (len(testLabel)))
# temp = sum(testLabel == PREDICTED_CLASSES)
# temp/len(testLabel)
#
# Split the held-out test matrix into its positive (first half) and
# negative (second half) examples.
# BUG FIX: use floor division so the indices stay ints on Python 3
# (true division yields floats, which range() rejects).
half = testMatrix.shape[0] // 2
posTestMat = testMatrix[range(0, half)]
posLabel = testLabel[range(0, half)]
negTestMat = testMatrix[range(half, testMatrix.shape[0])]
negLabel = testLabel[range(half, testMatrix.shape[0])]
posScore = model.predict(posTestMat, batch_size=128)
#np.save("/home/malab14/research/00DeepTFBS/00results/optimization/cnn_positive_score", posScore)
negScore = model.predict(negTestMat, batch_size=128)
#np.save("/home/malab14/research/00DeepTFBS/00results/optimization/cnn_negative_score", negScore)
# NOTE(review): the freshly computed scores are immediately overwritten by
# these hard-coded np.load() paths — looks like leftover debug; confirm.
negScore = np.load("/home/malab14/research/00DeepTFBS/00results/optimization/cnn_negative_score.npy")
posScore = np.load("/home/malab14/research/00DeepTFBS/00results/optimization/cnn_positive_score.npy")
def evalModel(posScore, negScore, threshold = 0.5, beta = 2):
    """Compute binary-classification metrics from score vectors.

    posScore/negScore: model scores for known-positive / known-negative
        examples (numpy arrays). Scores strictly above `threshold` count
        as predicted positive.
    beta: recall weight in the F-beta score.
    Returns a dict with Sn, Sp, Pr, Acc, Fscore and MCC.
    """
    TP = float(sum(posScore > threshold))   # positives predicted positive
    TN = float(sum(negScore <= threshold))  # negatives predicted negative
    # BUG FIX: FP and FN were swapped in the original. Positives that score
    # below threshold are false NEGATIVES; negatives that score above it are
    # false POSITIVES. The swap corrupted every derived metric whenever the
    # two error counts (or set sizes) differed.
    FN = float(len(posScore) - TP)
    FP = float(len(negScore) - TN)
    res = {}
    res['Sn'] = TP/(TP + FN)
    res['Sp'] = TN/(TN + FP)
    res['Pr'] = TP/(TP + FP)
    res['Acc'] = (TP+TN)/(TP+TN+FP+FN)
    res['Fscore'] = ((1+beta*beta)*res['Pr']*res['Sn'])/(beta*beta*res['Pr']+res['Sn'])
    res['MCC'] = (TP*TN-FP*FN)/math.sqrt(((TP+FP)*(TP+FN)*(TN+FP)*(TN+FN)))
    return res
res = evalModel(posScore=posScore,negScore=negScore)
np.save(saveDir + TFID + "_evaluate", res)
|
import json
import requests
from bs4 import BeautifulSoup
import sys
import string
from twilio.rest import Client
from time import sleep
global_client = Client("secret", "secret")
def send_text_message(text):
    """Send `text` as an SMS through the module-level Twilio client."""
    # NOTE(review): these placeholder variables are defined but never used —
    # the create() call below uses hard-coded numbers instead. Confirm which
    # pair is intended; the hard-coded ones look like real phone numbers that
    # should not be committed to source control.
    to_phone = "+11DigitPhoneToText"
    from_phone = "+11DigitPhoneTextFrom"
    global_client.messages.create(to="+13045501189",
                                  from_="+12018015162",
                                  body=text)
def containsDigit(word):
    """Return `word` if it contains an ASCII digit, otherwise None."""
    if any(ch in string.digits for ch in word):
        return word
def meets_criteria(word):
    """A promo-code candidate is ALL-CAPS, 5-14 chars long, and has a digit."""
    return bool(
        word.isupper()
        and 5 <= len(word) <= 14
        and containsDigit(word)
    )
def check_instagram():
    """Scrape Chipotle's Instagram page, take the newest post's caption, and
    text the first promo-code-looking word found in it (if any).
    """
    r = requests.get('https://www.instagram.com/chipotle/')
    soup = BeautifulSoup(r.text, 'lxml')
    # The page embeds its data as JS: "window._sharedData = {...};"
    script = soup.find('script', text=lambda t: t.startswith('window._sharedData'))
    page_json = script.text.split(' = ', 1)[1].rstrip(';')
    data = json.loads(page_json)
    # NOTE(review): non_bmp_map is built but never used — dead code?
    non_bmp_map = dict.fromkeys(range(0x10000, sys.maxunicode + 1), 0xfffd)
    relevant_captions = []
    index = 0
    # Keep only the caption of the first (most recent) post.
    for post in data['entry_data']['ProfilePage'][0]['graphql']['user']['edge_owner_to_timeline_media']['edges']:
        for caption in post['node']['edge_media_to_caption']['edges']:
            if index < 1:
                relevant_captions.append(caption['node']['text'])
            index += 1
    caption = relevant_captions[0].split(" ")
    word_to_text = "junk"  # sentinel: no candidate found yet
    for word in caption:
        if meets_criteria(word):
            word_to_text = word
            break
    stripped_word = word_to_text.strip()
    if stripped_word != "junk":
        print(stripped_word)
        send_text_message(stripped_word)
    else:
        pass
if __name__ == '__main__':
    # Poll the Instagram page roughly once per second for an hour.
    # FIX: dropped the original `i += 1` — `for` rebinds the loop variable
    # every iteration, so incrementing it by hand was a misleading no-op.
    for _ in range(3600):
        check_instagram()
        sleep(1)
|
import cv2
from PIL import Image, ImageFont, ImageDraw
import os
import numpy as np
from matplotlib import pyplot as plt
from keras import layers
from keras import models
from keras import optimizers
from keras.utils import plot_model
from keras import backend
path = os.path.dirname(os.path.realpath(__file__)) + "/"
NUMBER_OF_LABELS = 36
#Where image is an OpenCV array
def convertToPIL(image):
    """Wrap an OpenCV numpy array in a PIL Image.

    NOTE(review): Image.fromarray does not reorder channels, and cv2 arrays
    are BGR — confirm downstream consumers expect that ordering.
    """
    return Image.fromarray(image)
#Takes in an image and returns a list of PIL images
#image is the path to the image that is in question
def splitImage(image):
    """Crop the four character cells out of a licence-plate picture.

    image: filesystem path of the plate image.
    Returns [letter1, letter2, digit1, digit2] as PIL images, each a
    180x102 crop taken at a fixed x offset.
    """
    plate = cv2.imread(image)
    top, bottom = 80, 260
    cell_width = 102
    # Left edges of the two letter cells followed by the two digit cells.
    x_origins = (46, 148, 350, 452)
    return [convertToPIL(plate[top:bottom, x:x + cell_width])
            for x in x_origins]
def prepareData():
    """Load labelled plate-character crops and return (X, Y) arrays.

    X: normalized pixel arrays in [0, 1]; Y: one-hot (N, 36) labels
    (0-9 digits, 10-35 letters A-Z), shuffled together.
    Assumes filenames look like "plate_AB12.png" — TODO confirm.
    """
    files = os.listdir(path+"pictures/")
    #List that contains an array that holds the input and expected output as a number
    datalist = []
    for file in files:
        letters = splitImage(path + "pictures/" + file)
        #Get the plate number as a string
        plateNumber = file.replace(".png","").replace("plate_","")
        for i in range (0, 4):
            #Assign output values (0-9 = 0-9, A-Z = 10-35):
            # '0'..'9' -> 0..9, then skip the 7 chars between '9' and 'A'.
            numberLetter = ord(plateNumber[i]) - 48
            if (numberLetter > 9):
                numberLetter = numberLetter - 7
            singledata = np.array([np.array(letters[i]), numberLetter])
            datalist.append(singledata)
    alldata = np.array(datalist)
    np.random.shuffle(alldata)
    #Split the data
    X_dataset_orig = np.array([data[0] for data in alldata[:]])
    Y_dataset_orig = np.array([data[1] for data in alldata]).T
    #We normalize all of the pixels in each image to a value between 0 and 1
    X_dataset = X_dataset_orig / 255
    #Change the Y dataset to a one hot
    Y_dataset = np.eye(NUMBER_OF_LABELS)[Y_dataset_orig]
    return X_dataset, Y_dataset
def reset_weights(model):
    """Re-run each layer's kernel initializer, randomizing weights in place.

    NOTE(review): relies on the TF1-style global session via the keras
    backend; this will not work under TF2 eager mode — confirm the runtime.
    """
    session = backend.get_session()
    for layer in model.layers:
        if hasattr(layer, 'kernel_initializer'):
            layer.kernel.initializer.run(session=session)
def train(X_dataset, Y_dataset):
    """Build, train and save the 36-class character-classifier CNN.

    Trains with a 20% validation split, plots loss/accuracy curves,
    and saves the model as 'plate_seperator.h5'.
    """
    conv_model = models.Sequential()
    conv_model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(180, 102, 3)))
    conv_model.add(layers.MaxPooling2D((2, 2)))
    conv_model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    conv_model.add(layers.MaxPooling2D((2, 2)))
    conv_model.add(layers.Flatten())
    conv_model.add(layers.Dropout(0.5))
    conv_model.add(layers.Dense(512, activation='relu'))
    conv_model.add(layers.Dense(36, activation='softmax'))
    conv_model.summary()
    LEARNING_RATE = 1e-4
    conv_model.compile(loss='categorical_crossentropy',optimizer=optimizers.RMSprop(lr=LEARNING_RATE),metrics=['acc'])
    # Fresh random weights each run (see reset_weights above).
    reset_weights(conv_model)
    history_conv = conv_model.fit(X_dataset, Y_dataset, validation_split=0.2, epochs=10, batch_size=16)
    plt.plot(history_conv.history['loss'])
    plt.plot(history_conv.history['val_loss'])
    plt.title('model loss')
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train loss', 'val loss'], loc='upper left')
    plt.show()
    plt.plot(history_conv.history['acc'])
    plt.plot(history_conv.history['val_acc'])
    plt.title('model accuracy')
    plt.ylabel('accuracy (%)')
    plt.xlabel('epoch')
    plt.legend(['train accuracy', 'val accuracy'], loc='upper left')
    plt.show()
    conv_model.save('plate_seperator.h5')
def list_to_string(codes):
    """Map predicted class indices to plate characters.

    0-9 -> '0'-'9'; 10-35 -> 'A'-'Z' (10 + 55 == ord('A')).
    FIXES: `unichr` was Python-2-only (NameError on py3; `chr` is equivalent
    here since every code maps into ASCII); the parameter also shadowed the
    builtin `list` (callers pass it positionally).
    """
    return "".join(str(c) if c < 10 else chr(c + 55) for c in codes)
def test():
    """Run the saved model over every training picture and print accuracy."""
    files = os.listdir(path+"pictures/")
    model = models.load_model('plate_seperator.h5')
    total_correct=0
    i = 0
    for file in files:
        i=i+1
        print(file)
        letters_pil = splitImage(path + "pictures/" + file)
        plate_number = file.replace(".png","").replace("plate_","")
        # Normalize exactly as in prepareData so inputs match training.
        letters = np.array([np.array(letter) for letter in letters_pil]) / 255
        # NOTE(review): predict_classes is a Keras-1/TF1 API, removed in
        # modern TF — confirm the pinned keras version.
        prediction = model.predict_classes(letters)
        plate = list_to_string(prediction)
        print("Predicted: " + plate + " // Actual: " +plate_number)
        if plate == plate_number:
            total_correct = total_correct + 1
    print("Percentage Correct: "+ str(float(total_correct)/i * 100)+"%")
#Main
X_dataset, Y_dataset = prepareData()
train(X_dataset, Y_dataset)
|
#!/usr/bin/env python3
# ROS stuff
import rospy
from nav_msgs.msg import Odometry
from geometry_msgs.msg import Point, Twist
from sensor_msgs.msg import LaserScan
# other useful math tools
from tf.transformations import euler_from_quaternion
from math import atan2, sqrt
import math
# angle and distant difference constraints
# you can adjust the values for better performance
angle_eps = 0.07
dis_eps = 0.09
BURGER_MAX_LIN_VEL = 0.22
BURGER_MAX_ANG_VEL = 2.84
pub = None
# Class that will be used to read and parse /odom topic
class odomReader:
    """Subscribes to /odom and exposes the robot pose as x, y, theta."""
    def __init__(self):
        # subscribing to "/odom" topic
        # function newOdom() will take care of the new incoming message
        sub = rospy.Subscriber("/odom", Odometry, self.newOdom)
        # pose is unknown (None) until the first message arrives
        self.x = None
        self.y = None
        self.theta = None
    # Function that will take care of input message from odom topic
    # This function will be called whenever new message is available to read
    # Subsequently odom topic is parsed to get (x,y,theta) coordinates
    def newOdom(self, msg):
        # get x and y coordinates
        self.x = msg.pose.pose.position.x
        self.y = msg.pose.pose.position.y
        # convert quaternion to Euler angles
        rot_q = msg.pose.pose.orientation
        (self.roll, self.pitch, self.theta) = euler_from_quaternion([rot_q.x, rot_q.y, rot_q.z, rot_q.w])
# Class that is responsible to read and parse raw LaserScan data
class scanReader:
    """Subscribes to /scan and condenses the laser ranges into 5 sector minima."""
    def __init__(self):
        # subscribing to "/scan" topic
        # function newScan() will take care of the new incoming message
        sub = rospy.Subscriber("/scan", LaserScan, self.newScan)
        # divide laser scan data into 5 regions
        self.region = {
            'right': 0,
            'fright': 0,
            'front': 0,
            'fleft': 0,
            'left': 0,
        }
    # Function that will take care of input message from scan topic
    # This function will be called whenever new message is available to read
    # Subsequently scan topic is parsed to get region data: minimum distance to object from every sight
    def newScan(self, msg):
        # NOTE(review): the slice indices assume a 360-sample scan with
        # index 0 straight ahead — confirm against the sensor config.
        self.ranges = msg.ranges
        self.msg = msg
        self.region['left'] = min(self.ranges[60:100])
        self.region['fleft'] = min(self.ranges[20:60])
        self.region['front'] = min(self.ranges[0:20]+self.ranges[-20:])
        self.region['fright'] = min(self.ranges[300:340])
        self.region['right'] = min(self.ranges[260:300])
        #print "range[90]: ", msg.ranges[90]
# divide robot motion in 3 scenario
state_dict = {
0: 'go to goal',
1: 'circumnavigate obstacle',
2: 'go back to closest point',
}
# define initial scenario
state = 0
def main():
    """Bug-1 navigation loop.

    State machine: (0) head straight for the goal until an obstacle appears
    in front; (1) wall-follow around the obstacle while recording the point
    closest to the goal, until back at the hit point; (2) wall-follow again
    to that closest point, then return to state 0. Terminates on arrival or
    after hitting an obstacle twice.
    """
    global pub
    global state
    # initialize ROS node
    rospy.init_node("bug_1")
    # run stop function when this node is killed
    rospy.on_shutdown(stop)
    rospy.sleep(0.5)
    # define the control velocity publisher of topic type Twist
    pub = rospy.Publisher('/cmd_vel', Twist, queue_size = 1)
    # initialize odom and scan objects
    # Use these objects to access robot position in space and scan information
    odom = odomReader()
    scan = scanReader()
    rospy.sleep(0.5)
    # initialize speed as Twist topic
    speed = Twist()
    # set the loop frequency
    rate = rospy.Rate(80)
    # Set the goal point
    goal = Point()
    goal.x = -1.0
    goal.y = 1.0
    # arbitrary far away coordinate from goal
    closest_point = Point()
    closest_point.x = 1000
    closest_point.y = 1000
    # arbitrary large number representing inf distance to goal
    closest_dist = 1000
    # Variable that stores the coordinate of hit point when you
    # encounter obstacle for the first time
    hit_point = Point()
    i = 0
    count = 0
    hit_count = 0
    while not rospy.is_shutdown():
        # Decide what to do for the robot in each of these states:
        # the x,y distance to the goal from current position
        inc_x = goal.x - odom.x
        inc_y = goal.y - odom.y
        # the angle of the goal point wrt global frame
        angle_to_goal = atan2(inc_y, inc_x)
        # the distance to the goal from current location
        dist_diff = sqrt(inc_x**2 + inc_y**2)
        # find the heading angle difference
        angle_diff = angle_to_goal - odom.theta
        if state == 0:
            # go to goal state.
            '''
            Here robot should go towards a the goal unless it encounters an obstacle.
            When it encounters the wall it should update the hit_point, and change the state to
            "circumnavigate obstacle".
            It's an updated version of the "go_to_point.py"
            '''
            # adjust angle
            if angle_diff > angle_eps:
                speed.linear.x = 0.0
                speed.angular.z = 0.1
            elif angle_diff < -angle_eps:
                speed.linear.x = 0.0
                speed.angular.z = -0.1
            # go to the point
            elif dist_diff > dis_eps:
                speed.linear.x = 0.15
                speed.angular.z = 0.0
            # arrived
            else:
                speed.linear.x = 0.0
                speed.angular.z = 0.0
                print("\n\nArrived!\n\n")
                print("\nx: %.3f, \t y: %.3f, \ttheta: %.3f" %(odom.x, odom.y, odom.theta))
                break
            # if we're near a wall
            if scan.region['front'] <= 0.27:
                speed.linear.x = 0.0 # stop
                speed.angular.z = 0.0
                hit_point.x = odom.x # update our hit point
                hit_point.y = odom.y
                # no solution terminating
                hit_count = hit_count + 1
                if hit_count > 1:
                    print("\n\nHit object more than once, terminating...")
                    break
                state = 1 # go-to circumnavigate obstactle
                print("current state: ", state_dict[state])
        elif state == 1:
            # circumnavigate obstacle.
            '''
            Here robot should turn right/left based on your choice. And, circumnavigate the obstacle using wall following
            algorithm from previous project.
            While in this state, record closest point to goal where you can head towards goal.
            This state terminates when you reach the same point when you hit the obstacle.
            Finally, change the state.
            It's an updated version of the "follow_wall.py"
            '''
            # if we're not close to a wall
            if scan.region['front'] > 0.27:
                # if we're too far from the wall
                if scan.region['fleft'] > 0.27:
                    speed.linear.x = 0.0
                    speed.angular.z = 0.15 # adjust
                    if dist_diff < closest_dist: # if this is the closest distance
                        closest_dist = dist_diff # update everything accordingly
                        closest_point.x = odom.x
                        closest_point.y = odom.y
                elif scan.region['fleft'] <= 0.27 and scan.region['fleft'] > 0.18: # good range to just go straight
                    speed.linear.x = 0.15 # go straight
                    speed.angular.z = 0.0
                    if dist_diff < closest_dist:
                        closest_dist = dist_diff
                        closest_point.x = odom.x
                        closest_point.y = odom.y
                else:
                    speed.linear.x = 0.0
                    speed.angular.z = -0.15 # adjust
                    if dist_diff < closest_dist:
                        closest_dist = dist_diff
                        closest_point.x = odom.x
                        closest_point.y = odom.y
            else:
                speed.linear.x = 0.0
                speed.angular.z = -0.15 # turn right
                if dist_diff < closest_dist:
                    closest_dist = dist_diff
                    closest_point.x = odom.x
                    closest_point.y = odom.y
            # if we reach hitpoint distance
            tempx = hit_point.x - odom.x
            tempy = hit_point.y - odom.y
            tempDist = sqrt(pow(tempx, 2) + pow(tempy, 2))
            if tempDist <= dis_eps and count > 800: # if we're close and this isn't our first time here (initial hit_point problem fix)
                speed.linear.x = 0.0
                speed.angular.z = 0.0
                state = 2 # go back to closest point
                print("current state: ", state_dict[state])
            count = count + 1 # counter used for initial hit point error
        elif state == 2:
            # go back to closest point
            '''
            Here robot should go back to closest point encountered in state 1.
            Once you reach that point, change the state to "go to goal".
            It's an updated version of the "follow_wall.py"
            '''
            #unless the robot's front part gets too close to the wall
            if scan.region['front'] > 0.3:
                #if it goes too far from the wall or the left side
                #is empty, turn the robot to the left
                if scan.region['fleft'] > 0.25:
                    speed.angular.z = 0.15
                    speed.linear.x = 0
                #when the left side of the robot is relatively close to the wall,
                #let the robot move straight
                elif scan.region['fleft'] <= 0.25 and scan.region['fleft'] > 0.18:
                    speed.angular.z = 0
                    speed.linear.x = 0.15
                #when the robot gets too close to the wall, turn the robot to the right
                else:
                    speed.angular.z = -0.15
                    speed.linear.x = 0
            #if the front part of the robot meets the wall, turn to the right
            else:
                speed.angular.z = -0.15
                speed.linear.x = 0
            tempx2 = closest_point.x - odom.x
            tempy2 = closest_point.y - odom.y
            tempDist2 = sqrt(pow(tempx2, 2) + pow(tempy2, 2))
            if tempDist2 <= dis_eps: # if we're close to closest point
                speed.linear.x = 0.0
                speed.angular.z = 0.0
                state = 0 # go to goal
                print("current state: ", state_dict[state])
        print(scan.region)
        pub.publish(speed)
        rate.sleep()
# call this function when you press CTRL+C to stop the robot
def stop():
    """Publish a zero-velocity Twist so the robot halts on node shutdown."""
    global pub
    speed = Twist()
    speed.linear.x = 0.0
    speed.angular.z = 0.0
    pub.publish(speed)
if __name__ == '__main__':
main()
|
# -*- coding:utf-8 -*-
# @Time : 2019/5/5 19:57
# @Author: xiaoxiao
# @File : regulax.py
import re
from common.config import config
import configparser
from common.my_logger import Logger
log=Logger(__name__)
class Regulax:
    """Resolve #name# placeholders in request-data strings.

    Each "#name#" token is replaced with the value of `name` from the [data]
    section of the config file, falling back to a class attribute of the
    same name (e.g. `mobilephone`, set at runtime).
    """
    # runtime fallback value looked up via getattr when config has no entry
    mobilephone = None

    # BUG FIX: the method had no `self` parameter; calling it on an instance
    # bound the instance to `data`. @staticmethod makes both
    # Regulax.regulax(data) and Regulax().regulax(data) behave as intended.
    @staticmethod
    def regulax(data, p="#(.*?)#"):
        """Replace every placeholder matched by pattern `p` in `data`.

        Raises configparser.NoOptionError when a placeholder name is found
        neither in the config [data] section nor on the Regulax class.
        """
        while re.search(p, data):
            result = re.search(p, data).group(1)  # name captured between the markers
            try:
                params = config.get("data", result)
            except configparser.NoOptionError as e:
                if hasattr(Regulax, result):
                    params = getattr(Regulax, result)
                else:
                    print("找不到参数", result)
                    log.error("报错:{0},找不到参数{1}".format(e, result))
                    raise e
            data = re.sub(p, params, data, count=1)
        return data
|
import csv
import datetime
def convert_str_to_datetime(datetime_str):
    """
    Convert a date string of the form 11/10/2019 14:05
    (day/month/year hour:minute) into a datetime object.
    """
    return datetime.datetime.strptime(datetime_str, "%d/%m/%Y %H:%M")
def convert_datetime_to_str(datetime_obj):
    """
    Format a datetime object as a string of the form 11/10/2019 14:05.
    (The previous docstring described the opposite conversion.)
    """
    return datetime_obj.strftime("%d/%m/%Y %H:%M")
def write_last_log_to_csv(source_log, output):
    """Keep only each user's most recent entry from a mail-log CSV.

    source_log: input CSV with a header row followed by (name, email, date)
        rows, where date looks like "11/10/2019 14:05".
    output: path of the CSV to write — the header plus one latest row per
        email address.
    """
    # newline="" is required by the csv module so quoted newlines parse
    # correctly and no blank lines are written on Windows.
    with open(source_log, newline="") as f:
        data = list(csv.reader(f))
    header = data[0]
    result = {}
    # Sort ascending by timestamp; later rows then overwrite earlier ones in
    # the dict, leaving the latest entry per email. (strptime is inlined so
    # this helper is self-contained.)
    sorted_by_date = sorted(
        data[1:],
        key=lambda row: datetime.datetime.strptime(row[2], "%d/%m/%Y %H:%M"),
    )
    for name, email, date in sorted_by_date:
        result[email] = (name, email, date)
    with open(output, "w", newline="") as dest:
        writer = csv.writer(dest)
        writer.writerow(header)
        writer.writerows(result.values())
if __name__ == "__main__":
write_last_log_to_csv("mail_log.csv", "example_result.csv") |
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Utility script to compare the metrics produced on the most recent of a given
example against prior runs.
"""
import argparse
import sys
from neon.metrics.metric import MetricComparison
def parse_args(argv=None):
    """
    Sets up and handles command line argument parsing.

    argv: optional list of argument strings. Defaults to None, in which case
    argparse falls back to sys.argv[1:] — backward compatible with the
    original zero-argument call, but now testable and reusable in-process.
    """
    parser = argparse.ArgumentParser(description='Compare the most recent '
                                     'run of a given experiment with prior '
                                     'runs, using data taken from the '
                                     'specified DB file.')
    parser.add_argument('db_file', type=str,
                        help='flat file containing prior run statistics')
    parser.add_argument('yaml_file', type=str,
                        help='experiment settings file to be compared')
    parser.add_argument('-n', '--num_comps', type=int, default=10,
                        help='number of most recent prior runs to compare to. '
                             'Defaults to 10 if not specified')
    parser.add_argument('-d', '--delimiter', default='\t',
                        help='output field separator.  Defaults to tab char')
    parser.add_argument('-C', '--no_color', action='store_true',
                        help='turn off coloring of comparison diffs')
    parser.add_argument('-t', '--color_threshold', type=float, default=.01,
                        help='Difference magnitude (from prior run mean) '
                             'needed to highlight in color.  Defaults to .01')
    parser.add_argument('-B', '--no_backend_match', action='store_true',
                        help='Compare prior runs across all types of backends')
    parser.add_argument('-H', '--no_header', action='store_true',
                        help='omit inclusion of a header line in output')
    parser.add_argument('-e', '--min_exp_width', type=int, default=1,
                        help='Left pad experiment field to have the specified '
                             'length.  Defaults to 1 (no padding).')
    parser.add_argument('-m', '--min_metric_name_width', type=int, default=1,
                        help='Right pad metric name field to have the '
                             'specified length.  Defaults to 1 (no padding).')
    return parser.parse_args(argv)
def main():
    """Entry point: compare the latest run of the given experiment to prior runs."""
    args = parse_args()
    comparison = MetricComparison(args.db_file, args.yaml_file,
                                  max_comps=args.num_comps,
                                  match_backend=not args.no_backend_match)
    comparison.print_results(field_sep=args.delimiter,
                             escape_colors=not args.no_color,
                             color_threshold=args.color_threshold,
                             header=not args.no_header,
                             min_exp_field_width=args.min_exp_width,
                             min_metric_name_field_width=args.min_metric_name_width)
if __name__ == '__main__':
    # main() returns None, which sys.exit maps to status 0.
    sys.exit(main())
|
import re
import os
import pystache
from . import utils
class Renderer:
    """Renders mustache templates in files and, optionally, in file names.

    Walks a directory tree, replacing ``{{key}}`` placeholders in file names
    with values from ``properties`` and rendering every non-binary file with
    pystache.
    """

    def __init__(self):
        self.mustacher = pystache.Renderer()

    def _process_file(self, file, properties):
        """Render one template file in place.

        The unused ``tree`` parameter of the original was dropped; both call
        sites live in this class.
        """
        new_content = self.mustacher.render_path(file, properties)
        with open(file, 'w') as file_hd:
            file_hd.write(new_content)

    def _rename_if_templated(self, file, properties):
        """If the file name contains a known ``{{key}}``, rename it.

        Returns the (possibly new) path. Shared by process_tree/process_file,
        which previously duplicated this logic.
        """
        mat = re.search(r'\{\{(.*)\}\}', file)
        if mat and mat.group(1) in properties.keys():
            new_name = re.sub(r'\{\{(.*)\}\}',
                              properties[mat.group(1)], file)
            os.rename(file, new_name)
            return new_name
        return file

    def process_tree(self, properties, tree, file_names=True):
        """Recursively render every non-binary file under ``tree``.

        The leftover debug ``print(file)`` and the duplicated isfile() check
        from the original were removed.
        """
        for file in os.listdir(tree):
            file = os.path.join(tree, file)
            if file_names:
                file = self._rename_if_templated(file, properties)
            if os.path.isfile(file) and not utils.is_binary(file):
                self._process_file(file, properties)
            elif os.path.isdir(file):
                self.process_tree(properties, file, file_names=file_names)

    def process_file(self, properties, file, file_names=True):
        """Render a single file, optionally renaming it first."""
        if file_names:
            file = self._rename_if_templated(file, properties)
        if os.path.isfile(file) and not utils.is_binary(file):
            self._process_file(file, properties)
|
if __name__ == '__main__':
    # Read a count (only consumed to advance stdin) and a line of integers,
    # then print the hash of the resulting tuple.
    n = int(input())
    integer_map = map(int, input().split())
    # The original wrapped this as tuple(list([int(x) for x in integer_map])),
    # re-converting values that are already ints and building two throwaway
    # sequences; map already yields ints.
    tp = tuple(integer_map)
    print(hash(tp))
|
import numpy
# Read an n x m integer grid, sum each column, and print the product of the sums.
n, m = map(int, input().split())
rows = [input().split() for _ in range(n)]
arr = numpy.array(rows, int)
print(numpy.prod(numpy.sum(arr, axis=0)))
import os
from orun.core.management.base import BaseCommand, CommandError
from orun.apps import apps
from orun.core.management import commands
from orun.db import transaction, DEFAULT_DB_ALIAS
class Command(BaseCommand):
    """Management command: dump a model's generated view templates to files."""

    def add_arguments(self, parser):
        # NOTE: the original used nargs='1', which is not a valid argparse
        # nargs value (only ints or '?', '*', '+') and raises ValueError as
        # soon as the parser is built.
        parser.add_argument(
            'schema',
            help='Specify the schema and filenames.',
        )
        parser.add_argument(
            'model',
            help='Specify the model name.',
        )
        parser.add_argument(
            'view_type', nargs='?',
            help='View type.',
        )
        parser.add_argument(
            '--database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a specific database to dump fixtures from. '
                 'Defaults to the "default" database.',
        )

    def handle(self, *args, **options):
        app_config = apps.app_configs[options['schema']]
        # Pass the positionals explicitly: make_view expects
        # (app_config, model_name, view_type) and **options carries
        # 'model'/'view_type' keys, not a 'model_name' argument.
        make_view(app_config, options['model'], options.get('view_type'), **options)
@transaction.atomic
def make_view(app_config, model_name, view_type, **options):
    """Write the generated view templates for ``model_name`` under the app's
    templates/views/<model_name>/ directory, one ``<type>.html`` per type.

    view_type may be None/empty (defaults to ['form', 'list']), a single type
    name, or a list of names. The original iterated a bare string character
    by character when a single name was passed.
    """
    if isinstance(app_config, str):
        app_config = apps.app_configs[app_config]
    model = apps[model_name]
    if not view_type:
        view_type = ['form', 'list']
    elif isinstance(view_type, str):
        view_type = [view_type]
    # The target directory does not depend on the view type: create it once,
    # exist_ok avoids the check-then-act race of the original.
    path = os.path.join(app_config.path, 'templates', 'views', model_name)
    os.makedirs(path, exist_ok=True)
    for v in view_type:
        filename = os.path.join(path, v + '.html')
        view = model.get_view_info(v)
        with open(filename, 'w') as f:
            f.write(view['content'])
|
import math
x = -49

# Piecewise evaluation of f(x); the branches partition the number line at
# 16, 109 and 151, so plain elif comparisons suffice.
# NOTE(review): math.log(math.e, base) passes math.e as the *value* and the
# expression as the *base* — looks like the arguments may be swapped; confirm
# against the original formula before relying on that branch.
if x < 16:
    result = x ** 5 - 68 * x ** 7 + 46
elif x < 109:
    result = math.log(math.e, math.cos(x) - 93 * x - 71) + x ** 5
elif x <= 151:
    result = 88 * x ** 8 + x - 60
else:
    result = 19 * (math.fabs(x) + math.cos(x)) ** 4 - math.fabs(x)
print(f"{result:.2e}")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ResoucesPrj.py
# Author: gfcocos
# python_tool install
# Installation steps:
# 1. install Gow-0.8.0.exe
# 2. install python3.msi
# 3. double click ./install/install_python_tool.py
import os,sys
# Shell commands used by the installer (run through the Gow-provided
# `which`/`cp` Unix tools on Windows).
bash_cmd = {
    'find_python_path' : 'which python.exe',
    'copy_to_site_packages' : 'cp -rf ../site-packages/*'}
# User-facing status messages.
log_tips = {
    'bash_error' :'bash_cmd exec error',
    'install_succeed' :'install python tool succeed'
}
# Initialisation routine: locate python.exe and copy the bundled site-packages into it.
def init():
    """Locate the active python.exe via `which` and copy the bundled
    ../site-packages contents into <python dir>/Lib/site-packages."""
    file_handler = os.popen(bash_cmd['find_python_path'])
    # strip() drops the trailing newline from the command output.
    python_path = file_handler.read().strip()
    # os.popen always returns a handle, so the original `if file_handler:`
    # was always true; test the OUTPUT instead — empty means `which` failed.
    if python_path:
        python_path = python_path.replace('\\', '/')
        python_dirctionary, python_name = os.path.split(python_path)
        cp_cmd = bash_cmd['copy_to_site_packages'] + ' ' + python_dirctionary + '/Lib/site-packages'
        not_succeed = os.system(cp_cmd)
        if not_succeed:
            print(log_tips['bash_error'])
        else:
            print(log_tips['install_succeed'])
    else:
        print(log_tips['bash_error'])
def pause():
    """Block until the user presses a key (Windows `pause` shell command)."""
    os.system('pause')
# Run the installer, then keep the console window open.
init()
pause()
|
class AutoAttributes:
    """Mixin providing a generic keyword constructor and dict conversion.

    Subclasses declare the accepted attribute names in ``attrs``; the
    ``__table__``-based helpers assume an SQLAlchemy-mapped subclass.
    """

    # Names of the attributes the generic constructor accepts.
    attrs = ()

    def __init__(self, **kwargs):
        """Generic constructor: set every declared attribute passed by keyword."""
        for attr in self.attrs:
            if attr in kwargs:
                # The original called setattr(attr, kwargs[attr]) — setattr
                # takes (obj, name, value), so this raised TypeError.
                setattr(self, attr, kwargs[attr])

    def __repr__(self):
        # Use the column *names*, as to_dict does: the original passed Column
        # objects to getattr, which requires a string and raised TypeError.
        body = [f"{c.name}={getattr(self, c.name, None)}" for c in self.__table__.columns]
        return f"{self.__class__.__name__}({', '.join(body)})"

    def to_dict(self):
        """Return a {column name: str(value)} mapping of the mapped columns."""
        return {c.name: str(getattr(self, c.name)) for c in self.__table__.columns}

    def from_dict(self, data):
        """Copy every declared attribute present in ``data`` onto the instance."""
        for field in self.attrs:
            if field in data:
                setattr(self, field, data[field])
#! /usr/bin/env python3
'''
sensu 2.0 api calls
requires python3
- validate json for asset definitions and check defintions
- sync asset and check definitions to API
- sync actual asset files to server
'''
import json
import requests
from urllib import parse
from hinoki.logger import log
from hinoki.config import config
class SensuAPICall:
    """Thin client for the Sensu 2.0 HTTP API.

    Keeps the auth token and the last response body on the instance; methods
    log failures and return True/False rather than raising.
    """

    # Shared JSON headers for every request.
    headers={'Content-Type': 'application/json', 'Accept': 'application/json'}
    token=""
    last_resp_content={}

    def test_get(self, endpoint):
        """GET the configured check endpoint; returns the call's outcome."""
        try:
            endpoint=config['api_items']['check']["endpoint"]
        except KeyError:
            return False
        # Return the outcome instead of dropping it — the original assigned
        # it to an unused local and always returned None.
        return self.perform_api_call(endpoint=endpoint, method="GET")

    def sync_definition(self, item_type, file):
        """PUT the JSON definition stored in <dir>/<file> to its endpoint."""
        try:
            directory=config['api_items'][item_type]["dir"]
            endpoint=config['api_items'][item_type]["endpoint"]
        except KeyError as e:
            log.error("Couldn't find an entry for these parameters in the project configuration")
            log.debug("Item type passed to function: "+item_type)
            log.debug("Filename passed to function: "+file)
            log.debug(e)
            return False
        # 'with' closes the handle on every path; the original leaked it
        # whenever the file contained invalid JSON.
        with open(directory+"/"+file, 'r') as fh:
            try:
                item=json.load(fh)
            except (json.decoder.JSONDecodeError, UnicodeDecodeError):
                message='Error importing definition for file '+file+': invalid JSON.'
                log.error(message)
                return False
        name="/"+file.split('.json')[0]
        return self.perform_api_call(endpoint+name, request_body=json.dumps(item))

    # accepts a user and password, both required
    # sets the auth token, or False and logs errors
    def api_auth(self, user=config['api_user'], password=config['api_password'], auth_endpoint=config['api_auth_endpoint']):
        """Authenticate and store the bearer token on the instance."""
        resp=requests.get(auth_endpoint, auth=(user, password), headers=self.headers)
        data=json.loads(resp.content.decode('utf-8'))
        try:
            token=data['access_token']
        except KeyError:
            # A missing dict key raises KeyError; the original caught
            # NameError, which can never fire here.
            log.error("Unable to authenticate to API, no token retrieved.")
            return False
        self.token=token
        return True

    def test_connection(self):
        """Probe the health-check endpoint; True when it responds 2xx."""
        try:
            health=requests.get(config['api_healthcheck_endpoint'], timeout=10)
        except requests.exceptions.ConnectionError:
            log.error('Could not reach API health check')
            return False
        try:
            health.raise_for_status()
        except requests.exceptions.ConnectionError:
            log.error('Could not reach API health check - network or DNS error')
            return False
        except requests.exceptions.HTTPError:
            log.error('Could not reach API health check - HTTP error %s' % str(health.status_code))
            log.debug('Response headers: %s' % json.dumps(dict(health.headers)))
            return False
        log.info('API health check completed successfully.')
        log.debug('Status code: %s' % health.status_code)
        return True

    def perform_api_call(self, endpoint, method="PUT", request_body={}):
        """Send one authenticated request; stores the decoded body in
        last_resp_content on success. request_body is never mutated, so the
        mutable default is safe here."""
        auth_header={'Authorization': self.token}
        auth_header.update(self.headers)
        request_url = parse.urljoin(config['api_url'], endpoint)
        log.debug(request_url)
        resp=requests.request(method, request_url, headers=auth_header, data=request_body)
        try:
            resp.raise_for_status()
        except requests.exceptions.HTTPError:
            log.error('Error '+str(resp.status_code))
            log.debug('Request URL: '+request_url)
            log.debug('Response headers: %s' % json.dumps(dict(resp.headers)))
            # The original `log.error(str.resp.text)` raised AttributeError
            # (`str` has no attribute `resp`) instead of logging the body.
            log.error(resp.text)
            return False
        self.last_resp_content=resp.content.decode('utf8')
        return True
sensu_connect = SensuAPICall()
|
'''
Created on 30-May-2012
@author: NANDU
'''
from initialise import screen
from pygame import image
from math import ceil
from random import random, randrange
class BALL:
    """A bouncing ball sprite confined to the top 90% of the screen."""

    def __init__(self, x=0, y=0):
        """Load the sprite and place the ball at a random free position.

        x and y shrink the horizontal / vertical placement range from the
        right and bottom edges respectively.
        """
        self.pic = image.load('images/ball.png')
        self.width = self.pic.get_width()
        self.height = self.pic.get_height()
        self.x = self.width + randrange(0, ceil((screen.get_width() - x) - 2 * self.width))
        self.y = self.height + randrange(0, ceil((screen.get_height() - y) * .9 - 2 * self.height))
        self.dx = random() * 2
        self.dy = random() * 2

    def move(self):
        """Advance one step along the current velocity."""
        self.x += self.dx
        self.y += self.dy

    def reverse(self):
        """Flip both velocity components."""
        self.dx, self.dy = -self.dx, -self.dy

    def bounce(self):
        """Clamp the ball inside the playfield and reflect its velocity
        at whichever wall it has crossed."""
        left = self.width / 2
        right = screen.get_width() - self.width / 2
        top = self.height / 2
        bottom = screen.get_height() * .9 - self.height / 2
        if self.x < left:
            self.x, self.dx = left, -self.dx
        if self.x > right:
            self.x, self.dx = right, -self.dx
        if self.y < top:
            self.y, self.dy = top, -self.dy
        if self.y > bottom:
            self.y, self.dy = bottom, -self.dy

    def draw(self):
        """Blit the sprite centred on (x, y)."""
        screen.blit(self.pic, (self.x - self.width / 2, self.y - self.height / 2))
|
# Definition for singly-linked list.
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None


class Solution:
    def deleteDuplicates(self, head):
        """
        Remove every value that occurs more than once in a sorted list
        (LeetCode 82), keeping only values that appear exactly once.
        :type head: ListNode
        :rtype: ListNode
        """
        sentinel = ListNode(0)
        sentinel.next = head
        tail = sentinel          # last node known to be unique
        scan = head
        while scan is not None:
            if scan.next is not None and scan.next.val == scan.val:
                # Skip the whole run of equal values, then splice it out.
                dup_val = scan.val
                while scan is not None and scan.val == dup_val:
                    scan = scan.next
                tail.next = scan
            else:
                tail = scan
                scan = scan.next
        return sentinel.next
# Smoke test: build 1->2->3->3->4->4->5 and run the dedup pass on it.
a = ListNode(1)
b = ListNode(2)
c = ListNode(3)
d = ListNode(3)
e = ListNode(4)
f = ListNode(4)
g = ListNode(5)
for prev, nxt in ((a, b), (b, c), (c, d), (d, e), (e, f), (f, g)):
    prev.next = nxt
Solution().deleteDuplicates(a)
# Generated by Django 3.0.8 on 2020-07-17 14:00
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adds the userProfile model and repoints the guides and
    travelers foreign keys at it."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('profiles', '0007_guides_quotedcharges'),
    ]

    operations = [
        migrations.CreateModel(
            name='userProfile',
            fields=[
                ('userID', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=100)),
                ('gender', models.CharField(choices=[('F', 'Female'), ('M', 'Male'), ('U', 'Undefined')], default='U', max_length=1)),
                ('passportNo', models.CharField(max_length=100)),
                ('email', models.EmailField(max_length=254)),
                ('address', models.CharField(max_length=300)),
                ('dateOfBirth', models.DateField()),
                ('profilePic', models.ImageField(upload_to='profilePicture')),
                ('owner', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AlterField(
            model_name='guides',
            name='userID',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='profiles.userProfile'),
        ),
        migrations.AlterField(
            model_name='travelers',
            name='userID',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='profiles.userProfile'),
        ),
    ]
|
import datetime
import enum
import flask_sqlalchemy
db = flask_sqlalchemy.SQLAlchemy()
class ModelMixin(object):
    """Mixin adding a surrogate primary key and created/updated timestamps."""
    id = db.Column(db.Integer, primary_key=True)
    # Set once at insert time (UTC).
    created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    # Refreshed automatically on every UPDATE (UTC).
    updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.utcnow)
class GenderEnum(enum.Enum):
    """Target gender for retailers/categories; values are display labels."""
    male = "Male"
    female = "Female"
    all_genders = "All_Genders"
class CategoryType(db.Model):
    """Lookup table of category types, referenced by Retailer and Category."""
    __tablename__ = 'category_type'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
class Retailer(db.Model, ModelMixin):
    """A retailer, classified by gender audience and a CategoryType."""
    __tablename__ = 'retailer'
    name = db.Column(db.String(64), unique=True)
    gender = db.Column(db.Enum(GenderEnum), default=GenderEnum.all_genders, nullable=False)
    # Adds a 'retailers' backref collection on CategoryType.
    type = db.relationship('CategoryType', backref='retailers', lazy=True)
    type_id = db.Column(db.Integer, db.ForeignKey('category_type.id'), nullable=False)
class Category(db.Model, ModelMixin):
    """A product category belonging to a Retailer and a CategoryType."""
    __tablename__ = 'category'
    name = db.Column(db.String(64), unique=True)
    gender = db.Column(db.Enum(GenderEnum), default=GenderEnum.all_genders, nullable=False)
    # NOTE(review): backref='retailer' puts a 'retailer' collection on the
    # Retailer model — the name looks accidental ('categories' would be the
    # conventional choice); confirm before renaming, callers may rely on it.
    retailer = db.relationship('Retailer', backref='retailer', lazy=True)
    retailer_id = db.Column(db.Integer, db.ForeignKey('retailer.id'), nullable=False)
    type = db.relationship('CategoryType', backref='categories', lazy=True)
    type_id = db.Column(db.Integer, db.ForeignKey('category_type.id'), nullable=False)
|
# Demos of the [EXPRESSION for item in iterable if CONDITION] comprehension form.
list1 = [2 * x for x in range(5)]
print(list1)

list2 = [x for x in range(10) if x % 2 == 0]
print(list2)

c = [(1, 2), (3, 4), (5, 6)]
list3 = [first for first, second in c]
print(list3)

list4 = [[first * 2, second * 3, first + second] for first, second in c]
print(list4)
|
##################################################################
# FILE : hangman.py
# WRITER : Lior Paz, lioraryepaz, 206240996
# EXERCISE : intro2cs ex4 2017-2018
# DESCRIPTION : hangman game - with a large scale of optional words,
# max errors of 6, and... a special option of hints!!!.
##################################################################
import hangman_helper
# allows us to use all of the helping functions that the great stuff of CS
# built for us :)
UNDERSCORE = '_'
# Placeholder character for letters not yet revealed in the pattern.
def update_word_pattern(word, pattern, letter):
    """Return ``pattern`` with every position where ``word`` has ``letter``
    revealed.

    Fix: the original compared characters with ``is``, which tests object
    identity and only happens to work for interned one-character strings in
    CPython; ``==`` is the correct comparison.
    """
    list_pattern = list(pattern)
    for index, char in enumerate(word):
        if char == letter:
            list_pattern[index] = letter
    return ''.join(list_pattern)
def filter_words_list(words_list, pattern, wrong_guess_lst):
    """Return the candidate words that could hide behind ``pattern``,
    excluding any word containing a letter from ``wrong_guess_lst``."""
    filtered_words = []
    # De-duplicate once; letters already revealed anywhere in the pattern.
    revealed = set(pattern)
    for word in set(words_list):
        # Length mismatch rules the word out immediately.
        if len(word) != len(pattern):
            continue
        # Any already-guessed-wrong letter rules the word out.
        matches = all(letter not in wrong_guess_lst for letter in word)
        if matches:
            for pos in range(len(word)):
                if pattern[pos] == UNDERSCORE:
                    # A hidden slot cannot hold a letter that is already
                    # revealed elsewhere in the pattern.
                    if word[pos] in revealed:
                        matches = False
                else:
                    # A revealed slot must agree with the word exactly.
                    if pattern[pos] != word[pos]:
                        matches = False
        if matches:
            filtered_words.append(word)
    return filtered_words
def choose_letter(filtered_words, pattern):
    """Return (count, letter) for the most frequent a-z letter across the
    words that is not already revealed in ``pattern``.

    Ties keep the earliest letter in a-z order; with no candidates the
    initial (0, 0) sentinel is returned.
    """
    combined = ''.join(filtered_words)
    best = (0, 0)
    for code in range(ord('a'), ord('z') + 1):
        candidate = chr(code)
        if candidate in pattern:
            continue
        occurrences = combined.count(candidate)
        # Strictly greater: earlier letters win ties.
        if occurrences > best[0]:
            best = (occurrences, candidate)
    return best
def run_single_game(words_list):
    """Run one full hangman round: pick a random word, then loop reading the
    player's input (letter guesses or hint requests) until the word is
    revealed or MAX_ERRORS wrong guesses accumulate."""
    random_word = hangman_helper.get_random_word(words_list)
    # choose the word randomly from the index
    wrong_guess_lst = set()
    pattern = UNDERSCORE * len(random_word)
    msg = hangman_helper.DEFAULT_MSG
    # mapping helper func to more comfortable variables i determined
    ask_play = False
    # i wont ask the player to play again as default, only in specific
    # Circumstances
    while (len(wrong_guess_lst) < hangman_helper.MAX_ERRORS) and (
            pattern.find(UNDERSCORE) != -1):
        error_count = len(wrong_guess_lst)
        hangman_helper.display_state(pattern, error_count, wrong_guess_lst,
                                     msg, ask_play)
        # the main function that sends data to the player, the "game" itself
        key, value = hangman_helper.get_input()
        if key == hangman_helper.HINT:
            filtered_words = filter_words_list(words_list, pattern,
                                               wrong_guess_lst)
            hint = (choose_letter(filtered_words, pattern))[1]
            msg = hangman_helper.HINT_MSG + hint
            continue
        # every time we understand what has the player asked for, there is no
        # need to check all of the other options, that's why we will continue
        elif key == hangman_helper.LETTER:
            letter_now = str(value)
            # ord() is only reached when the length is exactly 1, so the
            # short-circuit keeps multi-character input from raising.
            if (len(letter_now) != 1) or ((ord(letter_now) < 97) or (ord(
                    letter_now) > 122)):
                # checks if the input is a valid letter
                msg = hangman_helper.NON_VALID_MSG
            elif pattern.find(letter_now) != (-1):
                # checks if we have already chosen the letter before hand
                msg = hangman_helper.ALREADY_CHOSEN_MSG + letter_now
                continue
            elif letter_now in wrong_guess_lst:
                msg = hangman_helper.ALREADY_CHOSEN_MSG + letter_now
                continue
            elif random_word.find(letter_now) == (-1):
                # sets what happens to wrong guess
                wrong_guess_lst.add(letter_now)
                msg = hangman_helper.DEFAULT_MSG
                continue
            else:
                # the proper game where the pattern updates
                pattern = update_word_pattern(str(random_word), pattern,
                                              letter_now)
                msg = hangman_helper.DEFAULT_MSG
    if len(wrong_guess_lst) == hangman_helper.MAX_ERRORS:
        # ending the game if we have reached max errors and asks
        # the player to play again while showing him loss msg
        msg = hangman_helper.LOSS_MSG + random_word
        error_count = hangman_helper.MAX_ERRORS
        ask_play = True
    if pattern.find(UNDERSCORE) == -1:
        # if the word is revealed - win! msg + end game
        msg = hangman_helper.WIN_MSG
        ask_play = True
    # NOTE(review): error_count is only assigned inside the loop body and the
    # loss branch — if the while loop never ran (e.g. a zero-length word),
    # this call would raise UnboundLocalError; confirm word lengths are > 0.
    hangman_helper.display_state(pattern, error_count, wrong_guess_lst, msg,
                                 ask_play)
def main():
    """Entry point: play one game, then keep replaying while the player
    answers the play-again prompt affirmatively."""
    word_bank = hangman_helper.load_words('words.txt')
    run_single_game(word_bank)
    key, value = hangman_helper.get_input()
    if key == hangman_helper.PLAY_AGAIN:
        while value:
            run_single_game(word_bank)
            key, value = hangman_helper.get_input()
if __name__ == "__main__":
    # Hand control to the helper GUI, which calls main() once it is ready,
    # then tear the GUI down when main() returns.
    hangman_helper.start_gui_and_call_main(main)
    hangman_helper.close_gui()
import psycopg2
# PostgreSQL connection settings.
hostname = 'localhost'
username = 'postgres'
password = 'nicetry'
database = 'GenomeData'
port = 5432

msg = "Connecting to database: host: {}, port: {}, dbname: {}".format(hostname, port, database)
print(msg)

# Module-level connection shared throughout the program.
# NOTE(review): connect() is not wrapped in try/except, so an unreachable
# server aborts the import of this module.
conn = psycopg2.connect(host=hostname, port=port, user=username, password=password, dbname=database)
def close_connection():
    """Close the module-level PostgreSQL connection."""
    conn.close()
|
from app_service.service_helper.error_deal import ErrorDeal
from app_service.service_helper.code_deal.equipment_list import EquipmentList
# Dispatch table mapping message codes to their handler callables.
code_map = {
    'error': ErrorDeal.deal_with,
    '1': EquipmentList.deal_with,
}
|
import numpy as np
import os
from tqdm import tqdm
# Output directories for training images and annotations; exist_ok avoids
# the check-then-act race of the original exists()/makedirs() pair.
for _out_dir in ('./Train', './Train_annot'):
    os.makedirs(_out_dir, exist_ok=True)
# Container (LDC) grid geometry in cells.
cell_len = 80
cell_wid = 45
cell_ht = 45
# Number of containers along x / y.
max_ldc_x = 1
max_ldc_y = 1
def getStabilityScore(i, j , ldc, dimn, currldc_x, currldc_y):
    """Score placing a box of size dimn = [length, width, height] with its
    top-left cell at (i, j) of the height-map ``ldc``, inside container
    (currldc_x, currldc_y).

    Returns -10 when the placement does not fit the container footprint or
    exceeds cell_ht; otherwise a heuristic score built from surface flatness,
    the heights along the four neighbouring borders, edge continuity bonuses,
    and penalties for container index, position and level.

    NOTE(review): if the placement is feasible but the footprint is neither
    flat nor supported at four equal-height corners, no stability branch
    assigns stab_score and the final `return stab_score` raises
    UnboundLocalError — confirm whether such inputs can occur.
    """
    level = ldc[i,j]
    h = dimn[2]
    feasible = False
    found_flat = found_not_flat = 0
    # Feasible = footprint inside this container's cells and the stack stays
    # below the height limit.
    if (j >= cell_len*currldc_x) and (j+dimn[0] <= cell_len*(currldc_x+1)) and\
    (i >= cell_wid*currldc_y) and (i+dimn[1] <= cell_wid*(currldc_y+1)) and\
    level + h <= cell_ht:
        feasible = True
        # --------------------------------------------------- Flat position
        if len(np.unique(ldc[i:i+dimn[1], j:j+dimn[0]])) == 1:
            stab_score = 1
            found_flat = 1
        # ---------------------------------------------------- Non-Flat position
        if not found_flat:
            corners = [ldc[i,j], ldc[i+dimn[1]-1,j], ldc[i,j+dimn[0]-1], ldc[i+dimn[1]-1, j+dimn[0]-1]]
            # Equal corners that are also the footprint maximum: supported at
            # the corners; penalise the enclosed void volume.
            if (np.max(corners) == np.min(corners)) and (np.max(corners) == np.max(ldc[i:i+dimn[1],j:j+dimn[0]])):
                stab_score = - np.sum(np.max(corners)-ldc[i:i+dimn[1],j:j+dimn[0]])/(dimn[0]*dimn[1]*cell_ht)
                found_not_flat = 1
        if (found_flat) or (found_not_flat):
            # Clamp the one-cell border window to the container boundary.
            minj = np.max((cell_len*currldc_x,j-1))
            maxj = np.min((cell_len*(currldc_x+1),j+dimn[0]))
            mini = np.max((cell_wid*(currldc_y),i-1))
            maxi = np.min((cell_wid*(currldc_y+1),i+dimn[1]))
            # Border for the upper edge (container wall counts as full height)
            if i==currldc_y*cell_wid:
                upper_border = (cell_ht - 1 + np.ones_like(ldc[mini,j:(j+int(dimn[0]))])).tolist()
            else:
                upper_border = ldc[mini,j:(j+int(dimn[0]))].tolist()
            # Stability for the upper edge
            unique_ht = np.unique(upper_border)
            if len(unique_ht) == 1:
                stab_score += 0.5
                if unique_ht[0] == level: stab_score -= 2
                elif unique_ht[0] == cell_ht: stab_score += 1.5
                else:
                    sscore = 1.-abs(unique_ht[0]-(level+h))/cell_ht
                    if (unique_ht[0]>level): stab_score += 1.5*sscore
                    else: stab_score += 0.75*sscore
            else:
                stab_score += 0.25*(1.-len(unique_ht)/h)
                stab_score += 0.25*(1.-sum(abs(ht-(level+h)) for ht in unique_ht)/(h*len(unique_ht)))
                stab_score += 0.50*sum(ht!=level for ht in unique_ht)/len(unique_ht)
            #border.extend(upper_border)
            del upper_border
            # Border for the left edge
            if j==currldc_x*cell_len:
                left_border = (cell_ht - 1 + np.ones_like(ldc[i:(i+int(dimn[1])),minj])).tolist()
            else:
                left_border = ldc[i:(i+int(dimn[1])),minj].tolist()
            # Stability for the left edge
            unique_ht = np.unique(left_border)
            if len(unique_ht) == 1:
                stab_score += 0.5
                if unique_ht[0] == level: stab_score -= 2
                elif unique_ht[0] == cell_ht: stab_score += 1.5
                else:
                    sscore = 1.-abs(unique_ht[0]-(level+h))/cell_ht
                    if (unique_ht[0]>level): stab_score += 1.5*sscore
                    else: stab_score += 0.75*sscore
            else:
                stab_score += 0.25*(1.-len(unique_ht)/h)
                stab_score += 0.25*(1.-sum(abs(ht-(level+h)) for ht in unique_ht)/(h*len(unique_ht)))
                stab_score += 0.50*sum(ht!=level for ht in unique_ht)/len(unique_ht)
            #border.extend(left_border)
            del left_border
            # Border for the lower edge
            if (i+dimn[1] < cell_wid*(currldc_y+1)): lower_border = ldc[maxi,j:(j+int(dimn[0]))].tolist()
            else: lower_border = (cell_ht - 1 + np.ones_like(ldc[maxi-1,j:(j+int(dimn[0]))])).tolist()
            # Stability for the lower edge
            unique_ht = np.unique(lower_border)
            if len(unique_ht) == 1:
                stab_score += 0.5
                if lower_border[0] == level: stab_score -= 2
                elif lower_border[0] == cell_ht: stab_score += 1.5
                else:
                    sscore = 1.-abs(unique_ht[0]-(level+h))/cell_ht
                    if (unique_ht[0]>level): stab_score += 1.5*sscore
                    else: stab_score += 0.75*sscore
            else:
                stab_score += 0.25*(1.-len(unique_ht)/h)
                stab_score += 0.25*(1.-sum(abs(ht-(level+h)) for ht in unique_ht)/(h*len(unique_ht)))
                stab_score += 0.50*sum(ht!=level for ht in unique_ht)/len(unique_ht)
            #border.extend(lower_border)
            del lower_border
            # Border for the right edge
            if (j+dimn[0] < (currldc_x+1)*cell_len): right_border = ldc[i:(i+int(dimn[1])),maxj].tolist()
            else:
                right_border = (cell_ht - 1 + np.ones_like(ldc[i:(i+int(dimn[1])),maxj-1])).tolist()
            # Stability for the right edge
            unique_ht = np.unique(right_border)
            if len(unique_ht) == 1:
                stab_score += 0.5
                if right_border[0] == level:
                    stab_score -= 2
                elif right_border[0] == cell_ht:
                    stab_score += 1.5
                else:
                    sscore = 1.-abs(unique_ht[0]-(level+h))/cell_ht
                    if (unique_ht[0]>level):
                        stab_score += 1.5*sscore
                    else:
                        stab_score += 0.75*sscore
            else:
                stab_score += 0.25*(1.-len(unique_ht)/h)
                stab_score += 0.25*(1.-sum(abs(ht-(level+h)) for ht in unique_ht)/(h*len(unique_ht)))
                stab_score += 0.50*sum(ht!=level for ht in unique_ht)/len(unique_ht)
            #border.extend(right_border)
            del right_border
            # Check the upper edge for continuity
            if i == currldc_y*cell_wid: stab_score += 0.02
            else:
                # In the upper-left corner
                if (j == currldc_x*cell_len) :
                    stab_score += 0.01
                # In the upper-right corner
                if ((j+dimn[0]) == (currldc_x+1)*cell_len) :
                    stab_score += 0.01
            # Check the lower edge for continuity
            if i+dimn[1] == cell_wid*(currldc_y+1):
                stab_score += 0.02
            else:
                # In the lower-left corner
                if (j == currldc_x*cell_len) :
                    stab_score += 0.01
                # In the lower-right corner
                if ((j+dimn[0]) == (currldc_x+1)*cell_len) :
                    stab_score += 0.01
            # Check the left edge for continuity
            if j == currldc_x*cell_len:
                stab_score += 0.02
            else:
                # In the upper-left corner
                if (i == currldc_y*cell_wid):
                    stab_score += 0.01
                # In the lower-left corner
                if (i+dimn[1] == cell_wid*(currldc_y+1)):
                    stab_score += 0.01
            # Check the right edge for continuity
            if j+dimn[0] == (currldc_x+1)*cell_len:
                stab_score += 0.02
            else:
                # In the upper-left corner
                if (i == currldc_y*cell_wid) :
                    stab_score += 0.01
                # In the lower-left corner
                if (i+dimn[1] == cell_wid*(currldc_y+1)):
                    stab_score += 0.01
            # Penalties: later containers, positions far from the container
            # origin, and higher stacking levels score lower.
            stab_score -= currldc_x/max_ldc_x + currldc_y/max_ldc_y
            stab_score -= 0.05*(i/((currldc_y+1)*cell_wid) + j/((currldc_x+1)*cell_len))
            stab_score -= level / cell_ht
            #stab_score += (dimn[0]*dimn[1])/(cell_len*cell_wid)
    else:
        stab_score = -10
    return stab_score
def get_final_img(dim, img, normalise=1):
    """Stack the height-map with box-size and border planes into (H, W, 3).

    Channel 0 is ``img`` itself; channel 1 marks a dim[1] x dim[0] top-left
    patch with the box height dim[2]; channel 2 marks a border strip of
    width dim[0]-1 / dim[1]-1 along the right and bottom edges with 45.
    When ``normalise`` is truthy the whole stack is divided by 45.
    """
    rows, cols = img.shape[0], img.shape[1]
    stacked = np.zeros((rows, cols, 3))
    stacked[:, :, 0] = img
    size_plane = np.zeros((rows, cols))
    size_plane[:dim[1], :dim[0]] = dim[2]
    stacked[:, :, 1] = size_plane
    border_plane = np.zeros((rows, cols))
    border_plane[:, cols - dim[0] + 1:] = 45
    border_plane[rows - dim[1] + 1:, :] = 45
    stacked[:, :, 2] = border_plane
    return stacked / 45 if normalise else stacked
# Generate (image, annotation) training pairs: for each epoch pick a random
# episode, score a random box size at every cell of each step's height-map,
# and dump both arrays. RNG calls are issued in the same order as before.
for epoch in tqdm(range(100)):
    episode_num = np.random.randint(0, 11, 1)[0]
    episode_dir = 'States/episode_' + str(episode_num)
    for step in range(len(os.listdir(episode_dir))):
        ldc = np.load(episode_dir + '/step_' + str(step) + '.npy')[:, :80]
        score = np.zeros(ldc.shape)
        l = np.random.randint(2, 40, 1)[0]
        b = np.random.randint(2, 40, 1)[0]
        h = np.random.randint(2, 40, 1)[0]
        for i in range(score.shape[0]):
            for j in range(score.shape[1]):
                score[i, j] = np.round(getStabilityScore(i, j, ldc=ldc, dimn=[l, b, h], currldc_x=0, currldc_y=0))
        suffix = str(epoch) + '_' + str(episode_num) + '_' + str(step) + '.npy'
        np.save('Train_annot/annot_' + suffix, score, allow_pickle=True)
        img = get_final_img([l, b, h], ldc, 0)
        np.save('Train/train_' + suffix, img, allow_pickle=True)
|
import os
import json
from multiprocessing import Pool
import billboard
import datetime
from PyLyrics import PyLyrics
from yt import get_stats
def get_data(params):
    """Fetch lyrics and YouTube stats for one chart entry.

    params: (artist, title, rank, year) tuple. Returns a dict of the
    collected fields; 'complete' is True only when every other field
    was populated. Lookup failures are tolerated and leave None values.
    """
    artist, title, rank, year = params
    print("Fetching data for {}:{}".format(rank, title))
    data = {
        'artist': artist,
        'title': title,
        'year': year,
        'rank': rank,
        'fetched_lyrics': None,
        'youtube_link': None,
        'views': None,
        'likes': None,
        'dislikes': None,
        'updated': str(datetime.datetime.now()),
        'complete': False
    }
    try:
        data['fetched_lyrics'] = PyLyrics.getLyrics(artist, title)
    except Exception:
        # Best-effort: keep None on failure, but no longer swallow
        # KeyboardInterrupt/SystemExit as the bare `except:` did.
        pass
    try:
        # get youtube stats
        data.update(get_stats(artist, title))
    except Exception:
        pass
    # set complete if all data is present
    if all(data[k] is not None for k in data if k != 'complete'):
        data['complete'] = True
    return data
def get_data_for_date(date, chartname='hot-100-songs'):
    """Fetch the year-end chart for ``date`` and gather per-song data in
    parallel with a process pool.

    Fixes: the original print line ended with a stray '\\' continuation that
    glued it onto the next statement (SyntaxError); the Pool is now closed
    via its context manager instead of being leaked.
    """
    print("Fetching chart {} {}..".format(date, chartname))
    chart = billboard.ChartData(name=chartname, date=date, yearEnd=True)
    params = [(e.artist, e.title, e.rank, date) for e in chart]
    with Pool() as pool:
        return pool.map(get_data, params)
if __name__ == '__main__':
    # Dump one JSON file of chart data per year in [2010, 2018).
    years = range(2010, 2018)
    for year in years:
        data = get_data_for_date(str(year))
        with open('{}_data.json'.format(year), 'w') as f:
            f.write(json.dumps(data))
|
class Solution:
    def totalNQueens(self, n):
        """
        Count the distinct solutions to the n-queens puzzle.

        :type n: int
        :rtype: int

        Improvement: count solutions directly instead of materializing every
        full placement only to take len(result), and track occupied columns
        and diagonals in sets for O(1) membership tests (the original used
        lists, giving O(n) scans per candidate square).
        """
        def dfs(row, cols, diag, anti):
            # Try to place a queen on `row`; cols/diag/anti hold occupied
            # columns and the two diagonal families (col-row / col+row).
            if row == n:
                return 1
            count = 0
            for col in range(n):
                if col in cols or (col - row) in diag or (col + row) in anti:
                    continue
                cols.add(col)
                diag.add(col - row)
                anti.add(col + row)
                count += dfs(row + 1, cols, diag, anti)
                cols.remove(col)
                diag.remove(col - row)
                anti.remove(col + row)
            return count

        return dfs(0, set(), set(), set())
class Solution:
    def nextPermutation(self, nums):
        """
        Rearrange nums into the next lexicographic permutation, in place.
        Wraps around to the sorted order when nums is already the largest.
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        if len(nums) <= 1:
            return
        # Scan right-to-left for the first ascent; that element is swapped
        # with the smallest larger value to its right, and the tail resorted.
        for pivot in range(len(nums) - 2, -1, -1):
            if nums[pivot] < nums[pivot + 1]:
                successor = min(v for v in nums[pivot + 1:] if v > nums[pivot])
                swap_at = nums.index(successor, pivot + 1)
                nums[pivot], nums[swap_at] = nums[swap_at], nums[pivot]
                nums[pivot + 1:] = sorted(nums[pivot + 1:])
                return
        # Entirely non-increasing: wrap to the smallest permutation.
        nums.reverse()
Solution().nextPermutation([5,4,7,5,3,2]) |
# Definition for a binary tree node.
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution(object):
    def maxDepth(self, root):
        """
        Return the number of nodes on the longest root-to-leaf path
        (0 for an empty tree).
        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        return 1 + max(self.maxDepth(root.left), self.maxDepth(root.right))
# Smoke test: a single-node tree has depth 1.
a = TreeNode(1)
print(Solution().maxDepth(a))
from twisted.internet import reactor, defer
def multiplyByThree(x):
    """Return a Deferred that fires with x * 3 after a 2-second delay."""
    d = defer.Deferred()
    # Schedule d.callback(x * 3) on the reactor's clock.
    reactor.callLater(2, d.callback, x * 3)
    return d
def printData(d):
    """Callback: print the value delivered through the Deferred chain."""
    print(d)
# Kick off the asynchronous multiply and print the result when it arrives.
d = multiplyByThree(3)
d.addCallback(printData)
# manually set up the end of the process by asking the reactor to
# stop itself in 4 seconds time
reactor.callLater(4, reactor.stop)
# start up the Twisted reactor (event loop handler) manually
reactor.run()
import turtle
import sys
#sys.setExecutionLimit(1500000)
def seq3np1(n):
    """Return the number of steps the 3n+1 (Collatz) sequence starting at n
    takes to reach 1 (0 when n is already 1)."""
    steps = 0
    while n != 1:
        n = n // 2 if n % 2 == 0 else 3 * n + 1
        steps += 1
    return steps
def drawTurtle(t, height):
    """ Get turtle t to draw one bar, of height.

    Draws a 20-unit-wide filled bar labelled with its height, then moves
    the pen 10 units to the right so the next bar does not touch this one.
    The command sequence is strictly order-dependent.
    """
    t.write(str(height)) # write x,y values on bottom
    # Hop up 10 units (pen raised) so the label is not drawn over.
    t.up()
    t.left(90)
    t.forward(10)
    t.right(90)
    t.down()
    t.begin_fill() # start filling this shape
    # Trace three sides of the bar: up, across, back down.
    t.left(90)
    t.forward(height)
    t.right(90)
    t.forward(20)
    t.right(90)
    t.forward(height)
    # t.left(90)
    t.end_fill() # stop filling this shape
    # Gap before the next bar; finish facing the original heading.
    t.up()
    t.forward(10)
    t.left(90)
    t.down()
# Draw a bar chart of Collatz sequence lengths for n = 1..upbound.
window = turtle.Screen()
awesome = turtle.Turtle()
awesome.color('red')
awesome.fillcolor('pink')
# awesome.pensize(2)
awesome.speed(100)
upbound = 50     # highest starting value plotted
border = 2       # margin around the drawing area
numbars = upbound
maxheight = 120  # world-coordinate ceiling for the tallest bar
maxsofar = 0     # longest sequence seen so far
window.bgcolor("lightblue")
# Map world coordinates so each 20-unit bar fits without manual scaling.
window.setworldcoordinates(0 - border, 0 - border, 20 * numbars + border, maxheight + border)
for i in range(1, upbound + 1):
    start = i
    result = seq3np1(start)
    drawTurtle(awesome, result)
    if result > maxsofar:
        maxsofar = result
        # print(" value:", start, " number of items:", xs)
# Report the longest sequence length found in the range.
print(maxsofar)
window.exitonclick()
|
#!/usr/bin/python3
"""Provides a function to create an object from a JSON string"""
import json
def from_json_string(my_str):
    """Deserialize a JSON document string and return the resulting object."""
    obj = json.loads(my_str)
    return obj
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-19 01:54
from __future__ import unicode_literals
import datetime
import django.core.validators
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated migration: adds a 'flag' column to webapp.Team.
    dependencies = [
        ('webapp', '0003_team'),
    ]
    operations = [
        migrations.AddField(
            model_name='team',
            name='flag',
            # NOTE(review): the one-off default is a datetime coerced into a
            # TextField validated as a URL — looks like an auto-filled
            # placeholder from makemigrations; confirm existing rows were
            # backfilled with real URLs.
            field=models.TextField(default=datetime.datetime(2016, 7, 19, 1, 54, 4, 892801, tzinfo=utc), validators=[django.core.validators.URLValidator()], verbose_name='Flag'),
            preserve_default=False,
        ),
    ]
|
from nested_inline.admin import NestedModelAdmin
class CustomNestedModelAdmin(NestedModelAdmin):
    def add_nested_inline_formsets(self, request, inline, formset, depth=0):
        # Recursively attach nested inline formsets to each form in
        # `formset`, so templates can render inlines-within-inlines.
        # Recursion is capped to guard against cyclic inline definitions.
        if depth > 5:
            raise Exception("Maximum nesting depth reached (5)")
        for form in formset.forms:
            nested_formsets = []
            for nested_inline in inline.get_inline_instances(request):
                InlineFormSet = nested_inline.get_formset(request, form.instance)
                # Prefix namespaces the nested formset's POST keys under its
                # parent form's prefix.
                prefix = "%s-%s" % (form.prefix, InlineFormSet.get_default_prefix())
                # Bind POST data only when this nested formset was actually
                # submitted (some key starts with our prefix).
                if request.method == 'POST' and any(s.startswith(prefix) for s in request.POST.keys()):
                    nested_formset = InlineFormSet(request.POST, request.FILES,
                                                   instance=form.instance,
                                                   prefix=prefix, queryset=nested_inline.get_queryset(request), request = request) #TODO
                else:
                    nested_formset = InlineFormSet(instance=form.instance,
                                                   prefix=prefix, queryset=nested_inline.get_queryset(request)) #TODO
                nested_formsets.append(nested_formset)
                # Recurse one level deeper if this inline declares its own
                # inlines.
                if nested_inline.inlines:
                    self.add_nested_inline_formsets(request, nested_inline, nested_formset, depth=depth + 1)
            # Expose the collected formsets on the form for the template.
            form.nested_formsets = nested_formsets
|
# from os import getcwd
# print(getcwd())
from collections import Counter
import re
# Exercise 69: analyse gene strings read from the data file.
with open('zbior_zadan/69_geny/dane_geny.txt') as file:
    genes = file.read().split()
# 69.1 -- count distinct gene lengths ("species") and the largest group.
species = [len(gene) for gene in genes]
length_counts = Counter(species)
print("69.1\nliczba wszystkich gatunkow: {}\nnajwiecej osobnikow: {}".format(len(length_counts.values()), max(length_counts.values())))
# 69.2 -- count genes matching the mutation pattern.
count = sum(1 for gene in genes if re.search("AA*.BCDDC.*BB", gene))  # wip
print("zmutowane: {}".format(count))
# 69.3
|
from dataclasses import dataclass
from math import pi
from rlbot.utils.game_state_util import GameState, BallState, CarState, Physics, Vector3, Rotator, GameInfoState
from rlbottraining.common_exercises.common_base_exercises import GoalieExercise
from rlbottraining.rng import SeededRandomNumberGenerator
from rlbottraining.training_exercise import Playlist
@dataclass
class GoldBallRollingToGoalie(GoalieExercise):
    # Goalie drill: the ball starts near goal with a fast inbound velocity
    # while the car begins further back, driving toward its own net.
    def make_game_state(self, rng: SeededRandomNumberGenerator) -> GameState:
        # Tighten the second grader's time limit so the save must be quick.
        self.grader.graders[1].max_duration_seconds = 2
        return GameState(
            game_info=GameInfoState(game_speed=1),
            ball=BallState(physics=Physics(
                # Slight horizontal jitter so repeated runs differ.
                location=Vector3(rng.uniform(-100, 100), -500, 800),
                velocity=Vector3(rng.uniform(-100, 100), -1500, 900), #
                angular_velocity=Vector3(0, 0, 0))),
            cars={
                0: CarState(
                    physics=Physics(
                        location=Vector3(rng.uniform(-300, 300), -3000, 17),
                        rotation=Rotator(0, pi * -0.5, 0),
                        velocity=Vector3(0, -1500, 0),
                        angular_velocity=Vector3(0, 0, 0)),
                    boost_amount=60)
            },
        )
def make_default_playlist() -> Playlist:
    """Build the playlist run by default: a single goalie exercise."""
    exercises = [
        GoldBallRollingToGoalie('GoldBallRollingToGoalie'),
    ]
    return exercises
produtos = ('Caderno', 11.5, 'Borracha', 0.56, 'Régua', 5.99, 'Lápis', 1.5, 'Adesivo', 99.5)
print('-' * 30)
print(f'{"LISTAGEM DE PREÇOS":^30}')
print('-' * 30)
# Names sit at the even indexes, their prices at the following odd ones.
for nome, preco in zip(produtos[::2], produtos[1::2]):
    print(f'{nome:.<20}:', end=' ')
    print(f'R$ {preco:>5.2f}')
print('-' * 30)
from clean import data,pd,np,names
print("Running pre.py\n")
print("The Data Has 8 Features along with an Index Column(1st Column) : ")
print(names)
# Number of rows/columns of the cleaned data from clean.py.
row,columns=np.shape(data)
print("\nThe size of each Feature in the Data is : ",row)
# Column 8 holds the country; flatten it to a 1-D array for counting.
d_allc=data.iloc[:,8:9]
d_allc=np.ravel(d_allc)
# Tally the rows per country: y collects the distinct country names,
# z the parallel per-country row counts, c the number of countries.
y=[];c=0;z=[];t=0;tar=0
while(t<row):
    i=d_allc[t]
    if i not in y:
        y.append(i)
        c=c+1
        z.append(1)
    else:
        # BUG FIX: np.int was removed in NumPy 1.24 (AttributeError on
        # modern NumPy); the builtin int is the documented replacement.
        # The redundant `elif i in y` is also collapsed to `else`.
        index=y.index(i)
        tar=int(z[index])
        tar=tar+1
        z[index]=tar
    t=t+1
print("The number of the countries are ",c)
print("\nThe Countries in the Data Set are : ",y)
print("The number of Country (samples-1) are : ",z)
print("The number of the countries are ",c)
# Basket matrix for the UK only: one row per customer, one column per
# product description, values = total quantity bought (0 when never bought).
d_uk =(data[data['Country'] == "United Kingdom"]
       .pivot_table(index="CustomerID",
                    columns="Description",
                    values="Quantity",
                    aggfunc="sum",
                    fill_value=0))
print("\nThe shape of UK data is : ",np.shape(d_uk))
print("\n\tEncoding Units in Data...",end="# ")
def encode_units(x):
    """Binarize a basket quantity: 1 if at least one unit was bought, else 0.

    The previous version fell through and returned None for 0 < x < 1;
    every input now maps explicitly to 0 or 1.
    """
    return 1 if x >= 1 else 0
# One-hot encode the basket matrix (bought / not bought).
uk_set = d_uk.applymap(encode_units)
try :
    # POSTAGE is a shipping line item, not a product — drop it.
    uk_set.drop('POSTAGE', inplace=True, axis='columns')
except:
    # NOTE(review): bare except silently hides any failure here, not just a
    # missing POSTAGE column — consider narrowing to KeyError.
    print("\n\t\t##Dropped all Waste##")
# Keep only customers with at least one non-zero quantity.
d_uk= d_uk[(d_uk.T !=0).any()]
#print(d_uk)
def ruler():
    # Interactively offer to mine association rules from the UK basket
    # matrix with mlxtend's apriori, saving them to rules.xlsx.
    global uk_set;
    choose=str(input("\n\t\tUse Apriori to define RULES(FOR UK only) ?\nEnter [y/n]: "))
    if choose=='y':
        from mlxtend.frequent_patterns import apriori
        from mlxtend.frequent_patterns import association_rules
        frequent_itemsets = apriori(uk_set, min_support=0.07, use_colnames=True)
        a_rules = association_rules(frequent_itemsets, metric="lift", min_threshold=1)
        a_rules.to_excel('rules.xlsx')
        # NOTE(review): this filtered frame is computed but never assigned
        # or used — presumably meant to narrow the saved rules.
        a_rules[(a_rules['lift'] >= 0.6) &
                (a_rules['confidence'] >= 0.8) ]
        print("\n\t\tALL The Rules are saved in 'rules.xlsx' file")
        print("\t\tThe Interpretation of the RULES are given in 'README.txt' file")
        rules=pd.DataFrame([a_rules.antecedents,a_rules.consequents,a_rules.support,a_rules.confidence]).transpose()
        print("##### SOME RULES : \n",rules.head(10))
    elif choose=='n':
        print("")
    else:
        # Invalid answer: re-prompt recursively.
        print("Wrong Choice...TRY AGAIN\n")
        ruler()
ruler()
## ## ### ##### #### #### #### ### #### # #### ### ## ### ## ## ### #### # ## ## ##
def train_test_split1(data,percent):
    """Hand-rolled random train/test split over `data` by its 'Sn' column.

    percent -- test share expressed as a PERCENTAGE (0-100); it is divided
    by 100 here.  Rows are sampled without replacement by repeatedly drawing
    random 'Sn' values and rejecting duplicates (can be slow for large
    splits).  Returns (train, test) DataFrames.
    """
    print("\nRunning Train_Test_Split...\n")
    global row;
    test_size= int((percent/100)*row)
    train_size=row-test_size
    print(" The size of Training Data is : ",train_size)
    print(" The size of Test Data is : ",test_size,)
    print(" Total : ",(train_size+test_size))
    length=0
    rows_t=[]
    rows_s=[]
    print("\n\tRandomly Choosing Training Data...\n")
    train=pd.DataFrame(rows_t,columns=names)
    while(length<train_size):
        # Draw a random serial number; keep it only if not already sampled.
        x1=np.random.choice(data['Sn'])
        if (((x1==train.Sn).any())==False):
            dict1=(data.iloc[x1-1:x1,:]).to_dict(orient='dict')
            rows_t.append(dict1)########
            length=length+1
            # NOTE(review): rebuilding the DataFrame every iteration is
            # O(n^2) overall — a set of chosen Sn values would be faster.
            train=pd.DataFrame(rows_t,columns=names)
            #print(np.shape(train))
        else:
            print("",end="")
    length=0
    test=pd.DataFrame(rows_s,columns=names)
    print("\tRandomly Choosing Test Data...\n")
    while(length<test_size):
        # Test rows must be absent from both the train and test sets so far.
        x1=np.random.choice(data['Sn'])
        if (((x1==train.Sn).any())==False) and (((x1==test.Sn).any())==False):
            dict2=(data.iloc[x1-1:x1,:]).to_dict(orient='dict')
            rows_s.append(dict2)
            length=length+1
            test=pd.DataFrame(rows_s,columns=names)
    print("\t\tThe Training and Test Data ARE Split")
    return train,test
################################################################################
# Inputs: one CustomerID per row; targets: that customer's basket vector.
inputs=d_uk.index.values.tolist()
inputs=np.asarray(inputs)
inputs=inputs.reshape(-1,1)
#print(type(inputs))
print("\n\t\t *INPUTS READY! SHAPE : ",np.shape(inputs))
targets=d_uk.reset_index()
targets=targets.drop('CustomerID',axis=1)
print("\t\t*TARGETS READY! SHAPE : ",np.shape(targets))
def choser():
    """Interactively pick a splitter and return x_train, x_test, y_train, y_test.

    'y' -> the hand-rolled train_test_split1 from this module (still marked
    not ready); 'n' -> sklearn's train_test_split; any other answer
    re-prompts.
    """
    global inputs;global targets;
    choose=str(input("\n\t\tUse Built-IN Splitter(NOT READY!!) ?\nEnter [y/n]: "))
    if choose=='y':
        print("\n##The Module is not Ready yet...")
        print("##Check the code of this module(train_test_split1) in : 'pre.py' \n")
        # NOTE(review): the result of this re-prompt is discarded and the
        # custom path below still runs — confirm the intended flow.
        choser()
        percent=int(input("\n\t\tEnter the TEST size(IN PERCENTAGE) : "))
        # BUG FIX: train_test_split1 divides by 100 itself; dividing here
        # too made the requested test share 100x too small.
        train,test=train_test_split1(data,percent)
        print("\nThe Training Data has shape :",np.shape(train))
        print("The Test Data has shape :",np.shape(test))
        train[names]=train[names].replace({'{':''},regex=True)
        train[names]=train[names].replace({'}':''},regex=True)
        train.to_excel('hello.xlsx')
        #train[names]=train[names].replace({'':''},regex=True)
        x_train = pd.DataFrame([train.InvoiceNo,train.StockCode,train.Quantity]).transpose()
        x_test = pd.DataFrame([test.InvoiceNo,test.StockCode,test.Quantity]).transpose()
        y_train = pd.DataFrame([train.Country]).transpose()
        y_test = pd.DataFrame([test.Country]).transpose()
    elif choose=='n':
        percent=int(input("\n\t\tEnter the TEST size(IN PERCENTAGE) : "))
        from sklearn.model_selection import train_test_split
        print("\n\tRandomly Choosing Training and Test Data...\n")
        # BUG FIX: sklearn interprets an int test_size as an ABSOLUTE sample
        # count; convert the entered percentage to a fraction.
        x_train,x_test,y_train,y_test=train_test_split(inputs,targets,test_size=percent/100,random_state=0)
        print("\t\tThe Training and Test Data ARE Split")
    else:
        print("Wrong Choice...TRY AGAIN\n")
        # BUG FIX: return the retry's result; falling through used to raise
        # NameError because x_train was never assigned on this path.
        return choser()
    return x_train,x_test,y_train,y_test
x_train,x_test,y_train,y_test=choser()
print("\n\t##Data PreProcessing Done.")
print("\t\tExiting pre.py\n")
'''
target_n=np.ravel(target_n)
target=[]
t=0
print("\nData Organizing...")
while(t<974):
i=target_n[t]
if (i=='United Kingdom'):
target.append(1)
elif (i=='France'):
target.append(2)
elif(i=='Australia'):
target.append(3)
elif(i=='Netherlands'):
target.append(3)
else:
target.append(4)
t=t+1
X = data['InvoiceNo','StockCode','Quantity','UnitPrice']
Y = data.iloc[:, 7:8]
print(Y)
'''
|
# Generated by Django 2.1 on 2018-08-20 08:05
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: creates the Attendance model and adds User.global_id.
    dependencies = [
        ('users', '0002_auto_20171227_2246'),
    ]
    operations = [
        migrations.CreateModel(
            name='Attendance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('clock_in', models.DateTimeField(auto_now_add=True)),
                # NOTE(review): clock_out has no null=True/default, so a row
                # cannot be created before the employee clocks out — confirm
                # this is intended.
                ('clock_out', models.DateTimeField()),
            ],
        ),
        migrations.AddField(
            model_name='user',
            name='global_id',
            # 'test' was the one-off default supplied for existing rows.
            field=models.CharField(default='test', max_length=25),
            preserve_default=False,
        ),
    ]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# =============================================================================
# Created By : Krikor Herlopian
# Created Date: Wed May 12 2021
# Email Address: kherl1@unh.newhaven.edu
# =============================================================================
# Tally how often each whitespace-separated word occurs in the input line,
# then print the counts in alphabetical order.
counts = {}
text = input("Enter the string:\n")
for word in text.split():
    if word in counts:
        counts[word] += 1
    else:
        counts[word] = 1
for word in sorted(counts):
    print(word, ":", counts[word])
from gui.Gui import MyWindow
from PyQt5 import QtWidgets
import sys
import preprocesamiento
import libreria as lib
from excepciones import BadQuery, WrongInput, MovieError
class T03Window(MyWindow):
    # GUI front-end: runs movie queries through libreria and shows or saves
    # the results.
    def __init__(self):
        super().__init__()
    def process_query(self, queries):
        # Agrega en pantalla la solucion.
        # (Adds each query's result to the on-screen answer area; list
        # results are printed line by line, otherwise each element is
        # formatted as a movie.)
        results = [self.obtener_resultado(x) for x in queries]
        for n in range(len(results)):
            self.add_answer("Consulta {} \n".format(n + 1))
            for element in results[n]:
                if isinstance(results[n], list):
                    self.add_answer(element + "\n")
                else:
                    self.add_answer(self.str_movie(element))
            self.add_answer("\n")
    @staticmethod
    def str_movie(movie):
        # One-line textual rendering of a movie record.
        text = 'id: {}, title: {}, rating_imdb: {}, rating_metacritic: {}, ' \
               'rating_rt: {}, box_office: {}, date: {} \n'\
            .format(movie.id, movie.title, movie.rating_imdb,
                    movie.rating_metacritic, movie.rating_rt,
                    movie.box_office, movie.date)
        return text
    @staticmethod
    def obtener_resultado(query):
        # Run one query; domain errors are returned (not raised) so the GUI
        # can display them inline.
        try:
            result = lib.procesar_queries(query)
        except (BadQuery, WrongInput, MovieError) as err:
            result = err
        return result
    def save_file(self, queries):
        # Same rendering as process_query, but written to resultados.txt.
        queries_answers = [self.obtener_resultado(x) for x in queries]
        with open("resultados.txt", "w", encoding="utf-8") as file:
            for n in range(len(queries_answers)):
                file.write("Consulta {} \n".format(n + 1))
                for element in queries_answers[n]:
                    if isinstance(queries_answers[n], list):
                        file.write(element + "\n")
                    else:
                        file.write(self.str_movie(element))
if __name__ == '__main__':
    # Build the review cache, install a verbose excepthook for debugging,
    # then start the Qt event loop.
    preprocesamiento.reviews_writer()
    def hook(_type, value, traceback):
        print(_type)
        print(value)
        print(traceback)
    sys.__excepthook__ = hook
    app = QtWidgets.QApplication(sys.argv)
    window = T03Window()
    # NOTE(review): window.show() is never called here — presumably
    # MyWindow shows itself in __init__; confirm.
    sys.exit(app.exec_())
import json
import pandas as pd
def loadLabels():
    """Load the Alsafari 2020 train+test annotation CSVs as one DataFrame (train rows first)."""
    frames = [
        pd.read_csv('./data/Alsafari_2020/AH-Train.csv',sep=",", encoding="utf-8", dtype={'iD': object}),
        pd.read_csv('./data/Alsafari_2020/AH-Test.csv',sep=",", encoding="utf-8", dtype={'iD': object}),
    ]
    return pd.concat(frames)
def loadTexts():
    """Return the raw tweet objects from the Twitter API dump JSON file."""
    path = './data/Alsafari_2020/210218_Alsafari_2020_API_dump.json'
    with open(path) as json_file:
        return json.load(json_file)
def get_data_binary():
    """Pair each tweet text with its binary label: 'neutral' for class C, otherwise 'abusive'."""
    label_data = loadLabels()
    labels = {
        str(row['ID']): ('neutral' if row['2-Class'] == 'C' else 'abusive')
        for _, row in label_data.iterrows()
    }
    return [
        {'text': tweet['full_text'], 'label': labels[tweet['id_str']]}
        for tweet in loadTexts()
    ]
def get_data():
    """Return [{'text', 'label', 'id'}, ...] for the 3-class task (O/C/H only)."""
    class_names = {'O': 'offensive', 'C': 'clean', 'H': 'hateful'}
    labels = {}
    for _, row in loadLabels().iterrows():
        label = class_names.get(row['3-Class'])
        # Rows with any other class marker get no label entry, exactly as
        # before (tweets referencing them raise KeyError downstream).
        if label is not None:
            labels[str(row['ID'])] = label
    result = []
    for tweet in loadTexts():
        result.append({'text': tweet['full_text'],
                       'label': labels[tweet['id_str']],
                       'id': str(tweet['id_str'])})
    return result
def get_complete_data():
    """Return label/id pairs for every annotated row (no tweet text required)."""
    class_names = {'O': 'offensive', 'C': 'clean', 'H': 'hateful'}
    rows = []
    for _, row in loadLabels().iterrows():
        label = class_names.get(row['3-Class'])
        if label is not None:
            rows.append({'label': label, 'id': str(row['ID'])})
    return rows
def get_available_data():
    """Like get_data, but each record also carries the tweet author's user id."""
    class_names = {'O': 'offensive', 'C': 'clean', 'H': 'hateful'}
    labels = {}
    for _, row in loadLabels().iterrows():
        label = class_names.get(row['3-Class'])
        if label is not None:
            labels[str(row['ID'])] = label
    return [
        {'text': tweet['full_text'],
         'label': labels[tweet['id_str']],
         'user': {'id': str(tweet['user']['id'])},
         'id': str(tweet['id_str'])}
        for tweet in loadTexts()
    ]
|
"""
This is the people module and supports all the REST actions for the
people data
"""
from flask import abort
from config import db
from models import InvoiceModel, InvoiceInvoiceItemModel
from schemas import Invoice_Schema
def get_all():
    # Return every invoice serialized as a list of dicts.
    invoices = InvoiceModel.query.all()
    invoices_schema = Invoice_Schema(many=True)
    # NOTE(review): .dump(...).data is the marshmallow<3 API; marshmallow 3
    # returns the data directly from dump().
    data = invoices_schema.dump(invoices).data
    return data
def create_invoice_item(invoice, invoice_item_body):
    # Persist one invoice<->invoice-item junction row and commit it.
    invoice_invoiceitem = InvoiceInvoiceItemModel(
        invoiceId=invoice.invoiceId,
        invoiceItemId=invoice_item_body["invoiceItemId"]
    )
    db.session.add(invoice_invoiceitem)
    db.session.commit()
def create_invoice_items(invoice, invoice_items_body):
    """Persist one junction row per entry in invoice_items_body."""
    for body in invoice_items_body:
        create_invoice_item(invoice, body)
def create(body):
    # Create an invoice from the request body, then attach its items.
    # Returns (serialized invoice, 201).
    invoice = InvoiceModel(
        electionId=body["electionId"],
        issuingOfficeId=body["issuingOfficeId"],
        receivingOfficeId=body["receivingOfficeId"],
        issuedTo=body["issuedTo"]
    )
    # Add the entry to the database
    db.session.add(invoice)
    db.session.commit()
    # Items are linked after the commit so invoice.invoiceId is populated.
    create_invoice_items(invoice, body["invoiceItems"])
    return Invoice_Schema().dump(invoice).data, 201
def update(tallySheetId, body):
    """
    Append new version to the tally sheet.
    """
    # NOTE(review): this function looks copied from a tally-sheet module:
    # create_tallysheet_version, TallySheetVersionSchema and new_tallysheet
    # are not defined anywhere in this file, so reaching the bottom of this
    # function raises NameError.  It needs either those imports or a
    # rewrite against the invoice models — cannot be fixed from here.
    # Get the tally sheet
    tallySheet = InvoiceModel.query.filter(
        InvoiceModel.invoiceId == tallySheetId
    ).one_or_none()
    if tallySheet is None:
        abort(
            404,
            "Tally Sheet not found for Id: {tallySheetId}".format(tallySheetId=tallySheetId),
        )
    create_tallysheet_version(body, tallySheet)
    schema = TallySheetVersionSchema()
    return schema.dump(new_tallysheet).data, 201
|
"""
Main application file
DO NOT TOUCH/EDIT
"""
from flask import Flask
from config import Config
from routes import handler
from models import db
def create_app(config):
    # Application factory: builds and configures the Flask app.
    # (File header says DO NOT TOUCH — comments only added here.)
    # Initializes flask object with config
    app = Flask(
        Config.APP_NAME,
        template_folder=Config.TEMPLATE_FOLDER,
        static_folder=Config.TEMPLATE_FOLDER
    )
    # Loads the needed config in the configuration file
    app.config.from_object(Config)
    # Initializes database, will use MySQL for now
    db.init_app(app)
    # Handles CORS and other Cross Origin settings
    # Uses the after request decorator
    @app.after_request
    def after_request(response):
        response.headers.add('Access-Control-Allow-Origin', app.config['ALLOWED_ORIGINS'])
        response.headers.add('Access-Control-Allow-Headers', ','.join(app.config['ALLOWED_HEADERS']))
        response.headers.add('Access-Control-Allow-Methods', ','.join(app.config['ALLOWED_METHODS']))
        return response
    app.register_blueprint(handler)
    return app
if __name__ == '__main__':
    # Dev entry point: serve on all interfaces without the reloader.
    app = create_app(None)
    app.run(host='0.0.0.0', use_reloader=False, threaded=True)
|
# Keep reading integers and accumulating them until the user types 0,
# then report the total (the terminating 0 contributes nothing).
acumulador = 0
while True:
    numero = int(input("Digite um número: "))
    if numero == 0:
        break
    acumulador = acumulador + numero
print("A soma de todos os números recebidos é: ", acumulador)
from django.contrib.auth.models import User
from .models import Article, Like, Comment
from rest_framework import serializers
class ArticleSerializer(serializers.ModelSerializer):
    # Serializes Article rows for the REST API.
    class Meta:
        model = Article
        fields = ('author', 'publication_date', 'headline', 'content', 'published')
class LikeSerializer(serializers.ModelSerializer):
    # Serializes Like rows (who liked which article).
    class Meta:
        model = Like
        fields = ('liked_article', 'liked_by', 'like')
class CommentSerializer(serializers.ModelSerializer):
    # Serializes Comment rows attached to articles.
    class Meta:
        model = Comment
        fields = ('commented_article', 'commented_by', 'comment', 'commented_date')
class UserSerializer(serializers.ModelSerializer):
    # Reverse relation to the user's articles, writable via primary keys.
    article = serializers.PrimaryKeyRelatedField(many=True, queryset=Article.objects.all())
    class Meta:
        model = User
        # BUG FIX: 'snippets' (left over from the DRF tutorial this was
        # copied from) is not a field here; the declared field is 'article'.
        # Listing an unknown name makes DRF raise ImproperlyConfigured.
        fields = ('id', 'username', 'article')
|
import os, sys
stext = '<joint name="double_stereo_frame_joint" type="fixed">'
rtext = '<joint name="openni_rgb_frame_joint" type="fixed">\n\
<origin rpy="0.0074253744 0.0418016634 -0.0065419807" xyz="0.0440562178 -0.0135760086 0.1129906398"/>\n\
<parent link="head_plate_frame"/>\n\
<child link="openni_rgb_frame"/>\n\
</joint>\n\
<link name="openni_rgb_frame">\n\
<inertial>\n\
<mass value="0.01"/>\n\
<origin xyz="0 0 0"/>\n\
<inertia ixx="0.001" ixy="0.0" ixz="0.0" iyy="0.001" iyz="0.0" izz="0.001"/>\n\
</inertial>\n\
<visual>\n\
<origin rpy="0 0 0" xyz="0 0 0"/>\n\
<geometry>\n\
<box size="0.01 0.01 0.01"/>\n\
</geometry>\n\
</visual>\n\
</link>\n\
<joint name="openni_rgb_optical_frame_joint" type="fixed">\n\
<origin rpy="-1.5707963268 -0.0000000000 -1.5707963268" xyz="0.0000000000 0.0000000000 0.0000000000"/>\n\
<parent link="openni_rgb_frame"/>\n\
<child link="openni_rgb_optical_frame"/>\n\
</joint>\n\
<link name="openni_rgb_optical_frame"/>\n\
<joint name="double_stereo_frame_joint" type="fixed">'
def serchReplace(path):
input = open(path)
print "urdf:", path
out = path+'.tmp'
output = open(out, 'w')
for s in input.xreadlines():
output.write(s.replace(stext, rtext))
input.close()
output.close()
os.rename(out, path)
def usage():
    # Print command-line usage (Python 2 print statement).
    print "\nUsage: python ", sys.argv[0], "<robot_uncalibrated_x.x.x.xml> \n"
if __name__ == "__main__":
    # Expect exactly one argument: the URDF/XML file to patch in place.
    if sys.argv.__len__() == 1:
        usage()
        sys.exit(2)
    path = sys.argv[1]
    serchReplace(path)
|
# Generated by Django 3.0.3 on 2020-02-11 11:37
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Caption model
    # (crowdsourced image captions keyed by image and worker).
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Caption',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('img_id', models.CharField(max_length=256, unique=True)),
                ('worker', models.CharField(max_length=64)),
                ('caption', models.CharField(max_length=2048)),
            ],
        ),
    ]
|
# Cheapest way to buy N eggs across M stores, where each store sells a
# six-pack for `a` and a single egg for `b`.  Track the cheapest pack and
# single prices seen so far and consider: singles only, packs only
# (rounded up), and exact packs plus remaining singles.
N, M = [int(tok) for tok in input().split(' ')]
six = 987654321   # cheapest six-pack price so far (sentinel = "infinity")
one = 987654321   # cheapest single-egg price so far
ans = 987654321   # best total cost so far
for _ in range(M):
    a, b = [int(tok) for tok in input().split(' ')]
    six = min(six, a)
    one = min(one, b)
    ans = min(ans, N*one, (N//6 + 1)*six, (N//6)*six + (N%6)*one)
print (ans)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import decoratucasa.models
class Migration(migrations.Migration):
    # Auto-generated: creates the newsletter models (Boletin, Evento,
    # Oferta, Suscriptor).
    dependencies = [
        ('decoratucasa', '0002_auto_20141120_0233'),
    ]
    operations = [
        migrations.CreateModel(
            name='Boletin',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('archivo', models.FileField(upload_to=decoratucasa.models.nombre_modificar)),
                ('descripcion', models.TextField(max_length=900)),
                ('fecha', models.DateField(auto_now_add=True)),
            ],
            options={
                'verbose_name': 'Boletin',
                'verbose_name_plural': 'Boletines',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Evento',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('descripcion', models.TextField(max_length=900)),
                ('fecha', models.DateField(auto_now_add=True)),
                ('fecha_inicio', models.DateField(auto_now_add=True)),
                ('fecha_final', models.DateField()),
            ],
            options={
                'verbose_name': 'Evento',
                'verbose_name_plural': 'Eventos',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Oferta',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('producto', models.CharField(max_length=50)),
                ('precio', models.DecimalField(max_digits=5, decimal_places=2)),
                ('fecha', models.DateField(auto_now_add=True)),
                # NOTE(review): auto_now=True here (updates on every save),
                # while Evento uses auto_now_add — confirm this asymmetry is
                # intentional before relying on fecha_inicio.
                ('fecha_inicio', models.DateField(auto_now=True)),
                ('fecha_final', models.DateField()),
            ],
            options={
                'verbose_name': 'Oferta',
                'verbose_name_plural': 'Ofertas',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Suscriptor',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('activo', models.BooleanField(default=True)),
                ('email', models.EmailField(max_length=254)),
                ('fecha', models.DateField(auto_now_add=True)),
            ],
            options={
                'verbose_name': 'Suscriptor',
                'verbose_name_plural': 'Suscriptores',
            },
            bases=(models.Model,),
        ),
    ]
|
import falcon
from sdnms_api.resources.base_resource import BaseResource
class HealthResource(BaseResource):
    """Liveness-probe endpoint: always reports the service as healthy."""
    def on_get(self, req, resp):
        # Static payload; no backend checks are performed.
        resp.status = falcon.HTTP_200
        resp.body = '{"result":"OK"}'
|
import json
import os.path
from typing import cast
SUSPICIOUS_KEYWORDS: dict[str, int] = {
"login": 25,
"log-in": 25,
"sign-in": 25,
"signin": 25,
"account": 25,
"verification": 25,
"verify": 25,
"webscr": 25,
"password": 25,
"credential": 25,
"support": 25,
"activity": 25,
"security": 25,
"update": 25,
"authentication": 25,
"authenticate": 25,
"authorize": 25,
"wallet": 25,
"alert": 25,
"purchase": 25,
"transaction": 25,
"recover": 25,
"unlock": 25,
"confirm": 20,
"live": 15,
"office": 15,
"service": 15,
"manage": 15,
"portal": 15,
"invoice": 15,
"secure": 10,
"customer": 10,
"client": 10,
"bill": 10,
"online": 10,
"safe": 10,
"form": 10,
"appleid": 70,
"icloud": 60,
"iforgot": 60,
"itunes": 50,
"apple": 30,
"office365": 50,
"microsoft": 60,
"windows": 30,
"protonmail": 70,
"tutanota": 60,
"hotmail": 60,
"gmail": 70,
"outlook": 60,
"yahoo": 60,
"google": 60,
"yandex": 60,
"twitter": 60,
"facebook": 60,
"tumblr": 60,
"reddit": 60,
"youtube": 40,
"linkedin": 60,
"instagram": 60,
"flickr": 60,
"whatsapp": 60,
"localbitcoin": 70,
"poloniex": 60,
"coinhive": 70,
"bithumb": 60,
"kraken": 50,
"bitstamp": 60,
"bittrex": 60,
"blockchain": 70,
"bitflyer": 60,
"coinbase": 60,
"hitbtc": 60,
"lakebtc": 60,
"bitfinex": 60,
"bitconnect": 60,
"coinsbank": 60,
"paypal": 70,
"moneygram": 60,
"westernunion": 60,
"bankofamerica": 60,
"wellsfargo": 60,
"citigroup": 60,
"santander": 60,
"morganstanley": 60,
"barclays": 50,
"hsbc": 50,
"scottrade": 60,
"ameritrade": 60,
"merilledge": 60,
"bank": 15,
"amazon": 60,
"overstock": 60,
"alibaba": 60,
"aliexpress": 60,
"leboncoin": 70,
"netflix": 70,
"skype": 60,
"github": 60,
"onedrive": 60,
"dropbox": 60,
"cgi-bin": 50,
"-com.": 20,
".net-": 20,
".org-": 20,
".com-": 20,
".net.": 20,
".org.": 20,
".com.": 20,
".gov-": 30,
".gov.": 30,
".gouv-": 40,
"-gouv-": 40,
".gouv.": 40,
"suivi": 50,
"laposte": 50,
"docomo": 50,
"jcb": 30,
"jibun": 50,
"kuroneko": 60,
"mitsui": 50,
"mizuho": 50,
"mufg": 50,
"ntt": 30,
"rakuten": 50,
"sagawa": 60,
"saison": 50,
"smbc": 50,
"softbank": 50,
"sumimoto": 50,
"webmoney": 50,
}
SUSPICIOUS_TLDS: list[str] = [
"ga",
"gq",
"ml",
"cf",
"tk",
"xyz",
"pw",
"cc",
"club",
"work",
"top",
"support",
"bank",
"info",
"study",
"party",
"click",
"country",
"stream",
"gdn",
"mom",
"xin",
"kim",
"men",
"loan",
"download",
"racing",
"online",
"center",
"ren",
"gb",
"win",
"review",
"vip",
"tech",
"science",
"business",
]
def load_warning_list(path: str) -> list[str]:
    """Read a JSON file of the form {"list": [...]} and return that list.

    Returns an empty list when the "list" key is absent.
    """
    with open(path) as fp:
        payload = json.load(fp)
    return cast(list[str], payload.get("list", []))
# Resolve the data directory relative to this module, then load the
# allow-lists of well-known domains used to down-rank false positives.
current_dir = os.path.abspath(os.path.dirname(__file__))
ALEXA_TOP_DOMAINS: list[str] = load_warning_list(
    os.path.join(current_dir, "./data/alexa.json")
)
MS_DOMAINS: list[str] = load_warning_list(os.path.join(current_dir, "./data/ms.json"))
OTHER_DOMAINS: list[str] = load_warning_list(
    os.path.join(current_dir, "./data/other.json")
)
# Union of all allow-lists: domains considered reputable.
HIGH_REPUTATION_DOMAINS: list[str] = ALEXA_TOP_DOMAINS + MS_DOMAINS + OTHER_DOMAINS
|
import tensorflow as tf
import numpy as np
# NOTE(review): this uses the TensorFlow 1.x graph/Session API, which was
# removed from TF 2 (available only via tf.compat.v1) — confirm the target
# TF version.
# Create constant ops (a 1x2 and a 2x1 matrix).
m1 = tf.constant([[3,3]])
m2 = tf.constant([[2],[2]])
# Matrix-multiplication op; in TF1 this only builds the graph node.
product = tf.matmul(m1,m2)
print(product)
# Define a session, which launches the default graph.
ss = tf.Session()
# Execute the matmul by calling the session's run method.
result = ss.run(product)
print(result)
ss.close() # must be closed manually
with tf.Session() as sess:
    result = sess.run(product)
    print(result)
# ----------- Variables --------------
x = tf.Variable([1,2])
a = tf.constant([3,3])
sub = tf.subtract(x,a)
add = tf.add(x,a)
# Variables must be initialized first, otherwise:
# "Attempting to use uninitialized value Variable"
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    print(sess.run(sub))
    print(sess.run(add))
input1 = tf.constant(3.0)
input2 = tf.constant(2.0)
input3 = tf.constant(5.0)
add = tf.add(input1,input2)
mul = tf.multiply(input1,add)
# Fetch: run multiple operations in a single session.run call.
with tf.Session() as sess:
    result = sess.run([mul,add])
    print(result)
input4 = tf.placeholder(tf.float32)
input5 = tf.placeholder(tf.float32)
output = tf.multiply(input4,input5)
# Feed: placeholders are filled at run time via feed_dict.
with tf.Session() as sess:
    print(sess.run(output,feed_dict = {input4:[8.0],input5:[2.]}))
x_data = np.random.rand(100)
y_data = x_data * 0.1 + 0.2
# Build a linear model y = k*x + b whose parameters will be learned.
b = tf.Variable(0.)
k = tf.Variable(0.)
y = k*x_data + b
# Quadratic (mean-squared-error) cost over all samples.
loss = tf.reduce_mean(tf.square(y_data - y)) # reduce_mean averages the squared residuals
# Gradient-descent optimizer with learning rate 0.2.
optimizer = tf.train.GradientDescentOptimizer(0.2)
# Training op: minimize the cost.
train = optimizer.minimize(loss=loss)
# Initialize the variables (note: must be created AFTER k and b exist).
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for step in range(201):
        sess.run(train)
        if step%20 == 0:
            # Every 20 steps print the current estimates of k and b
            # (they should converge toward 0.1 and 0.2).
            print(step,sess.run([k,b]))
|
import csv
from furl import furl
BASE_URL = 'https://imdb.com/title'
def getMovieLinkMap(linkFilename, movieFilename):
    """Map movie titles to their IMDB review-page URLs.

    linkFilename is a CSV mapping movieId -> imdbId; movieFilename is a CSV
    mapping movieId -> title.  Titles whose id has no link entry are omitted.
    """
    id_to_url = {}
    with open(linkFilename) as csvf:
        for row in csv.DictReader(csvf):
            # Build https://imdb.com/title/tt<imdbId>/reviews via furl.
            f = furl(BASE_URL)
            f.path.segments.append('tt{0}'.format(str(row['imdbId'])))
            f.path.segments.append('reviews')
            f.path.normalize()
            id_to_url[row['movieId']] = f.url
    title_to_url = {}
    with open(movieFilename) as csvf:
        for row in csv.DictReader(csvf):
            if row['movieId'] in id_to_url:
                title_to_url[row['title']] = id_to_url[row['movieId']]
    return title_to_url
def fillMovieLinks(inputFilename, outputFilename, movieLinkMap):
    """Convert the '::'-separated movie file to a CSV with review links.

    Movies whose title is missing from movieLinkMap get '-' as their link
    and are counted and reported as unknown.
    """
    fieldNames = ['movieId','title','genre','reviewLink']
    unknownMovies = 0
    # BUG FIX: the input file was opened and never closed; 'with' releases
    # both handles even if an exception interrupts the loop.
    with open(inputFilename, 'r') as inputFile, open(outputFilename, 'w') as csvf:
        writer = csv.DictWriter(csvf, fieldnames=fieldNames)
        writer.writeheader()
        for movie in inputFile.readlines():
            movieId, title, genre = movie.split('::')
            link = '-'
            if title in movieLinkMap:
                link = movieLinkMap[title]
            else:
                unknownMovies += 1
                print('not found for {0}'.format(title))
            writer.writerow({
                'movieId': movieId,
                'title': title,
                'genre': genre,
                'reviewLink': link
            })
    print('{0} unknown movies'.format(unknownMovies))
if __name__ == '__main__':
    # Build title -> review-URL map from MovieLens 25M, then annotate the
    # 1M-format movie file with those links.
    movieLinkMap = getMovieLinkMap('./movielens/ml-25m/links.csv','./movielens/ml-25m/movies.csv')
    fillMovieLinks('./movielens/ml-1m/movies-copy.dat', './dataset/movies.csv', movieLinkMap)
|
import requests
from bs4 import BeautifulSoup
def extract_news(soup):
    """ Extract news from a given web page

    Parses the Hacker News front-page markup: stories occupy groups of
    three <tr> rows.  Returns a list of dicts with author/title/comments/
    points/url, or None when the story table is missing.  Rows that fail
    to parse (ads, layout rows) are skipped via the AttributeError handler.
    """
    news_list = []
    try:
        table = soup.table.findAll('table')[1]
    except AttributeError:
        # Page layout not recognized — caller must handle the None.
        return
    # 30 stories per page, 3 rows each.
    for i in range(0, 89, 3):
        try:
            tr0 = table.findAll('tr')[i]
            tr1 = table.findAll('tr')[i + 1]
            td_for0 = tr0.findAll('td')[2]
            url = td_for0.a['href']
            # print(url)
            title = td_for0.a.text
            # print(title)
            td_for1 = tr1.findAll('td')[1]
            # Strip the trailing " points" suffix from e.g. "42 points".
            points = td_for1.find('span', {"class": "score"}).text
            if points:
                points = points.rsplit(' ', 1)[0]
            else:
                points = 0
            # print(points)
            author_ = td_for1.find('a', {"class": "hnuser"}).text
            if author_:
                author = author_
            else:
                author = None
            # print(author)
            comments = "0"
            comment = td_for1.findAll('a')[-1].text
            # NOTE(review): both branches assign `comment`'s value, so the
            # initial "0" is dead — presumably 'discuss' was meant to map
            # to "0" (no comments yet); confirm intent.
            if 'discuss' == comment:
                comments = 'discuss'
            else:
                comments = comment
            # print(comments)
            news = {'author': author,
                    'title': title,
                    'comments': comments,
                    'points': points,
                    'url': url}
            news_list.append(news)
        except AttributeError:
            # Non-story row (spacer/ad): skip it.
            pass
    # print(news_list)
    return news_list
def extract_next_page(soup):
    """Return the relative URL of the 'More' link on a Hacker News page."""
    more_link = soup.find('a', {"class": "morelink"})
    return more_link['href']
def get_news(url, n_pages=1):
    """ Collect news from a given web page

    Follows the 'More' link for up to n_pages pages starting at url and
    returns a flat list of the story dicts produced by extract_news.
    """
    news = []
    while n_pages:
        print("Collecting data from page: {}".format(url))
        response = requests.get(url)
        soup = BeautifulSoup(response.text, "html.parser")
        news_list = extract_news(soup)
        next_page = extract_next_page(soup)
        url = "https://news.ycombinator.com/" + next_page
        # BUG FIX: extract_news returns None when the page has no story
        # table; news.extend(None) used to raise TypeError.
        if news_list:
            news.extend(news_list)
        n_pages -= 1
    return news
|
#############################
# Python basics - loops
#############################
print('='*100)
a = [1,2,3,4,5]
for num in a:
    print ( num )
# Tuple unpacking inside a for loop.
a = [(1,2), (3,4), (5,6)]
for i,j in a:
    print( i, j )
a = range( 1, 10 )
print ( a )
# range(x,y) => x <= n <y
for num in range( 1, 10 ):
    print (num)
# Multiplication tables for 3 through 7 (e.g. 3 X 1 = 3, ...)
for x in range( 3, 8 ):
    for y in range( 1, 10 ):
        print ("%s X %s = %2s" % (x,y,x*y))
# Same tables condensed into a list comprehension and printed as a list.
print( [ x*y for x in range( 3, 8 ) for y in range( 1, 10 ) ])
|
from ED6ScenarioHelper import *
def main():
# 格兰赛尔
CreateScenaFile(
FileName = 'C4111 ._SN',
MapName = 'Grancel',
Location = 'C4111.x',
MapIndex = 1,
MapDefaultBGM = "ed60089",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'修女艾伦', # 9
'魔兽', # 10
'魔兽', # 11
'魔兽', # 12
'魔兽', # 13
'魔兽', # 14
'魔兽', # 15
'魔兽', # 16
'魔兽', # 17
'特务兵', # 18
'特务兵', # 19
'卡露娜', # 20
'亚妮拉丝', # 21
'库拉茨', # 22
'克鲁茨', # 23
'尤莉亚中尉', # 24
'亲卫队员', # 25
'亲卫队员', # 26
'亲卫队员', # 27
'亲卫队员', # 28
'亲卫队员', # 29
'亲卫队员', # 30
'亲卫队员', # 31
'亲卫队员', # 32
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 6000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT07/CH01410 ._CH', # 00
'ED6_DT09/CH10820 ._CH', # 01
'ED6_DT09/CH10821 ._CH', # 02
'ED6_DT07/CH00100 ._CH', # 03
'ED6_DT07/CH00101 ._CH', # 04
'ED6_DT07/CH00110 ._CH', # 05
'ED6_DT07/CH00111 ._CH', # 06
'ED6_DT07/CH00170 ._CH', # 07
'ED6_DT07/CH00172 ._CH', # 08
'ED6_DT07/CH01330 ._CH', # 09
'ED6_DT07/CH00102 ._CH', # 0A
'ED6_DT07/CH00112 ._CH', # 0B
'ED6_DT07/CH01240 ._CH', # 0C
'ED6_DT07/CH01630 ._CH', # 0D
'ED6_DT07/CH01260 ._CH', # 0E
'ED6_DT07/CH01620 ._CH', # 0F
'ED6_DT07/CH02090 ._CH', # 10
'ED6_DT07/CH01320 ._CH', # 11
'ED6_DT06/CH20116 ._CH', # 12
'ED6_DT06/CH20117 ._CH', # 13
)
AddCharChipPat(
'ED6_DT07/CH01410P._CP', # 00
'ED6_DT09/CH10820P._CP', # 01
'ED6_DT09/CH10821P._CP', # 02
'ED6_DT07/CH00100P._CP', # 03
'ED6_DT07/CH00101P._CP', # 04
'ED6_DT07/CH00110P._CP', # 05
'ED6_DT07/CH00111P._CP', # 06
'ED6_DT07/CH00170P._CP', # 07
'ED6_DT07/CH00172P._CP', # 08
'ED6_DT07/CH01330P._CP', # 09
'ED6_DT07/CH00102P._CP', # 0A
'ED6_DT07/CH00112P._CP', # 0B
'ED6_DT07/CH01240P._CP', # 0C
'ED6_DT07/CH01630P._CP', # 0D
'ED6_DT07/CH01260P._CP', # 0E
'ED6_DT07/CH01620P._CP', # 0F
'ED6_DT07/CH02090P._CP', # 10
'ED6_DT07/CH01320P._CP', # 11
'ED6_DT06/CH20116P._CP', # 12
'ED6_DT06/CH20117P._CP', # 13
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 9,
ChipIndex = 0x9,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 9,
ChipIndex = 0x9,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 13,
ChipIndex = 0xD,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 14,
ChipIndex = 0xE,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 15,
ChipIndex = 0xF,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 180,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclEvent(
X = 32110,
Y = -1000,
Z = -45500,
Range = 35850,
Unknown_10 = 0x7D0,
Unknown_14 = 0xFFFF84AE,
Unknown_18 = 0x0,
Unknown_1C = 2,
)
DeclEvent(
X = -88800,
Y = -1000,
Z = -3040,
Range = -85900,
Unknown_10 = 0x7D0,
Unknown_14 = 0xFFFFB7EE,
Unknown_18 = 0x0,
Unknown_1C = 5,
)
DeclEvent(
X = 70260,
Y = -1000,
Z = 32570,
Range = 56300,
Unknown_10 = 0x7D0,
Unknown_14 = 0x7602,
Unknown_18 = 0x0,
Unknown_1C = 6,
)
DeclActor(
TriggerX = -18470,
TriggerZ = 0,
TriggerY = -5070,
TriggerRange = 1500,
ActorX = -18470,
ActorZ = 1700,
ActorY = -5070,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 7,
Unknown_22 = 0,
)
ScpFunction(
"Function_0_4CE", # 00, 0
"Function_1_4F4", # 01, 1
"Function_2_507", # 02, 2
"Function_3_266C", # 03, 3
"Function_4_30F1", # 04, 4
"Function_5_3406", # 05, 5
"Function_6_3581", # 06, 6
"Function_7_36FD", # 07, 7
)
# Scene init script; registered as InitFunctionIndex = 0 in DeclEntryPoint
# above.  Dispatches to one of two startup events based on scenario flags.
def Function_0_4CE(): pass

label("Function_0_4CE")

# Conditional jump on scenario flag (0x7F, bit 2): either the three ops
# below run, or control skips to loc_4E5 -- NOTE(review): the Jc jump
# polarity is decompiler convention; confirm against the toolchain docs.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 2)), scpexpr(EXPR_END)), "loc_4E5")
# Opaque opcode writing value 0x54 to slot 0x1; semantics not visible here.
OP_4F(0x1, (scpexpr(EXPR_PUSH_LONG, 0x54), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
# Flag opcode on 0x3FA -- presumably clears/sets it; TODO confirm OP_A3.
OP_A3(0x3FA)
# Run scena function index 3 (Function_3_266C per the ScpFunction table).
Event(0, 3)

label("loc_4E5")
# Same pattern for scenario flag (0x7F, bit 3): flag op on 0x3FB, then
# run scena function index 4 (Function_4_30F1 per the ScpFunction table).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 3)), scpexpr(EXPR_END)), "loc_4F3")
OP_A3(0x3FB)
Event(0, 4)

label("loc_4F3")
Return()

# Function_0_4CE end
# Map entry hook; registered as EntryFunctionIndex = 1 in DeclEntryPoint
# above.  Executes a single opcode and returns.
def Function_1_4F4(): pass

label("Function_1_4F4")

# NOTE(review): OP_16's operand semantics are not visible in this file;
# confirm against the ED6 scenario opcode reference before editing.
OP_16(0x2, 0xFA0, 0xFFFDDD20, 0xFFFDDD20, 0x30064)
Return()

# Function_1_4F4 end
def Function_2_507(): pass
label("Function_2_507")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC2, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC2, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_266B")
EventBegin(0x0)
SetChrPos(0x8, 14740, 0, -49400, 225)
SetChrPos(0x9, 12040, 0, -49370, 90)
SetChrPos(0xA, 12070, 0, -51990, 45)
SetChrPos(0xB, 14800, 0, -52250, 0)
ClearChrFlags(0x8, 0x80)
ClearChrFlags(0x9, 0x80)
ClearChrFlags(0xA, 0x80)
ClearChrFlags(0xB, 0x80)
OP_A2(0x616)
ChrTalk(
0x8,
"……呀啊啊~~!\x02",
)
CloseMessageWindow()
TurnDirection(0x101, 0x8, 0)
TurnDirection(0x102, 0x8, 0)
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0x101,
"#000F是女人的惨叫!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F在这里面,赶快!\x02",
)
CloseMessageWindow()
Sleep(100)
Fade(1000)
OP_6D(13190, 0, -50600, 0)
OP_67(0, 9500, -10000, 0)
OP_6B(3070, 0)
OP_6C(282000, 0)
OP_6E(262, 0)
SetChrPos(0x101, 20850, 0, -44670, 0)
SetChrPos(0x102, 19400, 0, -43210, 0)
ChrTalk(
0x8,
"呀啊啊啊啊啊啊!\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"救命啊!\x01",
"谁来帮忙啊!\x02",
)
)
CloseMessageWindow()
SetChrChipByIndex(0x9, 2)
def lambda_6BD():
OP_92(0xFE, 0x8, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x9, 1, lambda_6BD)
Sleep(50)
SetChrChipByIndex(0xA, 2)
def lambda_6DC():
OP_92(0xFE, 0x8, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0xA, 1, lambda_6DC)
Sleep(100)
SetChrChipByIndex(0xB, 2)
def lambda_6FB():
OP_92(0xFE, 0x8, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0xB, 1, lambda_6FB)
SetChrChipByIndex(0x101, 3)
SetChrChipByIndex(0x102, 5)
def lambda_71A():
OP_8E(0xFE, 0x3B7E, 0x0, 0xFFFF3B8E, 0x2134, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_71A)
def lambda_735():
OP_8E(0xFE, 0x35DE, 0x0, 0xFFFF414C, 0x2134, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_735)
Sleep(600)
SetChrChipByIndex(0x101, 10)
SetChrChipByIndex(0x102, 11)
SetChrFlags(0x101, 0x1000)
SetChrFlags(0x102, 0x1000)
def lambda_769():
OP_99(0xFE, 0x0, 0xB, 0xBB8)
ExitThread()
QueueWorkItem(0x101, 2, lambda_769)
def lambda_779():
OP_99(0xFE, 0x0, 0xB, 0xBB8)
ExitThread()
QueueWorkItem(0x102, 2, lambda_779)
WaitChrThread(0x102, 0x1)
SetChrChipByIndex(0x9, 1)
def lambda_793():
OP_95(0xFE, 0xFFFFF830, 0x0, 0x0, 0x3E8, 0x1B58)
ExitThread()
QueueWorkItem(0x9, 1, lambda_793)
SetChrChipByIndex(0xB, 1)
def lambda_7B6():
OP_95(0xFE, 0x0, 0x0, 0xFFFFF830, 0x3E8, 0x1B58)
ExitThread()
QueueWorkItem(0xB, 1, lambda_7B6)
TurnDirection(0x101, 0xB, 0)
TurnDirection(0x102, 0x9, 0)
def lambda_7E2():
OP_8F(0xFE, 0x38AE, 0x0, 0xFFFF3B16, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_7E2)
def lambda_7FD():
OP_8F(0xFE, 0x35D4, 0x0, 0xFFFF3DF0, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_7FD)
Sleep(150)
SetChrChipByIndex(0xA, 1)
def lambda_822():
OP_95(0xFE, 0xFFFFFD44, 0x0, 0xFFFFFD44, 0x3E8, 0x1770)
ExitThread()
QueueWorkItem(0xA, 1, lambda_822)
WaitChrThread(0x101, 0x1)
ChrTalk(
0x8,
"哎……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F修女!\x01",
"已经没事了!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F很危险,\x01",
"所以请退到后面去!\x02",
)
)
CloseMessageWindow()
def lambda_8A0():
OP_92(0xFE, 0x8, 0x0, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0x9, 1, lambda_8A0)
def lambda_8B5():
OP_92(0xFE, 0x8, 0x0, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0xA, 1, lambda_8B5)
def lambda_8CA():
OP_92(0xFE, 0x8, 0x0, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0xB, 1, lambda_8CA)
Sleep(500)
Battle(0x3A3, 0x0, 0x0, 0x0, 0xFF)
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_END)),
(1, "loc_8F7"),
(SWITCH_DEFAULT, "loc_8FA"),
)
label("loc_8F7")
OP_B4(0x0)
Return()
label("loc_8FA")
AddParty(0x7, 0xFF)
SetChrPos(0x108, 22520, 0, -37100, 0)
SetChrFlags(0x108, 0x80)
SetChrFlags(0x9, 0x80)
SetChrFlags(0xA, 0x80)
SetChrFlags(0xB, 0x80)
SetChrPos(0x101, 14390, 0, -50980, 225)
SetChrPos(0x102, 12920, 0, -49800, 225)
SetChrPos(0x8, 14740, 0, -49400, 225)
OP_6D(13730, 0, -50080, 0)
OP_67(0, 9500, -10000, 0)
OP_6B(3070, 0)
OP_6C(282000, 0)
OP_6E(262, 0)
SetChrChipByIndex(0x101, 3)
SetChrChipByIndex(0x102, 5)
EventBegin(0x0)
ChrTalk(
0x101,
(
"#000F呼……\x01",
"真是厉害啊。\x02",
)
)
CloseMessageWindow()
SetChrChipByIndex(0x101, 65535)
TurnDirection(0x101, 0x8, 400)
ChrTalk(
0x101,
"#000F修女,你没事吧?\x02",
)
CloseMessageWindow()
SetChrChipByIndex(0x102, 65535)
def lambda_9FE():
TurnDirection(0xFE, 0x8, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_9FE)
ChrTalk(
0x8,
"啊,是的……多亏了你们。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"嗯……你们到底是……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F我们是游击士协会的人。\x02\x03",
"正在找人的途中,\x01",
"听到了你的惨叫,所以……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"是……这样啊……\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"…………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F怎、怎么了?\x01",
"看起来好像很没精神的样子……\x02\x03",
"难道受伤了?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"没有……\x01",
"多亏了你们,才平安无事的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"我是王都大圣堂的修女,\x01",
"名叫艾伦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"真是太感谢了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F啊哈哈。\x01",
"不用谢啦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F不过,\x01",
"作为圣职者的女性\x01",
"一个人来这种地方……\x02\x03",
"你没有和其他人\x01",
"一起来吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"是的,不好意思,\x01",
"只有我一个人……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"其实是因为大圣堂里\x01",
"调药用的草药没有了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"商店里也卖完了,\x01",
"所以才来这里采集的……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F这也太危险了。\x01",
"明明到处都是魔兽啊……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"不,以前这里\x01",
"没有这么多魔兽的……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"好像是从最近\x01",
" \x02",
)
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
ClearChrFlags(0x101, 0x1000)
ClearChrFlags(0x102, 0x1000)
SetChrChipByIndex(0x101, 3)
SetChrChipByIndex(0x102, 5)
ClearChrFlags(0x101, 0x1000)
ClearChrFlags(0x102, 0x1000)
Sleep(500)
def lambda_E13():
OP_8E(0xFE, 0x3CD2, 0x0, 0xFFFF3EB8, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 3, lambda_E13)
Sleep(100)
def lambda_E33():
OP_8E(0xFE, 0x37C8, 0x0, 0xFFFF4282, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x102, 3, lambda_E33)
OP_8C(0x8, 45, 400)
ChrTalk(
0x8,
"啊……\x02",
)
CloseMessageWindow()
OP_62(0x8, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
def lambda_E77():
OP_8F(0xFE, 0x3782, 0x0, 0xFFFF3C24, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x8, 3, lambda_E77)
SetChrPos(0x9, 19840, 0, -40400, 0)
SetChrPos(0xA, 21100, 0, -41220, 0)
SetChrPos(0xB, 21440, 0, -39410, 0)
SetChrPos(0xC, 21420, 0, -38390, 0)
SetChrPos(0xD, 23130, 0, -39910, 0)
SetChrPos(0xE, 21460, 0, -36780, 0)
SetChrPos(0xF, 23510, 0, -37150, 0)
SetChrPos(0x10, 24560, 0, -39000, 0)
ClearChrFlags(0x9, 0x80)
ClearChrFlags(0xA, 0x80)
ClearChrFlags(0xB, 0x80)
ClearChrFlags(0xC, 0x80)
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
ClearChrFlags(0xF, 0x80)
ClearChrFlags(0x10, 0x80)
def lambda_F42():
OP_8E(0xFE, 0x3BB0, 0x0, 0xFFFF4E44, 0x5DC, 0x0)
ExitThread()
QueueWorkItem(0x9, 1, lambda_F42)
def lambda_F5D():
OP_8E(0xFE, 0x43DA, 0x0, 0xFFFF4BA6, 0x5DC, 0x0)
ExitThread()
QueueWorkItem(0xA, 1, lambda_F5D)
def lambda_F78():
OP_8E(0xFE, 0x40E2, 0x0, 0xFFFF5060, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xB, 1, lambda_F78)
def lambda_F93():
OP_8E(0xFE, 0x3FAC, 0x0, 0xFFFF56B4, 0x5DC, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_F93)
def lambda_FAE():
OP_8E(0xFE, 0x4A1A, 0x0, 0xFFFF5132, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xD, 1, lambda_FAE)
def lambda_FC9():
OP_8E(0xFE, 0x433A, 0x0, 0xFFFF5A74, 0x5DC, 0x0)
ExitThread()
QueueWorkItem(0xE, 1, lambda_FC9)
def lambda_FE4():
OP_8E(0xFE, 0x4AF6, 0x0, 0xFFFF5ACE, 0x5DC, 0x0)
ExitThread()
QueueWorkItem(0xF, 1, lambda_FE4)
def lambda_FFF():
OP_8E(0xFE, 0x4A74, 0x0, 0xFFFF55F6, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x10, 1, lambda_FFF)
Sleep(300)
def lambda_101F():
OP_6D(19250, 0, -43570, 1500)
ExitThread()
QueueWorkItem(0x101, 1, lambda_101F)
def lambda_1037():
OP_6C(0, 4500)
ExitThread()
QueueWorkItem(0x101, 2, lambda_1037)
Sleep(1500)
def lambda_104C():
OP_6D(17110, 0, -45970, 3000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_104C)
Sleep(3000)
ChrTalk(
0x101,
"#000F怎么回事啊,这些家伙们……!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F好像是因为\x01",
"听到骚动而聚集过来了……\x02\x03",
"有这么多,还真是麻烦啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F嗯,以防万一,\x01",
"至少先让修女逃出去……\x02",
)
)
CloseMessageWindow()
ClearChrFlags(0x108, 0x80)
OP_8E(0x108, 0x528A, 0x0, 0xFFFF61B8, 0x2EE0, 0x0)
def lambda_1155():
OP_6D(17970, 0, -45090, 3000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_1155)
SetChrChipByIndex(0x108, 8)
SetChrFlags(0x108, 0x20)
SetChrFlags(0x108, 0x1000)
def lambda_117C():
OP_99(0xFE, 0x0, 0x4, 0xBB8)
ExitThread()
QueueWorkItem(0x108, 2, lambda_117C)
OP_8E(0x108, 0x4CD6, 0x0, 0xFFFF5C36, 0x2EE0, 0x0)
PlayEffect(0x8, 0xFF, 0xFF, 19660, 0, -41900, 0, 0, 0, 400, 400, 400, 0xFF, 0, 0, 0, 0)
def lambda_11D5():
OP_8F(0xFE, 0x43DA, 0x0, 0xFFFF542A, 0x2EE0, 0x0)
ExitThread()
QueueWorkItem(0xF, 2, lambda_11D5)
def lambda_11F0():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x3E8)
ExitThread()
QueueWorkItem(0xF, 1, lambda_11F0)
def lambda_1202():
OP_99(0xFE, 0x4, 0x7, 0xBB8)
ExitThread()
QueueWorkItem(0x108, 2, lambda_1202)
OP_96(0x108, 0x4F9C, 0x0, 0xFFFF5EDE, 0x3E8, 0x1770)
ChrTalk(
0x108,
"哦,看起来你们遇到麻烦了?\x02",
)
CloseMessageWindow()
def lambda_123F():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0xF, 1, lambda_123F)
def lambda_124D():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0xE, 1, lambda_124D)
def lambda_125B():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0x10, 1, lambda_125B)
def lambda_1269():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0xC, 1, lambda_1269)
def lambda_1277():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0xD, 1, lambda_1277)
ChrTalk(
0x101,
"#000F啊,金先生!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F太好了……\x01",
"终于发现了啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F嘿嘿,\x01",
"我还以为是谁,原来是你们啊。\x02\x03",
"总之,要说的话一会儿再说,\x01",
"赶快把这些家伙们收拾掉吧!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F嗯!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F明白!\x02",
)
CloseMessageWindow()
def lambda_1380():
OP_99(0xFE, 0x0, 0x4, 0xBB8)
ExitThread()
QueueWorkItem(0x108, 2, lambda_1380)
def lambda_1390():
OP_8E(0xFE, 0x416E, 0x0, 0xFFFF4868, 0x1770, 0x0)
ExitThread()
QueueWorkItem(0x101, 3, lambda_1390)
def lambda_13AB():
OP_8E(0xFE, 0x3A0C, 0x0, 0xFFFF494E, 0x1770, 0x0)
ExitThread()
QueueWorkItem(0x102, 3, lambda_13AB)
OP_8E(0x108, 0x492A, 0x0, 0xFFFF5952, 0x1388, 0x0)
Battle(0x3A4, 0x0, 0x0, 0x0, 0xFF)
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_END)),
(1, "loc_13ED"),
(SWITCH_DEFAULT, "loc_13F0"),
)
label("loc_13ED")
OP_B4(0x0)
Return()
label("loc_13F0")
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x108, 0xFF)
OP_44(0x9, 0xFF)
OP_44(0xA, 0xFF)
OP_44(0xB, 0xFF)
OP_44(0xC, 0xFF)
OP_44(0xD, 0xFF)
OP_44(0xE, 0xFF)
OP_44(0xF, 0xFF)
OP_44(0x10, 0xFF)
EventBegin(0x0)
SetChrFlags(0x9, 0x80)
SetChrFlags(0xA, 0x80)
SetChrFlags(0xB, 0x80)
SetChrFlags(0xC, 0x80)
SetChrFlags(0xD, 0x80)
SetChrFlags(0xE, 0x80)
SetChrFlags(0xF, 0x80)
SetChrFlags(0x10, 0x80)
SetChrPos(0x101, 16770, 0, -47500, 45)
SetChrPos(0x102, 15050, 0, -45990, 45)
SetChrPos(0x108, 17690, 0, -44440, 225)
SetChrPos(0x8, 14650, 0, -48360, 45)
OP_6D(15920, 0, -45970, 0)
OP_67(0, 9500, -10000, 0)
OP_6B(3200, 0)
OP_6C(0, 0)
OP_6E(262, 0)
ClearChrFlags(0x108, 0x1000)
OP_51(0x101, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x102, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x108, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChipByIndex(0x101, 65535)
SetChrChipByIndex(0x102, 65535)
SetChrChipByIndex(0x108, 65535)
ClearChrFlags(0x108, 0x20)
ClearChrFlags(0x108, 0x1000)
ChrTalk(
0x108,
(
"#070F哎呀哎呀……\x01",
"多亏了这些家伙们,让我好好地出了一次汗。\x02\x03",
"不过,真没想到\x01",
"能在这里见到你们啊。\x02\x03",
"你们不是在\x01",
"蔡斯地区工作吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F啊哈哈,确实从那时候起\x01",
"就一直没有像这样见面呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F其实我们已经从蔡斯支部\x01",
"转属到格兰赛尔支部来了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哦,是这样啊。\x02\x03",
"也就是说,那个绑架事件,\x01",
"已经解决了吗。\x02\x03",
"那个中毒的红发小哥\x01",
"现在还好吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F嗯,已经没事了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"……请问…………\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哦,真是失礼了……\x02\x03",
"…………啊……………\x02",
)
)
CloseMessageWindow()
def lambda_16F3():
label("loc_16F3")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_16F3")
QueueWorkItem2(0x108, 1, lambda_16F3)
OP_8F(0x108, 0x3D72, 0x0, 0xFFFF4DAE, 0x7D0, 0x0)
ChrTalk(
0x108,
(
"#070F喂喂……\x01",
"真是个美人啊。\x02\x03",
"是你们的同伴吗?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x102, 0x108, 400)
ChrTalk(
0x102,
(
"#010F不是,\x01",
"我们也是刚认识的……\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x108, 400)
ChrTalk(
0x101,
(
"#000F真是的,这么色迷迷的,\x01",
"也不知道害羞……\x02\x03",
"我去告诉雾香小姐吧?\x02",
)
)
CloseMessageWindow()
OP_44(0x108, 0xFF)
TurnDirection(0x108, 0x101, 400)
ChrTalk(
0x108,
(
"#070F呜……\x01",
"我只是说陈述客观的事实罢了……\x02\x03",
"喂,\x01",
"为什么要提到那家伙的名字啊?\x02",
)
)
CloseMessageWindow()
OP_8E(0x8, 0x3B92, 0x0, 0xFFFF4674, 0x7D0, 0x0)
ChrTalk(
0x8,
(
"那个……把我从危险的地方救出来,\x01",
"真是太感谢了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"你们都是我的救命恩人。\x02",
)
CloseMessageWindow()
def lambda_18F4():
label("loc_18F4")
TurnDirection(0xFE, 0x8, 0)
OP_48()
Jump("loc_18F4")
QueueWorkItem2(0x108, 1, lambda_18F4)
def lambda_1905():
label("loc_1905")
TurnDirection(0xFE, 0x108, 0)
OP_48()
Jump("loc_1905")
QueueWorkItem2(0x102, 1, lambda_1905)
OP_8F(0x108, 0x3DC2, 0x0, 0xFFFF491C, 0x7D0, 0x0)
ChrTalk(
0x108,
(
"#070F没什么没什么,请别放在心上!\x02\x03",
"作为男人,\x01",
"就应该贯彻武侠之道嘛!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"哎呀……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F好像在装帅呢。\x02\x03",
"金先生\x01",
"其实对女人没有办法呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F哈哈……\x01",
"说得没错。\x02",
)
)
CloseMessageWindow()
ClearChrFlags(0x11, 0x80)
ClearChrFlags(0x12, 0x80)
SetChrPos(0x11, 22440, 0, -38100, 0)
SetChrPos(0x12, 21240, 0, -37930, 0)
ChrTalk(
0x11,
"你们在干什么!?\x02",
)
CloseMessageWindow()
def lambda_1A63():
label("loc_1A63")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_1A63")
QueueWorkItem2(0x108, 2, lambda_1A63)
def lambda_1A74():
label("loc_1A74")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_1A74")
QueueWorkItem2(0x101, 2, lambda_1A74)
def lambda_1A85():
label("loc_1A85")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_1A85")
QueueWorkItem2(0x102, 2, lambda_1A85)
def lambda_1A96():
label("loc_1A96")
TurnDirection(0xFE, 0x11, 0)
OP_48()
Jump("loc_1A96")
QueueWorkItem2(0x8, 2, lambda_1A96)
def lambda_1AA7():
OP_8E(0xFE, 0x48DA, 0x0, 0xFFFF540C, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0x11, 1, lambda_1AA7)
def lambda_1AC2():
OP_8E(0xFE, 0x4434, 0x0, 0xFFFF55EC, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_1AC2)
OP_6D(17010, 0, -44670, 3000)
ChrTalk(
0x101,
"#000F哎……!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F……………………………\x02",
)
CloseMessageWindow()
WaitChrThread(0x11, 0x1)
ChrTalk(
0x11,
(
"在这种没人的地方密谈,\x01",
"真是可疑的家伙……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"难道……\x01",
"你们是恐怖分子吧?\x02",
)
)
CloseMessageWindow()
OP_8E(0x101, 0x4182, 0x0, 0xFFFF4BF6, 0xFA0, 0x0)
ChrTalk(
0x101,
(
"#10A#000F谁、谁是恐怖分子啊!?\x01",
"我们是——呜。\x05\x02",
)
)
Sleep(1000)
OP_8E(0x102, 0x3F2A, 0x0, 0xFFFF4BF6, 0xFA0, 0x0)
def lambda_1C45():
OP_8E(0xFE, 0x42FE, 0x0, 0xFFFF49D0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_1C45)
OP_8F(0x102, 0x4182, 0x0, 0xFFFF4BF6, 0x7D0, 0x0)
ChrTalk(
0x102,
(
"#010F……我们是游击士协会\x01",
"格兰赛尔支部所属的成员。\x02\x03",
"就在刚才,我们保护了\x01",
"这位修女免遭魔兽袭击。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x11,
"什么……!\x02",
)
CloseMessageWindow()
ChrTalk(
0x12,
"是游击士!?\x02",
)
CloseMessageWindow()
OP_8E(0x8, 0x3A84, 0x0, 0xFFFF4B60, 0x7D0, 0x0)
ChrTalk(
0x8,
(
"那个……\x01",
"他们说的都是真的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"我来这里采摘草药,\x01",
"结果被魔兽袭击……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F顺便一说,我也是游击士。\x02\x03",
"我记得和你们的同伴\x01",
"在预选赛中曾经碰过面对吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x11,
(
"卡尔瓦德的武术家……\x01",
"那个一个人参赛的家伙啊……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"哼……\x01",
"身份好像可以确定了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x11,
"这次就放过你们。\x02",
)
CloseMessageWindow()
ChrTalk(
0x11,
(
"不过,这里离艾尔贝离宫很近。\x01",
"没事不要在这边乱转。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
"还有,修女小姐。\x02",
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"我们把你\x01",
"送回王都去吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"不要借助\x01",
"什么游击士的力量。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"哎,但、但是我……\x02",
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0xC, 0xD, 0xFA, 0x2)
OP_22(0x31, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0x101,
(
"#000F可恶,等一下,你们!\x02\x03",
"从刚才开始,\x01",
"就一直在说过分的话……\x02",
)
)
CloseMessageWindow()
OP_44(0x102, 0xFF)
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
"#010F艾丝蒂尔……算了。\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x11, 400)
ChrTalk(
0x102,
(
"#010F以后我们会注意的,\x01",
"这次能宽大处理,真是太感谢了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x11,
"哼,算了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x11,
(
"你们到底只不过是普通市民。\x01",
"弄清楚自己的本分。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
"那么,修女小姐,我们走吧。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"好、好的……\x02",
)
CloseMessageWindow()
OP_44(0x8, 0xFF)
OP_8E(0x8, 0x4268, 0x0, 0xFFFF515A, 0xBB8, 0x0)
TurnDirection(0x8, 0x108, 400)
ChrTalk(
0x8,
(
"那个,各位……\x01",
"真是太感谢了。\x02",
)
)
CloseMessageWindow()
def lambda_21FB():
OP_8E(0xFE, 0x5CB2, 0x0, 0xFFFF70F4, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x11, 1, lambda_21FB)
Sleep(100)
def lambda_221B():
OP_8E(0xFE, 0x5CB2, 0x0, 0xFFFF70F4, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_221B)
Sleep(200)
def lambda_223B():
OP_8E(0xFE, 0x5CB2, 0x0, 0xFFFF70F4, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x8, 1, lambda_223B)
Sleep(2000)
OP_62(0x101, 0x0, 1900, 0x2C, 0x2F, 0x96, 0x1)
OP_22(0x2F, 0x0, 0x64)
OP_6D(17150, 0, -46640, 1000)
Sleep(100)
OP_44(0x101, 0xFF)
OP_44(0x108, 0xFF)
OP_44(0x102, 0xFF)
ChrTalk(
0x101,
(
"#000F什、什、什……\x02\x03",
"什么啊!那些家伙们!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F是王国军情报部所属的\x01",
"『特务部队』的人吧。\x02\x03",
"虽然很厉害,\x01",
"不过是很阴险的家伙们呢。\x02",
)
)
CloseMessageWindow()
def lambda_237F():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_237F)
TurnDirection(0x101, 0x108, 400)
ChrTalk(
0x101,
(
"#000F比起阴险来说,\x01",
"倒不如说是品行恶劣呢!\x02\x03",
"哎……\x02\x03",
"为什么金先生\x01",
"你会知道他们的事情呢?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x108, 0x101, 400)
ChrTalk(
0x108,
(
"#070F啊,武术大会的预选赛,\x01",
"他们的队伍也出场了。\x02\x03",
"那时就是这样介绍的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F(那些家伙也有出场……!?)\x02\x03",
"(平时进行隐秘活动那些家伙们\x01",
"这样堂堂正正地被人看到……)\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F(大概是没有\x01",
"再隐藏自己存在的必要了吧……)\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F总之,在弄清楚原因之前,\x01",
"我们还是赶快回城去吧。\x02\x03",
"……对了,\x01",
"你们为什么会在这里的?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F啊……都忘了重要的事情呢。\x02\x03",
"其实,\x01",
"我们是来找金先生你的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
"#070F嗯?找我?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F其实有件事情\x01",
"想拜托金先生。\x02\x03",
"是有关武术大会的事情……\x02",
)
)
CloseMessageWindow()
OP_A2(0x3FB)
NewScene("ED6_DT01/T4130 ._SN", 100, 0, 0)
IdleLoop()
label("loc_266B")
Return()
# Function_2_507 end
def Function_3_266C(): pass
label("Function_3_266C")
EventBegin(0x0)
SetChrPos(0x101, 11690, 0, -52560, 225)
SetChrPos(0x102, 11000, 0, -51680, 225)
SetChrPos(0x108, 10930, 0, -50240, 196)
ClearChrFlags(0x13, 0x80)
ClearChrFlags(0x14, 0x80)
ClearChrFlags(0x15, 0x80)
ClearChrFlags(0x16, 0x80)
SetChrPos(0x13, 14410, 0, -53900, 257)
SetChrPos(0x14, 14820, 0, -52280, 244)
SetChrPos(0x15, 13050, 0, -51640, 207)
SetChrPos(0x16, 13090, 0, -50260, 213)
OP_6D(11570, 250, -53400, 0)
OP_67(0, 7270, -10000, 0)
OP_6B(2710, 0)
OP_6C(225000, 0)
OP_6E(395, 0)
FadeToBright(3000, 0)
def lambda_2745():
OP_6C(249000, 5000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_2745)
OP_6E(309, 5000)
ChrTalk(
0x101,
(
"#006F嗯……\x01",
"这里就是集合点吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F在琥耀石的石碑旁的休息场所,\x01",
"和这里完全符合。\x02\x03",
"#013F问题是,\x01",
"尤莉亚中尉他们还没来啊……\x02",
)
)
CloseMessageWindow()
ClearChrFlags(0x17, 0x80)
ClearChrFlags(0x18, 0x80)
ClearChrFlags(0x19, 0x80)
ClearChrFlags(0x1A, 0x80)
ClearChrFlags(0x1B, 0x80)
ClearChrFlags(0x1C, 0x80)
ClearChrFlags(0x1D, 0x80)
ClearChrFlags(0x1E, 0x80)
ClearChrFlags(0x1F, 0x80)
SetChrPos(0x17, 17080, 0, -45130, 225)
SetChrPos(0x18, 17100, 0, -43830, 225)
SetChrPos(0x19, 18380, 0, -45010, 225)
SetChrPos(0x1A, 17740, 0, -42700, 225)
SetChrPos(0x1B, 18600, 0, -43670, 225)
SetChrPos(0x1C, 19480, 0, -44620, 225)
SetChrPos(0x1D, 18580, 0, -41840, 225)
SetChrPos(0x1E, 19520, 0, -42690, 225)
SetChrPos(0x1F, 20400, 0, -43690, 225)
def lambda_2904():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_2904)
def lambda_291F():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x18, 1, lambda_291F)
def lambda_293A():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x19, 1, lambda_293A)
def lambda_2955():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x1A, 1, lambda_2955)
def lambda_2970():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x1B, 1, lambda_2970)
def lambda_298B():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x1C, 1, lambda_298B)
def lambda_29A6():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x1D, 1, lambda_29A6)
def lambda_29C1():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x1E, 1, lambda_29C1)
def lambda_29DC():
OP_90(0xFE, 0x7D0, 0x0, 0xFA0, 0x2710, 0x0)
ExitThread()
QueueWorkItem(0x1F, 1, lambda_29DC)
ChrTalk(
0x17,
"#1P……请不用担心。\x02",
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
def lambda_2A4F():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2A4F)
def lambda_2A5D():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_2A5D)
def lambda_2A6B():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_2A6B)
def lambda_2A79():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x13, 1, lambda_2A79)
def lambda_2A87():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x14, 1, lambda_2A87)
def lambda_2A95():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x15, 1, lambda_2A95)
def lambda_2AA3():
TurnDirection(0xFE, 0x17, 400)
ExitThread()
QueueWorkItem(0x16, 1, lambda_2AA3)
def lambda_2AB1():
OP_6C(335000, 4000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_2AB1)
def lambda_2AC1():
OP_6E(332, 4000)
ExitThread()
QueueWorkItem(0x102, 2, lambda_2AC1)
def lambda_2AD1():
OP_6D(13880, 0, -49890, 4000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_2AD1)
def lambda_2AE9():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_2AE9)
Sleep(100)
def lambda_2B09():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x18, 1, lambda_2B09)
def lambda_2B24():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x19, 1, lambda_2B24)
Sleep(100)
def lambda_2B44():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x1A, 1, lambda_2B44)
def lambda_2B5F():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x1B, 1, lambda_2B5F)
def lambda_2B7A():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x1C, 1, lambda_2B7A)
Sleep(100)
def lambda_2B9A():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x1D, 1, lambda_2B9A)
def lambda_2BB5():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x1E, 1, lambda_2BB5)
def lambda_2BD0():
OP_90(0xFE, 0xFFFFF060, 0x0, 0xFFFFE0C0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x1F, 1, lambda_2BD0)
WaitChrThread(0x101, 0x2)
WaitChrThread(0x17, 0x1)
ChrTalk(
0x101,
"#004F哇,什么时候……\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#071F哈哈……\x01",
"原来有这么多人潜伏在王都啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"#176F市民中也有很多人支持我们。\x01",
" \x02\x03",
"#170F我们这边已经准备好了,\x01",
"随时可以开始作战。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x16,
"好……\x02",
)
CloseMessageWindow()
TurnDirection(0x16, 0x101, 400)
ChrTalk(
0x16,
(
"#5P艾丝蒂尔,\x01",
"请发号施令。\x02",
)
)
CloseMessageWindow()
def lambda_2D2F():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_2D2F)
def lambda_2D3D():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_2D3D)
def lambda_2D4B():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x13, 1, lambda_2D4B)
def lambda_2D59():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x14, 1, lambda_2D59)
def lambda_2D67():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x15, 1, lambda_2D67)
def lambda_2D75():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x16, 1, lambda_2D75)
ChrTalk(
0x101,
(
"#580F咦……?\x02\x03",
"我、我来!?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x16,
(
"#5P因为是由你们\x01",
"接受女王委托的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x15,
(
"#5P是啊,\x01",
"由你来发号施令是理所当然的哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#506F可、可是……\x01",
"我还只是个新人……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x13,
(
"#6P哈哈,没关系。\x01",
"由你来我们没有异议的哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x14,
(
"#6P只要声音别叫得太大,\x01",
"就不会惊动到敌人的哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"#171F我们是来协助你们作战的,\x01",
"所以绝对不会有半点异议。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#503F啊,哦……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F#5P艾丝蒂尔,要有自信。\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#071F#5P不用再细想了。\x02\x03",
"这可是老规矩了,老规矩。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#008F嗯……\x02\x03",
"#006F…………………………………\x02",
)
)
CloseMessageWindow()
OP_8C(0x101, 225, 400)
def lambda_2FDA():
label("loc_2FDA")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_2FDA")
QueueWorkItem2(0x102, 1, lambda_2FDA)
def lambda_2FEB():
label("loc_2FEB")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_2FEB")
QueueWorkItem2(0x108, 1, lambda_2FEB)
def lambda_2FFC():
label("loc_2FFC")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_2FFC")
QueueWorkItem2(0x13, 1, lambda_2FFC)
def lambda_300D():
label("loc_300D")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_300D")
QueueWorkItem2(0x14, 1, lambda_300D)
def lambda_301E():
label("loc_301E")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_301E")
QueueWorkItem2(0x15, 1, lambda_301E)
def lambda_302F():
label("loc_302F")
TurnDirection(0xFE, 0x101, 0)
OP_48()
Jump("loc_302F")
QueueWorkItem2(0x16, 1, lambda_302F)
def lambda_3040():
OP_6C(0, 3000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_3040)
OP_8E(0x101, 0x2972, 0x1E0, 0xFFFF2F40, 0x5DC, 0x0)
OP_20(0x7D0)
OP_8C(0x101, 45, 400)
WaitChrThread(0x101, 0x3)
OP_21()
ChrTalk(
0x101,
"#006F#5P我向全体作战成员宣布……\x02",
)
CloseMessageWindow()
Sleep(400)
ChrTalk(
0x101,
(
"#005F#5P艾尔贝离宫攻略战,\x01",
"暨人质解救作战现在开始!\x02",
)
)
CloseMessageWindow()
FadeToDark(1500, 0, -1)
SetMapFlags(0x2000000)
OP_A2(0x3FA)
NewScene("ED6_DT01/C4113 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_3_266C end
def Function_4_30F1(): pass

label("Function_4_30F1")

# Machine-decompiled scene-script event: the four ambush-squad NPCs
# (0x18-0x1B) run off while the party (0x101/0x102/0x108) watches.
# Opcode calls and runtime strings preserved verbatim.

EventBegin(0x0)
# Viewpoint setup (OP_6D/OP_67/OP_6B/OP_6C/OP_6E cluster; opcode
# semantics are not visible from this file).
OP_6D(-26280, 0, -4400, 0)
OP_67(0, 9500, -10000, 0)
OP_6B(3200, 0)
OP_6C(45000, 0)
OP_6E(234, 0)
# Clear flag 0x80 on the squad members and give them chip 19 / sub-chip 2.
ClearChrFlags(0x18, 0x80)
ClearChrFlags(0x19, 0x80)
ClearChrFlags(0x1A, 0x80)
ClearChrFlags(0x1B, 0x80)
SetChrChipByIndex(0x18, 19)
SetChrChipByIndex(0x19, 19)
SetChrChipByIndex(0x1A, 19)
SetChrChipByIndex(0x1B, 19)
SetChrSubChip(0x18, 2)
SetChrSubChip(0x19, 2)
SetChrSubChip(0x1A, 2)
SetChrSubChip(0x1B, 2)
# Place squad (facing 180) and party members.
SetChrPos(0x18, -25890, 0, -4510, 180)
SetChrPos(0x19, -27370, 0, -4510, 180)
SetChrPos(0x1A, -27240, 0, -2700, 180)
SetChrPos(0x1B, -25950, 0, -2920, 180)
SetChrPos(0x108, -26570, 0, -6220, 0)
SetChrPos(0x102, -28030, 0, -6250, 45)
SetChrPos(0x101, -25360, 0, -6190, 315)
Sleep(1000)

# Dialogue exchange (strings are game data; do not edit).
ChrTalk(
    0x108,
    "#072F好,伏击组开始行动了。\x02",
)
CloseMessageWindow()
ChrTalk(
    0x18,
    (
        "#5P我们先行一步,\x01",
        "去引开前庭的残存兵力!\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x18,
    (
        "#5P趁此机会,\x01",
        "请你们突入离宫内部!\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x101,
    "#006F嗯,知道了!\x02",
)
CloseMessageWindow()
ChrTalk(
    0x102,
    "#012F愿女神保佑你们!\x02",
)
CloseMessageWindow()

# Party members keep turning toward NPC 0x18 in busy-wait worker loops
# (label/Jump form an infinite loop; terminated below with OP_44).
def lambda_32E7():
    label("loc_32E7")
    TurnDirection(0xFE, 0x18, 0)
    OP_48()
    Jump("loc_32E7")

QueueWorkItem2(0x101, 1, lambda_32E7)

def lambda_32F8():
    label("loc_32F8")
    TurnDirection(0xFE, 0x18, 0)
    OP_48()
    Jump("loc_32F8")

QueueWorkItem2(0x102, 1, lambda_32F8)

def lambda_3309():
    label("loc_3309")
    TurnDirection(0xFE, 0x18, 0)
    OP_48()
    Jump("loc_3309")

QueueWorkItem2(0x108, 1, lambda_3309)

# Squad members (0x1B, 0x1A, 0x18, 0x19) leave one by one, staggered by
# Sleep(200); each gets an OP_8C and an OP_8E toward the same target point.
SetChrChipByIndex(0x1B, 18)

def lambda_331F():
    OP_8C(0xFE, 0, 800)
    ExitThread()

QueueWorkItem(0x1B, 2, lambda_331F)

def lambda_332D():
    OP_8E(0xFE, 0xFFFF9B60, 0x0, 0x76B6, 0x1B58, 0x0)
    ExitThread()

QueueWorkItem(0x1B, 1, lambda_332D)
Sleep(200)
SetChrChipByIndex(0x1A, 18)

def lambda_3352():
    OP_8C(0xFE, 0, 800)
    ExitThread()

QueueWorkItem(0x1A, 2, lambda_3352)

def lambda_3360():
    OP_8E(0xFE, 0xFFFF9B60, 0x0, 0x76B6, 0x1B58, 0x0)
    ExitThread()

QueueWorkItem(0x1A, 1, lambda_3360)
Sleep(200)
SetChrChipByIndex(0x18, 18)

def lambda_3385():
    OP_8C(0xFE, 0, 800)
    ExitThread()

QueueWorkItem(0x18, 2, lambda_3385)

def lambda_3393():
    OP_8E(0xFE, 0xFFFF9B60, 0x0, 0x76B6, 0x1B58, 0x0)
    ExitThread()

QueueWorkItem(0x18, 1, lambda_3393)
Sleep(200)
SetChrChipByIndex(0x19, 18)

def lambda_33B8():
    OP_8C(0xFE, 0, 800)
    ExitThread()

QueueWorkItem(0x19, 2, lambda_33B8)

def lambda_33C6():
    OP_8E(0xFE, 0xFFFF9B60, 0x0, 0x76B6, 0x1B58, 0x0)
    ExitThread()

QueueWorkItem(0x19, 1, lambda_33C6)
Sleep(2000)

# Cancel the party's turn-tracking threads, re-set flag 0x80 on the squad
# (the same flag cleared above to display them), mark scenario flag 0x651.
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x108, 0xFF)
SetChrFlags(0x18, 0x80)
SetChrFlags(0x19, 0x80)
SetChrFlags(0x1A, 0x80)
SetChrFlags(0x1B, 0x80)
OP_A2(0x651)
EventEnd(0x0)
Return()
# Function_4_30F1 end
def Function_5_3406(): pass

label("Function_5_3406")

# Map-edge trigger: only active while scenario flag (0xCA,1) is set and
# (0xCA,2) is clear.  Plays a reminder line chosen by value index 0xA
# (party-leader selector, judging by which character id speaks), then
# nudges the player with OP_90 and ends the event.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3580")
EventBegin(0x1)
# Leader value 0 -> character 0x101 speaks.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_347E")
ChrTalk(
    0x101,
    (
        "#002F……在突击的时刻是不能逃离的。\x01",
        " \x02\x03",
        "立刻赶去艾尔贝离宫吧!\x02",
    )
)
CloseMessageWindow()
Jump("loc_3565")
label("loc_347E")
# Leader value 1 -> character 0x102 speaks.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_34F0")
TurnDirection(0x102, 0x101, 400)
ChrTalk(
    0x102,
    (
        "#012F要突击也只有趁现在了……\x02\x03",
        "赶快去艾尔贝离宫吧!\x02",
    )
)
CloseMessageWindow()
Jump("loc_3565")
label("loc_34F0")
# Leader value 7 -> character 0x108 speaks.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_3565")
TurnDirection(0x108, 0x1, 400)
ChrTalk(
    0x108,
    (
        "#072F如果现在不行动的话,\x01",
        "就没有突入离宫的机会了。\x02\x03",
        "……赶快去艾尔贝离宫吧。\x02",
    )
)
CloseMessageWindow()
label("loc_3565")
# Push the player back (OP_90 parameter semantics not visible here;
# Function_6 is the mirror trigger with a different offset).
OP_90(0x0, 0x5DC, 0x0, 0x0, 0xBB8, 0x0)
Sleep(50)
EventEnd(0x4)
label("loc_3580")
Return()
# Function_5_3406 end
def Function_6_3581(): pass

label("Function_6_3581")

# Mirror of Function_5_3406 for the opposite map edge: same flags, same
# leader-dependent dialogue, only the final OP_90 offset differs.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCA, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_36FC")
EventBegin(0x1)
# Leader value 0 -> character 0x101 speaks.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_35F9")
ChrTalk(
    0x101,
    (
        "#002F……在突击的时刻是不能逃离的。\x01",
        " \x02\x03",
        "立刻赶去艾尔贝离宫吧!\x02",
    )
)
CloseMessageWindow()
Jump("loc_36E1")
label("loc_35F9")
# Leader value 1 -> character 0x102 speaks.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_366C")
TurnDirection(0x102, 0x101, 400)
ChrTalk(
    0x102,
    (
        "#012F要突击也只有趁现在了……\x02\x03",
        "赶快去艾尔贝离宫吧!\x02",
    )
)
CloseMessageWindow()
Jump("loc_36E1")
label("loc_366C")
# Leader value 7 -> character 0x108 speaks.
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_36E1")
TurnDirection(0x108, 0x1, 400)
ChrTalk(
    0x108,
    (
        "#072F如果现在不行动的话,\x01",
        "就没有突入离宫的机会了。\x02\x03",
        "……赶快去艾尔贝离宫吧。\x02",
    )
)
CloseMessageWindow()
label("loc_36E1")
# Push the player back (negative offset vs Function_5's 0x5DC).
OP_90(0x0, 0x0, 0x0, 0xFFFFFA24, 0xBB8, 0x0)
Sleep(50)
EventEnd(0x4)
label("loc_36FC")
Return()
# Function_6_3581 end
def Function_7_36FD(): pass

label("Function_7_36FD")

# Signpost interaction: dim the screen, show an anonymous message window
# containing direction text (north/east/west with distances), then restore
# brightness and the default message-window position.
FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
AnonymousTalk(
    (
        scpstr(SCPSTR_CODE_COLOR, 0x5),
        "北 艾尔贝离宫\x01",
        "东 格鲁纳门 224塞尔矩\x01",
        "西 圣海姆门 256塞尔矩\x02",
    )
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(72, 320, 56, 3)
TalkEnd(0xFF)
Return()
# Function_7_36FD end
# Decompiled-script trailer: presumably writes the assembled scenario back
# out and runs `main` through the tool's Try wrapper — confirm against the
# decompiler runtime these scripts are executed with.
SaveToFile()

Try(main)
|
# range(10)
# print(range(10))
# print(range(2, 20, 3))

def _print_row(values):
    """Print the values space-separated on one line (keeps the trailing
    space before the newline, matching `print(v, end=' ')` output)."""
    for v in values:
        print(v, end=' ')
    print()

# Demonstrate the common range() call shapes.
_print_row(range(10))          # explicit start/step form range(0, 10, 1)
_print_row(range(10))          # stop only
_print_row(range(3, 10))       # start + stop
_print_row(range(2, 20, 2))    # start + stop + step
_print_row(range(10, 1, -1))   # counting down

# Sum 1..100 inclusive.
total = sum(range(1, 101))
print(total)
|
import numpy as np
import cv2
import datetime
# Haar-cascade face detector; alternate cascades left as options below.
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_alt.xml')
# face_cascade = cv2.CascadeClassifier('haarcascade_profileface.xml')
# eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')

# Read frames from a video file (webcam option commented out).
#cap = cv2.VideoCapture(0)
cap = cv2.VideoCapture("/root/Desktop/xxx.mp4")

while(True):
    # NOTE(review): the read success flag is discarded — `img` will be None
    # at end-of-stream and cvtColor will raise; confirm intended behavior.
    _,img = cap.read()
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, 1.2, 2)
    for (x,y,w,h) in faces:
        # Draw a box and save the face crop, timestamped.
        cv2.rectangle(img,(x,y),(x+w,y+h),(0,250,0),5)
        # print("x,y,w,h",x,y,w,h)
        xx = img[y:y+h, x:x+w]
        now = datetime.datetime.now()
        cv2.imwrite("./img/"+ str(now) +".jpg",xx)
        # roi_gray = gray[y:y+h, x:x+w]
        # roi_color = img[y:y+h, x:x+w]
        # eyes = eye_cascade.detectMultiScale(roi_gray)
        # for (ex,ey,ew,eh) in eyes:
        #     cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,255,0),2)
    cv2.imshow('rec',img)
    if cv2.waitKey(10) & 0xFF == ord('q'):
        break
    elif cv2.waitKey(10) & 0xFF == ord("s"):
        # Manual save on 's'.  NOTE(review): `xx` is unbound until a face has
        # been detected, and this path writes "./img<ts>.jpg" (no slash)
        # unlike the "./img/" directory used above — likely a typo.
        now = datetime.datetime.now()
        # ts = now.strftime("%Y-%m-%d %H:%M:%S")
        cv2.imwrite("./img"+ str(now) +".jpg",xx)

cap.release()
cv2.destroyAllWindows()
|
# Generated by Django 2.2.1 on 2019-06-09 12:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the `validated` flag to PhoneOTP (auto-generated by Django 2.2.1).

    The flag records whether the user has verified the OTP via the second API.
    """

    dependencies = [
        ('accounts', '0002_phoneotp'),
    ]

    operations = [
        migrations.AddField(
            model_name='phoneotp',
            name='validated',
            field=models.BooleanField(default=False, help_text='If it is true, means user has validated otp correctly in second API'),
        ),
    ]
|
from rest_framework import views
from rest_framework.response import Response
from django import http
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_GET
from django.views.generic.base import RedirectView
from share.models import Source
from api import util
@require_GET
def source_icon_view(request, source_name):
    """Serve a Source's favicon as image/x-icon; 404 when absent."""
    src = get_object_or_404(Source, name=source_name)
    if not src.icon:
        raise http.Http404('Favicon for source {} does not exist'.format(source_name))
    icon_response = http.FileResponse(src.icon)
    icon_response['Content-Type'] = 'image/x-icon'
    return icon_response
class APIVersionRedirectView(RedirectView):
    """Redirect legacy API paths to /api/v2/, using method-preserving
    "smart" redirects; respond 410 Gone when no target URL resolves."""

    def get_redirect_url(self, *args, **kwargs):
        return '/api/v2/{}'.format(kwargs['path'])

    def get(self, request, *args, **kwargs):
        target = self.get_redirect_url(*args, **kwargs)
        if not target:
            return http.HttpResponseGone()
        if self.permanent:
            return util.HttpSmartResponsePermanentRedirect(target)
        return util.HttpSmartResponseRedirect(target)
class ServerStatusView(views.APIView):
    """Liveness endpoint reporting server status and deployed version."""

    def get(self, request):
        payload = {
            'id': '1',
            'type': 'Status',
            'attributes': {
                'status': 'up',
                'version': settings.VERSION,
            }
        }
        return Response(payload)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
import logging
_logger = logging.getLogger('reportes')
class entrega_tecnica(report_sxw.rml_parse):
    """RML report parser for the 'entrega tecnica' (technical delivery)
    report; exposes `time` and `company` helpers to the RML template."""

    def __init__(self, cr, uid, name, context=None):
        super(entrega_tecnica, self).__init__(cr, uid, name, context=context)
        # Names usable from within the report template.
        self.localcontext.update({
            'time': time,
            'company': self._company,
        })

    def _company(self):
        # Returns a one-element list of dicts for the template.  The real
        # company query is commented out; 'pepe' is a hard-coded placeholder.
        data = []
        #self.cr.execute('select distinct c.name from res_users as u join res_company as c on u.company_id = c.id where u.company_id = %s', ())
        place = 'pepe' #self.cr.fetchall()
        values = {
            'place': place,
        }
        data.append(values)
        return data
# Register the parser above under the report service name 'report.entec'
# for model 'entrega.tecnica', rendered from the given RML template.
report_sxw.report_sxw('report.entec','entrega.tecnica','trunk/entrega_tecnica/report/ent_tec_report.rml', parser=entrega_tecnica, header='false')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
from sklearn.externals import joblib
from cfepm.util.io_utils import read_dataset
from cfepm.pipeline import Row2BeanConverter
# Load a previously pickled pipeline and run its transform over the raw
# eBay dataset, printing the model and its output.
model = joblib.load('testpipe.pkl')
df = read_dataset('ebay_52K_raw_balanced.csv')
bean_tuples = Row2BeanConverter().transform(df)
print(model)
print(model.transform(bean_tuples))
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
# this script serves to do the dirty cleaning work for
# users' tweets.
#
# @author Yuan JIN
# @contact chengdujin@gmail.com
# @since 2012.03.06
# @latest 2012.03.08
#
# reload the script encoding
# NOTE(review): Python 2 only — reload() on sys and setdefaultencoding()
# do not exist in Python 3.
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')

# CONSTANTS
# Database Configuration
DB = '176.34.54.120:27017'          # MongoDB host:port
MICRO_BLOGS = ['twitter', 'weibo']  # databases holding tweet-style documents
NEWS_SOURCES = ['articles']         # databases holding news articles
def clean(source='articles/cnBeta'):
    """Collect, rearrange and filter documents from one source.

    `source` is "<database>/<collection>" (case-insensitive).  Returns a
    list of model objects built by the matching media handler
    (media.Twitter for micro-blogs, media.News for news sources).

    Raises:
        Exception: if the database part is neither a known micro-blog
            nor a known news source.
    """
    import media

    doc_type = media.Document()
    source = source.strip().lower()

    database = {}
    # [0] database name [1] collection name
    source_info = source.split('/')
    database['db'] = source_info[0]
    database['collection'] = source_info[1]

    if source_info[0] in MICRO_BLOGS:
        doc_type = media.Twitter()
    elif source_info[0] in NEWS_SOURCES:
        doc_type = media.News()
    else:
        # BUG FIX: the original `return Exception(...)` handed the exception
        # object back to the caller instead of raising it, so callers saw a
        # truthy "result" on bad input.
        raise Exception('[error] cleaner: such source does not exist!')

    # read data from database
    docs = doc_type.collect_data(database)

    # build document model
    collection = []
    for doc in docs:
        if doc:
            # item is media.Article or media.Tweet
            item = doc_type.build_model(doc)
            collection.append(item)
    return collection
if __name__ == '__main__':
    # clean() lower-cases its argument, so 'cnbeta' matches 'articles/cnBeta'.
    clean('articles/cnbeta')
|
import os
import urllib
import re
from WMCore.Database.CMSCouch import CouchServer
from WMCore.Configuration import loadConfigurationFile
class CouchDBConnectionBase(object):
    """Helper around CouchDB connection details for the job dump and ACDC
    databases.

    `couchConfig` must provide couchURL, acdcDBName and jobDumpDBName.
    """

    def __init__(self, couchConfig):
        self.couchURL = couchConfig.couchURL
        self.acdcDB = couchConfig.acdcDBName
        self.jobDumpDB = couchConfig.jobDumpDBName

    def getCouchDBURL(self):
        """Return the CouchDB server URL."""
        return self.couchURL

    def getCouchDBName(self):
        """Return the job dump database name."""
        return self.jobDumpDB

    def getCouchACDCURL(self):
        """Return the CouchDB server URL used for ACDC (same server)."""
        return self.couchURL

    def getCouchACDCName(self):
        """Return the ACDC database name."""
        return self.acdcDB

    def getCouchDB(self):
        """Connect to the job dump database (5 minute request timeout)."""
        couchServer = CouchServer(dburl = self.couchURL)
        couchDB = couchServer.connectDatabase(dbname = self.jobDumpDB)
        couchDB['timeout'] = 300 # set request timeout 5 min
        return couchDB

    def getCouchJobsDB(self):
        """Connect to the jobs sub-database of the job dump DB."""
        couchServer = CouchServer(dburl = self.couchURL)
        couchDB = couchServer.connectDatabase(dbname = self.jobDumpDB + "/jobs")
        couchDB['timeout'] = 300 # set request timeout 5 min
        return couchDB

    def getCouchACDC(self):
        """Connect to the ACDC database."""
        couchServer = CouchServer(dburl = self.couchURL)
        couchDB = couchServer.connectDatabase(dbname = self.acdcDB)
        return couchDB

    def getCouchACDCHtmlBase(self):
        """
        TODO: currently it is hard code to the front page of ACDC
        When there is more information is available, it can be added
        through
        """
        baseURL = '%s/%s/_design/ACDC/collections.html' % (self.couchURL,
                                                           self.acdcDB)
        # Strip any user:password@ credentials before exposing the URL.
        baseURL = re.sub('://.+:.+@', '://', baseURL, 1)
        return baseURL

    def getCouchDBHtmlBase(self, database, design, view, path = None, options = None,
                           type = "show"):
        """
        type should be either 'show' or 'list'
        Couch server will raise an error if another type is passed

        BUG FIX: `options` previously defaulted to a shared mutable dict
        ({}), the classic mutable-default pitfall; it now defaults to None
        (falsy, so the urlencode branch below behaves identically).
        """
        baseURL = '%s/%s/_design/%s/_%s/%s' % \
            (self.couchURL, database, design, type, view)
        baseURL = re.sub('://.+:.+@', '://', baseURL, 1)
        if (options):
            # NOTE(review): urllib.urlencode is the Python 2 API
            # (urllib.parse.urlencode in Python 3) — consistent with the
            # file's other py2-era imports.
            data = urllib.urlencode(options)
            if path:
                baseURL = "%s/%s?%s" % (baseURL, path, data)
            else:
                baseURL = "%s?%s" % (baseURL, data)
        return baseURL
|
#! usr/bin/python3
def climbStairs(n: int) -> int:
    """Count the ways to climb n stairs taking steps of 1 or 2.

    Combinatorial form: for each count of 2-steps `twos`, the 1-step count
    is fixed at n - 2*twos, contributing C(ones + twos, twos) orderings.

    Improvements over the original: the redundant outer loop over every x
    in range(n+1) (x is fully determined by twos) is gone, and the binomial
    is computed inline instead of via the sibling helper cmbt(), making the
    function self-contained.
    """
    total = 0
    for twos in range(n // 2 + 1):
        ones = n - 2 * twos
        # C(ones + twos, twos), built incrementally; each partial product
        # is itself a binomial, so the integer division is exact.
        ways = 1
        for k in range(1, twos + 1):
            ways = ways * (ones + k) // k
        total += ways
    return total
def climbStairs1(n: int) -> int:
    """Count the ways to climb n stairs (1 or 2 at a time), iteratively.

    Fibonacci-style recurrence: ways(n) = ways(n-1) + ways(n-2) with
    base cases 1, 1, 2.
    """
    base = (1, 1, 2)
    if n < len(base):
        return base[n]
    prev, curr = 1, 2          # ways(n-2), ways(n-1) seeded at n == 2
    for _ in range(3, n + 1):
        prev, curr = curr, prev + curr
    return curr
def cmbt(x, y) -> int:
    """Binomial coefficient C(x + y, y) by explicit products.

    Multiplies (x+1)(x+2)...(x+y) into the numerator and y! into the
    denominator, then divides once (exact for non-negative inputs).
    """
    numerator, denominator = 1, 1
    for k in range(y, 0, -1):
        numerator *= x + k
        denominator *= k
    return numerator // denominator
# Demo: print the iterative (Fibonacci-style) answer for n = 500.
n = 500
print("fib=", climbStairs1(n))
# print("排列组合",climbStairs(n))   # (Chinese: "permutations/combinations" — the combinatorial version)
# print(pmt(5))
|
#!usr/bin/env
import rospy
import time
import math
import tf
import roslib
def brodcaster(x,y,z,w):
    """Continuously broadcast a TF transform from robot_wheels to robot_camera.

    x, y, z are passed to quaternion_from_euler (i.e. treated as Euler
    angles).  NOTE(review): `w` is never used, yet the caller passes four
    values as if they were quaternion components — confirm intent.
    """
    rospy.init_node('frame_a_to_frame_b_brodcaster_node',anonymous=False)
    time.sleep(0.5)  # give the node a moment to register before publishing
    bc=tf.TransformBroadcaster()
    while not rospy.is_shutdown():
        # we need to brodcast translation, rotation and time
        quaternion=tf.transformations.quaternion_from_euler(x,y,z)
        translation=(1.0,2.0,3.0)  # fixed offset between the two frames
        Time=rospy.Time.now()
        # sendTransform(translation, rotation, time, child_frame, parent_frame)
        bc.sendTransform(translation,quaternion,Time,"robot_camera","robot_wheels")
if __name__=="__main__":
    # Fourth argument is currently ignored by brodcaster (see note there).
    brodcaster(0.8,0.1,0.3,0.6)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from .models import Brand, Person, Location, Data
class DataTestCase(TestCase):
    """Model-construction tests for Brand, Location and Person.

    BUG FIX: the representation checks used self.assertTrue(a, b), where
    the second argument is only the failure *message* — those assertions
    passed whenever `a` was truthy and never compared anything.  They are
    now assertEqual, comparing __unicode__() with the expected string
    (assumes __unicode__ renders the name fields, matching the original
    intent — verify against the models).
    """

    def create_brand(self):
        return Brand(name='Sprite', company='Coca Cola', description='Green bottle')

    def create_location(self):
        return Location(name='Mathare', latitude='34.393939', longitude='54.838383', description='highly propulated')

    def create_person(self):
        return Person(first_name='John', last_name='Doe', age=30, gender='male', description="tall man")

    def test_brand_creation(self):
        brand = self.create_brand()
        self.assertTrue(isinstance(brand, Brand))
        self.assertEqual(brand.__unicode__(), "{}".format(brand.name))

    def test_location_creation(self):
        location = self.create_location()
        self.assertTrue(isinstance(location, Location))
        self.assertEqual(location.__unicode__(), "{}".format(location.name))

    def test_person_creation(self):
        person = self.create_person()
        self.assertTrue(isinstance(person, Person))
        self.assertEqual(person.__unicode__(), "{} {}".format(person.first_name, person.last_name))
# todo add the request and response test case |
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
__author__="Brandon"
__date__ ="$Sep 30, 2014 1:11:17 PM$"

import replaceQueries

if __name__ == "__main__":
    # Entry point: delegate directly to the replaceQueries module.
    replaceQueries.main()
|
import pexpect
import pytest
@pytest.mark.parametrize("src", [
    "aliyun",
    "douban",
    "edu",
])
def test_pipsrc(src):
    """Switch the pip index with `pipsrc` in a fresh bash session, then run
    a pip install and check the chosen mirror shows up in its output."""
    child = pexpect.spawn("bash")
    child.sendline(f"pipsrc {src}")
    # BUG FIX: the original patterns used f".*{src}*.*", where the trailing
    # `*` quantifies only the LAST character of src (e.g. "aliyun*" matches
    # "aliyu" plus zero-or-more "n"), making the match far looser than
    # intended.  ".*{src}.*" matches the source name literally.
    child.expect(f".*{src}.*")
    child.sendline("pip install pip")
    child.expect(f".*indexes.*{src}.*")
|
from django.forms import ModelForm
from .models import Developer, User, Project
class ProjectForm(ModelForm):
    """Create/edit a Project: name, overview and languages."""
    class Meta:
        model = Project
        fields = ['project_name', 'project_overview', 'languages']
class DeveloperForm(ModelForm):
    """Create/edit a Developer: name and description."""
    class Meta:
        model = Developer
        fields = ('name', 'description')
class UserForm(ModelForm):
    """Edit a User's username only."""
    class Meta:
        model = User
        fields = ('username',)
class UserDeleteForm(ModelForm):
    """Confirmation-only form for deleting a User (no editable fields)."""
    class Meta:
        model = User
        fields = []
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from order.forms import OrderForm
import json
from afriventapp.models import Event, EventTicket, UserProfile
from order.models import Order, OrderItem
from django.http import JsonResponse
from python_paystack.objects.transactions import Transaction
from python_paystack.managers import TransactionsManager
from python_paystack.paystack_config import PaystackConfig
from order.confirm_paystack_payment import confirmPaystackPayment, CustomPaystack
from order.tasks import reconcileOrder
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required
from celery import app
from celery import current_app
from celery.task.control import revoke
from celery.result import AsyncResult
from order.generate_barcode import barcodeGenerator
import base64
from django.core.files.base import ContentFile
from django.core.mail import EmailMessage
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.template.loader import render_to_string
from weasyprint import HTML, CSS
import tempfile
from django.conf import settings
@never_cache
@login_required
def orderConfimation(request):
    """Show the order-confirmation countdown page for the session's order.

    On a fresh order (session['order_exist'] True) it schedules the celery
    reconcile task with a 10-minute countdown; otherwise it e-mails the
    event creator a failed-order notice and renders the failure page.
    NOTE(review): function name keeps the original 'Confimation' spelling —
    callers reference it, so it cannot be renamed here.
    """
    print(request.method)
    order_id = request.session['order_id']
    order = get_object_or_404(Order, pk=order_id)
    order_items = OrderItem.objects.filter(order=order)
    event = get_object_or_404(Event, id=order.event.id)
    # Event creator's profile — recipient of the failure e-mail below.
    user = get_object_or_404(UserProfile, pk=order.event.creator.id)
    if request.session['order_exist']:
        # Reconcile (e.g. cancel unpaid order) after 600 s.
        result = reconcileOrder.apply_async((order_id,), countdown=600)
        request.session['task_id'] = result.id
        OrderItems = OrderItem.objects.filter(order=order)  # NOTE(review): unused duplicate of order_items
        # One-shot guard: a revisit of this view takes the failure branch.
        request.session['order_exist'] = False
    else:
        mail_subject = "Failed Order for {}".format(order.event.event_name)
        html_content = render_to_string('order/failed_order_email.html', {
            'order': order,
            'order_items': order_items,
        })
        text_content = strip_tags(html_content)  # NOTE(review): unused; HTML body is sent below
        msg = EmailMessage(mail_subject, html_content, to=[user.user.email])
        msg.content_subtype = "html"
        msg.send()
        return render(request, 'order/failedOrder.html', context={
            'order': order
        })
    context = {
        'order': order,
        'event': event,
    }
    return render(request, 'order/celeryTimerConfirm.html', context)
@login_required
def orderProcess(request):
    """Create an Order and its OrderItems from the posted ticket selection.

    Expects POST fields: 'event' (Event id), 'ticket' (JSON mapping
    ticket-id -> quantity), 'price' (JSON, currently unused) and 'total'.
    Stashes the new order id in the session and renders the confirmation
    countdown page.
    """
    if request.method != 'POST':
        # BUG FIX: the original fell through to `context = {'order': order}`
        # with `order` never assigned, raising NameError on any GET.
        from django.http import HttpResponseNotAllowed
        return HttpResponseNotAllowed(['POST'])

    dataDict = request.POST.dict()
    eventID = dataDict['event']
    tickets = json.loads(dataDict['ticket'])
    price = json.loads(dataDict['price'])  # parsed for validation only; not used further
    total = dataDict['total']

    order = Order.objects.create(
        user=request.user,
        event=get_object_or_404(Event, pk=eventID),
        total_cost=total
    )

    for ticket in tickets.keys():
        ticketType = get_object_or_404(EventTicket, pk=ticket)
        quantity = tickets[ticket]  # Get the quantity the user wants
        OrderItem.objects.create(
            user=request.user,
            order=order,
            ticket=ticketType,
            quantity=quantity
        )
        # Stock is decremented later, once payment is confirmed.
        ticketType.save()

    # Hand the order off to the confirmation flow via the session.
    request.session['order_id'] = order.id
    request.session['order_exist'] = True

    context = {'order': order}
    return render(request, 'order/celeryTimerConfirm.html', context)
@never_cache
@login_required
def processPayment(request):
    """Initialise a Paystack transaction for the session's order and
    redirect the buyer to Paystack's authorization page.

    Also cancels the pending celery reconcile task and flags the session
    so the confirmation view sends the ticket e-mail exactly once.
    """
    order_id = request.session['order_id']
    task_id = request.session['task_id']
    request.session['email_generate'] = True
    # Payment is proceeding — revoke the scheduled reconcile/cancel task.
    current_app.control.revoke(task_id)
    order = get_object_or_404(Order, pk=order_id)
    PaystackConfig.SECRET_KEY = settings.PAYSTACK_SECRET_KEY
    PaystackConfig.PUBLIC_KEY = settings.PAYSTACK_PUBLIC_KEY
    # Paystack amounts are in kobo (1/100 of a naira).
    orderAmountKobo = order.total_cost * 100
    transaction = Transaction(orderAmountKobo, order.user.email)
    transaction_manager = CustomPaystack()
    transaction = transaction_manager.initialize_transaction(
        'STANDARD', transaction)
    # request.session['transaction'] = transaction
    # Persist Paystack handles on the order for the callback to find.
    order.authorization_url = transaction.__dict__['authorization_url']
    order.ref_code = transaction.__dict__['reference']
    order.access_code = transaction.__dict__['access_code']
    order.payment_confirmation = False
    order.save()
    # request.session['order_exist'] = True
    # send email here
    return redirect(transaction.__dict__['authorization_url'])
@login_required
def confirmPayment(request):
    """Paystack callback: verify the payment by reference code.

    On success: credit the event creator, build the barcode + PDF ticket,
    e-mail it once (guarded by session['email_generate']) and decrement
    ticket stock.  On failure: e-mail the creator a failed-order notice and
    render the failure page.
    """
    reference_code = request.GET.dict()['reference']
    order = get_object_or_404(Order, ref_code=reference_code)
    order_items = OrderItem.objects.filter(order=order)
    # Event creator's profile: receives the funds on success and the alert
    # e-mail on failure.  BUG FIX: this lookup was inside the success branch
    # only, so the failure branch crashed with NameError on `user`.
    user = get_object_or_404(UserProfile, pk=order.event.creator.id)
    if confirmPaystackPayment(reference_code) == 'success':
        order.payment_confirmation = True
        barcode = barcodeGenerator(
            order.id, order.event, reference_code, order.order_unique_id, user)
        user.balance += order.total_cost
        user.save()
        order.save()
        html_string = render_to_string('order/ticket.html', {
            'order': order,
            'order_items': order_items,
            'barcode': barcode})
        html = HTML(string=html_string)
        # NOTE(review): `settings.STATIC_` looks like a truncated setting
        # name (STATIC_ROOT?) — kept as-is, confirm against settings.py.
        css = CSS(settings.STATIC_ + '/ticket.css')
        result = html.write_pdf(stylesheets=[
            css, "https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css"])
        if request.session['email_generate']:
            mail_subject = "Your Tickets for {}".format(order.event.event_name)
            html_content = render_to_string('order/order-success-email.html', {
                'order': order,
                'order_items': order_items,
                'barcode': barcode})
            text_content = strip_tags(html_content)
            msg = EmailMultiAlternatives(
                mail_subject, text_content, to=[user.user.email])
            # Attach the generated PDF ticket via a named temp file.
            with tempfile.NamedTemporaryFile(prefix='{0} Tickets for {1} event'.format(user.user, order.event.event_name), suffix='.pdf', delete=True) as output:
                output.write(result)
                output.flush()
                msg.attach_file(output.name)
                msg.send()
            print('EMAIL SENT')
            # One-shot guard: page refreshes must not resend the e-mail.
            request.session['email_generate'] = False
            # Stock is only decremented once payment (and e-mail) succeeded.
            for order_item in order_items:
                ticketType = get_object_or_404(
                    EventTicket, pk=order_item.ticket.pk)
                ticketType.quantity -= order_item.quantity
                ticketType.save()
    else:
        order.payment_confirmation = False
        mail_subject = "Failed Order for {}".format(order.event.event_name)
        html_content = render_to_string('order/failed_order_email.html', {
            'order': order,
            'order_items': order_items,
        })
        text_content = strip_tags(html_content)
        msg = EmailMultiAlternatives(
            mail_subject, text_content, to=[user.user.email])
        msg.send()
        order.save()
        return render(request, 'order/failedPayment.html', context={
            'order': order
        })
    context = {
        'order': order,
        'order_items': order_items,
        'barcode': barcode
    }
    return render(request, 'order/success.html', context)
|
import requests
import time
import json
from hatebase_credentials import api_key
# Hatebase v4.4 API endpoints.
auth_path = "https://api.hatebase.org/4-4/authenticate"
query_path = "https://api.hatebase.org/4-4/get_vocabulary"

# establish api connection and get token
r_auth = requests.post(auth_path, data = {'api_key': api_key})
assert r_auth.status_code == 200, r_auth.json()
token = r_auth.json()['result']['token']

# initialize object to write to
eng_vocab = []

# first query + get num pages (page count comes back with page 1)
r_query = requests.post(query_path, data = {'token': token, 'page': '1', 'language':'ENG'})
assert r_query.status_code == 200, json.dumps(r_auth.json(), indent=4)
res_object = r_query.json()
num_pages = res_object['number_of_pages']
eng_vocab.extend(res_object['result'])

# go through pages and get vocab
for page in range(2, num_pages+1):
    print("Page %i of %i" % (page, num_pages))
    r_query = requests.post(query_path, data = {'token': token, 'page': str(page), 'language':'ENG'})
    # NOTE(review): failure message dumps r_auth, not r_query — likely a
    # copy/paste slip; kept as-is (message only).
    assert r_query.status_code == 200, json.dumps(r_auth.json(), indent=4)
    res_object = r_query.json()
    eng_vocab.extend(res_object['result'])
    time.sleep(1)  # courtesy pause between API pages

# Persist the accumulated vocabulary.
with open('eng_vocab.json', 'w') as outfile:
    json.dump(eng_vocab, outfile, indent=4)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.