# Generated by Django 2.0 on 2017-12-21 14:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0030_auto_20171220_1707'),
]
operations = [
migrations.RemoveField(
model_name='userprofile',
name='image',
),
migrations.AddField(
model_name='userprofile',
name='prof_post',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='profile_picture', to='main.Post'),
),
migrations.AlterField(
model_name='userprofile',
name='liked_posts',
field=models.ManyToManyField(related_name='liked_posts', to='main.Post'),
),
]
|
#!/usr/bin/env python3
import argparse
import cmd
import os
import re
import shutil
import sys
DIRECTORY_REGEX = re.compile(r'^(.+) v(\d{3}) c(\d{3}[a-z]?)$')
ARCHIVE_REGEX = re.compile(r'^(.+) v(\d{3}) c(\d{3}[a-z]?)\.(cbz)$')
PAGE_REGEX = re.compile(r'^(.+) v(\d{3}) c(\d{3}[a-z]?) p(\d{3}(?:-\d{3})?[a-z]?)\.(jpg|png|webp)$')
NUMBER_REGEX = re.compile(r'((\d+)(?:-(\d+))?([a-z])?)')
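# For illustration (hypothetical names, not taken from any real series), the patterns above
# are meant to match entries such as:
#   DIRECTORY_REGEX: 'My Series v001 c003'
#   ARCHIVE_REGEX:   'My Series v001 c003.cbz'
#   PAGE_REGEX:      'My Series v001 c003 p001-002a.png'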
CHECKMARK = '✓'
ACTION_RENAME = 'R'
ACTION_IGNORE = 'I'
ACTION_DELETE = 'D'
TYPE_NONE = 'None'
TYPE_SERIES = 'Series'
TYPE_CHAPTER = 'Chapter'
class RenameShell(cmd.Cmd):
def __init__(self, basePath):
super().__init__()
self.basePath = os.path.abspath(basePath)
self.baseName = os.path.basename(self.basePath)
self.dirType = TYPE_NONE
self.renames = []
self.actions = []
self.intro = "Editing directory '%s'. Type 'help' or '?' for help." % (self.basePath)
self.prompt = "%s > " % (self.basePath)
self._reload()
def do_bulk(self, arg):
pattern = arg.strip()
if (pattern == ''):
pattern = None
self._reload(numberRegex = pattern)
def do_cd(self, arg):
arg = arg.strip()
path = None
if (re.match(r'^\d+$', arg)):
index, _ = self._parseIndex(arg)
if (index == None):
return
path = os.path.join(self.basePath, self.renames[index][0])
else:
path = arg
if (not os.path.isabs(path)):
path = os.path.join(self.basePath, path)
if (not os.path.exists(path)):
print('ERROR: Directory does not exist: %s' % (path))
return
if (not os.path.isdir(path)):
print('ERROR: Path is not a directory: %s' % (path))
return
self.basePath = os.path.abspath(path)
self.baseName = os.path.basename(self.basePath)
self.dirType = TYPE_NONE
self.prompt = "%s > " % (self.basePath)
self._reload()
def do_edit(self, arg):
index, arg = self._parseIndex(arg)
if (index is None):
return
if (arg is None):
print('ERROR: No new name specified.')
return
print("Editing rename index %d: '%s' -> '%s'." % (index, self.renames[index][1], arg))
self.actions[index] = ACTION_RENAME
self.renames[index][1] = arg
def do_ignore(self, arg):
index, arg = self._parseIndex(arg)
if (index is None):
return
print('Ignoring index %d.' % (index))
self.actions[index] = ACTION_IGNORE
def do_help(self, arg):
print('? / help - Display this prompt.')
print('bulk - Take a given regex and register new renames for all entries.')
print(' The pattern must capture the context number in the first capture group.')
        print('    re.findall is used; the last match in the entry name is the one that counts.')
print(' On a failed match, no change will be made to the entry.')
print(' If no pattern is supplied, the natural ordering of entries will be used for numbers.')
print(' Even entries marked for ignore or deletion will get their rename changed, but their action will remain.')
        print('cd - Change the current working directory.')
print('rm - Delete a single entry from disk.')
print('edit - Edit a single rename entry.')
print('ignore - Ignore a single entry (remove it from the list of renames).')
print('ls - List the entries in the current directory.')
print('quit - Quit out of interactive mode without writing/saving the results.')
print('reload - Reload this directory from disk.')
print('type - Set the type for this directory:')
print(' (n)one, (s)eries, or (c)hapter.')
print('write - Write all the renames to disk and quit interactive mode.')
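        # Example (hypothetical entry names): in a directory whose type is set to Chapter and
        # which contains 'scan-a.png' and 'scan-b.png', running `bulk` with no pattern falls
        # back to the natural ordering, so the proposed renames become
        # '<directory name> p001.png' and '<directory name> p002.png'.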
def do_ls(self, arg):
print("%s (Type: %s)" % (self.basePath, self.dirType))
for i in range(len(self.renames)):
action = self.actions[i]
original, newName = self.renames[i]
if (action == ACTION_RENAME):
mark = action
if (original == newName):
mark = CHECKMARK
print(" %03d (%s) '%s' -> '%s'" % (i, mark, original, newName))
else:
print(" %03d (%s) '%s'" % (i, action, original))
def do_quit(self, arg):
        print('Quitting without writing renames.')
return True
def do_reload(self, arg):
self._reload()
print('Directory reloaded.')
def do_rm(self, arg):
index, arg = self._parseIndex(arg)
if (index is None):
return
path = os.path.join(self.basePath, self.renames[index][0])
print("Index %d (%s) marked for delete." % (index, path))
self.actions[index] = ACTION_DELETE
def do_type(self, arg):
newType = None
arg = arg.strip().lower()
if (arg == '' or arg[0] == TYPE_NONE[0].lower()):
newType = TYPE_NONE
elif (arg[0] == TYPE_SERIES[0].lower()):
newType = TYPE_SERIES
elif (arg[0] == TYPE_CHAPTER[0].lower()):
newType = TYPE_CHAPTER
else:
print("ERROR: Unknown type '%s'." % (arg))
return
print("Setting directory type to %s." % (newType))
self.dirType = newType
def do_write(self, arg):
self._commit()
self._reload()
print('Renames committed to disk.')
def do_EOF(self, arg):
return self.do_quit(None)
def precmd(self, line):
return line.strip()
# Write actions to disk.
def _commit(self):
for i in range(len(self.renames)):
action = self.actions[i]
original, newName = self.renames[i]
originalPath = os.path.join(self.basePath, original)
newPath = os.path.join(self.basePath, newName)
if (action == ACTION_RENAME and original != newName):
shutil.move(originalPath, newPath)
elif (action == ACTION_DELETE):
self._remove(originalPath)
# The regex should pull out the number in the first capture group.
def _createRename(self, original, backupNumber, numberRegex):
ext = os.path.splitext(original)[-1]
number, highestNumber = self._parseAndPad(backupNumber)
if (numberRegex is not None):
matches = re.findall(numberRegex, original)
if (matches is not None and len(matches) > 0):
number, highestNumber = self._parseAndPad(matches[-1][0])
if (number is None):
return original, backupNumber
if (self.dirType == TYPE_NONE):
return original, highestNumber
elif (self.dirType == TYPE_SERIES):
return "%s c%s%s" % (self.baseName, number, ext), highestNumber
elif (self.dirType == TYPE_CHAPTER):
return "%s p%s%s" % (self.baseName, number, ext), highestNumber
else:
raise ValueError("Unknown directory type: %s." % (self.dirType))
# Take in some text that is supposed to be a page number and return a padded version.
# Note that "page numbers" are not just digits, they can have a dash and letters.
def _parseAndPad(self, text):
text = str(text).strip()
match = NUMBER_REGEX.match(text)
if (match is None):
return None, None
number = int(match.group(2))
text = "%03d" % (number)
highestNumber = number
if (match.group(3) is not None):
number = int(match.group(3))
text += "-%03d" % (number)
if (number > highestNumber):
highestNumber = number
if (match.group(4) is not None):
text += match.group(4)
return text, highestNumber
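    # For example: _parseAndPad('7') returns ('007', 7) and
    # _parseAndPad('3-12b') returns ('003-012b', 12).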
def _parseIndex(self, arg):
arg = str(arg).strip()
match = re.match(r'^(-?\d+)\s*.*$', arg)
if (match is None):
print('ERROR: Expecting index.')
return (None, arg)
index = int(match.group(1))
arg = arg.removeprefix(match.group(1)).strip()
if (index < 0 or index >= len(self.renames)):
print("ERROR: Index (%d) out of range [0, %d)." % (index, len(self.renames)))
return (None, arg)
if (self.renames[index] is None):
print('ERROR: Index %d is ignored.' % (index))
return (None, arg)
return (index, arg)
def _reload(self, numberRegex = NUMBER_REGEX):
self.renames = []
nextNumber = 1
for dirent in sorted(os.listdir(self.basePath)):
rename, highestNumber = self._createRename(dirent, nextNumber, numberRegex)
nextNumber = max(nextNumber, highestNumber) + 1
self.renames.append([dirent, rename])
self.actions = [ACTION_RENAME] * len(self.renames)
def _remove(self, path):
if (os.path.isfile(path) or os.path.islink(path)):
os.remove(path)
elif os.path.isdir(path):
shutil.rmtree(path)
else:
raise ValueError("Path %s is not a file, link, or dir." % (path))
def main(args):
RenameShell(args.path[0]).cmdloop()
def _load_args(args):
    parser = argparse.ArgumentParser(description = 'Interactively edit manga directories.')
parser.add_argument('path',
default = '.', metavar = 'PATH', type = str, nargs = '?',
help = 'a directory')
return parser.parse_args()
if (__name__ == '__main__'):
sys.exit(main(_load_args(sys.argv)))
|
"""
The Sonar API is defined here to interact with the sonar database and perform
the CLI tasks.
"""
import logging
import os
import pprint
import shutil
import sys
import textwrap
from distutils.dir_util import copy_tree
from pathlib import Path
import toml
import sonar.database
import sonar.include
from sonar.core.include import Constants, configure_logging
from sonar.exceptions import ReturnValue, SonarException
from sonar.make import MakeFile
logger = logging.getLogger(__name__)
def activate(args):
"""
Activate a particular environment by name.
Args:
args (object): Holds attributes
            name (str): Name of the environment to activate
"""
if args.name is not None:
Env.activate(args)
else:
logger.error("Activating environment failed: no name specified")
class Env:
"""
The Env class defines functions to interact with sonar's environments.
Environments define a particular set of tools, board and repository that
users can switch between.
"""
@staticmethod
def activate(args):
"""
Activate a particular environment by name.
Args:
args (object): Holds attributes
                name (str): Name of the environment to activate
"""
try:
sonar.database.Env.activate(args.name)
except SonarException as exc:
logger.error("Activating environment failed: %s", exc.exit_str)
sys.exit(exc.exit_code)
@staticmethod
def add(args):
"""
Add a new environment to the database.
Args:
args (object): Holds attributes
cad (str): CAD tool of the form "name:version"
sim (str): Simulation tool of the form "name:version"
hls (str): HLS tool of the form "name:version"
name (str): Name of the environment
board (str): Name of the board
repo (str): Name of the repository
"""
cad_tool = args.cad.split(":")
sim_tool = args.sim.split(":")
hls_tool = args.hls.split(":")
sonar.database.Env.add(
args.name, cad_tool, hls_tool, sim_tool, args.board, args.repo
)
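    # Example (values are illustrative): Init.vivado below drives this method with a DotDict
    # such as
    #   sonar.include.DotDict({"cad": "vivado:2018.2", "sim": "vivado:2018.2",
    #                          "hls": "vivado:2018.2", "repo": None, "board": None,
    #                          "name": "vivado_2018.2"})
    # Each "name:version" string is split on ":" before being handed to the database layer.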
@staticmethod
def remove(args):
"""
Remove an environment from the database
Args:
args (object): Holds attributes
name (str): Name of the environment to remove
"""
sonar.database.Env.remove(args.name)
@staticmethod
def f_list(_args):
"""
List the environments in the database
Args:
_args (object): Unused
"""
env = sonar.database.Env.get()
pprint.pprint(env)
@staticmethod
def clear(_args):
"""
Clear all environments in the database
Args:
_args (object): Unused
"""
sonar.database.Env.clear()
class Tool:
"""
The Tool class defines functions to interact with sonar's CAD, HLS and
simulation tools.
"""
@staticmethod
def add(args):
"""
Add a new tool to the database.
Args:
args (object): Holds attributes
name (str): Name of tool
version (str): Version of tool
cad (str): Name of CAD executable
hls (str): Name of HLS executable
sim (str): Name of simulation executable
script (str): Shell script to initialize tool
"""
try:
sonar.database.Tool.add(
args.name,
args.version,
args.cad,
args.hls,
args.sim,
args.script,
)
except SonarException as exc:
logger.exception(
"Adding a tool to the database failed: %s", exc.exit_str
)
sys.exit(exc.exit_code)
# def remove(args):
# try:
# sonar.database.Tool.remove(args)
# except SonarException as exc:
# logger.error(f"Removing a tool from the database failed: {exc.exit_str}")
# sys.exit(exc.exit_code)
# def edit(args):
# try:
# sonar.database.Tool.edit(args)
# except SonarException as exc:
# logger.error(f"Editing a tool in the database failed: {exc.exit_str}")
# sys.exit(exc.exit_code)
@staticmethod
def f_list(_args):
"""
List the tools in the sonar database
Args:
_args (object): Unused
"""
tools = sonar.database.Tool.get()
pprint.pprint(tools)
@staticmethod
def clear(_args):
"""
Clear the database of tools
Args:
_args (object): Unused
"""
sonar.database.Tool.clear()
class Board:
"""
The Board class defines functions to interact with sonar's list of hardware
boards.
"""
@staticmethod
def add(args):
"""
Add a new board to sonar database
Args:
args (object): Holds attributes
path (str): Path to the board definition
"""
sonar.database.Board.add(args.path)
@staticmethod
def remove(args):
"""
Remove a board from the sonar database
Args:
args (object): Holds attributes
name (str): Name of board to remove
"""
sonar.database.Board.remove(args.name)
@staticmethod
def show(args):
"""
Print a particular board by name
Args:
args (object): Holds attributes
name (str): Name of board to show
"""
board = sonar.database.Board.get(args.name)
pprint.pprint(board)
@staticmethod
def clear(_args):
"""
Remove all boards from the sonar database
Args:
_args (object): Unused
"""
sonar.database.Board.clear()
@staticmethod
def activate(args):
"""
Activate a board by name
Args:
args (object): Holds attributes
name (str): Name of board to activate
"""
sonar.database.Board.activate(args.name)
@staticmethod
def deactivate(_args):
"""
Deactivate the active board
Args:
_args (object): Unused
"""
sonar.database.Board.deactivate()
@staticmethod
def f_list(_args):
"""
List the active board and all boards in the sonar database
Args:
_args (object): Holds attributes
"""
active_board = sonar.database.Board.get_active()
print(f"Active board: {active_board}")
print("Available boards:")
boards = sonar.database.Board.get()
for board in boards:
print(" " + board)
class Init:
"""
The Init class defines functions to initialize sonar in various ways.
"""
@staticmethod
def one_time_setup(_args):
"""
Performs initial setup for sonar. It creates the database, copies files
over to the user sonar directory and adds the default boards to the
database.
Args:
_args (object): Unused
"""
sonar.database.init()
src_dir = os.path.join(os.path.dirname(__file__), "files_to_copy/home")
# dst_dir = os.path.join(Constants.SONAR_PATH)
# need to use this copy tree instead of shutil because shutil copytree
# expects the dst directory doesn't exist
copy_tree(src_dir, str(Constants.SONAR_PATH))
with open(Path.home().joinpath(".bashrc"), "r+") as f:
for line in f:
if "# added by sonar" in line:
break
else: # not found, we are at the eof
f.write(
f"source {Constants.SONAR_SHELL_MAIN_SOURCE} # added by sonar"
)
files = os.listdir(os.path.join(os.path.dirname(__file__), "boards"))
boards = [x for x in files if x not in ("__init__.py", "__pycache__")]
for board in boards:
path = os.path.join(os.path.dirname(__file__), "boards", board)
sonar.database.Board.add(path)
configure_logging()
@staticmethod
def vivado(args):
"""
Helper function to initialize Xilinx tools
Args:
args (object): Holds attributes
path (str): Path to the Xilinx directory containing Vivado
"""
xilinx_path = os.path.abspath(args.path)
if not os.path.exists(xilinx_path):
logger.error("Path does not exist: %s", xilinx_path)
sys.exit(ReturnValue.SONAR_NONEXISTENT_PATH)
vivado_path = os.path.join(xilinx_path, "Vivado")
try:
vivado_versions = os.listdir(vivado_path)
except FileNotFoundError:
logger.error("No 'Vivado/' directory found in %s", xilinx_path)
sys.exit(ReturnValue.SONAR_INVALID_PATH)
vivado_script = Constants.SONAR_SHELL_PATH.joinpath("setup_vivado.sh")
for version in vivado_versions:
if float(version) <= 2017.2:
include_dir = os.path.join(
xilinx_path, f"Vivado_HLS/{version}/include"
)
else:
include_dir = os.path.join(
xilinx_path, f"Vivado/{version}/include"
)
args = sonar.include.DotDict(
{
"type": "vivado",
# "ID": f"vivado_{version}",
"cad": "vivado",
"sim": "vivado",
"hls": "vivado_hls",
"version": version,
"hls_include": include_dir,
"script": textwrap.dedent(
f"""\
source {vivado_script} {xilinx_path} {version}
export SONAR_CAD_TOOL=vivado
export SONAR_CAD_VERSION={version}
export SONAR_HLS_TOOL=vivado_hls
export SONAR_HLS_VERSION={version}
export SONAR_HLS_INCLUDE={include_dir}
"""
),
}
)
Tool.add(args)
args = sonar.include.DotDict(
{
"sim": f"vivado:{version}",
"hls": f"vivado:{version}",
"cad": f"vivado:{version}",
"repo": None,
"board": None,
"name": f"vivado_{version}",
}
)
Env.add(args)
class Repo:
"""
The Repo class defines functions to interact with sonar's repositories.
"""
@staticmethod
def add(_args):
"""
Add a new repository to the database
Args:
_args (object): Unused
"""
sonar.database.Repo.add()
@staticmethod
def f_list(_args):
"""
List all the repositories in the database
Args:
_args (object): Unused
"""
repo = sonar.database.Repo.get()
pprint.pprint(repo)
@staticmethod
def clear(_args):
"""
Clear the list of repositories in the database
Args:
_args (object): Unused
"""
# TODO accept an argument to remove a particular one
sonar.database.Repo.clear()
@staticmethod
def activate(args):
"""
Activate a particular repository
Args:
args (object): Should have "name" attribute
"""
sonar.database.Repo.activate(args.name)
class Create:
"""
The Create class defines functions to create new IPs and repos in sonar.
"""
@staticmethod
def ip(args):
"""
Create a new sonar IP in the current working directory.
Args:
args (object): Should have "name" attribute
"""
curr_dir = Path(os.getcwd())
ip_dir = curr_dir.joinpath(args.name)
ip_dir.mkdir()
ip_dir.joinpath("build/bin").mkdir(parents=True)
ip_dir.joinpath("include").mkdir()
ip_dir.joinpath("src").mkdir()
ip_dir.joinpath("testbench/build/bin").mkdir(parents=True)
ip_dir.joinpath("cad").mkdir()
ip_dir.joinpath("hls").mkdir()
with open(ip_dir.joinpath(Constants.SONAR_IP_MAKEFILE), "w") as f:
ip_makefile = MakeFile()
# ip_makefile.add_ip_variables(str(ip_dir))
f.write(str(ip_makefile.ip(str(ip_dir))))
with open(ip_dir.joinpath("Makefile"), "w") as f:
f.write("include sonar.mk")
src_dir = os.path.join(
os.path.dirname(__file__), "files_to_copy/repo/ip"
)
shutil.copy(
os.path.join(src_dir, "generate_cad.tcl"), ip_dir.joinpath("cad")
)
shutil.copy(
os.path.join(src_dir, "generate_cad.sh"), ip_dir.joinpath("cad")
)
shutil.copy(
os.path.join(src_dir, "generate_hls.tcl"), ip_dir.joinpath("hls")
)
shutil.copy(
os.path.join(src_dir, "generate_hls.sh"), ip_dir.joinpath("hls")
)
shutil.copy(os.path.join(src_dir, "run.sh"), ip_dir)
base_ip_path_sh = str(ip_dir).replace(
os.getenv("SONAR_REPO"), "$SONAR_REPO"
)
base_ip_path_tcl = str(ip_dir).replace(
os.getenv("SONAR_REPO"), "${::env(SONAR_REPO)}"
)
sonar.include.replace_in_file(
str(ip_dir.joinpath("cad").joinpath("generate_cad.tcl")),
"BASE_PATH",
base_ip_path_tcl,
)
sonar.include.replace_in_file(
str(ip_dir.joinpath("cad").joinpath("generate_cad.sh")),
"BASE_PATH",
base_ip_path_sh,
)
sonar.include.replace_in_file(
str(ip_dir.joinpath("hls").joinpath("generate_hls.tcl")),
"BASE_PATH",
base_ip_path_tcl,
)
sonar.include.replace_in_file(
str(ip_dir.joinpath("hls").joinpath("generate_hls.sh")),
"BASE_PATH",
base_ip_path_sh,
)
sonar.include.replace_in_file(
str(ip_dir.joinpath("run.sh")),
"BASE_PATH",
base_ip_path_sh,
)
# active_repo = sonar.database.Repo.get_active()
sonar.database.IP.add_new(args.name, ip_dir)
# repos = sonar.database.Repo.get()
# path = repos[active_repo]["path"]
# init_toml = os.path.join(path, Constants.SONAR_CONFIG_FILE)
# init = toml.load(init_toml)
# init["project"]["ips"] = [args.name]
# print(init)
# with open(init_toml, "w") as f:
# toml.dump(init, f)
@staticmethod
def repo(args):
"""
Create a new sonar repo in the current working directory.
Args:
args (object): Should have "name" attribute
"""
curr_dir = Path(os.getcwd())
repo_dir = curr_dir.joinpath(args.name)
repo_dir.mkdir()
src_dir = os.path.join(
os.path.dirname(__file__), "files_to_copy/repo/.sonar"
)
sonar_dir = repo_dir.joinpath(".sonar")
shutil.copytree(
src_dir, sonar_dir, ignore=shutil.ignore_patterns("__pycache__*")
)
src_file = os.path.join(
os.path.dirname(__file__), "files_to_copy/repo/.gitignore"
)
shutil.copy(src_file, repo_dir)
init_toml = repo_dir.joinpath(Constants.SONAR_CONFIG_FILE_PATH)
init = toml.load(init_toml)
init["project"]["name"] = args.name
with open(init_toml, "w") as f:
toml.dump(init, f)
os.chdir(repo_dir)
args = sonar.include.DotDict({"name": args.name})
Repo.add(args)
os.chdir(curr_dir)
# class IP:
# class Add:
# def src(args):
# current_path = os.getcwd()
# # active_repo = sonar.database.Repo.get_active()
# # repo = sonar.database.Repo.get(active_repo)
# sonar.database.IP.add_src(args.name, current_path, args.type)
class Database:
"""
    The Database class defines functions to interact with the sonar database.
"""
@staticmethod
def f_list(_args=None):
"""
Print the database to stdout.
Args:
_args (object): unused
"""
sonar.database.print_db()
def check_database():
"""
Checks to see if the sonar database exists. If it does not, performs the
setup to initialize the database.
"""
if not sonar.database.check_database():
Init.one_time_setup(None)
|
from django.urls import path
from . import views
# URL patterns: each path maps to a view function defined in views.py
urlpatterns = [
    path("", views.index, name="index"),
    path('music/', views.music, name="music"),
    path('John_Legend', views.John_Legend, name="John_Legend"),
    path('Brain_Mcknight', views.Brain_Mcknight, name="Brain_Mcknight"),
    path('Lil_Wayne', views.Lil_Wayne, name="Lil_Wayne"),
]
|
#
# trajectory_latent_tools.py
#
# Tools for training NNs to create
# latents of trajectories and then summarize
# them to describe policies.
# Inspired by "Robust Imitation of Diverse Behaviors":
# [1] https://arxiv.org/abs/1707.02747
import random
import numpy as np
import torch as th
# Structure of the trajectory data:
# np.ndarray of (N, D), where
# N = number of states collected and
# D = dimensionality of single observation
#
LATENT_SIZE = 256
EPOCHS = 5
# Note that each element is a single trajectory,
# so each update covers quite a few individual samples.
BATCH_SIZE = 8
EPS = 1e-7
class TrajectoryEncoder(th.nn.Module):
"""
A VAE model similar to [1], using a bi-directional
LSTM to encode trajectory into a latent and autoregressive
decoder to construct the same trajectory.
Differences:
- No WaveNet-like decoder, only use simple
single-step decoder (MLP).
- No actions handled, only states.
    - Decode to Gaussians and maximize the log-likelihood (i.e. minimize negative llk).
"""
def __init__(self, state_dim):
super().__init__()
self.state_dim = state_dim
# Only using Normal distribution here so prior for latents is known
self.latent_prior = th.distributions.normal.Normal(
th.zeros(LATENT_SIZE),
th.ones(LATENT_SIZE),
)
self.encoder_lstm = th.nn.LSTM(
self.state_dim,
LATENT_SIZE,
bidirectional=True
)
self.encoder_mu = th.nn.Linear(LATENT_SIZE, LATENT_SIZE)
self.encoder_std = th.nn.Linear(LATENT_SIZE, LATENT_SIZE)
# Decoder maps latents + previous states ->
# means + diagonal covariances
self.decoder = th.nn.Linear(
LATENT_SIZE + self.state_dim,
self.state_dim * 2
)
def encode_lstm(self, trajectory):
"""
Encode a trajectory (N, D) into an embedding as in [1]:
1. Run trajectory through LSTM, get LSTM outputs
2. Average LSTM outputs over time, produce mu, sigma
3. Sample VAE latent from Normal distribution and return
        Returns a (LATENT_SIZE,) Torch tensor representing the latent encoding
        of the compressed trajectory.
"""
encodings, _ = self.encoder_lstm(
# Add batch dimension
th.as_tensor(trajectory).float()[:, None, :]
)
# Get the "backward" output of the bidirectional LSTM.
# See https://pytorch.org/docs/stable/generated/torch.nn.LSTM.html#lstm
lstm_output = encodings.view(encodings.shape[0], 1, 2, LATENT_SIZE)[:, 0, 1, :]
latent = th.mean(lstm_output, dim=0)
return latent
def encode(self, trajectories):
"""
Encode and sample trajectories into VAE latents (the ones sampled
        from the constructed distribution).
Input is _List_ of trajectories, each a numpy array of (N, D).
Returns (#Trajectories, LATENT_SIZE) Torch tensor.
"""
lstm_latents = th.zeros((len(trajectories), LATENT_SIZE))
for trajectory_i, trajectory in enumerate(trajectories):
lstm_latents[trajectory_i] = self.encode_lstm(trajectory)
means = self.encoder_mu(lstm_latents)
# Make sure these are positive
stds = th.nn.functional.softplus(self.encoder_std(lstm_latents))
# Sampling from a diagonal multivariate normal
distributions = th.distributions.normal.Normal(means, stds)
sampled_latents = distributions.rsample()
return sampled_latents, distributions
def decode_single(self, previous_states, sampled_latent):
"""
Decode latents using autoregressive setup where
inputs are previous state and latent, and outputs
(mu, std) for Gaussians for each input.
Inputs (N, D) Torch tensor previous_states and latents (LATENT_SIZE,),
outputs ((N, D), (N, D)) Torch tensors to represent mean/std of
outputs.
"""
# Horribly inefficient way of doing things, but oh well
decoder_inputs = th.cat(
(
previous_states,
sampled_latent[None].repeat(previous_states.shape[0], 1)
),
dim=1
)
# Heee fun indexing. Get rid of hidden states and then of batch dimension
# decoder_outputs = self.decoder(decoder_inputs[:, None, :])[0][:, 0, :]
decoder_outputs = self.decoder(decoder_inputs)
mus = decoder_outputs[:, self.state_dim:]
stds = th.nn.functional.softplus(decoder_outputs[:, :self.state_dim])
return (mus, stds)
def vae_reconstruct_loss(self, trajectories):
"""
Take in bunch of trajectories and return a VAE reconstruction
loss scalar for these inputs.
Follow [1] and train decoder to predict next state given
previous ones.
"""
# Encode first
sampled_latents, sample_distributions = self.encode(trajectories)
final_loss = 0.0
for i in range(len(trajectories)):
sampled_latent = sampled_latents[i]
trajectory = th.as_tensor(trajectories[i]).float()
previous_states = trajectory[:-1]
successive_states = trajectory[1:]
successive_mus, successive_stds = self.decode_single(previous_states, sampled_latent)
successive_distributions = th.distributions.normal.Normal(successive_mus, successive_stds)
mean_log_likelihood = successive_distributions.log_prob(successive_states).sum(-1).mean()
prior_kl = th.distributions.kl_divergence(sample_distributions, self.latent_prior).sum(-1).mean()
# Minimize KL, maximize llk.
# Take mean over all the trajectories
final_loss += (prior_kl - mean_log_likelihood) / len(trajectories)
return final_loss
def train_trajectory_encoder(trajectories):
"""
Train a fixed neural-network encoder that maps variable-length
trajectories (of states) into fixed length vectors, trained to reconstruct
said trajectories.
Returns TrajectoryEncoder.
Parameters:
trajectories (List of np.ndarray): A list of trajectories, each of shape
(?, D), where D is dimension of a state.
Returns:
encoder (TrajectoryEncoder).
"""
state_dim = trajectories[0].shape[1]
network = TrajectoryEncoder(state_dim)
optimizer = th.optim.Adam(network.parameters())
num_trajectories = len(trajectories)
num_batches_per_epoch = num_trajectories // BATCH_SIZE
# Copy trajectories as we are about to shuffle them in-place
trajectories = [x for x in trajectories]
for epoch in range(EPOCHS):
random.shuffle(trajectories)
total_loss = 0
for batch_i in range(num_batches_per_epoch):
batch_trajectories = trajectories[batch_i * BATCH_SIZE:(batch_i + 1) * BATCH_SIZE]
loss = network.vae_reconstruct_loss(batch_trajectories)
optimizer.zero_grad()
loss.backward()
optimizer.step()
total_loss += loss.item()
print("Epoch {}, Avrg loss {}".format(epoch, total_loss / num_batches_per_epoch))
return network
def encode_policy_into_gaussian(network, trajectories):
"""
Encode a policy, represented by sampled trajectories, into a single diagonal Gaussian
by embedding trajectories and fitting a Gaussian distribution on the latents.
Returns th.distributions.MultivariateNormal
"""
latents, _ = network.encode(trajectories)
mu = th.mean(latents, dim=0).detach()
std = th.std(latents, dim=0).detach()
distribution = None
# Make sure (doubly so) that we do not store gradient stuff.
with th.no_grad():
distribution = th.distributions.MultivariateNormal(mu, th.diag(std ** 2))
return distribution
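# A minimal usage sketch (variable names are illustrative): given a list `trajectories` of
# (T_i, D) numpy arrays sampled from one policy, the intended flow is
#   encoder = train_trajectory_encoder(trajectories)
#   policy_distribution = encode_policy_into_gaussian(encoder, trajectories)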
if __name__ == '__main__':
# Test on random data
test_dim = 10
num_trajectories = 5
trajectories_length = [np.random.randint(15, 50) for i in range(num_trajectories)]
trajectories = [np.random.random((length, test_dim)) for length in trajectories_length]
vae = TrajectoryEncoder(test_dim)
optim = th.optim.Adam(vae.parameters())
for i in range(100):
loss = vae.vae_reconstruct_loss(trajectories)
optim.zero_grad()
loss.backward()
optim.step()
|
import pandas as pd
import util
from keras.preprocessing import text, sequence
import pickle
import numpy as np
print('loading data...')
df_train = pd.read_csv(util.train_data)
df_test = pd.read_csv(util.test_data)
df_train['comment_text'] = df_train['comment_text'].fillna('UN')
df_test['comment_text'] = df_test['comment_text'].fillna('UN')
print('df_test shape: {0}'.format(df_test.shape))
train_comments = df_train['comment_text'].tolist()
test_comments = df_test['comment_text'].tolist()
corpus = train_comments + test_comments
print('corpus size: {0}'.format(len(corpus)))
corpus_seq = list(map(len, map(text.text_to_word_sequence, corpus)))
print(np.percentile(corpus_seq, [0, 25, 50, 75, 80, 90, 95, 100]))
# exit(2)
tokenizer = text.Tokenizer(num_words=util.num_words)
tokenizer.fit_on_texts(corpus)
print('texts to seqs...')
train_word_seqs = tokenizer.texts_to_sequences(train_comments)
test_word_seqs = tokenizer.texts_to_sequences(test_comments)
print('padding seqs...')
# train_padded_words_seqs = sequence.pad_sequences(train_word_seqs, maxlen=util.maxlen)
# test_padded_words_seqs = sequence.pad_sequences(test_word_seqs, maxlen=util.maxlen)
train_padded_words_seqs = sequence.pad_sequences(train_word_seqs, maxlen=util.maxlen_ver1)
test_padded_words_seqs = sequence.pad_sequences(test_word_seqs, maxlen=util.maxlen_ver1)
print('saving data...')
# pickle.dump(train_padded_words_seqs, open(util.tmp_padded_seq_train, 'wb'))
# pickle.dump(test_padded_words_seqs, open(util.tmp_padded_seq_test, 'wb'))
pickle.dump(train_padded_words_seqs, open(util.tmp_padded_seq_train_ver1, 'wb'))
pickle.dump(test_padded_words_seqs, open(util.tmp_padded_seq_test_ver1, 'wb'))
|
from tod import app
app.run(debug=True)
|
import os
import shlex, subprocess
import sys
def execute():
wd=os.getcwd()
os.environ['SIMUL']="C:/Simul/master/Simul"
#os.environ['ue.bEnableFastIteration']='1'
#os.environ['ue.bUseUnityBuild']='false'
os.environ['QTDIR']=os.environ['DROPBOX']+'/Qt/qt5_msvc2012_64_opengl'
os.environ['SIMUL_BUILD']='1'
os.environ['VSDIR']=os.environ['ProgramFiles(x86)']+'/Microsoft Visual Studio 11.0';
args=[os.environ['VSDIR']+'/Common7/IDE/devenv.exe','./UE4.sln']
    #Add to the PATH so we can debug without having the DLLs in the same directory as the exe:
os.environ['PATH']=os.environ['QTDIR']+'/bin/plugins;'+os.environ['PATH']
os.environ['PATH']=os.environ['QTDIR']+'/bin;'+os.environ['PATH']
pid=subprocess.Popen(args).pid
if __name__ == "__main__":
execute()
|
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 16 18:58:05 2019
@author: gustavo.fonseca
"""
import pandas as pd
import funcs as f1
# Task 17
# Function used in the 'funcs.py' file
# Test with the 'Renda (R$)' and 'Público' columns of the Campeonato Brasileiro table.
t=pd.read_csv('file:///C:/Users/gustavo.fonseca/Downloads/tabelaBrasileirao2018.csv')
x1=t['Público']
y1=t['Renda (R$)']
r1=f1.corr(x1,y1)
|
import torch
import scipy.optimize
import numpy as np
def local_OT(D, window = 0):
window = window
p = D.shape[1]; m = D.shape[2] # p < m, e.g., p = 10, m = 20
    # Construct the LP data: cost vector c, equality constraints A x = b, inequality constraints G x <= h
x = torch.rand([10,p*m])
A = torch.zeros([p,p*m])
b = torch.ones([p])
for i in range(p):
A[i, (i)*m:(i+1)*m] = 1
G = torch.zeros([m, p*m])
for i in range(m):
for j in range(p):
G[i, j*m+i] = 1
h = torch.ones([m])
A_local = torch.zeros([p, p, m])
for i in range(p):
# left = np.floor((i - window) * (m*1.0/p))
# right = (i + window) * (m*1.0/p)
left = (i - window) * (m * 1.0 / p)
right = (i + 1 + window) * (m * 1.0 / p)
for j in range(m):
# if j < left or j >= right:
if j < left or j >= right:
A_local[i, i, j] = 1
# if i+window+1<=m-1:
# A_local[i, i, i+(window+1):] = 1
# if i-(window+1) >=0:
# A_local[i, i, :i-window] = 1
A_local = A_local.view([p, p*m])
b_local = torch.zeros([p])
A = torch.cat([A, A_local], 0).numpy()
b = torch.cat([b, b_local], 0).numpy()
G = G.numpy()
h = h.numpy()
T_list = []
for i in range(D.shape[0]):
c = D[i].view(-1).detach().cpu().numpy()
try:
sol = scipy.optimize.linprog(c, A_ub = G, b_ub = h, A_eq = A, b_eq = b, bounds=(0, 1)) #options={'maxiter': 200, 'sym_pos':False}
sol_x = torch.from_numpy(sol.x).view([p,m]).float()
except:
sol_x = torch.cat([torch.eye(p), torch.zeros(p, m-p)], 1)
T_list.append(sol_x)
T = torch.stack(T_list, 0)
return T.to(D.device) #(D * T.cuda()).sum() / p #(T>0.5).float() # binarize it
### for debug
# D = torch.rand([1, 10, 20])
# cost_orig = torch.diag(D[0]).sum()
# T = local_OT(D)
# cost_new = (D * T).sum()
# print(cost_orig, cost_new)
|
from state import State
import random
import timeit
import matplotlib.pyplot as plt
import statistics as stats
# If the disks are in different pins, we name the state first with where the big one is
statesString = ["b1s1", "b1s2", "b1s3", "s2b2", "s3b3", "b3s2", "b2s3", "b3s3", "b2s2", "b3s1", "b2s1", "s1b1"]
obeyProb = 0.9
moves = ["s1","s2","s3", "b1", "b2","b3"]
V = {}
V_prime = {}
pi = {}
GAMMA = 0.9
policy = {}
value = {}
# Creation of every state object
states = []
for state in statesString:
states.append(State(state, obeyProb))
state_to_action = {
states[0].name: ["s2", "s3"],
states[1].name: ["s1", "s3", "b2", "b3"],
states[2].name: ["s1", "s2", "b2", "b3"],
states[3].name: ["b1", "b3"],
states[4].name: ["b1", "b2"],
states[5].name: ["s1", "s3", "b1", "b2"],
states[6].name: ["s1", "s2", "b1", "b3"],
states[7].name: ["s3"],
states[8].name: ["s1", "s3"],
states[9].name: ["s2", "s3", "b1", "b2"],
states[10].name: ["s2", "s3", "b1", "b3"],
states[11].name: ["b2", "b3"]
}
def calculate_best_action_values(state):
best_value = float('-inf')
best_action = None
# if(state.name == "b1s3"):
# print()
# print()
# print()
# print("calculate_best_action_values: State " +state.name)
for action in state_to_action.get(state.name, []):
# if(state.name == "b1s3"):
# print()
# print()
# print("considering now the action: " +action)
expected_value = 0
for probability, reward, state_prime in state.get_transition_probs(action):
# if(state.name == "b1s3"):
# print("state_prime " +state_prime+ " with prob " +str(round(probability, 5))+ " and reward " +str(reward))
# print("And value: " +str(round(V[state_prime], 3)))
# print("the new value is: " +str(round(probability * (reward + GAMMA * V[state_prime]), 3)))
expected_value += probability * (reward + GAMMA * V[state_prime])
# if(state.name == "b1s3"):
# print("the new expected_value is: " +str(round(expected_value, 3)))
# print()
# if(state.name == "b1s3"):
# print("expected_value: " +str(expected_value)+ " best_value: " +str(best_value))
if expected_value > best_value:
best_value = expected_value
best_action = action
# print("The best value to return is: " +str(best_value))
return best_action, best_value
def value_iteration(error):
deltas = []
for state in states:
V[state.name] = 0
V_prime[state.name] = 0
pi[state.name] = ""
delta = 1
while delta > error:
delta = 0
delta_prime = 0
for state in states:
new_best_action, new_best_value = calculate_best_action_values(state)
if abs(new_best_value - V[state.name]) > delta:
delta = abs(new_best_value - V[state.name])
V_prime[state.name] = new_best_value
pi[state.name] = new_best_action
# deltas.append(delta)
for key, value in V_prime.items():
V[key] = value
# plt.plot(deltas)
# plt.show()
for key, value in V.items():
print("State " +key+ " policy: " +pi[key]+ " value: " +str(round(value, 2)))
def initialize_random_policy():
# policy is a lookup table for state -> action
# print("Random policy: ")
for state in states:
possible_actions = state_to_action.get(state.name, [])
policy[state.name] = possible_actions[random.randint(0, len(possible_actions) - 1)]
V[state.name] = 0
V_prime[state.name] = 0
value[state.name] = 0
# print(policy)
return policy
def policy_iteration():
# First we create a random policy that we will update as we learn
policy = initialize_random_policy()
# Solve V[s] = ∑s'∈S P(s'|s,π[s])(R(s,a,s')+γV[s'])
# V[s] = sum([P[s,policy[s],s1] * (R[s,policy[s],s1] + gamma*V[s1]) for s1 in range(N_STATES)])
changed = True
# print("V" +str(V))
while changed:
changed = False
print()
for state in states:
expected_value = 0
for probability, reward, state_prime in state.get_transition_probs(policy[state.name]):
expected_value += probability * (reward + GAMMA * V[state_prime])
V[state.name] = expected_value
print("For the state " +state.name+ " the expected_value is: " +str(expected_value))
# Check if the best action is the same as the actual policy
for state in states:
best_action, best_value = calculate_best_action_values(state)
print("The best action is " +best_action+ " and best_value is: " +str(best_value))
if policy[state.name] != best_action:
policy[state.name] = best_action
value[state.name] = best_value
changed = True
else: value[state.name] = best_value
print("Optimal policy: ")
for key, v in policy.items():
print("State " +key+ " policy: " +str(v)+ " value: " +str(round(value[key], 2)))
def check_error_policy_value(times):
V_Aux = {}
for x in range(1, times):
value_iteration(0.001)
policy_iteration()
# equals = True
for key, value in policy.items():
# equals = equals and pi[key] == value
if not(pi[key] == value):
break
try: V_Aux[key] = V_Aux[key] + 1
except KeyError: V_Aux[key] = 1
# if key == "b1s3":
# return
# print("State " +key+ " -> value_iteration: " +pi[key]+ " policy_iteration: " +value)
# print(equals)
print(V_Aux)
for key, value in V_Aux.items():
print("For state " +key+ " there have been " +str(value)+ " errors")
# check_error_policy_value(10000)
value_iteration(0.001)
print()
policy_iteration()
def calculate_convergence_speed(errors, n_times):
times = []
for error in errors:
times.append(calculate_avg_time_value_iteration(error, n_times))
# print(str(times[len(times) - 1])+ "s for error: " +str(error))
plt.plot(errors, times)
# plt.xscale('log')
# plt.yscale('log')
# plt.legend(errors)
plt.show()
def calculate_avg_time_value_iteration(error, times):
time = 0
t = []
for x in range(1, times):
start = timeit.default_timer()
value_iteration(error)
final = timeit.default_timer()
t.append(final - start)
print(str(round(stats.median(t),6)) + "s for error " +str(round(error,4)))
return stats.median(t)
# list_errors = []
# for x in range(1, 100):
# list_errors.append(1 - x/100)
# list_errors.append(frange(0.5, 1, 0.01))
# list_errors.append([0.5:0.01: 1])
# print(str(list_errors))
# list_errors = [2, 1, 0.9, 0.8, 0.7, 0.6, 0.5, 0.1, 0.01, 0.001]
# calculate_convergence_speed(list_errors, 100)
# value_iteration(0.001)
|
import os

import numpy as np
from skimage.io import imsave
from skimage.transform import resize

from keras import backend as K
from keras.callbacks import ModelCheckpoint
from keras.layers import Conv2D, Conv2DTranspose, Input, MaxPooling2D, concatenate
from keras.models import Model
from keras.optimizers import Adam

# load_train_data / load_test_data are assumed to live in a local data module that is not
# part of this file; adjust the import to wherever they are actually defined.
from data import load_train_data, load_test_data

# PARAMETERS
#################################################
# DATA
resize_image_height_to = 128
resize_image_width_to = 128
smooth = 1.0
test_data_fraction = 0.15
# COMPUTATION
number_of_epochs = 40
batch_size = 80
# MODEL
#################################################
# Loss function
# dice coefficient
def dice_coef(y_true, y_pred):
# truth as vector:
y_true_f = K.flatten(y_true)
# prediction as vector:
y_pred_f = K.flatten(y_pred)
# count predicted "1"s that are also true "1"s = count true positives
intersection = K.sum(y_true_f * y_pred_f)
    # 2 * count true positives / ( count true "1"s + count predicted "1"s )
# returns 0 for all wrong
# returns 0 for prediction all 0
# returns 1 for all correct
    # how 'good' an in-between value is depends on the total % of true "1"s.
return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
# loss is negative dice coefficient
def dice_coef_loss(y_true, y_pred):
return - dice_coef(y_true, y_pred)
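# Worked example (illustrative numbers, ignoring the smoothing term): if the ground truth has
# 10 foreground pixels and the prediction marks 8 of them plus 2 false positives, then
# intersection = 8 and dice = 2 * 8 / (10 + 10) = 0.8, so the loss is roughly -0.8.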
# Network architecture
def get_unet():
# expected input shape
inputs = Input((resize_image_height_to, resize_image_width_to, 1)) # 1 channel, x rows, y = x columns
# convolution
# Conv2D(number of filters, (kernel X, kernel Y), .. )
conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(inputs) # -> convolution to features: 32 window: 3
conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv1) # -> convolution to features: 32 window: 9
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1) # -> maxpool to features: 32 image : x / ( 2 )
conv2 = Conv2D(64, (3, 3), activation='relu', padding='same')(pool1) # -> convolution to features: 64 window: 18
conv2 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv2) # -> convolution to features: 64 window: 18
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2) # -> maxpool to features: 64 image : x / (2^2)
conv3 = Conv2D(128, (3, 3), activation='relu', padding='same')(pool2) # -> convolution to features: 128 window: 54
conv3 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv3) # -> convolution to features: 128 window: 162
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3) # -> maxpool to features: 128 image : x / (2^3)
conv4 = Conv2D(256, (3, 3), activation='relu', padding='same')(pool3) # -> convolution to features: 256 window: 486
conv4 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv4) # -> convolution to features: 256 window: 1458
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4) # -> maxpool to features: 256 window: ..
conv5 = Conv2D(512, (3, 3), activation='relu', padding='same')(pool4) # -> convolution to features: 512 window: ..
conv5 = Conv2D(512, (3, 3), activation='relu', padding='same')(conv5) # -> convolution to features: 512 image : x / (2^4)
# deconvolution
    up6 = Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv5) # deconv to features: 256 image : x / (2^3)
concat6 = concatenate([up6, conv4], axis=3) # add conv4
conv6 = Conv2D(256, (3, 3), activation='relu', padding='same')(concat6) # convolute
    conv6 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv6)  # convolute
up7 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6)
concat7 = concatenate([up7, conv3], axis=3)
conv7 = Conv2D(128, (3, 3), activation='relu', padding='same')(concat7)
conv7 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv7)
up8 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = Conv2D(64, (3, 3), activation='relu', padding='same')(up8)
conv8 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv8)
up9 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
conv9 = Conv2D(32, (3, 3), activation='relu', padding='same')(up9)
conv9 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv9)
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
    # Load previously trained weights so training resumes from the last run instead of
    # starting from scratch. On the very first run (when no weights.h5 exists yet) this
    # line has to be commented out; on the n>1th run, copy the weights.h5 file from
    # /output/ to /checkpoints/ first.
model.load_weights("weights.h5")
model.compile(optimizer=Adam(lr=1e-5), loss=dice_coef_loss, metrics=[dice_coef])
return model
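# Note: the compiled model expects input batches of shape (N, 128, 128, 1); preprocess() below
# resizes the raw images to that shape, and train_and_predict() then casts them to float32 and
# normalises with the training mean/std before fitting.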
def preprocess(imgs):
imgs_p = np.ndarray((imgs.shape[0], resize_image_height_to, resize_image_width_to), dtype=np.uint8)
for i in range(imgs.shape[0]):
imgs_p[i] = resize(imgs[i], (resize_image_width_to, resize_image_height_to), preserve_range=True)
imgs_p = imgs_p[..., np.newaxis]
return imgs_p
def train_and_predict():
print('-'*30)
print('Loading and preprocessing train data...')
print('-'*30)
imgs_train, imgs_mask_train = load_train_data()
imgs_train = preprocess(imgs_train)
imgs_mask_train = preprocess(imgs_mask_train)
imgs_train = imgs_train.astype('float32')
mean = np.mean(imgs_train) # mean for data centering
std = np.std(imgs_train) # std for data normalization
imgs_train -= mean
if std!=0:
imgs_train /= std
imgs_mask_train = imgs_mask_train.astype('float32')
imgs_mask_train /= 255. # scale masks to [0, 1]
    print('mask max: {}'.format(imgs_mask_train.max()))
print('-'*30)
print('Creating and compiling model...')
print('-'*30)
model = get_unet()
model_checkpoint = ModelCheckpoint('/output/weights.h5', monitor='val_loss', save_best_only=True)
print('-'*30)
print('Fitting model...')
print('-'*30)
model.fit(imgs_train, imgs_mask_train, batch_size=batch_size, nb_epoch=number_of_epochs, verbose=1, shuffle=True,
validation_split=test_data_fraction,
callbacks=[model_checkpoint])
print('-'*30)
print('Loading and preprocessing test data...')
print('-'*30)
imgs_test, imgs_id_test = load_test_data()
imgs_test = preprocess(imgs_test)
imgs_test = imgs_test.astype('float32')
imgs_test -= mean
imgs_test /= std
print('-'*30)
print('Loading saved weights...')
print('-'*30)
    # Load the newly trained weights from /output/ for prediction (even though the starting
    # weights came from /checkpoints/): FloydHub only allows writing to /output/ during the
    # run, so that is where ModelCheckpoint saved them.
model.load_weights('/output/weights.h5')
print('-'*30)
print('Predicting masks on test data...')
print('-'*30)
imgs_mask_test = model.predict(imgs_test, verbose=1)
np.save('/output/imgs_mask_test.npy', imgs_mask_test)
print('-' * 30)
print('Saving predicted masks to files...')
print('-' * 30)
pred_dir = '/output'
if not os.path.exists(pred_dir):
os.mkdir(pred_dir)
for image, image_id in zip(imgs_mask_test, imgs_id_test):
image = (image[:, :, 0] * 255.).astype(np.uint8)
imsave(os.path.join(pred_dir, str(image_id) + '_pred.png'), image)
# for image, image_id in zip(imgs_train, imgs_id_test):
# image = (image[:, :, 0] * 255.).astype(np.uint8)
# imsave(os.path.join(pred_dir, str(image_id) + '_trainpred.png'), image)
if __name__ == '__main__':
train_and_predict()
|
import requests
import re
headers = {
    "Accept": "application/json, text/javascript, */*; q=0.01",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0",
    "X-Requested-With": "XMLHttpRequest"
}
s = requests.session()
# print(s.cookies)
c = requests.cookies.RequestsCookieJar()
c.set("laravel_session","eyJpdiI6IkpKS1ZnQkxTVjBGM0ZQUXJHdnF6UXc9PSIsInZhbHVlIjoidm81bHFyRkVlb1p6a0NaTFwvYmNnelV2NklFNWRzQ0FuRDRaWkxSKzdFemNXNnNXcTRRUDYxcklwOERvQ2JtYlk4SUVoM3Zja1VGSCtTU0Exd0RKU01nPT0iLCJtYWMiOiI2OWI1YjY0NWM4ODNiZGIwMDM5OWRmNDY5OGU0OTQ1YTYwMGE1YTcxOWE5M2JjOTBhNjBhMGMwZGNjNzc4Nzk2In0%3D")
s.cookies.update(c)
# Publish an article
data = {
"_token":"TA3QY9tfVlABLOI2joiaPf514mb8J1yRfwS29V3h",
"fa_base_id":76,
"is_public":1,
"tag":1,
"title":"我是测试6",
"is_announcement":0 ,
"show_time":"2018-07-06 16:25:35",
"title_page":"2018-07-04_14.26.29-f45a98893db7a32a81ca09e65d3b1e06.jpg",
"content":"测试2222"
}
r = s.post(url = "http://vip.lysy90store.xyz/story" ,data= data,headers = headers)
r_json = r.json()
# print(r_json)
# Get the article id
story_id1 = r_json["result"]["story_id"]
print(story_id1)
# Delete the article
data1 ={
"_token": "TA3QY9tfVlABLOI2joiaPf514mb8J1yRfwS29V3h",
"_method":"delete",
"story_id":story_id1
}
r2 = s.post(url = "http://vip.lysy90store.xyz/story" ,data= data1,headers = headers)
print(r2.text)
|
# @Title: Self Dividing Numbers (自除数)
# @Author: 2464512446@qq.com
# @Date: 2019-09-28 17:14:52
# @Runtime: 28 ms
# @Memory: 11.6 MB
class Solution(object):
def selfDividingNumbers(self, left, right):
ans = []
for num in range(left,right + 1):
copy = num
while copy > 0:
div, copy = copy % 10, copy // 10
if div == 0 or num % div != 0: break
            else: ans.append(num)  # while ... else: the else block runs when the loop ends without hitting break
return ans
|
import os
resourcesFolder = os.path.join(os.getcwd(), 'source', 'resources')
imgPath = os.path.join(resourcesFolder, 'pixelToolbarIcon.png')
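# What follows (a reading of the code, kept deliberately brief): the toolbar icon is loaded
# into a DrawBot ImageObject, its alpha channel is sampled on a coarse `steps` x `steps`
# grid, and a gradient-filled square is drawn for every cell whose sampled alpha is at
# least 0.5, producing a pixelated icon saved as PixelToolMechanicIcon.png in the current
# working directory.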
size(512, 512)
im = ImageObject()
with im:
scale(29.01)
image(imgPath, (-1.29, -1.31))
steps = 10
w = h = width() / (steps-1)
r = w * 0.5
for i in range(steps):
for j in range(steps):
x = i * w
y = j * h
X, Y = x + w / 2, y + h / 2
color = imagePixelColor(im, (X, Y))[-1]
if color < 0.5:
continue
linearGradient((x, y+h), (x+w, y), [(0.3,), (0,)], [0, 1])
rect(X - r, Y - r, r*2, r*2)
imgPathDest = os.path.join(os.getcwd(), 'PixelToolMechanicIcon.png')
saveImage(imgPathDest)
|
class RandomListNode:
def __init__(self, x):
self.label = x
self.next = None
self.random = None
class Solution:
def copyRandomList(self, head):
#make copy of each node,link them in a single list.
if not head: return None
ptr = head
while ptr:
copy = RandomListNode(ptr.label)
copy.next = ptr.next
ptr.next = copy
ptr = ptr.next.next
#assign random pointers for the copy nodes.
ptr = head
while ptr:
if ptr.random: ptr.next.random = ptr.random.next
ptr = ptr.next.next
#restore the original list, and extract the copy list.
ptr, dummy = head, RandomListNode(0)
pre = dummy
while ptr:
pre.next = ptr.next
pre = pre.next
ptr.next = ptr.next.next
ptr = ptr.next
        return dummy.next
|
import os
import pandas as pd
from requests import get
from settings import INPUT_DATA_PATH
HOSPITAL_DIRPATH = os.path.join(INPUT_DATA_PATH, "hospitalisation_data")
URL = "https://opendata.ecdc.europa.eu/covid19/hospitalicuadmissionrates/csv/data.csv"
COUNTRIES = {"france", "belgium", "italy", "sweden", "uk", "spain"}
RENAME_INDICATOR = {
"Daily hospital occupancy": "hosp_occup",
"Daily ICU occupancy": "icu_occup",
"Weekly new hospital admissions per 100k": "hosp_adm_per_100K",
"Weekly new ICU admissions per 100k": "icu_adm_per_100K",
}
endpoint = (
"https://api.coronavirus.data.gov.uk/v1/data?"
"filters=areaType=overview&"
'structure={"date":"date","covidOccupiedMVBeds":"covidOccupiedMVBeds","newAdmissions":"newAdmissions","hospitalCases":"hospitalCases"}'
)
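# The UK coronavirus API request above filters for the whole-UK "overview" area and, through
# the "structure" parameter, asks for the date, ventilator-bed occupancy (covidOccupiedMVBeds),
# new admissions and current hospital cases.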
def get_data(url):
    response = get(url, timeout=10)
if response.status_code >= 400:
raise RuntimeError(f"Request failed: { response.text }")
return response.json()
def get_uk():
uk_df = get_data(endpoint)
uk_df = pd.DataFrame(uk_df["data"])
uk_df["date"] = pd.to_datetime(
uk_df["date"], errors="coerce", format="%Y-%m-%d", infer_datetime_format=False
)
uk_df.rename(
columns={
"covidOccupiedMVBeds": "uk_icu_occup",
"newAdmissions": "uk_hosp_adm",
"hospitalCases": "uk_hosp_occup",
},
inplace=True,
)
uk_df["year_week"] = uk_df.date.dt.strftime(date_format="%Y-W%U")
# Need to renumber week 0 to last week of 2020
uk_df.year_week.replace({"2021-W00": "2020-W53"}, inplace=True)
uk_df.groupby(["year_week"]).mean().reset_index()
return uk_df.groupby(["year_week"]).mean().reset_index()
def get_eu_countries():
eu_countries = pd.read_csv(URL)
eu_countries.country = eu_countries.country.str.lower()
eu_countries = eu_countries.loc[
eu_countries.country.isin(COUNTRIES), ["country", "indicator", "year_week", "value"]
]
eu_countries.indicator.replace(RENAME_INDICATOR, inplace=True)
eu_countries_daily = eu_countries.loc[eu_countries.indicator.isin(["hosp_occup", "icu_occup"])]
eu_countries = eu_countries.loc[~eu_countries.indicator.isin(["hosp_occup", "icu_occup"])]
eu_countries_daily = eu_countries_daily.groupby(["country", "indicator", "year_week"]).mean()
eu_countries_daily.reset_index(inplace=True)
eu_countries = eu_countries.append(eu_countries_daily)
eu_countries["cntry_ind"] = eu_countries.country.str[:3] + "_" + eu_countries.indicator
eu_countries = eu_countries.pivot_table(
index=["year_week"], columns="cntry_ind", values="value"
).reset_index()
return eu_countries
def main():
uk_df = get_uk()
eu_countries = get_eu_countries()
european_data = eu_countries.merge(uk_df, how="outer", on="year_week")
european_data.to_csv(os.path.join(HOSPITAL_DIRPATH, "european_data.csv"))
if __name__ == "__main__":
main()
|
# Root finding with bisection method
from math import *
def sign(y):
if y>0: return 1
if y<0: return -1
return 0
def bisection(f,a,b,tol):
sfa = sign(f(a)) # so we won't have to call f or sign multiple times at a or b
sfb = sign(f(b))
if sfa == 0: return a # a is a root
if sfb == 0: return b # b is a root
if sfa == sfb:
        print("You didn't give me a bracket!")
quit()
while abs(a-b) > tol:
        print(a, b)  # just to see how things are proceeding
c = (a+b)/2. # midpoint
sfc = sign(f(c))
if sfc == 0: return c # hit a root exactly, by good luck
if sfc == sfa:
a = c
else:
b = c
return (a+b)/2.
def myf(x):
return cos(x) - 2*x
# Example of use:
# r = bisection(myf,0,10,1.0e-13)
# print("Root ~", r)
|
# -*- coding:utf-8 -*-
import pandas as pd
import matplotlib.pyplot as plt
train_data_path = "/Users/withheart/Documents/studys/senmantic/data/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv"
# Load the data
def load_data_from_csv(file_name, header=0, encoding="utf-8"):
data_df = pd.read_csv(file_name, header=header, encoding=encoding)
columns = data_df.columns.values.tolist()
counts_column = ["not mentioned", "positive", "neutral", "negative"]
counts_df = pd.DataFrame(columns=counts_column)
for i in range(2,len(columns)):
row = data_df[columns[i]].value_counts().values
counts_df.loc[columns[i]] = row
return counts_df
if __name__ == '__main__':
counts_df = load_data_from_csv(train_data_path)
location_df = counts_df.iloc[:3,:]
service_df = counts_df.iloc[3:7,:]
price_df = counts_df.iloc[7:10,:]
env_df = counts_df.iloc[10:14,:]
dish_df = counts_df.iloc[14:18,:]
others_df = counts_df.iloc[18:,:]
others_df.plot(kind="bar")
    plt.show()
|
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 7 00:32:20 2018
@author: home
"""
def solution(s):
if s == '':
return True
if len(s) % 2 == 1:
return False
else:
xiaoL = '('
xiaoR = ')'
zhongL = '['
zhongR = ']'
daL = '{'
daR = '}'
mid = int(len(s)/2)
left = s[0 : mid]
right = s[mid:]
if xiaoL in right or zhongL in right or daL in right:
return False
else:
right = right.replace(xiaoR, xiaoL)
right = right.replace(zhongR, zhongL)
right = right.replace(daR, daL)
if left == right:
return True
else:
return False
if __name__ == "__main__":
s = '([])'
    print(solution(s))
|
"""
Basic tests of the Powerball and DailyFour game validation + earnings logic
"""
from django.test import TestCase
from yoolotto.lottery.game.base import LotteryResultsInvalidException, LotteryPlayInvalidException
from yoolotto.lottery.game.manager import GameManager
class PowerballGameTest(TestCase):
HANDLER = GameManager.get("Powerball")
def test_powerball_game_validate(self):
valid = [
[1, 2, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12],
[13, 14, 15, 16, 17, 18],
[19, 20, 21, 22, 23, 24],
[25, 26, 27, 28, 29, 30],
[31, 32, 33, 34, 35, 35],
[36, 37, 38, 39, 40, 17],
[41, 42, 43, 44, 45, 31],
[46, 47, 48, 49, 50, 21],
[51, 52, 53, 54, 55, 15],
[56, 55, 54, 53, 52, 7]
]
for numbers in valid:
self.assertTrue(self.HANDLER.validate_numbers(numbers))
invalid = [
[1, 1, 2, 3, 4, 5],
[1, 100, 2, 3, 4, 5],
[1, 6, 2, 3, 4, 37],
[1, 57, 2, 3, 4, 5]
]
for numbers in invalid:
with self.assertRaises(LotteryResultsInvalidException):
self.HANDLER.validate_numbers(numbers)
def test_powerball_play_validate(self):
valid = [
{'numbers': [1, 2, 3, 4, 5, 6], 'multiplier': True},
{'numbers': [7, 8, 9, 10, 11, 12], 'multiplier': True},
{'numbers': [13, 14, 15, 16, 17, 18], 'multiplier': False},
{'numbers': [19, 20, 21, 22, 23, 24], 'multiplier': True},
{'numbers': [25, 26, 27, 28, 29, 30], 'multiplier': True},
{'numbers': [31, 32, 33, 34, 35, 35], 'multiplier': False},
{'numbers': [36, 37, 38, 39, 40, 17], 'multiplier': True},
{'numbers': [41, 42, 43, 44, 45, 31], 'multiplier': True},
{'numbers': [46, 47, 48, 49, 50, 21], 'multiplier': False},
{'numbers': [51, 52, 53, 54, 55, 15], 'multiplier': True},
{'numbers': [56, 55, 54, 53, 52, 7], 'multiplier': True}
]
for play in valid:
self.assertTrue(self.HANDLER.validate_play(play))
invalid = [
{'numbers': [1, 57, 3, 4, 5, 6], 'multiplier': True},
{'numbers': [7, 8, 9, 10, 10, 12], 'multiplier': True},
{'numbers': [-1, 14, 15, 16, 17, 18], 'multiplier': False},
{'numbers': [19, 20, 21, 22, 23, 37], 'multiplier': True},
{'numbers': [31, 32, 33, 34, 35, 35], 'multiplier': "True"}
]
for data in invalid:
with self.assertRaises(LotteryPlayInvalidException):
self.HANDLER.validate_play(data)
def test_powerball_earnings(self):
results = [10, 20, 30, 40, 50, 10]
expected = [
# 0
[0, {"numbers": [1, 2, 3, 4, 5, 5], "multiplier": True}],
[4, {"numbers": [1, 2, 3, 4, 5, 10], "multiplier": False}],
# 1
[12, {"numbers": [1, 2, 3, 4, 5, 10], "multiplier": True}],
[4, {"numbers": [10, 2, 3, 4, 5, 10], "multiplier": False}],
# 2
[12, {"numbers": [10, 2, 3, 4, 5, 10], "multiplier": True}],
[7, {"numbers": [10, 20, 3, 4, 5, 10], "multiplier": False}],
[14, {"numbers": [10, 20, 3, 4, 5, 10], "multiplier": True}],
# 3
[7, {"numbers": [10, 20, 30, 4, 5, 5], "multiplier": False}],
[14, {"numbers": [10, 20, 30, 4, 5, 5], "multiplier": True}],
[100, {"numbers": [10, 20, 30, 4, 5, 10], "multiplier": False}],
[200, {"numbers": [10, 20, 30, 4, 5, 10], "multiplier": True}],
# 4
[100, {"numbers": [10, 20, 30, 40, 5, 5], "multiplier": False}],
[200, {"numbers": [10, 20, 30, 40, 5, 5], "multiplier": True}],
[10000, {"numbers": [10, 20, 30, 40, 5, 10], "multiplier": False}],
[40000, {"numbers": [10, 20, 30, 40, 5, 10], "multiplier": True}],
# 5
[1e6, {"numbers": [10, 20, 30, 40, 50, 5], "multiplier": False}],
[2e6, {"numbers": [10, 20, 30, 40, 50, 5], "multiplier": True}],
# Jackpot
[self.HANDLER.JACKPOT, {"numbers": [10, 20, 30, 40, 50, 10], "multiplier": True}],
[self.HANDLER.JACKPOT, {"numbers": [10, 20, 30, 40, 50, 10], "multiplier": True}],
]
for play in expected:
self.assertEqual(play[0], self.HANDLER._earnings(results, play[1]))
class DailyFourGameTest(TestCase):
HANDLER = GameManager.get("DailyFour")
def test_dailyfour_game_validate(self):
valid = [
[1, 2, 3, 4],
[0, 1, 2, 3],
[9, 8, 7, 6],
[5, 6, 0, 9],
[0, 0, 0, 0],
[9, 9, 9, 9]
]
for numbers in valid:
self.assertTrue(self.HANDLER.validate_numbers(numbers))
invalid = [
[1, 1, 2, 10],
[0, 0, 0, 10],
[5, 5, 5, 25]
]
for numbers in invalid:
with self.assertRaises(LotteryResultsInvalidException):
self.HANDLER.validate_numbers(numbers)
def test_dailyfour_play_validate(self):
valid = [
{"baseWager":"0.50","lineType":"BOX","numbers":[7,5,2,4],"sum":18,"sumWager":"1.00"},
{"baseWager":"1.00","lineType":"STRBOX","numbers":[6,2,7,1],"sum":16,"sumWager":"2.00"},
{"baseWager":"0.50","lineType":"FRONT","numbers":[9,2,0,0],"sum":11,"sumWager":"3.00"},
{"baseWager":"0.50","lineType":"MID","numbers":[0,8,7,0],"sum":15,"sumWager":"4.00"},
{"baseWager":"0.50","lineType":"BACK","numbers":[0,0,1,2],"sum":None,"sumWager":None},
{"baseWager":"0.50","lineType":"MID","numbers":[0,0,7,0],"sum":None,"sumWager":None},
{"baseWager":"0.50","lineType":"STR","numbers":[0,0,0,0],"sum":0,"sumWager":"5.00"},
{"baseWager":"0.50","lineType":"STR","numbers":[9,9,9,9],"sum":36,"sumWager":"5.00"}
]
for play in valid:
self.assertTrue(self.HANDLER.validate_play(play))
invalid = [
{"baseWager":None,"lineType":"BOX","numbers":[7,5,2,4],"sum":18,"sumWager":"1.00"},
{"baseWager":"1.00","lineType":"DERP","numbers":[6,2,7,1],"sum":16,"sumWager":"2.00"},
{"baseWager":"0.50","lineType":"FRONT","numbers":[9,2,0,0],"sum":11,"sumWager":"None"},
]
for data in invalid:
with self.assertRaises(LotteryPlayInvalidException):
self.HANDLER.validate_play(data)
def test_dailyfour_earnings(self):
results = [0, 0, 0, 0]
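# Play-type cheat sheet for these expectations: STR pays on an exact-order match, BOX on
# any-order, STRBOX combines both, FRONT/MID/BACK are two-digit pair plays on those positions,
# and COMBO covers every ordering as straight bets. The sum/sumWager fields appear to model a
# "Sum It Up" style add-on that pays when the declared sum equals the sum of the drawn digits.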
expected = [
# STR
[7500, {"baseWager":"0.50","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"1.00"}],
[10000, {"baseWager":"1.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"1.00"}],
[50000, {"baseWager":"5.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"5.00"}],
[10000, {"baseWager":"2.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[25000, {"baseWager":"5.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[0, {"baseWager":"0.50","lineType":"STR","numbers":[9, 9, 9, 9],"sum":36,"sumWager":"1.00"}],
[0, {"baseWager":"1.00","lineType":"STR","numbers":[9, 9, 9, 9],"sum":36,"sumWager":"1.00"}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[9, 9, 9, 9],"sum":36, "sumWager":"5.00"}],
[0, {"baseWager":"0.50","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"1.00"}],
[0, {"baseWager":"1.00","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"1.00"}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19, "sumWager":"5.00"}],
# FRONT
[25, {"baseWager":"0.50","lineType":"FRONT","numbers":[0, 0, None, None],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"FRONT","numbers":[0, 0, None, None],"sum":None,"sumWager": None}],
[5025, {"baseWager":"0.50","lineType":"FRONT","numbers":[0, 0, None, None],"sum":0,"sumWager": "1.00"}],
[15250, {"baseWager":"5.00","lineType":"FRONT","numbers":[0, 0, None, None],"sum":0,"sumWager": "3.00"}],
# MID
[25, {"baseWager":"0.50","lineType":"MID","numbers":[None, 0, 0, None],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"MID","numbers":[None, 0, 0, None],"sum":None,"sumWager": None}],
[5025, {"baseWager":"0.50","lineType":"MID","numbers":[None, 0, 0, None],"sum":0,"sumWager": "1.00"}],
[15250, {"baseWager":"5.00","lineType":"MID","numbers":[None, 0, 0, None],"sum":0,"sumWager": "3.00"}],
# BACK
[25, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 0, 0],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 0, 0],"sum":None,"sumWager": None}],
[5025, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 0, 0],"sum":0,"sumWager": "1.00"}],
[15250, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 0, 0],"sum":0,"sumWager": "3.00"}],
# STRBOX
# This particular play is nonsense
[7400, {"baseWager":"0.50","lineType":"STRBOX","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"0.50"}],
[0, {"baseWager":"0.50","lineType":"STRBOX","numbers":[0, 0, 0, 1],"sum":1,"sumWager":"0.50"}],
# COMBO
# This particular play is nonsense
[2500, {"baseWager":"0.50","lineType":"COMBO","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[0, {"baseWager":"0.50","lineType":"COMBO","numbers":[0, 0, 0, 1],"sum":1,"sumWager":"0.50"}],
]
for play in expected:
try:
self.assertEqual(play[0], self.HANDLER._earnings(results, play[1]))
except:
print(play[1])
raise
# More Results
results = [9, 9, 9, 9]
expected = [
# STR
[0, {"baseWager":"0.50","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"1.00"}],
[0, {"baseWager":"1.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"1.00"}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"5.00"}],
[0, {"baseWager":"2.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[7500, {"baseWager":"0.50","lineType":"STR","numbers":[9, 9, 9, 9],"sum":36,"sumWager":"1.00"}],
[10000, {"baseWager":"1.00","lineType":"STR","numbers":[9, 9, 9, 9],"sum":36,"sumWager":"1.00"}],
[50000, {"baseWager":"5.00","lineType":"STR","numbers":[9, 9, 9, 9],"sum":36, "sumWager":"5.00"}],
[0, {"baseWager":"0.50","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"1.00"}],
[0, {"baseWager":"1.00","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"1.00"}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19, "sumWager":"5.00"}],
# FRONT
[0, {"baseWager":"0.50","lineType":"FRONT","numbers":[0, 0, None, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"5.00","lineType":"FRONT","numbers":[0, 0, None, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"0.50","lineType":"FRONT","numbers":[0, 0, None, None],"sum":0,"sumWager": "1.00"}],
[0, {"baseWager":"5.00","lineType":"FRONT","numbers":[0, 0, None, None],"sum":0,"sumWager": "3.00"}],
[25, {"baseWager":"0.50","lineType":"FRONT","numbers":[9, 9, None, None],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"FRONT","numbers":[9, 9, None, None],"sum":None,"sumWager": None}],
[25, {"baseWager":"0.50","lineType":"FRONT","numbers":[9, 9, None, None],"sum":18,"sumWager": "1.00"}],
[250, {"baseWager":"5.00","lineType":"FRONT","numbers":[9, 9, None, None],"sum":18,"sumWager": "3.00"}],
# MID
[0, {"baseWager":"0.50","lineType":"MID","numbers":[None, 0, 0, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"5.00","lineType":"MID","numbers":[None, 0, 0, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"0.50","lineType":"MID","numbers":[None, 0, 0, None],"sum":0,"sumWager": "1.00"}],
[0, {"baseWager":"5.00","lineType":"MID","numbers":[None, 0, 0, None],"sum":0,"sumWager": "3.00"}],
# BACK
[0, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 0, 0],"sum":None,"sumWager": None}],
[0, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 0, 0],"sum":None,"sumWager": None}],
[0, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 0, 0],"sum":0,"sumWager": "1.00"}],
[0, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 0, 0],"sum":0,"sumWager": "3.00"}],
# STRBOX
# This particular play is nonsense
[0, {"baseWager":"0.50","lineType":"STRBOX","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"0.50"}],
[7400, {"baseWager":"0.50","lineType":"STRBOX","numbers":[9, 9, 9, 9],"sum":36,"sumWager":"0.50"}],
[0, {"baseWager":"0.50","lineType":"STRBOX","numbers":[0, 0, 0, 1],"sum":1,"sumWager":"0.50"}],
# COMBO
# This particular play is nonsense
[2500, {"baseWager":"0.50","lineType":"COMBO","numbers":[9, 9, 9, 9],"sum":None,"sumWager":None}],
[0, {"baseWager":"0.50","lineType":"COMBO","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[0, {"baseWager":"0.50","lineType":"COMBO","numbers":[0, 0, 0, 1],"sum":1,"sumWager":"0.50"}],
]
for play in expected:
try:
self.assertEqual(play[0], self.HANDLER._earnings(results, play[1]))
except:
print(play[1])
raise
# Some Interesting Numbers
results = [4, 5, 8, 2]
expected = [
# STR
[0, {"baseWager":"0.50","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"1.00"}],
[0, {"baseWager":"1.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"1.00"}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"5.00"}],
[0, {"baseWager":"2.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[2503, {"baseWager":"0.50","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"0.50"}],
[5007, {"baseWager":"1.00","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"1.00"}],
[25037, {"baseWager":"5.00","lineType":"STR","numbers":[4, 5, 8, 2],"sum":19, "sumWager":"5.00"}],
[0, {"baseWager":"0.50","lineType":"STR","numbers":[4, 5, 8, 3],"sum":19,"sumWager":"1.00"}],
[0, {"baseWager":"1.00","lineType":"STR","numbers":[4, 5, 8, 3],"sum":19,"sumWager":"1.00"}],
[0, {"baseWager":"5.00","lineType":"STR","numbers":[4, 5, 8, 3],"sum":19, "sumWager":"5.00"}],
# FRONT
[0, {"baseWager":"0.50","lineType":"FRONT","numbers":[0, 0, None, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"5.00","lineType":"FRONT","numbers":[0, 0, None, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"0.50","lineType":"FRONT","numbers":[0, 0, None, None],"sum":0,"sumWager": "1.00"}],
[0, {"baseWager":"5.00","lineType":"FRONT","numbers":[0, 0, None, None],"sum":0,"sumWager": "3.00"}],
[25, {"baseWager":"0.50","lineType":"FRONT","numbers":[4, 5, None, None],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"FRONT","numbers":[4, 5, None, None],"sum":None,"sumWager": None}],
[25, {"baseWager":"0.50","lineType":"FRONT","numbers":[4, 5, None, None],"sum":9,"sumWager": "1.00"}],
[250, {"baseWager":"5.00","lineType":"FRONT","numbers":[4, 5, None, None],"sum":9,"sumWager": "3.00"}],
# MID
[0, {"baseWager":"0.50","lineType":"MID","numbers":[None, 0, 0, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"5.00","lineType":"MID","numbers":[None, 0, 0, None],"sum":None,"sumWager": None}],
[0, {"baseWager":"0.50","lineType":"MID","numbers":[None, 0, 0, None],"sum":0,"sumWager": "1.00"}],
[0, {"baseWager":"5.00","lineType":"MID","numbers":[None, 0, 0, None],"sum":0,"sumWager": "3.00"}],
[25, {"baseWager":"0.50","lineType":"MID","numbers":[None, 5, 8, None],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"MID","numbers":[None, 5, 8, None],"sum":None,"sumWager": None}],
[25, {"baseWager":"0.50","lineType":"MID","numbers":[None, 5, 8, None],"sum":13,"sumWager": "1.00"}],
[250, {"baseWager":"5.00","lineType":"MID","numbers":[None, 5, 8, None],"sum":13,"sumWager": "3.00"}],
# BACK
[0, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 0, 0],"sum":None,"sumWager": None}],
[0, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 0, 0],"sum":None,"sumWager": None}],
[0, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 0, 0],"sum":0,"sumWager": "1.00"}],
[0, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 0, 0],"sum":0,"sumWager": "3.00"}],
[25, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 8, 2],"sum":None,"sumWager": None}],
[250, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 8, 2],"sum":None,"sumWager": None}],
[25, {"baseWager":"0.50","lineType":"BACK","numbers":[None, None, 8, 2],"sum":10,"sumWager": "1.00"}],
[250, {"baseWager":"5.00","lineType":"BACK","numbers":[None, None, 8, 2],"sum":10,"sumWager": "3.00"}],
# STRBOX
# This particular play is nonsense
[0, {"baseWager":"0.50","lineType":"STRBOX","numbers":[0, 0, 0, 0],"sum":0,"sumWager":"0.50"}],
[0, {"baseWager":"0.50","lineType":"STRBOX","numbers":[9, 9, 9, 9],"sum":36,"sumWager":"0.50"}],
[0, {"baseWager":"0.50","lineType":"STRBOX","numbers":[0, 0, 0, 1],"sum":1,"sumWager":"0.50"}],
[5200, {"baseWager":"1.00","lineType":"STRBOX","numbers":[4, 5, 8, 2],"sum":None,"sumWager":None}],
[5237, {"baseWager":"1.00","lineType":"STRBOX","numbers":[4, 5, 8, 2],"sum":19,"sumWager":"5.00"}],
[100, {"baseWager":"0.50","lineType":"STRBOX","numbers":[2, 8, 5, 4],"sum":None,"sumWager":None}],
[100, {"baseWager":"0.50","lineType":"STRBOX","numbers":[4, 8, 2, 5],"sum":None,"sumWager":None}],
[100, {"baseWager":"0.50","lineType":"STRBOX","numbers":[5, 4, 8, 2],"sum":None,"sumWager":None}],
# COMBO
[2500, {"baseWager":"0.50","lineType":"COMBO","numbers":[4, 5, 8, 2],"sum":None,"sumWager":None}],
[2500, {"baseWager":"0.50","lineType":"COMBO","numbers":[5, 8, 2, 4],"sum":None,"sumWager":None}],
# This would be a $120 ticket
[25000, {"baseWager":"5.00","lineType":"COMBO","numbers":[4, 5, 8, 2],"sum":None,"sumWager":None}],
[25000, {"baseWager":"5.00","lineType":"COMBO","numbers":[5, 8, 2, 4],"sum":None,"sumWager":None}],
[0, {"baseWager":"0.50","lineType":"COMBO","numbers":[0, 0, 0, 0],"sum":None,"sumWager":None}],
[0, {"baseWager":"0.50","lineType":"COMBO","numbers":[0, 0, 0, 1],"sum":1,"sumWager":"0.50"}],
]
for play in expected:
try:
self.assertEqual(play[0], self.HANDLER._earnings(results, play[1]))
except:
print(play[1])
raise |
graph = { 'A': set(['B', 'C']),
'B': set(['A', 'D', 'E']),
'C': set(['A', 'F']),
'D': set(['B']),
'E': set(['B', 'F']),
'F': set(['C', 'E'])}
graph2 = { 1: set([2, 3]),
2: set([1, 4, 5]),
3: set([1,5,7]),
4: set([2]),
5: set([2,3,6]),
6: set([5,7]),
7: set([3,6])}
def bfs(graph, start):
#keep track of nodes visited
explored = []
#keep track of nodes to be checked
queue = [start]
# loop until no nodes remain unchecked
while queue:
node = queue.pop(0)
if node not in explored:
explored.append(node)
neighbours = graph[node]
for neighbour in neighbours:
queue.append(neighbour)
return explored
def dfs(graph, vertex, path=None):
# avoid the mutable-default-argument pitfall: a shared list would persist across calls
if path is None:
path = []
path += [vertex]
for neighbor in graph[vertex]:
if neighbor not in path:
path = dfs(graph, neighbor, path)
return path
print(dfs(graph2, 1))
print(bfs(graph, 'D'))
|
#-*- coding:utf-8 -*-
import types
from functools import wraps
import constants
def _raise_when_models_empty(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
if not self.registered_models:
raise RuntimeError(u"No models registered")
return func(self, *args, **kwargs)
return wrapper
class ModelFunction(object):
model_test_funcs = {}
model_permissions = {}
def __init__(self, session=None):
self.__session__ = session
self.registered_models = []
self.model_mapper_dict = {}
def register_model(self, model):
if model not in self.registered_models:
self.registered_models.append(model)
def get_session(self, obj):
if self.__session__ is None:
return obj._sa_instance_state.session
else:
return self.__session__
@_raise_when_models_empty
def get_child_models(self, class_):
"""
Only one-to-many relationships are considered; the same model is returned at most once.
:param class_: the model class
:type class_: types.TypeType
:return: a list of (model class, property, constraint) tuples, where constraint is MAY, SHOULD, or MUST
"""
assert isinstance(class_, types.TypeType) and hasattr(class_, "_sa_class_manager")
if class_ not in self.model_mapper_dict:
self.model_mapper_dict[class_] = []
for loop in self.registered_models:
if loop != class_:
for pro in loop.__mapper__.iterate_properties:
if hasattr(pro, "direction") and pro.direction.name == "MANYTOONE" and \
pro.local_remote_pairs[0][1] in class_.__table__.columns._all_cols:
self.model_mapper_dict[class_].append((loop, pro,
constants.MAY if pro.local_remote_pairs[0][
0].nullable else constants.SHOULD))
break
return self.model_mapper_dict[class_]
@_raise_when_models_empty
def get_children_generate(self, obj):
for model, pro, constraint in self.get_child_models(obj.__class__):
for i in self.get_session(obj).query(model).filter(pro.class_attribute == obj):
yield i
@_raise_when_models_empty
def get_children(self, obj):
return list(self.get_children_generate(obj))
@_raise_when_models_empty
def get_all_derivatives(self, obj):
return [{child: self.get_all_derivatives(child)} for child in self.get_children_generate(obj)]
def get_conditions(self, obj, detail):
"""
:param obj: the object to be managed
:param detail: if True, include conditions for all descendants rather than only direct children
"""
raise NotImplementedError
def test_obj(self, obj):
func = self.model_test_funcs.get(obj.__class__)
if func:
try:
return func(obj)
except TypeError:
return func(func.im_class(obj))
else:
return True
def _notify(self, obj, user):
raise NotImplementedError
def notify_user(self, obj):
def _get_users(permission):
#TODO placeholder (mock) implementation
from lite_mms import models
return models.User.query.filter(models.User.id == 3).all()
for child, constrain in self.get_conditions(obj, True):
if constrain < constants.MAY:
for k, v in self.model_permissions.iteritems():
if child.__class__.__name__ == k:
for user in _get_users(v):
self._notify(child, user)
@_raise_when_models_empty
def do_action(self, obj):
raise NotImplementedError
class DeleteModelFunction(ModelFunction):
def get_conditions(self, obj, detail):
def __model_relationships__(obj):
conditions = []
for child in self.get_children(obj):
for model, prop, constraint in self.model_mapper_dict[obj.__class__]:
if child.__class__ == model:
conditions.append((child, constraint))
if detail:
conditions.extend(__model_relationships__(child))
return conditions
return __model_relationships__(obj)
def delete(self, obj):
session = self.get_session(obj)
session.delete(obj)
try:
session.commit()
except:
session.rollback()
def _notify(self, obj, user):
#TODO placeholder (mock) implementation
print u"请求删除{0:s}".format(str(obj))
@_raise_when_models_empty
def do_action(self, obj):
"""
The rules for deleting an obj are: 1. test whether the obj may be deleted, and raise an exception if not;
2. actually delete the obj.
The framework itself cannot decide whether an obj may be deleted; the obj (or an extension class built on it) must supply the answer.
Therefore the obj needs to provide a corresponding test method.
"""
try:
if self.test_obj(obj):
conditions = self.get_conditions(obj, True)
if conditions:
raise ValueError(conditions)
for child in self.get_children(obj):
self.do_action(child)
self.delete(obj)
else:
raise ValueError(u"Cannot delete")
except:
raise
class ModifyModelFunction(ModelFunction):
def get_conditions(self, obj, detail):
def __model_relationships__(obj):
conditions = []
for child in self.get_children(obj):
for model, prop, constraint in self.model_mapper_dict[obj.__class__]:
if child.__class__ == model:
conditions.append((child, constraint))
if detail:
conditions.extend(__model_relationships__(child))
return conditions
return __model_relationships__(obj)
def modify(self, obj):
session = self.get_session(obj)
session.update(obj)
try:
session.commit()
except:
session.rollback()
@_raise_when_models_empty
def do_action(self, obj):
try:
if self.test_obj(obj):
conditions = self.get_conditions(obj, True)
if conditions:
raise ValueError(conditions)
for child in self.get_children(obj):
self.do_action(child)
self.modify(obj)
else:
raise ValueError(u"Cannot modify")
except:
raise
def _notify(self, obj, user):
print u"请求修改{}".format(str(obj))
def register_test_delete_func(class_):
"""
:param class_: this should be a concrete model class
"""
def decorate(func):
DeleteModelFunction.model_test_funcs[class_] = func
@wraps(func)
def f(*args, **kwargs):
return func(*args, **kwargs)
return f
return decorate
def register_delete_permissions():
#TODO placeholder (mock) implementation
from lite_mms.permissions import SchedulerPermission
DeleteModelFunction.model_permissions = {"WorkCommand": SchedulerPermission}
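# Illustrative sketch of registering a delete-test predicate for a model; the WorkCommand
# attribute used here is an assumption, so the example is left commented out:
#
# @register_test_delete_func(models.WorkCommand)
# def _can_delete_work_command(work_command):
#     return work_command.assignee is None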
if __name__ == "__main__":
from lite_mms.basemain import app, db
from lite_mms import models
order = models.Order.query.filter_by(id=415).one()
assert order
func = ModelFunction(db.session)
for k, v in models.__dict__.items():
if isinstance(v, types.TypeType) and issubclass(v, db.Model):
func.register_model(v)
print func.get_child_models(models.Order)
print func.get_children(order)
print func.model_mapper_dict
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('submit_exercise', views.submit_exercise, name='submit_exercise'),
path('get_exercise', views.get_exercise, name='get_exercise'),
path('set_journal', views.set_journal, name='set_journal'),
path('delete_exercise', views.delete_exercise, name='delete_exercise')
] |
#!/usr/bin/python
### FOR THIS PROGRAM, CLASS SLIDES WERE USED AS A TEMPLATE ####
import sys
sys.path.append("/Users/ianhoyos/biopython-1.70")
import Bio
from Bio.Blast.Applications import NcbiblastpCommandline
from Bio.Blast import NCBIStandalone
### Fixed path to where blastp is located ###
### The database used is a local database of Mus musculus proteins. Instead of searching
### through the entire universe of sequences, this keeps the search more compact.
### Because the database is local, a terminal command is needed first to convert the FASTA file
### into a BLAST database: 'makeblastdb -in mus_protein.fa -parse_seqids -dbtype prot'
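### A minimal sketch of building the database from Python instead of the terminal
### (assumes makeblastdb is on the PATH; equivalent to the command above):
# from subprocess import call
# call(["makeblastdb", "-in", "mus_protein.fa", "-parse_seqids", "-dbtype", "prot"])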
blastp_cline = NcbiblastpCommandline(cmd ="/usr/local/ncbi/blast/bin/blastp", query="human.fa", db="mus_protein.fa", evalue=0.01, out="out.txt")
blastp_cline()
### blastp was chosen because this BLAST algorithm compares protein queries against a
### protein database, which is what we need.
### No substitution matrix is specified, so blastp uses its default (BLOSUM62).
### The evalue parameter sets the expectation-value cutoff for reported matches.
result_handle = open("out.txt")
blast_parser = NCBIStandalone.BlastParser()
blast_record = blast_parser.parse(result_handle)
###
for alignment in blast_record.alignments:
for hsp in alignment.hsps:
if hsp.expect < 0.001:
print (alignment.title)
print (alignment.length)
print (hsp.expect)
print (hsp.query)
print (hsp.match)
print (hsp.sbjct)
|
class Solution:
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
s1 = s+s
sub = s1[1:-1]
return s in sub
print(Solution().repeatedSubstringPattern('abab')) |
#!/usr/bin/python
# SPDX-License-Identifier: GPL-2.0-only
#
# Tool for analyzing boot timing
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# Authors:
# Todd Brandt <todd.e.brandt@linux.intel.com>
#
# Description:
# This tool is designed to assist kernel and OS developers in optimizing
# their linux stack's boot time. It creates an html representation of
# the kernel boot timeline up to the start of the init process.
#
# ----------------- LIBRARIES --------------------
import sys
import time
import os
import string
import re
import platform
import shutil
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE
import sleepgraph as aslib
def pprint(msg):
print(msg)
sys.stdout.flush()
# ----------------- CLASSES --------------------
# Class: SystemValues
# Description:
# A global, single-instance container used to
# store system values and test parameters
class SystemValues(aslib.SystemValues):
title = 'BootGraph'
version = '2.2'
hostname = 'localhost'
testtime = ''
kernel = ''
dmesgfile = ''
ftracefile = ''
htmlfile = 'bootgraph.html'
testdir = ''
kparams = ''
result = ''
useftrace = False
usecallgraph = False
suspendmode = 'boot'
max_graph_depth = 2
graph_filter = 'do_one_initcall'
reboot = False
manual = False
iscronjob = False
timeformat = '%.6f'
bootloader = 'grub'
blexec = []
def __init__(self):
self.hostname = platform.node()
self.testtime = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
if os.path.exists('/proc/version'):
fp = open('/proc/version', 'r')
val = fp.read().strip()
fp.close()
self.kernel = self.kernelVersion(val)
else:
self.kernel = 'unknown'
self.testdir = datetime.now().strftime('boot-%y%m%d-%H%M%S')
def kernelVersion(self, msg):
return msg.split()[2]
def checkFtraceKernelVersion(self):
val = tuple(map(int, self.kernel.split('-')[0].split('.')))
if val >= (4, 10, 0):
return True
return False
def kernelParams(self):
cmdline = 'initcall_debug log_buf_len=32M'
if self.useftrace:
if self.cpucount > 0:
bs = min(self.memtotal // 2, 2*1024*1024) // self.cpucount
else:
bs = 131072
cmdline += ' trace_buf_size=%dK trace_clock=global '\
'trace_options=nooverwrite,funcgraph-abstime,funcgraph-cpu,'\
'funcgraph-duration,funcgraph-proc,funcgraph-tail,'\
'nofuncgraph-overhead,context-info,graph-time '\
'ftrace=function_graph '\
'ftrace_graph_max_depth=%d '\
'ftrace_graph_filter=%s' % \
(bs, self.max_graph_depth, self.graph_filter)
return cmdline
def setGraphFilter(self, val):
master = self.getBootFtraceFilterFunctions()
fs = ''
for i in val.split(','):
func = i.strip()
if func == '':
doError('badly formatted filter function string')
if '[' in func or ']' in func:
doError('loadable module functions not allowed - "%s"' % func)
if ' ' in func:
doError('spaces found in filter functions - "%s"' % func)
if func not in master:
doError('function "%s" not available for ftrace' % func)
if not fs:
fs = func
else:
fs += ','+func
if not fs:
doError('badly formatted filter function string')
self.graph_filter = fs
def getBootFtraceFilterFunctions(self):
self.rootCheck(True)
fp = open(self.tpath+'available_filter_functions')
fulllist = fp.read().split('\n')
fp.close()
list = []
for i in fulllist:
if not i or ' ' in i or '[' in i or ']' in i:
continue
list.append(i)
return list
def myCronJob(self, line):
if '@reboot' not in line:
return False
if 'bootgraph' in line or 'analyze_boot.py' in line or '-cronjob' in line:
return True
return False
def cronjobCmdString(self):
cmdline = '%s -cronjob' % os.path.abspath(sys.argv[0])
args = iter(sys.argv[1:])
for arg in args:
if arg in ['-h', '-v', '-cronjob', '-reboot', '-verbose']:
continue
elif arg in ['-o', '-dmesg', '-ftrace', '-func']:
next(args)
continue
elif arg == '-result':
cmdline += ' %s "%s"' % (arg, os.path.abspath(next(args)))
continue
elif arg == '-cgskip':
file = self.configFile(next(args))
cmdline += ' %s "%s"' % (arg, os.path.abspath(file))
continue
cmdline += ' '+arg
if self.graph_filter != 'do_one_initcall':
cmdline += ' -func "%s"' % self.graph_filter
cmdline += ' -o "%s"' % os.path.abspath(self.testdir)
return cmdline
def manualRebootRequired(self):
cmdline = self.kernelParams()
pprint('To generate a new timeline manually, follow these steps:\n\n'\
'1. Add the CMDLINE string to your kernel command line.\n'\
'2. Reboot the system.\n'\
'3. After reboot, re-run this tool with the same arguments but no command (w/o -reboot or -manual).\n\n'\
'CMDLINE="%s"' % cmdline)
sys.exit()
def blGrub(self):
blcmd = ''
for cmd in ['update-grub', 'grub-mkconfig', 'grub2-mkconfig']:
if blcmd:
break
blcmd = self.getExec(cmd)
if not blcmd:
doError('[GRUB] missing update command')
if not os.path.exists('/etc/default/grub'):
doError('[GRUB] missing /etc/default/grub')
if 'grub2' in blcmd:
cfg = '/boot/grub2/grub.cfg'
else:
cfg = '/boot/grub/grub.cfg'
if not os.path.exists(cfg):
doError('[GRUB] missing %s' % cfg)
if 'update-grub' in blcmd:
self.blexec = [blcmd]
else:
self.blexec = [blcmd, '-o', cfg]
def getBootLoader(self):
if self.bootloader == 'grub':
self.blGrub()
else:
doError('unknown boot loader: %s' % self.bootloader)
def writeDatafileHeader(self, filename):
self.kparams = open('/proc/cmdline', 'r').read().strip()
fp = open(filename, 'w')
fp.write(self.teststamp+'\n')
fp.write(self.sysstamp+'\n')
fp.write('# command | %s\n' % self.cmdline)
fp.write('# kparams | %s\n' % self.kparams)
fp.close()
sysvals = SystemValues()
# Class: Data
# Description:
# The primary container for test data.
class Data(aslib.Data):
dmesg = {} # root data structure
start = 0.0 # test start
end = 0.0 # test end
dmesgtext = [] # dmesg text file in memory
testnumber = 0
idstr = ''
html_device_id = 0
valid = False
tUserMode = 0.0
boottime = ''
phases = ['kernel', 'user']
do_one_initcall = False
def __init__(self, num):
self.testnumber = num
self.idstr = 'a'
self.dmesgtext = []
self.dmesg = {
'kernel': {'list': dict(), 'start': -1.0, 'end': -1.0, 'row': 0,
'order': 0, 'color': 'linear-gradient(to bottom, #fff, #bcf)'},
'user': {'list': dict(), 'start': -1.0, 'end': -1.0, 'row': 0,
'order': 1, 'color': '#fff'}
}
def deviceTopology(self):
return ''
def newAction(self, phase, name, pid, start, end, ret, ulen):
# new device callback for a specific phase
self.html_device_id += 1
devid = '%s%d' % (self.idstr, self.html_device_id)
list = self.dmesg[phase]['list']
length = -1.0
if(start >= 0 and end >= 0):
length = end - start
i = 2
origname = name
while(name in list):
name = '%s[%d]' % (origname, i)
i += 1
list[name] = {'name': name, 'start': start, 'end': end,
'pid': pid, 'length': length, 'row': 0, 'id': devid,
'ret': ret, 'ulen': ulen }
return name
def deviceMatch(self, pid, cg):
if cg.end - cg.start == 0:
return ''
for p in data.phases:
list = self.dmesg[p]['list']
for devname in list:
dev = list[devname]
if pid != dev['pid']:
continue
if cg.name == 'do_one_initcall':
if(cg.start <= dev['start'] and cg.end >= dev['end'] and dev['length'] > 0):
dev['ftrace'] = cg
self.do_one_initcall = True
return devname
else:
if(cg.start > dev['start'] and cg.end < dev['end']):
if 'ftraces' not in dev:
dev['ftraces'] = []
dev['ftraces'].append(cg)
return devname
return ''
def printDetails(self):
sysvals.vprint('Timeline Details:')
sysvals.vprint(' Host: %s' % sysvals.hostname)
sysvals.vprint(' Kernel: %s' % sysvals.kernel)
sysvals.vprint(' Test time: %s' % sysvals.testtime)
sysvals.vprint(' Boot time: %s' % self.boottime)
for phase in self.phases:
dc = len(self.dmesg[phase]['list'])
sysvals.vprint('%9s mode: %.3f - %.3f (%d initcalls)' % (phase,
self.dmesg[phase]['start']*1000,
self.dmesg[phase]['end']*1000, dc))
# ----------------- FUNCTIONS --------------------
# Function: parseKernelLog
# Description:
# parse a kernel log for boot data
def parseKernelLog():
sysvals.vprint('Analyzing the dmesg data (%s)...' % \
os.path.basename(sysvals.dmesgfile))
phase = 'kernel'
data = Data(0)
data.dmesg['kernel']['start'] = data.start = ktime = 0.0
sysvals.stamp = {
'time': datetime.now().strftime('%B %d %Y, %I:%M:%S %p'),
'host': sysvals.hostname,
'mode': 'boot', 'kernel': ''}
tp = aslib.TestProps()
devtemp = dict()
if(sysvals.dmesgfile):
lf = open(sysvals.dmesgfile, 'rb')
else:
lf = Popen('dmesg', stdout=PIPE).stdout
for line in lf:
line = aslib.ascii(line).replace('\r\n', '')
# grab the stamp and sysinfo
if re.match(tp.stampfmt, line):
tp.stamp = line
continue
elif re.match(tp.sysinfofmt, line):
tp.sysinfo = line
continue
elif re.match(tp.cmdlinefmt, line):
tp.cmdline = line
continue
elif re.match(tp.kparamsfmt, line):
tp.kparams = line
continue
idx = line.find('[')
if idx > 1:
line = line[idx:]
m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
if(not m):
continue
ktime = float(m.group('ktime'))
if(ktime > 120):
break
msg = m.group('msg')
data.dmesgtext.append(line)
if(ktime == 0.0 and re.match('^Linux version .*', msg)):
if(not sysvals.stamp['kernel']):
sysvals.stamp['kernel'] = sysvals.kernelVersion(msg)
continue
m = re.match('.* setting system clock to (?P<d>[0-9\-]*)[ A-Z](?P<t>[0-9:]*) UTC.*', msg)
if(m):
bt = datetime.strptime(m.group('d')+' '+m.group('t'), '%Y-%m-%d %H:%M:%S')
bt = bt - timedelta(seconds=int(ktime))
data.boottime = bt.strftime('%Y-%m-%d_%H:%M:%S')
sysvals.stamp['time'] = bt.strftime('%B %d %Y, %I:%M:%S %p')
continue
m = re.match('^calling *(?P<f>.*)\+.* @ (?P<p>[0-9]*)', msg)
if(m):
func = m.group('f')
pid = int(m.group('p'))
devtemp[func] = (ktime, pid)
continue
m = re.match('^initcall *(?P<f>.*)\+.* returned (?P<r>.*) after (?P<t>.*) usecs', msg)
if(m):
data.valid = True
data.end = ktime
f, r, t = m.group('f', 'r', 't')
if(f in devtemp):
start, pid = devtemp[f]
data.newAction(phase, f, pid, start, ktime, int(r), int(t))
del devtemp[f]
continue
if(re.match('^Freeing unused kernel .*', msg)):
data.tUserMode = ktime
data.dmesg['kernel']['end'] = ktime
data.dmesg['user']['start'] = ktime
phase = 'user'
if tp.stamp:
sysvals.stamp = 0
tp.parseStamp(data, sysvals)
data.dmesg['user']['end'] = data.end
lf.close()
return data
# Function: parseTraceLog
# Description:
# Parse the ftrace log and attach callgraph data to the initcall timeline
def parseTraceLog(data):
sysvals.vprint('Analyzing the ftrace data (%s)...' % \
os.path.basename(sysvals.ftracefile))
# if available, calculate cgfilter allowable ranges
cgfilter = []
if len(sysvals.cgfilter) > 0:
for p in data.phases:
list = data.dmesg[p]['list']
for i in sysvals.cgfilter:
if i in list:
cgfilter.append([list[i]['start']-0.0001,
list[i]['end']+0.0001])
# parse the trace log
ftemp = dict()
tp = aslib.TestProps()
tp.setTracerType('function_graph')
tf = open(sysvals.ftracefile, 'r')
for line in tf:
if line[0] == '#':
continue
m = re.match(tp.ftrace_line_fmt, line.strip())
if(not m):
continue
m_time, m_proc, m_pid, m_msg, m_dur = \
m.group('time', 'proc', 'pid', 'msg', 'dur')
t = float(m_time)
if len(cgfilter) > 0:
allow = False
for r in cgfilter:
if t >= r[0] and t < r[1]:
allow = True
break
if not allow:
continue
if t > data.end:
break
if(m_time and m_pid and m_msg):
t = aslib.FTraceLine(m_time, m_msg, m_dur)
pid = int(m_pid)
else:
continue
if t.fevent or t.fkprobe:
continue
key = (m_proc, pid)
if(key not in ftemp):
ftemp[key] = []
ftemp[key].append(aslib.FTraceCallGraph(pid, sysvals))
cg = ftemp[key][-1]
res = cg.addLine(t)
if(res != 0):
ftemp[key].append(aslib.FTraceCallGraph(pid, sysvals))
if(res == -1):
ftemp[key][-1].addLine(t)
tf.close()
# add the callgraph data to the device hierarchy
for key in ftemp:
proc, pid = key
for cg in ftemp[key]:
if len(cg.list) < 1 or cg.invalid or (cg.end - cg.start == 0):
continue
if(not cg.postProcess()):
pprint('Sanity check failed for %s-%d' % (proc, pid))
continue
# match cg data to devices
devname = data.deviceMatch(pid, cg)
if not devname:
kind = 'Orphan'
if cg.partial:
kind = 'Partial'
sysvals.vprint('%s callgraph found for %s %s-%d [%f - %f]' %\
(kind, cg.name, proc, pid, cg.start, cg.end))
elif len(cg.list) > 1000000:
pprint('WARNING: the callgraph found for %s is massive! (%d lines)' %\
(devname, len(cg.list)))
# Function: retrieveLogs
# Description:
# Create copies of dmesg and/or ftrace for later processing
def retrieveLogs():
# check ftrace is configured first
if sysvals.useftrace:
tracer = sysvals.fgetVal('current_tracer').strip()
if tracer != 'function_graph':
doError('ftrace not configured for a boot callgraph')
# create the folder and get dmesg
sysvals.systemInfo(aslib.dmidecode(sysvals.mempath))
sysvals.initTestOutput('boot')
sysvals.writeDatafileHeader(sysvals.dmesgfile)
call('dmesg >> '+sysvals.dmesgfile, shell=True)
if not sysvals.useftrace:
return
# get ftrace
sysvals.writeDatafileHeader(sysvals.ftracefile)
call('cat '+sysvals.tpath+'trace >> '+sysvals.ftracefile, shell=True)
# Function: colorForName
# Description:
# Generate a repeatable color from a list for a given name
def colorForName(name):
list = [
('c1', '#ec9999'),
('c2', '#ffc1a6'),
('c3', '#fff0a6'),
('c4', '#adf199'),
('c5', '#9fadea'),
('c6', '#a699c1'),
('c7', '#ad99b4'),
('c8', '#eaffea'),
('c9', '#dcecfb'),
('c10', '#ffffea')
]
i = 0
total = 0
count = len(list)
while i < len(name):
total += ord(name[i])
i += 1
return list[total % count]
def cgOverview(cg, minlen):
stats = dict()
large = []
for l in cg.list:
if l.fcall and l.depth == 1:
if l.length >= minlen:
large.append(l)
if l.name not in stats:
stats[l.name] = [0, 0.0]
stats[l.name][0] += (l.length * 1000.0)
stats[l.name][1] += 1
return (large, stats)
# Function: createBootGraph
# Description:
# Create the output html file from the resident test data
# Arguments:
# testruns: array of Data objects from parseKernelLog or parseTraceLog
# Output:
# True if the html file was created, false if it failed
def createBootGraph(data):
# html function templates
html_srccall = '<div id={6} title="{5}" class="srccall" style="left:{1}%;top:{2}px;height:{3}px;width:{4}%;line-height:{3}px;">{0}</div>\n'
html_timetotal = '<table class="time1">\n<tr>'\
'<td class="blue">Init process starts @ <b>{0} ms</b></td>'\
'<td class="blue">Last initcall ends @ <b>{1} ms</b></td>'\
'</tr>\n</table>\n'
# device timeline
devtl = aslib.Timeline(100, 20)
# write the test title and general info header
devtl.createHeader(sysvals, sysvals.stamp)
# Generate the header for this timeline
t0 = data.start
tMax = data.end
tTotal = tMax - t0
if(tTotal == 0):
pprint('ERROR: No timeline data')
return False
user_mode = '%.0f'%(data.tUserMode*1000)
last_init = '%.0f'%(tTotal*1000)
devtl.html += html_timetotal.format(user_mode, last_init)
# determine the maximum number of rows we need to draw
devlist = []
for p in data.phases:
list = data.dmesg[p]['list']
for devname in list:
d = aslib.DevItem(0, p, list[devname])
devlist.append(d)
devtl.getPhaseRows(devlist, 0, 'start')
devtl.calcTotalRows()
# draw the timeline background
devtl.createZoomBox()
devtl.html += devtl.html_tblock.format('boot', '0', '100', devtl.scaleH)
for p in data.phases:
phase = data.dmesg[p]
length = phase['end']-phase['start']
left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
width = '%.3f' % ((length*100.0)/tTotal)
devtl.html += devtl.html_phase.format(left, width, \
'%.3f'%devtl.scaleH, '%.3f'%devtl.bodyH, \
phase['color'], '')
# draw the device timeline
num = 0
devstats = dict()
for phase in data.phases:
list = data.dmesg[phase]['list']
for devname in sorted(list):
cls, color = colorForName(devname)
dev = list[devname]
info = '@|%.3f|%.3f|%.3f|%d' % (dev['start']*1000.0, dev['end']*1000.0,
dev['ulen']/1000.0, dev['ret'])
devstats[dev['id']] = {'info':info}
dev['color'] = color
height = devtl.phaseRowHeight(0, phase, dev['row'])
top = '%.6f' % ((dev['row']*height) + devtl.scaleH)
left = '%.6f' % (((dev['start']-t0)*100)/tTotal)
width = '%.6f' % (((dev['end']-dev['start'])*100)/tTotal)
length = ' (%0.3f ms) ' % ((dev['end']-dev['start'])*1000)
devtl.html += devtl.html_device.format(dev['id'],
devname+length+phase+'_mode', left, top, '%.3f'%height,
width, devname, ' '+cls, '')
rowtop = devtl.phaseRowTop(0, phase, dev['row'])
height = '%.6f' % (devtl.rowH / 2)
top = '%.6f' % (rowtop + devtl.scaleH + (devtl.rowH / 2))
if data.do_one_initcall:
if('ftrace' not in dev):
continue
cg = dev['ftrace']
large, stats = cgOverview(cg, 0.001)
devstats[dev['id']]['fstat'] = stats
for l in large:
left = '%f' % (((l.time-t0)*100)/tTotal)
width = '%f' % (l.length*100/tTotal)
title = '%s (%0.3fms)' % (l.name, l.length * 1000.0)
devtl.html += html_srccall.format(l.name, left,
top, height, width, title, 'x%d'%num)
num += 1
continue
if('ftraces' not in dev):
continue
for cg in dev['ftraces']:
left = '%f' % (((cg.start-t0)*100)/tTotal)
width = '%f' % ((cg.end-cg.start)*100/tTotal)
cglen = (cg.end - cg.start) * 1000.0
title = '%s (%0.3fms)' % (cg.name, cglen)
cg.id = 'x%d' % num
devtl.html += html_srccall.format(cg.name, left,
top, height, width, title, dev['id']+cg.id)
num += 1
# draw the time scale, try to make the number of labels readable
devtl.createTimeScale(t0, tMax, tTotal, 'boot')
devtl.html += '</div>\n'
# timeline is finished
devtl.html += '</div>\n</div>\n'
# draw a legend which describes the phases by color
devtl.html += '<div class="legend">\n'
pdelta = 20.0
pmargin = 36.0
for phase in data.phases:
order = '%.2f' % ((data.dmesg[phase]['order'] * pdelta) + pmargin)
devtl.html += devtl.html_legend.format(order, \
data.dmesg[phase]['color'], phase+'_mode', phase[0])
devtl.html += '</div>\n'
hf = open(sysvals.htmlfile, 'w')
# add the css
extra = '\
.c1 {background:rgba(209,0,0,0.4);}\n\
.c2 {background:rgba(255,102,34,0.4);}\n\
.c3 {background:rgba(255,218,33,0.4);}\n\
.c4 {background:rgba(51,221,0,0.4);}\n\
.c5 {background:rgba(17,51,204,0.4);}\n\
.c6 {background:rgba(34,0,102,0.4);}\n\
.c7 {background:rgba(51,0,68,0.4);}\n\
.c8 {background:rgba(204,255,204,0.4);}\n\
.c9 {background:rgba(169,208,245,0.4);}\n\
.c10 {background:rgba(255,255,204,0.4);}\n\
.vt {transform:rotate(-60deg);transform-origin:0 0;}\n\
table.fstat {table-layout:fixed;padding:150px 15px 0 0;font-size:10px;column-width:30px;}\n\
.fstat th {width:55px;}\n\
.fstat td {text-align:left;width:35px;}\n\
.srccall {position:absolute;font-size:10px;z-index:7;overflow:hidden;color:black;text-align:center;white-space:nowrap;border-radius:5px;border:1px solid black;background:linear-gradient(to bottom right,#CCC,#969696);}\n\
.srccall:hover {color:white;font-weight:bold;border:1px solid white;}\n'
aslib.addCSS(hf, sysvals, 1, False, extra)
# write the device timeline
hf.write(devtl.html)
# add boot specific html
statinfo = 'var devstats = {\n'
for n in sorted(devstats):
statinfo += '\t"%s": [\n\t\t"%s",\n' % (n, devstats[n]['info'])
if 'fstat' in devstats[n]:
funcs = devstats[n]['fstat']
for f in sorted(funcs, key=lambda k:(funcs[k], k), reverse=True):
if funcs[f][0] < 0.01 and len(funcs) > 10:
break
statinfo += '\t\t"%f|%s|%d",\n' % (funcs[f][0], f, funcs[f][1])
statinfo += '\t],\n'
statinfo += '};\n'
html = \
'<div id="devicedetailtitle"></div>\n'\
'<div id="devicedetail" style="display:none;">\n'\
'<div id="devicedetail0">\n'
for p in data.phases:
phase = data.dmesg[p]
html += devtl.html_phaselet.format(p+'_mode', '0', '100', phase['color'])
html += '</div>\n</div>\n'\
'<script type="text/javascript">\n'+statinfo+\
'</script>\n'
hf.write(html)
# add the callgraph html
if(sysvals.usecallgraph):
aslib.addCallgraphs(sysvals, hf, data)
# add the test log as a hidden div
if sysvals.testlog and sysvals.logmsg:
hf.write('<div id="testlog" style="display:none;">\n'+sysvals.logmsg+'</div>\n')
# add the dmesg log as a hidden div
if sysvals.dmesglog:
hf.write('<div id="dmesglog" style="display:none;">\n')
for line in data.dmesgtext:
line = line.replace('<', '&lt;').replace('>', '&gt;')
hf.write(line)
hf.write('</div>\n')
# write the footer and close
aslib.addScriptCode(hf, [data])
hf.write('</body>\n</html>\n')
hf.close()
return True
# Function: updateCron
# Description:
# (restore=False) Set the tool to run automatically on reboot
# (restore=True) Restore the original crontab
def updateCron(restore=False):
if not restore:
sysvals.rootUser(True)
crondir = '/var/spool/cron/crontabs/'
if not os.path.exists(crondir):
crondir = '/var/spool/cron/'
if not os.path.exists(crondir):
doError('%s not found' % crondir)
cronfile = crondir+'root'
backfile = crondir+'root-analyze_boot-backup'
cmd = sysvals.getExec('crontab')
if not cmd:
doError('crontab not found')
# on restore: move the backup cron back into place
if restore:
if os.path.exists(backfile):
shutil.move(backfile, cronfile)
call([cmd, cronfile])
return
# backup current cron and install new one with reboot
if os.path.exists(cronfile):
shutil.move(cronfile, backfile)
else:
fp = open(backfile, 'w')
fp.close()
res = -1
try:
fp = open(backfile, 'r')
op = open(cronfile, 'w')
for line in fp:
if not sysvals.myCronJob(line):
op.write(line)
continue
fp.close()
op.write('@reboot python %s\n' % sysvals.cronjobCmdString())
op.close()
res = call([cmd, cronfile])
except Exception as e:
pprint('Exception: %s' % str(e))
shutil.move(backfile, cronfile)
res = -1
if res != 0:
doError('crontab failed')
# Function: updateGrub
# Description:
# update grub.cfg for all kernels with our parameters
def updateGrub(restore=False):
# call update-grub on restore
if restore:
try:
call(sysvals.blexec, stderr=PIPE, stdout=PIPE,
env={'PATH': '.:/sbin:/usr/sbin:/usr/bin:/sbin:/bin'})
except Exception as e:
pprint('Exception: %s\n' % str(e))
return
# extract the option and create a grub config without it
sysvals.rootUser(True)
tgtopt = 'GRUB_CMDLINE_LINUX_DEFAULT'
cmdline = ''
grubfile = '/etc/default/grub'
tempfile = '/etc/default/grub.analyze_boot'
shutil.move(grubfile, tempfile)
res = -1
try:
fp = open(tempfile, 'r')
op = open(grubfile, 'w')
cont = False
for line in fp:
line = line.strip()
if len(line) == 0 or line[0] == '#':
continue
opt = line.split('=')[0].strip()
if opt == tgtopt:
cmdline = line.split('=', 1)[1].strip('\\')
if line[-1] == '\\':
cont = True
elif cont:
cmdline += line.strip('\\')
if line[-1] != '\\':
cont = False
else:
op.write('%s\n' % line)
fp.close()
# if the target option value is in quotes, strip them
sp = '"'
val = cmdline.strip()
if val and (val[0] == '\'' or val[0] == '"'):
sp = val[0]
val = val.strip(sp)
cmdline = val
# append our cmd line options
if len(cmdline) > 0:
cmdline += ' '
cmdline += sysvals.kernelParams()
# write out the updated target option
op.write('\n%s=%s%s%s\n' % (tgtopt, sp, cmdline, sp))
op.close()
res = call(sysvals.blexec)
os.remove(grubfile)
except Exception as e:
pprint('Exception: %s' % str(e))
res = -1
# cleanup
shutil.move(tempfile, grubfile)
if res != 0:
doError('update grub failed')
# Function: updateKernelParams
# Description:
# update boot conf for all kernels with our parameters
def updateKernelParams(restore=False):
# find the boot loader
sysvals.getBootLoader()
if sysvals.bootloader == 'grub':
updateGrub(restore)
# Function: doError
# Description:
# generic error function for catastrophic failures
# Arguments:
# msg: the error message to print
# help: True if printHelp should be called after, False otherwise
def doError(msg, help=False):
if help == True:
printHelp()
pprint('ERROR: %s\n' % msg)
sysvals.outputResult({'error':msg})
sys.exit()
# Function: printHelp
# Description:
# print out the help text
def printHelp():
pprint('\n%s v%s\n'\
'Usage: bootgraph <options> <command>\n'\
'\n'\
'Description:\n'\
' This tool reads in a dmesg log of linux kernel boot and\n'\
' creates an html representation of the boot timeline up to\n'\
' the start of the init process.\n'\
'\n'\
' If no specific command is given the tool reads the current dmesg\n'\
' and/or ftrace log and creates a timeline\n'\
'\n'\
' Generates output files in subdirectory: boot-yymmdd-HHMMSS\n'\
' HTML output: <hostname>_boot.html\n'\
' raw dmesg output: <hostname>_boot_dmesg.txt\n'\
' raw ftrace output: <hostname>_boot_ftrace.txt\n'\
'\n'\
'Options:\n'\
' -h Print this help text\n'\
' -v Print the current tool version\n'\
' -verbose Print extra information during execution and analysis\n'\
' -addlogs Add the dmesg log to the html output\n'\
' -result fn Export a results table to a text file for parsing.\n'\
' -o name Overrides the output subdirectory name when running a new test\n'\
' default: boot-{date}-{time}\n'\
' [advanced]\n'\
' -fstat Use ftrace to add function detail and statistics (default: disabled)\n'\
' -f/-callgraph Add callgraph detail, can be very large (default: disabled)\n'\
' -maxdepth N limit the callgraph data to N call levels (default: 2)\n'\
' -mincg ms Discard all callgraphs shorter than ms milliseconds (e.g. 0.001 for us)\n'\
' -timeprec N Number of significant digits in timestamps (0:S, 3:ms, [6:us])\n'\
' -expandcg pre-expand the callgraph data in the html output (default: disabled)\n'\
' -func list Limit ftrace to comma-delimited list of functions (default: do_one_initcall)\n'\
' -cgfilter S Filter the callgraph output in the timeline\n'\
' -cgskip file Callgraph functions to skip, off to disable (default: cgskip.txt)\n'\
' -bl name Use the following boot loader for kernel params (default: grub)\n'\
' -reboot Reboot the machine automatically and generate a new timeline\n'\
' -manual Show the steps to generate a new timeline manually (used with -reboot)\n'\
'\n'\
'Other commands:\n'\
' -flistall Print all functions capable of being captured in ftrace\n'\
' -sysinfo Print out system info extracted from BIOS\n'\
' -which exec Print an executable path, should function even without PATH\n'\
' [redo]\n'\
' -dmesg file Create HTML output using dmesg input (used with -ftrace)\n'\
' -ftrace file Create HTML output using ftrace input (used with -dmesg)\n'\
'' % (sysvals.title, sysvals.version))
return True
# ----------------- MAIN --------------------
# exec start (skipped if script is loaded as library)
if __name__ == '__main__':
# loop through the command line arguments
cmd = ''
testrun = True
switchoff = ['disable', 'off', 'false', '0']
simplecmds = ['-sysinfo', '-kpupdate', '-flistall', '-checkbl']
cgskip = ''
if '-f' in sys.argv:
cgskip = sysvals.configFile('cgskip.txt')
args = iter(sys.argv[1:])
mdset = False
for arg in args:
if(arg == '-h'):
printHelp()
sys.exit()
elif(arg == '-v'):
pprint("Version %s" % sysvals.version)
sys.exit()
elif(arg == '-verbose'):
sysvals.verbose = True
elif(arg in simplecmds):
cmd = arg[1:]
elif(arg == '-fstat'):
sysvals.useftrace = True
elif(arg == '-callgraph' or arg == '-f'):
sysvals.useftrace = True
sysvals.usecallgraph = True
elif(arg == '-cgdump'):
sysvals.cgdump = True
elif(arg == '-mincg'):
sysvals.mincglen = aslib.getArgFloat('-mincg', args, 0.0, 10000.0)
elif(arg == '-cgfilter'):
try:
val = next(args)
except:
doError('No callgraph functions supplied', True)
sysvals.setCallgraphFilter(val)
elif(arg == '-cgskip'):
try:
val = next(args)
except:
doError('No file supplied', True)
if val.lower() in switchoff:
cgskip = ''
else:
cgskip = sysvals.configFile(val)
if(not cgskip):
doError('%s does not exist' % cgskip)
elif(arg == '-bl'):
try:
val = next(args)
except:
doError('No boot loader name supplied', True)
if val.lower() not in ['grub']:
doError('Unknown boot loader: %s' % val, True)
sysvals.bootloader = val.lower()
elif(arg == '-timeprec'):
sysvals.setPrecision(aslib.getArgInt('-timeprec', args, 0, 6))
elif(arg == '-maxdepth'):
mdset = True
sysvals.max_graph_depth = aslib.getArgInt('-maxdepth', args, 0, 1000)
elif(arg == '-func'):
try:
val = next(args)
except:
doError('No filter functions supplied', True)
sysvals.useftrace = True
sysvals.usecallgraph = True
sysvals.rootCheck(True)
sysvals.setGraphFilter(val)
elif(arg == '-ftrace'):
try:
val = next(args)
except:
doError('No ftrace file supplied', True)
if(os.path.exists(val) == False):
doError('%s does not exist' % val)
testrun = False
sysvals.ftracefile = val
elif(arg == '-addlogs'):
sysvals.dmesglog = True
elif(arg == '-expandcg'):
sysvals.cgexp = True
elif(arg == '-dmesg'):
try:
val = next(args)
except:
doError('No dmesg file supplied', True)
if(os.path.exists(val) == False):
doError('%s does not exist' % val)
testrun = False
sysvals.dmesgfile = val
elif(arg == '-o'):
try:
val = next(args)
except:
doError('No subdirectory name supplied', True)
sysvals.testdir = sysvals.setOutputFolder(val)
elif(arg == '-result'):
try:
val = next(args)
except:
doError('No result file supplied', True)
sysvals.result = val
elif(arg == '-reboot'):
sysvals.reboot = True
elif(arg == '-manual'):
sysvals.reboot = True
sysvals.manual = True
# remaining options are only for cron job use
elif(arg == '-cronjob'):
sysvals.iscronjob = True
elif(arg == '-which'):
try:
val = next(args)
except:
doError('No executable supplied', True)
out = sysvals.getExec(val)
if not out:
print('%s not found' % val)
sys.exit(1)
print(out)
sys.exit(0)
else:
doError('Invalid argument: '+arg, True)
# compatibility errors and access checks
if(sysvals.iscronjob and (sysvals.reboot or \
sysvals.dmesgfile or sysvals.ftracefile or cmd)):
doError('-cronjob is meant for batch purposes only')
if(sysvals.reboot and (sysvals.dmesgfile or sysvals.ftracefile)):
doError('-reboot and -dmesg/-ftrace are incompatible')
if cmd or sysvals.reboot or sysvals.iscronjob or testrun:
sysvals.rootCheck(True)
if (testrun and sysvals.useftrace) or cmd == 'flistall':
if not sysvals.verifyFtrace():
doError('Ftrace is not properly enabled')
# run utility commands
sysvals.cpuInfo()
if cmd != '':
if cmd == 'kpupdate':
updateKernelParams()
elif cmd == 'flistall':
for f in sysvals.getBootFtraceFilterFunctions():
print(f)
elif cmd == 'checkbl':
sysvals.getBootLoader()
pprint('Boot Loader: %s\n%s' % (sysvals.bootloader, sysvals.blexec))
elif(cmd == 'sysinfo'):
sysvals.printSystemInfo(True)
sys.exit()
# reboot: update grub, setup a cronjob, and reboot
if sysvals.reboot:
if (sysvals.useftrace or sysvals.usecallgraph) and \
not sysvals.checkFtraceKernelVersion():
doError('Ftrace functionality requires kernel v4.10 or newer')
if not sysvals.manual:
updateKernelParams()
updateCron()
call('reboot')
else:
sysvals.manualRebootRequired()
sys.exit()
if sysvals.usecallgraph and cgskip:
sysvals.vprint('Using cgskip file: %s' % cgskip)
sysvals.setCallgraphBlacklist(cgskip)
# cronjob: remove the cronjob, grub changes, and disable ftrace
if sysvals.iscronjob:
updateCron(True)
updateKernelParams(True)
try:
sysvals.fsetVal('0', 'tracing_on')
except:
pass
# testrun: generate copies of the logs
if testrun:
retrieveLogs()
else:
sysvals.setOutputFile()
# process the log data
if sysvals.dmesgfile:
if not mdset:
sysvals.max_graph_depth = 0
data = parseKernelLog()
if(not data.valid):
doError('No initcall data found in %s' % sysvals.dmesgfile)
if sysvals.useftrace and sysvals.ftracefile:
parseTraceLog(data)
if sysvals.cgdump:
data.debugPrint()
sys.exit()
else:
doError('dmesg file required')
sysvals.vprint('Creating the html timeline (%s)...' % sysvals.htmlfile)
sysvals.vprint('Command:\n %s' % sysvals.cmdline)
sysvals.vprint('Kernel parameters:\n %s' % sysvals.kparams)
data.printDetails()
createBootGraph(data)
# if running as root, change output dir owner to sudo_user
if testrun and os.path.isdir(sysvals.testdir) and \
os.getuid() == 0 and 'SUDO_USER' in os.environ:
cmd = 'chown -R {0}:{0} {1} > /dev/null 2>&1'
call(cmd.format(os.environ['SUDO_USER'], sysvals.testdir), shell=True)
sysvals.stamp['boot'] = (data.tUserMode - data.start) * 1000
sysvals.stamp['lastinit'] = data.end * 1000
sysvals.outputResult(sysvals.stamp)
|
# encoding: cinje
: from .template import page
: def siteoptionstemplate title, ctx, updated=False
: using page title, ctx, lang="en"
<h3 style='text-align: center;'>Site Options ${'(Saved)' if updated else ''}</h3>
<form style='width: 50vw; margin: 0 auto;' action='/siteoptions' method='post'>
<div class='form-group'>
<label for='theme'>Select A Theme</label>
<select class='form-control' id='theme' name='theme'>
: for t in sorted(ctx.themes.keys())
<option ${'selected' if t == ctx.usertheme else ''}>${t}</option>
: end
</select>
</div>
<button type='submit' class='btn btn-default'>Change Options</button>
</form>
: end
: end
|
from collections import deque
import os
import numpy as np
# Define a simple TreeNode struct/record.
class TreeNode():
def __init__(self, x, parent, children, id):
self.x = x
self.parent = parent
self.children = children
self.id = id
# Parameters
MAX_TREE_SIZE = 10000
GOAL_SAMPLE_PROB = 0.1
# This function follows Algorithm 10.3 in the textbook.
# The goal is a single node rather than a set.
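# Loop sketch: sample a point (the goal itself with probability GOAL_SAMPLE_PROB),
# find the nearest tree node, connect them with a straight segment if it is collision-free,
# and stop as soon as the goal is added to the tree.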
def rrt(x_start, x_goal, obstacles):
root = TreeNode(x_start, None, [], 1)
# Get the nearest node by going through the whole tree and finding the node with the smallest distance
def get_nearest_node(pos):
best_node = root
best_dist = (root.x[0]-pos[0])**2 + (root.x[1]-pos[1])**2
que = deque([root])
while len(que) > 0:
node = que.popleft()
dist = (node.x[0]-pos[0])**2 + (node.x[1]-pos[1])**2
if dist < best_dist:
best_dist = dist
best_node = node
que.extend(node.children)
return best_node
# Check whether the line segment from x_1 to x_2 collides with obstacles
def collision_free(x_1, x_2):
for ob in obstacles:
R = ob[2]/2 # radius
C = ob[:2] # center
# We solve the quadratic equation defined by |x_1*t+(1-t)*x_2-C|=R
# There is a collision if the equation has at least one real solution t with 0<=t<=1
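# Writing x(t) = A*t + B with A = x_1 - x_2 and B = x_2 - C, squaring |A*t + B| = R gives
# (A.A)*t^2 + 2*(A.B)*t + (B.B - R^2) = 0, which is where the coefficients a, b, c below come from.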
A = x_1 - x_2
B = x_2 - C
adota = A.dot(A)
adotb = A.dot(B)
bdotb = B.dot(B)
a = adota
b = 2*adotb
c = bdotb-R**2
if b**2 >= 4*a*c:
t1 = (-b+np.sqrt(b**2-4*a*c))/(2*a)
t2 = (-b-np.sqrt(b**2-4*a*c))/(2*a)
if 0<=t1<=1 or 0<=t2<=1:
return False
return True
tree_size = 1
# Return solution representations once goal node is added to the RRT tree
def get_sol(goal_node):
nodes = np.zeros([tree_size, 4])
edges = np.zeros([tree_size-1, 3]) # graph theory guarantees that a tree has #edges = #nodes - 1
# fill nodes and edges by going through the tree
que = deque([root])
edge_index = 0
while len(que) > 0:
node = que.popleft()
nodes[node.id-1] = [node.id, node.x[0], node.x[1], np.linalg.norm(node.x-x_goal)]
que.extend(node.children)
for child in node.children:
edges[edge_index] = [node.id, child.id, np.linalg.norm(node.x-child.x)]
edge_index += 1
# get the path by following parent pointers from the goal node back to the root
path = []
node = goal_node
while node is not None:
path.append(node.id)
node = node.parent
path.reverse()
return nodes, edges, np.array(path)
while tree_size < MAX_TREE_SIZE:
# sample the goal node with a certain probability, as specified
if np.random.binomial(1, GOAL_SAMPLE_PROB) == 1:
x_samp = x_goal
else:
x_samp = np.random.uniform(-0.5, 0.5, 2)
node_nearest = get_nearest_node(x_samp)
# since we use a straight-line path, local planning is trivial
node_new = TreeNode(x_samp, None, [], None)
if collision_free(node_nearest.x, node_new.x):
# insert node_new under node_nearest
tree_size += 1
node_new.id = tree_size
node_nearest.children.append(node_new)
node_new.parent = node_nearest
# numpy returns element-wise equality so add .all()
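# e.g. (np.array([0.5, 0.5]) == np.array([0.5, 0.3])) gives array([ True, False]);
# .all() collapses that to a single boolean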
if (node_new.x == x_goal).all():
# SUCCESS build and return solution representations
return get_sol(node_new)
# FAILURE is reported by raising an exception.
raise Exception('No path to %s is found.' % x_goal)
# Load the obstacles from CSV (assumed rows of x, y, diameter, as used by collision_free)
obstacles = np.loadtxt(os.path.join(os.getcwd(), "results", "obstacles.csv"), delimiter=",")
node_fn = os.path.join(os.getcwd(), "results", "nodes.csv")
edge_fn = os.path.join(os.getcwd(), "results", "edges.csv")
x_start = np.array([-0.5, -0.5])
x_goal = np.array([0.5, 0.5])
nodes, edges, path = rrt(x_start, x_goal, obstacles)
np.savetxt(node_fn, nodes, delimiter=",")
np.savetxt(edge_fn, edges, delimiter=",")
with open(os.path.join(os.getcwd(), "results", "path.csv"), "w") as fd:
fd.write(",".join(map(str, path)))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
# from django.contrib.auth.models import AbstractUser
# from django.core.urlresolvers import reverse
# from django.db import models
# from django.utils.encoding import python_2_unicode_compatible
# from django.utils.translation import ugettext_lazy as _
from django.db import models
from ssu_housing.users.models import User
from django.dispatch import receiver
# from django.conf import settings
# Create your models here.
class HousingUser(models.Model):
user = models.OneToOneField(User)
@receiver(models.signals.post_save, sender=User)
def create_housinguser(sender, instance, created, **kwargs):
if created:
HousingUser.objects.create(user=instance)
@receiver(models.signals.post_save, sender=User)
def save_housinguser(sender, instance, **kwargs):
instance.housinguser.save()
def get_listings(self):
listings = self.listing_set.all().order_by(
"-is_active", "-datetime_created")
return listings
def __str__(self):
return self.user.email
class Listing(models.Model):
FOR_RENT_BY_CHOICES = (
('owner', 'Owner'),
('management_company_or_broker', 'Management Company/Broker'),
('tenant', 'Tenant'),
)
LEASE_DURATION_CHOICES = (
('1_year', '1 Year'),
('6_months', '6 Months'),
('1_month', '1 Month'),
('sublet_temp', 'Sublet/Temporary'),
('other', 'Other(Please explain)'),
)
PROPERTY_TYPE_CHOICES = (
('apartment', 'Apartments'),
('house', 'Houses'),
('homestay', 'Homestay'),
)
FURNISHED_CHOICES = (
('none', 'Unfurnished'),
('lightly_furnished', 'Lightly furnished'),
('fully_furnished', 'Fully furnished'),
)
PARKING_SPACE_COUNT_CHOICES = (
('0', 'None'),
('1', '1'),
('2', '2'),
('3_plus', '3+'),
)
PARKING_TYPE_CHOICES = (
('garage', 'Garage'),
('carport', 'Carport'),
('off_street', 'Off street'),
('other', 'Other'),
)
PET_ALLOWED_CHOICES = (
('no_pets_allowd', 'No pets allowed'),
('small', "Small"),
('big', 'Big'),
)
LAUNDRY_TYPE_CHOICES = (
('none', 'None'),
('in_unit', 'In-unit'),
('shared', 'Shared')
)
COOLING_CHOICES = (
('none', 'None'),
('air_conditioning', 'Air conditioning'),
('central_ac', 'Central A/C'),
)
listing_owner = models.ForeignKey(HousingUser, null=True)
is_active = models.BooleanField(default=False)
datetime_created = models.DateTimeField(auto_now_add=True)
datetime_modified = models.DateTimeField(auto_now=True)
street = models.CharField(max_length=255)
city = models.CharField(max_length=255)
state = models.CharField(max_length=128)
zip_code = models.CharField(max_length=9)
for_rent_by = models.CharField(max_length=255, choices=FOR_RENT_BY_CHOICES)
contact_name = models.CharField(max_length=255)
contact_email = models.EmailField()
contact_phone = models.CharField(max_length=15)
lease_monthly_cost = models.PositiveSmallIntegerField()
lease_deposit = models.PositiveSmallIntegerField()
lease_duration = models.CharField(max_length=255, choices=LEASE_DURATION_CHOICES)
lease_duration_custom = models.CharField(max_length=128, blank=True, default='')
date_available = models.DateField()
additional_lease_terms = models.TextField(
blank=True,
help_text='Example: Owner pays for trash and sewer. Tenant responsible for gas and electric. Owner handles snow shoveling, lawn, garden and driveway maintenance',
default=''
)
property_type = models.CharField(max_length=20, choices=PROPERTY_TYPE_CHOICES)
lease_whole_unit = models.BooleanField(default=False)
owner_in_building = models.BooleanField(default=False)
bedroom_count = models.PositiveSmallIntegerField()
bathroom_count = models.PositiveSmallIntegerField()
unit_sqft = models.PositiveSmallIntegerField(default=0)
unit_floor = models.PositiveSmallIntegerField()
furnished = models.CharField(max_length=17, choices=FURNISHED_CHOICES)
furnished_details = models.CharField(blank=True, help_text='Furnishing detail, example: bed only...', default='', max_length=512)
is_water_included = models.BooleanField(default=False)
is_electricity_included = models.BooleanField(default=False)
is_heat_included = models.BooleanField(default=False)
is_internet_included = models.BooleanField(default=False)
is_cable_included = models.BooleanField(default=False)
parking_space_count = models.CharField(max_length=2, choices=PARKING_SPACE_COUNT_CHOICES)
parking_type = models.CharField(max_length=10, choices=PARKING_TYPE_CHOICES)
pet_allowed = models.CharField(max_length=14, choices=PET_ALLOWED_CHOICES)
laundry_type = models.CharField(max_length=10, choices=LAUNDRY_TYPE_CHOICES)
property_title = models.CharField(max_length=255, help_text='Example: [home stay 1 bedroom][apartment with 3 room][in-law apartment]', default='')
property_description = models.TextField(blank=True, help_text='Example: above hill, driveway slippery, renting to international students for a long time... A student from China/Morocco in one of the rooms', default='')
has_dishwasher = models.BooleanField(default=False)
has_garbage_disposal = models.BooleanField(default=False)
has_microwave = models.BooleanField(default=False)
has_fridge = models.BooleanField(default=False)
cooling = models.CharField(max_length=20, choices=COOLING_CHOICES)
image1 = models.ImageField(upload_to='images', verbose_name='Image', blank=True)
image2 = models.ImageField(upload_to='images', verbose_name='Image', blank=True)
image3 = models.ImageField(upload_to='images', verbose_name='Image', blank=True)
def __str__(self):
return self.property_title
class Meta:
ordering = ['-datetime_modified']
def get_metric_area(self):
metric_area = self.unit_sqft * 0.0929
return int(metric_area)
def get_address(self):
address = "{0}{1}{2}{3}".format(self.street, self.city, self.state, self.zip_code)
return address
def get_listings(self):
listings = self.listing_set.all().order_by("-is_active", "-datetime_created")
return listings
|
import csv
from ..utility import nanoseconds_since_midnight as labtime
import os
from datetime import datetime
from .row_formatters import *
author = 'hasan ali demirci'
# TODO: move this to redis. There are many good reasons to do that.
logs_folder = 'hft_bcs/hft_logging/logs/'
class SessionEvents:
dispatch = {
'stage enter': format_senter,
'stage replace': format_sreplace,
'stage cancel': format_scancel,
'confirm enter': format_center,
'confirm replace': format_creplace,
'confirm cancel': format_ccancel,
'confirm execution': format_cexec,
'orders in market': format_orders,
}
def __init__(self, design):
self.backup = list()
self.raw_logs = list()
self.string_logs = list()
self.columns = ['time', 'group', 'player', 'event', 'context']
self.prefix = design
self.set_dump_path()
def set_dump_path(self):
today = datetime.now().strftime('%Y-%m-%d_%H-%M')
filename = logs_folder + self.prefix + '_' + today + '.csv'
self.path = os.path.join(os.getcwd(), filename)
def push(self, processor, **kwargs):
raw_log = processor(**kwargs)
self.raw_logs.append(raw_log)
def form(self, row):
out = row
if isinstance(row['context'], dict):
event = row['event']
out = self.__class__.dispatch[event](row)
return out
def convert(self):
self.string_logs = [self.form(row) for row in self.raw_logs]
self.backup.extend(self.raw_logs)
self.raw_logs = list()
def dump(self, header=False):
with open(self.path, 'a') as f:
writer = csv.DictWriter(f, self.columns)
if header:
writer.writeheader()
writer.writerows(self.string_logs)
events = SessionEvents('cda')
|
import pygame
from pygame.locals import *
import pytmx
from pytmx import TiledImageLayer
from pytmx import TiledObjectGroup
from pytmx import TiledTileLayer
from pytmx.util_pygame import load_pygame
class Mapa():
def __init__(self):
self.mapa = load_pygame("sprites/street.tmx") # LOAD THE MAP
self.mapa_width = self.mapa.width * self.mapa.tilewidth # SET ITS WIDTH
self.mapa_height = self.mapa.height * self.mapa.tileheight # SET ITS HEIGHT
self.mapa_data = self.mapa # SET ITS DATA
def render(self, surface): # FUNCTION TO RENDER THE MAP
ti = self.mapa_data.get_tile_image_by_gid
for layer in self.mapa_data.visible_layers:
if isinstance(layer, pytmx.TiledTileLayer):
for x, y, gid, in layer:
tile = ti(gid)
if tile:
surface.blit(tile,(x * self.mapa_data.tilewidth, y*self.mapa_data.tileheight))
def make_map(self): # FUNCTION TO DRAW THE MAP
temp_surface = pygame.Surface((self.mapa_width, self.mapa_height))
self.render(temp_surface)
return temp_surface |
from django.urls import path
from . import views
urlpatterns = [
path('radio/', views.radio_form_method),
path('radio_value/', views.radio_form_method2),
path('views1/',views.template_view_method),
path('views2/',views.Template_view_class.as_view()),
path('list/' , views.InformationsListView.as_view()),
path('detail/<int:pk>' , views.InformationsDetailView.as_view()),
]
|
#-------------------------------------
# Unit Test
#-----------------------------------
import unittest
from mspack import msmath
from mspack import msstring
class MsPackMsMathTestCase(unittest.TestCase):
def test_sum(self):
sum = msmath.sum(8, 12)
self.assertEqual(sum, 20)
if __name__ == '__main__':
unittest.main()
|
import pandas as pd
import numpy as np
def small_df():
return pd.DataFrame(
{
"a": [1, 2, 3, 4, 5],
"b": ["v", "W", "X", "Y", "Z"],
"c": [1.5, 2.5, 3.5, 4.5, 5.5],
"d": [
np.datetime64("now"),
np.datetime64("now"),
np.datetime64("now"),
np.datetime64("now"),
np.datetime64("now"),
],
"e": [True, True, False, False, True],
}
)
def small_df_non_int_index():
df = small_df()
df.set_index(
pd.Index(["a_one", "b_two", "c_three", "d_four", "e_five"], dtype=str),
inplace=True,
)
return df
def randint_df(size: int):
return pd.DataFrame(
np.random.randint(0, size, size=(size, 4)), columns=list("ABCD")
)
def random_float_df(size: int, num_cols: int):
return pd.DataFrame(
np.random.uniform(low=0.5, high=13.3, size=(size, num_cols)),
columns=list("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz")[
0:num_cols
],
)
def random_df(size: int):
return pd.DataFrame(
{
"a": np.random.uniform(low=0.5, high=13.3, size=(size,)),
"b": np.random.uniform(low=1000.5, high=5000.3, size=(size,)),
"c": np.random.choice(["pooh", "rabbit", "piglet"], size),
"d": np.random.randint(0, 10000, size),
"e": np.random.choice([True, False], size),
"f": np.random.uniform(low=0.5, high=13.3, size=(size,)),
"g": random_date_generator("2018-01-01 00:00:01", 100, size),
"h": random_date_generator("2018-01-01 00:00:01", 100, size),
}
)
def random_datetime_df(size: int, num_cols: int):
return pd.DataFrame(
random_date_generator("2018-01-01 00:00:01", 200, (size, num_cols)),
columns=list("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz")[
0:num_cols
],
)
def random_date_generator(start_date, range_in_days, size):
days_to_add = np.arange(0, range_in_days)
dates = np.full(size, fill_value=np.datetime64(start_date))
random_dates = dates + np.random.choice(days_to_add, size)
return random_dates
def random_df_with_nan(size: int):
df = random_df(size)
rand_ind = np.random.randint(1, size, int(size / 4))
df["a"][rand_ind] = np.nan
return df
def random_float_df_with_nan(size: int):
df = random_float_df(size, 4)
rand_ind = np.random.randint(1, size, int(size / 4))
df["A"][rand_ind] = np.nan
rand_ind = np.random.randint(1, size, int(size / 4))
df["C"][rand_ind] = np.nan
return df
def random_cat_df(size: int, num_cols: int):
return pd.DataFrame(
np.random.choice(
["one", "two", "three", "four", "five", "six", "seven", "eight", "nine"],
size=(size, num_cols),
),
columns=list("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz")[
0:num_cols
],
)
|
from os import listdir
from os.path import join as path_join
from subprocess import check_output
inDir=r'.\csv' #Directory holding input files
outDir=r'.\json' #Directory holding output files
#Change to whatever version your system is running, remove the folder if on Path
cycloneAddress=r'.\cyclonedx-win-x64'
files = listdir(inDir)
complete = 0
print("Beginning bulk csv conversion")
print(f"{len(files)} Files to Convert\n")
for each_file in files:
filename,ext = each_file.split(".")
args = [
cycloneAddress,
'convert',
'--input-file', path_join(inDir, filename+"."+ext),
'--output-file', path_join(outDir, filename+".json")
]
try:
check_output(args)
print(f"Successfully Converted {filename}.{ext} to json!")
complete += 1
except:
print(f"Something went wrong with converting {filename} to json!")
print(f"\n{complete} Successful conversions. {len(files)-complete} failures!")
|
number = "9,223,372,036,854,775,807"
cleanedNumber = ''
for char in number:
if char in '0123456789':
cleanedNumber = cleanedNumber + char
newNumber = int(cleanedNumber)
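# equivalently: newNumber = int("".join(c for c in number if c.isdigit()))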
print("The number is {}",format(newNumber))
for state in ["not pinin'", "no more", "a stiff", "bereft of life"]:
print("This parrot is "+ state)
|
from . import bp
from app.erros import bad_request
from app import cross_origin,db
from flask import jsonify,request
from app.authenticate import check_token_dec
from app.models import Cores,Direcao,CoresDirecao
@bp.route('/',methods=['GET','POST'])
@cross_origin()
@check_token_dec
def direcao_():
try:
#print(request.method)
if request.method == 'GET':
cor = CoresDirecao.query.join(Direcao,Direcao.id_direcao == CoresDirecao.direcao_id) \
.join(Cores,Cores.id_cores == CoresDirecao.cor_id) \
.add_columns(CoresDirecao.id_cores_direcao,Cores.nome,Direcao.nome) \
.all()
items= []
for item in cor:
items.append({
"id_cores_direcao":str(item[1]),
"cor":str(item[2]),
"direcao":str(item[3])
})
message = {
"items":items
}
return jsonify(message),200
data = request.get_json()
print(data)
dir_cor = CoresDirecao()
dir_cor.from_dict(data)
if request.method == 'POST':
db.session.add(dir_cor)
db.session.commit()
return jsonify({"msg": 'cor inserida com sucesso'}),201
except Exception as e:
print(e)
return bad_request(503,f"""error ao fazer a consulta no metodo {str(request.method)}""")
@bp.route('/deletar',methods=['POST'])
@cross_origin()
@check_token_dec
def delete_direcao():
try:
data = request.get_json()
print(data)
CoresDirecao.query.filter(CoresDirecao.id_cores_direcao == data['id_cores_direcao']).delete()
# print(dir_cor)
db.session.commit()
return jsonify({"msg": 'cor deletada com sucesso'}),204
except Exception as e:
print(e)
return bad_request(503,f"""error ao fazer delete""") |
from django.db import models
class ActionPermission(models.Model):
"""
This model only serves to provide a content type for action permissions.
When this table is created, Django makes a content type for the model.
We associate all auto-generated action permissions with this content type
in order to ensure that action permissions don't accidentally override
other Django permissions.
"""
class Meta:
default_permissions = ()
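# A minimal illustrative sketch (not part of this app) of how an auto-generated
# action permission could be attached to this model's content type; the codename
# and name below are made-up examples.
#
# from django.contrib.auth.models import Permission
# from django.contrib.contenttypes.models import ContentType
#
# content_type = ContentType.objects.get_for_model(ActionPermission)
# Permission.objects.get_or_create(
#     codename='run_report_action',
#     name='Can run the report action',
#     content_type=content_type,
# )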
|
# https://leetcode.com/problems/longest-substring-without-repeating-characters/
class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
max_len = 0
dummy = []
x = 0
while x < len(s):
if s[x] not in dummy:
dummy.append(s[x])
if max_len < len(dummy):
max_len = len(dummy)
x += 1
else:
dummy.pop(0)
return max_len |
def getPossibleSuffixes(s):
if len(s) == 5:
print(0)
return
possible_suffixes = s[5:len(s)]
suffixes = []
helper_hash = {}
suffix_starts = [0 for x in range(len(possible_suffixes))]
prev_2 = ["" for x in range(len(possible_suffixes))]
suffix_starts[-1] = 1
for i in range(len(possible_suffixes)-2, -1, -1):
if suffix_starts[i+1] and prev_2[i+1] != possible_suffixes[i:i+2]:
if not helper_hash.get(possible_suffixes[i:i+2]):
suffixes.append(possible_suffixes[i:i+2])
helper_hash[possible_suffixes[i:i+2]] = True
if i-1>=0:
prev_2[i-1] = possible_suffixes[i:i+2]
suffix_starts[i-1] = 1
if i+2 < len(possible_suffixes) and suffix_starts[i+2] and prev_2[i+2] != possible_suffixes[i:i+3]:
if not helper_hash.get(possible_suffixes[i:i+3]):
suffixes.append(possible_suffixes[i:i+3])
helper_hash[possible_suffixes[i:i+3]] = True
if i-1>=0:
if prev_2[i-1] != "":
prev_2[i-1] = ""
else:
prev_2[i-1] = possible_suffixes[i:i+3]
suffix_starts[i-1] = 1
print(len(suffixes))
suffixes.sort()
for suffix in suffixes:
print(suffix)
s = input()
getPossibleSuffixes(s) |
Author = 'Liu Lei'
import json
#def sayhi(name):
# print("hello",name)
info={
'name':'liulei',
'age':22
}
f=open("text.txt","r")
data=json.loads(f.read())
print(data) |
#Interpolation is used to change the original size of the image
#We can increase the width and height of the image
#There are almost 5 types of interpolation
#INTER_AREA, INTER_CUBIC, INTER_NEAREST, INTER_LANCZOS4, INTER_LINEAR
import cv2
import numpy as np
#to load an image
image = cv2.imread('C:/Users/LENOVO IDEAPAD 320/OneDrive/Desktop/others/01_13_54_web.jpg')
#cv2.resize(image, dsize, fx=xscale, fy=yscale, interpolation=method); dsize is the explicit output size, or None to use the scale factors
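# e.g. cv2.resize(image, (640, 480), interpolation=cv2.INTER_NEAREST) would force an exact 640x480 output (illustrative size)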
#changing the image to 3/4th of its size
image_scale = cv2.resize(image, None, fx = 0.75, fy = 0.75)
cv2.imshow("Scaling - Linear Transformatio", image_scale)
cv2.waitKey(0)
#Applying the INTER_CUBIC interpolation
img_scale = cv2.resize(image, None, fx = 2, fy = 2, interpolation = cv2.INTER_CUBIC)
cv2.imshow("Scaling - Cubic Interpolation", img_scale)
cv2.waitKey(0)
#Applying the INTER AREA interpolation
img_area = cv2.resize(image, (900, 400), interpolation = cv2.INTER_AREA)
cv2.imshow("INTER AREA IMAGE", img_area)
#It will show the window until the user press a key
cv2.waitKey(0)
#This closes all windows
cv2.destroyAllWindows()
|
#in this example, the input was "venit"
str = "Venit"
#I then check whether the ending is "it"; if so I drop it and add "imus" to conjugate it
if str[-2:] == "it":
print(str[:-2] + "imus")
else:
print("Error: Not an -it verb") |
print ("Digite uma letra e ela se´ra correspondente a um número do vetor")
import string
letras = list(string.ascii_lowercase)
letra =input('Digite uma letra: ')
for i in range(len(letras)):
if(letra==letras[i]):
print(i)
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import pprint
import re
_re_num = re.compile(r'\s(?P<num>\d+)\s+(?P<name>(RPL|ERR)_\w+)\s*(?P<_>.*)')
_re_mask = re.compile(r'^\s{24,25}(?P<_>("(<|:).*|\S.*"$))')
def main():
print('Parsing rfc file...')
item = None
items = []
out = open('irc3/_rfc.py', 'w')
with open('irc3/rfc1459.txt') as fd:
for line in fd:
line = line.replace('<host> * <host>', '<host> * <host1>')
line = line.replace('<# visible>', '<visible>')
line = line.replace('<H|G>[*][@|+]', '<modes>')
line = line.replace('<nick!user|*!*>@<host|server>', '<mask>')
match = _re_num.search(line)
if match is not None:
if item:
items.append((int(item['num']), item))
item = defaultdict(list)
match = match.groupdict()
if '_' in match:
match.pop('_')
item.update(match)
match = _re_mask.search(line)
if match is not None:
item['mask'].append(match.groupdict()['_'])
_re_sub = re.compile('(?P<m><[^>]+>)')
out.write('''
class retcode(int):
name = None
re = None
'''.lstrip())
valids = set()
for i, item in sorted(items):
mask = item['mask']
if mask:
num = item['num']
valids.add(i)
out.write('\n')
out.write('%(name)s = retcode(%(num)s)\n' % item)
out.write('%(name)s.name = "%(name)s"\n' % item)
mask = [s.strip('"\\ ') for s in mask]
omask = ' '.join(mask)
params = []
def repl(v):
v = v.lower()
v = v.replace('nickname', 'nick')
v = v.replace('nicks', 'nicknames')
for c in '!@*':
v = v.replace(c, '')
for c in '| ':
v = v.replace(c, '_')
v = v.strip(' _')
if v.endswith('_name'):
v = v[:-5]
if v == 'client_ip_address_in_dot_form':
v = 'clientip'
if v == 'integer':
for k in 'xyz':
if k not in params:
v = k
break
if v == 'command':
v = 'cmd'
if v == 'real':
v = 'realname'
if v == 'name' and 'nick' not in params:
v = 'nick'
if v == 'user':
if 'nick' not in params and num not in ('352',):
v = 'nick'
else:
v = 'username'
return v
def tsub(m):
v = m.groupdict()['m'].strip('<>')
v = repl(v)
params.append(v)
return '{%s}' % v
if item['num'] == '303':
omask = ':<nicknames>'
elif item['num'] == '311':
omask = omask.replace('*', '<m>')
elif item['num'] == '319':
omask = ':<channels>'
elif item['num'] == '353':
omask = '<m> <channel> :<nicknames>'
tpl = _re_sub.sub(tsub, omask)
for v in ((' %d ', '{days}'),
('%d:%02d:%02d', '{hours}'),
(':%-8s %-9s %-8s', '{x} {y} {z}')):
tpl = tpl.replace(*v)
tpl_ = [':{c.srv} ' + item['num'] + ' {c.nick} ']
if len(tpl) > 60:
tpl_.extend([':' + s for s in tpl.split(':', 1)])
else:
tpl_.append(tpl)
tpl = '\n '.join([repr(v) for v in tpl_])
params = []
def msub(m):
v = m.groupdict()['m'].strip('<>')
v = repl(v)
params.append(v)
return r'(?P<%s>\S+)' % v
mask = _re_sub.sub(msub, omask)
if '???? ' in mask:
mask = mask.replace('???? ', r'\S+ ')
if ' * ' in mask:
mask = mask.replace(' * ', r' . ')
if ':' in mask:
mask = mask.split(':', 1)[0]
mask += ':(?P<data>.*)'
mask = r'(?P<srv>\S+) ' + str(i) + ' (?P<me>\\S+) "\n r"' + mask
mask = mask.replace(
r' (?P<server>\S+)',
' "\n r"(?P<server>\\S+)')
mask = mask.replace(
r' (?P<sent_messages>\S+)',
' "\n r"(?P<sent_messages>\\S+)')
item['mask'] = mask
params = [p for p in params if '<%s>' % p in mask]
if '<data>' in mask and 'data' not in params:
params.append('data')
out.write('%(name)s.re = (\n r"^:%(mask)s")\n' % item)
params = pprint.pformat(
['srv', 'me'] + params, width=60, indent=4)
if len(params) > 60:
params = params.replace('[', '[\n ')
out.write('%(name)s.tpl = (\n' % dict(item))
out.write(' %s)\n' % tpl)
out.write('%(name)s.params = %(p)s\n' % dict(item, p=params))
out.write('\n')
out.write('RETCODES = {\n')
for i, item in sorted(items):
if i in valids:
out.write(' %(num)s: %(name)s,\n' % item)
out.write('}\n')
out.close()
if __name__ == '__main__':
main()
|
import pygame
from pygame import mouse
from pygame.constants import MOUSEBUTTONDOWN
from pygame.event import clear, get
import random
class Text:
def score_text():
font = pygame.font.Font("freesansbold.ttf",32)
text = font.render(str(Game.score),True,(255,255,255))
Game.screen.blit(text,(25,25))
def floating_text():
font = pygame.font.Font("External-sources/Font.ttf",75)
text = font.render(str("Blocky Dodge"),True,(255,255,255))
Game.screen.blit(text,(200,Game.textY))
def play_text():
font = pygame.font.Font("freesansbold.ttf",32)
text = font.render(str("Play"),True,(154,205,200))
Game.screen.blit(text,(275,380))
def exit_text():
font = pygame.font.Font("freesansbold.ttf",32)
text = font.render(str("Exit"),True,(154,205,200))
Game.screen.blit(text,(450,380))
def exit_text_for_death_screen():
font = pygame.font.Font("freesansbold.ttf",32)
text = font.render(str("Exit"),True,(154,205,200))
Game.screen.blit(text,(450,380))
def try_again_text():
font = pygame.font.Font("freesansbold.ttf",32)
text = font.render(str("Retry"),True,(154,205,200))
Game.screen.blit(text,(270,380))
class Game:
# Initialization
pygame.init()
screen = pygame.display.set_mode((800,600))
title = pygame.display.set_caption("Blocky dodge")
icon = pygame.image.load("External-sources/block.png")
pygame.display.set_icon(icon)
# movement and coords
PlayerY = 50
PlayerX = 100
velocity = 0
acceleration = 0.003
enemyX = 1200
random_height = random.randint(-50 ,400)
textY = 100
increment = 1
# Time and other related
clock = pygame.time.Clock()
time_elapsed = 0
dt = clock.tick()
# technical
score = 0
death_screen = False
main_menu = True
main_game = True
running = True
# Game objects
play_button = pygame.draw.rect(screen,(0,0,0),pygame.Rect(235,350,150,100))
exit_button = pygame.draw.rect(screen,(0,0,0),pygame.Rect(410,350,150,100))
player = pygame.draw.rect(screen,(255,255,255),pygame.Rect(PlayerX,PlayerY,25,25))
enemy = pygame.draw.rect(screen,(255,69,0),pygame.Rect(enemyX,random_height,100,200))
test = pygame.draw.rect(screen,(255,255,255),pygame.Rect(100,100,200,100))
backround_screen = pygame.draw.rect(screen,(200,200,200),pygame.Rect(185,300,425,200))
exit_button_for_deathscreen = pygame.draw.rect(screen,(0,0,0),pygame.Rect(410,350,150,100))
try_again_button = pygame.draw.rect(screen,(0,0,0),pygame.Rect(235,350,150,100))
# Functions for game etc start end
def increment_speed():
Game.enemyX += -Game.increment
def start_screen():
Game.screen.fill((200,200,200))
play_button = pygame.draw.rect(Game.screen,(0,0,0),pygame.Rect(235,350,150,100))
exit_button = pygame.draw.rect(Game.screen,(0,0,0),pygame.Rect(410,350,150,100))
Text.floating_text()
Text.play_text()
Text.exit_text()
def death_menu():
backround_screen = pygame.draw.rect(Game.screen,(200,200,200),pygame.Rect(185,300,425,200))
exit_button = pygame.draw.rect(Game.screen,(0,0,0),pygame.Rect(410,350,150,100))
try_again_button = pygame.draw.rect(Game.screen,(0,0,0),pygame.Rect(235,350,150,100))
Text.exit_text_for_death_screen()
Text.try_again_text()
def collision():
player = pygame.draw.rect(Game.screen,(255,255,255),pygame.Rect(Game.PlayerX,Game.PlayerY,25,25))
enemy = pygame.draw.rect(Game.screen,(255,69,0),pygame.Rect(Game.enemyX,Game.random_height,100,200))
if player.colliderect(enemy):
return True
def maingameupdate():
Game.PlayerY += Game.velocity
Game.velocity += Game.acceleration
Game.increment_speed()
# check for position to move back
if Game.enemyX < -150:
Game.enemyX = 800
Game.score += 1
Game.random_height = random.randint(-50,400)
Game.increment += 0.05
enemy = pygame.draw.rect(Game.screen,(255,69,0),pygame.Rect(Game.enemyX,Game.random_height,300,200))
# Boundary and collision for the player
if Game.PlayerY > 625 or Game.PlayerY < -25:
Game.death_screen = True
Game.main_game = False
if Game.collision() == True:
Game.death_screen = True
Game.main_game = False
pygame.display.flip()
pygame.display.update()
def double_rendering():
Game.screen.fill((0,0,0))
Text.score_text()
player = pygame.draw.rect(Game.screen,(255,255,255),pygame.Rect(Game.PlayerX,Game.PlayerY,25,25))
enemy = pygame.draw.rect(Game.screen,(255,69,0),pygame.Rect(Game.enemyX,Game.random_height,100,200))
def gameloop():
while Game.running:
if Game.main_menu == True:
Game.start_screen()
elif Game.main_game == True:
Game.maingameupdate()
Game.double_rendering()
elif Game.death_screen == True and Game.main_game == False:
Game.death_menu()
pygame.display.flip()
# handle death-screen clicks inside the event loop below so a click is never missed
for event in pygame.event.get():
if event.type == pygame.QUIT:
Game.running = False
if Game.death_screen == True and Game.main_game == False:
if event.type == pygame.MOUSEBUTTONDOWN:
mouse_position = pygame.mouse.get_pos()
if Game.exit_button_for_deathscreen.collidepoint(mouse_position):
Game.running = False
if Game.try_again_button.collidepoint(mouse_position):
Game.death_screen = False
Game.score = 0
Game.enemyX = 800
Game.PlayerY = 50
Game.PlayerX = 100
Game.main_game = True
if Game.main_menu == True:
if event.type == MOUSEBUTTONDOWN:
mouse_pos = pygame.mouse.get_pos()
if Game.play_button.collidepoint(mouse_pos):
Game.main_menu = False
if Game.exit_button.collidepoint(mouse_pos):
Game.running = False
print("hello")
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_SPACE:
Game.velocity = -0.6
pygame.display.flip()
gameloop() |
room = [line.strip() for line in open('in').readlines()]
next_room = [[None] * len(room[0]) for _ in range(len(room))]
print('\n'.join(room))
changes = 1
while changes != 0:
changes = 0
for row, line in enumerate(room):
for col, seat in enumerate(line):
if seat == '.' or seat is None:
next_room[row][col] = '.'
continue
num_adj = 0
for offset_row in (-1, 0, 1):
for offset_col in (-1, 0, 1):
if (row + offset_row < 0 or
col + offset_col < 0 or
row + offset_row >= len(room) or
col + offset_col >= len(room[0]) or
offset_col == offset_row == 0):
continue
#print(row + offset_row, offset_col + col, len(room), len(room[0]))
if room[row + offset_row][col + offset_col] == '#':
num_adj += 1
if num_adj == 0:
if room[row][col] != '#':
changes += 1
next_room[row][col] = '#'
elif num_adj >= 4:
if room[row][col] != 'L':
changes += 1
next_room[row][col] = 'L'
else:
next_room[row][col] = room[row][col]
#print(changes)
room = next_room
#print('\n'.join(''.join(l) for l in room))
next_room = [[None] * len(room[0]) for _ in range(len(room))]
print(sum(line.count('#') for line in room))
|
from backprop import *
from handleimages import *
if __name__ == '__main__':
images = all_images()
pat = [[make_input(i), make_output(i)] for i in images]
|
__author__ = 'artemiibezguzikov'
import cv2
import numpy as np
from matplotlib import pyplot as plt
img = cv2.imread('low-contrast.png')
img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
hist, bins = np.histogram(img.flatten(), 256, [0, 256])
cdf = hist.cumsum()
cdf_m = np.ma.masked_equal(cdf,0)
cdf_m = (cdf_m - cdf_m.min()) * 255 / (cdf_m.max() - cdf_m.min())
cdf = np.ma.filled(cdf_m, 0).astype('uint8')
equ = cv2.equalizeHist(img)
img2 = cdf[img]
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))
cl1 = clahe.apply(img)
res = np.hstack((img, equ))
res = np.vstack((res, np.hstack((img2, cl1))))
res = cv2.resize(res, (0,0), fx=0.5, fy=0.5)
cv2.namedWindow('frame')
cv2.imshow('frame', res)
cv2.waitKey(0)
cv2.destroyAllWindows() |
'''
Dictionaries
'''
d1 = {
"day": 18,
"month": 6,
"year": 1983
}
d2 = dict(bananas=3,apples=5,oranges=2,bag="basket")
d3 = dict([("street","Kronverksky pr."), ("house",
49)])
d4 = dict.fromkeys(["1","2"], 3)
print("Dict d1 = ", d1)
print("Dict d2 by dict()= ", d2)
print("Dict d3 by dict([])= ", d3)
print("Dict d4 by fromkeys = ", d4)
print("\n")
startDict0 = {"ready0": 3,"set0": 2, "go0": 1}
startDict1 = dict(ready1 = 3, set1 = 2, go1 = 1)
startDict2 = dict([("ready2",3), ("set2",2), ("go2",1)])
print(startDict0)
print(startDict1)
print(startDict2)
dict1 = dict.fromkeys(["key1","key2"], 77)
print(dict1)
|
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
def gs_save(driver):
time.sleep(1)
driver.find_element_by_xpath(".//*[@id='tbSave']/input").click()
time.sleep(10)
# print ("Save successfully")
# Click save button on GS page
def gs_assess(driver):
time.sleep(1)
locator1 = (By.XPATH,".//*[@id='tbAssess']/input")
WebDriverWait(driver, 10,0.5).until(EC.element_to_be_clickable(locator1)).click()
time.sleep(10)
# print ("Assess successfully")
# Click assess button on GS page
def gs_register(driver):
time.sleep(1)
locator1 = (By.XPATH,".//*[@id='tbRegister']/input")
WebDriverWait(driver, 10,0.5).until(EC.element_to_be_clickable(locator1)).click()
time.sleep(20)
#print ("Register successfully")
# Click assess button on GS page |
import csv
import openpyxl
import xlrd
from collections import defaultdict
from scraper.download_zip import *
import os
class data_processor():
def __init__(self):
self.upzipped_file = download_zip()
#os.chdir(cwd)
pass
def parse_txt_file(self):
#input_txt = open('Data/foidevAdd.txt', 'r')
file_name = self.upzipped_file.file_name.replace('.zip', '.txt')
input_txt = open(file_name, 'r')
lines = input_txt.read().split('\n')
new_lines = []
for line in lines:
if line != '':
new_lines.append(line.split('|'))
self.header = new_lines[0]
self.total_data = new_lines[1:]
input_txt.close()
#print(self.total_data)
def parse_xls_file(self):
self.company_names = []
try:
self.input_xls = xlrd.open_workbook('Data/Scraper Company Names.xls')
sheet = self.input_xls.sheet_by_index(0)
for rx in range(sheet.nrows):
if sheet.row(rx)[0].value != '' and sheet.row(rx)[0].value != 'Company Name':
self.company_names.append(sheet.row(rx)[0].value)
except:
pass
#print(self.company_names)
def save_parsed_data(self):
self.output_csv = open('Data/foidevAdd.csv', 'w', encoding='utf-8', newline='')
self.csv_writer = csv.writer(self.output_csv)
self.csv_writer.writerow(self.header)
for row in self.total_data:
self.csv_writer.writerow(row)
self.output_csv.close()
if __name__ == '__main__':
app = data_processor()
app.parse_txt_file()
app.parse_xls_file()
app.save_parsed_data()
|
def submask(f):
print("subnetting now...")
return f
def calcsubmask(f) :
print("calculating now...")
if f == 24:
sm = "255.255.255.0"
elif f == 25:
sm = "255.255.255.128"
elif f==26:
sm = "255.255.255.192"
elif f==27:
sm = "255.255.255.224"
else:
sm = "unsupported prefix"
return sm
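# A minimal generic sketch (added for illustration, not part of the original script):
# derive the dotted-decimal mask for any /0-/32 prefix instead of hard-coding cases.
def calcsubmask_generic(prefix):
    # build a 32-bit value with `prefix` leading ones, then split it into octets
    bits = (0xFFFFFFFF << (32 - prefix)) & 0xFFFFFFFF
    return ".".join(str((bits >> shift) & 0xFF) for shift in (24, 16, 8, 0))
# e.g. calcsubmask_generic(26) -> "255.255.255.192"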
#subnetmask = submask(int(input("here:" )))
calsubnetmask = calcsubmask((int(input("cidr: "))))
#print(f"this is {subnetmask}")
print(f"this isthe actual sm: {calsubnetmask}") |
#!/usr/bin/python
import os
import sys
import struct
def hexdump(data, length):
print "hexdump(%d):" % length,
for i in range(length):
print "%02x" % ord(data[i]),
print "\n"
def skip(data, length):
return data[length:]
def parse(data, length):
if len(data) == 0:
raise Exception("premature end in marker (expecting %d bytes, no bytes left)" % length)
if len(data) < length:
print("premature end in marker (expecting %d bytes, finding %d bytes), parsing what's available" % (length, len(data)))
return ([], data)
return (data[length:], data[0:length])
def parse8(data):
(data, value) = parse(data, 1)
value = ord(value)
return (data, value)
def parse16(data):
(data, value) = parse(data, 2)
value = struct.unpack(">H", value)[0]
return (data, value)
def parse32(data):
(data, value) = parse(data, 4)
value = struct.unpack(">I", value)[0]
return (data, value)
def parsearray(data, length):
s = ""
for i in range(length):
s = s + "%02x " % ord(data[i])
s = "[" + s[:-1] + "]"
return (data[length:], s)
def parsestring(data, length):
s = ""
for i in range(length):
v = ord(data[i])
if v < 32:
continue
s = s + chr(v)
return (data[length:], s)
def parsestartofimage(data):
print "SOI"
return data
def parseappsegment(data, marker):
app = marker - 0xffe0
print "APP%d" % app
(data, Lp) = parse16(data)
print "APP%d.Lp = %d" % (app, Lp)
data = skip(data, Lp - 2)
return data
def parsedefinequantizationtables(data):
print "DQT"
(data, Lq) = parse16(data)
print "DQT.Lq = %d" % Lq
(data, PqTq) = parse8(data)
Pq = (PqTq >> 4) & 0xf
Tq = (PqTq >> 0) & 0xf
if Pq == 0:
bits = 8
elif Pq == 1:
bits = 16
else:
raise Exception("Pq out of range (%d)" % Pq)
print "DQT.Pq = %d (%d bits)" % (Pq, bits)
print "DQT.Tq = %d" % Tq
for k in range(64):
if bits == 8:
(data, Qk) = parse8(data)
if bits == 16:
(data, Qk) = parse16(data)
#print "DQT.Q%d = %d" % (k, Qk)
return data
def parsestartofframe(data, marker):
frame = marker - 0xffc0
if frame == 0:
type = "Baseline DCT"
elif frame == 1:
type = "Extended sequential DCT"
elif frame == 2:
type = "Progressive DCT"
elif frame == 3:
type = "Lossless sequential"
print "SOF%d (%s)" % (frame, type)
(data, Lf) = parse16(data)
print "SOF%d.Lf = %d" % (frame, Lf)
(data, P) = parse8(data)
print "SOF%d.P = %d" % (frame, P)
(data, Y) = parse16(data)
print "SOF%d.Y = %d" % (frame, Y)
(data, X) = parse16(data)
print "SOF%d.X = %d" % (frame, X)
(data, Nf) = parse8(data)
print "SOF%d.Nf = %d" % (frame, Nf)
for i in range(Nf):
(data, Ci) = parse8(data)
print "SOF%d.C[%d].Ci = %d" % (frame, i, Ci)
(data, HiVi) = parse8(data)
Hi = (HiVi >> 4) & 0xf
Vi = (HiVi >> 0) & 0xf
print "SOF%d.C[%d].Hi = %d" % (frame, i, Hi)
print "SOF%d.C[%d].Vi = %d" % (frame, i, Vi)
(data, Tqi) = parse8(data)
print "SOF%d.C[%d].Tqi = %d" % (frame, i, Tqi)
return data
def parsedefinehuffmantable(data):
print "DHT"
(data, Lh) = parse16(data)
print "DHT.Lh = %d" % Lh
data = skip(data, Lh - 2)
return data
def parsestartofscan(data):
print "SOS %d" % len(data)
hexdump(data, 100)
(data, Ls) = parse16(data)
print "SOS.Ls = %d" % Ls
(data, Ns) = parse8(data)
print "SOS.Ns = %d" % Ns
for i in range(Ns):
(data, Cs) = parse8(data)
print "SOS.Cs%d = %d" % (i + 1, Cs)
(data, TdTa) = parse8(data)
Td = (TdTa >> 4) & 0xf
Ta = (TdTa >> 0) & 0xf
print "SOS.Td%d = %d" % (i + 1, Td)
print "SOS.Ta%d = %d" % (i + 1, Ta)
(data, Ss) = parse8(data)
print "SOS.Ss = %d" % Ss
(data, Se) = parse8(data)
print "SOS.Se = %d" % Se
(data, AhAl) = parse8(data)
Ah = (AhAl >> 4) & 0xf
Al = (AhAl >> 0) & 0xf
print "SOS.Ah = %d" % Ah
print "SOS.Al = %d" % Al
return data
def parsemarker(data, done):
(data, marker) = parse16(data)
if marker >= 0xffc0 and marker <= 0xffc3:
data = parsestartofframe(data, marker)
elif marker == 0xffc4:
data = parsedefinehuffmantable(data)
elif marker == 0xffd8:
data = parsestartofimage(data)
elif marker == 0xffda:
data = parsestartofscan(data)
elif marker == 0xffdb:
data = parsedefinequantizationtables(data)
elif marker >= 0xffe0 and marker <= 0xffef:
data = parseappsegment(data, marker)
else:
raise Exception("unknown marker 0x%04x with %d bytes left" % (marker, len(data)))
return (data, done)
def parsemarkers(data):
done = False
while not done:
(data, done) = parsemarker(data, done)
for path in sys.argv[1:]:
with open(path, "rb") as f:
data = f.read()
data = parsemarkers(data)
if len(data) != 0:
raise Exception("extraneous %d bytes of data at end of file" % len(data))
|
#!/usr/bin/env python
#!-*-coding:utf-8 -*-
# Time :2020/5/19 14:13
# Author : zhoudong
# File : gmphd.py
import numpy as np
from copy import deepcopy
from util.gm_component import *
myfloat = np.float32
class Gmphd:
def __init__(self, model):
"""
:param birthgmm: GMM of birth targets
:param survival: survival probability
:param detection: detection probability
:param f: state transition matrix
:param q: process noise covariance
:param h: observation matrix
:param r: observation noise covariance
:param clutter: clutter intensity (average number of clutter points / surveillance area)
"""
# self.gmm = []
# self.birthgmm = birthgmm
# self.survival = myfloat(survival) # p_{s,k}(x) in paper
# self.detection = myfloat(detection) # p_{d,k}(x) in paper
# self.f = np.array(f, dtype=myfloat) # F_k-1 in paper
# self.q = np.array(q, dtype=myfloat) # Q_k-1 in paper
# self.h = np.array(h, dtype=myfloat) # H_k in paper
# self.r = np.array(r, dtype=myfloat) # R_k in paper
# self.clutter = myfloat(clutter) # KAU in paper
self.gmm = []
self.birthgmm = model.birthgmm # GMM of birth targets
self.survival = model.P_S # p_{s,k}(x) in paper
self.detection = model.P_D # p_{d,k}(x) in paper
self.f = model.F # F_k-1 in paper
self.q = model.Q # Q_k-1 in paper
self.h = model.H # H_k in paper
self.r = model.R # R_k in paper
self.clutter = model.lambda_c * model.pdf_c # KAU in paper
self.model = model
def update(self, obs):
"""
Prediction and update step.
:param obs: observations at the current time step
:return:
"""
#############################################
# step1 - prediction for birth targets
born = [deepcopy(comp) for comp in self.birthgmm]
# # spawned-target computation (disabled)
# spawned = [GmphdComponent(
# comp.weight * self.model.w_beta_k,
# self.model.d_beta_k + np.dot(self.model.F_beta_k, comp.loc),
# self.model.Q_beta_k + np.dot(np.dot(self.model.F_beta_k, comp.cov), self.model.F_beta_k.T)
# ) for comp in self.gmm]
# spawned = GmphdComponent(
# 0.01,
# np.array([-3.5, 0, -23.5, -1.5]),
# np.diag([5, 0.01, 5, 0.01])
# )
#
# spawned = [GmphdComponent(
# spawned.weight * self.survival,
# np.dot(self.f, spawned.loc),
# self.q + np.dot(np.dot(self.f, spawned.cov), self.f.T)
# )]
##############################################
# step2 - prediction for existing targets
update = [GmphdComponent(
self.survival * comp.weight,
np.dot(self.f, comp.loc),
self.q + np.dot(np.dot(self.f, comp.cov), self.f.T)
)for comp in self.gmm]
# print("updata len is", len(update))
# print("spawned len is", len(spawned))
# print("updata len is", len(born))
predicted = born + update # birth targets + surviving targets (spawned-target computation is commented out above)
#######################################
# Step 3 - construction of PHD update components
# Use Kalman-filter equations to compute the means, weights and covariance matrices
# nu: predicted means mapped from state space into observation space
# s: innovation covariance
nu = [np.dot(self.h, comp.loc) for comp in predicted]
s = [self.r + np.dot(np.dot(self.h, comp.cov), self.h.T) for comp in predicted]
# k: Kalman gain, pkk: updated covariance
k = [np.dot(np.dot(comp.cov, self.h.T), np.linalg.inv(s[index]))
for index, comp in enumerate(predicted)]
pkk = [np.dot(np.eye(len(k[index])) - np.dot(k[index], self.h), comp.cov)
for index, comp in enumerate(predicted)]
#######################################
# Step 4 - update using observations
# The 'predicted' components are kept, with a decay
newgmm = [GmphdComponent( comp.weight * (1.0 - self.detection),comp.loc, comp.cov)
for comp in predicted] # missed-detection targets
# update the predicted components using the observations obs
for anobs in obs:
anobs = np.array(anobs).reshape((2, 1))
newgmmpartial = []
for j, comp in enumerate(predicted):
newgmmpartial.append(GmphdComponent(
self.detection * comp.weight * dmvnorm(nu[j], s[j], anobs),
comp.loc + np.dot(k[j], anobs - nu[j]),
pkk[j]
))
# The Kappa thing (clutter and reweight)
weightsum = sum(newcomp.weight for newcomp in newgmmpartial)
reweighter = 1.0 / (self.clutter + weightsum)
for newcomp in newgmmpartial:
newcomp.weight *= reweighter
newgmm.extend(newgmmpartial)
self.gmm = newgmm
def prune(self, truncthresh=1e-6, mergethresh=0.01, maxcomponents=100):
"""
:param truncthresh: weight truncation threshold
:param mergethresh: merging threshold
:param maxcomponents: maximum number of Gaussian components
:return:
"""
# Truncation is easy
weightsums = [sum(comp.weight for comp in self.gmm)]
sourcegmm = list(filter(lambda comp : comp.weight > truncthresh, self.gmm))
weightsums.append(sum(comp.weight for comp in sourcegmm))
origlen = len(self.gmm)
trunclen = len(sourcegmm)
# build the new (merged) Gaussian components
newgmm = []
while len(sourcegmm) > 0:
# find the component with the largest weight
windex = np.argmax([comp.weight for comp in sourcegmm])
weightiest = sourcegmm[windex] # component with the largest weight
sourcegmm = sourcegmm[:windex] + sourcegmm[windex+1:]
# merge Gaussian components whose distributions are close together
distance = [float(np.dot(np.dot((comp.loc-weightiest.loc).T, np.linalg.inv(comp.cov)),
comp.loc-weightiest.loc)) for comp in sourcegmm]
dosubsume = np.array([dist <= mergethresh for dist in distance]) # merge those below the preset threshold
subsumed = [weightiest]
# split the components to be merged out of the overall GMM
if np.any(dosubsume):
subsumed.extend(list(np.array(sourcegmm)[dosubsume]))
sourcegmm = list(np.array(sourcegmm)[~dosubsume])
# Compute the merged Gaussian component:
# w = w1 + w2 + ...             the new weight is the sum of the weights
# m = (w1*x1 + w2*x2 + ...) / w  the new mean is the weighted mean
# p = (w1*(p1 + (m - x1)(m - x1).T) + ...) / w  the new covariance (m is the merged mean)
aggweight = sum(comp.weight for comp in subsumed) # total weight
newloc = sum(comp.weight * comp.loc for comp in subsumed) / aggweight
newcomp = GmphdComponent(aggweight,
newloc,
sum(comp.weight * (comp.cov + (newloc - comp.loc) *
(newloc - comp.loc).T) for comp in subsumed ) / aggweight)
newgmm.append(newcomp)
# sort by weight in descending order and keep the first maxcomponents components
newgmm = sorted(newgmm, key=lambda comp:comp.weight)
newgmm.reverse()
self.gmm = newgmm[:maxcomponents]
weightsums.append(sum(comp.weight for comp in newgmm))
weightsums.append(sum(comp.weight for comp in self.gmm))
print("prune(): %d -> %d -> %d -> %d" % (origlen, trunclen, len(newgmm), len(self.gmm)))
print("prune(): weightsums %g -> %g -> %g -> %g" % (weightsums[0], weightsums[1], weightsums[2], weightsums[3]))
# pruning must not reduce the total weight, so renormalize
weightnorm = weightsums[0] / weightsums[3]
for comp in self.gmm:
comp.weight *= weightnorm
def extractstatesusingintegral(self, bias = 1.0):
"""
Extract the states of multiple targets and return them.
First compute how many targets there are in total (numtoadd),
then repeatedly find the component with the largest weight, store its loc
in the state list, subtract 1 from that component's weight,
and decrement the remaining count.
:param bias: bias factor (not fully understood in the original notes)
:return:
"""
numtoadd = int(round(float(bias) * sum(comp.weight for comp in self.gmm))) # bias * total weight
print("bias is %g, numtoadd is %i" % (bias, numtoadd))
items = []
peaks = [{'loc':comp.loc, 'weight' : comp.weight} for comp in self.gmm]
while numtoadd > 0:
windex = 0
wsize = 0
for which, peak in enumerate(peaks):
if peak['weight'] > wsize:
windex = which
wsize = peak['weight']
# add the winner
items.append(deepcopy(peaks[windex]['loc']))
peaks[windex]['weight'] -= 1.0
numtoadd -= 1
return items
def extractstates(self, bias=1.0):
"""
Select the states of components whose weight is greater than 0.5.
:param bias:
:return:
"""
items = []
print("weights:")
print([np.round(comp.weight, 7)
for comp in self.gmm])
for comp in self.gmm:
val = comp.weight * float(bias)
if val > 0.5:
for _ in range(int(np.round(val))):
items.append(deepcopy(comp.loc))
return items |
#sorting algorithm (bubble sort, the horon-dancing video)
#a = [200, 3, 511, 72, 11, 13, 17, 19, 23, 249, 31, 37, 41, 43, 47]
#for j in range(len(a)-1):
# for i in range(len(a)-1):
# if(a[i]>a[i+1]):
# a[i], a[i+1] = a[i+1], a[i]
#print(a)
#after the first pass the largest value always settles at the far right
#so the last elements only need to be compared in the earlier passes
#we need to simplify this expression
#try it! (bubble sort)
a = [200, 3, 511, 72, 11, 13, 17, 19, 23, 249, 31, 37, 41, 43, 47]
sure = len(a)-1
degistimi = 1
while(sure > 0 and degistimi==1):
degistimi = 0
for i in range(sure):
if(a[i]>a[i+1]):
a[i], a[i+1] = a[i+1], a[i]
degistimi = 1
sure = sure - 1
print(a)
|
#!/usr/bin/env python3
'''Sends mail to address specified in config'''
import ssl
import smtplib
import email.utils
import email.message
import cfg
ssl_ctx = ssl.create_default_context()
server = smtplib.SMTP(cfg.smtpServer, cfg.smtpServerPort)
if cfg.smtpEncryption == 'ssl' or cfg.smtpEncryption == 'tls':
server.starttls(context=ssl_ctx)
if cfg.smtpUser:
server.login(cfg.smtpUser, cfg.smtpPass if cfg.smtpPass is not None else '')
def send_mail(subject, body, in_reply_to=None):
msg_id = email.utils.make_msgid()
msg = email.message.EmailMessage()
msg['Subject'] = subject
msg['From'] = cfg.BOT_NAME + ' <{0}>'.format(cfg.BOT_MAIL_ADDRESS)
msg['To'] = cfg.MAILING_LIST_ADDRESS
if in_reply_to is not None:
msg['In-Reply-To'] = in_reply_to
msg['Message-ID'] = msg_id
if not cfg.BIDIRECTIONAL_COMM:
body = 'This message was generated from a Gitlab event, do not reply!\n\n' + body
msg.set_content(body)
server.sendmail(cfg.BOT_MAIL_ADDRESS, cfg.MAILING_LIST_ADDRESS, msg.as_string())
return msg_id
|
from rest_framework.decorators import api_view, permission_classes
from django.shortcuts import get_object_or_404
from django.utils import timezone
from rest_framework.response import Response
from .serializers import ItemSerializer, OrderSerializer, OrderItemSerializer
from .models import Item, Order, OrderItem
from rest_framework.generics import CreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework import permissions
@api_view(['GET'])
def item_list(request):
items = Item.objects.all()
serializer = ItemSerializer(items,many=True)
return Response(serializer.data)
@api_view(['GET'])
def item_detail(request,item_id):
item = Item.objects.get(pk=item_id)
serializer = ItemSerializer(item,many=False)
return Response(serializer.data)
@api_view(['POST'])
@permission_classes([permissions.IsAuthenticated])
def add_to_cart(request, item_id):
# get item to add
item = get_object_or_404(Item, pk=item_id)
# create the order item, then add it to the order
print(item.id)
order_item, create = OrderItem.objects.get_or_create(item=item, user=request.user, ordered=False)
# check if user has active order
order_query_set = Order.objects.filter(user=request.user, ordered=False)
if order_query_set.exists():
order = order_query_set[0]
if order.items.filter(item__pk=item.pk).exists():
order_item.quantity += 1
order_item.save()
return Response({"message": "quantity successfully updated"})
else:
order.items.add(order_item)
return Response({"message": "item successfully added"})
else:
order = Order.objects.create(user=request.user, order_date=timezone.now())
order.items.add(order_item)
return Response({"message": "quantity successfully updated"})
@api_view(['DELETE'])
@permission_classes([permissions.IsAuthenticated])
def remove_from_cart(request, item_id):
# get item to add
item = get_object_or_404(Item, pk=item_id)
# get users order
order_query_set = Order.objects.filter(user=request.user, ordered=False)
# check if order exists
if order_query_set.exists():
order = order_query_set[0]
# check if item exists in the order
if order.items.filter(item__pk=item_id).exists():
order_item = OrderItem.objects.filter(item=item, user=request.user, ordered=False)[0]
# remove item
order.items.remove(order_item)
return Response({"Message":"the item was removed from the order"})
else:
return Response({"Message":"the item was not in the order"})
else:
return Response({"Message":"No active order"})
@api_view(['GET'])
@permission_classes([permissions.IsAuthenticated])
def get_cart_items(request):
order = Order.objects.get(user=request.user, ordered=False)
items = order.items.all()
serializer = OrderItemSerializer(items, many=True)
return Response(serializer.data)
|
#!/usr/bin/python
#Client application - Version 7.0
import sys
import socket
import os
import time
#from rsvpclient import Rsvpclient
#host = sys.argv[1] #Remote server address obtained from the CLI
host = '10.0.0.8' #Remote server address obtained from the CLI
#port = int(sys.argv[2]) #Remote server port obtained from the CLI
port = 23000 #Remote server port obtained from the CLI
#filetest = 'test.mov' #Test file
qos = False #Initially, the service class is not available
'''
output=open('resultados-sessao3-rodada3.txt','ab')
outputo=open('overhead-sessao3-rodada3.txt','ab')
'''
#Start of the connection
sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sock.connect((host,port))
#Open the test file
'''
if os.path.isfile(filetest):
os.remove(filetest)
filet = open(filetest,'w+b')
'''
cond = 2
while cond != 0:
#Send the application type
#if not qos:
#msg = raw_input('Informe o conteudo: ')
if cond == 2:
porta_servico = '25000'
#msg = 'video/25000'
msg = 'video'
msg = msg +'/'+porta_servico
else:
#Before sending the FIN msg, open and wait for vlc
#os.system('vlc -I rc --rc-host 10.0.0.1:'+porta_servico+' udp://@10.0.0.1:'+porta_servico+' &')
#print("Recebendo Video!!!")
time.sleep(210) #Wait for the video duration, 2:20 or 140
#End of the vlc wait
msg = 'FIN'
#begin=time.time()
sock.sendall(msg)
#Server response
data = sock.recv(4096)
msg_rec = str(data)
print(msg_rec)
cond = cond-1
if msg_rec == 'FIN':
#cond = 0
pass
#Close the connection
print 'Encerrando conexao'
sock.close()
|
import json
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
import numpy as np
import pandas as pd
plt.rcParams['font.sans-serif'] = ['SimHei']
bits = 2
ax1 = plt.subplot(3, 1, 1)
plt.xlabel("点赞")
plt.ylabel("概率")
ax2 = plt.subplot(3, 1, 2, sharey=ax1)
plt.xlabel("评论")
plt.ylabel("概率")
ax3 = plt.subplot(3, 1, 3, sharey=ax1)
plt.xlabel("转发")
plt.ylabel("概率")
def paint(name):
with open(name + "_tweets.json", 'rb') as load_f:
load_dict = json.load(load_f)
likes_dict = {}
comments_dict = {}
reposts_dict = {}
likes_list = []
comments_list = []
reposts_list = []
i = 0
for d in load_dict:
i = i + 1
like_num = d['like_num']
comment_num = d['comment_num']
repost_num = d['repost_num']
likes_list.append(like_num)
comments_list.append(comment_num)
reposts_list.append(repost_num)
x = round(like_num / 10000, bits)
if x in likes_dict.keys():
likes_dict[x] = likes_dict[x] + 1
else:
likes_dict[x] = 1
x = round(comment_num / 1000, bits)
if x in comments_dict.keys():
comments_dict[x] = comments_dict[x] + 1
else:
comments_dict[x] = 1
x = round(repost_num / 1000, bits)
if x in reposts_dict.keys():
reposts_dict[x] = reposts_dict[x] + 1
else:
reposts_dict[x] = 1
likes = sorted(likes_dict.items(), key=lambda p: p[0])
comments = sorted(comments_dict.items(), key=lambda p: p[0])
reposts = sorted(reposts_dict.items(), key=lambda p: p[0])
x1 = []
y1 = []
for d in likes:
x1.append(d[0])
y1.append(float(d[1]) / (i * pow(0.1, bits)))
x2 = []
y2 = []
for d in comments:
x2.append(d[0])
y2.append(float(d[1]) / (i * pow(0.1, bits)))
x3 = []
y3 = []
for d in reposts:
x3.append(d[0])
y3.append(float(d[1]) / (i * pow(0.1, bits)))
if name == 'ysxw':
c = "red"
l = "央视新闻"
elif name == 'rmrb':
c = "blue"
l = "人民日报"
elif name == 'ttxw':
c = "green"
l = "头条新闻"
elif name == 'xhsd':
c = "yellow"
l = "新华视点"
ax1.plot(x1, y1, color=c, label=l)
ax2.plot(x2, y2, color=c, label=l)
ax3.plot(x3, y3, color=c, label=l)
n1 = np.array(likes_list)
n2 = np.array(comments_list)
n3 = np.array(reposts_list)
chart = np.array([n1, n2, n3])
chart_pd = pd.DataFrame(chart.T, columns=['点赞', '评论', '转发'])
chart_corr = chart_pd.corr(method='pearson') # correlation-coefficient matrix
print(chart_corr)
if __name__ == '__main__':
paint('ysxw')
paint('rmrb')
paint('xhsd')
paint('ttxw')
ax1.legend(loc='upper right')
ax2.legend(loc='upper right')
ax3.legend(loc='upper right')
plt.show()
|
import unittest
from safe_cli.api.gnosis_transaction import TransactionService
class TestTransactionService(unittest.TestCase):
def setUp(self) -> None:
self.transaction_service = TransactionService.from_network_number(4) # Rinkeby
self.safe_address = '0x7552Ed65a45E27740a15B8D5415E90d8ca64C109'
def test_get_balances(self):
balances = self.transaction_service.get_balances(self.safe_address)
self.assertIsInstance(balances, list)
def test_get_transactions(self):
transactions = self.transaction_service.get_transactions(self.safe_address)
self.assertIsInstance(transactions, list)
if __name__ == '__main__':
unittest.main()
|
import scrapy
from yellowpages.items import YellowpagesItem
from scrapy_selenium import SeleniumRequest
class YellowSpider(scrapy.Spider):
name = 'yellow'
allowed_domains = ['yellowpagesofafrica.com']
start_urls = ['https://www.yellowpagesofafrica.com/']
def start_requests(self):
for url in self.start_urls:
yield SeleniumRequest(url=url, callback=self.parse)
def parse(self, response):
urls = response.xpath('//div[@class="col-sm-12"]//div[@class="col-sm-4 col-xs-6 col-md-4 col-lg-3"]/a/@href').extract()
for url in urls:
yield SeleniumRequest(
url = response.urljoin(url), callback = self.parse_country_page
)
def parse_country_page(self, response):
urls = response.xpath('//div[@class="col-sm-12 col-lg-12 ct-u-marginBottom40"]//div[@class="row"]/a/@href').extract()
for url in urls:
yield SeleniumRequest(
url = response.urljoin(url), callback = self.parse_companies_detail
)
def parse_companies_detail(self, response):
country,industry = response.url.split('/')[-3:-1]
names = response.xpath('//div[@class="col-sm-6 col-md-6 col-lg-4"]//div[@class="ct-product--tilte"]/text()').extract()
names = [i.strip() for i in names]
websites = response.xpath('//div[@class="row ct-js-search-results ct-showProducts--list ct-u-marginTop10"]//div[@class="col-sm-6 col-md-6 col-lg-4"]//div[@class="ct-product--description"]')
for i,j in enumerate(websites):
w = j.xpath('a/@href').extract()
websites[i]= w if w else ""
mails, numbers = [], []
for i in response.xpath('//div[@class="row ct-js-search-results ct-showPproduct--description"]//div[@class="buttonShowCo"]'):
id_ = i.xpath('@onclick').get().split("'")[-2]
m = response.xpath(f'//*[@id="{id_}"]/a/@href').extract()
mails.append(m if m else "")
p = response.xpath(f'//*[@id="{id_}"]/text()').extract_first()
numbers.append(p if p else "")
for name, website, mail, number in zip(names, websites, mails, numbers):
item = YellowpagesItem()
item['name'] = name
item['website'] = website
item['mail'] = mail
item['number'] = number
item['country'] = country.title()
item['industry'] = industry.title()
print ('**parse_companies_detail:', item["name"], item["website"])
yield item
nxt = response.xpath('//div[@class="ct-pagination text-center"]/ul/li/a/@href').extract()
if nxt:
nxt = nxt[-1]
if nxt!='#':
yield SeleniumRequest(
url = response.urljoin(nxt), callback = self.parse_companies_detail
)
|
from django.shortcuts import render
from .models import *
from rest_framework import viewsets,permissions
from .serializers import *
from rest_framework.pagination import LimitOffsetPagination,PageNumberPagination
from .pagination import PostPageNumberPagination
from rest_framework.filters import SearchFilter,OrderingFilter
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import generics
from rest_framework.views import APIView
from rest_framework.response import Response
# list all products by category
class PolotencaViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
permission_classes = [permissions.AllowAny, ]
queryset = Polotenca.objects.all()
serializer_class = PolotencaSerializer
filter_backends = [DjangoFilterBackend,OrderingFilter,SearchFilter]
filter_fields = ['slug','brend','consist','size','type','filler_weight','top']
pagination_class = PostPageNumberPagination#PageNumberPagination #LimitOffsetPagination
class SearchAPIView(generics.ListCreateAPIView):
permission_classes = [permissions.AllowAny, ]
search_fields = ['brend__name','tkan__name','name']
filter_backends = (SearchFilter,)
queryset = Polotenca.objects.all()
serializer_class = PolotencaSerializer
class PolotencaTcanViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.AllowAny, ]
queryset = Tkan.objects.all()
serializer_class = TkanSerializer
# filter_fields = ('slug',)
class PolotencaBrendViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.AllowAny, ]
queryset = Brend.objects.all()
serializer_class = BrendSerializer
class PolotencaSizeViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.AllowAny, ]
queryset = Size.objects.all()
serializer_class = SizeSerializer
class PolotencaTypeViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.AllowAny, ]
queryset = Type.objects.all()
serializer_class = SizeSerializer
class PolotencaFillerWeightViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.AllowAny, ]
queryset = FillerWeight.objects.all()
serializer_class = SizeSerializer
class GetPolotencaImageViewSet(viewsets.ModelViewSet):
permission_classes = [permissions.AllowAny, ]
queryset = PolotencaImage.objects.all()
serializer_class = PolotencaImageSerializer
filter_fields = ('product',)
|
class Rectangle:
def __init__(self,length, breadth):
self.length = length
self.breadth = breadth
def area(self):
return self.length * self.breadth
@property
def getData(self):
return (self.length, self.breadth)
r1 = Rectangle(10, 20)
print(f"Area = {r1.area()}")
print(f"Length, Breadth = {r1.getData}")
|
import json
import re
import discord
def saveFile(Settings : dict, filename : str):
settings_file = open(filename, "w")
settings_file.write(json.dumps(Settings, ensure_ascii=False))
settings_file.close()
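# discord_trim: split a long string into chunks of at most 1999 characters (Discord caps messages at 2000).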
def discord_trim(str):
result = []
trimLen = 0
lastLen = 0
while trimLen <= len(str):
trimLen += 1999
result.append(str[lastLen:trimLen])
lastLen += 1999
return result
def fix_mentions(bot, string):
mentions = re.findall('<[@!]+[0-9]+>', string)
for mention in mentions:
string = string.replace(mention, "@\u200b" + str(
discord.utils.get(bot.get_all_members(), id=re.sub('[<>@!]', '', mention))))
return string.replace('@', '@\u200b') |
from django.conf.urls import url, include
import views
urlpatterns = [
url(r'^index/', views.index),
url(r'^article_page/(?P<article_id>[0-9]+)', views.article_page, name='article_page'),
url(r'^edit_page/(?P<article_id>[0-9]+)', views.edit_page, name='edit_page'),
url(r'^edit_action/', views.edit_action, name='edit_action'),
] |
#!/usr/bin/env python
# coding: utf-8
# Load libraries
from pandas import DataFrame
from datetime import datetime
import os
import cv2
import pyzbar
from pyzbar.pyzbar import decode
from pyzbar.pyzbar import ZBarSymbol
import winsound as ws
# Beep sound function
def beepsound():
freq = 1000 # range : 37 ~ 32767
dur = 200 # ms
ws.Beep(freq, dur) # winsound.Beep(frequency, duration)
# Get today's date
today = datetime.today().strftime('%Y%m%d')
# Get the PC number from the user
pc_num= input('PC 번호를 입력하세요 : ')
# Build the file name
file_name = today + '_' + 'PC' + '_' + pc_num
# If a file with the same name already exists, adjust the file name
if os.path.exists('./'+ file_name +'.xlsx') == True:
print('\n오늘 자 파일이 이미 존재합니다')
num = input('몇 번째 파일입니까? : ')
file_name = today + '_' + 'PC' + '_' + pc_num + '_' + '(' + num + ')'
# Create a DataFrame for member information
df = DataFrame(columns = ['교회', '구역', '이름','소속','연락처','체온','문진사항','방문시간'])
count = 1
brethren = []
# Scan QR codes and record member information
capture = cv2.VideoCapture(1)
while True:
_, frame = capture.read()
cv2.imshow('QR Code Scanner', frame)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
decoded_data = decode(gray, symbols=[ZBarSymbol.QRCODE])
try:
data = decoded_data[0][0]
# Decode the scanned data as UTF-8
data = data.decode('utf-8')
# Play the beep sound
beepsound()
print("\nQR코드가 스캔되었습니다.")
print(data)
# Record member information
# QR code data format: "church/district/name/affiliation/contact/"
brethren = data
brethren = list(brethren.split('/'))
# Enter body temperature and questionnaire answer
brethren[5] = input('체온을 입력해주세요: ')
brethren.append(input('문진사항을 입력해주세요(o/x): '))
if brethren[6] == '': # plain Enter is treated as 'x'
brethren[6] = 'x'
# Record the current time as the visit time
brethren.append(datetime.today().strftime("%Y/%m/%d %H:%M"))
# Append to the member DataFrame and save to Excel
df.loc[count] = brethren
df.to_excel(file_name+ '.xlsx')
count = count + 1
# Quit, or delete the previous record
opt = input("프로그램 종료: q / 이전 기록 삭제: d / 다음 단계로 이동: Enter: ")
if opt == 'q':
break
elif opt == 'd':
df.loc[count-1] = ['','','','','','','','']
df.to_excel(file_name + '.xlsx')
count = count - 1
else:
pass
except:
pass
# Stop scanning when 'q' is pressed
key = cv2.waitKey(1)
if key == ord('q'):
break
|
from datetime import datetime
import re
import csv
import click
def echo(message, quiet):
"""
Print the given message to standard out via click
unless quiet is True.
:param message: the message to print out
:param quiet: don't print the message when this is True
"""
if not quiet:
click.echo(message)
def isperiod(t_str):
"""
Return true if t_str only includes digit and ':',
false otherwise.
:param t_str: the string to be tested
"""
if t_str:
l = t_str.split(':')
if len(l) > 1 and ''.join(l).isdigit():
return True
return False
else:
return False
def t2i(t_str):
"""
Return an integer that represents a time interval in minutes.
:param t_str: the string to be converted
"""
hms = t_str.split(':')
decade = int(hms[0]) if hms[0].isdigit() else 0
unit =int(hms[1]) if hms[1].isdigit() else 0
i_time = decade * 60 + unit
return i_time
def time_interval(start, end, fmt_s):
"""
Return an integer that represents the time interval between 'start'
and 'end' in minutes.
:param start: the start time string
:param end: the end time string
:param fmt_s: the datetime format string
"""
try:
interval = datetime.strptime(end, fmt_s) - datetime.strptime(start, fmt_s)
except ValueError:
return 0
return int(interval.total_seconds()/60)
def str_to_esfield(raw_str):
"""
Return a string that meets the field name requirements in ES.
:param raw_str: the string to be converted
"""
def f(c):
"""
Remove all characters except alphabetic, space, hyphen
and underscore
"""
if c.isalpha() or c in [' ', '-', '_']:
return c
else:
return ''
new_str = raw_str.strip()
new_str = ''.join(map(f, new_str))
# use one '_' to join individual words.
new_str = '_'.join(re.findall('[A-Z][A-Z]+|[a-zA-Z][a-z]*', new_str))
return new_str.lower()
def get_fieldnames(file_obj):
"""
Return a list that includes all field names.
:param file_obj: a file object to be read.
"""
reader_obj = csv.reader(file_obj)
# delimited file should include the field names as the first row
fields = [str_to_esfield(item) for item in next(reader_obj)]
return fields
def index_op(doc, meta):
"""
Return a document-indexing operation that can be passed to
'bulk' method.
:arg doc: A dict mapping of fields
:arg meta: A dict mapping of underscore-prefixed fields with special
meaning to ES, like ``_id`` and ``_type``
"""
def underscore_keys(d):
"""Return a dict with every key prefixed by an underscore."""
return dict(('_%s' % k, v) for k, v in d.items())
action = underscore_keys(meta)
action['_source'] = doc
return action
def index_body(doc_type, mapping=None, setting=None):
"""
Return a body dict that includes index settings and mappings.
:param doc_type: document type
:param mapping: a dict of field mappings
:param setting: a dict of index settings
"""
body = {}
if setting is not None:
body['settings'] = setting
if mapping is not None:
d = {}
d[doc_type] = mapping
body['mappings'] = d
return body
|
## Automatically adapted for numpy.oldnumeric Jul 30, 2007 by
import Tkinter
from opengltk.OpenGL import GL
import unittest
import os
import numpy.oldnumeric as Numeric
class OGLTkWidget(Tkinter.Widget, Tkinter.Misc):
def __init__(self, master, cnf={}, expand=1, **kw):
if not kw.has_key('width'): kw['width']=150
if not kw.has_key('height'): kw['height']=150
if not kw.has_key('double'): kw['double']=1
from opengltk.OpenGL import Tk
from os import path
ToglPath = path.dirname(path.abspath(Tk.__file__))
# get TCL interpreter auto_path variable
tclpath = master.tk.globalgetvar('auto_path')
# if ToglPath is not already in there, add it
from string import split
if ToglPath not in tclpath:
tclpath = (ToglPath,) + tclpath
master.tk.globalsetvar('auto_path', tclpath )
#load Togl extension into TCL interpreter
master.tk.call('package', 'require', 'Togl', '1.7')
# create an Tk-OpenGL widget
Tkinter.Widget.__init__(self, master, 'togl', cnf, kw)
self.bind('<Expose>', self.tkExpose)
self.bind('<Enter>', self.Enter_cb)
self.bind('<Configure>', self.Configure)
self.pack(side='left')
def initProjection(self):
GL.glMatrixMode (GL.GL_PROJECTION)
GL.glLoadIdentity ()
GL.glOrtho(-10., 10., -10., 10., -10., 10.)
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glLoadIdentity()
GL.glTranslatef(0, 0, 10.0)
def tkExpose(self, *dummy):
self.tk.call(self._w, 'makecurrent')
self.initProjection()
self.tkRedraw()
def Activate(self):
self.tk.call(self._w, 'makecurrent')
def Enter_cb(self, event):
"""Call back function trigger when the mouse enters the camera"""
self.tk.call(self._w, 'makecurrent')
def Configure(self, *dummy):
"""Cause the opengl widget to redraw itself."""
#print 'Configure 0'
width = self.winfo_width()
height = self.winfo_height()
GL.glViewport(0, 0, width, height)
def tkRedraw(self, *dummy):
#if not self.winfo_ismapped(): return
self.update_idletasks()
self.tk.call(self._w, 'makecurrent')
self.initProjection()
GL.glPushMatrix()
self.redraw()
GL.glFlush()
GL.glPopMatrix()
self.tk.call(self._w, 'swapbuffers')
def setupLightModel(self):
# this method has to be called explicitly by the derived classes if
# a default lighting model is wanted
GL.glLight(GL.GL_LIGHT0, GL.GL_AMBIENT, [.5, .5, .5, 1.0])
GL.glLight(GL.GL_LIGHT0, GL.GL_DIFFUSE, [.5, .5, .5, 1.0])
GL.glLight(GL.GL_LIGHT0, GL.GL_SPECULAR, [.5, .5, .5, 1.0])
GL.glLight(GL.GL_LIGHT0, GL.GL_POSITION, [1.0, 1.0, 1.0, 0.0]);
GL.glLight(GL.GL_LIGHT1, GL.GL_AMBIENT, [.5, .5, .5, 1.0])
GL.glLight(GL.GL_LIGHT1, GL.GL_DIFFUSE, [.5, .5, .5, 1.0])
GL.glLight(GL.GL_LIGHT1, GL.GL_SPECULAR, [.5, .5, .5, 1.0])
GL.glLight(GL.GL_LIGHT1, GL.GL_POSITION, [-1.0, 1.0, 1.0, 0.0]);
GL.glLightModel(GL.GL_LIGHT_MODEL_AMBIENT, [0.2, 0.2, 0.2, 1.0])
GL.glEnable(GL.GL_LIGHTING)
GL.glEnable(GL.GL_LIGHT0)
GL.glEnable(GL.GL_LIGHT1)
def redraw(self):
GL.glColor3f( 0., 0., 0. )
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
class TestTogl(unittest.TestCase):
def test_0030( self):
# creates an OpenGL context inside a togl widget
# can be used to test any opengl function
import Tkinter
root = Tkinter.Tk()
vi = OGLTkWidget(root)
root.after(500, root.quit )
root.mainloop()
root.destroy()
def test_0031(self ):
# example we test glMultMatrixf
import Tkinter
root = Tkinter.Tk()
vi = OGLTkWidget(root)
id = Numeric.array([1.,0.,0.,0.,
0.,1.,0.,0.,
0.,0.,1.,0.,
0.,0.,0.,1.], "d")
from opengltk.extent import _gllib as gllib
#GL.glMultMatrixf(id)
try:
gllib.cvar.checkArgumentsInCWrapper = 0
GL.glMultMatrixf(id)
# calling with bad argument
gllib.cvar.checkArgumentsInCWrapper = 1
#import numpy.oldnumeric as Numeric
id = Numeric.identity(4).astype('d')
try:
GL.glMultMatrixf(id)
raise RuntimeError('failed to catch type error in wrapper')
except TypeError:
print 'TypeError caught successfully in wrapper'
except ImportError:
pass
root.after(1000, root.quit )
root.mainloop()
root.destroy()
def test_0032(self ):
import Tkinter
root = Tkinter.Tk()
vi = OGLTkWidget(root)
GL.glBegin( GL.GL_TRIANGLES)
GL.glColor3f( 1.0, 0.0, 0.0)
GL.glVertex2f( 5.0, 5.0)
GL.glColor3f( 0.0, 1.0, 0.0)
GL.glVertex2f( 25.0, 5.0)
GL.glColor3f( 0.0, 0.0, 1.0)
GL.glVertex2f( 5.0, 25.0)
GL.glEnd()
root.after(500, root.quit )
root.mainloop()
root.destroy()
if __name__ == '__main__':
test_cases = ['TestTogl']
unittest.main(argv=([__name__, '-v'])+test_cases )
|
#counties = ["Arapahoe","Denver","Jefferson"]
#if counties[1] == 'Denver':
#print(counties[1])
#temperature = int(input("What is the temperature outside? "))
#if temperature > 80:
# print("Turn on the AC.")
#else:
# print("Open the windows.")
#What is the score?
#score = int(input("What is your test score? "))
# Determine the grade.
#if score >= 90:
# print('Your grade is an A.')
#elif score >= 80:
#print('Your grade is a B.')
#elif score >= 70:
#print('Your grade is a C.')
#elif score >= 60:
#print('Your grade is a D.')
#else:
#print('Your grade is an F.')
#if "El Paso" in counties:
# print("El Paso is in the list of counties.")
#else:
#print("El Paso is not the list of counties.")
#candidate_votes = int(input("How many votes did the candidate get in the election? "))
#total_votes = int(input("What is the total number of votes in the election? "))
#message_to_candidate = (
# f"You received {candidate_votes} number of votes. "
# f"The total number of votes in the election was {total_votes}. "
# f"You received {candidate_votes / total_votes * 100}% of the total votes.")
#print(message_to_candidate)
#message_to_candidate = (
# f"You received {candidate_votes:,} number of votes. "
# f"The total number of votes in the election was {total_votes:,}. "
# f"You received {candidate_votes / total_votes * 100:.2f}% of the total votes.")
#Below is how I would find the path to a file
#Resources/election_analysis.csv
# WORKING WITH DEPENDENCIES
#Import the datetime class from the datetime module
#import datetime as dt
#Use the now() attribute on the datetime class to get the current time
#now = dt.datetime.now()
#Print the present time.
#print("The time right now is, ", now)
# NOW, LET'S LOOK AT HOW TO USE THE CSV MODULE
#import datetime (this is a module)
#print(dir(datetime))
#import csv
#print (dir(csv))
#IMPORTANT DATA TYPES AND STRUCTURES
#INT, FLOAT, BOOL, LIST, TUPLE, DICT, DATETIME
# IMPORTANT MODULES
#CSV, RANDOM, NUMPY
file_to_load = 'Resources/election_results.csv'
# Open the election results and READ the file.
#election_data = open(file_to_load, 'r')
# To do: perform analysis.
# Close the file.
#election_data.close()
#if I want a more efficient set of code, I can use a "with" statement
# Open the election results and read the file
with open(file_to_load) as election_data:
# To do: perform analysis.
print(election_data)
|
# coding=utf-8
try:
# py3
from urllib.request import Request, urlopen, URLError, HTTPError
#from urllib.parse import urlencode
except ImportError:
# py2
from urllib2 import Request, urlopen, URLError, HTTPError
#from urllib import urlencode
import re
import sys
def dataFromUrl(url, waittime):
import contextlib
try:
req = Request( url )
with contextlib.closing( urlopen(req,timeout=waittime) ) as resp: # HTTP Error 404: Not Found
rslt = resp.read()
'''
except: HTTPError, e:
print 'The server couldn\'t fulfill the request. Error code: , e.code
'''
except URLError as e:
sys.stdout.write( 'except while access url:' + url + '\r\n' )
if hasattr(e,"code"): # HTTP Error 404: Not Found
sys.stdout.write( "The server couldn't fulfill the request. Error code:" + str(e.code) + '\r\n' ) # + " Return content:" + e.read()
if e.code==404:
return 'url not found'
#elif hasattr(e,"reason"): # Errno 10054:连接被远端重置 Errno 110] Connection timed out 10060:timeout
sys.stdout.write( "Failed to reach the server. The reason:" + str(e.reason) + '\r\n' )
return ''
except IOError as e:
sys.stdout.write( 'except while access url:' + url + 'IOError: ' + str(e) + '\r\n' )
return ''
return rslt
'''
import pycurl
import StringIO
crl = pycurl.Curl()
crl.setopt(pycurl.VERBOSE,1)
crl.setopt(pycurl.FOLLOWLOCATION, 0)
crl.setopt(pycurl.MAXREDIRS, 5)
crl.fp = StringIO.StringIO()
crl.setopt(pycurl.URL, url)
crl.setopt(crl.WRITEFUNCTION, crl.fp.write)
crl.perform()
return crl.fp.getvalue()
'''
def regDataFromUrl(url, waittime, formatStr):
res = dataFromUrl(url, waittime)
return re.findall( formatStr, res )
lines=str(resp.read().strip()).split('\n')
r=[]
# 港股查询结果格式不同
for ln in lines:
data=ln.split('"')[1].split(",")
r.append({})
i=0
for k in itemLst:
r[-1][k]=data[i]
i=i+1
return r
#return str(resp.read().decode('utf-8').strip())
|
from py2neo import Graph
graph = Graph()
|
s = '\xe5\x86\x96\xe7\x8e\x8b\xe5\xa4\xa7\xe4\xbb\xa4\xe6\x9e\xad\xe4\xba\xba\xe6\x9b\xb0\xe6\x9a\x82\xe5\x86\x96\xe7\x94\xb0\xe5\x85\xb6\xe5\x8f\x97\xe5\xb9\xb4\xe5\x86\x96\xe5\x8d\x81\xe4\xb8\x80'
sss = s.encode('raw_unicode_escape').decode()
print(sss)
""" import base64
import cv2
import numpy as np
import PIL.Image
import matplotlib.pyplot
import pdb
filepath = "D:/Users/84460/Desktop/Oracle_Split/outpic/01/a/"
filename="m_01_0003_a_6_6.jpg"
image = cv2.imread(filepath + filename)
print(image)
f = open(image, 'rb') # 二进制方式打开图文件
# 参数image:图像base64编码
img = base64.b64encode(f.read())
print(f.read())
#print(image.tobytes())
x=image.tobytes()
im = cv2.imdecode(np.fromstring(x, np.uint8),1)
print(im) """ |
import numpy as np
import cv2
import matplotlib.pyplot as plt
import os
import sys
import math
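# skinToneData: build a 180x256 hue/saturation frequency histogram from a training image, normalized to 0-255.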
def skinToneData(img):
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
height, width, channel = hsv.shape
freq = np.zeros((180, 256)) # Hue: 0 - 179 | Sat: 0 - 255
for i in range(height):
for j in range(width):
hue = hsv[i][j][0]
sat = hsv[i][j][1]
freq[hue][sat] += 1
freq_norm = cv2.normalize(freq, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
return freq_norm
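# SkinDetect: black out pixels whose (hue, saturation) bin never occurred in the training histogram, keeping only skin-like tones.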
def SkinDetect(freq, img):
hsvImg = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
height, width, channel = hsvImg.shape
for i in range(height):
for j in range(width):
hue = hsvImg[i][j][0]
sat = hsvImg[i][j][1]
if freq[hue][sat] == 0:
hsvImg[i][j][2] = 0
hsvImg = cv2.cvtColor(hsvImg, cv2.COLOR_HSV2BGR)
return hsvImg
def Histo2D(freq):
fig, ax = plt.subplots(figsize = (10, 10))
ax.imshow(
freq, cmap = plt.cm.nipy_spectral,
extent=[
0, 180,
0, 256
]
)
ax.set_title('2D Histogram of Trained Images')
ax.set_xlabel('Hue')
ax.set_ylabel('Saturation')
plt.savefig('result.png')
plt.show()
if __name__ == '__main__':
# Training Images
sampGun = cv2.imread('gun1_test.bmp')
dataGun = skinToneData(sampGun)
sampPoint = cv2.imread('pointer1_test.bmp')
dataPoint = skinToneData(sampPoint)
sampJoy = cv2.imread('joy1_test.bmp')
dataJoy = skinToneData(sampJoy)
sampOne = cv2.imread('skin1.bmp')
dataOne = skinToneData(sampOne)
sampTwo = cv2.imread('skin2.bmp')
dataTwo = skinToneData(sampTwo)
sampThree = cv2.imread('skin3.bmp')
dataThree = skinToneData(sampThree)
sampFour = cv2.imread('skin4.bmp')
dataFour = skinToneData(sampFour)
result = dataGun + dataPoint + dataJoy + dataOne + dataTwo + dataThree + dataFour
Histo2D(result)
# Testing Image
img = cv2.imread('gun1.bmp')
final_gun = SkinDetect(result, img)
img = cv2.imread('pointer1.bmp')
final_pointer = SkinDetect(result, img)
img = cv2.imread('joy1.bmp')
final_joy = SkinDetect(result, img)
# This does nothing to the logic or algorithm.
# Just used to show images in one window.
allImg = np.hstack((final_gun, final_pointer, final_joy))
cv2.imwrite("allSkinImage.bmp", allImg)
cv2.imwrite("gunSkinImage.bmp", final_gun)
cv2.imwrite("pointerSkinImage.bmp", final_pointer)
cv2.imwrite("joySkinImage.bmp", final_joy)
cv2.imshow("Segmentation Image", allImg)
cv2.waitKey(0) |
# Quicksort, explained simply
# Input: list a
# Output: a new sorted list
def quick_sort(a):
n=len(a)
if n <= 1:
return a
pivot=a[-1]
g1=[]
g2=[]
for i in range(0,n-1):
if a[i]<pivot:
g1.append(a[i])
else:
g2.append(a[i])
return quick_sort(g1)+[pivot]+quick_sort(g2)
d=[6,8,3,9,10,1,2,4,7,5]
print(quick_sort(d))
# Quicksort
# Input: list a
# Output: none (a is sorted in place)
# Recursive helper that sorts the range of list a from index start to end
def quick_sort_sub(a,start,end):
if end<=start:
return
pivot=a[end]
i=start
for j in range(start,end):
if a[j]<=pivot:
a[i],a[j]=a[j],a[i]
i+=1
a[i],a[end]=a[end],a[i]
quick_sort_sub(a,start,i-1)
quick_sort_sub(a,i+1,end)
def quick_sort1(a):
quick_sort_sub(a,0,len(a)-1)
d=[6,8,3,9,10,1,2,4,7,5]
quick_sort1(d)
print(d)
#O(nlogn)~O(n^2) |
from TopEFT.Analysis.MCBasedEstimate import MCBasedEstimate
class estimatorList:
def __init__(self, setup, samples=['TTZ', 'WZ', 'TTX', 'TTW', 'ZG', 'rare', 'pseudoData', 'ZZ', 'XG','ZZZ','WZZ','WWZ','TZQ']): #rare_noZZ
for s in samples:
setattr(self, s, MCBasedEstimate(name="%s_%s"%(s, setup.year), sample=setup.samples[s]))
def constructEstimatorList(self, samples):
self.estimatorList = [ getattr(self, s) for s in samples ]
return self.estimatorList
|
import time
import sys
import json
import os
from vectors import Vector
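# step: advance the sparse Game-of-Life grid one generation; cells is a set of live (x, y) positions.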
def step(cells, lines, columns):
create = []
die = []
for x in range(columns):
for y in range(lines):
position = (x, y)
neighbors = count_neighbors(position, cells)
is_cell = position in cells
if not is_cell and neighbors == 3:
create.append(position)
elif not 1 < neighbors < 4 and is_cell:
die.append(position)
"""
for position, state in grid.items():
neighbors = count_neighbots(position, grid)
is_cell = state == _ALIVE
if not is_cell and neighbors == 3:
create.append(position)
elif not 1 < neighbors < 4 and is_cell:
die.append(position)
for new_cell in create:
cells[new_cell] == _ALIVE
for dying_cell in die:
cells[dying_cell] == _DEAD
"""
for new_cell in create:
cells.add(new_cell)
for dead_cell in die:
cells.remove(dead_cell)
def display_grid(cells, lines, columns, default_symbol=' '):
os.system('cls')
for y in range(lines):
for x in range(columns):
symbol = default_symbol
if (x, y) in cells:
symbol = 'O'
print(symbol, end='')
print()
def shift_to_middle(template, lines, cols):
mid_of_grid = Vector((cols // 2, lines // 2))
longest_line = max(len(line) for line in template)
mid_of_template = Vector((longest_line // 2, len(template) // 2))
shift_per_point = mid_of_grid - mid_of_template
return shift_per_point
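# count_neighbors: count live cells in the eight positions surrounding 'position'.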
def count_neighbors(position, cells):
neighbors = [
(-1, -1), (-1, 0), (-1, 1),
( 0, -1), ( 0, 1),
( 1, -1), ( 1, 0), ( 1, 1)
]
x, y = position
return sum((x+dx, y+dy) in cells
for dy, dx
in neighbors
# Innerhalb des Feldes
if x+dx >= 0 and y+dy >= 0)
def parse_string(template, row_offset=0, col_offset=0, alive='#'):
cells = set()
for row_idx, row in enumerate(template):
for col_idx, char in enumerate(row):
if char == alive:  # compare characters by value, not identity
cells.add((col_idx + col_offset,
row_idx + row_offset))
return cells
def from_file(file, config_name, lines, cols, middle):
with open(file, 'rt') as f:
configs = json.load(f)
config = configs[config_name]
if middle:
config['col_offset'],\
config['row_offset'] = shift_to_middle(config['template'], lines, cols)
return parse_string(**config)
def sim(cells, lines, columns, steps=None):
if steps is None:
while cells:
sim(cells, lines, columns, 10)
else:
for _ in range(steps):
step(cells, lines, columns)
display_grid(cells, lines, columns)
time.sleep(0.3)
if __name__ == '__main__':
_init = set()
if len(sys.argv) >= 3:
_init = from_file(*sys.argv[1:3])
_lines, _cols = 30, 150
sim(_init, _lines, _cols)
|
'''
Ding-dong prompt sound
Reply using HA's TTS service
'''
import ha_api
def on_wake( va_config ):
"""唤醒后的处理函数"""
ha_api.play_audio_file(va_config["media_player"], "ding.wav")
def on_command( va_config ):
"""读入语音命定后的处理函数"""
ha_api.play_audio_file(va_config["media_player"], "dong.wav")
def on_react( speech_in, va_config ):
"""获得语音命令文本后的处理函数"""
speech_out = f"你对我说,{speech_in}。但是我还没有想好怎么处理它。"
ha_api.play_tts( speech_out, va_config["tts_service"], va_config["media_player"] )
|
#########################################################################
# coding=utf-8
# File Name: sax.py
# Author: aaronbai
# e-mail: wangyibaimengbo@163.com
# Created Time: Thu 26 Mar 2015 11:08:22 AM CST
#########################################################################
#! /usr/bin/env python
"""
This script can extract weibo-data-body from original weibo file which contains following items:
id          article id
article     body text
discuss     number of comments
insertTime  time the body was inserted
origin      source
person_id   id of the person the post belongs to
time        time the body was published
transmit    number of reposts
Using python built-in module "xml" and the type of "sax"
"""
import xml.sax
import os
import re
from optparse import OptionParser
from log import *
def excludeUserName(strOri):
"""
exclude user name
for e.g.,
@小艳子kiki @光影魔术师之择日而栖 @就是爱黑巧克力 尝试新的外景风格,亲们,我有木有拍婚纱照的潜质?
====>
尝试新的外景风格,亲们,我有木有拍婚纱照的潜质?
Input strOri:[str]:[the sentence to be processed]:
"""
#pattern = re.compile(r'\s*[@].+\s+')
# match Simplified Chinese characters [\u4e00-\u9fa5]
pattern = re.compile(ur'@[\u4e00-\u9fa5a-zA-Z0-9_-]{2,30}')
discard = re.findall(pattern, strOri)
if 0 != len(discard):
for word in discard:
logger.info(word.encode('utf-8') + 'has been eliminated!')
strResult = re.sub(pattern, '', strOri)
return strResult
def excludeUrl(article):
"""
exclude weibo urls
for e.g.,
http://t.cn/akkpTu。我在想。。。
====>
我在想。。。
Input strOri:[str]:[the sentence to be processed]:
"""
pattern = re.compile(r'http://[a-zA-Z\.0-9\/]+')
discard = re.findall(pattern, article)
if 0 != len(discard):
logger.info(str(discard) + 'has been eliminated!')
strResult = re.sub(pattern, '', article)
return strResult
def excludeSpecial(article):
"""
exclude special character
for e.g.,
I&atm:m fighing for us
===>
I m fighting for us
Input strOri:[str]:[the sentence to be processed]:
"""
pattern = re.compile(r'&')
discard = re.findall(pattern, article)
if 0 != len(discard):
logger.info(str(discard) + 'has been eliminated!')
strResult = re.sub(pattern, '', article)
return strResult
class MovieHandler( xml.sax.ContentHandler ):
"""
Customed handler for content
"""
def __init__(self):
self.CurrentData = ""
# handle element start events
def startElement(self, tag, attributes):
self.CurrentData = tag
# handle element end events
#def endElement(self, tag):
# if self.CurrentData == "article":
# print self.article
# self.CurrentData = ""
# handle character (content) events
def characters(self, content):
if self.CurrentData == "article":
# strip @user_name mentions
article = excludeUserName(content)
article = excludeUrl(article)
article = excludeSpecial(article)
# filter out sentences that are too short
if len(article) > 15:
outFile.write(article)
outFile.write('\n')
if __name__ == "__main__":
## Cmdline parameters
cmd_parser = OptionParser()
cmd_parser.add_option("-f", "--from", type="string", dest="inputFile")
cmd_parser.add_option("-t", "--to", type="string", dest="outputFile")
(options, args) = cmd_parser.parse_args()
logger = FinalLogger.getLogger("../log/purifyWeibo.log")
if not options.inputFile or not options.outputFile:
print "[ Error: Please specify the inputFile and outputFile! ]"
print "[E.g.]: python sax.py -f ../data/weibo_content.xml -t ../data/processed.data"
exit(1)
if False == os.path.exists(options.inputFile):
print "[ Error: source file doesn't exist! ]"
exit(1)
if False == os.path.exists(options.outputFile):
os.mknod(options.outputFile)
outFile = open(options.outputFile, "w")
# create an XMLReader
parser = xml.sax.make_parser()
# turn off namespaces
parser.setFeature(xml.sax.handler.feature_namespaces, 0)
# register the custom ContentHandler
Handler = MovieHandler()
parser.setContentHandler(Handler)
print '[ Parsing origin data file, this will take minutes ... ]'
parser.parse(options.inputFile)
outFile.close()
print "Data has been transfered"
|
#!/usr/bin/env python
'''
Verification Bot
verification.py
Christopher Su
http://christophersu.net/
Checks Google Spreadsheet linked to form for new data and applies verification flair accordingly.
'''
import gspread
import praw
from praw.handlers import MultiprocessHandler
import logging
import json
import os
import sys  # used by killBot()
from time import gmtime, strftime
import AccountDetails
def loadJSON():
try:
rulesFile = open(os.path.join(dir, "already_added.json"), "r")
except IOError:
logging.exception("Error opening already_added.json.")
killBot()
rulesStr = rulesFile.read()
rulesFile.close()
try:
already_added = json.loads(rulesStr)
except ValueError:
logging.exception("Error parsing already_added.json.")
killBot()
return already_added
def saveJSON(already_added):
with open(os.path.join(dir, 'already_added.json'), 'w') as outfile:
json.dump(already_added, outfile)
def main():
logging.info("Starting bot: " + strftime("%Y-%m-%d %H:%M:%S", gmtime()))
gc = gspread.login(AccountDetails.GSPREAD_USERNAME, AccountDetails.GSPREAD_PASSWORD)
doc = gc.open_by_key(AccountDetails.GSPREAD_SHEET).sheet1
usernames = doc.col_values(2)
usernames.pop(0) # remove header that just contains the question
handler = MultiprocessHandler()
r = praw.Reddit(user_agent='Subot 1.0', handler=handler)
r.login(AccountDetails.REDDIT_USERNAME_I, AccountDetails.REDDIT_PASSWORD_I)
flairName = 'registered' # choose which flair to apply
subreddit = AccountDetails.SUBREDDIT
sub = r.get_subreddit(subreddit) # and which subreddit to run in
already_added = loadJSON()
for user in usernames:
if user not in already_added:
sub.set_flair(user, '', flairName)
already_added.append(user)
r.send_message(user, "Registered!", "Your registration has been received. Check /r/%s now to view your flair." % subreddit)
logging.info("Registering: %s at %s" %(user, strftime("%Y-%m-%d %H:%M:%S", gmtime())))
saveJSON(already_added)
def killBot():
sys.exit(1)
if __name__ == "__main__":
dir = os.path.dirname(__file__)
LOG_FILENAME = os.path.join(dir, 'bot.log')
logging.basicConfig(filename=LOG_FILENAME,level=logging.INFO)
try:
main()
except SystemExit:
logging.info("Exit called.")
except:
logging.exception("Uncaught exception.")
logging.shutdown() |
from rest_framework import generics, permissions
from rest_framework.response import Response
#from knox.models import AuthToken
from django.contrib.auth import login
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.renderers import TemplateHTMLRenderer
#from knox.views import LoginView as KnoxLoginView
from myauth.models import *
from LandingPage.models import *
from facilitators.models import *
from django.shortcuts import render , redirect
import json
from django.contrib import messages
from facilitators.forms import *
import io
from rest_framework.parsers import JSONParser
from rest_framework.views import APIView
from django.views import View
from rest_framework.decorators import parser_classes
from rest_framework.parsers import FileUploadParser
from django.core import serializers
from rest_framework.decorators import permission_classes,api_view
from rest_framework.permissions import IsAuthenticated
from django.http import HttpResponse, JsonResponse
from rest_framework.response import Response
from django.views.decorators.csrf import csrf_exempt
from rest_framework.parsers import JSONParser
from LandingPage.models import *
from facilitators.api.serializers import *
from mailing.views import CourseCreationEmailToAdmin ,CourseCreationEmailToFacilitator
# Facilitator Register API
class CreateCourseApi(APIView):
def get(self, request, *args, **kwargs):
context = {}
return render(request, 'facilitators/register/mysignup.html', context)
def post(self, request, *args, **kwargs):
cvideo=request.FILES['cvideo'] # take a course 1 minute video
cthumbnail=request.FILES['cthumbnail'] # take course thumbnail
details=json.loads(request.data.pop('data')[0]) # take the other course detail
subcategory_detail=details.pop('subcategory') # take the list of subcategories
course_detail=details.pop('course') # take the course detail as a dictionary
# build the subcategory object
subcat= subcategory_detail[0]
subcategory_obj=SubCategory.objects.get(subCat_id=int(subcat))
# calculate the duration in month + days
print(course_detail)
days=int(course_detail.pop('days'))
month=days//30
day=int(days % 30)
months=str(month)+" month "+str(day)+" days"
#collect all the details in one dictionary "course_detail"
course_detail['days']=days
course_detail['months']=months
course_detail['video']=cvideo
course_detail['thumbnail']=cthumbnail
course_detail['subCat_id'] =subcategory_obj.subCat_id
course_detail['categories']=subcategory_obj.cat_id.cat_id
course_obj = CourseSerializers(data=course_detail)
if course_obj.is_valid(raise_exception=True):
obj=course_obj.save()
obj.code="LPD-"+str(request.user.user.facilitator.Fid)+str(obj.Cid)
obj.save()
offering=offer.objects.create(Cid=obj,Fid=request.user.user.facilitator)
offering.save()
CourseCreationEmailToFacilitator(obj)
CourseCreationEmailToAdmin(obj)
return Response({'success':'recorded Video is created'},status=201)
# print(subcat)
# ofr={}
# offering=None
# course_detail['subCat_id'] =subcat.subCat_id
# c_code=course_detail.get('code')
# try:
# course=Course.objects.get(code=c_code)
# except Course.DoesNotExist:
# course=None
# if details['svideo']=='true':
# if course is None:
# video_detail['course']=ins.Cid
# ofr['Cid']=ins.Cid
# ofr['Fid']=request.user.user.facilitator
# offering=offer.objects.create(Cid=ofr['Cid'],Fid=ofr['Fid'])
# offering.save()
# vs= LiveSessionsSerializer(data=video_detail)
# if vs.is_valid(raise_exception=True):
# vs.save()
# return Response({'success':'live session is created with new course'},status=201)
# else:
# video_detail['course']=course.Cid
# vs= LiveSessionsSerializer(data=video_detail)
# if vs.is_valid(raise_exception=True):
# vs.save()
# return Response({'success':'live session is created'},status=201)
# else:
# if course is None:
# cs = CourseSerializers(data=course_detail)
# if cs.is_valid(raise_exception=True):
# ins=cs.save()
# video_detail['course']=ins.Cid
# ofr['Cid']=ins
# ofr['Fid']=request.user.user.facilitator
# offering=offer.objects.create(Cid=ofr['Cid'],Fid=ofr['Fid'])
# offering.save()
# vs= VideoRecordedSerializer(data=video_detail)
# if vs.is_valid(raise_exception=True):
# vs.save()
# return Response({'success':'recorded video is created with new course'},status=201)
# else:
# video_detail['course']=course.Cid
# vs= VideoRecordedSerializer(data=video_detail)
# if vs.is_valid(raise_exception=True):
# vs.save()
# return Response({'success':'recorded Video is created'},status=201)
# Considering request.user has Fid=2.
@csrf_exempt
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def courses(request):
if request.method=='GET':
print(request.user)
courses=offer.objects.filter(Fid=2)
newlist=[]
for i in range(0,len(courses)):
course_details=Course.objects.get(title=courses[i].Cid)
newlist.append(course_details)
course_data=CourseSerializers(newlist,many=True)
# print(course_data.data)
return Response(course_data.data)  # return the serialized data, not the serializer
@csrf_exempt
def support(request):
if request.method=='POST':
serializer=QueriesSerializer(data=request.POST)
if serializer.is_valid():
serializer.save(Fid=Facilitator.objects.get(name='vijay gwala'))
return JsonResponse(serializer.data)
if request.method=='GET':
queries=Queries.objects.filter(Fid=2)
serializer=QueriesSerializer(queries,many=True)
return JsonResponse(serializer.data,safe=False)
|
from turtle import Turtle, Screen
import random
screen = Screen()
screen.setup(width=500,height=400)
user_bet = screen.textinput(title="make your bet", prompt = "which turtle will win the race, enter the colour")
colors = ["red", "orange", "yellow", "green", "blue", "purple"]
all_turtles = []
y_position = 0
for i in range(6):
tommy = Turtle(shape="turtle")
tommy.penup()
tommy.color(colors[i])
tommy.goto(x=-230, y=(-90 + y_position))
y_position += 30
all_turtles.append(tommy)
is_game_on = False
if user_bet:
is_game_on = True
while is_game_on:
for turtle in all_turtles:
move_forward = random.randint(0,10)
turtle.forward(move_forward)
if turtle.xcor() > 230:
is_game_on = False
winner_turtle = turtle.pencolor()
if user_bet == winner_turtle:
print(f"you won, the winner is {winner_turtle}")
else:
print(f"you lost, the winner is {winner_turtle}")
screen.exitonclick() |
'''
CECS 100
Project 5
Name: Newton Bao
I.D.#:018286708
Date: May 2, 2019
'''
import numpy as base
x=base.array([[1,3],[-2,4]])
print(x)
a=base.array([[2,1,0,3],[-1,0,2,4],[4,-2,7,0]])
print(a)
b=base.array([[-4,3,5,1],[2,2,0,-1],[3,2,-4,5]])
r=a+b
print(r)
A=base.array([[1,2,4],[2,6,0]])
print(A)
B=base.array([[4,1,4,3],[0,-1,3,1],[2,7,5,2]])
print(B)
C=base.dot(A,B)
print("A times B with multiplication is", C)
|
from parsing_exp import parse_reg_exp
from building_from_exp import build_automaton
def main():
exp = input()
parts = parse_reg_exp(exp, 0)
build_automaton(parts)
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
""" Definition of callbacks that can be passed to the fit function. """
import torch
from torch import Tensor
from copy import deepcopy
class callback():
def __call__(self):
""" Called at each epoch. """
raise NotImplementedError
def end(self):
""" Called at the end of the training. """
raise NotImplementedError
class store_best_model(callback):
""" Identifies the best model parameters found during training and stores them. """
def __init__(self, model):
self.model = model
self.lowest_train_loss = float("inf")
self.best_model_state = deepcopy(self.model.state_dict())
def __call__(self):
# check if the last computed loss is lower than the best seen one
curr_loss = self.model.history.train_losses[-1]
if curr_loss < self.lowest_train_loss:
self.lowest_train_loss = curr_loss
self.best_model_state = deepcopy(self.model.state_dict())
def end(self):
self.model.save_model(self.best_model_state)
class keep_best_model(callback):
""" Identifies the best model parameters found during training and loads them
in the model at the end of the training. """
def __init__(self, model, use_test_loss=False):
self.model = model
self.use_test_loss = use_test_loss
self.lowest_loss = float("inf")
self.best_model_state = deepcopy(self.model.state_dict())
def __call__(self):
curr_loss = self.model.history.test_losses[-1] if self.use_test_loss else self.model.history.train_losses[-1]
# check if the last computed loss is lower than the best seen one
if curr_loss < self.lowest_loss:
self.lowest_loss = curr_loss
self.best_model_state = deepcopy(self.model.state_dict())
def end(self):
self.model.load_state_dict(self.best_model_state) |
from numpy import ndarray
_steps = {
"1.01": "Convert to Canonical Form for Base Indices of {}",
"1.02": "Basis:\n{}",
"1.03": "Corresponding Coefficient Entries: {}",
"1.04": "Basis Inverse:\n{}",
"1.05": "y Vector:\n{}",
"2.01": "Is {} Feasible?",
"2.02": "{} is Feasible:",
"2.03": "* P is in SEF",
"2.04": "* All Entries of {} is Nonnegative",
"2.05": "* Constraints are Satisfied (Ax = b)",
"2.06": "{} is Not Feasible:",
"2.07": "* Some Entries of {} are Negative",
"2.08": "* Constraints are Not Satisfied (Ax ≠ b)",
"2.09": "* Entry at Index {} is Negative",
"2.10": "* Entry at Index {} is Not a Free Variable",
"2.11": "* {} • {} = {} and {} is Not ≤ {}",
"2.12": "* {} • {} = {} and {} is Not ≥ {}",
"2.13": "* {} • {} = {} and {} is Not ≠ {}",
"2.14": "* Constraints are Satisfied",
"2.15": "* All Entries are Either Nonnegative or is a Free Variable",
"3.01": "Convert to SEF",
"3.02": "Take Negative of Coefficient Vector to Set Objective to Maximization",
"3.03": "{} => {}",
"3.04": "Free Variables at Index x",
"4.01": "Is {} A Basic Solution",
"4.02": "Column {} Is Not a Zero",
"4.03": "Ax ≠ b",
"4.04": "{} is A Basic Solution for Basis {}",
"4.05": "{} is Not A Basic Solution for Basis {}",
"4.06": "Ax = b",
"4.07": "Columns of Basis are Zero",
"5.01": "{}",
"5.02": "Iteration {} =================================",
"5.03": "Solution: {}",
"5.04": "Optimal Basis: {}",
"5.05": "The Program is Unbounded",
"5.06": "Optimality Certificate: {}",
}
_cleanup_rules = [("[ ", "["), (" ]", "]"), ("[[", "["), ("]] ", "]"), (" [", "[")]
# TODO minor format bug
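# render_descriptor: look up a step template by key and format it, flattening numpy arrays to single-line text first.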
def render_descriptor(key, arguments):
if key not in _steps:
raise KeyError()
for i in range(len(arguments)):
if isinstance(arguments[i], ndarray):
text = " ".join(filter(None, str(arguments[i]).split(" ")))
for rule in _cleanup_rules:
text = text.replace(rule[0], rule[1])
arguments[i] = text
return _steps[key].format(*arguments)
def assert_correctness(steps):
pass
|
# Quiz 3
#
#
# Instructions: Given a time interval in seconds, compute the remaining seconds
# needed to complete an exact minute. This program must work for 5 attempts.
def get_segundos(S):
segundos = S % 60
return 60 - segundos
if __name__ == '__main__':
for x in range(5):
segundosT = int(input("Introduzca la cantidad de segundos: "))
pSegundos = get_segundos(segundosT)
print("faltan {0} segundos para completar el siguiente minuto".format(pSegundos))
input() |
#!/usr/bin/env python3.3
# -*- coding: utf8 -*-
#
# Management Interface
#
# Copyright (c) 2015 NorthernSec
# Copyright (c) 2015 Pieter-Jan Moreels
# Imports
import os
import sys
runpath=os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runpath, '..'))
import argparse
from lib.DatabaseLayer import addTokens, selectAllFrom
from lib.Toolkit import is_hex
from lib.Objects import HoneyToken
from lib.Configuration import Configuration as conf
if __name__=='__main__':
description='''Management script'''
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-L', action='store_true', help='List')
parser.add_argument('-A', action='store_true', help='Add')
parser.add_argument('-t', metavar='token', help='Token to add or remove')
parser.add_argument('-a', metavar='action', help='Action to take when triggered (accept/block/drop)')
parser.add_argument('-d', metavar='database', help='Database to be modified')
parser.add_argument('-I', action='store_true', help='Case Insensitive')
parser.add_argument('-B', action='store_true', help='Binary Blob (enter in hex)')
parser.add_argument('-N', action='store_true', help='Notify - Alert the user right away')
args = parser.parse_args()
db=args.d if args.d else conf.getDB()
if args.L:
for x in conf.getTables():
print("="*80 + "\n%s\n"%(x) + "="*80)
for y in selectAllFrom(db, x):
sys.stdout.write("| ")
for z in sorted(y.keys()):
sys.stdout.write("%s: %s | "%(z, y[z]))
print("")
elif args.A:
if args.t:
# if args.B (Binary), get the clean hex version
token=args.t if not args.B else is_hex(args.t)
action=args.a.lower() if args.a else conf.getDefaultAction()
alert=True if args.N else False
CI=True if args.I else False
IB=True if args.B else False
# check that everything is all right
if action not in conf.getActions(): sys.exit("Unknown action: %s"%args.a.lower())
if args.B and not token: sys.exit("Invalid hex!")
addTokens(db, HoneyToken(token, action, alert, CI, IB))
|
# coding=utf-8
from django.db import models
from article.models import Article
class Author(models.Model):
author_nameFirst = models.CharField(max_length=30, verbose_name=u'Имя')
author_nameLast = models.CharField(max_length=30, verbose_name=u'Фамилия')
author_wiki = models.URLField(verbose_name=u'Wiki', blank=True)
user = models.ForeignKey('customuser.CustomUser', blank=True, null=True, verbose_name=u'пользователь')
class Meta:
db_table = 'Author'
verbose_name = u'автора'
verbose_name_plural = u'Автор'
def get_quantity_articles(self):
articles = Article.objects.filter(article_categories=self.id)
return len(articles)
get_quantity_articles.short_description = u'количество статей'
def __unicode__(self):
return u'{0} {1}'.format(self.author_nameFirst, self.author_nameLast)
|
# -*- coding: utf-8 -*-
#############
#
# Copyright - Nirlendu Saha
#
# author - nirlendu@gmail.com
#
#############
from __future__ import unicode_literals
import sys
import inspect
from django.db import models
from libs.logger import app_logger as log
class UrlManager(models.Manager):
def store_url(
self,
url,
url_title,
url_desc=None,
url_imagefile=None,
url_weight=0,
):
log.debug('URL create operation')
url = Url.objects.create(
url=url,
url_title=url_title,
url_desc=url_desc,
url_imagefile=url_imagefile,
url_weight=url_weight,
)
return url.pk
class Url(models.Model):
url = models.CharField(
max_length=150,
unique=True,
)
url_title = models.CharField(
max_length=150,
null=False,
)
url_desc = models.CharField(
max_length=200,
default=None,
null=True,
)
url_imagefile = models.CharField(
max_length=100,
default=None,
null=True,
)
url_weight = models.DecimalField (
default=0,
max_digits=15,
decimal_places=10
)
url_updated = models.DateTimeField(
auto_now_add=True,
)
url_created = models.DateTimeField(
auto_now_add=True,
)
objects = UrlManager() |
age = int(input("Enter you age: "))
if age <= 1:
print("Infant")
elif 1 < age <= 10:
print("Child")
elif 10 < age <= 18:
print("Teen")
elif 18 < age <= 45:
print("Adult")
else:
print("Old")
|
list=[1,2,3,4,5]
for x in list:
print(x)
print(list[2:5])
dict={0:'zero',1:'one',2:'two',3:'three',4:'four','repeat':{0:'zero',1:'one',2:'two',3:'three',4:'four'}}
print(dict['repeat'])
newlist=["11",'22',33,44,{0:'zero',1:'one',2:'two',3:'three',4:'four','repeat':{0:'zero',1:'one',2:'two',3:'three',4:'four'}}]
list1=[0,1,2,3,4,['zero','one',[0.0,1.0,2.0,3.0,4.0],'two','three','four'],'five','six']
list2=['zero','one',2,3,4,[0,1,[1.0,2.0,3.0,4.0],'two','three','four'],5,6]
list3=[0,1,2,3,4,['zero','one',{"one":1.0,"two":2.0,"three":3.0},'two','three','four'],'five','six']
list4=[0,1,2,3,4,('zero','one',[1.0,2.0,3.0,4.0],'two','three','four'),'five','six']
for x in list1:
print(x)
print('list '+str(list1[5][2][2]))
tuple1=[0,1,2,3,4,['zero','one',[1.0,2.0,3.0,4.0],'two','three','four'],'five','six']
tuple2=['zero','one',2,3,4,[0,1,[0.0,1.0,2.0,3.0,4.0],'two','three','four'],5,6]
tuple3=[0,1,2,3,4,['zero','one',{"one":1.0,"two":2.0,"three":3.0},'two','three','four'],'five','six']
tuple4=[0,1,2,3,4,('zero','one',[1.0,2.0,3.0,4.0],'two','three','four'),'five','six']
print('tuple '+str(tuple1[5][2][2]))
dict1={'zero':0,'one':1,'two':2,'three':3,'four':4,'new':['zero','one',[1.0,2.0,3.0,4.0],'two','three','four'],'five','six'}
dict2=['zero','one',2,3,4,[0,1,[0.0,1.0,2.0,3.0,4.0],'two','three','four'],5,6]  # list, not set: sets cannot contain lists
dict3=[0,1,2,3,4,['zero','one',{"one":1.0,"two":2.0,"three":3.0},'two','three','four'],'five','six']
dict4=[0,1,2,3,4,('zero','one',[1.0,2.0,3.0,4.0],'two','three','four'),'five','six']
print('dict '+str(dict1['new'][2][2]))
#print(int('1'))
|
from common.services.template_functions import all_template_functions_dict
# during docker build common is copied into each subproject
# specify the render_template method
# for flask this will be the imported render_template instead of a standalone jinja2 render_template function
from src.template_base import render_template
# jinjaenv used, required to add functions used in templates to globals
from src.template_base import env
env.globals.update(all_template_functions_dict)
|
# -*- encoding: utf-8 -*-
from pyramid.view import view_config
from pyramid.url import route_url
from pyramid.httpexceptions import HTTPFound
from pyramid.security import authenticated_userid
from tempus_ui.views.api import TemplateAPI
@view_config(
route_name='tempusroot',
renderer='tempus_ui:templates/tempus/ui.pt',
request_method='GET')
def tempus_gui(context, request):
"""
Main view of tempus
"""
# We need all the information
key = authenticated_userid(request)
api = TemplateAPI(context, request, 'Tempus')
# User info
user = request.user
if not user:
raise HTTPFound(location=route_url('logout', request))
return dict(
user=user,
api=api,
key=key
)
|
from __future__ import unicode_literals
from django.db import models
# from django.contrib.auth.models import AbstractBaseUser
# from django.contrib.auth.models import BaseUserManager
from django.contrib.auth.models import User
# Create your models here.
class Account(models.Model):
MARITAL_STATUS = (
('U', 'Unmarried'),
('M', 'Married')
)
GENDER = (
('M', 'Male'),
('F', 'Female')
)
# name = models.CharField(max_length=40)
# nick_name = models.CharField(max_length=40)
# email = models.EmailField(unique=True)
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='account_ref_user')
position = models.CharField(max_length=40, default='New Member')
phone = models.CharField(max_length=15)
marital_status = models.CharField(max_length=1, choices=MARITAL_STATUS)
gender = models.CharField(max_length=1, choices=GENDER)
date_of_birth = models.DateTimeField(null=True)
hobby = models.CharField(max_length=40)
address = models.TextField()
bio = models.TextField()
image = models.CharField(max_length=250, null=True)
def __str__(self):
return self.user.username
def get_password(self):
return self.user.password
class SocialInfo(models.Model):
account = models.ForeignKey(Account, on_delete=models.CASCADE, related_name='account_social')
facebook = models.CharField(max_length=50)
twitter = models.CharField(max_length=50)
skype = models.CharField(max_length=50)
bitbucket = models.CharField(max_length=50)
class Token(models.Model):
user = models.ForeignKey(User, related_name='token_user')
token = models.CharField(max_length=250)
expire_date = models.DateTimeField()
|
from django.urls import path,include
from .views import show,bigshow
urlpatterns = [
path('detail/<int:productpk>',show,name='product'),
path('bigdetail/<int:productpk>',bigshow,name='bigshow'),
] |
a=[]
b=[]
for i in range(3):
c,d=input().split()
a.append(c)
b.append(d)
if a.count(a[0])==3 or b.count(b[0])==3:
print("yes")
else:
print("no")
|
import traceback
import json
from struct import *
import random
import math
from emailapp.decorate import login_required
from emailapp.sql_helpers import templates, authenticate, user, lists, \
campaign_helper, email_result, ready_to_send_email_helper, \
campaign_stats, campaign_winning_combination, emails_unsubscribe, \
campaign_setup, email_subject, ab_campaigns_helper
from emailapp import app, session, request, redirect, render_template, \
url_for, bcrypt, jsonify
from conf.config import EMAIL_OPEN_URL, APP_ROOT, APP_STATIC
from datetime import datetime, timedelta
campaign_stats_helper = campaign_stats.CampaignStatsHelper()
template_helper = templates.Templates()
user_authenticate = authenticate.Authenticate()
campaign_helper_obj = campaign_helper.CampaignHelper()
email_result_helper = email_result.EmailResultHelper()
ready_to_send_email_helper_obj = ready_to_send_email_helper.\
ReadyToSendEmailsHelper()
email_user = user.User()
list_helper = lists.Lists()
campaign_winning_helper = campaign_winning_combination.CampaignWinningHelper()
emails_unsubscribe_obj = emails_unsubscribe.EmailUnsubscribeHelper()
campaign_setup_obj = campaign_setup.CampaignSetupHelper()
email_subject_obj = email_subject.EmailSubjectHelper()
ab_campaigns_helper_obj = ab_campaigns_helper.AbCampaignsHelper()
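# create_list: create an 'all user' list for the current user and add every known email address to it.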
def create_list():
list_name = 'all user'
list_id = list_helper.create_list(list_name, request.user)
user = email_user.get_email_user()
for emails in user:
list_helper.add_list_by_id(emails['email'], list_id)
@login_required
def segments():
list_data = list_helper.get_list(request.user)
lst_data = []
list_id = 0
open_count = []
clicks_count = []
open_percentage = 0
clicks_percentage = 0
for item in list_data:
open_count = []  # reset per list so open/click percentages are computed per list
clicks_count = []
count = list_helper.get_listsegment_count(item['id'])
all_open_data = list_helper.get_listsegment_open_by_list_id(item['id'])
for index, val in enumerate(all_open_data):
open_count.append(val)
open_count_len = len(open_count)
open_percentage = int(math.trunc(open_count_len*100)/count)
all_clicks_data = list_helper.get_listsegment_clicks_by_list_id(item['id'])
for index, val in enumerate(all_clicks_data):
clicks_count.append(val)
clicks_count_len = len(clicks_count)
clicks_percentage = int(math.trunc(clicks_count_len*100)/count)
lst = {"id": item['id'], "name": item['list_name'], "count": count,
"created_on": item['created_on'], "open_len": open_percentage,
"click_len": clicks_percentage}
lst_data.append(lst)
context = {'list_data': lst_data}
return render_template('segments/segments.html', ** context)
@login_required
def edit_segments(list_id):
list_data = list_helper.get_list_name_by_listid(list_id)
list_name = list_data['list_name']
if request.method == 'POST':
segment_name = request.form['segment_name']
list_helper.update_segment_name_by_id(segment_name, list_id)
return redirect('/segments/')
context = {'list_name': list_name}
return render_template('segments/edit_segment.html', **context)
@login_required
def create_segments():
user_id = request.user
list_data = list_helper.get_list(user_id)
if not list_data:
create_list()
lst_data = []
for item in list_data:
count = list_helper.get_listsegment_count(item['id'])
lst = {"id": item['id'], "name": item['list_name'], "count": count}
lst_data.append(lst)
if request.method == 'POST':
segment_name = request.form['segment_name']
list_id = request.form['list_id']
return redirect('/segments/')
context = {'list_data': lst_data}
return render_template('segments/create_segments.html', **context)
|
# encoding: utf-8
import time
import sys
import copy
class HumanPlayer(object):
def __init__(self, name, color, board, rulebook):
self.name = name
self.color = color
self.board = board
self.rulebook = rulebook
self.highlightBoard = 0
def copyBoard(self):
return copy.deepcopy(self.board)
def getHighlight(self, board):
valid_moves = self.rulebook.getValidMoves(self.color, self.board)
if not valid_moves:
self.rulebook.pass_turn += 1
return board, 1
else:
board_copy = self.copyBoard()
for move in valid_moves:
i, j = move[0][0], move[0][1]
board_copy.placePieceInPosition(board_copy.highlight, i, j)
return board_copy, 0
def highlightValidMoves(self, valid_moves, board):
for move in valid_moves:
i, j = move[0][0], move[0][1]
board.placePieceInPosition(self.board.highlight, i, j)
self.highlightBoard = board
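# chooseMove: read a "row column" pair from the user and re-prompt until it matches one of the valid moves.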
def chooseMove(self, valid_moves):
print '\nEscolha uma casa: [linha, coluna]'
choice = raw_input("> ").strip().split()
choice = [int(x) for x in choice]
invalid_move = True  # assume invalid until the choice matches a valid move
for move in valid_moves:
if (choice == move[0]):
invalid_move = False
if((len(choice) == 2) and not invalid_move):
return choice
else:
print "Movimento inválido"
return self.chooseMove(valid_moves)
def play(self):
if(self.board.isBoardFull() or self.board.noMoreMoves() or self.rulebook.pass_turn == 2):
self.rulebook.end_game = True
else:
valid_moves = self.rulebook.getValidMoves(self.color, self.board)
if not valid_moves:
print "No moves available, player must pass"
self.rulebook.pass_turn += 1
else:
board_copy = self.copyBoard()
self.highlightValidMoves(valid_moves, board_copy)
board_copy.printBoard()
human_move = self.chooseMove(valid_moves)
flip_directions = []
for moves in valid_moves:
if (moves[0] == human_move):
flip_directions.append(moves[1])
self.board.placePieceInPosition(self.color, human_move[0], human_move[1])
self.rulebook.pass_turn = 0
self.board.flipPieces(self.color, human_move, flip_directions)
return human_move
def playWindow(self, human_move):
if(self.board.isBoardFull() or self.board.noMoreMoves() or self.rulebook.pass_turn == 2):
self.rulebook.end_game = True
else:
valid_moves = self.rulebook.getValidMoves(self.color, self.board)
if not valid_moves:
print "No moves available, player must pass"
else:
flip_directions = []
for moves in valid_moves:
if (moves[0] == human_move):
flip_directions.append(moves[1])
self.board.placePieceInPosition(self.color, human_move[0], human_move[1])
self.rulebook.pass_turn = 0
self.board.flipPieces(self.color, human_move, flip_directions)
return human_move
|