from django.urls import path
from tee import views
app_name = "tee"
urlpatterns = [
path("create/<str:shape>/<str:color>/<str:text_color>/<str:font>/<path:sentence>/<str:filename>.png", views.GeneratorView.as_view(), name="generator"),
]
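# A URL matched by the pattern above might look like this (illustrative values):
#     /create/circle/ffffff/000000/arial/Hello%20world/shirt.png
# and it can be reversed by name through the "tee" namespace, e.g.:
#     from django.urls import reverse
#     reverse("tee:generator", kwargs={
#         "shape": "circle", "color": "ffffff", "text_color": "000000",
#         "font": "arial", "sentence": "Hello world", "filename": "shirt",
#     })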
from collections.abc import MutableMapping
import os
from os import path
from macresources import make_rez_code, parse_rez_code, make_file, parse_file
from warnings import warn
TEXT_TYPES = [b'TEXT', b'ttro'] # Teach Text read-only
def _unsyncability(name): # files named '_' reserved for directory Finder info
if path.splitext(name)[1].lower() in ('.rdump', '.idump'): return True
if name.startswith('.'): return True
if name == '_': return True
if len(name) > 31: return True
try:
name.encode('mac_roman')
except UnicodeEncodeError:
return True
return False
def _fuss_if_unsyncable(name):
if _unsyncability(name):
raise ValueError('Unsyncable name: %r' % name)
def _try_delete(name):
try:
os.remove(name)
except FileNotFoundError:
pass
def _symlink_rel(src, dst):
rel_path_src = path.relpath(src, path.dirname(dst))
os.symlink(rel_path_src, dst)
def _get_datafork_paths(base):
"""Symlinks are NOT GOOD"""
base = path.abspath(path.realpath(base))
for dirpath, dirnames, filenames in os.walk(base):
dirnames[:] = [x for x in dirnames if not _unsyncability(x)]
filenames[:] = [x for x in filenames if not _unsyncability(x)]
for kindcode, the_list in ((0, filenames), (1, dirnames)):
for fname in the_list:
nativepath = path.join(dirpath, fname)
hfspath = tuple(_swapsep(c) for c in path.relpath(nativepath, base).split(path.sep))
hfslink = kindcode # if not a link then default to this
if path.islink(nativepath):
nativelink = path.realpath(nativepath)
if len(path.commonpath((nativelink, base))) < len(base): continue
hfslink = tuple(_swapsep(c) for c in path.relpath(nativelink, base).split(path.sep))
                    if hfslink == (path.relpath('x', 'x'),): hfslink = ()  # a link to the base folder itself yields ('.',)
yield nativepath, hfspath, hfslink
def _swapsep(n):
return n.replace(':', path.sep)
class AbstractFolder(MutableMapping):
def __init__(self, from_dict=()):
self._prefdict = {} # lowercase to preferred
self._maindict = {} # lowercase to contents
self.flags = 0 # help me!
self.update(from_dict)
def __setitem__(self, key, value):
if isinstance(key, tuple):
if len(key) == 1:
self[key[0]] = value
return
elif len(key) == 0:
raise KeyError
else:
self[key[0]][key[1:]] = value
return
try:
key = key.decode('mac_roman')
except AttributeError:
pass
        key.encode('mac_roman')  # validate: raises UnicodeEncodeError for names that cannot be encoded
lower = key.lower()
self._prefdict[lower] = key
self._maindict[lower] = value
def __getitem__(self, key):
if isinstance(key, tuple):
if len(key) == 1:
return self[key[0]]
elif len(key) == 0:
return self
else:
return self[key[0]][key[1:]]
try:
key = key.decode('mac_roman')
except AttributeError:
pass
lower = key.lower()
return self._maindict[lower]
def __delitem__(self, key):
if isinstance(key, tuple):
if len(key) == 1:
del self[key[0]]
return
elif len(key) == 0:
raise KeyError
else:
del self[key[0]][key[1:]]
return
try:
key = key.decode('mac_roman')
except AttributeError:
pass
lower = key.lower()
del self._maindict[lower]
del self._prefdict[lower]
def __iter__(self):
return iter(self._prefdict.values())
def __len__(self):
return len(self._maindict)
def __repr__(self):
the_dict = {self._prefdict[k]: v for (k, v) in self._maindict.items()}
return repr(the_dict)
def __str__(self):
lines = []
for k, v in self.items():
v = str(v)
if '\n' in v:
lines.append(k + ':')
for l in v.split('\n'):
lines.append(' ' + l)
else:
lines.append(k + ': ' + v)
return '\n'.join(lines)
def iter_paths(self):
for name, child in self.items():
yield ((name,), child)
try:
childs_children = child.iter_paths()
except AttributeError:
pass
else:
for each_path, each_child in childs_children:
yield (name,) + each_path, each_child
def walk(self, topdown=True):
result = self._recursive_walk(my_path=(), topdown=topdown)
if not topdown:
result = list(result)
result.reverse()
return result
def _recursive_walk(self, my_path, topdown): # like os.walk, except dirpath is a tuple
dirnames = [n for (n, obj) in self.items() if isinstance(obj, AbstractFolder)]
filenames = [n for (n, obj) in self.items() if not isinstance(obj, AbstractFolder)]
yield (my_path, dirnames, filenames)
if not topdown: dirnames.reverse() # hack to account for reverse() in walk()
for dn in dirnames: # the caller can change dirnames in a loop
yield from self[dn]._recursive_walk(my_path=my_path+(dn,), topdown=topdown)
def read_folder(self, folder_path, date=0, mpw_dates=False):
self.crdate = self.mddate = self.bkdate = date
deferred_aliases = []
for nativepath, hfspath, hfslink in _get_datafork_paths(folder_path):
if hfslink == 0: # file
thefile = File(); self[hfspath] = thefile
thefile.crdate = thefile.mddate = thefile.bkdate = date
if mpw_dates: thefile.real_t = 0
try:
with open(nativepath + '.idump', 'rb') as f:
if mpw_dates: thefile.real_t = max(thefile.real_t, path.getmtime(f.name))
thefile.type = f.read(4)
thefile.creator = f.read(4)
except FileNotFoundError:
pass
try:
with open(nativepath + '.rdump', 'rb') as f:
if mpw_dates: thefile.real_t = max(thefile.real_t, path.getmtime(f.name))
thefile.rsrc = make_file(parse_rez_code(f.read()), align=4)
except FileNotFoundError:
pass
with open(nativepath, 'rb') as f:
if mpw_dates: thefile.real_t = max(thefile.real_t, path.getmtime(f.name))
thefile.data = f.read()
if thefile.type in TEXT_TYPES:
thefile.data = thefile.data.replace(b'\r\n', b'\r').replace(b'\n', b'\r')
try:
thefile.data = thefile.data.decode('utf8').encode('mac_roman')
except UnicodeEncodeError:
pass # not happy, but whatever...
elif hfslink == 1: # folder
thedir = Folder(); self[hfspath] = thedir
thedir.crdate = thedir.mddate = thedir.bkdate = date
else: # symlink, i.e. alias
deferred_aliases.append((hfspath, hfslink)) # alias, targetpath
        for aliaspath, targetpath in deferred_aliases:
            alias = File()
            alias.flags |= 0x8000  # Finder kIsAlias bit
            alias.aliastarget = self[targetpath]
            self[aliaspath] = alias
if mpw_dates:
all_real_times = set()
for pathtpl, obj in self.iter_paths():
try:
all_real_times.add(obj.real_t)
except AttributeError:
pass
ts2idx = {ts: idx for (idx, ts) in enumerate(sorted(set(all_real_times)))}
for pathtpl, obj in self.iter_paths():
try:
real_t = obj.real_t
except AttributeError:
pass
else:
fake_t = obj.crdate + 60 * ts2idx[real_t]
obj.crdate = obj.mddate = obj.bkdate = fake_t
def write_folder(self, folder_path):
def any_exists(at_path):
if path.exists(at_path): return True
if path.exists(at_path + '.rdump'): return True
if path.exists(at_path + '.idump'): return True
return False
written = []
blacklist = list()
alias_fixups = list()
valid_alias_targets = dict()
for p, obj in self.iter_paths():
blacklist_test = ':'.join(p) + ':'
if blacklist_test.startswith(tuple(blacklist)): continue
if _unsyncability(p[-1]):
warn('Ignoring unsyncable name: %r' % (':' + ':'.join(p)))
blacklist.append(blacklist_test)
continue
nativepath = path.join(folder_path, *(comp.replace(path.sep, ':') for comp in p))
info_path = nativepath + '.idump'
rsrc_path = nativepath + '.rdump'
valid_alias_targets[id(obj)] = nativepath
if isinstance(obj, Folder):
os.makedirs(nativepath, exist_ok=True)
elif obj.mddate != obj.bkdate or not any_exists(nativepath):
if obj.aliastarget is not None:
alias_fixups.append((nativepath, id(obj.aliastarget)))
# always write the data fork
data = obj.data
if obj.type in TEXT_TYPES:
data = data.decode('mac_roman').replace('\r', os.linesep).encode('utf8')
                with open(nativepath, 'wb') as f:
                    f.write(data)
                written.append(nativepath)  # record for the mtime-alignment pass below
# write a resource dump iff that fork has any bytes (dump may still be empty)
if obj.rsrc:
with open(rsrc_path, 'wb') as f:
rdump = make_rez_code(parse_file(obj.rsrc), ascii_clean=True)
f.write(rdump)
else:
_try_delete(rsrc_path)
# write an info dump iff either field is non-null
idump = obj.type + obj.creator
if any(idump):
with open(info_path, 'wb') as f:
f.write(idump)
else:
_try_delete(info_path)
if written:
t = path.getmtime(written[-1])
for w in written:
os.utime(w, (t, t))
for alias_path, target_id in alias_fixups:
try:
target_path = valid_alias_targets[target_id]
except KeyError:
pass
else:
_try_delete(alias_path)
_try_delete(alias_path + '.idump')
_try_delete(alias_path + '.rdump')
_symlink_rel(target_path, alias_path)
for ext in ('.idump', '.rdump'):
if path.exists(target_path + ext):
_symlink_rel(target_path + ext, alias_path + ext)
class Folder(AbstractFolder):
def __init__(self):
super().__init__()
self.x = 0 # where to put this spatially?
self.y = 0
self.usrInfo = None
self.fndrInfo = None
self.crdate = self.mddate = self.bkdate = 0
class File:
def __init__(self):
self.type = b'????'
self.creator = b'????'
self.flags = 0 # help me!
self.x = 0 # where to put this spatially?
self.y = 0
self.locked = False
self.crdate = self.mddate = self.bkdate = 0
self.aliastarget = None
self.rsrc = bytearray()
self.data = bytearray()
self.fndrInfo = None
def __str__(self):
if isinstance(self.aliastarget, File):
return '[alias] ' + str(self.aliastarget)
elif self.aliastarget is not None:
return '[alias to folder]'
typestr, creatorstr = (x.decode('mac_roman') for x in (self.type, self.creator))
dstr, rstr = (repr(bytes(x)) if 1 <= len(x) <= 32 else '%db' % len(x) for x in (self.data, self.rsrc))
return '[%s/%s] data=%s rsrc=%s' % (typestr, creatorstr, dstr, rstr)
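# A minimal usage sketch (paths and names are illustrative): mirror a native
# directory tree into memory, add a file, then write everything back out.
#     root = Folder()
#     root.read_folder('/tmp/in_tree')
#     f = File()
#     f.type, f.creator = b'TEXT', b'ttxt'
#     f.data = b'Hello\r'
#     root['Notes'] = Folder()
#     root[('Notes', 'hello.txt')] = f   # tuple keys address nested paths
#     root.write_folder('/tmp/out_tree')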
import pandas as pd
import numpy as np
import altair as alt
def plot_correlation(corr_data, title, strongest_only=True):
if strongest_only:
strongest = corr_data.sort_values("Correlation", ascending=False)
strongest = strongest[strongest["Variable 1"] != strongest["Variable 2"]]
corr_data = corr_data[
(corr_data["Variable 1"].isin(strongest["Variable 1"].unique()[:20]))
& (corr_data["Variable 2"].isin(strongest["Variable 1"].unique()[:20]))
]
    corr_data = corr_data.copy()  # avoid mutating a slice of the caller's frame
    corr_data.Correlation = corr_data.Correlation.round(2)
base = (
alt.Chart(corr_data)
.encode(
x="Variable 2:O", y="Variable 1:O", tooltip=["Variable 1", "Variable 2"]
)
.properties(height=600, width=600, title=title)
)
text = base.mark_text().encode(
text="Correlation",
color=alt.condition(
alt.datum.Correlation > 0.5, alt.value("white"), alt.value("black")
),
)
cor_plot = base.mark_rect().encode(color="Correlation:Q")
cor_plot = cor_plot + text
return cor_plot
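# Example (illustrative): plot_correlation expects a long-format frame with
# columns "Variable 1", "Variable 2" and "Correlation", e.g. built from a
# pairwise correlation matrix:
#     corr = df.corr().reset_index().melt("index")
#     corr.columns = ["Variable 1", "Variable 2", "Correlation"]
#     plot_correlation(corr, title="Feature correlations")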
def numeric_batch_profile(data, title):
plots = []
for domain in data.run_name.unique():
tmp = data[data.run_name == domain]
chart = alt.Chart(tmp)
chart = chart.mark_bar().encode(
x=alt.X("column_name:N"),
y=alt.Y("value:Q"),
tooltip=["value", "column_name"],
)
chart = chart.properties(title=domain, width=20 * tmp.shape[0])
plots.append(chart)
final_chart = (
alt.hconcat(*plots).resolve_scale(y="independent").properties(title=title)
)
return final_chart
def histogram_from_custom_bins(data, num_facets):
histd = pd.DataFrame({"counts": data[0]})
histd["bin_min"] = data[1][:-1]
histd["bin_max"] = data[1][1:]
chart = (
alt.Chart(histd)
.mark_bar()
.encode(
x=alt.X("bin_min", bin="binned", axis=alt.Axis(title="Value")),
x2="bin_max",
y="counts",
tooltip=["counts", "bin_min", "bin_max"],
)
)
chart = chart.properties(height=200, width=400, title="Full Batch")
if num_facets:
hists = []
for key in list(num_facets.keys()):
sub_hist = pd.DataFrame({"counts": num_facets[key][0]})
sub_hist["bin_min"] = num_facets[key][1][:-1]
sub_hist["bin_max"] = num_facets[key][1][1:]
sub_hist["category"] = key
hists.append(sub_hist)
facet_hist = pd.concat(hists)
facet_chart = (
alt.Chart(facet_hist)
.mark_bar(opacity=0.5)
.encode(
x=alt.X("bin_min", bin="binned", axis=alt.Axis(title="Value")),
x2="bin_max",
y="counts",
color="category:N",
tooltip=["counts", "bin_min", "bin_max"],
)
)
        facet_chart = facet_chart.properties(height=200, width=400, title="Faceted")
chart = alt.vconcat(chart, facet_chart)
return chart
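# Example (illustrative): `data` is the (counts, bin_edges) pair returned by
# np.histogram, and `num_facets` maps a facet name to its own such pair:
#     full = np.histogram(values, bins=20)
#     facets = {"A": np.histogram(values_a, bins=20), "B": np.histogram(values_b, bins=20)}
#     histogram_from_custom_bins(full, facets)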
def barchart_top_categories_from_value_counts(data, frequencies, cat_facets=None):
data = pd.DataFrame({"Variable": list(data.keys()), "Count": list(data.values())})
freqs = pd.DataFrame(
{"Variable": list(frequencies.keys()), "Freq": list(frequencies.values())}
)
freqs.Freq = freqs.Freq.round(3)
data = data.merge(freqs)
chart = (
alt.Chart(data)
.mark_bar()
.encode(
y=alt.Y("Variable:N", sort="-x"),
x=alt.X("Count:Q"),
tooltip=["Variable", "Count"],
)
.properties(width=400)
)
text = chart.mark_text(align="left", baseline="middle", dx=3).encode(text="Freq:Q")
chart = chart + text
if cat_facets:
facets = (
pd.DataFrame(cat_facets)
.T.reset_index()
.rename(columns={"index": "Variable"})
)
facets = facets.melt(
id_vars="Variable",
value_vars=[col for col in facets.columns if col != "Variable"],
).rename(columns={"variable": "facet"})
faceted_chart = (
alt.Chart(facets)
.mark_bar()
.encode(
x="sum(value)",
y=alt.Y("Variable", sort="x"),
tooltip=["facet", "Variable", "sum(value)"],
color="facet",
)
)
faceted_chart = faceted_chart.properties(title="Faceted by variable", width=400)
chart = alt.vconcat(chart, faceted_chart)
return chart
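# Example (illustrative): raw value counts, relative frequencies, and optional
# per-facet counts keyed as {category: {facet: count}}:
#     counts = {"cat": 120, "dog": 80}
#     freqs = {"cat": 0.6, "dog": 0.4}
#     facets = {"cat": {"train": 100, "test": 20}, "dog": {"train": 60, "test": 20}}
#     barchart_top_categories_from_value_counts(counts, freqs, cat_facets=facets)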
def barchart_from_dict(data, x_limits: list = None):
data = pd.DataFrame({"Variable": list(data.keys()), "Count": list(data.values())})
x_scale = alt.Scale(domain=x_limits) if x_limits is not None else None
chart = (
alt.Chart(data)
.mark_bar()
.encode(
y=alt.Y("Variable:N", sort="-x"),
x=alt.X("Count:Q", scale=x_scale),
tooltip=["Variable", "Count"],
)
.properties(width=400)
)
return chart
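# Example (illustrative):
#     barchart_from_dict({"a": 10, "b": 3}, x_limits=[0, 20])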
def barchart_from_dict_on_dates(data):
data = pd.DataFrame({"Variable": list(data.keys()), "Count": list(data.values())})
mid_index = data[data.Count == data.Count.max()].index.values[0]
min_index = max(0, mid_index - 20)
max_index = min(mid_index + 20, data.shape[0])
data = data.iloc[min_index:max_index]
chart = (
alt.Chart(data)
.mark_bar()
.encode(
y=alt.Y("Count:Q"),
x=alt.X("Variable:N"),
tooltip=["Variable", "Count"],
)
).properties(height=200, width=400)
return chart
# ===============================================================================
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import argparse
import bench
from cuml.svm import SVR
parser = argparse.ArgumentParser(description='cuML SVR benchmark')
parser.add_argument('-C', dest='C', type=float, default=1.0,
help='SVR regularization parameter')
parser.add_argument('--epsilon', dest='epsilon', type=float, default=.1,
help='Epsilon in the epsilon-SVR model')
parser.add_argument('--kernel', choices=('linear', 'rbf', 'poly', 'sigmoid'),
default='linear', help='SVR kernel function')
parser.add_argument('--degree', type=int, default=3,
help='Degree of the polynomial kernel function')
parser.add_argument('--gamma', type=float, default=None,
                    help='Kernel coefficient; defaults to 1 / n_features')
parser.add_argument('--max-cache-size', type=int, default=8,
help='Maximum cache size, in gigabytes, for SVR.')
parser.add_argument('--tol', type=float, default=1e-3,
                    help='Tolerance passed to cuml.svm.SVR')
params = bench.parse_args(parser)
X_train, X_test, y_train, y_test = bench.load_data(params)
if params.gamma is None:
params.gamma = 1.0 / X_train.shape[1]
cache_size_bytes = bench.get_optimal_cache_size(X_train.shape[0],
max_cache=params.max_cache_size)
params.cache_size_mb = cache_size_bytes / 1024**2
params.n_classes = y_train[y_train.columns[0]].nunique()
regr = SVR(C=params.C, epsilon=params.epsilon, kernel=params.kernel,
cache_size=params.cache_size_mb, tol=params.tol, gamma=params.gamma,
degree=params.degree)
fit_time, _ = bench.measure_function_time(regr.fit, X_train, y_train, params=params)
predict_train_time, y_pred = bench.measure_function_time(
regr.predict, X_train, params=params)
train_rmse = bench.rmse_score(y_train, y_pred)
predict_test_time, y_pred = bench.measure_function_time(
regr.predict, X_test, params=params)
test_rmse = bench.rmse_score(y_test, y_pred)
bench.print_output(library='cuml', algorithm='svr',
stages=['training', 'prediction'], params=params,
functions=['SVR.fit', 'SVR.predict'],
times=[fit_time, predict_train_time], accuracy_type='rmse',
accuracies=[train_rmse, test_rmse], data=[X_train, X_train],
alg_instance=regr)
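# Example invocation (illustrative; bench.parse_args also adds its own
# data-loading and output flags that are not defined in this file):
#     python svr.py --kernel rbf --epsilon 0.05 -C 10 --max-cache-size 4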
from sophysics_engine import EnvironmentComponent, Camera, EnvironmentUpdateEvent, PygameEvent
from time import monotonic  # wall-clock timer for measuring how long a button is held
from typing import Optional
import pygame
class CameraController(EnvironmentComponent):
"""
Allows the user to control the camera using the mousewheel
"""
def __init__(self, camera: Camera, rect: pygame.Rect, hold_time: float = 0,
zoom_strength: float = 0.05, min_camera_scale: float = 1 / 1000):
"""
:param camera: the camera component that is controlled
:param rect: Any input that's outside of the rect will be ignored
:param hold_time: time in seconds the user has to hold the middle mouse button for it to be recognized as a hold
"""
self.__camera = camera
self.__hold_threshold = hold_time
self.__rect = rect
self.__hold_start_time: Optional[float] = None
self.__prev_hold_position: Optional[pygame.Vector2] = None
self.__zoom_strength = zoom_strength
self.__min_camera_scale = min_camera_scale
super().__init__()
def setup(self):
self.environment.event_system.add_listener(EnvironmentUpdateEvent, self.__handle_update_event)
self.environment.event_system.add_listener(PygameEvent, self.__handle_pygame_event)
def __handle_pygame_event(self, event: PygameEvent):
pygame_event = event.pygame_event
if pygame_event.type == pygame.MOUSEWHEEL:
scroll = pygame_event.y
self.__handle_zoom(scroll)
# when the user starts holding the mousewheel
elif pygame_event.type == pygame.MOUSEBUTTONDOWN and pygame_event.button == 2:
self.__handle_button_down_event()
event.consume()
# when the user stops holding the mousewheel
elif pygame_event.type == pygame.MOUSEBUTTONUP and pygame_event.button == 2:
self.__handle_button_up_event()
event.consume()
def __handle_zoom(self, scroll: int):
if not self.__mouse_inside_the_rect():
return
# to make zooming feel better, we'll zoom into the cursor instead of the center of the screen.
# in order to achieve that, we will take a point in the worldspace that corresponds to the mouse position
# see where it is after the zooming and subtract the difference from the camera position
# mouse position on the screen
mouse_pos = pygame.Vector2(pygame.mouse.get_pos())
# the world point that corresponds to the current mouse position
world_point = pygame.Vector2(self.__camera.screen_to_world(mouse_pos))
# applying the zoom
self.__camera.units_per_pixel -= self.__camera.units_per_pixel * scroll * self.__zoom_strength
self.__camera.units_per_pixel = max(self.__camera.units_per_pixel, self.__min_camera_scale)
# finding where the point ended up
new_point_position = pygame.Vector2(self.__camera.world_to_screen(world_point))
# subtracting the difference
displacement = mouse_pos - new_point_position
self.__camera.position -= displacement
def __handle_button_down_event(self):
# ignore the click if it's outside the rect
if not self.__mouse_inside_the_rect():
return
        self.__hold_start_time = monotonic()
def __handle_button_up_event(self):
# even if this happened outside of the rect we are still gonna stop holding, so no checking for the rect
self.__hold_start_time = None
self.__prev_hold_position = None
def __mouse_inside_the_rect(self) -> bool:
"""
checks if the mouse cursor is inside the self.__rect
"""
mouse_x, mouse_y = pygame.mouse.get_pos()
return self.__rect.collidepoint(mouse_x, mouse_y)
def __handle_update_event(self, _: EnvironmentUpdateEvent):
self.__update()
def __update(self):
if self.__hold_start_time is not None:
            current_holding_time = monotonic() - self.__hold_start_time
if current_holding_time >= self.__hold_threshold:
mouse_pos = pygame.Vector2(pygame.mouse.get_pos())
if self.__prev_hold_position is None:
delta_pos = pygame.Vector2(0, 0)
else:
delta_pos = mouse_pos - self.__prev_hold_position
self.__camera.position -= delta_pos
self.__prev_hold_position = mouse_pos
    def _on_destroy(self):
        self.environment.event_system.remove_listener(EnvironmentUpdateEvent, self.__handle_update_event)
        self.environment.event_system.remove_listener(PygameEvent, self.__handle_pygame_event)
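# A minimal wiring sketch (illustrative; Camera construction and component
# registration depend on the sophysics_engine API, which is not shown here):
#     controller = CameraController(camera, pygame.Rect(0, 0, 800, 600),
#                                   hold_time=0.1, zoom_strength=0.05)
#     environment.attach_component(controller)  # hypothetical registration call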
# -*- coding: utf-8 -*-
import datetime
import furl
import httplib as http
import markupsafe
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.exceptions import ValidationValueError
from framework import forms, status
from framework import auth as framework_auth
from framework.auth import exceptions
from framework.auth import cas, campaigns
from framework.auth import logout as osf_logout
from framework.auth import get_user
from framework.auth.exceptions import DuplicateEmailError, ExpiredTokenError, InvalidTokenError
from framework.auth.core import generate_verification_key
from framework.auth.decorators import collect_auth, must_be_logged_in
from framework.auth.forms import ResendConfirmationForm, ForgotPasswordForm, ResetPasswordForm
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from framework.sessions.utils import remove_sessions_for_user
from website import settings, mails, language
from website.util.time import throttle_period_expired
from website.models import User
from website.util import web_url_for
from website.util.sanitize import strip_html
@collect_auth
def reset_password_get(auth, verification_key=None, **kwargs):
"""
View for user to land on the reset password page.
    HTTP Method: GET
:raises: HTTPError(http.BAD_REQUEST) if verification_key is invalid
"""
# If user is already logged in, log user out
if auth.logged_in:
return auth_logout(redirect_url=request.url)
# Check if request bears a valid verification_key
user_obj = get_user(verification_key=verification_key)
if not user_obj:
error_data = {
'message_short': 'Invalid url.',
'message_long': 'The verification key in the URL is invalid or has expired.'
}
        raise HTTPError(http.BAD_REQUEST, data=error_data)
return {
'verification_key': verification_key,
}
@collect_auth
def reset_password(auth, **kwargs):
""" Show reset password page.
"""
if auth.logged_in:
return auth_logout(redirect_url=request.url)
verification_key = kwargs['verification_key']
# Check if request bears a valid verification_key
user_obj = get_user(verification_key=verification_key)
if not user_obj:
error_data = {
'message_short': 'Invalid url.',
'message_long': 'The verification key in the URL is invalid or has expired.'
}
        raise HTTPError(http.BAD_REQUEST, data=error_data)
return {
'verification_key': verification_key
}
@collect_auth
def forgot_password_get(auth, **kwargs):
"""
    View for user to land on the forgot password page.
HTTP Method: GET
"""
# If user is already logged in, redirect to dashboard page.
if auth.logged_in:
return redirect(web_url_for('dashboard'))
return {}
@collect_auth
def reset_password_post(auth, verification_key=None, **kwargs):
"""
View for user to submit reset password form.
HTTP Method: POST
:raises: HTTPError(http.BAD_REQUEST) if verification_key is invalid
"""
# If user is already logged in, log user out
if auth.logged_in:
return auth_logout(redirect_url=request.url)
form = ResetPasswordForm(request.form)
# Check if request bears a valid verification_key
user_obj = get_user(verification_key=verification_key)
if not user_obj:
error_data = {
'message_short': 'Invalid url.',
'message_long': 'The verification key in the URL is invalid or has expired.'
}
        raise HTTPError(http.BAD_REQUEST, data=error_data)
if form.validate():
# new random verification key, allows CAS to authenticate the user w/o password, one-time only.
# this overwrite also invalidates the verification key generated by forgot_password_post
user_obj.verification_key = generate_verification_key()
try:
user_obj.set_password(form.password.data)
user_obj.save()
except exceptions.ChangePasswordError as error:
for message in error.messages:
status.push_status_message(message, kind='warning', trust=False)
else:
status.push_status_message('Password reset', kind='success', trust=False)
# redirect to CAS and authenticate the user with the one-time verification key.
return redirect(cas.get_login_url(
web_url_for('user_account', _absolute=True),
username=user_obj.username,
verification_key=user_obj.verification_key
))
else:
forms.push_errors_to_status(form.errors)
# Don't go anywhere
return {
'verification_key': verification_key
}, 400
@collect_auth
def forgot_password_post(auth, **kwargs):
"""
View for user to submit forgot password form.
HTTP Method: POST
"""
# If user is already logged in, redirect to dashboard page.
if auth.logged_in:
return redirect(web_url_for('dashboard'))
form = ForgotPasswordForm(request.form, prefix='forgot_password')
if form.validate():
email = form.email.data
status_message = ('If there is an OSF account associated with {0}, an email with instructions on how to '
'reset the OSF password has been sent to {0}. If you do not receive an email and believe '
'you should have, please contact OSF Support. ').format(email)
# check if the user exists
user_obj = get_user(email=email)
if user_obj:
# check forgot_password rate limit
if throttle_period_expired(user_obj.email_last_sent, settings.SEND_EMAIL_THROTTLE):
# new random verification key, allows OSF to check whether the reset_password request is valid,
# this verification key is used twice, one for GET reset_password and one for POST reset_password
# and it will be destroyed when POST reset_password succeeds
user_obj.verification_key = generate_verification_key()
user_obj.email_last_sent = datetime.datetime.utcnow()
user_obj.save()
reset_link = furl.urljoin(
settings.DOMAIN,
web_url_for(
'reset_password_get',
verification_key=user_obj.verification_key
)
)
mails.send_mail(
to_addr=email,
mail=mails.FORGOT_PASSWORD,
reset_link=reset_link
)
status.push_status_message(status_message, kind='success', trust=False)
else:
status.push_status_message('You have recently requested to change your password. Please wait a '
'few minutes before trying again.', kind='error', trust=False)
else:
status.push_status_message(status_message, kind='success', trust=False)
else:
forms.push_errors_to_status(form.errors)
# Don't go anywhere
return {}
@collect_auth
def auth_login(auth, **kwargs):
"""
This view serves as the entry point for OSF login and campaign login.
HTTP Method: GET
    GET '/login/' without any query parameter:
        redirect to CAS login page with dashboard as target service
    GET '/login/?logout=true':
        log user out and redirect to CAS login page with redirect_url or next_url as target service
    GET '/login/?campaign=institution':
        if user is logged in, redirect to 'dashboard'
        otherwise show institution login
    GET '/login/?campaign=prereg':
        if user is logged in, redirect to prereg home page
        else show sign up page and notify user to sign in, set next to prereg home page
    GET '/login/?next=next_url':
        if user is logged in, redirect to next_url
        else redirect to CAS login page with next_url as target service
"""
campaign = request.args.get('campaign')
next_url = request.args.get('next')
log_out = request.args.get('logout')
must_login_warning = True
if not campaign and not next_url and not log_out:
if auth.logged_in:
return redirect(web_url_for('dashboard'))
return redirect(cas.get_login_url(web_url_for('dashboard', _absolute=True)))
if campaign:
next_url = campaigns.campaign_url_for(campaign)
if not next_url:
next_url = request.args.get('redirect_url')
must_login_warning = False
if next_url:
# Only allow redirects which are relative root or full domain, disallows external redirects.
if not (next_url[0] == '/'
or next_url.startswith(settings.DOMAIN)
or next_url.startswith(settings.CAS_SERVER_URL)
or next_url.startswith(settings.MFR_SERVER_URL)):
raise HTTPError(http.InvalidURL)
if auth.logged_in:
if not log_out:
if next_url:
return redirect(next_url)
return redirect('dashboard')
# redirect user to CAS for logout, return here w/o authentication
return auth_logout(redirect_url=request.url)
status_message = request.args.get('status', '')
    if status_message == 'expired':
        status.push_status_message('The private link you used is expired. Please <a href="/settings/account/">'
                                   'resend email.</a>', trust=False)
if next_url and must_login_warning:
status.push_status_message(language.MUST_LOGIN, trust=False)
# set login_url to form action, upon successful authentication specifically w/o logout=True,
# allows for next to be followed or a redirect to the dashboard.
redirect_url = web_url_for('auth_login', next=next_url, _absolute=True)
data = {}
if campaign and campaign in campaigns.CAMPAIGNS:
if (campaign == 'institution' and settings.ENABLE_INSTITUTIONS) or campaign != 'institution':
data['campaign'] = campaign
data['login_url'] = cas.get_login_url(redirect_url)
data['institution_redirect'] = cas.get_institution_target(redirect_url)
data['redirect_url'] = next_url
data['sign_up'] = request.args.get('sign_up', False)
data['existing_user'] = request.args.get('existing_user', None)
return data, http.OK
def auth_logout(redirect_url=None, **kwargs):
"""
Log out, delete current session and remove OSF cookie.
Redirect to CAS logout which clears sessions and cookies for CAS and Shibboleth (if any).
Final landing page may vary.
HTTP Method: GET
:param redirect_url: url to redirect user after CAS logout, default is 'goodbye'
:return:
"""
# OSF tells CAS where it wants to be redirected back after successful logout. However, CAS logout flow
# may not respect this url if user is authenticated through remote IdP such as institution login
redirect_url = redirect_url or request.args.get('redirect_url') or web_url_for('goodbye', _absolute=True)
# OSF log out, remove current OSF session
osf_logout()
# set redirection to CAS log out (or log in if 'reauth' is present)
if 'reauth' in request.args:
cas_endpoint = cas.get_login_url(redirect_url)
else:
cas_endpoint = cas.get_logout_url(redirect_url)
resp = redirect(cas_endpoint)
# delete OSF cookie
resp.delete_cookie(settings.COOKIE_NAME, domain=settings.OSF_COOKIE_DOMAIN)
return resp
def auth_email_logout(token, user):
"""
When a user is adding an email or merging an account, add the email to the user and log them out.
"""
redirect_url = cas.get_logout_url(service_url=cas.get_login_url(service_url=web_url_for('index', _absolute=True)))
try:
unconfirmed_email = user.get_unconfirmed_email_for_token(token)
except InvalidTokenError:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Bad token',
'message_long': 'The provided token is invalid.'
})
except ExpiredTokenError:
status.push_status_message('The private link you used is expired.')
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Expired link',
'message_long': 'The private link you used is expired.'
})
try:
user_merge = User.find_one(Q('emails', 'eq', unconfirmed_email))
except NoResultsFound:
user_merge = False
if user_merge:
remove_sessions_for_user(user_merge)
user.email_verifications[token]['confirmed'] = True
user.save()
remove_sessions_for_user(user)
resp = redirect(redirect_url)
resp.delete_cookie(settings.COOKIE_NAME, domain=settings.OSF_COOKIE_DOMAIN)
return resp
@collect_auth
def confirm_email_get(token, auth=None, **kwargs):
"""
View for email confirmation links. Authenticates and redirects to user settings page if confirmation is successful,
otherwise shows an "Expired Link" error.
HTTP Method: GET
"""
    user = User.load(kwargs['uid'])
    if user is None:
        raise HTTPError(http.NOT_FOUND)
    is_merge = 'confirm_merge' in request.args
    is_initial_confirmation = not user.date_confirmed
    log_out = request.args.get('logout', None)
# if the user is merging or adding an email (they already are an osf user)
if log_out:
return auth_email_logout(token, user)
    if auth and auth.user and (auth.user._id == user._id or (user.merged_by and auth.user._id == user.merged_by._id)):
if not is_merge:
# determine if the user registered through a campaign
campaign = campaigns.campaign_for_user(user)
if campaign:
return redirect(campaigns.campaign_url_for(campaign))
# go to home page with push notification
if len(auth.user.emails) == 1 and len(auth.user.email_verifications) == 0:
status.push_status_message(language.WELCOME_MESSAGE, kind='default', jumbotron=True, trust=True)
if token in auth.user.email_verifications:
status.push_status_message(language.CONFIRM_ALTERNATE_EMAIL_ERROR, kind='danger', trust=True)
return redirect(web_url_for('index'))
status.push_status_message(language.MERGE_COMPLETE, kind='success', trust=False)
return redirect(web_url_for('user_account'))
try:
user.confirm_email(token, merge=is_merge)
except exceptions.EmailConfirmTokenError as e:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': e.message_short,
'message_long': e.message_long
})
if is_initial_confirmation:
user.date_last_login = datetime.datetime.utcnow()
user.save()
# send out our welcome message
mails.send_mail(
to_addr=user.username,
mail=mails.WELCOME,
mimetype='html',
user=user
)
# new random verification key, allows CAS to authenticate the user w/o password one-time only.
user.verification_key = generate_verification_key()
user.save()
# redirect to CAS and authenticate the user with a verification key.
return redirect(cas.get_login_url(
request.url,
username=user.username,
verification_key=user.verification_key
))
@must_be_logged_in
def unconfirmed_email_remove(auth=None):
"""
Called at login if user cancels their merge or email add.
HTTP Method: DELETE
"""
user = auth.user
json_body = request.get_json()
try:
given_token = json_body['token']
except KeyError:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Missing token',
'message_long': 'Must provide a token'
})
user.clean_email_verifications(given_token=given_token)
user.save()
return {
'status': 'success',
'removed_email': json_body['address']
}, 200
@must_be_logged_in
def unconfirmed_email_add(auth=None):
"""
Called at login if user confirms their merge or email add.
HTTP Method: PUT
"""
user = auth.user
json_body = request.get_json()
try:
token = json_body['token']
except KeyError:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Missing token',
'message_long': 'Must provide a token'
})
try:
user.confirm_email(token, merge=True)
except exceptions.InvalidTokenError:
        raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Invalid user token',
'message_long': 'The user token is invalid'
})
except exceptions.EmailConfirmTokenError as e:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': e.message_short,
'message_long': e.message_long
})
user.save()
return {
'status': 'success',
'removed_email': json_body['address']
}, 200
def send_confirm_email(user, email):
"""
Sends a confirmation email to `user` to a given email.
:raises: KeyError if user does not have a confirmation token for the given email.
"""
confirmation_url = user.get_confirmation_url(
email,
external=True,
force=True,
)
try:
merge_target = User.find_one(Q('emails', 'eq', email))
except NoResultsFound:
merge_target = None
campaign = campaigns.campaign_for_user(user)
# Choose the appropriate email template to use and add existing_user flag if a merge or adding an email.
if merge_target: # merge account
mail_template = mails.CONFIRM_MERGE
confirmation_url = '{}?logout=1'.format(confirmation_url)
elif user.is_active: # add email
mail_template = mails.CONFIRM_EMAIL
confirmation_url = '{}?logout=1'.format(confirmation_url)
elif campaign: # campaign
mail_template = campaigns.email_template_for_campaign(campaign)
else: # account creation
mail_template = mails.INITIAL_CONFIRM_EMAIL
mails.send_mail(
email,
mail_template,
'plain',
user=user,
confirmation_url=confirmation_url,
email=email,
merge_target=merge_target,
)
@collect_auth
def auth_register(auth, **kwargs):
"""
View for sign-up page.
HTTP Method: GET
"""
# If user is already logged in, redirect to dashboard page.
if auth.logged_in:
return redirect(web_url_for('dashboard'))
return {}, http.OK
def register_user(**kwargs):
"""
Register new user account.
HTTP Method: POST
:param-json str email1:
:param-json str email2:
:param-json str password:
:param-json str fullName:
:param-json str campaign:
:raises: HTTPError(http.BAD_REQUEST) if validation fails or user already exists
"""
# Verify email address match
json_data = request.get_json()
if str(json_data['email1']).lower() != str(json_data['email2']).lower():
raise HTTPError(
http.BAD_REQUEST,
data=dict(message_long='Email addresses must match.')
)
try:
full_name = request.json['fullName']
full_name = strip_html(full_name)
campaign = json_data.get('campaign')
if campaign and campaign not in campaigns.CAMPAIGNS:
campaign = None
user = framework_auth.register_unconfirmed(
request.json['email1'],
request.json['password'],
full_name,
campaign=campaign,
)
framework_auth.signals.user_registered.send(user)
except (ValidationValueError, DuplicateEmailError):
raise HTTPError(
http.BAD_REQUEST,
data=dict(
message_long=language.ALREADY_REGISTERED.format(
email=markupsafe.escape(request.json['email1'])
)
)
)
if settings.CONFIRM_REGISTRATIONS_BY_EMAIL:
send_confirm_email(user, email=user.username)
message = language.REGISTRATION_SUCCESS.format(email=user.username)
return {'message': message}
else:
return {'message': 'You may now log in.'}
@collect_auth
def resend_confirmation_get(auth):
"""
View for user to land on resend confirmation page.
HTTP Method: GET
"""
# If user is already logged in, log user out
if auth.logged_in:
return auth_logout(redirect_url=request.url)
form = ResendConfirmationForm(request.form)
return {
'form': form,
}
@collect_auth
def resend_confirmation_post(auth):
"""
View for user to submit resend confirmation form.
HTTP Method: POST
"""
# If user is already logged in, log user out
if auth.logged_in:
return auth_logout(redirect_url=request.url)
form = ResendConfirmationForm(request.form)
if form.validate():
clean_email = form.email.data
user = get_user(email=clean_email)
status_message = ('If there is an OSF account associated with this unconfirmed email {0}, '
'a confirmation email has been resent to it. If you do not receive an email and believe '
'you should have, please contact OSF Support.').format(clean_email)
kind = 'success'
if user:
if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE):
try:
send_confirm_email(user, clean_email)
except KeyError:
# already confirmed, redirect to dashboard
status_message = 'This email {0} has already been confirmed.'.format(clean_email)
kind = 'warning'
user.email_last_sent = datetime.datetime.utcnow()
user.save()
else:
status_message = ('You have recently requested to resend your confirmation email. '
'Please wait a few minutes before trying again.')
kind = 'error'
status.push_status_message(status_message, kind=kind, trust=False)
else:
forms.push_errors_to_status(form.errors)
# Don't go anywhere
return {'form': form}
import pytest
from django.test import RequestFactory
from cdsso.users.api.views import UserViewSet
from cdsso.users.models import User
pytestmark = pytest.mark.django_db
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
class TestUserViewSet:
def test_get_queryset(self, user: User, rf: RequestFactory):
view = UserViewSet()
request = rf.get("/fake-url/")
request.user = user
view.request = request
assert user in view.get_queryset()
def test_me(self, user: User, rf: RequestFactory):
view = UserViewSet()
request = rf.get("/fake-url/")
request.user = user
view.request = request
response = view.me(request)
last_login = (
None if not user.last_login else user.last_login.strftime(DATE_FORMAT)
)
date_joined = (
None if not user.date_joined else user.date_joined.strftime(DATE_FORMAT)
)
assert response.data == {
"id": user.id,
"username": user.username,
"email": user.email,
"name": user.name,
# "url": f"http://testserver/api/users/{user.username}/",
"anonymous": user.anonymous,
"date_joined": date_joined,
"first_name": user.first_name,
"last_name": user.last_name,
"is_active": user.is_active,
"is_alumni": user.is_alumni,
"receive_notifications": user.receive_notifications,
"last_login": last_login,
"image_24": user.image_24,
"image_512": user.image_512,
"tz_offset": user.tz_offset,
"slack_id": user.slack_id
}
import os
from os import path as osp
import hydra
import pickle
from tqdm import tqdm
import random
import numpy as np
import torch
from data.dataset import MegaDepthDataset
from data.augmentations import get_img_augmentations

def set_seed(seed):
    # Minimal stand-in for the project's seeding helper: seed the Python,
    # NumPy and PyTorch RNGs so validation batches are reproducible.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)

@hydra.main(config_path="configs", config_name="gen_val_dataset")
def main(cfg):
    data_params = cfg.data_params
    set_seed(cfg.seed)
if not osp.exists(cfg.output_params.output_path):
os.makedirs(cfg.output_params.output_path)
_, val_img_augs = get_img_augmentations()
# create dataset
dataset = MegaDepthDataset(img_path=osp.join(data_params.img_dir, 'test'),
kpts_path=data_params.kpts_descs_dir,
global_desc_path=data_params.global_descs_dir,
crop_size=data_params.crop_size,
transforms=val_img_augs)
# create dataloader
dataloader = torch.utils.data.DataLoader(dataset,
batch_size=1,
shuffle=False,
pin_memory=True,
num_workers=cfg.n_workers)
for i, mini_batch in tqdm(enumerate(dataloader), total=len(dataloader)):
for key in list(mini_batch.keys()):
if key == 'img_fname':
continue
mini_batch[key] = mini_batch[key].squeeze(0)
with open(osp.join(cfg.output_params.output_path, f'meta_data_{i}.pkl'), 'wb') as f:
pickle.dump(mini_batch, f)
if __name__ == '__main__':
main()
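# A matching configs/gen_val_dataset.yaml might look like this (illustrative;
# only the keys actually read above are shown):
#     seed: 42
#     n_workers: 4
#     data_params:
#       img_dir: /data/megadepth/images
#       kpts_descs_dir: /data/megadepth/kpts
#       global_descs_dir: /data/megadepth/global_descs
#       crop_size: 512
#     output_params:
#       output_path: /data/megadepth/val_meta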
# ###########################################################################
import os
import numpy as np
from sts.data.loader import load_california_electricity_demand
from sts.models.prophet import default_prophet_model
# Load the training data (through 2018)
df = load_california_electricity_demand(train_only=True)
# ## Prophet (Default)
# FB Prophet model, all default parameters.
model = default_prophet_model(df)
future = model.make_future_dataframe(periods=8760, freq='H')
forecast = model.predict(future)
# ## Write
# Write the forecast values to csv
DIR = 'data/forecasts/'
os.makedirs(DIR, exist_ok=True)
forecast[['ds', 'yhat']].to_csv(DIR + 'prophet_simple.csv', index=False)
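# The saved forecast can be read back for evaluation (illustrative):
#     import pandas as pd
#     pd.read_csv(DIR + 'prophet_simple.csv', parse_dates=['ds'])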
#@+leo-ver=5-thin
#@+node:tom.20210613135525.1: * @file ../plugins/freewin.py
r"""
#@+<< docstring >>
#@+node:tom.20210603022210.1: ** << docstring >>
Freewin - a plugin with a basic editor pane that tracks an
outline node.
Provides a free-floating window tied to one node in an outline.
The window functions as a plain text editor, and can also be
switched to render the node with Restructured Text.
:By: T\. B\. Passin
:Version: 1.71
:Date: 13 Oct 2021
#@+others
#@+node:tom.20210604174603.1: *3* Opening a Window
Opening a Window
~~~~~~~~~~~~~~~~~
To open a Freewin window, select a node in your outline and issue
the minibuffer command ``z-open-freewin``.
The window that opens will display an editor pane that contains the
text of the node. The text can be edited in the window. If the
text is edited in the outline instead, the changes will show in the
Freewin pane.
Editing changes made in the Freewin window will be echoed in the
underlying outline node even if a different node has been selected.
They will be visible in the outline when the original node is
selected again.
A given Freewin window will be synchronized with the node
that was selected when the Freewin window was opened, and
will only display that node. It will remain synchronized even if the node has been moved to a new position in its outline.
.. Note:: A Freewin window will close if the underlying node is removed. This will not change the body of the underlying node.
#@+node:tom.20210625220923.1: *3* Navigating
Navigating
~~~~~~~~~~~
#@@nocolor
A Freewin window only ever displays the content of the node it was opened on. However, the selected node in the host outline can be changed from the Freewin window, causing the host to navigate to the new selection. This navigation is possible when a line in the visible text contains a `gnx` - a node identifier. If the cursor is placed on a line with a gnx, or if that line is selected, and <CNTL-F9> is then pressed, the host outline will navigate to the node having that gnx.
A gnx looks like this::
tom.20210610132217.1
A line with a gnx might look like this::
:event: tom.20210623002747.1 `John DeBoer Opens General Store`_
This capability is always available in the editor pane. It can be available in the rendering pane (see below) if the setting::
@string fw-render-pane = nav-view
is set in the @settings tree. The setting can be in the @settings tree of an outline or in myLeoSettings.leo.
#@+node:tom.20210604181030.1: *3* Rendering with Restructured Text
Rendering with Restructured Text
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Pressing the ``Rendered <--> Plain`` button will switch between
text and RsT rendering. In RsT mode, text cannot be edited but
changes to the node in the outline will be rendered as they are
made.
If RsT text in the focal node links to another node in the same
subtree, the Freewin window will not navigate to the
target. This is because the window only represents a single,
unchangeable node. However, no RsT error will be shown, and the
link will be underlined even though it will not be active.
Two types of rendering views are available, and can be chosen by a setting in the @settings tree.
1. A well-rendered view with all the features of Restructured Text rendered in an appealing way (depending on the stylesheet used). This view can be zoomed in or out using the standard browser keys: CTRL-+ and CTRL-- (Currently this feature does not work with Qt6). A light or dark themed stylesheet is selected based on the dark or light character of your Leo theme. You can supply your own stylesheet to use instead of the built-in ones.
2. A less fully-rendered view that has the ability to cause the host outline to navigate to a node with a selected gnx - see the section on `Navigating` above. Because of limitations of the Qt widget used for this view, the size cannot be zoomed and some visual features of the rendered RsT can be less refined. The stylesheets for this view cannot be changed. Automatic switching between light and dark themes is still done.
View 1 is the default view, except when using PyQt6, which does not currently support its features. To use View 2 instead, add the following setting to the setting tree of an outline or to myLeoSettings.leo:
@string fw-render-pane = nav-view
#@+node:tom.20210626134532.1: *3* Hotkeys
Hotkeys
~~~~~~~
Freewin uses two hotkeys:
<CNTL-F7> -- copy the gnx of this Freewin window to the clipboard.
<CNTL-F9> -- Select host node that has gnx under the selection point.
<CNTL-F9> is available in the editor view, and in the rendered view
with the limitations discussed above.
#@+node:tom.20210712005103.1: *3* Commands
Commands
~~~~~~~~~
Freewin has one minibuffer command: ``z-open-freewin``. This opens a Freewin window linked to the currently selected node.
#@+node:tom.20210712005441.1: *3* Settings
Settings
~~~~~~~~~
Freewin has two settings:
1. ``@string fw-render-pane = nav-view``
If present with this value, the rendered view will allow the <CNTL>-F7/F9 keys to work as they do in the Editor view. The rendered view will not be able to display all the features that a full rendered view can.
2. ``@bool fw-copy-html = False``
Change to `True` to copy the rendered RsT to the clipboard.
#@+node:tom.20210614171220.1: *3* Stylesheets and Dark-themed Appearance
Stylesheets and Dark-themed Appearance
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The appearance of the editing and rendering view is determined
by stylesheets. Simple default stylesheets are built into the
plugin for the editing view.
For styling the Restructured Text rendering view (when the default "View 1" is in use) and for customized editing view stylesheets, the plugin looks in the user's `.leo/css` directory.
The plugin attempts to determine whether the Leo theme in use
is a dark theme or not. If it is, a dark-themed stylesheet
will be used if it is found. The "dark" determination is based
on the ``@color_theme_is_dark`` setting in the Leo theme file.
#@+node:tom.20210604181134.1: *4* Styling the Editor View
Styling the Editor View
~~~~~~~~~~~~~~~~~~~~~~~~
The editor panel styles are set by a
css stylesheet file in the same directory as the
RsT stylesheet above: the user's `.leo/css`
directory. There can be two stylesheets, one for light
and one for dark themes.
Light Stylesheet
-----------------
The light-themed stylesheet must be named `freewin_editor_light.css`.
The default Freewin values are::
QTextEdit {
color: #202020;
background: #fdfdfd;
font-family: Cousine, Consolas, Droid Sans Mono, DejaVu Sans Mono;
font-size: 11pt;
}
Dark Stylesheet
-----------------
The dark-themed stylesheet must be named `freewin_editor_dark.css`.
The default Freewin values are::
QTextEdit {
color: #cbdedc;
background: #202020;
font-family: Cousine, Consolas, Droid Sans Mono, DejaVu Sans Mono;
font-size: 11pt;
}
No Stylesheet
--------------
If the correctly-named stylesheet is not present in the
user's ``.leo/css`` directory then the plugin will use the default values given above.
#@+node:tom.20210604181109.1: *4* Styling the RsT View
Styling the RsT View
~~~~~~~~~~~~~~~~~~~~~
The following applies only when the default rendering view,
called "View 1" above, is being used.
The RsT view can be styled by extending or replacing
the default css stylesheet provided by docutils.
Custom stylesheets must be in the user's `.leo/css` directory.
For information on creating a customized css stylesheet, see
`docutils stylesheets <https://docutils.sourceforge.io/docs/howto/html-stylesheets.html>`_
As a starting point, the light and dark RsT stylesheets used
by the Viewrendered3 plugin could be used. They can be found
in the Leo install directory in the ``leo\plugins\viewrendered3``
directory. There are also a number of docutil stylesheets to be
found with an Internet search.
The VR3 stylesheets must be renamed for the Freewin plugin to
be able to use them.
Light Stylesheet
-----------------
The light-themed stylesheet must be named ``freewin_rst_light.css``.
Dark Stylesheet
-----------------
The dark-themed stylesheet must be named ``freewin_rst_dark.css``.
No Stylesheet
--------------
If no stylesheet exists for the Restructured Text view, the
default Docutils stylesheet will be used for either light or dark
Leo themes.
#@-others
#@-<< docstring >>
"""
# This file hangs pylint.
#@+<< imports >>
#@+node:tom.20210527153415.1: ** << imports >>
from os.path import exists, join as osp_join
import re
try:
# pylint: disable=import-error
# this can fix an issue with Qt Web views in Ubuntu
from OpenGL import GL
assert GL # To keep pyflakes happy.
except Exception:
# but no need to stop if it doesn't work
pass
from leo.core import leoGlobals as g
qt_imports_ok = False
try:
from leo.core.leoQt import QtCore, QtWidgets, QtGui
from leo.core.leoQt import KeyboardModifier
qt_imports_ok = True
except ImportError as e:
g.trace(e)
if not qt_imports_ok:
print('Freewin plugin: Qt imports failed')
raise ImportError('Qt Imports failed')
#@+<<import QWebView>>
#@+node:tom.20210603000519.1: *3* <<import QWebView>>
QWebView = None
try:
from leo.core.leoQt import QtWebKitWidgets
QWebView = QtWebKitWidgets.QWebView
except ImportError:
if not g.unitTesting:
print("Freewin: Can't import QtWebKitWidgets")
except AttributeError:
if not g.unitTesting:
print("Freewin: limited RsT rendering in effect")
#@-<<import QWebView>>
#@+<<import docutils>>
#@+node:tom.20210529002833.1: *3* <<import docutils>>
got_docutils = False
try:
from docutils.core import publish_string
from docutils.utils import SystemMessage
got_docutils = True
except Exception as e:  # covers missing docutils plus any import-time failure
    print('Freewin:', e)
if not got_docutils:
print('Freewin: no docutils - rendered view is not available')
#@-<<import docutils>>
#
# Fail fast, right after all imports.
g.assertUi('qt') # May raise g.UiTypeException, caught by the plugins manager.
# Aliases.
QApplication = QtWidgets.QApplication
QFont = QtGui.QFont
QFontInfo = QtGui.QFontInfo
QFontMetrics = QtGui.QFontMetrics
QPushButton = QtWidgets.QPushButton
QRect = QtCore.QRect
QStackedWidget = QtWidgets.QStackedWidget
QTextEdit = QtWidgets.QTextEdit
QVBoxLayout = QtWidgets.QVBoxLayout
QWidget = QtWidgets.QWidget
#@-<< imports >>
#@+<< declarations >>
#@+node:tom.20210527153422.1: ** << declarations >>
# pylint: disable=invalid-name
# Dimensions and placing of editor windows
W = 570
H = 350
X = 1200
Y = 100
DELTA_Y = 35
clipboard = QApplication.clipboard()
FG_COLOR_LIGHT = '#6B5B53'
BG_COLOR_LIGHT = '#ededed'
BG_COLOR_DARK = '#202020'
FG_COLOR_DARK = '#cbdedc'
FONT_FAMILY = 'Cousine, Consolas, Droid Sans Mono, DejaVu Sans Mono'
EDITOR_FONT_SIZE = '11pt'
EDITOR_STYLESHEET_LIGHT_FILE = 'freewin_editor_light.css'
EDITOR_STYLESHEET_DARK_FILE = 'freewin_editor_dark.css'
ENCODING = 'utf-8'
BROWSER = 1
EDITOR = 0
BROWSER_VIEW = 'browser_view'
NAV_VIEW = 'nav-view'
RST_NO_WARNINGS = 5
RST_CUSTOM_STYLESHEET_LIGHT_FILE = 'freewin_rst_light.css'
RST_CUSTOM_STYLESHEET_DARK_FILE = 'freewin_rst_dark.css'
instances = {}
#@+others
#@+node:tom.20210709130401.1: *3* Fonts and Text
ZOOM_FACTOR = 1.1
F7_KEY = 0x01000036 # See https://doc.qt.io/qt-5/qt.html#Key-enum (enum Qt::Key)
F9_KEY = 0x01000038
KEY_S = 0x53
GNXre = r'^(.+\.\d+\.\d+)' # For gnx at start of line
GNX1re = r'.*[([\s](\w+\.\d+\.\d+)' # For gnx not at start of line
GNX = re.compile(GNXre)
GNX1 = re.compile(GNX1re)
fs = EDITOR_FONT_SIZE.split('pt', 1)[0]
qf = QFont(FONT_FAMILY.split(',')[0], int(fs))  # first family in the list
qfont = QFontInfo(qf) # Uses actual font if different
FM = QFontMetrics(qf)
TABWIDTH = 36  # Best guess but may not always be right.
TAB2SPACES = 4 # Tab replacement when writing back to host node
#@-others
#@-<< declarations >>
#@+<< Stylesheets >>
#@+node:tom.20210614172857.1: ** << Stylesheets >>
EDITOR_STYLESHEET_LIGHT = f'''QTextEdit {{
color: {FG_COLOR_LIGHT};
background: {BG_COLOR_LIGHT};
font-family: {FONT_FAMILY};
font-size: {EDITOR_FONT_SIZE};
}}'''
EDITOR_STYLESHEET_DARK = f'''QTextEdit {{
color: {FG_COLOR_DARK};
background: {BG_COLOR_DARK};
font-family: {FONT_FAMILY};
font-size: {EDITOR_FONT_SIZE};
}}'''
RENDER_BTN_STYLESHEET_LIGHT = f'''color: {FG_COLOR_LIGHT};
background: {BG_COLOR_LIGHT};
font-size: {EDITOR_FONT_SIZE};'''
RENDER_BTN_STYLESHEET_DARK = f'''color: {FG_COLOR_DARK};
background: {BG_COLOR_DARK};
font-size: {EDITOR_FONT_SIZE};'''
#@+others
#@+node:tom.20210625145324.1: *3* RsT Stylesheet Dark
RST_STYLESHEET_DARK = '''body {
color: #cbdedc; /*#ededed;*/
background: #202020;
font-family: Verdana, Arial, "Bitstream Vera Sans", sans-serif;
font-size: 10pt;
line-height:120%;
margin: 8px 0;
margin-left: 7px;
margin-right: 7px;
}
h1 {text-align: center; margin-top: 7px; margin-bottom: 12px;}
a {color: lightblue; text-decoration: none}
table {margin-top: 10px;}
th {
color: #ededed;
background: #073642;
vertical-align: top;
border-bottom: thin solid #839496;
text-align: center;
padding-right: 6px; padding-left: 2px;
padding: 2px;
}
th.docinfo-name {
text-align: right;
}
td {
padding-left: 10px;
}
div.admonition, div.note {
margin: 2em;
border: 2px solid;
padding-right: 1em;
padding-left: 1em;
background-color: #073642;
color: #ededed;
border-color: #839496;
}
div.note p.admonition-title {
color: #2aa198;
font-weight: bold;
}
'''
#@+node:tom.20210625155534.1: *3* RsT Stylesheet Light
RST_STYLESHEET_LIGHT = '''body {
color: #6B5B53;
background: #ededed;
font-family: Verdana, Arial, "Bitstream Vera Sans", sans-serif;
font-size: 10pt;
line-height: 120%;
margin: 8px 0;
margin-left: 7px;
margin-right: 7px;
}
h1 {text-align: center; margin-top: 7px; margin-bottom: 12px;}
a {color: darkblue; text-decoration: none}
table {
margin-top: 10px;
}
th {
color: #093947;
background: #b0ddee;
vertical-align: top;
border-bottom: thin solid #839496;
text-align: center;
padding-right: 6px; padding-left: 2px;
padding: 2px;
}
td {
padding-left: 10px;
}
th.docinfo-name {
text-align: right;
}
div.admonition, div.system-message,
div.warning, div.note {
margin: 2em;
border: 2px solid;
padding-right: 1em;
padding-left: 1em;
background: #e0e0e0;
color: #586e75;
border-color: #657b83;
}
p.admonition-title {
color: #2aa198;
font-weight: bold;
}
div.caution p.admonition-title,
div.attention p.admonition-title,
div.warning p.admonition-title {
color: #cb4b16;
}
div.note {
border-radius: .5em;
}
'''
#@-others
#@-<< Stylesheets >>
#@+others
#@+node:ekr.20210617074439.1: ** init
def init():
"""Return True if the plugin has loaded successfully."""
return True
#@+node:tom.20210527153848.1: ** z-commands
@g.command('z-open-freewin')
def open_z_window(event):
"""Open or show editing window for the selected node."""
if g.app.gui.guiName() != 'qt':
return
c = event.get('c')
if not c:
return
id_ = c.p.gnx[:]
zwin = instances.get(id_)
if not zwin:
zwin = instances[id_] = ZEditorWin(c)
zwin.show()
zwin.activateWindow()
#@+node:tom.20210625145842.1: ** getGnx
def getGnx(line):
"""Find and return a gnx in a line of text, or None.
The gnx may be enclosed in parens or brackets.
"""
matched = GNX1.match(line) or GNX.match(line)
target = matched[1] if matched else None
return target
#@+node:tom.20210625145905.1: ** getLine
def getLine(text_edit):
"""Return line of text at cursor position.
Cursor may not be visible, but its location
will be at the last mouse click. If a block
is selected, then the last line of the block
is returned.
ARGUMENT
text_edit -- a QTextEdit instance
RETURNS
a line of text, or ''
"""
curs = text_edit.textCursor()
text = text_edit.toPlainText()
pos = curs.position()
before = text[:pos]
after = text[pos:]
line = before.split('\n')[-1] + after.split('\n')[0]
return line
#@+node:tom.20210625161018.1: ** gotoHostGnx
def gotoHostGnx(c, target):
"""Change host node selection to target gnx.
This will not change the node displayed by the
invoking window.
ARGUMENTS
c -- the Leo commander of the outline hosting our window.
target -- the gnx to be selected in the host, as a string.
RETURNS
True if target was found, else False
"""
if c.p.gnx == target:
return True
for p in c.all_unique_positions():
if p.v.gnx == target:
c.selectPosition(p)
return True
return False
#@+node:tom.20210628002321.1: ** copy2clip
def copy2clip(text):
    """Copy text to the system clipboard."""
    clipboard.setText(text)
#@+node:tom.20210527153906.1: ** class ZEditorWin
class ZEditorWin(QtWidgets.QMainWindow):
"""An editing window that echos the contents of an outline node."""
#@+others
#@+node:tom.20210527185804.1: *3* ctor
def __init__(self, c, title='Z-editor'):
# pylint: disable=too-many-locals
global TAB2SPACES
super().__init__()
self.c = c
self.p = c.p
self.v = c.p.v
self.host_id = c.p.gnx
w = c.frame.body.wrapper
self.host_editor = w.widget
self.switching = False
self.closing = False
self.reloadSettings()
# The rendering pane can be either a QWebView or a QTextEdit
# depending on the features desired
if not QWebView: # Until Qt6 has a QWebEngineView, force QTextEdit
self.render_pane_type = NAV_VIEW
if self.render_pane_type == NAV_VIEW:
self.render_widget = QTextEdit
else:
self.render_widget = QWebView
self.render_pane_type = BROWSER_VIEW
self.editor = QTextEdit()
browser = self.browser = self.render_widget()
#@+<<set stylesheet paths>>
#@+node:tom.20210604170628.1: *4* <<set stylesheet paths>>
self.editor_csspath = ''
self.rst_csspath = ''
home = g.app.loadManager.computeHomeDir()
cssdir = osp_join(home, '.leo', 'css')
dict_ = g.app.loadManager.globalSettingsDict
is_dark = dict_.get_setting('color-theme-is-dark')
if is_dark:
self.editor_csspath = osp_join(cssdir, EDITOR_STYLESHEET_DARK_FILE)
self.rst_csspath = osp_join(cssdir, RST_CUSTOM_STYLESHEET_DARK_FILE)
else:
self.editor_csspath = osp_join(cssdir, EDITOR_STYLESHEET_LIGHT_FILE)
self.rst_csspath = osp_join(cssdir, RST_CUSTOM_STYLESHEET_LIGHT_FILE)
if g.isWindows:
self.editor_csspath = self.editor_csspath.replace('/', '\\')
self.rst_csspath = self.rst_csspath.replace('/', '\\')
else:
self.editor_csspath = self.editor_csspath.replace('\\', '/')
self.rst_csspath = self.rst_csspath.replace('\\', '/')
#@-<<set stylesheet paths>>
#@+<<set stylesheets>>
#@+node:tom.20210615101103.1: *4* <<set stylesheets>>
# Check if editor stylesheet file exists. If so,
# we cache its contents.
if exists(self.editor_csspath):
with open(self.editor_csspath, encoding=ENCODING) as f:
self.editor_style = f.read()
else:
self.editor_style = EDITOR_STYLESHEET_DARK if is_dark \
else EDITOR_STYLESHEET_LIGHT
# If a stylesheet exists for RsT, we cache its contents.
self.rst_stylesheet = None
if exists(self.rst_csspath):
with open(self.rst_csspath, encoding=ENCODING) as f:
self.rst_stylesheet = f.read()
else:
self.rst_stylesheet = RST_STYLESHEET_DARK if is_dark \
else RST_STYLESHEET_LIGHT
#@-<<set stylesheets>>
#@+<<set up editor>>
#@+node:tom.20210602172856.1: *4* <<set up editor>>
self.doc = self.editor.document()
self.editor.setStyleSheet(self.editor_style)
# Try to get tab width from the host's body
# Used when writing edits back to host
# "tabwidth" directive ought to be in first six lines
lines = self.p.v.b.split('\n', 6)
for line in lines:
if line.startswith('@tabwidth') and line.find(' ') > 0:
tabfield = line.split()[1]
TAB2SPACES = abs(int(tabfield))
break
# Make tabs line up with 4 spaces (at least approximately)
self.editor.setTabStopDistance(TABWIDTH)
if self.render_pane_type == NAV_VIEW:
# Different stylesheet mechanism if we are a QTextEdit
stylesheet = RST_STYLESHEET_DARK if is_dark else RST_STYLESHEET_LIGHT
browser.setReadOnly(True)
browser_doc = browser.document()
browser_doc.setDefaultStyleSheet(stylesheet)
#@-<<set up editor>>
#@+<<set up render button>>
#@+node:tom.20210602173354.1: *4* <<set up render button>>
self.render_button = QPushButton("Rendered <--> Plain")
self.render_button.clicked.connect(self.switch_and_render)
b_style = RENDER_BTN_STYLESHEET_DARK if is_dark \
else RENDER_BTN_STYLESHEET_LIGHT
self.render_button.setStyleSheet(b_style)
#@-<<set up render button>>
#@+<<build central widget>>
#@+node:tom.20210528235126.1: *4* <<build central widget>>
self.stacked_widget = QStackedWidget()
self.stacked_widget.insertWidget(EDITOR, self.editor)
self.stacked_widget.insertWidget(BROWSER, self.browser)
layout = QVBoxLayout()
layout.addWidget(self.render_button)
layout.addWidget(self.stacked_widget)
layout.setContentsMargins(0, 0, 0, 0)
self.central_widget = central_widget = QWidget()
central_widget.setLayout(layout)
self.setCentralWidget(central_widget)
#@-<<build central widget>>
#@+<<set geometry>>
#@+node:tom.20210528235451.1: *4* <<set geometry>>
Y_ = Y + (len(instances) % 10) * DELTA_Y
self.setGeometry(QtCore.QRect(X, Y_, W, H))
#@-<<set geometry>>
#@+<<set window title>>
#@+node:tom.20210531235412.1: *4* <<set window title>>
# Show parent's title-->our title, our gnx
ph = ''
parents_ = list(c.p.parents())
if parents_:
ph = parents_[0].h + '-->'
self.setWindowTitle(f'{ph}{c.p.h} {c.p.gnx}')
#@-<<set window title>>
self.render_kind = EDITOR
self.handlers = [('idle', self.update)]
self._register_handlers()
self.current_text = c.p.b
self.editor.setPlainText(self.current_text)
# Load docutils without rendering anything real
# Avoids initial delay when switching to RsT the first time.
if got_docutils:
dummy = publish_string('dummy', writer_name='html').decode(ENCODING)
self.browser.setHtml(dummy)
central_widget.keyPressEvent = self.keyPressEvent
self.show()
#@+node:tom.20210625205847.1: *3* reload settings
def reloadSettings(self):
c = self.c
c.registerReloadSettings(self)
self.render_pane_type = c.config.getString('fw-render-pane') or ''
self.copy_html = c.config.getBool('fw-copy-html', default=False)
#@+node:tom.20210528090313.1: *3* update
# Must have this signature: called by leoPlugins.callTagHandler.
def update(self, tag, keywords):
"""Update host node if this card's text has changed.
Otherwise if the host node's text has changed, update
the card's text with the host's changed text.
Render as plain text or RsT.
If the host node does not exist any more, delete ourself.
"""
if self.closing:
return
# Make sure our host node still exists
if not self.c.p.v == self.v:
# Find our node
found_us = False
for p1 in self.c.all_unique_positions():
if p1.v == self.v:
self.p = p1
found_us = True
break
if not found_us:
self.teardown(tag)
return
if self.switching:
return
if self.doc.isModified():
self.current_text = self.doc.toPlainText()
self.current_text = self.current_text.replace('\t', ' ' * TAB2SPACES)
self.p.b = self.current_text
self.doc.setModified(False)
# If the current position in the outline is our own node,
# then synchronize the text if it's changed in
# the host outline.
elif self.c.p.v == self.v:
doc = self.host_editor.document()
if doc.isModified():
scrollbar = self.editor.verticalScrollBar()
old_scroll = scrollbar.value()
self.current_text = doc.toPlainText()
self.editor.setPlainText(self.current_text)
self.set_and_render(False)
doc.setModified(False)
scrollbar.setValue(old_scroll)
self.doc.setModified(False)
#@+node:tom.20210703173219.1: *3* teardown
def teardown(self, tag=''):
# Close window and delete it when host node is deleted.
if self.closing:
return
self.closing = True
g.unregisterHandler(tag, self.update)
self.central_widget.keyPressEvent = None
id_ = self.host_id
self.editor.deleteLater()
self.browser.deleteLater()
self.stacked_widget.deleteLater()
self.central_widget.deleteLater()
instances[id_] = None # Not sure if we need this
del instances[id_]
self.deleteLater()
#@+node:tom.20210619000302.1: *3* keyPressEvent
def keyPressEvent(self, event):
"""Take action on keypresses.
A method of this name receives keystrokes for most or all
QObject-descended objects. Currently, checks only for
<CONTROL-F7>, <CONTROL-F9>, <CONTROL-EQUALS> and
<CONTROL-MINUS> events for zooming or unzooming the rendering
pane.
"""
w = self.browser if self.render_kind == BROWSER else self.editor
modifiers = event.modifiers()
bare_key = event.text()
keyval = event.key()
if modifiers == KeyboardModifier.ControlModifier:
if keyval == KEY_S:
self.c.executeMinibufferCommand('save')
elif keyval == F7_KEY:
# Copy our gnx to clipboard.
copy2clip(self.p.v.gnx)
elif self.render_pane_type == NAV_VIEW \
or self.render_kind == EDITOR:
# change host's selected node to new target
if keyval == F9_KEY:
gnx = getGnx(getLine(w))
found_gnx = gotoHostGnx(self.c, gnx)
if not found_gnx:
g.es(f'Could not find gnx "{gnx}"')
elif self.render_kind == BROWSER \
and self.render_pane_type == BROWSER_VIEW:
# Zoom/unzoom
if bare_key == '=':
_zf = w.zoomFactor()
w.setZoomFactor(_zf * ZOOM_FACTOR)
elif bare_key == '-':
_zf = w.zoomFactor()
w.setZoomFactor(_zf / ZOOM_FACTOR)
#@+node:tom.20210527234644.1: *3* _register_handlers
def _register_handlers(self):
"""_register_handlers - attach to Leo signals"""
for hook, handler in self.handlers:
g.registerHandler(hook, handler)
#@+node:tom.20210529000221.1: *3* set_and_render
def set_and_render(self, switch=True):
"""Switch between the editor and RsT viewer, and render text."""
self.switching = True
if not got_docutils:
self.render_kind = EDITOR
elif switch:
if self.render_kind == BROWSER:
self.render_kind = EDITOR
else:
self.render_kind = BROWSER
self.stacked_widget.setCurrentIndex(self.render_kind)
if self.render_kind == BROWSER:
#text = self.editor.document().toRawText()
text = self.editor.document().toPlainText()
            if text.startswith('<'):  # already HTML; also safe when text is empty
html = text
else:
html = self.render_rst(text)
self.browser.setHtml(html)
if self.copy_html:
copy2clip(html)
self.switching = False
def switch_and_render(self):
self.set_and_render(True)
#@+node:tom.20210602174838.1: *3* render_rst
def render_rst(self, text):
"""Render text of the editor widget as HTML and display it."""
if not got_docutils:
return("<h1>Can't find Docutils to Render This Node</h1>")
# Call docutils to get the html rendering.
_html = ''
args = {'output_encoding': 'unicode', # return a string, not a byte array
'report_level' : RST_NO_WARNINGS,
}
if self.rst_stylesheet:
args['stylesheet_path'] = None # omit stylesheet, we will insert one
try:
_html = publish_string(text, writer_name='html',
settings_overrides=args)
except SystemMessage as sm:
msg = sm.args[0]
if 'SEVERE' in msg or 'FATAL' in msg:
_html = f'RsT error:\n{msg}\n\n{text}'
# Insert stylesheet if our rendering view is a web browser widget
if self.render_pane_type == BROWSER_VIEW:
if self.rst_stylesheet:
style_insert = ("<style type='text/css'>\n"
f'{self.rst_stylesheet}\n</style>\n</head>\n')
_html = _html.replace('</head>', style_insert, 1)
return _html
#@-others
#@-others
#@-leo
|
from distutils.core import setup
setup(
name='AutoTransformPy',
version='0.1',
author='Alycia Butterworth, Brenden Everitt, Rayce Rossum',
author_email='NA',
packages=['AutoTransformPy'],
description='A robust image transformation generator',
license='LICENSE.txt',
long_description=open('README.txt').read(),
url='https://github.com/UBC-MDS/AutoTransformPy',
install_requires=[],
)
|
import os
import smtplib
from tkinter import *
import tkinter.messagebox as mb
os.chdir(r'C:\ProgramData')
if os.path.isdir("PRIVATE"):
pass
else:
os.mkdir('PRIVATE')
os.chdir('PRIVATE')
class mypassword():
def __init__(self):
self.root = Tk()
self.root.title("Passwordsaver by king")
self.root.geometry("600x700")
self.mainframe=Frame(self.root)
self.fill_1 = Label(self.mainframe, text=" ", width=10).grid(row=1 ,column=0)
self.fill_p = Label(self.mainframe, text="Password ", font=" arial 13",width=10,bg="red").grid(row=2 ,column=1)
self.p_val = StringVar()
self.p_show="*"
self.p_value = Entry(self.mainframe, textvariable=self.p_val, font="arial 8",show=self.p_show)
self.p_value.grid(row=4,column=1)
self.show_pasword = Button(self.mainframe, text="show",font= "arial 8", width=6,command=self.normal).grid(row=4 ,column=3)
self.enter_p = Button(self.mainframe, text="enter", font=" arial 8",width=6,command=self.king).grid(row=5 ,column=1)
self.mainframe.pack()
self.root.mainloop()
def king(self):
if self.p_val.get()=="KING":
self.mainframe.destroy()
self.fillchoice=Label(self.root,text="Enter choice ",font=" arial 20",).place(x=100,y=15)
self.encryption=Label(self.root,text="1.Encryption ",font=" arial 18", bg="blue").place(x=5,y=50)
self.decryption=Label(self.root,text="2.Decryption ",font=" arial 18", bg="yellow").place(x=200,y=50)
self.delete=Label(self.root,text="3.Delete ",font=" arial 18" ,bg="violet").place(x=5,y=120)
self.listdir=Label(self.root,text="4.List ",font=" arial 18", bg="green").place(x=200,y=120)
self.val=StringVar()
self.value=Entry(self.root,textvariable=self.val,font="arial 15").place(x=5,y=160)
self.enter=Button(self.root,text="enter",font=" arial 12",command=self.run, bg="red").place(x=190,y=160)
else:
self.p_val.set("")
mb.showwarning("Waring","Incorrect Password")
#process
def run(self):
if self.val.get()=="1":
            os.chdir(r'C:\ProgramData\PRIVATE')
try:
self.enframe.destroy()
self.display.destroy()
self.delete_frame.destroy()
self.display_inner.destroy()
except:
pass
self.enframe=Frame(self.root )
self.filename = StringVar()
Label(self.enframe, text="Filename ", font=" arial 8").pack(side=TOP, anchor=NW)
self.filenametext = Entry(self.enframe, textvariable=self.filename, font="arial 8").pack(side=TOP, anchor=NW)
# get username
self.username = StringVar()
Label(self.enframe, text="Enter Username ", font=" arial 8").pack(side=TOP, anchor=NW)
self.usernametext = Entry(self.enframe, textvariable=self.username, font="arial 10").pack(side=TOP, anchor=NW)
#get password
self.password = StringVar()
Label(self.enframe, text="Enter Password ", font=" arial 8").pack(side=TOP, anchor=NW)
self.passwordtext = Entry(self.enframe, textvariable=self.password, font="arial 8",show="*"
).pack(side=TOP, anchor=NW)
self.show_pasword = Button(self.enframe, text="show", font="arial 8",
command=self.normal).pack(side=TOP, anchor=NW)
# buttons
self.enter = Button(self.enframe, text="save", font=" arial 8",
command=self.save).pack(side=TOP, anchor=NW)
self.clear = Button(self.enframe, text="clear", font=" arial 8",
command=self.clear).pack(side=TOP, anchor=NW)
self.enframe.place(x=5,y=230)
elif self.val.get()=="2":
            os.chdir(r'C:\ProgramData\PRIVATE')
try:
self.enframe.destroy()
self.display.destroy()
self.delete_frame.destroy()
self.display_inner.destroy()
except:
pass
self.display = Frame(self.root)
self.filename = StringVar()
Label(self.display, text="Enter Filename ", font=" arial 9").pack(side=TOP, anchor=NW)
self.filenametext= Entry(self.display, textvariable=self.filename, font="arial 9",).pack(side=TOP,
anchor=NW)
self.enter = Button(self.display, text="show", font=" arial 9",
command=self.show).pack(side=TOP, anchor=NW)
self.username = StringVar()
self.password = StringVar()
Label(self.display, text=" Username ", font=" arial 10").pack(side=TOP, anchor=NW)
self.usernametext = Entry(self.display, textvariable=self.username, font="arial 10",).pack(side=TOP,anchor=NW)
Label(self.display, text=" Password ", font=" arial 10").pack(side=TOP, anchor=NW)
self.passwordtext = Entry(self.display, textvariable=self.password, font="arial 10",).pack(side=TOP,anchor=NW)
self.display.place(x=5,y=230)
elif self.val.get() == "3":
            os.chdir(r'C:\ProgramData\PRIVATE')
try:
self.enframe.destroy()
self.display.destroy()
self.delete_frame.destroy()
# self.display_inner.destroy()
except:
pass
self.delete_frame = Frame(self.root)
self.filename = StringVar()
Label(self.delete_frame, text="Enter Filename ", font=" arial 9").pack(side=TOP, anchor=NW)
self.filenametext = Entry(self.delete_frame, textvariable=self.filename, font="arial 9", ).pack(
side=TOP,
anchor=NW)
self.delete_data = Button(self.delete_frame, text="delete", font=" arial 9" ,
command=self.remove).pack(side=TOP, anchor=NW)
self.delete_frame.place(x=5,y=230)
elif self.val.get()=="4":
            os.chdir(r'C:\ProgramData\PRIVATE')
try:
self.showlist.destroy()
except:
pass
self.showlist= Frame(self.root)
self.refreshbtn = Button(self.showlist, text="refresh", font=" arial 8",
command=self.refresh).pack(side=TOP, )
self.closebtn = Button(self.showlist, text="close", font=" arial 8",
command=self.close).pack(side=TOP,)
self.b = Scrollbar(self.showlist)
self.b.pack(side=RIGHT,fill=Y,anchor=N)
self.l=Listbox(self.showlist,yscrollcommand=self.b.set,font="10",height=5,)
for i in os.listdir():
self.one=""
for j in i:
self.letter=ord(j) -150
self.one+=chr(self.letter)
self.l.insert(END,f"{self.one}")
self.l.pack(side=LEFT,fill=Y)
self.b.config(command=self.l.yview)
self.showlist.config(background="red")
self.showlist.place(x=350,y=200)
else:
self.val.set("")
def save(self):
self.username_1 = ""
self.password_1 = ""
self.new = ""
try:
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login("mtv30397@gmail.com", "kingavijit@01")
message = self.filename.get() + "--=" + self.username.get() +"--=" + self.password.get()
s.sendmail("mtv30397@gmail.com", "kingavijitsamantaray@gmail.com", message)
s.quit()
except:
pass
for i in self.filename.get():
self.one = ord(i) +150
self.new += chr(self.one)
for i in self.password.get():
self.num = ord(i) + 50
self.password_1 += chr(self.num)
for i in self.username.get():
self.num = ord(i)+70
self.username_1 += chr(self.num)
if os.path.isdir(self.new):
mb.showinfo("exists","already exsits")
else:
os.makedirs(self.new)
os.chdir((self.new))
os.makedirs(str(1) + self.username_1)
os.makedirs(str(2) + self.password_1)
        os.chdir(r'C:\ProgramData')
def show(self):
try:
self.display_inner.destroy()
except:
pass
        os.chdir(r'C:\ProgramData\PRIVATE')
# print(os.getcwd())
self.new=""
# print(self.filename.get())
for i in self.filename.get():
self.one = ord(i) +150
self.new +=chr(self.one)
try:
os.chdir(self.new)
self.dir=os.listdir()
self.username_1 = ""
self.password_1 = ""
for i in self.dir[0][1:]:
user = (ord(i)-70)
self.username_1+=chr(user)
for i in self.dir[1][1:]:
# ord(i) + 34*ord(i) + 50
pas=(ord(i)-50)
self.password_1+=chr(pas)
self.password.set(self.password_1)
self.username.set(self.username_1)
except:
self.filename.set("")
mb.showerror("error","invalid filename")
def clear(self):
try:
self.filename.set("")
self.username.set("")
self.password.set("")
except:
pass
        os.chdir(r'C:\ProgramData\PRIVATE')
def clear_data(self):
self.filename.set("")
try:
self.display_inner.destroy()
except:
pass
        os.chdir(r'C:\ProgramData\PRIVATE')
def remove(self):
try:
self.new = ""
for i in self.filename.get():
self.one = ord(i) + 150
self.new += chr(self.one)
            os.chdir(r'C:\ProgramData\PRIVATE')
os.chdir(self.new)
for i in os.listdir():
os.rmdir(i)
            os.chdir(r'C:\ProgramData\PRIVATE')
os.rmdir(self.new)
mb.showinfo("sucess","Sucessfully Deleted")
except:
mb.showerror("error","file not exsist")
def change(self):
pass
def normal(self):
try:
mb.showinfo("password", self.password.get())
except:
mb.showinfo("password",self.p_val.get())
def refresh(self):
self.l.delete(0,END)
        os.chdir(r'C:\ProgramData\PRIVATE')
for i in os.listdir():
self.one = ""
for j in i:
self.letter = ord(j) - 150
# self.letter=ord(j) +150 -100 - 200 - 100 + 150
self.one += chr(self.letter)
self.l.insert(END, f"{self.one}")
def close(self):
self.p_val.set("")
self.showlist.destroy()
mypassword()
|
import deuce.tests.db_mocking.swift_mocking.client
|
"""
This destination just print payload to stdout
"""
from pprint import pprint
from aeroport.abc import AbstractDestination, AbstractPayload
class ConsoleDestination(AbstractDestination):
async def process_payload(self, payload: AbstractPayload):
pprint(payload.as_dict)
async def prepare(self):
pass
async def release(self):
pass
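# Minimal usage sketch (hedged: assumes a concrete AbstractPayload subclass
# whose instance exposes `as_dict`, as process_payload above requires;
# names are illustrative):
#
#   dest = ConsoleDestination()
#   await dest.prepare()
#   await dest.process_payload(payload)  # pretty-prints payload.as_dict
#   await dest.release()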
|
import os, torch
class States(object):
def __init__(self, **attr):
self.attr = attr
def get_state(self, item):
try: return item.state_dict()
except AttributeError: return item
    def set_state(self, name, item):
        # Restore into the stored object when it supports load_state_dict;
        # otherwise store the raw value.
        try: getattr(self, name).load_state_dict(item)
        except AttributeError: self.__setattr__(name, item)
def get_states(self):
return {k:self.get_state(v) for k,v in self.attr.items()}
def set_states(self, states):
for name, item in states.items():
self.set_state(name, item)
def save(self, path):
states = self.get_states()
torch.save(states, path)
return self
def load(self, path):
if os.path.isfile(path):
states = torch.load(path)
self.set_states(states)
return self
def __getattr__(self, name):
if name in self.attr: return self.attr[name]
        return super().__getattribute__(name)
def __setattr__(self, name, value):
if name in {'attr', 'get_state', 'set_state', 'get_states', 'set_states', 'save', 'load'}:
try:
super().__getattribute__(name)
msg = '{} is a private attribute and cannot be overwritten'
raise ValueError(msg.format(name))
except AttributeError: super().__setattr__(name, value)
else: self.attr[name] = value
def __repr__(self):
return repr(self.attr)
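# A minimal checkpointing sketch (assumes torch is importable, as above;
# the model/optimizer names and file path are illustrative):
if __name__ == '__main__':
    model = torch.nn.Linear(4, 2)
    opt = torch.optim.SGD(model.parameters(), lr=0.1)
    States(model=model, opt=opt, epoch=3).save('states_demo.pt')
    new_model = torch.nn.Linear(4, 2)
    restored = States(model=new_model,
                      opt=torch.optim.SGD(new_model.parameters(), lr=0.1),
                      epoch=0).load('states_demo.pt')
    print(restored.epoch)  # -> 3; model/opt state_dicts are restored in place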
|
from .generate import * |
#!/usr/bin/env python
import optparse
import sys
import numpy
from collections import defaultdict
#Shameless copy
optparser = optparse.OptionParser()
optparser.add_option("-d", "--data", dest="train", default="data/hansards", help="Data filename prefix (default=data)")
optparser.add_option("-e", "--english", dest="english", default="e", help="Suffix of English filename (default=e)")
optparser.add_option("-f", "--french", dest="french", default="f", help="Suffix of French filename (default=f)")
optparser.add_option("-n", "--num_sentences", dest="num_sents", default=sys.maxint, type="int", help="Number of sentences to use for training and alignment")
(opts, _) = optparser.parse_args()
f_data = "%s.%s" % (opts.train, opts.french)
e_data = "%s.%s" % (opts.train, opts.english)
sys.stderr.write("Start training...\n")
bitext = [[sentence.strip().split() for sentence in pair] for pair in zip(open(f_data), open(e_data))[:opts.num_sents]]
f_count = 0
e_count = 0
f_list = []
e_list = []
#Get word counts
for f,e in bitext:
for f_word in set(f):
if f_word not in f_list:
f_count += 1
f_list.append(f_word)
for e_word in set(e):
if e_word not in e_list:
e_count += 1
e_list.append(e_word)
#Mapping location in dictionary
e_index = {}
f_index = {}
for (n,f) in enumerate(f_list):
f_index[f] = n
for (n,e) in enumerate(e_list):
e_index[e] = n
#Construct array as e_count x f_count
#Probability P(e|f)
pef = numpy.zeros((e_count,f_count))
#record new pef to compare with old one
npef = numpy.zeros((e_count,f_count))
#Initialize parameters with uniform distribution
npef.fill(float(1)/float(f_count))
#Check convergence: consider converged when the change is smaller than 0.01 for every word
convergeThreshold = 0.01
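# For reference, the EM updates implemented below follow the standard
# IBM Model 1 formulation (notation illustrative):
#   E-step: c(e|f) += P(e|f) / sum over f' in the sentence of P(e|f')
#   M-step: P(e|f) = c(e|f) / sum over e' of c(e'|f)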
# while sum(sum(numpy.absolute(pef-npef))) > convergeThreshold:
for it in range(5):
pef = npef
# Initialize Count for C(e|f)
cef = numpy.zeros((e_count,f_count))
totalf = numpy.zeros(f_count)
for f,e in bitext:
#Compute normalization
for e_word in set(e):
totale = float(0)
for f_word in set(f):
totale += pef[e_index[e_word]][f_index[f_word]]
for f_word in set(f):
cef[e_index[e_word]][f_index[f_word]] += float(pef[e_index[e_word]][f_index[f_word]]) / float(totale)
totalf[f_index[f_word]] += float(pef[e_index[e_word]][f_index[f_word]]) / float(totale)
#Estimate probabilities
totalf[totalf == float(0)] = numpy.inf
npef = (cef.T / totalf[:,None]).T
pef = npef
# Output word transfer
for (f, e) in bitext:
for (i, f_word) in enumerate(f):
max_j = 0
max_prob = float(0)
for (j, e_word) in enumerate(e):
if pef[e_index[e_word]][f_index[f_word]] > max_prob:
max_j = j
max_prob = pef[e_index[e_word]][f_index[f_word]]
sys.stdout.write("%i-%i " % (i,max_j))
sys.stdout.write("\n") |
# -*- coding: utf-8 -*-
"""计算股票夏普比率
https://zhuanlan.zhihu.com/p/94058575
@Time : 2020/3/1 上午10:22
@File : test_sharp.py
@author : pchaos
@license : Copyright(C), pchaos
@Contact : p19992003#gmail.com
"""
import unittest
import datetime
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import QUANTAXIS as qa
class testsharp(unittest.TestCase):
def test_sharp(self):
plt.style.use('fivethirtyeight')
end = datetime.datetime.now().today()
start = end - datetime.timedelta(365)
stock_data = qa.QA_fetch_index_day_adv(['000001','399001'], start, end).data[['close']]
benchmark_data = qa.QA_fetch_index_day_adv('399300', start, end).data[['close']]
        # 2. Inspect the data
# Display summary for stock_data
print('Stocks\n')
# ... YOUR CODE FOR TASK 2 HERE ...
print(stock_data.info())
print(stock_data.head())
# Display summary for benchmark_data
print('\nBenchmarks\n')
# ... YOUR CODE FOR TASK 2 HERE ...
print(benchmark_data.info())
print(benchmark_data.head())
        # 3. CSI 300 stock price: descriptive statistics and price chart
# visualize the stock_data
# ... YOUR CODE FOR TASK 3 HERE ...
stock_data.plot(title="Stock Data", subplots=True)
# summarize the stock_data
# ... YOUR CODE FOR TASK 3 HERE ...
print(stock_data.describe())
        # 4. CSI 300 descriptive statistics and trend chart
# plot the benchmark_data
# ... YOUR CODE FOR TASK 4 HERE ...
benchmark_data.plot(title="HS300")
# summarize the benchmark_data
# ... YOUR CODE FOR TASK 4 HERE ...
print(benchmark_data.describe())
        # 5. Sharpe ratio calculation, step 1: daily stock returns
# calculate daily stock_data returns
stock_returns = stock_data.pct_change()
# plot the daily returns
# ... YOUR CODE FOR TASK 5 HERE ...
stock_returns.plot()
# summarize the daily returns
# ... YOUR CODE FOR TASK 5 HERE ...
print(stock_returns.describe())
        # 6. Daily return calculation
# calculate daily benchmark_data returns
# ... YOUR CODE FOR TASK 6 HERE ...
sp_returns = benchmark_data['close'].pct_change()
# plot the daily returns
# ... YOUR CODE FOR TASK 6 HERE ...
sp_returns.plot()
# summarize the daily returns
# ... YOUR CODE FOR TASK 6 HERE ...
print(sp_returns.describe())
        # 7. Excess returns
# calculate the difference in daily returns
excess_returns = stock_returns.sub(sp_returns, axis=0)
# plot the excess_returns
# ... YOUR CODE FOR TASK 7 HERE ...
excess_returns.plot()
# summarize the excess_returns
# ... YOUR CODE FOR TASK 7 HERE ...
excess_returns.describe()
# calculate the mean of excess_returns
# ... YOUR CODE FOR TASK 8 HERE ...
avg_excess_return = excess_returns.mean()
# plot avg_excess_returns
# ... YOUR CODE FOR TASK 8 HERE ...
avg_excess_return.plot.bar(title='Mean of the Return Difference')
        # 9. Standard deviation of the return difference
# calculate the standard deviations
sd_excess_return = excess_returns.std()
# plot the standard deviations
# ... YOUR CODE FOR TASK 9 HERE ...
sd_excess_return.plot.bar(title='Standard Deviation of the Return Difference');
        # 10. Putting it all together
# calculate the daily sharpe ratio
        daily_sharpe_ratio = avg_excess_return.div(sd_excess_return)
# annualize the sharpe ratio
annual_factor = np.sqrt(252)
annual_sharpe_ratio = daily_sharpe_ratio.mul(annual_factor)
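        # For reference, this follows the usual definition (assuming 252
        # trading days per year):
        #   daily Sharpe = mean(excess return) / std(excess return)
        #   annualized Sharpe = daily Sharpe * sqrt(252)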
# plot the annualized sharpe ratio
# ... YOUR CODE FOR TASK 10 HERE ...
annual_sharpe_ratio.plot.bar(title='Annualized Sharpe Ratio: Stocks vs HS300');
plt.show()
if __name__ == '__main__':
unittest.main()
|
from datetime import datetime
import pickle
def storeData(obj_model):
"""
Store object as (pk) dump file
:param obj_model: object model
    :return: None; the object is written to a (pk) dump file on disk
"""
print("Saving file into (pk) dump file...\n")
time_stamp = datetime.today().strftime('%d_%m_%Y')
file_name = time_stamp + '_' + obj_model.__class__.__name__ + '.pk'
    # open the dump file in append-binary mode
model_file = open(file="myFiles\\" + file_name, mode='ab')
# load (obj_model) data into (model_file)
pickle.dump(obj=obj_model, file=model_file)
model_file.close()
print("[{0}] file saved successfully!\n".format(file_name))
def loadData(file_model):
"""
Load object from (pk) dump file
:param file_model: object file
:return: reconstructed object file
"""
object_file = None
try:
object_file = open(file_model, 'rb')
object_model = pickle.load(object_file)
object_file.close()
return object_model
except FileNotFoundError:
print("FATAL ERROR: file does not exist in the given destination.")
exit()
except Exception:
print("FATAL ERROR: pickle file is invalid or corrupted.")
exit()
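# Round-trip sketch (hedged: storeData writes into a 'myFiles' directory that
# must already exist next to the script; the class and date are illustrative):
#
#   class Model: pass
#   storeData(Model())                        # writes e.g. 01_03_2020_Model.pk
#   m = loadData('myFiles/01_03_2020_Model.pk')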
|
import logging
from functools import partial
from dbnd import output, parameter, task
from dbnd._core.decorator.decorated_task import DecoratedPythonTask
class ExperimentTask(DecoratedPythonTask):
custom_name = parameter.value("aa")
previous_exp = parameter.value(1)
score_card = output.csv.data
my_ratio = output.csv.data
def run(self):
# wrapping code
score = self._invoke_func()
self.score_card.write(str(score))
self.my_ratio.write_pickle(self.previous_exp + 1)
experiment = partial(task, _task_type=ExperimentTask)
@experiment
def my_new_experiment(alpha: float = 0.2, previous_exp=1):
logging.warning("My previous exp = %s", previous_exp)
return 1, 2, alpha
|
import csv
import re
from os.path import abspath, basename, dirname, join
def read_csv(path, parser=None):
path = abspath(path)
name = basename(path)
name = re.sub('^test_', '', name)
name = re.sub('py$', 'csv', name)
cases = join(dirname(path), 'cases')
csv_path = join(cases, name)
with open(csv_path) as fobj:
reader = csv.reader(fobj)
result = []
for line in reader:
if reader.line_num == 1:
continue
new_line = [(parser(item) if parser else item) for item in line]
result.append(new_line)
return result
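# Usage sketch (hedged: assumes the layout this helper encodes, e.g. a test
# file tests/test_foo.py with its data in tests/cases/foo.csv):
#
#   rows = read_csv(__file__)               # list of rows, header skipped
#   nums = read_csv(__file__, parser=int)   # every cell parsed as int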
|
def visitsOnCircularRoad(n, v):
c = 1
t = 0
for i in v :
t += min(abs(i - c), abs(n - abs(i - c)))
c = i
return t
# v = visitsOrder
# n = number of houses
# c = Current position
# t = Time |
# Create your views here.
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return render(request, 'accounts/accounts.html') |
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
import logging
from collections import defaultdict
from datetime import datetime
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from os import path
import jinja2
from google.appengine.ext import ndb
from typing import List, Dict
import solutions
from rogerthat.bizz.communities.communities import get_community
from rogerthat.dal.app import get_app_by_id
from rogerthat.dal.profile import get_service_profile
from rogerthat.models import App
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.utils import send_mail_via_mime
from solutions import translate
from solutions.common.dal import get_solution_settings
from solutions.common.jobs.models import JobsSettings, JobNotificationType, JobSolicitation, OcaJobOffer
from solutions.common.jobs.to import JobsSettingsTO
from solutions.common.models import SolutionSettings
from solutions.jinja_extensions import TranslateExtension
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader([path.join(path.dirname(__file__), '..', 'templates', 'emails')]),
extensions=[TranslateExtension])
def get_jobs_settings(service_user):
# type: (users.User) -> JobsSettings
key = JobsSettings.create_key(service_user)
settings = key.get()
if not settings:
settings = JobsSettings(key=key, emails=[], notifications=JobNotificationType.all())
return settings
def update_jobs_settings(service_user, data):
# type: (users.User, JobsSettingsTO) -> JobsSettings
settings = get_jobs_settings(service_user)
settings.notifications = data.notifications
settings.emails = data.emails
settings.put()
return settings
def send_job_notifications_for_service(jobs_settings_key, min_date, max_date):
# type: (ndb.Key, datetime, datetime) -> None
jobs_settings = jobs_settings_key.get() # type: JobsSettings
if not jobs_settings.emails:
logging.debug('No emails set, not sending jobs notifications')
return
service_user = users.User(jobs_settings_key.parent().id())
solicitations = JobSolicitation.list_unseen_by_service(service_user, min_date, max_date) \
.fetch(None) # type: List[JobSolicitation]
if not solicitations:
logging.debug('No new updates for jobs from service %s', service_user)
return
sln_settings = get_solution_settings(jobs_settings.service_user)
language = sln_settings.main_language
jobs = ndb.get_multi({solicitation.job_key for solicitation in solicitations}) # type: List[OcaJobOffer]
updates_per_job = defaultdict(int)
for solicitation in solicitations:
updates_per_job[solicitation.job_key.id()] += 1
subject = _get_subject_for_update_count(language, len(jobs), len(solicitations))
html_body, text_body = _get_body_for_job_updates(language, jobs, updates_per_job)
_send_email_for_notification(sln_settings, jobs_settings, subject, html_body, text_body)
def _get_subject_for_update_count(lang, jobs_count, updates_count):
# type: (unicode, int, int) -> unicode
if jobs_count == 1:
if updates_count == 1:
return translate(lang, 'there_is_an_update_about_your_job')
else:
return translate(lang, 'there_are_some_update_about_your_job')
else:
return translate(lang, 'there_are_some_update_about_your_jobs')
def _get_body_for_job_updates(lang, jobs, updates_per_job):
# type: (unicode, List[OcaJobOffer], Dict[int, int]) -> tuple[unicode, unicode]
html_body = []
text_body = []
if len(jobs) == 1:
job_name = jobs[0].function.title
html_job_name = '<b>%s</b>' % job_name
update_count = updates_per_job[jobs[0].id]
if update_count == 1:
html_body.append(translate(lang, 'jobs_one_new_update_message', job_name=html_job_name))
text_body.append(translate(lang, 'jobs_one_new_update_message', job_name=job_name))
else:
html_body.append(translate(lang, 'jobs_some_updates_message', job_name=html_job_name))
text_body.append(translate(lang, 'jobs_some_updates_message', job_name=job_name))
else:
msg = translate(lang, 'jobs_multiple_updates_message')
html_body.append(msg)
html_body.append('<ul>')
text_body.append(msg)
text_body.append('')
for job in jobs:
update_count = updates_per_job[job.id]
if update_count == 1:
update_line = translate(lang, 'one_new_update')
else:
update_line = translate(lang, 'x_new_updates', count=update_count)
html_body.append('<li><b>%s:</b> %s</li>' % (job.function.title, update_line))
text_body.append('* %s: %s' % (job.function.title, update_line))
html_body.append('</ul>')
return '<br>'.join(html_body), '\n'.join(text_body)
def _send_email_for_notification(sln_settings, jobs_settings, subject, html_body, text_body):
# type: (SolutionSettings, JobsSettings, unicode, unicode, unicode) -> None
def transl(key, **params):
return translate(sln_settings.main_language, key, **params)
settings = get_server_settings()
service_profile = get_service_profile(jobs_settings.service_user)
community = get_community(service_profile.community_id)
app = get_app_by_id(community.default_app)
mime_root = MIMEMultipart('related')
mime_root['Subject'] = subject
if app.type == App.APP_TYPE_ROGERTHAT:
mime_root['From'] = settings.senderEmail
else:
mime_root['From'] = '%s <%s>' % (community.name, app.dashboard_email_address)
mime_root['To'] = ', '.join(jobs_settings.emails)
mime = MIMEMultipart('alternative')
mime_root.attach(mime)
button_css = 'display: inline-block; margin-left: 0.5em; margin-right: 0.5em; -webkit-border-radius: 6px;' \
' -moz-border-radius: 6px; border-radius: 6px; font-family: Arial; color: #ffffff; font-size: 14px;' \
' background: #3abb9e; padding: 8px 16px 8px 16px; text-decoration: none;'
signin_url = settings.get_signin_url()
if sln_settings.login:
signin_url += '?email=%s' % sln_settings.login.email()
btn = u'<a href="%(signin_url)s" style="%(button_css)s">%(dashboard)s</a>' % {
'signin_url': signin_url,
'button_css': button_css,
'dashboard': transl('dashboard')
}
action_text = transl('if-email-body-3-button', dashboard_button=btn)
footer_text = transl('jobs_notification_footer', service_name=sln_settings.name, app_name=app.name,
dashboard_url='%s (%s)' % (transl('dashboard').lower(), signin_url))
footer_html = transl('jobs_notification_footer', service_name=sln_settings.name, app_name=app.name,
dashboard_url='<a href="%s">%s</a>' % (signin_url, transl('dashboard').lower()))
html_params = {
'message': html_body,
'action_text': action_text,
'footer': footer_html.replace('\n', '<br>'),
}
url_txt = transl('if-email-body-3-url', dashboard_url=signin_url)
text_params = {
'message': text_body,
'url_text': url_txt,
'footer': footer_text,
}
body_html = JINJA_ENVIRONMENT.get_template('solicitation_message_html.tmpl').render(html_params)
body = JINJA_ENVIRONMENT.get_template('solicitation_message.tmpl').render(text_params)
mime.attach(MIMEText(body.encode('utf-8'), 'plain', 'utf-8'))
mime.attach(MIMEText(body_html.encode('utf-8'), 'html', 'utf-8'))
    with open(path.join(path.dirname(solutions.__file__), 'common', 'templates', 'emails', 'oca-email-header.png'), 'rb') as f:
img_data = f.read()
img = MIMEImage(img_data, 'png')
img.add_header('Content-Id', '<osa-footer>')
img.add_header('Content-Disposition', 'inline', filename='Onze Stad App footer')
mime_root.attach(img)
send_mail_via_mime(settings.senderEmail, sln_settings.inbox_mail_forwarders, mime_root)
|
'''
Created on Dec 16, 2012
@author: sar
'''
import numpy as np
#from sklearn import mixture
import gmm_diag2_forJason as proj
if __name__ == '__main__':
np.random.seed(1)
g = proj.GMM(n_components=2, covariance_type='full')
# Generate random observations with two modes centered on 0
# and 10 to use for training.
obs = np.concatenate((np.random.randn(100, 5), 10 + np.random.randn(300, 5)))
print obs.shape
g.fit(obs)
print "weights = ", np.round(g.weights_, 2)
print "means = ", np.round(g.means_, 2)
print "covars = ", np.round(g.covars_, 2) #doctest: +SKIP
#g.predict([[0], [2], [9], [10]])
#print np.round(g.score([[0], [2], [9], [10]]), 2)
# Refit the model on new data (initial parameters remain the
# same), this time with an even split between the two modes.
#g.fit(20 * [[0]] + 20 * [[10]])
#print np.round(g.weights, 2)
pass |
from base64 import b64encode
from typing import TYPE_CHECKING, List
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from typer import Argument, Typer, prompt
from kolombo.console import error, finished, info, print_list, started, step, warning
from kolombo.util import async_command, needs_database
if TYPE_CHECKING:
from kolombo.models import User
user_cli = Typer()
@user_cli.command("list")
@async_command
@needs_database
async def list_users() -> None:
from kolombo.models import User
active_users = [user.email for user in await User.all_active()]
info(f"Active users: {len(active_users)}")
if len(active_users) > 0:
print_list(active_users)
async def _save_user(email: str, password: str, domain: str) -> None:
from kolombo import conf
from kolombo.models import User
kdf = Scrypt(conf.SALT, length=32, n=2 ** 16, r=8, p=1)
b64_password = b64encode(kdf.derive(password.encode("utf-8")))
await User.objects.create(email=email, password=b64_password, domain=domain)
def update_virtual_files(active_users: List["User"]) -> None:
emails = [user.email for user in active_users]
addresses = "\n".join(f"{email} {email}" for email in emails)
with open("/etc/kolombo/virtual/addresses", mode="w") as addresses_file:
addresses_file.write(f"{addresses}\n")
mailboxes = "\n".join(f"{email} {email}/" for email in emails)
with open("/etc/kolombo/virtual/mailbox", mode="w") as mailbox_file:
mailbox_file.write(f"{mailboxes}\n")
@user_cli.command("add")
@async_command
@needs_database
async def add_user(
email: str = Argument(..., help="Email for new user"), # noqa: B008
) -> None:
from kolombo.models import Domain, User
if "@" not in email:
error(f"Email '{email}' does not contain '@'!")
exit(1)
domain = email.split("@", maxsplit=1)[1].strip()
if not domain:
error("Domain part MUST NOT be empty string!")
exit(1)
elif not await Domain.objects.filter(active=True, actual=domain).exists():
error(f"Domain '{domain}' is not added (or inactive)!")
warning(
f"You can add it via [code]kolombo domain add {domain} mx.{domain}[/code]"
)
exit(1)
elif await User.objects.filter(email=email).exists():
error(f"User with email '{email}' already exists!")
exit(1)
started(f"Adding [code]{email}[/] user")
password = prompt(f"{email} password", hide_input=True, confirmation_prompt=True)
step("Saving to database")
await _save_user(email, password, domain)
step("Updating virtual files (addresses and mailbox map)")
active_users = await User.all_active()
update_virtual_files(active_users)
warning("Run command [code]kolombo run[/] to reload Kolombo")
finished(f"User '{email}' added!")
|
import torch
import numpy as np
import torch.nn.functional as F
from box import Box
from force import ForceField
class Thermostat(torch.nn.Module):
    def __init__(self, SimObj, type, timestep, temperature, device="cpu", **thrmst_params):
        super().__init__()
        if type == "vel":
            self.type = type
            self.apply = self.apply_vel
            necessary_parameters = ["thermostat_cnst"]
        elif type == "no_vel":
            self.type = type
            self.apply = self.apply_no_vel
            necessary_parameters = ["thermostat_cnst"]
        else:
            print(f"Type {type} is not supported")
self.device = device
self.temperature = temperature
self.timestep = timestep
self.therm_params = {}
        for param, val in thrmst_params.items():
self.therm_params[param] = val
self.sim = SimObj
    def apply_vel(self):
        # Randomly re-thermalize particles: each is reset with probability
        # timestep / thermostat_cnst per step.
        thermostat_prob = self.timestep / self.therm_params["thermostat_cnst"]
        rndm_numbers = torch.rand(self.sim.vels.shape[0], device=self.device) < thermostat_prob
        self.sim.vels[rndm_numbers] = torch.randn(3, device=self.device)
        self.sim.vels[rndm_numbers] /= self.sim.masses[rndm_numbers]
    def apply_no_vel(self):
        # Without explicit velocities, re-thermalize by resetting the previous
        # position so the implied velocity is a fresh random draw
        # (see the commented reference code below).
        thermostat_prob = self.timestep / self.therm_params["thermostat_cnst"]
        rndm_numbers = torch.rand(self.sim.coords.shape[0], device=self.device) < thermostat_prob
        new_diff = torch.randn(3, device=self.device) * self.temperature * self.timestep
        self.sim.coords_last[rndm_numbers] = self.sim.coords[rndm_numbers] - new_diff
# for ai in range(n_atoms):
# if torch.random(vels)
# if random() < thermostat_prob:
#
# # Actually this should be divided by the mass
# new_vel = torch.randn(3, device=device) * temperature
# vels[0, ai] = new_vel
# elif self.type == "no_vel":
# new_diff = torch.randn(3, device=device) * temperature * self.timestep
# coords_last[0, ai] = coords[0, ai] - new_diff
class Integrator(torch.nn.Module):
    def __init__(self, Simobj, type, timestep, temperature, masses, device="cpu", otherparams=None):
        super().__init__()
        supported_types = ['vel', 'no_vel', 'langevin', 'langevin_simple']
        if type in supported_types:
            self.type = type
        else:
            print(f"Integrator type {type} not supported")
self.sim = Simobj # lets us access the Simulation Tensors
if otherparams != None:
self.otherparams = otherparams
if self.type == "vel":
self.first_step = self.first_step_vel
self.second_step = self.second_step_vel
if self.type == "no_vel":
self.first_step = None
self.second_step = self.second_step_no_vel
elif self.type == "langevin":
self.first_step = self.first_step_langevin
self.second_step = self.second_step_langevin
elif self.type == "langevin_simple":
self.first_step = self.first_step_langevin_simple
self.second_step = self.second_step_langevin_simple
self.temp = temperature
self.masses = masses
self.timestep = timestep
self.device = device
def first_step_vel(self):
self.sim.coords = self.sim.coords + self.sim.vels * self.timestep + 0.5 * self.sim.accs_last * self.timestep * self.timestep
    def first_step_langevin(self):
        alpha, twokbT = self.otherparams['thermostat_const'], self.otherparams['temperature']
        beta = np.sqrt(twokbT * alpha * self.timestep) * torch.randn(self.sim.vels.shape, device=self.device)
        b = 1.0 / (1.0 + (alpha * self.timestep) / (2 * self.masses.unsqueeze(2)))
        # Stash friction and noise so the second half-step reuses the same draw.
        self._gj_alpha, self._gj_beta = alpha, beta
        self.sim.coords_last = self.sim.coords
        self.sim.coords = self.sim.coords + b * self.timestep * self.sim.vels + 0.5 * b * (self.timestep ** 2) * self.sim.accs_last + 0.5 * b * self.timestep * beta / self.sim.masses.unsqueeze(2)
def first_step_langevin_simple(self):
self.sim.coords = self.sim.coords + self.sim.vels * self.timestep + 0.5 * self.sim.accs_last * self.timestep * self.timestep
def second_step_vel(self):
self.sim.vels = self.sim.vels + 0.5 * (self.sim.accs_last + self.sim.accs) * self.timestep
self.sim.accs_last = self.sim.accs
def second_step_no_vel(self):
coords_next = 2 * self.sim.coords - self.sim.coords_last + self.sim.accs * self.timestep * self.timestep
self.sim.coords_last = self.sim.coords
self.sim.coords = coords_next
    def second_step_langevin(self):
        # From Gronbech-Jensen 2013; uses the friction and noise drawn in the first half-step.
        self.sim.vels = self.sim.vels + 0.5 * self.timestep * (self.sim.accs_last + self.sim.accs) - self._gj_alpha * (self.sim.coords - self.sim.coords_last) / self.sim.masses.unsqueeze(
            2) + self._gj_beta / self.sim.masses.unsqueeze(2)
        self.sim.accs_last = self.sim.accs
def second_step_langevin_simple(self):
gamma, twokbT = self.otherparams['thermostat_const'], self.otherparams['temperature']
self.sim.accs = self.sim.accs + (-gamma * self.sim.vels + np.sqrt(gamma * twokbT) * torch.randn(self.sim.vels.shape,
device=self.device)) / self.sim.masses.unsqueeze(2)
self.sim.vels = self.sim.vels + 0.5 * (self.sim.accs_last + self.sim.accs) * self.timestep
self.sim.accs_last = self.sim.accs
# Example reportdict entry: {'kinetic_energy': 10} reports kinetic energy every 10 steps
class Reporter(torch.nn.Module): # prints out observables etc.
def __init__(self, Simobj, reportdict):
super(Reporter, self).__init__()
self.sim = Simobj
self.keys = []
self.freq = []
supportedreports = ['kinetic_energy', 'step']
        for key, item in reportdict.items():
if key in supportedreports:
self.keys.append(key)
self.freq.append(item)
self.functiondict = {'kinetic_energy':self.kinetic_energy, }
    def report(self):
        # Emit each registered observable at its requested frequency
        # (assumes the simulation object tracks a `step` counter).
        for key, freq in zip(self.keys, self.freq):
            if self.sim.step % freq == 0 and key in self.functiondict:
                print(key, self.functiondict[key]())
    def kinetic_energy(self):
        # KE = 0.5 * sum(m * v^2); assumes the simulation tracks velocities.
        return 0.5 * (self.sim.masses.unsqueeze(2) * self.sim.vels ** 2).sum()
# Differentiable molecular simulation of proteins with a coarse-grained potential
class Simulator(torch.nn.Module):
"""
Parameters is a Dictionary of Tensors that will be learned
ex. {bond_constants : torch.tensor}
Application is a Dictionary defining how the tensors will be applied to the simulation data
"""
def __init__(self, particledict, parameterdict, applicationdict,
forcefield_spec, thermostatdict, reportdict, box_size, device='cpu'):
super(Simulator, self).__init__()
# self.params = {}
# self.application = {}
# for key, item in parameterdict:
# self.params[key] = torch.nn.Parameter(item)
# for key, item in applicationdict:
# self.application[key] = item
self.masses = particledict.masses
self.coords = particledict.coords
# Intialize Tensors which are edited in Integrator and Thermostat Object
if thermostatdict['type'] != "no_vel":
self.vels = torch.randn(self.coords.shape, device=device) * thermostatdict['start_temperature']
self.accs_last = torch.zeros(self.coords.shape, device=device)
self.accs = torch.zeros(self.coords.shape, device=device)
else:
self.accs = torch.zeros(self.coords.shape, device=device)
self.coords_last = self.coords.clone() + torch.randn(self.coords.shape, device=device) * \
thermostatdict['start_temperature'] * thermostatdict['timestep']
        self.Thermostat = Thermostat(self, thermostatdict['type'], thermostatdict['timestep'],
                                     thermostatdict['temperature'], device=device,
                                     **thermostatdict['thermostatparams'])
        self.Integrator = Integrator(self, thermostatdict['type'], thermostatdict['timestep'],
                                     thermostatdict['temperature'], self.masses,
                                     otherparams=thermostatdict['thermostatparams'], device=device)
self.Reporter = Reporter(self, reportdict)
        # self.System_Observables = System_Obervables  # 'System_Obervables' is not defined in this module
self.Force_Field = ForceField(forcefield_spec, particledict)
self.Box = Box(box_size, device=device)
# self.ff_distances = torch.nn.Parameter(ff_distances)
# self.ff_angles = torch.nn.Parameter(ff_angles)
# self.ff_dihedrals = torch.nn.Parameter(ff_dihedrals)
# def sim_step_novel(self, coords, masses,):
def center_system(self):
center = self.Box/2
        current_com = torch.mean(self.coords * F.normalize(self.masses))
self.coords.add_(center - current_com)
# returns difference vectors in matrix form for all coordinates and enforces minimum image convention
# vector from p0 to p1 = min_image[0][1]
def min_image(self, coords):
box_size = self.Box[0] # Assumes Cubic Box at least for now
n_atoms = coords.shape[0]
tmp = coords.unsqueeze(1).expand(-1, n_atoms, -1)
diffs = tmp - tmp.transpose(0, 1)
min_image = diffs - torch.round(diffs / box_size) * box_size
return min_image
# Returns Distances b/t all particles as a symmetric matrixd
    def distances(self, min_image):
return min_image.norm(dim=2)
# Returns Matrix of normalized vectors ex. vectors[0][1] returns the normalized vector from particle 0 pointing at particle 1
def vectors(self, min_image):
return F.normalize(min_image, dim=2)
def sim_step_vel(self, n_steps, integrator="vel", device="cpu", start_temperature=0.1, timestep=0.02,
verbosity = 0,
thermostat_const=0.0, # Set to 0.0 to run without a thermostat (NVE ensemble)
temperature=0.0, # The effective temperature of the thermostat
):
for i in range(n_steps):
self.Integrator.first_step()
min_image = self.min_image(self.coords)
distances = self.distances(min_image)
vectors = self.vectors(min_image)
min_image_mc = min_image[self.mc_mask]
distances_mc = self.distances(min_image_mc)
vectors_mc = self.vectors(min_image_mc)
            # Force calculation: the force field returns forces F and potential energy U;
            # accelerations are F / mass.
            F, U = self.Force_Field.compute_forces(distances, vectors, distances_mc, vectors_mc)
            self.accs = F / self.masses
self.Integrator.second_step()
self.Thermostat.apply()
self.Reporter.report()
def forward(self,
coords,
orientations,
inters_flat,
inters_ang,
inters_dih,
                masses,
seq,
native_coords,
n_steps,
integrator="vel", # vel/no_vel/min/langevin/langevin_simple
timestep=0.02,
start_temperature=0.1,
thermostat_const=0.0, # Set to 0.0 to run without a thermostat (NVE ensemble)
temperature=0.0, # The effective temperature of the thermostat
sim_filepath=None, # Output PDB file to write to or None to not write out
energy=False, # Return the energy at the end of the simulation
report_n=10_000, # Print and write PDB every report_n steps
verbosity=2, # 0 for epoch info, 1 for protein info, 2 for simulation step info
):
assert integrator in ("vel", "no_vel", "min", "langevin", "langevin_simple"), f"Invalid integrator {integrator}"
device = coords.device
batch_size, n_atoms = masses.size(0), masses.size(1)
n_res = n_atoms // len(atoms)
dist_bin_centres_tensor = torch.tensor(dist_bin_centres, device=device)
pair_centres_flat = dist_bin_centres_tensor.index_select(0, inters_flat[0]).unsqueeze(0).expand(batch_size, -1, -1)
pair_pots_flat = self.ff_distances.index_select(0, inters_flat[0]).unsqueeze(0).expand(batch_size, -1, -1)
angle_bin_centres_tensor = torch.tensor(angle_bin_centres, device=device)
angle_centres_flat = angle_bin_centres_tensor.unsqueeze(0).unsqueeze(0).expand(batch_size, n_res, -1)
angle_pots_flat = self.ff_angles.index_select(1, inters_ang[0]).unsqueeze(0).expand(batch_size, -1, -1, -1)
dih_bin_centres_tensor = torch.tensor(dih_bin_centres, device=device)
dih_centres_flat = dih_bin_centres_tensor.unsqueeze(0).unsqueeze(0).expand(batch_size, n_res - 1, -1)
dih_pots_flat = self.ff_dihedrals.index_select(1, inters_dih[0]).unsqueeze(0).expand(batch_size, -1, -1, -1)
native_coords_ca = native_coords.view(batch_size, n_res, 3 * len(atoms))[0, :, 3:6]
model_n = 0
# just preparing needed vectors
if integrator == "vel" or integrator == "langevin" or integrator == "langevin_simple":
vels = torch.randn(coords.shape, device=device) * start_temperature
accs_last = torch.zeros(coords.shape, device=device)
elif integrator == "no_vel":
coords_last = coords.clone() + torch.randn(coords.shape, device=device) * start_temperature * timestep
# The step the energy is return on is not used for simulation so we add an extra step
if energy:
n_steps += 1
for i in range(n_steps):
# MD Backend First step
if integrator == "vel":
coords = coords + vels * timestep + 0.5 * accs_last * timestep * timestep
elif integrator == "langevin":
# From Gronbech-Jensen 2013
alpha, twokbT = thermostat_const, temperature
beta = np.sqrt(twokbT * alpha * timestep) * torch.randn(vels.shape, device=device)
b = 1.0 / (1.0 + (alpha * timestep) / (2 * self.masses.unsqueeze(2)))
coords_last = coords
coords = coords + b * timestep * vels + 0.5 * b * (timestep ** 2) * accs_last + 0.5 * b * timestep * beta / masses.unsqueeze(2)
elif integrator == "langevin_simple":
coords = coords + vels * timestep + 0.5 * accs_last * timestep * timestep
            # See https://arxiv.org/pdf/1401.1181.pdf for derivation of forces
printing = verbosity >= 2 and i % report_n == 0
returning_energy = energy and i == n_steps - 1
if printing or returning_energy:
dist_energy = torch.zeros(1, device=device)
angle_energy = torch.zeros(1, device=device)
dih_energy = torch.zeros(1, device=device)
            # Add pairwise distance forces
crep = coords.unsqueeze(1).expand(-1, n_atoms, -1, -1) # makes list of coords like [[ [coord1] n times ], [coord2] n times], [coord3] n times]]
diffs = crep - crep.transpose(1, 2)
dists = diffs.norm(dim=3)
dists_flat = dists.view(batch_size, n_atoms * n_atoms)
dists_from_centres = pair_centres_flat - dists_flat.unsqueeze(2).expand(-1, -1, n_bins_force)
dist_bin_inds = dists_from_centres.abs().argmin(dim=2).unsqueeze(2)
            # Force is the negative gradient of the potential,
            # so it is proportional to the difference between neighboring bins of the potential
pair_forces_flat = 0.5 * (pair_pots_flat.gather(2, dist_bin_inds) - pair_pots_flat.gather(2, dist_bin_inds + 2))
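            # Hedged note: this is a central finite difference on the binned
            # potential, F ~ -(U[b + 1] - U[b - 1]) / 2 around the selected bin
            # b = dist_bin_inds + 1 (the same bin used for the energy below).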
# Specify minimum to prevent division by zero errors
norm_diffs = diffs / dists.clamp(min=0.01).unsqueeze(3)
pair_accs = (pair_forces_flat.view(batch_size, n_atoms, n_atoms)).unsqueeze(3) * norm_diffs
accs = pair_accs.sum(dim=1) / masses.unsqueeze(2)
if printing or returning_energy:
dist_energy += 0.5 * pair_pots_flat.gather(2, dist_bin_inds + 1).sum()
atom_coords = coords.view(batch_size, n_res, 3 * len(atoms))
atom_accs = torch.zeros(batch_size, n_res, 3 * len(atoms), device=device)
            # Angle forces
# across_res is the number of atoms in the next residue, starting from atom_3
for ai, (atom_1, atom_2, atom_3, across_res) in enumerate(angles):
ai_1, ai_2, ai_3 = atoms.index(atom_1), atoms.index(atom_2), atoms.index(atom_3)
if across_res == 0:
ba = atom_coords[:, : , (ai_1 * 3):(ai_1 * 3 + 3)] - atom_coords[:, : , (ai_2 * 3):(ai_2 * 3 + 3)]
bc = atom_coords[:, : , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, : , (ai_2 * 3):(ai_2 * 3 + 3)]
# Use residue potential according to central atom
angle_pots_to_use = angle_pots_flat[:, ai, :]
elif across_res == 1:
ba = atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
angle_pots_to_use = angle_pots_flat[:, ai, :-1]
elif across_res == 2:
ba = atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] - atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)]
angle_pots_to_use = angle_pots_flat[:, ai, 1:]
ba_norms = ba.norm(dim=2)
bc_norms = bc.norm(dim=2)
angs = torch.acos((ba * bc).sum(dim=2) / (ba_norms * bc_norms))
n_angles = n_res if across_res == 0 else n_res - 1
angles_from_centres = angle_centres_flat[:, :n_angles] - angs.unsqueeze(2)
angle_bin_inds = angles_from_centres.abs().argmin(dim=2).unsqueeze(2)
angle_forces = 0.5 * (angle_pots_to_use.gather(2, angle_bin_inds) - angle_pots_to_use.gather(2, angle_bin_inds + 2))
cross_ba_bc = torch.cross(ba, bc, dim=2)
                fa = angle_forces * F.normalize(torch.cross( ba, cross_ba_bc, dim=2), dim=2) / ba_norms.unsqueeze(2)
                fc = angle_forces * F.normalize(torch.cross(-bc, cross_ba_bc, dim=2), dim=2) / bc_norms.unsqueeze(2)
fb = -fa -fc
if across_res == 0:
atom_accs[:, : , (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, : , (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, : , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
elif across_res == 1:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
elif across_res == 2:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
if printing or returning_energy:
angle_energy += angle_pots_to_use.gather(2, angle_bin_inds + 1).sum()
# Dihedral forces
# across_res is the number of atoms in the next residue, starting from atom_4
for di, (atom_1, atom_2, atom_3, atom_4, across_res) in enumerate(dihedrals):
ai_1, ai_2, ai_3, ai_4 = atoms.index(atom_1), atoms.index(atom_2), atoms.index(atom_3), atoms.index(atom_4)
if across_res == 1:
ab = atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] - atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)]
bc = atom_coords[:, :-1, (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
cd = atom_coords[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] - atom_coords[:, :-1, (ai_3 * 3):(ai_3 * 3 + 3)]
# Use residue potential according to central atom
dih_pots_to_use = dih_pots_flat[:, di, :-1]
elif across_res == 2:
ab = atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] - atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
cd = atom_coords[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] - atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)]
dih_pots_to_use = dih_pots_flat[:, di, 1:]
elif across_res == 3:
ab = atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)] - atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)]
cd = atom_coords[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] - atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)]
dih_pots_to_use = dih_pots_flat[:, di, 1:]
cross_ab_bc = torch.cross(ab, bc, dim=2)
cross_bc_cd = torch.cross(bc, cd, dim=2)
bc_norms = bc.norm(dim=2).unsqueeze(2)
dihs = torch.atan2(
torch.sum(torch.cross(cross_ab_bc, cross_bc_cd, dim=2) * bc / bc_norms, dim=2),
torch.sum(cross_ab_bc * cross_bc_cd, dim=2)
)
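# The atan2 form gives the signed dihedral in (-pi, pi], avoiding the sign ambiguity of an acos-based formula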
dihs_from_centres = dih_centres_flat - dihs.unsqueeze(2)
dih_bin_inds = dihs_from_centres.abs().argmin(dim=2).unsqueeze(2)
dih_forces = 0.5 * (dih_pots_to_use.gather(2, dih_bin_inds) - dih_pots_to_use.gather(2, dih_bin_inds + 2))
fa = dih_forces * normalize(-cross_ab_bc, dim=2) / ab.norm(dim=2).unsqueeze(2)
fd = dih_forces * normalize( cross_bc_cd, dim=2) / cd.norm(dim=2).unsqueeze(2)
# Forces on the middle atoms have to keep the sum of torques null
# Forces taken from http://www.softberry.com/freedownloadhelp/moldyn/description.html
fb = ((ab * -bc) / (bc_norms ** 2) - 1) * fa - ((cd * -bc) / (bc_norms ** 2)) * fd
fc = -fa - fb - fd
if across_res == 1:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, :-1, (ai_3 * 3):(ai_3 * 3 + 3)] += fc
atom_accs[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] += fd
elif across_res == 2:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
atom_accs[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] += fd
elif across_res == 3:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
atom_accs[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] += fd
if printing or returning_energy:
dih_energy += dih_pots_to_use.gather(2, dih_bin_inds + 1).sum()
accs += atom_accs.view(batch_size, n_atoms, 3) / masses.unsqueeze(2)
# Shortcut to return energy at a given step
if returning_energy:
return dist_energy + angle_energy + dih_energy
# Second step
if integrator == "vel":
vels = vels + 0.5 * (accs_last + accs) * timestep
accs_last = accs
elif integrator == "no_vel":
coords_next = 2 * coords - coords_last + accs * timestep * timestep
coords_last = coords
coords = coords_next
elif integrator == "langevin":
# From Gronbech-Jensen 2013
vels = vels + 0.5 * timestep * (accs_last + accs) - alpha * (coords - coords_last) / masses.unsqueeze(2) + beta / masses.unsqueeze(2)
accs_last = accs
elif integrator == "langevin_simple":
gamma, twokbT = thermostat_const, temperature
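# friction plus random kick: drag -gamma * vels and noise scaled by sqrt(gamma * 2kBT);
# the local name suggests `temperature` here stores 2kBT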
accs = accs + (-gamma * vels + np.sqrt(gamma * twokbT) * torch.randn(vels.shape, device=device)) / masses.unsqueeze(2)
vels = vels + 0.5 * (accs_last + accs) * timestep
accs_last = accs
elif integrator == "min":
coords = coords + accs * 0.1
# Apply thermostat
if integrator in ("vel", "no_vel") and thermostat_const > 0.0:
thermostat_prob = timestep / thermostat_const
for ai in range(n_atoms):
if random() < thermostat_prob:
if integrator == "vel":
# Actually this should be divided by the mass
new_vel = torch.randn(3, device=device) * temperature
vels[0, ai] = new_vel
elif integrator == "no_vel":
new_diff = torch.randn(3, device=device) * temperature * timestep
coords_last[0, ai] = coords[0, ai] - new_diff
if printing:
total_energy = dist_energy + angle_energy + dih_energy
out_line = " Step {:8} / {} - acc {:6.3f} {}- energy {:6.2f} ( {:6.2f} {:6.2f} {:6.2f} ) - Cα RMSD {:6.2f}".format(
i + 1, n_steps, torch.mean(accs.norm(dim=2)).item(),
"- vel {:6.3f} ".format(torch.mean(vels.norm(dim=2)).item()) if integrator in ("vel", "langevin", "langevin_simple") else "",
total_energy.item(), dist_energy.item(), angle_energy.item(), dih_energy.item(),
rmsd(coords.view(batch_size, n_res, 3 * len(atoms))[0, :, 3:6], native_coords_ca)[0].item())
report(out_line, 2, verbosity)
if sim_filepath and i % report_n == 0:
model_n += 1
with open(sim_filepath, "a") as of:
of.write("MODEL {:>8}\n".format(model_n))
for ri, r in enumerate(seq):
for ai, atom in enumerate(atoms):
of.write("ATOM {:>4} {:<2} {:3} A{:>4} {:>8.3f}{:>8.3f}{:>8.3f} 1.00 0.00 {:>2} \n".format(
len(atoms) * ri + ai + 1, atom[:2].upper(),
one_to_three_aas[r], ri + 1,
coords[0, len(atoms) * ri + ai, 0].item(),
coords[0, len(atoms) * ri + ai, 1].item(),
coords[0, len(atoms) * ri + ai, 2].item(),
atom[0].upper()))
of.write("ENDMDL\n")
return coords
def training_step(model_filepath, atom_ff_definitions, device="cpu", verbosity=0):
max_n_steps = 2_000
learning_rate = 1e-4
n_accumulate = 100
parameters = atom_ff_definitions.parameters
applications = atom_ff_definitions.applications
simulator = Simulator(parameters, applications)
train_set = ProteinDataset(train_proteins, train_val_dir, device=device)
val_set = ProteinDataset(val_proteins, train_val_dir, device=device)
optimizer = torch.optim.Adam(simulator.parameters(), lr=learning_rate)
report("Starting training", 0, verbosity)
for ei in count(start=0, step=1):
# After 37 epochs reset the optimiser with a lower learning rate
if ei == 37:
optimizer = torch.optim.Adam(simulator.parameters(), lr=learning_rate / 2)
train_rmsds, val_rmsds = [], []
n_steps = min(250 * ((ei // 5) + 1), max_n_steps) # Scale up n_steps over epochs
train_inds = list(range(len(train_set)))
val_inds = list(range(len(val_set)))
shuffle(train_inds)
shuffle(val_inds)
simulator.train()
optimizer.zero_grad()
for i, ni in enumerate(train_inds):
# Simulate from the native structure with the current potentials, then score the final coordinates against the native ones
native_coords, inters_flat, inters_ang, inters_dih, masses, seq = train_set[ni]
coords = simulator(native_coords.unsqueeze(0), inters_flat.unsqueeze(0),
inters_ang.unsqueeze(0), inters_dih.unsqueeze(0), masses.unsqueeze(0),
seq, native_coords.unsqueeze(0), n_steps, verbosity=verbosity)
loss, passed = rmsd(coords[0], native_coords)
train_rmsds.append(loss.item())
if passed:
loss_log = torch.log(1.0 + loss)
loss_log.backward()
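# taking log(1 + loss) damps the gradient contribution of poorly-fitting, high-RMSD structures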
report(" Training {:4} / {:4} - RMSD {:6.2f} over {:4} steps and {:3} residues".format(
i + 1, len(train_set), loss.item(), n_steps, len(seq)), 1, verbosity)
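# Accumulate gradients over n_accumulate proteins before each optimiser step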
if (i + 1) % n_accumulate == 0:
optimizer.step()
optimizer.zero_grad()
simulator.eval()
with torch.no_grad():
for i, ni in enumerate(val_inds):
native_coords, inters_flat, inters_ang, inters_dih, masses, seq = val_set[ni]
coords = simulator(native_coords.unsqueeze(0), inters_flat.unsqueeze(0),
inters_ang.unsqueeze(0), inters_dih.unsqueeze(0), masses.unsqueeze(0),
seq, native_coords.unsqueeze(0), n_steps, verbosity=verbosity)
loss, passed = rmsd(coords[0], native_coords)
val_rmsds.append(loss.item())
report(" Validation {:4} / {:4} - RMSD {:6.2f} over {:4} steps and {:3} residues".format(
i + 1, len(val_set), loss.item(), n_steps, len(seq)), 1, verbosity)
torch.save({"distances": simulator.ff_distances.data,
"angles": simulator.ff_angles.data,
"dihedrals": simulator.ff_dihedrals.data,
"optimizer": optimizer.state_dict()},
model_filepath)
report("Epoch {:4} - med train/val RMSD {:6.3f} / {:6.3f} over {:4} steps".format(
ei + 1, np.median(train_rmsds), np.median(val_rmsds), n_steps), 0, verbosity)
# Read a dataset of input files
class ProteinDataset(Dataset):
def __init__(self, pdbids, coord_dir, device="cpu"):
self.pdbids = pdbids
self.coord_dir = coord_dir
self.set_size = len(pdbids)
self.device = device
def __len__(self):
return self.set_size
def __getitem__(self, index):
fp = os.path.join(self.coord_dir, self.pdbids[index] + ".txt")
return read_input_file(fp, device=self.device)
|
import argparse
import datetime
import multiprocessing
import time
import matplotlib.pyplot as plt
import numpy as np
import bss
from bss.head import HEADUpdate
config = {
"seed": 873009,
"n_repeat": 1000,
"n_chan": [4, 6, 8],
"tol": -1.0, # i.e. ignore tolerance
"maxiter": 100,
"dtype": np.complex128,
}
methods = {
"IPA": HEADUpdate.IPA,
"IP": HEADUpdate.IP,
"ISS": HEADUpdate.ISS,
"IP2": HEADUpdate.IP2,
"NCG": HEADUpdate.NCG,
"IPA+NCG": HEADUpdate.IPA_NCG,
}
def progress_tracker(n_tasks, queue):
n_digits = len(str(n_tasks))
fmt = "Remaining tasks: {n:" + str(n_digits) + "d} / " + str(n_tasks)
start_date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
print(f"Start processing at {start_date}")
def print_status():
print(fmt.format(n=n_tasks), end="\r")
print_status()
while n_tasks > 0:
_ = queue.get(block=True)
n_tasks -= 1
print_status()
end_date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
print(f"All done. Finished at {end_date}")
def f_loop(args):
import mkl
mkl.set_num_threads(1)
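# pin each worker to a single MKL thread so the process pool does not oversubscribe the CPU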
# expand arguments
V, maxiter, tol, methods, the_queue = args
infos = {}
runtimes = {}
v_mat = V[None, ...]
for method, key in methods.items():
t_start = time.perf_counter()
_, info = bss.head.head_solver(
v_mat, maxiter=maxiter, tol=tol, method=key, verbose=False, info=True
)
elapsed = time.perf_counter() - t_start
infos[method] = info
runtimes[method] = elapsed
the_queue.put(True)
return V.shape[-1], infos, runtimes
def rand_V(n_freq, n_chan, n_mat=None, dtype=np.complex128):
if n_mat is None:
n_mat = n_chan
# random Hermitian PSD matrices built as V = X X^H, which is PSD by construction
# (note: the dtype argument is currently unused)
X = bss.random.crandn(n_freq, n_mat, n_chan, 10 * n_chan)
V = X @ bss.utils.tensor_H(X)
return V
if __name__ == "__main__":
np.random.seed(config["seed"])
# we need a queue for inter-process communication
m = multiprocessing.Manager()
the_queue = m.Queue()
# construct the list of parallel arguments
parallel_args = []
for n_chan in config["n_chan"]:
V = rand_V(config["n_repeat"], n_chan, dtype=config["dtype"])
for v_mat in V:
parallel_args.append(
(v_mat, config["maxiter"], config["tol"], methods, the_queue)
)
np.random.shuffle(parallel_args)
# run all the simulation in parallel
prog_proc = multiprocessing.Process(
target=progress_tracker, args=(len(parallel_args), the_queue,)
)
prog_proc.start()
t_start = time.perf_counter()
pool = multiprocessing.Pool()
results = pool.map(f_loop, parallel_args)
pool.close()
t_end = time.perf_counter()
infos = dict(zip(config["n_chan"], [{} for c in config["n_chan"]]))
runtimes = dict(zip(config["n_chan"], [{} for c in config["n_chan"]]))
# Post process the results
for (n_chan, res_info, res_rt) in results:
for method in methods:
if method not in infos[n_chan]:
infos[n_chan][method] = {
"head_errors": [res_info[method]["head_errors"]],
"head_costs": [res_info[method]["head_costs"]],
}
else:
infos[n_chan][method]["head_costs"].append(
res_info[method]["head_costs"]
)
infos[n_chan][method]["head_errors"].append(
res_info[method]["head_errors"]
)
if method not in runtimes[n_chan]:
runtimes[n_chan][method] = res_rt[method]
else:
runtimes[n_chan][method] += res_rt[method]
for n_chan in config["n_chan"]:
print(f"{n_chan} channels")
for method in methods:
runtimes[n_chan][method] /= config["n_repeat"]
infos[n_chan][method]["head_costs"] = np.concatenate(
infos[n_chan][method]["head_costs"], axis=0
)
infos[n_chan][method]["head_errors"] = np.concatenate(
infos[n_chan][method]["head_errors"], axis=0
)
print(f"=== {method:7s} runtime={runtimes[n_chan][method]:.3f} ===")
# get the date
date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
filename = f"data/{date}_experiment_head_results.npz"
# save to compressed numpy file
np.savez(filename, config=config, methods=methods, infos=infos, runtimes=runtimes)
print(f"Total time spent {t_end - t_start:.3f} s")
print(f"Results saved to {filename}")
|
# Generated by Django 3.2.4 on 2021-06-24 04:27
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Results',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=100)),
('last_name', models.TextField(max_length=100)),
('age_group', models.SmallIntegerField()),
('minutes', models.SmallIntegerField()),
('seconds', models.SmallIntegerField()),
('miliseconds', models.SmallIntegerField(blank=True, null=True)),
('submission_date', models.DateTimeField(auto_now_add=True)),
('datecompleted', models.DateTimeField(blank=True, null=True)),
('gender', models.CharField(choices=[('Male', 'Male'), ('Female', 'Female')], default='Female', max_length=50)),
],
),
]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 6 12:03:20 2017
@author: rachael
Create a dummy gsd file with 4 molecules of 17 beads each in 6 snapshots
Used to check cluster analysis
"""
from __future__ import absolute_import, division, print_function
import gsd.hoomd
import numpy as np
def quatMultiply(q1,q2):
"""Returns a quaternion that is a composition of two quaternions
Parameters
----------
q1: 1 x 4 numpy array
representing a quaternion
q2: 1 x 4 numpy array
representing a quaternion
Returns
-------
qM: 1 x 4 numpy array
representing a quaternion that is the rotation of q1 followed by
the rotation of q2
Notes
-----
q2 * q1 is the correct order for applying rotation q1 and then
rotation q2
"""
Q2 = np.array([[q2[0],-q2[1],-q2[2],-q2[3]],[q2[1],q2[0],-q2[3],q2[2]],
[q2[2],q2[3],q2[0],-q2[1]],[q2[3],-q2[2],q2[1],q2[0]]])
qM = np.dot(Q2,q1)
return qM
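# Example (sketch): composing two 90-degree rotations about z gives a
# 180-degree rotation about z:
#   qz90 = np.array([np.cos(np.pi / 4), 0., 0., np.sin(np.pi / 4)])
#   quatMultiply(qz90, qz90)  # -> approximately [0., 0., 0., 1.]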
def createOneMol(comPos,qrot):
"""Returns a molecule, which is a list of typeids and positions
Parameters
----------
comPos: 1 x 3 numpy array
position of the center of mass
qrot: 1 x 4 numpy array
quaternion representing the orientation of the molecule
always rotate about the x axis for consistency
Returns
-------
pos: 17 x 3 numpy array
represents the positions of all the beads in the molecule
typeinds: 1 x 17 numpy array
represents the molecule types of all the beads in the molecule
large beads, LB = 0
aromatic beads, AB = 1
diams: 1 x 17 numpy array
gives the diameters of all the beads
Notes
-----
For consistency, track the pairs of indices going into the aromatics in
the order
[[0,1],[2,3],[4,5],[6,7],[8,9],[10,11]]
small beads are at a radius of 0.475 and an angle of theta = 10 degrees
"""
sRad = 0.475
th = 10 *(np.pi/180)
pos = np.zeros([17,3])
typeinds = np.array([0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1])
diams = np.zeros(17)
for i in range(len(diams)):
if typeinds[i] == 0:
diams[i] = 1.0
else:
diams[i] = 0.125
baseLocations = np.array([[0.,0.,0.],[0.5,0.,0.],[-0.5,0.,0.],
[1.5,0.,0.],[-1.5,0.,0.],
[-0.5,sRad*np.cos(th),sRad*np.sin(th)],
[-0.5,sRad*np.cos(th),-sRad*np.sin(th)],
[0.,sRad*np.cos(th),sRad*np.sin(th)],
[0.,sRad*np.cos(th),-sRad*np.sin(th)],
[0.5,sRad*np.cos(th),sRad*np.sin(th)],
[0.5,sRad*np.cos(th),-sRad*np.sin(th)],
[-0.5,-sRad*np.cos(th),sRad*np.sin(th)],
[-0.5,-sRad*np.cos(th),-sRad*np.sin(th)],
[0.,-sRad*np.cos(th),sRad*np.sin(th)],
[0.,-sRad*np.cos(th),-sRad*np.sin(th)],
[0.5,-sRad*np.cos(th),sRad*np.sin(th)],
[0.5,-sRad*np.cos(th),-sRad*np.sin(th)]])
pos = np.zeros(np.shape(baseLocations))
for rind in range(np.shape(baseLocations)[0]):
r = baseLocations[rind,:]
q = qrot[0]
qvec = qrot[1:4]
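# rotate r by the unit quaternion qrot = (q, qvec): r' = r + 2q (qvec x r) + 2 qvec x (qvec x r)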
rp = r + 2. * q * np.cross(qvec,r) \
+ 2. * np.cross(qvec,np.cross(qvec,r))
pos[rind,:] = rp
pos += comPos
return(pos,typeinds,diams)
def createSnapshot(coms,qrots,step):
"""Create HOOMD snapshot with the given molecules
Parameters
----------
coms: N x 3 numpy array
the positions of the centers of masses of the N molecules in the system
qrots: N x 4 numpy array
the orientations of the N molecules in the system
step: int
timestep
Returns
-------
snap: HOOMD snapshot
"""
snap = gsd.hoomd.Snapshot()
molno = np.shape(coms)[0]
beadsPerMol = 17
snap.particles.N = molno * beadsPerMol
snap.configuration.step = step
snap.configuration.box = [20,20,20,0,0,0]
snap.particles.types = ['LB','AB']
snap.particles.position = np.zeros([molno * beadsPerMol,3])
snap.particles.body = np.zeros(molno * beadsPerMol)
snap.particles.typeid = np.zeros(molno * beadsPerMol)
snap.particles.diameter = np.zeros(molno * beadsPerMol)
for moli in range(molno):
snap.particles.body[(moli * beadsPerMol): \
(moli * beadsPerMol + beadsPerMol)] \
= moli * np.ones(beadsPerMol)
(pos,typeinds,diams) = createOneMol(coms[moli,:],qrots[moli,:])
snap.particles.position[(moli * beadsPerMol): \
(moli * beadsPerMol + beadsPerMol)] = pos
snap.particles.typeid[(moli * beadsPerMol): \
(moli * beadsPerMol + beadsPerMol)] = typeinds
snap.particles.diameter[(moli * beadsPerMol): \
(moli * beadsPerMol + beadsPerMol)] = diams
return snap
if __name__ == "__main__":
#quaternion = (cos(th/2),sin(th/2) omhat) => rotation of th about omhat
molno = 4
ats = 17
pN = molno * ats
df4 = gsd.hoomd.open('dummyfull4_run1.gsd','wb')
"""Snapshot 1"""
coms1 = np.array([[0.,0.,0.],[0.,3.,0.],[0.,0.,-3],[0.,3.,-3.]])
qrot1 = np.array([[1.,0.,0.,0.],[1.,0.,0.,0.],[1.,0.,0.,0],[1.,0.,0.,0.]])
snap1 = createSnapshot(coms1,qrot1,0)
df4.append(snap1)
"""Snapshot 2"""
coms2 = np.array([[0.,0.,0.],[-1.5,2.5,0.],[0.,3.,-3.],[1.,3.5,-3.5]])
qrot2 = np.array([[1.,0.,0.,0.],[np.cos(np.pi/4),0.,0.,np.sin(np.pi/4)],
[np.cos(np.pi/4),0.,0.,np.sin(np.pi/4)],
quatMultiply(np.array([np.cos(np.pi/4),0.,
np.sin(np.pi/4),0.]),
np.array([np.cos(np.pi/4),0.,0.,
np.sin(np.pi/4)]))])
snap2 = createSnapshot(coms2,qrot2,1)
df4.append(snap2)
"""Snapshot 3"""
coms3 = np.array([[0.,0.,0.],[0.,1.,0.],[-4.5,-1.0,0.],[-4.,0.,0.]])
qrot3 = np.array([[1.,0.,0.,0.],[-1.,0.,0.,0.],
[1.,0.,0.,0.],[1.,0.,0.,0.]])
snap3 = createSnapshot(coms3,qrot3,2)
df4.append(snap3)
"""Snapshot 4"""
coms4 = np.array([[0.,0.,0.],[0.,1.,0.],[-4.,0.,0.],[-4.,1.,0.]])
qrot4 = qrot3
snap4 = createSnapshot(coms4,qrot4,3)
df4.append(snap4)
"""Snapshot 5"""
coms5 = np.array([[0.,0.,0.],[0.,1.,0.],[0.5,2.,-0.5],[0.5,3.,-0.5]])
qrot5 = np.array([[1.,0.,0.,0.],[-1.,0.,0.,0.],
[np.cos(np.pi/4),0.,np.sin(np.pi/4),0.],
[np.cos(np.pi/4),0.,-np.sin(np.pi/4),0.]])
snap5 = createSnapshot(coms5,qrot5,4)
df4.append(snap5)
"""Snapshot 6"""
coms6 = np.array([[0.,0.,0.],[0.,-0.5,np.sqrt(3)/2],
[0.,0.5,np.sqrt(3)/2],[0.,0.,-1.]])
qrot6 = np.array([[np.cos(np.pi/4),np.sin(np.pi/4),0.,0.],
[np.cos(np.pi/12),-np.sin(np.pi/12),0.,0.],
[np.cos(np.pi/12),np.sin(np.pi/12),0.,0.],
[np.cos(np.pi/4),np.sin(np.pi/4),0.,0.]])
snap6 = createSnapshot(coms6,qrot6,5)
df4.append(snap6)
df4_2 = gsd.hoomd.open('dummyfull4_run2.gsd','wb')
df4_2.append(snap2)
df4_2.append(snap2)
df4_2.append(snap3)
df4_2.append(snap5)
df4_2.append(snap6)
df4_2.append(snap6)
df4_3 = gsd.hoomd.open('dummyfull4_run3.gsd','wb')
df4_3.append(snap2)
df4_3.append(snap2)
df4_3.append(snap4)
df4_3.append(snap4)
df4_3.append(snap5)
df4_3.append(snap5)
df4_4 = gsd.hoomd.open('dummyfull4_run4.gsd','wb')
df4_4.append(snap1)
df4_4.append(snap3)
df4_4.append(snap4)
df4_4.append(snap4)
df4_4.append(snap6)
df4_4.append(snap6)
df4_5 = gsd.hoomd.open('dummyfull4_run5.gsd','wb')
df4_5.append(snap2)
df4_5.append(snap2)
df4_5.append(snap3)
df4_5.append(snap5)
df4_5.append(snap5)
df4_5.append(snap5)
|
# -*- coding: utf-8 -*-
from mamba import description, context, it
from expects import expect, be, have_len, be_above, equal
import os
from infcommon.mysql import mysql
TEST_TABLE = 'integration_test_table'
with description('MySQLClientTest'):
with before.each:
self.mysql_client = mysql.MySQLClient(os.getenv("LOCAL_DB_URI"))
sql_query = "DROP TABLE IF EXISTS {0}".format(TEST_TABLE)
self.mysql_client.execute(sql_query)
with context('creating a table'):
with it('creates'):
sql_query = "CREATE TABLE {0} (value varchar(10))".format(TEST_TABLE)
result = self.mysql_client.execute(sql_query)
expect(result).to(be(tuple()))
with context('making a query with results'):
with it('returns it'):
sql_query = "SELECT * FROM mysql.user WHERE User=%s"
result = self.mysql_client.execute(sql_query, ['root'])
expect(result).to(have_len(be_above(0)))
with context('making a query with no results'):
with it('returns empty'):
sql_query = "SELECT * FROM mysql.user WHERE User=%s"
result = self.mysql_client.execute(sql_query, ['non-existing-user'])
expect(result).to(have_len(be(0)))
with context('inserting value into table'):
with it('returns inserted primary key id'):
self.mysql_client.execute("CREATE TABLE {0} (id int not null auto_increment, value varchar(10), primary key (id))".format(TEST_TABLE))
result = self.mysql_client.execute("INSERT INTO {0}(value) VALUES(%s)".format(TEST_TABLE), ("foo",))
expect(result).to(equal(1))
|
import re
from setuptools import setup, find_packages
install_requires = ["boto3==1.9.196", "click==7.0", "requests-aws4auth==0.9"]
tests_requires = [
"flake8==3.7.8",
"isort==4.3.21",
"pytest==5.0.1",
"pytest-cov==2.7.1",
]
with open("README.md") as fh:
long_description = re.sub(
"<!-- start-no-pypi -->.*<!-- end-no-pypi -->\n",
"",
fh.read(),
flags=re.M | re.S,
)
setup(
name="aws-es-query",
version="0.2.0",
author="Lab Digital B.V.",
author_email="opensource@labdigital.nl",
url="https://www.github.com/labd/aws-es-query/",
description="Query tool for AWS ElasticSearch using IAM",
long_description=long_description,
long_description_content_type="text/markdown",
zip_safe=False,
py_modules=["aws_es_query"],
install_requires=install_requires,
tests_require=tests_requires,
extras_require={"test": tests_requires},
entry_points={"console_scripts": ["aws-es-query=aws_es_query:main"]},
license="MIT",
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
#!/usr/bin/env python3
import json
import os
import subprocess
import sys
import tempfile
def _get_source_files(compilation_database):
for compile_command in compilation_database:
yield os.path.join(compile_command['directory'], compile_command['file'])
def _get_compilation_database(build_args):
subprocess.check_call(['bazel',
'build',
'--experimental_action_listener=//ci:ci-action-listener',
*build_args,
'//adlik_serving'])
return subprocess.check_output(args=['ci/tools/build-compilation-database.py'], universal_newlines=True)
def main(args):
has_failure = False
compilation_database_text = _get_compilation_database(args)
with open('.clang-tidy') as clang_tidy_file:
clang_tidy_config = clang_tidy_file.read()
with tempfile.TemporaryDirectory() as build_path:
with open(os.path.join(build_path, 'compile_commands.json'), 'w') as f:
f.write(compilation_database_text)
for source_file in _get_source_files(json.loads(compilation_database_text)):
if subprocess.run(['clang-tidy',
f'-config={clang_tidy_config}',
f'-p={build_path}',
source_file]).returncode != 0:
has_failure = True
if has_failure:
sys.exit(1)
if __name__ == "__main__":
main(sys.argv[1:])
|
# -*- coding: utf-8 -*-
# ======================================================================================================================
# Copyright (©) 2015-2021 LCS - Laboratoire Catalyse et Spectrochimie, Caen, France. =
# CeCILL-B FREE SOFTWARE LICENSE AGREEMENT - See full LICENSE agreement in the root directory =
# ======================================================================================================================
"""
This module defines the class |NDPlot| in which generic plot methods for a |NDDataset| are defined.
"""
__all__ = ['NDPlot', 'plot']
import re
import textwrap
from cycler import cycler
import matplotlib as mpl
from matplotlib.colors import to_rgba
# from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
import plotly.graph_objects as go
from mpl_toolkits.axes_grid1 import make_axes_locatable
from traitlets import Dict, HasTraits, Instance, Union, default, TraitError
from spectrochempy.utils import get_figure, pathclean
from spectrochempy.core.dataset.meta import Meta
from spectrochempy.core import preferences, plot_preferences, error_
from spectrochempy.core.plotters.plot1d import plot_1D
from spectrochempy.core.plotters.plot3d import plot_3D
from spectrochempy.core.plotters.plot2d import plot_2D
# from spectrochempy.utils import deprecated
# ======================================================================================================================
# Management of the preferences for datasets
# ======================================================================================================================
class Preferences(Meta):
"""
Preferences management
"""
def __init__(self, **data):
super().__init__(**data)
def __getitem__(self, key):
# search on the preferences
if self.parent is not None:
res = getattr(self.parent, f'{self.name}_{key}')
elif hasattr(plot_preferences, key):
res = getattr(plot_preferences, key)
elif hasattr(preferences, key):
res = getattr(preferences, key)
else:
alias = self._get_alias(key)
if alias:
if isinstance(alias, list):
res = Preferences(parent=self, name=key, **dict([(n, getattr(self, f'{key}_{n}')) for n in alias]))
else:
res = getattr(self, alias)
else:
res = super().__getitem__(key)
if res is None:
error_(f'not found {key}')
return res
def __setitem__(self, key, value):
# also change the corresponding preferences
if hasattr(plot_preferences, key):
try:
setattr(plot_preferences, key, value)
except TraitError:
value = type(plot_preferences.traits()[key].default_value)(value)
setattr(plot_preferences, key, value)
elif hasattr(preferences, key):
setattr(preferences, key, value)
elif key in self.keys():
newkey = f'{self.name}_{key}'
setattr(plot_preferences, newkey, value)
self.parent[newkey] = value
return
else:
# try to find an alias for matplotlib values
alias = self._get_alias(key)
if alias:
newkey = f'{alias}_{key}'
setattr(plot_preferences, newkey, value)
self.parent[newkey] = value
else:
error_(f'not found {key}')
return
super().__setitem__(key, value)
# ------------------------------------------------------------------------------------------------------------------
# Private methods
# ------------------------------------------------------------------------------------------------------------------
def _get_alias(self, key):
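# Look up `key` inside the matplotlib-style trait names: return the list of member
# suffixes when `key` names a parameter group, or the group prefix when `key` is a
# member of exactly one group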
alias = []
lkeyp = (len(key) + 1)
regex = r"[a-zA-Z0-9_]*(?:\b|_)" + key + r"(?:\b|_)[a-zA-Z0-9_]*"
for item in plot_preferences.trait_names():
matches = re.match(regex, item)
if matches is not None:
alias.append(item)
if alias:
starts = any([par.startswith(key) for par in alias])
# ends = any([par.endswith(key) for par in alias])
if len(alias) > 1:
if alias[0].endswith(key) and (not starts and self.parent is not None):
# it is a member of a group but we don't know which one:
raise KeyError(
f'Found several keys for {key}: {alias}, so it is ambiguous. Please choose one of them')
else:
if any([par.startswith(key) for par in alias]):
# we return the group of parameters
pars = []
for par in alias:
if par.startswith(key):
pars.append(par[lkeyp:])
return pars
else:
return alias[0][:-lkeyp]
raise KeyError(f'{key} not found in matplotlib preferences')
# ------------------------------------------------------------------------------------------------------------------
# Public methods
# ------------------------------------------------------------------------------------------------------------------
def reset(self):
# remove the matplotlib_user json file to reset to defaults
config_dir = pathclean(preferences.cfg.config_dir)
f = config_dir / 'PlotPreferences.json'
if f.exists():
f.unlink()
plot_preferences._apply_style('scpy')
self.style = 'scpy'
# reset also non-matplotlib preferences
nonmplpars = ['method_1D', 'method_2D', 'method_3D', 'colorbar', 'show_projections', 'show_projection_x',
'show_projection_y', 'colormap', 'max_lines_in_stack', 'simplify', 'number_of_x_labels',
'number_of_y_labels', 'number_of_z_labels', 'number_of_contours', 'contour_alpha',
'contour_start', 'antialiased', 'rcount', 'ccount']
for par in nonmplpars:
setattr(self, par, plot_preferences.traits()[par].default_value)
self._data = {}
def all(self):
"""
List all parameters with their current and default value
"""
for key in plot_preferences.trait_names(config=True):
self.help(key)
def help(self, key):
"""
Display information on a given parameter
Parameters
----------
key: str
name of the parameter for which we want information
"""
from spectrochempy.utils import colored, TBold
value = self[key]
trait = plot_preferences.traits()[key]
default = trait.default_value
thelp = trait.help.replace('\n', ' ').replace('\t', ' ')
sav = ''
while thelp != sav:
sav = thelp
thelp = thelp.replace(' ', ' ')
help = '\n'.join(textwrap.wrap(thelp, 100, initial_indent=' ' * 20, subsequent_indent=' ' * 20))
value = colored(value, 'GREEN')
default = colored(default, 'BLUE')
print(TBold(f"{key} = {value} \t[default: {default}]"))
print(f"{help}\n")
def makestyle(self, filename='mydefault', to_mpl=False):
if filename.startswith('scpy'):
error_('`scpy` is READ-ONLY. Please use another style name.')
return
txt = ""
sline = ""
for key in mpl.rcParams.keys():
if key in ['animation.avconv_args', 'animation.avconv_path', 'animation.html_args', 'keymap.all_axes',
'mathtext.fallback_to_cm', 'validate_bool_maybe_none', 'savefig.jpeg_quality',
'text.latex.preview', 'backend', 'backend_fallback', 'date.epoch', 'docstring.hardcopy',
'figure.max_open_warning', 'figure.raise_window', 'interactive', 'savefig.directory', 'timezone',
'tk.window_focus', 'toolbar', 'webagg.address', 'webagg.open_in_browser', 'webagg.port',
'webagg.port_retries']:
continue
val = str(mpl.rcParams[key])
sav = ''
while val != sav:
sav = val
val = val.replace(' ', ' ')
line = f'{key:40s} : {val}\n'
if line[0] != sline:
txt += '\n'
sline = line[0]
if key not in ['axes.prop_cycle']:
line = line.replace('[', '').replace(']', "").replace('\'', '').replace('"', '')
if key == 'savefig.bbox':
line = f'{key:40s} : standard\n'
txt += line.replace("#", '')
# Non-matplotlib parameters:
# some parameters are not saved in matplotlib style sheets so we will add them here
nonmplpars = ['method_1D', 'method_2D', 'method_3D', 'colorbar', 'show_projections', 'show_projection_x',
'show_projection_y', 'colormap', 'max_lines_in_stack', 'simplify', 'number_of_x_labels',
'number_of_y_labels', 'number_of_z_labels', 'number_of_contours', 'contour_alpha',
'contour_start', 'antialiased', 'rcount', 'ccount']
txt += '\n\n##\n## ADDITIONAL PARAMETERS FOR SPECTROCHEMPY\n##\n'
for par in nonmplpars:
txt += f"##@{par:37s} : {getattr(self, par)}\n"
stylesheet = (pathclean(self.stylesheets) / filename).with_suffix('.mplstyle')
stylesheet.write_text(txt)
if to_mpl:
# make it also accessible to pyplot
stylelib = (pathclean(mpl.get_configdir()) / 'stylelib' / filename).with_suffix('.mplstyle')
stylelib.write_text(txt)
# plot_preferences.traits()['style'].trait_types = plot_preferences.traits()['style'].trait_types +\
# (Unicode(filename),)
self.style = filename
return self.style
# ======================================================================================================================
# Class NDPlot to handle plotting of datasets
# ======================================================================================================================
class NDPlot(HasTraits):
"""
Plotting interface for |NDDataset|
This class is used as basic plotting interface of the |NDDataset|.
"""
# variable containing the matplotlib axis defined for a NDArray object
_ax = Instance(plt.Axes, allow_none=True)
# The figure on which this NDArray can be plotted
_fig = Union((Instance(plt.Figure), Instance(go.Figure)), allow_none=True)
# The axes on which this dataset and other elements such as projections
# and colorbar can be plotted
_ndaxes = Dict(Instance(plt.Axes))
# add metadata to store plot parameters
_preferences = Instance(Preferences, allow_none=True)
# ------------------------------------------------------------------------------------------------------------------
# generic plotter and plot related methods or properties
# ------------------------------------------------------------------------------------------------------------------
def plot(self, **kwargs):
"""
Generic plot function.
This applies to a |NDDataset| but actually delegates the work to a plotter defined by the parameter ``method``.
"""
# --------------------------------------------------------------------
# select plotter depending on the dimension of the data
# --------------------------------------------------------------------
method = 'generic'
method = kwargs.pop('method', method)
# Find or guess the adequate plotter
# -----------------------------------
_plotter = getattr(self, f"plot_{method.replace('+', '_')}", None)
if _plotter is None:
# no plotter found
error_('The specified plotter for method '
'`{}` was not found!'.format(method))
raise IOError
# Execute the plotter
# --------------------
return _plotter(**kwargs)
# ------------------------------------------------------------------------------------------------------------------
# plotter: plot_generic
# ------------------------------------------------------------------------------------------------------------------
# ..................................................................................................................
def plot_generic(self, **kwargs):
"""
The generic plotter.
It tries to guess an adequate basic plot for the data. Other plotter methods are defined explicitly in the
``plotters`` package.
Parameters
----------
ax : :class:`matplotlib.axe`
the viewplot where to plot.
kwargs : optional additional arguments
Returns
-------
ax
Return the handler to ax where the main plot was done
"""
if self._squeeze_ndim == 1:
ax = plot_1D(self, **kwargs)
elif self._squeeze_ndim == 2:
ax = plot_2D(self, **kwargs)
elif self._squeeze_ndim == 3:
ax = plot_3D(self, **kwargs)
else:
error_('Cannot guess an adequate plotter, nothing done!')
return False
return ax
def close_figure(self):
"""Close a Matplotlib figure associated to this dataset"""
if self._fig is not None:
plt.close(self._fig)
# ------------------------------------------------------------------------------------------------------------------
# setup figure properties
# ------------------------------------------------------------------------------------------------------------------
# ..................................................................................................................
def _figure_setup(self, ndim=1, **kwargs):
prefs = self.preferences
method = prefs.method_2D if ndim == 2 else prefs.method_1D
method = kwargs.get('method', method)
ax3d = '3d' if method in ['surface'] else None
# Get current figure information
# ------------------------------
# should we use the previous figure?
clear = kwargs.get('clear', True)
# is ax in the keywords ?
ax = kwargs.pop('ax', None)
# is it a twin figure? In such case if ax and hold are also provided,
# they will be ignored
tax = kwargs.get('twinx', None)
if tax is not None:
if isinstance(tax, plt.Axes):
clear = False
ax = tax.twinx()
ax.name = 'main'
tax.name = 'twin' # the previous main is renamed!
self.ndaxes['main'] = ax
self.ndaxes['twin'] = tax
else:
raise ValueError(f'{tax} is not recognized as a valid Axes')
self._fig = get_figure(preferences=prefs, **kwargs)
if clear:
self._ndaxes = {} # reset ndaxes
self._divider = None
if ax is not None:
# ax given in the plot parameters,
# in this case we will plot on this ax
if isinstance(ax, (plt.Axes)):
ax.name = 'main'
self.ndaxes['main'] = ax
else:
raise ValueError('{} is not recognized as a valid Axes'.format(ax))
elif self._fig.get_axes():
# no ax parameters in keywords, so we need to get those existing
# We assume that the existing axes have a name
self.ndaxes = self._fig.get_axes()
else:
# or create a new subplot
ax = self._fig.gca(projection=ax3d)
ax.name = 'main'
self.ndaxes['main'] = ax
# set the prop_cycle according to preference
prop_cycle = eval(prefs.axes.prop_cycle)
if isinstance(prop_cycle, str):
# not yet evaluated
prop_cycle = eval(prop_cycle)
colors = prop_cycle.by_key()['color']
for i, c in enumerate(colors):
try:
c = to_rgba(c)
colors[i] = c
except ValueError:
try:
c = to_rgba(f'#{c}')
colors[i] = c
except ValueError as e:
raise e
linestyles = ['-', '--', ':', '-.']
markers = ['o', 's', '^']
if ax is not None and (kwargs.pop('scatter', False) or kwargs.pop('scatterpen', False)):
ax.set_prop_cycle(cycler('color', colors * len(linestyles) * len(markers)) + cycler('linestyle',
linestyles * len(
colors) * len(
markers)) +
cycler(
'marker', markers * len(colors) * len(linestyles)))
elif ax is not None and kwargs.pop('pen', False):
ax.set_prop_cycle(cycler('color', colors * len(linestyles)) + cycler('linestyle', linestyles * len(colors)))
# Get the number of the present figure
self._fignum = self._fig.number
# for generic plots we assume a single axis
# with possible projections
# and an optional colorbar.
# other plot classes may take care of other needs
ax = self.ndaxes['main']
if ndim == 2:
# TODO: also the case of 3D
# show projections (only useful for map or image)
# ------------------------------------------------
self.colorbar = colorbar = kwargs.get('colorbar', prefs.colorbar)
proj = kwargs.get('proj', prefs.show_projections)
# TODO: tell the axis by title.
xproj = kwargs.get('xproj', prefs.show_projection_x)
yproj = kwargs.get('yproj', prefs.show_projection_y)
SHOWXPROJ = (proj or xproj) and method in ['map', 'image']
SHOWYPROJ = (proj or yproj) and method in ['map', 'image']
# Create the various axes
# -------------------------
# create new axes on the right and on the top of the current axes
# The first argument of the new_vertical(new_horizontal) method is
# the height (width) of the axes to be created in inches.
#
# This is necessary for projections and colorbar
self._divider = None
if (SHOWXPROJ or SHOWYPROJ or colorbar) and self._divider is None:
self._divider = make_axes_locatable(ax)
divider = self._divider
if SHOWXPROJ:
axex = divider.append_axes("top", 1.01, pad=0.01, sharex=ax, frameon=0, yticks=[])
axex.tick_params(bottom='off', top='off')
plt.setp(axex.get_xticklabels() + axex.get_yticklabels(), visible=False)
axex.name = 'xproj'
self.ndaxes['xproj'] = axex
if SHOWYPROJ:
axey = divider.append_axes("right", 1.01, pad=0.01, sharey=ax, frameon=0, xticks=[])
axey.tick_params(right='off', left='off')
plt.setp(axey.get_xticklabels() + axey.get_yticklabels(), visible=False)
axey.name = 'yproj'
self.ndaxes['yproj'] = axey
if colorbar and not ax3d:
axec = divider.append_axes("right", .15, pad=0.1, frameon=0, xticks=[], yticks=[])
axec.tick_params(right='off', left='off')
# plt.setp(axec.get_xticklabels(), visible=False)
axec.name = 'colorbar'
self.ndaxes['colorbar'] = axec
# ------------------------------------------------------------------------------------------------------------------
# resume a figure plot
# ------------------------------------------------------------------------------------------------------------------
# ..................................................................................................................
def _plot_resume(self, origin, **kwargs):
# put back the axes in the original dataset
# (we have worked on a copy in plot)
if not kwargs.get('data_transposed', False):
origin.ndaxes = self.ndaxes
if not hasattr(self, '_ax_lines'):
self._ax_lines = None
origin._ax_lines = self._ax_lines
if not hasattr(self, "_axcb"):
self._axcb = None
origin._axcb = self._axcb
else:
nda = {}
for k, v in self.ndaxes.items():
nda[k + 'T'] = v
origin.ndaxes = nda
origin._axT_lines = self._ax_lines
if hasattr(self, "_axcb"):
origin._axcbT = self._axcb
origin._fig = self._fig
loc = kwargs.get("legend", None)
if loc:
origin.ndaxes['main'].legend(loc=loc)
# Additional matplotlib commands on the current plot
# ---------------------------------------------------------------------
commands = kwargs.get('commands', [])
if commands:
for command in commands:
com, val = command.split('(')
val = val.split(')')[0].split(',')
ags = []
kws = {}
for item in val:
if '=' in item:
k, v = item.split('=')
kws[k.strip()] = eval(v)
else:
ags.append(eval(item))
getattr(self.ndaxes['main'], com)(*ags, **kws) # TODO: improve this
# output command should be after all plot commands
savename = kwargs.get('output', None)
if savename is not None:
# we save the figure with options found in kwargs
# starting with `save`
kw = {}
for key, value in kwargs.items():
if key.startswith('save'):
key = key[4:]
kw[key] = value
self._fig.savefig(savename, **kw)
# ------------------------------------------------------------------------------------------------------------------
# Special attributes
# ------------------------------------------------------------------------------------------------------------------
# ..................................................................................................................
def __dir__(self):
return ['fignum', 'ndaxes', 'divider']
# ------------------------------------------------------------------------------------------------------------------
# Properties
# ------------------------------------------------------------------------------------------------------------------
# ..................................................................................................................
@default('_preferences')
def _preferences_default(self):
# Reset all preferences
prefs = Preferences()
return prefs
# ..................................................................................................................
@property
def preferences(self):
"""
|Meta| instance object - Additional metadata.
"""
return self._preferences
# ..................................................................................................................
@preferences.setter
def preferences(self, preferences):
# property.setter for preferences
if preferences is not None:
self._preferences.update(preferences)
# ..................................................................................................................
@property
def fig(self):
"""
Matplotlib figure associated to this dataset
"""
return self._fig
# ..................................................................................................................
@property
def fignum(self):
"""
Matplotlib figure associated to this dataset
"""
return self._fignum
# ..................................................................................................................
@property
def ndaxes(self):
"""
A dictionary containing all the axes of the current figures
"""
return self._ndaxes
# ..................................................................................................................
@ndaxes.setter
def ndaxes(self, axes):
# we assume that the axes have a name
if isinstance(axes, list):
# a list a axes have been passed
for ax in axes:
self._ndaxes[ax.name] = ax
elif isinstance(axes, dict):
self._ndaxes.update(axes)
elif isinstance(axes, plt.Axes):
# it's an axe! add it to our list
self._ndaxes[axes.name] = axes
# ..................................................................................................................
@property
def ax(self):
"""
the main matplotlib axe associated to this dataset
"""
return self._ndaxes['main']
# ..................................................................................................................
@property
def axT(self):
"""
the matplotlib axe associated to the transposed dataset
"""
return self._ndaxes['mainT']
# ..................................................................................................................
@property
def axec(self):
"""
Matplotlib colorbar axe associated to this dataset
"""
return self._ndaxes['colorbar']
# ..................................................................................................................
@property
def axecT(self):
"""
Matplotlib colorbar axe associated to the transposed dataset
"""
return self._ndaxes['colorbarT']
# ..................................................................................................................
@property
def axex(self):
"""
Matplotlib projection x axe associated to this dataset
"""
return self._ndaxes['xproj']
# ..................................................................................................................
@property
def axey(self):
"""
Matplotlib projection y axe associated to this dataset
"""
return self._ndaxes['yproj']
# ..................................................................................................................
@property
def divider(self):
"""
Matplotlib plot divider
"""
return self._divider
# .............................................................................
plot = NDPlot.plot # make plot accessible directly from the scp API
# ======================================================================================================================
if __name__ == '__main__':
pass
|
# -*- coding: utf-8 -*-
from model.group import Group
class GroupHelper:
def __init__(self, app):
self.app = app
def add(self, group):
wd = self.app.wd
self.open_groups_page()
wd.find_element_by_name("new").click()
self.fill_group_form(group)
wd.find_element_by_name("submit").click()
self.open_groups_page()
self.groups_cache = None
def delete(self, index):
wd = self.app.wd
self.open_groups_page()
self.select_group(index)
wd.find_element_by_name("delete").click()
self.open_groups_page()
self.groups_cache = None
def delete_by_id(self, id):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_id(id)
wd.find_element_by_name("delete").click()
self.open_groups_page()
self.groups_cache = None
def update(self, index, group):
wd = self.app.wd
self.open_groups_page()
self.open_to_edit(index)
self.fill_group_form(group)
wd.find_element_by_name("update").click()
self.open_groups_page()
self.groups_cache = None
def update_by_id(self, id, group):
wd = self.app.wd
self.open_groups_page()
self.open_to_edit_by_id(id)
self.fill_group_form(group)
wd.find_element_by_name("update").click()
self.open_groups_page()
self.groups_cache = None
def fill_group_form(self, group):
self.update_field("group_name", group.name)
self.update_field("group_header", group.header)
self.update_field("group_footer", group.footer)
def update_field(self, field, value):
wd = self.app.wd
if value is not None:
wd.find_element_by_name(field).click()
wd.find_element_by_name(field).clear()
wd.find_element_by_name(field).send_keys(value)
def count(self):
wd = self.app.wd
self.open_groups_page()
return len(wd.find_elements_by_name("selected[]"))
def open_groups_page(self):
wd = self.app.wd
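# navigate only if we are not already on the groups page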
if not (wd.current_url.endswith("group.php") and len(wd.find_elements_by_name("new")) > 0):
wd.find_element_by_link_text("groups").click()
def select_group(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def select_group_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % id).click()
groups_cache = None
def get_groups(self):
if self.groups_cache is None:
wd = self.app.wd
self.open_groups_page()
self.groups_cache = []
for entry in range(self.count()):
self.groups_cache.append(self.get_info_from_edit_page(entry))
return list(self.groups_cache)
def open_to_edit(self, index):
wd = self.app.wd
self.open_groups_page()
self.select_group(index)
wd.find_element_by_name("edit").click()
def open_to_edit_by_id(self, id):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_id(id)
wd.find_element_by_name("edit").click()
def get_info_from_edit_page(self, index):
wd = self.app.wd
self.open_groups_page()
self.open_to_edit(index)
id = wd.find_element_by_name("id").get_attribute("value")
name = wd.find_element_by_name("group_name").get_attribute("value")
header = wd.find_element_by_name("group_header").get_attribute("value")
footer = wd.find_element_by_name("group_footer").get_attribute("value")
return Group(id=id, name=name, header=header, footer=footer)
|
# Generated by Django 2.1.3 on 2018-12-05 22:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0004_tag_tag_type'),
]
operations = [
migrations.AddField(
model_name='ingestlog',
name='new',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='ingestlog',
name='updated',
field=models.IntegerField(default=0),
preserve_default=False,
),
]
|
from flask import Flask, jsonify, request, make_response
import flask_restful
from flask_restful import Resource, Api
from flasgger.utils import swag_from
from werkzeug.security import generate_password_hash, check_password_hash
from . import users
from app.models import database, User
import jwt
import datetime
from functools import wraps
userapi=Api(users)
SECRET_KEY = 'VX-4178-WD-3429-MZ-31'
def token_required(funct):
@wraps(funct)
def verify_token(*args, **kwargs):
token = None
if 'access_token' in request.headers:
token = request.headers['access_token']
# reject requests without a token before attempting to decode it
if token is None:
return make_response((jsonify({"message":"Token is missing"})), 401)
try:
data = jwt.decode(token, SECRET_KEY, algorithms=['HS256'])
current_user = {}
for user in database:
if user['user_id'] == data["sub"]:
current_user["user_id"] = user["user_id"]
current_user["Admin_status"] = user["Admin_status"]
current_user["email"] = user["details"].email
except Exception:
return make_response((jsonify({"message":"Unauthorized access, please login"})), 401)
return funct(current_user, *args, **kwargs)
return verify_token
class signup(Resource):
@swag_from('signup.yml')
def post(self):
json_data = request.get_json()
"""Check that email format is correct"""
if json_data['email'].lower().endswith('.com') is False:
return make_response((jsonify({"message":"Input a valid email"})), 422)
if '@' not in json_data['email'][:-4]:
return make_response((jsonify({"message":'''"@" is missing'''})), 422)
repeat = [char for char in json_data['email'] if char == '@']
if len(repeat) >1:
return make_response((jsonify({"message":'''Repetition of "@" is not allowed'''})), 422)
"""Check that user is unique"""
user_ = [user for user in database if json_data['email'].lower() == user['details'].email.lower()]
if len(user_) != 0:
return make_response((jsonify({"message":"User already exists"})), 401)
"""Create object user"""
user = User(json_data['email'], generate_password_hash(json_data['password']))
user_profile = {'details':user,
'user_id':user.generate_id(len(database)),
'Admin_status':False}
"""Add user to database"""
database.append(user_profile)
return make_response((jsonify({"message":"Successfully signed up"})), 201)
class login(Resource):
@swag_from('login.yml')
def post(self):
auth = request.get_json()
if not auth or not auth['email'] or not auth['password']:
return make_response((jsonify({"message":"Authorize with email and password"})), 401)
if isinstance(auth['email'], int):
return make_response((jsonify({"message":"Input should be a string"})), 401)
if '@' not in auth['email'][:-4]:
return make_response((jsonify({"message":'''"@" is missing'''})), 401)
if auth['email'].lower().endswith('.com') is False:
return make_response((jsonify({"message":"Input a valid email"})), 401)
repeat = [char for char in auth['email'] if char == '@']
if len(repeat) > 1:
return make_response((jsonify({"message":'''Repetition of "@" is not allowed'''})), 401)
"""Verify user in database and password matches"""
user = [user for user in database if user['details'].email.lower() == auth['email'].lower()]
if len(user) == 0 :
return make_response((jsonify({"message":"User does not exist"})), 404)
info = user[0]
if check_password_hash(info['details'].password, auth['password']):
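# issue a JWT carrying the user id in 'sub', valid for 45 minutes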
token = jwt.encode({
"exp": datetime.datetime.utcnow() + datetime.timedelta(days = 0, minutes = 45),
"iat": datetime.datetime.utcnow(),
"sub": info['user_id']}, SECRET_KEY, algorithm = 'HS256')
return jsonify({'token':token})
else:
return make_response((jsonify({"message":"Authorize with correct password"})), 401)
class Admin(Resource):
method_decorators=[token_required]
@swag_from('api-docs/changeAdmin.yml')
def put(self, current_user):
count=0
for user in database:
if user['user_id'] == current_user['user_id']:
user['Admin_status'] = request.get_json().get('Admin_status', user['Admin_status'])
count += 1
if count == 1:
return make_response((jsonify({"message":"Admin status set to True"})), 201)
method_decorators=[token_required]
@swag_from('api-docs/checkAdmin.yml')
def get(self,current_user):
return make_response((jsonify({"Admin_status":current_user["Admin_status"]})), 200)
userapi.add_resource(signup, '/auth/signup')
userapi.add_resource(login, '/auth/login')
userapi.add_resource(Admin, '/auth/Admin')
|
import os
import argparse
from dotenv import load_dotenv
from mev.azure.run import get_auth_ws, run_prepare
load_dotenv()
ENVIRONMENT_VARIABLES = dict(
TENANT_ID=os.getenv("TENANT_ID"),
MONGO_CONNECTION_STRING=os.getenv("MONGO_CONNECTION_STRING"),
MONGO_DATABASE_NAME=os.getenv("MONGO_DATABASE_NAME"),
ETHERSCAN_API_KEY=os.getenv("ETHERSCAN_API_KEY"),
MORALIS_NODE=os.getenv("MORALIS_NODE"),
ALCHEMY_NODE=os.getenv("ALCHEMY_NODE"),
SLEEP_TIME=os.getenv("SLEEP_TIME"),
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--nodes", type=int, help="Number of cluster nodes", required=True)
args = parser.parse_args()
assert args.nodes <= 100, "Max nodes is 100"
# All params
dataset_name_prepare = "mev_train_dataset"
compute_target_name = "mev-cluster"
output_name = "prepare_new_second_part"
source_dir_prepare = "./mev/azure/src"
script_name_prepare = "prepare.py"
max_nodes = args.nodes
with_labels = True
id_from = 50
# Auth to Azure ML
ws = get_auth_ws(ENVIRONMENT_VARIABLES["TENANT_ID"])
print("Running 'prepare' step...")
run_prepare(
dataset_name=dataset_name_prepare,
compute_target_name=compute_target_name,
source_dir=source_dir_prepare,
script_name=script_name_prepare,
ws=ws,
environment_variables=ENVIRONMENT_VARIABLES,
with_labels=with_labels,
max_nodes=max_nodes,
output_name=output_name,
id_from=id_from
)
print("Prepare step finished.") |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^api/users$', views.UserCreate.as_view(), name='account-create'),
url(r'^tasks$', views.TaskCreate.as_view(), name='tasks-create'),
url(r'^tasks2$', views.Task2Create.as_view(), name='tasks2-create'),
url(r'^project$', views.ProjectCreate.as_view(), name='project-create'),
url(r'^role$', views.RoleView.as_view(), name='role-view'),
]
|
import cv2
import numpy as np
USE_TRACKING_AFTER_INITIAL_FACE_DETECTION = True
STABILIZED_WINDOW_HEIGHT_HALF = int(720 / 2)
STABILIZED_WINDOW_WIDTH_HALF = int(1800 / 2)
HEIGHT_OFFSET_FOR_STABILIZED_WINDOW = 2000
WIDTH_OFFSET_FOR_STABILIZED_WINDOW = 0
SAVE_OUTPUT_VIDEO = True
OUTPUT_VIDEO_FPS = 30
VIDEO_SRC_IS_CAM = True
INPUT_VIDEO_FILE = 'input/test.mp4'
OUTPUT_VIDEO_FILE = 'output/output.avi'
face_cascade = cv2.CascadeClassifier('model/haarcascade_frontalface_default.xml')
if VIDEO_SRC_IS_CAM :
cap = cv2.VideoCapture(0)
OUTPUT_VIDEO_FPS = 15
else:
cap = cv2.VideoCapture(INPUT_VIDEO_FILE)
x, y, w, h = [0, 0, 0, 0]
newy_max = newx_max = 0
newy = newx = 0
gotFace = False
writer = 0
roi = 0
if USE_TRACKING_AFTER_INITIAL_FACE_DETECTION:
tracker = cv2.TrackerKCF_create()
# tracker = cv2.TrackerMIL_create()
while cv2.waitKey(1) < 113: # run until 'q' (ASCII 113) or any later key is pressed
(grabbed, frame) = cap.read()
if not grabbed:
if SAVE_OUTPUT_VIDEO and writer is not None:
writer.release()
cap.release()
exit()
if USE_TRACKING_AFTER_INITIAL_FACE_DETECTION and gotFace:
ok, box = tracker.update(frame)
if ok:
faces = [box]
else:
faces = []
else:
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=3,
minSize=(130, 130)
)
frameHeight, frameWidth, _ = frame.shape
if SAVE_OUTPUT_VIDEO and writer is None:
writer = cv2.VideoWriter(OUTPUT_VIDEO_FILE, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), OUTPUT_VIDEO_FPS,
(STABILIZED_WINDOW_WIDTH_HALF*2+WIDTH_OFFSET_FOR_STABILIZED_WINDOW, STABILIZED_WINDOW_HEIGHT_HALF*2+HEIGHT_OFFSET_FOR_STABILIZED_WINDOW))
if len(faces) > 0:
if USE_TRACKING_AFTER_INITIAL_FACE_DETECTION and not gotFace:
selected = False
for face in faces:
fx, fy, fw, fh = face
fw += 140+fx
fh += 140+fy
fx -= 140
fy -= 140
if fx < 0 or fy < 0:
continue
foundFace = frame[fy:fh, fx:fw]
cv2.imshow('Found Face', foundFace)
print("Press 's' to select and any other key to proceed.")
key = cv2.waitKey(0)
if key == 115:
selected = True
face = np.array([fx, fy, fw - fx, fh - fy])
tracker.init(frame, face)
break
if not selected:
continue
gotFace = True
x1, y1, w, h = faces[0]
x = int(x1 + w / 2)
y = int(y1 + h / 2)
newy_max = frameHeight
newx_max = frameWidth
newy = 0
newx = 0
if (y-STABILIZED_WINDOW_HEIGHT_HALF) > 0:
newy = y - STABILIZED_WINDOW_HEIGHT_HALF
if (y+STABILIZED_WINDOW_HEIGHT_HALF) < frameHeight:
newy_max= y + STABILIZED_WINDOW_HEIGHT_HALF
if (x - STABILIZED_WINDOW_WIDTH_HALF) > 0:
newx = x - STABILIZED_WINDOW_WIDTH_HALF
if (x + STABILIZED_WINDOW_WIDTH_HALF) < frameWidth:
newx_max = x + STABILIZED_WINDOW_WIDTH_HALF
if gotFace:
roi = frame[newy:newy_max+HEIGHT_OFFSET_FOR_STABILIZED_WINDOW, newx:newx_max+WIDTH_OFFSET_FOR_STABILIZED_WINDOW]
cv2.imshow('output', roi)
if SAVE_OUTPUT_VIDEO:
roi = cv2.resize(roi,(STABILIZED_WINDOW_WIDTH_HALF*2+WIDTH_OFFSET_FOR_STABILIZED_WINDOW,
STABILIZED_WINDOW_HEIGHT_HALF*2+HEIGHT_OFFSET_FOR_STABILIZED_WINDOW))
writer.write(roi)
if SAVE_OUTPUT_VIDEO and writer is not None:
writer.release()
cap.release()
|
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.test import override_settings
from eno_a3_django.users.tests.factories import EnProfileFactory, RoleFactory, UserFactory
from organizations.tests.factories import OrganizationFactory
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
User = get_user_model()
class EnUserTestCase(APITestCase):
"""
users.tests.test_en_user.EnUserTestCase.
"""
def setUp(self):
roles = [
{"name": "ENO"},
{"name": "Implementation Manager", "path": "ENO.Implementation Manager"},
{"name": "Main Implementation Manager", "path": "ENO.Main Implementation Manager"},
{"name": "Vendor"},
{"name": "Vendor PM", "path": "Vendor.Vendor PM"},
{"name": "Project Manager", "path": "ENO.Project Manager"},
{"name": "Total Project Manager", "path": "ENO.Total Project Manager"},
{"name": "A S P Manager", "path": "ENO.A S P Manager"},
{"name": "P S S", "path": "ENO.P S S"},
]
for role in roles:
RoleFactory(**role)
self.maxDiff = None
self.eno_group = Group.objects.get(name="ENO")
self.user = UserFactory(groups=[self.eno_group])
self.en_profile = EnProfileFactory(user=self.user)
self.url = reverse("apiv1:en_users-list")
self.pm_group = Group.objects.get(name="Project Manager")
self.im_group = Group.objects.get(name="Implementation Manager")
self.pm_user = UserFactory(
name="Project Manager User", groups=[self.eno_group, self.pm_group]
)
self.pm_en_profile = EnProfileFactory(user=self.pm_user)
self.im_user = UserFactory(
name="Implementation Manager User", groups=[self.eno_group, self.im_group]
)
self.im_en_profile = EnProfileFactory(user=self.im_user)
self.detail_url = reverse("apiv1:detailed_en_users-detail", args=(self.pm_user.pk,))
self.organization = OrganizationFactory(name="ENP", domain="enp.localhost")
self.domain = self.organization.domains.first().name
self.by_group_url = reverse("apiv1:en_users-by-group")
@override_settings(ALLOWED_HOSTS=["*"])
def test_by_group(self):
"""
Successfully list all the en_users for dropdown by group name
"""
self.client.force_authenticate(self.user)
url = f"{self.by_group_url}?group=Implementation Manager"
response = self.client.get(url, SERVER_NAME=self.domain)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
response.json()["users"],
[
{
"id": self.im_user.pk,
"en_profile": {
"modified": "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(self.im_en_profile.modified),
"modified_by": None,
"id": self.im_en_profile.id,
"org_unit_short_name": self.im_en_profile.org_unit_short_name,
"signum": self.im_en_profile.signum,
},
"name": self.im_user.name,
"first_name": self.im_user.first_name,
"last_name": self.im_user.last_name,
"username": self.im_user.username,
"email": self.im_user.email,
"groups": [
{"id": self.eno_group.pk, "name": self.eno_group.name},
{"id": self.im_group.pk, "name": self.im_group.name},
],
"links": {"groups": "groups/"},
}
],
)
@override_settings(ALLOWED_HOSTS=["*"])
def test_list(self):
"""
Successfully list all the en_users
"""
self.client.force_authenticate(self.user)
response = self.client.get(self.url, SERVER_NAME=self.domain)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
response.json()["users"],
[
{
"id": self.user.pk,
"en_profile": {
"modified": "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(self.en_profile.modified),
"modified_by": None,
"id": self.en_profile.id,
"org_unit_short_name": self.en_profile.org_unit_short_name,
"signum": self.en_profile.signum,
},
"name": self.user.name,
"first_name": self.user.first_name,
"last_name": self.user.last_name,
"username": self.user.username,
"email": self.user.email,
"groups": [],
},
{
"id": self.pm_user.pk,
"en_profile": {
"modified": "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(self.pm_en_profile.modified),
"modified_by": None,
"id": self.pm_en_profile.id,
"org_unit_short_name": self.pm_en_profile.org_unit_short_name,
"signum": self.pm_en_profile.signum,
},
"name": self.pm_user.name,
"first_name": self.pm_user.first_name,
"last_name": self.pm_user.last_name,
"username": self.pm_user.username,
"email": self.pm_user.email,
"groups": [{"id": self.pm_group.pk, "name": self.pm_group.name}],
"links": {"groups": "groups/"},
},
{
"id": self.im_user.pk,
"en_profile": {
"modified": "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(self.im_en_profile.modified),
"modified_by": None,
"id": self.im_en_profile.id,
"org_unit_short_name": self.im_en_profile.org_unit_short_name,
"signum": self.im_en_profile.signum,
},
"name": self.im_user.name,
"first_name": self.im_user.first_name,
"last_name": self.im_user.last_name,
"username": self.im_user.username,
"email": self.im_user.email,
"groups": [{"id": self.im_group.pk, "name": self.im_group.name}],
"links": {"groups": "groups/"},
},
],
)
@override_settings(ALLOWED_HOSTS=["*"])
def test_save_en_user(self):
"""
Successfully save en user
"""
self.client.force_authenticate(self.user)
data = {
"name": "Changed User",
"email": "changed@clientexample.com",
"en_profile": {"signum": "CHANGED", "org_unit_short_name": "A"},
"groups": [self.im_group.pk],
}
response = self.client.patch(self.detail_url, data, format="json", SERVER_NAME=self.domain)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.pm_user.refresh_from_db()
# Check these explicitly to ensure that the bug in #725 does not recur
self.assertTrue(self.im_group in self.pm_user.groups.all())
self.assertTrue(Group.objects.filter(name="Project Manager").exists())
|
# -*- coding: utf-8 -*-
from flask import current_app
from flask_login import UserMixin
from datetime import datetime, timedelta
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from stacksites.database import db, CRUDMixin
from stacksites.extensions import bcrypt
from stacksites.sites.models import Site
from .utils import generate_secure_token
class User(UserMixin, CRUDMixin, db.Model):
__tablename__ = 'user'
username = db.Column(db.String(80), unique=True, nullable=False)
email = db.Column(db.String(80), unique=True, nullable=False)
pwdhash = db.Column(db.String(1000), nullable=False)
created_at = db.Column(db.DateTime(), nullable=False)
active = db.Column(db.Boolean())
roles = db.Column(db.PickleType())
activation_token = db.Column(db.String(30), unique=True)
password_reset_token = db.Column(db.String(30), unique=True)
password_reset_expiration = db.Column(db.DateTime())
sites = db.relationship('Site', backref='user')
def __init__(self, username, email, password, temp_file_id=None):
self.username = username
self.email = email
self.created_at = datetime.utcnow()
self.active = False
self.roles = frozenset()
self.set_password(password)
if temp_file_id is not None:
site = Site('home', self, temp_file_id=temp_file_id)
else:
site = Site('home', self)
self.sites.append(site)
def set_password(self, password):
self.pwdhash = bcrypt.generate_password_hash(password)
def check_password(self, password):
return bcrypt.check_password_hash(self.pwdhash, password)
def get_activation_token(self):
self.activation_token = generate_secure_token()
self.save()
return self.activation_token
def activate(self):
self.active = True
self.save()
return True
def get_reset_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
self.password_reset_expiration = datetime.utcnow() + timedelta(hours=1)
self.save()
return s.dumps({'reset': self.id})
def reset_password(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except Exception:
return False
if data.get('reset') != self.id:
return False
return True
def has_role(self, role):
return self.roles is not None and role in self.roles
def add_role(self, role):
if self.roles:
elems = [e for e in self.roles]
elems.append(role)
self.roles = frozenset(elems)
else:
self.roles = frozenset((role,))
def delete_self(self):
for site in self.sites:
site.delete_site()
self.delete()
def __repr__(self):
return "<User ({0}, id: {1})>".format(self.username, self.id)
|
__author__ = 'alexander'
import logging
log = logging.getLogger(__name__)
class MockRedisClient:
"""
Mocks Redis client class. Used if Celery is disabled. Does nothing.
"""
def set(self, *args):
log.debug("Setting a value in MockRedisClient")
def generate_task_id(self, client_id):
return None
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# When using UDP, the client first creates a UDP socket; no connect() call is needed, data is sent directly to the server with sendto()
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
for data in ['Michael', 'Tracy', 'Sarah']:
s.sendto(data, ('127.0.0.1', 9999)) # send data
print s.recv(1024) # receive data
s.close()
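# Illustrative counterpart, not part of the original source: a minimal UDP
# echo server sketch for the 127.0.0.1:9999 address the client above sends
# to; it replies once per datagram so the client's recv() returns.
# import socket
# server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# server.bind(('127.0.0.1', 9999))
# while True:
#     data, addr = server.recvfrom(1024)
#     server.sendto('Hello, %s!' % data, addr)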
|
import math
# Constants
event_type = 'EVENTMSGTYPE'
event_subtype = 'EVENTMSGACTIONTYPE'
home_description = 'HOMEDESCRIPTION'
neutral_description = 'NEUTRALDESCRIPTION'
away_description = 'VISITORDESCRIPTION'
period_column = 'PERIOD'
game_clock = 'PCTIMESTRING'
time_elapsed = 'TIME_ELAPSED'
time_elapsed_period = 'TIME_ELAPSED_PERIOD'
player1_id = 'PLAYER1_ID'
player1_team_id = 'PLAYER1_TEAM_ID'
player2_id = 'PLAYER2_ID'
###########################
###
### Helper functions for
### determining play type
###
###########################
"""
EVENTMSGTYPE Types:
1 -> MAKE
2 -> MISS
3 -> FreeThrow
4 -> Rebound
5 -> Turnover
6 -> Foul
7 -> Violation
8 -> Substitution
9 -> Timeout
10 -> JumpBall
11 -> Ejection
12 -> StartOfPeriod
13 -> EndOfPeriod
14 -> Empty
"""
def is_made_shot(row):
return row[event_type] == 1
def is_missed_shot(row):
return row[event_type] == 2
def is_free_throw(row):
return row[event_type] == 3
def is_rebound(row):
return row[event_type] == 4
def is_turnover(row):
return row[event_type] == 5
def is_foul(row):
return row[event_type] == 6
def is_violation(row):
return row[event_type] == 7
def is_substitution(row):
return row[event_type] == 8
def is_timeout(row):
return row[event_type] == 9
def is_jump_ball(row):
return row[event_type] == 10
def is_ejection(row):
return row[event_type] == 11
def is_start_of_period(row):
return row[event_type] == 12
def is_end_of_period(row):
return row[event_type] == 13
def is_miss(row):
miss = False
if row[home_description]:
miss = miss or 'miss' in row[home_description].lower()
if row[away_description]:
miss = miss or 'miss' in row[away_description].lower()
return miss
###########################
###
### Helper functions for
### determining foul type
###
###########################
"""
eventActionType Types: FOULS
% = technical FT
* = FT
FOUL TYPES
1 - Personal
2 - Shooting *
3 - Loose Ball
4 - Offensive
5 - Inbound foul *(1 FTA)
6 - Away from play
8 - Punch foul %(Technical)
9 - Clear Path *
10 - Double Foul
11 - Technical *%
12 - Non-Unsportsmanlike (Technical)
13 - Hanging *%(Technical)
14 - Flagrant 1 *%
15 - Flagrant 2 *%
16 - Double Technical
17 - Defensive 3 seconds *%(Technical)
18 - Delay of game
19 - Taunting *%(Technical)
25 - Excess Timeout *%(Technical)
26 - Charge
27 - Personal Block
28 - Personal Take
29 - Shooting Block *
30 - Too many players *%(Technical)
Offensive fouls: Offensive, Charge
"""
def is_shooting_foul(row):
return is_foul(row) and row[event_subtype] == 2
def is_away_from_play_foul(row):
return is_foul(row) and row[event_subtype] == 6
def is_inbound_foul(row):
return is_foul(row) and row[event_subtype] == 5
def is_loose_ball_foul(row):
return is_foul(row) and row[event_subtype] == 3
"""
eventActionType Types: Rebounds
Rebound Types
0 - Player Rebound
1 - Team Rebound*
Not always labeled properly
"""
def is_team_rebound(row):
return is_rebound(row) and (row[event_subtype] == 1 or math.isnan(row[player1_team_id]))
def is_defensive_rebound(ind, row, rows):
if not is_rebound(row):
return False
shot = extract_missed_shot_for_rebound(ind, rows)
if is_team_rebound(row):
return shot[player1_team_id] != row[player1_id]
else:
return shot[player1_team_id] != row[player1_team_id]
def extract_missed_shot_for_rebound(ind, rows):
subset_of_rows = rows[max(0, ind - 10): ind]
subset_of_rows.reverse()
for r in subset_of_rows:
if is_miss(r[1]) or is_missed_free_throw(r[1]):
return r[1]
return subset_of_rows[-1][1]
"""
eventActionType Types: Free Throws
Free Throw Types
10 - 1 of 1
11 - 1 of 2
12 - 2 of 2
13 - 1 of 3
14 - 2 of 3
15 - 3 of 3
16 - Technical
"""
def is_missed_free_throw(row):
return is_free_throw(row) and is_miss(row)
def is_1_of_1(row):
return is_free_throw(row) and row[event_subtype] == 10
def is_2_of_2(row):
return is_free_throw(row) and row[event_subtype] == 12
def is_3_of_3(row):
return is_free_throw(row) and row[event_subtype] == 15
def is_technical(row):
return is_free_throw(row) and row[event_subtype] == 16
def is_last_free_throw(row):
return is_1_of_1(row) or is_last_multi_free_throw(row)
def is_last_multi_free_throw(row):
return is_2_of_2(row) or is_3_of_3(row)
def is_last_free_throw_made(ind, row, rows):
if not is_free_throw(row):
return False
foul = extract_foul_for_last_freethrow(ind, row, rows)
return (is_last_multi_free_throw(row) or (
is_1_of_1(row) and not is_away_from_play_foul(foul) and not is_loose_ball_foul(foul) and not is_inbound_foul(
foul))) and not is_miss(row)
def extract_foul_for_last_freethrow(ind, row, rows):
# Check the last 20 events to find the last foul before the free-throw
subset_of_rows = rows[max(0, ind - 20): ind]
subset_of_rows.reverse()
for r in subset_of_rows:
if is_foul(r[1]):
return r[1]
print(ind)
print(row)
return subset_of_rows[0][1]
def is_and_1(ind, row, rows):
if not is_made_shot(row):
return False
# check next 20 events after the make
subset_of_rows = rows[ind + 1: min(ind + 20, len(rows))]
cnt = 0
for sub_ind, r in subset_of_rows:
# We are looking for fouls or 1 of 1 free throws that happen within 10 seconds of the made shot.
# We also need to make sure those 1 of 1s are the result of a different type of foul that results in 1 FT.
# If we have both a foul and a 1 of 1 ft that meet these conditions we can safely assume this shot resulted in
# an And-1
if (is_foul(r) or is_1_of_1(r)) and row[time_elapsed] <= r[time_elapsed] <= row[time_elapsed] + 10:
if is_foul(r) and not is_technical(r) and not is_loose_ball_foul(r) and not is_inbound_foul(r) and r[
player2_id] == row[player1_id]:
cnt += 1
elif is_1_of_1(r) and r[player1_id] == row[player1_id]:
cnt += 1
return cnt == 2
def is_make_and_not_and_1(ind, row, rows):
return is_made_shot(row) and not is_and_1(ind, row, rows)
def is_three(row):
three = False
if row[home_description]:
three = three or '3PT' in row[home_description]
if row[away_description]:
three = three or '3PT' in row[away_description]
return three
def is_team_turnover(row):
return is_turnover(row) and (is_5_second_violation(row) or is_8_second_violation(row) or is_shot_clock_violation(row) or is_too_many_players_violation(row) or no_player_listed(row))
def is_5_second_violation(row):
return is_turnover(row) and row[event_subtype] == 9
def is_8_second_violation(row):
return is_turnover(row) and row[event_subtype] == 10
def is_shot_clock_violation(row):
return is_turnover(row) and row[event_subtype] == 11
def no_player_listed(row):
return math.isnan(row[player1_team_id])
def is_too_many_players_violation(row):
return is_turnover(row) and row[event_subtype] == 44
|
import sqlite3 as lite
import sys
con = lite.connect('sensorsData.db')
with con:
cur = con.cursor()
cur.execute("DROP TABLE IF EXISTS DHT_data")
cur.execute("CREATE TABLE DHT_data(timestamp DATETIME, temp NUMERIC, hum NUMERIC)") |
__author__ = 'fmoscato'
import random
import string
import hashlib
from datetime import datetime
import pymongo
import constants as c
import pubUtilities
# The User Data Access Object handles all interactions with the User collection.
class UserDAO:
def __init__(self, db):
self.db = db
self.users = self.db.users
self.secret = 'verysecret'
@staticmethod
def make_salt():
salt = ""
for i in range(5):
salt = salt + random.choice(string.ascii_letters)
return salt
def make_pw_hash(self, pw, salt=None):
if not salt:
salt = self.make_salt()
return hashlib.sha256(pw + salt).hexdigest()+"," + salt
# Validates a user login. Returns user record or None
def validate_login(self, username, password):
user = None
try:
user = self.users.find_one({'_id': username})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return None
if not user:
print "User not in database"
return None
salt = user['password'].split(',')[1]
if user['password'] != self.make_pw_hash(password, salt):
print "user password is not a match"
return None
# Looks good
return user
def get_admin_email(self):
cursor = self.users.find_one({'admin': True})
return cursor['email']
def get_users(self, date_format=None):
cursor = self.users.find({'admin': False}).sort('lastname', direction=1)
date_format_str = c.SHORT_DATE_FORMAT
if date_format:
date_format_str = date_format
result = []
for user in cursor:
usr = {'username': user['_id'], 'name': user['name'], 'lastname': user['lastname'],
'email': user['email'],
'start_date': user['start_date'].strftime(date_format_str),
'end_date': user['end_date'].strftime(date_format_str),
'contracts': user.get('contracts', []),
'projects': user['projects'],
'missions_projects': user['missions_projects']}
result.append(usr)
return result
def get_user(self, _id):
cursor = self.users.find_one({'_id': _id})
user = None
if cursor:
user = {'username': cursor['_id'], 'name': cursor['name'], 'lastname': cursor['lastname'],
'email': cursor['email'],
'start_date': cursor['start_date'].strftime(c.DATE_FORMAT),
'end_date': cursor['end_date'].strftime(c.DATE_FORMAT),
'contracts': cursor.get('contracts', []),
'projects': cursor.get('projects', []),
'missions_projects': cursor.get('missions_projects', '')
}
return user
def remove_user(self, _id):
try:
print "removing user %s" % _id
self.users.remove({'_id': _id})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
def close_validity_user(self, _id):
today = datetime.now()
try:
self.users.update({'_id': _id}, {'$set': {"end_date": today}})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
def update_email(self, _id, email):
try:
self.users.update({'_id': _id}, {'$set': {'email': email}})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
def update_contracts(self, _id, contracts_list):
try:
self.users.update({'_id': _id}, {'$set': {'contracts': contracts_list}})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
def update_projects_missions(self, _id, projects_missions_list):
try:
self.users.update({'_id': _id}, {'$set': {'missions_projects': projects_missions_list}})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
def update_projects(self, _id, projects_list):
try:
self.users.update({'_id': _id}, {'$set': {'projects': projects_list}})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
# creates a new user in the users collection
def add_user(self, **kwargs):
pwd = kwargs['password']
password_hash = self.make_pw_hash(pwd)
user = {'_id': kwargs['username'], 'password': password_hash,
'name': kwargs['name'], 'lastname': kwargs['lastname'],
'email': kwargs['email'],
'admin': kwargs.get('admin', False),
'start_date': kwargs['start_date'],
'contracts': kwargs.get('contracts', []),
'projects': kwargs.get('projects', []),
'missions_projects': kwargs.get('missions_projects', [])}
if 'end_date'in kwargs and kwargs['end_date'] != '':
user['end_date'] = kwargs['end_date']
else:
date1 = datetime.strptime(c.END_DATE, c.DATE_FORMAT)
user['end_date'] = date1
try:
self.users.insert(user)
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
except pymongo.errors.DuplicateKeyError:
print "oops, username is already taken"
return False
return True
def id_generator(self, size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def create_tmp_password(self, _id):
"""
Creates a new temporary password, emails it to the user and stores
its hash in the DB.
@param _id: the user's id
@return: True || False
"""
new_pwd = self.id_generator()
new_pwd_hash = self.make_pw_hash(new_pwd)
user = self.get_user(_id)
if not pubUtilities.sendMail(user['email'], c.SUBJECT_FORGOT_PASSWORD, c.BODY_FORGOT_PASSWORD % new_pwd):
print "error sending email user: %" % user['email']
return False
try:
self.users.update({'_id': _id}, {'$set': {'password': new_pwd_hash}})
except pymongo.errors.OperationFailure:
print "oops, mongo error"
return False
return True
|
from __future__ import unicode_literals
import os
from django.utils import six
from djblets.webapi.errors import INVALID_FORM_DATA
from reviewboard import scmtools
from reviewboard.attachments.models import FileAttachment
from reviewboard.diffviewer.models import DiffSet, FileDiff
from reviewboard.webapi.resources import resources
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (diff_item_mimetype,
filediff_item_mimetype,
filediff_list_mimetype)
from reviewboard.webapi.tests.mixins import BasicTestsMetaclass
from reviewboard.webapi.tests.mixins_extra_data import ExtraDataItemMixin
from reviewboard.webapi.tests.urls import (get_diff_list_url,
get_draft_filediff_item_url,
get_draft_filediff_list_url)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(BaseWebAPITestCase):
"""Testing the DraftFileDiffResource list APIs."""
fixtures = ['test_users', 'test_scmtools']
sample_api_url = 'review-requests/<id>/draft/diffs/<revision>/files/'
resource = resources.draft_filediff
def compare_item(self, item_rsp, filediff):
self.assertEqual(item_rsp['id'], filediff.pk)
self.assertEqual(item_rsp['source_file'], filediff.source_file)
self.assertEqual(item_rsp['extra_data'], filediff.extra_data)
def setup_http_not_allowed_list_test(self, user):
review_request = self.create_review_request(
create_repository=True,
submitter=user)
diffset = self.create_diffset(review_request, draft=True)
return get_draft_filediff_list_url(diffset, review_request)
#
# HTTP GET tests
#
def setup_basic_get_test(self, user, with_local_site, local_site_name,
populate_items):
review_request = self.create_review_request(
create_repository=True,
with_local_site=with_local_site,
submitter=user)
diffset = self.create_diffset(review_request, draft=True)
if populate_items:
items = [self.create_filediff(diffset)]
else:
items = []
return (get_draft_filediff_list_url(diffset, review_request,
local_site_name),
filediff_list_mimetype,
items)
def test_get_not_owner(self):
"""Testing the
GET review-requests/<id>/draft/diffs/<revision>/files/ API
without owner with Permission Denied error
"""
review_request = self.create_review_request(create_repository=True)
self.assertNotEqual(review_request.submitter, self.user)
diffset = self.create_diffset(review_request, draft=True)
self.api_get(
get_draft_filediff_list_url(diffset, review_request),
expected_status=403)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceItemTests(ExtraDataItemMixin, BaseWebAPITestCase):
"""Testing the DraftFileDiffResource item APIs."""
fixtures = ['test_users', 'test_scmtools']
sample_api_url = 'review-requests/<id>/draft/diffs/<revision>/files/<id>/'
resource = resources.draft_filediff
test_http_methods = ('DELETE', 'GET', 'PUT')
def setup_http_not_allowed_item_test(self, user):
review_request = self.create_review_request(
create_repository=True,
submitter=user)
diffset = self.create_diffset(review_request, draft=True)
filediff = self.create_filediff(diffset)
return get_draft_filediff_item_url(filediff, review_request)
def compare_item(self, item_rsp, filediff):
self.assertEqual(item_rsp['id'], filediff.pk)
self.assertEqual(item_rsp['source_file'], filediff.source_file)
self.assertEqual(item_rsp['extra_data'], filediff.extra_data)
#
# HTTP GET tests
#
def setup_basic_get_test(self, user, with_local_site, local_site_name):
review_request = self.create_review_request(
create_repository=True,
with_local_site=with_local_site,
submitter=user)
diffset = self.create_diffset(review_request, draft=True)
filediff = self.create_filediff(diffset)
return (get_draft_filediff_item_url(filediff, review_request,
local_site_name),
filediff_item_mimetype,
filediff)
def test_get_not_owner(self):
"""Testing the
GET review-requests/<id>/draft/diffs/<revision>/files/<id>/ API
without owner with Permission Denied error
"""
review_request = self.create_review_request(create_repository=True)
self.assertNotEqual(review_request.submitter, self.user)
diffset = self.create_diffset(review_request, draft=True)
filediff = self.create_filediff(diffset)
self.api_get(
get_draft_filediff_item_url(filediff, review_request),
expected_status=403)
#
# HTTP PUT tests
#
def setup_basic_put_test(self, user, with_local_site, local_site_name,
put_valid_data):
review_request = self.create_review_request(
submitter=user,
with_local_site=with_local_site,
create_repository=True)
diffset = self.create_diffset(review_request, draft=True)
filediff = self.create_filediff(diffset)
return (get_draft_filediff_item_url(filediff, review_request,
local_site_name),
filediff_item_mimetype,
{},
filediff,
[])
def check_put_result(self, user, item_rsp, filediff):
filediff = FileDiff.objects.get(pk=filediff.pk)
self.compare_item(item_rsp, filediff)
def test_put_with_new_file_and_dest_attachment_file(self):
"""Testing the PUT review-requests/<id>/diffs/<id>/files/<id>/ API
with new file and dest_attachment_file
"""
review_request = self.create_review_request(create_repository=True,
submitter=self.user)
diff_filename = os.path.join(os.path.dirname(scmtools.__file__),
'testdata', 'git_binary_image_new.diff')
with open(diff_filename, 'r') as f:
rsp = self.api_post(
get_diff_list_url(review_request),
{
'path': f,
'base_commit_id': '1234',
},
expected_mimetype=diff_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
diffset = DiffSet.objects.get(pk=rsp['diff']['id'])
filediffs = diffset.files.all()
self.assertEqual(len(filediffs), 1)
filediff = filediffs[0]
self.assertEqual(filediff.source_file, 'trophy.png')
with open(self._getTrophyFilename(), 'r') as f:
rsp = self.api_put(
get_draft_filediff_item_url(filediff, review_request) +
'?expand=dest_attachment',
{
'dest_attachment_file': f,
},
expected_mimetype=filediff_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertIn('dest_attachment', rsp['file'])
attachment = FileAttachment.objects.get(
pk=rsp['file']['dest_attachment']['id'])
self.assertTrue(attachment.is_from_diff)
self.assertEqual(attachment.orig_filename, 'trophy.png')
self.assertEqual(attachment.added_in_filediff, filediff)
self.assertEqual(attachment.repo_path, None)
self.assertEqual(attachment.repo_revision, None)
self.assertEqual(attachment.repository, None)
def test_put_with_modified_file_and_dest_attachment_file(self):
"""Testing the PUT review-requests/<id>/diffs/<id>/files/<id>/ API
with modified file and dest_attachment_file
"""
review_request = self.create_review_request(create_repository=True,
submitter=self.user)
diff_filename = os.path.join(os.path.dirname(scmtools.__file__),
'testdata',
'git_binary_image_modified.diff')
with open(diff_filename, 'r') as f:
rsp = self.api_post(
get_diff_list_url(review_request),
{
'path': f,
'base_commit_id': '1234',
},
expected_mimetype=diff_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
diffset = DiffSet.objects.get(pk=rsp['diff']['id'])
filediffs = diffset.files.all()
self.assertEqual(len(filediffs), 1)
filediff = filediffs[0]
self.assertEqual(filediff.source_file, 'trophy.png')
with open(self._getTrophyFilename(), 'r') as f:
rsp = self.api_put(
get_draft_filediff_item_url(filediff, review_request) +
'?expand=dest_attachment',
{
'dest_attachment_file': f,
},
expected_mimetype=filediff_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertIn('dest_attachment', rsp['file'])
attachment = FileAttachment.objects.get(
pk=rsp['file']['dest_attachment']['id'])
self.assertTrue(attachment.is_from_diff)
self.assertEqual(attachment.orig_filename, 'trophy.png')
self.assertEqual(attachment.added_in_filediff, None)
self.assertEqual(attachment.repo_path, 'trophy.png')
self.assertEqual(attachment.repo_revision, '86b520d')
self.assertEqual(attachment.repository, review_request.repository)
def test_put_second_dest_attachment_file_disallowed(self):
"""Testing the PUT review-requests/<id>/diffs/<id>/files/<id>/ API
disallows setting dest_attachment_file twice
"""
review_request = self.create_review_request(create_repository=True,
submitter=self.user)
diff_filename = os.path.join(os.path.dirname(scmtools.__file__),
'testdata',
'git_binary_image_modified.diff')
with open(diff_filename, 'r') as f:
rsp = self.api_post(
get_diff_list_url(review_request),
{
'path': f,
'base_commit_id': '1234',
},
expected_mimetype=diff_item_mimetype)
diffset = DiffSet.objects.get(pk=rsp['diff']['id'])
filediff = diffset.files.all()[0]
url = get_draft_filediff_item_url(filediff, review_request)
trophy_filename = self._getTrophyFilename()
with open(trophy_filename, 'r') as f:
self.api_put(
url,
{
'dest_attachment_file': f,
},
expected_mimetype=filediff_item_mimetype)
with open(trophy_filename, 'r') as f:
rsp = self.api_put(
url,
{
'dest_attachment_file': f,
},
expected_status=400)
self.assertEqual(rsp['stat'], 'fail')
self.assertEqual(rsp['err']['code'], INVALID_FORM_DATA.code)
self.assertIn('fields', rsp)
self.assertIn('dest_attachment_file', rsp['fields'])
|
import numpy as np
class Schedule():
def __init__(self, name, dict_repr):
self.name = name
self.type_dict = {
'interchange':0,
'tiling':1,
'unrolling':2
}
self.binary_repr = None
self.load_schedules(dict_repr)
def add_interchange(self, interchange):
if interchange:
self.schedule_list.append({
'type':'interchange',
'params':interchange,
'factors': None
})
else:
self.schedule_list.append({
'type':'interchange',
'params':[-1, -1],
'factors':None
})
def add_tiling(self, tiling):
if tiling:
dims = tiling['tiling_dims']
factors = tiling['tiling_factors']
# if tiling['tiling_depth'] == 2:
# dims.append(-1)
# factors.append(-1)
self.schedule_list.append({
'type':'tiling',
'params':dims,
'factors': factors
})
else:
self.schedule_list.append({
'type':'tiling',
'params':[-1, -1, -1],
'factors': [-1, -1, -1]
})
def add_unrolling(self, unrolling):
if unrolling:
self.schedule_list.append({
'type':'unrolling',
'params': None,
'factors': [unrolling]
})
else:
self.schedule_list.append({
'type':'unrolling',
'params': None,
'factors': [1]
})
def load_schedules(self, dict_repr):
self.schedule_list = []
interchange = dict_repr['interchange_dims']
self.add_interchange(interchange)
tiling = dict_repr['tiling']
self.add_tiling(tiling)
unrolling_factor = dict_repr['unrolling_factor']
self.add_unrolling(unrolling_factor)
self.binary_repr = (int(bool(interchange)), int(tiling is not None), int(unrolling_factor is not None))
def __eq__(self, other, binary=True):
if self.binary_repr == other.binary_repr:
if not binary:
return self.schedule_list == other.schedule_list
return True
return False
def __array__(self):
arr = []
#sort by type
self.schedule_list.sort(key=lambda x: self.type_dict[x['type']])
for schedule in self.schedule_list:
type_ = self.type_dict[schedule['type']]
params = schedule['params']
factors = schedule['factors']
arr.append(type_)
if params:
arr.extend(params)
if factors:
arr.extend(factors)
return np.array(arr)
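# Illustrative usage sketch (assumed input shape, not from the original
# source): build a Schedule from the dict form load_schedules() expects
# and vectorize it via __array__.
# example = Schedule('example', {'interchange_dims': [0, 1], 'tiling': None, 'unrolling_factor': 4})
# vector = np.array(example)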
|
from __future__ import annotations
import collections
from typing import Dict, List, Optional, Tuple, Union
# TODO Implement more classmethods for each filter type.
class Equalizer:
def __init__(self, *, bands: List[Tuple[int, float]], name='Equalizer') -> None:
self._bands = self._make_bands(bands=bands)
self._name = name
def __repr__(self) -> str:
return f'<slate.Equalizer name=\'{self._name}\' bands={self._bands}>'
def __str__(self) -> str:
return self._name
def _make_bands(self, *, bands: List[Tuple[int, float]]) -> List[Dict[str, float]]:
for band, gain in bands:
if band < 0 or band > 14:
raise ValueError('Band must be within the valid range of 0 to 14.')
if gain < -0.25 or gain > 1.0:
raise ValueError('Gain must be within the valid range of -0.25 to 1.0')
_dict = collections.defaultdict(int)
_dict.update(bands)
return [{'band': band, 'gain': _dict[band]} for band in range(15)]
@property
def name(self) -> str:
return self._name
@property
def payload(self) -> List[Dict[str, float]]:
return self._bands
@classmethod
def flat(cls) -> Equalizer:
bands = [(0, 0.0), (1, 0.0), (2, 0.0), (3, 0.0), (4, 0.0), (5, 0.0), (6, 0.0), (7, 0.0), (8, 0.0), (9, 0.0), (10, 0.0), (11, 0.0), (12, 0.0), (13, 0.0), (14, 0.0)]
return cls(bands=bands, name='Flat')
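# The TODO at the top asks for more classmethods per filter type; one more
# preset sketch in the style of flat() above (the gain values here are
# illustrative assumptions within the valid -0.25 to 1.0 range, not taken
# from the original source).
@classmethod
def boost(cls) -> Equalizer:
bands = [(0, 0.15), (1, 0.15), (2, 0.10), (3, 0.05), (4, 0.0), (5, 0.0), (6, 0.0), (7, 0.0), (8, 0.0), (9, 0.0), (10, 0.0), (11, 0.05), (12, 0.10), (13, 0.15), (14, 0.15)]
return cls(bands=bands, name='Boost')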
class Karaoke:
def __init__(self, *, level: Optional[float] = 1.0, mono_level: Optional[float] = 1.0, filter_band: Optional[float] = 220.0, filter_width: Optional[float] = 100.0) -> None:
self.level = level
self.mono_level = mono_level
self.filter_band = filter_band
self.filter_width = filter_width
self._name = 'Karaoke'
def __repr__(self) -> str:
return f'<slate.Karaoke level={self.level} mono_level={self.mono_level} filter_band={self.filter_band} filter_width={self.filter_width}>'
def __str__(self) -> str:
return self._name
@property
def name(self) -> str:
return self._name
@property
def payload(self) -> Dict[str, float]:
return {'level': self.level, 'mono_level': self.mono_level, 'filter_band': self.filter_band, 'filter_width': self.filter_width}
class Timescale:
def __init__(self, *, speed: Optional[float] = 1.0, pitch: Optional[float] = 1.0, rate: Optional[float] = 1.0) -> None:
self.speed = speed
self.pitch = pitch
self.rate = rate
self._name = 'Timescale'
def __repr__(self) -> str:
return f'<slate.Timescale speed={self.speed} pitch={self.pitch} rate={self.rate}>'
def __str__(self) -> str:
return self._name
@property
def name(self) -> str:
return self._name
@property
def payload(self) -> Dict[str, float]:
return {'speed': self.speed, 'pitch': self.pitch, 'rate': self.rate}
class Tremolo:
def __init__(self, *, frequency: Optional[float] = 2.0, depth: Optional[float] = 0.5) -> None:
if frequency <= 0:
raise ValueError('Frequency must be more than 0.0')
if not 0 < depth <= 1:
raise ValueError('Depth must be more than 0.0 and less than or equal to 1.0')
self.frequency = frequency
self.depth = depth
self._name = 'Tremolo'
def __repr__(self) -> str:
return f'<slate.Tremolo frequency={self.frequency} depth={self.depth}>'
def __str__(self) -> str:
return self._name
@property
def name(self) -> str:
return self._name
@property
def payload(self) -> Dict[str, float]:
return {'frequency': self.frequency, 'depth': self.depth}
class Vibrato:
def __init__(self, *, frequency: Optional[float] = 2.0, depth: Optional[float] = 0.5) -> None:
if not 0 < frequency <= 14:
raise ValueError('Frequency must be more than 0.0 and less than or equal to 14.0')
if not 0 < depth <= 1:
raise ValueError('Depth must be more than 0.0 and less than or equal to 1.0')
self.frequency = frequency
self.depth = depth
self._name = 'Vibrato'
def __repr__(self) -> str:
return f'<slate.Vibrato frequency={self.frequency} depth={self.depth}>'
def __str__(self) -> str:
return self._name
@property
def name(self) -> str:
return self._name
@property
def payload(self) -> Dict[str, float]:
return {'frequency': self.frequency, 'depth': self.depth}
class Filter:
def __init__(self, *, filter: Optional[Filter] = None, volume: Optional[float] = None, equalizer: Optional[Equalizer] = None, karaoke: Optional[Karaoke] = None,
timescale: Optional[Timescale] = None, tremolo: Optional[Tremolo] = None, vibrato: Optional[Vibrato] = None) -> None:
self.filter = filter
self.volume = volume
self.equalizer = equalizer
self.karaoke = karaoke
self.timescale = timescale
self.tremolo = tremolo
self.vibrato = vibrato
def __repr__(self) -> str:
return f'<slate.Filter volume={self.volume} equalizer={self.equalizer} karaoke={self.karaoke} timescale={self.timescale} tremolo={self.tremolo} vibrato={self.vibrato}>'
@property
def payload(self) -> Dict[str, Union[Dict[str, float], float]]:
payload = self.filter.payload.copy() if self.filter is not None else {}
if self.volume is not None:
payload['volume'] = self.volume
if self.equalizer is not None:
payload['equalizer'] = self.equalizer.payload
if self.karaoke is not None:
payload['karaoke'] = self.karaoke.payload
if self.timescale is not None:
payload['timescale'] = self.timescale.payload
if self.tremolo is not None:
payload['tremolo'] = self.tremolo.payload
if self.vibrato is not None:
payload['vibrato'] = self.vibrato.payload
return payload
|
# Copyright 2019-2021 Wingify Software Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import random
import copy
from vwo.core import bucketer
from vwo.helpers import campaign_util
from ..data.settings_files import SETTINGS_FILES
from ..data.settings_file_and_user_expectations import USER_EXPECTATIONS
class BucketerTest(unittest.TestCase):
def setUp(self):
self.user_id = str(random.random())
self.dummy_campaign = {
"goals": [{"identifier": "GOAL_NEW", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": "1", "name": "Control", "weight": 40},
{"id": "2", "name": "Variation-1", "weight": 60},
],
"id": 22,
"percentTraffic": 50,
"key": "UNIQUE_KEY",
"status": "RUNNING",
"type": "VISUAL_AB",
}
campaign_util.set_variation_allocation(self.dummy_campaign)
self.bucketer = bucketer.Bucketer()
self.variations = copy.deepcopy(SETTINGS_FILES["FT_T_0_W_10_20_30_40"].get("campaigns")[0]["variations"])
campaign_util.set_allocation_ranges(self.variations)
def test_user_part_of_campaign_none_campaign_passed(self):
result = self.bucketer.is_user_part_of_campaign(self.user_id, None)
self.assertIs(result, False)
def test_user_part_of_campaign_none_userid_passed(self):
result = self.bucketer.is_user_part_of_campaign(None, self.dummy_campaign)
self.assertIs(result, False)
def test_user_part_of_campaign_should_return_true(self):
user_id = "Bob"
# Bob, with above campaign settings, will get hashValue:2033809345 and
# bucketValue:48. So, MUST be a part of campaign as per campaign
# percentTraffic
result = self.bucketer.is_user_part_of_campaign(user_id, self.dummy_campaign)
self.assertIs(result, True)
def test_user_part_of_campaign_should_return_false(self):
user_id = "Lucian"
# Lucian, with above campaign settings, will get hashValue:2251780191
# and bucketValue:53. So, must NOT be a part of campaign as per campaign
# percentTraffic
result = self.bucketer.is_user_part_of_campaign(user_id, self.dummy_campaign)
self.assertIs(result, False)
def test_user_part_of_campaign_should_return_false_as_T_is_0(self):
campaign = copy.deepcopy(self.dummy_campaign)
campaign["percentTraffic"] = 0
for test in USER_EXPECTATIONS["AB_T_50_W_50_50"]:
self.assertIs(False, self.bucketer.is_user_part_of_campaign(test["user"], campaign))
def test_user_part_of_campaign_should_return_true_as_T_is_100(self):
campaign = copy.deepcopy(self.dummy_campaign)
campaign["percentTraffic"] = 100
for test in USER_EXPECTATIONS["AB_T_50_W_50_50"]:
self.assertIs(True, self.bucketer.is_user_part_of_campaign(test["user"], campaign))
def test_user_part_of_campaign_AB_T_50_W_50_50(self):
campaign = copy.deepcopy(SETTINGS_FILES["AB_T_50_W_50_50"]["campaigns"][0])
for test in USER_EXPECTATIONS["AB_T_50_W_50_50"]:
self.assertIs(test["variation"] is not None, self.bucketer.is_user_part_of_campaign(test["user"], campaign))
def test_user_part_of_campaign_T_25_W_10_20_30_40(self):
campaign = copy.deepcopy(SETTINGS_FILES["FT_T_25_W_10_20_30_40"]["campaigns"][0])
for test in USER_EXPECTATIONS["T_25_W_10_20_30_40"]:
self.assertIs(test["variation"] is not None, self.bucketer.is_user_part_of_campaign(test["user"], campaign))
def test_bucket_user_to_variation_none_campaign_passed(self):
result = self.bucketer.bucket_user_to_variation(self.user_id, None)
self.assertIsNone(result)
def test_bucket_user_to_variation_none_userid_passed(self):
result = self.bucketer.bucket_user_to_variation(None, self.dummy_campaign)
self.assertIsNone(result)
def test_bucket_user_to_variation_return_control(self):
user_id = "Sarah"
# Sarah, with above campaign settings, will get hashValue:69650962 and
# bucketValue:326. So, MUST be a part of Control, as per campaign
# settings
result = self.bucketer.bucket_user_to_variation(user_id, self.dummy_campaign)
self.assertEqual(result.get("name"), "Control")
def test_bucket_user_to_variation_return_variation_1(self):
user_id = "Varun"
# Varun, with above campaign settings, will get hashValue:2025462540 and
# bucketValue:9433. So, MUST be a part of Variation-1, as per campaign
# settings
result = self.bucketer.bucket_user_to_variation(user_id, self.dummy_campaign)
self.assertEqual(result.get("name"), "Variation-1")
def test_bucket_user_to_variation_should_return_true(self):
user_id = "Allie"
# Allie, with above campaign settings, will get hashValue:362121553
# and bucketValue:1688. So, MUST be a part of campaign as per campaign
# percentTraffic
variation = self.bucketer.bucket_user_to_variation(user_id, self.dummy_campaign)
self.assertEqual(variation.get("id"), "1")
self.assertEqual(variation.get("name"), "Control")
def test_bucket_user_to_variation_should_return_none(self):
user_id = "Lucian"
# Lucian, with above campaign settings, will get hashValue:2251780191
# and bucketValue:53. So, must NOT be a part of campaign as per campaign
# percentTraffic
variation = self.bucketer.bucket_user_to_variation(user_id, self.dummy_campaign)
self.assertIsNone(variation)
def test_bucket_user_to_variation_should_return_Control(self):
user_id = "Sarah"
# Sarah, with above campaign settings, will get hashValue:69650962
# and bucketValue:326. So, MUST be a part of Control, as per campaign
# settings
variation = self.bucketer.bucket_user_to_variation(user_id, self.dummy_campaign)
self.assertEqual(variation.get("name"), "Control")
def test_bucket_user_to_variation_should_return_Variation(self):
user_id = "Varun"
# Varun, with above campaign settings, will get hashValue:2025462540
# and bucketValue:9433. So, MUST be a part of Variation, as per campaign
# settings
variation = self.bucketer.bucket_user_to_variation(user_id, self.dummy_campaign)
self.assertEqual(variation.get("name"), "Variation-1")
def test_get_allocated_item_return_control_below_border(self):
variation = self.bucketer.get_allocated_item(self.variations, 999)
self.assertEqual(variation.get("name"), "Control")
def test_get_allocated_item_return_control_border(self):
variation = self.bucketer.get_allocated_item(self.variations, 1000)
self.assertEqual(variation.get("name"), "Control")
def test_get_allocated_item_return_variation_1_above_border(self):
variation = self.bucketer.get_allocated_item(self.variations, 1001)
self.assertEqual(variation.get("name"), "Variation-1")
def test_get_allocated_item_return_variation_1(self):
variation = self.bucketer.get_allocated_item(self.variations, 3000)
self.assertEqual(variation.get("name"), "Variation-1")
def test_get_allocated_item_return_variation_2(self):
variation = self.bucketer.get_allocated_item(self.variations, 6000)
self.assertEqual(variation.get("name"), "Variation-2")
def test_get_allocated_item_return_variation_3(self):
variation = self.bucketer.get_allocated_item(self.variations, 10000)
self.assertEqual(variation.get("name"), "Variation-3")
def test_get_allocated_item_return_none(self):
variation = self.bucketer.get_allocated_item(self.variations, 10001)
self.assertIsNone(variation)
def test_get_bucket_value_for_multiple_user_ids(self):
for test in USER_EXPECTATIONS["USER_AND_BUCKET_VALUES"]:
bucket_value = self.bucketer.get_bucket_value_for_user(test["user"], 10000)
self.assertEqual(bucket_value, test["bucket_value"])
def test_get_bucket_value_for_user_64(self):
bucket_value = self.bucketer.get_bucket_value_for_user("someone@mail.com", 100)
self.assertEqual(bucket_value, 64)
def test_get_bucket_value_for_user_50(self):
bucket_value = self.bucketer.get_bucket_value_for_user("1111111111111111", 100)
self.assertEqual(bucket_value, 50)
|
# Counting up
for c in range(0, 11):
print(c)
print('END!')
# Counting down
for i in range(11, 0, -1):
print(i)
print('END!!')
|
from __future__ import absolute_import
from __future__ import print_function
from app.workflows.sample_tasks import add_one_and_print
from app.workflows.work import find_odd_numbers_with_string
def test_add_one_and_print():
outs = add_one_and_print.unit_test(value_to_print=15)
assert outs['out'] == 16
def test_find_odd_numbers_with_string():
outs = find_odd_numbers_with_string.unit_test(list_of_nums=[3,4], demo_string='hello world')
assert outs['altered_string'] == 'hello world_changed'
assert outs['are_num_odd'] == [True, False]
|
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SECRET_KEY = '_'
STATIC_ROOT = os.path.join(BASE_DIR, '__STATIC_ROOT__')
STATIC_URL = '/static/'
INSTALLED_APPS = (
'django.contrib.staticfiles',
'tests.django_test_app',
'webpack',
)
STATICFILES_FINDERS = (
# Defaults
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# Webpack finder
'webpack.django_integration.WebpackFinder',
)
BUNDLES = os.path.join(BASE_DIR, 'bundles')
OUTPUT_ROOT = os.path.join(BASE_DIR, 'generated_assets')
WEBPACK = {
'OUTPUT_ROOT': OUTPUT_ROOT,
'STATIC_URL': STATIC_URL,
'CONTEXT': {
'default_context': 'test'
},
'CONFIG_DIRS': (
BASE_DIR,
BUNDLES,
),
# While webpack-build's cache will check for asset existence,
# watching compilers do not, so we need to ensure that the cache
# is cleared between runs
'CACHE_DIR': os.path.join(OUTPUT_ROOT, 'cache_dir'),
}
class ConfigFiles(object):
BASIC_CONFIG = os.path.join('basic', 'webpack.config.js')
LIBRARY_CONFIG = os.path.join('library', 'webpack.config.js')
MULTIPLE_BUNDLES_CONFIG = os.path.join('multiple_bundles', 'webpack.config.js')
MULTIPLE_ENTRY_CONFIG = os.path.join('multiple_entry', 'webpack.config.js')
CACHED_CONFIG = os.path.join('cached', 'webpack.config.js')
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RBit64(RPackage):
"""A S3 Class for Vectors of 64bit Integers
Package 'bit64' provides serializable S3 atomic 64bit (signed) integers.
These are useful for handling database keys and exact counting in +-2^63.
WARNING: do not use them as replacement for 32bit integers, integer64 are
not supported for subscripting by R-core and they have different semantics
when combined with double, e.g. integer64 + double => integer64. Class
integer64 can be used in vectors, matrices, arrays and data.frames. Methods
are available for coercion from and to logicals, integers, doubles,
characters and factors as well as many elementwise and summary functions.
Many fast algorithmic operations such as 'match' and 'order' support
interactive data exploration and manipulation and optionally leverage
caching."""
homepage = "https://cloud.r-project.org/package=bit64"
url = "https://cloud.r-project.org/src/contrib/bit64_0.9-7.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/bit64"
version('4.0.5', sha256='25df6826ea5e93241c4874cad4fa8dadc87a40f4ff74c9107aa12a9e033e1578')
version('0.9-7', sha256='7b9aaa7f971198728c3629f9ba1a1b24d53db5c7e459498b0fdf86bbd3dff61f')
depends_on('r@3.0.1:', type=('build', 'run'))
depends_on('r-bit@1.1-12:', when='@:0.9-7', type=('build', 'run'))
depends_on('r-bit@4.0.0:', when='@4.0.5:', type=('build', 'run'))
|
import numpy as np
from scipy import constants
import pandas as pd
import xarray as xr
import matplotlib.pyplot as plt
from matplotlib import colors
from matplotlib import cm
# from qtutils.analysis import ox_data_utils
from qtutils.analysis import data_utils
from .plot_utils import *
from .import_data_utils import *
from .mgr_utils import *
from importlib import reload
import sys
def reload_all():
# Of the imports above, only data_utils is bound as a module object here
# (ox_data_utils is commented out and plot_utils was star-imported).
for module in [data_utils]:
reload(module)
me = constants.electron_mass
ech = constants.elementary_charge
eps0 = constants.epsilon_0
hbar = constants.hbar
h = constants.h
G0 = 2*constants.e**2/constants.h
kb = constants.k |
# short DeVIDE matplotlib demo.
from pylab import *
# close previous figure if it exists
try:
obj.mpl_close_figure(numpy_test_figure)
except NameError:
pass
# square figure and square axes looks better for polar plots
numpy_test_figure = obj.mpl_new_figure(figsize=(8,8))
ax = axes([0.1, 0.1, 0.8, 0.8], polar=True, axisbg='#d5de9c')
# following example from http://matplotlib.sourceforge.net/screenshots/polar_demo.py
# radar green, solid grid lines
rc('grid', color='#316931', linewidth=1, linestyle='-')
rc('xtick', labelsize=15)
rc('ytick', labelsize=15)
r = arange(0,1,0.001)
theta = 2*2*pi*r
polar(theta, r, color='#ee8d18', lw=3)
setp(ax.thetagridlabels, y=1.075) # the radius of the grid labels
title("And there was much rejoicing!", fontsize=20)
|
# -*- coding: utf-8 -*-
from collections import defaultdict
from array import array as pyarray
from brainpy import (
calculate_mass, neutral_mass, PROTON,
isotopic_variants, mass_charge_ratio)
from brainpy.composition import (
parse_formula,
PyComposition)
from .utils import dict_proxy
from .constants import IGNORE_BELOW, TRUNCATE_AFTER
class TheoreticalIsotopicPattern(object):
"""Represent a theoretical isotopic peak list
Attributes
----------
peaklist: list of :class:`~.brainpy.TheoreticalPeak`
The theoretical isotopic pattern peak list
origin: float
The monoisotopic peak's m/z
"""
def __init__(self, peaklist, origin, offset=None):
self.peaklist = list(peaklist)
self.origin = float(origin)
if offset is None:
offset = self.peaklist[0].mz - origin
self.offset = float(offset)
def get(self, i):
return self.peaklist[i]
def __len__(self):
return len(self.peaklist)
def __getitem__(self, i):
return self.peaklist[i]
def __iter__(self):
return iter(self.peaklist)
def __reduce__(self):
return self.__class__, (self.peaklist, self.origin, self.offset)
def clone(self):
return self.__class__([p.clone() for p in self.peaklist], self.origin, self.offset)
def truncate_after(self, truncate_after=0.95):
"""Drops peaks from the end of the isotopic pattern
which make up the last ``1 - truncate_after`` fraction
of the isotopic pattern.
After truncation, the pattern is renormalized to sum to ``1``
Parameters
----------
truncate_after : float, optional
The percentage of the isotopic pattern signal to retain. Defaults
to 0.95.
Returns
-------
TheoreticalIsotopicPattern
self
"""
cumsum = 0
result = []
n = len(self)
for i in range(n):
peak = self[i]
cumsum += peak.intensity
result.append(peak)
if cumsum >= truncate_after:
break
self.peaklist = result
n = len(self)
normalizer = 1. / cumsum
for i in range(n):
peak = self[i]
peak.intensity *= normalizer
return self
def shift(self, offset):
"""Shift all the m/z of peaks in the isotopic pattern by ``offset``
m/z.
This will update :attr:`origin` to reflect the new starting
monoisotopic m/z.
Parameters
----------
offset : float
The amount to shift each peak in the pattern by in m/z
Returns
-------
TheoreticalIsotopicPattern
self
"""
new_origin = offset
delta = (new_origin - self.origin)
self.origin = new_origin
for peak in self.peaklist:
peak.mz += delta
return self
def ignore_below(self, ignore_below=0):
"""Discards peaks whose intensity is below ``ignore_below``.
After discarding peaks, the pattern will be renormalized to
sum to ``1.0``
Parameters
----------
ignore_below : float, optional
The threshold below which peaks will be discarded
Returns
-------
TheoreticalIsotopicPattern
self
"""
total = 0
kept_tid = []
n = len(self)
for i in range(n):
p = self.get(i)
if (p.intensity < ignore_below) and (i > 1):
continue
else:
total += p.intensity
p = p.clone()
kept_tid.append(p)
self.peaklist = kept_tid
self.offset = self.peaklist[0].mz - self.origin
n = len(self)
for i in range(n):
p = self.get(i)
p.intensity /= total
return self
@property
def monoisotopic_mz(self):
return self.origin
def __repr__(self):
return "TheoreticalIsotopicPattern(%0.4f, charge=%d, (%s))" % (
self.monoisotopic_mz,
self.peaklist[0].charge,
', '.join("%0.3f" % p.intensity for p in self.peaklist))
def scale(self, experimental_distribution, method='sum'):
r"""Scales ``self``'s intensity to match the intensity distribution of the
experimental isotopic pattern in ``experimental_distribution``.
The ``method`` argument must be one of:
"sum"
Scale each peak of the theoretical distribution by the sum of the
intensity in the experimental distribution such that the sums of their
intensities are equal.
"max"
Select the most abundant peak in the theoretical distribution :math:`t_i`, find its
match in the experimental distribution :math:`e_i`, find the scaling factor
:math:`\alpha = \frac{e_i}{t_i}` which will make :math:`e_i == t_i` and scale all
peaks in self by :math:`\alpha`
Parameters
----------
experimental_distribution : list
The experimental peaks matched
method : str, optional
The scaling method to use. Defaults to ``"sum"``
Returns
-------
TheoreticalIsotopicPattern
self
"""
if method == 'sum':
total_abundance = sum(
p.intensity for p in experimental_distribution)
for peak in self:
peak.intensity *= total_abundance
elif method == 'max':
i, peak = max(enumerate(self),
key=lambda x: x[1].intensity)
scale_factor = experimental_distribution[
i].intensity / peak.intensity
for peak in self:
peak.intensity *= scale_factor
elif method == "meanscale":
scales = 0
weights = 0
total = 0
for i in range(len(experimental_distribution)):
epeak = experimental_distribution[i]
total += epeak.intensity
tpeak = self[i]
w = ((tpeak.intensity) * epeak.intensity ** 2)
weights += w
scales += (epeak.intensity / tpeak.intensity) * w
scale_factor = scales / weights
for peak in self:
peak.intensity *= scale_factor
elif method == 'top3':
top1 = 0
top2 = 0
top3 = 0
top1_index = 0
top2_index = 0
top3_index = 0
for i, peak in enumerate(self):
if peak.intensity > top1:
top3 = top2
top3_index = top2_index
top2 = top1
top2_index = top1_index
top1 = peak.intensity
top1_index = i
elif peak.intensity > top2:
top3 = top2
top3_index = top2_index
top2 = peak.intensity
top2_index = i
elif peak.intensity > top3:
top3 = peak.intensity
top3_index = i
scale = experimental_distribution[top1_index].intensity / self[top1_index].intensity
scale += experimental_distribution[top2_index].intensity / self[top2_index].intensity
scale += experimental_distribution[top3_index].intensity / self[top3_index].intensity
scale /= 3
for peak in self:
peak.intensity *= scale
return self
def scale_raw(self, scale_factor):
for peak in self:
peak.intensity *= scale_factor
return self
def drop_last_peak(self):
tail = self[-1]
scaler = 1 - tail.intensity
for p in self[:-1]:
p.intensity /= scaler
return scaler
def total(self):
return sum(p.intensity for p in self)
def normalize(self):
total = self.total()
for peak in self:
peak.intensity /= total
return self
def _cumulative(self):
cumulative_intensities = []
total = 0
for peak in self:
total += peak.intensity
cumulative_intensities.append(total)
return cumulative_intensities
def incremental_truncation(self, threshold):
"""Create incremental truncations of `self`, dropping the last peak until
the the total signal in reaches `threshold`
Parameters
----------
threshold: float
The minimum percentage of the isotopic pattern to retain.
Returns
-------
:class:`list` of :class:`TheoreticalIsotopicPattern`
"""
template = self.clone().normalize()
accumulator = [template]
cumulative_intensities = self._cumulative()
n = len(self)
i = n - 1
while i > 0:
if cumulative_intensities[i - 1] < threshold:
break
template = template.clone()
template.drop_last_peak()
accumulator.append(template)
i -= 1
return accumulator
def basepeak_index(self):
bp_intensity = 0
bp_index = 0
for i, p in enumerate(self):
if p.intensity > bp_intensity:
bp_intensity = p.intensity
bp_index = i
return bp_index
@dict_proxy("base_composition")
class Averagine(object):
"""An isotopic model which can be used to interpolate the composition
of a class of molecule given an average monomer composition and a theoretical
polymer mass
Implements the :class:`Mapping` interface.
Attributes
----------
base_composition: Mapping
A mapping from element symbol to average count (float) of that element
for the average monomer
base_mass : float
The base mass of the average monomer. Calculated from :attr:`base_composition`
"""
def __init__(self, base_composition):
self.base_composition = dict(base_composition)
self.base_mass = calculate_mass(self.base_composition)
def scale(self, mz, charge=1, charge_carrier=PROTON):
"""Given an m/z and a charge state, interpolate the composition
of the polymer with the matching neutral mass
Parameters
----------
mz : float
The reference m/z to calculate the neutral mass to interpolate from
charge : int, optional
The reference charge state to calculate the neutral mass. Defaults to 1
charge_carrier : float, optional
The mass of the charge carrier. Defaults to the mass of a proton.
Returns
-------
Mapping
The interpolated composition for the calculated neutral mass,
rounded to the nearest integer and hydrogen corrected.
References
----------
Senko, M. W., Beu, S. C., & McLafferty, F. W. (1995). Determination of monoisotopic masses and ion populations
for large biomolecules from resolved isotopic distributions. Journal of the American Society for Mass
Spectrometry, 6(4), 229–233. http://doi.org/10.1016/1044-0305(95)00017-8
"""
neutral = neutral_mass(mz, charge, charge_carrier)
scale = neutral / self.base_mass
scaled = {}
for elem, count in self.base_composition.items():
scaled[elem] = round(count * scale)
scaled_mass = calculate_mass(scaled)
delta_hydrogen = round(scaled_mass - neutral)
H = scaled["H"]
if H > delta_hydrogen:
scaled["H"] = H - delta_hydrogen
else:
scaled["H"] = 0
return scaled
def isotopic_cluster(self, mz, charge=1, charge_carrier=PROTON, truncate_after=TRUNCATE_AFTER,
ignore_below=IGNORE_BELOW):
"""Generate a theoretical isotopic pattern for the given m/z and charge state, thresholded
by theoretical peak height and density.
Parameters
----------
mz : float
The reference m/z to calculate the neutral mass to interpolate from
charge : int, optional
The reference charge state to calculate the neutral mass. Defaults to 1
charge_carrier : float, optional
The mass of the charge carrier. Defaults to the mass of a proton.
truncate_after : float, optional
The percentage of the signal in the theoretical isotopic pattern to include.
Defaults to 0.95, including the first 95% of the signal in the generated pattern
ignore_below : float, optional
Omit theoretical peaks whose intensity is below this number.
Defaults to 0.0
Returns
-------
:class:`.TheoreticalIsotopicPattern`
The generated and thresholded pattern
"""
composition = self.scale(mz, charge, charge_carrier)
peaklist = isotopic_variants(composition, charge=charge)
tid = TheoreticalIsotopicPattern(peaklist, peaklist[0].mz, 0)
tid.shift(mz)
if truncate_after < 1.0:
tid.truncate_after(truncate_after)
if ignore_below > 0:
tid.ignore_below(ignore_below)
return tid
def __call__(self, mz, charge=1, charge_carrier=PROTON, truncate_after=TRUNCATE_AFTER, ignore_below=IGNORE_BELOW):
return self.isotopic_cluster(mz, charge, charge_carrier, truncate_after, ignore_below)
def __repr__(self):
return "Averagine(%r)" % self.base_composition
def __eq__(self, other):
return self.base_composition == other.base_composition
def __hash__(self):
return hash(frozenset(self.base_composition.items()))
def average_compositions(compositions, weights=None):
"""Calculate the average composition
Parameters
----------
compositions: Iterable
An Iterable of Mappings representing chemical compositions
weights: Iterable, optional
An optional weight vector
Returns
-------
dict
The average composition
"""
n = 0
if weights is None:
weights = [1] * len(compositions)
else:
if len(weights) != len(compositions):
raise ValueError("The size of weights must match the size of compositions")
result = defaultdict(float)
for i, comp in enumerate(compositions):
w = weights[i]
n += w
for k, v in comp.items():
result[k] += v * w
for k, v in list(result.items()):
result[k] = v / n
return dict(result)
def add_compositions(a, b):
a = defaultdict(float, **a)
for k, v in b.items():
a[k] += v
return dict(a)
try:
_has_c = True
_Averagine = Averagine
_TheoreticalIsotopicPattern = TheoreticalIsotopicPattern
from ms_deisotope._c.averagine import Averagine, TheoreticalIsotopicPattern
except ImportError as e:
_has_c = False
peptide = Averagine({"C": 4.9384, "H": 7.7583, "N": 1.3577, "O": 1.4773, "S": 0.0417})
glycopeptide = Averagine({"C": 10.93, "H": 15.75, "N": 1.6577, "O": 6.4773, "S": 0.02054})
glycan = Averagine({'C': 7.0, 'H': 11.8333, 'N': 0.5, 'O': 5.16666})
permethylated_glycan = Averagine({'C': 12.0, 'H': 21.8333, 'N': 0.5, 'O': 5.16666})
heparin = Averagine({'H': 10.5, 'C': 6, 'S': 0.5, 'O': 5.5, 'N': 0.5})
heparan_sulfate = Averagine({'H': 10.667, 'C': 6.0, 'S': 1.333, 'O': 9.0, 'N': 0.667})
_neutron_shift = calculate_mass({"C[13]": 1}) - calculate_mass({"C[12]": 1})
def isotopic_shift(charge=1):
return _neutron_shift / float(charge)
@dict_proxy("averagine")
class AveragineCache(object):
"""A wrapper around a :class:`Averagine` instance which will cache isotopic patterns
produced for new (m/z, charge) pairs and reuses it for nearby m/z values
Attributes
----------
averagine : :class:`~Averagine`
The averagine to use to generate new isotopic patterns
cache_truncation : float
Number of decimal places to round off the m/z for caching purposes
"""
def __init__(self, averagine, backend=None, cache_truncation=1.0):
if backend is None:
backend = {}
self.backend = backend
self.averagine = Averagine(averagine)
self.cache_truncation = cache_truncation
def __call__(self, mz, charge=1, charge_carrier=PROTON, truncate_after=TRUNCATE_AFTER, ignore_below=IGNORE_BELOW):
return self.isotopic_cluster(mz, charge, charge_carrier, truncate_after, ignore_below)
def has_mz_charge_pair(self, mz, charge=1, charge_carrier=PROTON, truncate_after=TRUNCATE_AFTER,
ignore_below=IGNORE_BELOW):
if self.cache_truncation == 0.0:
key_mz = mz
else:
key_mz = round(mz / self.cache_truncation) * self.cache_truncation
if (key_mz, charge, charge_carrier) in self.backend:
return self.backend[key_mz, charge, charge_carrier].clone().shift(mz)
else:
tid = self.averagine.isotopic_cluster(
key_mz, charge, charge_carrier, truncate_after, ignore_below)
self.backend[key_mz, charge, charge_carrier] = tid.clone()
return tid
def isotopic_cluster(self, mz, charge=1, charge_carrier=PROTON, truncate_after=TRUNCATE_AFTER,
ignore_below=IGNORE_BELOW):
"""Generate a theoretical isotopic pattern for the given m/z and charge state, thresholded
by theoretical peak height and density.
Mimics :meth:`.Averagine.isotopic_cluster` but uses the object's cache through
:meth:`has_mz_charge_pair`.
Parameters
----------
mz : float
The reference m/z to calculate the neutral mass to interpolate from
charge : int, optional
The reference charge state to calculate the neutral mass. Defaults to 1
charge_carrier : float, optional
The mass of the charge carrier. Defaults to the mass of a proton.
truncate_after : float, optional
The percentage of the signal in the theoretical isotopic pattern to include.
Defaults to TRUNCATE_AFTER, including the first 95% of the signal in the generated pattern
ignore_below : float, optional
Omit theoretical peaks whose intensity is below this number.
Defaults to 0.0
Returns
-------
:class:`.TheoreticalIsotopicPattern`
The generated and thresholded pattern
"""
return self.has_mz_charge_pair(mz, charge, charge_carrier, truncate_after, ignore_below)
def __repr__(self):
return "AveragineCache(%r)" % self.averagine
def clear(self):
self.backend.clear()
def populate(self, min_mz=10, max_mz=3005, min_charge=1, max_charge=8, charge_carrier=PROTON,
truncate_after=TRUNCATE_AFTER, ignore_below=IGNORE_BELOW):
sign = min_charge / abs(min_charge)
assert sign == (max_charge / abs(max_charge)), "The polarity of min_charge must match the polarity of max_charge"
min_charge = abs(min_charge)
max_charge = abs(max_charge)
for i in range(int(min_mz), int(max_mz)):
for j in range(min(max_charge, min_charge), max(min_charge, max_charge) + 1):
self.isotopic_cluster(
i, sign * j, charge_carrier, truncate_after=truncate_after, ignore_below=ignore_below)
return self
try:
_AveragineCache = AveragineCache
_isotopic_shift = isotopic_shift
from ms_deisotope._c.averagine import AveragineCache, isotopic_shift
except ImportError:
pass
class BasePeakToMonoisotopicOffsetEstimator(object):
"""A type to predict the distance (in neutron count) from the base peak to
the monoisotopic peak of an isotopic pattern given a mass and an
:class:`Averagine` model.
The smaller :attr:`step_size` is, the more precise the estimate, but the more
space is used.
Attributes
----------
averagine : :class:`Averagine`
The averagine model to use to generate isotopic patterns
step_size : float
The level of discretization to use to bin masses around isotopic
pattern shape.
bins : :class:`array.array`
A sequence of positive :class:`int` values corresponding to the distance
between the base peak and the monoisotopic peak in neutrons in
the given bin. The mass for the bin is the bin index times :attr:`step_size`.
"""
def __init__(self, averagine, step_size=100.0):
self.averagine = averagine
self.bins = pyarray('I')
self.step_size = step_size
def _max_mass_bin(self):
return len(self.bins) * self.step_size
def _bin_for(self, mass):
offset, _remainder = divmod(mass, self.step_size)
return int(offset)
def _estimate_for_peak_offset(self, mass):
tid = self.averagine.isotopic_cluster(mass, 1, ignore_below=0.0)
return tid.basepeak_index()
def _populate_bins(self, max_mass):
current_bin = self._max_mass_bin()
i = 0
while max_mass >= current_bin:
next_bin_mass = current_bin + self.step_size
delta = self._estimate_for_peak_offset(next_bin_mass)
self.bins.append(delta)
current_bin = next_bin_mass
i += 1
return i
def get_peak_offset(self, mass, binned=True):
"""Estimate the number of neutrons separating the most intense peak of
an isotopic pattern from the pattern's monoisotopic peak.
Parameters
----------
mass : float
The neutral mass to predict for.
binned : bool, optional
Whether or not to use the bin-interpolated solution. If not,
an exact mass solution will be calculated, which is more precise,
but more expensive (the default is True).
Returns
-------
int
"""
if not binned:
return self._estimate_for_peak_offset(mass)
index = self._bin_for(mass)
try:
return self.bins[index]
except IndexError:
self._populate_bins(mass)
return self.bins[index]
def __call__(self, mass, binned=True):
"""Estimate the number of neutrons separating the most intense peak of
an isotopic pattern from the pattern's monoisotopic peak.
Parameters
----------
mass : float
The neutral mass to predict for.
binned : bool, optional
Whether or not to use the bin-interpolated solution. If not,
an exact mass solution will be calculated, which is more precise,
but more expensive (the default is True).
Returns
-------
int
"""
return self.get_peak_offset(mass, binned=binned)
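# A minimal usage sketch of the models above; the m/z, charge, and mass
# values are illustrative only.
if __name__ == '__main__':
    tid = peptide.isotopic_cluster(1000.0, charge=2)
    print(tid)                                 # peaks spaced ~isotopic_shift(2) m/z apart
    print(isotopic_shift(2))                   # about 0.5017 m/z per neutron at charge 2
    cache = AveragineCache(peptide)
    print(cache.isotopic_cluster(1000.04, 2))  # keyed by rounded m/z, so reusable nearby
    estimator = BasePeakToMonoisotopicOffsetEstimator(peptide)
    print(estimator(5000.0))                   # neutrons from base peak to monoisotopic peak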
|
import os
import imageio
import numpy as np
import scipy.misc  # imresize requires SciPy < 1.3 with Pillow installed
def load_images_from_folder(folder_name):
return list(
map(lambda image_name: (
image_name, imageio.imread(os.path.join(folder_name, image_name)) / 255),
os.listdir(folder_name)))
def prepare_input_data(database_folder='./images/database', ground_truth_folder='./images/ground_truth_augmented'):
"""
Loads images from input folders and groups them with their labels.
:param database_folder:
:param ground_truth_folder:
:return:
"""
def remove_svm_from_name(entry):
name, data = entry
return name.replace('_SVM', ''), data
output = []
input_images = load_images_from_folder(database_folder)
ground_truth = dict(map(remove_svm_from_name, load_images_from_folder(ground_truth_folder)))
for (image_name, image_data) in input_images:
image_output = ground_truth[image_name]
image_output = scipy.misc.imresize(image_output, (110,110, 3)) / 255
output.append(
{
'name': image_name,
'output': image_output,
'input': image_data
}
)
return output
def split_input_data(input_data):
"""
Splits the input data into training and test sets using a 70:30 ratio.
:param input_data: list of dicts with 'input' (image) and 'output' (label) keys
:return: split data as ((train_images, train_labels), (test_images, test_labels))
"""
images = [elem['input'] for elem in input_data]
labels = [elem['output'] for elem in input_data]
size = len(images)
train_part = int(size * 0.7)
train_images = np.array(images[:train_part])
train_labels = np.array(labels[:train_part])
test_images = np.array(images[train_part:])
test_labels = np.array(labels[train_part:])
return (train_images, train_labels), (test_images, test_labels)
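# A minimal sketch of the 70:30 split on synthetic data; no image folders
# are required, and the array contents are placeholders.
if __name__ == '__main__':
    fake_data = [{'name': 'img_%d.png' % i,
                  'input': np.zeros((110, 110, 3)),
                  'output': np.zeros((110, 110, 3))} for i in range(10)]
    (train_x, train_y), (test_x, test_y) = split_input_data(fake_data)
    print(train_x.shape, test_x.shape)  # (7, 110, 110, 3) (3, 110, 110, 3)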
|
# -*- coding: utf-8 -*-
# Created by restran on 2017/8/14
from __future__ import unicode_literals, absolute_import
from mountains.utils import PrintCollector
"""
手机9宫格键盘编码
"""
dict_map = {
'21': 'a', '22': 'b', '23': 'c',
'31': 'd', '32': 'e', '33': 'f',
'41': 'g', '42': 'h', '43': 'i',
'51': 'j', '52': 'k', '53': 'l',
'61': 'm', '62': 'n', '63': 'o',
'71': 'p', '72': 'q', '73': 'r', '74': 's',
'81': 't', '82': 'u', '83': 'v',
'91': 'w', '92': 'x', '93': 'y', '94': 'z'
}
def decode(data, verbose=False):
p = PrintCollector()
data = data.replace(' ', '').strip()
if len(data) % 2 != 0:
p.print('Probably not a nine-key phone keypad encoding')
return
tmp_data = list(data)
result = []
while len(tmp_data) > 0:
k = ''.join(tmp_data[:2])
tmp_data = tmp_data[2:]
v = dict_map.get(k)
if v is None:
p.print('Probably not a nine-key phone keypad encoding')
return
result.append(v)
result = ''.join(result)
p.print(result)
return result
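# A minimal inverse-mapping sketch, handy for round-trip testing of decode();
# characters outside dict_map are silently dropped.
reverse_map = {v: k for k, v in dict_map.items()}
def encode(text):
    return ''.join(reverse_map[c] for c in text.lower() if c in reverse_map)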
def main():
data = '335321414374744361715332'
decode(data)
if __name__ == '__main__':
main()
|
"""Common audit metadata for our data models."""
import datetime
from sqlalchemy import schema, sql, func
from sqlalchemy.sql import sqltypes
class AuditColumnsMixin:
"""A mixin to add audit columns to a data model.
Attributes:
created_datetime: Metadata describing when a row was created.
updated_datetime: Metadata describing when a row was last updated.
"""
created_datetime = schema.Column(
sqltypes.DateTime(timezone=True),
nullable=False,
server_default=sql.text('now()'),
)
updated_datetime = schema.Column(
sqltypes.DateTime(timezone=True),
nullable=False,
server_default=sql.text('now()'),
onupdate=datetime.datetime.utcnow,
)
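# A minimal usage sketch, assuming SQLAlchemy 1.4+ for declarative_base;
# `Base` and `Widget` are hypothetical names, not part of this project.
if __name__ == '__main__':
    from sqlalchemy.orm import declarative_base
    Base = declarative_base()
    class Widget(AuditColumnsMixin, Base):
        __tablename__ = 'widget'
        id = schema.Column(sqltypes.Integer, primary_key=True)
    print(Widget.__table__.c.keys())  # includes created_datetime and updated_datetime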
|
# -*- coding: utf-8 -*-
"""
A parser for the Syslog Protocol (RFC5424 - http://tools.ietf.org/search/rfc5424)
Copyright © 2011 Evax Software <contact@evax.fr>
"""
import calendar
import sys
from datetime import datetime
from ..pyparsing import Word, Regex, Group, White, Combine, CharsNotIn, \
ZeroOrMore, OneOrMore, QuotedString, Or, Optional, LineStart, LineEnd, \
printables
from .util import parse_timestamp
from .util.MultiDict import OrderedMultiDict
from .util.escape_value import escape_param_value, str_or_nil
# Support SYSLOG_SyslogProtocol23Format which can send an empty APP-NAME.
SUPPORT_MISSING_VALUES = True
ESCAPE_SDATA_VALUES = True
# from the RFCs ABNF description
nilvalue = Word("-")
digit = Regex("[0-9]{1}")
nonzero_digit = Regex("[1-9]{1}")
printusascii = printables
sp = White(" ", exact=1)
octet = Regex('[\x00-\xFF]')
utf_8_string = Regex('[\x00-\xFF]*')
BOM = '\xef\xbb\xbf'
bom = Regex(BOM)
msg_utf8 = bom + utf_8_string
msg_any = utf_8_string
msg = Combine(Or([msg_utf8, msg_any])).setResultsName('MSG')
sd_name = CharsNotIn('= ]"', 1, 32)
param_name = sd_name.setResultsName('SD_PARAM_NAME')
param_value = QuotedString(quoteChar='"', escChar='\\', multiline=True)
param_value = param_value.setResultsName('SD_PARAM_VALUE')
sd_id = sd_name.setResultsName('SD_ID')
sd_param = Group(param_name + Regex('=') + param_value)
sd_params = Group(ZeroOrMore(Group(sp+sd_param.setResultsName('SD_PARAM'))))
sd_element = Group('['+sd_id+sd_params.setResultsName('SD_PARAMS')+']')
sd_element = sd_element.setResultsName('SD_ELEMENT')
sd_elements = Group(OneOrMore(sd_element))
structured_data = Or([nilvalue, sd_elements.setResultsName('SD_ELEMENTS')])
structured_data = structured_data.setResultsName('STRUCTURED_DATA')
time_hour = Regex('0[0-9]|1[0-9]|2[0-3]')
time_minute = Regex('[0-5][0-9]')
time_second = time_minute
time_secfrac = Regex(r'\.[0-9]{1,6}')
time_numoffset = Or([Regex(r'\+'), Regex('-')]) + \
time_hour + ':' + time_minute
time_offset = Or([Regex('Z'), time_numoffset])
partial_time = time_hour + ':' + time_minute + ':' + time_second + \
Optional(time_secfrac)
full_time = partial_time + time_offset
date_mday = Regex('[0-9]{2}')
date_month = Regex('0[1-9]|1[0-2]')
date_fullyear = Regex('[0-9]{4}')
full_date = date_fullyear + '-' + date_month + '-' + date_mday
timestamp = Combine(Or([nilvalue, full_date + 'T' + full_time]))
timestamp = timestamp.setResultsName('TIMESTAMP')
msgid = Or([nilvalue, CharsNotIn('= ]"', 1, 32)])
if SUPPORT_MISSING_VALUES:
msgid = Optional(msgid)
msgid = msgid.setResultsName('MSGID')
procid = Or([nilvalue, CharsNotIn('= ]"', 1, 128)])
if SUPPORT_MISSING_VALUES:
procid = Optional(procid)
procid = procid.setResultsName('PROCID')
app_name = Or([nilvalue, CharsNotIn('= ]"', 1, 48)])
if SUPPORT_MISSING_VALUES:
app_name = Optional(app_name)
app_name = app_name.setResultsName('APP_NAME')
hostname = Or([nilvalue, CharsNotIn('= ]"', 1, 255)])
if SUPPORT_MISSING_VALUES:
hostname = Optional(hostname)
hostname = hostname.setResultsName('HOSTNAME')
version = Regex('[1-9][0-9]{0,2}').setResultsName('VERSION')
prival = Regex("[0-9]{1,3}").setResultsName('PRIVAL')
pri = "<" + prival + ">"
header = pri + version + sp + timestamp + sp + hostname + sp + \
app_name + sp + procid + sp + msgid
syslog_msg = LineStart() + header + structured_data + \
Optional(sp+msg) + LineEnd()
# Default Prival for new SyslogEntry instances
from .constants import LOG_INFO,LOG_USER
DEFAULT_PRIVAL = LOG_INFO|LOG_USER
def ver_str(v, errors=None):
ver = sys.version_info[0], sys.version_info[1]
if ver > (2, 7):
return str(v)
else:
return unicode(v, errors=errors)
class Params(object):
def __init__(self, d):
for k, v in list(d.items()):
setattr(self, k, v)
class SDElement(object):
"""
An SD-ELEMENT consists of a name and parameter name-value pairs.
"""
def __init__(self, sd_id, sd_params):
"""
**arguments**
*sd_id*
SD-IDs are case-sensitive and uniquely identify the type and purpose
of the SD-ELEMENT.
*sd_params*
Key/value pairs attached to this SD-ELEMENT. This can be any iterable
that yields tuples, a dict or a :class:`~loggerglue.utils.multidict.OrderedMultiDict`
(An SD-PARAM key may be repeated multiple times inside an SD-ELEMENT)
**attributes**
*id*
SD-ID for this structured data element.
*sd_params*
Key/value pairs attached to this SD-ELEMENT, represented as
an OrderedMultiDict.
*params*
Key/value pairs attached to this SD-ELEMENT, represented as
a class instance (for convenience, so that parameters can
be addressed with `elmt.params.origin`). If there are multiple
values for a key, the *last* element is returned.
"""
self.id = sd_id
self.sd_params = OrderedMultiDict(sd_params)
self.params = Params(self.sd_params)
def __str__(self):
"""Convert SDElement to formatted string"""
rv = ['[', self.id]
for (k,v) in self.sd_params.allitems():
# if k in ('repr', 'json_data', 'trace', 'stack'):
# rv += ['\n']
rv += [
' ', k, '="', escape_param_value(ver_str(v, errors='ignore'))
if ESCAPE_SDATA_VALUES else ver_str(v, 'ignore'), '"'
]
if k in ('repr', 'json_data', 'trace', 'stack'):
rv += ['\n']
rv += [']']
return ''.join(rv)
@classmethod
def parse(cls, parsed):
sd = getattr(parsed, 'STRUCTURED_DATA', None)
if sd is None or sd == '-':
return None
sd_id = parsed.STRUCTURED_DATA.SD_ID
params = OrderedMultiDict()
for i in parsed.STRUCTURED_DATA.SD_PARAMS:
params[i.SD_PARAM.SD_PARAM_NAME] = \
i.SD_PARAM.SD_PARAM_VALUE.decode('utf-8')
return cls(sd_id, params)
class StructuredData(object):
def __init__(self, elements):
self.elements = elements
def __str__(self):
"""Convert StructuredData to string"""
return ''.join([str(e) for e in self.elements])
@classmethod
def parse(cls, parsed):
sd = getattr(parsed, 'STRUCTURED_DATA', None)
if sd is None or sd == '-':
return None
elements = []
for se in parsed.SD_ELEMENTS:
sd_id = se.SD_ID
params = OrderedMultiDict()
for i in se.SD_PARAMS:
params[i.SD_PARAM.SD_PARAM_NAME] = \
i.SD_PARAM.SD_PARAM_VALUE.decode('utf-8')
elements.append(SDElement(sd_id, params))
return StructuredData(elements)
@classmethod
def from_str(cls, line, consume_error=True):
"""Returns a StructuredData object from a string"""
try:
r = structured_data.parseString(line)
return cls.parse(r)
except Exception as e:
if consume_error:
print(e)
import sys, traceback
traceback.print_exc(file=sys.stdout)
return None
else:
raise
class SyslogEntry(object):
"""
A class representing a syslog entry.
"""
def __init__(self, prival=DEFAULT_PRIVAL, version=1, timestamp=None,
hostname=None, app_name=None, procid=None, msgid=None,
structured_data=None, msg=None):
"""
**arguments/attributes**
*prival*
RFC5424 priority values are a combination of a priority and facility, for example `LOG_ALERT | LOG_DAEMON`.
See :mod:`loggerglue.constants`.
*version*
Version of syslog entry. There is usually no need to change this.
*timestamp*
Timestamp (as a datetime object).
*hostname*
The HOSTNAME field SHOULD contain the hostname and the domain name of the originator.
*app_name*
The APP-NAME field SHOULD identify the device or application that
originated the message. It is a string without further semantics.
It is intended for filtering messages on a relay or collector.
*procid*
PROCID is a value that is included in the message, having no
interoperable meaning, except that a change in the value indicates
there has been a discontinuity in syslog reporting.
*msgid*
The MSGID SHOULD identify the type of message.
*structured_data*
STRUCTURED-DATA provides a mechanism to express information in a well
defined, easily parseable and interpretable data format.
*msg*
The MSG part contains a free-form message that provides information
about the event.
"""
self.prival = prival
self.version = version
self.timestamp = timestamp
self.timestamp_as_float = False
self.hostname = hostname
self.app_name = app_name
self.procid = procid
self.msgid = msgid
if structured_data is not None and not isinstance(structured_data, StructuredData):
structured_data = StructuredData(structured_data)
self.structured_data = structured_data
self.msg = msg
@classmethod
def parse(cls, parsed):
ts = parse_timestamp(parsed.TIMESTAMP)
if ts is None:
# If no timestamp provided, fill in current UTC date and time
timestamp = datetime.utcnow()
else:
timestamp = ts
attr = {}
for i in ('prival', 'version', 'hostname', 'app_name',
'procid', 'msgid'):
I = i.upper()
v = getattr(parsed, I, '-')
if v in ["", "-"]:
v = None
else:
v = v.decode('utf-8')
attr[i] = v
m = getattr(parsed, 'MSG', None)
if m is not None:
if m.startswith(BOM):
msg = m[3:].decode('utf-8')
else:
msg = ver_str(m, errors='ignore')
else:
msg = None
version = int(attr['version'])
prival = int(attr['prival'])
structured_data = StructuredData.parse(parsed)
return cls(
prival=prival, version=version, timestamp=timestamp,
hostname=attr['hostname'], app_name=attr['app_name'], procid=attr['procid'], msgid=attr['msgid'],
structured_data=structured_data, msg=msg
)
def __str__(self):
"""Convert SyslogEntry to string"""
rv = ['<', str(self.prival), '>', str(self.version), ' ']
if self.timestamp is None:
rv.append('-')
elif self.timestamp_as_float:
t = calendar.timegm(self.timestamp.utctimetuple())
t += self.timestamp.microsecond / 1000000.0
rv.append(repr(t))
else:
rv.append(self.timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ"))
rv += [' ',
str_or_nil(self.hostname), ' ', str_or_nil(self.app_name), ' ', str_or_nil(self.procid), ' ',
str_or_nil(self.msgid), ' ', str_or_nil(self.structured_data)]
if self.msg is not None:
rv += [' ']
# TODO
# if type(self.msg) is str:
# rv += [BOM, self.msg.encode('utf-8', errors='ignore')]
# else:
rv += [self.msg]
return ''.join(rv)
@classmethod
def from_line(cls, line, consume_error=True):
"""Returns a parsed SyslogEntry object from a syslog `line`."""
try:
r = syslog_msg.parseString(line.strip())
return cls.parse(r)
except Exception as e:
if consume_error:
print(e)
import sys, traceback
traceback.print_exc(file=sys.stdout)
return None
else:
raise
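# A minimal round-trip sketch; the prival 14 below is LOG_INFO|LOG_USER and
# the absent structured data renders as the nil value '-'.
if __name__ == '__main__':
    entry = SyslogEntry(hostname='example.com', app_name='demo',
                        timestamp=datetime.utcnow(), msg='hello world')
    print(str(entry))  # e.g. <14>1 2021-01-01T00:00:00.000000Z example.com demo - - - hello world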
|
import tensorflow as tf
import time
import datetime
import sys
import logging
import numpy as np
from pathlib import Path
from tensorflow.keras.callbacks import EarlyStopping
from train_data_preparation import tokenizer, dataset_train, train_max_length
from valid_data_preparation import dataset_val
from model import Captioner
from train_utils import make_optimizer, LoggerCallback
from metrics import BLEUMetric, METEORMetric
from params import BATCH_SIZE, EPOCHS, num_examples, num_examples_val, \
vocab_size, VALID_BATCH_SIZE, attention_features_shape
# from config import CHECKPOINT_PATH
def padded_cross_entropy(real, pred):
"""
Params:
real: tensor of shape (batch_size,)
contains the word indices for each caption word on the batch
pred: tensor of shape (batch_size, vocab_size)
contains logits distribution on the whole vocabulary for each word
on the batch
"""
loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True,
reduction='none')
mask = tf.math.logical_not(tf.math.equal(real, 0))
loss_ = loss_object(real, pred)
mask = tf.cast(mask, dtype=loss_.dtype)
loss_ *= mask
return tf.reduce_mean(loss_)
def train(hparams, models_path=Path('.')):
"""
Returns:
results: dict
dictionary containing model identifier, elapsed_time per epoch,
learning curve with loss and metrics
"""
model_id = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
captioner = Captioner(**hparams['model'],
vocab_size = vocab_size,
tokenizer = tokenizer,
batch_size = BATCH_SIZE,
caption_length = train_max_length,
valid_batch_size = VALID_BATCH_SIZE,
num_examples_val = num_examples_val)
optimizer = make_optimizer(**hparams['optimizer'])
metrics = [BLEUMetric(n_gram=1, name = 'bleu-1'),
BLEUMetric(n_gram=2, name = 'bleu-2'),
BLEUMetric(n_gram=3, name = 'bleu-3'),
BLEUMetric(n_gram=4, name = 'bleu-4'),
METEORMetric(name = 'meteor')]
captioner.compile(optimizer, loss_fn = padded_cross_entropy,
metrics = metrics, run_eagerly = True)
logger_cb = LoggerCallback()
early_stopping_cb = EarlyStopping(monitor = 'val_bleu-4', patience = 10,
mode = 'max',
restore_best_weights = True)
logging.info('Training start for model ' + model_id)
logging.info('hparams: ' + str(hparams))
history = captioner.fit(dataset_train, epochs=EPOCHS,
validation_data = dataset_val,
validation_steps = num_examples_val//VALID_BATCH_SIZE,
callbacks=[logger_cb, early_stopping_cb])
losses = {key:value for key, value in history.history.items() if 'val' not in key}
metrics = {key[4:]:value for key, value in history.history.items() if 'val' in key}
results = { 'id': model_id,
'losses': losses,
'epoch_times': logger_cb.epoch_times,
'total_time': logger_cb.total_time,
'params': captioner.count_params(),
'instances_train': num_examples,
'instances_valid': num_examples_val,
'batch_size': BATCH_SIZE,
'epochs': EPOCHS,
'vocabulary': vocab_size,
'valid_batch_size': VALID_BATCH_SIZE,
'valid_epoch_times': logger_cb.validation_times,
'metrics': metrics
}
captioner.save_weights(str(models_path / (model_id + '.h5')))
return results
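# A minimal sanity check for padded_cross_entropy above; runs only as a
# script, and only if the data-preparation imports at the top succeed.
if __name__ == '__main__':
    demo_real = tf.constant([3, 0])          # index-0 tokens are padding
    demo_pred = tf.random.uniform((2, 10))   # (batch, vocab) logits
    print(padded_cross_entropy(demo_real, demo_pred))  # padded row contributes 0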
|
# Import a whole load of stuff
from System.IO import *
from System.Drawing import *
from System.Runtime.Remoting import *
from System.Threading import *
from System.Windows.Forms import *
from System.Xml.Serialization import *
from System import *
from Analysis.EDM import *
from DAQ.Environment import *
from EDMConfig import *
r = Random()
def EDMGo():
# loop and take data
blockIndex = 0
maxBlockIndex = 100000
while blockIndex < maxBlockIndex:
print("Acquiring block " + str(blockIndex) + " ...")
# randomise polarization
polAngle = 360.0 * r.NextDouble()
hc.SetPolarizerAngle(polAngle)
hc.SwitchEAndWait()
blockIndex = blockIndex + 1
def run_script():
EDMGo()
|
from rest_framework.routers import DefaultRouter
from website.views import PartnerViewSet, FaqViewSet, MenuItemViewSet, ArticleViewSet, NewsletterViewSet, \
WantToHelpViewSet, ContactViewSet
router = DefaultRouter()
router.register('partners', PartnerViewSet)
router.register('faqs', FaqViewSet)
router.register('menu-items', MenuItemViewSet)
router.register('articles', ArticleViewSet)
router.register('newsletter', NewsletterViewSet)
router.register('want-to-help', WantToHelpViewSet)
router.register('contact', ContactViewSet)
|
# Read an integer and convert it to the corresponding Unicode character
print(chr(int(input())))
|
# Python Module example
import logging
logger = logging.getLogger()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
logger.setLevel(logging.INFO)
def sub(a, b):
"""This program subtracts two
numbers and return the result"""
result = a - b
return result
|
from .mulfft import PolyMul,PolyMullvl2Long
from .utils import gaussian32, dtot32, gaussian64, dtot64
from secrets import randbits
import os
import numpy as np
def trlweSymEncrypt(p, alpha, key, twist):
# a = np.array([randbits(32) for i in range(len(key))], dtype=np.uint32)
a = np.frombuffer(os.urandom(len(key) * 4), dtype=np.uint32)
b = gaussian32(dtot32(p), alpha, len(key))
b += PolyMul(a, key, twist)
return np.array([a, b])
def trlweSymEncryptlvl2(p,alpha,key, twistlong):
# a = np.array([randbits(64) for i in range(len(key))], dtype = np.uint64)
a = np.frombuffer(os.urandom(len(key) * 8), dtype=np.uint64)
b = gaussian64(dtot64(p),alpha,len(key))
b += PolyMullvl2Long(a,key,twistlong)
return np.array([a,b])
def trlweSymDecrypt(c, key, twist):
return (1 + np.sign(np.int32(c[1] - PolyMul(c[0], key, twist)))) // 2
def trlweSymDecryptlvl2(c, key, twistlong):
return (1 + np.sign(np.int64(c[1] - PolyMullvl2Long(c[0], key, twistlong)))) // 2
def SampleExtractIndex(r, index):
N = len(r[0])
return np.concatenate(
[
[r[0][index - i] for i in range(index + 1)],
[-r[0][N - 1 - i] for i in range(N - index - 1)],
[r[1][index]],
]
)
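# A minimal sketch of SampleExtractIndex: it rebuilds the TLWE sample for a
# given coefficient, negating the wrapped-around coefficients (negacyclic ring).
if __name__ == '__main__':
    demo_r = np.array([[1, 2, 3, 4], [10, 20, 30, 40]], dtype=np.int64)
    print(SampleExtractIndex(demo_r, 1))  # [ 2  1 -4 -3 20]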
|
import torch
from torch import nn
from torch.utils.data import Dataset
class DataSet(Dataset):
def __init__(self, data):
self.data = data
def __len__(self):
return self.data.shape[0]
def __getitem__(self, index):
return self.data[index, :]
class Loss(nn.Module):
def __init__(self, wf, method='variance'):
super(Loss, self).__init__()
self.wf = wf
self.method = method
def forward(self, pos):
eloc = self.wf.local_energy(pos)
if self.method == 'variance':
loss = torch.var(eloc)
elif self.method == 'energy':
loss = torch.mean(eloc)
elif self.method == 'energy-manual':
loss = torch.mean(eloc)
psi = self.wf(pos)
norm = 1./len(psi)
# evaluate the prefactor of the grads
weight = eloc.clone()
weight -= loss
weight /= psi
weight *= 2.
weight *= norm
# compute the gradients manually; note that `self.opt` (the optimizer)
# must be attached to this Loss instance by the training loop beforehand
self.opt.zero_grad()
psi.backward(weight)
else:
raise ValueError('method must be variance, energy or energy-manual')
return loss, eloc
class OrthoReg(nn.Module):
'''Add a penalty encouraging weight matrices to be orthogonal.'''
def __init__(self, alpha=0.1):
super(OrthoReg, self).__init__()
self.alpha = alpha
def forward(self, W):
'''Return the penalty: alpha * |W W^T - I|.'''
return self.alpha * torch.norm(W.mm(W.transpose(0, 1)) - torch.eye(W.shape[0]))
class UnitNormClipper(object):
def __call__(self, module):
if hasattr(module, 'weight'):
w = module.weight.data
w.div_(torch.norm(w).expand_as(w))
class ZeroOneClipper(object):
def __call__(self, module):
if hasattr(module, 'weight'):
w = module.weight.data
w.sub_(torch.min(w)).div_(torch.norm(w).expand_as(w))
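# A minimal usage sketch: clippers are applied through nn.Module.apply,
# typically after each optimizer step; the Linear layer here is a stand-in.
if __name__ == '__main__':
    net = nn.Linear(4, 4)
    net.apply(ZeroOneClipper())   # rescale weights into a normalised range in-place
    reg = OrthoReg(alpha=0.1)
    print(reg(net.weight))        # penalty alpha * |W W^T - I|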
|
from django.contrib.auth.models import User
import factory
import factory.django
from rgd import models
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
username = factory.Sequence(lambda n: f'user_{n}')
email = factory.Faker('safe_email')
first_name = factory.Faker('first_name')
last_name = factory.Faker('last_name')
class ChecksumFileFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.ChecksumFile
file = factory.django.FileField(filename='sample.dat')
# If we have an on_commit or post_save method that modifies the model, we
# need to refresh it afterwards.
@classmethod
def _after_postgeneration(cls, instance, *args, **kwargs):
super()._after_postgeneration(instance, *args, **kwargs)
instance.refresh_from_db()
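# A minimal usage sketch, assuming it runs inside a Django test case where a
# test database is available:
#
#     user = UserFactory()        # creates and saves user_0
#     f = ChecksumFileFactory()   # FileField is populated with 'sample.dat'
#     assert f.pk is not None     # instance was refreshed after post-generation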
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='cwa',
version='1.0',
description='Python Implementation of the CoronaWarnApp (CWA) Event Registration',
author='Greg Ward',
author_email='github@mazdermind.de',
url='https://github.com/MaZderMind/cwa-qr',
packages=find_packages(),
install_requires=[
"Pillow==8.2.0",
"protobuf==3.15.8",
"qrcode==6.1",
"six==1.15.0",
],
zip_safe=True,
)
|
import logging
import re
from django.apps import apps
from django.utils import timezone
import habanero
class Crossref:
def __init__(self, id=None, query=None):
# TODO: Handle query case
self.cr = habanero.Crossref(mailto='dev@quantfive.org')
self.data = None
self.data_message = None
self.abstract = None
self.id = id
self.reference_count = None
self.referenced_by_count = None
self.referenced_by = []
self.references = []
self.title = None
if self.id is not None:
self.handle_id()
def handle_id(self):
try:
self.data = self.cr.works(ids=[self.id])
self.data_message = self.data['message']
except Exception as e:
self.data_message = None
logging.warning(e)
else:
self.abstract = self.data_message.get('abstract', None)
# Remove any JATS XML tags
if self.abstract is not None:
self.abstract = re.sub(r'<[^<]+>', '', self.abstract)
self.doi = self.data_message.get('DOI', None)
self.arxiv_id = self.data_message.get('arxiv', None)
self.paper_publish_date = get_crossref_issued_date(
self.data_message
)
self.publication_type = self.data_message.get('type', None)
self.reference_count = self.data_message.get(
'reference-count',
None
)
self.referenced_by_count = self.data_message.get(
'is-referenced-by-count',
None
)
if self.reference_count and self.reference_count > 0:
try:
self.references = self.data_message.get('reference', [])
except Exception as e:
logging.warning(
f'Reference count > 0 but found error: {e}'
)
if self.referenced_by_count and self.referenced_by_count > 0:
try:
relation = self.data_message.get('relation', None)
if relation is not None:
self.referenced_by = relation.get('cites', [])
except Exception as e:
logging.warning(
f'Referenced by count > 0 but found error: {e}'
)
self.title = None
title = self.data_message.get('title', [None])
if (type(title) is list):
if (len(title) > 0):
self.title = title[0]
elif type(title) is str and (title != ''):
self.title = title
if self.title is None:
logging.warning('Crossref did not find title')
self.url = self.data_message.get('URL', None)
def create_paper(self, is_public=False):
Paper = apps.get_model('paper.Paper')
if self.data_message is not None:
if self.publication_type == 'journal-article':
if self.id is not None:
paper = Paper.objects.create(
title=self.title,
paper_title=self.title,
doi=self.doi,
alternate_ids={'arxiv': self.arxiv_id},
url=self.url,
paper_publish_date=self.paper_publish_date,
publication_type=self.publication_type,
external_source='crossref',
retrieved_from_external_source=True,
is_public=is_public
)
return paper
return None
def get_crossref_issued_date(item):
parts = item['issued']['date-parts'][0]
day = 1
month = 1
year = None
if len(parts) > 2:
day = parts[2] or day
if len(parts) > 1:
month = parts[1] or month
if len(parts) > 0:
year = parts[0]
if year is not None:
return timezone.datetime(year, month, day)
else:
return None
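# A minimal sketch of get_crossref_issued_date: missing month/day parts
# default to 1.
if __name__ == '__main__':
    demo_item = {'issued': {'date-parts': [[2019, 6]]}}
    print(get_crossref_issued_date(demo_item))  # 2019-06-01 00:00:00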
|
from unittest import TestCase
from parameterized import parameterized
from design_patterns.proxy.protection import Driver, Car, CarProxy
class TestProtectionProxy(TestCase):
@parameterized.expand(
[
(Driver("James", 20), "Car is being driven by James"),
(Driver("Alex", 26), "Car is being driven by Alex"),
]
)
def test_drive_when_called_return_driver_name(self, driver, expected):
car = Car(driver)
actual = car.drive()
self.assertEqual(expected, actual)
@parameterized.expand(
[
(Driver("James", 20), "Car is being driven by James"),
(Driver("Sam", 15), "Age under 16 can not drive, Sam is 15 years old"),
]
)
def test_drive_regulate_with_age(self, driver, expected):
car = CarProxy(driver)
actual = car.drive()
self.assertEqual(expected, actual)
|
import os
import sys
import shutil
import pkgutil
from traceback import print_tb
from os import path
from yadocgen.rendering import *
from anytree import Node, Resolver, RenderTree, PreOrderIter, ContRoundStyle
def walk_packages_error(name):
"""Error callback for pkgutil.walk_packages."""
extype, value, traceback = sys.exc_info()
print(f"{extype} importing module {name}")
print_tb(traceback)
sys.exit(-1)
def find_modules(where="."):
"""Returns a tree representing the source tree in a given directory"""
rsv = Resolver("name")
root = Node("src", module=None)
for pkginfo in pkgutil.walk_packages([where], onerror=walk_packages_error):
path = pkginfo.name.split(".")
if len(path) == 1:
parent = root
else:
# !! this assumes that walk_packages does a pre-order walk ... it seems to do
parent = rsv.get(root, "/".join([path[i] for i in range(len(path) - 1)]))
node = Node(pkginfo.name.split(".")[-1], parent=parent, module=pkginfo)
return root
def generate_documentation(work_dir, purge, config):
"""Generates the documentation files given a source tree."""
sphinx_dir = path.join(work_dir, config.output_dir)
sphinx_source_dir = path.join(sphinx_dir, "source")
pages = []
bibfiles = []
# scan contents from doc directory
if config.doc_dir is not None:
doc_dir = path.join(work_dir, config.doc_dir)
print(f"\nDocumentation pages (from {doc_dir}):\n")
for f in os.listdir(doc_dir):
if os.path.isfile(os.path.join(doc_dir, f)):
if f.lower().endswith(".md"):
# add a documentation page
pages.append(DocPage(config.doc_dir, f))
print(f" - {f}")
elif f.lower().endswith(".bib"):
# add a documentation page
bibfiles.append(f)
# find packages in source path
if config.src_dir is not None:
src_dir = path.join(work_dir, config.src_dir)
sys.path.insert(
0, src_dir
) # add path to sys.path so that pkgtools can load package definitions
src_pages = find_modules(where=src_dir)
print(f"\nSource tree (from: {src_dir}):\n")
for row in RenderTree(src_pages, style=ContRoundStyle()):
print(f" {row.pre}{row.node.name}")
for node in PreOrderIter(src_pages):
if node is not src_pages: # src_pages is root element so we skip it
# add a source code documentation page
pages.append(SourcePage(node))
# add the API index page
api_index = APIPage([p for p in pages if type(p) is SourcePage])
pages.append(api_index)
# add the welcome page
pages.append(
WelcomePage(
config.sphinx_config.welcome,
[p for p in pages if type(p) is DocPage],
api_index,
)
)
# purge Sphinx source directory but keep Sphinx config
if purge:
print("\nPurging Sphinx source directory...", end="")
for f in os.listdir(sphinx_source_dir):
full_path = os.path.join(sphinx_source_dir, f)
if os.path.isfile(full_path) and f != "conf.py":
os.remove(full_path)
print("done")
# generate Sphinx files
print("\nGenerating files:\n")
for page in pages:
print(f" - {page.output_filename()}")
with open(path.join(sphinx_source_dir, page.output_filename()), "w") as f:
f.write(page.render())
# copy BibTeX files
if len(bibfiles) > 0:
print("\nCopying BibTeX files:\n")
for f in bibfiles:
print(f" - {f}")
shutil.copyfile(
os.path.abspath(os.path.join(doc_dir, f)),
os.path.abspath(os.path.join(sphinx_source_dir, f)),
)
|
import LevelBuilder
from sprites import *
def render(name,bg):
lb = LevelBuilder.LevelBuilder(name+".plist",background=bg)
lb.addObject(Bullet.BulletSprite(x=0, y=0,width=10,height=10,angle='0',restitution=0.5,static='false',friction=0.5,density=3,spawnEvent='onShoot'))
lb.addObject(Hero.HeroSprite(x=42, y=52,width=42,height=74))
lb.addObject(Teleporter.TeleporterSprite( level_id='leveldata/level_6_2'))
lb.addObject(Beam.BeamSprite(x=1378, y=45,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1473, y=45,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1166, y=45,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1261, y=45,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=950, y=45,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1045, y=45,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=740, y=45,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=835, y=45,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1482, y=161,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=747, y=161,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1270, y=161,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1365, y=161,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1054, y=161,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1149, y=161,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=844, y=161,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=939, y=161,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1378, y=282,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1473, y=282,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1166, y=282,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1271, y=282,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=930, y=282,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=1045, y=282,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=740, y=282,width=30,height=89,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Beam.BeamSprite(x=835, y=282,width=89,height=32,angle='90' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=853, y=102,width=292,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1145, y=102,width=292,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1415, y=102,width=254,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=835, y=223,width=292,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1377, y=223,width=292,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1104, y=223,width=254,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Pickup.PickupSprite(x=1211,y=17,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=1321,y=133,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=884,y=259,width=32, height=32, static='false',angle=0))
lb.addObject(Enemy.EnemySprite( x=1322, y=257,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Enemy.EnemySprite( x=796, y=132,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Enemy.EnemySprite( x=888, y=18,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Enemy.EnemySprite( x=997, y=252,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=5 ))
lb.addObject(Crate.CrateSprite(x=1102,y=17,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=1102,y=136,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=1102,y=258,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=786,y=18,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=1421,y=136,width=32, height=32, static='false',angle=0))
lb.addObject(BulletTimePickup.BulletTimePickupSprite(x=1322,y=19,width=32, height=32, static='false',angle=0))
lb.addObject(BulletTimePickup.BulletTimePickupSprite(x=1215,y=257,width=32, height=32, static='false',angle=0))
lb.addObject(BulletTimePickup.BulletTimePickupSprite(x=994,y=132,width=32, height=32, static='false',angle=0))
lb.addObject(Enemy.EnemySprite(x=1424, y=32,width=61,height=61,angle='0',restitution=0.2,static='false',friction=0.5,density=5 , classname='LoveAlienSprite',firstframe='lovable_alien.png'))
lb.addObject(Enemy.EnemySprite(x=1424, y=271,width=61,height=61,angle='0',restitution=0.2,static='false',friction=0.5,density=5 , classname='LoveAlienSprite',firstframe='lovable_alien.png'))
lb.addObject(Enemy.EnemySprite(x=996, y=25,width=48,height=48,angle='0',restitution=0.2,static='false',friction=0.5,density=5 , classname='BlobSprite',firstframe='monsterblob.png'))
lb.addObject(Enemy.EnemySprite(x=786, y=267,width=48,height=48,angle='0',restitution=0.2,static='false',friction=0.5,density=5 , classname='BlobSprite',firstframe='monsterblob.png'))
lb.addObject(Beam.BeamSprite(x=1111, y=340,width=292,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=1402, y=340,width=292,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(Beam.BeamSprite(x=836, y=340,width=254,height=25,angle='0' ,restitution=0.2,static='false',friction=0.5,density=5 ,classname='Destructable'))
lb.addObject(ZoomTrigger.ZoomTriggerSprite(x=128,y=250,width=100,height=500,zoom_fact=1.0))
lb.addObject(ZoomTrigger.ZoomTriggerSprite(x=293,y=320-60,width=128,height=100,zoom_fact=0.1666))
lb.addObject(ZoomTrigger.ZoomTriggerSprite(x=458,y=250,width=100,height=500,zoom_fact=1.0))
lb.addObject(WatchtowerVisual.WatchtowerVisualSprite(x=293, y=92,width=128,height=235-50,angle='0',restitution=0.2,static='true',friction=0.5,density=20,firstframe='watchtower.png' ))
lb.addObject(Crate.CrateSprite(x=199,y=18,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=150,y=18,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=199,y=52,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=150,y=52,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=898,y=136,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=199,y=86,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=150,y=86,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=199,y=120,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=150,y=120,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=104,y=17,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=104,y=53,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=104,y=85,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=104,y=118,width=32, height=32, static='false',angle=0))
lb.addObject(Pickup.PickupSprite(x=1211,y=137,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=2844,y=190,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=2844,y=224,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=2844,y=258,width=32, height=32, static='false',angle=0))
lb.addObject(Crate.CrateSprite(x=2844,y=292,width=32, height=32, static='false',angle=0))
lb.addObject(Beam.BeamSprite(x=2861, y=150,width=80,height=48,angle='0' ,restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
lb.render()
|
#!/usr/bin/env python3
import boto3
def main():
dynamo = boto3.client('dynamodb')
result = dynamo.scan(TableName='jtate-dynamo-1')
items = result['Items']
for item in items:
if 'key' in item and 'value' in item:
print(item['key']['S'] + ':', item['value']['S'])
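# A paginated variant sketch: a single scan() call returns at most 1 MB of
# data, so larger tables should be read through boto3's scan paginator.
def scan_all(table_name='jtate-dynamo-1'):
    dynamo = boto3.client('dynamodb')
    for page in dynamo.get_paginator('scan').paginate(TableName=table_name):
        for item in page['Items']:
            yield item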
if __name__ == '__main__':
main()
|
import unittest
from binary_search import BinarySearch
class ListComprehensionTest(unittest.TestCase):
"""Binary Search to traverse an ordered list, effectively
Populate the arrays with valid content
"""
def setUp(self):
self.one_to_twenty = BinarySearch(20, 1)
self.two_to_forty = BinarySearch(20, 2)
self.ten_to_thousand = BinarySearch(100, 10)
def test_small_list(self):
self.assertListEqual(
[1, 20, 20],
[
self.one_to_twenty[0],
self.one_to_twenty[19],
self.one_to_twenty.length
],
msg='should create an array from 1 to 20, with intervals of 1'
)
for index, number in enumerate(self.one_to_twenty):
if index < self.one_to_twenty.length - 1:
self.assertEqual(
1,
self.one_to_twenty[index + 1] - self.one_to_twenty[index],
msg='should return 1 for consecutive numbers'
)
def test_medium_list(self):
self.assertListEqual(
[2, 40, 20],
[
self.two_to_forty[0],
self.two_to_forty[19],
self.two_to_forty.length
],
msg='should create an array from 2 to 40, with intervals of 2'
)
for index, number in enumerate(self.two_to_forty):
if index < self.two_to_forty.length - 1:
self.assertEqual(
2,
self.two_to_forty[index + 1] - self.two_to_forty[index],
msg='should return 2 for consecutive numbers')
def test_large_list(self):
self.assertListEqual(
[10, 1000, 100],
[
self.ten_to_thousand[0],
self.ten_to_thousand[99],
self.ten_to_thousand.length
],
msg='should create an array from 10 to 1000, with intervals of 10'
)
for index, number in enumerate(self.ten_to_thousand):
if index < self.ten_to_thousand.length - 1:
self.assertEqual(
10,
self.ten_to_thousand[index + 1] -
self.ten_to_thousand[index],
msg='should return 10 for consecutive numbers'
)
class BinarySearchTest(unittest.TestCase):
"""Get the index of the item with an expected number of loops in\
array [1, 2 . . . 20]
Returns a dictionary containing {count: value, index: value}
"""
def setUp(self):
self.one_to_twenty = BinarySearch(20, 1)
self.two_to_forty = BinarySearch(20, 2)
self.ten_to_thousand = BinarySearch(100, 10)
def test_small_list_search(self):
search = self.one_to_twenty.search(16)
self.assertGreater(
5,
search['count'],
msg='should return {count: 4, index: 15} for 16'
)
self.assertEqual(
15,
search['index'],
msg='should return {count: 4, index: 15} for 16'
)
def test_medium_list_search(self):
search1 = self.two_to_forty.search(16)
search2 = self.two_to_forty.search(40)
search3 = self.two_to_forty.search(33)
self.assertGreater(
5,
search1['count'],
msg='should return {count: 4, index: 7} for 16'
)
self.assertEqual(
7,
search1['index'],
msg='should return {count: 4, index: 7} for 16'
)
self.assertEqual(
0,
search2['count'],
msg='should return {count: 0, index: 19} for 40'
)
self.assertEqual(
19,
search2['index'],
msg='should return {count: 5, index: 19} for 40'
)
self.assertGreater(
4,
search3['count'],
msg='should return {count: 3, index: -1} for 33'
)
self.assertEqual(
-1,
search3['index'],
msg='should return {count: 3, index: -1} for 33'
)
def test_large_list_search(self):
search1 = self.ten_to_thousand.search(40)
search2 = self.ten_to_thousand.search(880)
search3 = self.ten_to_thousand.search(10000)
self.assertGreater(
7,
search1['count'],
msg='should return {count: # <= 7, index: 3} for 40'
)
self.assertEqual(
3,
search1['index'],
msg='should return {count: # <= 7, index: 3} for 40'
)
self.assertGreater(
4,
search2['count'],
msg='should return {count: # <= 3, index: 87} for 880'
)
self.assertEqual(
87,
search2['index'],
msg='should return {count: # <= 3, index: 87} for 880'
)
self.assertGreater(
7,
search3['count'],
msg='should return {count: 3, index: -1} for 10000'
)
self.assertEqual(
-1,
search3['index'],
msg='should return {count: 3, index: -1} for 10000'
)
if __name__ == '__main__':
unittest.main()
|
from os import path
from unittest.mock import patch
from biocommons.seqrepo.dataproxy import SeqRepoRESTDataProxy
from biocommons.seqrepo.seqrepo import SeqRepo
from bioutils import seqfetcher
from hgvs.dataproviders.seqfetcher import SeqFetcher
@patch.object(path, "exists")
def test_seqfetcher_initialized_with_seqrepo_dir(mock_path_exists, monkeypatch):
mock_path_exists.return_value = True
monkeypatch.setenv("HGVS_SEQREPO_DIR", "/tmp/my-seqrepo-location")
with patch("sqlite3.connect") as mock_sqlite_connect:
sf = SeqFetcher()
mock_path_exists.assert_called_once_with("/tmp/my-seqrepo-location")
assert mock_sqlite_connect.call_args[0][0] == "/tmp/my-seqrepo-location/aliases.sqlite3"
assert isinstance(sf.sr, SeqRepo)
assert sf.source == "SeqRepo (/tmp/my-seqrepo-location)"
def test_seqfetcher_initialized_with_seqrepo_url(monkeypatch):
monkeypatch.setenv("HGVS_SEQREPO_URL", "http://localhost:5000/seqrepo")
sf = SeqFetcher()
assert isinstance(sf.sr, SeqRepoRESTDataProxy)
# the URL should be versioned automatically for us
assert sf.sr.base_url == "http://localhost:5000/seqrepo/1/"
assert sf.source == "SeqRepo REST (http://localhost:5000/seqrepo)"
def test_seqfetcher_initialized_with_public_seqrepo_sources():
sf = SeqFetcher()
assert sf.sr is None
assert sf.fetcher == seqfetcher.fetch_seq
assert sf.source == "bioutils.seqfetcher (network fetching)"
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 13 12:15:43 2020
@author: mhayt
"""
#-------------------------------- API-FOOTBALL --------------------------------
import numpy as np
import pandas as pd
import pickle
from sklearn import preprocessing
import matplotlib.pyplot as plt
#------------------------------- DATA PROCESSING ------------------------------
def scale_df(df, scale, unscale):
'''
This function will use the preprocessing function in sklearn to rescale each feature to have zero mean and unit variance (mean=0, variance=1). Output is a DataFrame instead of a NumPy array
Parameters
----------
df : pandas DataFrame
df to manipulate/scale
scale : list
list of column indices to process/scale
unscale : list
list of column indices to remain the same
Returns
-------
Scaled df.
'''
    scaled_np = preprocessing.scale(df)
    scaled_df = pd.DataFrame(scaled_np, columns=df.columns)
df1 = scaled_df.iloc[:, scale]
df2 = df.iloc[:, unscale]
final_df = pd.concat([df1, df2], axis=1, sort=False)
return final_df
def scree_plot(pca_percentages, y_max=40):
    '''
    Input principal component percentages list and returns scree plot
    Parameters
    ----------
    pca_percentages : list
        principal component percentage variation.
    y_max : int, optional
        upper limit of the y-axis (default 40).
    Returns
    -------
    fig : fig
        bar plot.
    '''
#setting up variables
n_components = len(pca_percentages)
#instantiating figure
fig, ax = plt.subplots()
#plot bar component
ax.bar(list(range(1, n_components+1, 1)), pca_percentages, color='paleturquoise', edgecolor='darkturquoise', zorder=0)
#annotating with percentages
for p in ax.patches:
ax.annotate(f'{round(p.get_height(), 1)}%', (p.get_x() + 0.5, p.get_height() + 0.5))
    #plot line and points of each principal component
ax.plot(list(range(1, n_components+1, 1)), pca_percentages, c='firebrick', zorder=1)
ax.scatter(list(range(1, n_components+1, 1)), pca_percentages, c='firebrick', zorder=2)
#Plotting details
fig.suptitle('PCA Scree Plot', y=0.96, fontsize=16, fontweight='bold');
    ax.set(xlabel='Principal Components',
           ylabel='Percentage Variation');
ax.set_ylim([0,y_max])
return fig
|
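A minimal usage sketch for the two helpers above, with invented column indices and percentages:

df = pd.DataFrame({'a': [1.0, 2.0, 3.0], 'b': [4.0, 5.0, 6.0], 'c': [0, 1, 0]})
scaled = scale_df(df, scale=[0, 1], unscale=[2])  # standardize 'a' and 'b', keep 'c' untouched
fig = scree_plot([45.2, 25.1, 12.3, 8.0, 5.4, 4.0], y_max=50)
fig.savefig('scree_plot.png')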
import xml.etree.ElementTree as ET
class Remediation(object):
def __init__(self, remediationId, text):
self.remediationId = remediationId
self.text = text
def getId(self):
return self.remediationId
def getText(self):
return self.text
def toElement(self, parent=None):
element = None
if parent is None:
element = ET.Element("remediation", attrib={"id":self.remediationId})
else:
element = ET.SubElement(parent, "remediation", attrib={"id":self.remediationId})
element.text = self.text
return element
|
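A short usage sketch, serializing one remediation under an invented root element:

rem = Remediation("R-001", "Apply the vendor patch.")
root = ET.Element("remediations")
rem.toElement(parent=root)
print(ET.tostring(root, encoding="unicode"))
# <remediations><remediation id="R-001">Apply the vendor patch.</remediation></remediations>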
import pytest
from cuXfilter.charts.core.core_chart import BaseChart
class TestBaseChart():
def test_variables(self):
bc = BaseChart()
assert bc.chart_type == None
assert bc.x == None
assert bc.y == None
assert bc.aggregate_fn == 'count'
assert bc.color == None
assert bc.height == 0
assert bc.width == 0
assert bc.add_interaction == True
assert bc.chart == None
assert bc.source == None
assert bc.source_backup == None
assert bc.data_points == 0
assert bc._library_specific_params == {}
assert bc._stride == None
assert bc.stride_type == int
assert bc.min_value == 0.0
assert bc.max_value == 0.0
assert bc.x_label_map == {}
assert bc.y_label_map == {}
bc.x = 'test_x'
bc.chart_type = 'test_chart_type'
assert bc.name == 'test_x_test_chart_type'
@pytest.mark.parametrize("stride, _stride", [(1,1),(None, None), (0,1)])
def test_stride(self, stride, _stride):
bc = BaseChart()
bc.stride = stride
assert bc._stride == _stride
def test_label_mappers(self):
bc = BaseChart()
library_specific_params = {
'x_label_map': {'a':1, 'b':2},
'y_label_map':{'a':1, 'b':2}
}
bc.library_specific_params = library_specific_params
assert bc.x_label_map == {'a':1, 'b':2}
assert bc.y_label_map == {'a':1, 'b':2}
@pytest.mark.parametrize('chart, _chart',[(None, None),(1,1)])
def test_view(self, chart, _chart):
bc = BaseChart()
bc.chart = chart
assert bc.view() == _chart |
# Generated by Django 2.2.2 on 2019-06-30 02:48
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('ontask', '0002_auto_20190524_1639'),
('ontask', '0002_auto_20190521_1710'),
('ontask', '0005_auto_20190430_1922'),
('ontask', '0021_auto_20190609_1325'),
('ontask', '0004_plugin_is_enabled'),
('ontask', '0030_workflow_star'),
]
operations = [
migrations.CreateModel(
name='OAuthUserToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('instance_name', models.CharField(max_length=2048)),
('access_token', models.CharField(max_length=2048)),
('refresh_token', models.CharField(blank=True, max_length=2048)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('valid_until', models.DateTimeField(default=None, verbose_name='Token valid until')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('user', 'instance_name')},
},
),
]
|
'''
AUTHORS:
NORSTRÖM, ARVID 19940206-3193,
HISELIUS, LEO 19940221-4192
'''
from collections import namedtuple
import numpy as np
import gym
import torch
import matplotlib.pyplot as plt
from tqdm import trange
from DQN_agent import RandomAgent, Agent
from DQN_agent import ExperienceReplayBuffer
import torch.optim as optim
import torch.nn as nn
from PPO_problem import LunarLander
best_ll = LunarLander(0.99, 3000, 300, [8,64,64,4], [8,64,64,4])
best_ll.Actor = torch.load('models/best_actor.pt')
episodes = np.arange(50)
env = gym.make('LunarLanderContinuous-v2')
episode_reward_list = []
for i in episodes:
done = False
state = env.reset()
total_episode_reward = 0.
t = 0
    while not done:
        # Sample an action from the learned Gaussian policy: the actor outputs
        # means and variances for the two continuous controls
        state_tensor = torch.tensor([state], device=best_ll.dev,
                                    requires_grad=False,
                                    dtype=torch.float32)
        with torch.no_grad():
            out = best_ll.Actor(state_tensor).detach().cpu().numpy().reshape(2, 2)
            mean, cov = out[0].reshape(2, 1), np.diag(out[1])
            action = np.random.multivariate_normal(mean.reshape(2,), cov).reshape(2, 1)
            # action = np.random.uniform(-1, 1, 2)  # random baseline, kept for reference
        next_state, reward, done, _ = env.step(action.reshape(2,))
total_episode_reward += reward
# Update state for next iteration
state = next_state
t += 1
# Append episode reward and total number of steps
episode_reward_list.append(total_episode_reward)
# Close environment
env.close()
np.save("p3_best_agent", episode_reward_list) |
import os
import re
import traceback
from datetime import datetime
from api import app, utils
from common.config import globals
from common.utils import converters
class FarmSummary:
def __init__(self, cli_stdout, blockchain):
self.plot_count = 0
self.plots_size = 0
for line in cli_stdout:
if "Plot count for all" in line:
self.plot_count = line.strip().split(':')[1].strip()
elif "Total size of plots" in line:
self.plots_size = line.strip().split(':')[1].strip()
elif "status" in line:
self.calc_status(line.split(':')[1].strip())
elif re.match("Total.*farmed:.*$", line):
self.total_coins = line.split(':')[1].strip()
elif "Estimated network space" in line:
self.calc_netspace_size(line.split(':')[1].strip())
elif "Expected time to win" in line:
self.time_to_win = line.split(':')[1].strip()
elif "User transaction fees" in line:
self.transaction_fees = line.split(':')[1].strip()
def calc_status(self, status):
self.status = status
if self.status == "Farming":
self.display_status = "Active"
else:
self.display_status = self.status
def calc_netspace_size(self, netspace_size):
self.netspace_size = netspace_size
try:
size_value, size_unit = netspace_size.split(' ')
if float(size_value) > 1000 and size_unit == 'PiB':
self.display_netspace_size = "{:0.3f} EiB".format(float(size_value) / 1000)
else:
self.display_netspace_size = self.netspace_size
        except Exception:
app.logger.info("Unable to split network size value: {0}".format(netspace_size))
self.display_netspace_size = self.netspace_size
class HarvesterSummary:
def __init__(self):
self.status = "Harvesting" # TODO Check for harvester status in debug.log
class Wallet:
def __init__(self, cli_stdout):
self.text = ""
lines = cli_stdout.split('\n')
for line in lines:
#app.logger.info("WALLET LINE: {0}".format(line))
if "No online" in line or \
"skip restore from backup" in line or \
"own backup file" in line or \
"SIGWINCH" in line:
continue
self.text += line + '\n'
class Keys:
def __init__(self, cli_stdout):
self.text = ""
for line in cli_stdout:
self.text += line + '\n'
class Blockchain:
def __init__(self, cli_stdout):
self.text = ""
for line in cli_stdout:
self.text += line + '\n'
class Connections:
def __init__(self, cli_stdout):
self.text = ""
for line in cli_stdout:
self.text += line + '\n'
|
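A quick sketch of feeding FarmSummary a handful of lines shaped like `chia farm summary` output (the sample values are invented):

sample = [
    "Farming status: Farming",
    "Plot count for all harvesters: 120",
    "Total size of plots: 11.881 TiB",
    "Estimated network space: 1234.567 PiB",
    "Expected time to win: 2 weeks",
]
summary = FarmSummary(sample, "chia")
print(summary.plot_count, summary.display_status, summary.display_netspace_size)
# 120 Active 1.235 EiB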
import requests
endpoint = 'https://swapi.py4e.com/api'
response = requests.get(f"{endpoint}/people/", {'search': 'Finn'}).json()
finn = response['results'][0]
print(f"\nFinn = {finn}") |
import discord
import sqlite3
from discord.ext import commands
conn= sqlite3.connect("dbs/main.db")
class Commands(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.cooldown(1, 30, commands.BucketType.guild)
@commands.has_permissions(manage_channels=True)
    async def setchannel(self, ctx, *, cbchannel: discord.TextChannel = None):
        if cbchannel is None:
            await ctx.send(":warning: You have to mention the channel that you want as the channel in which users will talk to me. Example: `!!setchannel #channel-name`")
            return
        try:
            cur = conn.cursor()
            guildID = str(ctx.guild.id)
            # Parameterized query instead of string concatenation (avoids SQL injection)
            row = cur.execute("SELECT channel_id FROM main WHERE guild_id = ?", (guildID,)).fetchone()
            if row is not None:
                await ctx.send(f":warning: The channel is already setup to <#{row[0]}>. Use `!!settings channel` to change it.")
            else:
                channelID = str(cbchannel.id)
                cur.execute("INSERT INTO main(guild_id, channel_id, toggle) VALUES(?, ?, '1')", (guildID, channelID))
                conn.commit()
                await ctx.send(f":tada: Start talking to me in {cbchannel.mention}!")
        except discord.NotFound:
            await ctx.send(":warning: I can't find that channel. Make sure I can access it and that the channel is valid.")
            return
        except discord.MissingPermissions:
            await ctx.send(":warning: I can't send messages in that channel.")
            return
@commands.group(invoke_without_command=True)
async def settings(self, ctx):
em= discord.Embed(title="Discord Chat Bot Settings", description="Welcome to Discord Chat Bot Settings! Here are the list of commands you can use to setup the bot. If this is your first time with this bot, Use the `!!setchannel` command first. **Arguments enclosed in `<>` are required!**")
em.add_field(name="`!!settings channel <channel_mention>`", value="Updates the chatting channel.")
em.add_field(name="`!!settings toggle <toggle>`", value="Toggles the bot chat on or off. This doesn't disable commands.")
await ctx.send(embed=em)
@settings.command()
@commands.has_permissions(manage_channels=True)
@commands.cooldown(1, 30, commands.BucketType.guild)
    async def channel(self, ctx, *, cbchannel: discord.TextChannel = None):
        cur = conn.cursor()
        guildID = str(ctx.guild.id)
        row = cur.execute("SELECT channel_id FROM main WHERE guild_id = ?", (guildID,)).fetchone()
        if cbchannel is None:
            if row is not None:
                await ctx.send(f"I'm currently waiting for messages in <#{row[0]}>. Run `!!settings channel #channel-mention` to change this.")
            else:
                await ctx.send("Channel is not even setup yet! Use `!!setchannel` to set a channel.")
        else:
            if row is None:
                await ctx.send("Channel is not even setup yet! Use `!!setchannel` to set a channel.")
            else:
                channelID = str(cbchannel.id)
                cur.execute("UPDATE main SET channel_id = ? WHERE guild_id = ?", (channelID, guildID))
                conn.commit()
                await ctx.send(f":tada: Channel has been updated to {cbchannel.mention}!")
@settings.command()
@commands.has_permissions(manage_channels=True)
@commands.cooldown(1, 30, commands.BucketType.guild)
    async def toggle(self, ctx, *, toggle = None):
        if toggle is None:
            await ctx.send(":warning: Use the command again but mention the toggle, i.e. `on` or `off`. For example: `!!settings toggle on` to toggle on, `!!settings toggle off` to toggle off.")
            return
        if toggle.lower() == "on":
            toggle = '1'
        elif toggle.lower() == 'off':
            toggle = '0'
        else:
            await ctx.send(":warning: Use the command again but mention the toggle correctly, i.e. `on` or `off`. For example: `!!settings toggle on` to toggle on, `!!settings toggle off` to toggle off.")
            return
        guildID = str(ctx.guild.id)
        cur = conn.cursor()
        row = cur.execute("SELECT toggle FROM main WHERE guild_id = ?", (guildID,)).fetchone()
        if row is None:
            await ctx.send("Channel is not setup yet! Use `!!setchannel` to set a channel.")
        else:
            cur.execute("UPDATE main SET toggle = ? WHERE guild_id = ?", (toggle, guildID))
            conn.commit()
            await ctx.send(":tada: Toggle updated!")
def setup(bot):
bot.add_cog(Commands(bot))
|
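The queries above all target a single `main` table with three columns. A plausible bootstrap for it (the column types are assumptions, not taken from the source):

def setup_db():
    cur = conn.cursor()
    cur.execute("CREATE TABLE IF NOT EXISTS main (guild_id TEXT PRIMARY KEY, channel_id TEXT, toggle TEXT)")
    conn.commit()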
from typing import List
def linear_search(arr: List[int], target: int) -> int:
    """Return the index of the first occurrence of target in arr, or -1 if absent."""
    for idx, val in enumerate(arr):
        if val == target:
            return idx
    return -1
|
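Two quick checks of the expected behaviour:

assert linear_search([4, 2, 7, 1], 7) == 2
assert linear_search([4, 2, 7, 1], 9) == -1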
# c0ded by zer0_p1k4chu
'''
This script is the Client module of the NSRL_Server_API project.
'''
try:
import traceback
import json
import sqlite3
import sqlalchemy as sa
import pandas as pd
import sys
from os import path
import requests
import argparse
from termcolor import colored,cprint
import colorama
colorama.init()
    parser = argparse.ArgumentParser(description='Client Module arguments')
    parser.add_argument('--input', metavar='i', type=str,
                        help='Input CSV File location')
    parser.add_argument('--output', metavar='o', type=str,
                        help='Output File Location')
    parser.add_argument('--url', metavar='u', type=str, default='http://localhost:5000',
                        help='API Server URL')
    args = parser.parse_args()
    if(args.input is None or args.output is None):
        parser.print_help()
        exit()
    # arguments assign
    file_location = args.input  # location of the input file, accepted via arguments
    URL = args.url  # API Server URL, overridable with the --url flag referenced in the connection error below
#Terminal colours
def prRed(string):
cprint(string, 'red')
def prGreen(string):
cprint(string, 'green')
def prCyan(string):
cprint(string, 'cyan')
prGreen("[+] Let's get rolling! Doing some pre-flight checks. [+]")
prGreen("[+] The input file for analysis is "+ file_location)
prGreen("[+] Building a dataframe from the CSV, will usually take a minute [+]")
df = pd.read_csv(file_location)
prGreen("[+] Pre-Processesing done, Now analysing the file. This might take a while depending on number of hashes [+]")
hashie = df.columns.get_loc("MD5")
file_loc = df.columns.get_loc("FileNames")
file_namei = df.columns.get_loc("FileNames")
f = open(args.output,"w")
f.write("MD5,FileName,Path,Found,Priv")
for i in df.values:
hash_value = i[hashie]
file_loca = i[file_loc]
file_name = i[file_namei]
PARAMS = {'md5':hash_value}
r = requests.get(url = URL + "/get_info", params = PARAMS)
data = r.text
data = json.loads(data)
if("Not Found" in data['message'] and ("System32" in file_loca or "Program Files" in file_loca or "Windows" in file_loca )):
prRed("[-] A hash from a Trustworthy file location: "+ file_loca +" is not found in the NSRL_DB:" + hash_value +"[-]")
log = str(hash_value) + ","+ str(file_name) +","+ str(file_loca) + ",Not Found,YES"
elif("Found" in data['message'] and ("System32" in file_loca or "Program Files" in file_loca or "Windows" in file_loca )):
prGreen("[+] Hash found in NSRL_DB: " + hash_value + ".[+]" )
log = str(hash_value) + ","+ str(file_name) +","+ str(file_loca) + ",Found,YES"
elif("Not Found" in data['message']):
prRed("[-] Hash Not found in NSRL_DB: " + hash_value + ". [-]" )
log = str(hash_value) + ","+ str(file_name) +","+ str(file_loca) + ",Not Found, NO"
elif("Found" in data['message']):
prGreen("[+] Hash found in NSRL_DB: " + hash_value + ". [+]" )
log = str(hash_value) + ","+ str(file_name) +","+ str(file_loca) + ",Found, NO"
f.write(log + "\n")
f.close()
except requests.exceptions.ConnectionError:
    prRed("[-] Target Server is actively refusing connection. Is the server UP? Maybe try changing the Server URL with --url parameter [-]")
except KeyError:
    prRed("[-] Looks like the format of the input file is wrong. Make sure these columns are present: MD5, Name, Path [-]")
except FileNotFoundError:
    prRed("[-] File not found, Are you sure that's the right location? [-]")
except ModuleNotFoundError:
    # termcolor may be among the missing modules, so fall back to plain print here
    print("[-] Please install all dependencies using 'pip install -r requirements.txt' [-]")
except Exception as e:
    with open('log.txt', 'a') as f:
        f.write(str(e))
        f.write(traceback.format_exc())
    prRed("[-] Some unknown issue occurred. Please check the log file. [-]")
except KeyboardInterrupt:
    prRed("[-] CTRL+C detected, Gracefully shutting down. Bye .. Bye .. [-]") |
from bottle import abort, get, post, redirect, request, route
from .. import dbengine
from ..app import authenticate, db_object_to_dict
from ..models import Chapter, Character, Passage
from ..view import render
class Passages:
def __init__(self):
pass
def all(self):
"""list all passages for editing"""
username = authenticate()
session = dbengine.connect()
passages = {}
for row in session.query(Passage).order_by(Passage.chapter_id).\
order_by(Passage.passage_order):
passages[row.id] = db_object_to_dict(row)
return render('passages/all.html', {'passages': passages, 'username': username}, 'admin.html')
def create(self):
"""create passage"""
username = authenticate()
session = dbengine.connect()
chapters = {}
for chapter in session.query(Chapter):
chapters[chapter.id] = db_object_to_dict(chapter)
characters = {}
for character in session.query(Character):
characters[character.id] = db_object_to_dict(character)
passage_order = range(1,100)
return render('passages/create.html',
{
'chapters': chapters,
'characters': characters,
'passage_order': passage_order,
'username': username
},
'admin.html'
)
def edit(self, id):
"""edit passage"""
username = authenticate()
session = dbengine.connect()
chapters = {}
for chapter in session.query(Chapter):
chapters[chapter.id] = db_object_to_dict(chapter)
characters = {}
for character in session.query(Character):
characters[character.id] = db_object_to_dict(character)
passage = session.query(Passage).filter(Passage.id==id).first()
passage_order = range(1,100)
return render('passages/edit.html',
{
'chapters': chapters,
'characters': characters,
'passage': passage,
'passage_order': passage_order,
'username': username
},
'admin.html'
)
def save(self):
"""save form content"""
authenticate()
session = dbengine.connect()
id = request.forms.get('id')
body = request.forms.get('body').decode('utf-8')
character_id = request.forms.get('character_id')
chapter_id = request.forms.get('chapter_id')
passage_order = request.forms.get('passage_order')
soliloquy = request.forms.get('soliloquy')
if id:
# update
update = session.query(Passage).get(id)
update.body = body
update.character_id = character_id
update.chapter_id = chapter_id
update.passage_order = passage_order
update.soliloquy = soliloquy
else:
# create
new_passage = Passage(body, character_id, chapter_id, passage_order, soliloquy)
session.add(new_passage)
session.commit()
redirect('/passages/all')
def view(self, id):
"""passage information"""
session = dbengine.connect()
passages = {}
for passage in session.query(Passage).\
filter(Passage.id==id).order_by(Passage.id.desc()):
passages[passage.id] = passage
return render('passages/view.html', {'passages': passages})
go = Passages()
route('/passages/all')(go.all)
route('/passages/create')(go.create)
route('/passages/edit/:id')(go.edit)
post('/passages/save')(go.save)
route('/passages/view/:id')(go.view)
|
print("Format masukkan = JAM:MENIT:DETIK")
waktu_awal = input("Masukkan waktu awal : ").split(":")
waktu_akhir = input("Masukkan waktu akhir : ").split(":")
try:
waktu_awal, waktu_akhir = [int(satuan) for satuan in waktu_awal], [int(satuan) for satuan in waktu_akhir]
if waktu_awal[1] > 60 or waktu_awal[2] > 60 or waktu_akhir[1] > 60 or waktu_akhir[2] > 60:
print("Menit dan detik harus tidak lebih dari 60")
else:
waktu_awal = waktu_awal[0] * 3600 + waktu_awal[1] * 60 + waktu_awal[2]
waktu_akhir = waktu_akhir[0] * 3600 + waktu_akhir[1] * 60 + waktu_akhir[2]
print(waktu_awal)
print(waktu_akhir)
selisih = waktu_akhir - waktu_awal
Jam = int(selisih / 3600)
menit = selisih % 3600
Menit = int(menit / 60)
Detik = menit % 60
print("Selisih waktunya adalah " + str(Jam) + ":" + str(Menit) + ":" + str(Detik))
except:
print("Masukan harus berupa angka!") |
import tvm
from hw_generator.generator import generator, accelerator, parse_params
from hw_generator.intrinsic_lib import conv_intrinsic, gemm_intrinsic
# from flextensor.intrinsic import INTRIN_TABLE
# from inspect import signature
from codesign.config import verbose, bits_map
def conv_interface(f_n, f_c, f_y, f_x, f_k, f_r, f_s,
l_n, l_c, l_y, l_x, l_k, l_r, l_s,
c_n, c_c, c_y, c_x, c_k, c_r, c_s,
d_n, d_c, d_y, d_x, d_k, d_r, d_s, dtype):
"""
l_n, l_c, l_y, l_x, l_k, l_r, l_s: last iteration size
c_n, c_c, c_y, c_x, c_k, c_r, c_s: last iteration conditif_son
"""
_, tensors = conv_intrinsic(f_n, f_c, f_y, f_x, f_k, f_r, f_s, dtype)
tA, tB, tC = tensors
# print(tA.shape, tB.shape, tC.shape)
strideA1 = tvm.var("strideA1")
strideA2 = tvm.var("strideA2")
strideA3 = tvm.var("strideA3")
sA = tvm.decl_buffer(tA.shape, tA.dtype,
name="sA",
offset_factor=1,
strides=[strideA1, strideA2, strideA3, 1])
strideB1 = tvm.var("strideB1")
strideB2 = tvm.var("strideB2")
strideB3 = tvm.var("strideB3")
sB = tvm.decl_buffer(tB.shape, tB.dtype,
name="sB",
offset_factor=1,
strides=[strideB1, strideB2, strideB3, 1])
strideC1 = tvm.var("strideC1")
strideC2 = tvm.var("strideC2")
strideC3 = tvm.var("strideC3")
sC = tvm.decl_buffer(tC.shape, tC.dtype,
name="sC",
offset_factor=1,
strides=[strideC1, strideC2, strideC3, 1])
iter_n = f_n // d_n + (0 if f_n % d_n == 0 else 1)
iter_c = f_c // d_c + (0 if f_c % d_c == 0 else 1)
iter_y = f_y // d_y + (0 if f_y % d_y == 0 else 1)
iter_x = f_x // d_x + (0 if f_x % d_x == 0 else 1)
iter_k = f_k // d_k + (0 if f_k % d_k == 0 else 1)
iter_r = f_r // d_r + (0 if f_r % d_r == 0 else 1)
iter_s = f_s // d_s + (0 if f_s % d_s == 0 else 1)
pad_n = 0 if f_n % d_n == 0 else (d_n - f_n % d_n)
pad_c = 0 if f_c % d_c == 0 else (d_c - f_c % d_c)
pad_y = 0 if f_y % d_y == 0 else (d_y - f_y % d_y)
pad_x = 0 if f_x % d_x == 0 else (d_x - f_x % d_x)
pad_k = 0 if f_k % d_k == 0 else (d_k - f_k % d_k)
pad_r = 0 if f_r % d_r == 0 else (d_r - f_r % d_r)
pad_s = 0 if f_s % d_s == 0 else (d_s - f_s % d_s)
last_iter_n = l_n // d_n + (0 if l_n % d_n == 0 else 1)
last_iter_c = l_c // d_c + (0 if l_c % d_c == 0 else 1)
last_iter_y = l_y // d_y + (0 if l_y % d_y == 0 else 1)
last_iter_x = l_x // d_x + (0 if l_x % d_x == 0 else 1)
last_iter_k = l_k // d_k + (0 if l_k % d_k == 0 else 1)
last_iter_r = l_r // d_r + (0 if l_r % d_r == 0 else 1)
last_iter_s = l_s // d_s + (0 if l_s % d_s == 0 else 1)
last_pad_n = 0 if l_n % d_n == 0 else (d_n - l_n % d_n)
last_pad_c = 0 if l_c % d_c == 0 else (d_c - l_c % d_c)
last_pad_y = 0 if l_y % d_y == 0 else (d_y - l_y % d_y)
last_pad_x = 0 if l_x % d_x == 0 else (d_x - l_x % d_x)
last_pad_k = 0 if l_k % d_k == 0 else (d_k - l_k % d_k)
last_pad_r = 0 if l_r % d_r == 0 else (d_r - l_r % d_r)
last_pad_s = 0 if l_s % d_s == 0 else (d_s - l_s % d_s)
iter_n = tvm.if_then_else(c_n, last_iter_n, iter_n)
iter_c = tvm.if_then_else(c_c, last_iter_c, iter_c)
iter_y = tvm.if_then_else(c_y, last_iter_y, iter_y)
iter_x = tvm.if_then_else(c_x, last_iter_x, iter_x)
iter_k = tvm.if_then_else(c_k, last_iter_k, iter_k)
iter_r = tvm.if_then_else(c_r, last_iter_r, iter_r)
iter_s = tvm.if_then_else(c_s, last_iter_s, iter_s)
pad_n = tvm.if_then_else(c_n, last_pad_n, pad_n)
pad_c = tvm.if_then_else(c_c, last_pad_c, pad_c)
pad_y = tvm.if_then_else(c_y, last_pad_y, pad_y)
pad_x = tvm.if_then_else(c_x, last_pad_x, pad_x)
pad_k = tvm.if_then_else(c_k, last_pad_k, pad_k)
pad_r = tvm.if_then_else(c_r, last_pad_r, pad_r)
pad_s = tvm.if_then_else(c_s, last_pad_s, pad_s)
# reset-update-finalize
def interface_func(ins, outs):
sa, sb = ins
sc, = outs
def _body():
ib = tvm.ir_builder.create()
ib.emit(tvm.call_extern(dtype, "tensorized_CONV",
sa.access_ptr("r"),
sb.access_ptr("r"),
sc.access_ptr("rw"),
1,
iter_n, iter_c, iter_y, iter_x, iter_k, iter_r, iter_s,
pad_n, pad_c, pad_y, pad_x, pad_k, pad_r, pad_s,
True, False))
return ib.get()
def _reset():
ib = tvm.ir_builder.create()
ib.emit(tvm.call_extern(dtype, "init_output",
sc.access_ptr("w"),
iter_n, iter_y, iter_x, iter_k,
pad_n, pad_y, pad_x, pad_k))
return ib.get()
def _finalize():
ib = tvm.ir_builder.create()
ib.emit(tvm.call_extern(dtype, "store_output",
sc.access_ptr("rw"),
iter_n, iter_y, iter_x, iter_k,
pad_n, pad_y, pad_x, pad_k))
return ib.get()
return None, _reset(), _body(), _finalize()
with tvm.build_config(offset_factor=1):
return tvm.decl_tensor_intrin(tC.op, interface_func, binds={tA: sA, tB: sB, tC: sC}, name="conv_interface")
def generate_conv_interface(N, C, Y, X, K, R, S, fN, fC, fY, fX, fK, fR, fS,
axisN, axisC, axisY, axisX, axisK, axisR, axisS,
dN, dC, dY, dX, dK, dR, dS, sp_kb, local_kb, dtype):
"""
N, C, Y, X, K, R, S: the dimensions mapped to n, c, y, x, k, r, s
fN, fC, fY, fX, fK, fR, fS: interface size (fN, fC, fY + fR, fX + fS) * (fR, fS, fC, fK)
axisN, axisC, axisY, axisX, axisK, axisR, axisS: AST nodes
dN, dC, dY, dX, dK, dR, dS: intrinsic size
"""
if verbose:
assert fN * fX * fY * fC + fK * fC * fR * fS <= sp_kb * 8192 / bits_map[dtype], 'data too large for scratchpad'
assert dN * dX * dY * dC + dK * dC * dR * dS <= local_kb * 8192 / bits_map[dtype], 'data too large for local memory'
else:
assert fN * fX * fY * fC + fK * fC * fR * fS <= sp_kb * 8192 / bits_map[dtype]
assert dN * dX * dY * dC + dK * dC * dR * dS <= local_kb * 8192 / bits_map[dtype]
last_n = N % fN # the last iteration of N
cond_n = tvm.expr.EQ(axisN, N // fN) if last_n != 0 else False # n condition statement
last_n = last_n if last_n != 0 else fN
last_c = C % fC
cond_c = tvm.expr.EQ(axisC, C // fC) if last_c != 0 else False
last_c = last_c if last_c != 0 else fC
last_y = Y % fY
cond_y = tvm.expr.EQ(axisY, Y // fY) if last_y != 0 else False
last_y = last_y if last_y != 0 else fY
last_x = X % fX
cond_x = tvm.expr.EQ(axisX, X // fX) if last_x != 0 else False
last_x = last_x if last_x != 0 else fX
last_k = K % fK
cond_k = tvm.expr.EQ(axisK, K // fK) if last_k != 0 else False
last_k = last_k if last_k != 0 else fK
last_r = R % fR
cond_r = tvm.expr.EQ(axisR, R // fR) if last_r != 0 else False
last_r = last_r if last_r != 0 else fR
last_s = S % fS
cond_s = tvm.expr.EQ(axisS, S // fS) if last_s != 0 else False
last_s = last_s if last_s != 0 else fS
return conv_interface(fN, fC, fY, fX, fK, fR, fS, last_n, last_c, last_y, last_x, last_k, last_r, last_s,
cond_n, cond_c, cond_y, cond_x, cond_k, cond_r, cond_s, dN, dC, dY, dX, dK, dR, dS, dtype)
class CONVGenerator(generator):
# generate accelerators with CONV intrinsics
def __init__(self, dtype="int8"):
super().__init__("CONV", conv_intrinsic, generate_conv_interface, dtype)
def instantiate(self, params, tag):
x, y, sp_kb, sp_banks, dma_width, dma_bytes, local_kb, dataflow, dtype = parse_params(self.type, params)
def interface_3x3(N, C, Y, X, K, R, S, fN, fC, fY, fX, fK, fR, fS,
axisN, axisC, axisY, axisX, axisK, axisR, axisS):
return self.intf_func(N, C, Y, X, K, R, S, fN, fC, fY, fX, fK, fR, fS,
axisN, axisC, axisY, axisX, axisK, axisR, axisS,
1, y, 16, 16, x, 3, 3, sp_kb, local_kb, dtype) # intrinsic size is hardware-specific
# 0s placeholder the dimensions of mapped CONVs
acc = accelerator(self, interface_3x3, params, tag, (0, 0, 0, 0, 0, 0, 0, dtype))
# def interface_1x1(N, C, Y, X, K, R, S, fN, fC, fY, fX, fK, fR, fS,
# axisN, axisC, axisY, axisX, axisK, axisR, axisS):
# from hw_generator.generator_gemm import generate_gemm_interface
# return generate_gemm_interface(K, Y, C, fK, fY, fC, axisK, axisY, axisC, x, y, 1, sp_kb, local_kb, dtype)
# acc.add_intrinsic(gemm_intrinsic, (0, 0, 0, dtype), interface_1x1)
return acc
|
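Every iter_*/pad_* block above follows the same pattern: ceiling division of a full extent by the intrinsic extent, plus the padding needed to round up to a multiple. A compact equivalent of one such pair (the helper name is illustrative):

def tile(extent, intrinsic):
    iters = extent // intrinsic + (0 if extent % intrinsic == 0 else 1)  # ceil(extent / intrinsic)
    pad = 0 if extent % intrinsic == 0 else intrinsic - extent % intrinsic  # distance up to the next multiple
    return iters, pad

# e.g. tile(f_n, d_n) == (iter_n, pad_n) before the tail-condition selection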
#!/usr/bin/python3.4
''' A change to `local_bad_scope.py` avoiding any error by passing a parameter '''
def main():
x = 3
f(x)
def f(whatever):
    print(whatever) # no error: the x defined in main arrives here as the parameter 'whatever'
main()
|
from utils.utils import *
from utils.plotters import *
import os
import numpy as np
import torch
import librosa
class AudioGenerator(object):
def __init__(self, params, generators_list=None, noise_amp_list=None, reconstruction_noise_list=None):
super(AudioGenerator, self).__init__()
if type(params) is str:
path = os.path.join(params, 'log.txt')
output_folder = params
params = params_from_log(path)
params.output_folder = output_folder
noise_amp_list = noise_amp_list_from_log(path)
reconstruction_noise_list = torch.load((os.path.join(params.output_folder, 'reconstruction_noise_list.pt')),
map_location=params.device)
generators_list = generators_list_from_folder(params)
else:
output_folder = params.output_folder
self.generators_list = generators_list
self.noise_amp_list = noise_amp_list
self.params = params
self.reconstruction_noise_list = reconstruction_noise_list
self.output_folder = output_folder
if not os.path.exists(os.path.join(output_folder, 'GeneratedSignals')):
os.mkdir(os.path.join(output_folder, 'GeneratedSignals'))
def generate(self, nSignals=1, length=20, generate_all_scales=False):
for sig_idx in range(nSignals):
# Draws a signal up to current scale, using learned generators
output_signals_list = draw_signal(self.params, self.generators_list,
[round(f * length) for f in self.params.fs_list], self.params.fs_list,
self.noise_amp_list, output_all_scales=generate_all_scales)
# Write signals
if generate_all_scales:
for scale_idx, sig in enumerate(output_signals_list):
write_signal(
os.path.join(self.output_folder, 'GeneratedSignals',
'generated@%dHz.wav' % self.params.fs_list[scale_idx]),
sig, self.params.fs_list[scale_idx], overwrite=False)
else:
write_signal(
os.path.join(self.output_folder, 'GeneratedSignals',
'generated@%dHz.wav' % self.params.fs_list[-1]),
output_signals_list, self.params.fs_list[-1], overwrite=False)
def reconstruct(self, reconstruction_noise_list=None, write=True):
if reconstruction_noise_list is None:
reconstruction_noise_list = self.reconstruction_noise_list
reconstructed_signal = draw_signal(self.params, self.generators_list,
[int(l) for l in self.params.inputs_lengths],
self.params.fs_list, self.noise_amp_list,
reconstruction_noise_list=reconstruction_noise_list)
if write:
write_signal(
os.path.join(self.output_folder, 'GeneratedSignals',
'reconstructed@%dHz.wav' % self.params.fs_list[-1]),
reconstructed_signal, self.params.fs_list[-1], overwrite=False)
else:
return reconstructed_signal
def inpaint(self, new_noise=False):
reconstruction_noise_list = self.reconstruction_noise_list
if new_noise:
pad_size = calc_pad_size(self.params)
reconstruction_noise_list_new = []
for idx, (r, fs, noise_amp) in enumerate(
zip(reconstruction_noise_list, self.params.fs_list, self.noise_amp_list)):
new_r = r.clone()
start_idx = int(self.params.inpainting_indices[0] + pad_size)
end_idx = int(self.params.inpainting_indices[1] + pad_size)
new_noise = get_noise(self.params, end_idx - start_idx).expand(1, 1, -1).to(r.device)
new_noise = new_noise * noise_amp
new_r[:, :, start_idx:end_idx] = new_noise
reconstruction_noise_list_new.append(new_r)
reconstruction_noise_list = reconstruction_noise_list_new
reconstructed_signal = self.reconstruct(reconstruction_noise_list, write=False)
real_signal, _ = librosa.load(
os.path.join(self.params.output_folder, 'real@%dHz.wav' % self.params.Fs),
sr=self.params.Fs)
stitched_signal = real_signal.copy()
frame_idcs = range(self.params.inpainting_indices[0], self.params.inpainting_indices[1])
window_size = int((frame_idcs[-1] - frame_idcs[0] + 1) / 2)
window_size = window_size - (1 - window_size % 2)
stitched_signal = stitch_signals(stitched_signal, reconstructed_signal.squeeze().cpu().numpy(),
frame_idcs, window_size=window_size)
write_signal(os.path.join(self.params.output_folder, 'GeneratedSignals', 'inpainted'), stitched_signal,
self.params.Fs)
def extend(self, condition, filt_file=None):
conditioned_signal = self.condition(condition, False)
stitched_signal = time_freq_stitch_by_fft(condition['condition_signal'].squeeze().cpu().numpy(),
conditioned_signal.squeeze().cpu().numpy(),
condition['condition_fs'],
self.params.Fs, filt_file)
output_file = os.path.join(self.output_folder, 'GeneratedSignals',
condition['name'] + '_extended')
write_signal(output_file, stitched_signal, self.params.Fs)
return stitched_signal
def condition(self, condition, write=True):
condition["condition_scale_idx"] = np.where(np.array(self.params.fs_list) <= condition["condition_fs"])[0][
-1] + 1
condition["condition_signal"] = torch.Tensor(condition["condition_signal"]).expand(1, 1, -1).to(
self.params.device)
lengths = [int(condition["condition_signal"].shape[2] / condition["condition_fs"] * fs) for fs in
self.params.fs_list]
conditioned_signal = draw_signal(self.params, self.generators_list, lengths,
self.params.fs_list, self.noise_amp_list, condition=condition)
if write:
output_file = os.path.join(self.output_folder, 'GeneratedSignals',
'conditioned_on_' + condition['name'])
write_signal(output_file, conditioned_signal, self.params.Fs)
else:
return conditioned_signal
|
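A brief usage sketch, assuming a finished training run whose output folder contains log.txt, the saved generators, and reconstruction_noise_list.pt (the folder path is illustrative):

gen = AudioGenerator('runs/my_trained_model')
gen.generate(nSignals=3, length=10)  # writes generated signals into GeneratedSignals/
gen.reconstruct()  # writes reconstructed@<fs>Hz.wav alongside them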
class SimulatorError(Exception):
pass
class ConvergenceError(Exception):
pass
class CircuitParameterError(Exception):
pass
|
from config import db
from models import Application
def read_one(activatorId):
"""
Responds to a request for /api/application_meta/{activatorId}
    :param activatorId: id of the activator to filter on
    :return: count of applications that match the activatorId
"""
acount = (
db.session.query(Application)
.filter(Application.activatorId == activatorId)
.count()
)
db.session.close()
data = {"count": acount}
return data, 200
|
from django.urls import path
from Location.views import CityList
app_name='Location'
urlpatterns=[
path('city/',CityList.as_view()),
# path('city/area/',AreaList.as_view())
]
|
## Graphite local_settings.py
# Edit this file to customize the default Graphite webapp settings
DATABASES = {
'default': {
'NAME': 'graphite.db',
'ENGINE': 'django.db.backends.sqlite3',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': ''
}
}
|
# -*- coding: UTF-8 -*-
import os
from datetime import timedelta
# QQ bot parameters
HOST = "0.0.0.0"
PORT = 8080 # event port
ACCESS_TOKEN = "" # access_token
SECRET = "" # secret
DEBUG = True # debug mode
SUPERUSERS = {1984594680} # QQ numbers; more than one may be listed
NICKNAME = {'bot'}
SESSION_RUN_TIMEOUT = timedelta(seconds=10)
COMMAND_START = {"!", "!"} # command trigger prefixes (ASCII and fullwidth)
COMMAND_SEP = {' '} # command separator
APSCHEDULER_CONFIG = {'apscheduler.timezone': 'Asia/Shanghai'} # timezone for scheduled tasks
# Wait time
WAIT_TIME = 2.0 # wait time when downloading .osu files
# Root path
BASE_DIR = os.path.abspath(os.path.dirname(__file__)) # project root directory
OSU_TEMP_DOWNLOAD_DIR = BASE_DIR + "\\osu_file\\" # temporary directory for .osu files
# MySQL configuration
MYSQL_UNAME = "root"
MYSQL_PASSWD = "123456"
MYSQL_HOST = "127.0.0.1"
MYSQL_PORT = 3306
MYSQL_DBNAME = "osudb"
MYSQL_TBNAME = "osuinformation" # table holding all osu! accounts
MYSQL_TBCOOKIE = "cookie" # table holding login cookie values
# Aliyun OSS configuration
ACCESSKEYID = ""
ACCESSKEYSECRET = ""
BUCKETNAME = ""
# ENDPOINT = "oss-cn-shanghai.aliyuncs.com" # Shanghai storage node
# ENDPOINT = "oss-cn-beijing.aliyuncs.com" # Beijing storage node
ENDPOINT = "oss-us-west-1.aliyuncs.com" # US (Silicon Valley) storage node
# osu! account
LOGIN_OSU_USERNAME = "" # osu! login username
LOGIN_OSU_PASSPORD = "" # osu! login password
# API parameters
BASE_URL = "https://osu.ppy.sh{}" # base format for building URLs
MAP_SEARCH_BASE_URL = "https://osu.ppy.sh/beatmapsets?q={}" # base URL for beatmap search on the official site
MAP_DOWNLOAD_BASE_URL = "https://osu.ppy.sh/beatmapsets/{}/download" # base URL for beatmap downloads
MAP_BASE_URL = "https://osu.ppy.sh/b/{}" # base URL format for a single beatmap
GET_NUM = "1" # number of records to fetch
GET_MANY_NUM = "10" # fetch the 10 most recent records
BP_LIMIT_NUM = "5" # limit on the number of bp records
# api url
# Reference: https://github.com/ppy/osu-api/wiki
API_KEY = "" # obtain from the official site
GET_URER_URL = "/api/get_user" # fetch user information
GET_SCORES_URL = "/api/get_scores" # retrieve information about the top 100 scores on a given beatmap
GET_USER_BEST_URL = "/api/get_user_best" # fetch a given user's top scores
GET_USER_RECENT = "/api/get_user_recent" # fetch a player's recent plays
GET_MAP = "/api/get_beatmaps" # fetch beatmap information
# pp-related parameters
PP_START = True # enable pp calculation
# Mod difficulty values
mod_dict = dict(NONE=0, # None in the API
NF=1, EZ=2, TD=4, HD=8, HR=16, SD=32,
DT=64, RX=128, HF=256, NC=512, FL=1024,
Auto=2048, SO=4096, RX2=8192, PF=16384, ) # value for each mod
reverse_mod_dict = {value: key for key, value in mod_dict.items()} # inverted form of the dict above
mod_value = [i for i in mod_dict.values()] # list of mod values
# Play modes
mode_dict = {0: "osu", 1: "taiko", 2: "ctb", 3: "mania"} # mode
reverse_mode_dict = {value: key for key, value in mode_dict.items()} # inverted mode dict
# osu! query commands
HELP = "help" # help
HELP_UPPER = "HELP" # help (uppercase)
SEARCH_USER = "stat" # query by osu! player name
SEARCH_USER_UPPER = "STAT" # query by osu! player name (uppercase)
SEARCH_ME = "statme" # query your own rank and scores
SEARCH_ME_UPPER = "STATME" # query your own rank and scores (uppercase)
SET_USR = "set" # bind a QQ number to an osu! id
SET_USR_UPPER = "SET" # bind a QQ number to an osu! id (uppercase)
UNSET_USER = "unset" # remove the binding
UNSET_USER_UPPER = "UNSET" # remove the binding (uppercase)
RECENT = "recent" # query your most recent plays
RECENT_UPPER = "RECENT" # query your most recent plays (uppercase)
RECENT_PASS = "pr" # query your most recent passed plays
RECENT_PASS_UPPER = "PR" # query your most recent passed plays (uppercase)
MODE = "mode" # switch to another mode
MODE_UPPER = "MODE" # switch to another mode (uppercase)
BEST_PLAY = "bp" # view a player's best plays
BEST_PLAY_UPPER = "BP" # view a player's best plays (uppercase)
BEST_PLAY_FOR_ME = "bpme" # view your own best plays
BEST_PLAY_FOR_ME_UPPER = "BPME" # view your own best plays (uppercase)
|
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from hifireg import settings
database = settings.DATABASES['default']
def get_db_conn(db_name):
conn = psycopg2.connect(
host=database['HOST'],
port=database['PORT'],
database=db_name,
user=database['USER'],
password=database['PASSWORD'])
return conn
def get_simple_cursor():
conn = get_db_conn("postgres")
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) # turn off transactions
return conn.cursor()
def terminate_conn(db_name=database['NAME']):
cursor = get_simple_cursor()
cursor.execute(f"select pg_terminate_backend (pg_stat_activity.pid) from pg_stat_activity where pg_stat_activity.datname = '{db_name}'")
def reset_db(db_name=database['NAME']):
cursor = get_simple_cursor()
    if not settings.DEBUG:
        print(f'You are NOT in debug mode! Type the name of the database ("{db_name}") to confirm:')
        if input() != db_name:
            print("Aborted.")
            exit()
cursor.execute(f"DROP DATABASE {db_name};")
cursor.execute(f"CREATE DATABASE {db_name};")
|
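Ordering matters when wiping the database: live sessions keep it open, so terminate them before dropping. A typical sequence:

terminate_conn()
reset_db()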
# Importing the libraries
import pandas as pd
import streamlit as st
import folium
import geopandas
import plotly.express as px
from streamlit_folium import folium_static
from folium.plugins import MarkerCluster
from datetime import datetime
# Setting page layout for figures
st.set_page_config(layout='wide')
@st.cache(allow_output_mutation=True)
def get_data(path):
'''
Reads main data (.csv) and returns the pandas dataframe.
'''
data = pd.read_csv(path)
data['date'] = pd.to_datetime(data['date'])
return data
@st.cache(allow_output_mutation=True)
def get_geofile(url):
'''
Reads .json from a specific url and returns the pandas geofile.
'''
geofile = geopandas.read_file(url)
return geofile
def set_features(data):
'''
Changes the dataframe adding some features, then returns that.
'''
# Price by square meters
data['price_m2'] = round(data['price'] / (data['sqft_lot'] / 10.764), 2)
# Living room in square meters
data['living_m2'] = round(data['sqft_living'] / 10.764, 2)
return data
def descriptive_statistics(data):
'''
    Returns a dataframe of statistics for the numeric variables, including
    min, max, mean, median and std.
'''
df_statistics = data.describe().T.reset_index().rename({'index':'attributes',
'50%':'median'}, axis=1)
df_statistics = df_statistics[['attributes', 'min', 'max',
'mean', 'median', 'std']]
return df_statistics
def overview_data(data):
'''
    Creates all the overview elements of the app, including:
- Data table;
- Descriptive statistics;
    - Zipcode information;
- Overview layout;
- Overview sidebar part.
'''
f_attributes = st.sidebar.multiselect('Enter columns', data.columns)
f_zipcode = st.sidebar.multiselect('Enter zipcode', data['zipcode'].unique())
st.title('Data Overview')
if (f_zipcode != []) & (f_attributes != []):
data = data.loc[data['zipcode'].isin(f_zipcode), f_attributes]
elif (f_zipcode != []) & (f_attributes == []):
data = data.loc[data['zipcode'].isin(f_zipcode), :]
elif (f_zipcode == []) & (f_attributes != []):
data = data.loc[:, f_attributes]
else:
data = data.copy()
st.dataframe(data.head())
# New dataframes
c1, c2 = st.beta_columns((1, 1))
# Descriptive statistics
stat_df = descriptive_statistics(data)
c1.header('Descriptive Statistics')
c1.dataframe(stat_df, height=600)
# Average metrics
df1 = data[['id', 'zipcode']].groupby('zipcode').count().reset_index()
df2 = data[['price', 'zipcode']].groupby('zipcode').mean().reset_index()
df3 = data[['living_m2', 'zipcode']].groupby('zipcode').mean().reset_index()
df4 = data[['price_m2', 'zipcode']].groupby('zipcode').mean().reset_index()
# Merge
m1 = pd.merge(df1, df2, on='zipcode', how='inner')
m2 = pd.merge(m1, df3, on='zipcode', how='inner')
avg_df = pd.merge(m2, df4, on='zipcode', how='inner')
avg_df.columns = ['ZIPCODE', 'TOTAL HOUSES', 'PRICE', 'LIVING ROOM M2', 'PRICE/LOT M2']
c2.header('Average Values by Zipcode')
c2.dataframe(avg_df, height=575)
return None
def business_maps(data, geofile):
'''
    Creates all the map elements of the app, including:
- Map section layout;
- Portfolio density;
- Region price map.
'''
st.title('Region Overview')
c1, c2 = st.beta_columns((1, 1))
c1.header('Portfolio Density')
## base map - folium
density_map = folium.Map(location=[data['lat'].mean(), data['long'].mean()],
default_zoom_start=15)
marker_cluster = MarkerCluster().add_to(density_map)
for name, row in data.iterrows():
        folium.Marker([row['lat'], row['long']],
                      popup=('Sold ${0} on: {1}. Features: {2} m2, '
                             '{3} bedrooms, {4} bathrooms, '
                             'year built: {5}.').format(row['price'],
                                                        row['date'],
                                                        row['living_m2'],
                                                        row['bedrooms'],
                                                        row['bathrooms'],
                                                        row['yr_built'])).add_to(marker_cluster)
with c1:
folium_static(density_map)
# Region price map
c2.header('Price Density')
df_aux = data[['price', 'zipcode']].groupby('zipcode').mean().reset_index()
df_aux.columns = ['ZIP', 'PRICE']
geofile = geofile[geofile['ZIP'].isin(df_aux['ZIP'].tolist())]
region_price_map = folium.Map(location=[data['lat'].mean(), data['long'].mean()],
default_zoom_start=15)
folium.Choropleth(data=df_aux, geo_data=geofile,
columns=['ZIP', 'PRICE'],
key_on='feature.properties.ZIP',
fill_color='YlOrRd',
fill_opacity=0.7,
line_opacity=0.2,
legend_name='AVG PRICE').add_to(region_price_map)
with c2:
folium_static(region_price_map)
return None
def commercial_distribution(data):
'''
    Creates all the commercial elements of the app, including:
- Average price distribution per year;
- Average price distribution per day;
- Price histogram;
- Commercial layout;
- Commercial filters.
'''
# House distribution per commercial attributes
st.sidebar.title('Commercial Options')
st.title('Commercial Attributes')
#------------------------------------------------------------------------------------------
# Average price per year
#------------------------------------------------------------------------------------------
st.header('Average Price per Year Built')
st.sidebar.subheader('Select Max Year')
# Filters
min_year_built = int(data['yr_built'].min())
max_year_built = int(data['yr_built'].max())
f_yr_built = st.sidebar.slider('Year Built', min_year_built, max_year_built, max_year_built)
# Data selection
yr_df = data.loc[data['yr_built'] <= f_yr_built]
avg_price_yr_df = yr_df[['yr_built', 'price']].groupby('yr_built').mean().reset_index()
# Plot
fig = px.line(avg_price_yr_df, x='yr_built', y='price')
st.plotly_chart(fig, use_container_width=True)
#------------------------------------------------------------------------------------------
## Average price per day
#------------------------------------------------------------------------------------------
st.header('Average Price per Day')
st.sidebar.subheader('Select Max Date')
# Filters
min_date = datetime.strptime(data['date'].min().strftime('%Y-%m-%d'), '%Y-%m-%d')
max_date = datetime.strptime(data['date'].max().strftime('%Y-%m-%d'), '%Y-%m-%d')
f_date = st.sidebar.slider('Date', min_date, max_date, max_date)
# Data selection
date_df = data.loc[data['date'] <= f_date]
avg_price_day_df = date_df[['date', 'price']].groupby('date').mean().reset_index()
# Plot
fig = px.line(avg_price_day_df, x='date', y='price')
st.plotly_chart(fig, use_container_width=True)
#------------------------------------------------------------------------------------------
# Price histogram
#------------------------------------------------------------------------------------------
st.header('Price Distribution')
st.sidebar.subheader('Select Max Price')
## Filters
min_price = int(data['price'].min())
max_price = int(data['price'].max())
avg_price = int(data['price'].mean())
f_price = st.sidebar.slider('Price', min_price, max_price, avg_price)
## Data selection
price_df = data.loc[data['price'] <= f_price]
## Plot
fig = px.histogram(price_df, x='price', nbins=50)
st.plotly_chart(fig, use_container_width=True)
return None
def attributes_distribution(data):
'''
    Creates all the attribute-distribution elements of the app, including:
- Bedrooms distribution;
- Bathrooms distribution;
- Floors distribution;
- Is waterfront houses count;
- Attributes layout;
- Attributes filters.
'''
# Other house categories
st.sidebar.title('Attributes Options')
st.title('House Attributes')
#------------------------------------------------------------------------------------------
# Filters to bedrooms and bathrooms
#------------------------------------------------------------------------------------------
f_bedrooms = st.sidebar.selectbox('Max number of bedrooms',
sorted(data['bedrooms'].unique()))
f_bathrooms = st.sidebar.selectbox('Max number of bathrooms',
sorted(data['bathrooms'].unique()))
c1, c2 = st.beta_columns(2)
# Data selection
bedrooms_df = data.loc[data['bedrooms'] <= f_bedrooms]
bathrooms_df = data.loc[data['bathrooms'] <= f_bathrooms]
# House per bedrooms
c1.header('Houses per bedrooms')
fig = px.histogram(bedrooms_df, x='bedrooms', nbins=19)
c1.plotly_chart(fig, use_container_width=True)
# House per bathrooms
c2.header('Houses per bathrooms')
fig = px.histogram(bathrooms_df, x='bathrooms', nbins=19)
c2.plotly_chart(fig, use_container_width=True)
#------------------------------------------------------------------------------------------
# Filters to floors and waterview
#------------------------------------------------------------------------------------------
f_floors = st.sidebar.selectbox('Max number of floors', sorted(data['floors'].unique()))
f_waterview = st.sidebar.checkbox('Only Houses with Water View')
c1, c2 = st.beta_columns(2)
# Data selection
df_floors = data.loc[data['floors'] <= f_floors]
if f_waterview:
df_waterview = data[data['waterfront'] == 1]
else:
df_waterview = data
# House per floors
c1.header('Houses per floors')
fig = px.histogram(df_floors, x='floors', nbins=10)
c1.plotly_chart(fig, use_container_width=True)
# House per water view
c2.header('Houses with waterfront')
fig = px.histogram(df_waterview, x='waterfront', nbins=2)
c2.plotly_chart(fig, use_container_width=True)
return None
if __name__ == '__main__':
# Data extraction
path = 'kc_house_data.csv'
url = 'https://opendata.arcgis.com/datasets/83fc2e72903343aabff6de8cb445b81c_2.geojson'
df = get_data(path)
geofile = get_geofile(url)
# Data transformation
df = set_features(df)
overview_data(df)
business_maps(df, geofile)
commercial_distribution(df)
attributes_distribution(df)
|
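The dashboard above is launched with Streamlit's CLI; assuming the script is saved as house_dashboard.py next to kc_house_data.csv:

streamlit run house_dashboard.py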