content stringlengths 5 1.05M |
|---|
#!/usr/bin/env python
"""Parse bam-readcount output, reporting non-reference bases per position
with optional minimum-coverage and minimum-VAF filters.

Reads the (headerless) bam-readcount output file given on the command line
and prints a TSV report to stdout.
"""
import os
import argparse
import logging

SCRIPT_PATH = os.path.abspath(__file__)
FORMAT = '[%(asctime)s] %(levelname)s %(message)s'

# Root logger with a simple timestamped handler; `-v` switches it to DEBUG.
logger = logging.getLogger()
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(FORMAT))
logger.addHandler(handler)
logger.setLevel(logging.INFO)
debug = logger.debug
info = logger.info
warning = logger.warning
error = logger.error

DESCRIPTION = '''
Parse bam-readcount output with optional filters
'''
EPILOG = '''
'''


class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter,
                      argparse.RawDescriptionHelpFormatter):
    """Show argument defaults while keeping the raw description layout."""
    pass


parser = argparse.ArgumentParser(description=DESCRIPTION,
                                 epilog=EPILOG,
                                 formatter_class=CustomFormatter)
parser.add_argument('bam_readcount_output')
parser.add_argument('--min-cov',
                    action='store',
                    type=int,
                    help='Minimum coverage to report variant',
                    default=0)
parser.add_argument('--min-vaf',
                    action='store',
                    type=float,
                    help='Minimum VAF to report variant',
                    default=0.0)
parser.add_argument('-v',
                    '--verbose',
                    action='store_true',
                    help='Set logging level to DEBUG')
args = parser.parse_args()
if args.verbose:
    logger.setLevel(logging.DEBUG)
debug('%s begin', SCRIPT_PATH)

headers = [
    'chrom', 'position', 'ref', 'base', 'vaf', 'depth', 'count',
    'avg_basequality', 'avg_pos_as_fraction'
]
print('\t'.join(headers))

# Per-base/indel data fields
# IMPORTANT: this relies on Python 3.6+ to maintain insertion order
# Each field is a key with value a function to convert to the
# appropriate data type
base_fields = {
    'base': str,
    'count': int,
    'avg_mapping_quality': float,
    'avg_basequality': float,
    'avg_se_mapping_quality': float,
    'num_plus_strand': int,
    'num_minus_strand': int,
    'avg_pos_as_fraction': float,
    'avg_num_mismatches_as_fraction': float,
    'avg_sum_mismatch_qualities': float,
    'num_q2_containing_reads': int,
    'avg_distance_to_q2_start_in_q2_reads': float,
    'avg_clipped_length': float,
    'avg_distance_to_effective_3p_end': float
}

# Open the bam-readcount output file and read it line by line
# Note that the output has no header, so we consume every line
with open(args.bam_readcount_output) as in_fh:
    for line in in_fh:
        # Fields are tab-separated; strip the trailing newline first
        fields = line.strip().split('\t')
        # The first four fields contain overall information about the position
        chrom = fields[0]                 # Chromosome/reference
        position = int(fields[1])         # Position (1-based)
        reference_base = fields[2]        # Reference base
        depth = int(fields[3])            # Depth of coverage
        # A zero-depth position can never yield a reportable variant, so
        # skip it before parsing any per-base data (hoisted out of the
        # per-base loop, where it was re-checked for every base).
        if depth == 0:
            continue
        # The remaining fields are data for each base or indel
        for base_data_string in fields[4:]:
            # Split the ':'-separated values and convert each with its
            # declared type, keyed by field name
            base_values = base_data_string.split(':')
            base_data = {}
            for i, base_field in enumerate(base_fields):
                base_data[base_field] = base_fields[base_field](base_values[i])
            # Skip reference bases and bases with no counts
            if base_data['base'] == reference_base or base_data['count'] == 0:
                continue
            # Calculate an allele frequency (VAF) from the base counts
            vaf = base_data['count'] / depth
            # Filter on minimum depth and VAF
            if depth >= args.min_cov and vaf >= args.min_vaf:
                # Output count and VAF data as well as avg_pos_as_fraction
                print('\t'.join(
                    str(x) for x in (chrom, position, reference_base, base_data['base'],
                                     '%0.2f' % (vaf), depth, base_data['count'],
                                     base_data['avg_basequality'], base_data['avg_pos_as_fraction'])))
debug('%s end', SCRIPT_PATH)
|
from __future__ import print_function, division
import sys,os
qspin_path = os.path.join(os.getcwd(),"../")
sys.path.insert(0,qspin_path)
#print(os.environ["OMP_NUM_THREADS"])
from quspin.basis import spin_basis_1d
from quspin.operators import hamiltonian
from quspin.tools.evolution import expm_multiply_parallel
from scipy.sparse.linalg import expm_multiply
from scipy.sparse import random,eye
import numpy as np
def test_imag_time(L=20,seed=0):
    """Check that repeated application of expm_multiply_parallel in
    imaginary time converges to the same state as scipy's expm_multiply.

    Builds a Heisenberg chain, shifts it so the ground state is a fixed
    point of exp(-(H-E)), and propagates a random vector with both
    implementations, comparing the normalized states.
    """
    np.random.seed(seed)
    basis = spin_basis_1d(L,m=0,kblock=0,pblock=1,zblock=1)
    J = [[1.0,i,(i+1)%L] for i in range(L)]
    static = [["xx",J],["yy",J],["zz",J]]
    H = hamiltonian(static,[],basis=basis,dtype=np.float64)
    # only the ground-state energy is needed below
    (E,),psi_gs = H.eigsh(k=1,which="SA")
    # A = -(H - E) so imaginary-time evolution projects onto the ground state
    A = -(H.tocsr() - E*eye(H.Ns,format="csr",dtype=np.float64))
    U = expm_multiply_parallel(A)
    v1 = np.random.uniform(-1,1,size=H.Ns)
    v1 /= np.linalg.norm(v1)
    v2 = v1.copy()
    for i in range(100):
        v2 = U.dot(v2)
        v2 /= np.linalg.norm(v2)
        v1 = expm_multiply(A,v1)
        v1 /= np.linalg.norm(v1)
        # stop as soon as v2 has converged onto the ground-state energy
        if np.abs(H.expt_value(v2)-E) < 1e-15:
            break
    np.testing.assert_allclose(v1,v2,rtol=0,atol=1e-15,err_msg='imaginary time test failed, seed {:d}'.format(seed) )
def test_ramdom_matrix(N=3500,ntest=10,seed=0):
    """Compare expm_multiply_parallel against scipy's expm_multiply on
    random sparse complex matrices applied to a random normalized vector.
    """
    np.random.seed(seed)
    # idiomatic for-loop replaces the manual while/counter
    for i in range(ntest):
        print("testing random matrix {}".format(i+1))
        # sparse complex matrix with ~log(N) nonzeros per row
        A = (random(N,N,density=np.log(N)/N) + 1j*random(N,N,density=np.log(N)/N))
        A = A.tocsr()
        # A = (A + A.H)/2.0
        v = np.random.normal(0,1,size=N) + 1j * np.random.normal(0,1,size=N)
        v /= np.linalg.norm(v)
        v1 = expm_multiply(A,v)
        v2 = expm_multiply_parallel(A).dot(v)
        np.testing.assert_allclose(v1,v2,rtol=0,atol=1e-15,err_msg='random matrix test failed, seed {:d}'.format(seed) )
# Run both test routines at import/execution time (this module is a script).
test_imag_time()
test_ramdom_matrix()
print("expm_multiply_parallel tests passed!")
|
import torch
class Config:
    """Static training/configuration constants (read-only class attributes)."""
    # Root directory holding the image dataset.
    IMGS_PATH = '/home/data'
    # Prefer GPU when available, fall back to CPU.
    DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
    N_DOMAINS = 11
    N_CLASSES = 2
    IMG_SIZE = 224        # square input resolution in pixels
    BATCH_SIZE = 32
    EPOCHS = 10000
    LEARNING_RATE = 1e-3  # scientific notation; same value as 0.001
    WEIGHT_DECAY = 1e-9   # same value as 0.000000001, far less error-prone to read
    MOMENTUM = 0.9
"""
Tests The functions in PGComp/Matrixtools.py
"""
import pytest
import numpy
import os
from spHNF_manip.matrix_tools import *
def test_create_nxn_matrices():
    # Known sizes of the generated matrix sets for n = 1 and n = 2.
    expected_counts = {1: 3, 2: 81}
    for n, count in expected_counts.items():
        assert count == len(create_nxn_matrices(n))
def test_det_is_n():
    identity = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    scaled = [[2, 0, 0], [0, 1, 0], [0, 0, 1]]
    # det(identity) is 1, so only n == 1 should match.
    assert 1 == det_is_n(1, identity)
    assert 0 == det_is_n(2, identity)
    # det(scaled) is 2.
    assert 1 == det_is_n(2, scaled)
def test_edit_struct_enum():
    # Edit struct_enum.in in place with the identity lattice, then compare
    # the edited file line-by-line against the stored expected output.
    # NOTE(review): this test mutates tests/struct_enum.in on disk, so the
    # two edits below are order-dependent.
    edit_struct_enum("tests/struct_enum.in", [[1, 0, 0], [0, 1, 0], [0, 0, 1]])
    with open('tests/test_output/IdentityTestStruct_enum.in', 'r') as struct_enum:
        expected_struct_data1 = struct_enum.readlines()
    with open('tests/struct_enum.in', 'r') as struct_enum:
        struct_data1 = struct_enum.readlines()
    assert struct_data1 == expected_struct_data1
    # Second edit: a non-trivial lattice with float entries.
    edit_struct_enum("tests/struct_enum.in", [[1.1, 1.1, 1], [1.1, 1, 1], [1.1, 1, 1]])
    with open('tests/test_output/TestStruct_enum1.in', 'r') as struct_enum:
        expected_struct_data2 = struct_enum.readlines()
    with open('tests/struct_enum.in', 'r') as struct_enum:
        struct_data2 = struct_enum.readlines()
    assert struct_data2 == expected_struct_data2
def test_read_pg_out():
    # Each fixture file should parse into the listed point-group matrices.
    cases = [
        ("tests/test_output/pgx_outTest1.txt",
         [[[-1, 0, 0], [0, 0, -1], [0, -1, 0]], [[0, 0, 1], [-1, 0, 0], [0, 1, 0]]]),
        ("tests/test_output/pgx_outTest2.txt",
         [[[-1, 0, 0], [-1, 0, 1], [-1, 1, 0]], [[-1, 0, 1], [-1, 0, 0], [-1, 1, 0]],
          [[-1, 0, 0], [0, -1, 0], [0, 0, -1]]]),
    ]
    for path, expected in cases:
        assert expected == read_pg_out(path)
def test_calculate_transform():
    eye3 = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    base = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    row_swap = [[0, 0, 1], [0, 1, 0], [1, 0, 0]]
    mixer = [[1, 1, 5], [1, 1, 0], [1, 5, 1]]
    # Identity transform leaves either operand unchanged.
    assert numpy.array_equal(eye3, calculate_transform(eye3, eye3))
    assert numpy.array_equal(base, calculate_transform(base, eye3))
    assert numpy.array_equal(base, calculate_transform(eye3, base))
    # Anti-diagonal permutation reverses the rows of `base`.
    assert numpy.array_equal([[7, 8, 9], [4, 5, 6], [1, 2, 3]], calculate_transform(row_swap, base))
    assert numpy.array_equal([[40, 47, 54], [5, 7, 9], [28, 35, 42]], calculate_transform(mixer, base))
def test_create_matrix():
    # Three whitespace-separated row strings become one 3x3 matrix.
    eye3 = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    assert numpy.array_equal(eye3, create_matrix("1 0 0", "0 1 0", "0 0 1"))
    mixed = [[1.1, 0, 0], [0, -.1, 0], [0, 0, -0]]
    assert numpy.array_equal(mixed, create_matrix("1.1 0 0", "0 -0.1 0", "0 0 0"))
def test_save_matrix():
    # Round-trip check: save a matrix to disk, then compare the written file
    # line-by-line with a stored expected file.
    identity = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    save_matrix(identity, "tests/test_output/saveMatrixTest1.txt")
    with open('tests/test_output/saveMatrixTest1.txt', 'r') as Data:
        data1 = Data.readlines()
    with open("tests/test_output/saveMatrixExpected1.txt", 'r') as Data:
        expected_data1 = Data.readlines()
    assert data1 == expected_data1
    # Same check with a long-precision float entry.
    m1 = [[1.1111111, 0, 0], [0, 1, 0], [0, 0, 1]]
    save_matrix(m1, "tests/test_output/saveMatrixTest2.txt")
    with open('tests/test_output/saveMatrixTest2.txt', 'r') as Data:
        data2 = Data.readlines()
    with open("tests/test_output/saveMatrixExpected2.txt", 'r') as Data:
        expected_data2 = Data.readlines()
    assert data2 == expected_data2
def test_matrix_float_to_int():
    eye3 = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    # An all-integer matrix is returned unchanged.
    assert numpy.array_equal(eye3, matrix_float_to_int(eye3))
    # Near-integer floats are rounded to the identity.
    noisy = [[1.01, 0.00001, 0], [0, .9999999, 0], [0, 0.000005, 1]]
    assert numpy.array_equal(eye3, matrix_float_to_int(noisy))
def test_check_similarities():
    # Two identical pairs of identity matrices yield a similarity count of 2.
    eye3 = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    pair = [eye3, eye3]
    assert 2 == check_similarities(pair, pair)
def test_find_equivalent_basis():
    # NOTE(review): this test is incomplete — it builds a basis but makes no
    # assertion and never calls find_equivalent_basis. TODO: finish or remove.
    basis = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
def test_get_URT():
    # (point-group, expected get_URT result) pairs.
    cases = [
        # pair of identities
        ([[[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]], 1),
        # upper-triangular off-diagonal entry in the first matrix
        ([[[1, 0, 1], [0, 1, 0], [0, 0, 1]], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]], 0),
        # all-zero matrices
        ([[[0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0]]], 1),
        # off-diagonal entry in the second matrix
        ([[[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1, 0, 0], [0, 1, 1], [0, 0, 1]]], 0),
    ]
    for pg, expected in cases:
        assert expected == get_URT(pg)
def test_is_one():
    # (point-group, expected is_one result) pairs.
    cases = [
        ([[[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]], 1),
        ([[[1, 0, 1], [0, 1, 0], [0, 0, 1]], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]], 1),
        # entry of 3 disqualifies the group
        ([[[0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 3]]], 0),
        # entry of 2 disqualifies the group
        ([[[1, 0, 0], [0, 2, 0], [0, 0, 1]], [[1, 0, 0], [0, 1, 1], [0, 0, 1]]], 0),
    ]
    for pg, expected in cases:
        assert expected == is_one(pg)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Niv Drory (drory@astro.as.utexas.edu)
# @Filename: tiledb.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
import itertools

import astropy
import astropy.table
import astropy.utils.console
import cycler
import matplotlib.pyplot as plt
import numpy
from matplotlib import animation

import lvmsurveysim.target
from lvmsurveysim import IFU, config
from lvmsurveysim.schedule.plan import ObservingPlan
from lvmsurveysim.schedule.scheduler import Scheduler
from lvmsurveysim.schedule.tiledb import TileDB
from lvmsurveysim.utils.plot import __MOLLWEIDE_ORIGIN__, get_axes, transform_patch_mollweide, convert_to_mollweide
numpy.seterr(invalid='raise')
__all__ = ['Simulator']
class Simulator(object):
"""Simulates an observing schedule for a list of targets (tile database) following and observing plan.
Parameters
----------
tiledb : ~lvmsurveysim.schedule.tiledb.TileDB
The `~lvmsurveysim.schedule.tiledb.TileDB` instance with the table of
tiles to schedule.
observing_plan : l`.ObservingPlan` or None
The `.ObservingPlan` to use (one for each observatory).
If `None`, it will be created from the ``observing_plan``
section in the configuration file. Contains dates and sun/moon data for the
duration of the survey as well as Observatory data.
ifu : ~lvmsurveysim.ifu.IFU
The `~lvmsurveysim.ifu.IFU` to use. Defaults to the one from the
configuration file. Used only for plotting the survey footprint.
Attributes
----------
tiledb : ~lvmsurveysim.schedule.tiledb.TileDB
Instance of the tile database to observe.
schedule : ~astropy.table.Table
An astropy table with the results of the scheduling. Includes
information about the JD of each observation, the target observed,
the index of the pointing in the target tiling, coordinates, etc.
"""
    def __init__(self, tiledb, observing_plan=None, ifu=None):
        # The tile database is required and must already be built.
        assert isinstance(tiledb, lvmsurveysim.schedule.tiledb.TileDB), \
            'tiledb must be a lvmsurveysim.schedule.tiledb.TileDB instances.'
        # get rid of the special tiles, we do not need them for the simulator
        # NOTE(review): this mutates the caller's TileDB in place — confirm
        # callers do not rely on the special (TileID < tileid_start) rows.
        tdb = tiledb.tile_table
        tiledb.tile_table = tdb[numpy.where(tdb['TileID'] >= tiledb.tileid_start)[0]]
        # Build a default plan from the config file when none was supplied.
        if observing_plan is None:
            observing_plan = self._create_observing_plan()
        assert isinstance(observing_plan, ObservingPlan), 'observing_plan is not an instance of ObservingPlan.'
        # Scheduler tunables read from the configuration file.
        self.zenith_avoidance = config['scheduler']['zenith_avoidance']
        self.time_step = config['scheduler']['timestep']
        self.observing_plan = observing_plan
        self.tiledb = tiledb
        self.targets = tiledb.targets
        self.ifu = ifu or IFU.from_config()
        # Populated by run(); becomes an astropy Table afterwards.
        self.schedule = None
def __repr__(self):
return (f'<Simulator (observing_plan={self.observing_plan.observatory.name}, '
f'tiles={len(self.tiledb.tile_table)})>')
def save(self, path, overwrite=False):
"""
Saves the results of the scheduling simulation to a FITS file.
"""
assert isinstance(self.schedule, astropy.table.Table), \
'cannot save empty schedule. Execute Scheduler.run() first.'
targfile = str(self.targets.filename) if self.targets.filename != None else 'NA'
self.schedule.meta['targfile'] = targfile
self.schedule.write(path+'.fits', format='fits', overwrite=overwrite)
@classmethod
def load(cls, path, tiledb=None, observing_plan=None):
"""Creates a new instance from a schedule file.
Parameters
----------
path : str or ~pathlib.Path
The path to the schedule file and the basename, no extension. The
routine expects to find path.fits and path.npy
tiledb : ~lvmsurveysim.schedule.tiledb.TileDB or path-like
Instance of the tile database to observe.
observing_plan : `.ObservingPlan` or None
The `.ObservingPlan` to use (one for each observatory).
"""
schedule = astropy.table.Table.read(path+'.fits')
if not isinstance(tiledb, lvmsurveysim.schedule.tiledb.TileDB):
assert tiledb != None and tiledb != 'NA', \
'invalid or unavailable tiledb file path.'
tiledb = TileDB.load(tiledb)
observing_plan = observing_plan or []
sim = cls(tiledb, observing_plan=observing_plan)
sim.schedule = schedule
return sim
def run(self, progress_bar=True):
"""Schedules the pointings for the whole survey defined
in the observing plan.
Parameters
----------
progress_bar : bool
If `True`, shows a progress bar.
"""
# Make self.schedule a list so that we can add rows. Later we'll make
# this an Astropy Table.
self.schedule = []
plan = self.observing_plan
# Instance of the Scheduler
scheduler = Scheduler(plan)
# observed exposure time for each pointing
observed = numpy.zeros(len(self.tiledb.tile_table), dtype=numpy.float)
# range of dates for the survey
min_date = numpy.min(plan['JD'])
max_date = numpy.max(plan['JD'])
dates = range(min_date, max_date + 1)
if progress_bar:
generator = astropy.utils.console.ProgressBar(dates)
else:
generator = dates
for jd in generator:
if progress_bar is False:
print(f'scheduling JD={jd}.')
# Skips JDs not found in the plan or those that don't have good weather.
if jd not in plan['JD'] or plan[plan['JD'] == jd]['is_clear'][0] == 0:
continue
observed += self.schedule_one_night(jd, scheduler, observed)
# Convert schedule to Astropy Table.
self.schedule = astropy.table.Table(
rows=self.schedule,
names=['JD', 'observatory', 'target', 'group', 'tileid', 'index', 'ra', 'dec', 'pa',
'airmass', 'lunation', 'shadow_height', "moon_dist", 'lst', 'exptime', 'totaltime'],
dtype=[float, 'S10', 'S20', 'S20', int, int, float, float, float,
float, float, float, float, float, float, float])
    def schedule_one_night(self, jd, scheduler, observed):
        """Schedules a single night at a single observatory.

        This method is not intended to be called directly. Instead, use `.run`.

        Parameters
        ----------
        jd : int
            The Julian Date to schedule. Must be included in ``plan``.
        scheduler : .Scheduler
            The Scheduler instance that will determine the observing sequence.
        observed : ~numpy.array
            An array of the length of the tiledb that records the observing time
            accumulated on each tile thus far. Updated IN PLACE by this method.

        Returns
        -------
        observed : `~numpy.ndarray`
            The same ``observed`` array passed in, after the in-place updates
            for this night (NOT just this night's increments — callers must
            not add the return value to ``observed`` again).
        """
        # initialize the scheduler for the night
        scheduler.prepare_for_night(jd, self.observing_plan, self.tiledb)
        # shortcut
        tdb = self.tiledb.tile_table
        # begin at twilight
        current_jd = scheduler.evening_twi
        # While the current time is before morning twilight ...
        while current_jd < scheduler.morning_twi:
            # obtain the next tile to observe
            observed_idx, current_lst, hz, alt, lunation = scheduler.get_optimal_tile(current_jd, observed)
            if observed_idx == -1:
                # nothing available: record an idle quantum and advance time
                self._record_observation(current_jd, self.observing_plan.observatory,
                                         lst=current_lst,
                                         exptime=self.time_step,
                                         totaltime=self.time_step)
                # time_step is in seconds; JD advances in days
                current_jd += (self.time_step) / 86400.0
                continue
            # observe it, give it one quantum of exposure
            exptime = tdb['VisitExptime'].data[observed_idx]
            observed[observed_idx] += exptime
            # collect observation data to put in table
            tileid_observed = tdb['TileID'].data[observed_idx]
            target_index = tdb['TargetIndex'].data[observed_idx]
            target_name = self.targets[target_index].name
            groups = self.targets[target_index].groups
            target_group = groups[0] if groups else 'None'
            target_overhead = self.targets[target_index].overhead
            # Get the index of the first value in index_to_target that matches
            # the index of the target.
            target_index_first = numpy.nonzero(tdb['TargetIndex'].data == target_index)[0][0]
            # Get the index of the pointing within its target.
            pointing_index = observed_idx - target_index_first
            # Record angular distance to moon
            dist_to_moon = scheduler.moon_to_pointings[observed_idx]
            # Update the table with the schedule.
            # plane-parallel airmass from the altitude of the pointing
            airmass = 1.0 / numpy.cos(numpy.radians(90.0 - alt))
            self._record_observation(current_jd, self.observing_plan.observatory,
                                     target_name=target_name,
                                     target_group=target_group,
                                     tileid = tileid_observed,
                                     pointing_index=pointing_index,
                                     ra=tdb['RA'].data[observed_idx],
                                     dec=tdb['DEC'].data[observed_idx],
                                     pa=tdb['PA'].data[observed_idx],
                                     airmass=airmass,
                                     lunation=lunation,
                                     shadow_height= hz, #hz[valid_priority_idx[obs_tile_idx]],
                                     dist_to_moon=dist_to_moon,
                                     lst=current_lst,
                                     exptime=exptime,
                                     totaltime=exptime * target_overhead)
            # advance by wall-clock time including overhead (seconds -> days)
            current_jd += exptime * target_overhead / 86400.0
        return observed
    def animate_survey(self, filename='lvm_survey.mp4', step=100,
                       observatory=None, projection='mollweide'):
        """Create an animation of the survey progress and save as an mp4 file.

        Parameters
        ----------
        filename : str
            Name of the mp4 file, defaults to ``'lvm_survey.mp4'``.
        step : int
            Number of observations per frame of movie.
        observatory : str
            Either ``'LCO'`` or ``'APO'`` or `None` (plots both).
        projection : str
            Which projection of the sphere to use. Defaults to Mollweide.
        """
        # Keep only rows that correspond to actual observations.
        data = self.schedule[self.schedule['target'] != '-']
        if observatory:
            data = data[data['observatory'] == observatory]
        # total number of frames
        ll = int(len(data) / step)
        x,y = convert_to_mollweide(data['ra'], data['dec'])
        # map each target name to a numeric color index
        tt = [target.name for target in self.targets]
        g = numpy.array([tt.index(i) for i in data['target']], dtype=float)
        t = data['JD']
        fig, ax = get_axes(projection=projection)
        # scat = ax.scatter(x[:1], y[:1], c=g[:1], s=1, edgecolor=None, edgecolors=None)
        scat = ax.scatter(x, y, c=g % 19, s=0.05, edgecolor=None, edgecolors=None, cmap='tab20')
        # fig.show()
        # return
        def animate(ii):
            # frame callback: reveal the first ii*step observations
            if ii % 10 == 0:
                print('%.1f %% done\r' % (ii / ll * 100))
            scat.set_offsets(numpy.stack((x[:ii * step], y[:ii * step]), axis=0).T)
            scat.set_array(g[:ii * step])
            ax.set_title(str(t[ii]))
            return scat,
        anim = animation.FuncAnimation(fig, animate, frames=range(1, ll), interval=1,
                                       blit=True, repeat=False)
        # requires ffmpeg with libx264 available on the system
        anim.save(filename, fps=24, extra_args=['-vcodec', 'libx264'])
def plot(self, observatory=None, projection='mollweide', tname=None, fast=False, annotate=False, edge=False):
"""Plots the observed pointings.
Parameters
----------
observatory : str
Plot only the points for that observatory. Otherwise, plots all
the pointings.
projection : str
The projection to use, either ``'mollweide'`` or ``'rectangular'``.
tname : str
Select only a particular target name to plot
fast : bool
Plot IFU sized and shaped patches if `False`. This is the default.
Allows accurate zooming and viewing. If `True`, plot scatter-plot
dots instead of IFUs, for speed sacrificing accuracy.
This is MUCH faster.
annotate : bool
Write the targets' names next to the target coordinates. Implies
``fast=True``.
edge : bool
Draw tile edges and make tiles partly transparent to better judge overlap.
Makes zoomed-out view look odd, so use default False.
Returns
-------
figure : `matplotlib.figure.Figure`
The figure with the plot.
"""
if annotate is True:
fast = True
color_cycler = cycler.cycler(bgcolor=['b', 'r', 'g', 'y', 'm', 'c', 'k'])
fig, ax = get_axes(projection=projection)
if tname != None:
data = self.schedule[self.schedule['target'] == tname]
else:
data = self.schedule[self.schedule['target'] != '-']
if observatory:
data = data[data['observatory'] == observatory]
if fast is True:
if projection == 'mollweide':
x,y = convert_to_mollweide(data['ra'], data['dec'])
else:
x,y = data['ra'], data['dec']
tt = [target.name for target in self.targets]
g = numpy.array([tt.index(i) for i in data['target']], dtype=float)
ax.scatter(x, y, c=g % 19, s=0.05, edgecolor=None, edgecolors=None, cmap='tab20')
if annotate is True:
_, text_indices = numpy.unique(g, return_index=True)
for i in range(len(tt)):
plt.text(x[text_indices[i]], y[text_indices[i]], tt[i], fontsize=9)
else:
for ii, sty in zip(range(len(self.targets)), itertools.cycle(color_cycler)):
target = self.targets[ii]
name = target.name
target_data = data[data['target'] == name]
if edge:
patches = [self.ifu.get_patch(scale=target.telescope.plate_scale, centre=[p['ra'], p['dec']], pa=p['pa'],
edgecolor='k', linewidth=1, alpha=0.5, facecolor=sty['bgcolor'])[0]
for p in target_data]
else:
patches = [self.ifu.get_patch(scale=target.telescope.plate_scale, centre=[p['ra'], p['dec']], pa=p['pa'],
edgecolor='None', linewidth=0.0, facecolor=sty['bgcolor'])[0]
for p in target_data]
if projection == 'mollweide':
patches = [transform_patch_mollweide(patch) for patch in patches]
for patch in patches:
ax.add_patch(patch)
if observatory != None:
ax.set_title(f'Observatory: {observatory}')
return fig
def _create_observing_plan(self):
"""Returns an `.ObservingPlan` from the configuration file."""
observatory = config['observing_plan']
obs_data = config['observing_plan'][observatory]
start_date = obs_data['start_date']
end_date = obs_data['end_date']
return ObservingPlan(start_date, end_date, observatory=observatory)
def _record_observation(self, jd, observatory, target_name='-', target_group='-',
tileid=-1, pointing_index=-1, ra=-999., dec=-999., pa=-999.,
airmass=-999., lunation=-999., shadow_height=-999., dist_to_moon=-999.,
lst=-999.,
exptime=0., totaltime=0.):
"""Adds a row to the schedule."""
self.schedule.append((jd, observatory, target_name, target_group, tileid, pointing_index,
ra, dec, pa, airmass, lunation, shadow_height, dist_to_moon, lst, exptime,
totaltime))
def get_target_time(self, tname, group=False, observatory=None, lunation=None,
return_lst=False):
"""Returns the JDs or LSTs for a target at an observatory.
Parameters
----------
tname : str
The name of the target or group. Use ``'-'`` for unused time.
group : bool
If not true, ``tname`` will be the name of a group not a single
target.
observatory : str
The observatory to filter for.
lunation : list
Restrict the data to a range in lunation. Defaults to returning
all lunations. Set to ``[lmin, lmax]`` to return values of
``lmin < lunation <= lmax``.
return_lst : bool
If `True`, returns an array with the LSTs of all the unobserved
times.
Returns
-------
table : `~numpy.ndarray`
An array containing the times the target is observed at an
observatory, as JDs. If ``return_lst=True`` returns an array of
the corresponding LSTs.
"""
column = 'group' if group is True else 'target'
t = self.schedule[self.schedule[column] == tname]
if observatory:
t = t[t['observatory'] == observatory]
if lunation != None:
t = t[(t['lunation'] > lunation[0]) * (t['lunation'] <= lunation[1])]
if return_lst:
return t['lst'].data
else:
return t['JD'].data
def print_statistics(self, out_file=None, out_format="ascii", overwrite_out=True, observatory=None, targets=None, return_table=False):
"""Prints a summary of observations at a given observatory.
Parameters
----------
observatory : str
The observatory to filter for.
targets : `~lvmsurveysim.target.TargetList`
The targets to summarize. If `None`, use ``self.targets``.
return_table : bool
If `True`, return a `~astropy.table.Table` with the results.
out_file : str
Outfile to write statistics.
out_format : str
Outfile format consistent with astropy.table dumps
"""
if targets is None:
targets = self.targets
names = [t.name for t in targets]
time_on_target = {} # time spent exposing target
exptime_on_target = {} # total time (exp + overhead) on target
tile_area = {} # area of a single tile
target_ntiles = {} # number of tiles in a target tiling
target_ntiles_observed = {} # number of observed tiles
target_nvisits = {} # number of visits for each tile
surveytime = 0.0 # total time of survey
names.append('-') # deals with unused time
for tname, i in zip(names, range(len(names))):
if (tname != '-'):
target = self.targets[i]
tile_area[tname] = target.get_pixarea(ifu=self.ifu)
target_ntiles[tname] = len(numpy.where(self.tiledb.tile_table['TargetIndex'] == i)[0])
target_nvisits[tname] = float(target.n_exposures / target.min_exposures)
else:
tile_area[tname] = -999
target_ntiles[tname] = -999
target_nvisits[tname] = 1
tdata = self.schedule[self.schedule['target'] == tname]
if observatory:
tdata = tdata[tdata['observatory'] == observatory]
exptime_on_target[tname] = numpy.sum(tdata['exptime'].data)
target_ntiles_observed[tname] = len(tdata) / target_nvisits[tname]
target_total_time = numpy.sum(tdata['totaltime'].data)
time_on_target[tname] = target_total_time
surveytime += target_total_time
# targets that completely overlap with others have no tiles
for t in self.targets:
if target_ntiles[t.name] == 0:
print(t.name + ' has no tiles')
target_ntiles[t.name] = 1
rows = [
(t if t != '-' else 'unused',
numpy.float(target_ntiles[t]),
numpy.around(target_ntiles_observed[t], decimals=2),
numpy.around(time_on_target[t] / 3600.0, decimals=2),
numpy.around(exptime_on_target[t] / 3600.0, decimals=2),
numpy.around(time_on_target[t] / surveytime, decimals=2),
numpy.around(target_ntiles_observed[t] * tile_area[t],
decimals=2) if t != '-' else -999,
numpy.around(float(target_ntiles_observed[t]) / float(target_ntiles[t]),
decimals=2) if t != '-' else -999)
for t in names]
stats = astropy.table.Table(rows=rows,
names=['Target', 'tiles', 'tiles_obs', 'tottime/h',
'exptime/h', 'timefrac', 'area', 'areafrac'],
dtype=('S8', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4'))
print('%s :' % (observatory if observatory != None else 'APO+LCO'))
stats.pprint(max_lines=-1, max_width=-1)
if out_file != None:
stats.write(out_file, format=out_format, overwrite=overwrite_out)
if return_table:
return stats
def plot_survey(self, observatory=None, bin_size=30., targets=None, groups=None,
use_groups=False, use_primary_group=True,
show_ungrouped=True, cumulative=False, lst=False,
lunation=None,
show_unused=True, skip_fast=False, show_mpld3=False):
"""Plot the hours spent on target.
Parameters
----------
observatory : str
The observatory to plot. If `None`, all observatories.
bin_size : int
The number of days in each bin of the plot.
targets : list
A list with the names of the targets to plot. If empty, plots all
targets.
groups : list
A list with the names of the groups to plot. If empty, plots all
groups.
use_groups : bool
If set, the targets are grouped together using the
``Target.groups`` list.
use_primary_group : bool
If `True`, a target will only be added to its primary group (the
first one in the group list). Only used when ``use_groups=True``.
show_ungrouped : bool
If `True`, targets that don't belong to any group are plotted
individually. Only used when ``use_groups=True``.
cumulative : bool or str
If `True`, plots the cumulative sum of hours spent on each target.
If ``'group'``, it plots the cumulative on-target hours normalised
by the total hours needed to observe the target group. If ``'survey'``,
plots the cumulative hours normalised by the total survey hours.
When ``cumulative`` is not `False`, ``bin_size`` is set to 1.
lst : bool
Whether to bin the used time by LST instead of JD.
show_unused : bool
Display the unused time.
lunation : list
Range of lunations to include in statistics. Defaults to all lunations.
Set to ``[lmin, lmax]`` to return values of ``lmin < lunation <= lmax``.
Can be used to restrict lst usage plots to only bright, grey, or
dark time.
skip_fast : bool
If set, do not plot targets that complete in the first 20% of the
survey.
Return
------
fig : `~matplotlib.figure.Figure`
The Matplotlib figure of the plot.
"""
assert self.schedule != None, 'you still have not run a simulation.'
if not targets:
targets = [target.name for target in self.targets]
ncols = 2 if len(targets) > 15 else 1
if lst:
bin_size = 1. if bin_size == 30. else bin_size
assert cumulative is False, 'cumulative cannot be used with lst=True.'
if cumulative != False:
bin_size = 1
fig, ax = plt.subplots(figsize=(12, 8))
# Leaves a margin on the right to put the legend
fig.subplots_adjust(right=0.65 if ncols == 2 else 0.8)
ax.set_prop_cycle(color=['r', 'g', 'b', 'c', 'm', 'y', 'g', 'b', 'c', 'm', 'y', 'r', 'b',
'c', 'm', 'y', 'r', 'g', 'c', 'm', 'y', 'r', 'g', 'b', ],
linestyle=['-', '--', '-.', ':', '-', '--', '-.', ':', '-', '--', '-.',
':', '-', '--', '-.', ':', '-', '--', '-.', ':', '-', '--',
'-.', ':'])
min_b = (numpy.min(self.schedule['JD']) - 2451545.0) if not lst else 0.0
max_b = (numpy.max(self.schedule['JD']) - 2451545.0) if not lst else 24.0
b = numpy.arange(min_b, max_b + bin_size, bin_size)
# Creates a list of groups to plot. If use_groups=False,
# this is just the list of targets.
if not use_groups:
groups = [target.name for target in self.targets]
else:
groups = groups or self.targets.get_groups()
# Adds the ungrouped targets.
if show_ungrouped:
for target in self.targets:
if len(target.groups) == 0:
groups.append(target.name)
for group in groups:
# Cumulated group heights
group_heights = numpy.zeros(len(b) - 1, dtype=numpy.float)
group_target_tot_time = 0.0
# If we are not using groups or the "group"
# name is that of an ungrouped target.
if not use_groups or group in self.targets._names:
targets = [group]
else:
targets = self.targets.get_group_targets(group, primary=use_primary_group)
for tname in targets:
t = self.targets.get_target(tname)
tindex = [target.name for target in self.targets].index(tname)
# plot each target
tt = self.get_target_time(tname, observatory=observatory, lunation=lunation, return_lst=lst)
if len(tt) == 0:
continue
if not lst:
tt -= 2451545.0
heights, bins = numpy.histogram(tt, bins=b)
heights = numpy.array(heights, dtype=float)
heights *= t.exptime * t.min_exposures / 3600.0
ntiles = len(numpy.where(self.tiledb.tile_table['TargetIndex'].data == tindex)[0])
target_tot_time = ntiles * t.exptime * t.n_exposures / 3600.
if skip_fast:
completion = heights.cumsum() / target_tot_time
if numpy.quantile(completion, 0.2) >= 1:
continue
group_heights += heights
group_target_tot_time += target_tot_time
# Only plot the heights if they are not zero. This prevents
# targets that are not observed at an observatory to be displayed.
if numpy.sum(group_heights) > 0:
if cumulative is False:
ax.plot(bins[:-1] + numpy.diff(bins) / 2, group_heights, label=group)
else:
ax.plot(bins[:-1] + numpy.diff(bins) / 2, numpy.cumsum(group_heights)/group_target_tot_time, label=group)
# deal with unused time
tt = self.get_target_time('-', observatory=observatory, return_lst=lst)
if not lst:
tt -= 2451545.0
heights, bins = numpy.histogram(tt, bins=b)
heights = numpy.array(heights, dtype=float)
heights *= self.time_step / 3600.0
if cumulative:
heights = heights.cumsum()
if show_unused and cumulative is False:
ax.plot(bins[:-1] + numpy.diff(bins) / 2, heights, ':',
color='k', label='Unused')
ax.set_xlabel('JD - 2451545.0' if not lst else 'LST / h')
if cumulative is False:
ax.set_ylabel('Hours on target / %.f %s' % ((bin_size, 'days')
if not lst else (bin_size, 'h')))
elif cumulative is True:
ax.set_ylabel('Hours on target [cumulative]')
elif cumulative == 'target':
ax.set_ylabel('Fraction of target completed')
elif cumulative == 'survey':
ax.set_ylabel('Fraction of survey time spent on target')
ax.set_title(observatory if observatory != None else 'APO+LCO')
# Move legend outside the plot
ax.legend(loc='upper left', bbox_to_anchor=(1.05, 1.0), ncol=ncols)
return fig
def plot_lunation(self, tname, group=False, observatory=None, dark_limit=0.2):
"""
plot the lunation distribution for a target. use '-' for unused time
Parameters
----------
tname : str
The name of the target or group. Use ``'-'`` for unused time.
group : bool
If not true, ``tname`` will be the name of a group not a single
target.
observatory : str
The observatory to filter for.
dark_limit : float
Limiting lunation value to count as dark time. Defaults to 0.2.
Return
------
fig : `~matplotlib.figure.Figure`
The Matplotlib figure of the plot.
"""
dark = self.get_target_time(tname, group=group, lunation=[-0.01, dark_limit],
observatory=observatory, return_lst=True)
bright = self.get_target_time(tname, group=group, lunation=[dark_limit, 1.0],
observatory=observatory, return_lst=True)
bin_size = 1
b = numpy.arange(0, 24 + bin_size, bin_size)
heights_dark, bins = numpy.histogram(dark, bins=b)
heights_dark = numpy.array(heights_dark, dtype=float)
heights_bright, bins = numpy.histogram(bright, bins=b)
heights_bright = numpy.array(heights_bright, dtype=float)
fig, ax = plt.subplots()
ax.plot(bins[:-1] + numpy.diff(bins) / 2, heights_dark, label='dark')
ax.plot(bins[:-1] + numpy.diff(bins) / 2, heights_bright, label='bright')
ax.legend()
plt.xlabel('LST')
plt.ylabel('# of exposures')
plt.title('unused' if tname == '-' else tname)
return fig
def plot_shadow_height(self, tname=None, group=False, observatory=None, norm=False, cumulative=0, linear_log=False):
"""
plot the shadow height distribution for a target. use '-' for unused time
Parameters
----------
tname : str
The name of the target or group. Use 'ALL' for all groups and group==True.
group : bool
If not true, ``tname`` will be the name of a group not a single
target.
observatory : str
The observatory to filter for.
norm : bool
Normalize the histograms instead of plotting raw numbers.
Return
------
fig : `~matplotlib.figure.Figure`
The Matplotlib figure of the plot.
"""
if linear_log is False:
b = numpy.logspace(numpy.log10(100.),numpy.log10(100000.),100)
else:
b = numpy.linspace(2, 5, 31)
fig, ax = plt.subplots()
self._plot_histograms(ax, 'shadow_height', b, tname=tname, group=group, observatory=observatory,
norm=norm, cumulative=cumulative, linear_log=linear_log)
if linear_log is False:
ax.set_xscale("log")
plt.xlabel('shadow height / km')
plt.ylabel('# of exposures')
plt.legend()
#plt.show()
return fig
def plot_airmass(self, tname=None, group=False, observatory=None, norm=False, cumulative=0):
"""
plot the airmass distribution for a target or group(s).
Parameters
----------
tname : str
The name of the target or group. Use 'ALL' for all groups and group==True.
group : bool
If not true, ``tname`` will be the name of a group not a single
target.
observatory : str
The observatory to filter for.
norm : bool
Normalize the histograms instead of plotting raw numbers.
Return
------
fig : `~matplotlib.figure.Figure`
The Matplotlib figure of the plot.
"""
b = numpy.linspace(1.0,2.0,51)
fig, ax = plt.subplots()
self._plot_histograms(ax, 'airmass', b, tname=tname, group=group, observatory=observatory,
norm=norm, cumulative=cumulative)
plt.xlabel('airmass')
plt.ylabel('# of exposures' if norm==False else 'frequency')
plt.legend()
#plt.show()
return fig
def _plot_histograms(self, ax, keyword, bins, tname=None, group=False, observatory=None,
norm=False, cumulative=0, linear_log=False):
"""
plot a histogram of 'keyword' for a target or group(s).
Parameters
----------
ax : pyplot.ax
axes object to plot into
keyword : str
name of the column in the schedule table to plot.
bins : numpy.array
the array of bins
tname : str
The name of the target or group. Use 'ALL' for all groups and group==True.
group : bool
If not true, ``tname`` will be the name of a group not a single
target.
observatory : str
The observatory to filter for.
norm : bool
Normalize the histograms instead of plotting raw numbers.
cumulative : int
plot cumulative histogram (>0), reverse accumulation (<0)
"""
column = 'group' if group is True else 'target'
if tname != None and tname != 'ALL':
t = self.schedule[self.schedule[column] == tname]
else:
t = self.schedule
if observatory:
t = t[t['observatory'] == observatory]
if group==True and tname=='ALL':
groups = self.targets.get_groups()
for group in groups:
tt = t[t['group'] == group]
am = tt[keyword]
am = am[numpy.where(am>0)]
if linear_log is True:
am = numpy.log10(am)
ax.hist(am, bins=bins, histtype='step', label=group, density=norm, cumulative=cumulative)
else:
am = t[keyword]
am = am[numpy.where(am>0)]
if linear_log is True:
am = numpy.log10(am)
ax.hist(am, bins=bins, histtype='step', label=tname, density=norm, cumulative=cumulative)
|
# -*- coding: utf-8 -*-
import datetime
import re
import time
import traceback
import base64
import response_data
from dateutil.relativedelta import relativedelta
from lxml import etree
from base_request_param import mix_current_time
if __name__ == '__main__':
import sys
sys.path.append('../../../')
sys.path.append('../../../..')
from crawler.base_crawler import BaseCrawler
from base_info_crawler import *
else:
from worker.crawler.base_crawler import BaseCrawler
from worker.crawler.china_mobile.chongqing.base_info_crawler import *
# const url
# Entry points of the Chongqing China Mobile self-service portal.
START_URL = "https://service.cq.10086.cn/httpsFiles/pageLogin.html"
ICS_SERVICE_URL = "http://service.cq.10086.cn/ics"
HTTPS_ICS_SERVICE_URL = "https://service.cq.10086.cn/ics"
LOGIN_URL = "https://cq.ac.10086.cn/SSO/loginbox"
AUTH_RETURN_URL = "http://service.cq.10086.cn/CHOQ/authentication/authentication_return.jsp"
SUCCESS_LOGIN_URL = "http://service.cq.10086.cn/ics?service=page/login&listener=getLoginInfo"
# const variable
# Marker string the portal embeds in responses when the session has expired.
XML_SESSION_TIMEOUT_INFO = "session is time out"
# const personal info url
MY_MOBILE_URL = "http://service.cq.10086.cn/myMobile/myMobile.html"
# const personal info variable
# Maps our normalized field names to the portal's field names.
FORMAL_KEY_TO_CQ_KEY = {'full_name': 'NAME', 'is_realname_register': 'smzyz', 'open_date': 'STARTDATE'}
# Event name -> portal "good" ename. NOTE(review): 'new_uwer_info' looks like a
# typo for 'new_user_info' but is a lookup key used elsewhere -- do not rename.
PERSONAL_INFO_EVENT_NAME_DICT = {'userInfo': "GRXX", "userInfo2": "GRXX", "starService": "XJFW", "new_uwer_info": "XFMX"}
# const detail bill url
DETAIL_BILL_URL = "http://service.cq.10086.cn/myMobile/detailBill.html"
class Crawler(BaseCrawler):
    """Crawler for the China Mobile (Chongqing) self-service portal.

    Logs in with phone number + service password + SMS verification code, then
    scrapes personal info, itemised call logs and monthly phone bills.

    NOTE(review): this module relies on Python 2 semantics in several places
    (``base64.b64encode`` on a ``str``, ``map(...).count(...)``) -- confirm the
    target interpreter before porting.
    """

    def __init__(self, **kwargs):
        super(Crawler, self).__init__(**kwargs)
        # Number of captcha images fetched during the current login attempt;
        # reset once the session is (re)established.
        self.image_validate_count = 0

    def need_parameters(self, **kwargs):
        """Credentials the caller must supply: service password and SMS code."""
        return ['pin_pwd', 'sms_verify']

    def get_login_verify_type(self, **kwargs):
        """Login verification for this carrier is done via SMS."""
        return 'SMS'

    def get_verify_type(self, **kwargs):
        """No additional (captcha) verification is requested from the caller."""
        return ''

    def send_login_verify_request(self, **kwargs):
        """Ask the portal to send a login SMS verification code.

        :return: ``(level, key, image_str)``; ``image_str`` is always empty
            because this crawler uses SMS, not captcha, verification.
        """
        # self.session.cookies.clear()
        # Go to Login Page
        headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Referer": "http://service.cq.10086.cn/httpsFiles/pageLogin.html"
        }
        code, key, resp = self.get(START_URL, headers=headers)
        if code != 0:
            return code, key, ""
        verify_type = kwargs.get('verify_type', '')
        if verify_type in ['', 'sms']:
            url = "https://service.cq.10086.cn/ics"
            headers = {
                "X-Requested-With": "XMLHttpRequest",
                "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
                "Referer": "https://service.cq.10086.cn/httpsFiles/pageLogin.html",
            }
            data = {
                "service": "ajaxDirect/1/login/login/javascript/",
                "pagename": "login",
                "eventname": "interfaceSendSMS",
                "cond_SERIAL_NUMBER": kwargs['tel'],
                "ajaxSubmitType": "post",
                "ajax_randomcode": mix_current_time()
            }
            code, key, resp = self.post(url, headers=headers, params=data)
            if code != 0:
                return code, key, ""
            try:
                # The portal throttles SMS requests; surface as a user-level error.
                if u'获取短信验证码过于频繁' in resp.text:
                    self.log("user", 'send_sms_too_quick_error', resp)
                    return 9, 'send_sms_too_quick_error', ''
                if '"FLAG":"true"' not in resp.text:
                    self.log("crawler", u"未知原因发送短信失败", resp)
                    return 9, "send_sms_error", ""
            except:
                error = traceback.format_exc()
                # NOTE(review): the ``.format(error)`` below is a no-op (the
                # message has no placeholder) -- kept as-is.
                self.log("crawler", u"解析短信发送结果失败".format(error), resp)
                return 9, "html_error", ""
        return 0, "success", ''

    def increase_image_validate_count(self):
        """
        Increase the time count when request a new validate image
        :return: None
        """
        self.image_validate_count += 1

    def reset_image_validate_count(self):
        """
        Reset the time count when request a new validate image
        :return: None
        """
        self.image_validate_count = 0

    def login(self, **kwargs):
        """
        Login process
        1. Request SSO Login
        2. Request Start Event
        3. Request Login Box
        4. Request Auth Return
        5. Request Return Success
        return
            status_key: str, status-code key (see status_code); empty when no error
            level: int, error level
            message: unicode, detailed error message
        """
        for i in range(self.max_retry):
            codetype = '3004'
            code, key, resp = get_validate_image(self)
            if code != 0:
                continue
            # Solve the captcha image via the cloud OCR ("dama") service.
            key, result, cid = self._dama(resp.content, codetype)
            # print result, cid, "***---***" * 10
            if key == "success" and result != "":
                captcha_code = str(result)
            else:
                self.log("website", "website_busy_error: 云打码失败{}".format(result), '')
                code, key = 9, "auto_captcha_code_error"
                continue
            # Input Validation Check
            level, key, message = is_validate(kwargs['tel'], kwargs['pin_pwd'], captcha_code, self)
            if level != 0:
                self.log("crawler", str(key) + ": " + message, "")
                return level, key
            # Request SSO Login
            headers = {
                'Referer': 'https://service.cq.10086.cn/httpsFiles/pageLogin.html',
                'Origin': 'https://service.cq.10086.cn',
                'Host': 'service.cq.10086.cn',
                "X-Requested-With": "XMLHttpRequest",
                "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
            }
            params = get_sso_login_param(kwargs['tel'], kwargs['pin_pwd'], captcha_code, kwargs['sms_code'])
            code, key, resp = self.post(HTTPS_ICS_SERVICE_URL, headers=headers, params=params)
            if code != 0:
                return code, key
            if '"RESULT":"0"' not in resp.text:
                # A reply like {"RESULTINFO":"","RESULT":"-1"} most likely
                # means requests were sent too frequently.
                if "短信随机码不正确或已过期" in resp.text or "短信验证码输入错误" in resp.text:
                    self.log("user", 'verify_error', resp)
                    return 9, 'verify_error'
                if "超过限制" in resp.text:
                    # Too many wrong SMS codes within 24h; account temporarily locked.
                    self.log("user", "短信错误次数超限制", resp)
                    return 9, 'over_query_limit'
                if '"WhitenumFLAG":"false","FLAG":"false"' in resp.text:
                    self.log("crawler", "login_param_error", resp)
                    return 1, 'login_param_error'
                if "验证码不正确" in resp.text:
                    # Captcha mis-recognised: report to the OCR service and retry.
                    self.log("website", "打码失败", resp)
                    self._dama_report(cid)
                    code, key = 9, "auto_captcha_code_error"
                    continue
                self.log("crawler", 'login_param_error', resp)
                return 2, 'login_param_error'
            break
        else:
            # Retries exhausted; propagate the last (code, key).
            return code, key
        self.reset_image_validate_count()
        # The second-step (SMS) validation was moved here:
        url = "http://service.cq.10086.cn/ics"
        data = {
            "service": "ajaxDirect/1/secondValidate/secondValidate/javascript/",
            "pagename": "secondValidate",
            "eventname": "checkSMSINFO",
            "cond_USER_PASSSMS": base64.b64encode(kwargs['pin_pwd']),
            "cond_CHECK_TYPE": "DETAIL_BILL",
            "cond_loginType": "0",
            "ajaxSubmitType": "post",
            "ajax_randomcode": mix_current_time()
        }
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        code, key, resp = self.post(url, data=data, headers=headers)
        if code != 0:
            return code, key
        if u'验证成功' not in resp.text:
            self.log("user", 'pin_pwd_error', resp)
            return 9, 'pin_pwd_error'
        return 0, 'success'

    def is_session_timeout(self, data_string):
        """
        Check if the session timeout keyword is in the target string
        :param data_string: Response returned by various of requests
        :return: True/False
        """
        if XML_SESSION_TIMEOUT_INFO in data_string:
            self.reset_image_validate_count()
            return True
        return False

    def crawl_info(self, **kwargs):
        """
        Crawl user's personal info
        1. Go to Personal Info Page
        2. Get Good IDs
        3. Push Local IP to Cookie
        4. Get Personal Info
        :param kwargs:
        :return:
            status_key: str, status-code key (see status_code); empty when no error
            level: int, error level
            message: unicode, detailed error message
            info: dict, account info (see account info format)
        """
        # Go to Personal Info Page
        event_name = "userInfo2"
        headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Referer": "http://service.cq.10086.cn/"
        }
        code, key, resp = self.get(MY_MOBILE_URL, headers=headers)
        if code != 0:
            return code, key, {}
        detail_bill_good_ename = "XFMX"
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        code, key, resp_good_id = self.post(ICS_SERVICE_URL, headers=headers, params=get_good_id_param(detail_bill_good_ename))
        if code != 0:
            return code, key, {}
        # Push Local IP to Cookie
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
        }
        code, key, resp = self.get(ICS_SERVICE_URL, params=get_ip_to_cookie_param(), headers=headers)
        if code != 0:
            return code, key, {}
        # Extract Good ID from XML
        if self.is_session_timeout(resp_good_id.text):
            # self.log("crawl_error", "session is time out", request_good_id_result)
            return 9, "outdated_sid", {}
        level, key, message, result = get_good_id_from_xml(resp_good_id.text)
        if level != 0:
            self.log("crawler", "{}: {}".format(key, message), resp_good_id)
            return level, key, {}
        else:
            good_id = result
        # Push Local IP to Cookie
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
        }
        code, key, resp = self.get(ICS_SERVICE_URL, params=get_ip_to_cookie_param(), headers=headers)
        if code != 0:
            return code, key, {}
        # Get Personal Info
        good_ename = PERSONAL_INFO_EVENT_NAME_DICT[event_name]
        good_name = PERSONAL_INFO_GOOD_NAME_DICT[good_ename]
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/myMobile.html"
        }
        code, key, resp = self.get(ICS_SERVICE_URL, headers=headers, params=get_personal_info_param(event_name, good_ename, good_id, good_name))
        if code != 0:
            return code, key, {}
        # Extract Personal Info from Personal Info Result
        if self.is_session_timeout(resp.text):
            return 9, "outdated_sid", {}
        code, key, message, result = get_personal_info_from_xml(resp.text)
        if code != 0:
            return code, key, {}
        personal_info_dict = dict()
        # NOTE(review): this loop clobbers ``key``; harmless because the
        # success return below uses the literal 'success'.
        for key in FORMAL_KEY_TO_CQ_KEY:
            personal_info_dict[key] = result.get(FORMAL_KEY_TO_CQ_KEY[key], "")
        personal_info_dict["open_date"] = self.time_transform(personal_info_dict["open_date"], str_format="%Y-%m-%d")
        personal_info_dict["address"] = ""
        if personal_info_dict['is_realname_register'] == '1':
            personal_info_dict['is_realname_register'] = True
        else:
            personal_info_dict['is_realname_register'] = False
        personal_info_dict['id_card'] = ''
        return 0, 'success', personal_info_dict

    def send_verify_request(self, **kwargs):
        """
        Send SMS verify code
        1. Go to Detail Info Page
        2. Trigger Second Validation
        :param kwargs:
        :return:
            status_key: str, status-code key (see status_code); empty when no error
            level: int, error level
            message: unicode, detailed error message
            image_str: str, base64 captcha image; empty for SMS
        """
        # Go to Detail Info Page
        event_name = "new_uwer_info"
        headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Referer": "http://service.cq.10086.cn/myMobile/myMobile.html"
        }
        code, key, resp = self.get(DETAIL_BILL_URL, headers=headers)
        if code != 0:
            return code, key, ""
        # Get Good IDs
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        code, key, resp = self.post(ICS_SERVICE_URL, headers=headers, params=get_good_id_param(PERSONAL_INFO_EVENT_NAME_DICT[event_name]))
        if code != 0:
            return code, key, ""
        # Push Local IP to Cookie
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
        }
        code, key, resp = self.get(ICS_SERVICE_URL, params=get_ip_to_cookie_param(), headers=headers)
        if code != 0:
            return code, key, ""
        # Trigger Second Validation(Send SMS Validate Code to Cellphone)
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        code, key, resp = self.post(ICS_SERVICE_URL, headers=headers, params=get_second_validation_param())
        if code != 0:
            return code, key, ""
        if self.is_session_timeout(resp.text):
            return 9, "outdated_sid", ""
        return 0, 'success', ''

    def verify(self, **kwargs):
        """
        Check SMS validate code
        :param kwargs:
        :return:
            status_key: str, status-code key (see status_code); empty when no error
            level: int, error level
            message: unicode, detailed error message
        """
        # Check SMS Validate Code
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        code, key, resp_data = self.post(ICS_SERVICE_URL, headers=headers, params=get_check_sms_info_param(kwargs['sms_code']))
        if code != 0:
            return code, key
        # Push Local IP to Cookie
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
        }
        code, key, resp_ip = self.get(ICS_SERVICE_URL, params=get_ip_to_cookie_param(), headers=headers)
        if code != 0:
            return code, key
        # Extract SMS Validate Result
        if self.is_session_timeout(resp_data.text):
            return 9, "outdated_sid"
        code, key, err = get_second_validate_result_from_xml(resp_data.text)
        if code != 0:
            # Levels 1/2 are user mistakes; everything else is a crawler issue.
            if code in [1, 2]:
                self.log("user", "{}: {}".format(key, err), resp_data)
            else:
                self.log("crawler", "{}: {}".format(key, err), resp_data)
        return code, key

    def time_transform(self, time_str, bm='utf-8', str_format="%Y-%m-%d %H:%M:%S"):
        """Convert a portal timestamp string to a unix-epoch string.

        A 14-char "MM-DD HH:MM:SS" value gets the current (or previous) year
        prepended; a date-only format gets a fixed noon time appended.
        """
        # 12-06 09:57:32
        if time_str == '':
            return ''
        if str_format == "%Y-%m-%d %H:%M:%S" and len(time_str) == 14:
            today_month = datetime.date.today().month
            today_year = datetime.date.today().year
            str_month = int(time_str[:2])
            # A month in the future must belong to last year.
            if str_month > today_month:
                str_year = today_year - 1
            else:
                str_year = today_year
            time_str = str(str_year) + "-" + time_str
        if str_format == "%Y-%m-%d":
            time_str += " 12:00:00"
            str_format = "%Y-%m-%d %H:%M:%S"
        time_type = time.strptime(time_str.encode('utf-8'), str_format)
        return str(int(time.mktime(time_type)))

    def time_format(self, time_str, **kwargs):
        """Convert a call duration to total seconds (as a string).

        exec_type=1 parses "X时Y分Z秒"; exec_type=2 parses digit groups.
        NOTE(review): ``real_time`` is unbound if exec_type is neither 1 nor 2.
        """
        time_str = time_str.encode('utf-8')
        if 'exec_type' in kwargs:
            exec_type = kwargs['exec_type']
            if (exec_type == 1):
                # print "********"
                # print time_str
                # print "********"
                xx = re.match(r'(.*时)?(.*分)?(.*秒)?', time_str)
                h, m, s = 0, 0, 0
                if xx.group(1):
                    hh = re.findall('\d+', xx.group(1))[0]
                    h = int(hh)
                if xx.group(2):
                    mm = re.findall('\d+', xx.group(2))[0]
                    m = int(mm)
                if xx.group(3):
                    ss = re.findall('\d+', xx.group(3))[0]
                    s = int(ss)
                real_time = h * 60 * 60 + m * 60 + s
            if (exec_type == 2):
                xx = re.findall(r'\d*', time_str)
                h, m, s = map(int, xx[::2])
                real_time = h * 60 * 60 + m * 60 + s
        return str(real_time)

    def crawl_call_log(self, **kwargs):
        """
        Crawl user's detail bill info
        1. Get Good IDs
        2. Get Detail Info
        :param kwargs:
        :return:
            status_key: str, status-code key (see status_code); empty when no error
            level: int, error level
            message: unicode, detailed error message
            call_log: list, call records (see call-log format)
        """
        # Get Good IDs
        pos_miss_list = []
        miss_list = []
        detail_bill_good_ename = "XFMX"
        message_list = []
        headers = {
            "Accept": "application/xml, text/xml, */*; q=0.01",
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        code, key, resp = self.post(ICS_SERVICE_URL, headers=headers, params=get_good_id_param(detail_bill_good_ename))
        if code != 0:
            return code, key, [], miss_list, pos_miss_list
        # Extract Good ID from XML
        if self.is_session_timeout(resp.text):
            return 9, "outdated_sid", [], miss_list, pos_miss_list
        level, key, message, result = get_good_id_from_xml(resp.text)
        if level != 0:
            self.log("crawler", "{}: {}".format(key, message), resp)
            return level, key, [], miss_list, pos_miss_list
        else:
            good_id = result
        # Get Detail Info
        current_time = datetime.datetime.now()
        headers = {
            "X-Requested-With": "XMLHttpRequest",
            "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
            "Referer": "http://service.cq.10086.cn/myMobile/detailBill.html"
        }
        detail_bill_list = list()
        # Fetch the last six months, newest first, with per-month retries.
        for month_offset in range(0, 6):
            year_month = (current_time - relativedelta(months=month_offset)).strftime('%Y%m')
            for i in range(self.max_retry):
                code, key, resp = self.post(ICS_SERVICE_URL, params=get_detail_bill_param(year_month, good_id), headers=headers)
                if code != 0:
                    message = "network_request_error"
                    continue
                # Extract Detail Info
                if self.is_session_timeout(resp.text):
                    key = "outdated_sid"
                    message = "会话超时"
                    continue
                level, key, message, result = get_detail_bill_from_xml(resp.text)
                # No detail records for this month.
                if "NO DATA" in message and key == "success":
                    self.log("crawler", "记录NO DATA信息", resp)
                    continue
                # Query month predates account opening: stop retrying this month.
                if key == 'expected_key_error' and message == u'用户的详单查询月份小于用户的开户月份':
                    break
                # Failed to extract call_log from the XML.
                if level != 0:
                    self.log("crawler", "记录call_log获取失败信息", resp)
                    continue
                # Build the result set from ``result``.
                try:
                    temp = []
                    call_from_set = set()
                    for record in result:
                        raw_call_from = record['c1']
                        call_from, error = self.formatarea(raw_call_from)
                        if not call_from:
                            call_from = raw_call_from
                            call_from_set.add(raw_call_from)
                        # self.log("crawler", "{} {}".format(error, call_from), "")
                        _tmp = {
                            "month": year_month,
                            "call_from": call_from,
                            "call_time": self.time_transform(record['c0']),
                            "call_to": "",
                            "call_tel": record['c3'],
                            "call_method": record['c2'],
                            "call_type": record['c5'],
                            "call_cost": record['c7'],
                            "call_duration": self.time_format(record['c4'], exec_type=1)}
                        temp.append(_tmp)
                    if call_from_set:
                        self.log("crawler", "call_from_set: {}".format(call_from_set), "")
                    detail_bill_list.extend(temp)
                    break
                except:
                    error = traceback.format_exc()
                    key, level, message = "unknown_error: "+error, 9, "转换时间单位失败%s" % error
                    continue
            else:
                # All retries failed for this month; record it as missing.
                if message != "network_request_error":
                    self.log("crawler", "{}{}".format(key, message), resp)
                if "NO DATA" in message and key == 'success':
                    pos_miss_list.append(year_month)
                # NOTE(review): ``level`` may be stale from a previous month
                # when every retry failed on the network -- verify.
                if level != 0 and "NO DATA" not in message:
                    message_list.append(key)
                    miss_list.append(year_month)
        # Everything missing: decide between site-busy and crawl failure.
        if len(miss_list + pos_miss_list) == 6:
            # NOTE(review): ``map(...).count`` requires Python 2 (list result).
            temp_list = map(lambda x: x.count('request_error') or x.count('website_busy_error') or x.count('outdated_sid') or x.count('success') or 0, message_list)
            if temp_list.count(0) == 0:
                return 9, 'website_busy_error', detail_bill_list, miss_list, pos_miss_list
            else:
                return 9, 'crawl_error', detail_bill_list, miss_list, pos_miss_list
        if not detail_bill_list:
            self.log("crawler", "获取数据为全空", "")
        return 0, "success", detail_bill_list, miss_list, pos_miss_list

    def crawl_phone_bill(self, **kwargs):
        """Crawl the monthly phone bills for the last six months.

        :return: ``(level, key, phone_bill, miss_list)``.
        """
        miss_list = []
        phone_bill = list()
        message_list = []
        crawl_phone_bill_good_ename = 'WDZD'
        for searchMonth in self.__monthly_period(6, '%Y%m'):
            crawl_phone_bill_data = {
                'service': 'ajaxDirect/1/myMobile/myMobile/javascript/',
                'pagename': 'myMobile',
                'eventname': 'getUserBill2',
                'cond_QUERY_DATE': searchMonth,
                'cond_GOODS_ID': get_good_id_param(crawl_phone_bill_good_ename)
            }
            # print get_good_id_param(crawl_phone_bill_good_ename)
            URL_PHONE_BILL = 'http://service.cq.10086.cn/ics'
            headers = {
                "X-Requested-With": "XMLHttpRequest",
                "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
                "Referer": "http://service.cq.10086.cn/myMobile/queryBill.html"
            }
            for i in range(self.max_retry):
                code, key, resp = self.post(URL_PHONE_BILL, data=crawl_phone_bill_data, headers=headers)
                if code != 0:
                    message = "network_request_error"
                    continue
                level, key, message, result = response_data.phone_bill_data(resp.text, searchMonth)
                # print result
                if level != 0:
                    continue
                if result:
                    phone_bill.append(result)
                break
            else:
                # All retries failed for this month; record it as missing.
                if message != "network_request_error":
                    self.log("crawler", "{}: {}".format(key, message), resp)
                message_list.append(key)
                miss_list.append(searchMonth)
        # The current month legitimately has no bill yet; don't count it.
        now_month = datetime.datetime.now().strftime("%Y%m")
        now_month in miss_list and miss_list.remove(now_month)
        if len(miss_list) == 5:
            # NOTE(review): ``map(...).count`` requires Python 2 (list result).
            temp_list = map(lambda x: x.count('request_error') or x.count('website_busy_error') or x.count('outdated_sid') or x.count('success') or 0, message_list)
            if temp_list.count(0) == 0:
                return 9, 'website_busy_error', phone_bill, miss_list
            else:
                return 9, "crawl_error", phone_bill, miss_list
        return 0, 'success', phone_bill, miss_list

    def __monthly_period(self, length=6, strf='%Y%m'):
        """Yield the last ``length`` months (newest first) formatted with ``strf``."""
        current_time = datetime.datetime.now()
        for month_offset in range(0, length):
            yield (current_time - relativedelta(months=month_offset)).strftime(strf)
if __name__ == '__main__':
    # Ad-hoc smoke-test entry point.
    # SECURITY NOTE(review): real-looking credentials are hard-coded below;
    # they should be moved to configuration/environment and rotated.
    # from worker.crawler.main import *
    c = Crawler()
    USER_ID = "15826472370"
    USER_PASSWORD = "088616"
    # self_test
    c.self_test(tel=USER_ID, pin_pwd=USER_PASSWORD)
|
#The MIT License (MIT)
#
#Copyright (c) 2014-2015 Bohdan Danishevsky ( dbn@aminis.com.ua )
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
"""
This file is part of sim-module package. Can be used for HTTP requests making.
sim-module package allows to communicate with SIM 900 modules: send SMS, make HTTP requests and use other
functions of SIM 900 modules.
Copyright (C) 2014-2015 Bohdan Danishevsky ( dbn@aminis.com.ua ) All Rights Reserved.
"""
from lib.sim900.gsm import *
class SimInetGSMConnection:
    """Symbolic GPRS bearer connection states as reported by "AT+SAPBR=2,<n>"."""
    inetUnknown = -1     # state could not be determined
    inetConnecting = 0   # bearer is being opened
    inetConnected = 1    # bearer is up (IP assigned)
    inetClosing = 2      # bearer is shutting down
    inetClosed = 3       # bearer is down
class SimInetGSM(SimGsm):
    def __init__(self, port, logger):
        """
        :param port: serial port used to talk to the SIM900 module
        :param logger: logger instance used for diagnostics
        """
        SimGsm.__init__(self, port, logger)
        # IP address of the active bearer (None until connected).
        self.__ip = None
        # user agent
        self.__userAgent = "Aminis SIM-900 module client (version 0.1)"
        # Last observed bearer state; updated by checkGprsBearer().
        self.__connectionState = SimInetGSMConnection.inetUnknown
        # Result code and body of the most recent HTTP request.
        self.__httpResult = 0
        self.__httpResponse = None
    @property
    def connectionState(self):
        """Last GPRS bearer state observed (a ``SimInetGSMConnection`` value)."""
        return self.__connectionState
    @property
    def httpResult(self):
        """HTTP status code of the most recent request."""
        return self.__httpResult
    @property
    def httpResponse(self):
        """Body of the most recent HTTP response (None before the first read)."""
        return self.__httpResponse
    @property
    def ip(self):
        """IP address of the active bearer, or None when not connected."""
        return self.__ip
    @property
    def userAgent(self):
        """User-Agent string sent with HTTP requests."""
        return self.__userAgent
    @userAgent.setter
    def userAgent(self, value):
        # Allow callers to override the default User-Agent string.
        self.__userAgent = value
def checkGprsBearer(self, bearerNumber = 1):
"""
Checks GPRS connection. After calling of this method
:param bearerNumber: bearer number
:return: True if checking was without mistakes, otherwise returns False
"""
self.logger.debug("checking GPRS bearer connection")
ret = self.commandAndStdResult(
"AT+SAPBR=2,{0}".format(bearerNumber),
1000,
["OK"]
)
if (ret is None) or (self.lastResult != "OK"):
self.setError("{0}: error, lastResult={1}, ret={2}".format(inspect.stack()[0][3], self.lastResult, ret))
return False
ret = str(ret).strip()
self.logger.debug("{0}: result = {1}".format(inspect.stack()[0][3], ret))
response = str(ret).split(":")
if len(response) < 2:
self.setError("{0}:error, wrong response length, ret = {1}".format(inspect.stack()[0][3], ret))
return False
#parsing string like:
# +SAPBR: 1,1,"100.80.75.124" - when connected (channel 1)
# +SAPBR: 1,3,"0.0.0.0" - when disconnected (channel 1)
if response[0] != "+SAPBR":
self.setWarn("{0}: warning, response is not '+SAPBR', response = {1}".format(inspect.stack()[0][3], response[0]))
return False
response = splitAndFilter(response[1], ",")
self.logger.debug("{0}: sapbr result = \"{1}\"".format(inspect.stack()[0][3], response))
if len(response) < 3:
self.setError("{0}: wrong SAPBR result length, (sapbr result = '{1}')".format(inspect.stack()[0][3], response[1]))
return False
if response[0] != str(bearerNumber):
return
self.__ip = None
if response[1] == "0":
self.__connectionState = SimInetGSMConnection.inetConnecting
elif response[1] == "1":
self.__connectionState = SimInetGSMConnection.inetConnected
self.__ip = response[2].strip("\"").strip()
elif response[1] == "2":
self.__connectionState = SimInetGSMConnection.inetClosing
elif response[1] == "3":
self.__connectionState = SimInetGSMConnection.inetClosed
else:
self.__connectionState = SimInetGSMConnection.inetUnknown
return True
def attachGPRS(self, apn, user=None, password=None, bearerNumber = 1):
"""
Attaches GPRS connection for SIM module
:param apn: Access Point Name
:param user: User name (Login)
:param password: Password
:param bearerNumber: Bearer number
:return: True if everything was OK, otherwise returns False
"""
#checking current connection state
if not self.checkGprsBearer(bearerNumber):
return False
#going out if already connected
if self.connectionState == SimInetGSMConnection.inetConnected:
return True
#Closing the GPRS PDP context. We dont care of result
self.execSimpleOkCommand("AT+CIPSHUT", 500)
#initialization sequence for GPRS attaching
commands = [
["AT+SAPBR=3,{0},\"CONTYPE\",\"GPRS\"".format(bearerNumber), 1000 ],
["AT+SAPBR=3,{0},\"APN\",\"{1}\"".format(bearerNumber, apn), 500 ],
["AT+SAPBR=3,{0},\"USER\",\"{1}\"".format(bearerNumber, user), 500 ],
["AT+SAPBR=3,{0},\"PWD\",\"{1}\"".format(bearerNumber, password), 500 ],
["AT+SAPBR=1,{0}".format(bearerNumber), 10000 ]
]
#executing commands sequence
if not self.execSimpleCommandsList(commands):
return False
#returning GPRS checking sequence
return self.checkGprsBearer()
def disconnectTcp(self):
"""
Disconnects TCP connection
:return:
"""
return self.commandAndStdResult("AT+CIPCLOSE", 1000, ["OK"])
def dettachGPRS(self, bearerNumber = 1):
"""
Detaches GPRS connection
:param bearerNumber: bearer number
:return: True if de
"""
#Disconnecting TCP. Ignoring result
self.disconnectTcp()
#checking current GPRS connection state
if self.checkGprsBearer(bearerNumber):
if self.connectionState == SimInetGSMConnection.inetClosed:
return True
#disconnecting GPRS connection for given bearer number
return self.execSimpleOkCommand("AT+SAPBR=0,{0}".format(bearerNumber), 1000)
def terminateHttpRequest(self):
"""
Terminates current HTTP request.
:return: True if when operation processing was without errors, otherwise returns False
"""
return self.execSimpleOkCommand("AT+HTTPTERM", 500)
def __parseHttpResult(self, httpResult, bearerChannel = None):
"""
Parses http result string.
:param httpResult: string to parse
:param bearerChannel: bearer channel
:return: returns http result code and response length
"""
self.logger.debug("{0}: dataLine = {1}".format(inspect.stack()[0][3], httpResult))
response = splitAndFilter(httpResult, ":")
if len(response) < 2:
self.setWarn("{0}: wrong HTTP response length, length = {1}".format(inspect.stack()[0][3], len(response)))
return None
if response[0] != "+HTTPACTION":
self.setWarn("{0}: http response is not a '+HTTPACTION', response = '{1}'".format(inspect.stack()[0][3], response[0]))
return None
response = splitAndFilter(response[1], ",")
if len(response) < 3:
self.setWarn("{0}: wrong response length".format(inspect.stack()[0][3]))
return None
#checking bearer channel if necessary
if bearerChannel is not None:
if response[0] != str(bearerChannel):
self.setWarn("{0}: bad bearer number".format(inspect.stack()[0][3]))
return None
httpResultCode = str(response[1])
if not httpResultCode.isnumeric():
self.setWarn("{0}: response code is not numeric!".format(inspect.stack()[0][3]))
return None
httpResultCode = int(httpResultCode)
if httpResultCode != 200:
return [httpResultCode, 0]
responseLength = str(response[2])
if not responseLength.isnumeric():
self.setWarn("{0}: response length is not numeric".format(inspect.stack()[0][3]))
return False
return [httpResultCode, int(responseLength)]
def __readHttpResponse(self, httpMethodCode, responseLength):
    """
    Reads http response data from SIM module buffer.

    Issues AT+HTTPREAD, strips the echoed "+HTTPREAD: <len>" header line,
    validates the reported length, and stores the remaining payload in
    self.__httpResponse.

    :param httpMethodCode: method selector forwarded to AT+HTTPREAD
                           (semantics defined by the module firmware — TODO confirm)
    :param responseLength: expected response length in bytes
    :return: True if reading was successful, otherwise returns False
    """
    self.logger.debug("asking for http response (length = {0})".format(responseLength))
    #trying to read HTTP response data
    ret = self.commandAndStdResult(
        "AT+HTTPREAD={0},{1}".format(httpMethodCode, responseLength),
        10000,
        ["OK"]
    )
    if (ret is None) or (self.lastResult != "OK"):
        self.setError("{0}: error reading http response data".format(inspect.stack()[0][3]))
        return False
    #removing leading \n symbols
    #TODO: we must remove only 1 \n, not all! Fix it! (strip() also removes
    #trailing whitespace, which may eat meaningful payload bytes)
    ret = str(ret).strip()
    #reading first string in response (it must be "+HTTPREAD")
    # Consume characters one by one until the first newline: that prefix is
    # the "+HTTPREAD: <len>" header, the rest of `ret` is the payload.
    httpReadResultString = ""
    while True:
        if len(ret) == 0:
            break
        httpReadResultString += ret[0]
        ret = ret[1:]
        if "\n" in httpReadResultString:
            break
    httpReadResultString = str(httpReadResultString).strip()
    if len(httpReadResultString) == 0:
        self.setError("{0}: wrong http response. Result is empty".format(inspect.stack()[0][3]))
        return False
    httpReadResult = str(httpReadResultString).strip()
    self.logger.debug("{0}: httpReadResult = {1}".format(inspect.stack()[0][3], httpReadResult))
    httpReadResult = splitAndFilter(httpReadResult, ":")
    if (len(httpReadResult) < 2) or (httpReadResult[0] != "+HTTPREAD"):
        self.setError("{0}: bad response (cant find '+HTTPREAD'".format(inspect.stack()[0][3]))
        return False
    # The module reports how many bytes follow; it must match what we asked for.
    if int(httpReadResult[1]) != responseLength:
        self.setWarn("{0}: bad response, wrong responseLength = {1}".format(inspect.stack()[0][3], responseLength))
        return False
    self.__httpResponse = ret
    return True
@staticmethod
def ___isOkHttpResponseCode(code):
    """
    Tells whether the given HTTP status code is a success code.

    :param code: http result code for checking
    :return: True if the code denotes a successful HTTP operation
    """
    # All 2xx success codes the module may report (incl. WebDAV 207, 226).
    return code in (200, 201, 202, 203, 204, 205, 206, 207, 226)
@staticmethod
def __isNoContentResponse(code):
    """
    Tells whether the given HTTP result code is the 'NO CONTENT' code.

    :param code: code for analysis
    :return: True when code is the 'NO CONTENT' (204) code, otherwise False
    """
    NO_CONTENT = 204
    return code == NO_CONTENT
@staticmethod
def ___isHttpResponseCodeReturnsData(code):
    """
    Tells whether an HTTP operation with this result code carries a body.

    :param code: http call result code
    :return: True if the request is expected to return data, otherwise False
    """
    # Only full (200) and partial (206) content responses carry a payload.
    return code == 200 or code == 206
def httpGet(self, server, port = 80, path = "/", bearerChannel = 1):
    """
    Makes an HTTP GET request to the given server and script.

    Runs the AT+HTTPINIT / HTTPPARA / HTTPACTION=0 sequence, parses the
    "+HTTPACTION:" result line, stores the status code in self.__httpResult
    and, when the response carries data, reads it into the response buffer.

    :param server: server (host) address
    :param port: http port
    :param path: path to the script
    :param bearerChannel: bearer channel number
    :return: True if operation was successfully finished. Otherwise returns False.
             Note: a non-2xx HTTP status still returns True — only transport
             errors return False; check self.httpResult for the status code.
    """
    self.__clearHttpResponse()
    #TODO: close only when opened
    self.terminateHttpRequest()
    #HTTP GET request sequence
    simpleCommands = [
        [ "AT+HTTPINIT", 2000 ],
        [ "AT+HTTPPARA=\"CID\",\"{0}\"".format(bearerChannel), 1000 ],
        # Argument order (server, path, port) with "{0}:{2}{1}" yields
        # "server:port" followed by path.
        [ "AT+HTTPPARA=\"URL\",\"{0}:{2}{1}\"".format(server, path,port), 500 ],
        [ "AT+HTTPPARA=\"UA\",\"{0}\"".format(self.userAgent), 500 ],
        [ "AT+HTTPPARA=\"REDIR\",\"1\"", 500 ],
        [ "AT+HTTPPARA=\"TIMEOUT\",\"45\"", 500 ],
        [ "AT+HTTPACTION=0", 10000 ]
    ]
    #executing http get sequence
    if not self.execSimpleCommandsList(simpleCommands):
        self.setError("error executing HTTP GET sequence")
        return False
    #reading HTTP request result
    dataLine = self.readDataLine(10000)
    if dataLine is None:
        return False
    #parsing string like this "+HTTPACTION:0,200,15"
    httpResult = self.__parseHttpResult(dataLine, 0)
    if httpResult is None:
        return False
    #assigning HTTP result code
    self.__httpResult = httpResult[0]
    #it's can be bad http code, let's check it
    if not self.___isOkHttpResponseCode(self.httpResult):
        self.terminateHttpRequest()
        return True
    #when no data from server we just want go out, everything if OK
    if not self.___isHttpResponseCodeReturnsData(self.httpResult):
        self.terminateHttpRequest()
        return True
    responseLength = httpResult[1]
    if responseLength == 0:
        self.terminateHttpRequest()
        return True
    self.logger.debug("reading http response data")
    if not self.__readHttpResponse(0, responseLength):
        return False
    return True
def __clearHttpResponse(self):
    """Resets the cached HTTP result code and response body."""
    self.__httpResult = 0
    self.__httpResponse = None
def httpPOST(self, server, port, path, parameters, bearerChannel = 1):
    """
    Makes an HTTP POST request to the given server and script.

    Configures the session, uploads the url-encoded body via AT+HTTPDATA,
    triggers AT+HTTPACTION=1, parses the result line, and reads the response
    body (if any) into the response buffer.

    :param server: server (host) address
    :param port: server port
    :param path: path to the script
    :param parameters: POST parameters (url-encoded string)
    :param bearerChannel: bearer channel number
    :return: True if operation was successfully finished. Otherwise returns False.
    """
    self.__clearHttpResponse()
    #TODO: close only when opened
    self.terminateHttpRequest()
    #HTTP POST request commands sequence
    simpleCommands = [
        [ "AT+HTTPINIT", 2000 ],
        [ "AT+HTTPPARA=\"CID\",\"{0}\"".format(bearerChannel), 1000 ],
        [ "AT+HTTPPARA=\"URL\",\"{0}:{1}{2}\"".format(server, port, path), 500 ],
        [ "AT+HTTPPARA=\"CONTENT\",\"application/x-www-form-urlencoded\"", 500 ],
        [ "AT+HTTPPARA=\"UA\",\"{0}\"".format(self.userAgent), 500 ],
        [ "AT+HTTPPARA=\"REDIR\",\"1\"", 500 ],
        [ "AT+HTTPPARA=\"TIMEOUT\",\"45\"", 500 ]
    ]
    #executing commands sequence
    if not self.execSimpleCommandsList(simpleCommands):
        return False
    #uploading data
    self.logger.debug("uploading HTTP POST data")
    ret = self.commandAndStdResult(
        "AT+HTTPDATA={0},10000".format(len(parameters)),
        7000,
        ["DOWNLOAD", "ERROR"]
    )
    if (ret is None) or (self.lastResult != "DOWNLOAD"):
        self.setError("{0}: can't upload HTTP POST data".format(inspect.stack()[0][3]))
        return False
    self.simpleWriteLn(parameters)
    dataLine = self.readDataLine(500)
    if (dataLine is None) or (dataLine != "OK"):
        self.setError("{0}: can't upload HTTP POST data".format(inspect.stack()[0][3]))
        # BUG FIX: was a bare "return" (None); this method's contract and
        # every other failure path return an explicit False.
        return False
    self.logger.debug("actually making request")
    #TODO: check CPU utilization
    if not self.execSimpleOkCommand("AT+HTTPACTION=1", 15000):
        return False
    #reading HTTP request result
    dataLine = self.readDataLine(15000)
    if dataLine is None:
        self.setError("{0}: empty HTTP request result string".format(inspect.stack()[0][3]))
        return False
    #parsing string like this "+HTTPACTION:0,200,15"
    httpResult = self.__parseHttpResult(dataLine, bearerChannel)
    if httpResult is None:
        return False
    #assigning HTTP result code
    self.__httpResult = httpResult[0]
    #it's can be bad http code, let's check it
    if not self.___isOkHttpResponseCode(self.httpResult):
        self.terminateHttpRequest()
        return True
    #when no data from server we just want go out, everything if OK
    if (
        (self.__isNoContentResponse(self.httpResult)) or
        (not self.___isHttpResponseCodeReturnsData(self.httpResult))
    ):
        self.terminateHttpRequest()
        return True
    responseLength = httpResult[1]
    if responseLength == 0:
        self.terminateHttpRequest()
        return True
    self.logger.debug("reading http request response data")
    # NOTE(review): method code 0 is passed to HTTPREAD here even though this
    # was a POST (action 1) — matches the GET path; confirm against the
    # module's AT command manual.
    if not self.__readHttpResponse(0, responseLength):
        return False
    return True
# self.disconnectTcp()
#
# return True
#
# int res= gsm.read(result, resultlength);
# //gsm.disconnectTCP();
# return res; |
# -*- coding: utf-8 -*-
# Copyright 2017 Tecnativa - Jairo Llopis
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
from odoo.tools import safe_eval
class MassMailingList(models.Model):
    """Mailing list that can auto-synchronize its contacts from sale orders."""
    _inherit = "mail.mass_mailing.list"

    # Marks this list as dynamic (auto-synchronized from orders).
    dynamic = fields.Boolean(
        help="Set this list as dynamic, to make it autosynchronized with "
        "orders from a given criteria.",
    )
    # Which side of the order the emails are harvested from.
    sync_method = fields.Selection(
        [
            ("sender", "Senders: Add new emails from order sender details"),
            ("recipient", "Recipients: Add new emails from order recipient details"),
        ],
        default="sender",
        required=True,
        help="Choose the syncronization method for this list if you want to "
        "make it dynamic",
    )
    # UI helper flag, cleared on config change and set back after a sync.
    is_synced = fields.Boolean(
        help="Helper field to make the user aware of unsynced changes",
        default=True,
    )

    def action_sync(self):
        """Sync contacts in dynamic lists.

        For each dynamic list, collects unique (email, name) pairs from sale
        orders (custom x_snd_*/x_rcv_* fields) and creates a contact for each
        email not yet present on the list. Existing contacts are never
        removed or renamed.
        """
        Contact = self.env["mail.mass_mailing.contact"].with_context(syncing=True)
        # Maps sync_method -> source model and the fields to extract from it.
        extract_methods = {
            "sender": { "model": "sale.order", "email": "x_snd_email", "name": "x_snd_name" },
            "recipient": { "model": "sale.order", "email": "x_rcv_email", "name": "x_rcv_name" },
        }
        # Skip non-dynamic lists
        dynamic = self.filtered("dynamic")
        for one in dynamic:
            extract = extract_methods[one.sync_method]
            x_model = self.env[extract["model"]].with_context(syncing=True)
            # Only orders that actually carry an email address.
            sync_domain = [(extract["email"], "!=", "")]
            desired_contacts = x_model.search(sync_domain)
            desired_contacts_emailname = [] if len(desired_contacts)==0 else desired_contacts.mapped(lambda r: { "email": r[extract["email"]].lower(), "name": r[extract["name"]], "isNew": True })
            current_contacts = Contact.search([("list_id", "=", one.id)])
            current_contacts_emailname = [] if len(current_contacts)==0 else current_contacts.mapped(lambda r: { "email": r.email.lower(), "name": r.name, "isNew": False })
            # Dedupe by lowercased email; current contacts come last so they
            # overwrite desired ones and end up with isNew=False.
            unique_contacts = { r["email"]: r for r in desired_contacts_emailname + current_contacts_emailname }.values()
            new_unique_contacts = [r for r in unique_contacts if r["isNew"]]
            # Add new contacts
            for new_contact in new_unique_contacts:
                Contact.create({
                    "list_id": one.id,
                    "email": new_contact["email"],
                    "name": new_contact["name"],
                })
            one.is_synced = True
        # Invalidate cached contact count
        self.invalidate_cache(["contact_nbr"], dynamic.ids)

    @api.onchange("dynamic", "sync_method")
    def _onchange_dynamic(self):
        # Any change to the sync configuration marks the list as out of date.
        if self.dynamic:
            self.is_synced = False
|
#!/usr/bin/env python
# Copyright (c) 2019 Daniel Hammer. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
class MovingWeightedWindow(object):
    """Base class for a fixed-size window with per-position weights.

    Subclasses must implement :meth:`_set_weights` to return the weight
    vector; it is computed once at construction time.
    """

    def __init__(self, size=0):
        """Create a window of *size* samples.

        :param size: positive integer window length
        :raises ValueError: if size is zero/falsy or not an int
        """
        if not size or not isinstance(size, int):
            raise ValueError("Invalid window size!")
        self._size = size
        self._weights = self._set_weights()

    def get_size(self):
        """Return the window length."""
        return self._size

    def get_weighted_sum(self, array=()):
        """Return sum(elem * weight) over *array* zipped with the weights.

        :param array: list or tuple of samples (extra elements are ignored)
        :raises ValueError: if *array* is not a list/tuple, or is empty
        """
        if not isinstance(array, list) and not isinstance(array, tuple):
            raise ValueError("Input array is neither an array nor a tuple!")
        # BUG FIX: the original condition was "not array or all(array)",
        # which also raised "empty" whenever every element was truthy.
        # Only a genuinely empty input is an error.
        if not array:
            raise ValueError("Input array is empty!")
        total = 0
        for elem, weight in zip(array, self._weights):
            total += elem * weight
        return total

    def _set_weights(self):
        """Compute and return the weight vector; subclasses must override."""
        raise NotImplementedError
class MovingWeightedSigWindow(MovingWeightedWindow):
    """Moving window whose weights follow a reversed logistic (sigmoid) curve.

    Newer positions get weights close to 1, older ones decay towards 0;
    *alpha* controls the steepness of the transition.
    """

    def __init__(self, size, alpha=10):
        """Create a sigmoid-weighted window.

        :param size: window length, validated by the base class
        :param alpha: steepness of the sigmoid; must be a non-zero number
        :raises ValueError: for a non-numeric or zero alpha
        """
        if not isinstance(alpha, float) and not isinstance(alpha, int):
            raise ValueError("Alpha must be float or int!")
        if not alpha:
            raise ValueError("Alpha can't be zero!")
        # Stash alpha before the base constructor triggers _set_weights().
        self._alpha = alpha
        super(MovingWeightedSigWindow, self).__init__(size)

    def _set_weights(self):
        """Return the weight vector 1 / (1 + exp(alpha * (x - 0.5)))."""
        x = np.linspace(1, 0, self._size)
        # Vectorised logistic curve — element-wise identical to a loop.
        return 1 / (1 + np.exp(self._alpha * (x - 0.5)))
|
#!/usr/bin/python
# coding=utf-8
# 公众号:testerzhang
__author__ = 'testerzhang'
import time
import traceback
from appium import webdriver
from selenium.common.exceptions import NoSuchElementException
from tqdm import tqdm
import parse
from loguru import logger
import config
logger.add(config.MIAO_LOG)
def wait_time_pbar(wait_sec):
    """Sleep for *wait_sec* seconds while showing a tqdm progress bar."""
    logger.debug(f"等待{wait_sec}秒")
    # 0.1 s per tick -> 10 ticks per second keeps the bar responsive.
    total_ticks = 10 * wait_sec
    for _ in tqdm(range(total_ticks)):
        time.sleep(0.1)
    logger.debug("")
class TaoBao(object):
    """Appium-driven bot that automates the TaoBao 'cat coin' daily tasks."""

    def __init__(self):
        # Connect to the local Appium server and launch the app with the
        # capabilities from config; skip_list holds task keywords to avoid.
        device_port = config.DEVICE_PORT
        desired_caps = config.DESIRED_CAPS
        self.skip_list = config.SKIP_LIST
        url = "http://localhost:{}/wd/hub".format(device_port)
        self.driver = webdriver.Remote(url, desired_caps)
        logger.debug("1.打开淘宝")
        wait_time_pbar(2)

    # Close the app / end the Appium session.
    def close(self):
        wait_time_pbar(5)
        logger.debug("6.关闭淘宝")
        self.driver.quit()

    # Decide whether a task should be skipped outright.
    def continue_task(self, content):
        """Return False when *content* contains any skip-list keyword."""
        is_continue = True
        for skip in self.skip_list:
            if skip in content:
                logger.warning(f"任务=[{content}]暂时不做")
                is_continue = False
                break
        return is_continue

    # Find and tap the coin-game entry on the home page.
    def active_page(self):
        sleep_time = 2
        logger.debug(f"2.查找喵币入口")
        # If the screen is small, a swipe may be needed first:
        # self.start_x = 26
        # self.start_y = 540
        # self.distance = 200
        #
        # self.driver.swipe(self.start_x, self.start_y, self.start_x, self.start_y - self.distance)
        # wait_time_pbar(1)
        wait_time_pbar(sleep_time)
        try:
            img_div = '//android.widget.FrameLayout[contains(@index,15)]'
            img_button = self.driver.find_elements_by_xpath(img_div)
            logger.debug(f"img_button={img_button},img_button_len={len(img_button)}")
            if len(img_button) > 0:
                logger.debug("开始点击喵币入口")
                img_button[0].click()
                logger.debug("点击喵币入口完毕")
        except NoSuchElementException as msg:
            # Retry the lookup once before giving up.
            img_button = self.driver.find_elements_by_xpath(img_div)
            logger.debug(f"img_button={img_button}")
            if len(img_button) > 0:
                logger.debug("尝试第二次点击喵币入口")
                img_button[0].click()
                logger.debug("点击第二次喵币入口完毕")
        except:
            raise Exception("找不到喵币入口")
        # Give the new page time to load.
        wait_time_pbar(5)

    # gzh:testerzhang  Work through the task list. Not every task type is
    # handled yet; revisit later.
    def do_task(self):
        wait_time_pbar(4)
        # Not handled yet: order tasks (0/2), partner task (0/1),
        # adidas task (0/3) — keyword 赢.
        task_list = config.TASK_LIST
        for task in task_list:
            while True:
                # First check whether this task entry exists on screen
                # (the pattern-style entries are located later instead).
                if task != '/20)' and task != '/2)' \
                        and task != '/3)' and task != '/5)' \
                        and task != '逛' and task != '搜':
                    try:
                        logger.debug(f"检查任务:【{task}】是否存在")
                        task_div = f'//*[@text="{task}"]'
                        task_button = self.driver.find_element_by_xpath(task_div)
                    except:
                        logger.warning(f"该任务:【{task}】不执行")
                        # logger.debug(f"【{task}】点击异常={traceback.format_exc()}")
                        break
                logger.debug(f"开始真正做任务列表:【{task}】")
                if task == "领取奖励" or task == "领取" \
                        or task == "签到" or task == "关闭":
                    # Simple one-tap tasks: claim / sign-in / close.
                    try:
                        logger.debug(f"开始做任务列表:【{task}】")
                        task_button.click()
                    except:
                        logger.warning(f"【{task}】点击异常={traceback.format_exc()}")
                    else:
                        wait_time_pbar(8)
                # TODO: this task type does not currently appear; kept just in case.
                elif task == "参与组队领红包(0/1)":
                    try:
                        logger.debug(f"开始做任务列表:【{task}】")
                        task_button.click()
                    except:
                        logger.debug(f"【{task}】点击异常={traceback.format_exc()}")
                    else:
                        wait_time_pbar(8)
                        try:
                            logger.debug(f"开始做任务列表:【{task}】")
                            return_div = f'//*[@text="返回我的猫"]'
                            self.driver.find_element_by_xpath(return_div).click()
                        except:
                            logger.debug(f"【{task}】点击异常={traceback.format_exc()}")
                        else:
                            wait_time_pbar(8)
                elif task == "去搜索" or task == "去浏览":
                    try:
                        logger.debug(f"开始做任务列表:【{task}】")
                        task_button.click()
                    except:
                        logger.warning(f"【{task}】点击异常={traceback.format_exc()}")
                    else:
                        wait_time_pbar(5)
                        browse_times = 1
                        browse_max_times = 11
                        browse_10_times = True
                        # Browse shop pages up to browse_max_times times.
                        while browse_times <= browse_max_times:
                            try:
                                logger.debug(f"browse_times={browse_times}")
                                # Enter the coin shop page.
                                browse_div = f'//android.view.View[@text="逛店最多"]'
                                browse_button = self.driver.find_element_by_xpath(browse_div)
                                browse_button.click()
                                wait_time_pbar(20)
                                logger.debug(f"返回一下")
                                self.driver.back()
                                wait_time_pbar(2)
                            except NoSuchElementException:
                                logger.warning(f"没有在【种草喵币城】找到【逛店】的按钮")
                                browse_10_times = False
                                break
                            finally:
                                browse_times = browse_times + 1
                        if not browse_10_times and browse_times == 2:
                            wait_time_pbar(20)
                        else:
                            wait_time_pbar(2)
                        logger.debug(f"返回一下")
                        self.driver.back()
                        wait_time_pbar(5)
                elif (')' in task) or ('逛' in task) or ('搜' in task):
                    # Counter-style tasks like "xxx (n/m)": parse the counter
                    # and repeat until n reaches m.
                    try:
                        wait_time_pbar(5)
                        logger.debug(f"开始做多任务列表:【{task}】")
                        task_title_div = f'//android.view.View[contains(@text, "{task}")]'
                        # task_title_div = f'//*[starts-with(@text, {task})]'
                        task_title_button = self.driver.find_element_by_xpath(task_title_div)
                        logger.debug(f"task_title_div:{task_title_div}")
                        logger.debug(f"task_title_button.text:{task_title_button.text}")
                        if task_title_button.text == '':
                            logger.warning(f"任务【{task}】退出:没有找到文本内容")
                            break
                    except NoSuchElementException as msg:
                        logger.warning(f"任务【{task}】退出:没找到元素")
                        break
                    except:
                        logger.warning(f"任务【{task}】退出")
                        logger.warning(f"【{task}】点击异常={traceback.format_exc()}")
                        break
                    else:
                        try:
                            task_title_content = task_title_button.text
                            logger.debug(f"task_title_content={task_title_content}")
                            try:
                                # Look at the node's next sibling to read the
                                # task's subtitle text.
                                brother_browse_div = f'//android.view.View[contains(@text, "{task}")]/following-sibling::android.view.View/android.view.View'
                                brother_browse_text = self.driver.find_element_by_xpath(brother_browse_div)
                                task_text = brother_browse_text.text
                                logger.debug(f"任务副标题:{task_text}")
                                # Check whether this task should be skipped.
                                is_continue = self.continue_task(task_text)
                                if not is_continue:
                                    logger.warning(f"满足跳过任务关键字,退出2")
                                    break
                            except:
                                logger.warning(f"找兄弟节点的文字内容异常=[{traceback.format_exc()}]")
                                # Fall back to checking the title itself.
                                is_continue = self.continue_task(task_title_content)
                                if not is_continue:
                                    logger.warning(f"满足跳过任务关键字,退出")
                                    break
                            result = parse.parse("{temp}({now_times}/{total_times})", f"{task_title_content}")
                            now_times = int(result['now_times'])
                            total_times = int(result['total_times'])
                            logger.debug(f"now_times={now_times},total_times={total_times}")
                            if now_times == total_times and total_times > 0:
                                break
                            else:
                                # Repeat the task until the counter is filled.
                                while now_times <= total_times:
                                    task_title_button.click()
                                    wait_time_pbar(5 + 20)
                                    logger.debug(f"返回一下")
                                    self.driver.back()
                                    wait_time_pbar(8)
                                    now_times = now_times + 1
                        except:
                            logger.warning(f"【{task}】点击异常={traceback.format_exc()}")
                            break
                else:
                    logger.warning(f"其他任务不做:【{task}】")
                    break
        return

    # gzh:testerzhang  Tap the self-produced coin icon to collect it.
    def click_coin(self):
        try:
            logger.debug("开始点击【自生产猫币】图标")
            feed_div = '//*[contains(@text, "自生产猫币")]'
            feed_button = self.driver.find_element_by_xpath(feed_div)
            logger.debug(f"feed_button.text=[{feed_button.text}]")
            feed_button.click()
        except:
            logger.warning(f"【自生产猫币】点击异常={traceback.format_exc()}")
        return

    # gzh:testerzhang  Tap the "collect coins" button, then enter the task list.
    def do_coins(self, button_name):
        try:
            logger.debug(f"开始点击[{button_name}]按钮")
            button_div = f'//*[@text="{button_name}"]'
            self.driver.find_element_by_xpath(button_div).click()
        except:
            logger.warning(f"【{button_name}】点击异常={traceback.format_exc()}")
        else:
            wait_time_pbar(5)
            # Work through the latest task list (sign-in etc.).
            self.do_task()

    # gzh:testerzhang  Handle the buttons on the cat home page.
    def feed_cat(self, key_name):
        # Extra cap on feed attempts to avoid an endless loop.
        max_times = 3
        if key_name == "喂猫升级":
            logger.debug("欢迎进入【喂猫升级】")
            # "Dear master, I went to play in TaoBao Life, come find me" flow:
            # try:
            #     logger.debug(f"检查小猫是否跑走了")
            #     find_cat_div = f'//*[@text="亲爱的主人我去淘宝人生玩耍了快来找我回家吧~"]'
            #     find_cat_button = self.driver.find_element_by_xpath(find_cat_div)
            # except:
            #     # logger.debug(f"【{task}】点击异常={traceback.format_exc()}")
            #     pass
            # else:
            #     try:
            #         logger.debug(f"开始点击【找猫猫】")
            #         find_cat_button.click()
            #     except:
            #         logger.warning(f"【找猫猫】点击异常={traceback.format_exc()}")
            #         return
            #     else:
            #         wait_time_pbar(5)
            #
            #     # todo: exit when element class="android.widget.Image" is found
            #     self.driver.back()
            #     wait_time_pbar(8)
            times = 1
            logger.debug(f"开始执行,最大执行次数={max_times}次")
            while True:
                logger.debug(f"开始执行第{times}次")
                if times > max_times:
                    break
                try:
                    logger.debug("开始点击【喂猫领红包】入口")
                    # "Feed the cat for a red packet" — costs 60000 coins each
                    # time; level up once more to claim the packet.
                    feed_div = '//*[contains(@text, "喂猫领红包,")]'
                    self.driver.find_element_by_xpath(feed_div).click()
                except NoSuchElementException as msg:
                    logger.warning(f"【喂猫领红包】点击无法找到元素")
                    break
                except:
                    logger.warning(f"【喂猫领红包】点击异常={traceback.format_exc()}")
                    break
                else:
                    wait_time_pbar(5)
                    close_div = '//*[contains(@text, "关闭")]'
                    try:
                        self.driver.find_element_by_xpath(close_div).click()
                        wait_time_pbar(5)
                        break
                    except:
                        # logger.debug(f"【关闭】异常={traceback.format_exc()}")
                        pass
                    receive_div = '//*[contains(@text, "开心收下,喵")]'
                    try:
                        self.driver.find_element_by_xpath(receive_div).click()
                        wait_time_pbar(5)
                    except:
                        # logger.debug(f"【开心收下】异常={traceback.format_exc()}")
                        pass
                    times = times + 1
        return

    # gzh:testerzhang  Enter the H5 (web) page and run the whole routine.
    def cat(self):
        # Locate and tap the entry point.
        self.active_page()
        logger.debug("3.准备切换H5页面")
        wait_time_pbar(5)
        # A webview context switch would be needed below.
        source = self.driver.page_source
        # Contexts look like ['NATIVE_APP'].
        web_view = self.driver.contexts
        logger.debug(web_view)
        if config.DO_COINS_FLAG:
            # Collect coins via the task list.
            self.do_coins('领喵币')
        # Tap to collect the self-produced coins.
        if config.RECEIVE_COINS_FLAG:
            self.click_coin()
        # Start feeding the cat.
        self.feed_cat('喂猫升级')
def main():
    """Run one full automation session: open the app, play, then close."""
    bot = TaoBao()
    bot.cat()
    bot.close()
    exit("退出")
# Script entry point.
if __name__ == '__main__':
    main()
|
# Builtin modules
from __future__ import annotations
from typing import List, Union, Tuple
# Third party modules
# Local modules
# Program
class Levels:
    """Registry of log levels as (id, name, shortName) triples.

    The table is kept sorted by descending id. Lookups accept a numeric id,
    a full name, or a three-letter short name.
    """
    levels:List[ Tuple[int, str, str] ] = [
        (100, "DISABLED", "DIS"),
        (60, "CRITICAL", "CRI"),
        (50, "ERROR", "ERR"),
        (40, "WARNING", "WAR"),
        (30, "INFO", "INF"),
        (20, "DEBUG", "DBG"),
        (10, "TRACE", "TRC"),
    ]

    @classmethod
    def addLevel(cls, id:int, name:str, shortName:str) -> None:
        """Register a new level and keep the table sorted by descending id."""
        assert id > 0, "ID must be higher as zero"
        cls.levels.append((id, name, shortName))
        cls.levels = sorted(cls.levels, key=lambda entry: -entry[0])
        return None

    @classmethod
    def removeLevel(cls, level:Union[int, str]) -> None:
        """Remove the first level whose id matches *level* (id, name or short name)."""
        targetID = cls.parse(level)
        for position, entry in enumerate(cls.levels):
            if entry[0] == targetID:
                del cls.levels[position]
                return
        return None

    @classmethod
    def getLevelIDByName(cls, name:str) -> int:
        """Return the id for a full level name (case-insensitive), or 0."""
        wanted = name.upper()
        return next((entry[0] for entry in cls.levels if entry[1] == wanted), 0)

    @classmethod
    def getLevelIDByShortName(cls, shortName:str) -> int:
        """Return the id for a short level name (case-insensitive), or 0."""
        wanted = shortName.upper()
        return next((entry[0] for entry in cls.levels if entry[2] == wanted), 0)

    @classmethod
    def getLevelNameByID(cls, id:int) -> str:
        """Return the full name for *id*; raise KeyError when unknown."""
        for levelID, levelName, _shortName in cls.levels:
            if levelID == id:
                return levelName
        raise KeyError("Levels: Unknown level: {}".format(id))

    @classmethod
    def getLevelShortNameByID(cls, id:int) -> str:
        """Return the short name for *id*; raise KeyError when unknown."""
        for levelID, _levelName, levelShortName in cls.levels:
            if levelID == id:
                return levelShortName
        raise KeyError("Levels: Unknown level: {}".format(id))

    @classmethod
    def parse(cls, level:Union[int, str]) -> int:
        """Resolve *level* (id, digit string, name or short name) to an id.

        Raises KeyError when the value matches no registered level.
        """
        if isinstance(level, str) and level.isdigit():
            level = int(level)
        if isinstance(level, int):
            if any(entry[0] == level for entry in cls.levels):
                return level
        else:
            resolved = cls.getLevelIDByName(level) or cls.getLevelIDByShortName(level)
            if resolved != 0:
                return resolved
        raise KeyError("Levels: Unknown level: {}".format(level))
|
import ecs
class Manager:
    def __init__(self):
        """A Manager keeps track of all Entities, Components, and Systems

        A Manager contains the 'database' for all entity/component connections
        as well as call the process function for Systems that are
        assigned to it.
        """
        self._systems = []
        self._next_entity_id = 0
        # component type -> set of entity ids owning one
        self._components = {}
        # entity id -> {component type: component instance}
        self._entities = {}
        self._dead_entities = set()

    def clear_database(self):
        """Clears all Components and Entities from the Manager

        Also resets the entity id count
        """
        self._components.clear()
        self._entities.clear()
        self._dead_entities.clear()
        self._next_entity_id = 0

    def add_system(self, system_instance):
        """Add a System instance to the Manager

        :param system_instance: An instance of a System that
                                is a subclass of SystemTemplate
        """
        assert issubclass(system_instance.__class__, ecs.SystemTemplate)
        system_instance.Manager = self
        self._systems.append(system_instance)

    def remove_system(self, system_type):
        """Removes a System class type from the Manager

        :param system_type: The System class type to be removed
        """
        # BUG FIX: iterate over a copy — removing from the list while
        # iterating it skipped the element following each match, so a
        # second system of the same type could survive removal.
        for system in list(self._systems):
            if type(system) == system_type:
                system.Manager = None
                self._systems.remove(system)

    def new_entity(self, *components):
        """Creates an Entity

        :param components: Optional components to add to the new Entity
        :return: The ID of the new Entity
        """
        self._next_entity_id += 1
        self._entities[self._next_entity_id] = {}
        for component in components:
            self.add_component_to_entity(component, self._next_entity_id)
        return self._next_entity_id

    def remove_entity(self, entity, immediate=False):
        """Removes an Entity from the Manager

        By default this method only adds the entity to the list of dead
        Entities which is taken care of when Manager.process gets called.
        If immediate however is set to true it will remove the entity
        at that time.

        Raises a KeyError if the entity is not in the database

        :param entity: ID of the entity to delete
        :param immediate: When True entity is removed immediatly
        """
        if immediate:
            for component_type in self._entities[entity]:
                self._components[component_type].discard(entity)
                # Drop empty component-type buckets to keep lookups clean.
                if not self._components[component_type]:
                    del self._components[component_type]
            del self._entities[entity]
        else:
            self._dead_entities.add(entity)

    def get_component_from_entity(self, component_type, entity):
        """Get component instance of specified type from given entity

        Raises KeyError if either Entity or Component type does not exist

        :param component_type: The class type of the Component you want to get
        :param entity: ID of the entity to get the component from
        :return: The component instance that was requested from given entity
        """
        return self._entities[entity][component_type]

    def get_all_components_from_entity(self, entity):
        """Get all components attached to given entity

        Meant to be used for saving the state or transfering
        an entity to another manager

        Raises KeyError if Entity does not exist

        :param entity: ID of the entity to get the components from
        """
        return tuple(self._entities[entity].values())

    def has_component(self, entity, component_type):
        """Check if an entity has a specific component type

        :param entity: ID of the entity to check
        :param component_type: The type of the component to check for
        :return: True if entity has a component of given type,
                 else False
        """
        return component_type in self._entities[entity]

    def add_component_to_entity(self, component_instance, entity):
        """Add a component onto the given entity

        If the component to add already exists on the entity the old
        component will be replaced

        :param component_instance: A component instance
        :param entity: ID of the entity to add to
        """
        component_type = type(component_instance)
        if component_type not in self._components:
            self._components[component_type] = set()
        self._components[component_type].add(entity)
        if entity not in self._entities:
            self._entities[entity] = {}
        self._entities[entity][component_type] = component_instance

    def remove_component_from_entity(self, component_type, entity):
        """Remove a component from an entity by type

        Raises a KeyError if either the component type or entity
        does not exist in database

        :param component_type: Type of component to remove
        :param entity: ID of entity to remove the component from
        """
        self._components[component_type].discard(entity)
        if not self._components[component_type]:
            del self._components[component_type]
        del self._entities[entity][component_type]
        # An entity with no components left is removed entirely.
        if not self._entities[entity]:
            del self._entities[entity]

    def get_component(self, component_type):
        """Get an iterator for entity, component pairs

        :param component_type: The component type to get
        :return: An iterator for (entity, component) tuples
        """
        entity_db = self._entities
        for entity in self._components.get(component_type, []):
            yield entity, entity_db[entity][component_type]

    def get_components(self, *component_types):
        """Get an iterator for entities with a specific set of components

        :param component_types: Two or more component types
        :return: An iterator for (entity, (componentA, componentB, ...)) tuples
        """
        entity_db = self._entities
        component_db = self._components
        try:
            for entity in set.intersection(*[component_db[component_type]
                                             for component_type
                                             in component_types]):
                yield entity, [entity_db[entity][component_type]
                               for component_type in component_types]
        except KeyError:
            # A component type with no owners means no entity can match.
            pass

    def process(self, *args):
        """Call the process method for all systems

        Calls all process methods for all attached systems as well
        as removing any entities marked for removal

        :param args: Optional arguments that will be passed directly
                     to the process method of the attached systems
        """
        if self._dead_entities:
            for entity in self._dead_entities:
                self.remove_entity(entity, immediate=True)
            self._dead_entities.clear()
        for system in self._systems:
            system.process(*args)
|
import pandas as pd
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.linear_model import LogisticRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.datasets import make_moons
from sklearn.datasets import make_circles
from sklearn.decomposition import KernelPCA
from scipy.spatial.distance import pdist, squareform
from scipy import exp
from scipy.linalg import eigh
from matplotlib.ticker import FormatStrFormatter
from rbf_kernel_pca import *
# for sklearn 0.18's alternative syntax
from distutils.version import LooseVersion as Version
from sklearn import __version__ as sklearn_version
if Version(sklearn_version) < '0.18':
from sklearn.grid_search import train_test_split
from sklearn.lda import LDA
else:
from sklearn.model_selection import train_test_split
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
#############################################################################
# Console banner for this section.
print(50 * '=')
print('Section: Projecting new data points')
print(50 * '-')
# Two interleaving half-moons: not linearly separable in input space.
X, y = make_moons(n_samples=100, random_state=123)
plt.figure(1)
plt.scatter(X[y == 0, 0], X[y == 0, 1], color='red', marker='^', alpha=0.5)
plt.scatter(X[y == 1, 0], X[y == 1, 1], color='blue', marker='o', alpha=0.5)
#plt.show()
# RBF-kernel PCA; keep only the first principal component.
alphas, lambdas = rbf_kernel_pca2(X, gamma=15, n_components=1)
x_new = X[25]
print('New data point x_new:', x_new)
#x_proj = alphas[25] # original projection
# As noted in the comments in rbf_kernel_pca.py, the eigenvector `a` should
# be multiplied by the singular value sqrt(lambda) here.
x_proj = np.sqrt(lambdas[0]) * alphas[25]
print('Original projection x_proj:', x_proj)
def project_x(x_new, X, gamma, alphas, lambdas):
    """Project a new data point onto the kernel principal components.

    Evaluates the RBF kernel between *x_new* and every training sample,
    then projects via the eigenvectors *alphas* normalised by
    sqrt(*lambdas*).
    """
    # Squared Euclidean distances from x_new to every training sample.
    sq_dists = np.sum((X - x_new) ** 2, axis=1)
    kernel_row = np.exp(-gamma * sq_dists)
    # Dividing by sqrt(lambda_i) gives the properly normalised projection
    # (see the derivation in the surrounding comments).
    # return k.dot(alphas / lambdas)
    return kernel_row.dot(alphas / np.sqrt(lambdas))
# To express the data point x_new in principal components we need its
# projection onto v:
#   x'_i = x_new^T v_i
# Use the relation between the normalised singular vectors:
#   X^T a_i = sigma_i v_i   (sigma_i = sqrt(lambda_i))
# Substituting gives
#   x'_i = x_new^T X^T a_i / sigma_i
#        = \sum_n k(x_new, x_n) a_i[n] / sqrt(lambda_i)
# If, as in the book's text, v_i = X^T a_i is taken as the definition,
# this normalisation term does not appear.
# In the code, however, both v_i and a_i are normalised to unit norm,
# so the relation is v_i = X^T a_i / sigma_i.
# The original code used X v_i / sigma_i as the data, so an extra division
# by sigma_i was needed there:
#   x'_i = \sum_n k(x_new, x_n) a_i[n] / lambda_i
# projection of the "new" datapoint
x_reproj = project_x(x_new, X, gamma=15, alphas=alphas, lambdas=lambdas)
print('Reprojection x_reproj:', x_reproj)
#alphas2 = alphas
# Use this rescaled version as well when fixing the normalisation.
alphas2 = np.sqrt(lambdas[0]) * alphas
plt.figure(2)
plt.scatter(alphas2[y == 0, 0], np.zeros((50)),
            color='red', marker='^', alpha=0.5)
plt.scatter(alphas2[y == 1, 0], np.zeros((50)),
            color='blue', marker='o', alpha=0.5)
plt.scatter(x_proj, 0, color='black',
            label='original projection of point X[25]', marker='^', s=100)
plt.scatter(x_reproj, 0, color='green',
            label='remapped point X[25]', marker='x', s=500)
plt.legend(scatterpoints=1)
# plt.tight_layout()
# plt.savefig('./figures/reproject.png', dpi=300)
plt.show()
|
# Python 2 / SimpleCV script: detect circular blue markers in a photo and
# time each processing step.
#import SimpleCV
from SimpleCV import *
import time
# NOTE(review): time.clock() is deprecated (removed in Python 3.8); on Unix
# it measures CPU time, not wall time — fine for rough step timing here.
time_start = time.clock()
tic = time_start
img = Image('IMG_3080.JPG')
toc = time.clock()
delta_t = (toc-tic)
print " %f s to open image" % delta_t
tic=toc
# Distance from pure blue, inverted so blue areas are bright, then thresholded.
blue_distance = img.hueDistance(Color.BLUE).invert().threshold(210)
toc = time.clock()
delta_t = (toc-tic)
print " %f s to calculate blue hue distance" % delta_t
tic=toc
#blue_distance.save('outputBD_v2.png')
toc = time.clock()
delta_t = (toc-tic)
print " %f s to save image file" % delta_t
tic=toc
blobs_blue = blue_distance.findBlobs(threshval=200, minsize=50)
toc = time.clock()
delta_t = (toc-tic)
print " %f s to find blue blobs" % delta_t
tic=toc
blobs_blue.draw(color=Color.RED, width=2)
#blue_ditance.show()
blobs_blue_filter = np.zeros(len(blobs_blue),np.bool) #create list the length of blobs_blue with bool elements
for index in range(len(blobs_blue)):
    #code to determine if blob is a circle, mostly taken from Blob.py because using isCircle was printing text to screen for some reason
    w = blobs_blue[index].mHullMask.width
    h = blobs_blue[index].mHullMask.height
    #print "%d %d blob size" % (w, h)
    # Render an ideal filled circle of the blob's bounding size, then compare
    # it against the blob's hull mask.
    idealcircle = Image((w,h))
    radius = min(w,h) / 2
    #print "%d radius" % radius
    idealcircle.dl().circle((w/2, h/2), radius, filled= True, color=Color.WHITE)
    idealcircle = idealcircle.applyLayers()
    # Symmetric difference between ideal circle and hull mask: white pixels
    # are where the two shapes disagree.
    netdiff = (idealcircle - blobs_blue[index].mHullMask) + (blobs_blue[index].mHullMask - idealcircle)
    numblack, numwhite = netdiff.histogram(2)
    #print "%d %d black and white" % (numblack, numwhite)
    # Disagreement area normalised by the ideal circle's area.
    circle_variance = float(numwhite) / (radius * radius * np.pi)
    # print "%d %d blob size" % (w, h)
    # print "%d %d blob location" % (blobs_blue[index].x, blobs_blue[index].y)
    # print "%f circle variance" % circle_variance
    # print "%d blob area" % blobs_blue[index].area()
    # sqa = w*h
    # print "%d sq area" % sqa
    # circa = radius * radius * np.pi
    # print "%d circ area" % circa
    # print " "
    # Accept blobs that are within 30% of a perfect circle.
    if circle_variance<=.3:
        blobs_blue_filter[index] = 1;
        blue_distance.dl().circle(blobs_blue[index].centroid(),3,color=Color.HOTPINK)
blobs_blue_selected = blobs_blue.filter(blobs_blue_filter)
blobs_blue_centroid = np.zeros((len(blobs_blue_selected),2),np.float) #create list the length of blobs_blue_selected with bool elements
for index in range(len(blobs_blue_centroid)):
    blobs_blue_centroid[index] = blobs_blue_selected[index].centroid()
# print blobs_blue_centroid
blobs_blue_selected.draw(color=Color.HOTPINK, width=3) #make outlines around selected object bolder
print "%d blue blobs found, %d selected" % (len(blobs_blue), len(blobs_blue_selected))
toc = time.clock()
delta_t = (toc-tic)
print " %f s to do blue blob selection" % delta_t
tic=toc
# --------------------------------------------------------------------
red_distance = img.hueDistance(Color.RED).invert().threshold(210)
#red_distance.save('outputRD_v2.png')
blobs_red = red_distance.findBlobs(threshval=200, minsize=50)
blobs_red.draw(color=Color.FORESTGREEN, width=2)
blobs_red_filter = np.zeros(len(blobs_red),np.bool) #create list the length of blobs_red with bool elements
for index in range(len(blobs_red)):
#code to determine if blob is a circle, mostly taken from Blob.py because using isCircle was printing text to screen for some reason
w = blobs_red[index].mHullMask.width
h = blobs_red[index].mHullMask.height
#print "%d %d blob size" % (w, h)
idealcircle = Image((w,h))
radius = min(w,h) / 2
#print "%d radius" % radius
idealcircle.dl().circle((w/2, h/2), radius, filled= True, color=Color.WHITE)
idealcircle = idealcircle.applyLayers()
netdiff = (idealcircle - blobs_red[index].mHullMask) + (blobs_red[index].mHullMask - idealcircle)
numblack, numwhite = netdiff.histogram(2)
#print "%d %d black and white" % (numblack, numwhite)
circle_variance = float(numwhite) / (radius * radius * np.pi)
if circle_variance<=.3:
blobs_red_filter[index] = 1;
red_distance.dl().circle(blobs_red[index].centroid(),3,color=Color.YELLOW)
blobs_red_selected = blobs_red.filter(blobs_red_filter)
blobs_red_centroid = np.zeros((len(blobs_red_selected),2),np.float) #create list the length of blobs_red with bool elements
for index in range(len(blobs_red_centroid)):
blobs_red_centroid[index] = blobs_red_selected[index].centroid()
# print blobs_red_centroid
blobs_red_selected.draw(color=Color.LIME, width=3) #make outlines around selected object bolder
print "%d red blobs found, %d selected" % (len(blobs_red), len(blobs_red_selected))
# --------------------------------------------------------------------
# Calculate the distance between every combination of selected (circular) red and blue blobs
blob_distances = np.zeros((len(blobs_blue_selected),len(blobs_red_selected)),np.float)
markers = np.zeros((1,3)) #array is x, y, alpha (angle between red and blue off vertical. angle is zero is blue is directly above red)
count = 0
for index_b in range(len(blobs_blue_selected)):
for index_r in range(len(blobs_red_selected)):
blob_distances[index_b,index_r] = np.sqrt(np.square(blobs_blue_centroid[index_b,0] - blobs_red_centroid[index_r,0]) + np.square(blobs_blue_centroid[index_b,1] - blobs_red_centroid[index_r,1]))
mean_blob_diam = (blobs_blue_selected[index_b].width() + blobs_red_selected[index_r].width() + blobs_blue_selected[index_b].height() + blobs_red_selected[index_r].height())/4
# print "%d mean blob diam" % mean_blob_diam
# print "%d blob dist" % blob_distances[index_b,index_r]
if (blob_distances[index_b,index_r] < (5*mean_blob_diam)) & (blob_distances[index_b,index_r] > (1.0*mean_blob_diam)) : #if the blobs are within 1.5 to 3*blob_diam of eachother
blue_blob_area = blobs_blue_selected[index_b].area()
red_blob_area = blobs_red_selected[index_r].area()
blob_area_perc_diff = (abs(blue_blob_area-red_blob_area) / ((blue_blob_area + red_blob_area)/2) )
# print "%f blob area perc diff" % blob_area_perc_diff
if blob_area_perc_diff<.5: # if blob area percent difference is less than 50%
region_y_mean = (blobs_blue_centroid[index_b,1] + blobs_red_centroid[index_r,1])/2
region_x_mean = (blobs_blue_centroid[index_b,0] + blobs_red_centroid[index_r,0])/2
region_size = mean_blob_diam/np.sqrt(2)*.5
region_x_min = region_x_mean-region_size
region_y_min = region_y_mean-region_size
region_x_max = region_x_mean+region_size
region_y_max = region_y_mean+region_size
region_angle = np.arctan2(blobs_red_centroid[index_r,0] - blobs_blue_centroid[index_b,0] , blobs_red_centroid[index_r,1] - blobs_blue_centroid[index_b,1])*57.2958 # angle between red and blue blobs converted to degrees
# print "%f %f %f" % (region_x_mean,region_y_mean,region_size)
red_distance.dl().rectangle2pts((region_x_min,region_y_min),(region_x_max, region_y_max), color=Color.SILVER)
subreg1 = img.regionSelect(region_x_min,region_y_min,region_x_max,region_y_max)
subreg = subreg1.smooth().grayscale().binarize(40).invert() #the binarize threshold can be tuned
subreg_meanColor = subreg.meanColor()
subreg_value = max(subreg_meanColor)
# print "subregion value %f " % subreg_value
subreg.save("output_v2_sr"+str(index_b)+"_"+str(index_r)+".png")
if (subreg_value < 5): # if the subregion has less than 5 pixels that were above the binarize threshold
red_distance.dl().line(blobs_blue_centroid[index_b,:],blobs_red_centroid[index_r,:],width=5,color=Color.SILVER)
#img.drawText(text="V=%d" % subreg_value,x=blobs_blue_centroid[index_b,:],y=blobs_red_centroid[index_r,:],fontsize=40)
print "Marker located at %d %d" % (region_x_mean,region_y_mean)
markers=np.append(markers,[[region_x_mean,region_y_mean,region_angle]], axis=0)
count = count+1
print markers
#Apply the drawing layers to the image
img.addDrawingLayer(blue_distance.dl())
img.addDrawingLayer(red_distance.dl())
# Calculate image rotation between the first two markers
rotate_angle = -1*np.mean(markers[1:,2])
print "Image should be rotated by %f" % rotate_angle
img.save('output_v2.png')
# AQUAMARINE
# AZURE
# BACKGROUND
# BEIGE
# BLACK
# BLUE
# CHARCOAL
# CRIMSON
# CYAN
# DEFAULT
# FOREGROUND
# FORESTGREEN
# FUCHSIA
# GOLD
# GRAY
# GREEN
# HOTPINK
# INDIGO
# IVORY
# KHAKI
# LEGO_BLUE
# LEGO_ORANGE
# LIME
# MAROON
# MAYBE_BACKGROUND
# MAYBE_FOREGROUND
# MEDIUMBLUE
# NAVYBLUE
# OLIVE
# ORANGE
# PLUM
# PUCE
# RED
# ROYALBLUE
# SALMON
# SILVER
# TAN
# TEAL
# VIOLET
# WATERSHED_BG
# WATERSHED_FG
# WATERSHED_UNSURE
# WHEAT
# WHITE
# YELLOW
|
# This program reads in a text file from a prompt on the command line and outputs every second line of the text file
# John Dunne 2019-03-14
# This program is adapted from the extra reading I completed here: https://stackoverflow.com/questions/30551945/how-do-i-get-python-to-read-only-every-other-line-from-a-file-that-contains-a-po
# Returned to add sys.argv after viewing the week 9 lecture on command line arguments
# The sys.argv part of this solution is adapted from the example we covered in lecture on command line arguments: https://web.microsoftstream.com/video/65df155a-ac29-460b-869d-2de6ffc6c3fc
import sys  # sys.argv supplies the file name typed on the command line

# Print every second line of the file named as the single command-line argument.
if len(sys.argv) == 2:
    # Open read-only; the with statement closes the file automatically.
    with open(sys.argv[1], 'r') as f:
        # enumerate numbers the lines starting at 1, so even line numbers
        # (the 2nd, 4th, ...) are exactly "every second line".
        for line_number, line in enumerate(f, 1):
            if line_number % 2 == 0:
                print(line)
else:
    # Wrong number of arguments: tell the user what to type.
    print("Please enter a single file name. The file name is myfile.txt")
# This program takes the file name from an argument on the command line using the sys.argv function
# Count is used to assign a starting value of zero to the first line in a file and a for loop will loop through each line of the file
# Every time the loop runs one is added to the value of count and an if statement with modulo operator will identify even numbered lines
# Example - In the first iteration of the for loop the first line is assigned a value of one and thus won't be printed, and so on
# Finally all even numbered lines are printed to the screen as output
# We covered this material in the week 7 lecture video- "opening files for reading and writing"
# The with open command is the recommended way to open a file from the command line as the file is closed automatically after operation
# I read about the with open keyword here: https://docs.python.org/3/tutorial/inputoutput.html#reading-and-writing-files
# I have asked for the file to be opened in "r" mode which is read only mode
# The first steps I completed in solving this problem was to create a file to be read and have some text printed to the file
# Below are the commands I used when setting up the file to be read by this program - myfile.txt
# First command to write to the text file:
#with open('myfile.txt', 'w') as f:
# f.write("This file will be used to run the program in solution 9 on the problem set\n")
# Second command to append text to the text file:
#with open('myfile.txt', 'a') as f:
#f.write("I have written the text in this file from solution_9.py file\n")
#f.write("I first asked for the file to open in 'w' mode to write the first line\n")
#f.write("Opening a file with 'w' overwrites the contents of the entire file\n")
#f.write("I then used 'a' to append the text here to the existing text\n")
#f.write("backslash n is used to go to a new line each time\n")
#f.write("In this program I will attempt to have every second line of the file printed to the screen\n")
#f.write("In the last line I am not putting a backslash end")
import pandas as pd
import os
from tqdm import tqdm
import pickle
import torch
import dgl
import networkx as nx
import matplotlib.pyplot as plt
def draw_dgl(G):
    """Render a DGL graph with matplotlib by converting it to NetworkX first."""
    as_networkx = G.to_networkx()
    nx.draw(as_networkx, with_labels=True)
    plt.show()
class Pipeline:
    """Data-preparation pipeline for tree/graph-based source-code models.

    Steps (see run()): parse C programs into ASTs with pycparser, split them
    into train/dev/test sets, train a word2vec embedding over AST token
    sequences, and convert every program into block index sequences plus a
    DGL graph for training.
    """

    def __init__(self, ratio, root):
        """
        Args:
            ratio: 'train:dev:test' split ratio string, e.g. '3:1:1'.
            root:  data directory holding programs.pkl; all outputs are
                   written beneath it.
        """
        self.ratio = ratio
        self.root = root
        self.sources = None          # parsed source DataFrame (id, code, label)
        self.train_file_path = None
        self.dev_file_path = None
        self.test_file_path = None
        self.size = None             # word2vec embedding dimensionality

    # parse source code
    def parse_source(self, output_file, option):
        """Load a cached parse result, or parse programs.pkl with pycparser.

        Args:
            output_file: file name (relative to root) of the cached pickle.
            option: 'existing' to reuse the cache when it is present.

        Returns:
            DataFrame with columns ['id', 'code', 'label'] where 'code'
            holds pycparser ASTs; also stored on self.sources.
        """
        path = self.root + output_file
        # Fixed: the original compared with `option is 'existing'`, which
        # tests object identity and only works by string-interning accident.
        if os.path.exists(path) and option == 'existing':
            source = pd.read_pickle(path)
        else:
            from pycparser import c_parser
            parser = c_parser.CParser()
            source = pd.read_pickle(self.root + 'programs.pkl')
            source.columns = ['id', 'code', 'label']
            print(len(source))
            source['code'] = source['code'].apply(parser.parse)
        self.sources = source
        return source

    # split data for training, developing and testing
    def split_data(self, exists=False):
        """Shuffle self.sources and write train/dev/test pickles.

        The output directories and file paths are always (re)computed so
        later steps can rely on them; when `exists` is True the split files
        are assumed to be on disk already and nothing is rewritten.
        """
        def check_or_create(path):
            # Create the output directory on first use.
            if not os.path.exists(path):
                os.mkdir(path)

        train_path = self.root + 'train/'
        check_or_create(train_path)
        self.train_file_path = train_path + 'train_.pkl'
        dev_path = self.root + 'dev/'
        check_or_create(dev_path)
        self.dev_file_path = dev_path + 'dev_.pkl'
        test_path = self.root + 'test/'
        check_or_create(test_path)
        self.test_file_path = test_path + 'test_.pkl'
        if exists:
            return
        data = self.sources
        data_num = len(data)
        ratios = [int(r) for r in self.ratio.split(':')]
        train_split = int(ratios[0] / sum(ratios) * data_num)
        val_split = train_split + int(ratios[1] / sum(ratios) * data_num)
        # Deterministic shuffle before splitting.
        data = data.sample(frac=1, random_state=666)
        train = data.iloc[:train_split]
        dev = data.iloc[train_split:val_split]
        test = data.iloc[val_split:]
        train.to_pickle(self.train_file_path)
        dev.to_pickle(self.dev_file_path)
        test.to_pickle(self.test_file_path)

    # construct dictionary and train word embedding
    def dictionary_and_embedding(self, input_file, size, exists=False):
        """Train a word2vec model over AST token sequences of the training set.

        Args:
            input_file: pickle of parsed trees; defaults to the train split.
            size: embedding dimensionality (always recorded on self.size).
            exists: skip training (embedding already on disk) when True.
        """
        self.size = size
        if exists:
            return
        if not input_file:
            input_file = self.train_file_path
        trees = pd.read_pickle(input_file)
        if not os.path.exists(self.root + 'train/embedding'):
            os.mkdir(self.root + 'train/embedding')
        from prepare_data import get_sequences

        def trans_to_sequences(ast):
            # Flatten one AST into its token sequence.
            sequence = []
            get_sequences(ast, sequence)
            return sequence

        corpus = trees['code'].apply(trans_to_sequences)
        str_corpus = [' '.join(c) for c in corpus]
        trees['code'] = pd.Series(str_corpus)
        trees.to_csv(self.root + 'train/programs_ns.tsv')
        from gensim.models.word2vec import Word2Vec
        w2v = Word2Vec(corpus, size=size, workers=16, sg=1, min_count=3)
        w2v.save(self.root + 'train/embedding/node_w2v_' + str(size))

    # generate block sequences with index representations
    def generate_block_seqs(self, data_path, part):
        """Convert each program of one split into index trees and a DGL graph.

        Writes <root>/<part>/graph.pkl (list of DGLGraph) and
        <root>/<part>/blocks.pkl (DataFrame whose 'code' holds index trees).
        """
        from prepare_data import get_blocks as func
        from gensim.models.word2vec import Word2Vec
        word2vec = Word2Vec.load(self.root + 'train/embedding/node_w2v_' + str(self.size)).wv
        vocab = word2vec.vocab
        # Out-of-vocabulary tokens map to this one extra index.
        max_token = word2vec.syn0.shape[0]

        def tree_to_index(node):
            # Recursively encode a node as [token_idx, [child1...], [child2...], ...].
            token = node.token
            result = [vocab[token].index if token in vocab else max_token]
            for child in node.children:
                result.append(tree_to_index(child))
            return result

        def trans2seq(r):
            # Split one AST into statement blocks, each encoded as an index tree.
            blocks = []
            func(r, blocks)
            return [tree_to_index(b) for b in blocks]

        trees = pd.read_pickle(data_path)
        trees['code'] = trees['code'].apply(trans2seq)
        trees_code = list(trees['code'])
        code_graph = []
        for e in tqdm(trees_code, ncols=20, desc='generate graph'):
            code_graph.append(self.generate_graphs(e))
        with open(self.root + part + '/graph.pkl', 'wb') as f:
            pickle.dump(code_graph, f)
        trees.to_pickle(self.root + part + '/blocks.pkl')

    def add_children(self, l, g, parent_idx):
        """Recursively add the subtrees in `l` to graph `g` under `parent_idx`.

        Each child becomes a new node (node feature 'idx' = its token index)
        connected to its parent by edges in both directions.
        """
        if isinstance(l, list) and len(l) != 0:
            for child in l:
                g.add_nodes(1)
                g.nodes[g.number_of_nodes() - 1].data['idx'] = torch.tensor([child[0]])
                g.add_edge(parent_idx, g.number_of_nodes() - 1)
                g.add_edge(g.number_of_nodes() - 1, parent_idx)
                part_idx = g.number_of_nodes() - 1
                self.add_children(child[1:], g, part_idx)
        elif isinstance(l, list) and len(l) == 0:
            return  # leaf: nothing to add
        else:
            # Non-list input means the encoded tree is malformed; fail loudly.
            print("maybe something error")
            print(l)
            print(g)
            assert (False)

    def generate_graphs(self, data_list):
        """Build one bidirectional DGL graph for a program.

        Node 0 is a synthetic root (idx 0); each statement block becomes a
        child of the root with its subtree attached via add_children.
        """
        g = dgl.DGLGraph()
        g.add_nodes(1)
        g.nodes[g.number_of_nodes() - 1].data['idx'] = torch.tensor([0])
        for part in data_list:
            g.add_nodes(1)
            g.nodes[g.number_of_nodes() - 1].data['idx'] = torch.tensor([part[0]])
            g.add_edge(0, g.number_of_nodes() - 1)
            g.add_edge(g.number_of_nodes() - 1, 0)
            part_idx = g.number_of_nodes() - 1
            self.add_children(part[1:], g, part_idx)
        return g

    # run for processing data to train
    def run(self):
        """Execute the full pipeline; parsing/splitting/embedding steps are
        currently configured to reuse existing on-disk artifacts."""
        print('parse source code...')
        # self.parse_source(output_file='ast.pkl', option='existing')
        print('split data...')
        self.split_data(exists=True)
        print('train word embedding...')
        self.dictionary_and_embedding(None, 128, exists=True)
        print('generate block sequences...')
        self.generate_block_seqs(self.train_file_path, 'train')
        self.generate_block_seqs(self.dev_file_path, 'dev')
        self.generate_block_seqs(self.test_file_path, 'test')
# Run the full preparation pipeline only when executed as a script; the
# guard prevents the pipeline from running on a mere import of this module.
if __name__ == '__main__':
    ppl = Pipeline('3:1:1', 'data/')
    ppl.run()
|
from asyncio import open_connection
from contextlib import closing
from pyiced import (
Align, container, IcedApp, image, ImageHandle, Length, Settings,
text, WindowSettings,
)
class ImageExample(IcedApp):
    """pyiced application that downloads an image and displays it centered."""

    def __init__(self):
        # Handle to the downloaded image; stays None until load_image finishes.
        self.__handle = None

    class settings(Settings):
        # Declarative application settings picked up by pyiced.
        class window(WindowSettings):
            size = (640, 320)

    def title(self):
        # Window title bar text.
        return 'An Image'

    def new(self):
        # Startup commands: kick off the asynchronous image download.
        return [load_image()]

    def update(self, msg, clipboard):
        # Handle messages produced by commands; store the image handle once
        # the download command completes.
        match msg:
            case ('ImageHandle', handle):
                self.__handle = handle

    def view(self):
        # Show a placeholder until the image is available, then render it
        # centered inside a container that fills the window.
        if self.__handle is None:
            return text('Loading …')
        return container(
            image(
                self.__handle,
                height=Length.units(300),
                width=Length.units(600),  # the aspect ratio is preserved
            ),
            align_x=Align.CENTER, align_y=Align.CENTER,
            width=Length.FILL, height=Length.FILL,
        )
async def load_image():
    """Fetch the PNG logo over HTTPS and return it as an ImageHandle message.

    Speaks minimal HTTP/1.0 over a raw TLS stream: sends one GET request,
    skips the response status line and headers, and treats everything after
    the blank CRLF line as the image body (HTTP/1.0 responses are never
    chunked, and the server closes the connection at end of body).

    Returns:
        ('ImageHandle', ImageHandle) tuple consumed by ImageExample.update.
    """
    HOST = 'upload.wikimedia.org'
    PATH = '/wikipedia/de/b/bb/Png-logo.png'
    query = (
        f"GET {PATH} HTTP/1.0\r\n"
        f"Host: {HOST}\r\n"
        # Fixed: the header token is "close" (per the HTTP/1.1 spec);
        # "closed" was a typo and would be ignored by the server.
        f"Connection: close\r\n"
        f"User-Agent: Mozilla/1.22 (compatible; MSIE 2.0; Windows 95)\r\n"
        f"\r\n"
    ).encode('US-ASCII')
    reader, writer = await open_connection(HOST, 443, ssl=True)
    with closing(writer):
        writer.write(query)
        await writer.drain()
        # Consume the status line and headers up to the empty CRLF line.
        while (await reader.readline()) != b'\r\n':
            continue
        # Body: read until EOF.
        data = await reader.read()
    # closing() has called writer.close(); wait for the transport to finish.
    await writer.wait_closed()
    return ('ImageHandle', ImageHandle.from_memory(data))
# Start the pyiced application event loop when executed as a script.
if __name__ == '__main__':
    ImageExample().run()
|
import logging
import time
logger = logging.getLogger(__name__.rsplit(".")[-1])
class RS485:
    """RS485 serial interface module for TWCManager.

    Wraps a pyserial connection and implements the SLIP-style framing used
    on the RS485 bus: outgoing messages get a trailing checksum byte,
    special bytes are escaped, and each frame is delimited by 0xC0 bytes.
    """

    # pyserial is bound as a class attribute; instances reach it through
    # self.serial without a module-level import.
    import serial

    baud = 9600       # default baud rate; overridden from config when present
    enabled = True    # set False (and the module unloaded) when disabled in config
    master = None     # owning TWCManager master object
    port = None       # serial device path or pyserial URL
    ser = None        # the open pyserial port object
    timeLastTx = 0    # time.time() of the last transmitted frame

    def __init__(self, master):
        """Read baud/port settings from the master config and open the port."""
        self.master = master
        classname = self.__class__.__name__

        # Unload if this module is disabled or misconfigured
        if "interface" in master.config and classname in master.config["interface"]:
            self.enabled = master.config["interface"][classname].get("enabled", True)
        if not self.enabled:
            self.master.releaseModule("lib.TWCManager.Interface", classname)
            return None

        # There are two places that the baud rate for the RS485 adapter may be stored.
        # The first is the legacy configuration path, and the second is the new
        # dedicated interface configuration. We check either/both for this value
        # (the dedicated interface value wins when both are set).
        bauda = master.config["config"].get("baud", 0)
        baudb = None
        if "interface" in master.config:
            baudb = master.config["interface"]["RS485"].get("baud", 0)
        if baudb:
            self.baud = baudb
        elif bauda:
            self.baud = bauda

        # Similarly, there are two places to check for a port defined.
        porta = master.config["config"].get("rs485adapter", "")
        portb = None
        if "interface" in master.config:
            portb = master.config["interface"]["RS485"].get("port", "")
        if portb:
            self.port = portb
        elif porta:
            self.port = porta

        # Connect to serial port (timeout=0 makes reads non-blocking)
        self.ser = self.serial.serial_for_url(self.port, self.baud, timeout=0)

    def close(self):
        # Close the serial interface
        return self.ser.close()

    def getBufferLen(self):
        # This function returns the size of the recieve buffer.
        # This is used by read functions to determine if information is waiting
        return self.ser.inWaiting()

    def read(self, len):
        # Read the specified amount of data from the serial interface.
        # (The parameter shadows the builtin len(); kept for API compatibility.)
        return self.ser.read(len)

    def send(self, msg):
        # Send msg on the RS485 network. We'll escape bytes with a special meaning,
        # add a CRC byte to the message end, and add a C0 byte to the start and end
        # to mark where it begins and ends.
        msg = bytearray(msg)
        # Checksum is the low byte of the sum of every byte except the first.
        checksum = 0
        for i in range(1, len(msg)):
            checksum += msg[i]
        msg.append(checksum & 0xff)

        # Escaping special chars:
        # The protocol uses C0 to mark the start and end of the message. If a C0
        # must appear within the message, it is 'escaped' by replacing it with
        # DB and DC bytes.
        # A DB byte in the message is escaped by replacing it with DB DD.
        #
        # User FuzzyLogic found that this method of escaping and marking the start
        # and end of messages is based on the SLIP protocol discussed here:
        # https://en.wikipedia.org/wiki/Serial_Line_Internet_Protocol
        i = 0
        while i < len(msg):
            if msg[i] == 0xc0:
                msg[i : i + 1] = b"\xdb\xdc"
                i = i + 1  # skip over the just-inserted escape byte
            elif msg[i] == 0xdb:
                msg[i : i + 1] = b"\xdb\xdd"
                i = i + 1  # skip over the just-inserted escape byte
            i = i + 1
        # Wrap the escaped payload in frame delimiters.
        msg = bytearray(b"\xc0" + msg + b"\xc0")

        # NOTE(review): INFO9 is a custom verbosity level, presumably
        # registered on the logging module elsewhere in TWCManager — confirm.
        logger.log(logging.INFO9, "Tx@: " + self.master.hex_str(msg))
        self.ser.write(msg)
        self.timeLastTx = time.time()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
class Neighborhood(models.Model):
    """A residential neighborhood with basic info and emergency contact numbers."""

    image = models.ImageField(upload_to='neighborhood_photos/', null=True)
    name = models.CharField(max_length=30, default='Unknown', blank=True)
    location = models.CharField(max_length=100, default='Somewhere in Nairobi', blank=True)
    population = models.CharField(max_length=128, default='Unknown', blank=True)
    police = models.CharField(max_length=12, default='911', blank=True)
    ambulance = models.CharField(max_length=12, default='911', blank=True)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = 'Neighborhood'
        verbose_name_plural = 'Neighborhoods'

    def get_url(self):
        """Redirect to this neighborhood's detail page.

        Fixed: redirect() forwards extra keyword arguments to reverse() as
        URL kwargs, so the original `kwargs={"id": self.id}` asked reverse()
        for a URL parameter literally named 'kwargs' and raised
        NoReverseMatch. Pass the id directly instead.
        """
        return redirect("show_neighborhood", id=self.id)

    def delete_neighborhood(self):
        """Delete this neighborhood record."""
        self.delete()

    @classmethod
    def search(cls, query):
        """Case-insensitive search of neighborhoods whose name contains `query`."""
        neighborhood = cls.objects.filter(name__icontains=query)
        return neighborhood
class UserProfile(models.Model):
    """Extra profile data attached one-to-one to a Django auth User."""

    user = models.OneToOneField(User, on_delete=models.CASCADE, unique=True, related_name='user')
    bio = models.TextField(max_length=500, blank=True)
    image = models.ImageField(upload_to='user_dps', blank=True)
    idnumber = models.CharField(max_length=10,)
    neighborhood = models.ForeignKey(Neighborhood, blank=True)

    class Meta:
        verbose_name = 'User Profile'
        verbose_name_plural = 'User Profiles'
class Business(models.Model):
    """A business located within a neighborhood."""

    name = models.CharField(max_length=30, default='Unknown')
    image = models.ImageField(upload_to='business_images', blank=True)
    location = models.CharField(max_length=30, default='Unknown')
    additional_details = models.CharField(max_length=30, blank=True)
    neighborhood = models.ForeignKey(Neighborhood, blank=True, null=True)

    class Meta:
        verbose_name = 'Business'
        verbose_name_plural = 'Businesses'

    def __str__(self):
        return self.name
class Post(models.Model):
    """A user's post (text plus optional image) within a neighborhood."""

    image = models.ImageField(upload_to='uploaded_images', blank=True, null=True)
    text_post = models.CharField(max_length=1000)
    author = models.ForeignKey(User)
    neighborhood = models.ForeignKey(Neighborhood)

    class Meta:
        verbose_name = 'Post'
        verbose_name_plural = 'Posts'

    def __str__(self):
        return self.text_post
|
"""
Remove missing values from one or more dataframes.
"""
from tasrif.processing_pipeline import ProcessingOperator
from tasrif.processing_pipeline.validators import InputsAreDataFramesValidatorMixin
class DropFeaturesOperator(InputsAreDataFramesValidatorMixin, ProcessingOperator):
    """Drop the given feature columns from every input dataframe.

    Examples
    --------
    >>> import pandas as pd
    >>> import numpy as np
    >>>
    >>> from tasrif.processing_pipeline import DropFeaturesOperator
    >>>
    >>> df0 = pd.DataFrame([['Tom', 10], ['Alfred', 15], ['Alfred', 18], ['Juli', 14]], columns=['name', 'score'])
    >>> df1 = pd.DataFrame({"name": ['Alfred', 'juli', 'Tom', 'Ali'],
    ...                     "height": [np.nan, 155, 159, 165],
    ...                     "born": [pd.NaT, pd.Timestamp("2010-04-25"), pd.NaT,
    ...                              pd.NaT]})
    >>>
    >>> operator = DropFeaturesOperator(feature_names=['name'])
    >>> df0, df1 = operator.process(df0, df1)
    >>>
    >>> print(df0)
    >>> print(df1)
       score
    0     10
    1     15
    2     18
    3     14
       height       born
    0     NaN        NaT
    1   155.0 2010-04-25
    2   159.0        NaT
    3   165.0        NaT
    """
    # NOTE: the previous docstring example showed the 'name' column still
    # present after processing; the output above reflects the actual result
    # of dropping it.

    def __init__(self, feature_names: list):
        """Initializes the operator

        Args:
            feature_names:
                features (columns) to drop from each dataframe
        """
        super().__init__()
        self.feature_names = feature_names

    def __str__(self):
        return self.__class__.__name__

    def _process(self, *data_frames):
        """Process the passed data using the processing configuration specified
        in the constructor

        Args:
            *data_frames (list of pd.DataFrame):
                Variable number of pandas dataframes to be processed

        Returns:
            data_frames
                Processed data frames

        Raises:
            ValueError: Occurs when one of the objects in feature_names is not a column within
                *data_frames
        """
        processed = []
        for dataframe in data_frames:
            # Validate every requested column before dropping anything so a
            # missing column fails fast without partially modified output.
            for col in self.feature_names:
                if col not in dataframe.columns:
                    raise ValueError(str(col) + " not in columns")
            dataframe = dataframe.drop(self.feature_names, axis=1)
            processed.append(dataframe)
        return tuple(processed)
|
# Copyright 2016-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
import sys
from awsglue.transforms import Join
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
glueContext = GlueContext(SparkContext.getOrCreate())

# catalog: database and table names
db_name = "legislators"
tbl_persons = "persons_json"
tbl_membership = "memberships_json"
tbl_organization = "organizations_json"

# output s3 and temp directories
output_history_dir = "s3://glue-sample-target/output-dir/legislator_history"
output_lg_single_dir = "s3://glue-sample-target/output-dir/legislator_single"
output_lg_partitioned_dir = "s3://glue-sample-target/output-dir/legislator_part"
redshift_temp_dir = "s3://glue-sample-target/temp-dir/"

# Create dynamic frames from the source tables
persons = glueContext.create_dynamic_frame.from_catalog(database=db_name, table_name=tbl_persons)
memberships = glueContext.create_dynamic_frame.from_catalog(database=db_name, table_name=tbl_membership)
orgs = glueContext.create_dynamic_frame.from_catalog(database=db_name, table_name=tbl_organization)

# Keep the fields we need and rename some.
orgs = orgs.drop_fields(['other_names', 'identifiers']).rename_field('id', 'org_id').rename_field('name', 'org_name')

# Join the frames to create history: persons joined to memberships on
# id/person_id, then joined to orgs on organization_id/org_id; the join
# key columns are dropped from the result.
l_history = Join.apply(orgs, Join.apply(persons, memberships, 'id', 'person_id'), 'org_id', 'organization_id').drop_fields(['person_id', 'org_id'])

# ---- Write out the history ----

# Write out the dynamic frame into parquet in "legislator_history" directory
print("Writing to /legislator_history ...")
glueContext.write_dynamic_frame.from_options(frame = l_history, connection_type = "s3", connection_options = {"path": output_history_dir}, format = "parquet")

# Write out a single file to directory "legislator_single"
# (repartition(1) collapses everything into one output partition/file).
s_history = l_history.toDF().repartition(1)
print("Writing to /legislator_single ...")
s_history.write.parquet(output_lg_single_dir)

# Convert to data frame, write to directory "legislator_part", partitioned by (separate) Senate and House.
print("Writing to /legislator_part, partitioned by Senate and House ...")
l_history.toDF().write.parquet(output_lg_partitioned_dir, partitionBy=['org_name'])

# ---- Write out to relational databases ----

# Convert the data to flat tables
print("Converting to flat tables ...")
dfc = l_history.relationalize("hist_root", redshift_temp_dir)

# Cycle through and write to Redshift.
for df_name in dfc.keys():
    m_df = dfc.select(df_name)
    print("Writing to Redshift table: ", df_name, " ...")
    glueContext.write_dynamic_frame.from_jdbc_conf(frame = m_df, catalog_connection = "redshift3", connection_options = {"dbtable": df_name, "database": "testdb"}, redshift_tmp_dir = redshift_temp_dir)
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
stolen from: https://github.com/SuchismitaSahu1993/nemo_asr_app
thanks to: Suchismita Sahu
"""
import pytorch_lightning as pl
from nemo.collections.asr.models import EncDecCTCModel
from nemo.core.config import hydra_runner
from nemo.utils.exp_manager import exp_manager
@hydra_runner(config_path="conf", config_name="config.yaml")
def main(cfg):
    """Fine-tune a pretrained EncDecCTC ASR model on new data.

    `cfg` is the Hydra config loaded from conf/config.yaml; it supplies the
    trainer options, pretrained model name, target label set, optimizer and
    dataset configs.
    """
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    # Start from the pretrained checkpoint named in the config.
    asr_model = EncDecCTCModel.from_pretrained(model_name=cfg.get("name"))
    # Swap in the target vocabulary before fine-tuning.
    asr_model.change_vocabulary(new_vocabulary=cfg.labels)
    asr_model._trainer = trainer
    asr_model.setup_optimization(cfg.model.optim)
    # Point to the data we'll use for fine-tuning as the training set
    asr_model.setup_training_data(train_data_config=cfg.model.train_ds)
    # Point to the new validation data for fine-tuning
    asr_model.setup_validation_data(val_data_config=cfg.model.validation_ds)
    trainer.fit(asr_model)
    # Persist the fine-tuned model as a .nemo archive.
    asr_model.save_to(save_path="QuartzNet15x5.nemo")

if __name__ == '__main__':
    main()  # noqa pylint: disable=no-value-for-parameter
|
import cv2
import numpy as np
import os
def stack_dataset_examples(directory):
    """Build a 2x4 image mosaic from the first eight images in `directory`.

    Hidden files (names starting with '.') are skipped, the remaining file
    names are sorted so the layout is deterministic, and every image is
    resized to 700x450 before being stacked into two rows of four.

    Args:
        directory: path to a directory containing at least eight image files.

    Returns:
        A numpy array holding the 2x4 grid of resized images.

    Raises:
        ValueError: if fewer than eight usable images are found (the
            original code crashed with a bare IndexError in this case).
    """
    names = sorted(item for item in os.listdir(directory) if not item.startswith('.'))
    images = []
    for name in names:
        path = os.path.join(directory, name)
        loaded = cv2.imread(path)
        images.append(cv2.resize(loaded, (700, 450)))
    # The grid is fixed at 2 rows x 4 columns; images beyond the first
    # eight are ignored, matching the original behavior.
    if len(images) < 8:
        raise ValueError("need at least 8 images in %s, found %d" % (directory, len(images)))
    top_row = np.hstack(images[0:4])
    bottom_row = np.hstack(images[4:8])
    return np.vstack((top_row, bottom_row))
# Collect every visible subdirectory of the working directory; each one is
# expected to hold a dataset whose examples get stacked into one mosaic.
directories = []
for item in os.listdir('.'):
    if not item.startswith('.') and not os.path.isfile(os.path.join('.', item)):
        directories.append(item)
directories = sorted(directories)

stacked_images = []
for d in directories:
    stacked_images.append(stack_dataset_examples(d))

# Group the per-directory mosaics three to a row.
cluster_imgs = []
for i in range(0, len(stacked_images), 3):
    remaining = len(stacked_images) - i
    # BUG FIX: the original tested `i+1 > len(...)` (never true inside the
    # loop) and then `i+2 > len(...)`, which indexed one past the end of
    # the list whenever exactly one mosaic remained.
    if remaining == 1:
        cluster_imgs.append(stacked_images[i])
    elif remaining == 2:
        cluster_imgs.append(np.hstack((stacked_images[i], stacked_images[i + 1])))
    else:
        cluster_imgs.append(np.hstack((stacked_images[i], stacked_images[i + 1], stacked_images[i + 2])))

# NOTE(review): np.vstack requires all rows to have the same width, so a
# final row of one or two mosaics will still fail here — confirm whether
# padding is intended.
fin_image = np.vstack(cluster_imgs)
cv2.imwrite("Figure.png", fin_image)
|
from django import template
from django.contrib.contenttypes.models import ContentType
from ..models import Comment
from ..forms import CommentForm
register = template.Library()
@register.simple_tag
def get_comment_count(obj):
    """Template tag: number of comments attached to `obj`."""
    obj_type = ContentType.objects.get_for_model(obj)
    comment_qs = Comment.objects.filter(content_type=obj_type, object_id=obj.pk)
    return comment_qs.count()
@register.simple_tag
def get_comment_form(obj):
    """Template tag: a CommentForm pre-filled to target `obj` as a top-level comment."""
    obj_type = ContentType.objects.get_for_model(obj)
    return CommentForm(initial={
        'content_type': obj_type.model,
        'object_id': obj.pk,
        'reply_comment_id': 0,
    })
@register.simple_tag
def get_comment_list(obj):
    """Template tag: top-level comments on `obj`, newest first."""
    obj_type = ContentType.objects.get_for_model(obj)
    top_level = Comment.objects.filter(content_type=obj_type, object_id=obj.pk, parent=None)
    return top_level.order_by('-comment_time')
"""Tables for User, Password, Comments, Friends and Posts
Revision ID: 3d6c688278ae
Revises:
Create Date: 2019-10-08 15:19:44.799296
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3d6c688278ae'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the user, friends, password, posts and comments tables.

    Tables are created parents-first so foreign keys resolve:
    user -> friends/password/posts -> comments.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('username', sa.String(length=30), nullable=True),
    sa.Column('email', sa.String(length=30), nullable=True),
    sa.Column('password_hash', sa.String(length=128), nullable=True),
    sa.Column('first_name', sa.String(length=30), nullable=True),
    sa.Column('last_name', sa.String(length=30), nullable=True),
    sa.Column('education', sa.String(length=30), nullable=True),
    sa.Column('employment', sa.String(length=30), nullable=True),
    sa.Column('music', sa.String(length=30), nullable=True),
    sa.Column('movie', sa.String(length=30), nullable=True),
    sa.Column('nationality', sa.String(length=30), nullable=True),
    sa.Column('birthday', sa.Date(), nullable=True),
    sa.Column('last_login_try', sa.DateTime(), nullable=True),
    sa.Column('failed_logins', sa.Integer(), nullable=True),
    sa.Column('is_blocked', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # username and email are enforced unique via indexes
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
    # Self-referencing association table for user friendships
    op.create_table('friends',
    sa.Column('u_id', sa.Integer(), nullable=False),
    sa.Column('f_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['f_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['u_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('u_id', 'f_id')
    )
    # Password history: composite PK allows multiple hashes per user
    op.create_table('password',
    sa.Column('u_id', sa.Integer(), nullable=False),
    sa.Column('password_hash', sa.String(length=128), nullable=False),
    sa.Column('creation_time', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['u_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('u_id', 'password_hash')
    )
    op.create_index(op.f('ix_password_creation_time'), 'password', ['creation_time'], unique=False)
    op.create_table('posts',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('u_id', sa.Integer(), nullable=True),
    # NOTE(review): 'content' is typed Integer — looks like it should be a
    # String/Text column; confirm against the application model.
    sa.Column('content', sa.Integer(), nullable=True),
    sa.Column('image', sa.String(), nullable=True),
    sa.Column('creation_time', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['u_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_posts_creation_time'), 'posts', ['creation_time'], unique=False)
    op.create_table('comments',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('p_id', sa.Integer(), nullable=True),
    sa.Column('u_id', sa.Integer(), nullable=True),
    sa.Column('comment', sa.String(length=140), nullable=True),
    sa.Column('creation_time', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['p_id'], ['posts.id'], ),
    sa.ForeignKeyConstraint(['u_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop all tables created by upgrade(), children-first (reverse order)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('comments')
    op.drop_index(op.f('ix_posts_creation_time'), table_name='posts')
    op.drop_table('posts')
    op.drop_index(op.f('ix_password_creation_time'), table_name='password')
    op.drop_table('password')
    op.drop_table('friends')
    op.drop_index(op.f('ix_user_username'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_table('user')
    # ### end Alembic commands ###
|
# Generated by Django 3.0.6 on 2020-07-22 18:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add default ordering/verbose names to Question and a required `text` field."""

    dependencies = [
        ('survey', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='question',
            options={'ordering': ('survey', 'order'), 'verbose_name': 'question', 'verbose_name_plural': 'questions'},
        ),
        migrations.AddField(
            model_name='question',
            name='text',
            field=models.CharField(default='foo', max_length=500, verbose_name='Text'),
            # 'foo' is a one-off default used to backfill existing rows;
            # it is not kept as a model-level default.
            preserve_default=False,
        ),
    ]
|
# _*_ coding: utf-8 _*_
"""
Created by Allen7D on 2018/6/17.
"""
from sqlalchemy import Column, Integer, String, ForeignKey
from app.models.base import Base
from app.models.image import Image
__author__ = 'Allen7D'
class Theme2Product(Base):
    """Association table linking themes to products (many-to-many)."""
    __tablename__ = 'theme_product'
    # Composite primary key: one row per (theme, product) pair.
    theme_id = Column(Integer, ForeignKey('theme.id'), primary_key=True, comment='主题外键')
    product_id = Column(Integer, ForeignKey('product.id'), primary_key=True, comment='商品外键')
class Product2Image(Base):
    """Link table between products and their gallery images."""
    __tablename__ = 'product_image'
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Foreign key into the image table
    img_id = Column(Integer, ForeignKey('image.id'), nullable=False, comment='外键,关联图片表')
    # Display order of the image within the product gallery
    order = Column(Integer, nullable=False, comment='图片排序序号')
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='外键, 商品id')
    def keys(self):
        # Serialization contract: hide raw ids/order, expose the resolved URL
        self.hide('id', 'img_id', 'product_id', 'order').append('img_url')
        return self.fields
    @property
    def img_url(self):
        # Resolve the linked Image row to its URL on access
        return Image.get_img_by_id(id=self.img_id).url
class Product2Property(Base):
    """Detail property (name/value pair) attached to a product."""
    __tablename__ = 'product_property'
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Property name (e.g. a spec label); value goes in `detail`
    name = Column(String(30), comment='详情属性名称')
    detail = Column(String(255), nullable=False, comment='详情属性')
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='外键, 商品id')
class Order2Product(Base):
    """Association table linking orders to products with a purchase count."""
    __tablename__ = 'order_product'
    # Composite primary key: (order, product)
    order_id = Column(Integer, primary_key=True, comment='联合主键,订单id')
    product_id = Column(Integer, primary_key=True, comment='联合主键,商品id')
    count = Column(Integer, nullable=False, comment='商品数量')
    def __init__(self, order_id=None, product_id=None, count=None):
        """Create a link row; fields default to None so the ORM can construct it."""
        self.order_id = order_id
        self.product_id = product_id
        self.count = count
        super(Order2Product, self).__init__()
|
"""
mcpython - a minecraft clone written in python licenced under the MIT-licence
(https://github.com/mcpython4-coding/core)
Contributors: uuk, xkcdjerry (inactive)
Based on the game of fogleman (https://github.com/fogleman/Minecraft), licenced under the MIT-licence
Original game "minecraft" by Mojang Studios (www.minecraft.net), licenced under the EULA
(https://account.mojang.com/documents/minecraft_eula)
Mod loader inspired by "Minecraft Forge" (https://github.com/MinecraftForge/MinecraftForge) and similar
This project is not official by mojang and does not relate to it.
"""
import asyncio
import itertools
import math
import typing
from abc import ABC
import mcpython.client.gui.ContainerRenderer
import mcpython.client.gui.Slot
import mcpython.client.rendering.ui.Buttons
import mcpython.client.rendering.ui.SearchBar
import mcpython.common.event.TickHandler
import mcpython.engine.event.EventBus
import mcpython.engine.ResourceLoader
import mcpython.util.texture as texture_util
import PIL.Image
import pyglet
from mcpython import shared
from mcpython.client.gui.util import getTabTexture, CreativeTabScrollbar
from mcpython.common.container.ItemGroup import FilteredItemGroup, ItemGroup
from mcpython.common.container.ResourceStack import ItemStack, LazyClassLoadItemstack
from mcpython.engine import logger
from pyglet.window import key, mouse
class ICreativeView(mcpython.client.gui.ContainerRenderer.ContainerRenderer, ABC):
    """
    Base class for a creative tab
    Comes with some helper code
    """
    def __init__(self):
        super().__init__()
        # Tab header textures (normal / selected); assigned by
        # CreativeTabManager.add_tab() or by subclasses
        self.tab_icon = None
        self.tab_icon_selected = None
        self.is_selected = False
        # Slot used to render the tab's icon item in the tab header
        self.tab_slot = mcpython.client.gui.Slot.Slot()
        # Lower-left pixel position of the tab header; set during draw_tab()
        self.icon_position = 0, 0
    def update_rendering(self):
        """Hook: refresh slot contents; default does nothing."""
        pass
    def get_icon_stack(self) -> ItemStack:
        """Return the ItemStack shown as this tab's icon; must be overridden."""
        raise NotImplementedError
    def get_view_size(self) -> typing.Tuple[int, int]:
        """Return the (width, height) of the tab background; must be overridden."""
        raise NotImplementedError
    def draw_at(self, position: typing.Tuple[int, int], hovering_slot=None):
        """Hook: draw the tab background at *position*; default does nothing."""
        pass
    def draw(self, hovering_slot=None):
        """Draw the centered container, then tab headers, slots and title."""
        self.bg_anchor = "MM"
        self.window_anchor = "MM"
        self.bg_image_size = self.get_view_size()
        x, y = self.get_position()
        self.draw_at((x, y), hovering_slot=hovering_slot)
        CT_MANAGER.draw_tabs((x, y), self.bg_image_size)
        # Draw all slots first, then their labels on top
        for slot in self.get_draw_slots():
            slot.draw(x, y, hovering=slot == hovering_slot)
        for slot in self.get_draw_slots():
            slot.draw_label()
        if self.custom_name is not None:
            if self.custom_name_label.text != self.custom_name:
                self.custom_name_label.text = self.custom_name
            self.custom_name_label.x = x + 15
            self.custom_name_label.y = y + self.bg_image_size[1] - 10
            self.custom_name_label.draw()
    async def on_activate(self):
        await super().on_activate()
        await CT_MANAGER.activate()
    async def on_deactivate(self):
        await super().on_deactivate()
        # Give mouse control back to the underlying game state
        shared.state_handler.active_state.parts[0].activate_mouse = True
        await CT_MANAGER.deactivate()
class CreativeItemTab(ICreativeView):
    """
    A creative inventory tab backed by an ItemGroup, optionally filled from
    an item tag (re-filled whenever the resource pipeline reloads).
    """
    # Background texture shared by all instances; loaded by reload()
    bg_texture: pyglet.image.AbstractImage = None
    @classmethod
    async def reload(cls):
        """(Re-)load and 2x-scale the tab background texture."""
        cls.bg_texture = texture_util.to_pyglet_image(
            mcpython.util.texture.to_pillow_image(
                (await mcpython.engine.ResourceLoader.read_pyglet_image(
                    "minecraft:gui/container/creative_inventory/tab_items"
                )).get_region(0, 120, 194, 255 - 120)
            ).resize((2 * 195, 2 * 136), PIL.Image.NEAREST)
        )
    def __init__(
        self, name: str, icon: ItemStack, group: ItemGroup = None, linked_tag=None
    ):
        """
        Creates a new tab
        :param name: display name of the tab
        :param icon: the ItemStack rendered as the tab icon
        :param group: the backing ItemGroup; a fresh one is created if omitted
        :param linked_tag: optional item tag to (re)fill the group from
        """
        super().__init__()
        self.icon = icon
        self.group = group if group is not None else ItemGroup()
        self.scroll_offset = 0
        self.old_scroll_offset = 0
        self.linked_tag = linked_tag
        self.custom_name = self.name = name
        if linked_tag is not None:
            # If there is a tag linked to this tab, subscribe to the reload event
            import mcpython.common.data.ResourcePipe
            mcpython.common.data.ResourcePipe.handler.register_data_processor(
                self.load_from_tag
            )
        self.scroll_bar = CreativeTabScrollbar(self.set_scrolling)
    def set_scrolling(self, progress: int):
        """Scrollbar callback: update the row offset and refresh slot contents."""
        self.scroll_offset = round(progress - 1)
        self.update_rendering()
    def load_from_tag(self):
        """
        Helper method for reloading the content from the underlying tag
        Use only when self.linked_tag is set, otherwise, this will crash
        """
        if self.linked_tag is None:
            raise RuntimeError("tag must be set for reloading")
        tag = shared.tag_handler.get_entries_for(self.linked_tag, "items")
        self.group.entries.clear()
        # Drop entries that could not be resolved to a real item
        self.group.entries += filter(
            lambda stack: not stack.is_empty(),
            (ItemStack(e, warn_if_unarrival=False) for e in tag),
        )
        # 9 items per row, 5 visible rows -> scroll over the overflow rows
        self.scroll_bar.set_max_value(
            max(1, (math.ceil(len(self.group.entries) / 9) - 4))
        )
        self.update_rendering(force=True)
    def update_rendering(self, force=False):
        """
        Updates the slot content of the rendering system
        :param force: force update, also when nothing changed
        """
        self.group.load_lazy()
        if self.old_scroll_offset == self.scroll_offset and not force:
            return
        self.old_scroll_offset = self.scroll_offset
        entries = list(self.group.view())  # todo: cache value!
        self.scroll_bar.set_max_value(max(math.ceil(len(entries) / 9) - 4, 1))
        entries = iter(entries)
        # Skip the rows scrolled out above the visible area
        if self.scroll_offset != 0:
            for _ in range(9 * self.scroll_offset):
                next(entries)
        # Slots 9+ are the item grid; slots [:9] mirror the hotbar
        for i, slot in enumerate(self.slots[9:]):
            try:
                entry = next(entries)
            except StopIteration:
                # todo: can we simply clean the itemstack?
                slot.set_itemstack_force(ItemStack.create_empty())
            else:
                # todo: can we change the item in the stack?
                slot.set_itemstack_force(entry)
        for slot in self.slots[:9]:
            slot.invalidate()
    async def create_slot_renderers(self):
        """
        Creates the slots: 9 hotbar mirrors plus a 9x5 grid of infinite stacks
        """
        def work(i):
            # Late-bound accessor so the slot follows the active player
            return (
                lambda: shared.world.get_active_player().inventory_main.slots[i]
                if shared.world.world_loaded
                else None
            )
        slots = [
            [
                mcpython.client.gui.Slot.SlotInfiniteStack(
                    ItemStack.create_empty(), position=(18 + x * 36, 61 + y * 36)
                )
                for x in range(9)
            ]
            for y in range(4, -1, -1)
        ]
        return [
            mcpython.client.gui.Slot.SlotCopyWithDynamicTarget(
                work(j),
                position=(
                    20 + j * 36,
                    16,
                ),
            )
            for j in range(9)
        ] + sum(slots, [])
    def add_item(self, item: typing.Union[ItemStack, LazyClassLoadItemstack, str]):
        """
        Adds an item to the underlying item group
        :param item: the item stack or the item name
        :return: self, for fluent chaining
        """
        if isinstance(item, str):
            item = LazyClassLoadItemstack(item)
        self.group.add(item)
        return self
    def get_icon_stack(self) -> ItemStack:
        return self.icon
    def get_view_size(self) -> typing.Tuple[int, int]:
        return 2 * 195, 2 * 136
    def draw_at(self, position: typing.Tuple[int, int], hovering_slot=None):
        """Draw the background texture and the scrollbar."""
        self.bg_texture.blit(*position)
        self.scroll_bar.draw_at(
            (position[0] + 176 * 2, position[1] + 8 * 2), self.get_view_size()[1] - 50
        )
    def clear(self):
        pass
    async def on_deactivate(self):
        await super().on_deactivate()
        self.scroll_bar.deactivate()
    async def on_activate(self):
        await super().on_activate()
        self.scroll_bar.activate()
        self.update_rendering(True)
    def on_mouse_button_press(
        self,
        relative_x: int,
        relative_y: int,
        button: int,
        modifiers: int,
        item_stack,
        slot,
    ) -> bool:
        """
        Clicking inside the item-grid area with a held stack discards it
        (unless clicking the slot holding the same resource). Returns True
        when the click was consumed.
        """
        if (
            2 * 16 <= relative_x <= 2 * 170
            and 24 * 2 <= relative_y <= 119 * 2
            and not item_stack.is_empty()
            and (
                slot is None
                or not slot.get_itemstack().contains_same_resource(item_stack)
            )
        ):
            item_stack.clean()
            return True
        return False
    def __repr__(self):
        # Bug fix: the class name was misspelled "CreateItemTab" here
        return f"CreativeItemTab({self.name}, entry_count={len(self.group.entries)})"
    def update_shift_container(self):
        """Wire shift-click transfer: hotbar mirrors <-> item grid."""
        shared.inventory_handler.shift_container_handler.container_A = self.slots[:9]
        shared.inventory_handler.shift_container_handler.container_B = self.slots[9:]
# Load the tab background texture once at import time
# (skipped in test runs, where no rendering backend is available)
if not shared.IS_TEST_ENV:
    shared.tick_handler.schedule_once(CreativeItemTab.reload())
class CreativeTabSearchBar(CreativeItemTab):
    """
    The search tab: aggregates the entries of every CreativeItemTab and
    filters them live via a regex built from the search bar text.
    """
    @classmethod
    async def reload(cls):
        """(Re-)load the search-tab background texture (has a text field area)."""
        cls.bg_texture = texture_util.to_pyglet_image(
            mcpython.util.texture.to_pillow_image(
                (await mcpython.engine.ResourceLoader.read_pyglet_image(
                    "minecraft:gui/container/creative_inventory/tab_item_search"
                )).get_region(0, 120, 194, 255 - 120)
            ).resize((2 * 195, 2 * 136), PIL.Image.NEAREST)
        )
    def __init__(
        self, name: str, icon: ItemStack, group: ItemGroup = None, linked_tag=None
    ):
        super().__init__(name, icon, group, linked_tag)
        # Wrap the group so apply_raw_filter() can narrow the visible entries
        self.group: FilteredItemGroup = self.group.filtered()
        self.search_bar = mcpython.client.rendering.ui.SearchBar.SearchBar(
            change_callback=lambda text: self.group.apply_raw_filter(f"(.*){text}(.*)"),
            enter_callback=lambda: self.search_bar.disable(),
            exit_callback=lambda: self.search_bar.disable(),
            enable_mouse_to_enter=True,
        )
        self.tab_icon = CreativeTabManager.UPPER_TAB
        self.tab_icon_selected = CreativeTabManager.UPPER_TAB_SELECTED
        # Re-aggregate the tab contents on the next activation after a data reload
        self.need_reload = True
        def setNeedReload():
            self.need_reload = True
        import mcpython.common.data.ResourcePipe as ResourcePipe
        ResourcePipe.handler.register_data_processor(setNeedReload)
    async def on_deactivate(self):
        await super().on_deactivate()
        self.search_bar.disable()
    async def on_activate(self):
        await super().on_activate()
        # Reset the filter to match-everything when the tab opens
        self.group.apply_raw_filter("(.*)")
        if self.need_reload:
            self.need_reload = False
            # Collect the entries of all item tabs across all pages
            self.group.entries.clear()
            for page in CT_MANAGER.pages:
                for tab in page:
                    if isinstance(tab, CreativeItemTab):
                        self.group.entries += tab.group.entries
            self.group.sort_after_item_name()
            self.update_rendering(True)
class CreativePlayerInventory(ICreativeView):
    """Creative tab mirroring the player's own inventory, plus a trash slot."""
    TEXTURE_SIZE = 195 * 2, 136 * 2
    # Background texture; loaded by reload()
    TEXTURE = None
    @classmethod
    async def reload(cls):
        """(Re-)load and scale the inventory-tab background texture."""
        cls.TEXTURE = texture_util.resize_image_pyglet(
            (await mcpython.engine.ResourceLoader.read_pyglet_image(
                "minecraft:gui/container/creative_inventory/tab_inventory"
            )).get_region(0, 120, 195, 136),
            cls.TEXTURE_SIZE,
        )
    def __init__(self):
        super().__init__()
        # Icon shown on the tab header
        self.stack = ItemStack("minecraft:chest")
        self.tab_icon = CreativeTabManager.LOWER_TAB
        self.tab_icon_selected = CreativeTabManager.LOWER_TAB_SELECTED
    async def on_activate(self):
        await super().on_activate()
        # Re-apply slot layout from the config file on the next tick
        shared.tick_handler.schedule_once(self.reload_config())
    def get_icon_stack(self) -> ItemStack:
        return self.stack
    def get_view_size(self) -> typing.Tuple[int, int]:
        return self.TEXTURE_SIZE
    def draw_at(self, position: typing.Tuple[int, int], hovering_slot=None):
        self.TEXTURE.blit(*position)
    async def create_slot_renderers(self):
        """
        Creates the slots: the 40 main inventory slots, slot 45, and a trash can
        """
        def work(i):
            # Late-bound accessor so the slot follows the active player
            return lambda: shared.world.get_active_player().inventory_main.slots[i]
        return [
            mcpython.client.gui.Slot.SlotCopyWithDynamicTarget(
                work(j),
            )
            for j in range(40)
        ] + [
            mcpython.client.gui.Slot.SlotCopyWithDynamicTarget(
                work(45),
            ),
            mcpython.client.gui.Slot.SlotTrashCan(),
        ]
    @staticmethod
    def get_config_file() -> typing.Optional[str]:
        # (annotation fixed: `str or None` evaluates to just `str`)
        return "assets/config/inventory/player_inventory_main_creative.json"
# Load the inventory-tab texture once at import time (skipped in test runs)
if not shared.IS_TEST_ENV:
    shared.tick_handler.schedule_once(CreativePlayerInventory.reload())
class CreativeTabManager:
    """
    Manages the creative tab pages: tab layout and hit testing, tab
    switching, page navigation, and the shared inventory/search tabs.
    """
    TAB_SIZE = 28 * 2, 30 * 2
    # todo: make this reload-able!
    # Tab header textures, loaded by reload()
    UPPER_TAB = None
    UPPER_TAB_SELECTED = None
    LOWER_TAB = None
    LOWER_TAB_SELECTED = None
    @classmethod
    async def reload(cls):
        """Cut the four tab-header textures out of the tab texture atlas."""
        cls.UPPER_TAB = texture_util.resize_image_pyglet(
            getTabTexture().get_region(0, 224, 28, 30), cls.TAB_SIZE
        )
        cls.UPPER_TAB_SELECTED = texture_util.resize_image_pyglet(
            getTabTexture().get_region(0, 224 - 30, 28, 30), cls.TAB_SIZE
        )
        cls.LOWER_TAB = texture_util.resize_image_pyglet(
            getTabTexture().get_region(0, 164, 28, 30), cls.TAB_SIZE
        )
        cls.LOWER_TAB_SELECTED = texture_util.resize_image_pyglet(
            getTabTexture().get_region(0, 128, 28, 30), cls.TAB_SIZE
        )
    def __init__(self):
        # Tab pages; each page holds up to 9 tabs (first 4 drawn above the
        # container, the rest below — see draw_tabs)
        self.pages: typing.List[typing.List[ICreativeView]] = [[]]
        self.inventory_instance = None
        self.search_instance = None
        # Checked in get_tab_at but never created here; presumably a
        # planned saved-hotbars tab — TODO confirm
        self.saved_hotbars = None
        self.current_page = 0
        # Event bus only active while a creative tab is open
        self.underlying_event_bus: mcpython.engine.event.EventBus.EventBus = (
            shared.event_handler.create_bus(active=False)
        )
        self.underlying_event_bus.subscribe("user:mouse:press", self.on_mouse_press)
        self.underlying_event_bus.subscribe("user:mouse:drag", self.on_mouse_move)
        self.underlying_event_bus.subscribe("user:mouse:motion", self.on_mouse_move)
        self.underlying_event_bus.subscribe("user:keyboard:press", self.on_key_press)
        self.hovering_tab = None
        # Page navigation arrows (not created in test runs)
        self.page_left = (
            mcpython.client.rendering.ui.Buttons.arrow_button_left(
                (0, 0), lambda: self.increase_page(-1)
            )
            if not shared.IS_TEST_ENV
            else None
        )
        self.page_right = (
            mcpython.client.rendering.ui.Buttons.arrow_button_right(
                (0, 0), lambda: self.increase_page(1)
            )
            if not shared.IS_TEST_ENV
            else None
        )
        self.page_label = pyglet.text.Label(anchor_x="center", anchor_y="center")
        # Layout of the currently drawn container; updated by draw_tabs()
        self.lower_left_position = 0, 0
        self.container_size = 1, 1
        self.current_tab: typing.Optional[ICreativeView] = None
    def is_multi_page(self):
        """Return True when more than one tab page exists."""
        return len(self.pages) > 1
    async def on_key_press(self, button, mod):
        """Keyboard handling: E closes the tab; N/M switch to prev/next page."""
        if shared.state_handler.global_key_bind_toggle:
            return
        if button == key.E:
            await shared.inventory_handler.hide(self.current_tab)
        elif button == key.N and self.is_multi_page():
            self.current_page = max(self.current_page - 1, 0)
        elif button == key.M and self.is_multi_page():
            self.current_page = min(self.current_page + 1, len(self.pages) - 1)
    def on_mouse_move(self, x, y, dx, dy, *_):
        """Track which tab header the cursor is currently hovering."""
        tab = self.get_tab_at(x, y)
        self.hovering_tab = tab
    def init_tabs_if_needed(self):
        """Lazily create the player-inventory and search tabs."""
        if self.inventory_instance is None:
            self.inventory_instance = CreativePlayerInventory()
        if self.search_instance is None:
            self.search_instance = CreativeTabSearchBar(
                "Search", ItemStack("minecraft:paper")
            )
    async def activate(self):
        """Arm the event bus (one tick later) and the page buttons."""
        mcpython.common.event.TickHandler.handler.bind(
            self.underlying_event_bus.activate, 1
        )
        if self.is_multi_page():
            await self.page_left.activate()
            await self.page_right.activate()
    async def deactivate(self):
        """Disarm the event bus and the page buttons."""
        self.underlying_event_bus.deactivate()
        await self.page_left.deactivate()
        await self.page_right.deactivate()
    async def on_mouse_press(self, mx, my, button, modifiers):
        """Left-click on a tab header switches to that tab."""
        if not button & mouse.LEFT:
            return
        tab = self.get_tab_at(mx, my)
        if tab is not None:
            await self.switch_to_tab(tab)
    def get_tab_at(self, mx, my) -> typing.Optional[ICreativeView]:
        """Return the tab whose header contains pixel (mx, my), or None."""
        tx, ty = self.TAB_SIZE
        tabs = self.pages[self.current_page]
        x, y = self.lower_left_position
        # Tabs 5+ sit below the container
        for tab in tabs[4:]:
            # y is here not a mistake as tabs are going down, instead of up
            if 0 <= mx - x <= tx and 0 <= y - my <= ty:
                return tab
            x += self.TAB_SIZE[0]
        x = self.lower_left_position[0]
        y += self.container_size[1]
        # The first four tabs sit above the container
        for tab in tabs[:4]:
            if 0 <= mx - x <= tx and 0 <= my - y <= ty:
                return tab
            x += self.TAB_SIZE[0]
        # todo: add extension point here
        # Fixed tabs drawn at their recorded icon_position
        for tab in (self.inventory_instance, self.search_instance, self.saved_hotbars):
            if (
                tab is not None
                and 0 <= mx - tab.icon_position[0] <= tx
                and 0 <= my - tab.icon_position[1] <= ty
            ):
                return tab
    def add_tab(self, tab: ICreativeView):
        """Append *tab* to the last page (or start a new page); returns self."""
        tab.update_rendering()
        if len(self.pages[-1]) < 9:
            self.pages[-1].append(tab)
            # First four tabs on a page use the upper header texture
            tab.tab_icon = (
                self.UPPER_TAB if len(self.pages[-1]) <= 4 else self.LOWER_TAB
            )
            tab.tab_icon_selected = (
                self.UPPER_TAB_SELECTED
                if len(self.pages[-1]) <= 4
                else self.LOWER_TAB_SELECTED
            )
            tab.tab_slot.set_itemstack(tab.get_icon_stack())
        else:
            self.pages.append([tab])
            tab.tab_icon = self.UPPER_TAB
            tab.tab_icon_selected = self.UPPER_TAB_SELECTED
            tab.tab_slot.set_itemstack(tab.get_icon_stack())
        return self
    def draw_tab(self, tab: ICreativeView, x: int, y: int):
        """Draw one tab header (background + icon item) at (x, y)."""
        # Selected and hovered tabs use the highlighted texture
        icon = (
            tab.tab_icon
            if not tab.is_selected and tab != self.hovering_tab
            else tab.tab_icon_selected
        )
        icon.blit(x, y)
        tab.tab_slot.draw(x + 10, y + 10)
        tab.icon_position = x, y
    def draw_tabs(
        self,
        lower_left_position: typing.Tuple[int, int],
        container_size: typing.Tuple[int, int],
    ):
        """Draw all tab headers around the container plus the page controls."""
        self.lower_left_position = lower_left_position
        self.container_size = container_size
        tabs = self.pages[self.current_page]
        x, y = lower_left_position
        # Tabs 5+ below the container, the first four above it
        for tab in tabs[4:]:
            self.draw_tab(tab, x, y - self.TAB_SIZE[1])
            x += self.TAB_SIZE[0]
        x = lower_left_position[0]
        y += container_size[1]
        for tab in tabs[:4]:
            self.draw_tab(tab, x, y)
            x += self.TAB_SIZE[0]
        x, y = lower_left_position
        # Fixed tabs: inventory bottom-right, search top-right
        self.draw_tab(
            self.inventory_instance,
            x + container_size[0] - self.TAB_SIZE[0],
            y - self.TAB_SIZE[1],
        )
        self.draw_tab(
            self.search_instance,
            x + container_size[0] - self.TAB_SIZE[0],
            y + container_size[1],
        )
        if self.is_multi_page():
            # Arrows are greyed out at the first/last page
            self.page_left.active = self.current_page != 0
            self.page_left.position = (
                lower_left_position[0] - 10,
                lower_left_position[1] + container_size[1] + self.TAB_SIZE[1] + 10,
            )
            self.page_left.draw()
            self.page_right.active = self.current_page != len(self.pages) - 1
            self.page_right.position = (
                lower_left_position[0] + container_size[0] + 10,
                lower_left_position[1] + container_size[1] + self.TAB_SIZE[1] + 10,
            )
            self.page_right.draw()
            self.page_label.text = f"{self.current_page + 1} / {len(self.pages)}"
            self.page_label.position = (
                lower_left_position[0] + container_size[0] // 2 + 10,
                lower_left_position[1] + container_size[1] + self.TAB_SIZE[1] + 19,
            )
            self.page_label.draw()
    async def open(self):
        """Open the creative view; defaults to the player-inventory tab."""
        if self.current_tab is None:
            self.init_tabs_if_needed()
            await self.switch_to_tab(self.inventory_instance)
        else:
            await shared.inventory_handler.show(self.current_tab)
    def increase_page(self, count: int):
        """Move *count* pages (clamped) and show the new page's first tab."""
        previous = self.current_page
        self.current_page = max(0, min(self.current_page + count, len(self.pages) - 1))
        if previous != self.current_page:
            asyncio.get_event_loop().run_until_complete(
                self.switch_to_tab(self.pages[self.current_page][0])
            )
    async def switch_to_tab(self, tab: ICreativeView):
        """Hide the current tab (if any) and show *tab* instead."""
        if self.current_tab is not None:
            await shared.inventory_handler.hide(self.current_tab)
            self.current_tab.is_selected = False
        self.current_tab = tab
        tab.is_selected = True
        await shared.inventory_handler.show(tab)
    def print_missing(self):
        """Log, per tab, all lazily referenced items that never resolved."""
        for page in self.pages:
            for tab in page:
                if isinstance(tab, CreativeItemTab):
                    entries = []
                    for itemstack in tab.group.entries:
                        if (
                            isinstance(itemstack, LazyClassLoadItemstack)
                            and itemstack.is_empty()
                        ):
                            entries.append("- " + itemstack.lazy_item_name)
                    if entries:
                        logger.write_into_container(
                            entries, header=f"Missing items in {tab.name}"
                        )
# Load the tab header textures once at import time (skipped in test runs)
if not shared.IS_TEST_ENV:
    shared.tick_handler.schedule_once(CreativeTabManager.reload())
# The global creative tab manager instance
CT_MANAGER = CreativeTabManager()
# The vanilla creative tabs; populated by init() during mod loading
BuildingBlocks = None
Decoration = None
Redstone = None
Transportation = None
Miscellaneous = None
Food = None
Tools = None
Weapons = None
Brewing = None
Test = None
async def init():
    """
    Create the vanilla creative tabs (each fed by its item tag) and
    register them with the global tab manager.
    """
    global BuildingBlocks, Decoration, Redstone, Transportation, Miscellaneous, Food, Tools, Weapons, Brewing, Test
    BuildingBlocks = CreativeItemTab(
        "Building Blocks",
        ItemStack("minecraft:bricks"),
        linked_tag="#minecraft:tab_building_blocks",
    )
    Decoration = CreativeItemTab(
        "Decoration",
        ItemStack("minecraft:peony"),
        linked_tag="#minecraft:tab_decoration",
    )
    Redstone = CreativeItemTab(
        "Redstone",
        ItemStack("minecraft:redstone"),
        linked_tag="#minecraft:tab_redstone",
    )
    Transportation = CreativeItemTab(
        "Transportation",
        ItemStack("minecraft:powered_rail"),
        linked_tag="#minecraft:tab_transportation",
    )
    Miscellaneous = CreativeItemTab(
        "Miscellaneous",
        ItemStack("minecraft:lava_bucket"),
        linked_tag="#minecraft:tab_miscellaneous",
    )
    Food = CreativeItemTab(
        "Food", ItemStack("minecraft:potato"), linked_tag="#minecraft:tab_food"
    )
    Tools = CreativeItemTab(
        "Tools", ItemStack("minecraft:iron_axe"), linked_tag="#minecraft:tab_tools"
    )
    Weapons = CreativeItemTab(
        "Weapons",
        ItemStack("minecraft:golden_sword"),
        linked_tag="#minecraft:tab_weapons",
    )
    Brewing = CreativeItemTab(
        "Brewing", ItemStack("minecraft:barrier"), linked_tag="#minecraft:tab_brewing"
    )
    # Registration order defines the on-screen tab order
    CT_MANAGER.add_tab(BuildingBlocks).add_tab(Decoration).add_tab(Redstone).add_tab(
        Transportation
    ).add_tab(Miscellaneous).add_tab(Food).add_tab(Tools).add_tab(Weapons).add_tab(
        Brewing
    )
# Run init() during the item-group loading stage of mod loading
# (skipped in test runs)
if not shared.IS_TEST_ENV:
    shared.mod_loader("minecraft", "stage:item_groups:load")(init())
|
# Definition for a binary tree node.
class TreeNode:
    """Plain binary-tree node: a value plus left/right child links."""

    def __init__(self, x):
        self.left = None
        self.right = None
        self.val = x
class Solution:
    def maxPathSum(self, root: TreeNode) -> int:
        '''
        Record the best path value observed at every node.
        When returning a node's best downward path: 1) only one child side
        may be kept; 2) negative contributions are discarded.
        '''
        # Single-node tree: the only path is the root itself
        if root.left is None and root.right is None:
            return root.val
        self.ans = []
        def pathSum(node):
            # Returns the best non-negative downward path starting at *node*
            # (usable by the parent); records whole-path candidates in
            # self.ans as a side effect.
            if node is None:
                return 0
            self.ans.append(node.val)
            if node.left is None and node.right is None:
                return node.val
            lv = pathSum(node.left)
            rv = pathSum(node.right)
            # v: node plus its better child branch (branch clamped at 0)
            v = node.val + max(max(rv, lv), 0)
            # Through-path candidate: optionally add the weaker branch too
            self.ans.append(max(v, v + min(rv, lv)))
            return max(v, 0)
        pathSum(root)
        return max(self.ans)
    def maxPathSumF(self, root) -> int:
        """Standard O(n) variant: track the best through-path in self.ans."""
        def pathSum(node):
            if node is None:
                return 0
            lv = pathSum(node.left)
            rv = pathSum(node.right)
            # Best path passing through this node (both sides allowed here)
            self.ans = max(node.val + lv + rv, self.ans)
            # To the parent: node plus one side, never negative
            return max(node.val + max(rv, lv), 0)
        self.ans = float('-inf')
        pathSum(root)
        return self.ans
# Ad-hoc smoke test: tree [1, -2, 3]; best path is 1 + 3 = 4
n = TreeNode(1)
n1 = TreeNode(-2)
n2 = TreeNode(3)
n.left = n1
n.right = n2
# [5,4,8,11,null,13,4,7,2,null,null,null,1] [-10,9,20,null,null,15,7] [9,6,-3,null,null,-6,2,null,null,2,null,-6,-6,-6]
s = Solution()
print(s.maxPathSum(n))
# -*- coding: utf-8 -*-
"""FlyBase database models."""
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
import pybel.dsl
from .constants import MODULE_NAME
Base = declarative_base()
FLY_GENE_TABLE_NAME = f'{MODULE_NAME}_flyGene'
class FlyGene(Base):  # type: ignore
    """Gene table."""
    __tablename__ = FLY_GENE_TABLE_NAME
    id = Column(Integer, primary_key=True)
    # FlyBase accession identifier; indexed for lookups
    flybase_id = Column(String(255), nullable=False, index=True)
    # Gene symbol; indexed for lookups
    symbol = Column(String(255), nullable=False, index=True)
    def __repr__(self):
        """Return the FlyBase identifier."""
        return str(self.flybase_id)
    def __str__(self):
        """Return the FlyBase identifier."""
        return str(self.flybase_id)
    def serialize_to_protein_node(self) -> pybel.dsl.Gene:
        """Serialize to PyBEL node data dictionary.

        NOTE(review): despite the method name, this builds a *Gene* node
        (namespace/name/identifier) — confirm the naming against callers.
        """
        return pybel.dsl.Gene(
            namespace=MODULE_NAME,
            name=self.symbol,
            identifier=str(self.flybase_id)
        )
|
import torch
import torch.nn as nn
import torch.distributions as dists
import numpy as np
def difference_function(x, model):
    """Exact per-bit energy differences for binary inputs.

    Entry (b, i) is model(x with bit i flipped) - model(x) for batch row b,
    computed with one forward pass per dimension.
    """
    base = model(x).squeeze()
    diffs = torch.zeros_like(x)
    for idx in range(x.size(1)):
        flipped = x.clone()
        flipped[:, idx] = 1. - x[:, idx]
        diffs[:, idx] = model(flipped).squeeze() - base
    return diffs
def approx_difference_function(x, model):
    """First-order (gradient) approximation of difference_function.

    Flipping bit i changes x_i by (1 - 2*x_i); the estimate multiplies the
    gradient by that signed step. Requires a differentiable *model*.
    """
    x = x.requires_grad_()
    grad = torch.autograd.grad(model(x).sum(), x)[0]
    estimate = grad * (1. - 2. * x)
    return estimate.detach()
# Gibbs-With-Gradients for binary data
class DiffSampler(nn.Module):
    """
    Gibbs-With-Gradients sampler for binary data.

    Each call to step() runs n_steps proposal/accept rounds. Proposals come
    from the (exact or gradient-approximated) difference function; moves are
    accepted with a Metropolis-Hastings test.
    """
    def __init__(self, dim, n_steps=10, approx=False, multi_hop=False, fixed_proposal=False, temp=2., step_size=1.0):
        print('gwg sampler')
        super().__init__()
        self.dim = dim
        self.n_steps = n_steps
        # Running diagnostics updated by step():
        self._ar = 0.      # mean acceptance rate
        self._mt = 0.      # mean model (energy-difference) term
        self._pt = 0.      # mean proposal log-ratio term
        self._hops = 0.
        self._phops = 0.
        self.approx = approx
        self.fixed_proposal = fixed_proposal
        self.multi_hop = multi_hop
        self.temp = temp
        self.step_size = step_size
        # Exact difference needs one forward pass per dimension; the
        # gradient approximation needs a single backward pass.
        if approx:
            self.diff_fn = lambda x, m: approx_difference_function(x, m) / self.temp
        else:
            self.diff_fn = lambda x, m: difference_function(x, m) / self.temp
    def step(self, x, model):
        """Run n_steps of MH-corrected flip moves; returns the new state."""
        x_cur = x
        a_s = []
        m_terms = []
        prop_terms = []
        if self.multi_hop:
            if self.fixed_proposal:
                # Bernoulli proposal computed once from the initial state;
                # may flip several bits per round
                delta = self.diff_fn(x, model)
                cd = dists.Bernoulli(probs=delta.sigmoid() * self.step_size)
                for i in range(self.n_steps):
                    changes = cd.sample()
                    # Flip the selected bits
                    x_delta = (1. - x_cur) * changes + x_cur * (1. - changes)
                    la = (model(x_delta).squeeze() - model(x_cur).squeeze())
                    a = (la.exp() > torch.rand_like(la)).float()
                    # Keep the proposal only where accepted
                    x_cur = x_delta * a[:, None] + x_cur * (1. - a[:, None])
                    a_s.append(a.mean().item())
                self._ar = np.mean(a_s)
            else:
                for i in range(self.n_steps):
                    forward_delta = self.diff_fn(x_cur, model)
                    cd_forward = dists.Bernoulli(logits=(forward_delta * 2 / self.temp))
                    changes = cd_forward.sample()
                    lp_forward = cd_forward.log_prob(changes).sum(-1)
                    x_delta = (1. - x_cur) * changes + x_cur * (1. - changes)
                    # Reverse proposal for the MH correction
                    reverse_delta = self.diff_fn(x_delta, model)
                    cd_reverse = dists.Bernoulli(logits=(reverse_delta * 2 / self.temp))
                    lp_reverse = cd_reverse.log_prob(changes).sum(-1)
                    m_term = (model(x_delta).squeeze() - model(x_cur).squeeze())
                    la = m_term + lp_reverse - lp_forward
                    a = (la.exp() > torch.rand_like(la)).float()
                    x_cur = x_delta * a[:, None] + x_cur * (1. - a[:, None])
                    a_s.append(a.mean().item())
                    m_terms.append(m_term.mean().item())
                    prop_terms.append((lp_reverse - lp_forward).mean().item())
                self._ar = np.mean(a_s)
                self._mt = np.mean(m_terms)
                self._pt = np.mean(prop_terms)
        else:
            if self.fixed_proposal:
                delta = self.diff_fn(x, model)
                # One-hot proposal: exactly one bit flipped per round
                cd = dists.OneHotCategorical(logits=delta)
                for i in range(self.n_steps):
                    changes = cd.sample()
                    x_delta = (1. - x_cur) * changes + x_cur * (1. - changes)
                    la = (model(x_delta).squeeze() - model(x_cur).squeeze())
                    a = (la.exp() > torch.rand_like(la)).float()
                    x_cur = x_delta * a[:, None] + x_cur * (1. - a[:, None])
                    a_s.append(a.mean().item())
                self._ar = np.mean(a_s)
            else:
                # Standard GWG: single-flip one-hot proposal recomputed each round
                for i in range(self.n_steps):
                    forward_delta = self.diff_fn(x_cur, model)
                    cd_forward = dists.OneHotCategorical(logits=forward_delta)
                    changes = cd_forward.sample()
                    lp_forward = cd_forward.log_prob(changes)
                    x_delta = (1. - x_cur) * changes + x_cur * (1. - changes)
                    reverse_delta = self.diff_fn(x_delta, model)
                    cd_reverse = dists.OneHotCategorical(logits=reverse_delta)
                    lp_reverse = cd_reverse.log_prob(changes)
                    m_term = (model(x_delta).squeeze() - model(x_cur).squeeze())
                    la = m_term + lp_reverse - lp_forward
                    a = (la.exp() > torch.rand_like(la)).float()
                    x_cur = x_delta * a[:, None] + x_cur * (1. - a[:, None])
        return x_cur
# Gibbs-With-Gradients variant which proposes multiple flips per step
class MultiDiffSampler(nn.Module):
    """
    Gibbs-With-Gradients variant drawing n_samples one-hot flips per round;
    repeated picks of the same bit cancel out (mod-2 backtracking).
    """
    def __init__(self, dim, n_steps=10, approx=False, temp=1., n_samples=1):
        super().__init__()
        self.dim = dim
        self.n_steps = n_steps
        # Running diagnostics updated by step():
        self._ar = 0.      # mean acceptance rate
        self._mt = 0.      # mean model (energy-difference) term
        self._pt = 0.      # mean proposal log-ratio term
        self._hops = 0.    # mean realized flips per call
        self._phops = 0.   # mean proposed flips (last round)
        self.approx = approx
        self.temp = temp
        self.n_samples = n_samples
        # Lifetime accepted/total proposal counters
        self.succ = 0
        self.count = 0
        if approx:
            self.diff_fn = lambda x, m: approx_difference_function(x, m) / self.temp
        else:
            self.diff_fn = lambda x, m: difference_function(x, m) / self.temp
    def step(self, x, model):
        """Run n_steps of MH-corrected multi-flip moves; returns the new state."""
        x_cur = x
        a_s = []
        m_terms = []
        prop_terms = []
        for i in range(self.n_steps):
            forward_delta = self.diff_fn(x_cur, model)
            prob = torch.softmax(forward_delta, dim=-1)
            # NOTE(review): softmax already sums to 1 along dim=-1, so this
            # renormalization looks redundant — confirm before removing.
            prob_s = torch.sum(prob, dim=-1, keepdim=True)
            prob = prob / prob_s
            cd_forward = dists.OneHotCategorical(probs=prob)
            changes_all = cd_forward.sample((self.n_samples,))
            lp_forward = cd_forward.log_prob(changes_all).sum(0)
            changes = (changes_all.sum(0).long() % 2).float() # with backtrack
            x_delta = (1. - x_cur) * changes + x_cur * (1. - changes)
            self._phops = (x_delta != x).float().sum(-1).mean().item()
            # Reverse proposal for the MH correction
            reverse_delta = self.diff_fn(x_delta, model)
            prob = torch.softmax(reverse_delta, dim=-1)
            prob_s = torch.sum(prob, dim=-1, keepdim=True)
            prob = prob / prob_s
            cd_reverse = dists.OneHotCategorical(probs=prob)
            lp_reverse = cd_reverse.log_prob(changes_all).sum(0)
            m_term = (model(x_delta).squeeze() - model(x_cur).squeeze())
            la = m_term + lp_reverse - lp_forward
            a = (la.exp() > torch.rand_like(la)).float()
            self.succ += a.sum().item()
            self.count += a.shape[0]
            x_cur = x_delta * a[:, None] + x_cur * (1. - a[:, None])
            a_s.append(a.mean().item())
            m_terms.append(m_term.mean().item())
            prop_terms.append((lp_reverse - lp_forward).mean().item())
        self._ar = np.mean(a_s)
        self._mt = np.mean(m_terms)
        self._pt = np.mean(prop_terms)
        self._hops = (x != x_cur).float().sum(-1).mean().item()
        return x_cur
def approx_difference_function_multi_dim(x, model):
    """First-order (Taylor) estimate of the score change obtained by
    switching each dimension of ``x`` to every alternative category.

    :param x: one-hot state tensor of shape (batch, dim, n_choices).
    :param model: callable mapping states to per-sample scores.
    :return: tensor of estimated score differences, same shape as ``x``.
    """
    x = x.requires_grad_()
    grads = torch.autograd.grad(model(x).sum(), x)[0]
    # Gradient mass already claimed by the currently-active category.
    current_score = (grads * x).sum(dim=-1, keepdim=True)
    return grads - current_score
def difference_function_multi_dim(x, model):
    """Exactly evaluate the change in model score for every categorical
    reassignment of every dimension of ``x`` (one model call per entry).

    :param x: one-hot state tensor of shape (batch, dim, n_choices).
    :param model: callable mapping states to per-sample scores.
    :return: tensor of score differences, same shape as ``x``.
    """
    base_score = model(x).squeeze()
    diffs = torch.zeros_like(x)
    n_dims, n_choices = x.size(1), x.size(2)
    for dim_idx in range(n_dims):
        for choice_idx in range(n_choices):
            # Rewrite dimension dim_idx to one-hot category choice_idx.
            perturbed = x.clone()
            perturbed[:, dim_idx] = 0.
            perturbed[:, dim_idx, choice_idx] = 1.
            diffs[:, dim_idx, choice_idx] = model(perturbed).squeeze() - base_score
    return diffs
# Gibbs-With-Gradients for categorical data
class DiffSamplerMultiDim(nn.Module):
    """Gibbs-With-Gradients sampler for one-hot (categorical) state tensors.

    Each step proposes changing one (dimension, category) entry, drawn from a
    softmax over estimated likelihood differences, and accepts the move with
    a Metropolis-Hastings test.
    """
    def __init__(self, dim, n_steps=10, approx=False, temp=1.):
        super().__init__()
        print("using categorical gwg sampler")
        self.dim = dim
        self.n_steps = n_steps
        # Diagnostics updated by step(): acceptance rate, model term,
        # proposal term, realized hops and proposed hops.
        self._ar = 0.
        self._mt = 0.
        self._pt = 0.
        self._hops = 0.
        self._phops = 0.
        self.approx = approx
        self.temp = temp
        if approx:
            # Gradient-based (Taylor) approximation of the change deltas.
            self.diff_fn = lambda x, m: approx_difference_function_multi_dim(x, m) / self.temp
        else:
            # Exact deltas (one model call per (dim, category) pair).
            self.diff_fn = lambda x, m: difference_function_multi_dim(x, m) / self.temp

    def step(self, x, model):
        """Run ``n_steps`` single-change MH updates on one-hot states ``x``.

        :param x: one-hot tensor; assumed (batch, dim, n_choices) per the
            reshape comments below.
        :param model: callable returning per-sample (log-)likelihood scores.
        :return: updated tensor, same shape as ``x``.
        """
        x_cur = x
        a_s = []
        m_terms = []
        prop_terms = []
        for i in range(self.n_steps):
            forward_delta = self.diff_fn(x_cur, model)
            # make sure we dont choose to stay where we are!
            forward_logits = forward_delta - 1e9 * x_cur
            #print(forward_logits)
            cd_forward = dists.OneHotCategorical(logits=forward_logits.view(x_cur.size(0), -1))
            changes = cd_forward.sample()
            # compute probability of sampling this change
            lp_forward = cd_forward.log_prob(changes)
            # reshape to (bs, dim, nout)
            changes_r = changes.view(x_cur.size())
            # get binary indicator (bs, dim) indicating which dim was changed
            changed_ind = changes_r.sum(-1)
            # mask out the changed dim and add in the change
            x_delta = x_cur.clone() * (1. - changed_ind[:, :, None]) + changes_r
            reverse_delta = self.diff_fn(x_delta, model)
            reverse_logits = reverse_delta - 1e9 * x_delta
            cd_reverse = dists.OneHotCategorical(logits=reverse_logits.view(x_delta.size(0), -1))
            # The reverse move would restore the category that was overwritten.
            reverse_changes = x_cur * changed_ind[:, :, None]
            lp_reverse = cd_reverse.log_prob(reverse_changes.view(x_delta.size(0), -1))
            m_term = (model(x_delta).squeeze() - model(x_cur).squeeze())
            # Log acceptance ratio: model term + reverse - forward proposal.
            la = m_term + lp_reverse - lp_forward
            a = (la.exp() > torch.rand_like(la)).float()
            # Accept per-sample: keep proposal where a == 1, else keep current.
            x_cur = x_delta * a[:, None, None] + x_cur * (1. - a[:, None, None])
            a_s.append(a.mean().item())
            m_terms.append(m_term.mean().item())
            prop_terms.append((lp_reverse - lp_forward).mean().item())
        self._ar = np.mean(a_s)
        self._mt = np.mean(m_terms)
        self._pt = np.mean(prop_terms)
        self._hops = (x != x_cur).float().sum(-1).sum(-1).mean().item()
        return x_cur
|
#!/usr/bin/env python
from __future__ import print_function
import pcapy
# import os
import sys
import subprocess # use commandline
import requests # whois
import re
from netaddr import valid_ipv4, valid_mac
import os
# import platform # socket alternative to get hostname
import socket
"""
[kevin@Tardis test]$ ./pmap5.py -p test2.pcap -d
sudo tcpdump -s 0 -i en1 -w test.pcap
-s 0 will set the capture byte to its maximum i.e. 65535 and will not truncate
-i en1 captures Ethernet interface
-w test.pcap will create that pcap file
tcpdump -qns 0 -X -r osx.pcap
[kevin@Tardis tmp]$ sudo tcpdump -w osx.pcap
tcpdump: data link type PKTAP
tcpdump: listening on pktap, link-type PKTAP (Packet Tap), capture size 65535 bytes
^C4414 packets captured
4416 packets received by filter
0 packets dropped by kernel
"""
def checkSudo():
    """Return True when the process runs with root privileges (euid 0)."""
    euid = os.geteuid()
    return euid == 0
# def command(cmd):
# ans = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()[0]
# return ans
class Commands(object):
    """Thin convenience wrapper around subprocess for running shell commands.

    (The extremely simple/useful ``commands`` module was deprecated in favor
    of the complex/confusing ``subprocess`` API; this aims to simplify.)
    """
    def command(self, cmd):
        """Run ``cmd`` through the shell and return its stdout.

        stderr is captured and discarded; the exit status is not checked.
        """
        proc = subprocess.Popen([cmd],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True)
        stdout, _ = proc.communicate()
        return stdout
class WhoIs(object):
    """Look up an IPv4 address in ARIN's whois REST service.

    On success the parsed record is exposed as the ``record`` dict plus the
    ``CIDR``, ``NetName``, ``NetRange``, ``Organization`` and ``Updated``
    attributes. On an invalid IP or a non-200 response, an error is printed
    and the attributes are left unset (``record`` stays the class default).
    """
    record = {}

    def __init__(self, ip):
        """Fetch and parse the whois record for ``ip`` (dotted-quad string)."""
        if not valid_ipv4(ip):
            print('Error: the IPv4 address {} is invalid'.format(ip))
            return
        rec = requests.get('http://whois.arin.net/rest/ip/{}.txt'.format(ip))
        if rec.status_code != 200:
            print('Error')
            return
        ans = {}
        squeeze_ws = re.compile(r"\s\s+")
        for line in rec.text.split('\n'):
            # Skip blank lines and '#' comment lines in the whois response.
            if line and line[0] != '#':
                line = squeeze_ws.sub('', line)
                # Split on the FIRST ':' only, so values that themselves
                # contain colons (URLs, timestamps) are not truncated —
                # the old split(':')[1] dropped everything after the
                # second colon.
                key, _, value = line.partition(':')
                ans[key] = value
        self.record = ans
        # Raises KeyError if ARIN omits one of these fields — intentional,
        # callers should see an incomplete record loudly.
        self.CIDR = ans['CIDR']
        self.NetName = ans['NetName']
        self.NetRange = ans['NetRange']
        self.Organization = ans['Organization']
        self.Updated = ans['Updated']
class GetHostName(Commands):
    # Resolved host name; 'unknown' when resolution failed, None when the
    # input IP was rejected before any lookup.
    name = None

    def __init__(self, ip):
        """Use the avahi (zeroconfig) tools or dig to find a host name given an
        ip address.
        in: ip
        out: string w/ host name or 'unknown' if the host name couldn't be found
        """
        # handle invalid ip address
        if not valid_ipv4(ip):
            print('Error: the IPv4 address {} is invalid'.format(ip))
            return
        # handle a localhost ip address
        if ip == '127.0.0.1' or ip == 'localhost':
            self.name = socket.gethostname()
            return
        # ok, now do more complex stuff
        name = 'unknown'
        if sys.platform == 'linux' or sys.platform == 'linux2':
            # BUGFIX: the awk braces must be doubled ({{ }}) — a literal
            # '{print $2}' makes str.format raise on the unknown field.
            name = self._resolve("avahi-resolve-address {} | awk '{{print $2}}'".format(ip))
        elif sys.platform == 'darwin':
            name = self._resolve('dig +short -x {} -p 5353 @224.0.0.251'.format(ip))
        # detect any remaining errors
        if name.find('connection timed out') >= 0:
            name = 'unknown'
        elif name == '':
            name = 'unknown'
        self.name = name

    def _resolve(self, cmd):
        """Run ``cmd`` and return its stdout as cleaned-up text."""
        out = self.command(cmd)
        if isinstance(out, bytes):
            # Under Python 3 subprocess pipes yield bytes; the str methods
            # below (find, rstrip('.')) need text.
            out = out.decode('utf-8', 'replace')
        return out.rstrip().rstrip('.')
class CapturePackets(object):
    """Capture ``num_packets`` packets from ``iface`` into a pcap file,
    optionally restricted by a BPF filter expression.
    """
    def __init__(self, iface, filename='test.pcap', pcFilter=None, num_packets=3000):
        """Open the interface and synchronously capture packets to ``filename``.

        :param iface: network interface name (list with pcapy.findalldevs()).
        :param filename: output pcap file path.
        :param pcFilter: optional BPF filter string, e.g. 'tcp port 80'.
        :param num_packets: number of packets to capture before returning.
        """
        max_bytes = 1024
        promiscuous = False
        read_timeout = 100  # in milliseconds
        pc = pcapy.open_live(iface, max_bytes, promiscuous, read_timeout)
        if pcFilter:
            pc.setfilter(pcFilter)
        self.dumper = pc.dump_open(filename)
        pc.loop(num_packets, self.recv_pkts)  # capture packets

    # callback for received packets
    def recv_pkts(self, hdr, data):
        """Append one captured packet (header + payload) to the dump file."""
        try:
            self.dumper.dump(hdr, data)
        except KeyboardInterrupt:  # probably should throw error instead
            exit('keyboard exit')
        except Exception as err:
            # Narrowed from a bare ``except`` so SystemExit and other
            # non-errors are not swallowed; the message now names the cause.
            exit('packet dump failed: {}'.format(err))

    def run(self):
        # Placeholder for a future restartable capture loop.
        pass
class MacLookup(object):
    """Resolve a MAC address to vendor information via macvendorlookup.com."""

    def __init__(self, mac, full=False):
        # vendor is a dict with at least a 'company' key on success or
        # lookup failure, or None when the MAC address itself is invalid.
        self.vendor = self.get(mac, full)

    def get(self, mac, full):
        """Query www.macvendorlookup.com for ``mac``.

        :param mac: MAC address string to look up.
        :param full: when True return the full record, otherwise only
            ``{'company': ...}``.
        :return: dict as above, ``{'company': 'unknown'}`` on lookup failure,
            or None for an invalid MAC address.

        json response from www.macvendorlookup.com:
        {u'addressL1': u'1 Infinite Loop',
         u'addressL2': u'',
         u'addressL3': u'Cupertino CA 95014',
         u'company': u'Apple',
         u'country': u'UNITED STATES',
         u'endDec': u'202412195315711',
         u'endHex': u'B817C2FFFFFF',
         u'startDec': u'202412178538496',
         u'startHex': u'B817C2000000',
         u'type': u'MA-L'}
        """
        unknown = {'company': 'unknown'}
        if not valid_mac(mac):
            print('Error: the mac addr {} is not valid'.format(mac))
            return
        try:
            r = requests.get('http://www.macvendorlookup.com/api/v2/' + mac)
        except requests.exceptions.HTTPError as e:
            # Print the exception itself: the Python-2-only ``e.message``
            # attribute raises AttributeError under Python 3.
            print('HTTPError:', e)
            return unknown
        if r.status_code == 204:  # no content found, bad MAC addr
            print('ERROR: Bad MAC addr:', mac)
            return unknown
        elif r.headers['content-type'] != 'application/json':
            print('ERROR: Wrong content type:', r.headers['content-type'])
            return unknown
        a = {}
        try:
            if full:
                a = r.json()[0]
            else:
                a['company'] = r.json()[0]['company']
        except (ValueError, LookupError):
            # Narrowed from a bare ``except``: covers malformed JSON
            # (ValueError) and empty/short payloads (IndexError/KeyError).
            print('ERROR:', r.status_code, r.headers, r.ok, r.text, r.reason)
            a = unknown
        return a
|
import torch
from torch import nn
class CalibrationWrapper(nn.Module):
    """Applies per-segment calibration models to slices of the last input
    dimension, passing uncovered leading/trailing features through unchanged.

    Segments must be registered via ``add_model`` in ascending,
    non-overlapping index order; ``forward`` reassembles the full vector.
    """

    def __init__(self):
        super().__init__()
        self.start_indexes = []
        self.end_indexes = []
        self.models = nn.ModuleList([])

    def add_model(self, model, start_index, end_index):
        """Register ``model`` to calibrate features [start_index, end_index)."""
        self.models.append(model)
        self.start_indexes.append(start_index)
        self.end_indexes.append(end_index)

    def forward(self, inputs):
        """Calibrate ``inputs`` (..., n_features); output shape equals input.

        :raises IndexError: if no model was registered via ``add_model``.
        """
        corrected_inputs = []
        # Pass through features before the first calibrated segment.
        if self.start_indexes[0] != 0:
            corrected_inputs.append(inputs[..., :self.start_indexes[0]])
        for model, start_index, end_index in zip(self.models, self.start_indexes, self.end_indexes):
            corrected_inputs.append(model(inputs[..., start_index:end_index]))
        # Pass through trailing features. BUGFIX: compare against the last
        # dimension (shape[-1], not shape[1]) — the slicing above uses
        # ``...``, so shape[1] silently dropped the tail for >2-D inputs.
        if self.end_indexes[-1] != inputs.shape[-1]:
            corrected_inputs.append(inputs[..., self.end_indexes[-1]:])
        return torch.cat(corrected_inputs, dim=-1)
class LinearModel(nn.Module):
    """Elementwise affine calibration ``y = alpha * x + beta`` with two
    learnable scalar parameters."""

    def __init__(self, alpha=1., beta=0.):
        super().__init__()
        self.alpha = nn.Parameter(torch.tensor(alpha))
        self.beta = nn.Parameter(torch.tensor(beta))

    def forward(self, inputs):
        scaled = self.alpha * inputs
        return scaled + self.beta
class TemperatureScaling(nn.Module):
    """Divides inputs (logits) by a single learnable scalar temperature."""

    def __init__(self, temperature=1.):
        """:param temperature: initial temperature value.

        BUGFIX: the old integer default (``temperature=1``) produced an
        integer tensor, and ``nn.Parameter`` raises because integer tensors
        cannot require gradients; ``float()`` also keeps accepting int args.
        """
        super().__init__()
        self.temperature = nn.Parameter(torch.tensor(float(temperature)))

    def forward(self, inputs):
        return inputs / self.temperature
|
import os
import sys
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'sitecomber_article_tests'))
sys.path.append(BASE_DIR)
from sitecomber_article_tests.utils import get_misspelled_words, simplify_word
from sitecomber_article_tests.dictionary import core_dictionary
lang = "en"
def test_word(word):
    """Report whether ``word`` is in the core dictionary; if it is not, run
    the simplification and misspelling checks on it."""
    print(u"Testing word: %s" % (word))
    known = word in core_dictionary
    print(u"Is the word '%s' in the core dictionary? %s" % (word, known))
    if not known:
        simplify_word(word, core_dictionary, True)
        get_misspelled_words(word, lang, core_dictionary)
# Guarded so importing this module does not block on interactive input.
if __name__ == "__main__":
    test_word(input("Please enter a word to test: "))
|
from pathlib import Path
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from scipy import stats
class Painter():
    """Plotting helper that renders regression scatter plots and line plots
    in a consistent serif "paper" style and saves them as PDF files."""

    def __init__(self, figsize=(12, 8)):
        """Configure the global matplotlib/seaborn style.

        :param figsize: default figure size handed to seaborn's rc dict.
        """
        plt.rcParams['font.sans-serif'] = ['serif']
        sns.set(context="paper", style="ticks",
                font="serif", font_scale=2, rc={"figure.figsize": figsize})

    def scatter_regression(self, save_folder, name, df, key1, key2,
                           x_lim=(-35, 60), y_lim=(-35, 60),
                           text1pos=(-25, 50), text2pos=(-25, 40)):
        """Scatter df[key1] vs df[key2] with a least-squares fit line,
        annotate the fit equation and r^2, and save <name>.pdf.

        :param save_folder: pathlib.Path-like directory for the PDF output.
        :param name: figure title and output file stem.
        :param df: DataFrame containing the key1/key2 columns.
        :param x_lim, y_lim: axis limits (tuples now — the old mutable list
            defaults were an anti-pattern; list arguments still work).
        :param text1pos, text2pos: data coordinates of the two annotations.
        """
        fig = plt.figure()
        # get coeffs of linear fit
        slope, intercept, r_value, p_value, std_err = stats.linregress(df[key1], df[key2])
        # use line_kws to set line label for legend
        ax = sns.regplot(x=key1, y=key2, data=df, color='k',
                         line_kws={'label': f"Y = {np.round(slope, 2)} * X + {np.round(intercept, 2)}"})
        ax.set_xlim(*x_lim)
        ax.set_ylim(*y_lim)
        plt.text(*text1pos, f"y = {np.round(slope, 2)} * x + {np.round(intercept, 2)}", size=20,
                 family="serif", color="k", style="italic", weight="light",
                 bbox=dict(facecolor="w", alpha=0.2))
        plt.text(*text2pos, f"r2: {np.round(r_value ** 2, 2)}", size=20,
                 family="serif", color="k", style="italic", weight="light",
                 bbox=dict(facecolor="w", alpha=0.2))
        ax.set_xlabel(key1)
        ax.set_ylabel(key2)
        # set_size_inches supersedes the redundant set_figheight/set_figwidth
        # calls the original made with the same values.
        fig.set_size_inches((12, 8))
        fig.tight_layout()
        fig.subplots_adjust(wspace=.02, hspace=.02, right=0.95, top=0.92)
        ax.set_title(name, fontsize=24, color='k')
        plt.savefig(save_folder.joinpath(name + ".pdf"), dpi=600, format="pdf")
        plt.close(fig)  # release the figure so repeated calls don't leak memory

    def lineplot(self, save_folder, name, df, key1, key2):
        """Plot df[key1] and df[key2] against the DataFrame index and save
        the figure as <name>.pdf in save_folder."""
        fig, ax = plt.subplots()
        ax.plot(df.index, df[key1], "g*-", lw=2, label=key1)
        ax.plot(df.index, df[key2], "bo-", lw=2, label=key2)
        ax.tick_params(axis='x', rotation=20)
        ax.legend()
        ax.set_title(name, fontsize=24, color='k')
        plt.savefig(save_folder.joinpath(name + ".pdf"), dpi=600, format="pdf")
        plt.close(fig)  # release the figure so repeated calls don't leak memory
|
from typing import Union
import aiohttp
from utils.exceptions import NotAllowedStatusException
class HTTP:
    """Minimal async HTTP helper built on one-shot aiohttp sessions.

    Each call opens a fresh ``ClientSession``; ``**kwargs`` are forwarded
    to the session constructor (e.g. headers, timeout).
    """
    @staticmethod
    async def get(
        url: str,
        params: Union[dict, None] = None,
        json: bool = False,
        status: Union[int, None] = None,
        **kwargs,
    ) -> Union[str, dict]:
        """Perform a GET request.

        :param url: target URL.
        :param params: optional query parameters.
        :param json: when True decode the body as JSON, otherwise return text.
        :param status: when given, require this exact response status.
        :raises NotAllowedStatusException: if ``status`` is set and differs.
        """
        async with aiohttp.ClientSession(**kwargs) as session:
            async with session.get(url, params=params) as r:
                if status is not None:
                    if r.status != status:
                        raise NotAllowedStatusException(r.status)
                if json:
                    return await r.json()
                return await r.text()

    @staticmethod
    async def post(
        url: str,
        data: dict,
        params: Union[dict, None] = None,
        json: bool = False,
        status: Union[int, None] = None,
        **kwargs,
    ) -> Union[str, dict]:
        """Perform a POST request with form/body ``data``.

        :param url: target URL.
        :param data: request body passed to aiohttp's ``data=``.
        :param params: optional query parameters.
        :param json: when True decode the body as JSON, otherwise return text.
        :param status: when given, require this exact response status.
        :raises NotAllowedStatusException: if ``status`` is set and differs.
        """
        async with aiohttp.ClientSession(**kwargs) as session:
            async with session.post(url, data=data, params=params) as r:
                if status is not None:
                    if r.status != status:
                        raise NotAllowedStatusException(r.status)
                if json:
                    return await r.json()
                return await r.text()
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
from knack.log import get_logger
from .file_cache import get_cli_cache
from .uri import uri_parse
logger = get_logger(__name__)
class VstsGitUrlInfo(object):
    """ VstsGitUrlInfo.

    Resolves a git remote URL to its Azure DevOps / VSTS project id,
    repository id and server URI, caching resolved results on disk keyed
    by the lowercased remote URL.
    """
    def __init__(self, remote_url):
        from msrest import Serializer, Deserializer
        from msrest.exceptions import DeserializationError, SerializationError
        self.project = None
        self.repo = None
        self.uri = None
        if remote_url is not None:
            logger.debug("Remote url: %s", remote_url)
            models = {'_RemoteInfo': self._RemoteInfo}
            remote_url = remote_url.lower()
            remote_info = None
            # First try the on-disk cache of previously resolved remotes.
            if _git_remote_info_cache[remote_url]:
                deserializer = Deserializer(models)
                try:
                    remote_info = deserializer.deserialize_data(_git_remote_info_cache[remote_url], '_RemoteInfo')
                except DeserializationError as ex:
                    # Corrupt cache entry: log and fall through to a live lookup.
                    logger.debug(ex, exc_info=True)
                if remote_info is not None:
                    self.project = remote_info.project
                    self.repo = remote_info.repository
                    self.uri = remote_info.server_url
            if remote_info is None:
                # Cache miss: ask the server, then write back to the cache.
                vsts_info = self.get_vsts_info(remote_url)
                if vsts_info is not None:
                    self.project = vsts_info.repository.project.id
                    self.repo = vsts_info.repository.id
                    apis_path_segment = '/_apis/'
                    apis_path_segment_pos = vsts_info.repository.url.find(apis_path_segment)
                    if apis_path_segment_pos >= 0:
                        # Trim the REST API path so only the server root remains.
                        self.uri = vsts_info.repository.url[:apis_path_segment_pos]
                    else:
                        self.uri = vsts_info.server_url
                    serializer = Serializer(models)
                    try:
                        _git_remote_info_cache[remote_url] = \
                            serializer.serialize_data(self._RemoteInfo(self.project, self.repo, self.uri),
                                                      '_RemoteInfo')
                    except SerializationError as ex:
                        # Cache write failure is non-fatal; the lookup succeeded.
                        logger.debug(ex, exc_info=True)

    @staticmethod
    def get_vsts_info(remote_url):
        """Query the server for repo info, normalizing ssh remotes to https
        first. Returns None for ssh netlocs that cannot be converted."""
        from azext_devops.vstsCompressed.git.v4_1.git_client import GitClient
        from .services import _get_credentials
        components = uri_parse(remote_url.lower())
        if components.scheme == 'ssh':
            # Convert to https url.
            netloc = VstsGitUrlInfo.convert_ssh_netloc_to_https_netloc(components.netloc)
            if netloc is None:
                return None
            # New ssh urls do not have _ssh so path is like org/project/repo
            # We need to convert it into project/_git/repo/ or org/project/_git/repo for dev.azure.com urls
            path = components.path
            ssh_path_segment = '_ssh/'
            ssh_path_segment_pos = components.path.find(ssh_path_segment)
            if ssh_path_segment_pos < 0:  # new ssh url
                path_vals = components.path.strip('/').split('/')
                if path_vals and len(path_vals) == 3:
                    if 'visualstudio.com' in netloc:
                        path = '{proj}/{git}/{repo}'.format(proj=path_vals[1], git='_git', repo=path_vals[2])
                    elif 'dev.azure.com' in netloc:
                        path = '{org}/{proj}/{git}/{repo}'.format(
                            org=path_vals[0], proj=path_vals[1], git='_git', repo=path_vals[2])
                else:
                    logger.debug("Unsupported url format encountered in git repo url discovery.")
                uri = 'https://' + netloc + '/' + path
            else:  # old ssh urls
                # Replace the '_ssh/' segment with '_git/' in the rebuilt url.
                uri = 'https://' + netloc + '/' + path.strip('/')
                ssh_path_segment_pos = uri.find(ssh_path_segment)
                if ssh_path_segment_pos >= 0:
                    uri = uri[:ssh_path_segment_pos] + '_git/' + uri[ssh_path_segment_pos + len(ssh_path_segment):]
        else:
            uri = remote_url
        credentials = _get_credentials(uri)
        return GitClient.get_vsts_info_by_remote_url(uri, credentials=credentials)

    @staticmethod
    def convert_ssh_netloc_to_https_netloc(netloc):
        """Map an ssh netloc like 'git@dev.azure.com' or 'org@org.visualstudio.com'
        to its https host; returns None for on-premise (no '@') netlocs."""
        if netloc is None:
            return None
        if netloc.find('@') < 0:
            # on premise url
            logger.warning('TFS SSH URLs are not supported for repo auto-detection yet. See the following issue for ' +
                           'latest updates: https://github.com/Microsoft/azure-devops-cli-extension/issues/142')
            return None
        # hosted url
        import re
        regex = re.compile(r'([^@]+)@[^\.]+(\.[^:]+)')
        match = regex.match(netloc)
        if match is not None:
            # Handle new and old url formats
            if match.group(1) == 'git' and match.group(2) == '.dev.azure.com':
                return match.group(2).strip('.')
            return match.group(1) + match.group(2)
        return None

    @staticmethod
    def is_vsts_url_candidate(url):
        """Heuristic: True if ``url`` looks like a VSTS/Azure DevOps remote
        ('/_git/' or '/_ssh/' in the path, or an ssh scheme), excluding GitHub."""
        if url is None:
            return False
        components = uri_parse(url.lower())
        if components.netloc == 'github.com':
            return False
        if components.path is not None \
                and (components.path.find('/_git/') >= 0 or components.path.find('/_ssh/') >= 0 or
                     components.scheme == 'ssh'):
            return True
        return False

    class _RemoteInfo(Model):
        # msrest model used only to (de)serialize cache entries.
        _attribute_map = {
            'project': {'key': 'project', 'type': 'str'},
            'repository': {'key': 'repository', 'type': 'str'},
            'server_url': {'key': 'serverUrl', 'type': 'str'}
        }

        def __init__(self, project=None, repository=None, server_url=None):
            super(VstsGitUrlInfo._RemoteInfo, self).__init__()  # pylint: disable=protected-access
            self.project = project
            self.repository = repository
            self.server_url = server_url


# File-backed cache of resolved remotes, shared by all VstsGitUrlInfo instances.
_git_remote_info_cache = get_cli_cache('remotes', 0)
|
"""
Data Structures :: LRU Cache
"""
class ListNode:
    """A node in a doubly-linked list-based LRU cache."""

    def __init__(self, key, value, prev=None, next=None):
        """Create a node holding ``key``/``value`` with optional neighbors.

        :param key : Key by which to access nodes.
        :param value : Value accessed by key.
        :param prev [ListNode] : Previous ListNode in list, defaults to None
        :param next [ListNode] : Next ListNode in list, defaults to None
        """
        self.key = key
        self.value = value
        self.prev = prev
        self.next = next

    def delete(self):
        """Splice this node out by linking its neighbors to each other."""
        before, after = self.prev, self.next
        if before is not None:
            before.next = after
        if after is not None:
            after.prev = before
class DoublyLinkedList:
    def __init__(self, node=None):
        """Doubly-linked list class that holds references to
        the list's head and tail nodes, and list length."""
        self.head = node
        self.tail = node
        self.length = 1 if node is not None else 0

    def __len__(self):
        """Returns length of list; for use with the built-in
        `len()` function."""
        return self.length

    def add_to_head(self, key, value):
        """Wraps the given key/value in a ListNode and inserts it
        as the new head of the list."""
        new_node = ListNode(key, value)
        self.length += 1
        if not self.head and not self.tail:
            self.head = new_node
            self.tail = new_node
        else:
            new_node.next = self.head
            self.head.prev = new_node
            self.head = new_node

    def remove_from_tail(self):
        """Removes the List's current tail node, making the
        current tail's previous node the new tail of the List.
        Returns the value of the removed Node.

        :return value : Value of the removed Node, or None when empty.
        """
        if self.tail is not None:
            # BUGFIX: capture and return the value as documented — the old
            # implementation deleted the node but always returned None.
            value = self.tail.value
            self.delete(self.tail)
            return value
        return None

    def move_to_head(self, node):
        """Removes the input node from its current spot in the
        List and inserts it as the new head node of the List.

        :param node (ListNode) : Node to be moved to head.
        """
        if node is self.head:
            return
        key, value = node.key, node.value
        self.delete(node)
        self.add_to_head(key, value)

    def delete(self, node):
        """Removes a node from the list and handles cases where
        the node was the head or the tail.

        :param node (ListNode) : Node to be removed from list.
        """
        # TODO: Catch errors if empty or node not in list
        self.length -= 1  # Update length
        # If head and tail, both get set to None
        if self.head is self.tail:
            self.head = None
            self.tail = None
        elif node is self.head:  # If head, set current head to next
            self.head = self.head.next
            node.delete()
        elif node is self.tail:  # If tail, set current tail to prev
            self.tail = self.tail.prev
            node.delete()
        else:  # If regular node, just delete
            node.delete()
class LRUCache:
    def __init__(self, limit=10):
        """The LRUCache class keeps track of the max number of nodes it
        can hold and a doubly-linked list that holds the key-value
        entries in the correct order.
        Head node is most recent. Tail node is oldest.

        NOTE(review): there is no auxiliary storage dict — get() scans the
        linked list from the head, so get/set are O(n) in the cache size.

        :param limit [int] : Max number of elements in cache, default 10.
        """
        self.limit = limit
        self.storage = DoublyLinkedList()

    def get(self, key):
        """Retrieves the value associated with the given key.
        Moves the key-value pair to the head of the order
        such that the pair is considered most-recently used.
        Returns the value associated with the key or None if the
        key-value pair doesn't exist in the cache.
        """
        if len(self.storage) < 1:  # In case nothing in cache
            return None
        node = self.storage.head  # Start at the head
        while node:  # Loop through nodes, looking for key
            if node.key == key:
                value = node.value  # Return value of node
                if node is not self.storage.head:  # If head, no need to move
                    self.storage.move_to_head(node)
                return value  # Returning value implies breaking loop
            node = node.next  # Iterate
        # Implicitly returns None when the key is not found.

    def set(self, key, value):
        """Adds the given key-value pair to the cache.
        The newly-added pair is considered the most-recently used
        entry in the cache. If the cache is already at max capacity
        before this entry is added, then the oldest entry in the
        cache is removed to make room. In the case that the key
        already exists in the cache, the old value associated with
        the key is overwritten by the new value.

        NOTE(review): a stored value of None is indistinguishable from a
        missing key here, so set(k, None) followed by set(k, v) would add a
        duplicate entry — confirm callers never store None.
        """
        # Look for key in cache using `self.get()`
        if self.get(key) is not None:
            # If exists, the call will relocate it to head position
            # Thus, head will only need to be updated with new value
            # Length of list does not change; does not need checking
            self.storage.head.value = value
        else:
            # If not exists (returns None), add key-value to head
            # Before adding, check length of list
            # If length == limit, remove from tail first
            if len(self.storage) == self.limit:
                self.storage.remove_from_tail()
            self.storage.add_to_head(key, value)
# cache = LRUCache(3)
# cache.set("item1", "a")
# cache.set("item2", "b")
# cache.set("item3", "c")
# cache.set("item2", "z")
# cache.set("item1", "a")
# cache.set("item2", "b")
# cache.set("item3", "c")
# cache.get("item1")
# cache.set("item4", "d")
# cache.get("item1")
# cache.get("item3")
# cache.get("item4")
# cache.get("item2")
|
# PROJECT : kungfucms
# TIME : 19-2-11 上午11:31
# AUTHOR : Younger Shen
# EMAIL : younger.x.shen@gmail.com
# CELL : 13811754531
# WECHAT : 13811754531
# https://github.com/youngershen/
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from django.utils import timezone
from django.db import models
from django.contrib.auth.models import UserManager, PermissionsMixin
from django.contrib.auth.base_user import AbstractBaseUser
from kungfucms.apps.core.models import BaseModel
class Manager(UserManager):
    """User manager whose create helpers differ only in the default
    staff/superuser flags stamped onto ``extra_fields``."""

    def _create_user(self, username, password, **extra_fields):
        """Normalize the username, hash the password, persist and return."""
        normalized = self.model.normalize_username(username)
        user = self.model(username=normalized, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_user(self, username, password=None, **extra_fields):
        """Create a regular (non-staff, non-superuser) account."""
        extra_fields.setdefault('is_staff', False)
        extra_fields.setdefault('is_superuser', False)
        return self._create_user(username, password, **extra_fields)

    def create_staff(self, username, password, **extra_fields):
        """Create a staff account without superuser rights."""
        extra_fields.setdefault('is_staff', True)
        extra_fields.setdefault('is_superuser', False)
        return self._create_user(username, password, **extra_fields)

    def create_superuser(self, username, password=None, **extra_fields):
        """Create an account with both staff and superuser rights."""
        extra_fields.setdefault('is_staff', True)
        extra_fields.setdefault('is_superuser', True)
        return self._create_user(username, password, **extra_fields)
class AbstractUser(AbstractBaseUser, PermissionsMixin):
    """Abstract auth user with staff/active flags and a join timestamp.

    Concrete subclasses must provide the field named by ``USERNAME_FIELD``.
    """
    # Grants access to the Django admin site when True.
    is_staff = models.BooleanField(
        _('staff status'),
        default=False,
        help_text=_('Designates whether the user can log into this admin site.'),
    )
    # Soft-delete flag: deactivate accounts instead of deleting them.
    is_active = models.BooleanField(
        _('active'),
        default=True,
        help_text=_(
            'Designates whether this user should be treated as active. '
            'Unselect this instead of deleting accounts.'
        ),
    )
    date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
    objects = Manager()
    USERNAME_FIELD = 'username'
    EMAIL_FIELD = 'email'

    def get_username_field(self):
        """Return the name of the field used as the login identifier."""
        return self.USERNAME_FIELD

    class Meta:
        verbose_name = _('user')
        verbose_name_plural = _('users')
        abstract = True

    def clean(self):
        # Defers to the base classes' validation/normalization.
        super().clean()
class User(AbstractUser, BaseModel):
    """Concrete (swappable) user model: username login plus optional,
    indexed email and cellphone fields."""
    username = models.CharField(max_length=128,
                                unique=True,
                                verbose_name=_('Username'))
    email = models.CharField(max_length=128,
                             blank=True,
                             null=True,
                             db_index=True,
                             verbose_name=_('Email'))
    cellphone = models.CharField(max_length=128,
                                 blank=True,
                                 null=True,
                                 db_index=True,
                                 verbose_name=_('Cellphone'))

    @classmethod
    def safe_get(cls, **kwargs):
        """Return the matching user, or None instead of raising DoesNotExist."""
        try:
            obj = cls.objects.get(**kwargs)
        except cls.DoesNotExist:
            obj = None
        return obj

    class Meta(AbstractUser.Meta):
        swappable = 'AUTH_USER_MODEL'
        verbose_name = _('User')
        verbose_name_plural = _('Users')
class Profile(BaseModel):
    """Free-form name/value profile entry attached to a user; one user may
    have many profile rows."""
    user = models.ForeignKey(User,
                             on_delete=models.CASCADE,
                             related_name='profiles',
                             related_query_name='profile')
    name = models.CharField(max_length=128,
                            db_index=True,
                            verbose_name=_('Name'))
    value = models.CharField(max_length=255,
                             blank=True,
                             null=True,
                             default='',
                             verbose_name=_('Value'))

    class Meta:
        ordering = ['id']
        verbose_name = _('Profile')
        verbose_name_plural = _('Profiles')
class OauthLoginProvider(BaseModel):
    """Links a user to a third-party OAuth identity (provider name + token).

    The (provider, token, user) triple is unique, and lookups by
    (user, provider) are covered by an index.
    """
    provider = models.CharField(max_length=128, db_index=True, verbose_name=_('Provider Name'))
    token = models.CharField(max_length=128, db_index=True, verbose_name=_('Provider Token'))
    user = models.ForeignKey(User,
                             on_delete=models.CASCADE,
                             related_query_name='oauth_token',
                             related_name='oauth_tokens',
                             verbose_name=_('User'))

    class Meta:
        verbose_name = _('Oauth Login Provider')
        verbose_name_plural = _('Oauth Login Providers')
        unique_together = ('provider', 'token', 'user')
        indexes = [models.Index(fields=('user', 'provider'))]
|
from .feature_pyramid_net import *
from .polar_head import *
from .stage_backbone import *
|
__all__ = ["BasicStream"]
class BasicStream:
    """Minimal processing-stream node that can be chained to a successor."""

    def __init__(self, stream_name: str = None) -> None:
        # Human-readable identifier for this stream (may be None).
        self.name = stream_name
        # Downstream stream set via connect(); None until connected.
        self._next_stream = None

    def input(self, data, *args, **kwargs):
        """
        read data from buffer
        :param data: data that move in the stream
        :return data: None if there is nothing to return
        """
        return None

    def connect(self, next_stream, *args, **kwargs) -> None:
        """Attach ``next_stream`` as this stream's successor."""
        self._next_stream = next_stream

    def next_stream(self):
        """Return the connected successor stream, or None."""
        return self._next_stream
|
import fbe
import proto
from proto import proto
from unittest import TestCase
class TestCreate(TestCase):
    """Round-trip test: build an account via the FBE writer model, then read
    every field back through the reader model and verify it."""

    def test_create_and_access(self):
        # Create a new account using FBE model into the FBE stream
        writer = proto.AccountModel(fbe.WriteBuffer())
        self.assertEqual(writer.model.fbe_offset, 4)
        model_begin = writer.create_begin()
        account_begin = writer.model.set_begin()
        writer.model.id.set(1)
        writer.model.name.set("Test")
        writer.model.state.set(proto.State.good)
        wallet_begin = writer.model.wallet.set_begin()
        writer.model.wallet.currency.set("USD")
        writer.model.wallet.amount.set(1000.0)
        writer.model.wallet.set_end(wallet_begin)
        asset_begin = writer.model.asset.set_begin(True)
        asset_wallet_begin = writer.model.asset.value.set_begin()
        writer.model.asset.value.currency.set("EUR")
        writer.model.asset.value.amount.set(100.0)
        # NOTE(review): the outer optional is closed before the inner value,
        # which is the opposite nesting order to the wallet above and to the
        # reader below; preserved as-is since the serialized size assertion
        # (252) depends on the existing layout — confirm against the FBE spec.
        writer.model.asset.set_end(asset_begin)
        writer.model.asset.value.set_end(asset_wallet_begin)
        order = writer.model.orders.resize(3)
        # Order 1: market buy.
        order_begin = order.set_begin()
        order.id.set(1)
        order.symbol.set("EURUSD")
        order.side.set(proto.OrderSide.buy)
        order.type.set(proto.OrderType.market)
        order.price.set(1.23456)
        order.volume.set(1000.0)
        order.set_end(order_begin)
        order.fbe_shift(order.fbe_size)
        # Order 2: limit sell.
        order_begin = order.set_begin()
        order.id.set(2)
        order.symbol.set("EURUSD")
        order.side.set(proto.OrderSide.sell)
        order.type.set(proto.OrderType.limit)
        order.price.set(1.0)
        order.volume.set(100.0)
        order.set_end(order_begin)
        order.fbe_shift(order.fbe_size)
        # Order 3: stop buy.
        order_begin = order.set_begin()
        order.id.set(3)
        order.symbol.set("EURUSD")
        order.side.set(proto.OrderSide.buy)
        order.type.set(proto.OrderType.stop)
        order.price.set(1.5)
        order.volume.set(10.0)
        order.set_end(order_begin)
        order.fbe_shift(order.fbe_size)
        writer.model.set_end(account_begin)
        serialized = writer.create_end(model_begin)
        self.assertEqual(serialized, writer.buffer.size)
        self.assertTrue(writer.verify())
        writer.next(serialized)
        self.assertEqual(writer.model.fbe_offset, (4 + writer.buffer.size))
        # Check the serialized FBE size
        self.assertEqual(writer.buffer.size, 252)
        # Access the account model in the FBE stream
        reader = proto.AccountModel(fbe.ReadBuffer())
        self.assertEqual(reader.model.fbe_offset, 4)
        reader.attach_buffer(writer.buffer)
        self.assertTrue(reader.verify())
        account_begin = reader.model.get_begin()
        id = reader.model.id.get()
        self.assertEqual(id, 1)
        name = reader.model.name.get()
        self.assertEqual(name, "Test")
        state = reader.model.state.get()
        self.assertTrue(state.has_flags(proto.State.good))
        wallet_begin = reader.model.wallet.get_begin()
        wallet_currency = reader.model.wallet.currency.get()
        self.assertEqual(wallet_currency, "USD")
        wallet_amount = reader.model.wallet.amount.get()
        self.assertEqual(wallet_amount, 1000.0)
        reader.model.wallet.get_end(wallet_begin)
        self.assertTrue(reader.model.asset.has_value)
        asset_begin = reader.model.asset.get_begin()
        asset_wallet_begin = reader.model.asset.value.get_begin()
        asset_wallet_currency = reader.model.asset.value.currency.get()
        self.assertEqual(asset_wallet_currency, "EUR")
        asset_wallet_amount = reader.model.asset.value.amount.get()
        self.assertEqual(asset_wallet_amount, 100.0)
        reader.model.asset.value.get_end(asset_wallet_begin)
        reader.model.asset.get_end(asset_begin)
        self.assertEqual(reader.model.orders.size, 3)
        o1 = reader.model.orders[0]
        order_begin = o1.get_begin()
        order_id = o1.id.get()
        self.assertEqual(order_id, 1)
        order_symbol = o1.symbol.get()
        self.assertEqual(order_symbol, "EURUSD")
        order_side = o1.side.get()
        self.assertEqual(order_side, proto.OrderSide.buy)
        order_type = o1.type.get()
        self.assertEqual(order_type, proto.OrderType.market)
        order_price = o1.price.get()
        self.assertEqual(order_price, 1.23456)
        order_volume = o1.volume.get()
        self.assertEqual(order_volume, 1000.0)
        o1.get_end(order_begin)
        o2 = reader.model.orders[1]
        order_begin = o2.get_begin()
        order_id = o2.id.get()
        self.assertEqual(order_id, 2)
        order_symbol = o2.symbol.get()
        self.assertEqual(order_symbol, "EURUSD")
        order_side = o2.side.get()
        self.assertEqual(order_side, proto.OrderSide.sell)
        order_type = o2.type.get()
        self.assertEqual(order_type, proto.OrderType.limit)
        order_price = o2.price.get()
        self.assertEqual(order_price, 1.0)
        order_volume = o2.volume.get()
        self.assertEqual(order_volume, 100.0)
        # BUGFIX: close the accessor that was opened (was o1.get_end).
        o2.get_end(order_begin)
        o3 = reader.model.orders[2]
        order_begin = o3.get_begin()
        order_id = o3.id.get()
        self.assertEqual(order_id, 3)
        order_symbol = o3.symbol.get()
        self.assertEqual(order_symbol, "EURUSD")
        order_side = o3.side.get()
        self.assertEqual(order_side, proto.OrderSide.buy)
        order_type = o3.type.get()
        self.assertEqual(order_type, proto.OrderType.stop)
        order_price = o3.price.get()
        self.assertEqual(order_price, 1.5)
        order_volume = o3.volume.get()
        self.assertEqual(order_volume, 10.0)
        # BUGFIX: close the accessor that was opened (was o1.get_end).
        o3.get_end(order_begin)
        reader.model.get_end(account_begin)
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
"""Chromium presubmit script for src/tools/ios.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
WHITELIST_FILE = 'build/ios/grit_whitelist.txt'
def _CheckWhitelistSorted(input_api, output_api):
  """Checks that the grit whitelist file is kept in sorted order.

  Args:
    input_api: presubmit InputApi; LocalPaths() lists files in the change.
    output_api: presubmit OutputApi used to build result messages.

  Returns:
    A list with a single PresubmitError if WHITELIST_FILE is part of the
    change and is not sorted, otherwise an empty list.
  """
  for path in input_api.LocalPaths():
    if WHITELIST_FILE == path:
      # Use a context manager so the file handle is closed deterministically,
      # range() instead of the Python-2-only xrange(), and a name that does
      # not shadow the sorted() builtin.
      with open(os.path.join('../..', WHITELIST_FILE)) as f:
        lines = f.readlines()
      is_sorted = all(lines[i] <= lines[i + 1] for i in range(len(lines) - 1))
      if not is_sorted:
        return [output_api.PresubmitError(
            'The file ' + WHITELIST_FILE + ' must be sorted.')]
  return []
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  return list(_CheckWhitelistSorted(input_api, output_api))
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run when a change is uploaded for review."""
  return list(_CommonChecks(input_api, output_api))
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run when a change is committed."""
  return list(_CommonChecks(input_api, output_api))
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 22 10:56:21 2020
Create plots of time series with example sources.
@author: rouhinen
"""
import numpy as np
from numpy.linalg import norm
from numpy.random import randn
from scipy import signal, stats
import matplotlib.pyplot as plt
import glob
from fidelityOpMinimal import _compute_weights
""" Set subject directory and file patterns. """
dataPath = 'C:\\temp\\fWeighting\\fwSubjects_p\\sub (5)'
fileSourceIdentities = glob.glob(dataPath + '\\sourceIdentities_parc68.npy')[0]
fileForwardOperator = glob.glob(dataPath + '\\forwardOperatorMEEG.npy')[0]
fileInverseOperator = glob.glob(dataPath + '\\inverseOperatorMEEG.npy')[0]
""" Settings. """
n_samples = 1000
n_cut_samples = 40
widths = np.arange(5, 6)
## Get subjects list, and number of parcels.
identities = np.load(fileSourceIdentities) # Source length vector. Expected ids for parcels are 0 to n-1, where n is number of parcels, and -1 for sources that do not belong to any parcel.
forward = np.matrix(np.load(fileForwardOperator)) # sensors x sources
inverse = np.matrix(np.load(fileInverseOperator)) # sources x sensors
idSet = set(identities) # Get unique IDs
idSet = [item for item in idSet if item >= 0] # Remove negative values (should have only -1 if any)
n_parcels = len(idSet)
""" Generate signals for parcels. """
np.random.seed(0)
s = randn(n_parcels, n_samples+2*n_cut_samples)
for i in np.arange(0, n_parcels):
s[i, :] = signal.cwt(s[i, :], signal.ricker, widths)
s = signal.hilbert(s)
parcelSeries = s[:, n_cut_samples:-n_cut_samples]
""" Parcel series to source series. 0 signal for sources not belonging to a parcel. """
sourceSeries = parcelSeries[identities]
sourceSeries[identities < 0] = 0
""" Forward then inverse model source series. """
# Values wrapping? Normalize forward and inverse operators to avoid this, norm to 1.
forward = forward / norm(forward)
inverse = inverse / norm(inverse)
sourceSeries = np.dot(inverse, np.dot(forward, sourceSeries))
""" Compute weighted inverse operator. Time series from complex to real.
Make source series using the weighted operator. """
inverse_w, weights, cplvs = _compute_weights(sourceSeries, parcelSeries, identities, inverse)
parcelSeries = np.real(parcelSeries)
sourceSeries = np.real(sourceSeries)
sourceSeries_w = np.dot(inverse_w, np.dot(forward, sourceSeries))
""" Collapse source modeled series. """
# Collapse estimated source series to parcel series
sourceParcelMatrix = np.zeros((n_parcels,len(identities)), dtype=np.int8)
for i,identity in enumerate(identities):
if identity >= 0: # Don't place negative values. These should be sources not belonging to any parcel.
sourceParcelMatrix[identity,i] = 1
parcelSeries_eo = np.dot(sourceParcelMatrix, sourceSeries)
parcelSeries_ew = np.dot(sourceParcelMatrix, sourceSeries_w)
""" Plotted parcels and sources selections. """
## Set time and parcels of interest. These are for sub 5 parc68. Source list will not match another subject or parcellation.
# Parcel and source lists. 0 (sTS_L) and 36 (iFocp_L) are low fidelity; 14 (iP_L) 22 (IO_L) and 58 (sP_L) high fidelity. The fidelity goodness is from average levels.
parcelList = [0, 22, 14, 58, 63]
sourceLists = [[942, 1207, 1251], [129, 342, 546], [386, 473, 852, 511, 897], [415, 432, 954, 1019, 1196], [4482, 4477, 4511, 4672, 4749]]
# ## Set time and parcels of interest. These are for sub_3. Source list will not match another subject.
# sourceLists = [[890, 963, 1125], [235, 280, 341], [2891, 2615, 2499], [2468, 2494, 2510]] #[890, 963, 1125] for 0. [235, 280, 341] for 22. [2891, 2615, 2499] for 36. [2468, 2494, 2510] for 54.
timeStart = 60
timeEnd = 160
n_sensors = 5
sensor_cor_dist = 3 # For sensor selection. Indexes sensor output sorted abs(correlation)[-n_sensors*sensor_cor_dist:-sensor_cor_dist]
selections = [2, 4]  # indices into parcelList/sourceLists to visualize
parcels = []
sources = []
for i, selection in enumerate(selections):
    parcels.append(parcelList[selection])
    sources.append(sourceLists[selection])

""" Normalize time series to same amplitude at time of interest. """
parcelSeries_n = np.einsum('ij,i->ij', parcelSeries,
                           1/np.max(abs(parcelSeries)[:,timeStart:timeEnd], axis=1))  # Value error with this one for some reason with a normal divide.
parcelSeries_neo = parcelSeries_eo / np.max(abs(parcelSeries_eo)[:,timeStart:timeEnd], axis=1)
parcelSeries_new = parcelSeries_ew / np.max(abs(parcelSeries_ew)[:,timeStart:timeEnd], axis=1)
# NOTE(review): the two lines below normalize by max without abs(), unlike the
# parcel series above — presumably intentional, but worth confirming.
sourceSeries_n = sourceSeries / np.max(sourceSeries[:,timeStart:timeEnd], axis=1)
sourceSeries_nw = sourceSeries_w / np.max(sourceSeries_w[:,timeStart:timeEnd], axis=1)

""" Make sensor series. Find sensors with highest correlations to parcels of interest. """
sensorSeries = np.real(np.dot(forward, sourceSeries))
# Normalize sensors so that different types of sensors are in the same range.
for i in range(forward.shape[0]):
    sensorSeries[i,:] = sensorSeries[i,:] / max(abs(np.ravel(sensorSeries[i,timeStart:timeEnd])))

## Find correlations separately for selections. May find same sensors for different parcels.
sensors_high = np.zeros((len(selections), n_sensors), dtype=int)
for i, selection in enumerate(selections):
    correl = np.zeros(forward.shape[0])
    for ii in range(forward.shape[0]):
        correl[ii], _ = stats.spearmanr(np.real(parcelSeries[parcels[i], timeStart:timeEnd]),
                                        np.ravel(np.real(sensorSeries[ii, timeStart:timeEnd])))
    # Pick every sensor_cor_dist-th sensor from the top of the |correlation|
    # ranking so the chosen sensors are not all near-duplicates.
    ind = np.argpartition(abs(correl), -n_sensors)[-n_sensors*sensor_cor_dist::sensor_cor_dist]
    sensors_high[i,:] = ind[np.argsort(correl[ind])]

""" Set global figure parameters, including CorelDraw compatibility (.fonttype) """
import matplotlib.pylab as pylab
params = {'legend.fontsize':'7',
          'figure.figsize':(11, 3),
          'axes.labelsize':'14',
          'axes.titlesize':'14',
          'xtick.labelsize':'14',
          'ytick.labelsize':'14',
          'lines.linewidth':'0.5',
          'pdf.fonttype':42,
          'ps.fonttype':42,
          'font.sans-serif':'Arial'}
pylab.rcParams.update(params)
colors = [['royalblue', 'aqua', 'dodgerblue', 'cadetblue', 'turquoise'],
          ['orchid', 'crimson', 'hotpink', 'plum', 'deeppink']]
""" Plot time series of selections. Figure 1. """
for ii, parcel in enumerate(selections):
## Source series.
fig = plt.figure()
ax = fig.add_subplot(1, 3, 1)
for i, source in enumerate(sources[ii]):
ax.plot(np.ravel(np.real(sourceSeries_n[source, timeStart:timeEnd])) -2*i,
color=colors[ii][i], linestyle='-', label='Original, ' + str(source))
ax.plot(np.ravel(np.real(sourceSeries_nw[source, timeStart:timeEnd])) -2*i,
color=colors[ii][i], linestyle=':', linewidth=1, label='Weighted, ' + str(source))
ax.set_ylabel('Modeled source series')
ax.set_xlabel('Time, samples, parcel ' + str(parcels[ii]))
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
legend = ax.legend(loc='best', shadow=False)
legend.get_frame()
plt.tight_layout(pad=0.1)
## Parcel series
ax = fig.add_subplot(1, 3, 2)
# Ground truth
ax.plot(np.ravel(np.real(parcelSeries_n[parcels[ii], timeStart:timeEnd]))-0,
color='black', linestyle='-', label='Ground truth')
# Original inv op
ax.plot(np.ravel(np.real(parcelSeries_neo[parcels[ii], timeStart:timeEnd]))-1,
color='dimgray', linestyle='-', label='Estimated, Original inv op')
# Weighted
ax.plot(np.ravel(np.real(parcelSeries_new[parcels[ii], timeStart:timeEnd]))-2,
color='black', linestyle=':', linewidth=1, label='Estimated, Weighted inv op')
ax.set_ylabel('Parcel series')
ax.set_xlabel('Time, samples, parcel ' + str(parcels[ii]))
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
legend = ax.legend(loc='best', shadow=False)
legend.get_frame()
plt.tight_layout(pad=0.1)
## Sensor series
ax = fig.add_subplot(1, 3, 3)
for i, sensor in enumerate(sensors_high[ii]):
ax.plot(np.ravel(sensorSeries[sensor, timeStart:timeEnd])-1*i,
color='black', linestyle='-', label='High cor., sensor ' + str(sensor))
ax.set_ylabel('Sensor series')
ax.set_xlabel('Time, samples, parcel ' + str(parcels[ii]))
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
legend = ax.legend(loc='best', shadow=False)
legend.get_frame()
plt.tight_layout(pad=0.1)
plt.show()
""" Plot time series of subselections. Figure 2. """
## Change weighted source signal so that smaller weight means smaller amplitude, and larger weight larger amplitude.
# As weights can get large, multiply sources by set values instead of actual weights. Sort sources by weights and apply set weights.
fakeWeights = np.linspace(0.2, 1.8, len(sources[0]))
sourceSeries_nwf = sourceSeries_nw.copy()
for ii, parcel in enumerate(selections):
exampleSources = sources[ii]
# sort by weights
ind = np.argsort(abs(weights[exampleSources])) # Sorted by weight
sourceSeries_nwf[exampleSources,:] = np.einsum('ij,i->ij',sourceSeries_nw[exampleSources,:],
fakeWeights[ind]*np.sign(weights[exampleSources][ind]))
## Compute sums of sources selected for visualization.
# Init summation arrays
sumArray_n = np.zeros((len(selections), n_samples), dtype=float) # Original inv op
sumArray_w = np.zeros((len(selections), n_samples), dtype=float) # Weighted inv op
for ii, parcel in enumerate(selections):
sumArray_n[ii,:] = np.sum(sourceSeries_n[sources[ii]], axis=0)
sumArray_w[ii,:] = np.sum(sourceSeries_nw[sources[ii]], axis=0)
# Normalize summed arrays for visualization.
sumArray_n = np.einsum('ij,i->ij', sumArray_n,
1/np.max(abs(sumArray_n)[:,timeStart:timeEnd], axis=1)) # Parcel-wise normalization.
sumArray_w = np.einsum('ij,i->ij', sumArray_w,
1/np.max(abs(sumArray_w)[:,timeStart:timeEnd], axis=1))
for ii, parcel in enumerate(selections):
## Simulated source series (so parcel series many times) with original inv op modeled series.
fig = plt.figure()
ax = fig.add_subplot(1, 3, 1)
for i, source in enumerate(sources[ii]):
# Ground truth
ax.plot(np.ravel(np.real(parcelSeries_n[parcels[ii], timeStart:timeEnd]))-2*i,
color=colors[ii][i], linestyle='-', label='Simulated, parcel' + str(parcels[ii]))
ax.plot(np.ravel(np.real(sourceSeries_n[source, timeStart:timeEnd]))-2*i,
color=colors[ii][i], linestyle=':', linewidth=1, label='Orig inv op, source' + str(source))
ax.set_ylabel('Source series')
ax.set_xlabel('Time, samples, parcel ' + str(parcels[ii]))
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
legend = ax.legend(loc='best', shadow=False)
legend.get_frame()
plt.tight_layout(pad=0.1)
## Simulated source series with weighted inv op modeled series.
ax = fig.add_subplot(1, 3, 2)
for i, source in enumerate(sources[ii]):
ax.plot(np.ravel(np.real(parcelSeries_n[parcels[ii], timeStart:timeEnd]))-2*i,
color=colors[ii][i], linestyle='-', label='Simulated, parcel' + str(parcels[ii]))
ax.plot(np.ravel(np.real(sourceSeries_nwf[source, timeStart:timeEnd])) -2*i,
color=colors[ii][i], linestyle=':', linewidth=1, label='Weighted inv op, source' + str(source))
ax.set_ylabel('Source series')
ax.set_xlabel('Time, samples, parcel ' + str(parcels[ii]))
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
legend = ax.legend(loc='best', shadow=False)
legend.get_frame()
plt.tight_layout(pad=0.1)
## Summed selected sources series as "parcel" representatives
ax = fig.add_subplot(1, 3, 3)
# Ground truth and original inv op
ax.plot(np.ravel(np.real(parcelSeries_n[parcels[ii], timeStart:timeEnd]))-0,
color='black', linestyle='-', label='Simulated, parcel' + str(parcels[ii]))
ax.plot(sumArray_n[ii,timeStart:timeEnd]-0,
color='black', linestyle=':', linewidth=1, label='Estimated, Sum Orig' + str(parcels[ii]))
# Ground truth and weighted inv op
ax.plot(np.ravel(np.real(parcelSeries_n[parcels[ii], timeStart:timeEnd]))-2,
color='black', linestyle='-', label='Simulated, parcel' + str(parcels[ii]))
ax.plot(sumArray_w[ii,timeStart:timeEnd]-2,
color='dimgray', linestyle=':', linewidth=1, label='Estimated, Sum Weight' + str(parcels[ii]))
ax.set_ylabel('Source sum series')
ax.set_xlabel('Time, samples, parcel ' + str(parcels[ii]))
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
legend = ax.legend(loc='best', shadow=False)
legend.get_frame()
plt.tight_layout(pad=0.1)
|
from agent import Agent
from random import random, sample
from itertools import combinations
from math import comb
# from mission import Mission
class Evolver(Agent):
    '''
    Maintains probabilities of all possible worlds.
    Calculates the probabilty of each player being a spy from set of worlds.
    World probabilities are updated on mission outcomes, voting patterns and
    proposers.
    Behavioural data passed in as a dictionary.
    '''

    def __init__(self, data, name='Evolver'):
        # data: dictionary of behavioural coefficients (4D vectors and
        # weights) consumed via calc_threshold()/calc_rate().
        self.data = data
        self.name = name

    def calc_threshold(self, vec):
        '''
        Converts a 4D vector into an unbounded threshold value
        '''
        # Quadratic in round number, linear in number of failed missions.
        return vec[0]*self.rnd*self.rnd + vec[1]*self.rnd + vec[2]*self.fails + vec[3]

    def calc_rate(self, vec):
        '''
        Converts a 4D vector into a probability between 0.01 and 0.99
        '''
        return min(0.99, max(0.01, self.calc_threshold(vec)))

    # True iff this agent was dealt the spy role (spies list non-empty).
    def is_spy(self): return self.spies != []

    def average_suspicion(self):
        # spy_count is presumably defined on the Agent base class
        # (spies per player count) — TODO confirm.
        return self.spy_count[self.num_players] / self.num_players

    def betrayals_required(self):
        # fails_required presumably comes from the Agent base class.
        return self.fails_required[self.num_players][self.rnd]

    def new_game(self, num_players, player_number, spies):
        '''
        Initialises the game, spies is empty if player is not a spy
        '''
        self.rnd = 0
        self.successes = 0
        self.fails = 0
        self.downvotes = 0
        self.num_players = num_players
        self.player_number = player_number
        self.num_spies = self.spy_count[self.num_players]
        self.spies = spies
        self.failed_teams = [] # teams that betrayed - avoid them
        self.votes_for = [] # players that voted for the last proposed mission
        # self.missions = []
        # Start with a uniform distribution over all possible spy sets.
        worlds = list(combinations(range(self.num_players), self.num_spies))
        self.worlds = {w: 1/len(worlds) for w in worlds}
        self.update_suspicions()

    def possible_teams(self, l):
        '''
        Returns list of all possible teams of length l including self,
        in ascending average suspicion
        '''
        teams = [t for t in list(combinations(range(self.num_players), l)) if self.player_number in t]
        return sorted(teams, key=lambda t: sum([self.suspicions[x] for x in t]))

    def num_spies_in(self, mission):
        '''
        Spy method
        returns number of spies on mission
        '''
        return len([x for x in self.spies if x in mission])

    def enough_spies(self, mission):
        '''
        Spy method
        returns True iff there are enough spies in mission to fail the mission
        '''
        return self.num_spies_in(mission) >= self.betrayals_required()

    def bad_mission(self, mission):
        '''
        Returns True iff this mission configuration has already ended in
        betrayal
        '''
        for m in self.failed_teams:
            if mission == m or set(mission).issubset(m): return True
        return False

    def propose_mission(self, team_size, betrayals_required = 1):
        '''
        Propose the least suspicious team including self.
        If spy and two betrayals required, try to return the least suspicious
        team containing two spies.
        '''
        ps = self.possible_teams(team_size)
        if not self.is_spy() or betrayals_required == 1:
            # Least-suspicious team that has not already betrayed.
            team = ps[0]
            for n in range(1, len(ps)):
                if self.bad_mission(team): team = ps[n]
                else: return team
        elif betrayals_required == 2:
            # Least-suspicious team with two spies that has not betrayed.
            team = ps[0]
            for n in range(1, len(ps)):
                if self.bad_mission(team) or not self.enough_spies(team): team = ps[n]
                else: return team
            # NOTE(review): nesting reconstructed — this fallback is taken to
            # belong to the two-betrayal spy branch; confirm against original.
            if self.successes < 2:
                team = ps[0]
                for n in range(1, len(ps)):
                    if self.bad_mission(team): team = ps[n]
                    else: return team
            else:
                # Resistance is one success from winning: must field spies.
                team = ps[0]
                for n in range(1, len(ps)):
                    if not self.enough_spies(team): team = ps[n]
                    else: return team
        # Last resort: self plus a random sample of the other players.
        return [self.player_number] + \
            sample([x for x in range(self.num_players) if x != self.player_number], team_size-1)

    def mission_suspicion(self, mission):
        '''
        Returns the average suspicion of a mission. Does not include self
        '''
        others = [self.suspicions[x] for x in mission if x != self.player_number]
        return sum(others) / len(others)

    def vote(self, mission, proposer):
        # Returns True to approve the proposed mission team.
        # Always approve round 0, own proposals, and the forced fifth vote.
        if self.rnd == 0 or proposer == self.player_number or self.downvotes == 4: return True
        # Resistance-style vote: approve when the team's average suspicion is
        # below a tunable multiple of the baseline suspicion.
        res_vote = self.mission_suspicion(mission) <= \
            self.calc_threshold(self.data['vote_threshold']) * self.average_suspicion()
        if self.is_spy():
            if self.successes == 2:
                # Must stop the resistance's third success.
                return True if self.enough_spies(mission) else False
            if self.enough_spies(mission) and not self.bad_mission(mission):
                # Failable mission: approve with a relaxed threshold.
                return res_vote or self.mission_suspicion(mission) <= \
                    self.calc_threshold(self.data['failable_vote_threshold']) * self.average_suspicion()
        if self.bad_mission(mission): return False
        # Reject large teams that exclude us: some spy must be on them.
        if self.player_number not in mission and len(mission) >= self.num_players - self.num_spies: return False
        return res_vote

    def vote_outcome(self, mission, proposer, votes_for):
        # Record the voting pattern for the Bayesian update in
        # mission_outcome(), and track consecutive rejected proposals.
        # self.missions.append(Mission(self.num_players, self.rnd, proposer, mission, votes))
        self.votes_for = votes_for
        if 2 * len(votes_for) <= self.num_players: self.downvotes += 1

    def betray(self, mission, proposer):
        # Returns True iff this agent (as a spy on the mission) betrays it.
        if self.is_spy():
            if self.fails == 2 and self.enough_spies(mission): return True
            if self.successes == 2: return True
            elif self.num_spies_in(mission) > self.betrayals_required():
                # Surplus spies: betraying risks revealing too many.
                return random() < self.calc_rate(self.data['risky_betray_rate'])
            elif self.num_spies_in(mission) < self.betrayals_required(): return False
            else: return random() < self.calc_rate(self.data['betray_rate'])
        return False # is resistance

    def update_suspicions(self):
        '''
        Updates self.suspicions to reflect the probability of each player being
        a spy
        '''
        # Marginalize the world distribution: P(x is spy) = sum of the
        # probabilities of all worlds containing x.
        self.suspicions = {x: 0 for x in range(self.num_players)}
        worlds = self.worlds.items()
        for x in range(self.num_players):
            for w, wp in worlds:
                if x in w: self.suspicions[x] += wp

    # def print_suspicions(self):
    #     print(f'\nPlayer {self.player_number}:')
    #     print({s[0]: round(s[1],5) for s in self.suspicions.items()})

    def outcome_probability(self, spies_in_mission, betrayals, betray_rate):
        '''
        Probability of a mission outcome given a world
        '''
        if spies_in_mission < betrayals: return 0
        if spies_in_mission == 0 and betrayals == 0: return 1
        # Binomial likelihood of exactly `betrayals` betrayals.
        return betray_rate ** betrayals * (1-betray_rate) ** (spies_in_mission-betrayals) \
            * comb(spies_in_mission, betrayals)

    def vote_probability(self, world, sf, ss, rf, rs, mission_success):
        '''
        Probability of a voting pattern for a mission outcome given a world
        '''
        # sf/ss: spy vote-for rates after fail/success;
        # rf/rs: resistance vote-for rates after fail/success.
        p = 1
        for x in range(self.num_players):
            if x in world and x in self.votes_for:
                if mission_success: p *= ss
                else: p *= sf
            elif x in world and x not in self.votes_for:
                if mission_success: p *= (1-ss)
                else: p *= (1-sf)
            elif x not in world and x in self.votes_for:
                if mission_success: p *= rs
                else: p *= rf
            elif x not in world and x not in self.votes_for:
                if mission_success: p *= (1-rs)
                else: p *= (1-rf)
        return p

    def proposer_probability(self, world, proposer, sf, ss, rf, rs, mission_success):
        '''
        Probability of proposer causing a mission outcome given a world
        '''
        if proposer in world and mission_success: return ss
        elif proposer in world and not mission_success: return sf
        elif proposer not in world and mission_success: return rs
        elif proposer not in world and not mission_success: return rf

    def mission_outcome(self, mission, proposer, betrayals, mission_success):
        '''
        Update the last Mission object with mission info
        Update world probabilities
        '''
        # self.missions[-1].betrayals = betrayals
        # self.missions[-1].success = mission_success
        if not mission_success: self.failed_teams.append(mission)
        if len(self.worlds) > 1 and self.rnd < 4:
            br = self.calc_rate(self.data['opponent_betray_rate'])
            vsf = self.calc_rate(self.data['spy_vote_failed'])
            vss = self.calc_rate(self.data['spy_vote_success'])
            vrf = self.calc_rate(self.data['res_vote_failed'])
            vrs = self.calc_rate(self.data['res_vote_success'])
            psf = self.calc_rate(self.data['spy_propose_failed'])
            pss = self.calc_rate(self.data['spy_propose_success'])
            prf = self.calc_rate(self.data['res_propose_failed'])
            prs = self.calc_rate(self.data['res_propose_success'])
            outcome_prob = 0 # overall probability of this mission outcome
            for w, wp in self.worlds.items():
                spies_in_mission = len([x for x in w if x in mission])
                outcome_prob += self.outcome_probability(spies_in_mission, betrayals, br) * wp
            impossible_worlds = []
            for w, wp in self.worlds.items():
                spies_in_mission = len([x for x in w if x in mission])
                if spies_in_mission == betrayals and len(mission) == betrayals:
                    # Every mission member betrayed, so every member is a spy.
                    # NOTE(review): this collapses to the FIRST world containing
                    # the whole mission; other worlds may also contain it.
                    self.worlds = {w:1}
                    break
                new_p = self.outcome_probability(spies_in_mission, betrayals, br) * self.worlds[w] / outcome_prob
                if new_p == 0: impossible_worlds.append(w)
                # Blend posterior with prior by the outcome weight.
                mw = self.calc_rate(self.data['outcome_weight'])
                self.worlds[w] = mw * new_p + (1-mw) * self.worlds[w]
            for w in impossible_worlds: self.worlds.pop(w, None)
            voting_prob = 0 # overall probability of a voting pattern given mission outcome
            for w, wp in self.worlds.items():
                voting_prob += self.vote_probability(w, vsf, vss, vrf, vrs, mission_success) * wp
            for w in self.worlds.keys():
                new_p = self.vote_probability(w, vsf, vss, vrf, vrs, mission_success) * self.worlds[w] / voting_prob
                vw = self.calc_rate(self.data['vote_weight'])
                self.worlds[w] = vw * new_p + (1-vw) * self.worlds[w]
            proposer_prob = 0 # overall probability of a proposer given mission outcome
            for w, wp in self.worlds.items():
                proposer_prob += self.proposer_probability(w, proposer, psf, pss, prf, prs, mission_success) * wp
            for w in self.worlds.keys():
                new_p = self.proposer_probability(w, proposer, psf, pss, prf, prs, mission_success) \
                    * self.worlds[w] / proposer_prob
                pw = self.calc_rate(self.data['proposer_weight'])
                self.worlds[w] = pw * new_p + (1-pw) * self.worlds[w]
        self.update_suspicions()

    def round_outcome(self, rounds_complete, missions_failed):
        # Refresh round counters from the game state.
        # self.missions[-1].success = (missions_failed == self.fails)
        self.rnd = rounds_complete
        self.fails = missions_failed
        self.successes = rounds_complete - missions_failed
        self.downvotes = 0

    # No learning across games; required by the Agent interface.
    def game_outcome(self, spies_win, spies): pass
|
import unittest
import jamband
class TestMain(unittest.TestCase):
    """Unit tests for jamband.refersToJamBand."""

    def test_pigeons(self):
        """A long message with no band reference-like wording should still match."""
        msg = "My brain has degenerated today so much that I’m listening to pigeons playing ping pong enthusiastically"
        self.assertTrue(jamband.refersToJamBand(msg))

    def test_phish(self):
        """Obfuscated spellings of the band name should be detected."""
        # msgs = ["p***h", "p///h", "ph*sh", "p h i s h", "p!aah"]
        candidates = ["p h i s h"]
        for candidate in candidates:
            matched = jamband.refersToJamBand(candidate)
            if not matched:
                print(candidate)  # show which spelling failed before asserting
            self.assertTrue(matched)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
#!/usr/bin/python3
import numpy as np
import matplotlib.pyplot as plt
import sys
import classifier as c
from plotDecBoundaries import plotDecBoundaries
def error_rate(classifications, true_classifications):
    """Return the fraction of predicted labels that differ from the truth.

    Raises RuntimeError when the two arrays have different shapes.
    """
    if np.shape(classifications) != np.shape(true_classifications):
        raise RuntimeError("Size not equal")
    mismatches = np.count_nonzero(classifications - true_classifications)
    return mismatches / np.size(classifications)
def main(training_csv, test_csv, plot_fname = "", features = np.array([0, 1])):
    """Train and evaluate a nearest-mean classifier on selected feature columns.

    Args:
        training_csv: path to the training CSV; the last column is the label.
        test_csv: path to the test CSV with the same column layout.
        plot_fname: if non-empty, save a decision-boundary plot to this file.
        features: column indices of the features to use.

    Returns:
        (training_error_rate, test_error_rate) tuple.

    Raises:
        RuntimeError: if training and test data column counts differ.
    """
    print("Training Data: %s" % training_csv)
    print("Test Data: %s" % test_csv)
    print("Features", features)
    # Always load the label column (-1, the last column) with the features.
    features = np.append(features, -1)
    training_data = np.loadtxt(training_csv, delimiter = ',', usecols = features)
    test_data = np.loadtxt(test_csv, delimiter = ',', usecols = features)
    training_data_shape = np.shape(training_data)
    test_data_shape = np.shape(test_data)
    if (training_data_shape[1] != test_data_shape[1]):
        # Fix: the original had an unreachable `return` after this raise.
        raise RuntimeError("Size of training and test data do not match")
    classifier = c.NearestMeanClassifier(training_data_shape[1] - 1)
    classifier = classifier.train(training_data)
    # Error rate on the data the classifier was trained on.
    classifications = classifier.evaluate(training_data[:, :-1])
    error_rate_training = error_rate(classifications, training_data[:, -1])
    print("Error-rate of training data classifications = %.4f" % error_rate_training)
    # Error rate on the held-out test data.
    classifications = classifier.evaluate(test_data[:, :-1])
    error_rate_test = error_rate(classifications, test_data[:, -1])
    print("Error-rate of test data classifications = %.4f" % error_rate_test)
    if (len(plot_fname)):
        # plotDecBoundaries appears to return a pyplot-like object (it is used
        # for savefig/clf); the local name deliberately shadows `plt`.
        plt = plotDecBoundaries(training_data[:, :-1], training_data[:, -1], classifier.feature_means)
        plt.savefig(plot_fname)
        plt.clf()
    return (error_rate_training, error_rate_test)
# Command line: script.py TRAINING_CSV TEST_CSV [FEATURE_SIZE [PLOT_FNAME [F1 F2]]]
# With fewer than 7 args, evaluates every pair of feature columns; with 7,
# evaluates only the explicitly given pair (sys.argv[5], sys.argv[6]).
if (__name__ == '__main__'):
    if (len(sys.argv) < 3):
        print("Unspecified traning data and/or test data")
        exit(1)
    elif (len(sys.argv) == 3):
        feature_size = 2
    else:
        feature_size = int(sys.argv[3])
        if (feature_size < 2):
            print("feature size has to be 2 or above")
            exit(1)
    training_csv = sys.argv[1]
    test_csv = sys.argv[2]
    error_rate_training_list = np.array([])
    error_rate_test_list = np.array([])
    features_list = np.array([])
    if (len(sys.argv) > 4):
        plot_fname = sys.argv[4]
    try:
        if (len(sys.argv) < 7):
            # Exhaustive sweep over all unordered feature pairs (i, j).
            for i in range(feature_size - 1):
                for j in range(i + 1, feature_size):
                    features = np.array([i, j])
                    if (len(sys.argv) < 5):
                        (er_training, er_test) = main(training_csv, test_csv, features = features)
                    else:
                        (er_training, er_test) = main(training_csv, test_csv, plot_fname, features)
                    error_rate_training_list = np.append(error_rate_training_list, er_training)
                    error_rate_test_list = np.append(error_rate_test_list, er_test)
                    if (np.size(features_list) == 0):
                        features_list = np.array([features])
                    else:
                        features_list = np.concatenate((features_list, np.array([features])))
            if (feature_size > 2):
                # Summary statistics over the sweep.
                print("Standard deviation of error-rate on traning/test: %s" % np.std([error_rate_training_list, error_rate_test_list], axis = 1))
                print("Minimum error-rate on training %.4f, with featrues: %s" % (np.min(error_rate_training_list), features_list[np.argmin(error_rate_training_list)]))
                print("Maximum error-rate on training %.4f, with featrues: %s" % (np.max(error_rate_training_list), features_list[np.argmax(error_rate_training_list)]))
                print("Minimum error-rate on test %.4f, with featrues: %s" % (np.min(error_rate_test_list), features_list[np.argmin(error_rate_test_list)]))
                print("Maximum error-rate on test %.4f, with featrues: %s" % (np.max(error_rate_test_list), features_list[np.argmax(error_rate_test_list)]))
        else:
            main(training_csv, test_csv, plot_fname, features = np.array([int(sys.argv[5]), int(sys.argv[6])]))
    except RuntimeError as e:
        print("RuntimeError raised\n", e.args)
        exit(1)
|
#////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
# Name :
# Author : Avi
# Revision : $Revision: #10 $
#
# Copyright 2009-2020 ECMWF.
# This software is licensed under the terms of the Apache Licence version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
from ecflow import Alias, AttrType, Autocancel, CheckPt, ChildCmdType, Client, Clock, Cron, DState, Date, Day, Days, \
Defs, Ecf, Event, Expression, Family, FamilyVec, File, Flag, FlagType, FlagTypeVec, InLimit, \
JobCreationCtrl, Label, Late, Limit, Meter, Node, NodeContainer, NodeVec, PartExpression, PrintStyle, \
Repeat, RepeatDate, RepeatDay, RepeatEnumerated, RepeatInteger, RepeatString, SState, State, Style, \
Submittable, Suite, SuiteVec, Task, TaskVec, Time, TimeSeries, TimeSlot, Today, UrlCmd, Variable, \
VariableList, Verify, WhyCmd, ZombieAttr, ZombieType, ZombieUserActionType, Trigger, Complete, Edit, Defstatus
import ecflow_test_util as Test
import unittest
import shutil # used to remove directory tree
import os,sys
def test_def_file():
    """Return a per-process .def file name so parallel test runs do not clash."""
    return f"test_tutorial_def_{os.getpid()}.def"
def test_compile(text):
    """Write the tutorial snippet *text* to a temp file, compile and exec it,
    then delete the file. Side effects of the snippet (e.g. save_as_defs)
    persist for the caller to inspect."""
    # replace test.def in the text with test_def_file() so that we have a unique file per process
    text = text.replace('test.def',test_def_file())
    test_file = "py_u_test_tutorial_" + str(os.getpid()) + ".def"
    file = open(test_file ,'w')
    file.write(text)
    file.close()
    # execfile(test_file) only work for python 2.7
    with open(test_file) as f:
        code = compile(f.read(), test_file, 'exec')
        exec(code)
    os.remove(test_file)
def do_tear_down():
    """Shared tearDown: disable debug equality and delete the per-process
    .def file if it exists."""
    Ecf.set_debug_equality(False)
    try:
        os.remove(test_def_file())
    except OSError:
        # The test may never have created the file; removal is best-effort.
        # Narrowed from a bare `except:` so unrelated errors (e.g.
        # KeyboardInterrupt) are no longer swallowed.
        pass
######################################################################################################
class TestNewSuite(unittest.TestCase):
    """Builds the same one-suite/one-task definition several different ways
    and checks all constructions compare equal (ecflow tutorial)."""

    def setUp(self):
        # Verbose equality so assertEqual failures explain the difference.
        Ecf.set_debug_equality(True)
        home = os.path.join(os.getenv("HOME"),"course")
        # Style 1: imperative add_suite/add_task calls.
        self.defs = Defs()
        suite = self.defs.add_suite("test")
        suite.add_variable("ECF_HOME", home)
        suite.add_task("t1")
        # Style 2: context managers.
        with Defs() as self.defs2:
            with self.defs2.add_suite("test") as suite:
                suite.add_variable("ECF_HOME", home)
                suite.add_task("t1")
        # Style 3: constructor nesting via add().
        with Defs() as self.defs3:
            self.defs3.add(
                Suite("test",
                      Edit(ECF_HOME=home),
                      Task("t1")))
        # Style 4: operator overloading (+ / +=).
        self.defs4 = Defs() + (Suite("test") + Edit(ECF_HOME=home))
        self.defs4.test += Task("t1")
        # Style 5: chained add() calls.
        self.defs5 = Defs().add(
            Suite("test").add(
                Edit(ECF_HOME=home),
                Task("t1")))
        #print("Creating suite definition")
        home = os.path.join(os.getenv("HOME"),"course")
        defs = Defs(
            Suite('test',
                  Task('t1'),
                  ECF_HOME=home))
        self.assertEqual(self.defs,defs,"defs not equal\n" + str(self.defs) + "\n" + str(defs))
        # Compile-and-run the tutorial snippet, then load the .def file it
        # saved and compare against the in-memory definition.
        text = """import os
from ecflow import Defs,Suite,Task,Edit
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite('test',
        Edit(ECF_HOME=home),
        Task('t1')))
#xx print(defs)
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,defs,"defs not equal\n" + str(test_defs) + "\n" + str(defs))

    def test_defs_equal(self):
        # All construction styles must yield identical definitions.
        self.assertEqual(self.defs, self.defs2, "defs not the equal")
        self.assertEqual(self.defs, self.defs3, "defs not the equal")
        self.assertEqual(self.defs, self.defs4, "defs not the equal")
        self.assertEqual(self.defs, self.defs5, "defs not the equal")

    def tearDown(self):
        do_tear_down()
######################################################################################################
class TestFamilies(unittest.TestCase):
    """Checks that a tutorial snippet building a family via a factory function
    produces the same definition as chained add() / operator styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: suite 'test' with family f1 holding t1, t2.
        defs = Defs().add(
            Suite("test").add(
                Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
                Family("f1").add(
                    Task("t1"),
                    Task("t2"))))
        defs.save_as_defs(test_def_file())
        self.defs = defs

    def test_me0(self):
        # Tutorial snippet builds the family via a helper; the .def file it
        # saves must match self.defs.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit
def create_family_f1():
    return Family("f1",
        Task("t1"),
        Task("t2"))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
        Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
        create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        import os
        #print("Creating suite definition")
        home = os.path.join(os.getenv("HOME"), "course")
        # Same structure built with + / += operators and a list comprehension.
        defs = Defs() + (Suite("test") + Edit(ECF_HOME=home) + Edit(ECF_INCLUDE=home))
        defs.test += Family("f1") + [ Task("t{0}".format(i)) for i in range(1,3) ]
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestVariables(unittest.TestCase):
    """Suite with per-task SLEEP variables, built in several equivalent styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f1():
            return Family("f1" ).add(
                Task("t1").add(Edit(SLEEP=20)),
                Task("t2").add(Edit(SLEEP=20)))
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: .add() chaining style.
        defs = Defs().add( Suite("test").add(
            Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
            create_family_f1()))
        defs.save_as_defs(test_def_file())
        self.defs = defs

    def test_me0(self):
        # NOTE(review): the 'import os' inside this script is commented out,
        # yet the script uses os.path — test_compile presumably executes the
        # text with this module's globals in scope; verify against test_compile.
        text = """#import os
from ecflow import Defs,Suite,Family,Task,Edit
def create_family_f1():
    return Family("f1",
                  Task("t1",Edit(SLEEP=20)),
                  Task("t2",Edit(SLEEP=20)))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me2(self):
        # Same structure via the Defs context-manager and '+=' style.
        import os
        home = os.path.join(os.getenv("HOME"), "course")
        with Defs() as defs:
            defs += Suite("test").add(
                Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home))
            defs.test += Family("f1").add(
                Task("t1").add(Edit(SLEEP= 20)),
                Task("t2").add(Edit(SLEEP= 20)))
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def test_me3(self):
        # Same structure using only the overloaded '+' operator.
        import os
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs() + (Suite("test") + Edit(ECF_HOME=home) + Edit(ECF_INCLUDE=home))
        defs.test += Family("f1") + (Task("t1") + Edit(SLEEP=20)) + (Task("t2") + Edit(SLEEP=20))
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestVariableInheritance(unittest.TestCase):
    """SLEEP defined once on family f1 and inherited by tasks t1/t2; the same
    suite is built in three styles and the resulting Defs must compare equal."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f1():
            return Family("f1").add(
                Edit(SLEEP=20),
                Task("t1"),
                Task("t2"))
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: .add() chaining style.
        defs = Defs().add(Suite("test").add(
            Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
            create_family_f1() ))
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Constructor-argument style compiled and executed as a script, then
        # the saved .def file is reloaded and compared.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit
def create_family_f1():
    return Family("f1",
                  Edit(SLEEP=20),
                  Task("t1"),
                  Task("t2"))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # '+' operator style; variables appended to the suite afterwards.
        import os
        def create_family_f1():
            return Family("f1") + Edit(SLEEP=20) + Task("t1") + Task("t2")
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs() + (Suite("test") + create_family_f1())
        defs.test += [ Edit(ECF_HOME=home) , Edit(ECF_INCLUDE=home) ]
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestTriggers(unittest.TestCase):
    """Task t2 triggers on t1 completion; suite built in three equivalent styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f1():
            return Family("f1").add(
                Edit(SLEEP=20),
                Task("t1"),
                Task("t2").add(Trigger("t1 == complete")))
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: .add() chaining style.
        defs = Defs().add(Suite("test").add(
            Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
            create_family_f1()))
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Script in constructor-argument style; also validates triggers via
        # defs.check() before saving.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger
def create_family_f1():
    return Family("f1",
                  Edit(SLEEP=20),
                  Task("t1"),
                  Task("t2",Trigger("t1 == complete")))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
errors = defs.check()
assert len(errors) == 0,errors
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # Suite context-manager style; Trigger(["t1"]) presumably expands to
        # the same "t1 == complete" expression as the reference.
        import os
        home = os.path.join(os.getenv("HOME"), "course")
        with Suite("test") as suite:
            suite += [ Edit(ECF_HOME=home) , Edit(ECF_INCLUDE=home) ]
            suite += Family("f1") + Edit(SLEEP=20) + Task("t1") + Task("t2")
            suite.f1.t2 += Trigger(["t1"])
        defs = Defs().add( suite )
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestEvents(unittest.TestCase):
    """Events a/b on t2 drive triggers of t3/t4; built in three styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f1():
            return Family("f1").add(
                Edit(SLEEP=20),
                Task("t1"),
                Task("t2").add(
                    Trigger("t1 == complete"),
                    Event("a"),
                    Event("b")),
                Task("t3").add(Trigger("t2:a")),
                Task("t4").add(Trigger("t2:b")))
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: .add() chaining style.
        defs = Defs().add(
            Suite("test").add(
                Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
                create_family_f1() ))
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Constructor-argument script; triggers checked before saving.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Event
def create_family_f1():
    return Family("f1",
                  Edit(SLEEP=20),
                  Task("t1"),
                  Task("t2",
                       Trigger("t1 == complete"),
                       Event("a"),
                       Event("b")),
                  Task("t3",
                       Trigger("t2:a")),
                  Task("t4",
                       Trigger("t2:b")))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
errors = defs.check()
assert len(errors) == 0,errors
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # '+' operator style with a list of children added in one go.
        import os
        def create_family_f1():
            f1 = Family("f1") + [ Edit(SLEEP=20),
                                  Task("t1"),
                                  Task("t2") + Trigger(["t1"]) + Event("a") + Event("b"),
                                  Task("t3") + Trigger("t2:a"),
                                  Task("t4") + Trigger("t2:b") ]
            return f1
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs() + Suite("test")
        defs.test += [ Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home), create_family_f1()]
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestComplete(unittest.TestCase):
    """t4 has both a Trigger and a Complete expression (t2:b); three styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f1():
            return Family("f1").add(
                Edit(SLEEP= 20),
                Task("t1"),
                Task("t2").add(Trigger("t1 == complete"),
                               Event("a"), Event("b")),
                Task("t3").add(Trigger("t2:a")),
                Task("t4").add(Trigger("t2 == complete"),
                               Complete("t2:b") ))
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: .add() chaining style.
        defs = Defs().add(Suite("test").add(
            Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
            create_family_f1() ))
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Constructor-argument script; triggers checked before saving.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event
def create_family_f1():
    return Family("f1",
                  Edit(SLEEP= 20),
                  Task("t1"),
                  Task("t2",
                       Trigger("t1 == complete"),
                       Event("a"),
                       Event("b")),
                  Task("t3",
                       Trigger("t2:a")),
                  Task("t4",
                       Trigger("t2 == complete"),
                       Complete("t2:b")))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
errors = defs.check()
assert len(errors) == 0,errors
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # '+='/list style; tasks generated with a comprehension then decorated.
        import os
        def create_family_f1():
            f1 = Family("f1") + Edit(SLEEP=20)
            f1 += [ Task("t{0}".format(i)) for i in range(1,5) ]
            f1.t2 += [ Trigger(["t1"]), Event("a"), Event("b") ]
            f1.t3 += Trigger("t2:a")
            f1.t4 += [ Trigger(["t2"]),Complete("t2:b") ]
            return f1
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs() + (Suite("test") + Edit(ECF_HOME=home) + Edit(ECF_INCLUDE=home))
        defs.test += create_family_f1()
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestMeter(unittest.TestCase):
    """Meter 'progress' on t1 drives triggers of t5-t7; built in three styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f1():
            return Family("f1").add(
                Edit(SLEEP= 20),
                Task("t1").add(Meter("progress", 1, 100, 90)),
                Task("t2").add(Trigger("t1 == complete"),
                               Event("a"),
                               Event("b")),
                Task("t3").add(Trigger("t2:a")),
                Task("t4").add(Trigger("t2 == complete"),
                               Complete("t2:b")),
                Task("t5").add(Trigger("t1:progress ge 30")),
                Task("t6").add(Trigger("t1:progress ge 60")),
                Task("t7").add(Trigger("t1:progress ge 90")))
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: .add() chaining style.
        defs = Defs().add(Suite("test").add(
            Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
            create_family_f1() ))
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Constructor-argument script; triggers checked before saving.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter
def create_family_f1():
    return Family("f1",
                  Edit(SLEEP= 20),
                  Task("t1", Meter("progress", 1, 100, 90)),
                  Task("t2", Trigger("t1 == complete"), Event("a"), Event("b")),
                  Task("t3", Trigger("t2:a")),
                  Task("t4", Trigger("t2 == complete"), Complete("t2:b")),
                  Task("t5", Trigger("t1:progress ge 30")),
                  Task("t6", Trigger("t1:progress ge 60")),
                  Task("t7", Trigger("t1:progress ge 90")))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f1()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
errors = defs.check()
assert len(errors) == 0,errors
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # '+='/comprehension style; tasks generated then decorated one by one.
        import os
        def create_family_f1():
            f1 = Family("f1") + Edit(SLEEP=20)
            f1 += [ Task("t{0}".format(i)) for i in range(1,8)]
            f1.t1 += Meter("progress", 1, 100, 90)
            f1.t2 += [ Trigger(["t1"]), Event("a"), Event("b") ]
            f1.t3 += Trigger("t2:a")
            f1.t4 += [ Trigger(["t2"]),Complete("t2:b") ]
            f1.t5 += Trigger("t1:progress ge 30")
            f1.t6 += Trigger("t1:progress ge 60")
            f1.t7 += Trigger("t1:progress ge 90")
            return f1
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs() + (Suite("test") + Edit(ECF_HOME=home) + Edit(ECF_INCLUDE=home))
        defs.test += create_family_f1()
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestTime(unittest.TestCase):
    """Time/Day/Date attributes on family f2's tasks; built in four styles."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f2():
            f2 = Family("f2")
            f2.add_variable("SLEEP", 20)
            f2.add_task("t1").add_time( "00:30 23:30 00:30" ) # start(hh:mm) end(hh:mm) increment(hh:mm)
            f2.add_task("t2").add_day( "sunday" )
            # for add_date(): day,month,year; here 0 means every month, and every year
            t3 = f2.add_task("t3")
            t3.add_date(1, 0, 0) # day month year, first of every month or every year
            t3.add_time( 12, 0 ) # hour, minutes at 12 o'clock
            f2.add_task("t4").add_time( 0, 2, True ) # hour, minutes, relative to suite start
            # 2 minutes after family f2 start
            f2.add_task("t5").add_time( 0, 2 ) # hour, minutes suite site
            # 2 minutes past midnight
            return f2
        # Reference definition: imperative add_* API.
        defs = Defs()
        suite = defs.add_suite("test")
        suite.add_variable("ECF_HOME", os.path.join(os.getenv("HOME"), "course"))
        suite.add_variable("ECF_INCLUDE", os.path.join(os.getenv("HOME"), "course"))
        #suite.add_family( create_family_f1() )
        suite.add_family( create_family_f2() )
        errors = defs.check()
        assert len(errors) == 0,errors
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Constructor-argument script using string forms of Time/Day/Date.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date
def create_family_f2():
    return Family("f2",
                  Edit(SLEEP=20),
                  Task("t1", Time("00:30 23:30 00:30")), # start(hh:mm) end(hh:mm) increment(hh:mm)
                  Task("t2", Day("sunday")),
                  Task("t3", Date("1.*.*"), Time("12:00")), # Date(day,month,year) - * means every day,month,year
                  Task("t4", Time("+00:02")), # + means realative to suite begin/requeue time
                  Task("t5", Time("00:02"))) # 2 minutes past midnight
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          #create_family_f1(),
          create_family_f2()
          ))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
errors = defs.check()
assert len(errors) == 0,errors
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # '+='/comprehension style.
        import os
        def create_family_f2():
            f1 = Family("f2") + Edit(SLEEP=20)
            f1 += [ Task("t{0}".format(i)) for i in range(1,6) ]
            f1.t1 += Time("00:30 23:30 00:30") # start(hh:mm) end(hh:mm) increment(hh:mm)
            f1.t2 += Day( "sunday" )
            f1.t3 += [ Date("1.*.*"),
                       Time("12:00")]
            f1.t4 += Time("+00:02")
            f1.t5 += Time("00:02")
            return f1
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs() + ( Suite("test") + Edit(ECF_HOME=home) + Edit(ECF_INCLUDE=home))
        defs.test += create_family_f2()
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def test_add(self):
        # .add() chaining style.
        import os
        def create_family_f2():
            return Family("f2").add(
                Edit(SLEEP=20),
                Task("t1").add( Time("00:30 23:30 00:30")),
                Task("t2").add( Day( "sunday" )),
                Task("t3").add( Date("1.*.*"),
                                Time("12:00")),
                Task("t4").add( Time("+00:02")),
                Task("t5").add( Time("00:02")))
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs().add(Suite("test").add(
            Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
            create_family_f2()))
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestCron(unittest.TestCase):
    """Cron attribute (Sunday 22:30) built imperatively vs. Cron(...) keyword form."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_house_keeping():
            cron = Cron()
            cron.set_week_days( [0] )
            cron.set_time_series( "22:30" )
            f2 = Family("house_keeping")
            f2.add_task("clear_log").add_cron(cron)
            return f2
        # Reference definition: imperative add_* API.
        defs = Defs()
        suite = defs.add_suite("test")
        suite.add_variable("ECF_HOME", os.path.join(os.getenv("HOME"), "course"))
        suite.add_variable("ECF_INCLUDE", os.path.join(os.getenv("HOME"), "course"))
        #suite.add_family( create_family_f1() )
        suite.add_family( create_family_house_keeping() )
        errors = defs.check()
        assert len(errors) == 0,errors
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me0(self):
        # Script using the Cron("22:30",days_of_week=[0]) constructor form.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Cron
def create_family_house_keeping():
    return Family("house_keeping",
                  Task("clear_log",
                       Cron("22:30",days_of_week=[0])))
print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_house_keeping()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
errors = defs.check()
assert len(errors) == 0,errors
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def tearDown(self):
        do_tear_down()
class TestIndentation(unittest.TestCase):
    """Context-manager ('with') construction style vs. pure constructor nesting."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        #version = sys.version_info
        #if version[1] < 7:
        #    #print "This example requires python version 2.7, but found : " + str(version)
        #    exit(0)
        # Reference definition built with nested 'with' blocks.
        with Defs() as defs:
            with defs.add_suite("test") as suite:
                suite += Edit(ECF_HOME=os.path.join(os.getenv("HOME"), "course"))
                suite += Edit(ECF_INCLUDE =os.path.join(os.getenv("HOME"), "course"))
                with suite.add_family("f1") as f1:
                    f1 += Edit(SLEEP=20)
                    f1 += Task("t1", Meter("progress", 1, 100, 90))
                    f1 += Task("t2", Trigger("t1 == complete"), Event("a"), Event("b"))
                    f1 += Task("t3", Trigger("t2:a"))
                    f1 += Task("t4", Trigger("t2 == complete"), Complete("t2:b"))
                    f1 += Task("t5", Trigger("t1:progress ge 30"))
                    f1 += Task("t6", Trigger("t1:progress ge 60"))
                    f1 += Task("t7", Trigger("t1:progress ge 90"))
                with suite.add_family("f2") as f2:
                    f2 += Edit(SLEEP=20)
                    f2 += Task("t1", Time("00:30 23:30 00:30"))
                    f2 += Task("t2", Day("sunday"))
                    f2 += Task("t3", Date(1, 0, 0), Time(12, 0))
                    f2 += Task("t4", Time(0, 2, True))
                    f2 += Task("t5", Time(0, 2))
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_preferred(self):
        # Fully-nested constructor style compiled and executed as a script.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          Family("f1",
                 Edit(SLEEP=20),
                 Task("t1", Meter("progress", 1, 100, 90)),
                 Task("t2", Trigger("t1 == complete"),Event("a"),Event("b")),
                 Task("t3", Trigger("t2:a")),
                 Task("t4", Trigger("t2 == complete"), Complete("t2:b")),
                 Task("t5", Trigger("t1:progress ge 30")),
                 Task("t6", Trigger("t1:progress ge 60")),
                 Task("t7", Trigger("t1:progress ge 90"))),
          Family("f2",
                 Edit(SLEEP=20),
                 Task("t1", Time( "00:30 23:30 00:30" )),
                 Task("t2", Day( "sunday" )),
                 Task("t3", Date("1.*.*"), Time("12:00")),
                 Task("t4", Time("+00:02")),
                 Task("t5", Time("00:02")))))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
assert len(defs.check()) == 0, defs.check()
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def test_me(self):
        # 'with' blocks combined with comprehensions and per-node '+='.
        import os
        home = os.path.join(os.getenv("HOME"), "course")
        with Defs() as defs:
            with defs.add_suite("test") as suite:
                suite += [ Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home) ]
                with suite.add_family("f1") as f1:
                    f1 += [ Task("t{0}".format(i)) for i in range(1,8)]
                    f1 += Edit(SLEEP=20)
                    f1.t1 += Meter("progress", 1, 100, 90)
                    f1.t2 += [ Trigger(["t1"]), Event("a"), Event("b") ]
                    f1.t3 += Trigger("t2:a")
                    f1.t4 += [ Trigger(["t2"]), Complete("t2:b") ]
                    f1.t5 += Trigger("t1:progress ge 30")
                    f1.t6 += Trigger("t1:progress ge 60")
                    f1.t7 += Trigger("t1:progress ge 90")
                with suite.add_family("f2") as f2:
                    f2 += [ Edit(SLEEP=20),[ Task("t{0}".format(i)) for i in range(1,6)] ]
                    f2.t1 += Time( "00:30 23:30 00:30" )
                    f2.t2 += Day( "sunday" )
                    f2.t3 += [ Date("1.*.*"), Time("12:00") ]
                    f2.t4 += Time("+00:02")
                    f2.t5 += Time("00:02")
        defs.save_as_defs(test_def_file())
        self.assertEqual(self.defs,defs,"defs not equal:\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        do_tear_down()
class TestLabel(unittest.TestCase):
    """Label attribute on task t1, imperative vs. constructor style."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f3():
            fam = Family('f3')
            fam.add_task('t1').add_label("info","")
            return fam
        home = os.path.join(os.getenv("HOME"), "course")
        # Reference definition: imperative add_* API.
        defs = Defs()
        suite = defs.add_suite("test")
        suite.add_variable("ECF_HOME", home)
        suite.add_variable("ECF_INCLUDE", home)
        suite.add_family( create_family_f3() )
        errors = defs.check()
        assert len(errors) == 0,errors
        defs.save_as_defs(test_def_file())
        # Fixed: removed stray trailing semicolon.
        self.defs = defs

    def test_me(self):
        # Constructor-argument script with Label("info","").
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date,Label
def create_family_f3():
    return Family("f3",
                  Task("t1",
                       Label("info","")))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f3()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
assert len(defs.check()) == 0, defs.check()
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def tearDown(self):
        do_tear_down()
class TestRepeat(unittest.TestCase):
    """RepeatString/RepeatInteger/RepeatDate nesting, with real job creation
    against a generated t1.ecf script under a per-process ECF_HOME."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        self.ecf_home = File.build_dir() + "/Pyext/test/data/course_py" + str(sys.version_info[0]) + "_" + str(os.getpid()) # allow parallel runs
        self.ecf_includes = File.source_dir() + "/Pyext/test/data/includes"
        #print("self.ecf_home ",self.ecf_home )
        # Fixed: catch OSError only (directory may already exist) instead of a
        # bare 'except' that would also hide unrelated failures.
        try: os.makedirs( self.ecf_home + "/test/f4/f5")
        except OSError: pass
        t1_ecf = '%include <head.h>\n'
        t1_ecf += 'ecflow_client --label=info "My name is %NAME% My value is %VALUE% My date is %DATE%\n'
        t1_ecf += 'ecflow_client --label=date "year:%DATE_YYYY% month:%DATE_MM% day of month:%DATE_DD% day of week:%DATE_DOW%"\n'
        t1_ecf += 'sleep %SLEEP%\n'
        t1_ecf += '%include <tail.h>\n'
        self.t1_ecf_path = self.ecf_home + "/test/f4/f5/t1.ecf"
        #print("self.t1_ecf_path",self.t1_ecf_path)
        # Fixed: write via a context manager (guarantees close) and avoid
        # shadowing the historical builtin name 'file'.
        with open(self.t1_ecf_path, "w") as ecf_file:
            ecf_file.write(t1_ecf)

    def tearDown(self):
        unittest.TestCase.tearDown(self)
        do_tear_down()
        os.remove(self.t1_ecf_path)
        shutil.rmtree(self.ecf_home, ignore_errors=True)

    def test_repeat0(self):
        # Nested repeats in constructor style; job creation must succeed
        # against the t1.ecf script written in setUp.
        def create_family_f4():
            return Family("f4",
                          Edit(SLEEP=2),
                          RepeatString("NAME", ["a", "b", "c", "d", "e", "f" ]),
                          Family("f5",
                                 RepeatInteger("VALUE", 1, 10),
                                 Task("t1",
                                      RepeatDate("DATE", 20101230, 20110105),
                                      Label("info", ""),
                                      Label("date",""))))
        defs = Defs(
            Suite("test",
                  Edit(ECF_HOME=self.ecf_home, ECF_INCLUDE=self.ecf_includes ),
                  create_family_f4()))
        #print(defs)
        result = defs.check_job_creation()
        self.assertEqual(result, "", "expected job creation to succeed " + result)
        # Also verify the equivalent standalone script compiles and runs.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date,Label, \
RepeatString,RepeatInteger,RepeatDate
def create_family_f4():
    return Family("f4",
                  Edit(SLEEP=2),
                  RepeatString("NAME", ["a", "b", "c", "d", "e", "f" ]),
                  Family("f5",
                         RepeatInteger("VALUE", 1, 10),
                         Task("t1",
                              RepeatDate("DATE", 20101230, 20110105),
                              Label("info",""),
                              Label("date",""))))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f4()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
assert len(defs.check()) == 0,defs.check()
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)

    def test_repeat(self):
        # Same suite fully inlined in one constructor expression.
        #print("Creating suite definition")
        defs = Defs(
            Suite("test",
                  Edit(ECF_HOME=self.ecf_home,ECF_INCLUDE=self.ecf_includes),
                  Family("f4",
                         Edit(SLEEP=2),
                         RepeatString("NAME", ["a", "b", "c", "d", "e", "f" ]),
                         Family("f5",
                                RepeatInteger("VALUE", 1, 10),
                                Task("t1",
                                     RepeatDate("DATE", 20101230, 20110105),
                                     Label("info", ""),
                                     Label("date",""))))))
        #print(defs)
        result = defs.check_job_creation()
        self.assertEqual(result, "", "expected job creation to succeed " + result)
        defs.save_as_defs(test_def_file())

    def test_repeat3(self):
        # Incremental '+=' style, including a dict for the suite variables.
        defs = Defs().add( Suite("test") )
        defs.test += [ { "ECF_INCLUDE":self.ecf_includes, "ECF_HOME":self.ecf_home },
                       Family("f4") ]
        defs.test.f4 += [ Edit(SLEEP=2),
                          RepeatString("NAME", ["a", "b", "c", "d", "e", "f" ]),
                          Family("f5") ]
        defs.test.f4.f5 += [ RepeatInteger("VALUE", 1, 10),
                             Task("t1")]
        defs.test.f4.f5.t1 += [ RepeatDate("DATE", 20101230, 20110105),
                                Label("info", ""),
                                Label("date","") ]
        #print(defs)
        result = defs.check_job_creation()
        self.assertEqual(result, "", "expected job creation to succeed " + result)
        defs.save_as_defs(test_def_file())
class TestLimit(unittest.TestCase):
    """Limit l1 on the suite consumed via InLimit on family f5 (9 tasks)."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f5() :
            f5 = Family("f5")
            f5.add_inlimit("l1")
            f5.add_variable("SLEEP", 20)
            for i in range(1, 10):
                f5.add_task( "t" + str(i) )
            return f5
        # Reference definition: imperative add_* API.
        defs = Defs()
        suite = defs.add_suite("test")
        suite.add_variable("ECF_HOME", os.path.join(os.getenv("HOME"), "course"))
        suite.add_variable("ECF_INCLUDE", os.path.join(os.getenv("HOME"), "course"))
        suite.add_limit("l1", 2)
        suite.add_family( create_family_f5() )
        defs.save_as_defs(test_def_file())
        self.defs = defs

    def test_me0(self):
        # NOTE(review): this script calls test_def_file(), a name from THIS
        # module — test_compile presumably executes the text with the module
        # globals available; verify against test_compile's implementation.
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date,Label, \
RepeatString,RepeatInteger,RepeatDate,InLimit,Limit
def create_family_f5() :
    return Family("f5",
                  InLimit("l1"),
                  Edit(SLEEP=20),
                  [ Task('t{0}'.format(i)) for i in range(1,10) ] )
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"),"course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          Limit("l1",2),
          create_family_f5()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
assert len(defs.check()) == 0,defs.check()
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs(test_def_file())
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def tearDown(self):
        do_tear_down()
class TestLateAttribute(unittest.TestCase):
    """Late attribute: flag t1 as late if it runs longer than one minute."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_family_f6() :
            f6 = Family("f6")
            f6.add_variable("SLEEP", 120)
            t1 = f6.add_task("t1")
            late = Late()
            late.complete(0,1,True) # hour,minute,relative, set late flag if task take longer than a minute
            t1.add_late(late)
            return f6
        # Reference definition: imperative add_* API.
        defs = Defs()
        suite = defs.add_suite("test")
        suite.add_variable("ECF_HOME", os.path.join(os.getenv("HOME"), "course"))
        suite.add_variable("ECF_INCLUDE", os.path.join(os.getenv("HOME"), "course"))
        suite.add_family( create_family_f6() )
        #print(defs)
        assert len(defs.check()) == 0, defs.check()
        defs.save_as_defs(test_def_file())
        self.defs = defs

    def test_me0(self):
        # Same Late attribute expressed with the keyword form Late(complete='+00:01').
        text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date,Label, \
RepeatString,RepeatInteger,RepeatDate,InLimit,Limit,Late
def create_family_f6():
    # set late flag if task t1 takes longer than a minute
    return Family("f6",
                  Edit(SLEEP=120),
                  Task("t1",
                       Late(complete='+00:01')))
#xx print("Creating suite definition")
home = os.path.join(os.getenv("HOME"),"course")
defs = Defs(
    Suite("test",
          Edit(ECF_HOME=home),Edit(ECF_INCLUDE=home),
          create_family_f6()))
#xx print(defs)
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
assert len(defs.check()) == 0,defs.check()
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
        test_compile(text)
        test_defs = Defs(test_def_file())
        self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))

    def tearDown(self):
        do_tear_down()
class TestPythonScripting(unittest.TestCase):
    """Six families x five tasks built imperatively vs. nested comprehensions."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        # Reference: explicit loops over family/task names.
        def create_suite(name) :
            suite = Suite(name)
            for i in range(1, 7) :
                fam = suite.add_family("f" + str(i))
                for t in ( "a", "b", "c", "d", "e" ) :
                    fam.add_task(t)
            return suite
        self.defs = Defs(create_suite('s1'))

    def test_me(self):
        # Same structure via nested list comprehensions passed to constructors.
        def create_suite(name) :
            return Suite(name,
                         [ Family("f{0}".format(i),
                                  [ Task(t) for t in ( "a", "b", "c", "d", "e") ])
                           for i in range(1,7) ])
        defs = Defs(create_suite('s1'))
        self.assertEqual(self.defs,defs,"defs not equal\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        Ecf.set_debug_equality(False)
class TestPythonScripting2(unittest.TestCase):
    """Sequential families: each family triggers on completion of the previous."""

    def setUp(self):
        Ecf.set_debug_equality(True)
        def create_sequential_suite(name) :
            suite = Suite(name)
            for i in range(1, 7) :
                fam = suite.add_family("f" + str(i))
                if i != 1:
                    fam.add_trigger("f" + str(i-1) + " == complete") # or fam.add_trigger( "f%d == complete" % (i-1) )
                for t in ( "a", "b", "c", "d", "e" ) :
                    fam.add_task(t)
            return suite
        self.defs = Defs(create_sequential_suite('s1'))

    def test_me(self):
        # Identical structure, adding the trigger via '+=' instead of add_trigger.
        def create_sequential_suite(name) :
            suite = Suite(name)
            for i in range(1, 7) :
                fam = suite.add_family("f" + str(i))
                if i != 1:
                    fam += Trigger("f" + str(i-1) + " == complete") # or fam.add_trigger( "f%d == complete" % (i-1) )
                for t in ( "a", "b", "c", "d", "e" ) :
                    fam.add_task(t)
            return suite
        defs = Defs(create_sequential_suite ('s1'))
        self.assertEqual(self.defs,defs,"defs not equal\n" + str(self.defs) + "\n" + str(defs))

    def tearDown(self):
        Ecf.set_debug_equality(False)
class TestDataAquistionSolution(unittest.TestCase):
def setUp(self):
Ecf.set_debug_equality(True)
defs = Defs()
suite = defs.add_suite("data_aquisition")
suite.add_repeat( RepeatDay(1) )
suite.add_variable("ECF_HOME", os.getenv("HOME") + "/course")
suite.add_variable("ECF_INCLUDE", os.getenv("HOME") + "/course")
suite.add_variable("ECF_FILES", os.getenv("HOME") + "/course/data")
suite.add_variable("SLEEP","2")
for city in ( "Exeter", "Toulouse", "Offenbach", "Washington", "Tokyo", "Melbourne", "Montreal" ) :
fcity = suite.add_family(city)
fcity.add_task("archive")
for obs_type in ( "observations", "fields", "images" ):
type_fam = fcity.add_family(obs_type)
if city in ("Exeter", "Toulouse", "Offenbach"): type_fam.add_time("00:00 23:00 01:00")
if city in ("Washington") : type_fam.add_time("00:00 23:00 03:00")
if city in ("Tokyo") : type_fam.add_time("12:00")
if city in ("Melbourne") : type_fam.add_day( "monday" )
if city in ("Montreal") : type_fam.add_date(1, 0, 0)
type_fam.add_task("get")
type_fam.add_task("process").add_trigger("get eq complete")
type_fam.add_task("store").add_trigger("get eq complete")
#print(defs)
self.defs = defs
def test_me0(self):
    """Compile an ecflow tutorial script written in the functional
    Defs(...) style and check that the definition it saves equals the
    one built imperatively in setUp.

    NOTE(review): indentation (including inside the embedded script
    string) was reconstructed from a whitespace-mangled source — confirm
    against the original file before relying on exact bytes.
    """
    # The script below is compiled and executed by test_compile(); it must
    # therefore be a valid stand-alone Python program.
    text = """import os
from ecflow import Defs,Suite,Family,Task,Edit,Trigger,Complete,Event,Meter,Time,Day,Date,Label, \
    RepeatString,RepeatInteger,RepeatDate
home = os.path.join(os.getenv("HOME"), "course")
defs = Defs(
    Suite("data_aquisition",
        RepeatDay(1),
        Edit(ECF_HOME=home),
        Edit(ECF_INCLUDE=home),
        Edit(ECF_FILES=home + "/data"),
        Edit(SLEEP=2)))
for city in ( "Exeter", "Toulouse", "Offenbach", "Washington", "Tokyo", "Melbourne", "Montreal" ) :
    fcity = defs.data_aquisition.add_family(city)
    fcity += Task("archive")
    for obs_type in ( "observations", "fields", "images" ):
        type_fam = fcity.add_family(obs_type)
        if city in ("Exeter", "Toulouse", "Offenbach"): type_fam + Time("00:00 23:00 01:00")
        if city in ("Washington") : type_fam + Time("00:00 23:00 03:00")
        if city in ("Tokyo") : type_fam + Time("12:00")
        if city in ("Melbourne") : type_fam + Day( "monday" )
        if city in ("Montreal") : type_fam + Date(1, 0, 0)
        type_fam + Task("get") + Task("process",Trigger("get eq complete")) + Task("store",Trigger("get eq complete"))
#xx print("Checking job creation: .ecf -> .job0")
#print(defs.check_job_creation())
#xx print("Checking trigger expressions")
assert len(defs.check()) == 0, defs.check()
#xx print("Saving definition to file 'test.def'")
defs.save_as_defs('test.def')
"""
    test_compile(text)
    # Reload what the script saved and compare against the reference defs.
    test_defs = Defs(test_def_file())
    self.assertEqual(test_defs,self.defs,"defs not equal\n" + str(test_defs) + "\n" + str(self.defs))
def test_me(self):
    """Build the same data_aquisition suite with the mixed +=/+ ecflow API
    and check it equals the reference definition built in setUp.
    """
    import os
    home = os.path.join(os.getenv("HOME"), "course")
    defs = Defs() + Suite("data_aquisition").add(
        RepeatDay(1),
        Edit(ECF_HOME=home),
        Edit(ECF_INCLUDE=home),
        Edit(ECF_FILES=home + "/data"),
        Edit(SLEEP=2))
    for city in ( "Exeter", "Toulouse", "Offenbach", "Washington", "Tokyo", "Melbourne", "Montreal" ) :
        fcity = defs.data_aquisition.add_family(city)
        fcity += Task("archive")
        for obs_type in ( "observations", "fields", "images" ):
            type_fam = fcity.add_family(obs_type)
            # NOTE(review): `node + attribute` presumably attaches the
            # attribute in place (ecflow operator overload) — the return
            # value is deliberately discarded here; verify against the
            # ecflow Python API docs.
            if city in ("Exeter", "Toulouse", "Offenbach"): type_fam + Time("00:00 23:00 01:00")
            if city in ("Washington") : type_fam + Time("00:00 23:00 03:00")
            if city in ("Tokyo") : type_fam + Time("12:00")
            if city in ("Melbourne") : type_fam + Day( "monday" )
            if city in ("Montreal") : type_fam + Date(1, 0, 0)
            type_fam += [ Task("get"),Task("process"),Task("store") ]
            type_fam.process += Trigger("get eq complete")
            type_fam.store += Trigger("get eq complete")
    self.assertEqual(self.defs, defs, "defs not equal")
def tearDown(self):
    # Restore the default (quiet) Defs equality comparison enabled in setUp.
    Ecf.set_debug_equality(False)
class TestOperationalSolution(unittest.TestCase):
    """Checks two equivalent ways of building the ecflow tutorial
    'operation_suite': the imperative add_* API (setUp) and the
    declarative Defs(...) constructor style (test_sol1).

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; confirm nesting against the original file.
    """
    def setUp(self):
        # Enable verbose diffs when two Defs objects compare unequal.
        Ecf.set_debug_equality(True)
        defs = Defs()
        suite = defs.add_suite("operation_suite")
        suite.add_repeat( RepeatDay(1) )
        suite.add_variable("ECF_HOME", os.getenv("HOME") + "/course")
        suite.add_variable("ECF_INCLUDE", os.getenv("HOME") + "/course")
        suite.add_variable("ECF_FILES", os.getenv("HOME") + "/course/oper")
        # Defines the triggers for the first cycle; "1" marks the very
        # first cycle, which gets no trigger attached.
        cycle_triggers = "1"
        for cycle in ( "00" , "12" ):
            if cycle == "12" :
                last_step = 240
            else:
                last_step = 24
            fcycle_fam = suite.add_family(cycle)
            fcycle_fam.add_variable("CYCLE", cycle)
            fcycle_fam.add_variable("LAST_STEP", last_step)
            if cycle_triggers != "1" :
                fcycle_fam.add_trigger(cycle_triggers)
            analysis_fam = fcycle_fam.add_family("analysis")
            analysis_fam.add_task("get_observations")
            analysis_fam.add_task("run_analysis").add_trigger("get_observations == complete")
            analysis_fam.add_task("post_processing").add_trigger("run_analysis == complete")
            forecast_fam = fcycle_fam.add_family("forecast")
            forecast_fam.add_trigger("analysis == complete")
            forecast_fam.add_task("get_input_data")
            run_forecast_task = forecast_fam.add_task("run_forecast")
            run_forecast_task.add_trigger("get_input_data == complete")
            run_forecast_task.add_meter("step", 0, last_step, last_step)
            archive_fam = fcycle_fam.add_family("archive")
            fam_analsis = archive_fam.add_family("analysis")
            fam_analsis.add_variable("TYPE","analysis")
            fam_analsis.add_variable("STEP","0")
            fam_analsis.add_trigger("../analysis/run_analysis == complete")
            fam_analsis.add_task("save")
            # One archiving family per 6-hour forecast step, triggered once
            # the forecast meter reaches that step.
            for i in range(6, last_step+1, 6):
                step_fam = fam_analsis.add_family("step_" + str(i))
                step_fam.add_variable("TYPE", "forecast")
                step_fam.add_variable("STEP", i)
                step_fam.add_trigger("../../forecast/run_forecast:step ge " + str(i))
                step_fam.add_task("save")
            # Defines the triggers for the next cycle
            cycle_triggers = "./" + cycle + " == complete"
        #print(defs)
        self.defs = defs
    def test_sol1(self):
        """Build the identical suite declaratively and compare."""
        import os
        home = os.getenv("HOME") + "/course"
        cycle_triggers = None
        last_step = { "12": 240,
                      "00": 24, }
        def cycle_trigger(cycle):
            # Only the second ("12") cycle waits on the first one.
            if cycle == "12": return Trigger("./00 == complete")
            return None
        defs = Defs(
            Suite("operation_suite",
                RepeatDay(1),
                Edit(ECF_HOME= home),
                Edit(ECF_INCLUDE= home),
                Edit(ECF_FILES= home + "/oper"),
                [ Family(cycle,
                    Edit(CYCLE=cycle),
                    Edit(LAST_STEP=last_step[cycle]),
                    cycle_trigger(cycle),
                    Family("analysis",
                        Task("get_observations"),
                        Task("run_analysis", Trigger(["get_observations"])),
                        Task("post_processing", Trigger(["run_analysis"]))
                        ),
                    Family("forecast",
                        Trigger(["analysis"]),
                        Task("get_input_data"),
                        Task("run_forecast",
                            Trigger(["get_input_data"]),
                            Meter("step", 0, last_step[cycle])),
                        ),
                    Family("archive",
                        Family("analysis",
                            Edit(TYPE="analysis"),
                            Edit(STEP=0),
                            Trigger("../analysis/run_analysis == complete"),
                            Task("save"),
                            [ Family("step_{0}".format(i),
                                Edit(TYPE="forecast"),
                                Edit(STEP=i),
                                Trigger("../../forecast/run_forecast:step ge {0}".format(i)),
                                Task("save"))
                              for i in range(6, last_step[cycle]+1, 6) ]
                            )
                        )
                    ) for cycle in ( "00" , "12" ) ]
                )
            )
        #print(defs)
        self.assertEqual(self.defs, defs, "defs not equal")
    def tearDown(self):
        # Restore the default (quiet) equality comparison.
        Ecf.set_debug_equality(False)
class TestBackArchivingSolution(unittest.TestCase):
    """Checks the tutorial 'back_archiving' suite built imperatively
    (setUp) against the declarative Defs(...) construction (test_sol1).

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; confirm against the original file.
    """
    def setUp(self):
        # Enable verbose diffs when two Defs objects compare unequal.
        Ecf.set_debug_equality(True)
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs()
        suite = defs.add_suite("back_archiving")
        suite.add_repeat( RepeatDay(1) )
        suite.add_variable("ECF_HOME", home)
        suite.add_variable("ECF_INCLUDE", home)
        suite.add_variable("ECF_FILES", home + "/back")
        suite.add_variable("SLEEP", "2")
        # At most 2 concurrent tasks may hold the "access" limit.
        suite.add_limit("access", 2)
        for kind in ( "analysis", "forecast", "climatology", "observations", "images" ):
            find_fam = suite.add_family(kind)
            find_fam.add_repeat( RepeatDate("DATE", 19900101, 19950712) )
            find_fam.add_variable("KIND", kind)
            find_fam.add_task("get_old").add_inlimit("access")
            find_fam.add_task("convert").add_trigger("get_old == complete")
            find_fam.add_task("save_new").add_trigger("convert == complete")
        #print(defs)
        self.defs = defs
    def test_sol1(self):
        """Declarative construction must equal the imperative one."""
        import os
        home = os.path.join(os.getenv("HOME"), "course")
        defs = Defs(
            Suite("back_archiving",
                RepeatDay(1),
                Edit(ECF_HOME=home),
                Edit(ECF_INCLUDE=home),
                Edit(ECF_FILES= home + "/back"),
                Edit(SLEEP=2),
                Limit("access", 2),
                [ Family(kind,
                    RepeatDate( "DATE", 19900101, 19950712 ),
                    Edit(KIND=kind),
                    Task("get_old", InLimit("access")),
                    Task("convert", Trigger("get_old == complete")),
                    Task("save_new", Trigger("convert == complete")))
                  for kind in ( "analysis", "forecast", "climatology", "observations", "images" ) ] ))
        #print(defs)
        self.assertEqual(self.defs, defs, "defs not equal")
    def tearDown(self):
        # Restore the default (quiet) equality comparison.
        Ecf.set_debug_equality(False)
if __name__ == "__main__":
    # unittest.main() calls sys.exit() after running the tests, so nothing
    # after it ever executes. The old `print("All Tests pass")` on the next
    # line was unreachable dead code and has been removed.
    unittest.main()
|
## ===============================================================================
## Authors: AFRL/RQQA
## Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
##
## Copyright (c) 2017 Government of the United State of America, as represented by
## the Secretary of the Air Force. No copyright is claimed in the United States under
## Title 17, U.S. Code. All Other Rights Reserved.
## ===============================================================================
## This file was auto-created by LmcpGen. Modifications will be overwritten.
# Public API of this auto-generated LMCP module (IMPACT message series).
__all__ = [ "SeriesEnum", "PowerConfiguration", "RadioConfiguration", "RadioTowerConfiguration", "RadioState", "RadioTowerState", "ImpactPayloadConfiguration", "DeployImpactPayload", "PowerPlantState", "BatchRoutePlanRequest", "BatchRoutePlanResponse", "TaskTimingPair", "BatchSummaryRequest", "BatchSummaryResponse", "TaskSummary", "VehicleSummary", "ImpactHeartbeat", "ImpactComponentJoin", "ImpactComponentLeave", "SpeedAltPair", "ImpactAutomationRequest", "ImpactAutomationResponse", "PointOfInterest", "LineOfInterest", "AreaOfInterest", "ImpactPointSearchTask", "PatternSearchTask", "AngledAreaSearchTask", "ImpactLineSearchTask", "WatchTask", "MultiVehicleWatchTask", "CommRelayTask", "CordonTask", "BlockadeTask", "EscortTask", "ConfigurationRequest", "WaterReport", "WaterZone", "AreaSearchPattern", "PowerPlant", "AreaActionOptions", "ImpactPayloadType", ]
# Name of the LMCP message series this module belongs to.
SERIES_NAME = "IMPACT"
#Series Name turned into a long for quick comparisons.
SERIES_NAME_ID = 5281966179208134656
# Schema version of the IMPACT series.
SERIES_VERSION = 13
|
from datetime import timedelta
import time
import unittest
from cockroach import util
class RetryTest(unittest.TestCase):
    """Tests for cockroach.util.retry_with_backoff."""

    def test_retry(self):
        """The backoff loop stops as soon as the callback returns BREAK."""
        opts = util.RetryOptions(timedelta(microseconds=10), timedelta(seconds=1), 2, 10)
        retries = [0]
        def fn():
            retries[0] += 1
            if retries[0] >= 3:
                return util.RetryStatus.BREAK
            return util.RetryStatus.CONTINUE
        util.retry_with_backoff(opts, fn)
        self.assertEqual(retries[0], 3)

    def test_retry_exceeds_max_backoff(self):
        """Individual sleeps are capped at max_backoff so many attempts stay fast.

        BUG FIX: this method was previously named
        'texst_retry_exceeds_max_backoff', so the unittest runner silently
        never executed it. Renamed so it is discovered again.
        """
        opts = util.RetryOptions(timedelta(microseconds=10), timedelta(microseconds=10), 1000, 3)
        start = time.time()
        with self.assertRaises(util.RetryMaxAttemptsError):
            util.retry_with_backoff(opts, lambda: util.RetryStatus.CONTINUE)
        end = time.time()
        self.assertLess(end - start, 1.0,
                        "max backoff not respected: 1000 attempts took %ss" % (end - start))

    def test_retry_exceeds_max_attempts(self):
        """CONTINUE forever raises RetryMaxAttemptsError after max attempts."""
        opts = util.RetryOptions(timedelta(microseconds=10), timedelta(seconds=1), 2, 3)
        retries = [0]
        def fn():
            retries[0] += 1
            return util.RetryStatus.CONTINUE
        with self.assertRaises(util.RetryMaxAttemptsError):
            util.retry_with_backoff(opts, fn)
        self.assertEqual(retries[0], 3)

    def test_retry_function_raises_error(self):
        """Exceptions raised by the callback propagate to the caller."""
        opts = util.RetryOptions(timedelta(microseconds=10), timedelta(seconds=1), 2)
        with self.assertRaises(ZeroDivisionError):
            util.retry_with_backoff(opts, lambda: 1/0)

    def test_retry_reset(self):
        """RESET restarts the attempt budget, so we can exceed max attempts."""
        opts = util.RetryOptions(timedelta(microseconds=10), timedelta(seconds=1), 2, 1)
        # The loop allows only 1 retry, but the callback returns RESET on the
        # first call (restarting the budget) and BREAK on the second — so we
        # reach 2 calls despite the 1-retry limit.
        count = [0]
        def fn():
            count[0] += 1
            if count[0] == 2:
                return util.RetryStatus.BREAK
            return util.RetryStatus.RESET
        util.retry_with_backoff(opts, fn)
        self.assertEqual(count[0], 2)
|
"""
Generator based data loader for Cloud38 and L8CCA clouds segmentation datasets.
If you plan on using this implementation, please cite our work:
@INPROCEEDINGS{Grabowski2021IGARSS,
author={Grabowski, Bartosz and Ziaja, Maciej and Kawulok, Michal
and Nalepa, Jakub},
booktitle={IGARSS 2021 - 2021 IEEE International Geoscience
and Remote Sensing Symposium},
title={Towards Robust Cloud Detection in
Satellite Images Using U-Nets},
year={2021},
note={in press}}
"""
import numpy as np
from einops import rearrange
from matplotlib import pyplot as plt
from pathlib import Path
from tensorflow import keras
from typing import Dict, List, Tuple
from cloud_detection.utils import (
pad, open_as_array, load_38cloud_gt, load_l8cca_gt,
strip_nir, load_image_paths
)
class DG_38Cloud(keras.utils.Sequence):
    """
    Data generator for Cloud38 clouds segmentation dataset.
    Works with Keras generators.
    """
    def __init__(
        self,
        files: List[Dict[str, Path]],
        batch_size: int,
        balance_classes: bool = False,
        balance_snow: bool = False,
        dim: Tuple[int, int] = (384, 384),
        shuffle: bool = True,
        with_gt: bool = True,
    ):
        """
        Prepare generator and init paths to files containing image channels.

        :param files: List of dicts containing paths to rgb channels of each
                      image in dataset.
        :param batch_size: size of generated batches, only one batch is loaded
                           to memory at a time.
        :param balance_classes: if True balance classes.
        :param balance_snow: if True balances patches with snow and clouds.
        :param dim: Tuple with x, y image patches dimensions.
        :param shuffle: if True shuffles dataset before training
                        and on each epoch end.
        :param with_gt: if True returns y along with x.
        """
        self._batch_size: int = batch_size
        self._dim: Tuple[int, int] = dim
        self._shuffle: bool = shuffle
        self._with_gt: bool = with_gt
        self._balance_snow: bool = balance_snow
        self._balance_classes: bool = balance_classes
        self.n_bands: int = len(files[0]) - 1  # -1, because one channel is GT
        self._files: List[Dict[str, Path]] = files
        self._file_indexes = np.arange(len(self._files))
        if self._balance_classes:
            self._balance_file_indexes()
        if self._balance_snow:
            self._balance_snow_indexes()
        if self._shuffle:
            np.random.shuffle(self._file_indexes)

    def _perform_balancing(self, labels: List[int]):
        """
        Perform balancing on given images.

        :param labels: List of pseudo-labels for indexing for each patch,
                       either 0 or 1. The smaller group will be resampled
                       (with replacement) to match the size of the bigger group.
        """
        pos_idx = self._file_indexes[np.array(labels, dtype=bool)]
        neg_idx = self._file_indexes[~np.array(labels, dtype=bool)]
        if len(pos_idx) < len(neg_idx):
            resampled_idx = np.random.choice(pos_idx, len(neg_idx))
            self._file_indexes = np.concatenate(
                [neg_idx, resampled_idx], axis=0)
        elif len(pos_idx) > len(neg_idx):
            resampled_idx = np.random.choice(neg_idx, len(pos_idx))
            self._file_indexes = np.concatenate(
                [pos_idx, resampled_idx], axis=0)
        # Keep indexes sorted so ordering is deterministic before shuffling.
        self._file_indexes = np.sort(self._file_indexes)

    def _balance_file_indexes(self):
        """ Upsamples the file indexes of the smaller class. """
        labels = self._get_labels_for_balancing()
        self._perform_balancing(labels)

    def _balance_snow_indexes(self):
        """ Upsamples the file indexes with snow and clouds. """
        labels = self._get_labels_for_snow_balancing()
        self._perform_balancing(labels)

    def _get_labels_for_snow_balancing(
        self,
        brightness_thr: float = 0.4,
        frequency_thr: float = 0.1
    ) -> List[int]:
        """
        Returns the pseudo-labels for each patch. Pseudo-label being
        1 if certain percent of pixels in patch are above brightness threshold,
        and 0 otherwise.

        :param brightness_thr: brightness threshold of the pixel
                               (in relation to brightest pixel in patch)
                               to classify it as snow.
        :param frequency_thr: frequency threshold of snow pixels to
                              classify patch as snowy.
        :return: list of labels (0 - not snowy, 1 - snowy).
        """
        # FIX: removed leftover debug `print(len(self._files))`.
        labels = []
        for file_ in self._files:
            img = open_as_array(file_)
            if (np.count_nonzero(img > brightness_thr) / img.size) > \
                    frequency_thr:
                labels.append(1)
            else:
                labels.append(0)
        print("Snowy pseudo-labels number:", np.count_nonzero(labels))
        return labels

    def _get_labels_for_balancing(
        self,
        min_prop: float = 0.1,
        max_prop: float = 0.9
    ) -> List[int]:
        """
        Returns the pseudo-labels for each patch. Pseudo-label being
        1 if clouds proportion between min_prop and max_prop, and 0 otherwise.

        :param min_prop: min proportion of clouds to classify into class "1".
        :param max_prop: max proportion of clouds to classify into class "1".
        :return: list of pseudo-labels.
        """
        labels = []
        for file_ in self._files:
            gt = load_38cloud_gt(file_)
            clouds_prop = np.count_nonzero(gt) / np.prod(self._dim)
            if clouds_prop > min_prop and clouds_prop < max_prop:
                labels.append(1)
            else:
                labels.append(0)
        return labels

    # FIX: annotation was `np.arange` (a function, not a type); the argument
    # is an index array, i.e. np.ndarray.
    def _data_generation(self, file_indexes_to_gen: np.ndarray) -> Tuple:
        """
        Generates data for the given indexes.

        :param file_indexes_to_gen: Sequence of indexes of files from which
                                    images should be loaded.
        :return: (x, y) (or (x, None) if with_gt is False) data for one batch,
                 where x is set of RGB + nir images and y is set of
                 corresponding cloud masks.
        """
        x = np.empty((len(file_indexes_to_gen), *self._dim, 4))
        if self._with_gt:
            y = np.empty((len(file_indexes_to_gen), *self._dim, 1))
        else:
            y = None
        for i, file_index in enumerate(file_indexes_to_gen):
            x[i] = open_as_array(self._files[file_index])
            if self._with_gt:
                y[i] = load_38cloud_gt(self._files[file_index])
        return x, y

    def on_epoch_end(self):
        """
        Triggered after each epoch,
        if shuffle is True randomises file indexing.
        """
        if self._shuffle:
            np.random.shuffle(self._file_indexes)

    def __len__(self) -> int:
        """
        Denotes the number of batches per epoch.

        :return: number of batches per epoch.
        """
        return int(np.ceil(len(self._file_indexes) / self._batch_size))

    def __getitem__(self, index: int) -> Tuple[np.ndarray]:
        """
        Generates one batch of data.

        :param index: index of the batch to return.
        :return: (x, y) (or (x, None) if with_gt is False) data for one batch,
                 where x is set of RGB + nir images and y is set of
                 corresponding cloud masks.
        """
        # Generate indexes of the batch
        indexes_in_batch = self._file_indexes[
            index * self._batch_size: (index + 1) * self._batch_size
        ]
        # Generate data
        return self._data_generation(indexes_in_batch)
class DG_L8CCA(keras.utils.Sequence):
    """
    Data generator for L8CCA clouds segmentation dataset.
    Works with Keras generators.
    """
    def __init__(
        self,
        img_paths: List[Path],
        batch_size: int,
        data_part: Tuple[float] = (0., 1.),
        with_gt: bool = False,
        patch_size: int = 384,
        bands: Tuple[int] = (4, 3, 2, 5),
        bands_names: Tuple[str] = ("red", "green", "blue", "nir"),
        resize: bool = False,
        normalize: bool = True,
        standardize: bool = False,
        shuffle: bool = True,
    ):
        """
        Prepare generator and init paths to files containing image channels.

        :param img_paths: paths to the dirs containing L8CCA images files.
        :param batch_size: size of generated batches, only one batch is loaded
                           to memory at a time.
        :param data_part: part of data to include, e.g., (0., 0.2) generates
                          dataloader with samples up to 20-th percentile without
                          20-th percentile, while (0.3, 0.8) generates dataloader
                          with samples from 30-th percentile (including it) up to
                          80-th percentile (without it).
                          (x, 1.) is an exception to the rule, generating dataloader
                          with samples from the x-th percentile (including it) up to
                          AND INCLUDING the last datapoint.
                          To include all samples, use (0., 1.).
                          If shuffle=True, partitions dataset based on shuffled data
                          (with a set seed), else partitions unshuffled dataset.
        :param with_gt: whether to include groundtruth.
        :param patch_size: size of the patches.
        :param bands: band numbers to load
        :param bands_names: names of the bands to load. Should have the same number
                            of elements as bands.
        :param resize: whether to resize img to gt. If True and with_gt=False,
                       will load GT to infer its shape and then delete it.
        :param normalize: whether to normalize the image.
        :param standardize: whether to standardize the image.
        :param shuffle: if True shuffles dataset before training and on each epoch end,
                        else returns dataloader sorted according to img_paths order.
        """
        self._img_paths: List[Path] = img_paths
        self._batch_size: int = batch_size
        self._data_part: Tuple[float] = data_part
        self._with_gt: bool = with_gt
        self._patch_size: int = patch_size
        self._normalize: bool = normalize
        self._standardize: bool = standardize
        self._resize: bool = resize
        self._shuffle: bool = shuffle
        self.n_bands: int = len(bands)
        # GT patches must be generated first: _generate_img_patches reads
        # self.original_gt_shapes when resize is requested.
        if self._with_gt or self._resize:
            self._generate_gt_patches()
        self._generate_img_patches(bands=bands, bands_names=bands_names)
        self._patches_indexes = np.arange(len(self.patches))
        if self._data_part != (0., 1.):
            self._partition_data()
        if self._shuffle:
            np.random.shuffle(self._patches_indexes)

    def _generate_img_patches(self, bands: Tuple[int] = (4, 3, 2, 5),
                              bands_names: Tuple[str] = (
                                  "red", "green", "blue", "nir"
                              )):
        """
        Create image patches from the provided bands.

        :param bands: band numbers to load
        :param bands_names: names of the bands to load. Should have the same
                            number of elements as bands.
        """
        self.patches = np.empty(
            (0, self._patch_size, self._patch_size, self.n_bands))
        self.img_shapes = []
        for i, img_path in enumerate(self._img_paths):
            channel_files = {}
            for name, band in zip(bands_names, bands):
                channel_files[name] = list(img_path.glob(f"*_B{band}.TIF"))[0]
            img = open_as_array(channel_files=channel_files,
                                channel_names=bands_names,
                                size=self.original_gt_shapes[i]
                                if self._resize else None,
                                normalize=self._normalize,
                                standardize=self._standardize)
            img = pad(img, self._patch_size)
            self.img_shapes.append(img.shape)
            # Cut the padded image into non-overlapping square patches.
            img_patches = rearrange(
                img, "(r dr) (c dc) b -> (r c) dr dc b",
                dr=self._patch_size, dc=self._patch_size
            )
            self.patches = np.concatenate((self.patches, img_patches))
            del img

    def _generate_gt_patches(self):
        """
        Create GT patches.
        """
        if self._with_gt:
            self.gt_patches = np.empty(
                (0, self._patch_size, self._patch_size, 1)
            )
        # Shapes are always recorded: _generate_img_patches needs them when
        # resize=True even if no GT patches are kept.
        self.original_gt_shapes = []
        for img_path in self._img_paths:
            gt = load_l8cca_gt(path=img_path)
            self.original_gt_shapes.append(gt.shape)
            if self._with_gt:
                gt = pad(gt, self._patch_size)
                img_gt_patches = rearrange(
                    gt, "(r dr) (c dc) 1 -> (r c) dr dc 1",
                    dr=self._patch_size, dc=self._patch_size
                )
                self.gt_patches = np.concatenate(
                    (self.gt_patches, img_gt_patches)
                )
            del gt

    def _partition_data(self, seed=42):
        """
        Partition data based on data_part arg.

        :param seed: random seed.
        """
        if self._shuffle:
            # Shuffle with a fixed seed so partitions are reproducible, then
            # restore the global RNG state.
            saved_seed = np.random.get_state()
            np.random.seed(seed)
            np.random.shuffle(self._patches_indexes)
            np.random.set_state(saved_seed)
        assert len(self._data_part) == 2
        for perc in self._data_part:
            # FIX: use isinstance instead of `type(perc) is float` so float
            # subclasses (e.g. np.float64) are accepted as well.
            assert isinstance(perc, float) and (perc >= 0.) and (perc <= 1.)
        from_, to_ = self._data_part
        idx_from = int(from_ * len(self._patches_indexes))
        idx_to = int(to_ * len(self._patches_indexes))
        if to_ < 1.:
            self._patches_indexes = self._patches_indexes[idx_from:idx_to]
        elif to_ == 1.:
            # (x, 1.) includes the very last datapoint (see __init__ docs).
            self._patches_indexes = self._patches_indexes[idx_from:]
        self.patches = self.patches[self._patches_indexes]
        if self._with_gt:
            self.gt_patches = self.gt_patches[self._patches_indexes]
        self._patches_indexes = np.arange(len(self.patches))

    def on_epoch_end(self):
        """
        Triggered after each epoch,
        if shuffle is True randomises file indexing.
        """
        if self._shuffle:
            np.random.shuffle(self._patches_indexes)

    def __len__(self):
        """
        Denotes the number of batches per epoch.

        :return: number of batches per epoch.
        """
        return int(np.ceil(len(self._patches_indexes) / self._batch_size))

    def __getitem__(self, index: int) -> Tuple[np.ndarray, None]:
        """
        Generates one batch of data.

        :param index: index of the batch to return.
        :return: (x, y) (or (x, None) if with_gt is False) data for one batch,
                 where x is set of RGB + nir images and y is set of
                 corresponding cloud masks.
        """
        indexes_in_batch = self._patches_indexes[
            index * self._batch_size: (index + 1) * self._batch_size
        ]
        if self._with_gt:
            y = self.gt_patches[indexes_in_batch]
        else:
            y = None
        return (
            self.patches[indexes_in_batch],
            y,
        )
def main_38Cloud():
    """Demo: load 38-Cloud patches and show a sample image with its mask."""
    dataset_root = Path("datasets/clouds/38-Cloud/38-Cloud_training")
    partitions = load_image_paths(
        base_path=dataset_root, split_ratios=(0.8, 0.15, 0.05))
    for name, files in zip(("train", "validation", "test"), partitions):
        generator = DG_38Cloud(files=files, batch_size=16)
        batch_x, batch_y = generator[3]
        # Drop the trailing channel axis of the mask for plotting.
        masks = batch_y[:, :, :, 0]
        plt.figure()
        plt.subplot(1, 3, 1)
        plt.imshow(strip_nir(batch_x[0]))
        plt.title(f"Split: { name }\n sample image")
        plt.subplot(1, 3, 2)
        plt.imshow(masks[0])
        plt.title(f"Split: { name }\n sample gt mask")
        plt.show()
def main_L8CCA():
    """Demo: load L8CCA patches and show a sample image with its mask."""
    dataset_root = Path(
        "datasets/clouds/"
        + "Landsat-Cloud-Cover-Assessment-Validation-Data-Partial"
    )
    scene_dirs = [dataset_root / "Barren" / "LC81390292014135LGN00",
                  dataset_root / "Forest" / "LC80160502014041LGN00"]
    partitions = ((0., 0.8), (0.8, 0.95), (0.95, 1.))
    for name, part in zip(("train", "validation", "test"), partitions):
        generator = DG_L8CCA(img_paths=scene_dirs, batch_size=16,
                             data_part=part, with_gt=True)
        batch_x, batch_y = generator[2]
        # Drop the trailing channel axis of the mask for plotting.
        masks = batch_y[:, :, :, 0]
        plt.figure()
        plt.subplot(1, 3, 1)
        plt.imshow(strip_nir(batch_x[0]))
        plt.title(f"Split: { name }\n sample image")
        plt.subplot(1, 3, 2)
        plt.imshow(masks[0])
        plt.title(f"Split: { name }\n sample gt mask")
        plt.show()
if __name__ == "__main__":
    # Run both dataset-loading demos when executed as a script.
    print("38Cloud demo")
    main_38Cloud()
    print("L8CCA demo")
    main_L8CCA()
|
# Copyright 2018 The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for util.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import numpy as np
from light_curve_util import periodic_event
from light_curve_util import util
class LightCurveUtilTest(absltest.TestCase):
    """Tests for light_curve_util.util.

    BUG FIX: all uses of the deprecated alias `np.float` were replaced with
    the builtin `float` (an exact drop-in: np.float WAS the builtin float).
    The alias was deprecated in NumPy 1.20 and removed in 1.24, so the old
    code raised AttributeError on modern NumPy.
    """

    def testPhaseFoldTime(self):
        time = np.arange(0, 2, 0.1)
        # Simple.
        tfold = util.phase_fold_time(time, period=1, t0=0.45)
        expected = [
            -0.45, -0.35, -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45, -0.45,
            -0.35, -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45
        ]
        self.assertSequenceAlmostEqual(expected, tfold)
        # Large t0.
        tfold = util.phase_fold_time(time, period=1, t0=1.25)
        expected = [
            -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45, -0.45, -0.35, -0.25,
            -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45, -0.45, -0.35
        ]
        self.assertSequenceAlmostEqual(expected, tfold)
        # Negative t0.
        tfold = util.phase_fold_time(time, period=1, t0=-1.65)
        expected = [
            -0.35, -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45, -0.45, -0.35,
            -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45, -0.45
        ]
        self.assertSequenceAlmostEqual(expected, tfold)
        # Negative time.
        time = np.arange(-3, -1, 0.1)
        tfold = util.phase_fold_time(time, period=1, t0=0.55)
        expected = [
            0.45, -0.45, -0.35, -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35, 0.45,
            -0.45, -0.35, -0.25, -0.15, -0.05, 0.05, 0.15, 0.25, 0.35
        ]
        self.assertSequenceAlmostEqual(expected, tfold)

    def testSplit(self):
        # Single segment.
        all_time = np.concatenate([np.arange(0, 1, 0.1), np.arange(1.5, 2, 0.1)])
        all_flux = np.ones(15)
        # Gap width 0.5.
        split_time, split_flux = util.split(all_time, all_flux, gap_width=0.5)
        self.assertLen(split_time, 2)
        self.assertLen(split_flux, 2)
        self.assertSequenceAlmostEqual(np.arange(0, 1, 0.1), split_time[0])
        self.assertSequenceAlmostEqual(np.ones(10), split_flux[0])
        self.assertSequenceAlmostEqual(np.arange(1.5, 2, 0.1), split_time[1])
        self.assertSequenceAlmostEqual(np.ones(5), split_flux[1])
        # Multi segment.
        all_time = [
            np.concatenate([
                np.arange(0, 1, 0.1),
                np.arange(1.5, 2, 0.1),
                np.arange(3, 4, 0.1)
            ]),
            np.arange(4, 5, 0.1)
        ]
        all_flux = [np.ones(25), np.ones(10)]
        self.assertEqual(len(all_time), 2)
        self.assertEqual(len(all_time[0]), 25)
        self.assertEqual(len(all_time[1]), 10)
        self.assertEqual(len(all_flux), 2)
        self.assertEqual(len(all_flux[0]), 25)
        self.assertEqual(len(all_flux[1]), 10)
        # Gap width 0.5.
        split_time, split_flux = util.split(all_time, all_flux, gap_width=0.5)
        self.assertLen(split_time, 4)
        self.assertLen(split_flux, 4)
        self.assertSequenceAlmostEqual(np.arange(0, 1, 0.1), split_time[0])
        self.assertSequenceAlmostEqual(np.ones(10), split_flux[0])
        self.assertSequenceAlmostEqual(np.arange(1.5, 2, 0.1), split_time[1])
        self.assertSequenceAlmostEqual(np.ones(5), split_flux[1])
        self.assertSequenceAlmostEqual(np.arange(3, 4, 0.1), split_time[2])
        self.assertSequenceAlmostEqual(np.ones(10), split_flux[2])
        self.assertSequenceAlmostEqual(np.arange(4, 5, 0.1), split_time[3])
        self.assertSequenceAlmostEqual(np.ones(10), split_flux[3])
        # Gap width 1.0.
        split_time, split_flux = util.split(all_time, all_flux, gap_width=1)
        self.assertLen(split_time, 3)
        self.assertLen(split_flux, 3)
        self.assertSequenceAlmostEqual([
            0., 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.5, 1.6, 1.7, 1.8, 1.9
        ], split_time[0])
        self.assertSequenceAlmostEqual(np.ones(15), split_flux[0])
        self.assertSequenceAlmostEqual(np.arange(3, 4, 0.1), split_time[1])
        self.assertSequenceAlmostEqual(np.ones(10), split_flux[1])
        self.assertSequenceAlmostEqual(np.arange(4, 5, 0.1), split_time[2])
        self.assertSequenceAlmostEqual(np.ones(10), split_flux[2])

    def testRemoveEvents(self):
        time = np.arange(20, dtype=float)
        flux = 10 * time
        # One event.
        events = [periodic_event.Event(period=4, duration=1.5, t0=3.5)]
        output_time, output_flux = util.remove_events(time, flux, events)
        self.assertSequenceAlmostEqual([1, 2, 5, 6, 9, 10, 13, 14, 17, 18],
                                       output_time)
        self.assertSequenceAlmostEqual(
            [10, 20, 50, 60, 90, 100, 130, 140, 170, 180], output_flux)
        # Two events.
        events.append(periodic_event.Event(period=7, duration=1.5, t0=6.5))
        output_time, output_flux = util.remove_events(time, flux, events)
        self.assertSequenceAlmostEqual([1, 2, 5, 9, 10, 17, 18], output_time)
        self.assertSequenceAlmostEqual([10, 20, 50, 90, 100, 170, 180], output_flux)
        # Multi segment light curve.
        time = [np.arange(10, dtype=float), np.arange(10, 20, dtype=float)]
        flux = [10 * t for t in time]
        output_time, output_flux = util.remove_events(time, flux, events)
        self.assertLen(output_time, 2)
        self.assertLen(output_flux, 2)
        self.assertSequenceAlmostEqual([1, 2, 5, 9], output_time[0])
        self.assertSequenceAlmostEqual([10, 20, 50, 90], output_flux[0])
        self.assertSequenceAlmostEqual([10, 17, 18], output_time[1])
        self.assertSequenceAlmostEqual([100, 170, 180], output_flux[1])
        # One segment totally removed with include_empty_segments = True.
        time = [np.arange(5, dtype=float), np.arange(10, 20, dtype=float)]
        flux = [10 * t for t in time]
        events = [periodic_event.Event(period=10, duration=2, t0=2.5)]
        output_time, output_flux = util.remove_events(
            time, flux, events, width_factor=3, include_empty_segments=True)
        self.assertLen(output_time, 2)
        self.assertLen(output_flux, 2)
        self.assertSequenceEqual([], output_time[0])
        self.assertSequenceEqual([], output_flux[0])
        self.assertSequenceAlmostEqual([16, 17, 18, 19], output_time[1])
        self.assertSequenceAlmostEqual([160, 170, 180, 190], output_flux[1])
        # One segment totally removed with include_empty_segments = False.
        time = [np.arange(5, dtype=float), np.arange(10, 20, dtype=float)]
        flux = [10 * t for t in time]
        events = [periodic_event.Event(period=10, duration=2, t0=2.5)]
        output_time, output_flux = util.remove_events(
            time, flux, events, width_factor=3, include_empty_segments=False)
        self.assertLen(output_time, 1)
        self.assertLen(output_flux, 1)
        self.assertSequenceAlmostEqual([16, 17, 18, 19], output_time[0])
        self.assertSequenceAlmostEqual([160, 170, 180, 190], output_flux[0])

    def testInterpolateMaskedSpline(self):
        all_time = [
            np.arange(0, 10, dtype=float),
            np.arange(10, 20, dtype=float),
            np.arange(20, 30, dtype=float),
        ]
        all_masked_time = [
            np.array([0, 1, 2, 3, 8, 9], dtype=float),  # No 4, 5, 6, 7
            np.array([10, 11, 12, 13, 14, 15, 16], dtype=float),  # No 17, 18, 19
            np.array([], dtype=float)
        ]
        all_masked_spline = [2 * t + 100 for t in all_masked_time]
        interp_spline = util.interpolate_masked_spline(all_time, all_masked_time,
                                                       all_masked_spline)
        self.assertLen(interp_spline, 3)
        self.assertSequenceAlmostEqual(
            [100, 102, 104, 106, 108, 110, 112, 114, 116, 118], interp_spline[0])
        self.assertSequenceAlmostEqual(
            [120, 122, 124, 126, 128, 130, 132, 132, 132, 132], interp_spline[1])
        self.assertTrue(np.all(np.isnan(interp_spline[2])))

    def testCountTransitPoints(self):
        time = np.concatenate([
            np.arange(0, 10, 0.1, dtype=float),
            np.arange(15, 30, 0.1, dtype=float),
            np.arange(50, 100, 0.1, dtype=float)
        ])
        event = periodic_event.Event(period=10, duration=5, t0=9.95)
        points_in_transit = util.count_transit_points(time, event)
        np.testing.assert_array_equal([25, 50, 25, 0, 25, 50, 50, 50, 50],
                                      points_in_transit)
if __name__ == "__main__":
    # Delegate to absl's test runner when executed as a script.
    absltest.main()
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import os
from conda_kapsel.test.project_utils import project_dir_disable_dedicated_env
from conda_kapsel.test.environ_utils import (minimal_environ, minimal_environ_no_conda_env)
from conda_kapsel.env_spec import EnvSpec
from conda_kapsel.local_state_file import LocalStateFile
from conda_kapsel.plugins.registry import PluginRegistry
from conda_kapsel.plugins.requirement import UserConfigOverrides
from conda_kapsel.plugins.requirements.conda_env import CondaEnvRequirement
from conda_kapsel.internal.test.tmpfile_utils import with_directory_contents
from conda_kapsel.internal import conda_api
conda_env_var = conda_api.conda_prefix_variable()
def _empty_default_requirement():
    # Helper: a CondaEnvRequirement whose only env spec is an empty 'default'
    # spec (no packages, no channels).
    return CondaEnvRequirement(registry=PluginRegistry(), env_specs=dict(default=EnvSpec('default', [], [])))
def test_env_var():
    """The requirement's env var matches the conda prefix variable name."""
    requirement = CondaEnvRequirement(PluginRegistry())
    assert conda_env_var == requirement.env_var
def test_conda_env_title_and_description():
    """Title and description strings are exactly as documented."""
    req = _empty_default_requirement()
    assert 'A Conda environment' == req.title
    assert 'The project needs a Conda environment containing all required packages.' == req.description
def test_conda_default_env_not_set():
    """Status explains that the default env directory is not a conda env yet."""
    def check(dirname):
        req = _empty_default_requirement()
        project_dir_disable_dedicated_env(dirname)
        local_state = LocalStateFile.load_for_directory(dirname)
        status = req.check_status(
            minimal_environ_no_conda_env(PROJECT_DIR=dirname),
            local_state,
            'default',
            UserConfigOverrides())
        env_dir = os.path.join(dirname, 'envs', 'default')
        expected = "'{}' doesn't look like it contains a Conda environment yet.".format(env_dir)
        assert status.status_description == expected
    with_directory_contents(dict(), check)
def test_conda_default_env_is_bogus():
    """A bogus inherited env name yields a 'not a conda environment' status.

    Consistency fix: call minimal_environ_no_conda_env with a direct keyword
    argument (as the sibling tests do) instead of the equivalent but noisier
    ``**{'PROJECT_DIR': dirname}`` dict expansion.
    """
    def check_conda_default_env_is_bogus(dirname):
        requirement = _empty_default_requirement()
        project_dir_disable_dedicated_env(dirname)
        local_state = LocalStateFile.load_for_directory(dirname)
        status = requirement.check_status(
            minimal_environ_no_conda_env(PROJECT_DIR=dirname),
            local_state,
            'default',
            UserConfigOverrides(inherited_env="not_a_real_env_anyone_has"))
        expected = "'not_a_real_env_anyone_has' doesn't look like it contains a Conda environment yet."
        assert expected == status.status_description
    with_directory_contents(dict(), check_conda_default_env_is_bogus)
def test_conda_fails_while_listing_installed(monkeypatch):
    """A CondaError from conda_api.installed surfaces in the status description.

    Fix: the sabotage helper re-imported conda_api locally, shadowing the
    module-level import of the same module for no benefit; use the existing
    module-level name.
    """
    def check_fails_while_listing_installed(dirname):
        def sabotaged_installed_command(prefix):
            # Simulate conda itself failing while listing packages.
            raise conda_api.CondaError("sabotage!")
        monkeypatch.setattr('conda_kapsel.internal.conda_api.installed', sabotaged_installed_command)
        project_dir_disable_dedicated_env(dirname)
        local_state = LocalStateFile.load_for_directory(dirname)
        requirement = CondaEnvRequirement(registry=PluginRegistry(),
                                          env_specs=dict(default=EnvSpec('default', ['not_a_real_package'], [])))
        environ = minimal_environ(PROJECT_DIR=dirname)
        status = requirement.check_status(environ,
                                          local_state,
                                          'default',
                                          UserConfigOverrides(inherited_env=environ.get(conda_env_var)))
        assert status.status_description.startswith("Conda failed while listing installed packages in ")
        assert status.status_description.endswith(": sabotage!")
    with_directory_contents(dict(), check_fails_while_listing_installed)
def test_missing_package():
    """Missing conda packages are listed, sorted, in the status description."""
    def check(dirname):
        req = CondaEnvRequirement(
            registry=PluginRegistry(),
            env_specs={'default': EnvSpec('default', ['boguspackage', 'boguspackage2'], [])})
        project_dir_disable_dedicated_env(dirname)
        local_state = LocalStateFile.load_for_directory(dirname)
        environ = minimal_environ(PROJECT_DIR=dirname)
        status = req.check_status(environ,
                                  local_state,
                                  'default',
                                  UserConfigOverrides(inherited_env=environ.get(conda_env_var)))
        assert status.status_description == "Conda environment is missing packages: boguspackage, boguspackage2"
    with_directory_contents(dict(), check)
|
import os
from pathlib import Path
import click
import palm.project_setup_utils as project_setup_utils
def test_has_env(tmp_path):
    """has_env() is false before a .env file exists and true after."""
    os.chdir(tmp_path)
    assert not project_setup_utils.has_env()
    (tmp_path / '.env').touch()
    assert project_setup_utils.has_env()
def test_optionally_create_env(tmp_path, monkeypatch):
    """optionally_create_env() creates .env when the user confirms."""
    monkeypatch.setattr(click, 'confirm', lambda prompt: True)
    os.chdir(tmp_path)
    project_setup_utils.optionally_create_env()
    assert (tmp_path / '.env').exists()
def test_make_executable(tmp_path):
    """make_executable() sets the execute bit on the named file."""
    os.chdir(tmp_path)
    script = Path('file.sh')
    script.touch()
    project_setup_utils.make_executable('file.sh')
    assert os.access(str(script), os.X_OK)
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Sample python code"""
def hello_world(request):
    """Cloud Function HTTP handler echoing a 'message' parameter.

    Lookup order: query-string args first, then the parsed JSON body; when
    neither carries 'message', a static greeting is returned. The JSON body
    is parsed up front (as in the original) regardless of which branch wins.
    """
    payload = request.get_json()
    args = request.args
    if args and 'message' in args:
        return args.get('message')
    if payload and 'message' in payload:
        return payload['message']
    return 'Hello World!'
|
from django.db import models
class Government(models.Model):
    """A government entity (e.g. a state or city) that employs wage earners."""
    name = models.CharField(max_length=128)

    def __str__(self):
        return self.name
class Agency(models.Model):
    """An agency within a government."""
    name = models.CharField(max_length=128)

    def __str__(self):
        return self.name
class Department(models.Model):
    """A department within an agency."""
    name = models.CharField(max_length=128)

    def __str__(self):
        return self.name
class Title(models.Model):
    """A job title held by a wage earner."""
    name = models.CharField(max_length=128)

    def __str__(self):
        return self.name
class Wage(models.Model):
    """One yearly wage record for a named person at a government employer."""
    first_name = models.CharField(db_index=True, max_length=64)
    last_name = models.CharField(db_index=True, max_length=64)
    middle_name = models.CharField(max_length=32)
    government = models.ForeignKey(Government,
                                   db_index=True,
                                   on_delete=models.CASCADE)
    agency = models.ForeignKey(Agency,
                               db_index=True,
                               on_delete=models.CASCADE)
    dept = models.ForeignKey(Department,
                             db_index=True,
                             on_delete=models.CASCADE)
    title = models.ForeignKey(Title, db_index=True, on_delete=models.CASCADE)
    wage = models.DecimalField(db_index=True, max_digits=10, decimal_places=2)
    year = models.IntegerField(db_index=True, default=0)

    def __str__(self):
        # "Last, First Middle" display form.
        return '{last}, {first} {middle}'.format(
            last=self.last_name,
            first=self.first_name,
            middle=self.middle_name
        )

    class Meta:
        # NOTE(review): "agency" is not part of the uniqueness constraint even
        # though it is a field — confirm whether two records differing only by
        # agency are meant to collide.
        unique_together = ("first_name",
                           "last_name",
                           "middle_name",
                           "government",
                           "dept",
                           "title",
                           "wage",
                           "year")
|
import numpy
import pandas
from scipy import stats
def df_accepted(df, key, include=None, exclude=None):
    """Filter rows of df by membership of column `key`.

    include: keep only rows whose key is in this collection (None = keep all).
    exclude: drop rows whose key is in this collection (None = drop none).
    """
    mask = None
    if include is not None:
        mask = df[key].isin(include)
    if exclude is not None:
        keep = ~df[key].isin(exclude)
        mask = keep if mask is None else mask & keep
    return df if mask is None else df[mask]
def df_complete_n_index(df, n, fill=0):
    """Ensure rows exist for integer indices 0..n-1.

    Builds an n-row frame of `fill` values over df's columns and element-wise
    adds it (fill_value=0), so missing indices appear with value `fill`.
    """
    filler = pandas.DataFrame({col: [fill] * n for col in df.columns},
                              index=list(range(n)))
    return df.add(filler, fill_value=0)
def df_adjust_index(df, add, name=None):
    """Shift df's index by `add`, storing the shifted index as a named column.

    Mutates df by adding the column, then returns a frame indexed by it.
    """
    column = name or 'AdjustIndex'
    df[column] = df.index + add
    return df.set_index(column)
def df_adjust_index_to_zero(df, name=None):
    """Shift the index down so it starts at zero, then pad missing rows.

    Padding length is the original maximum index value, matching the
    original implementation's behavior exactly.
    """
    offset = -numpy.min(df.index)
    shifted = df_adjust_index(df, offset, name)
    return df_complete_n_index(shifted, numpy.max(df.index))
def df_sum(to, series, name=None):
    """Add `series` into frame `to` as a one-column frame, aligning on index."""
    frame = series.to_frame(name or series.name)
    return to.add(frame, fill_value=0)
def df_index_sums(series, index, name=None):
    """Sum `series` values grouped by the parallel `index` series.

    Returns a frame indexed by index.name with one column of group sums.
    """
    frame = series.to_frame(name or series.name)
    return frame.join(index).groupby(index.name).sum()
def df_sum_by_index(to, series, index, name=None):
    """Accumulate per-group sums of `series` into frame `to`."""
    sums = df_index_sums(series, index, name)
    return to.add(sums, fill_value=0)
def df_from_iterator(rows):
    """Build a DataFrame from (name, data) pairs, one row per pair.

    Fix: DataFrame.append was deprecated in pandas 1.4 and removed in 2.0.
    Constructing the frame from a list of named Series in one call preserves
    the old row-per-series semantics (row index = series name, columns =
    union of the series' indices).
    """
    series_list = [pandas.Series(data, name=name) for name, data in rows]
    if not series_list:
        return pandas.DataFrame()
    return pandas.DataFrame(series_list)
def write_or_print(df, csv_name=None):
    """Persist df to CSV when a filename is given; otherwise print it."""
    if not csv_name:
        print(df)
        return
    df.to_csv(csv_name)
def nth_delta(series, n=1):
    """Difference between the n-th and (n-1)-th values, or NaN if too short."""
    values = series.values
    if values.shape[0] <= n:
        return numpy.nan
    return values[n] - values[n - 1]
def groupby_ranges(groupby, key):
    """Per-group (max - min) spread of column `key`, keeping positive spreads."""
    spread = groupby[key].max() - groupby[key].min()
    return spread[spread > 0]
def groupby_nth_deltas(groupby, key, n=1):
    """Apply nth_delta to column `key` of each group, dropping NaN results."""
    deltas = groupby[key].apply(nth_delta, n=n)
    return deltas.dropna()
def groupby_as_ones(groupby):
    """Map every group to the constant 1; an empty groupby yields an empty Series."""
    if len(groupby) == 0:
        return pandas.Series()
    return groupby.apply(lambda _: 1)
def as_minute_scalar(series):
    """Convert a datetime64[ns] series to float minutes since the epoch."""
    seconds = series.astype('int64') // 1e9
    return seconds / 60
def split_outliers(series, max_z_score=2.5):
    """Partition a series into (inliers, outliers) by absolute z-score."""
    if series.empty:
        return series, pandas.Series()
    inlier_mask = numpy.abs(stats.zscore(series)) < max_z_score
    return series[inlier_mask], series[~inlier_mask]
def strip_outliers(key, series, max_z_score=2.5):
    """Drop z-score outliers from `series`, printing what was removed for `key`."""
    kept, removed = split_outliers(series, max_z_score)
    if not removed.empty:
        print(f'Stripped outliers for "{key}"', removed.to_numpy())
    return kept
def median_or_mean(series):
    """Return (median, 'Md') when the median is defined, else (mean, 'M')."""
    median = numpy.median(series)
    if numpy.isnan(median):
        return numpy.mean(series), 'M'
    return median, 'Md'
def measures(series):
    """Summary dict for a series: count n, center label, center m, spread mad.

    Returns None for None input and a zeroed 'na' dict for an empty series.
    """
    if series is None:
        return None
    if series.empty:
        return {'n': 0, 'label': 'na', 'm': 0, 'mad': 0}
    center, label = median_or_mean(series)
    spread, _ = median_or_mean(numpy.absolute(series - center))
    return {'n': series.shape[0], 'label': label, 'm': center, 'mad': spread}
def sums_measures(series):
    """Measures for a histogram-like series (index = value, entry = count).

    Returns count n, weighted mean m and mean absolute deviation mad.
    Fix: numpy.sum over a generator expression is deprecated (NumPy treats
    the generator as a 0-d object); the builtin sum is the correct tool for
    the weighted accumulations.
    """
    if series is None:
        return None
    if series.empty:
        return {'n': 0, 'label': 'na', 'm': 0, 'mad': 0}
    n = numpy.sum(series)
    m = sum(i * series[i] for i in range(series.shape[0])) / n
    mad = sum(abs(i - m) * series[i] for i in range(series.shape[0])) / n
    return {'n': n, 'label': 'M', 'm': m, 'mad': mad}
|
#!/usr/bin/env python3
# Foundations of Python Network Programming, Third Edition
# https://github.com/brandon-rhodes/fopnp/blob/m/py3/chapter01/search4.py
import socket
from urllib.parse import quote_plus
# Hand-built HTTP/1.1 request template; {} is filled with the URL-quoted
# address. Each trailing backslash continues the string literal so every
# header line ends in an explicit \r\n.
# NOTE(review): plain HTTP on port 80 to maps.google.com — the Geocoding API
# has since moved to HTTPS with API keys; this teaching sample predates that.
request_text = """\
GET /maps/api/geocode/json?address={}&sensor=false HTTP/1.1\r\n\
Host: maps.google.com:80\r\n\
User-Agent: search4.py (Foundations of Python Network Programming)\r\n\
Connection: close\r\n\
\r\n\
"""
def geocode(address):
    """Send a raw HTTP geocoding request for `address` and print the reply."""
    sock = socket.socket()
    sock.connect(('maps.google.com', 80))
    sock.sendall(request_text.format(quote_plus(address)).encode('ascii'))
    chunks = []
    while True:
        chunk = sock.recv(4096)
        if not chunk:
            break
        chunks.append(chunk)
    print(b''.join(chunks).decode('utf-8'))
# Demo entry point: geocode a fixed street address.
if __name__ == '__main__':
    geocode('207 N. Defiance St, Archbold, OH')
|
asciiArt = '''
__________ __ .__ _________ .__ .__ __
\______ \___.__._/ |_| |__ ____ ____ \_ ___ \_____ | | ____ __ __| | _____ _/ |_ ___________
| ___< | |\ __\ | \ / _ \ / \ / \ \/\__ \ | | _/ ___\| | \ | \__ \\ __\/ _ \_ __ \
| | \___ | | | | Y ( <_> ) | \ \ \____/ __ \| |_\ \___| | / |__/ __ \| | ( <_> ) | \/
|____| / ____| |__| |___| /\____/|___| / \______ (____ /____/\___ >____/|____(____ /__| \____/|__|
\/ \/ \/ \/ \/ \/ \/
'''
print (asciiArt)
def main():
    """Read two integers and an operator, then print the chosen result.

    Fix: the old code computed every operation eagerly, so simply entering
    0 as the second number crashed with ZeroDivisionError even when the
    user chose "+". Each result is now computed only in its own branch,
    division by zero gets an explicit message, and an unknown operator is
    reported instead of being silently ignored.
    """
    firstNumber = int(input("Type a number: "))
    secondNumber = int(input("Another one: "))
    operation = input("Choose an operation (+ ADDITION) (- SUBTRACTION) (* MULTIPLICATION) (/ DIVISION) (** POTENTIATION) ")
    if operation == "+":
        print("The result is: ", firstNumber + secondNumber)
    elif operation == "-":
        print("The result is: ", firstNumber - secondNumber)
    elif operation == "*":
        print("The result is: ", firstNumber * secondNumber)
    elif operation == "/":
        if secondNumber == 0:
            print("Cannot divide by zero!")
        else:
            print("The result is: ", firstNumber / secondNumber)
    elif operation == "**":
        print(firstNumber, "to", secondNumber, "is", firstNumber ** secondNumber)
    else:
        print("Unknown operation:", operation)
# Repeat until the user answers anything other than 'Y' (case-insensitive).
while True:
    main()
    if input("Do you want to do another operation? (Y/N)").strip().upper() != 'Y':
        print("See you next time!")
        break
|
#! Code voor een ASN betaalrekening
from os import path
from typing import Dict, Optional, Tuple
from beancount.core import data, number, position
from beangulp.importers import csvbase
import csv as pycsv
import re
pycsv.register_dialect("asnbankdialect", delimiter=",")
class Importer(csvbase.Importer):
    """Beangulp importer for ASN bank (betaalrekening) CSV exports.

    Fixes over the original:
    - identify() named its first parameter ``remap`` instead of ``self``
      (it worked only because the framework calls it positionally);
    - regex patterns are now raw strings (``\\d``/``\\s`` escapes were
      emitting DeprecationWarnings);
    - ``!= None`` comparisons replaced with ``is not None``;
    - get_investment_account_and_currency now returns a tuple (matching its
      annotation) via a lookup table, and raises ValueError for an unknown
      share type instead of crashing with NameError on an unbound local.
    """

    encoding = "utf8"
    names = False
    dialect = "asnbankdialect"

    # share_type -> (account suffix, commodity name) for ASN investment funds.
    _FUNDS = {
        "ASNDuurzaamMixfondsZeerDefensief": ("DuurzaamMixfondsZeerDefensief", "ASN_MIXFONDS_ZEER_DEFENSIEF"),
        "ASNDuurzaamMixfondsDefensief": ("DuurzaamMixfondsDefensief", "ASN_MIXFONDS_DEFENSIEF"),
        "ASNDuurzaamMixfondsNeutraal": ("DuurzaamMixfondsNeutraal", "ASN_MIXFONDS_NEUTRAAL"),
        "ASNDuurzaamMixfondsOffensief": ("DuurzaamMixfondsOffensief", "ASN_MIXFONDS_OFFENSIEF"),
        "ASNDuurzaamMixfondsZeerOffensief": ("DuurzaamMixfondsZeerOffensief", "ASN_MIXFONDS_ZEER_OFFENSIEF"),
        "ASNMilieu&Waterfonds": ("MilieuEnWaterfonds", "ASN_MILIEU_EN_WATERFONDS"),
        "ASN-NovibMicrokredietfonds": ("Microkredietfonds", "ASN_MICROKREDIETFONDS"),
        "ASNDuurzaamObligatiefonds": ("Obligatiefonds", "ASN_OBLIGATIEFONDS"),
        "ASNMicrokredietfonds": ("Microkredietfonds", "ASN_MICROKREDIETFONDS"),
        "ASNGroenprojectenfonds": ("Groenprojectenfonds", "ASN_GROENPROJECTENFONDS"),
    }

    def __init__(
        self,
        known_accounts: Dict[str, str],
        currency: str = "EUR",
        interest_account: Optional[str] = None,
        investment_account: Optional[str] = None,
        profit_loss_account: Optional[str] = None,
    ) -> None:
        """known_accounts maps IBANs to beancount account names; the three
        optional accounts enable interest and fund-transaction handling."""
        self.known_accounts = known_accounts
        self.interest_account = interest_account
        self.investment_account = investment_account
        self.profit_loss_account = profit_loss_account
        # Column layout of the ASN CSV export (0-based positions).
        self.columns = {
            "date": csvbase.Date(0, "%d-%m-%Y"),
            "own_account": csvbase.Column(1),
            "other_account": csvbase.Column(2),
            "payee": csvbase.Column(3),
            "amount": csvbase.Amount(10),
            "narration": csvbase.Column(17),
            "balance_before": csvbase.Amount(8),
            "booking_code": csvbase.Column(14),
        }
        # The real account is resolved per-file in account()/finalize().
        super().__init__("ACCOUNT_PLACEHOLDER", currency)

    def filename(self, filepath) -> str:
        """Archive name for the imported file."""
        return "asnbank." + path.basename(filepath)

    def account(self, filepath: str) -> str:
        """Resolve the beancount account from the first row's own IBAN.

        NOTE(review): returns None for an empty file — confirm callers
        tolerate that.
        """
        for row in self.read(filepath):
            return self.known_accounts[row.own_account]

    def identify(self, filepath) -> bool:
        """Heuristic: the file starts with a dd-mm-yyyy date and a comma."""
        with open(filepath) as fd:
            head = fd.read(1024)
            return re.search(r"^\d\d-\d\d-\d\d\d\d,", head) is not None

    def extract(self, filepath, existing) -> data.Entries:
        """Extract transactions, then add a balance assertion.

        The CSV reports the balance *before* each transaction, so the
        balance entry is derived from the last dated row and inserted just
        before the first entry of that date.
        """
        entries = super().extract(filepath, existing)
        if len(entries) != 0:
            date = entries[-1].date
            (row, lineno) = next(
                (row, lineno)
                for lineno, row in enumerate(self.read(filepath))
                if row.date == date
            )
            units = data.Amount(row.balance_before, self.currency)
            meta = data.new_metadata(filepath, lineno)
            balance = data.Balance(
                meta, date, self.known_accounts[row.own_account], units, None, None
            )
            # Insert before the first entry carrying the same date.
            found_index = 0
            for index, value in enumerate(entries):
                if value.date == date:
                    found_index = index
                    break
            entries.insert(found_index, balance)
        return entries

    def finalize(self, transaction, row):
        """Post-process one transaction: fix the account, strip quoting, and
        add counter-postings for known accounts, interest and fund trades."""
        # Replace the placeholder account with the real one for this IBAN.
        transaction.postings[0] = transaction.postings[0]._replace(
            account=self.known_accounts[row.own_account]
        )
        # The narration field is doubly quoted in the export (except "GEEN").
        if transaction.narration != "GEEN":
            transaction = transaction._replace(narration=transaction.narration[1:-1])
        # Counter-posting when the other side is one of our own accounts.
        if row.other_account in self.known_accounts.keys():
            transaction.postings.append(
                data.Posting(
                    self.known_accounts[row.other_account],
                    -transaction.postings[0].units,
                    None,
                    None,
                    None,
                    None,
                )
            )
        # Interest payments (booking code RNT).
        elif self.interest_account is not None and row.booking_code == "RNT":
            transaction.postings.append(
                data.Posting(
                    self.interest_account,
                    -transaction.postings[0].units,
                    None,
                    None,
                    None,
                    None,
                )
            )
        # Aan-/verkoop beleggingen (fund buy/sell, booking code EFF).
        # TODO: Why not add a balance check. (Available in the narration.)
        elif (
            self.investment_account is not None
            and self.profit_loss_account is not None
            and row.booking_code == "EFF"
        ):
            regex = r"^Voor\s+u\s+([a-z]+kocht)\s+via\s+Euronext\s+Fund\s+Services:\s+(\d+ \d+)\s+Participaties\s+(.*)\s+a\s+EUR\s+(\d+ \d+)."
            result = re.match(regex, transaction.narration)
            profit_loss = False
            if result is not None:
                transaction_type = result.group(1)
                # Numbers use a space as decimal separator in the narration.
                share_amount = number.D(result.group(2).replace(" ", "."))
                share_type = result.group(3).replace(" ", "")
                share_cost = number.D(result.group(4).replace(" ", "."))
                (
                    other_account,
                    share_currency,
                ) = self.get_investment_account_and_currency(share_type)
                price = None
                cost = None
                if transaction_type == "verkocht":
                    # Sale: negative shares, record price and an empty cost
                    # spec so booking can match lots; route gains/losses.
                    share_amount = -share_amount
                    price = data.Amount(share_cost, "EUR")
                    cost = position.CostSpec(None, None, None, None, None, None)
                    transaction = transaction._replace(narration="Verkoop beleggen")
                    profit_loss = True
                elif transaction_type == "gekocht":
                    # Purchase: record acquisition cost per share.
                    cost = position.CostSpec(share_cost, None, "EUR", None, None, None)
                    transaction = transaction._replace(narration="Aankoop beleggen")
                else:
                    print("Warning: Unknown transaction type: " + transaction_type)
                other_amount = data.Amount(share_amount, share_currency)
                transaction.postings.append(
                    data.Posting(other_account, other_amount, cost, price, None, None)
                )
                if profit_loss:
                    transaction.postings.append(
                        data.Posting(self.profit_loss_account, None, None, None, None, None)
                    )
        return transaction

    def get_investment_account_and_currency(self, share_type: str) -> Tuple[str, str]:
        """Map an ASN fund name to (sub-account, commodity).

        Raises ValueError for an unknown fund (the original crashed with
        NameError on an unbound local in that case).
        """
        try:
            suffix, share_currency = self._FUNDS[share_type]
        except KeyError:
            print("Warning: Unknown share type: " + share_type)
            raise ValueError("Unknown share type: " + share_type)
        return (self.investment_account + ":" + suffix, share_currency)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the ILSMigrationProblem and ILSMigrationStats tracking tables."""

    dependencies = [
        ('ilsgateway', '0002_auto_20160104_1600'),
    ]

    operations = [
        # Records individual objects that failed to migrate, keyed by domain.
        migrations.CreateModel(
            name='ILSMigrationProblem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('domain', models.CharField(max_length=128, db_index=True)),
                ('object_id', models.CharField(max_length=128, null=True)),
                ('object_type', models.CharField(max_length=30)),
                ('description', models.CharField(max_length=128)),
                ('external_id', models.CharField(max_length=128)),
                ('last_modified', models.DateTimeField(auto_now=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Aggregate per-domain counters for a migration run.
        migrations.CreateModel(
            name='ILSMigrationStats',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('products_count', models.IntegerField(default=0)),
                ('locations_count', models.IntegerField(default=0)),
                ('sms_users_count', models.IntegerField(default=0)),
                ('web_users_count', models.IntegerField(default=0)),
                ('domain', models.CharField(max_length=128, db_index=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
            ],
            options={
            },
            bases=(models.Model,),
        )
    ]
|
# -*- coding: utf-8 -*-
# Scrapy settings for liferay project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'liferay'

SPIDER_MODULES = ['liferay.spiders']
NEWSPIDER_MODULE = 'liferay.spiders'

# Obey robots.txt rules.
# Fix: this file previously set ROBOTSTXT_OBEY = True here and silently
# overrode it with False further down; keep one explicit assignment with
# the value that was actually in effect.
ROBOTSTXT_OBEY = False

# COOKIES
# Fix: the setting Scrapy actually reads is COOKIES_ENABLED — the old
# misspelled COOKIES_ENABLES was ignored, so cookies were never disabled.
COOKIES_ENABLED = False
COOKIES_DEBUG = False

# The maximum number of concurrent (ie. simultaneous) requests that will
# be performed to any single domain.
# CONCURRENT_REQUESTS_PER_DOMAIN = 100
# CONCURRENT_REQUESTS_PER_IP = 0
# CONCURRENT_REQUESTS_PER_SPIDER = 100
DNSCACHE_ENABLED = True
# DOWNLOAD_DELAY = 2
DOWNLOAD_TIMEOUT = 20
# DEFAULT_REQUEST_HEADERS = {
#     'Referer': 'http://Google.com'
# }

# Retry many times since proxies often fail
RETRY_TIMES = 20
# Retry on most error codes since proxies fail for different reasons
RETRY_HTTP_CODES = [500, 503, 504, 400, 403, 404, 408, 302, 304]

ITEM_PIPELINES = {
    'liferay.pipelines.MongoPipeline': 300,
    'scrapy_redis.pipelines.RedisPipeline': 400
}

# Use the scrapy-redis scheduler and FIFO spider queue.
SCHEDULER = "scrapy_redis.scheduler.Scheduler"
SCHEDULER_QUEUE_CLASS = 'scrapy_redis.queue.SpiderQueue'

# NOTE(review): the 'scrapy.contrib.*' middleware paths are the pre-1.0
# locations (modern Scrapy uses 'scrapy.downloadermiddlewares.*'); left
# unchanged because this project appears pinned to an old Scrapy release.
DOWNLOADER_MIDDLEWARES = {
    'scrapy.contrib.downloadermiddleware.useragent.UserAgentMiddleware': None,
    'scrapy.contrib.downloadermiddleware.httpproxy.HttpProxyMiddleware': 110,
    'scrapy.contrib.downloadermiddleware.retry.RetryMiddleware': 300,
    'liferay.rotate_useragent.RotateUserAgentMiddleware': 400,
    # 'magic_mirror.spiders.rotate_useragent.RotateUserAgentMiddleware': 400,
    'scrapy.contrib.downloadermiddleware.cookies.CookiesMiddleware': 700
}

# custom settings
REDIS_HOST = '0.0.0.0'
REDIS_PORT = 6379
LOG_PATH = ''
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE_NAME = {'default': 'liferay'}
DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter"
|
'''[skope-rules](https://github.com/scikit-learn-contrib/skope-rules) (based on [this implementation](https://github.com/scikit-learn-contrib/skope-rules))
'''
from .skope_rules import SkopeRules
from .rule import Rule, replace_feature_name

# NOTE(review): replace_feature_name is imported but left out of __all__, so
# `from <package> import *` exposes only SkopeRules and Rule — confirm that
# keeping it semi-private is intentional before adding it here.
__all__ = ['SkopeRules', 'Rule']
|
import kernelwidget
import bufferswidget
import displaywidget
import newdialog
import stylesheet
import settings
import icons
import utils
import pygpuip
import os
from PySide import QtGui, QtCore
class MainWindow(QtGui.QMainWindow):
    """Main gpuip window: kernel editor tabs, buffer/display docks and the
    build -> import -> allocate -> run -> export pipeline.

    Fix: run_all_steps() called getattr(self, "process"), but no method of
    that name exists (the processing step is run()); every invocation of
    "All steps" raised AttributeError. The step list now names "run".
    """

    def __init__(self, path, settings = None):
        super(MainWindow, self).__init__()
        self.setPath(path)
        self.setWindowIcon(icons.get("pug"))
        self.setStyleSheet(stylesheet.data)
        # Start in center of the screen, covering 80%
        r = QtGui.QDesktopWidget().availableGeometry()
        self.setGeometry(r.width()*0.10, r.height() * 0.10,
                         r.width() * 0.80, r.height() * 0.80)
        self.toolbarIconSize = QtCore.QSize(32,32)
        self.interactive = False
        self.createMenuAndActions()
        self.createDockWidgets()
        # Central tab widget (main part of the gui)
        self.kernelTabWidget = QtGui.QTabWidget(self)
        self.setCentralWidget(self.kernelTabWidget)
        self.reset()
        self.settings = settings
        if self.settings:
            self.initFromSettings()
        # Dirty flags: force each pipeline step to re-run at least once.
        self.needsBuild = True
        self.needsAllocate = True
        self.needsImport = True

    def setPath(self, path):
        """Remember the session file path and mirror it in the title bar."""
        self.path = path
        if path:
            self.setWindowTitle("gpuip - %s" % self.path)
        else:
            self.setWindowTitle("gpuip")

    def new(self):
        """Start a fresh session from the New dialog."""
        dialog = newdialog.NewDialog(self)
        if dialog.exec_():
            self.setPath(None)
            self.settings = dialog.getSettings()
            self.initFromSettings()
            self.log("Creating a new session")

    def newFromExisting(self):
        """Start a new session pre-populated from an existing .ip file."""
        f = QtGui.QFileDialog.getOpenFileName(
            None, "New from existing", QtCore.QDir.currentPath(), "ip (*ip)")
        if f[0]:
            s = settings.Settings()
            s.read(f[0])
            dialog = newdialog.NewDialog(self)
            dialog.initFromSettings(s)
            if dialog.exec_():
                self.setPath(None)
                self.settings = dialog.getSettings()
                self.initFromSettings()
                self.log("Creating new session from previous " + f[0])

    def open(self):
        """Open an existing .ip session file."""
        f = QtGui.QFileDialog.getOpenFileName(
            self, "Open", QtCore.QDir.currentPath(), "ip (*ip)")
        if f[0]:
            self.settings = settings.Settings()
            self.settings.read(f[0])
            self.initFromSettings()
            self.setPath(f[0])
            self.log("Opening " + f[0])

    def save(self):
        """Save to the current path, or fall back to Save As."""
        if self.path:
            self.updateSettings()
            self.settings.write(self.path)
            self.log("Saved current session to %s" % self.path)
        else:
            self.saveAs()

    def saveAs(self):
        """Prompt for a path, then save."""
        f = QtGui.QFileDialog.getSaveFileName(
            self, "Save", QtCore.QDir.currentPath(), "ip (*ip)")
        if f[0]:
            self.setPath(f[0])
            self.save()

    def updateSettings(self):
        """Pull current GUI state (buffers, kernel code, params) into settings."""
        # Get buffer input and outputs
        for b in self.settings.buffers:
            b.input = self.buffersWidget.getBufferInput(b.name)
            b.output = self.buffersWidget.getBufferOutput(b.name)
        # Get in buffers, out buffers and param values
        for k in self.settings.kernels:
            kw = self.kernelWidgets[k.name]
            k.code = str(kw.codeEditor.toPlainText())
            for inb in k.inBuffers:
                inb.buffer = str(kw.inBuffers[inb.name].cbox.currentText())
            for outb in k.outBuffers:
                outb.buffer = str(kw.outBuffers[outb.name].cbox.currentText())
            for p in k.params:
                kernelParam = kw.params[p.name]
                p.value = utils.safeEval(kernelParam.lineEdit.text())

    def initFromSettings(self):
        """Rebuild all widgets and gpuip objects from self.settings."""
        self.reset()
        self.ip, self.buffers, self.kernels = self.settings.create()
        self.displayWidget.setBuffers(self.buffers)
        bufferNames = [b.name for b in self.settings.buffers]
        for b in self.settings.buffers:
            bufinputpath = b.input
            if bufinputpath != "" and not os.path.isfile(bufinputpath):
                bufinputpath = ""
                self.logError("Buffer %s input path not valid: %s" %
                              (b.name, b.input))
            self.buffersWidget.addBuffer(b.name, b.type,
                                         b.channels, bufinputpath, b.output)
        self.buffersWidget.layout.addStretch()
        setBoilerPlate = True
        self.kernelWidgets = {}
        for k in self.settings.kernels:
            w = kernelwidget.KernelWidget(self.kernelTabWidget,
                                          self.interactiveProcess)
            for inb in k.inBuffers:
                w.addInBuffer(inb.name, inb.buffer, bufferNames)
            for outb in k.outBuffers:
                w.addOutBuffer(outb.name, outb.buffer, bufferNames)
            for p in k.params:
                w.addParameter(p.name, p.value, p.default, p.min, p.max, p.type)
            self.kernelTabWidget.addTab(w, k.name)
            self.kernelWidgets[k.name] = w
            if k.code != "":
                w.codeEditor.setText(k.code)
                setBoilerPlate = False
        if setBoilerPlate:
            self.setBoilerplateCode(True)

    def reset(self):
        """Clear all session state and re-create the buffer/kernel widgets."""
        self.logBrowser.clear()
        self.ip = None
        self.bufferData = None
        self.kernels = []
        self.buffers = {}
        self.kernelWidgets = {}
        # Re-add GUI components for buffers widget
        scroll = QtGui.QScrollArea(self)
        scroll.setWidgetResizable(True)
        self.buffersWidget = bufferswidget.BuffersWidget(scroll)
        scroll.setWidget(self.buffersWidget)
        self.dockBuffers.setWidget(scroll)
        self.buffersWidget.show()
        # Remove all kernel widgets from the kernel tab widget
        for i in range(self.kernelTabWidget.count()):
            self.kernelTabWidget.removeTab(0)

    def build(self):
        """Compile all kernels; returns True on success."""
        kernelNames = ""
        for kernel in self.kernels:
            kernelWidget = self.kernelWidgets[kernel.name]
            kernel.code = str(kernelWidget.codeEditor.toPlainText())
            kernelNames += kernel.name + ", "
        self.log("Building kernels [ <i>%s</i> ] ..." % kernelNames[:-2])
        clock = utils.StopWatch()
        err = self.ip.Build()
        if not err:
            self.logSuccess("All kernels were built.", clock)
            self.needsBuild = False
            return True
        else:
            self.logError(err)
            QtGui.QMessageBox.critical(self, self.tr("Kernel Build Error"),
                                       self.tr(err), QtGui.QMessageBox.Ok,
                                       QtGui.QMessageBox.Ok)
            return False

    def import_from_images(self):
        """Read image files into their buffers; returns True on success."""
        self.updateSettings()
        clock = utils.StopWatch()
        for b in self.settings.buffers:
            if b.input:
                self.log("Importing data from image <i>%s</i> to <i>%s</i>." \
                         % (b.input, b.name))
                err = self.buffers[b.name].Read(b.input, utils.getNumCores())
                if err:
                    self.logError(err)
                    return False
        self.logSuccess("Image data imported", clock)
        self.displayWidget.refreshDisplay()
        self.needsImport = False
        return True

    def allocate(self):
        """Allocate GPU buffers and upload input data; True on success."""
        self.updateSettings()
        clock = utils.StopWatch()
        bufferNames = [b.name for b in self.settings.buffers]
        self.log("Allocating buffers <i> %s </i> ..." % bufferNames)
        width, height = utils.allocateBufferData(self.buffers)
        self.ip.SetDimensions(width, height)
        err = self.ip.Allocate()
        clock = utils.StopWatch()
        if err:
            self.logError(err)
            return False
        else:
            self.logSuccess("All buffers were allocated.", clock)
            self.needsAllocate = False
        clock = utils.StopWatch()
        for b in self.settings.buffers:
            if b.input:
                err = self.ip.WriteBufferToGPU(self.buffers[b.name])
                if err:
                    self.logError(err)
                    return False
        self.logSuccess("Data transfered to GPU.", clock)
        return True

    def interactiveProcess(self):
        """Re-run the pipeline on parameter edits when interactive mode is on."""
        if self.interactive:
            self.run()

    def run(self):
        """Run all kernels on the GPU (building/allocating/importing first
        if needed) and read the results back; returns True on success."""
        self.updateSettings()
        # Run previous steps if necessary. If any fails, return function
        if (self.needsBuild and not self.build()) or \
           (self.needsAllocate and not self.allocate()) or \
           (self.needsImport and not self.import_from_images()):
            return False
        self.log("Running kernels...")
        self.settings.updateKernels(self.kernels, self.buffers)
        clock = utils.StopWatch()
        err = self.ip.Run()
        if err:
            self.logError(err)
            return False
        self.logSuccess("All kernels processed.", clock)
        clock = utils.StopWatch()
        for b in self.buffers:
            err = self.ip.ReadBufferFromGPU(self.buffers[b])
            if err:
                self.logError(err)
                return False
        self.logSuccess("Data transfered from GPU.", clock)
        self.displayWidget.refreshDisplay()
        return True

    def export_to_images(self):
        """Write buffers with an output path to image files; True on success."""
        self.updateSettings()
        clock = utils.StopWatch()
        for b in self.settings.buffers:
            if b.output:
                self.log("Exporting data from buffer <i>%s</i> to <i>%s</i>." \
                         % (b.name, b.output))
                err = self.buffers[b.name].Write(b.output, utils.getNumCores())
                if err:
                    self.logError(err)
                    return False
        self.logSuccess("Buffer data transfered to images.", clock)
        return True

    def run_all_steps(self):
        """Execute the full pipeline in order, updating the GUI between steps.

        Fixed: the processing step was listed as "process", a method that
        does not exist on this class — it is run().
        """
        for f in ["build", "import_from_images", "allocate", "run", "export_to_images"]:
            getattr(self, f)()  # run func
            QtGui.QApplication.instance().processEvents()  # update gui
        return True

    def log(self, msg):
        """Append a timestamped message to the log browser."""
        self.logBrowser.append(utils.getTimeStr() + msg)

    def logSuccess(self, msg, clock):
        """Append a green success message with elapsed time."""
        success = "<font color='green'>Success: </font>"
        clockStr= "<i> " + str(clock) + "</i>"
        self.logBrowser.append(utils.getTimeStr() + success + msg + clockStr)

    def logError(self, msg):
        """Append a red error message."""
        error = "<font color='red'>Error: </font>"
        self.logBrowser.append(utils.getTimeStr() + error + msg)

    def toggleInteractive(self):
        """Flip interactive (run-on-edit) mode."""
        self.interactive = not self.interactive

    def refreshCodeFromFile(self):
        """Reload kernel code from disk after user confirmation."""
        ret = QtGui.QMessageBox.warning(
            self, self.tr("Refresh Code From File"),
            self.tr("Refreshing the code will not save current"+\
                    " code. \nDo you want to continue?"),
            QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel,
            QtGui.QMessageBox.Cancel)
        if ret != QtGui.QMessageBox.StandardButton.Cancel:
            self.settings.updateCode()
            for k in self.settings.kernels:
                editor = self.kernelWidgets[k.name].codeEditor
                editor.clear()
                editor.setText(k.code)

    def setBoilerplateCode(self, skipDialog = False):
        """Fill kernel editors with generated boilerplate (confirm unless
        skipDialog; with skipDialog, never overwrite non-empty editors)."""
        if not skipDialog:
            ret = QtGui.QMessageBox.warning(
                self, self.tr("Set Boilerplate Code"),
                self.tr("Setting the boilerplate code will remove previous"+\
                        " code. \nDo you want to continue?"),
                QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel,
                QtGui.QMessageBox.Cancel)
            if ret == QtGui.QMessageBox.StandardButton.Cancel:
                return
        for kernel in self.kernels:
            editor = self.kernelWidgets[kernel.name].codeEditor
            if skipDialog and str(editor.toPlainText()) != "":
                return
            code = self.ip.BoilerplateCode(kernel)
            editor.clear()
            editor.setText(code)

    def createDockWidgets(self):
        """Create the Log, Buffers and Display docks and their menu actions."""
        LEFT = QtCore.Qt.LeftDockWidgetArea
        RIGHT = QtCore.Qt.RightDockWidgetArea
        # Create Log dock
        dock = QtGui.QDockWidget("Log", self)
        self.logBrowser = QtGui.QTextBrowser(dock)
        dock.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea)
        dock.setWidget(self.logBrowser)
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, dock)
        self.windowsMenu.addAction(dock.toggleViewAction())
        # Create buffers dock
        self.dockBuffers = QtGui.QDockWidget("Buffers", self)
        self.dockBuffers.setAllowedAreas(LEFT | RIGHT)
        self.buffersWidget = bufferswidget.BuffersWidget(self)
        self.dockBuffers.setWidget(self.buffersWidget)
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.dockBuffers)
        self.windowsMenu.addAction(self.dockBuffers.toggleViewAction())
        # Create display dock
        dock = QtGui.QDockWidget("Display", self)
        dock.setAllowedAreas(LEFT | RIGHT)
        self.displayWidget = displaywidget.DisplayWidget(dock)
        checkBox = self.displayWidget.interactiveCheckBox
        checkBox.stateChanged.connect(self.toggleInteractive)
        dock.setWidget(self.displayWidget)
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, dock)
        self.windowsMenu.addAction(dock.toggleViewAction())
        # The buffers tab starts with being stacked on the display dock
        self.tabifyDockWidget(dock, self.dockBuffers)

    def createMenuAndActions(self):
        """Build menus, toolbar and all actions (also drops the stray
        trailing commas that previously built and discarded tuples)."""
        menuNames = ["&File", "&Editor", "&Run", "&Windows", "&Help"]
        fileMenu, editorMenu, runMenu, self.windowsMenu, helpMenu = \
            [self.menuBar().addMenu(name) for name in menuNames]
        toolBar = self.addToolBar("Toolbar")
        toolBar.setIconSize(self.toolbarIconSize)
        def _addAction(icon, actionName, shortcut, func, menu, toolbar):
            # Helper: create an action, wire it up, and register it.
            action = QtGui.QAction(icon, actionName, self)
            action.triggered.connect(func)
            if shortcut:
                action.setShortcut(shortcut)
            menu.addAction(action)
            if toolbar:
                toolbar.addAction(action)
        _addAction(icons.get("new"), "&New", QtGui.QKeySequence.New,
                   self.new, fileMenu, toolBar)
        _addAction(icons.get("newExisting"), "&New from existing", None,
                   self.newFromExisting, fileMenu, toolBar)
        _addAction(icons.get("open"), "&Open", QtGui.QKeySequence.Open,
                   self.open, fileMenu, toolBar)
        _addAction(icons.get("save"), "&Save", QtGui.QKeySequence.Save,
                   self.save, fileMenu, toolBar)
        _addAction(icons.get("save"), "&Save As", QtGui.QKeySequence.SaveAs,
                   self.saveAs, fileMenu, None)
        _addAction(QtGui.QIcon(""), "&Quit", "Ctrl+Q",
                   self.close, fileMenu, None)
        toolBar.addSeparator()
        _addAction(icons.get("refresh"), "&Refresh Code From File", "Ctrl+R",
                   self.refreshCodeFromFile, editorMenu, toolBar)
        _addAction(icons.get("boilerplate"), "&Set Boilerplate Code", "Ctrl+L",
                   self.setBoilerplateCode, editorMenu, toolBar)
        toolBar.addSeparator()
        _addAction(icons.get("build"), "1. &Build", "Ctrl+B",
                   self.build, runMenu, toolBar)
        _addAction(icons.get("import"), "2. &Import from images", "Ctrl+W",
                   self.import_from_images, runMenu, toolBar)
        _addAction(icons.get("init"), "3. &Allocate", "Ctrl+I",
                   self.allocate, runMenu, toolBar)
        _addAction(icons.get("process"), "4. &Run", "Ctrl+P",
                   self.run, runMenu, toolBar)
        _addAction(icons.get("export"), "5. &Export to images", "Ctrl+E",
                   self.export_to_images, runMenu, toolBar)
        _addAction(QtGui.QIcon(""), "&All steps", "Ctrl+A",
                   self.run_all_steps, runMenu, None)
        _addAction(QtGui.QIcon(""), "About &Qt", None,
                   QtGui.qApp.aboutQt, helpMenu, None)
|
# Copyright 2018 AimBrain Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
import torch.nn as nn
import torch.nn.functional as F
class NeighbourhoodGraphConvolution(Module):
    """
    Implementation of: https://arxiv.org/pdf/1611.08402.pdf (MoNet) where we
    consider a fixed sized neighbourhood of nodes for each feature
    """

    def __init__(self,
                 in_feat_dim,
                 out_feat_dim,
                 n_kernels,
                 coordinate_dim,
                 bias=False):
        """
        ## Variables:
        - in_feat_dim: dimensionality of input features
        - out_feat_dim: dimensionality of output features
        - n_kernels: number of Gaussian kernels to use
        - coordinate_dim : dimensionality of the pseudo coordinates
        - bias: whether to add a bias to convolutional kernels
        """
        super(NeighbourhoodGraphConvolution, self).__init__()
        # Set parameters
        self.n_kernels = n_kernels
        self.coordinate_dim = coordinate_dim
        self.in_feat_dim = in_feat_dim
        self.out_feat_dim = out_feat_dim
        self.bias = bias
        # Convolution filters weights: one linear map per kernel, each emitting
        # out_feat_dim//n_kernels features. Assumes out_feat_dim is divisible by
        # n_kernels -- TODO confirm; any remainder is silently dropped.
        self.conv_weights = nn.ModuleList([nn.Linear(
            in_feat_dim, out_feat_dim//n_kernels, bias=bias) for i in range(n_kernels)])
        # Parameters of the Gaussian kernels over polar pseudo-coordinates
        # (rho = radial component, theta = angular component).
        self.mean_rho = Parameter(torch.Tensor(n_kernels, 1))
        self.mean_theta = Parameter(torch.Tensor(n_kernels, 1))
        self.precision_rho = Parameter(torch.Tensor(n_kernels, 1))
        self.precision_theta = Parameter(torch.Tensor(n_kernels, 1))
        self.init_parameters()

    def init_parameters(self):
        # Initialise Gaussian parameters: angles uniform over (-pi, pi),
        # radial means and both precisions uniform in (0, 1).
        self.mean_theta.data.uniform_(-np.pi, np.pi)
        self.mean_rho.data.uniform_(0, 1.0)
        self.precision_theta.data.uniform_(0.0, 1.0)
        self.precision_rho.data.uniform_(0.0, 1.0)

    def forward(self, neighbourhood_features, neighbourhood_pseudo_coord):
        """
        Inputs: - neighbourhood_features (batch_size, K,
                  neighbourhood_size, in_feat_dim)
                - neighbourhood_pseudo_coord (batch_size, K, neighbourhood_size,
                  coordinate_dim)
        Returns: - convolved_features (batch_size, K, out_feat_dim)
        """
        # set parameters
        batch_size = neighbourhood_features.size(0)
        K = neighbourhood_features.size(1)
        neighbourhood_size = neighbourhood_features.size(2)
        # compute pseudo coordinate kernel weights, flattened so each
        # (node, neighbour) pair is one row of per-kernel weights
        weights = self.get_gaussian_weights(neighbourhood_pseudo_coord)
        weights = weights.view(
            batch_size*K, neighbourhood_size, self.n_kernels)
        # compute convolved features
        neighbourhood_features = neighbourhood_features.reshape(
            batch_size*K, neighbourhood_size, -1)
        convolved_features = self.convolution(neighbourhood_features, weights)
        convolved_features = convolved_features.view(-1, K, self.out_feat_dim)
        return convolved_features

    def get_gaussian_weights(self, pseudo_coord):
        """
        ## Inputs:
        - pseudo_coord (batch_size, K, K, pseudo_coord_dim)
        ## Returns:
        - weights (batch_size*K*K, n_kernels)
        """
        # compute rho weights: squared distance of each coordinate's radial
        # component (channel 0) to every kernel mean; rows are coordinate
        # pairs, columns are kernels
        diff = (pseudo_coord[:, :, :, 0].contiguous().view(-1, 1) - self.mean_rho.view(1, -1))**2
        weights_rho = torch.exp(-0.5 * diff /
                                (1e-14 + self.precision_rho.view(1, -1)**2))
        # compute theta weights: take the smaller of the two angular distances
        # around the circle since angles (channel 1) are 2*pi-periodic
        first_angle = torch.abs(pseudo_coord[:, :, :, 1].contiguous().view(-1, 1) - self.mean_theta.view(1, -1))
        second_angle = torch.abs(2 * np.pi - first_angle)
        weights_theta = torch.exp(-0.5 * (torch.min(first_angle, second_angle)**2)
                                  / (1e-14 + self.precision_theta.view(1, -1)**2))
        weights = weights_rho * weights_theta
        # NaN guard: (x != x) is True only for NaN entries; zero them out
        weights[(weights != weights).detach()] = 0
        # normalise each row so the kernel weights of a coordinate sum to 1
        weights = weights / torch.sum(weights, dim=1, keepdim=True)
        return weights

    def convolution(self, neighbourhood, weights):
        """
        ## Inputs:
        - neighbourhood (batch_size*K, neighbourhood_size, in_feat_dim)
        - weights (batch_size*K, neighbourhood_size, n_kernels)
        ## Returns:
        - convolved_features (batch_size*K, out_feat_dim)
        """
        # patch operator: per-kernel weighted sum over the neighbourhood
        # -> (batch_size*K, n_kernels, in_feat_dim)
        weighted_neighbourhood = torch.bmm(
            weights.transpose(1, 2), neighbourhood)
        # convolutions: apply the i-th linear filter to the i-th kernel slice
        weighted_neighbourhood = [self.conv_weights[i](weighted_neighbourhood[:, i]) for i in range(self.n_kernels)]
        # concatenate per-kernel outputs and flatten to out_feat_dim
        convolved_features = torch.cat([i.unsqueeze(1) for i in weighted_neighbourhood], dim=1)
        convolved_features = convolved_features.view(-1, self.out_feat_dim)
        return convolved_features
class GraphLearner(Module):
    """Learns a dense adjacency matrix A = E E^T from node features (eq. 1)."""

    def __init__(self, in_feature_dim, combined_feature_dim, n_obj, dropout=0.0):
        """
        ## Variables:
        - in_feature_dim: dimensionality of input features
        - combined_feature_dim: dimensionality of the joint hidden embedding
        - n_obj: number of graph nodes/objects on the image
        - dropout: dropout probability for the regulariser
        """
        super(GraphLearner, self).__init__()
        # Bookkeeping
        self.in_dim = in_feature_dim
        self.combined_dim = combined_feature_dim
        self.n_obj = n_obj
        # Two-layer edge embedding; both layers are weight-normalised.
        self.edge_layer_1 = nn.utils.weight_norm(
            nn.Linear(in_feature_dim, combined_feature_dim))
        self.edge_layer_2 = nn.utils.weight_norm(
            nn.Linear(combined_feature_dim, combined_feature_dim))
        # Regularisation
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, graph_nodes):
        """
        ## Inputs:
        - graph_nodes (batch_size, K, in_feat_dim): input features
        ## Returns:
        - adjacency matrix (batch_size, K, K)
        """
        # Embed each node through the two ReLU layers.
        hidden = F.relu(self.edge_layer_1(graph_nodes))
        hidden = F.relu(self.edge_layer_2(hidden))
        # Outer product of the embeddings gives the adjacency matrix.
        hidden = hidden.view(-1, self.n_obj, self.combined_dim)
        return torch.matmul(hidden, hidden.transpose(1, 2))
|
class Snake:
    """Embeds a small Processing.js "snake" animation as an HTML snippet."""

    def __init__(self, width=600, height=400):
        # Canvas size in pixels.
        self.width = width
        self.height = height

    def xml(self):
        """Return the <script> + <canvas> markup with the canvas size filled in.

        Literal '%' characters inside the sketch are doubled ('%%') because
        the template is rendered with %-formatting.
        """
        markup = """
<script type="application/processing">
var width = %s;
var height = %s;
var x = width/2;
var y = height/2;
var vx = 2;
var vy = 0;
void setup() {
size(width,height);
smooth();
frameRate(10);
loop();
}
void draw() {
fill(255,0,0);
ellipse(x,y,10,10);
x = (x+vx+width) %% width;
y = (y+vy+height) %% height;
}
void keyPressed() {
if(key == CODED) {
if(keyCode==UP) { vx = 0; vy-=1;}
if(keyCode==DOWN) { vx = 0; vy+=1;}
if(keyCode==LEFT) { vx-=1; vy = 0;}
if(keyCode==RIGHT) { vx+=1; vy = 0;}
}
}
void mousePressed() {
}
</script><canvas width="%spx" height="%spx"></canvas>
"""
        return markup % (self.width, self.height, self.width, self.height)
|
"""
Viewsets for users app
"""
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters, mixins, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from kaznet.apps.main.mixins import KaznetViewsetMixin
from kaznet.apps.users.filters import UserProfileOrderingFilter
from kaznet.apps.users.models import UserProfile
from kaznet.apps.users.permissions import IsOwnUserProfileOrAdmin
from kaznet.apps.users.serializers import UserProfileSerializer
# pylint: disable=too-many-ancestors
class UserProfileViewSet(KaznetViewsetMixin, mixins.CreateModelMixin,
                         mixins.ListModelMixin, mixins.RetrieveModelMixin,
                         mixins.DestroyModelMixin, mixins.UpdateModelMixin,
                         viewsets.GenericViewSet):
    """
    ViewSet class for UserProfiles.

    Provides full CRUD on UserProfile objects plus a `profile` action for
    the currently logged-in user.
    """
    serializer_class = UserProfileSerializer
    # Only authenticated users; writes restricted to the owner or an admin.
    permission_classes = [IsAuthenticated, IsOwnUserProfileOrAdmin]
    filter_backends = [
        DjangoFilterBackend, UserProfileOrderingFilter, filters.SearchFilter
    ]
    # Exact-match query-string filters.
    # NOTE(review): `filter_fields` is the legacy django-filter name
    # (`filterset_fields` in newer releases) -- confirm installed version.
    filter_fields = ['role', 'expertise', 'ona_username']
    # Free-text fields searched via ?search=.
    search_fields = [
        'user__first_name', 'user__last_name', 'ona_username', 'user__email',
        'national_id'
    ]
    # Fields accepted by the ordering filter.
    ordering_fields = [
        'user__first_name', 'user__last_name', 'submission_count', 'created',
        'national_id'
    ]
    queryset = UserProfile.objects.all()  # pylint: disable=no-member

    # pylint: disable=unused-argument
    # pylint: disable=invalid-name
    @action(detail=False)
    def profile(self, request, pk=None):
        """
        Action that returns the Currently logged in Users
        Profile
        """
        # Anonymous callers get an empty object rather than an error.
        if request.user.is_authenticated:
            userprofile = request.user.userprofile
            userprofile_data = self.get_serializer(userprofile).data
            return Response(userprofile_data)
        return Response({})
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on 25-01-2011
@author: fearless-spider
'''
import urllib2, sys, time, random
import BaseHTTPServer
import csv
from BeautifulSoup import BeautifulSoup
from htmlentitydefs import name2codepoint as n2cp
import re
def substitute_entity(match):
    """re.subn callback: convert one matched HTML entity to its character.

    Group 1 is '#' for numeric entities (e.g. &#65;) and empty for named
    ones (e.g. &amp;); group 2 is the digits or the entity name. Unknown
    named entities are returned unchanged. (Python 2: uses unichr.)
    """
    ent = match.group(2)
    if match.group(1) == "#":
        # Numeric entity: decimal code point.
        return unichr(int(ent))
    else:
        # Named entity: look up the code point, fall back to the raw match.
        cp = n2cp.get(ent)
        if cp:
            return unichr(cp)
        else:
            return match.group()
def decode_htmlentities(string):
    """Replace every HTML entity in *string* with its unicode character."""
    # Matches numeric (&#NNNNN;) and named (&name;) entities alike.
    pattern = re.compile(r"&(#?)(\d{1,5}|\w{1,8});")
    decoded, _count = pattern.subn(substitute_entity, string)
    return decoded
# CSV output: '^' as delimiter and '~' as quote char so commas/quotes inside
# scraped text do not need escaping.
listingWriter = csv.writer(open('datak.csv', 'wb'), delimiter='^', quotechar='~')
url = 'http://www.yell.com'
# Element ids / class names used to locate listing data in the page markup.
channel_id = ['geoListings']
adverts_class = ['listingDetail']
span_class = ['listingTitle']
phone_class = ['phoneNumber']
address_class = ['address']
category_class = ['ypgCategoryLink']
web_class = ['noPrint']
advertsphone_class = ['listingDetailRHS']
# Scrape result pages 1..50 of the Canada-wide search.
for pagenr in range(1, 51):
    print pagenr
    page = urllib2.urlopen('http://www.yellowpages.ca/search/si/' + str(pagenr) + '/k/Canada')
    website_content = page.read()
    # Keep only the markup between the listings container and the right pane.
    start = website_content.find('<div id="geoListings">')
    website_content = website_content[start:]
    end = website_content.find('<div id="rightPane">')
    website_content = website_content[:end]
    # Strip every <script>...</script> block before parsing.
    while True:
        sstart = website_content.find('<script')
        if sstart == -1:
            break
        send = website_content.find('</script>')
        # 9 == len('</script>')
        website_content = website_content[:sstart] + website_content[send + 9:]
    soup = BeautifulSoup(website_content)
    for channel in soup.findAll('div'):
        if channel.has_key('id') and channel['id'] in channel_id:
            for adverts in channel.findAll('div'):
                title = ''
                phone = ''
                category = ''
                address = ''
                web = ''
                if adverts.has_key('class') and adverts['class'] in adverts_class:
                    # NOTE(review): `row` is only (re)created here, so rows
                    # persist across loop iterations for the sibling divs
                    # below -- confirm this matches the page structure.
                    row = []
                    for span in adverts.findAll('span'):
                        if span.has_key('class') and span['class'] in span_class:
                            title = str(span.string).strip().decode('utf-8')
                            row.append(title)
                        if span.has_key('class') and span['class'] in category_class:
                            for ahref in span.findAll('a'):
                                category = ahref.string
                                # NOTE(review): replace('&', '&') is a no-op;
                                # presumably meant replace('&amp;', '&') and was
                                # mangled -- confirm against the original file.
                                row.append(category.strip().replace('&', '&'))
                                # print category
                if adverts.has_key('class') and adverts['class'] in advertsphone_class:
                    for ahref in adverts.findAll('a'):
                        if ahref.has_key('class') and ahref['class'] in phone_class:
                            phone = ahref.string
                            row.append(decode_htmlentities(phone.strip()))
                if adverts.has_key('class') and adverts['class'] in address_class:
                    address = adverts.string
                    row.append(address.decode('utf-8').strip())
                if adverts.has_key('class') and adverts['class'] in web_class:
                    for ahref in adverts.findAll('a'):
                        # The link title carries the site URL; trim it to the
                        # 'www...' portion, dropping the trailing '- ...' text.
                        web = str(ahref.get('title')).strip().decode('utf-8')
                        if web.find('www') != -1:
                            web = web[web.find('www'):web.rfind('-') - 1]
                        row.append(web.strip())
                listingWriter.writerow(row)
    page.close()
|
import inspect
import os
import unittest
import pytest
from approvaltests.approvals import verify_all
from approvaltests.reporters.generic_diff_reporter_factory import (
GenericDiffReporterFactory,
)
from mockito import mock, when, unstub, ANY
from robot.utils import WINDOWS
from selenium import webdriver
from SeleniumLibrary.keywords.webdrivertools import SeleniumOptions, WebDriverCreator
@pytest.fixture(scope="module")
def options():
    """Module-scoped SeleniumOptions shared by the option-parsing tests."""
    return SeleniumOptions()
@pytest.fixture(scope="module")
def reporter():
    """Load the approvaltests reporter config next to the test folder and
    return the first diff reporter that works on this machine."""
    path = os.path.dirname(__file__)
    reporter_json = os.path.abspath(
        os.path.join(path, "..", "approvals_reporters.json")
    )
    factory = GenericDiffReporterFactory()
    factory.load(reporter_json)
    return factory.get_first_working()
def teardown_function():
    # Remove all mockito stubs after each test so they cannot leak between tests.
    unstub()
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_parse_options_string(options, reporter):
    """Approval test: parsing of method-call and attribute option strings."""
    results = []
    results.append(options._parse('method("arg1")'))
    results.append(options._parse('method("arg1", "arg2")'))
    results.append(options._parse("method(True)"))
    results.append(options._parse("method(1)"))
    results.append(options._parse('method("arg1", 2, None, False, "arg2")'))
    # Whitespace around tokens must be tolerated.
    results.append(options._parse('method ( " arg1 " , 2 , None , False , " arg2 " )'))
    results.append(options._parse('attribute="arg1"'))
    results.append(options._parse(" attribute = True "))
    # ';' separates multiple option statements.
    results.append(options._parse('method("arg1");attribute=True'))
    results.append(options._parse('method("arg1") ; attribute=True ; method("arg2")'))
    results.append(options._parse("attribute"))
    results.append(options._parse("method()"))
    results.append(options._parse("method(None)"))
    results.append(options._parse('method("--proxy 10.10.1.3:2345")'))
    results.append(options._parse('method(";arg1")'))
    results.append(options._parse('method ( "arg1" , 2 ,"arg2" )'))
    results.append(options._parse("method('arg1')"))
    # Escaped Windows paths, both via doubled backslashes and a raw string.
    results.append(
        options._parse(
            'add_argument("-profile"); add_argument("C:\\\\path\\to\\\\profile")'
        )
    )
    results.append(
        options._parse(
            r'add_argument("-profile"); add_argument("C:\\path\\to\\profile")'
        )
    )
    results.append(options._parse("attribute=None"))
    results.append(
        options._parse(
            'method("foo", {"key": False});attribute=True;method("bar", {"key": None})'
        )
    )
    verify_all("Selenium options string to dict", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_index_of_separator(options, reporter):
    """Approval test: locating the argument separator in complex option strings."""
    results = []
    results.append(options._get_arument_index('method({"key": "value"})'))
    results.append(options._get_arument_index('attribute={"key": "value"}'))
    results.append(options._get_arument_index('method(foo={"key": "value"})'))
    results.append(options._get_arument_index('attribute=("value1", "value2")'))
    verify_all("Get argument index", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_parse_complex_object(options, reporter):
    """Approval test: tokenising options that contain dict/tuple literals."""
    results = []
    results.append(options._parse_to_tokens('method({"key": "value"})'))
    results.append(options._parse_to_tokens('attribute={"key": "value"}'))
    results.append(options._parse_to_tokens('attribute=("value1", "value2")'))
    results.append(options._parse_to_tokens('method("foo", {"key": "value"})'))
    verify_all("Parse complex Python object", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_parse_arguemnts(options, reporter):
    """Approval test: extracting argument values from parsed tokens.

    NOTE(review): the function name misspells 'arguments'; kept as-is since
    renaming a discovered pytest test is a cosmetic, history-breaking change.
    """
    results = []
    results.append(options._parse_arguments(("arg1",), True))
    results.append(options._parse_arguments("arg1", False))
    results.append(options._parse_arguments({"key": "value"}, False))
    results.append(options._parse_arguments(["value1", "value2"], False))
    results.append(options._parse_arguments(("foo", {"key": "value"}), False))
    verify_all("Parse arguments from complex object", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_parse_options_string_errors(options, reporter):
    """Approval test: malformed option strings produce the expected errors."""
    results = []
    results.append(error_formatter(options._parse, 'method("arg1)', True))
    results.append(error_formatter(options._parse, 'method(arg1")', True))
    results.append(error_formatter(options._parse, "method(arg1)", True))
    results.append(error_formatter(options._parse, "attribute=arg1", True))
    results.append(error_formatter(options._parse, "attribute=webdriver", True))
    results.append(error_formatter(options._parse, 'method(argument="value")', True))
    verify_all("Selenium options string errors", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_split_options(options, reporter):
    """Approval test: splitting option strings on ';', honouring quoting."""
    results = []
    results.append(options._split('method("arg1");method("arg2")'))
    results.append(options._split('method("arg1")'))
    results.append(options._split("attribute=True"))
    # Semicolons inside quoted values must not act as separators.
    results.append(
        options._split('attribute="semi;colons;middle";other_attribute=True')
    )
    results.append(options._split('method("arg1;");method(";arg2;")'))
    results.append(options._split(' method ( " arg1 ") ; method ( " arg2 " ) '))
    verify_all("Selenium options string splitting", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_options_create(options, reporter):
    """Approval test: building Chrome options from strings, an options object
    and None; too many arguments must raise."""
    results = []
    options_str = 'add_argument("--disable-dev-shm-usage")'
    sel_options = options.create("chrome", options_str)
    results.append(sel_options.arguments)
    options_str = f'{options_str};add_argument("--headless")'
    sel_options = options.create("chrome", options_str)
    results.append(sel_options.arguments)
    options_str = f'{options_str};add_argument("--proxy-server=66.97.38.58:80")'
    sel_options = options.create("chrome", options_str)
    results.append(sel_options.arguments)
    options_str = f'{options_str};binary_location("too", "many", "args")'
    try:
        options.create("chrome", options_str)
    except Exception as error:
        # Only the first 7 characters of the message are approved, to keep
        # the approval file stable across Selenium versions.
        results.append(error.__str__()[:7])
    # A ready-made ChromeOptions instance is passed through as-is.
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument("--disable-dev-shm-usage")
    sel_options = options.create("chrome", chrome_options)
    results.append(sel_options.arguments)
    sel_options = options.create("chrome", None)
    results.append(sel_options)
    verify_all("Selenium options", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_create_with_android(options, reporter):
    """Approval test: Chrome options with an Android package are accepted
    for the 'android' browser."""
    results = []
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_experimental_option("androidPackage", "com.android.chrome")
    sel_options = options.create("android", chrome_options)
    results.append([sel_options.arguments, sel_options.experimental_options])
    verify_all("Selenium options with android", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_get_options(options, reporter):
    """Approval test: a single option string yields the expected arguments."""
    options_str = 'add_argument("--proxy-server=66.97.38.58:80")'
    sel_options = options.create("chrome", options_str)
    results = [sel_options.arguments]
    verify_all("Selenium options with string.", results, reporter=reporter)
@unittest.skipIf(WINDOWS, reason="ApprovalTest do not support different line feeds")
def test_importer(options, reporter):
    """Approval test: importing the Options class per browser; browsers
    without an Options class raise (captured via error_formatter)."""
    results = []
    results.append(options._import_options("firefox"))
    results.append(options._import_options("headless_firefox"))
    results.append(options._import_options("chrome"))
    results.append(options._import_options("headless_chrome"))
    results.append(options._import_options("ie"))
    results.append(options._import_options("opera"))
    results.append(options._import_options("edge"))
    results.append(error_formatter(options._import_options, "phantomjs"))
    results.append(error_formatter(options._import_options, "safari"))
    results.append(error_formatter(options._import_options, "htmlunit"))
    results.append(error_formatter(options._import_options, "htmlunit_with_js"))
    results.append(options._import_options("android"))
    results.append(error_formatter(options._import_options, "iphone"))
    verify_all("Selenium options import", results, reporter=reporter)
def error_formatter(method, arg, full=False):
    """Call *method* with *arg*; on failure return "<arg> <error message>".

    When *full* is true the whole exception message is kept, otherwise it is
    truncated to its first 15 characters. On success the plain return value
    of *method* is passed through.
    """
    try:
        return method(arg)
    except Exception as error:
        message = str(error)
        if full:
            return f"{arg} {message}"
        return f"{arg} {message[:15]}"
@pytest.fixture(scope="module")
def creator():
    """WebDriverCreator writing driver logs under ../../output_dir."""
    curr_dir = os.path.dirname(os.path.abspath(__file__))
    output_dir = os.path.abspath(os.path.join(curr_dir, "..", "..", "output_dir"))
    return WebDriverCreator(output_dir)
@pytest.fixture(scope="module")
def output_dir():
    """Path to ../../output_dir (must match the `creator` fixture)."""
    curr_dir = os.path.dirname(os.path.abspath(__file__))
    output_dir = os.path.abspath(os.path.join(curr_dir, "..", "..", "output_dir"))
    return output_dir
def test_create_chrome_with_options(creator):
    """Chrome: the options object is forwarded to webdriver.Chrome."""
    options = mock()
    expected_webdriver = mock()
    when(webdriver).Chrome(
        service_log_path=None, options=options, executable_path="chromedriver"
    ).thenReturn(expected_webdriver)
    driver = creator.create_chrome({}, None, options=options)
    assert driver == expected_webdriver
def test_create_chrome_with_options_and_remote_url(creator):
    """Chrome via remote URL: options go to webdriver.Remote instead."""
    url = "http://localhost:4444/wd/hub"
    caps = webdriver.DesiredCapabilities.CHROME.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor=url,
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_chrome({}, url, options=options)
    assert driver == expected_webdriver
def test_create_headless_chrome_with_options(creator):
    """Headless Chrome: same webdriver.Chrome call as regular Chrome."""
    options = mock()
    expected_webdriver = mock()
    when(webdriver).Chrome(
        service_log_path=None, options=options, executable_path="chromedriver"
    ).thenReturn(expected_webdriver)
    driver = creator.create_headless_chrome({}, None, options=options)
    assert driver == expected_webdriver
def test_create_firefox_with_options(creator, output_dir):
    """Firefox: options, a fresh profile and the gecko log path are forwarded."""
    log_file = os.path.join(output_dir, "geckodriver-1.log")
    options = mock()
    profile = mock()
    expected_webdriver = mock()
    when(webdriver).FirefoxProfile().thenReturn(profile)
    when(webdriver).Firefox(
        options=options,
        firefox_profile=profile,
        executable_path="geckodriver",
        service_log_path=log_file,
    ).thenReturn(expected_webdriver)
    driver = creator.create_firefox({}, None, None, options=options)
    assert driver == expected_webdriver
def test_create_firefox_with_options_and_remote_url(creator):
    """Firefox via remote URL: options and profile go to webdriver.Remote."""
    url = "http://localhost:4444/wd/hub"
    profile = mock()
    when(webdriver).FirefoxProfile().thenReturn(profile)
    caps = webdriver.DesiredCapabilities.FIREFOX.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor=url,
        desired_capabilities=caps,
        browser_profile=profile,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_firefox({}, url, None, options=options)
    assert driver == expected_webdriver
def test_create_headless_firefox_with_options(creator, output_dir):
    """Headless Firefox: same webdriver.Firefox call as regular Firefox."""
    log_file = os.path.join(output_dir, "geckodriver-1.log")
    options = mock()
    profile = mock()
    expected_webdriver = mock()
    when(webdriver).FirefoxProfile().thenReturn(profile)
    when(webdriver).Firefox(
        options=options,
        firefox_profile=profile,
        executable_path="geckodriver",
        service_log_path=log_file,
    ).thenReturn(expected_webdriver)
    driver = creator.create_headless_firefox({}, None, None, options=options)
    assert driver == expected_webdriver
def test_create_ie_with_options(creator):
    """IE: the options object is forwarded to webdriver.Ie."""
    options = mock()
    expected_webdriver = mock()
    when(webdriver).Ie(
        service_log_path=None, options=options, executable_path="IEDriverServer.exe"
    ).thenReturn(expected_webdriver)
    driver = creator.create_ie({}, None, options=options)
    assert driver == expected_webdriver
def test_create_ie_with_options_and_remote_url(creator):
    """IE via remote URL: options go to webdriver.Remote."""
    url = "http://localhost:4444/wd/hub"
    caps = webdriver.DesiredCapabilities.INTERNETEXPLORER.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor=url,
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_ie({}, url, options=options)
    assert driver == expected_webdriver
def test_create_ie_with_options_and_log_path(creator):
    """IE with log path.

    NOTE(review): this is effectively identical to test_create_ie_with_options
    and still stubs service_log_path=None -- no log path is actually exercised.
    TODO confirm the intended scenario.
    """
    options = mock()
    expected_webdriver = mock()
    when(webdriver).Ie(
        options=options, service_log_path=None, executable_path="IEDriverServer.exe"
    ).thenReturn(expected_webdriver)
    driver = creator.create_ie({}, None, options=options)
    assert driver == expected_webdriver
def test_has_options(creator):
    """_has_options reports which webdriver classes accept an options kwarg."""
    assert creator._has_options(webdriver.Chrome)
    assert creator._has_options(webdriver.Firefox)
    assert creator._has_options(webdriver.Ie)
    assert creator._has_options(webdriver.Edge) is False
    assert creator._has_options(webdriver.Opera)
    assert creator._has_options(webdriver.Safari) is False
def test_create_opera_with_options(creator):
    """Opera: the options object is forwarded to webdriver.Opera."""
    options = mock()
    expected_webdriver = mock()
    executable_path = "operadriver"
    when(webdriver).Opera(
        options=options, service_log_path=None, executable_path=executable_path
    ).thenReturn(expected_webdriver)
    driver = creator.create_opera({}, None, options=options)
    assert driver == expected_webdriver
def test_create_opera_with_options_and_remote_url(creator):
    """Opera via remote URL: options go to webdriver.Remote."""
    url = "http://localhost:4444/wd/hub"
    caps = webdriver.DesiredCapabilities.OPERA.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor=url,
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_opera({}, url, options=options)
    assert driver == expected_webdriver
def test_create_safari_no_options_support(creator):
    """Safari: options are silently ignored (webdriver.Safari takes none)."""
    options = mock()
    expected_webdriver = mock()
    executable_path = "/usr/bin/safaridriver"
    when(webdriver).Safari(executable_path=executable_path).thenReturn(
        expected_webdriver
    )
    driver = creator.create_safari({}, None, options=options)
    assert driver == expected_webdriver
def test_create_phantomjs_no_options_support(creator):
    """PhantomJS: options are silently ignored."""
    options = mock()
    expected_webdriver = mock()
    executable_path = "phantomjs"
    when(webdriver).PhantomJS(
        service_log_path=None, executable_path=executable_path
    ).thenReturn(expected_webdriver)
    driver = creator.create_phantomjs({}, None, options=options)
    assert driver == expected_webdriver
def test_create_htmlunit_no_options_support(creator):
    """HtmlUnit: always a Remote session; options passed but unused by it."""
    caps = webdriver.DesiredCapabilities.HTMLUNIT.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor="None",
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_htmlunit(
        {"desired_capabilities": caps}, None, options=options
    )
    assert driver == expected_webdriver
def test_create_htmlunit_with_js_no_options_support(creator):
    """HtmlUnit-with-JS: Remote session with HTMLUNITWITHJS capabilities."""
    caps = webdriver.DesiredCapabilities.HTMLUNITWITHJS.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor="None",
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_htmlunit_with_js({}, None, options=options)
    assert driver == expected_webdriver
def test_android_options_support(creator):
    """Android: Remote session with ANDROID capabilities, options forwarded."""
    caps = webdriver.DesiredCapabilities.ANDROID.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor="None",
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_android({}, None, options=options)
    assert driver == expected_webdriver
def test_iphone_options_support(creator):
    """iPhone: Remote session with IPHONE capabilities, options forwarded."""
    caps = webdriver.DesiredCapabilities.IPHONE.copy()
    options = mock()
    expected_webdriver = mock()
    file_detector = mock_file_detector(creator)
    when(webdriver).Remote(
        command_executor="None",
        desired_capabilities=caps,
        browser_profile=None,
        options=options,
        file_detector=file_detector,
    ).thenReturn(expected_webdriver)
    driver = creator.create_iphone({}, None, options=options)
    assert driver == expected_webdriver
def test_create_driver_chrome(creator):
    """End-to-end create_driver: the option string is parsed and the result
    is handed to webdriver.Chrome. Browser name lookup is case-insensitive."""
    str_options = "add_argument:--disable-dev-shm-usage"
    options = mock()
    expected_webdriver = mock()
    when(creator.selenium_options).create("chrome", str_options).thenReturn(options)
    executable_path = "chromedriver"
    when(creator)._get_executable_path(ANY).thenReturn(executable_path)
    when(webdriver).Chrome(
        service_log_path=None, options=options, executable_path=executable_path
    ).thenReturn(expected_webdriver)
    driver = creator.create_driver(
        "Chrome", desired_capabilities={}, remote_url=None, options=str_options
    )
    assert driver == expected_webdriver
def test_create_driver_firefox(creator, output_dir):
    """End-to-end create_driver for Firefox: parsed options, fresh profile
    and gecko log path all reach webdriver.Firefox ("FireFox" mixed case)."""
    log_file = os.path.join(output_dir, "geckodriver-1.log")
    str_options = "add_argument:--disable-dev-shm-usage"
    options = mock()
    profile = mock()
    when(webdriver).FirefoxProfile().thenReturn(profile)
    expected_webdriver = mock()
    when(creator.selenium_options).create("firefox", str_options).thenReturn(options)
    executable_path = "geckodriver"
    when(creator)._get_executable_path(ANY).thenReturn(executable_path)
    when(webdriver).Firefox(
        options=options,
        firefox_profile=profile,
        executable_path=executable_path,
        service_log_path=log_file,
    ).thenReturn(expected_webdriver)
    driver = creator.create_driver(
        "FireFox", desired_capabilities={}, remote_url=None, options=str_options
    )
    assert driver == expected_webdriver
def mock_file_detector(creator):
    """Stub creator._get_sl_file_detector and return the mocked detector,
    so Remote(...) stubs can match on it."""
    file_detector = mock()
    when(creator)._get_sl_file_detector().thenReturn(file_detector)
    return file_detector
|
#############################################################################
# Transport properties of solvent-permeable hard spheres #
# The basic theory is based on Riest et al. Soft Matter (2015) #
# and simulation data from Abade et al. (2012) #
# #
# Used in the paper: #
# [1] Park and N{\"a}gele, JCP, 2020 #
# doi: 10.1063/5.0020986 #
# #
# [2] Park and N{\"a}gele, Membranes, 2021 #
# doi: https://doi.org/10.3390/membranes11120960 #
# #
# #
# Code Developer: Park, Gun Woo (g.park@fz-juelich.de) #
# MIT Open License (see LICENSE file in the main directory) #
# #
# Update (June 2021): #
# The original code only applicable for the hollow fiber #
# New version support for the channel between two flat sheets: #
# 1. FMM: channel flow between flat membrane (top) / membrane (bottom) #
# 2. FMS: channel flow between flat membrane (top) / substrate (bottom) #
# For this reason, the hollow fiber expression will be renamed as HF #
# #
# Important note: #
# The new update is based on the coordinate y (in the new manuscript)       #
# This is exactly the same treatment with r in the code #
#############################################################################
from numpy import *
def fcn_unity(phi, cond_GT):
    """Constant function: an array of ones with one entry per element of phi.

    cond_GT is accepted only for call-signature uniformity with the other
    transport-coefficient functions and is not used.
    """
    return ones(size(phi))
def eta_inf_div_eta0_HS_S3(phi, gamma):
    """High-frequency viscosity over solvent viscosity, third-order Saito form.

    Follows Riest et al., Soft Matter (2015). `gamma` is unused here because
    the third-order Saito fit targets (impermeable) hard spheres.
    """
    one_plus_saito = 1. + Saito_fcn_HS_S3(phi)
    return 1 + (5. / 2.) * phi * one_plus_saito / (1. - phi * one_plus_saito)
def Gamma_S_HS_S3(phi, gamma):
    """Generalized Stokes-Einstein factor (self-diffusion) for hard spheres.

    Product of the reduced self-diffusion coefficient and the reduced
    high-frequency viscosity (third-order Saito form).
    """
    reduced_Ds = Ds_div_D0_SPHS(phi, gamma)
    reduced_eta_inf = eta_inf_div_eta0_HS_S3(phi, gamma)
    return reduced_Ds * reduced_eta_inf
def eta_div_eta0_HS_S3(phi, cond_GT):
    """Steady-shear viscosity over solvent viscosity, third-order Saito form.

    Combines the high-frequency viscosity with the shear-relaxation
    contribution Del_eta_noHI through the generalized Stokes-Einstein factor,
    following Riest et al., Soft Matter (2015). `cond_GT` must provide the
    permeability parameter under the key 'gamma'.
    """
    gamma = cond_GT['gamma']
    inv_gamma_s = 1 / Gamma_S_HS_S3(phi, gamma)
    return eta_inf_div_eta0_HS_S3(phi, gamma) * (1 + inv_gamma_s * Del_eta_noHI_div_eta0_SPHS(phi))
# Below are the originally used functions
def eta_div_eta0_SPHS(phi, cond_GT):
    """Steady-shear viscosity over solvent viscosity for solvent-permeable
    hard spheres; reads gamma from cond_GT['gamma'] and delegates to the
    gamma-argument variant eta_div_eta0_SPHS_2."""
    return eta_div_eta0_SPHS_2(phi, cond_GT['gamma'])
def eta_div_eta0_SPHS_2(phi, gamma):
    """Same as eta_div_eta0_SPHS but takes gamma directly instead of cond_GT."""
    inv_gamma_s = 1 / Gamma_S_SPHS(phi, gamma)
    return eta_inf_div_eta0_SPHS(phi, gamma) * (1 + inv_gamma_s * Del_eta_noHI_div_eta0_SPHS(phi))
def Dc_short_div_D0_SPHS(phi, cond_GT):
    """Short-time collective diffusion coefficient of solvent-permeable hard spheres.

    Arguments:
        phi: volume fraction of particles
        cond_GT: condition dictionary; only cond_GT['gamma'] is used
    Return:
        Dc/D0, the reduced short-time collective diffusion coefficient
        (sedimentation coefficient divided by the Carnahan-Starling
        compressibility factor).
    """
    return Dc_short_div_D0_SPHS_2(phi, cond_GT['gamma'])
def Dc_short_div_D0_SPHS_2(phi, gamma):
    """Same as Dc_short_div_D0_SPHS, but takes gamma directly."""
    sedimentation = K_SPHS(phi, gamma)
    return sedimentation / S0_CS(phi)
# Auxiliary functions
def Ds_div_D0_SPHS(phi, gamma):
    """Reduced short-time self-diffusion coefficient of solvent-permeable
    hard spheres (polynomial fit in phi with a gamma-dependent slope)."""
    poly = 1 + 0.12*phi - 0.70*phi**2
    return 1 + lambda_t_SPHS(gamma) * phi * poly
def lambda_t_SPHS(gamma):
    """Gamma-dependent slope for the self-diffusion fit: fifth-order
    polynomial in the permeability deficit u = 1 - gamma."""
    u = 1 - gamma
    return -1.8315 + 7.820*u - 14.231*u**2 + 14.908*u**3 - 9.383*u**4 + 2.717*u**5
def eta_inf_div_eta0_SPHS(phi, gamma):
    # High-frequency viscosity over solvent viscosity for solvent-permeable
    # hard spheres, using the first-order Saito fit (Saito_fcn_SPHS).
    # NOTE(review): the final factor is a *multiplication* by
    # (1 - gamma^3*phi*(1+S)), whereas eta_inf_div_eta0_HS_S3 above *divides*
    # by (1 - phi*(1+S)). This may be a deliberate low-phi expansion of that
    # denominator — confirm against Riest et al., Soft Matter (2015).
    return 1 + 2.5*gamma**3*phi*(1 + Saito_fcn_SPHS(phi, gamma))*(1 - gamma**3 * phi * (1 + Saito_fcn_SPHS(phi, gamma)))
# Here, the Saito function take up to the third order
def Saito_fcn_HS_S3(phi):
    """Saito function for hard spheres: third-order polynomial fit in phi (S3).

    Reported in Riest et al., Soft Matter (2015); their solvent-permeable
    treatment instead uses the linear-in-phi fit (see Saito_fcn_SPHS).
    """
    return phi*(1. + 0.95*phi - 2.15*phi**2.0)
def Saito_fcn_SPHS(phi, gamma):
    """Saito function for solvent-permeable hard spheres: linear in phi.

    First-order fit from Riest et al., Soft Matter (2015), based on the
    simulation data of Abade et al., JCP (2012). For the impermeable case
    (gamma = 1) the third-order fit Saito_fcn_HS_S3 is the recommended,
    more accurate choice.
    """
    slope = lambda_V_SPHS(gamma)/(2.5*gamma**3) - gamma**3
    return phi * slope
def lambda_V_SPHS(gamma):
    """Gamma-dependent coefficient of the first-order Saito fit: fourth-order
    polynomial in the permeability deficit u = 1 - gamma."""
    u = 1 - gamma
    return 5.0021 - 39.279*u + 143.179*u**2 - 288.202*u**3 + 254.581*u**4
def Del_eta_noHI_div_eta0_SPHS(phi):
    """Shear-relaxation viscosity contribution without hydrodynamic
    interactions; the random-close-packing divergence at phi_RCP ~ 0.64 is
    built into the denominator."""
    numer = (12/5.)*phi**2*(1 - 7.085*phi + 20.182*phi**2)
    return numer / (1 - phi/0.64)
def Gamma_S_SPHS(phi, gamma):
    """Generalized Stokes-Einstein factor (self-diffusion) for
    solvent-permeable hard spheres."""
    reduced_Ds = Ds_div_D0_SPHS(phi, gamma)
    reduced_eta_inf = eta_inf_div_eta0_SPHS(phi, gamma)
    return reduced_Ds * reduced_eta_inf
def K_SPHS(phi, gamma):
    """Reduced sedimentation coefficient of solvent-permeable hard spheres."""
    gp = gamma*phi
    poly = 1 - 3.348*gp + 7.426*gp**2 - 10.034*gp**3 + 5.882*gp**4
    return 1 + lambda_K_SPHS(gamma) * phi * poly
def lambda_K_SPHS(gamma):
    """Gamma-dependent slope of the sedimentation fit: fourth-order
    polynomial in the permeability deficit u = 1 - gamma."""
    u = 1 - gamma
    return -6.5464 + 8.592*u - 3.901*u**2 + 2.011*u**3 - 0.142*u**4
def S0_CS(phi):
    """Zero-wavenumber structure factor (osmotic compressibility factor)
    from the Carnahan-Starling equation of state."""
    numer = (1-phi)**4
    denom = (1+2*phi)**2 + phi**3*(phi - 4)
    return numer / denom
|
from mediaman.core.clients.multi import abstract
from mediaman.core.clients.multi import methods
def gen_all(gen):
    """Drive a generator to exhaustion and yield every value it produces.

    The first value is obtained with next(); every subsequent value is
    requested with gen.send(True). StopIteration simply ends the stream.
    """
    try:
        yield next(gen)
        while True:
            yield gen.send(True)
    except StopIteration:
        return
class Multiclient(abstract.AbstractMulticlient):
    """Fan-out client: runs each operation across every configured client and
    lazily yields the per-client results via gen_all.

    Operations that would be ambiguous or destructive when applied to all
    backends at once (download, stream, refresh, remove) raise RuntimeError.
    """

    def list_files(self):
        return gen_all(methods.list_files(self.clients))

    def has(self, request):
        return gen_all(methods.has_hash(self.clients, request.hash))

    def search_by_name(self, file_name):
        return gen_all(methods.search_by_name(self.clients, file_name))

    def fuzzy_search_by_name(self, file_name):
        return gen_all(methods.fuzzy_search_by_name(self.clients, file_name))

    def upload(self, request):
        return gen_all(methods.upload(self.clients, request.path))

    def download(self, root, file_path):
        # `mm all get` isn't allowed
        raise RuntimeError()

    def stream(self, root, file_path):
        # `mm all stream` isn't allowed
        raise RuntimeError()

    def stream_range(self, root, file_path, offset, length):
        # `mm all streamrange` isn't allowed
        raise RuntimeError()

    def stats(self):
        return gen_all(methods.stats(self.clients))

    def capacity(self):
        return gen_all(methods.capacity(self.clients))

    def refresh(self):
        # `mm all refresh` isn't allowed
        raise RuntimeError()

    def remove(self, request):
        # `mm all remove` isn't allowed
        raise RuntimeError()

    def refresh_global_hashes(self, request):
        raise NotImplementedError()

    def search_by_hash(self, hash):
        return gen_all(methods.search_by_hash(self.clients, hash))
|
#coding:utf-8
# 自定义异常类
class CustomError(Exception):
    """Custom exception that carries a human-readable error description."""

    def __init__(self, ErrorInfo):
        Exception.__init__(self, ErrorInfo)
        # Keep the message on an attribute for direct access by callers.
        self.errorinfo = ErrorInfo

    def __str__(self):
        return self.errorinfo
|
from azure.core.exceptions import HttpResponseError
from msrest import Serializer
import pytest
import time
import json
from kubernetes import client
from common.kubernetes_crd_utility import watch_crd_instance
from common.kubernetes_pod_utility import (
watch_pod_status,
get_pod_logs,
get_pod_logs_since_seconds,
)
from common.kubernetes_configmap_utility import get_namespaced_configmap
from common.kubernetes_configuration_utility import show_kubernetes_configuration
from common.kubernetes_secret_utility import watch_kubernetes_secret
from common.kubernetes_namespace_utility import watch_namespace
from common.results_utility import append_result_output
# This function checks the status of the namespaces of the kubernetes cluster. The namespaces to be monitored are passed in as a list.
def check_namespace_status(outfile=None, namespace_list=None, timeout=300):
    """Watch the cluster until every namespace in `namespace_list` is Active.

    Progress is tracked with a 0/1 flag per namespace; the watch callback
    stops the watch (returns True) once all flags are set.
    """
    namespace_dict = {namespace: 0 for namespace in namespace_list}
    append_result_output("Namespace dict: {}\n".format(namespace_dict), outfile)
    print("Generated the namespace dictionary.")

    # The callback function to check the namespace status
    def namespace_event_callback(event):
        try:
            append_result_output("{}\n".format(event), outfile)
            raw = event["raw_object"]
            name = raw.get("metadata").get("name")
            status = raw.get("status")
            if not status:
                return False
            if status.get("phase") == "Active":
                namespace_dict[name] = 1
            # Stop only when every monitored namespace has turned Active.
            return all(flag == 1 for flag in namespace_dict.values())
        except Exception as e:
            pytest.fail("Error occured while processing the namespace event: " + str(e))

    watch_namespace(client.CoreV1Api(), timeout, namespace_event_callback)
# This function checks the status of pods in a given namespace. The pods to be monitored are identified using the pod label list parameter.
def check_kubernetes_pods_status(
    pod_namespace, outfile=None, pod_label_list=None, timeout=300
):
    """Watch pods in `pod_namespace` until every pod matching a label in
    `pod_label_list` has all of its containers running.

    Fails the test immediately if any monitored container has restarted.
    Each event is appended to `outfile`; the watch stops after `timeout`
    seconds.
    """
    pod_label_dict = {}
    if (
        pod_label_list
    ):  # This parameter is a list of label values to identify the pods that we want to monitor in the given namespace
        for pod_label in pod_label_list:
            pod_label_dict[pod_label] = 0  # 0 = not (fully) running yet, 1 = running
        append_result_output("Pod label dict: {}\n".format(pod_label_dict), outfile)
        print("Generated the pods dictionary.")

    # The callback function to check if the pod is in running state
    def pod_event_callback(event):
        try:
            append_result_output("{}\n".format(event), outfile)
            pod_status = event["raw_object"].get("status")
            pod_metadata = event["raw_object"].get("metadata")
            pod_metadata_labels = pod_metadata.get("labels")
            if not pod_metadata_labels:
                return False
            pod_metadata_label_values = (
                pod_metadata_labels.values()
            )  # It contains the list of all label values for the pod whose event was called.
            current_label_value = None  # This label value will be common in pod event and label list provided and will be monitored
            for label_value in pod_metadata_label_values:
                if label_value in pod_label_dict:
                    current_label_value = label_value
            if not current_label_value:
                # Event belongs to a pod we are not monitoring.
                return False
            if pod_status.get("containerStatuses"):
                for container in pod_status.get("containerStatuses"):
                    # Any restart is treated as a hard test failure, not a retry.
                    if container.get("restartCount") > 0:
                        pytest.fail(
                            "The pod {} was restarted. Please see the pod logs for more info.".format(
                                container.get("name")
                            )
                        )
                    if not container.get("state").get("running"):
                        # Reset the flag: all containers must be running at once.
                        pod_label_dict[current_label_value] = 0
                        return False
                    else:
                        pod_label_dict[current_label_value] = 1
            # Stop the watch only when every monitored pod is running.
            if all(ele == 1 for ele in list(pod_label_dict.values())):
                return True
            return False
        except Exception as e:
            pytest.fail("Error occured while processing the pod event: " + str(e))

    # Checking status of all pods
    if pod_label_dict:
        api_instance = client.CoreV1Api()
        watch_pod_status(api_instance, pod_namespace, timeout, pod_event_callback)
# Function to check if the crd instance status has been updated with the status fields mentioned in the 'status_dict' parameter
def check_kubernetes_crd_status(
    crd_group,
    crd_version,
    crd_namespace,
    crd_plural,
    crd_name,
    status_dict=None,
    outfile=None,
    timeout=300,
):
    """Watch a CRD instance until its status carries every field in `status_dict`.

    Fails the test if a watched field appears with a value different from the
    expected one; the watch stops after `timeout` seconds.
    """
    # Bug fix: the default was a shared mutable dict (status_dict={});
    # use None and normalize here instead.
    if status_dict is None:
        status_dict = {}

    # The callback function to check if the crd event received has been updated with the status fields
    def crd_event_callback(event):
        try:
            append_result_output("{}\n".format(event), outfile)
            crd_status = event["raw_object"].get("status")
            if not crd_status:
                return False
            for status_field in status_dict:
                # Every expected field must be present...
                if not crd_status.get(status_field):
                    return False
                # ...and must carry exactly the expected value.
                if crd_status.get(status_field) != status_dict.get(status_field):
                    pytest.fail(
                        "The CRD instance status has been updated with incorrect value for '{}' field.".format(
                            status_field
                        )
                    )
            return True
        except Exception as e:
            pytest.fail("Error occurred while processing crd event: " + str(e))

    # Checking if CRD instance has been updated with status fields
    api_instance = client.CustomObjectsApi()
    watch_crd_instance(
        api_instance,
        crd_group,
        crd_version,
        crd_namespace,
        crd_plural,
        crd_name,
        timeout,
        crd_event_callback,
    )
# Function to monitor the pod logs. It will ensure that all logs passed in the 'logs_list' parameter are present in the container logs.
def check_kubernetes_pod_logs(
    pod_namespace,
    pod_name,
    container_name,
    logs_list=None,
    error_logs_list=None,
    outfile=None,
    timeout_seconds=300,
):
    """Poll a container's logs until every string in `logs_list` has appeared.

    Fails the test immediately if any string from `error_logs_list` shows up.
    After one full-log check, the last 90 seconds of logs are re-checked once
    a minute until `timeout_seconds` elapses.

    NOTE(review): `logs_list` and `error_logs_list` default to None but are
    iterated unconditionally — callers appear to always pass lists; confirm.
    """
    logs_dict = {}
    for log in logs_list:
        logs_dict[log] = 0  # 0 = not seen yet, 1 = found in the logs
    print("Generated the logs dictionary.")

    # The callback function to examine the pod log
    def pod_log_check(logs):
        try:
            # Any error log is fatal for the test run.
            for error_log in error_logs_list:
                if error_log in logs:
                    pytest.fail("Error log found: " + logs)
            for log in logs_dict:
                if log in logs:
                    logs_dict[log] = 1
            # Done only once every expected log line has been seen.
            if all(ele == 1 for ele in list(logs_dict.values())):
                return True
            return False
        except Exception as e:
            pytest.fail("Error occured while processing pod log event: " + str(e))

    # Checking the pod logs
    api_instance = client.CoreV1Api()
    pod_logs = get_pod_logs(api_instance, pod_namespace, pod_name, container_name)
    if not pod_log_check(pod_logs):
        timeout = time.time() + timeout_seconds
        while True:
            time.sleep(60)
            # The 90-second window overlaps the 60-second sleep so that log
            # lines emitted between polls are not missed.
            pod_logs_since_interval = get_pod_logs_since_seconds(
                api_instance, pod_namespace, pod_name, container_name, 90
            )
            if pod_log_check(pod_logs_since_interval):
                break
            if time.time() > timeout:
                pytest.fail("The watch on the pod logs has timed out.")
# Function to check the compliance state of the kubernetes configuration
def check_kubernetes_configuration_state(
    kc_client,
    resource_group,
    cluster_rp,
    cluster_type,
    cluster_name,
    configuration_name,
    outfile=None,
    timeout_seconds=300,
):
    """Poll a kubernetes (flux) configuration until it is Succeeded + Compliant.

    Fails the test if provisioning ends in a terminal Failed/Cancelled state,
    or when `timeout_seconds` elapses before the compliant state is reached.
    Every GET response is serialized into `outfile` for debugging.
    """
    timeout = time.time() + timeout_seconds
    while True:
        get_kc_response = show_kubernetes_configuration(
            kc_client,
            resource_group,
            cluster_rp,
            cluster_type,
            cluster_name,
            configuration_name,
        )
        # Serialize the SDK model so the raw response lands in the result log.
        append_result_output('GET config response: {}\n'.format(Serializer().serialize_data(get_kc_response, 'FluxConfiguration', keep_readonly=True)), outfile)
        provisioning_state = get_kc_response.provisioning_state
        # compliance_state can be unset before the agent reports; show "Unknown".
        compliance_state = get_kc_response.compliance_state or "Unknown"
        if provisioning_state == "Succeeded" and compliance_state == "Compliant":
            break
        if provisioning_state == "Failed" or provisioning_state == "Cancelled":
            error_message = "ERROR: The kubernetes configuration creation finished with terminal provisioning state {}. ".format(
                provisioning_state
            )
            append_result_output(error_message, outfile)
            pytest.fail(error_message)
        if time.time() > timeout:
            error_message = "ERROR: Timeout. The kubernetes configuration is in {} provisioning state and {} compliance state.".format(
                provisioning_state, compliance_state
            )
            append_result_output(error_message, outfile)
            pytest.fail(error_message)
        time.sleep(10)
def check_kubernetes_configuration_delete_state(
    kc_client,
    resource_group,
    cluster_rp,
    cluster_type,
    cluster_name,
    configuration_name,
    outfile=None,
    timeout_seconds=300,
):
    """Poll until the kubernetes configuration is deleted (GET returns 404).

    Fails the test when `timeout_seconds` elapses while the configuration
    still exists (or keeps failing with a non-404 error).
    """
    timeout = time.time() + timeout_seconds
    # Bug fix: provisioning_state was referenced in the timeout message
    # without ever being assigned when the very first GET raised an error;
    # initialize it before the loop.
    provisioning_state = None
    while True:
        try:
            get_kc_response = kc_client.get(
                resource_group,
                cluster_rp,
                cluster_type,
                cluster_name,
                configuration_name,
            )
            provisioning_state = get_kc_response.provisioning_state
            append_result_output(
                "Kubernetes Configuration still exists with Provisioning State: {}\n".format(
                    provisioning_state
                ),
                outfile,
            )
        except HttpResponseError as e:
            # 404 means the resource is gone: success. Other HTTP errors are
            # treated as transient and retried until the timeout.
            if e.status_code == 404:
                append_result_output(
                    "Kubernetes Configuration {} successfully deleted\n".format(
                        configuration_name
                    ),
                    outfile,
                )
                break
        if time.time() > timeout:
            pytest.fail(
                "ERROR: Timeout. The kubernetes configuration is in {} provisioning state.".format(
                    provisioning_state
                )
            )
        time.sleep(10)
# Function to monitor the kubernetes secret. It will determine if the secret has been successfully created.
def check_kubernetes_secret(secret_namespace, secret_name, timeout=300):
    """Watch a kubernetes secret until an event shows it carrying data."""

    # The callback: stop the watch as soon as the secret event has any data.
    def secret_event_callback(event):
        try:
            return bool(event["raw_object"].get("data"))
        except Exception as e:
            pytest.fail("Error occured while processing secret event: " + str(e))

    watch_kubernetes_secret(
        client.CoreV1Api(), secret_namespace, secret_name, timeout, secret_event_callback
    )
def get_azure_arc_agent_version(api_instance, namespace, configmap_name):
    """Read the azure-arc agent version out of the named configmap, failing
    the test if the key is missing or empty."""
    configmap_data = get_namespaced_configmap(api_instance, namespace, configmap_name).data
    agent_version = configmap_data.get("AZURE_ARC_AGENT_VERSION")
    if not agent_version:
        pytest.fail(
            "The azure arc configmap does not contain the azure arc agent version."
        )
    return agent_version
|
from subprocess import Popen
from shlex import split
class ProcessUtil():
    """Manage the training / evaluation / tensorboard subprocesses:
    start them, poll their liveness, and kill them cleanly."""

    def __init__(self):
        super().__init__()
        self.train_process = None
        self.evaluate_process = None
        self.tensorboard_process = None

    def execute_training(self):
        """Launch the training subprocess."""
        # NOTE(review): `cmd` is not defined in this class; it looks like a
        # module-level command string defined elsewhere in the file — confirm.
        self.train_process = Popen(split(cmd))

    def is_training(self) -> bool:
        """True while the training subprocess exists and has not exited."""
        # poll() returns None while the process is still running.
        return self.train_process is not None and self.train_process.poll() is None

    def kill_training(self):
        """Kill the training subprocess (if any) and reap it."""
        if self.train_process is None:
            return
        self.train_process.kill()
        self.train_process.wait()  # reap to avoid a zombie process
        self.train_process = None

    def execute_evaluation(self):
        """Launch the evaluation subprocess."""
        # Bug fix: this used to overwrite self.train_process (copy-paste
        # error), leaving evaluate_process unset so kill_evaluation() was a
        # no-op and the training handle was clobbered.
        # NOTE(review): same undefined `cmd` as execute_training — confirm.
        self.evaluate_process = Popen(split(cmd))

    def is_evaluating(self) -> bool:
        """True while the evaluation subprocess exists and has not exited."""
        # Bug fix: previously polled the *training* process.
        return self.evaluate_process is not None and self.evaluate_process.poll() is None

    def kill_evaluation(self):
        """Kill the evaluation subprocess (if any) and reap it."""
        if self.evaluate_process is None:
            return
        self.evaluate_process.kill()
        self.evaluate_process.wait()
        self.evaluate_process = None

    def execute_tensorboard(self):
        """Start tensorboard on the current directory (at most one instance)."""
        if self.tensorboard_process is not None:
            return
        self.tensorboard_process = Popen(split("tensorboard --logdir=."))
        print('execute done?')

    def is_tensorboard_running(self):
        """True while the tensorboard subprocess exists and has not exited."""
        return self.tensorboard_process is not None and self.tensorboard_process.poll() is None

    def kill_tensorboard(self):
        """Kill the tensorboard subprocess (if any) and reap it."""
        if self.tensorboard_process is None:
            return
        self.tensorboard_process.kill()
        self.tensorboard_process.wait()
        self.tensorboard_process = None

    def kill_all(self):
        """Kill every managed subprocess."""
        self.kill_training()
        self.kill_evaluation()
        self.kill_tensorboard()
import setuptools
from dict_tiny import version
VERSION = version.__version__

# Read the long description from the README; pin the encoding so installation
# does not fail on platforms whose default codec cannot decode UTF-8 content.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name=version.name,
    version=VERSION,
    author="louie",
    author_email="louiehan1015@gmail.com",
    description=version.DESCRIPTION,
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/louieh/dict-tiny",
    keywords='python youdao dictionary command-line plumbum translator translate google-translation-api deepl deepl-translation-api',
    packages=setuptools.find_packages(),
    install_requires=[
        'requests',
        'lxml',
        'plumbum',
        'pyperclip'
    ],
    entry_points={
        'console_scripts': [
            'dict-tiny = dict_tiny.main:Dict_tiny'
        ]
    },
    # setuptools expects a list of classifier strings; a tuple is deprecated.
    classifiers=[
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
    ],
    python_requires='>=3.0',
)
|
import copy
import json
import torch
from mmf.common.sample import Sample
from mmf.datasets.mmf_dataset import MMFDataset
class MMIMDbFeaturesDataset(MMFDataset):
    """MM-IMDb dataset variant backed by precomputed region features."""

    def __init__(self, config, dataset_type, imdb_file_index, *args, **kwargs):
        super().__init__(
            "mmimdb", config, dataset_type, imdb_file_index, *args, **kwargs
        )
        assert (
            self._use_features
        ), "config's 'use_features' must be true to use feature dataset"

    def __getitem__(self, idx):
        """Build a Sample with processed plot text, region features, and
        multi-label genre targets for the given annotation index."""
        info = self.annotation_db[idx]
        sample = Sample()

        # Some annotations store the plot as a list of variants; use the first.
        plot = info["plot"]
        if isinstance(plot, list):
            plot = plot[0]
        processed_text = self.text_processor({"text": plot})
        sample.text = processed_text["text"]
        if "input_ids" in processed_text:
            sample.update(processed_text)

        if self._use_features is True:
            sample.update(self.features_db[idx])

        processed_answers = self.answer_processor({"answers": info["genres"]})
        sample.answers = processed_answers["answers"]
        sample.targets = processed_answers["answers_scores"]
        return sample
class MMIMDbImageDataset(MMFDataset):
    """MM-IMDb dataset variant backed by raw poster images."""

    def __init__(self, config, dataset_type, imdb_file_index, *args, **kwargs):
        super().__init__(
            "mmimdb", config, dataset_type, imdb_file_index, *args, **kwargs
        )
        assert (
            self._use_images
        ), "config's 'use_images' must be true to use image dataset"

    def init_processors(self):
        super().init_processors()
        # Assign transforms to the image_db
        self.image_db.transform = self.image_processor

    def __getitem__(self, idx):
        """Build a Sample with processed plot text, the first image, and
        multi-label genre targets for the given annotation index."""
        info = self.annotation_db[idx]
        sample = Sample()

        # Some annotations store the plot as a list of variants; use the first.
        plot = info["plot"]
        if isinstance(plot, list):
            plot = plot[0]
        processed_text = self.text_processor({"text": plot})
        sample.text = processed_text["text"]
        if "input_ids" in processed_text:
            sample.update(processed_text)

        if self._use_images is True:
            sample.image = self.image_db[idx]["images"][0]

        processed_answers = self.answer_processor({"answers": info["genres"]})
        sample.answers = processed_answers["answers"]
        sample.targets = processed_answers["answers_scores"]
        return sample
|
from django.conf.urls import url
from tax import views
# URL routes for the tax app: authentication and search endpoints.
urlpatterns = [
    url(r'^auth/', views.auth, name='tax_auth'),
    url(r'^search/', views.search, name='tax_search'),
]
|
""" Module to perfome command line operation in the services."""
from fabric.api import run, cd
from fabric.context_managers import settings, hide
def execute_cmd_in_node(node, user, passwd, cmd):
    """Run a shell command on a remote node over fabric and return its output.

    :param node: hostname or IP of the target node
    :param user: SSH user
    :param passwd: SSH password
    :param cmd: command line to execute
    :return: command output as returned by fabric's run()
    """
    with hide('everything'):
        # Bug fix: the password setting referenced the undefined name
        # `password`; the parameter is `passwd`.
        with settings(host_string='%s@%s' % (user, node), password=passwd,
                      warn_only=True, abort_on_prompts=False):
            output = run(cmd)
            return output
def get_status(node, user, passwd, service):
    """Return the output of `service <service> status` run on the remote node."""
    # Bug fix: the format argument was the misspelled, undefined `serivce`.
    cmd = "service %s status" % service
    return execute_cmd_in_node(node, user, passwd, cmd)
|
from modules.constants import (
NAV_BAR_TOP,
)
#################################
# Front panel navigation buttons
#################################
class NavButton(object):
    """Renders the front-panel navigation buttons along the nav bar."""

    def __init__(self, g_vars, fill, font):
        self.nav_bar_top = NAV_BAR_TOP
        self.font = font
        self.fill = fill
        self.g_vars = g_vars
        # figure out key map in use
        self.key_map_name = g_vars.get('key_map')
        self.key_map = g_vars['key_mappings'][self.key_map_name]['key_functions']
        self.map_type = g_vars['key_mappings'][self.key_map_name]['type']

    #######################################
    # Rendering of buttons on screen
    #######################################
    def render_button(self, label, position):
        """Draw one button: a background rectangle plus its label text."""
        # Bug fix: `rect_fill` was only assigned inside the symbol branch, so
        # text key maps raised NameError on the rectangle call below.
        # NOTE(review): 0 (dark background) is assumed as the text-map
        # default — confirm against the display's color conventions.
        rect_fill = 0
        # invert colors if using symbol glyphs
        if self.map_type == 'symbol':
            rect_fill = 255
            self.fill = 0
        self.g_vars['draw'].rectangle((position, self.nav_bar_top, position + 25, self.nav_bar_top + 15), outline=0, fill=rect_fill)
        self.g_vars['draw'].text((position, self.nav_bar_top), label, fill=self.fill, font=self.font)
        return

    def _render_key(self, function):
        # Shared implementation for back/next/down: look up the key's
        # position and label in the active key map, then draw it.
        key = self.key_map[function]
        self.render_button(key['label'], key['position'])

    def back(self, function="back"):
        self._render_key(function)
        return

    def next(self, function="next"):
        self._render_key(function)
        return

    def down(self, function="down"):
        self._render_key(function)
        return
|
from typing import (
TYPE_CHECKING,
Sequence,
)
import numpy as np
from ...helper import typename
if TYPE_CHECKING:
from ...types import (
DocumentArrayIndexType,
)
class DelItemMixin:
    """Provide help function to enable advanced indexing in `__delitem__`"""

    def __delitem__(self, index: 'DocumentArrayIndexType'):
        """Delete the document(s) addressed by `index`.

        Supported forms: int offset, str id, slice, Ellipsis (delete all),
        sequences (ids, offsets, boolean masks, or a 2-tuple of
        (row selector, attribute selector)), and 1-D numpy arrays.
        """
        if isinstance(index, (int, np.generic)) and not isinstance(index, bool):
            # Plain integer (or numpy scalar) offset; bool is excluded
            # explicitly because bool is a subclass of int.
            self._del_doc_by_offset(int(index))
        elif isinstance(index, str):
            if index.startswith('@'):
                raise NotImplementedError(
                    'Delete elements along traversal paths is not implemented'
                )
            else:
                # Treat the string as a document id.
                self._del_doc(index)
        elif isinstance(index, slice):
            self._del_docs_by_slice(index)
        elif index is Ellipsis:
            # `del da[...]` clears the whole array.
            self._del_all_docs()
        elif isinstance(index, Sequence):
            if (
                isinstance(index, tuple)
                and len(index) == 2
                and (
                    isinstance(index[0], (slice, Sequence, str, int))
                    or index[0] is Ellipsis
                )
                and isinstance(index[1], (str, Sequence))
            ):
                # 2-tuple form: (row selector, attribute selector).
                # TODO: add support for cases such as da[1, ['text', 'id']]?
                if isinstance(index[0], (str, int)) and isinstance(index[1], str):
                    # ambiguity only comes from the second string
                    if index[1] in self:
                        # Both elements address documents: delete both.
                        del self[index[0]]
                        del self[index[1]]
                    else:
                        # Second element is an attribute name: clear it on the doc.
                        self._set_doc_attr_by_id(index[0], index[1], None)
                elif isinstance(index[0], (slice, Sequence)):
                    _attrs = index[1]
                    if isinstance(_attrs, str):
                        _attrs = (index[1],)
                    # Clear each named attribute on every selected document.
                    for _d in self[index[0]]:
                        for _aa in _attrs:
                            self._set_doc_attr_by_id(_d.id, _aa, None)
            elif isinstance(index[0], bool):
                # Boolean mask aligned with the array.
                self._del_docs_by_mask(index)
            elif isinstance(index[0], int):
                # Delete from the back so earlier offsets stay valid.
                for t in sorted(index, reverse=True):
                    del self[t]
            elif isinstance(index[0], str):
                # Sequence of document ids.
                for t in index:
                    del self[t]
        elif isinstance(index, np.ndarray):
            # Accept any array that squeezes down to 1-D; recurse as a list.
            index = index.squeeze()
            if index.ndim == 1:
                del self[index.tolist()]
            else:
                raise IndexError(
                    f'When using np.ndarray as index, its `ndim` must =1. However, receiving ndim={index.ndim}'
                )
        else:
            raise IndexError(f'Unsupported index type {typename(index)}: {index}')
|
from __future__ import absolute_import
import abc
from copy import deepcopy
import time
from enum import Enum
import six
from simpleflow.base import Submittable
from simpleflow.history import History
from . import futures
from .activity import Activity
if False:
from typing import Optional, Any, Dict, Union, Type # NOQA
def get_actual_value(value):
    """
    Unwrap the result of a Future or return the value.
    """
    if not isinstance(value, futures.Future):
        return value
    return futures.get_result_or_raise(value)
@six.add_metaclass(abc.ABCMeta)
class Task(Submittable):
    """A Task represents a work that can be scheduled for execution.
    """

    @abc.abstractproperty
    def name(self):
        raise NotImplementedError()

    @staticmethod
    def resolve_args(*args):
        # Unwrap any Future positional arguments into their concrete results.
        return [get_actual_value(a) for a in args]

    @staticmethod
    def resolve_kwargs(**kwargs):
        # Same as resolve_args, for keyword arguments.
        return {k: get_actual_value(v) for k, v in kwargs.items()}
class ActivityTask(Task):
    """
    Activity task.

    :type activity: Activity
    :type idempotent: Optional[bool]
    :type id: str
    """
    def __init__(self, activity, *args, **kwargs):
        if not isinstance(activity, Activity):
            raise TypeError('Wrong value for `activity`, got {} instead'.format(type(activity)))
        # Keep original arguments for use in subclasses
        # For instance this helps casting a generic class to a simpleflow.swf.task,
        # see simpleflow.swf.task.ActivityTask.from_generic_task() factory
        self._args = deepcopy(args)
        self._kwargs = deepcopy(kwargs)
        self.activity = activity
        self.idempotent = activity.idempotent
        # "context" is consumed here so it does not reach resolve_kwargs below.
        self.context = kwargs.pop("context", None)
        self.args = self.resolve_args(*args)
        self.kwargs = self.resolve_kwargs(**kwargs)
        self.id = None

    @property
    def name(self):
        # e.g. "activity-<activity name>"
        return 'activity-{}'.format(self.activity.name)

    def __repr__(self):
        return '{}(activity={}, args={}, kwargs={}, id={})'.format(
            self.__class__.__name__,
            self.activity,
            self.args,
            self.kwargs,
            self.id)

    def execute(self):
        """Run the activity locally: either instantiate-and-execute a
        class-based activity, or call the plain callable directly."""
        method = self.activity.callable

        if getattr(method, 'add_context_in_kwargs', False):
            self.kwargs["context"] = self.context

        if hasattr(method, 'execute'):
            # Class-based activity: instantiate, execute, then optional hook.
            task = method(*self.args, **self.kwargs)
            task.context = self.context
            result = task.execute()
            if hasattr(task, 'post_execute'):
                task.post_execute()
            return result
        else:
            # NB: the following line attaches some *state* to the callable, so it
            # can be used directly for advanced usage. This works well because we
            # don't do multithreading, but if we ever do, DANGER!
            method.context = self.context
            return method(*self.args, **self.kwargs)

    def propagate_attribute(self, attr, val):
        """
        Propagate to the activity.
        """
        setattr(self.activity, attr, val)
class WorkflowTask(Task):
    """
    Child workflow.

    :type executor: type(simpleflow.executor.Executor)
    :type workflow: type(simpleflow.workflow.Workflow)
    :type id: str
    """
    def __init__(self, executor, workflow, *args, **kwargs):
        # Keep original arguments for use in subclasses
        # For instance this helps casting a generic class to a simpleflow.swf.task,
        # see simpleflow.swf.task.WorkflowTask.from_generic_task() factory
        self._args = deepcopy(args)
        self._kwargs = deepcopy(kwargs)
        self.executor = executor
        self.workflow = workflow
        # Workflows are not idempotent unless they opt in.
        self.idempotent = getattr(workflow, 'idempotent', False)
        get_workflow_id = getattr(workflow, 'get_workflow_id', None)
        self.args = self.resolve_args(*args)
        self.kwargs = self.resolve_kwargs(**kwargs)
        if get_workflow_id:
            # Let the workflow class derive its own id from the resolved arguments.
            self.id = get_workflow_id(workflow, *self.args, **self.kwargs)
        else:
            self.id = None

    @property
    def name(self):
        # e.g. "workflow-<workflow name>"
        return 'workflow-{}'.format(self.workflow.name)

    def __repr__(self):
        return '{}(workflow={}, args={}, kwargs={}, id={})'.format(
            self.__class__.__name__,
            self.workflow.__module__ + '.' + self.workflow.__name__,
            self.args,
            self.kwargs,
            self.id)

    def execute(self):
        """Run the child workflow locally with the resolved arguments."""
        workflow = self.workflow(self.executor)
        return workflow.run(*self.args, **self.kwargs)

    def propagate_attribute(self, attr, val):
        """
        Propagate to the workflow.
        """
        setattr(self.workflow, attr, val)
class SignalTask(Task):
    """
    Signal task: carries a name plus resolved args/kwargs; no local work.
    """

    def __init__(self, name, *args, **kwargs):
        self._name = name
        self.args = self.resolve_args(*args)
        self.kwargs = self.resolve_kwargs(**kwargs)

    @property
    def name(self):
        """
        :return: the signal's name
        :rtype: str
        """
        return self._name

    def execute(self):
        # Signals perform no local work.
        pass
class MarkerTask(Task):
    """Marker task: records a named marker with serializable details."""

    def __init__(self, name, details):
        """
        :param name: Marker name
        :param details: Serializable marker details
        """
        self._name = name
        # Resolve through the common machinery so a Future's result is unwrapped.
        self.args = self.resolve_args(details)
        self.kwargs = {}

    @property
    def name(self):
        """
        :return: the marker's name
        :rtype: str
        """
        return self._name

    @property
    def details(self):
        # resolve_args returned a one-element list holding the details.
        return self.args[0]

    def execute(self):
        # Markers perform no local work.
        pass
class TimerTask(Task):
    """
    Timer task: identified by timer_id; waits `timeout` seconds when run locally.
    """

    def __init__(self, timer_id, timeout, control=None):
        self.timer_id = timer_id
        self.timeout = timeout
        self.control = control
        self.args = ()
        self.kwargs = {}

    @property
    def name(self):
        # The timer id doubles as the task name.
        return self.timer_id

    @property
    def id(self):
        return self.timer_id

    def __repr__(self):
        return '<{} timer_id="{}" timeout={}>'.format(self.__class__.__name__, self.timer_id, self.timeout)

    def execute(self):
        # Local execution simply blocks for the requested duration.
        time.sleep(self.timeout)
class CancelTimerTask(Task):
    """
    Timer cancellation task for a given timer_id.
    """

    def __init__(self, timer_id):
        self.timer_id = timer_id
        self.args = ()
        self.kwargs = {}

    @property
    def name(self):
        # The timer id doubles as the task name.
        return self.timer_id

    @property
    def id(self):
        return self.timer_id

    def __repr__(self):
        return '<{} timer_id="{}">'.format(self.__class__.__name__, self.timer_id)

    def execute(self):
        # Local execution: nothing to cancel, so this is a no-op.
        return
class TaskFailureContext(object):
    """
    Some context for a task/workflow failure.
    """

    class Decision(Enum):
        # What the failure handler decided to do with the failed task.
        none = 0
        abort = 1
        ignore = 2
        retry_now = 3
        retry_later = 4
        cancel = 5
        handled = 6

    def __init__(self,
                 a_task,  # type: Union[ActivityTask, WorkflowTask]
                 event,  # type: Dict[str, Any]
                 future,  # type: futures.Future
                 exception_class,  # type: Type[Exception]
                 history=None,  # type: Optional[History]
                 ):
        self.a_task = a_task
        self.event = event
        self.future = future
        self.exception_class = exception_class
        self.history = history
        # To be filled in by the failure handler.
        self.decision = TaskFailureContext.Decision.none
        self.retry_wait_timeout = None
        # Lazily computed by the task_error property; None means "not computed yet".
        self._task_error = None

    def __repr__(self):
        return '<TaskFailureContext' \
               ' task type={type}' \
               ' task.id={id}' \
               ' task.name={name}' \
               ' event={event}' \
               ' future={future}' \
               ' task_error={task_error}' \
               ' current_started_decision_id={started_decision_id}' \
               ' last_completed_decision_id={completed_decision_id}' \
               ' decision={decision}' \
               ' retry_wait_timeout={retry_wait_timeout}' \
               '>' \
            .format(
                type=type(self.a_task),
                id=getattr(self.a_task, 'id', None),
                name=getattr(self.a_task, 'name', None),
                event=self.event,
                future=self.future,
                task_error=self.task_error,
                started_decision_id=self.current_started_decision_id,
                completed_decision_id=self.last_completed_decision_id,
                decision=self.decision,
                retry_wait_timeout=self.retry_wait_timeout,
            )

    @property
    def retry_count(self):
        # Number of retries recorded on the originating event, if any.
        return self.event.get('retry')

    @property
    def task_name(self):
        # Tasks may wrap their payload; prefer the payload's name when present.
        if hasattr(self.a_task, 'payload'):
            return self.a_task.payload.name
        if hasattr(self.a_task, 'name'):
            return self.a_task.name
        return None

    @property
    def exception(self):
        # The exception captured on the task's future.
        return self.future.exception

    @property
    def current_started_decision_id(self):
        return self.history.started_decision_id if self.history else None

    @property
    def last_completed_decision_id(self):
        return self.history.completed_decision_id if self.history else None

    @property
    def task_error(self):
        """Extract the 'error' field from a TaskFailed exception's JSON
        details, computed once and cached on the instance."""
        if self._task_error is None:
            from simpleflow.exceptions import TaskFailed
            from simpleflow.utils import json_loads_or_raw
            self._task_error = ()  # falsy value different from None
            if isinstance(self.exception, TaskFailed) and self.exception.details:
                details = json_loads_or_raw(self.exception.details)
                if isinstance(details, dict) and 'error' in details:
                    self._task_error = details['error']
        return self._task_error

    @property
    def id(self):
        # type: () -> Optional[int]
        event = self.event
        return History.get_event_id(event)
|
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from django_comments.abstracts import CommentAbstractModel
from mptt.models import MPTTModel, TreeForeignKey
from diventi.core.models import PromotableModel
class DiventiComment(MPTTModel, CommentAbstractModel, PromotableModel):
    """Threaded site comment: django_comments comment in an MPTT tree.

    ``parent`` links a reply to the comment it answers; ``SET_NULL`` keeps
    replies alive when their parent is deleted.
    """
    parent = TreeForeignKey('self', null=True, blank=True, related_name='children', verbose_name=_('parent'), on_delete=models.SET_NULL)
    class MPTTMeta:
        # Siblings are kept in chronological order within the tree.
        order_insertion_by = ['submit_date']
    class Meta:
        verbose_name = _('comment')
        verbose_name_plural = _('comments')
    def __str__(self):
        """Display the comment body itself."""
        return self.comment
|
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
import gui.motion_player as mp
class HmaUi(object):
    """Build the main-window UI and wire its signals.

    Connects UI actions to event handlers on the main window; the custom
    ``action_*_event`` handlers must be implemented by the main window.

    (The original class had this description as a bare string after the
    constants, where it is not picked up as ``__doc__``.)
    """

    DEFAULT_WIDTH = 1600
    DEFAULT_HEIGHT = 1200

    def setupUi(self, MainWindow: QtWidgets.QMainWindow):
        """Create central widget, menus, toolbars and docks on MainWindow."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(HmaUi.DEFAULT_WIDTH, HmaUi.DEFAULT_HEIGHT)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # centralwidget
        MainWindow.setCentralWidget(self.centralwidget)
        self.central_layout = QtWidgets.QHBoxLayout()
        self.centralwidget.setLayout(self.central_layout)
        # motion_player
        self.motion_player = mp.MotionPlayer()
        # self.motion_player.setGeometry(QtCore.QRect(0, 0, 960, 720))
        # Debug traces of the player's size negotiation (left in on purpose).
        print(self.motion_player.size())
        print(self.motion_player.sizeHint())
        print(self.motion_player.minimumSizeHint())
        self.motion_player.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        self.motion_player.setMouseTracking(False)
        self.motion_player.setObjectName("motion_player")
        self.central_layout.addWidget(self.motion_player)
        # menubar
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1280, 47))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtWidgets.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        # TODO(review): menuAnalysis is created but never added to the menubar
        # and never retranslated — confirm whether it should be wired up.
        self.menuAnalysis = QtWidgets.QMenu(self.menubar)
        self.menuAnalysis.setObjectName("menuAnalysis")
        MainWindow.setMenuBar(self.menubar)
        self.actionNew = QtWidgets.QAction(MainWindow)
        self.actionNew.setObjectName("actionNew")
        self.actionOpen = QtWidgets.QAction(MainWindow)
        self.actionOpen.setObjectName("actionOpen")
        self.actionImport = QtWidgets.QAction(MainWindow)
        self.actionImport.setObjectName("actionImport")
        self.actionExport = QtWidgets.QAction(MainWindow)
        self.actionExport.setObjectName("actionExport")
        self.actionExit = QtWidgets.QAction(MainWindow)
        self.actionExit.setObjectName("actionExit")
        self.menuFile.addAction(self.actionNew)
        self.menuFile.addAction(self.actionOpen)
        self.menuFile.addSeparator()
        self.menuFile.addAction(self.actionImport)
        self.menuFile.addAction(self.actionExport)
        self.menuFile.addSeparator()
        self.menuFile.addAction(self.actionExit)
        self.menubar.addAction(self.menuFile.menuAction())
        # status bar
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        # tool bar
        self.toolbar = QtWidgets.QToolBar(MainWindow)
        self.toolbar.setObjectName("toolbar")
        MainWindow.addToolBar(self.toolbar)
        self.toolBtn0 = QtWidgets.QAction("toolBtn0", MainWindow)
        self.toolBtn1 = QtWidgets.QAction("toolBtn1", MainWindow)
        self.toolBtn2 = QtWidgets.QAction("toolBtn2", MainWindow)
        self.toolbar.addAction(self.toolBtn0)
        self.toolbar.addAction(self.toolBtn1)
        self.toolbar.addSeparator()
        self.toolbar.addAction(self.toolBtn2)
        #MainWindow.addToolBar("toolbar")
        self.toolbar2 = QtWidgets.QToolBar(MainWindow)
        # Bug fix: this previously called setObjectName on self.toolbar,
        # renaming the first toolbar and leaving toolbar2 unnamed.
        self.toolbar2.setObjectName("toolbar2")
        MainWindow.addToolBar(self.toolbar2)
        self.toolbar2.addAction(self.toolBtn2)
        # dock widgets
        self.dock0 = QtWidgets.QDockWidget("dock0", MainWindow)
        self.dock0.setAllowedAreas(QtCore.Qt.LeftDockWidgetArea | QtCore.Qt.RightDockWidgetArea)
        self.dock1 = QtWidgets.QDockWidget("dock1", MainWindow)
        self.dock1.setAllowedAreas(QtCore.Qt.LeftDockWidgetArea | QtCore.Qt.RightDockWidgetArea)
        MainWindow.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.dock0)
        MainWindow.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.dock1)
        self.toolbar3 = QtWidgets.QToolBar("toolbar3", MainWindow)
        MainWindow.addToolBar(self.toolbar3)
        self.toolbar3.addAction(self.dock0.toggleViewAction())
        self.toolbar3.addAction(self.dock1.toggleViewAction())
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        self.connectUi(MainWindow)

    def retranslateUi(self, MainWindow):
        """Set all user-visible (translatable) strings."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.menuFile.setTitle(_translate("MainWindow", "File"))
        self.actionNew.setText(_translate("MainWindow", "New"))
        self.actionOpen.setText(_translate("MainWindow", "Open"))
        self.actionImport.setText(_translate("MainWindow", "Import"))
        self.actionExport.setText(_translate("MainWindow", "Export"))
        self.actionExit.setText(_translate("MainWindow", "Exit"))

    def connectUi(self, MainWindow):
        """Connect menu actions to the main window's ``action_*_event`` slots."""
        self.actionNew.triggered.connect(MainWindow.action_new_event)
        self.actionOpen.triggered.connect(MainWindow.action_open_event)
        self.actionImport.triggered.connect(MainWindow.action_import_event)
        self.actionExport.triggered.connect(MainWindow.action_export_event)
        self.actionExit.triggered.connect(MainWindow.action_exit_event)
        # self.horizontalSlider.valueChanged.connect(self.main_view.go_to_frame)
        # self.playMotionButton.pressed.connect(self.main_view.start_timer)
|
"""Read numbers into a list (rejecting duplicates), then report:

A) how many numbers were entered;
B) the values sorted in ascending and descending order;
C) whether the value 5 was entered or not.

(Exercise statement translated from Portuguese; user-facing messages are
kept in the original language.)
"""


def main(read=input, write=print):
    """Run the interactive exercise.

    Args:
        read: ``input``-like callable (injectable for testing).
        write: ``print``-like callable (injectable for testing).
    """
    numeros = []
    while True:
        n = int(read("Digite um valor: "))
        if n not in numeros:
            numeros.append(n)
        else:
            write("Valor duplicado! Não adicionado!")
        resposta = read("Quer continuar? [S/N] ").upper().strip()
        # Bug fix: the original `resposta in "N"` was also True for an
        # empty answer, because "" is a substring of every string.
        if resposta == "N":
            break
    write("=+" * 30)
    write(f'Você digitou {len(numeros)} elementos.')
    write(f'Você digitou os valores {numeros}')
    numeros.sort()
    write(f'Os valores em ordem crescente são: {numeros}')
    numeros.sort(reverse=True)
    write(f'Os valores em ordem decrescente são: {numeros}')
    if 5 in numeros:
        write("O valor 5 foi digitado.")
    else:
        write("O valor 5 não foi digitado.")


if __name__ == "__main__":
    main()
|
"""Defines actions that may be associated with flows packets."""
# System imports
from enum import IntEnum
from math import ceil
# Local source tree imports
from pyof.foundation.base import GenericStruct
from pyof.foundation.basic_types import (
FixedTypeList, Pad, UBInt8, UBInt16, UBInt32)
from pyof.v0x04.common.flow_match import OxmTLV
# Third-party imports
# Public API of this module: all action classes, both enums, and the
# action-list container.
__all__ = ('ActionExperimenterHeader', 'ActionGroup', 'ActionHeader',
           'ActionCopyTTLIn', 'ActionCopyTTLOut', 'ActionDecMPLSTTL',
           'ActionSetMPLSTTL', 'ActionDecNWTTL', 'ActionSetNWTTL',
           'ActionOutput', 'ActionPopMPLS', 'ActionPopPBB', 'ActionPopVLAN',
           'ActionPush', 'ActionSetField', 'ActionSetQueue', 'ActionType',
           'ControllerMaxLen', 'ListOfActions')
# Enums
class ActionType(IntEnum):
    """Actions associated with flows and packets.

    Values are the ``OFPAT_*`` constants of the OpenFlow 1.3 protocol,
    which explains the gaps in the numbering.
    """
    #: Output to switch port.
    OFPAT_OUTPUT = 0
    #: Copy TTL "outwards" -- from next-to-outermost to outermost
    OFPAT_COPY_TTL_OUT = 11
    #: Copy TTL "inwards" -- from outermost to next-to-outermost
    OFPAT_COPY_TTL_IN = 12
    #: MPLS TTL
    OFPAT_SET_MPLS_TTL = 15
    #: Decrement MPLS TTL
    OFPAT_DEC_MPLS_TTL = 16
    #: Push a new VLAN tag
    OFPAT_PUSH_VLAN = 17
    #: Pop the outer VLAN tag
    OFPAT_POP_VLAN = 18
    #: Push a new MPLS tag
    OFPAT_PUSH_MPLS = 19
    #: Pop the outer MPLS tag
    OFPAT_POP_MPLS = 20
    #: Set queue id when outputting to a port
    OFPAT_SET_QUEUE = 21
    #: Apply group.
    OFPAT_GROUP = 22
    #: IP TTL.
    OFPAT_SET_NW_TTL = 23
    #: Decrement IP TTL.
    OFPAT_DEC_NW_TTL = 24
    #: Set a header field using OXM TLV format.
    OFPAT_SET_FIELD = 25
    #: Push a new PBB service tag (I-TAG)
    OFPAT_PUSH_PBB = 26
    #: Pop the outer PBB service tag (I-TAG)
    OFPAT_POP_PBB = 27
    #: Experimenter type
    OFPAT_EXPERIMENTER = 0xffff
class ControllerMaxLen(IntEnum):
    """A max_len of OFPCML_NO_BUFFER means not to buffer.
    The packet should be sent.

    Used in this module as the default ``max_length`` of
    :class:`ActionOutput`.
    """
    #: maximum max_len value which can be used to request a specific byte
    #: length.
    OFPCML_MAX = 0xffe5
    #: indicates that no buffering should be applied and the whole packet is to
    #: be sent to the controller.
    OFPCML_NO_BUFFER = 0xffff
# Classes
class ActionHeader(GenericStruct):
    """Action header that is common to all actions.
    The length includes the header and any padding used to make the action
    64-bit aligned.
    NB: The length of an action *must* always be a multiple of eight.
    """
    #: One of OFPAT_*.
    action_type = UBInt16(enum_ref=ActionType)
    #: Length of action, including this header. This is the length of actions,
    #: including any padding to make it 64-bit aligned.
    length = UBInt16()
    # Pad for 64-bit alignment.
    # This should not be implemented, as each action type has its own padding.
    # pad = Pad(4)
    #: Tuple of ActionType values a subclass accepts; used by unpack() to
    #: pick the concrete class for an incoming action.
    _allowed_types = ()
    def __init__(self, action_type=None, length=None):
        """Create an ActionHeader with the optional parameters below.
        Args:
            action_type (~pyof.v0x04.common.action.ActionType):
                The type of the action.
            length (int): Length of action, including this header.
        """
        super().__init__()
        self.action_type = action_type
        self.length = length
    def get_size(self, value=None):
        """Return the action length including the padding (multiple of 8)."""
        if isinstance(value, ActionHeader):
            return value.get_size()
        elif value is None:
            # Round the raw struct size up to the next multiple of 8 bytes.
            current_size = super().get_size()
            return ceil(current_size / 8) * 8
        raise ValueError(f'Invalid value "{value}" for Action*.get_size()')
    def unpack(self, buff, offset=0):
        """Unpack a binary message into this object's attributes.
        Unpack the binary value *buff* and update this object attributes based
        on the results.
        Args:
            buff (bytes): Binary data package to be unpacked.
            offset (int): Where to begin unpacking.
        Raises:
            Exception: If there is a struct unpacking error.
        """
        # Peek at the action_type first, then re-brand this instance as the
        # concrete subclass whose _allowed_types contains it, so the final
        # super().unpack() fills in that subclass's fields.
        self.action_type = UBInt16(enum_ref=ActionType)
        self.action_type.unpack(buff, offset)
        for cls in ActionHeader.__subclasses__():
            if self.action_type.value in cls.get_allowed_types():
                self.__class__ = cls
                break
        super().unpack(buff, offset)
    @classmethod
    def get_allowed_types(cls):
        """Return allowed types for the class."""
        return cls._allowed_types
class ActionExperimenterHeader(ActionHeader):
    """Experimenter action (``OFPAT_EXPERIMENTER``)."""

    #: Experimenter ID, in the same form as in struct ofp_experimenter.
    experimenter = UBInt32()

    _allowed_types = (ActionType.OFPAT_EXPERIMENTER,)

    def __init__(self, length=None, experimenter=None):
        """Build an experimenter action.

        Args:
            length (int): Total action length, including the header.
            experimenter (int): Experimenter ID, in the same form as in
                struct ofp_experimenter.
        """
        super().__init__(action_type=ActionType.OFPAT_EXPERIMENTER)
        self.length = length
        self.experimenter = experimenter
class ActionGroup(ActionHeader):
    """Group action (``OFPAT_GROUP``)."""

    #: Identifier of the group used to process this packet.
    group_id = UBInt32()

    _allowed_types = (ActionType.OFPAT_GROUP,)

    def __init__(self, group_id=None):
        """Build a group action.

        Args:
            group_id (int): Group used to process this packet; the set of
                buckets to apply depends on the group type.
        """
        super().__init__(action_type=ActionType.OFPAT_GROUP, length=8)
        self.group_id = group_id
class ActionDecMPLSTTL(ActionHeader):
    """Decrement-MPLS-TTL action (``OFPAT_DEC_MPLS_TTL``)."""

    #: Four padding bytes keep the action 8-byte aligned.
    pad = Pad(4)

    _allowed_types = (ActionType.OFPAT_DEC_MPLS_TTL,)

    def __init__(self):
        """Build a decrement-MPLS-TTL action (takes no parameters)."""
        super().__init__(action_type=ActionType.OFPAT_DEC_MPLS_TTL, length=8)
class ActionSetMPLSTTL(ActionHeader):
    """Set-MPLS-TTL action (``OFPAT_SET_MPLS_TTL``)."""

    #: New MPLS TTL value.
    mpls_ttl = UBInt8()
    #: Three padding bytes keep the action 8-byte aligned.
    pad = Pad(3)

    _allowed_types = (ActionType.OFPAT_SET_MPLS_TTL,)

    def __init__(self, mpls_ttl=None):
        """Build a set-MPLS-TTL action.

        Args:
            mpls_ttl (int): The MPLS TTL to set.
        """
        super().__init__(action_type=ActionType.OFPAT_SET_MPLS_TTL, length=8)
        self.mpls_ttl = mpls_ttl
class ActionCopyTTLIn(ActionHeader):
    """Copy-TTL-inwards action (``OFPAT_COPY_TTL_IN``)."""

    #: Four padding bytes keep the action 8-byte aligned.
    pad = Pad(4)

    _allowed_types = (ActionType.OFPAT_COPY_TTL_IN,)

    def __init__(self):
        """Build a copy-TTL-in action (takes no parameters)."""
        super().__init__(action_type=ActionType.OFPAT_COPY_TTL_IN, length=8)
class ActionCopyTTLOut(ActionHeader):
    """Copy-TTL-outwards action (``OFPAT_COPY_TTL_OUT``)."""

    #: Four padding bytes keep the action 8-byte aligned.
    pad = Pad(4)

    _allowed_types = (ActionType.OFPAT_COPY_TTL_OUT,)

    def __init__(self):
        """Build a copy-TTL-out action (takes no parameters)."""
        super().__init__(action_type=ActionType.OFPAT_COPY_TTL_OUT, length=8)
class ActionPopVLAN(ActionHeader):
    """Pop-VLAN action (``OFPAT_POP_VLAN``)."""

    #: Four padding bytes keep the action 8-byte aligned.
    pad = Pad(4)

    _allowed_types = (ActionType.OFPAT_POP_VLAN,)

    def __init__(self):
        """Build a pop-VLAN action (takes no parameters)."""
        super().__init__(action_type=ActionType.OFPAT_POP_VLAN, length=8)
class ActionPopPBB(ActionHeader):
    """Pop-PBB action (``OFPAT_POP_PBB``)."""

    #: Four padding bytes keep the action 8-byte aligned.
    pad = Pad(4)

    _allowed_types = (ActionType.OFPAT_POP_PBB,)

    def __init__(self):
        """Build a pop-PBB action (takes no parameters)."""
        super().__init__(action_type=ActionType.OFPAT_POP_PBB, length=8)
class ActionDecNWTTL(ActionHeader):
    """Decrement-IP-TTL action (``OFPAT_DEC_NW_TTL``)."""

    #: Four padding bytes keep the action 8-byte aligned.
    pad = Pad(4)

    _allowed_types = (ActionType.OFPAT_DEC_NW_TTL,)

    def __init__(self):
        """Build a decrement-IP-TTL action (takes no parameters)."""
        super().__init__(action_type=ActionType.OFPAT_DEC_NW_TTL, length=8)
class ActionSetNWTTL(ActionHeader):
    """Set-IP-TTL action (``OFPAT_SET_NW_TTL``)."""

    #: TTL value to set in the IP header.
    nw_ttl = UBInt8()
    #: Three padding bytes keep the action 8-byte aligned.
    pad = Pad(3)

    _allowed_types = (ActionType.OFPAT_SET_NW_TTL,)

    def __init__(self, nw_ttl=None):
        """Build a set-IP-TTL action.

        Args:
            nw_ttl (int): The TTL to set in the IP header.
        """
        super().__init__(action_type=ActionType.OFPAT_SET_NW_TTL, length=8)
        self.nw_ttl = nw_ttl
class ActionOutput(ActionHeader):
    """Output action (``OFPAT_OUTPUT``): send packets out of ``port``.

    When ``port`` is :attr:`.Port.OFPP_CONTROLLER`, ``max_length`` caps the
    number of bytes sent to the controller; zero means send no packet bytes.
    """

    #: Output port.
    port = UBInt32()
    #: Max number of bytes to send when outputting to the controller.
    max_length = UBInt16()
    #: Six padding bytes keep the 16-byte action 8-byte aligned.
    pad = Pad(6)

    _allowed_types = (ActionType.OFPAT_OUTPUT,)

    def __init__(self, port=None,
                 max_length=ControllerMaxLen.OFPCML_NO_BUFFER):
        """Build an output action.

        Args:
            port (:class:`Port` or :class:`int`): Output port.
            max_length (int): Max length to send to controller; defaults to
                no buffering (whole packet).
        """
        super().__init__(action_type=ActionType.OFPAT_OUTPUT, length=16)
        self.port = port
        self.max_length = max_length
class ActionPopMPLS(ActionHeader):
    """Action structure for OFPAT_POP_MPLS."""
    #: Ethertype of the payload after the MPLS tag is popped.
    ethertype = UBInt16()
    #: Two padding bytes keep the action 8-byte aligned.
    pad = Pad(2)
    _allowed_types = (ActionType.OFPAT_POP_MPLS,)
    def __init__(self, ethertype=None):
        """Create an ActionPopMPLS with the optional parameters below.
        Args:
            ethertype (int): indicates the Ethertype of the payload.
        """
        # Consistency fix: pass length=8 like every other fixed-size action
        # in this module (4-byte header + 2-byte ethertype + 2-byte pad);
        # previously the length field was left unset (None).
        super().__init__(action_type=ActionType.OFPAT_POP_MPLS, length=8)
        self.ethertype = ethertype
class ActionPush(ActionHeader):
    """Push-tag action, shared by ``OFPAT_PUSH_[VLAN/MPLS/PBB]``."""

    #: Ethertype of the new tag.
    ethertype = UBInt16()
    #: Two padding bytes keep the action 8-byte aligned.
    pad = Pad(2)

    _allowed_types = (ActionType.OFPAT_PUSH_VLAN, ActionType.OFPAT_PUSH_MPLS,
                      ActionType.OFPAT_PUSH_PBB)

    def __init__(self, action_type=None, ethertype=None):
        """Build a push-tag action.

        Args:
            action_type (:class:`ActionType`): which tag to push
                (VLAN, MPLS or PBB).
            ethertype (int): Ethertype of the new tag.
        """
        super().__init__(action_type, length=8)
        self.ethertype = ethertype
class ActionSetField(ActionHeader):
    """Set-field action (``OFPAT_SET_FIELD``).

    The packed form is the 4-byte header, a single OXM TLV, and zero
    padding up to the next multiple of 8 bytes.
    """

    #: OXM TLV carrying the field to set and its value.
    field = OxmTLV()

    _allowed_types = (ActionType.OFPAT_SET_FIELD,)

    def __init__(self, field=None):
        """Build a set-field action.

        Args:
            field (:class:`OxmTLV`): OXM field and value; a fresh empty TLV
                is used when none is given.
        """
        super().__init__(action_type=ActionType.OFPAT_SET_FIELD)
        self.field = OxmTLV() if field is None else field

    def pack(self, value=None):
        """Pack this action, refreshing ``length`` and padding to 8 bytes."""
        self._update_length()
        return self._complete_last_byte(super().pack())

    def _update_length(self):
        """Set ``length`` to header + TLV size, rounded up to 8 bytes."""
        raw_length = 4 + len(self.field.pack())
        # Equivalent to ceil(raw_length / 8) * 8.
        self.length = raw_length + (-raw_length % 8)

    def _complete_last_byte(self, packet):
        """Append zero padding until ``packet`` reaches ``self.length``."""
        missing = self.length - len(packet)
        if missing > 0:
            packet += Pad(missing).pack()
        return packet
class ActionSetQueue(ActionHeader):
    """Set-queue action (``OFPAT_SET_QUEUE``)."""

    #: Queue on the port that packets should be sent to.
    queue_id = UBInt32()

    _allowed_types = (ActionType.OFPAT_SET_QUEUE,)

    def __init__(self, queue_id=None):
        """Build a set-queue action.

        Args:
            queue_id (int): queue on the output port to send packets to.
        """
        super().__init__(action_type=ActionType.OFPAT_SET_QUEUE, length=8)
        self.queue_id = queue_id
class ListOfActions(FixedTypeList):
    """A :class:`FixedTypeList` whose items are :class:`ActionHeader`\\ s."""

    def __init__(self, items=None):
        """Build the action list.

        Args:
            items (~pyof.v0x04.common.action.ActionHeader):
                A single instance or a list of instances.
        """
        super().__init__(pyof_class=ActionHeader, items=items)
|
import os
from tool.model import GetModelInfo
# Settings consumed by the test framework (keys follow its convention).
GIANT_SETTING = {
    "BASE_DIR": os.path.dirname(os.path.realpath(__file__)),  # This one is essential.
    "TEST_FILES": ["gaintstar_api.json"],
    "DATA_FILES": {"default": "./dataTable/data.xlsx"},
    "PARAMETRIC_CLASS": [GetModelInfo],
    "REMOTE": False,
    "DEBUG": True,
}
|
from kivy.properties import ObjectProperty, StringProperty
from kivy.uix.screenmanager import Screen
class ChangeLevelScreen(Screen):
    """Kivy screen for changing levels.

    Attributes are Kivy properties, typically bound from the .kv layout.
    """
    # Reference to the game screen (assigned externally or from kv).
    game_screen = ObjectProperty(None)
    # Path of the stars image to show — presumably the level's score rating;
    # confirm against the .kv layout.
    stars_img = StringProperty('')
|
import pytest
@pytest.fixture()
def valid_spelled_content_pack(pack):
    """
    Create a pack with valid spelled content.

    Adds three of each content item (release notes, integration,
    incident field, script, layout) containing no spelling errors.
    """
    for i in range(3):
        pack.create_release_notes(
            version=f"release-note-{i}",
            content="\n#### Scripts\n##### ScriptName\n- Added a feature"
        )
        pack.create_integration(name=f"integration-{i}", yml={"category": "category"})
        pack.create_incident_field(name=f"incident-field-{i}", content={"test": "test"})
        pack.create_script(name=f"script-{i}", yml={"script": "script"})
        pack.create_layout(name=f"layout-{i}", content={"test": "test"})
    return pack
@pytest.fixture()
def invalid_spelled_content_pack(pack):
    """
    Create a pack with invalid spelled content.

    Deliberate typos ("Scipt", "invalidd") are planted in the release
    notes, integration and script; the paths of those files are collected
    and returned alongside the pack.

    NOTE(review): the incident fields and layouts are also misspelled but
    their paths are not collected — presumably they are not covered by the
    spell check; confirm.
    """
    misspelled_files = set()
    for i in range(3):
        rn = pack.create_release_notes(
            version=f"release-note-{i}",
            content="\n#### Scipt\n##### SciptName\n- Added a feature"
        )
        misspelled_files.add(rn.path)
        integration = pack.create_integration(
            name=f"integration-{i}", yml={"display": "invalidd", "description": "invalidd", "category": "category"}
        )
        misspelled_files.add(integration.yml.path)
        pack.create_incident_field(name=f"incident-field-{i}", content={"invalidd": "invalidd"})
        script = pack.create_script(name=f"script-{i}", yml={"comment": "invalidd", "script": "script"})
        misspelled_files.add(script.yml.path)
        pack.create_layout(name=f"layout-{i}", content={"invalidd": "invalidd"})
    return pack, misspelled_files
@pytest.fixture()
def misspelled_integration(invalid_spelled_content_pack):
    """
    Return the first integration of the misspelled pack.
    """
    misspelled_pack, _files = invalid_spelled_content_pack
    return misspelled_pack.integrations[0]
|
"""A package for client"""
# pylint: skip-file
__all__ = [
'demo_client'
]
|
import numpy as np
import matplotlib.pyplot as plt
# Columns: sample index n and eight thermocouple temperatures T1..T8.
n, t1, t2, t3, t4, t5, t6, t7, t8 = np.genfromtxt('python/statisch.txt', unpack=True)
# Static measurement: temperature curves vs. time (one sample every 5 s).
plt.plot(5*n-5, t1, 'bo', markersize=1, label=r'$T_1$,\;Messing (breit)')
plt.plot(5*n-5, t4, 'ro', markersize=1, label=r'$T_4$,\;Messing (schmal)')
plt.plot(5*n-5, t5, 'ko', markersize=1, label=r'$T_5$,\;Aluminium')
plt.plot(5*n-5, t8, 'yo', markersize=1, label=r'$T_8$,\;Edelstahl')
plt.legend()
plt.grid()
plt.xlabel(r'$t\:/\:\si{\second}$')
plt.ylabel(r'$T\:/\:\si{\celsius}$')
plt.xlim(0, 960)
plt.ylim(15, 51)
plt.tight_layout(pad=0)
plt.savefig('build/statisch.pdf', bbox_inches='tight', pad_inches=0)
plt.clf()
# Spot values at t = 700 s (sample index 139).
print('t=700')
print('T1', t1[139])
print('T4', t4[139])
print('T5', t5[139])
print('T8', t8[139])
# Temperature differences along the stainless-steel (T7-T8) and wide-brass
# (T2-T1) bars.
plt.plot(5*n, t7-t8, 'yx', markersize=3, label=r'$T_7-T_8$')
plt.plot(5*n, t2-t1, 'bx', markersize=3, label=r'$T_2-T_1$')
plt.legend(loc='lower right')
plt.grid()
plt.xlabel(r'$t\:/\:\si{\second}$')
plt.ylabel(r'$\symup{Δ}T\:/\:\si{\celsius}$')
plt.xlim(0, 960)
plt.ylim(-2, 12)
plt.tight_layout(pad=0)
plt.savefig('build/statisch-unterschied.pdf', bbox_inches='tight', pad_inches=0)
plt.clf()
np.savetxt('build/statisch.txt', np.column_stack([5*n, t1, t2, t2-t1, t4, t5, t7, t8, t7-t8]), header='t, T1, T2, T2-T1, T4, T5, T7, T8, T7-T8')
# Bar cross sections (presumably width x thickness, in m^2): narrow (As)
# and wide (Ab).
As = 0.004 * 0.007
Ab = 0.004 * 0.012
# Thermocouple spacing along the bar, in m.
deltax = 0.03
# Sample indices at which the heat current is evaluated (t = 5*index s).
zeiten = np.array([20, 60, 100, 140, 180])
# Heat current q = -kappa*A*dT/dx; the prefactors 90/220/21 are presumably
# the thermal conductivities of brass/aluminium/stainless steel in W/(m K).
messingschmall = -90*As*(t3[zeiten]-t4[zeiten])/deltax
messingbreit = -90*Ab*(t2[zeiten]-t1[zeiten])/deltax
aluminium = -220*Ab*(t6[zeiten]-t5[zeiten])/deltax
edelstahl = -21*Ab*(t7[zeiten]-t8[zeiten])/deltax
np.savetxt('build/waermestrom.txt', np.column_stack([5*zeiten, messingschmall, messingbreit, aluminium, edelstahl]), header='zeiten, messingschmall, messingbreit, aluminium, edelstahl')
print('aschmall', As)
print('abreit', Ab)
print('messingschmall/7, messingbreit/12')
print(np.column_stack([messingschmall/7, messingbreit/12]))
|
# -*- coding: utf-8 -*-
from mamonsu.plugins.pgsql.plugin import PgsqlPlugin as Plugin
from .pool import Pooler
import time
class PgHealth(Plugin):
    """Basic PostgreSQL health metrics: ping, uptime and cache hit ratio."""

    # Trigger thresholds: minimum uptime (seconds) and minimum cache hit (%).
    DEFAULT_CONFIG = {'uptime': str(60 * 10), 'cache': str(80)}

    def run(self, zbx):
        """Collect the three metrics and push them via ``zbx.send``."""
        start_time = time.time()
        Pooler.query('select 1 as health')
        # Fix: the ping item is declared with UNITS.ms below, so the elapsed
        # seconds must be multiplied by 1000 (the original used * 100).
        zbx.send('pgsql.ping[]', (time.time() - start_time) * 1000)
        result = Pooler.query("select \
            date_part('epoch', now() - pg_postmaster_start_time())")
        zbx.send('pgsql.uptime[]', int(result[0][0]))
        result = Pooler.query('select \
            round(sum(blks_hit)*100/sum(blks_hit+blks_read), 2) \
            from pg_catalog.pg_stat_database')
        zbx.send('pgsql.cache[hit]', int(result[0][0]))

    def items(self, template):
        """Declare the three Zabbix items for this plugin's template."""
        result = template.item({
            'name': 'PostgreSQL: ping',
            'key': 'pgsql.ping[]',
            'value_type': Plugin.VALUE_TYPE.numeric_float,
            'units': Plugin.UNITS.ms
        }) + template.item({
            'name': 'PostgreSQL: service uptime',
            'key': 'pgsql.uptime[]',
            'value_type': Plugin.VALUE_TYPE.numeric_unsigned,
            'units': Plugin.UNITS.uptime
        }) + template.item({
            'name': 'PostgreSQL: cache hit ratio',
            'key': 'pgsql.cache[hit]',
            'value_type': Plugin.VALUE_TYPE.numeric_unsigned,
            'units': Plugin.UNITS.percent
        })
        return result

    def graphs(self, template):
        """Declare one graph combining cache hit ratio and uptime."""
        items = [
            {'key': 'pgsql.cache[hit]'},
            {'key': 'pgsql.uptime[]', 'color': 'DF0101', 'yaxisside': 1}
        ]
        graph = {'name': 'PostgreSQL uptime', 'items': items}
        return template.graph(graph)

    def triggers(self, template):
        """Declare triggers for a recent restart and a low cache hit ratio."""
        result = template.trigger({
            'name': 'PostgreSQL service was restarted on '
                    '{HOSTNAME} (uptime={ITEM.LASTVALUE})',
            'expression': '{#TEMPLATE:pgsql.uptime[].last'
                          '()}<' + str(self.plugin_config('uptime'))
        }) + template.trigger({
            'name': 'PostgreSQL cache hit ratio too low on '
                    '{HOSTNAME} ({ITEM.LASTVALUE})',
            'expression': '{#TEMPLATE:pgsql.cache[hit].last'
                          '()}<' + str(self.plugin_config('cache'))
        })
        return result
|
import os
import pathlib
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from fnmatch import fnmatch
from glob import glob
from typing import List, Optional
from urllib.parse import urlparse
from collection_manager.entities.exceptions import MissingValueCollectionError
class CollectionStorageType(Enum):
    """Where a collection's files live: local filesystem or S3."""
    LOCAL = 1
    S3 = 2
@dataclass(frozen=True)
class Collection:
    """An immutable collection definition: a dataset id plus where its files live.

    ``path`` is either an S3 prefix (``s3://...``), a local directory, or a
    local glob pattern matching individual files.
    """
    dataset_id: str
    projection: str
    dimension_names: frozenset
    slices: frozenset
    path: str
    historical_priority: int
    forward_processing_priority: Optional[int] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None

    @staticmethod
    def from_dict(properties: dict):
        """Build a Collection from a configuration dict.

        Args:
            properties: dict with keys ``id``, ``projection``,
                ``dimensionNames``, ``slices``, ``path``, ``priority`` and
                optional ``forward-processing-priority``, ``from``, ``to``
                (ISO date strings).
        Raises:
            MissingValueCollectionError: if a required key is absent.
        """
        try:
            date_to = datetime.fromisoformat(properties['to']) if 'to' in properties else None
            date_from = datetime.fromisoformat(properties['from']) if 'from' in properties else None
            collection = Collection(dataset_id=properties['id'],
                                    projection=properties['projection'],
                                    dimension_names=frozenset(properties['dimensionNames'].items()),
                                    slices=frozenset(properties['slices'].items()),
                                    path=properties['path'],
                                    historical_priority=properties['priority'],
                                    forward_processing_priority=properties.get('forward-processing-priority', None),
                                    date_to=date_to,
                                    date_from=date_from)
            return collection
        except KeyError as e:
            raise MissingValueCollectionError(missing_value=e.args[0])

    def storage_type(self):
        """Return S3 or LOCAL depending on the ``path`` scheme."""
        if urlparse(self.path).scheme == 's3':
            return CollectionStorageType.S3
        return CollectionStorageType.LOCAL

    def directory(self):
        """Return the directory holding this collection's files.

        For S3 prefixes and existing local directories this is ``path``
        itself; for a file/glob path it is the parent directory.
        """
        if urlparse(self.path).scheme == 's3':
            return self.path
        if os.path.isdir(self.path):
            return self.path
        return os.path.dirname(self.path)

    def owns_file(self, file_path: str) -> bool:
        """Return True if ``file_path`` belongs to this collection.

        S3 paths match by prefix; a local directory path matches any file
        under it (at any depth); otherwise ``path`` is treated as a glob
        pattern for ``fnmatch``.

        Raises:
            IsADirectoryError: if ``file_path`` is a local directory.
        """
        if urlparse(file_path).scheme == 's3':
            # Idiom fix: startswith() instead of find() == 0.
            return file_path.startswith(self.path)
        if os.path.isdir(file_path):
            raise IsADirectoryError()
        if os.path.isdir(self.path):
            return pathlib.Path(self.path) in pathlib.Path(file_path).parents
        return fnmatch(file_path, self.path)
|
# coding: utf-8
"""
AVM
This is api for AVM (automated valuation machine) # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: info@enbisys.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class FloorLevel(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    # Allowed enum values.
    BASEMENT = "basement"
    GROUND = "ground"
    MIDDLE = "middle"
    TOP = "top"
    FLOOR_1 = "floor_1"
    FLOOR_2 = "floor_2"
    FLOOR_3 = "floor_3"
    FLOOR_4 = "floor_4"
    FLOOR_5 = "floor_5"
    FLOOR_6 = "floor_6"
    FLOOR_7 = "floor_7"
    FLOOR_8 = "floor_8"
    FLOOR_9 = "floor_9"
    FLOOR_10 = "floor_10"
    FLOOR_11 = "floor_11"
    FLOOR_12 = "floor_12"
    FLOOR_13 = "floor_13"
    FLOOR_14 = "floor_14"
    FLOOR_15 = "floor_15"
    FLOOR_16 = "floor_16"
    FLOOR_17 = "floor_17"
    FLOOR_18 = "floor_18"
    FLOOR_19 = "floor_19"
    FLOOR_20 = "floor_20"
    FLOOR_21_OR_ABOVE = "floor_21_or_above"

    # openapi_types maps attribute name -> attribute type;
    # attribute_map maps attribute name -> JSON key. Both are empty for
    # this pure-enum model.
    openapi_types = {
    }
    attribute_map = {
    }

    def __init__(self):  # noqa: E501
        """FloorLevel - a model defined in OpenAPI"""  # noqa: E501
        self.discriminator = None

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(val):
            # Recursively serialize nested models inside lists and dicts.
            if isinstance(val, list):
                return [x.to_dict() if hasattr(x, "to_dict") else x for x in val]
            if hasattr(val, "to_dict"):
                return val.to_dict()
            if isinstance(val, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in val.items()}
            return val

        return {name: _convert(getattr(self, name)) for name in self.openapi_types}

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, FloorLevel) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
import numpy as np
import os
from tqdm import tqdm
from absl import flags, app
from code.utils.calculate_cds import get_cds
from code.utils.csv_util import read_features
def confusion_matrix_test(features_dir, matrix_dir):
    """Build a 40x40 confusion matrix from enrolled vs. validation features.

    For every validation sample, the predicted label is the enrolled entry
    with the highest ``get_cds`` score; the matrix counts
    (true label, predicted label) pairs and is saved as CSV.

    Args:
        features_dir: directory holding the "enroll" and "validate" features.
        matrix_dir: output directory for ``confusion_matrix.csv``.
    """
    validate_dict = read_features(features_dir, "validate")
    # Confusion matrix: rows = true labels, columns = predicted labels.
    confusion_matrix = np.zeros((40, 40), dtype=np.int16)
    # Perf fix: read the enrolled features once, before the loop, and
    # materialize them so they can be re-iterated for every validation
    # sample (previously they were re-read on every iteration).
    enroll_items = list(read_features(features_dir, "enroll"))
    for val_label, val_feat in tqdm(validate_dict):
        distance = [get_cds(val_feat, enroll_feat) for _, enroll_feat in enroll_items]
        predict_label = np.argmax(distance, axis=0)
        confusion_matrix[val_label][predict_label] += 1
    np.savetxt(os.path.join(matrix_dir, "confusion_matrix.csv"), confusion_matrix, fmt='%d', delimiter=",")
# Project root: two levels above the current working directory.
# NOTE(review): this depends on the script being launched from its own
# subdirectory; confirm the expected invocation path.
root_dir = os.path.abspath(os.path.join(os.getcwd(), "../.."))
FLAGS = flags.FLAGS
# Command-line flags (absl): feature location and output directory.
flags.DEFINE_string(
    "features_dir", os.path.join(root_dir, "results/features"),
    "the enrolled data dir")
flags.DEFINE_string(
    "matrix_dir", default=os.path.join(root_dir, "results"),
    help="the dir of saving confusion matrix")
def main(argv):
    """absl entry point: run the confusion-matrix evaluation with the flags."""
    confusion_matrix_test(FLAGS.features_dir, FLAGS.matrix_dir)
if __name__ == "__main__":
    app.run(main)
|
from kivy.uix.boxlayout import BoxLayout
from kivy.graphics import Color,Rectangle
class Colored_layout(BoxLayout):
    """A BoxLayout that paints a solid background colour behind its children."""

    def __init__(self, l_color=(0.7, 0, 0, 1), **kw):
        """Create the layout.

        Args:
            l_color: RGBA background colour (defaults to a dark red).
            **kw: forwarded to BoxLayout.
        """
        super().__init__(**kw)
        self.padding = "10dp"
        # Draw the background rectangle before the children are rendered.
        with self.canvas.before:
            Color(*l_color)
            self.rect = Rectangle(size=self.size, pos=self.pos)
        # Keep the rectangle glued to the widget when it moves or resizes.
        self.bind(size=self._update_rect, pos=self._update_rect)

    def _update_rect(self, instance, value):
        """Match the background rectangle to the widget's geometry."""
        self.rect.pos = instance.pos
        self.rect.size = instance.size
|
from enum import IntEnum
class Language(IntEnum):
    """Integer-valued enum of supported languages."""
    # IPA presumably stands for the International Phonetic Alphabet —
    # confirm against usage.
    IPA = 0
    ENG = 1  # English
    CHN = 2  # Chinese
    GER = 3  # German
|
from django.forms import ModelForm
from .models import Profile
class AddData(ModelForm):
    """ModelForm exposing every field of the Profile model."""
    class Meta:
        model = Profile
        fields = '__all__'
"""Console script for pyfmt."""
import sys
import click
from .pyfmt import fmt
@click.command()
@click.argument("fmt_folder")
@click.option("--folder", help="The folder to format")
def main(folder: str, fmt_folder: str):
    """Console script for pyfmt."""
    # Fix: the user-facing message had a typo ("Formating").
    click.echo("Formatting ... ")
    click.echo("See click documentation at https://click.palletsprojects.com/")
    # FMT_FOLDER is the positional argument; --folder is only used as a
    # fallback when the positional value is empty.
    if fmt_folder:
        fmt(fmt_folder)
    else:
        fmt(folder)
    return 0


if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
|
from pytpp.tools.helpers.date_converter import from_date_string
from pytpp.properties.response_objects.dataclasses import certificate
class Certificate:
    @staticmethod
    def Certificate(response_object: dict):
        """Map a raw API response dict to a ``certificate.Certificate``.

        Non-dict input is treated as an empty response (all fields None/empty).
        """
        if not isinstance(response_object, dict):
            response_object = {}
        return certificate.Certificate(
            created_on=response_object.get('CreatedOn'),
            dn=response_object.get('DN'),
            guid=response_object.get('Guid'),
            name=response_object.get('Name'),
            parent_dn=response_object.get('ParentDn'),
            schema_class=response_object.get('SchemaClass'),
            x509=Certificate._X509(response_object.get('X509')),
            links=[Certificate.Link(link) for link in response_object.get('_links', [])],
        )
    @staticmethod
    def Link(response_object: dict):
        """Map a raw paging-link dict to a ``certificate.Link``.

        Non-dict input is treated as an empty response.
        """
        if not isinstance(response_object, dict):
            response_object = {}
        return certificate.Link(
            details=response_object.get('Details'),
            next=response_object.get('Next'),
            previous=response_object.get('Previous'),
        )
    @staticmethod
    def CSR(response_object: dict):
        """Map a raw CSR dict to a ``certificate.CSR``.

        Non-dict input is treated as an empty response.
        """
        if not isinstance(response_object, dict):
            response_object = {}
        return certificate.CSR(
            details=Certificate._CSRDetails(response_object.get('Details')),
            enrollable=response_object.get('Enrollable'),
        )
    @staticmethod
    def Policy(response_object: dict):
        """Map a raw policy dict to a ``certificate.Policy``.

        Locked values are wrapped through the ``_Locked*`` helpers; non-dict
        input is treated as an empty response.
        """
        if not isinstance(response_object, dict):
            response_object = {}
        return certificate.Policy(
            certificate_authority=Certificate._LockedSingleValue(response_object.get('CertificateAuthority')),
            csr_generation=Certificate._LockedSingleValue(response_object.get('CsrGeneration')),
            key_generation=Certificate._LockedSingleValue(response_object.get('KeyGeneration')),
            key_pair=Certificate._LockedKeyPair(response_object.get('KeyPair')),
            management_type=Certificate._LockedSingleValue(response_object.get('ManagementType')),
            private_key_reuse_allowed=response_object.get('PrivateKeyReuseAllowed'),
            subj_alt_name_dns_allowed=response_object.get('SubjAltNameDnsAllowed'),
            subj_alt_name_email_allowed=response_object.get('SubjAltNameEmailAllowed'),
            subj_alt_name_ip_allowed=response_object.get('SubjAltNameIpAllowed'),
            subj_alt_name_upn_allowed=response_object.get('SubjAltNameUpnAllowed'),
            subj_alt_name_uri_allowed=response_object.get('SubjAltNameUriAllowed'),
            subject=Certificate._LockedSubject(response_object.get('Subject')),
            unique_subject_enforced=response_object.get('UniqueSubjectEnforced'),
            whitelisted_domains=response_object.get('WhitelistedDomains'),
            wildcards_allowed=response_object.get('WildcardsAllowed'),
        )
@staticmethod
def CertificateDetails(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.CertificateDetails(
c=response_object.get('C'),
cn=response_object.get('CN'),
enhanced_key_usage=response_object.get('EnhancedKeyUsage'),
issuer=response_object.get('Issuer'),
key_algorithm=response_object.get('KeyAlgorithm'),
key_size=response_object.get('KeySize'),
key_usage=response_object.get('KeyUsage'),
l=response_object.get('L'),
o=response_object.get('O'),
ou=response_object.get('OU'),
public_key_hash=response_object.get('PublicKeyHash'),
s=response_object.get('S'),
ski_key_identifier=response_object.get('SKIKeyIdentifier'),
serial=response_object.get('Serial'),
signature_algorithm=response_object.get('SignatureAlgorithm'),
signature_algorithm_oid=response_object.get('SignatureAlgorithmOID'),
store_added=response_object.get('StoreAdded'),
subject=response_object.get('Subject'),
subject_alt_name_dns=response_object.get('SubjectAltNameDNS'),
subject_alt_name_email=response_object.get('SubjectAltNameEmail'),
subject_alt_name_ip=response_object.get('SubjectAltNameIp'),
subject_alt_name_upn=response_object.get('SubjectAltNameUpn'),
subject_alt_name_uri=response_object.get('SubjectAltNameUri'),
thumbprint=response_object.get('Thumbprint'),
valid_from=from_date_string(response_object.get('ValidFrom')),
valid_to=from_date_string(response_object.get('ValidTo')),
)
@staticmethod
def PreviousVersions(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.PreviousVersions(
certificate_details=Certificate.CertificateDetails(response_object.get('CertificateDetails')),
vault_id=response_object.get('VaultId'),
)
@staticmethod
def ProcessingDetails(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.ProcessingDetails(
in_error=response_object.get('InError'),
stage=response_object.get('Stage'),
status=response_object.get('Status'),
)
@staticmethod
def RenewalDetails(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.RenewalDetails(
city=response_object.get('City'),
country=response_object.get('Country'),
organization=response_object.get('Organization'),
organizational_unit=response_object.get('OrganizationUnit'),
state=response_object.get('State'),
subject=response_object.get('Subject'),
subject_alt_name_dns=response_object.get('SubjectAltNameDNS'),
subject_alt_name_email=response_object.get('SubjectAltNameEmail'),
subject_alt_name_ip_address=response_object.get('SubjectAltNameIPAddress'),
subject_alt_name_other_name_upn=response_object.get('SubjectAltNameOtherNameUPN'),
subject_alt_name_uri=response_object.get('SubjectAltNameURI'),
valid_from=from_date_string(response_object.get('ValidFrom')),
valid_to=from_date_string(response_object.get('ValidTo')),
)
@staticmethod
def ValidationDetails(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.ValidationDetails(
last_validation_state_update=response_object.get('LastValidationStateUpdate'),
validation_state=response_object.get('ValidationState'),
)
@staticmethod
def SslTls(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.SslTls(
host=response_object.get('Host'),
ip_address=response_object.get('IpAddress'),
port=response_object.get('Port'),
result=Certificate._SslTlsResult(response_object.get('Result')),
sources=response_object.get('Sources'),
)
@staticmethod
def File(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate.File(
installation=response_object.get('Installation'),
performed_on=from_date_string(response_object.get('PerformedOn')),
result=response_object.get('Result'),
)
@staticmethod
def _SslTlsResult(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._SslTlsResult(
chain=Certificate._BitMaskValues(response_object.get('Chain')),
end_entity=Certificate._BitMaskValues(response_object.get('EndEntity')),
id=response_object.get('ID'),
protocols=Certificate._BitMaskValues(response_object.get('Protocols')),
)
@staticmethod
def _BitMaskValues(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._BitMaskValues(
bitmask=response_object.get('BitMask'),
values=response_object.get('Values'),
)
@staticmethod
def _SANS(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._SANS(
dns=response_object.get('DNS'),
ip=response_object.get('IP'),
)
@staticmethod
def _X509(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._X509(
cn=response_object.get('CN'),
issuer=response_object.get('Issuer'),
key_algorithm=response_object.get('KeyAlgorithm'),
key_size=response_object.get('KeySize'),
sans=response_object.get('SANS'),
serial=response_object.get('Serial'),
subject=response_object.get('Subject'),
thumbprint=response_object.get('Thumbprint'),
valid_from=from_date_string(response_object.get('ValidFrom')),
valid_to=from_date_string(response_object.get('ValidTo')),
)
@staticmethod
def _CompliantSingleValue(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._CompliantSingleValue(
compliant=response_object.get('Compliant'),
value=response_object.get('Value'),
)
@staticmethod
def _CompliantMultiValue(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._CompliantMultiValue(
compliant=response_object.get('Compliant'),
values=response_object.get('Values'),
)
@staticmethod
def _LockedSingleValue(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._LockedSingleValue(
locked=response_object.get('Locked'),
value=response_object.get('Value'),
)
@staticmethod
def _LockedMultiValue(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._LockedMultiValue(
locked=response_object.get('Locked'),
values=response_object.get('Values'),
)
@staticmethod
def _LockedKeyPair(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._LockedKeyPair(
key_algorithm=Certificate._LockedSingleValue(response_object.get('KeyAlgorithm')),
key_size=Certificate._LockedSingleValue(response_object.get('KeySize')),
)
@staticmethod
def _LockedSubject(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._LockedSubject(
city=Certificate._LockedSingleValue(response_object.get('City')),
country=Certificate._LockedSingleValue(response_object.get('Country')),
organization=Certificate._LockedSingleValue(response_object.get('Organization')),
organizational_units=Certificate._LockedMultiValue(response_object.get('OrganizationalUnit')),
state=Certificate._LockedSingleValue(response_object.get('State')),
)
@staticmethod
def _CSRDetails(response_object: dict):
if not isinstance(response_object, dict):
response_object = {}
return certificate._CSRDetails(
city=Certificate._CompliantSingleValue(response_object.get('City')),
common_name=Certificate._CompliantSingleValue(response_object.get('CommonName')),
country=Certificate._CompliantSingleValue(response_object.get('Country')),
key_algorithm=Certificate._CompliantSingleValue(response_object.get('KeyAlgorithm')),
key_size=Certificate._CompliantSingleValue(response_object.get('KeySize')),
organization=Certificate._CompliantSingleValue(response_object.get('Organization')),
organizational_unit=Certificate._CompliantMultiValue(response_object.get('OrganizationalUnit')),
private_key_reused=Certificate._CompliantSingleValue(response_object.get('PrivateKeyReused')),
state=Certificate._CompliantSingleValue(response_object.get('State')),
subj_alt_name_dns=Certificate._CompliantMultiValue(response_object.get('SubjAltNameDns')),
subj_alt_name_email=Certificate._CompliantMultiValue(response_object.get('SubjAltNameEmail')),
subj_alt_name_ip=Certificate._CompliantMultiValue(response_object.get('SubjAltNameIp')),
subj_alt_name_upn=Certificate._CompliantMultiValue(response_object.get('SubjAltNameUpn')),
subj_alt_name_uri=Certificate._CompliantMultiValue(response_object.get('SubjAltNameUri')),
)
|
import math
import time
# --- Fit y = m*x + c to `data` by gradient descent with numeric gradients ---
start_time = time.time()

# Training points [x, y]; the last point is a huge-magnitude outlier.
data = [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 6],
        [100000000000000, -90000000000000]]
prediction_x_value = 90

# Initial guesses and tuning constants.
start_m = -100
start_c = -20
acuracy_for_m = 0.00001   # finite-difference step for dE/dm (sic: "accuracy")
acuracy_for_c = 0.00001   # finite-difference step for dE/dc
step_size = 0.01          # learning rate
acceptible_error = 0.1    # stop when the total gradient magnitude is below this
max_iterations = 100000   # hard cap so a diverging fit cannot loop forever


def calculate_error(m, c, data):
    """Return the sum of squared vertical residuals of y = m*x + c over data."""
    total_error_squared = 0
    for x, y in data:
        error = y - (m * x + c)
        # `error * error` (not `error ** 2`): float multiplication saturates
        # to inf on overflow, whereas `**` raises OverflowError when the
        # descent diverges on the outlier point.
        total_error_squared += error * error
    return total_error_squared


def calculate_error_2(m, c, data):
    """Return the sum of squared *perpendicular* distances to y = m*x + c.

    Currently unused alternative error metric. NOTE(review): divides by m,
    so it fails for a horizontal line (m == 0).
    """
    total_error_squared = 0
    for x, y in data:
        y_intercept = y + (x / m)
        x_of_closest_point = (y_intercept - c) / (m - ((-1) / m))
        y_of_closest_point = m * x_of_closest_point + c
        error = math.sqrt(((x_of_closest_point - x) ** 2) + ((y_of_closest_point - y) ** 2))
        total_error_squared += error * error
    return total_error_squared


def calculate_error_derivative(m, c, data):
    """Return (dE/dm, dE/dc) of calculate_error via central finite differences."""
    c_derivative1 = calculate_error(m, c - acuracy_for_c, data)
    c_derivative2 = calculate_error(m, c + acuracy_for_c, data)
    c_derivative = (c_derivative1 - c_derivative2) / (-2 * acuracy_for_c)
    m_derivative1 = calculate_error(m - acuracy_for_m, c, data)
    m_derivative2 = calculate_error(m + acuracy_for_m, c, data)
    m_derivative = (m_derivative1 - m_derivative2) / (-2 * acuracy_for_m)
    return m_derivative, c_derivative


m = start_m
c = start_c
change_m, change_c = calculate_error_derivative(m, c, data)
iterations = 0
# BUG FIX: the original condition was `change_c + change_m > acceptible_error`,
# which tests a *signed* sum — a large negative gradient made the loop exit
# immediately (as happens with this data), and mixed-sign gradients could
# loop forever. Convergence must be judged on the gradient's magnitude.
# NOTE: with the outlier and this fixed step size the descent can diverge;
# saturating float arithmetic then yields nan gradients, whose comparison is
# False, so the loop still terminates (the cap is a second safety net).
while abs(change_m) + abs(change_c) > acceptible_error and iterations < max_iterations:
    change_m, change_c = calculate_error_derivative(m, c, data)
    m = m - step_size * change_m
    c = c - step_size * change_c
    iterations += 1

print("time taken:" + str(time.time() - start_time))
print("prediction for x = " + str(prediction_x_value) + " is " + str(m * prediction_x_value + c))
# Message fix: this prints the gradient returned by calculate_error_derivative,
# not the error value, so say so.
print("final gradient is:" + str(calculate_error_derivative(m, c, data)))
print("equation of final line is:y = " + str(m) + "X + " + str(c))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.