Dataset columns:

| column | type | observed range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 2-616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0-69 |
| license_type | string | 2 classes |
| repo_name | string | length 5-118 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | length 4-63 |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 2.91k-686M, nullable |
| star_events_count | int64 | 0-209k |
| fork_events_count | int64 | 0-110k |
| gha_license_id | string | 23 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 213 classes |
| src_encoding | string | 30 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 2-10.3M |
| extension | string | 246 classes |
| content | string | length 2-10.3M |
| authors | list | length 1 |
| author_id | string | length 0-212 |
5287ea85f3001b72c450c0caac0ac4f3ed597f8c
|
b2afa86304d4ff715b60b7bdf9f5a7a8d238eb2b
|
/articulos/models.py
|
09303bc42406c7ea30df488a65ef8942439cfe39
|
[] |
no_license
|
luis2906/APIs-pedido
|
dc58d922b0ce4dbc42ae2ac964ea90a7d427a416
|
bd6e6097b804067e61e9abf42c9a2e2053e92637
|
refs/heads/master
| 2022-06-06T20:15:51.809544
| 2019-09-30T17:07:37
| 2019-09-30T17:07:37
| 211,903,595
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 512
|
py
|
from django.db import models
# Create your models here.
class BaseModel(models.Model):
    nombre = models.CharField(unique=True, max_length=255)

    class Meta:
        abstract = True

    def __str__(self):
        return self.nombre


class Marca(BaseModel):
    def __str__(self):
        return self.nombre


class Articulo(BaseModel):
    codigo = models.CharField(unique=True, max_length=255)
    marca = models.ForeignKey(Marca, related_name="Articulo_marca", on_delete=models.PROTECT)

    def __str__(self):
        return self.nombre
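
A minimal usage sketch for these models from a Django shell; the example values are hypothetical, and on_delete=models.PROTECT means a Marca still referenced by an Articulo cannot be deleted:

# Hypothetical shell session; assumes migrations for this app are applied.
marca = Marca.objects.create(nombre="Acme")
articulo = Articulo.objects.create(nombre="Tornillo M4", codigo="A-001", marca=marca)
print(articulo)   # prints "Tornillo M4" via BaseModel.__str__
marca.delete()    # raises django.db.models.ProtectedError while articulo exists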
|
[
"luis.hernandez@sourcemeridian.com"
] |
luis.hernandez@sourcemeridian.com
|
1a9d990fbd2ef3a7ebad4d4ab0747d0f363daf29
|
7fd79f23ed85a5b4459379a1744a3cf7d2b03703
|
/手写数字识别DNN/mnist_loader.py
|
405644f9574f0ec24cb5bb30cdec83f6dd37e3c9
|
[] |
no_license
|
xuyanbo03/ml-workbook
|
ba2a98a8df0d7d7c22a221ae321322f0f492631f
|
41d036bf470ba7c63a798d254d8dc3b42007798b
|
refs/heads/master
| 2022-01-28T21:16:26.850821
| 2019-07-16T09:08:14
| 2019-07-16T09:08:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,400
|
py
|
"""
mnist_loader
~~~~~~~~~~~~
A library to load the MNIST image data. For details of the data
structures that are returned, see the doc strings for ``load_data``
and ``load_data_wrapper``. In practice, ``load_data_wrapper`` is the
function usually called by our neural network code.
"""
#### Libraries
# Standard library
import pickle
import gzip
# Third-party libraries
import numpy as np
def load_data():
    """Return the MNIST data as a tuple containing the training data,
    the validation data, and the test data.

    The ``training_data`` is returned as a tuple with two entries.
    The first entry contains the actual training images. This is a
    numpy ndarray with 50,000 entries. Each entry is, in turn, a
    numpy ndarray with 784 values, representing the 28 * 28 = 784
    pixels in a single MNIST image.

    The second entry in the ``training_data`` tuple is a numpy ndarray
    containing 50,000 entries. Those entries are just the digit
    values (0...9) for the corresponding images contained in the first
    entry of the tuple.

    The ``validation_data`` and ``test_data`` are similar, except
    each contains only 10,000 images.

    This is a nice data format, but for use in neural networks it's
    helpful to modify the format of the ``training_data`` a little.
    That's done in the wrapper function ``load_data_wrapper()``, see
    below.
    """
    f = gzip.open('mnist.pkl.gz', 'rb')
    training_data, validation_data, test_data = pickle.load(f, encoding="latin1")
    f.close()
    return (training_data, validation_data, test_data)


def load_data_wrapper():
    """Return a tuple containing ``(training_data, validation_data,
    test_data)``. Based on ``load_data``, but the format is more
    convenient for use in our implementation of neural networks.

    In particular, ``training_data`` is a list containing 50,000
    2-tuples ``(x, y)``. ``x`` is a 784-dimensional numpy.ndarray
    containing the input image. ``y`` is a 10-dimensional
    numpy.ndarray representing the unit vector corresponding to the
    correct digit for ``x``.

    ``validation_data`` and ``test_data`` are lists containing 10,000
    2-tuples ``(x, y)``. In each case, ``x`` is a 784-dimensional
    numpy.ndarray containing the input image, and ``y`` is the
    corresponding classification, i.e., the digit values (integers)
    corresponding to ``x``.

    Obviously, this means we're using slightly different formats for
    the training data and the validation / test data. These formats
    turn out to be the most convenient for use in our neural network
    code."""
    tr_d, va_d, te_d = load_data()
    training_inputs = [np.reshape(x, (784, 1)) for x in tr_d[0]]
    training_results = [vectorized_result(y) for y in tr_d[1]]
    training_data = zip(training_inputs, training_results)
    validation_inputs = [np.reshape(x, (784, 1)) for x in va_d[0]]
    validation_data = zip(validation_inputs, va_d[1])
    test_inputs = [np.reshape(x, (784, 1)) for x in te_d[0]]
    test_data = zip(test_inputs, te_d[1])
    return (training_data, validation_data, test_data)


def vectorized_result(j):
    """Return a 10-dimensional unit vector with a 1.0 in the jth
    position and zeroes elsewhere. This is used to convert a digit
    (0...9) into a corresponding desired output from the neural
    network."""
    e = np.zeros((10, 1))
    e[j] = 1.0
    return e
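
One practical caveat worth a short sketch: under Python 3, zip returns a single-pass iterator, so each dataset returned by load_data_wrapper can be traversed only once; materialize it with list() if you need repeated passes (assumes mnist.pkl.gz sits in the working directory):

# Usage sketch; wrap the zip objects in list() to allow repeated iteration.
training_data, validation_data, test_data = load_data_wrapper()
training_data = list(training_data)
x, y = training_data[0]
print(x.shape, y.shape)   # (784, 1) (10, 1)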
|
[
"610958401@qq.com"
] |
610958401@qq.com
|
9155110a9ae58bc903e5e05dc9dfed7c7bdc4cea
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/tree-big-1645.py
|
4a5e725248f8e0b1a645420a84fb9273621ed52e
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,292
|
py
|
# Binary-search trees
class TreeNode(object):
    value:int = 0
    left:"TreeNode" = None
    right:"TreeNode" = None

    def insert(self:"TreeNode", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode(x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode(x)
                return True
            else:
                return self.right.insert(x)
        return False

    def contains(self:"TreeNode", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True


class TreeNode2(object):
    value:int = 0
    value2:int = 0
    left:"TreeNode2" = None
    left2:"TreeNode2" = None
    right:"TreeNode2" = None
    right2:"TreeNode2" = None

    def insert(self:"TreeNode2", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode2(x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode2(x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def contains(self:"TreeNode2", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True


class TreeNode3(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    left:"TreeNode3" = None
    left2:"TreeNode3" = None
    left3:"TreeNode3" = None
    right:"TreeNode3" = None
    right2:"TreeNode3" = None
    right3:"TreeNode3" = None

    def insert(self:"TreeNode3", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def contains(self:"TreeNode3", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True


class TreeNode4(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    left:"TreeNode4" = None
    left2:"TreeNode4" = None
    left3:"TreeNode4" = None
    left4:"TreeNode4" = None
    right:"TreeNode4" = None
    right2:"TreeNode4" = None
    right3:"TreeNode4" = None
    right4:"TreeNode4" = None

    def insert(self:"TreeNode4", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def contains(self:"TreeNode4", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True


class TreeNode5(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    value5:int = 0
    left:"TreeNode5" = None
    left2:"TreeNode5" = None
    left3:"TreeNode5" = None
    left4:"TreeNode5" = None
    left5:"TreeNode5" = None
    right:"TreeNode5" = None
    right2:"TreeNode5" = None
    right3:"TreeNode5" = None
    right4:"TreeNode5" = None
    right5:"TreeNode5" = None

    def insert(self:"TreeNode5", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            else:
                return self.right.insert(x)
        return False

    def contains(self:"TreeNode5", x:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True

    def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        if x < self.value:
            if self.left is None:
                return False
            else:
                return self.left.contains(x)
        elif x > self.value:
            if self.right is None:
                return False
            else:
                return self.right.contains(x)
        else:
            return True


class Tree(object):
    root:TreeNode = None
    size:int = 0

    def insert(self:"Tree", x:int) -> object:
        if self.root is None:
            self.root = makeNode(x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def contains(self:"Tree", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)


class Tree2(object):
    root:TreeNode2 = None
    root2:TreeNode2 = None
    size:int = 0
    size2:int = 0

    def insert(self:"Tree2", x:int) -> object:
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert2(self:"Tree2", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def contains(self:"Tree2", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains2(self:"Tree2", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)


class Tree3(object):
    root:TreeNode3 = None
    root2:TreeNode3 = None
    root3:TreeNode3 = None
    size:int = 0
    size2:int = 0
    size3:int = 0

    def insert(self:"Tree3", x:int) -> object:
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert2(self:"Tree3", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def contains(self:"Tree3", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains2(self:"Tree3", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)


class Tree4(object):
    root:TreeNode4 = None
    root2:TreeNode4 = None
    root3:TreeNode4 = None
    root4:TreeNode4 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0

    def insert(self:"Tree4", x:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert2(self:"Tree4", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def contains(self:"Tree4", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains2(self:"Tree4", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)


class Tree5(object):
    root:TreeNode5 = None
    root2:TreeNode5 = None
    root3:TreeNode5 = None
    root4:TreeNode5 = None
    root5:TreeNode5 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    size5:int = 0

    def insert(self:"Tree5", x:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert2(self:"Tree5", x:int, x2:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def contains(self:"Tree5", x:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains2(self:"Tree5", x:int, x2:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)

    def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        if self.root is None:
            return False
        else:
            return self.root.contains(x)


def makeNode(x: int) -> TreeNode:
    b:TreeNode = None
    b = TreeNode()
    b.value = x
    return b


def makeNode2(x: int, x2: int) -> TreeNode2:
    b:TreeNode2 = None
    b2:TreeNode2 = None
    b = TreeNode2()
    b.value = x
    return b


def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
    b:TreeNode3 = None
    b2:TreeNode3 = None
    b3:TreeNode3 = None
    b = TreeNode3()
    b.value = x
    return b


def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
    b:TreeNode4 = None
    b2:TreeNode4 = None
    b3:TreeNode4 = None
    b4:TreeNode4 = None
    b = TreeNode4()
    b.value = x
    return b


def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
    b:TreeNode5 = None
    b2:TreeNode5 = None
    b3:TreeNode5 = None
    b4:TreeNode5 = None
    b5:TreeNode5 = None
    b = TreeNode5()
    b.value = x
    return b


# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4

# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813

# Crunch
t = Tree()
while i < n:
    t.insert(k)
    k = (k * 37813) % 37831
    if i % c != 0:
        t.insert(i)
    i = i + 1

print(t.size)

for i in [4, 8, 15, 16, 23, 42]:
    if t.contains(i):
        print(i)
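
As a sanity check on what the driver above computes, a plain-Python equivalent: the tree stores each distinct key once, so its size matches the size of a set fed the same insert sequence.

# Plain-Python mirror of the benchmark driver above.
seen = set()
i, k = 0, 37813
while i < 100:          # n = 100
    seen.add(k)
    k = (k * 37813) % 37831
    if i % 4 != 0:      # c = 4
        seen.add(i)
    i = i + 1
print(len(seen))        # matches t.size
for i in [4, 8, 15, 16, 23, 42]:
    if i in seen:       # matches t.contains(i)
        print(i)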
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
bcb5ce8be64ba7b9ed474b57c0bb84c1a14ea3a8
|
28ee4df44811a7eff6f57dba03bcfe7e7a444d1f
|
/website_IPS/djangoServer/product_injection/models.py
|
807f758629c343a2a2db96e109cbe7abd75eea21
|
[] |
no_license
|
fxanhkhoa/angularjs-SaleWebsite
|
5ddab11abd65bbd8b94911a6c2456143b1ba8d73
|
498d35b8dc967a30c1dc275777364f6a88f84810
|
refs/heads/master
| 2022-05-16T01:13:01.745273
| 2022-03-16T10:48:13
| 2022-03-16T10:48:13
| 225,559,401
| 0
| 0
| null | 2022-03-16T10:48:38
| 2019-12-03T07:39:57
|
SCSS
|
UTF-8
|
Python
| false
| false
| 598
|
py
|
from django.db import models
# Create your models here.
class Product(models.Model):
    barcode = models.CharField(max_length=20)
    type = models.CharField(max_length=100)
    product_name = models.CharField(max_length=100)
    price_main = models.FloatField()
    price1 = models.CharField(max_length=100)
    price2 = models.CharField(max_length=100)
    price3 = models.CharField(max_length=100)
    quantity = models.IntegerField()
    expired_day = models.DateField()

    class Meta:
        managed = False
        db_table = 'product'

    def __str__(self):
        return self.barcode
|
[
"fxanhkhoa@gmail.com"
] |
fxanhkhoa@gmail.com
|
644f53da5330e99d42a57e2457baa4815d3cc52f
|
d0a54a3faa1891b647f8c621521cd26c13bd2926
|
/backend/mytts.py
|
b5bd2373d11ec245d0b144f5f903e259d2fd903f
|
[
"MIT"
] |
permissive
|
ishine/PTTS-WebAPP
|
166318593d3247c88d458c9d4fe39dca27ef408f
|
dcc07a79d8dd695ca15e4dd5a69811b3ddd91709
|
refs/heads/main
| 2023-04-02T06:03:41.237351
| 2021-04-14T02:37:16
| 2021-04-14T02:37:16
| 357,388,655
| 0
| 0
|
MIT
| 2021-04-14T02:37:17
| 2021-04-13T01:31:11
| null |
UTF-8
|
Python
| false
| false
| 2,595
|
py
|
#!/usr/bin/env python
import os.path as osp
import librosa
import torch
from .hparams import HParam
from .transform import StandardNorm, TextProcessor
from .models import MelGenerator, ParallelText2Mel
from .synthesizer import Synthesizer
try:
    from .manager import GPUManager
except ImportError as err:
    print(err); gm = None
else:
    gm = GPUManager()


def select_device(device):
    cpu_request = device.lower() == 'cpu'
    # if device requested other than 'cpu'
    if device and not cpu_request:
        c = 1024 ** 2  # bytes to MB
        x = torch.cuda.get_device_properties(int(device))
        s = f'Using torch {torch.__version__} '
        print("%sCUDA:%s (%s, %dMB)" % (s, device, x.name, x.total_memory / c))
        return torch.device(f'cuda:{device}')
    else:
        print(f'Using torch {torch.__version__} CPU')
        return torch.device('cpu')


class MyTTS:
    def __init__(self, config=None, device=None):
        if torch.cuda.is_available():
            index = device if device else str(0 if gm is None else gm.auto_choice())
        else:
            index = 'cpu'
        self.device = device = select_device(index)
        self.hparams = hparams = HParam(config) \
            if config else HParam(osp.join(osp.dirname(osp.abspath(__file__)), "config", "default.yaml"))
        checkpoint = osp.join(osp.dirname(osp.abspath(__file__)), "pretrained", hparams.parallel.checkpoint)
        vocoder_checkpoint = osp.join(osp.dirname(osp.abspath(__file__)), "pretrained", hparams.vocoder.checkpoint)
        normalizer = StandardNorm(hparams.audio.spec_mean, hparams.audio.spec_std)
        processor = TextProcessor(hparams.text)
        text2mel = ParallelText2Mel(hparams.parallel)
        text2mel.eval()
        vocoder = MelGenerator(hparams.audio.n_mel_channels).to(device)
        vocoder.eval(inference=True)
        self.synthesizer = Synthesizer(
            model=text2mel,
            checkpoint=checkpoint,
            vocoder=vocoder,
            vocoder_checkpoint=vocoder_checkpoint,
            processor=processor,
            normalizer=normalizer,
            device=device
        )

    def __call__(self, texts, speed, volume, tone):
        rate = int(tone) / 3
        alpha = (4 / int(speed)) * rate
        beta = int(volume) / 3
        wave = self.synthesizer.inference(texts, alpha=alpha, beta=beta)
        wave = wave.cpu().detach().numpy()
        sr = self.hparams.audio.sampling_rate
        # use TSM + resample to change tone
        wave = librosa.core.resample(wave, int(sr*rate), sr)
        return wave, sr
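
A rough usage sketch; every argument value here is hypothetical (the arithmetic in __call__ suggests speed, volume and tone are numeric strings with neutral values 4, 3 and 3), and the pretrained checkpoints referenced by the default config must be present:

# Hypothetical call; parameter scales inferred from __call__ above.
tts = MyTTS(config=None, device='cpu')
wave, sr = tts("Hello world", speed='4', volume='3', tone='3')
# wave is a numpy array; write it out with e.g. soundfile.write('out.wav', wave, sr)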
|
[
"atomicoo95@gmail.com"
] |
atomicoo95@gmail.com
|
86141b295083fbfbabc34f7f63036b63e6caada7
|
e7a76e109e38c241b103b69c56b3a1c5786eec41
|
/plot_oceanoptics.py
|
e9f2c06008eddc5e40a6cf0b5f0a6cf1e793a560
|
[] |
no_license
|
sdickreuter/auswertung
|
5e6baca12e3f56dda485256888e1871042e6af72
|
eb4f01ce1284cfa3d7e1b979ed90af55b36e242c
|
refs/heads/master
| 2021-05-09T13:16:34.952174
| 2019-03-01T15:52:02
| 2019-03-01T15:52:02
| 119,030,645
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 834
|
py
|
import numpy as np
from plotsettings import *
import os
import re
import seaborn as sns
sns.set_context("poster", rc= { "xtick.major.width":0.5,
"ytick.major.width":0.5,
"xtick.minor.width":0.5,
"ytick.minor.width":0.5})
path = '/home/sei/Spektren/PosterPlot/'
maxwl = 800
minwl = 460
wl, counts = np.loadtxt(open(path + "E1_corr.csv", "rb"), delimiter=",", skiprows=16, unpack=True)
mask = (wl >= minwl) & (wl <= maxwl)
wl = wl[mask]
counts = counts[mask]
counts /= counts.max()
fig = newfig(1.1)
plt.plot(wl, counts)
plt.xlim((minwl,maxwl))
plt.ylabel(r'\boldmath$Intensit"at \; / \; a.u.$')
plt.xlabel(r'\boldmath$Wellenl"ange \; / \; nm$')
plt.tight_layout()
# plt.plot(wl, counts)
plt.savefig(path+"spec.png",dpi=600)
plt.close()
|
[
"Simon.Dickreuter@uni-tuebingen.de"
] |
Simon.Dickreuter@uni-tuebingen.de
|
5be1631a2109500c70886c510b62a60b37f20ede
|
1ad01c9f7335c14456fc10da41bcf942fcb58482
|
/main.py
|
e870545fed45d5d9f323a6174e957b915918d5e9
|
[] |
no_license
|
AP-MI-2021/lab-3-VargaIonut23
|
1159cd1d576fa3d944c6f633d2a37aaeaf02ba0a
|
c419dd62ac441196e952301de31d20608bb99515
|
refs/heads/main
| 2023-08-22T01:20:38.356254
| 2021-10-12T05:13:08
| 2021-10-12T05:13:08
| 413,956,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,324
|
py
|
def is_prime(n):
    '''
    :param n: the number checked for primality
    :return: 1 if n is prime, 0 otherwise
    '''
    if n < 2:
        return 0
    elif n == 2:
        return 1
    elif n % 2 == 0:
        return 0
    else:
        for i in range(3, n, 2):
            if n % i == 0:
                return 0
        return 1


def is_all_prime(l):
    '''
    Determines whether all the given elements are prime.
    '''
    for i in l:
        if is_prime(i) == 0:
            return False
    return True


def test_get_longest_all_primes():
    assert get_longest_all_primes([2, 3, 7, 11]) == [2, 3, 7, 11]
    assert get_longest_all_primes([2, 2, 7, 23]) == [2, 2, 7, 23]
    assert get_longest_all_primes([1, 3, 3, 7]) == [3, 3, 7]
    assert get_longest_all_primes([1, 1, 0, 1]) == []


def get_longest_all_primes(l):
    '''
    Determines the longest subsequence of prime numbers in the given list.
    '''
    subsecventa_max = []
    for i in range(len(l)):
        for j in range(i, len(l) + 1):
            if is_all_prime(l[i:j + 1]) and len(l[i:j + 1]) > len(subsecventa_max):
                subsecventa_max = l[i:j + 1]
    return subsecventa_max


def test_get_longest_prime_digits():
    assert get_longest_prime_digits([2, 2, 2, 3]) == [2, 2, 2, 3]
    assert get_longest_prime_digits([1, 2, 7, 9]) == [2, 7]
    assert get_longest_prime_digits([2, 2, 7, 5]) == [2, 2, 7, 5]
    assert get_longest_prime_digits([1, 3, 5, 7]) == [3, 5, 7]


def au_toate_cifrele_prime(n):
    '''
    Determines whether every digit of n is prime.
    '''
    while n != 0:
        c = n % 10
        if is_prime(c) == 0:
            return False
        n = n // 10
    return True


def sunt_bune(l):
    '''
    Checks whether every number in the given subsequence has all-prime digits.
    :param l:
    :return:
    '''
    for i in l:
        if au_toate_cifrele_prime(i) == 0:
            return 0
    return 1


def get_longest_prime_digits(l):
    '''
    Determines the longest subsequence of numbers whose digits are all prime.
    :param l:
    :return: the longest subsequence of numbers whose digits are all prime
    '''
    subsecventa_max = []
    for i in range(len(l)):
        for j in range(i, len(l) + 1):
            if sunt_bune(l[i:j + 1]) and len(l[i:j + 1]) > len(subsecventa_max):
                subsecventa_max = l[i:j + 1]
    return subsecventa_max


def citire_lista():
    l = []
    listasstring = input("Enter the list: ")
    numberasstring = listasstring.split(",")
    for x in numberasstring:
        l.append(int(x))
    return l


def pare(l):
    '''
    Determines whether all numbers in the subsequence are even.
    :return:
    '''
    for i in l:
        if i % 2 == 1:
            return 0
    return 1


def get_longest_all_even(l):
    '''
    :return: the longest subsequence of even numbers
    '''
    subsecventa_max = []
    for i in range(len(l)):
        for j in range(i, len(l) + 1):
            if pare(l[i:j + 1]) and len(l[i:j + 1]) > len(subsecventa_max):
                subsecventa_max = l[i:j + 1]
    return subsecventa_max


def test_get_longest_all_even():
    assert get_longest_all_even([2, 4, 6, 22, 3]) == [2, 4, 6, 22]
    assert get_longest_all_even([4, 6, 23, 1, 1]) == [4, 6]
    assert get_longest_all_even([3, 1]) == []
    assert get_longest_all_even([22, 24, 88, 54, 24, 56]) == [22, 24, 88, 54, 24, 56]


def print_menu():
    print("1. Read a list")
    print("2. Print the longest subsequence of prime numbers")
    print("3. Print the longest subsequence of numbers whose digits are all prime")
    print("4. Print the longest subsequence of even numbers")
    print("5. Exit")


def main():
    test_get_longest_all_primes()
    test_get_longest_prime_digits()
    test_get_longest_all_even()
    l = []
    while True:
        print_menu()
        optiune = input("Enter an option: ")
        if optiune == "1":
            l = citire_lista()
        elif optiune == "2":
            print(get_longest_all_primes(l))
        elif optiune == "3":
            print(get_longest_prime_digits(l))
        elif optiune == "4":
            print(get_longest_all_even(l))
        elif optiune == "5":
            break
        else:
            print("Invalid option! Try again.")


if __name__ == "__main__":
    main()
|
[
"91500038+VargaIonut23@users.noreply.github.com"
] |
91500038+VargaIonut23@users.noreply.github.com
|
a8b5e01ad95ded27a6c98be1480ba945be7e440d
|
8097f658e27de527be5756d64bdaa380ecb6dfb7
|
/MCU03_REC.py
|
c6d2761849e2779c686c710bb2387fdbfd98aed0
|
[] |
no_license
|
kadonotakashi/sound_evaluate
|
b065cd976860e29b68c4f09f582f8edbc72dc310
|
549508738397231fc0c3da27e69cb61fb11b7b1a
|
refs/heads/main
| 2022-12-27T02:05:27.137923
| 2020-10-14T01:20:14
| 2020-10-14T01:20:14
| 303,869,184
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,205
|
py
|
# -*- coding: utf-8 -*-
import wave
import pyaudio
import time
"""Sanwa suply USB MIC MCU03 """
class MCU03_REC():
def __init__(self):
self.devindex_mic = -1
self.fnameRec = 'sndRec.wav'
self.ch_Mic = 1
self.qb_Mic = 2
self.fq_Mic = 16000
def SetFileName(self, fname):
self.fnameRec = fname
def CreateWaveFile(self):
self.wfrec = wave.open(self.fnameRec, 'wb')
self.wfrec.setnchannels(self.ch_Mic)
self.wfrec.setsampwidth(self.qb_Mic)
self.wfrec.setframerate(self.fq_Mic)
print("record file is ")
print(self.fnameRec, self.ch_Mic, self.qb_Mic, self.fq_Mic)
def sndrec(self, in_data, frame_count, time_info, status):
self.wfrec.writeframes(in_data)
return(None, pyaudio.paContinue)
def getDeviceIndex(self):
info = self.p.get_host_api_info_by_index(0)
numdevices = info.get('deviceCount')
for i in range(0, numdevices):
name = self.p.get_device_info_by_host_api_device_index(0, i).get('name')
if name[0:3] == "ใใคใฏ":
if (self.p.get_device_info_by_host_api_device_index(0, i).get('maxInputChannels')) > 0:
self.devindex_mic = i
print('Sanwa MCU03 index is', self.devindex_mic)
def RECORD(self):
self.p = pyaudio.PyAudio()
self.getDeviceIndex()
if (self.devindex_mic == -1):
print("can't open XVF3510 as Input Device")
while True:
time.sleep(1)
self.CreateWaveFile()
self.stmrec = self.p.open(
format=self.p.get_format_from_width(self.qb_Mic),
channels=self.ch_Mic,
rate=self.fq_Mic,
input=True,
frames_per_buffer=1024,
stream_callback=self.sndrec,
input_device_index=self.devindex_mic
)
self.stmrec.start_stream()
while self.stmrec.is_active():
cmd = input('when want to stop, type"exit"')
if cmd == 'exit':
break
self.stmrec.stop_stream()
self.stmrec.close()
self.p.terminate()
self.wfrec.close()
def START(self):
self.p = pyaudio.PyAudio()
self.getDeviceIndex()
if (self.devindex_mic == -1):
print("can't open XVF3510 as Input Device")
while True:
time.sleep(1)
self.CreateWaveFile()
self.stmrec = self.p.open(
format=self.p.get_format_from_width(self.qb_Mic),
channels=self.ch_Mic,
rate=self.fq_Mic,
input=True,
frames_per_buffer=1024,
stream_callback=self.sndrec,
input_device_index=self.devindex_mic
)
self.stmrec.start_stream()
def check_active(self):
return self.stmrec.is_active()
def STOP(self):
self.stmrec.stop_stream()
self.stmrec.close()
self.p.terminate()
self.wfrec.close()
def main():
DeviceIn = MCU03_REC()
DeviceIn.SetFileName('./record/sndRecXVF.wav')
DeviceIn.RECORD()
if __name__ == '__main__':
main()
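
Besides the blocking RECORD() loop, the class exposes a non-blocking START()/STOP() pair; a short sketch (recording length hypothetical):

# Record for five seconds without the interactive 'exit' prompt.
rec = MCU03_REC()
rec.SetFileName('take1.wav')
rec.START()       # opens the stream; sndrec() appends frames via callback
time.sleep(5)
rec.STOP()        # closes the stream and finalizes the wave file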
|
[
"kadono@mail.glory.co.jp"
] |
kadono@mail.glory.co.jp
|
8860fd14e571f6895267fbdf6e37de2a1b996050
|
dfab6798ece135946aebb08f93f162c37dd51791
|
/timber/luban.timber/__init__.py
|
a1439a83d3cd277a7cb77b0454e6d6e5598f66c6
|
[] |
no_license
|
yxqd/luban
|
405f5f7dcf09015d214079fe7e23d644332be069
|
00f699d15c572c8bf160516d582fa37f84ac2023
|
refs/heads/master
| 2020-03-20T23:08:45.153471
| 2012-05-18T14:52:43
| 2012-05-18T14:52:43
| 137,831,650
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 840
|
py
|
# -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# ************************************************************
# bad bad
import luban
luban.__doc__ += """* timber: default extension of luban core
"""
# ************************************************************
# activate extensions
from . import elements, actions
from . import luban_ext
from . import controller # replace the core controllers with timber controllers. see eg .controllers.CherrypyController
from .controller import setUploadPath
# End of file
|
[
"linjiao@caltech.edu"
] |
linjiao@caltech.edu
|
113e34a17a867e34e5f5a4feab529de8c4ea4cad
|
7a6903357d830fb49c5e1ff4765ea8ea5f1ff18d
|
/lesson2/venv/bin/python-config
|
b9371e208651370827cd89a204a2c8a5422305e4
|
[] |
no_license
|
StephanRaab/lambdaSchool
|
08ee36ae9cc2946c43b8fc4c9282c166651a71d4
|
fec83185e0e5bdb849cf69f459d61becf6965fe6
|
refs/heads/master
| 2021-01-13T12:26:24.520000
| 2017-03-09T03:07:47
| 2017-03-09T03:07:47
| 81,151,871
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,362
|
#!/home/gnome/Github/lambdaSchool/lesson2/venv/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'help']

if sys.version_info >= (3, 2):
    valid_opts.insert(-1, 'extension-suffix')
    valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
    valid_opts.append('configdir')


def exit_with_usage(code=1):
    sys.stderr.write("Usage: {0} [{1}]\n".format(
        sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
    sys.exit(code)

try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()

if not opts:
    exit_with_usage()

pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var

opt_flags = [flag for (flag, val) in opts]

if '--help' in opt_flags:
    exit_with_usage(code=0)

for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)
    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
|
[
"sarcartist@gmail.com"
] |
sarcartist@gmail.com
|
|
145a90c675971039d677b9e3411c7b6f30d2cde6
|
59be93c710d9e1750d2767f1c98f347ed3dc635c
|
/elements/when.py
|
308f8c8d7c8fbbc825abfaf9cd8a8914f92fd203
|
[
"MIT"
] |
permissive
|
artemZholus/elements
|
802d14eb574be0c3f18a50fdbc87ee262fbcd01a
|
21b4f27e854d91a65619e8fc81b3916386c5ef66
|
refs/heads/main
| 2023-07-10T05:21:28.947510
| 2021-08-18T18:02:05
| 2021-08-18T18:02:05
| 397,594,638
| 0
| 0
|
MIT
| 2021-08-18T12:35:49
| 2021-08-18T12:35:49
| null |
UTF-8
|
Python
| false
| false
| 711
|
py
|
class Every:

    def __init__(self, every):
        self._every = every
        self._last = None

    def __call__(self, step):
        step = int(step)
        if not self._every:
            return False
        if self._last is None:
            self._last = step
            return True
        if step >= self._last + self._every:
            self._last += self._every
            return True
        return False


class Once:

    def __init__(self):
        self._once = True

    def __call__(self):
        if self._once:
            self._once = False
            return True
        return False


class Until:

    def __init__(self, until):
        self._until = until

    def __call__(self, step):
        step = int(step)
        if not self._until:
            return True
        return step < self._until
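
A short usage sketch of these schedule helpers inside a training-style loop (step counts illustrative):

should_log = Every(100)   # True at the first call, then every 100 steps
in_budget = Until(1000)   # True while step < 1000
warmup = Once()           # True exactly once
step = 0
while in_budget(step):
    if warmup():
        print('first iteration only')
    if should_log(step):
        print('logging at step', step)
    step += 1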
|
[
"mail@danijar.com"
] |
mail@danijar.com
|
664fef8dbbee5f880d4f0a0866edc6ccd5676737
|
0facb323be8a76bb4c168641309972fa77cbecf2
|
/Configurations/HWWSemiLepHighMass/Full_v6Production/template_seed/templates_jhchoi/MassPoints2018/List_MX.py
|
ca93c1c06e444ba9cee292c6bdab834fd117111f
|
[] |
no_license
|
bhoh/SNuAnalytics
|
ef0a1ba9fa0d682834672a831739dfcfa1e7486b
|
34d1fc062e212da152faa83be50561600819df0e
|
refs/heads/master
| 2023-07-06T03:23:45.343449
| 2023-06-26T12:18:28
| 2023-06-26T12:18:28
| 242,880,298
| 0
| 1
| null | 2020-02-25T01:17:50
| 2020-02-25T01:17:49
| null |
UTF-8
|
Python
| false
| false
| 396
|
py
|
List_MX=[
115 ,
120 ,
124 ,
125 ,
126 ,
130 ,
135 ,
140 ,
145 ,
150 ,
155 ,
160 ,
165 ,
170 ,
175 ,
180 ,
190 ,
200 ,
210 ,
230 ,
250 ,
270 ,
300 ,
350 ,
400 ,
450 ,
500 ,
550 ,
600 ,
650 ,
700 ,
750 ,
800 ,
900 ,
1000 ,
1500 ,
2000 ,
2500 ,
3000 ,
4000 ,
5000 ,
]
if __name__ == '__main__':
    #print('( '+" ".join(str(MX) for MX in List_MX)+' )')
    print " ".join(str(MX) for MX in List_MX)
|
[
"soarnsoar@gmail.com"
] |
soarnsoar@gmail.com
|
6d0725c591adc561c92b70e30aa85c54b60ff22e
|
c43825a9c2e88edd36af5f2e6088eae5f0b30822
|
/service3/__init__.py
|
1408208fb651cc957765054f47331f765c1c360c
|
[] |
no_license
|
NaailC/milestone
|
a7d218ec60a74679aa8ba9633d14b5e6dbcc3f40
|
28189ce4d521696876a80c7f4ad31edec94bdb04
|
refs/heads/main
| 2023-03-02T04:41:27.558763
| 2021-02-01T14:21:19
| 2021-02-01T14:21:19
| 334,973,445
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 381
|
py
|
import logging
import random
from random import randint
import azure.functions as func
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')
    numbers = ''
    for _ in range(5):
        numbers += str(random.randint(0, 9))
    return func.HttpResponse(
        numbers,
        status_code=200
    )
|
[
"naailchoudhury@gmail.com"
] |
naailchoudhury@gmail.com
|
48c1efe4bbfd4c809710895b5744853d0202dcf6
|
b59d1471c918168da512f52c33ca18f02caf75e3
|
/examples/mylexer.py
|
2d7cfc728152e68cc6e687d7236873688a18dd16
|
[] |
no_license
|
lordmauve/dsls-talk
|
0473fda93d1229aad3be27da3a0cc1f04df978a3
|
8cb0df9ecc0c84e472f774b681b499d8c4973853
|
refs/heads/master
| 2020-07-23T11:16:44.556189
| 2015-07-24T10:48:15
| 2015-07-24T10:48:15
| 207,540,462
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
import ply.lex as lex
tokens = 'ADDOP MULOP LPAREN RPAREN NUMBER'.split()
t_ADDOP = r'[+-]'
t_MULOP = r'[*/]'
t_LPAREN = r'\('
t_RPAREN = r'\)'
def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t
t_ignore = ' \t'
lexer = lex.lex()
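
A quick tokenization example for the lexer above, using ply's standard input()/iteration API:

lexer.input('3 + 4 * (2 - 1)')
for tok in lexer:
    print(tok.type, tok.value)
# NUMBER 3 / ADDOP + / NUMBER 4 / MULOP * / LPAREN ( / NUMBER 2 / ADDOP - / NUMBER 1 / RPAREN )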
|
[
"lordmauve@users.noreply.github.com"
] |
lordmauve@users.noreply.github.com
|
87477ba53d15435cb55aa99b65ce10afdee5a360
|
e52501eb4db862d90ae5541bd512a50df30e0726
|
/Chapter 2+3 Intro + Variables + Strings/Chapter3-7 ShrinkingGuestList.py
|
47c9f766034b09d285472e34a1f448ce0ac89821
|
[] |
no_license
|
ericnwin/Python-Crash-Course-Lessons
|
b2b12c221f545c961a47f2343b2aa3dac901927b
|
7b755c0b4ce65528f4880b3583aca3be9547b33b
|
refs/heads/master
| 2022-12-19T10:55:35.987330
| 2020-09-13T23:23:54
| 2020-09-13T23:23:54
| 295,255,228
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,190
|
py
|
# You just found out that your new dinner table won't
# arrive in time for the dinner, and you have space for only two guests.
# • Start with your program from Exercise 3-6. Add a new line that prints a
# message saying that you can invite only two people for dinner.
# • Use pop() to remove guests from your list one at a time until only two
# names remain in your list. Each time you pop a name from your list, print
# a message to that person letting them know you're sorry you can't invite
# them to dinner.
# • Print a message to each of the two people still on your list, letting them
# know they're still invited.
# • Use del to remove the last two names from your list, so you have an empty
# list. Print your list to make sure you actually have an empty list at the end
# of your program.

dinner_guests = ['Joeji', 'Elon Musk', 'OpenAI']
print(
    f"Hey {dinner_guests[0]} I'm a huge fan of your music! Please join me for dinner. ")
print(f"Hey {dinner_guests[1]} can I get a free car? We can talk over dinner.")
print(f"Hey {dinner_guests[2]} teach me AI. I gib food as payment.")

# Declare who can't make it
declined_invitations = "OpenAI"
dinner_guests.remove(declined_invitations)
print(f"Unfortunately {declined_invitations} can't make it.\n")

# Adding new person to invite list
new_person_invite = "Kanye West"
dinner_guests.append(new_person_invite)
print(dinner_guests)

# Making 2nd set of invitations
print(
    '\n' f"Hey {dinner_guests[0]} I'm a huge fan of your music! Please join me for dinner. ")
print(f"Hey {dinner_guests[1]} can I get a free car? We can talk over dinner.")
print(f"Hey {dinner_guests[2]} I loved you in Titanic. Please eat with me.\n")

# shrinking down to 2 people and sending msg to those who are invited
print(f"Hey sorry we only have room for two... I'm uninviting one of you sorry.\n")
uninvited = dinner_guests.pop()
print(f"Hey sorry {uninvited} you've been uninvited :( \n")
print(f"Hey {dinner_guests[0]} you're still invited.")
print(f"Hey {dinner_guests[1]} you're still invited.")

# Remove last 2 names from list and printing out an empty list
del dinner_guests[0]
del dinner_guests[0]
print(dinner_guests)
|
[
"noreply@github.com"
] |
ericnwin.noreply@github.com
|
22ea3dfa2f6015821056ffa9d08c9af757802e96
|
c845f20c02b44809cc4a514637f01de7770c9661
|
/goods/生产许可证.py
|
528cf03319bf36a2f89122ad11910d904f90d092
|
[] |
no_license
|
20181105879/Student
|
2c2e275cfb334decd36e74d7c35d268f6947f6e5
|
90c31298997ec2c9b88c9f6d8dafd90438c75c77
|
refs/heads/main
| 2023-05-04T05:16:19.773862
| 2021-05-23T14:28:58
| 2021-05-23T14:28:58
| 369,209,581
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,012
|
py
|
import requests
import json
if __name__ == '__main__':
    url = 'http://scxk.nmpa.gov.cn:81/xk/itownet/portalAction.do?method=getXkzsList'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36'
    }
    idlist = []
    itemlist = []
    for i in range(0, 10):
        page = str(i)
        data = {
            'on': ' true',
            'page': page,
            'pageSize': ' 15',
            'productName': '',
            'conditionType': ' 1',
            'applyname': '',
            'applysn': '',
        }
        jsonlist = requests.post(url=url, data=data, headers=headers).json()
        posturl = 'http://scxk.nmpa.gov.cn:81/xk/itownet/portalAction.do?method=getXkzsById'
        for item in jsonlist['list']:
            idlist.append(item['ID'])
            data = {
                'id': item['ID']
            }
            itemlist.append(requests.post(url=posturl, data=data, headers=headers).json())
    for item in itemlist:
        print(item)
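
The json import above is otherwise unused; if the intent is to persist the fetched detail records, a small follow-up sketch (filename hypothetical):

# Write the collected detail records to disk as UTF-8 JSON.
with open('licenses.json', 'w', encoding='utf-8') as f:
    json.dump(itemlist, f, ensure_ascii=False, indent=2)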
|
[
"260575145@com"
] |
260575145@com
|
9dbcb505b639e22b90865fcb3eba2f55754885fd
|
4ee88fa57d9bfe91032ef8b0ce8cce1e56bf728e
|
/api/content/test/test_router.py
|
1b35b6dade56e0c4d493fb880b4e2793135df061
|
[] |
no_license
|
mnbbrown/contenter
|
b42198aa232d2fac6f3630f76e29319dac809ad3
|
3eda6777d1aae253afbde7a00fee362f61e45490
|
refs/heads/master
| 2021-05-21T19:07:05.140753
| 2020-04-03T15:01:33
| 2020-04-03T15:01:33
| 252,764,461
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,015
|
py
|
import pytest
from api.content.models import ContentType, Entity
def test_list_content_types(client):
    response = client.get("/admin/content/")
    assert response.status_code == 200


def test_create_content_type(client):
    response = client.post(
        "/admin/content/", json={"name": "content type", "fields": [{"name": "name", "type": "string"}]}
    )
    assert response.status_code == 200


def test_create_bad_content_type(client):
    response = client.post("/admin/content/", json={"fields": []})
    assert response.status_code == 400


def test_list_entities(client, db):
    content_type = ContentType("test", None, [{"name": "name", "type": "string"}])
    db.add(content_type)
    content_type_id = content_type.public_id
    db.flush()
    entity = Entity(content_type.id, {"name": "test"})
    db.add(entity)
    db.commit()
    response = client.get(f"/admin/content/{content_type_id}/entities")
    assert response.status_code == 200
    assert response.json()[0].get("name") == "test"
|
[
"me@matthewbrown.io"
] |
me@matthewbrown.io
|
3b1f4b3c51f2e5d87d10b4dd24faf630961a85e1
|
7910b0c9a7ef23dd9400ef336813977a2c48affb
|
/sanskrit/migrations/0009_auto_20200227_0233.py
|
7972577ca21b97af2a21f8651b72e60086c1e4a5
|
[] |
no_license
|
Rohit-Bhandari/LearnSanskrit
|
0b2f0991b056a34f678129a38a25798c265cb4ba
|
330e68dd9dbf56205f047ead93af479cfe6d5321
|
refs/heads/main
| 2023-05-03T00:42:49.655941
| 2021-05-22T15:48:31
| 2021-05-22T15:48:31
| 369,543,460
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 511
|
py
|
# Generated by Django 2.2.6 on 2020-02-26 21:03
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('sanskrit', '0008_auto_20200227_0202'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sanskritlessons',
            name='completed',
        ),
        migrations.AddField(
            model_name='userprofile',
            name='completed',
            field=models.BooleanField(default=False),
        ),
    ]
|
[
"rohitbhandari080@gmail.com"
] |
rohitbhandari080@gmail.com
|
b2fcc624e79ef9ef10c62818cb0c7d2d93c0d250
|
080bbe77da955b3917435c25fc63b90b0f3c724e
|
/botorch/utils/multi_objective/box_decomposition.py
|
e566f0c69e493acd4370a0a28582374334f572aa
|
[
"MIT"
] |
permissive
|
irinaespejo/botorch
|
3d15d962ff0f5bb34fbd11b2eb7549db755af705
|
e4dcf603fdaf83f0e5f8b9b392f943c89dfff7eb
|
refs/heads/master
| 2023-07-11T18:02:11.853790
| 2021-08-19T15:57:21
| 2021-08-19T15:58:12
| 316,017,084
| 0
| 0
|
MIT
| 2020-11-25T18:02:11
| 2020-11-25T18:02:09
| null |
UTF-8
|
Python
| false
| false
| 744
|
py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
DEPRECATED - Box decomposition algorithms.
Use the botorch.utils.multi_objective.box_decompositions instead.
"""
import warnings
from botorch.utils.multi_objective.box_decompositions.non_dominated import (  # noqa F401
    NondominatedPartitioning,
)

warnings.warn(
    "The botorch.utils.multi_objective.box_decomposition module has "
    "been renamed to botorch.utils.multi_objective.box_decompositions. "
    "botorch.utils.multi_objective.box_decomposition will be removed in "
    "the next release.",
    DeprecationWarning,
)
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
199f6cc9892da170451dd4f3f6f97aebd4b38111
|
a582676e281e8bd490bce79f17880d6195d96b83
|
/vote_site/settings.py
|
b803bffe6f7207c5ecaa63b42c223f78bfec2c91
|
[] |
no_license
|
kyledavelaar/django-voting-app
|
1e435b85bed0891fe2a56715495e10cedabf6793
|
c79aa8a723483a63bf0aae663c6eed3431b79f8a
|
refs/heads/master
| 2021-06-27T20:17:52.850025
| 2017-09-09T21:24:13
| 2017-09-09T21:24:13
| 102,985,888
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,135
|
py
|
"""
Django settings for vote_site project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'uot43!i1z%7l9_!cigd59onuddtj(f@li&novbl30pv-*fvjcg'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'polls.apps.PollsConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'vote_site.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'vote_site.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"kyle@Kyles-MacBook-Pro.local"
] |
kyle@Kyles-MacBook-Pro.local
|
8c040bcc27e5e281cd4496ce81d4e54bdd56d799
|
52125c518edbad99f5bb76d662397e34c0c281db
|
/users/forms.py
|
70f19ee26329d74b3bb5578e77871a3eb6c02ab6
|
[
"MIT"
] |
permissive
|
Mich45/geddit
|
531a883e07ec282eaecc482591817ab6c9be77e2
|
6b985221d117e982b17dc48697bb8f611baa3366
|
refs/heads/master
| 2022-11-21T01:28:48.389908
| 2020-07-20T07:29:20
| 2020-07-20T07:29:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 676
|
py
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
class UserRegisterForm(UserCreationForm):
    email = forms.EmailField()

    class Meta:
        model = User
        fields = ['username', 'email', 'first_name']


class UserUpdateForm(forms.ModelForm):
    email = forms.EmailField()

    class Meta:
        model = User
        fields = ['username', 'email', 'first_name']


class ProfileUpdateForm(forms.ModelForm):
    class Meta:
        model = Profile
        fields = ['image']
        labels = {'image': 'Image'}
        widgets = {'image': forms.FileInput()}
|
[
"arthtyagi7@gmail.com"
] |
arthtyagi7@gmail.com
|
187fe74df27234c89d53fd123734475bb1c8d243
|
c1631ba56600424e5cc473ccf160b5d935b4301c
|
/demo/BDTB.py
|
843840155faaf0525e85e266cbb4d22b3d9890f1
|
[] |
no_license
|
misasagi1988/webcrawler
|
149155a6b5c1141722bcb9e06c13c93d862fbf58
|
bf3a1ea732cef3830ecebd90a67bf34a448422d5
|
refs/heads/master
| 2020-11-29T14:51:26.137966
| 2016-08-29T05:49:31
| 2016-08-29T05:49:31
| 66,517,150
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,709
|
py
|
# -*- coding:utf-8 -*-
import urllib2
import re
class Tool(object):
pattern_img = re.compile('<img.*?>| {7}')
pattern_addr = re.compile('<a.*?>|</a>')
pattern_line = re.compile('<tr>|<div>|</div>|</p>')
pattern_td = re.compile('<td>')
pattern_para = re.compile('<p.*?>')
pattern_br = re.compile('<br><br>|<br>')
def replace(self, content):
content = re.sub(self.pattern_img, '', content)
content = re.sub(self.pattern_addr, '', content)
content = re.sub(self.pattern_line, '\n', content)
content = re.sub(self.pattern_td, '\t', content)
content = re.sub(self.pattern_para, '\n ', content)
content = re.sub(self.pattern_br, '\n', content)
return content.strip()
class BDTB(object):
def __init__(self, baseurl, see_lz):
self.baseurl = baseurl
self.see_lz = "?see_lz=" + str(see_lz)
self.tool = Tool()
self.default_filename = 'bdtb.txt'
self.floor = 1
def deleteFile(self):
with open(self.default_filename, 'w'):
print 'clear default file content'
def getPage(self, page_index):
try:
request = urllib2.Request(self.baseurl + self.see_lz + "&pn=" + str(page_index))
content = urllib2.urlopen(request).read().decode("utf-8")
return content
except urllib2.URLError, e:
if hasattr(e, "code"):
print e.code
if hasattr(e, "reason"):
print e.reason
return None
def getTitle(self, content):
#content = self.getPage(1)
reg = '<h3 class="core_title_txt.*?>(.*?)</h3>'
pattern = re.compile(reg, re.S)
match_res = re.search(pattern, content)
if match_res:
return match_res.group(1).strip()
else:
return None
def getPageNum(self, content):
#content = self.getPage(1)
reg = '<li class="l_reply_num.*?</span>.*?<span.*?>(.*?)</span>'
pattern = re.compile(reg, re.S)
match_res = re.search(pattern, content)
if match_res:
return match_res.group(1).strip()
else:
return None
def getContent(self, page_index):
content = self.getPage(page_index)
if content:
reg = '<div id="post_content_.*?>(.*?)</div>'
pattern = re.compile(reg, re.S)
match_res = re.findall(pattern, content)
cont_list = []
for item in match_res:
item = '\n' + self.tool.replace(item) + '\n'
cont_list.append(item.encode('utf-8'))
return cont_list
else:
return None
def writeFile(self, content, filename = None):
if filename is None:
filename = self.default_filename
if not content:
return
with open(filename, 'a+') as f:
for item in content:
floortag = '\n%d floor------------------------------------------------------------\n' %self.floor
self.floor = self.floor + 1
f.write(floortag)
f.write(item)
def run(self):
self.deleteFile()
page_index = self.getPage(1)
page_title = self.getTitle(page_index)
page_number = self.getPageNum(page_index)
print 'page title is: ', page_title
print 'page number is: ', page_number
for i in range(1, int(page_number) + 1):
print 'now crawling page %s: ' %int(i)
self.writeFile(self.getContent(i))
print 'crawling end, total floor count: ', self.floor - 1
bdtb = BDTB("http://tieba.baidu.com/p/3138733512", 1)
bdtb.run()
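# --- Porting note (not part of the original file) ---
# The crawler above targets Python 2 (urllib2, print statements,
# "except urllib2.URLError, e" syntax). A minimal sketch of the same
# getPage logic under Python 3's urllib, offered only as an illustration:
import urllib.request
import urllib.error

def get_page_py3(baseurl, see_lz, page_index):
    # fetch one page of the thread and decode it, mirroring BDTB.getPage
    try:
        url = baseurl + see_lz + "&pn=" + str(page_index)
        with urllib.request.urlopen(url) as resp:
            return resp.read().decode("utf-8")
    except urllib.error.URLError as e:
        print(getattr(e, "code", None), getattr(e, "reason", None))
        return None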
|
[
"meng_yujing@yeah.net"
] |
meng_yujing@yeah.net
|
c949825bd197d7e4f5ac77c89814626a99bf58ae
|
d36546287721db2e97e0a4323e143163a14ce0b1
|
/2017/20/particle_swarm.py
|
9eec47de4d5275002ef18cae18495acda5fa59a1
|
[
"Unlicense"
] |
permissive
|
GeoffRiley/AdventOfCode
|
ca258edee05ad7a4b6e6db2e59b83e8879b48af0
|
567df9cb5645bc6cf4c22063a84a621039069311
|
refs/heads/master
| 2023-01-12T03:42:11.099541
| 2022-12-25T17:16:20
| 2022-12-25T17:16:20
| 225,139,440
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,628
|
py
|
import re
from collections import deque, Counter
from copy import deepcopy
from dataclasses import dataclass
from operator import add
from statistics import mean
from typing import List
@dataclass
class Particle(object):
id: int
pos: List[int]
vec: List[int]
acc: List[int]
@property
def dist(self):
return sum(abs(x) for x in self.pos)
@property
def pos_hash(self):
return hash(tuple(self.pos))
def move(self):
self.vec = list(map(add, self.vec, self.acc))
self.pos = list(map(add, self.pos, self.vec))
VECTOR_RE = re.compile(r'^p=<([-\d,]+)>,\s+'
r' v=<([-\d,]+)>,\s+'
r' a=<([-\d,]+)>', re.VERBOSE)
def particle_swarm(particles: List[Particle], part1=True):
last_close = deque([0 for _ in range(20)], maxlen=150)
if part1:
closest_particle = calc_closest(particles)
else:
closest_particle = len(particles)
while mean(last_close) != closest_particle:
last_close.append(closest_particle)
move_particles(particles)
if part1:
closest_particle = calc_closest(particles)
else:
remove_collisions(particles)
closest_particle = len(particles)
return closest_particle
def remove_collisions(particles: List[Particle]) -> None:
collision_collection = Counter(map(lambda p: p.pos_hash, particles))
for pos, cnt in collision_collection.items():
if cnt == 1:
continue
particle_array = [p for p in particles if p.pos_hash == pos]
for p in particle_array:
particles.remove(p)
def move_particles(particles: List[Particle]) -> None:
for particle in particles:
particle.move()
def calc_closest(particles: List[Particle]) -> int:
min_part = min(particles, key=lambda p: p.dist)
return min_part.id
def parse_particle_list(inp: List[str]) -> List[Particle]:
particles = []
for n, line in enumerate(inp):
pos_s, vec_s, acc_s = (list(map(int, s.split(','))) for s in VECTOR_RE.match(line).groups())
particles.append(Particle(n, pos_s, vec_s, acc_s))
return particles
if __name__ == '__main__':
with open('input.txt') as swarm_file:
swarm_list = swarm_file.read().splitlines(keepends=False)
particle_list = parse_particle_list(swarm_list)
particle_list_2 = deepcopy(particle_list)
print(f'Day 20, part 1: {particle_swarm(particle_list)}')
print(f'Day 20, part 2: {particle_swarm(particle_list_2, False)}')
# Day 20, part 1: 243
# Day 20, part 2: 648
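# --- Parsing illustration (not in the original) ---
# re.VERBOSE makes VECTOR_RE ignore the literal spaces in the pattern, so it
# matches the puzzle's "p=<..>, v=<..>, a=<..>" layout. With a made-up line:
#
#   >>> parse_particle_list(['p=<3,0,0>, v=<2,0,0>, a=<-1,0,0>'])
#   [Particle(id=0, pos=[3, 0, 0], vec=[2, 0, 0], acc=[-1, 0, 0])]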
|
[
"gir6@ou.ac.uk"
] |
gir6@ou.ac.uk
|
934aee2b86464868a709c9fd85fcb5b54022c3bb
|
0d88817307e26e3d4e5c26a0e6c8a1d5bf37711f
|
/stopp/stopp.py
|
aaace6eb13d18932db4d6d6b91b63be27f01183f
|
[
"MIT"
] |
permissive
|
AbdelrhmanBassiouny/stopp
|
12b406824d780dea56a20eed5a046ef8b56df258
|
2713d49e038f640e87ecf8513705651c0ee3e0d1
|
refs/heads/master
| 2022-06-15T08:04:56.196553
| 2021-08-01T12:57:54
| 2021-08-01T12:57:54
| 252,969,836
| 4
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,758
|
py
|
import numpy as np
from .data_structs import RobotKinematics
from .joint_profile import ConstructJointProfile
from .trajectory_utils import EnsurePathHasOddSize, FindMaxJoint, SynchronizeJointsToTrajectory, ValidateRobotPath
class Robot:
""" define a robot with certain kinematic limits to be used when constructing a trajectory profile"""
def __init__(self, n_joints, j_max, a_max, v_max):
"""
initialize robot kinematic parameters with user specified parameters
Note:: If the robot joints have different kinematic limits, It is recommended to use the lowest values here
to ensure safety and correct performance.
:param n_joints: defining robot's number of joints
:type n_joints: int
:param j_max: defining joint maximum jerk to use in trajectory (rad/sec^3)
:type j_max: float
:param a_max: defining joint maximum acceleration to use in trajectory (rad/sec^2)
:type a_max: float
:param v_max: defining joint maximum velocity to use in trajectory (rad/sec)
:type v_max: float
"""
if n_joints <= 0:
raise ValueError("Robot number of joints should be greater than zero")
if j_max <= 0:
raise ValueError("Robot jerk limit should be greater than zero")
if a_max <= 0:
raise ValueError("Robot acceleration limit should be greater than zero")
if v_max <= 0:
raise ValueError("Robot velocity limit should be greater than zero")
self.rob_k = RobotKinematics(n_joints, j_max, a_max, v_max)
def TimeParameterizePath(self, robot_path, interp_time_step=None):
"""Construct the trajectory of the robot from a predefined path.
:param robot_path: Union[ndarray, List, Tuple] -> rows=number of joints, columns= number of path points.
:param interp_time_step: if not None, interpolation process will be added using this time step.
:return rob_trajectory: (list) each list entry is a (TrajectoryPoint) containing a joint trajectory.
"""
# Check path dimensions, types, and number of joints.
ValidateRobotPath(robot_path, self.rob_k.j_num)
# Copy path to a numpy nd-array object.
rob_path = np.copy(robot_path)
# If path points are even, Add a point in the middle to make it Odd.
rob_path = EnsurePathHasOddSize(rob_path)
# Find Max Joint, which is the joint that moves the greatest distance.
max_j_num = FindMaxJoint(rob_path)
# Construct Max Joint Profile.
max_trajectory = ConstructJointProfile(self.rob_k, rob_path[max_j_num], interp_time_step)
# Construct Other Joints' Profiles from the Max Joint Profile, to have them Synchronized.
rob_trajectory = SynchronizeJointsToTrajectory(rob_path, max_trajectory)
return rob_trajectory
if __name__ == "__main__":
"""This is an Example usage of the library, Enjoy!"""
rob_j_max = 800.0
rob_a_max = 50
rob_v_max = 6
joints = 2
n_points = 31
time_step = 0.004
path = np.array([np.linspace(0, 50*(j+1), n_points) for j in range(joints)]) * (np.pi / 180)
my_rob = Robot(joints, rob_j_max, rob_a_max, rob_v_max)
trajectory = my_rob.TimeParameterizePath(path, time_step)
for j in range(joints):
print("joint {} trajectory points time = {}".format(j, trajectory[j].t))
print("joint {} trajectory points position = {}".format(j, trajectory[j].pos))
print("joint {} trajectory points velocity = {}".format(j, trajectory[j].vel))
print("joint {} trajectory points acceleration = {}".format(j, trajectory[j].acc))
print("============================================================")
|
[
"Bassio@programmer.com"
] |
Bassio@programmer.com
|
3c2ddbefb534733402dab2315f80ebe6a3f1e70b
|
4f4ecdacdd57fddfec039439589472382875c539
|
/arelle/ModelRenderingObject.py
|
b01745af7d306800437c03ceca950e84d7240f08
|
[
"Apache-2.0"
] |
permissive
|
irjudson/Arelle
|
0fadce7cf36d41115b1e833c9e30fb717c120613
|
d03be32dce33c34c3388e54afbe837bf83a4ff48
|
refs/heads/master
| 2020-05-29T11:39:16.391796
| 2013-11-07T23:00:14
| 2013-11-07T23:00:14
| 1,867,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 69,169
|
py
|
'''
Created on Mar 7, 2011
@author: Mark V Systems Limited
(c) Copyright 2011 Mark V Systems Limited, All rights reserved.
'''
import inspect, os
from arelle import XmlUtil, XbrlConst, XPathParser, Locale, XPathContext
from arelle.ModelDtsObject import ModelResource
from arelle.ModelInstanceObject import ModelDimensionValue
from arelle.ModelValue import qname, QName
from arelle.ModelObject import ModelObject
from arelle.ModelFormulaObject import (Trace, ModelFormulaResource, ModelFormulaRules, ModelConceptName,
ModelParameter, Aspect, aspectStr)
from arelle.ModelInstanceObject import ModelFact
from arelle.FormulaEvaluator import (filterFacts as formulaEvaluatorFilterFacts,
aspectsMatch, factsPartitions, VariableBinding)
from arelle.PrototypeInstanceObject import FactPrototype
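# states assigned by roll-up analysis of a structural node's subtree
# (see StructuralNode.subtreeRollUp and StructuralNode.isAbstract)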
ROLLUP_NOT_ANALYZED = 0
CHILD_ROLLUP_FIRST = 1
CHILD_ROLLUP_LAST = 2
CHILDREN_BUT_NO_ROLLUP = 3
OPEN_ASPECT_ENTRY_SURROGATE = '\uDBFF' # lone high surrogate; sorts above ordinary header text for open aspect entry rows
EMPTY_SET = set()
def definitionNodes(nodes):
    return [(node.definitionNode if isinstance(node, StructuralNode) else node) for node in nodes]
# table linkbase structural nodes for rendering
class StructuralNode:
def __init__(self, parentStructuralNode, definitionNode, zInheritance=None, contextItemFact=None, breakdownTableNode=None):
self.parentStructuralNode = parentStructuralNode
self._definitionNode = definitionNode
self._rendrCntx = getattr(definitionNode.modelXbrl, "rendrCntx", None) # None for EU 2010 table linkbases
self.variables = {}
self.aspects = {}
self.childStructuralNodes = []
self.rollUpStructuralNode = None
self.choiceStructuralNodes = []
self.zInheritance = zInheritance
if contextItemFact is not None:
self.contextItemBinding = VariableBinding(self._rendrCntx,
boundFact=contextItemFact)
if isinstance(self.contextItemBinding.yieldedFact, FactPrototype):
for aspect in definitionNode.aspectsCovered():
if aspect != Aspect.DIMENSIONS:
self.aspectEntryObjectId = self.aspects[aspect] = contextItemFact.aspectEntryObjectId
break
else:
self.contextItemBinding = None
self.subtreeRollUp = ROLLUP_NOT_ANALYZED
self.depth = parentStructuralNode.depth + 1 if parentStructuralNode else 0
if breakdownTableNode is not None:
self.breakdownTableNode = breakdownTableNode
self.tagSelector = definitionNode.tagSelector
self.isLabeled = True
@property
def modelXbrl(self):
return self._definitionNode.modelXbrl
@property
def isAbstract(self):
if self.subtreeRollUp:
return self.subtreeRollUp == CHILDREN_BUT_NO_ROLLUP
try:
try:
return self.abstract # ordinate may have an abstract attribute
except AttributeError: # if none use axis object
return self.definitionNode.isAbstract
except AttributeError: # axis may never be abstract
return False
@property
def isRollUp(self):
return self.definitionNode.isRollUp
@property
def cardinalityAndDepth(self):
return self.definitionNode.cardinalityAndDepth(self)
@property
def structuralDepth(self):
if self.parentStructuralNode is not None:
return self.parentStructuralNode.structuralDepth + 1
return 0
@property
def definitionNode(self):
if self.choiceStructuralNodes:
return self.choiceStructuralNodes[getattr(self,"choiceNodeIndex",0)]._definitionNode
return self._definitionNode
def breakdownNode(self, tableELR):
definitionNode = self._definitionNode
if isinstance(definitionNode, ModelBreakdown):
return definitionNode
axisSubtreeRelSet = definitionNode.modelXbrl.relationshipSet((XbrlConst.tableBreakdownTree, XbrlConst.tableBreakdownTreeMMDD, XbrlConst.tableBreakdownTree201305, XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD, XbrlConst.tableDefinitionNodeSubtree201305, XbrlConst.tableDefinitionNodeSubtree201301, XbrlConst.tableAxisSubtree2011), tableELR)
while (True):
    for parentRel in axisSubtreeRelSet.toModelObject(definitionNode):
        definitionNode = parentRel.fromModelObject
        if isinstance(definitionNode, ModelBreakdown):
            return definitionNode
        break # climb to this node's parent and test it on the next pass
    else:
        return definitionNode # no parent relationship found; give up here
def constraintSet(self, tagSelectors=None):
definitionNode = self.definitionNode
if tagSelectors:
for tag in tagSelectors:
if tag in definitionNode.constraintSets:
return definitionNode.constraintSets[tag]
return definitionNode.constraintSets.get(None) # returns None if no default constraint set
def aspectsCovered(self):
return _DICT_SET(self.aspects.keys()) | self.definitionNode.aspectsCovered()
def hasAspect(self, aspect, inherit=True):
return (aspect in self.aspects or
self.definitionNode.hasAspect(self, aspect) or
(inherit and
self.parentStructuralNode is not None and
self.parentStructuralNode.hasAspect(aspect, inherit)))
def aspectValue(self, aspect, inherit=True, dims=None, depth=0, tagSelectors=None):
xc = self._rendrCntx
if self.choiceStructuralNodes: # use aspects from choice structural node
chosenStructuralNode = self.choiceStructuralNodes[getattr(self,"choiceNodeIndex",0)]
aspects = chosenStructuralNode.aspects
definitionNode = chosenStructuralNode._definitionNode
contextItemBinding = chosenStructuralNode.contextItemBinding
else:
aspects = self.aspects
definitionNode = self._definitionNode
contextItemBinding = self.contextItemBinding
constraintSet = self.constraintSet(tagSelectors)
if aspect == Aspect.DIMENSIONS:
if dims is None: dims = set()
if inherit and self.parentStructuralNode is not None:
dims |= self.parentStructuralNode.aspectValue(aspect, dims=dims, depth=depth+1)
if aspect in aspects:
dims |= aspects[aspect]
elif constraintSet is not None and constraintSet.hasAspect(self, aspect):
dims |= set(definitionNode.aspectValue(xc, aspect) or {})
if constraintSet is not None and constraintSet.hasAspect(self, Aspect.OMIT_DIMENSIONS):
dims -= set(constraintSet.aspectValue(xc, Aspect.OMIT_DIMENSIONS))
return dims
if aspect in aspects:
return aspects[aspect]
elif constraintSet is not None and constraintSet.hasAspect(self, aspect):
if isinstance(definitionNode, ModelSelectionDefinitionNode):
# result is in the indicated variable of ordCntx
return self.variables.get(self._definitionNode.variableQname)
elif isinstance(definitionNode, ModelFilterDefinitionNode):
if contextItemBinding:
return contextItemBinding.aspectValue(aspect)
elif isinstance(definitionNode, ModelTupleDefinitionNode):
if aspect == Aspect.LOCATION and contextItemBinding:
return contextItemBinding.yieldedFact
# non-location tuple aspects don't leak into cell bindings
else:
return constraintSet.aspectValue(xc, aspect)
if inherit and self.parentStructuralNode is not None:
return self.parentStructuralNode.aspectValue(aspect, depth=depth+1)
return None
'''
@property
def primaryItemQname(self): # for compatibility with viewRelationships
if Aspect.CONCEPT in self.aspects:
return self.aspects[Aspect.CONCEPT]
return self.definitionNode.primaryItemQname
@property
def explicitDims(self):
return self.definitionNode.explicitDims
'''
def objectId(self, refId=""):
return self._definitionNode.objectId(refId)
def header(self, role=None, lang=None, evaluate=True, returnGenLabel=True, returnMsgFormatString=False):
# if this ordinate is a nested selectionAxis selection, use the selection message or text contents instead of axis headers
isZSelection = isinstance(self._definitionNode, ModelSelectionDefinitionNode) and hasattr(self, "zSelection")
if role is None:
# check for message before checking for genLabel
msgsRelationshipSet = self._definitionNode.modelXbrl.relationshipSet(
(XbrlConst.tableDefinitionNodeSelectionMessage201301, XbrlConst.tableAxisSelectionMessage2011)
if isZSelection else
(XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011))
if msgsRelationshipSet:
msg = msgsRelationshipSet.label(self._definitionNode, XbrlConst.standardMessage, lang, returnText=False)
if msg is not None:
if evaluate:
if returnMsgFormatString:
return msg.formatString # not possible to evaluate (during resolution)
else:
return self.evaluate(msg, msg.evaluate)
else:
return XmlUtil.text(msg)
if isZSelection: # no message, return text of selection
return self.variables.get(self._definitionNode.variableQname, "selection")
if returnGenLabel:
label = self._definitionNode.genLabel(role=role, lang=lang)
if label:
return label
if self.isEntryAspect:
# True if open node bound to a prototype, False if bound to a real fact
return OPEN_ASPECT_ENTRY_SURROGATE # sorts high; works for python 2.7/3.2 as well as 3.3
# if there's a child roll up, check for it
if self.rollUpStructuralNode is not None: # check the rolling-up child too
return self.rollUpStructuralNode.header(role, lang, evaluate, returnGenLabel, returnMsgFormatString)
# if aspect is a concept of dimension, return its standard label
concept = None
for aspect in self.aspectsCovered():
aspectValue = self.aspectValue(aspect)
if isinstance(aspect, QName) or aspect == Aspect.CONCEPT: # dimension or concept
if isinstance(aspectValue, QName):
concept = self.modelXbrl.qnameConcepts[aspectValue]
break
elif isinstance(aspectValue, ModelDimensionValue):
if aspectValue.isExplicit:
concept = aspectValue.member
elif aspectValue.isTyped:
return XmlUtil.innerTextList(aspectValue.typedMember)
elif isinstance(aspectValue, ModelObject):
text = XmlUtil.innerTextList(aspectValue)
if not text and XmlUtil.hasChild(aspectValue, aspectValue.namespaceURI, "forever"):
text = "forever"
return text
if concept is not None:
label = concept.label(lang=lang)
if label:
return label
# if there is a role, check if it's available on a parent node
if role and self.parentStructuralNode is not None:
return self.parentStructuralNode.header(role, lang, evaluate, returnGenLabel, returnMsgFormatString)
return None
def evaluate(self, evalObject, evalMethod, otherOrdinate=None, evalArgs=()):
xc = self._rendrCntx
if self.contextItemBinding and not isinstance(xc.contextItem, ModelFact):
previousContextItem = xc.contextItem # xbrli.xbrl
xc.contextItem = self.contextItemBinding.yieldedFact
else:
previousContextItem = None
if self.choiceStructuralNodes and hasattr(self,"choiceNodeIndex"):
variables = self.choiceStructuralNodes[self.choiceNodeIndex].variables
else:
variables = self.variables
removeVarQnames = []
for variablesItems in (self.tableDefinitionNode.parameters.items(), variables.items()):
for qn, value in variablesItems:
if qn not in xc.inScopeVars:
removeVarQnames.append(qn)
xc.inScopeVars[qn] = value
if self.parentStructuralNode is not None:
result = self.parentStructuralNode.evaluate(evalObject, evalMethod, otherOrdinate, evalArgs)
elif otherOrdinate is not None:
# recurse to other ordinate (which will recurse to z axis)
result = otherOrdinate.evaluate(evalObject, evalMethod, None, evalArgs)
elif self.zInheritance is not None:
result = self.zInheritance.evaluate(evalObject, evalMethod, None, evalArgs)
else:
try:
result = evalMethod(xc, *evalArgs)
except XPathContext.XPathException as err:
xc.modelXbrl.error(err.code,
_("%(element)s set %(xlinkLabel)s \nException: %(error)s"),
modelObject=evalObject, element=evalObject.localName,
xlinkLabel=evalObject.xlinkLabel, error=err.message)
result = ''
for qn in removeVarQnames:
xc.inScopeVars.pop(qn)
if previousContextItem is not None:
xc.contextItem = previousContextItem # xbrli.xbrl
return result
def hasValueExpression(self, otherAxisStructuralNode=None):
return (self.definitionNode.hasValueExpression or
(otherAxisStructuralNode is not None and otherAxisStructuralNode.definitionNode.hasValueExpression))
def evalValueExpression(self, fact, otherAxisStructuralNode=None):
for structuralNode in (self, otherAxisStructuralNode):
if structuralNode is not None and structuralNode.definitionNode.hasValueExpression:
return self.evaluate(self.definitionNode, structuralNode.definitionNode.evalValueExpression, otherAxisStructuralNode=otherAxisStructuralNode, evalArgs=(fact,))
return None
@property
def isEntryAspect(self):
# true if open node and bound to a fact prototype
return self.contextItemBinding is not None and isinstance(self.contextItemBinding.yieldedFact, FactPrototype)
def isEntryPrototype(self, default=False):
# true if all axis open nodes before this one are entry prototypes (or not open axes)
if self.contextItemBinding is not None:
# True if open node bound to a prototype, False if bound to a real fact
return isinstance(self.contextItemBinding.yieldedFact, FactPrototype)
if self.parentStructuralNode is not None:
return self.parentStructuralNode.isEntryPrototype(default)
return default # nothing open to be bound to a fact
@property
def tableDefinitionNode(self):
if self.parentStructuralNode is None:
return self.breakdownTableNode
else:
return self.parentStructuralNode.tableDefinitionNode
@property
def tagSelectors(self):
try:
return self._tagSelectors
except AttributeError:
if self.parentStructuralNode is None:
self._tagSelectors = set()
else:
self._tagSelectors = self.parentStructuralNode.tagSelectors
if self.tagSelector:
self._tagSelectors.add(self.tagSelector)
return self._tagSelectors
@property
def leafNodeCount(self):
childLeafCount = 0
for childStructuralNode in self.childStructuralNodes:
childLeafCount += childStructuralNode.leafNodeCount
if childLeafCount == 0:
return 1
if not self.isAbstract and isinstance(self.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)):
childLeafCount += 1 # has a roll up
return childLeafCount
def setHasOpenNode(self):
if self.parentStructuralNode is not None:
self.parentStructuralNode.setHasOpenNode()
else:
self.hasOpenNode = True
def inheritedPrimaryItemQname(self, view):
    return (self.primaryItemQname or
            (self.parentStructuralNode.inheritedPrimaryItemQname(view)
             if self.parentStructuralNode is not None else None))
def inheritedExplicitDims(self, view, dims=None, nested=False):
if dims is None: dims = {}
if self.parentStructuralNode is not None:
self.parentStructuralNode.inheritedExplicitDims(view, dims, True)
for dim, mem in self.explicitDims:
dims[dim] = mem
if not nested:
return {(dim,mem) for dim,mem in dims.items() if mem != 'omit'}
def inheritedAspectValue(self, otherAxisStructuralNode,
view, aspect, tagSelectors,
xAspectStructuralNodes, yAspectStructuralNodes, zAspectStructuralNodes):
aspectStructuralNodes = xAspectStructuralNodes.get(aspect, EMPTY_SET) | yAspectStructuralNodes.get(aspect, EMPTY_SET) | zAspectStructuralNodes.get(aspect, EMPTY_SET)
structuralNode = None
if len(aspectStructuralNodes) == 1:
structuralNode = aspectStructuralNodes.pop()
elif len(aspectStructuralNodes) > 1:
if aspect == Aspect.LOCATION:
hasClash = False
for _aspectStructuralNode in aspectStructuralNodes:
if not _aspectStructuralNode.definitionNode.aspectValueDependsOnVars(aspect):
if structuralNode:
hasClash = True
else:
structuralNode = _aspectStructuralNode
else:
# take closest structural node
hasClash = True
''' reported in static analysis by RenderingEvaluator.py
if hasClash:
from arelle.ModelFormulaObject import aspectStr
view.modelXbrl.error("xbrlte:aspectClash",
_("Aspect %(aspect)s covered by multiple axes."),
modelObject=view.modelTable, aspect=aspectStr(aspect))
'''
if structuralNode:
definitionNodeConstraintSet = structuralNode.constraintSet(tagSelectors)
if definitionNodeConstraintSet is not None and definitionNodeConstraintSet.aspectValueDependsOnVars(aspect):
return self.evaluate(definitionNodeConstraintSet,
definitionNodeConstraintSet.aspectValue, # this passes a method
otherAxisStructuralNode=otherAxisStructuralNode,
evalArgs=(aspect,))
return structuralNode.aspectValue(aspect, tagSelectors=tagSelectors)
return None
def __repr__(self):
return ("structuralNode[{0}]{1})".format(self.objectId(),self.definitionNode))
# Root class for rendering is formula, to allow linked and nested compiled expressions
def definitionModelLabelsView(mdlObj):
return tuple(sorted([("{} {} {} {}".format(label.localName,
str(rel.order).rstrip("0").rstrip("."),
os.path.basename(label.role),
label.xmlLang),
label.stringValue)
for rel in mdlObj.modelXbrl.relationshipSet((XbrlConst.elementLabel,XbrlConst.elementReference)).fromModelObject(mdlObj)
for label in (rel.toModelObject,)] +
[("xlink:label", mdlObj.xlinkLabel)]))
# 2010 EU Table linkbase
class ModelEuTable(ModelResource):
def init(self, modelDocument):
super(ModelEuTable, self).init(modelDocument)
self.aspectsInTaggedConstraintSets = set()
@property
def aspectModel(self):
return "dimensional"
@property
def propertyView(self):
return ((("id", self.id),) +
self.definitionLabelsView)
def header(self, role=None, lang=None, strip=False, evaluate=True):
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def parameters(self):
return {}
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
def __repr__(self):
return ("table[{0}]{1})".format(self.objectId(),self.propertyView))
class ModelEuAxisCoord(ModelResource):
def init(self, modelDocument):
super(ModelEuAxisCoord, self).init(modelDocument)
@property
def abstract(self):
return self.get("abstract") or 'false'
@property
def isAbstract(self):
return self.abstract == "true"
@property
def isMerged(self):
return False
@property
def parentChildOrder(self):
return self.get("parentChildOrder")
@property
def isRollUp(self):
return False
@property
def parentDefinitionNode(self):
try:
return self._parentDefinitionNode
except AttributeError:
parentDefinitionNode = None
for rel in self.modelXbrl.relationshipSet(XbrlConst.euAxisMember).toModelObject(self):
parentDefinitionNode = rel.fromModelObject
break
self._parentDefinitionNode = parentDefinitionNode
return parentDefinitionNode
def aspectsCovered(self):
aspectsCovered = set()
if XmlUtil.hasChild(self, XbrlConst.euRend, "primaryItem"):
aspectsCovered.add(Aspect.CONCEPT)
if XmlUtil.hasChild(self, XbrlConst.euRend, "timeReference"):
aspectsCovered.add(Aspect.INSTANT)
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"):
aspectsCovered.add(self.prefixedNameQname(e.get("dimension")))
return aspectsCovered
@property
def constraintSets(self):
return {None: self}
@property
def tagSelector(self): # default constraint set for ruleNode has name None
return None
def hasAspect(self, structuralNode, aspect):
if aspect == Aspect.CONCEPT:
return XmlUtil.hasChild(self, XbrlConst.euRend, "primaryItem")
elif aspect == Aspect.DIMENSIONS:
return XmlUtil.hasChild(self, XbrlConst.euRend, "explicitDimCoord")
elif aspect in (Aspect.PERIOD_TYPE, Aspect.INSTANT):
return XmlUtil.hasChild(self, XbrlConst.euRend, "timeReference")
elif isinstance(aspect, QName):
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"):
if self.prefixedNameQname(e.get("dimension")) == aspect:
return True
return False
def aspectValueDependsOnVars(self, aspect):
return False
def aspectValue(self, xpCtx, aspect, inherit=False):
if aspect == Aspect.DIMENSIONS:
dims = set(self.prefixedNameQname(e.get("dimension"))
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"))
if inherit and self.parentDefinitionNode is not None:
dims |= self.parentDefinitionNode.aspectValue(None, aspect, inherit)
return dims
if inherit and not self.hasAspect(None, aspect):
if self.parentDefinitionNode is not None:
return self.parentDefinitionNode.aspectValue(None, aspect, inherit)
return None
if aspect == Aspect.CONCEPT:
priItem = XmlUtil.childAttr(self, XbrlConst.euRend, "primaryItem", "name")
if priItem is not None:
return self.prefixedNameQname(priItem)
return None
elif aspect == Aspect.PERIOD_TYPE:
if XmlUtil.hasChild(self, XbrlConst.euRend, "timeReference"):
return "instant"
elif aspect == Aspect.INSTANT:
return XmlUtil.datetimeValue(XmlUtil.childAttr(self, XbrlConst.euRend, "timeReference", "instant"),
addOneDay=True)
elif isinstance(aspect, QName):
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"):
if self.prefixedNameQname(e.get("dimension")) == aspect:
return self.prefixedNameQname(e.get("value"))
return None
'''
@property
def primaryItemQname(self):
priItem = XmlUtil.childAttr(self, XbrlConst.euRend, "primaryItem", "name")
if priItem is not None:
return self.prefixedNameQname(priItem)
return None
@property
def explicitDims(self):
return {(self.prefixedNameQname(e.get("dimension")),
self.prefixedNameQname(e.get("value")))
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord")}
@property
def instant(self):
return XmlUtil.datetimeValue(XmlUtil.childAttr(self, XbrlConst.euRend, "timeReference", "instant"),
addOneDay=True)
'''
def cardinalityAndDepth(self, structuralNode):
return (1, 1)
def header(self, role=None, lang=None, strip=False, evaluate=True):
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def hasValueExpression(self):
return False
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
@property
def propertyView(self):
explicitDims = self.aspectValue(None, Aspect.DIMENSIONS, inherit=True)
return ((("id", self.id),
("primary item", self.aspectValue(None, Aspect.CONCEPT, inherit=True)),
("dimensions", "({0})".format(len(explicitDims)),
tuple((str(dim),str(self.aspectValue(None, dim, inherit=True)))
for dim in sorted(explicitDims)))
if explicitDims else (),
("abstract", self.abstract)) +
self.definitionLabelsView)
def __repr__(self):
return ("axisCoord[{0}]{1})".format(self.objectId(),self.propertyView))
# 2011 Table linkbase
class ModelTable(ModelFormulaResource):
def init(self, modelDocument):
super(ModelTable, self).init(modelDocument)
self.modelXbrl.modelRenderingTables.add(self)
self.modelXbrl.hasRenderingTables = True
self.aspectsInTaggedConstraintSets = set()
@property
def aspectModel(self):
return self.get("aspectModel", "dimensional") # attribute removed 2013-06, always dimensional
@property
def descendantArcroles(self):
return (XbrlConst.tableFilter, XbrlConst.tableFilterMMDD, XbrlConst.tableFilter201305, XbrlConst.tableFilter201301, XbrlConst.tableFilter2011,
XbrlConst.tableBreakdown, XbrlConst.tableBreakdownMMDD, XbrlConst.tableBreakdown201305, XbrlConst.tableBreakdown201301, XbrlConst.tableAxis2011,
XbrlConst.tableParameter, XbrlConst.tableParameterMMDD)
@property
def filterRelationships(self):
try:
return self._filterRelationships
except AttributeError:
rels = [] # order so conceptName filter is first (if any) (may want more sorting in future)
for rel in self.modelXbrl.relationshipSet((XbrlConst.tableFilter, XbrlConst.tableFilterMMDD, XbrlConst.tableFilter201305, XbrlConst.tableFilter201301, XbrlConst.tableFilter2011)).fromModelObject(self):
if isinstance(rel.toModelObject, ModelConceptName):
rels.insert(0, rel) # put conceptName filters first
else:
rels.append(rel)
self._filterRelationships = rels
return rels
@property
def parameters(self):
try:
return self._parameters
except AttributeError:
self._parameters = {}
xc = self.modelXbrl.rendrCntx
for rel in self.modelXbrl.relationshipSet((XbrlConst.tableParameter, XbrlConst.tableParameterMMDD)).fromModelObject(self):
if isinstance(rel.toModelObject, ModelParameter):
varQname = rel.variableQname
parameter = rel.toModelObject
if isinstance(parameter, ModelParameter):
self._parameters[varQname] = xc.inScopeVars.get(parameter.qname)
return self._parameters
def header(self, role=None, lang=None, strip=False, evaluate=True):
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
@property
def propertyView(self):
return ((("id", self.id),) +
self.definitionLabelsView)
def __repr__(self):
return ("modlTable[{0}]{1})".format(self.objectId(),self.propertyView))
class ModelDefinitionNode(ModelFormulaResource):
def init(self, modelDocument):
super(ModelDefinitionNode, self).init(modelDocument)
@property
def parentDefinitionNode(self):
return None
@property
def descendantArcroles(self):
return (XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011,
XbrlConst.tableDefinitionNodeSubtree201305,
XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD)
def hasAspect(self, structuralNode, aspect):
return False
def aspectValueDependsOnVars(self, aspect):
return False
@property
def variablename(self):
"""(str) -- name attribute"""
return self.getStripped("name")
@property
def variableQname(self):
"""(QName) -- resolved name for an XPath bound result having a QName name attribute"""
varName = self.variablename
return qname(self, varName, noPrefixIsNoNamespace=True) if varName else None
def aspectValue(self, xpCtx, aspect, inherit=True):
if aspect == Aspect.DIMENSIONS:
return []
return None
def aspectsCovered(self):
return set()
@property
def constraintSets(self):
return {None: self}
@property
def tagSelector(self):
return self.get("tagSelector")
@property
def valueExpression(self):
return self.get("value")
@property
def hasValueExpression(self):
return bool(self.valueProg) # non empty program
def compile(self):
if not hasattr(self, "valueProg"):
value = self.valueExpression
self.valueProg = XPathParser.parse(self, value, self, "value", Trace.VARIABLE)
# duplicates formula resource for RuleAxis but not for other subclasses
super(ModelDefinitionNode, self).compile()
def evalValueExpression(self, xpCtx, fact):
# compiled by FormulaResource compile()
return xpCtx.evaluateAtomicValue(self.valueProg, 'xs:string', fact)
'''
@property
def primaryItemQname(self): # for compatibility with viewRelationships
return None
@property
def explicitDims(self):
return set()
'''
@property
def isAbstract(self):
return False
@property
def isMerged(self):
return False
@property
def isRollUp(self):
return self.get("rollUp") == 'true'
def cardinalityAndDepth(self, structuralNode):
return (1,
1 if (structuralNode.header(evaluate=False) is not None) else 0)
def header(self, role=None, lang=None, strip=False, evaluate=True):
if role is None:
# check for message before checking for genLabel
msgsRelationshipSet = self.modelXbrl.relationshipSet((XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011))
if msgsRelationshipSet:
msg = msgsRelationshipSet.label(self, XbrlConst.standardMessage, lang, returnText=False)
if msg is not None:
if evaluate:
result = msg.evaluate(self.modelXbrl.rendrCntx)
else:
result = XmlUtil.text(msg)
if strip:
return result.strip()
return result
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def definitionNodeView(self):
return XmlUtil.xmlstring(self, stripXmlns=True, prettyPrint=True)
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
class ModelBreakdown(ModelDefinitionNode):
def init(self, modelDocument):
super(ModelBreakdown, self).init(modelDocument)
@property
def parentChildOrder(self):
return self.get("parentChildOrder")
@property
def descendantArcroles(self):
return (XbrlConst.tableBreakdownTree, XbrlConst.tableBreakdownTreeMMDD, XbrlConst.tableBreakdownTree201305)
@property
def propertyView(self):
return ((("id", self.id),
("parent child order", self.parentChildOrder),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
class ModelClosedDefinitionNode(ModelDefinitionNode):
def init(self, modelDocument):
super(ModelClosedDefinitionNode, self).init(modelDocument)
@property
def abstract(self):
return self.get("abstract")
@property
def isAbstract(self):
return self.abstract == 'true'
@property
def parentChildOrder(self):
return self.get("parentChildOrder")
@property
def descendantArcroles(self):
return (XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD, XbrlConst.tableDefinitionNodeSubtree201305, XbrlConst.tableDefinitionNodeSubtree201301, XbrlConst.tableAxisSubtree2011, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011)
def filteredFacts(self, xpCtx, facts):
aspects = self.aspectsCovered()
axisAspectValues = dict((aspect, self.aspectValue(xpCtx, aspect))
for aspect in aspects)
fp = FactPrototype(self, axisAspectValues)
return set(fact
for fact in facts
if aspectsMatch(xpCtx, fact, fp, aspects))
class ModelConstraintSet(ModelFormulaRules):
def init(self, modelDocument):
super(ModelConstraintSet, self).init(modelDocument)
self._locationSourceVar = self.source(Aspect.LOCATION_RULE, acceptFormulaSource=False)
self._locationAspectCovered = set()
self.aspectValues = {} # only needed if error blocks compiling this node, replaced by compile()
self.aspectProgs = {} # ditto
if self._locationSourceVar: self._locationAspectCovered.add(Aspect.LOCATION) # location is parent (tuple), not sibling
def hasAspect(self, structuralNode, aspect, inherit=None):
return self._hasAspect(structuralNode, aspect, inherit)
def _hasAspect(self, structuralNode, aspect, inherit=None): # opaque from ModelRuleDefinitionNode
if aspect == Aspect.LOCATION and self._locationSourceVar:
return True
return self.hasRule(aspect)
def aspectValue(self, xpCtx, aspect, inherit=None):
try:
if xpCtx is None: xpCtx = self.modelXbrl.rendrCntx
if aspect == Aspect.LOCATION and self._locationSourceVar in xpCtx.inScopeVars:
return xpCtx.inScopeVars[self._locationSourceVar]
return self.evaluateRule(xpCtx, aspect)
except AttributeError:
return '(unavailable)' # table defective or not initialized
def aspectValueDependsOnVars(self, aspect):
return aspect in _DICT_SET(self.aspectProgs.keys()) or aspect in self._locationAspectCovered
def aspectsCovered(self):
return _DICT_SET(self.aspectValues.keys()) | _DICT_SET(self.aspectProgs.keys()) | self._locationAspectCovered
# provide model table's aspect model to compile() method of ModelFormulaRules
@property
def aspectModel(self):
for frameRecord in inspect.stack():
obj = frameRecord[0].f_locals['self']
if isinstance(obj,ModelTable):
return obj.aspectModel
return None
'''
@property
def primaryItemQname(self):
return self.evaluateRule(self.modelXbrl.rendrCntx, Aspect.CONCEPT)
@property
def explicitDims(self):
dimMemSet = set()
dims = self.evaluateRule(self.modelXbrl.rendrCntx, Aspect.DIMENSIONS)
if dims: # may be none if no dim aspects on this ruleAxis
for dim in dims:
mem = self.evaluateRule(self.modelXbrl.rendrCntx, dim)
if mem: # may be none if dimension was omitted
dimMemSet.add( (dim, mem) )
return dimMemSet
@property
def instant(self):
periodType = self.evaluateRule(self.modelXbrl.rendrCntx, Aspect.PERIOD_TYPE)
if periodType == "forever":
return None
return self.evaluateRule(self.modelXbrl.rendrCntx,
{"instant": Aspect.INSTANT,
"duration": Aspect.END}[periodType])
'''
def cardinalityAndDepth(self, structuralNode):
if self.aspectValues or self.aspectProgs or structuralNode.header(evaluate=False) is not None:
return (1, 1)
else:
return (0, 0)
class ModelRuleSet(ModelConstraintSet, ModelFormulaResource):
def init(self, modelDocument):
super(ModelRuleSet, self).init(modelDocument)
@property
def tagName(self): # can't call it tag because that would hide ElementBase.tag
return self.get("tag")
class ModelRuleDefinitionNode(ModelConstraintSet, ModelClosedDefinitionNode):
def init(self, modelDocument):
super(ModelRuleDefinitionNode, self).init(modelDocument)
@property
def merge(self):
return self.get("merge")
@property
def isMerged(self):
return self.merge == "true"
@property
def constraintSets(self):
try:
return self._constraintSets
except AttributeError:
self._constraintSets = dict((ruleSet.tagName, ruleSet)
for ruleSet in XmlUtil.children(self, self.namespaceURI, "ruleSet"))
if self.aspectsCovered(): # any local rule?
self._constraintSets[None] = self
return self._constraintSets
def hasAspect(self, structuralNode, aspect):
return any(constraintSet._hasAspect(structuralNode, aspect)
for constraintSet in self.constraintSets.values())
@property
def aspectsInTaggedConstraintSet(self):
try:
return self._aspectsInTaggedConstraintSet
except AttributeError:
self._aspectsInTaggedConstraintSet = set()
for tag, constraintSet in self.constraintSets.items():
if tag is not None:
for aspect in constraintSet.aspectsCovered():
if aspect != Aspect.DIMENSIONS:
self._aspectsInTaggedConstraintSet.add(aspect)
return self._aspectsInTaggedConstraintSet
def compile(self):
super(ModelRuleDefinitionNode, self).compile()
for constraintSet in self.constraintSets.values():
if constraintSet != self: # compile nested constraint sets
constraintSet.compile()
@property
def propertyView(self):
return ((("id", self.id),
("abstract", self.abstract),
("merge", self.merge),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
def __repr__(self):
return ("modelRuleDefinitionNode[{0}]{1})".format(self.objectId(),self.propertyView))
# deprecated 2013-05-17
class ModelTupleDefinitionNode(ModelRuleDefinitionNode):
def init(self, modelDocument):
super(ModelTupleDefinitionNode, self).init(modelDocument)
@property
def descendantArcroles(self):
return (XbrlConst.tableTupleContent201301, XbrlConst.tableTupleContent2011, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011)
@property
def contentRelationships(self):
return self.modelXbrl.relationshipSet((XbrlConst.tableTupleContent201301, XbrlConst.tableTupleContent2011)).fromModelObject(self)
def hasAspect(self, structuralNode, aspect, inherit=None):
return aspect == Aspect.LOCATION # non-location aspects aren't leaked to the ordinate for a tuple
def aspectValue(self, xpCtx, aspect, inherit=None):
return self.evaluateRule(xpCtx, aspect)
def aspectsCovered(self):
return {Aspect.LOCATION} # tuple's aspects don't leak to ordinates
def tupleAspectsCovered(self):
return _DICT_SET(self.aspectValues.keys()) | _DICT_SET(self.aspectProgs.keys()) | {Aspect.LOCATION}
def filteredFacts(self, xpCtx, facts):
aspects = self.aspectsCovered()
axisAspectValues = dict((aspect, self.tupleAspectsCovered(aspect))
for aspect in aspects
if aspect != Aspect.LOCATION) # location determined by ordCntx, not axis
fp = FactPrototype(self, axisAspectValues)
return set(fact
for fact in facts
if fact.isTuple and aspectsMatch(xpCtx, fact, fp, aspects))
class ModelCompositionDefinitionNode(ModelClosedDefinitionNode):
def init(self, modelDocument):
super(ModelCompositionDefinitionNode, self).init(modelDocument)
@property
def abstract(self): # always abstract, no filters, no data
return 'true'
class ModelRelationshipDefinitionNode(ModelClosedDefinitionNode):
def init(self, modelDocument):
super(ModelRelationshipDefinitionNode, self).init(modelDocument)
def aspectsCovered(self):
return {Aspect.CONCEPT}
@property
def conceptQname(self):
name = self.getStripped("conceptname")
return qname(self, name, noPrefixIsNoNamespace=True) if name else None
@property
def relationshipSourceQname(self):
sourceQname = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "relationshipSource")
if sourceQname is not None:
return qname( sourceQname, XmlUtil.text(sourceQname) )
return None
@property
def linkrole(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linkrole")
@property
def axis(self):
a = XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), ("axis", "formulaAxis"))
if not a: a = 'descendant' # would be an XML error
return a
@property
def isOrSelfAxis(self):
return self.axis.endswith('-or-self')
@property
def generations(self):
try:
return _INT( XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "generations") )
except (TypeError, ValueError):
if self.axis in ('sibling', 'child', 'parent'):
return 1
return 0
@property
def relationshipSourceQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "relationshipSourceExpression")
@property
def linkroleExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linkroleExpression")
@property
def axisExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), ("axisExpression", "formulAxisExpression"))
@property
def generationsExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "generationsExpression")
def compile(self):
if not hasattr(self, "relationshipSourceQnameExpressionProg"):
self.relationshipSourceQnameExpressionProg = XPathParser.parse(self, self.relationshipSourceQnameExpression, self, "relationshipSourceQnameExpressionProg", Trace.VARIABLE)
self.linkroleExpressionProg = XPathParser.parse(self, self.linkroleExpression, self, "linkroleQnameExpressionProg", Trace.VARIABLE)
self.axisExpressionProg = XPathParser.parse(self, self.axisExpression, self, "axisExpressionProg", Trace.VARIABLE)
self.generationsExpressionProg = XPathParser.parse(self, self.generationsExpression, self, "generationsExpressionProg", Trace.VARIABLE)
super(ModelRelationshipDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
if self.relationshipSourceQname and self.relationshipSourceQname != XbrlConst.qnXfiRoot:
if varRefSet is None: varRefSet = set()
varRefSet.add(self.relationshipSourceQname)
return super(ModelRelationshipDefinitionNode, self).variableRefs(
[p for p in (self.relationshipSourceQnameExpressionProg,
self.linkroleExpressionProg, self.axisExpressionProg,
self.generationsExpressionProg)
if p], varRefSet)
def evalRelationshipSourceQname(self, xpCtx, fact=None):
if self.relationshipSourceQname:
return self.relationshipSourceQname
return xpCtx.evaluateAtomicValue(self.relationshipSourceQnameExpressionProg, 'xs:QName', fact)
def evalLinkrole(self, xpCtx, fact=None):
if self.linkrole:
return self.linkrole
return xpCtx.evaluateAtomicValue(self.linkroleExpressionProg, 'xs:anyURI', fact)
def evalAxis(self, xpCtx, fact=None):
if self.axis:
return self.axis
return xpCtx.evaluateAtomicValue(self.axisExpressionProg, 'xs:token', fact)
def evalGenerations(self, xpCtx, fact=None):
if self.generations:
return self.generations
return xpCtx.evaluateAtomicValue(self.generationsExpressionProg, 'xs:integer', fact)
def cardinalityAndDepth(self, structuralNode):
return self.lenDepth(self.relationships(structuralNode),
self.axis.endswith('-or-self'))
def lenDepth(self, nestedRelationships, includeSelf):
l = 0
d = 1
for rel in nestedRelationships:
if isinstance(rel, list):
nl, nd = self.lenDepth(rel, False)
l += nl
nd += 1 # returns 0 if sublist is not nested
if nd > d:
d = nd
else:
l += 1
if includeSelf:
l += 1 # root relationships include root in addition
if includeSelf:
d += 1
return (l, d)
@property
def propertyView(self):
return ((("id", self.id),
("abstract", self.abstract),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
def __repr__(self):
return ("modelRelationshipDefinitionNode[{0}]{1})".format(self.objectId(),self.propertyView))
class ModelConceptRelationshipDefinitionNode(ModelRelationshipDefinitionNode):
def init(self, modelDocument):
super(ModelConceptRelationshipDefinitionNode, self).init(modelDocument)
def hasAspect(self, structuralNode, aspect):
return aspect == Aspect.CONCEPT
@property
def arcrole(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcrole")
@property
def arcQname(self):
arcnameElt = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcname")
if arcnameElt is not None:
return qname( arcnameElt, XmlUtil.text(arcnameElt) )
return None
@property
def linkQname(self):
linknameElt = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linkname")
if linknameElt is not None:
return qname( linknameElt, XmlUtil.text(linknameElt) )
return None
def compile(self):
if not hasattr(self, "arcroleExpressionProg"):
self.arcroleExpressionProg = XPathParser.parse(self, self.arcroleExpression, self, "arcroleExpressionProg", Trace.VARIABLE)
self.linkQnameExpressionProg = XPathParser.parse(self, self.linkQnameExpression, self, "linkQnameExpressionProg", Trace.VARIABLE)
self.arcQnameExpressionProg = XPathParser.parse(self, self.arcQnameExpression, self, "arcQnameExpressionProg", Trace.VARIABLE)
super(ModelConceptRelationshipDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
return super(ModelConceptRelationshipDefinitionNode, self).variableRefs(
[p for p in (self.arcroleExpressionProg,
self.linkQnameExpressionProg, self.arcQnameExpressionProg)
if p], varRefSet)
def evalArcrole(self, xpCtx, fact=None):
if self.arcrole:
return self.arcrole
return xpCtx.evaluateAtomicValue(self.arcroleExpressionProg, 'xs:anyURI', fact)
def evalLinkQname(self, xpCtx, fact=None):
if self.linkQname:
return self.linkQname
return xpCtx.evaluateAtomicValue(self.linkQnameExpressionProg, 'xs:QName', fact)
def evalArcQname(self, xpCtx, fact=None):
if self.arcQname:
return self.arcQname
return xpCtx.evaluateAtomicValue(self.arcQnameExpressionProg, 'xs:QName', fact)
@property
def arcroleExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcroleExpression")
@property
def linkQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linknameExpression")
@property
def arcQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcnameExpression")
def coveredAspect(self, ordCntx=None):
return Aspect.CONCEPT
def relationships(self, structuralNode):
self._sourceQname = structuralNode.evaluate(self, self.evalRelationshipSourceQname) or XbrlConst.qnXfiRoot
linkrole = structuralNode.evaluate(self, self.evalLinkrole)
if not linkrole:
linkrole = "XBRL-all-linkroles"
linkQname = (structuralNode.evaluate(self, self.evalLinkQname) or () )
arcrole = (structuralNode.evaluate(self, self.evalArcrole) or () )
arcQname = (structuralNode.evaluate(self, self.evalArcQname) or () )
self._axis = (structuralNode.evaluate(self, self.evalAxis) or () )
self._generations = (structuralNode.evaluate(self, self.evalGenerations) or () )
return concept_relationships(self.modelXbrl.rendrCntx,
None,
(self._sourceQname,
linkrole,
arcrole,
self._axis.replace('-or-self',''),
self._generations,
linkQname,
arcQname),
True) # return nested lists representing concept tree nesting
class ModelDimensionRelationshipDefinitionNode(ModelRelationshipDefinitionNode):
def init(self, modelDocument):
super(ModelDimensionRelationshipDefinitionNode, self).init(modelDocument)
def hasAspect(self, structuralNode, aspect):
return aspect == self.coveredAspect(structuralNode) or aspect == Aspect.DIMENSIONS
def aspectValue(self, xpCtx, aspect, inherit=None):
if aspect == Aspect.DIMENSIONS:
return (self.coveredAspect(xpCtx), )
return None
def aspectsCovered(self):
return {self.dimensionQname}
@property
def dimensionQname(self):
dimensionElt = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "dimension")
if dimensionElt is not None:
return qname( dimensionElt, XmlUtil.text(dimensionElt) )
return None
@property
def dimensionQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "dimensionExpression")
def compile(self):
if not hasattr(self, "dimensionQnameExpressionProg"):
self.dimensionQnameExpressionProg = XPathParser.parse(self, self.dimensionQnameExpression, self, "dimensionQnameExpressionProg", Trace.VARIABLE)
super(ModelDimensionRelationshipDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
return super(ModelDimensionRelationshipDefinitionNode, self).variableRefs(self.dimensionQnameExpressionProg, varRefSet)
def evalDimensionQname(self, xpCtx, fact=None):
if self.dimensionQname:
return self.dimensionQname
return xpCtx.evaluateAtomicValue(self.dimensionQnameExpressionProg, 'xs:QName', fact)
def coveredAspect(self, structuralNode=None):
try:
return self._coveredAspect
except AttributeError:
self._coveredAspect = self.dimRelationships(structuralNode, getDimQname=True)
return self._coveredAspect
def relationships(self, structuralNode):
return self.dimRelationships(structuralNode, getMembers=True)
def dimRelationships(self, structuralNode, getMembers=False, getDimQname=False):
self._dimensionQname = structuralNode.evaluate(self, self.evalDimensionQname)
self._sourceQname = structuralNode.evaluate(self, self.evalRelationshipSourceQname) or XbrlConst.qnXfiRoot
linkrole = structuralNode.evaluate(self, self.evalLinkrole)
if not linkrole and getMembers:
linkrole = "XBRL-all-linkroles"
dimConcept = self.modelXbrl.qnameConcepts.get(self._dimensionQname)
sourceConcept = self.modelXbrl.qnameConcepts.get(self._sourceQname)
self._axis = (structuralNode.evaluate(self, self.evalAxis) or () )
self._generations = (structuralNode.evaluate(self, self.evalGenerations) or () )
if ((self._dimensionQname and (dimConcept is None or not dimConcept.isDimensionItem)) or
(self._sourceQname and self._sourceQname != XbrlConst.qnXfiRoot and (
sourceConcept is None or not sourceConcept.isItem))):
return ()
if dimConcept is not None:
if getDimQname:
return self._dimensionQname
if sourceConcept is None:
sourceConcept = dimConcept
if getMembers:
return concept_relationships(self.modelXbrl.rendrCntx,
None,
(self._sourceQname,
linkrole,
"XBRL-dimensions", # all dimensions arcroles
self._axis.replace('-or-self',''),
self._generations),
True) # return nested lists representing concept tree nesting
if getDimQname:
if sourceConcept is not None:
# look back from member to a dimension
return self.stepDimRel(sourceConcept, linkrole)
return None
def stepDimRel(self, stepConcept, linkrole):
if stepConcept.isDimensionItem:
return stepConcept.qname
for rel in self.modelXbrl.relationshipSet("XBRL-dimensions").toModelObject(stepConcept):
if not linkrole or linkrole == rel.consecutiveLinkrole:
dim = self.stepDimRel(rel.fromModelObject, rel.linkrole)
if dim:
return dim
return None
coveredAspectToken = {"concept": Aspect.CONCEPT,
"entity-identifier": Aspect.VALUE,
"period-start": Aspect.START, "period-end": Aspect.END,
"period-instant": Aspect.INSTANT, "period-instant-end": Aspect.INSTANT_END,
"unit": Aspect.UNIT}
class ModelOpenDefinitionNode(ModelDefinitionNode):
def init(self, modelDocument):
super(ModelOpenDefinitionNode, self).init(modelDocument)
# deprecated 2013-05-17
class ModelSelectionDefinitionNode(ModelOpenDefinitionNode):
def init(self, modelDocument):
super(ModelSelectionDefinitionNode, self).init(modelDocument)
@property
def descendantArcroles(self):
return (XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011, XbrlConst.tableDefinitionNodeSelectionMessage201301, XbrlConst.tableAxisSelectionMessage2011)
def clear(self):
XPathParser.clearNamedProg(self, "selectProg")
super(ModelSelectionDefinitionNode, self).clear()
def coveredAspect(self, structuralNode=None):
try:
return self._coveredAspect
except AttributeError:
coveredAspect = self.get("coveredAspect")
if coveredAspect in coveredAspectToken:
self._coveredAspect = coveredAspectToken[coveredAspect]
else: # must be a qname
self._coveredAspect = qname(self, coveredAspect)
return self._coveredAspect
def aspectsCovered(self):
return {self.coveredAspect()}
def hasAspect(self, structuralNode, aspect):
return aspect == self.coveredAspect() or (isinstance(self._coveredAspect,QName) and aspect == Aspect.DIMENSIONS)
@property
def select(self):
return self.get("select")
def compile(self):
if not hasattr(self, "selectProg"):
self.selectProg = XPathParser.parse(self, self.select, self, "select", Trace.PARAMETER)
super(ModelSelectionDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
return super(ModelSelectionDefinitionNode, self).variableRefs(self.selectProg, varRefSet)
def evaluate(self, xpCtx, typeQname=None):
if typeQname:
return xpCtx.evaluateAtomicValue(self.selectProg, typeQname)
else:
return xpCtx.flattenSequence(xpCtx.evaluate(self.selectProg, None))
aspectNodeAspectCovered = {"conceptAspect": Aspect.CONCEPT,
"unitAspect": Aspect.UNIT,
"entityIdentifierAspect": Aspect.ENTITY_IDENTIFIER,
"periodAspect": Aspect.PERIOD}
class ModelFilterDefinitionNode(ModelOpenDefinitionNode):
def init(self, modelDocument):
super(ModelFilterDefinitionNode, self).init(modelDocument)
@property
def descendantArcroles(self):
return (XbrlConst.tableAspectNodeFilter, XbrlConst.tableAspectNodeFilterMMDD, XbrlConst.tableAspectNodeFilter201305, XbrlConst.tableFilterNodeFilter2011, XbrlConst.tableAxisFilter2011,XbrlConst.tableAxisFilter201205, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011,
XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD, XbrlConst.tableDefinitionNodeSubtree201305, XbrlConst.tableDefinitionNodeSubtree201301, XbrlConst.tableAxisSubtree2011, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011)
@property
def filterRelationships(self):
try:
return self._filterRelationships
except AttributeError:
rels = [] # order so conceptName filter is first (if any) (may want more sorting in future)
for rel in self.modelXbrl.relationshipSet((XbrlConst.tableAspectNodeFilter, XbrlConst.tableAspectNodeFilterMMDD, XbrlConst.tableAspectNodeFilter201305, XbrlConst.tableFilterNodeFilter2011, XbrlConst.tableAxisFilter2011,XbrlConst.tableAxisFilter201205)).fromModelObject(self):
if isinstance(rel.toModelObject, ModelConceptName):
rels.insert(0, rel) # put conceptName filters first
else:
rels.append(rel)
self._filterRelationships = rels
return rels
def hasAspect(self, structuralNode, aspect):
return aspect in self.aspectsCovered()
def aspectsCovered(self, varBinding=None):
try:
return self._aspectsCovered
except AttributeError:
self._aspectsCovered = set()
self._dimensionsCovered = set()
self.includeUnreportedValue = False
if self.localName == "aspectNode": # after 2-13-05-17
aspectElt = XmlUtil.child(self, self.namespaceURI, ("conceptAspect", "unitAspect", "entityIdentifierAspect", "periodAspect", "dimensionAspect"))
if aspectElt is not None:
if aspectElt.localName == "dimensionAspect":
dimQname = qname(aspectElt, aspectElt.textValue)
self._aspectsCovered.add(dimQname)
self._aspectsCovered.add(Aspect.DIMENSIONS)
self._dimensionsCovered.add(dimQname)
self.includeUnreportedValue = aspectElt.get("includeUnreportedValue") in ("true", "1")
else:
self._aspectsCovered.add(aspectNodeAspectCovered[aspectElt.localName])
else:
# filter node (prior to 2013-05-17)
for rel in self.filterRelationships:
if rel.isCovered:
_filter = rel.toModelObject
self._aspectsCovered |= _filter.aspectsCovered(varBinding)
self._dimensionsCovered = set(aspect for aspect in self._aspectsCovered if isinstance(aspect,QName))
if self._dimensionsCovered:
self._aspectsCovered.add(Aspect.DIMENSIONS)
return self._aspectsCovered
def aspectValue(self, xpCtx, aspect, inherit=None):
if aspect == Aspect.DIMENSIONS:
return self._dimensionsCovered
# does not apply to filter, value can only come from a bound fact
return None
def filteredFactsPartitions(self, xpCtx, facts):
filteredFacts = formulaEvaluatorFilterFacts(xpCtx, VariableBinding(xpCtx),
facts, self.filterRelationships, None)
if not self.includeUnreportedValue:
                # remove facts with unreported dimension values
reportedAspectFacts = set()
for fact in filteredFacts:
if all(fact.context is not None and
isinstance(fact.context.dimValue(dimAspect), ModelDimensionValue)
for dimAspect in self._dimensionsCovered):
reportedAspectFacts.add(fact)
else:
reportedAspectFacts = filteredFacts
return factsPartitions(xpCtx, reportedAspectFacts, self.aspectsCovered())
@property
def propertyView(self):
return ((("id", self.id),
("aspect", ", ".join(aspectStr(aspect)
for aspect in self.aspectsCovered()
if aspect != Aspect.DIMENSIONS)),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
from arelle.ModelObjectFactory import elementSubstitutionModelClass
elementSubstitutionModelClass.update((
# IWD
(XbrlConst.qnTableTableMMDD, ModelTable),
(XbrlConst.qnTableBreakdownMMDD, ModelBreakdown),
(XbrlConst.qnTableRuleSetMMDD, ModelRuleSet),
(XbrlConst.qnTableRuleNodeMMDD, ModelRuleDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNodeMMDD, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNodeMMDD, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableAspectNodeMMDD, ModelFilterDefinitionNode),
# PWD 2013-08-28
(XbrlConst.qnTableTable, ModelTable),
(XbrlConst.qnTableBreakdown, ModelBreakdown),
(XbrlConst.qnTableRuleNode, ModelRuleDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNode, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNode, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableAspectNode, ModelFilterDefinitionNode),
# PWD 2013-05-17
(XbrlConst.qnTableTable201305, ModelTable),
(XbrlConst.qnTableBreakdown201305, ModelBreakdown),
(XbrlConst.qnTableRuleNode201305, ModelRuleDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNode201305, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNode201305, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableAspectNode201305, ModelFilterDefinitionNode),
# PWD 2013-01-17
(XbrlConst.qnTableTable201301, ModelTable),
(XbrlConst.qnTableRuleNode201301, ModelRuleDefinitionNode),
(XbrlConst.qnTableCompositionNode201301, ModelCompositionDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNode201301, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNode201301, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableSelectionNode201301, ModelSelectionDefinitionNode),
(XbrlConst.qnTableFilterNode201301, ModelFilterDefinitionNode),
(XbrlConst.qnTableTupleNode201301, ModelTupleDefinitionNode),
# PWD 2011 Montreal
(XbrlConst.qnTableTable2011, ModelTable),
(XbrlConst.qnTableRuleAxis2011, ModelRuleDefinitionNode),
(XbrlConst.qnTableCompositionAxis2011, ModelCompositionDefinitionNode),
(XbrlConst.qnTableConceptRelationshipAxis2011, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableSelectionAxis2011, ModelSelectionDefinitionNode),
(XbrlConst.qnTableFilterAxis2011, ModelFilterDefinitionNode),
(XbrlConst.qnTableTupleAxis2011, ModelTupleDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipAxis2011, ModelDimensionRelationshipDefinitionNode),
# Eurofiling
(XbrlConst.qnEuTable, ModelEuTable),
(XbrlConst.qnEuAxisCoord, ModelEuAxisCoord),
))
# import after other modules resolved to prevent circular references
from arelle.FunctionXfi import concept_relationships
|
[
"fischer@markv.com"
] |
fischer@markv.com
|
2c76743609405faec1c19b81f0f1195064f29b9a
|
0672600aca6eecc0e2b51ec794063103f5effd43
|
/jobads/migrations/0024_auto_20210112_2355.py
|
8789a38c385842dcfb66e6a453d74d908cf1899a
|
[] |
no_license
|
pxian/careercare
|
871f22459e1544e3e78b71dea1ba393c5d6f3d03
|
91e483bb65affb9d415e53fb4142ec1e9b2dbccc
|
refs/heads/main
| 2023-02-20T18:45:38.175948
| 2021-01-25T03:30:00
| 2021-01-25T03:30:00
| 330,128,770
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,986
|
py
|
# Generated by Django 3.1.3 on 2021-01-12 15:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('jobads', '0023_merge_20210111_2144'),
]
operations = [
migrations.CreateModel(
name='test_employee',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, null=True)),
('openess', models.IntegerField()),
('conscientiousness', models.IntegerField()),
('extraversion', models.IntegerField()),
('agreeableness', models.IntegerField()),
('neuroticism', models.IntegerField()),
('chart', models.CharField(max_length=100, null=True)),
],
),
migrations.AlterField(
model_name='jobad',
name='closing_date',
field=models.DateField(blank=True, null=True, verbose_name='Date'),
),
migrations.AlterField(
model_name='jobad',
name='max_salary',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='jobad',
name='min_salary',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.DeleteModel(
name='personality',
),
migrations.AddField(
model_name='test_employee',
name='JobAd_id',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='jobads.jobad'),
),
migrations.AddField(
model_name='test_employee',
name='job_id',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='jobads.joblist'),
),
]
|
[
"phooixian@hotmail.com"
] |
phooixian@hotmail.com
|
a8569f82ed1a73ffbd59f8b49866754ec53e411d
|
9dfb3372a1e4516d970a6e9d0a9fd8360580eae7
|
/python pySerial/maping_data.py
|
feb9a76200b26899373a1eeba25711e6b4835877
|
[] |
no_license
|
clambering-goat/cameron_pyton
|
d1cd0e7b04da14e7ba4f89dcb4d973f297a4626c
|
df0b0365b86e75cfcfc2c1fc21608f1536a3b79f
|
refs/heads/master
| 2021-07-14T20:37:37.021401
| 2019-02-28T07:52:11
| 2019-02-28T07:52:11
| 137,251,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 418
|
py
|
import serial
y_points=[]
with serial.Serial('COM4', 9600, timeout=1) as ser:
for q in range(20000):
        line = ser.readline()
        x = line.decode("utf-8").strip()
        #print(x)
        if x:  # skip empty reads (readline returns b'' when the timeout expires)
            y_points.append(int(x))
import matplotlib.pyplot as plt
x_points = list(range(len(y_points)))
plt.plot(x_points,y_points)
plt.ylabel('some numbers')
plt.xlabel('some numbers')
plt.show()
|
[
"camerondrain@gmail.com"
] |
camerondrain@gmail.com
|
df04408ea19ba007c8c896dee247c69359930c60
|
7cd865dbf48dfdf9bfe7404b7046e6d026e24b87
|
/tree/BST/Print BST keys in the given range.py
|
29397ca31a77eca6c25a0344f7712e26834e7f32
|
[] |
no_license
|
rishikeshpuri/Algorithms-and-Data-Structure
|
93e718f7f73cdf8eacfd56cb6de651dbe5ba0eec
|
6d9d7e2003327461a8bc5ac00d2037bc0d61f3f3
|
refs/heads/master
| 2020-12-15T17:20:57.893997
| 2020-01-20T20:43:56
| 2020-01-20T20:43:56
| 235,192,251
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 595
|
py
|
class Node:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
def print_range(root, k1, k2):
if root is None:
return
if k1 < root.value:
print_range(root.left, k1,k2)
if k1 <=root.value and k2 >= root.value:
print(root.value, end=' ')
if k2 > root.value:
print_range(root.right, k1, k2)
k1= 10
k2 =25
root = Node(20)
root.left = Node(8)
root.right = Node(22)
root.left.left = Node(4)
root.left.right = Node(12)
print_range(root, k1, k2)
print()
print()
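# The call below uses removeOutsideRange(), which the original script never
# defines. A minimal sketch (assumption): trim the BST so that only keys in
# [k1, k2] remain, returning the new root of the trimmed tree.
def removeOutsideRange(node, k1, k2):
    if node is None:
        return None
    node.left = removeOutsideRange(node.left, k1, k2)
    node.right = removeOutsideRange(node.right, k1, k2)
    if node.value < k1:
        return node.right   # node (and its left subtree) fall below the range
    if node.value > k2:
        return node.left    # node (and its right subtree) fall above the range
    return node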
root = removeOutsideRange(root, k1, k2)
|
[
"noreply@github.com"
] |
rishikeshpuri.noreply@github.com
|
b45e4c13c16af6af208d3b3e8386d1021777db67
|
685a0a66d6499849f9ccdbca59bf79f2f64a4203
|
/manpy/simulation/applications/CapacityStations/CapacityStation.py
|
3a8d13e57b5ff2e1a1bb870295e00da860ca0b75
|
[
"MIT"
] |
permissive
|
sunhughees/manpy
|
b05271f53875f8b0f5a09b2f0df01cc6c05df869
|
0056eb6e93cba3bf2a1061f9170aa2a1edf248f6
|
refs/heads/master
| 2022-12-14T10:06:59.334817
| 2020-08-20T17:31:30
| 2020-08-20T17:31:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,222
|
py
|
# ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM. If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================
"""
Created on 5 June 2013
@author: George
"""
"""
a station that can process a specified capacity in every time period
"""
from manpy.simulation.Queue import Queue
import simpy
# ===========================================================================
# the CapacityStation object
# ===========================================================================
class CapacityStation(Queue):
family = "CapacityStation"
# ===========================================================================
# the __init__ method of the CapacityStation
# ===========================================================================
def __init__(
self,
id,
name,
capacity=float("inf"),
intervalCapacity=[],
schedulingRule="FIFO",
gatherWipStat=False,
sharedResources={},
intervalCapacityStart=0,
intervalCapacityExceptions={},
notProcessOutsideThreshold=False,
**kw
):
Queue.__init__(self, id, name, capacity=capacity)
# a list that holds the capacity (manhours) that is available in each interval
self.intervalCapacity = intervalCapacity
# a list that holds the capacity (manhours) that is available in each interval for the remaining time
self.remainingIntervalCapacity = list(self.intervalCapacity)
        # blocks entry to the capacity station, so it can be set to accept entities only at certain moments of simulation time
self.isLocked = True
# dict that holds information if station shares workpower with some other station
self.sharedResources = sharedResources
self.intervalCapacityStart = intervalCapacityStart
self.intervalCapacityExceptions = intervalCapacityExceptions
self.notProcessOutsideThreshold = int(notProcessOutsideThreshold)
def initialize(self):
Queue.initialize(self)
# if the station shares resources and the capacity is not defined in this
# then read it from some other of the sharing stations
if not self.intervalCapacity and self.sharedResources:
for stationId in self.sharedResources.get("stationIds", []):
import manpy.simulation.Globals as Globals
station = Globals.findObjectById(stationId)
if station.intervalCapacity:
self.intervalCapacity = station.intervalCapacity
break
# initialize variables
self.remainingIntervalCapacity = list(self.intervalCapacity)
for i in range(self.intervalCapacityStart):
self.remainingIntervalCapacity.pop(0)
self.isLocked = True
self.utilisationDict = [] # a list of dicts for the utilization results
self.detailedWorkPlan = [] # a list of dicts to keep detailed data
from manpy.simulation.Globals import G
if hasattr(G, "CapacityStationList"):
G.CapacityStationList.append(self)
else:
G.CapacityStationList = []
G.CapacityStationList.append(self)
def canAccept(self, callerObject=None):
if self.isLocked:
return False
return Queue.canAccept(self)
# =======================================================================
# outputs results to JSON File
# =======================================================================
def outputResultsJSON(self):
from manpy.simulation.Globals import G
json = {
"_class": "manpy.%s" % self.__class__.__name__,
"id": self.id,
"family": self.family,
"results": {},
}
if G.numberOfReplications == 1:
# if we had just one replication output the results as numbers
json["results"]["capacityUsed"] = self.utilisationDict
meanUtilization = 0
for entry in self.utilisationDict:
meanUtilization += entry["utilization"] / float(
len(self.utilisationDict)
)
assert (entry["utilization"]) < 1.00001, "utilization greater than 1"
json["results"]["meanUtilization"] = meanUtilization
json["results"]["detailedWorkPlan"] = self.detailedWorkPlan
G.outputJSON["elementList"].append(json)
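# Usage sketch (assumption -- the id, name and capacity profile below are
# illustrative only, not taken from this module): a station offering 8
# man-hours in each of five periods.
# station = CapacityStation(id="CS1", name="Assembly",
#                           intervalCapacity=[8, 8, 8, 8, 8])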
|
[
"pedro@datarevenue.com"
] |
pedro@datarevenue.com
|
c07aa82c886d791ed37e80ecf66b26fe3ba26449
|
f59860bb4d04007cf03258753aefcbf58e760db0
|
/music/migrations/0005_song_datetime.py
|
a64764e5215f82e94025a21d14a4720153be91ab
|
[] |
no_license
|
Arefeh902/station_49
|
fc306d7668d64c68df7dba35adbdc25d5600544a
|
3076e4ab616759f5aa0a973525c0436b603f942f
|
refs/heads/master
| 2023-07-01T10:25:39.820956
| 2021-08-10T18:47:28
| 2021-08-10T18:47:28
| 391,368,241
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 386
|
py
|
# Generated by Django 2.1.9 on 2021-08-07 08:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('music', '0004_auto_20210807_0806'),
]
operations = [
migrations.AddField(
model_name='song',
name='datetime',
field=models.DateTimeField(auto_now=True),
),
]
|
[
"alimahdiyar77@gmail.com"
] |
alimahdiyar77@gmail.com
|
5f7222976fb35436291dddf6bb8506aa13684468
|
c6e2731a9d9757cb37c849266d7dd68ff8f1e879
|
/accounts/migrations/0001_initial.py
|
c57fcbfff1355c533895c1fde4a2580a33ef48c5
|
[] |
no_license
|
lucaskruk13/dogfightWebsiteNew
|
aa989d344b87b050fd3691fcbb132c6050528c02
|
d9a05efbdbb02b24ab5e9707aba1bc69e38f825a
|
refs/heads/master
| 2020-04-17T05:49:09.325257
| 2019-01-17T21:16:43
| 2019-01-17T21:16:43
| 166,298,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,101
|
py
|
# Generated by Django 2.1.5 on 2019-01-17 03:19
import accounts.models
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('feed', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bio', models.TextField(blank=True, max_length=500)),
('location', models.CharField(blank=True, max_length=30)),
('birth_date', models.DateField(blank=True, null=True)),
('handicap', models.CharField(default=0, max_length=6, validators=[django.core.validators.RegexValidator(code='invalid_handicap', message='Invalid Handicap', regex='^[+]?\\d*\\.?\\d*$')])),
('initial', models.BooleanField(default=True)),
('profile_image', models.ImageField(blank=True, null=True, upload_to=accounts.models.user_directory_path)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Scores',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('score', models.IntegerField(default=0)),
('created_at', models.DateTimeField(auto_now_add=True)),
('countable', models.BooleanField(default=False)),
('dogfight', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='scores_dogfight', to='feed.Dogfight')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scores', to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"skywalker@lucass-air.lan"
] |
skywalker@lucass-air.lan
|
84b5119f4a7da520c9709538e33f4b9ed1d635b2
|
ab499e9d6927ded1e11874975bc12c21a107973b
|
/Code - Data_Cleaning_Analysis/taxi6/reduce.py
|
b486e29d2b0040c273bfc39d22674ae22a857757
|
[] |
no_license
|
adewin/NYC-Weather-vs-Taxi-analysis
|
5dc6a6df5ba8007370b94cc5508387f13b8d0228
|
9b7718fed555adfa95be8bc4283f9f304c7e1627
|
refs/heads/master
| 2020-07-01T05:53:19.696772
| 2017-04-29T16:34:20
| 2017-04-29T16:34:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 405
|
py
|
#!/usr/bin/python
import sys

# Streaming reducer: input is sorted "key\tvalue" lines; emits "key,count".
oldKey = None
totalSum = 0
for line in sys.stdin:
    data = line.strip().split("\t")
    if len(data) != 2:
        continue
    thisKey, thisValue = data
    if oldKey and oldKey != thisKey:
        print("%s,%i" % (oldKey, totalSum))
        totalSum = 0
    oldKey = thisKey
    totalSum += 1
if oldKey is not None:
    print("%s,%i" % (oldKey, totalSum))
|
[
"da1722@nyu.edu"
] |
da1722@nyu.edu
|
72467a52dc9e6ecba8e2954a405841694aeb43f4
|
6168968d8dd813a9070f87fb2309366852a1d627
|
/run.py
|
8271c3b793b6f476b9de2f50e899c0a9078d8f16
|
[] |
no_license
|
Edb83/love-sandwiches
|
58d0b92dccc46b3738eb0a8b41b618020eebe83d
|
6ac727b610d54ee31e48a98dce0d0dafdde7bc24
|
refs/heads/main
| 2023-07-29T19:09:46.819604
| 2021-09-08T18:13:08
| 2021-09-08T18:13:08
| 403,935,070
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,131
|
py
|
import gspread
from google.oauth2.service_account import Credentials
from pprint import pprint
SCOPE = [
"https://www.googleapis.com/auth/spreadsheets",
"https://www.googleapis.com/auth/drive.file",
"https://www.googleapis.com/auth/drive"
]
CREDS = Credentials.from_service_account_file('creds.json')
SCOPED_CREDS = CREDS.with_scopes(SCOPE)
GSPREAD_CLIENT = gspread.authorize(SCOPED_CREDS)
SHEET = GSPREAD_CLIENT.open('love_sandwiches')
def get_sales_data():
"""
Get sales figures input from user
"""
while True:
print("Please enter sales data from the last market.")
print("Data should be six numbers, separated by commas.")
print("Examples: 10,20,30,40,50,60\n")
data_str = input("Enter your data here: ")
sales_data = data_str.split(',')
if validate_data(sales_data):
print("Data is valid!")
break
return sales_data
def validate_data(values):
"""
Inside the try, converts all string values into integers.
Raises ValueError if strings cannot be converted to int,
or if there aren't exactly 6 values.
"""
try:
[int(value) for value in values]
if len(values) != 6:
raise ValueError(
f"6 values required, you provided {len(values)}"
)
except ValueError as e:
print(f"Invalid data: {e}, please try again.\n")
return False
return True
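# Quick check (sketch): validate_data expects exactly six integer strings.
# validate_data(['10', '20', '30', '40', '50', '60'])  -> True
# validate_data(['10', 'x'])  -> False (prints the ValueError message)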
def update_worksheet(data, worksheet):
"""
Receives a list of integers to be inserted into a worksheet
Updates the relevant worksheet with the data provided
"""
print(f"Updating {worksheet} worksheet...\n")
worksheet_to_update = SHEET.worksheet(worksheet)
worksheet_to_update.append_row(data)
print(f"{worksheet} worksheet updated successfully.\n")
def calculate_surplus(sales_row):
"""
Compare sales with stock and calculate the surplus for each item type.
The surplus is defined as the sales figure subtracted from the stock:
    A positive surplus indicates waste;
    a negative surplus indicates extras made after stock sold out.
"""
print("Calculating surplus data...\n")
stock_data = SHEET.worksheet('stock').get_all_values()
stock_row = stock_data[-1]
surplus_data = []
for stock, sales in zip(stock_row, sales_row):
surplus = int(stock) - sales
surplus_data.append(surplus)
return surplus_data
def get_last_5_entries_sales():
"""
    Collects columns of data from the sales worksheet, collecting
the last 5 entries for each sandwich and returns the data
as a list of lists
"""
sales = SHEET.worksheet('sales')
columns = []
for i in range(1, len(sales.get_all_values()[0]) + 1):
column = sales.col_values(i)[-5:]
columns.append(column)
return columns
def calculate_stock_data(data):
"""
Calculate average stock for each item type, adding 10%
"""
print("Calculating stock data...\n")
new_stock_data = []
for column in data:
int_column = [int(num) for num in column]
average = sum(int_column) / len(int_column)
stock_num = average * 1.1
new_stock_data.append(round(stock_num))
return new_stock_data
def get_stock_values(data):
"""
Create dictionary using sheet headings and values from
data passed in
"""
print("Make the following numbers of sandwiches for next market:\n\n")
headings = SHEET.worksheet('stock').get_all_values()[0]
result = dict(zip(headings, data))
print(f"{result}\n")
return result
def main():
"""
Run all program functions
"""
data = get_sales_data()
sales_data = [int(num) for num in data]
update_worksheet(sales_data, 'sales')
new_surplus_data = calculate_surplus(sales_data)
update_worksheet(new_surplus_data, 'surplus')
sales_columns = get_last_5_entries_sales()
stock_data = calculate_stock_data(sales_columns)
update_worksheet(stock_data, 'stock')
get_stock_values(stock_data)
print("Welcome to Love Sandwiches Data Automation")
main()
|
[
"62900492+Edb83@users.noreply.github.com"
] |
62900492+Edb83@users.noreply.github.com
|
c7718ce92c4ae61570c07bd6d5c0985424cb7b5d
|
922da7c12b4f675c9a538c710e25752322918106
|
/archive/mitsuscreenshots/mitsuscreenshots/main.py
|
6823005f0611350fc5fbd93fc99407067fb1e29b
|
[
"Unlicense",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] |
permissive
|
NikaDark16/python-projects
|
412b991086b4cde49576844792d0823415ef1aaa
|
569dae888d3dbf05fc39b759de5635164a79922c
|
refs/heads/master
| 2021-05-23T12:20:12.845526
| 2020-01-27T11:33:31
| 2020-01-27T11:33:31
| 253,283,491
| 3
| 1
|
Unlicense
| 2020-09-30T19:20:43
| 2020-04-05T16:45:22
| null |
UTF-8
|
Python
| false
| false
| 907
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import mitsuscreenshots.cli as cli
import mitsuscreenshots.gui as gui
import mitsuscreenshots.organize as organize
__author__ = "IceArrow256"
__version__ = '3'
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', help='show program\'s config path and exit', action='store_true')
parser.add_argument('-V', '--version', help='show program\'s version number and exit', action='store_true')
parser.add_argument('--gui', help='launch the MitsuScreenshots GUI', action='store_true')
args = parser.parse_args()
if args.version:
print("MitsuScreenshots ({})".format(__version__))
elif args.config:
print("MitsuScreenshots config path: " + organize.get_config_path())
elif args.gui:
gui.main()
else:
cli.main()
if __name__ == '__main__':
main()
|
[
"icearrow256@gmail.com"
] |
icearrow256@gmail.com
|
02e5bd573ae6af8746e876ded4b58af4d7b07a9d
|
22f63ddb67d7b170754550fba80277461f90b23f
|
/Bump_attractor_network.py
|
9e22e8d725be2bd8213bd0fe99b3aca36ca4ae10
|
[] |
no_license
|
HenriAton/CA6
|
bd1745697af067b6e6751f4554275bf1b697563c
|
18e5ccce0029cf3ac9cddc2b6fff751bf67aba52
|
refs/heads/master
| 2021-08-30T06:02:45.907256
| 2017-12-16T10:04:35
| 2017-12-16T10:04:35
| 109,322,073
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,953
|
py
|
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp
from scipy import signal
import os #to create directory if needed
## Define the parameters
#Parameters model
#Number of neurons
N=360
# All-to-all connectivity matrices
W0=1 #not given in the paper
# Strength of excitation
GEE=6 # strength of excitation to excitatory neurons
GEI=4 # strength of excitation to inhibitory neurons
GIE=3.4
GII=0.85
#Initial currents
Ie0=0.2
Ii0=0.5
# Input-output function
Im=0
#Initial firing-rates
re=np.zeros(N)
ri=np.zeros(N)
#Firing-rate
sigmae=1
sigmai=3
taue=20
taui=10
#Stimulus
stimulus=np.ones(N)*100
#Parameters simulation
#Time
deltat = 2
T = 4200. # ms
t = np.linspace (0, T, int(T/deltat))
#Realtime(): Initialization of store variables
stock_ex=np.zeros(N)
stock_in=np.zeros(N)
#Sim(): Steps
tstep=500
nbstep=3
nbneuron=1
#variator()
stepsim=1
lim=5
#plot_var_param(): given-time
given_time=10
## Build the model
# Connectivity matrices
WII=WIE=np.ones((N,N))
WEI=-WIE #inhibition =>useless
def ex_matrix(x):
WEE=np.zeros((N,N))
window = signal.gaussian(N, std=100)
window=window*(-1)+1 #reversed gaussian
for i in range(len(WEE)):
for j in range(len(WEE)):
WEE[i,j]=window[abs(i-j)]
WEE=WEE/x
return(WEE)
WEE=ex_matrix(100)
# Input-output function (Phi)
def transfer(x):
if x<0:
x=x*x
elif x>0 and x<1:
x=x
elif x>=1:
x=np.sqrt(4*x-3)
return(x)
#Phi output
def phi(y):
for i in range(len(y)):
y[i]=transfer(y[i])
return(y)
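# Vectorized alternative (sketch): the same piecewise transfer function as
# transfer()/phi() above, but without the Python loop and without mutating
# its argument.
def phi_vectorized(y):
    y = np.asarray(y, dtype=float)
    return np.piecewise(y,
                        [y < 0, (y >= 0) & (y < 1), y >= 1],
                        [lambda v: v * v, lambda v: v, lambda v: np.sqrt(4 * v - 3)])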
# Simulate
#One step
def realtime(t, stim):
for i in range (t): #for i in range ( len(t) - 1 ):
#take input
global stock_ex
global stock_in
global Ie
global Ii
global re
global ri
#build the list
stock_ex=np.vstack([stock_ex,re])
stock_in=np.vstack([stock_in,ri])
#update values of excitatory neurons
Ie=GEE*np.matmul(WEE,re)+(Ie0-GIE*np.mean(ri))*np.ones(N)+stim
gaussian_noise_ex=sigmae*np.random.randn(N) #ok
        re = re + deltat/taue*(-re + phi(Ie) + gaussian_noise_ex)  # Euler step towards phi(Ie) plus additive noise
#update values of inhibitory neurons
Ii=(GEI*np.mean(re)-GII*np.mean(ri)+Ii0)*np.ones(N)
gaussian_noise_inh=sigmai*np.random.randn(N)
ri=ri+deltat/taui*(-ri+phi(Ii)+gaussian_noise_inh)
return
#The four steps
def sim():
#take input
global tstep
global nbstep
#execute steps
if nbstep>0:
realtime(tstep, 0)
if nbstep>1:
realtime(tstep, stimulus)
if nbstep>2:
realtime(tstep,0)
if nbstep>3:
realtime(tstep, -stimulus)
return
#Plot xth excitatory neuron and xth inhibitory neuron
def plot_neuron(x):
ne=np.zeros(tstep*nbstep) #not efficient
ni=np.zeros(tstep*nbstep)
for i in range(tstep*nbstep):
ne[i]=stock_ex[i][x]
ni[i]=stock_in[i][x]
plt.plot(ne)
plt.plot(ni)
return
#Simulate and plot
def total():
sim()
plt.figure(1)
plt.title(str(N)+' excitatory neurons')
plt.plot(stock_ex)
plt.figure(2)
plt.plot(stock_in)
plt.title(str(N)+' inhibitory neurons')
plt.figure(3)
plot_neuron(nbneuron)
    plt.title(' Excitatory and inhibitory neurons n°' + str(nbneuron))
plt.show()
return
#Plot the firing rate over time for different strengths of the parameter
def fire_stim(keep, plot="yes",param="inconnu", save="no",type_neuron="exc"): #give param between quotes
count=0
for i in range(int(lim/stepsim)):
count+=stepsim
plt.plot(keep[i], label = str(count))
plt.legend(loc = 4)
    #optional, to save the plot if needed
if save=="yes":
#create dir
address_dir='/home/lucdufour/Documents/Cogmaster/Cours/S3/CA6/Project/Bump_attractor_model/variator/'+param+"/"
if not os.path.exists(address_dir):
os.makedirs(address_dir)
#save figure
address='/home/lucdufour/Documents/Cogmaster/Cours/S3/CA6/Project/Bump_attractor_model/variator/'+param+"/"+'variator_'+param+"_"+type_neuron+'.png'
print(address)
plt.savefig(address)
#plot figure by default
if plot=="yes":
plt.show()
return
#Plot at a given time the firing rate according to the strength of the parameter
def plot_var_param(given_time, plot="yes",param="inconnu", save="no"):
ne=[]
ni=[]
absciss=np.arange(stepsim, lim+stepsim, stepsim)
for i in range(int(lim/stepsim)):
ne.append(keep_ex[i][given_time])
ni.append(keep_in[i][given_time])
plt.plot(absciss, ne)
plt.plot(absciss, ni)
    #optional, to save the plot if needed
if save=="yes":
#create dir
address_dir='/home/lucdufour/Documents/Cogmaster/Cours/S3/CA6/Project/Bump_attractor_model/variator/'+param+"/"
if not os.path.exists(address_dir):
os.makedirs(address_dir)
#save figure
address='/home/lucdufour/Documents/Cogmaster/Cours/S3/CA6/Project/Bump_attractor_model/variator/'+param+"/"+'var_param_'+param+"_"+'.png'
print(address)
plt.savefig(address)
#plot figure by default
if plot=="yes":
plt.show()
return
#Save and/or plot figures created thanks to variator
def save_plot(param,plot="no",save="yes"):
if save=="yes":
plt.close()
fire_stim(keep_ex,"no",param,"yes","exc")
plt.close()
fire_stim(keep_in,"no",param,"yes","inh")
plt.close()
plot_var_param(given_time,"no",param,"yes")
plt.close()
if plot=="yes":
plt.figure(1)
plt.title(str(N)+' excitatory neurons')
fire_stim(keep_ex,"yes",param,"no","exc")
plt.figure(2)
plt.title(str(N)+' inhibitory neurons')
fire_stim(keep_in,"yes",param,"no","inh")
plt.figure(3)
plt.title(' Excitatory and inhibitory neurons')
plot_var_param(given_time,"yes",param,"no")
return
#Quick variator and save/plot
def all_included(dep,arr,varia,zero,param,plot="yes",save="no"):
quick_variator(dep,arr,varia,zero)
save_plot(param,plot,save)
return
#Close windows
def close(x):
for i in range(x):
plt.close()
return
##Simulate and plot
#Simulate
sim()
#Plot all excitatory neurons firing rate
plt.plot(stock_ex) #works => very nice figure +++
plt.show()
#Plot all inhibitory neurons firing rate
plt.plot(stock_in)
plt.show()
#Compare the two
plt.figure(1) #to let the index start at 1
plt.plot(stock_ex)
plt.figure(2)
plt.plot(stock_in)
plt.show()
#Plot xth excitatory neuron and xth inhibitory neuron
plot_neuron(nbneuron)
plt.show()
#Simulate and plot
total()
#Stimulus
# Von Mises-shaped stimulus bump (converted from the MATLAB-style snippet in
# the original; kappa and stim were not defined there, so the values below
# are assumptions).
kappa = 1.0   # concentration of the bump (assumed)
stim = 100    # peak stimulus strength (assumed, matching the flat stimulus above)
theta = np.arange(N)/N*2*np.pi
theta = theta - np.pi
v = np.exp(kappa*np.cos(theta))
v = v/np.sum(v)
stimulus = stim*v
## Analysis: change the strength of a parameter
# Change the strength of the parameter
def variator(varia, zero): #zero=0 or zero=np.zeros(N)
#take input
global keep_ex
global keep_in
global stock_ex #even if I don't modify the variable, I have to call it with global +++
global stock_in
global Ie0 # vary with the parameter
#variation
for e in range(int(lim/stepsim)):
#reinitialize essential parameters
re=np.zeros(N)
ri=np.zeros(N)
stock_ex=np.zeros(N)
stock_in=np.zeros(N)
        #reinitialize optional parameters (to play with)
GEE=6
GEI=4
GIE=3.4
GII=0.85
Ie0=0.2
Ii0=0.5
stimulus=np.ones(N)*100
WEE=ex_matrix(100)
#parameters of variator
row_ex=[]
row_in=[]
#vary
varia=zero
varia=stepsim+e*stepsim
Ie0=varia # vary with the parameter
#execution
sim()
for j in range(tstep*nbstep):
row_ex.append(np.mean(stock_ex[j]))
row_in.append(np.mean(stock_in[j]))
        print(row_ex)  # debug output
keep_ex=np.vstack([keep_ex,row_ex])
keep_in=np.vstack([keep_in,row_in])
return
#quick variator
def quick_variator(stepsi, li,varia,zero):
#take input
global stepsim
global lim
global keep_ex
global keep_in
#modify stepsim and lim for the plot
stepsim=stepsi
lim=li
#reinitialize keep_ex and keep_in
keep_ex=np.zeros(tstep*nbstep)
keep_in=np.zeros(tstep*nbstep)
#variator
variator(varia,zero)
return
##Try
#Parameter studied = GEI
quick_variator(1,5,GEI,0)
#Plot the firing rate over time for different strengths of the stimulus
#Plot at a given time the firing rate according to the strength of the stimulus
save_plot("GEI","yes","no") #to vary
close(3)
#Parameter studied = stimulus
quick_variator(50, 250,stimulus,np.zeros(N))
save_plot("stimulus","yes","no") #to vary
close(3)
all_included(50,150,stimulus,np.zeros(N),"stimulus",plot="yes",save="no") #to vary
close(3)
#Parameter studied = Ie0
all_included(0.2,1,Ie0,0,"Ie0",plot="yes",save="yes")
close(3)
## Draft
fe = 2
# fe = a   # 'a' is not defined at this point in the draft
print(fe)
stimulus=2
def ttt(var=stimulus):
var=3
print(stimulus)
return
ttt(stimulus)
#quick variator
def quick_variator(stepsi, li):
#take input
global stepsim
global lim
global keep_ex
global keep_in
#modify stepsim and lim for the plot
stepsim=stepsi
lim=li
#reinitialize keep_ex and keep_in
keep_ex=np.zeros(tstep*nbstep)
keep_in=np.zeros(tstep*nbstep)
#variator
variator(stimulus,np.zeros(N)) #to vary
return
#2 possible errors: the variable is not global, so it stays at its initial value; or sim() doesn't change with the variable because it doesn't take it as input => I have to check that.
#Calling a variable with global: if I modify the variable after declaring it global, the modification is available everywhere, including recursively inside the function
a=34
def check():
global a
print(a)
a=2
print(a)
rer()
return
#a=rer()
#return(a)
check()
def rer():
global a
print(a)
return(a)
rer()
#Bug
def var_stim(stepsim,lim):
global keep
for i in range(int(lim/stepsim)):
re=np.zeros(N)
ri=np.zeros(N)
Ie0=0.2
Ii0=0.5
stock_ex=np.zeros(N)
stock_in=np.zeros(N)
row=[]
stimulus=np.zeros(N)+stepsim
sim(tstep, nbstep)
for i in range(len(stock_ex)-1):
row.append(np.mean(stock_ex[i]))
keep=np.vstack([keep,row])
return
lim=150
stepsim=50
keep=np.zeros(tstep*nbstep)
for i in range(int(lim/stepsim)):
re=np.zeros(N)
ri=np.zeros(N)
Ie0=0.2
Ii0=0.5
stock_ex=np.zeros(N)
stock_in=np.zeros(N)
row=[]
stimulus=np.zeros(N)+stepsim
sim(tstep, nbstep)
for i in range(len(stock_ex)-1):
row.append(np.mean(stock_ex[i]))
keep=np.vstack([keep,row])
print(len(row))
print(len(keep))
row=[]
for i in range(len(stock_ex)):
row.append(np.mean(stock_ex[i]))
print(row)
print(stock_ex)
print(len(row))
#Phi input
#Ie=GEE*np.matmul(WEE,re)+(Ie0-GIE*np.mean(ri))*np.ones(N) #be careful with matrix arithmetic => without np.matmul, errors can occur silently => for example a matrix (WEE) multiplied by an array (re) gives a matrix instead of an array
#Ii=(GEI*np.mean(re)-GII*np.mean(ri)+Ii0)*np.ones(N)
#Can't modify global variables in the script
stimulus = np.ones(N)*200
def try_stim():   # 'try' is a reserved keyword, so the draft function is renamed
    stimulus = np.ones(N)*200
    for _ in range(5):
        essai()
    return
def essai():
    global stimulus   # without this, 'stimulus' below would be an unbound local
    stimulus = stimulus + 1
    return
essai()
#Useless packages
import networkx as nx # module useful to manipulate graphs such as connectivity matrices
from scipy.fftpack import fft, fftshift
#Print first neuron firing rate
r1=np.zeros(3)
re=np.append([[1, 2, 3], [4, 5, 6]], [[7, 8, 9]], axis=0)
for i in range(len(re-1)):
r1[i]=re[i][0]
print(r1)
#Append list vertically
stock=np.zeros(5)
for i in range(4):
stock=np.vstack([stock,np.ones(5)])
print(stock)
#my way
Ie=WEE[1]*re0*GEE-WEI[1]*ri0*GIE+Ie0
Ii=WIE[1]*re0*GEI-WII[1]*ri0*GII+Ii0
#sum
def output_neurons_in(u):
output_inh=0
for v in range(len(WIE)):
output_inh+=WIE[u,v]*1*GEI-WII[u,v]*1*GII
return(int(output_inh))
ess=output_neurons_in(2)
print(ess)
print((4-0.85)*360)
#transfer
a=transfer(output_neurons_in(1))
print(a)
#firing-rate
#re[0]=0?
print(ri)
for i in range ( len(t) - 1 ):
re[i+1]=re[i]+deltat/T*(re[i]+transfer(output_neurons_in(i)+Ie0)+gaussian_noise_ex)
for i in range ( len(t) - 1 ):
print(transfer(output_neurons_in(i)))
#Initial currents
Ie0par=0.5
Ii0par=0.2
# Initial currents
Ie0=np.ones(N)*Ie0par #initial current for excitatory neurons
Ii0=np.ones(N)*Ii0par #initial current for inhibitory neurons
# Firing-rate
re=(re0-phie-sigmae*epsilon0)*np.exp(-t/taue)+phie+sigmae*epsilon
ri=(ri0-phii-sigmai*epsilon0)*np.exp(-t/taui)+phii+sigmai*epsilon
#Calculus with matrices
print(np.ones((5,5))*np.ones(5))
print(np.matmul(np.ones((5,5)),np.ones(5)))
print(np.ones(5))
print(np.ones((1,5))*np.ones((5,1)))
#Phi input
Ie=GEE*WEE-GEI*WEI+Ie0+Im
Ii=GIE*WIE-GII*WII+Ii0
#Connectivity matrices using module network
# All-to-all connectivity matrices
G=nx.complete_graph(10) # example of a connectivity matrice
print(G.nodes())
print(G.edges())
A=nx.adjacency_matrix(G)
print(A.todense())
A=A*2 # weight of the edges
print(A.todense())
WIE=nx.complete_graph(512) #paper matrices
WIE=nx.adjacency_matrix(WIE)
WIE=WIE*W0
WII=WIE
WEI=WIE
#module network
import networkx as nx
G=nx.Graph()
listneurons=list(range(512))
print(listneurons)
G.add_nodes_from(listneurons)
print(G.nodes)
G.add_edges_from([1,2])
G.add_edges_from([(1,2),(1,3)])
A=nx.adjacency_matrix(G, weight='3')
print(A.todense())
#square matrices
rr=np.zeros((5,5))
print(rr)
rr+=1
print(rr)
|
[
"noreply@github.com"
] |
HenriAton.noreply@github.com
|
4d8b9892b21dc06b532dbbcb1ec60eb0dfe5ef79
|
a8de5fb308cc351cb84ff465c7619d25080d5cd5
|
/s.py
|
f1b086e1387c8bd7b4f6800fc64b3a129e68b485
|
[] |
no_license
|
Anil2144/python-basics
|
6ab4c3c7a77763a3f5fd7e71dc42d402ecea2d92
|
7ece9f9a58af0959eef19a99437a4b32b426d836
|
refs/heads/master
| 2022-11-27T06:27:04.344515
| 2020-07-21T07:22:20
| 2020-07-21T07:22:20
| 275,876,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22
|
py
|
print("anil\treddy")
|
[
"noreply@github.com"
] |
Anil2144.noreply@github.com
|
cdc7e016f9ad364f72d1571461759254c34eab98
|
8955cc12cc644b8ed1dbc59c6801242cf0824f87
|
/edmshyft_code/GaussFit.py
|
3bd05561fa9d37db0069313955d121bc9cb1faed
|
[] |
no_license
|
mattbellis/Siena_College_Danielle_Berish
|
565e76392c65c0be9b8fba294671bb00e08b8956
|
11eaef1dfc2e3d165f978f9014469a635a44d5bd
|
refs/heads/master
| 2020-04-16T02:45:40.070587
| 2014-04-01T08:39:40
| 2014-04-01T08:39:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,511
|
py
|
#!/usr/bin/env python
from ROOT import TH1D, TH1, TCanvas
import ROOT
import numpy as np
import sys
import matplotlib.pyplot as plt
#################################################################################
# Read in the file from the command line
infile = None
if len(sys.argv) > 1:
infile = open(sys.argv[1],'r')
else:
print "Need to pass in an input file"
exit(-1)
content = np.array(infile.read().split()).astype('float')
#print content
# Separate content into designated lists
mean = []
meanError = []
std = []
stdError = []
i = 0
count = 0
while count < len(content):
mean.append(content[i])
meanError.append(content[i+1])
std.append(content[i+2])
stdError.append(content[i+3])
i += 4
count += 4
mean = np.array(mean)
std = np.array(std)
############################################
# List of pT's
pT = []
n = 170
while n <= 740:
pT.append(n)
n += 20
###########################################
# Fit the mean and std. dev.
slope,intercept = np.polyfit(pT,mean[8:37],1)
print "Slope of Mean: ",slope
print "Intercept of Mean: ",intercept
slope_std,intercept_std = np.polyfit(pT,std[8:37],1)
print "Slope of Std. Dev.: ", slope_std
print "Intercept of Std. Dev.: ",intercept_std
############################################
# Plot the mean and std. dev.
plt.figure(1)
plt.subplot(211)
plt.ylabel("Mean")
plt.scatter(pT,mean[8:37])
plt.subplot(212)
plt.xlabel("Top pT")
plt.ylabel("Std. Dev.")
plt.scatter(pT,std[8:37])
plt.show()
|
[
"berish@serenity"
] |
berish@serenity
|
b563563bd985a3f9d737ea973f8314bd6fb8f40d
|
9d2b33eb85fca3a81ccb7272422c41a08467a8c4
|
/pdm/formats/requirements.py
|
37c230c8eab420dab9e877204074086b6fe605a6
|
[
"MIT"
] |
permissive
|
skyoo2003/pdm
|
9b9d20079f325c087855c7a37a4270d36fa71131
|
95a758ee259dff02f00f0a3eab79fa23e5d2aa97
|
refs/heads/master
| 2022-12-12T18:01:36.331560
| 2020-09-04T01:16:41
| 2020-09-04T01:16:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,729
|
py
|
import hashlib
import urllib.parse
from pip._internal.req.req_file import parse_requirements
from pdm.models.markers import Marker
from pdm.models.requirements import parse_requirement
from pdm.utils import get_finder
def _requirement_to_str_lowercase_name(requirement):
"""Formats a packaging.requirements.Requirement with a lowercase name."""
parts = [requirement.name.lower()]
if requirement.extras:
parts.append("[{0}]".format(",".join(sorted(requirement.extras))))
if requirement.specifier:
parts.append(str(requirement.specifier))
if requirement.url:
parts.append("@ {0}".format(requirement.url))
if requirement.marker:
parts.append("; {0}".format(requirement.marker))
return "".join(parts)
def requirement_from_ireq(ireq):
"""Formats an `InstallRequirement` instance as a
`pdm.models.requirement.Requirement`.
Generic formatter for pretty printing InstallRequirements to the terminal
in a less verbose way than using its `__str__` method.
:param :class:`InstallRequirement` ireq: A pip **InstallRequirement** instance.
:return: A formatted string for prettyprinting
:rtype: str
"""
if ireq.editable:
line = "{}".format(ireq.link)
else:
line = _requirement_to_str_lowercase_name(ireq.req)
if str(ireq.req.marker) != str(ireq.markers):
if not ireq.req.marker:
line = "{}; {}".format(line, ireq.markers)
else:
name, markers = line.split(";", 1)
markers = Marker(markers) & ireq.markers
line = "{}; {}".format(name, markers)
return parse_requirement(line, ireq.editable)
def parse_requirement_file(filename):
from pip._internal.req.constructors import install_req_from_parsed_requirement
finder = get_finder([])
ireqs = [
install_req_from_parsed_requirement(pr)
for pr in parse_requirements(filename, finder.session, finder)
]
return ireqs, finder
def check_fingerprint(project, filename):
import tomlkit
with open(filename, encoding="utf-8") as fp:
try:
tomlkit.parse(fp.read())
except ValueError:
# the file should be a requirements.txt if it not a TOML document.
return True
else:
return False
def convert_url_to_source(url, name=None):
if not name:
name = hashlib.sha1(url.encode("utf-8")).hexdigest()[:6]
return {"name": name, "url": url, "verify_ssl": url.startswith("https://")}
def convert(project, filename):
ireqs, finder = parse_requirement_file(str(filename))
reqs = [requirement_from_ireq(ireq) for ireq in ireqs]
data = {"dependencies": dict(req.as_req_dict() for req in reqs)}
if finder.index_urls:
sources = [convert_url_to_source(finder.index_urls[0], "pypi")]
sources.extend(convert_url_to_source(url) for url in finder.index_urls[1:])
data["source"] = sources
return data
def export(project, candidates, options):
lines = []
for candidate in candidates:
req = candidate.req.as_line()
lines.append(req)
if options.hashes and candidate.hashes:
for item in candidate.hashes.values():
lines.append(f" \\\n --hash={item}")
lines.append("\n")
sources = project.tool_settings.get("source", [])
for source in sources:
url = source["url"]
prefix = "--index-url" if source["name"] == "pypi" else "--extra-index-url"
lines.append(f"{prefix} {url}\n")
if not source["verify_ssl"]:
host = urllib.parse.urlparse(url).hostname
lines.append(f"--trusted-host {host}\n")
return "".join(lines)
|
[
"mianghong@gmail.com"
] |
mianghong@gmail.com
|
eb1a62f56f2ffc107571e481f289733b9e687a09
|
5642ba9c9ba25768757e31f681eb8573c02e2e52
|
/django/ws17/index/admin.py
|
2940d331ae074237d884291b067a9e0922f74f0e
|
[] |
no_license
|
intaekShin/TIL-c9
|
f2ce3f857481feeb9de7f388d8e25cbdc11cacc5
|
f63bb7b71601196e91303ff03a26c25f1e07b445
|
refs/heads/master
| 2020-04-17T17:17:57.184912
| 2019-05-02T03:09:08
| 2019-05-02T03:09:08
| 166,777,132
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 204
|
py
|
from django.contrib import admin
from .models import Student
class StudentAdmin(admin.ModelAdmin):
list_display = ('name', )
# Register your models here.
admin.site.register(Student, StudentAdmin)
|
[
"sit921212@gmail.com"
] |
sit921212@gmail.com
|
8023c70563e9521e74a25e84b2e0594f82363218
|
5357547a3d7eb14904c7a7c15f6d804e095cbca3
|
/August 2019/09th August 2019/validateBinarySearchTree.py
|
982ead0197bd3aee7e86e33b36c5a790a97b2508
|
[] |
no_license
|
MichaelOgunsanmi/dailyCodingChallenge
|
3aaff98e07263dfde79192c5e040fc5f2523066b
|
2ca318b2be10be6af5b7130dd14627047cf257e8
|
refs/heads/master
| 2023-03-16T13:57:07.998943
| 2023-03-09T10:38:15
| 2023-03-09T10:38:15
| 195,297,943
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,369
|
py
|
# Source: https://leetcode.com/problems/validate-binary-search-tree/
# Level: Medium
#
# Date: 09th August 2019
"""
Given a binary tree, determine if it is a valid binary search tree (BST).
Assume a BST is defined as follows:
The left subtree of a node contains only nodes with keys less than the node's key.
The right subtree of a node contains only nodes with keys greater than the node's key.
Both the left and right subtrees must also be binary search trees.
Example 1:
2
/ \
1 3
Input: [2,1,3]
Output: true
Example 2:
5
/ \
1 4
/ \
3 6
Input: [5,1,4,null,null,3,6]
Output: false
Explanation: The root node's value is 5 but its right child's value is 4.
"""
# Solution
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def isValidBST(self, rootNode: TreeNode) -> bool:
return isBST(rootNode, float('-inf'), float('inf'))
def isBST(currentNode, leftMin, rightMax):
if currentNode is None:
return True
checkRoot = leftMin < currentNode.val < rightMax
    return (checkRoot
            and isBST(currentNode.left, leftMin, currentNode.val)
            and isBST(currentNode.right, currentNode.val, rightMax))
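# Example usage (sketch), reproducing Example 2 from the problem statement.
root = TreeNode(5)
root.left = TreeNode(1)
root.right = TreeNode(4)
root.right.left = TreeNode(3)
root.right.right = TreeNode(6)
print(Solution().isValidBST(root))  # False: 3 sits in the right subtree of 5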
|
[
"ogunsanmimichael@gmail.com"
] |
ogunsanmimichael@gmail.com
|
bef3b9ad03bdc33f7171cc9b588f198ce873e861
|
62922a76e40003f3d3a7d02282853f9a2b76c6fc
|
/cv2/ch22/test1.py
|
1172f8de48d2bc9bfba7168431a2727b16325054
|
[] |
no_license
|
cchangcs/ai_learning_record
|
a7d0d9c7fcdc1e97d8869aa7e63b535f8cf62df2
|
235a90ff5fe0205334376a927d462b8ae64e4e70
|
refs/heads/master
| 2020-04-01T16:59:31.203223
| 2018-11-21T11:12:34
| 2018-11-21T11:12:34
| 153,408,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,103
|
py
|
# encoding:utf-8
'''
Blob detection with SimpleBlobDetector()

By default the detector finds dark blobs; to detect white blobs, set the
by-colour parameter to true and the colour value to 255 (filterByColor /
blobColor in the OpenCV parameter struct).

A blob is a region that differs in colour or grey level from its
surroundings. Real images contain many such regions: a tree or a plot of
land is a blob, for example. Because a blob describes a region rather than
a single point, it is more stable and more robust to noise, so it plays an
important role in image registration. Blobs can also be the regions of
interest themselves; in medical imaging, for instance, one may need to
extract the positions or the number of characteristic blobs from X-ray or
cell-microscopy images.
'''
import cv2
import numpy as np
im = cv2.imread('blob.jpg', cv2.IMREAD_GRAYSCALE)
detector = cv2.SimpleBlobDetector_create()
keypoints = detector.detect(im)
im_with_keypoints = cv2.drawKeypoints(im, keypoints, np.array([]), (0, 0, 255), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow("Keypoints", im_with_keypoints)
cv2.waitKey(0)
cv2.destroyAllWindows()
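# To detect light blobs instead, as noted in the docstring (sketch):
# params = cv2.SimpleBlobDetector_Params()
# params.filterByColor = True
# params.blobColor = 255
# detector = cv2.SimpleBlobDetector_create(params)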
|
[
"752340690@qq.com"
] |
752340690@qq.com
|
2787877ab7c4b47fa32149dee05ec29535e288c6
|
e52d1885191694c5098e17c10c6957a8222f753f
|
/PhotoStudio/views.py
|
7681e1fbe834b88eaabe3df3f27b43cfee8076f9
|
[] |
no_license
|
LalitNath1221/pUkStudio
|
17ea355ccca61ee59294706d142cbc83cb6cecd1
|
55ae04ddec8b30f6c2b0b09a981ad972fafb1337
|
refs/heads/master
| 2023-06-14T00:32:59.265234
| 2021-07-11T06:13:29
| 2021-07-11T06:13:29
| 384,870,202
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,693
|
py
|
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.core.mail import BadHeaderError, send_mail, EmailMultiAlternatives
from django.conf import settings
from .models import Appointments, Queries
from socket import gaierror, timeout
from django.contrib import messages
# Create your views here.
def index(request):
if (request.method == "POST"):
userName = request.POST["Name"]
userEmail = request.POST["Email"]
userPhno = request.POST["ContactNum"]
userMsg = request.POST["userQuery"]
body = f"""<div style="border: 2px solid black; padding: 1rem;"><p><b>Name : {userName}</b></p><br><p>Phone no : {userPhno}</p><hr><u><b>query</b></u><br><p>{userMsg}</p></div>"""
subj = "Query"
msg, status = send_email(subj, userEmail, body)
#print(userName," ",userMsg," ",userPhno," ",userEmail)
params = {'responce':msg, 'status':status}
if (status=="success"):
data = Queries.objects.create(
User_Name= userName,
User_Email= userEmail,
User_Contact= userPhno,
User_Discription= userMsg)
data.save()
return render(request, 'index.html', params)
else:
params = {}
return render(request, 'index.html', params)
def base(request):
return render(request, 'base.html')
def bookings(request):
if (request.method == "POST"):
userFirstName = request.POST["CFirstName"]
userLastName = request.POST["CLastName"]
userEmail = request.POST["cEmail"]
userContact = request.POST["cContactNo"]
BookingDate = request.POST["BookedOn"]
apptDate = request.POST["apptDate"]
userEvent = request.POST["cEvent"]
userMsg = request.POST["cMessage"]
body = f"""<div style="border: 2px solid black; padding: 1rem;"><p><u><b> User Name : {userFirstName}{userLastName} </b></u><br />User Email: {userEmail}<br />Phon No : {userContact}<br />Booked on :{BookingDate}<br />Appointment Date : {apptDate}<br />For {userEvent} <hr><u><b>Message :</b></u>{userMsg}</p></div>"""
subj = "Regards Booking Appointment"
msg, status = send_email(subj, userEmail, body)
params = {'responce':msg, 'status':status}
if (status=="success"):
Adata = Appointments.objects.create(User_FirstName=userFirstName,
User_LastName= userLastName,
User_Email= userEmail,
User_Contact= userContact,
User_BookedOn= BookingDate,
User_ApptDate= apptDate,
User_Event= userEvent,
User_Suggestion= userMsg)
print(status)
Adata.save()
print(status)
return render(request, 'book.html', params)
else:
params = {}
return render(request, 'book.html', params)
def send_email(subject, Uemail, body):
text_content = "A mail from UkPhotography user"
html_content = f'{body}'
from_mail = settings.EMAIL_HOST_USER
msg = EmailMultiAlternatives(subject, text_content, from_mail, ['ukphotography2002@gmail.com'], reply_to=[Uemail,])
msg.attach_alternative(html_content, "text/html")
try:
msg.send()
except BadHeaderError:
Msg = "Invalid header found"
status = "error"
return(Msg, status)
except (gaierror, timeout):
Msg = "SORRY!<br/>Check your internet connection or try again after some time"
status = "error"
return(Msg, status)
Msg="Thank you for getting in touch!<br/>we will get back in touch with you soon!<br>Have a great day!"
status = "success"
return(Msg, status)
|
[
"lalitgosai2002@gmail.com"
] |
lalitgosai2002@gmail.com
|
43f563d0761110d21a9addf1d57a833d60af3c04
|
e25f1cafc3e63e468d103918ebcf901867e3d786
|
/Assignment2/q5-2.py
|
cbfaf8a06d698af1b5fab4edcd8104d4f56d406d
|
[] |
no_license
|
lebrice/IFT6135
|
726ee55c4d331a3a632a2038af8cf81e207db510
|
e0a28e21124718a59027ffce68c47b0cb4562aef
|
refs/heads/master
| 2020-04-18T14:47:19.555720
| 2019-04-23T00:13:03
| 2019-04-23T00:13:03
| 167,597,823
| 0
| 0
| null | 2019-03-19T03:11:36
| 2019-01-25T18:57:37
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,427
|
py
|
import torch
from torch import nn
import numpy as np
import matplotlib.pyplot as plt
from models import FullTransformer
from q5 import get_best_model, train_data, ptb_iterator, repackage_hidden, Batch, device
def run_batch(model, data):
model.eval()
if not isinstance(model, FullTransformer):
hidden = model.init_hidden()
hidden = hidden.to(device)
loss_fn = nn.CrossEntropyLoss()
# LOOP THROUGH MINIBATCHES
x, y = next(ptb_iterator(data, model.batch_size, model.seq_len))
model.zero_grad()
if isinstance(model, FullTransformer):
batch = Batch(torch.from_numpy(x).long().to(device))
outputs = model.forward(batch.data, batch.mask).transpose(1,0)
#print ("outputs.shape", outputs.shape)
else:
inputs = torch.from_numpy(x.astype(np.int64)).transpose(0, 1).contiguous().to(device)
# hidden = repackage_hidden(hidden)
outputs, new_hidden = model(inputs, hidden)
targets = torch.from_numpy(y.astype(np.int64)).contiguous().to(device)
tt = torch.squeeze(targets.view(-1, model.batch_size * model.seq_len))
def register_grad_hook(tensor):
def hook(grad):
tensor.hidden_grad = grad
tensor.register_hook(hook)
for hidden_state in model.hidden_states:
register_grad_hook(hidden_state)
# LOSS COMPUTATION
# This line currently averages across all the sequences in a mini-batch
# and all time-steps of the sequences.
# For problem 5.3, you will (instead) need to compute the average loss
# at each time-step separately.
loss = loss_fn(outputs.contiguous().view(-1, model.vocab_size), tt)
loss.backward()
hidden_grads = torch.stack(model.hidden_states)
return hidden_grads.mean(1).norm(p=2, dim=-1)
if __name__ == "__main__":
for model_type in ['RNN', 'GRU']:
model = get_best_model(model_type)
model = model.to(device)
hidden_grads = run_batch(model, train_data)
normalized = (hidden_grads - hidden_grads.min()) / (hidden_grads.max() - hidden_grads.min())
plt.plot(range(model.seq_len), normalized.detach().cpu().numpy(), label=model_type)
plt.title(f'Normalized norm of average hidden state gradients at each time-step')
plt.legend()
plt.xlabel('Time-step')
    plt.ylabel('Normalized norm of average hidden state gradient')
plt.savefig(f'Q5_2_grad_wrt_time_steps.jpg')
|
[
"jerome-pl2@hotmail.ca"
] |
jerome-pl2@hotmail.ca
|
72171071e30159663bde7ae3c5659354955ca45c
|
9aecf38356351986889b162fced066c1c4cf775a
|
/name.py
|
af29007a7745d852251fdf3d870d28d5904c987f
|
[] |
no_license
|
Saitop/CS50.4_Python-JS_web_programming
|
51c2c6e7911c28eb4ddf65c0c981fcb1d031db9f
|
c2ed428fda5dcecc713dfa91bf28c2e520109c37
|
refs/heads/master
| 2022-11-24T16:43:16.725590
| 2020-07-25T17:06:10
| 2020-07-25T17:06:10
| 282,488,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 80
|
py
|
print("please input your name:")
name = input("Name: ")
print(f"Hello, {name}")
|
[
"saitoplam@hotmail.com"
] |
saitoplam@hotmail.com
|
f8624618ec8af27c85fe2f7d142c1b79ab69ae1b
|
66c0e47287c8e7737cf93adc6c42f84bf1dfe31c
|
/sagemaker/serve/utils_nlp.py
|
48e18ef2ac7236d0fe3874a04a3e0ae489e663cb
|
[] |
no_license
|
reneang17/deep-toxic-analysis
|
ed1c10e5631dbd2bd3255aea9ce2e51e2072fcb5
|
b42147366f54aefa2009506b9e1498286624c59d
|
refs/heads/master
| 2020-09-03T02:37:15.164813
| 2020-01-02T03:36:16
| 2020-01-02T03:36:16
| 219,363,839
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 734
|
py
|
import re
rep_numbers=re.compile(r'\d+',re.IGNORECASE) # Numbers
rep_special_chars= re.compile("[^\w']|_") # Special character but not apostrophes
def apostrophes(text):
return re.findall(r"\w+(?=n't)|n't|\w+(?=')|'\w+|\w+",
text, re.IGNORECASE | re.DOTALL)
def text_to_words(text):
text=rep_special_chars.sub(' ', text) # Remove special characters but apostrophes
text = rep_numbers.sub('n', text) # substitute all numbers
words = text.lower()
words = apostrophes(words)[:120]# Split string into words
return words
def tokenize(word_dict, text):
words = text_to_words(text)
words=[word_dict[w] if w in word_dict else word_dict['<unk>'] for w in words]
return words
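# Example (sketch) with a toy vocabulary; unknown words map to '<unk>':
# word_dict = {'<unk>': 0, 'hello': 1}
# tokenize(word_dict, "Hello, world 42!")   # -> [1, 0, 0]
# ('world' and the digit placeholder 'n' are out of vocabulary here)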
|
[
"ec2-user@ip-172-16-29-12.us-east-2.compute.internal"
] |
ec2-user@ip-172-16-29-12.us-east-2.compute.internal
|
622882398cd5c1e5077722f60c7aa9e77ef203af
|
0ad8fc76aebe7ce22abe771fbeadf227e5b471cb
|
/app/productdb/tasks.py
|
8cb6cd358ef178da1d7cd5290af4ab4a8389c040
|
[
"MIT"
] |
permissive
|
ppavlu/product-database
|
354c6a1a3e9ebfdc931f2aacf8751ed0f149401c
|
09610c09600c63eb91106c0b5a2fa995b134dbf4
|
refs/heads/master
| 2021-01-17T22:51:43.247027
| 2015-10-11T11:37:12
| 2015-10-11T11:37:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,251
|
py
|
from django_project.celery import app as app
from app.productdb.models import Settings
import app.productdb.crawler.cisco_eox_api_crawler as cisco_eox_api_crawler
import logging
logger = logging.getLogger(__name__)
@app.task(serializer='json', name="synchronize_with_cisco_eox_api")
def execute_task_to_synchronize_cisco_eox_states():
"""
This task will automatically synchronize the Cisco EoX states with the local database. It will execute the
configured queries and saves the information to the local database. There are two types of operation:
* cisco_eox_api_auto_sync_auto_create_elements is set to true - will create any element which is not part of the blacklist and not in the
database
* cisco_eox_api_auto_sync_auto_create_elements is set to false - will only update entries, which are already included in the database
:return:
"""
logger.info("execute synchronize Cisco EoX update task...")
# update based on the configured query settings
result = cisco_eox_api_crawler.synchronize_with_cisco_eox_api()
logger.info("result: %s" % str(result))
s = Settings.objects.get(id=0)
s.eox_api_sync_task_id = ""
s.save()
return result
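# A minimal trigger sketch (not from the original file; assumes a configured
# broker and a running Celery worker for this app):
#
#     from app.productdb.tasks import execute_task_to_synchronize_cisco_eox_states
#     async_result = execute_task_to_synchronize_cisco_eox_states.delay()  # enqueue asynchronously
#     print(async_result.get(timeout=600))  # block until the sync run finishes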
|
[
"henry@codingnetworker.com"
] |
henry@codingnetworker.com
|
ad04f80a228f6453c99bb617772d78fcf7ed8e48
|
517ff269c8aaca5edf8b754dc81fbbdb37adde28
|
/PatternCounting.py
|
40c03215ec5a2c5feb9c15105d29891cf5726a20
|
[] |
no_license
|
Andrewwu73/BioInformaticsAlgorithms
|
0d681bcc43b721dd743c1030042b2b19fbb04393
|
e2bd4029ef60ee4fee96567b45e93f673eecc665
|
refs/heads/master
| 2020-06-24T18:43:20.462859
| 2020-02-27T00:30:11
| 2020-02-27T00:30:11
| 199,049,930
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 135
|
py
|
a = input("")
b = input("")
count = 0
for k in range(len(a)-len(b)+1):
if(a[k:k+len(b)]==b):
count = count + 1
print(count)
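# A quick self-check of the sliding-window count above, using the classic
# Rosalind sample (hypothetical inputs, not part of the original script):
# with a = "GATATATGCATATACTT" and b = "ATA", overlapping matches start at
# positions 1, 3, 9 and 11, so the loop prints 4 -- note that str.count()
# skips overlaps and would report only 2.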
|
[
"andrewswu2000@gmail.com"
] |
andrewswu2000@gmail.com
|
89c2127cdb13c6c6248abfba21d3cdb0eba90d73
|
3b9fdd117bfcfa86b1e354b2c193727e7567cb76
|
/proxyuser17/proxyuser17/apps/myapp/models.py
|
9a06f4b7a626979e8ea1971891cbd06fbebd22c1
|
[
"BSD-3-Clause"
] |
permissive
|
marcofucci/django-ticket-24506
|
84beb913b2db2993034e199c3a63c2cde60fa9aa
|
88a36d792e77fb70c12224ea6a02774015ddbc84
|
refs/heads/master
| 2021-01-21T11:37:23.621884
| 2015-03-22T12:55:25
| 2015-03-22T12:55:25
| 32,672,309
| 0
| 1
| null | 2015-03-22T12:55:25
| 2015-03-22T11:11:24
|
Python
|
UTF-8
|
Python
| false
| false
| 314
|
py
|
from django.db import models
class FKUserModel(models.Model):
user = models.ForeignKey('core.User')
def __unicode__(self):
return u'%s' % self.user
class OneToOneUserModel(models.Model):
user = models.OneToOneField('core.User')
def __unicode__(self):
return u'%s' % self.user
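# A minimal ORM sketch (not from the original file; assumes migrations have run
# and that a core.User row already exists):
#
#     user = User.objects.first()
#     FKUserModel.objects.create(user=user)        # many rows may reference one user
#     OneToOneUserModel.objects.create(user=user)  # at most one row per user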
|
[
"marcofucci@gmail.com"
] |
marcofucci@gmail.com
|
31e927f3c4978a6c84c65f9dd512111ab7794a3b
|
34af993fb0c26e9b154effc01a3117e38a134ce7
|
/esp32-micropython/sys/boot.py
|
5089da4e1aa9574e20c3423bbaaa39057060fa80
|
[] |
no_license
|
dida1012/Edublocks_ESP32
|
467401c2c100a09acffd1afced80b8fa6956520f
|
b4d97afba499a303f7558c561484625b86e2b4b4
|
refs/heads/master
| 2023-01-22T07:29:23.147930
| 2019-11-17T22:02:45
| 2019-11-17T22:02:45
| 222,307,141
| 3
| 0
| null | 2023-01-12T05:57:25
| 2019-11-17T20:24:56
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 1,023
|
py
|
import sys
import gc
import webrepl
from lib import screen
from lib import wifi
from lib import panel
try:
screen.fb.set_line_range_palette(0, 12, 0b1111100000011111, 0x0000)
screen.fb.set_line_range_palette(12, 24, 0b0000011111100000, 0xffff)
screen.fb.set_line_range_palette(108, 120, 0b0000011111111111, 0x0000)
screen.fb.set_line_range_palette(120, 122, 0b1111100000000000, 0xffff)
screen.fb.set_line_range_palette(122, 124, 0b1111100000000000, 0xffff)
screen.fb.set_line_range_palette(124, 126, 0b0000011111000000, 0xffff)
screen.fb.set_line_range_palette(126, 128, 0b1111111111000000, 0xffff)
screen.print_line('Pretty colours!', 9)
except:
pass
screen.print_line('Starting...', 0)
gc.collect()
wifi.auto_connect()
gc.collect()
webrepl.start(password='')
gc.collect()
screen.print_line('WebREPL started', 4)
panel.start_panel()
gc.collect()
sys.path.append('/user')
from lib import splash
try:
import main
except:
    print('Could not find main startup script')
|
[
"daviddiener@outlook.com"
] |
daviddiener@outlook.com
|
e7c933c7739f81bba762a05bd13220dda275b7ae
|
0e1e643e864bcb96cf06f14f4cb559b034e114d0
|
/Exps_7_v3/doc3d/Wyx_w_M_w_Sob_to_Wz_focus/IN_Sob_k5_EroMore/Sob_k35_s001_EroM/pyr_Tcrop255_p60_j15/pyr_5s/L7/step09_5side_L7.py
|
94ce9cec1f7924ce4d28ef2ca3b168732be84876
|
[] |
no_license
|
KongBOy/kong_model2
|
33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307
|
1af20b168ffccf0d5293a393a40a9fa9519410b2
|
refs/heads/master
| 2022-10-14T03:09:22.543998
| 2022-10-06T11:33:42
| 2022-10-06T11:33:42
| 242,080,692
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399,932
|
py
|
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
import sys
code_exe_path = os.path.realpath(__file__)                           ### path of the script currently being executed
code_exe_path_element = code_exe_path.split("\\")                    ### split the path (Windows-style separators) to find which level kong_model2 sits at
kong_layer = code_exe_path_element.index("kong_model2")              ### index of kong_model2 within the path elements
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])  ### reassemble the path up to the kong_model2 dir
sys.path.append(kong_model2_dir)                                     ### add kong_model2 to sys.path
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
from step08_b_use_G_generate_Wxy_w_M_to_Wz_combine import Wyx_w_M_to_Wz
from step08_b_use_G_generate_0_util import Tight_crop
from step09_c_train_step import Train_step_Wyx_w_M_to_Wz
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME
from step10_a1_loss import Sobel_MAE
Sob_k5_s001_erose_M = Sobel_MAE(sobel_kernel_size=5, sobel_kernel_scale=1, erose_M=True, erose_More=True)
use_gen_op     = Wyx_w_M_to_Wz( focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale=0),  sobel=Sob_k5_s001_erose_M, sobel_only=True )
use_train_step = Train_step_Wyx_w_M_to_Wz( focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale=15), sobel=Sob_k5_s001_erose_M, sobel_only=True )
import time
start_time = time.time()
###############################################################################################################################################################################################
##################################
### 5side1
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_1__2side_1__3side_1_4side_1_5s1 = [5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_2__2side_1__3side_1_4side_1_5s1 = [5, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 5]
pyramid_1side_2__2side_2__3side_1_4side_1_5s1 = [5, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5]
pyramid_1side_2__2side_2__3side_2_4side_1_5s1 = [5, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 5]
pyramid_1side_2__2side_2__3side_2_4side_2_5s1 = [5, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_3__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 5]
pyramid_1side_3__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 5]
pyramid_1side_3__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 5]
pyramid_1side_3__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 5]
pyramid_1side_3__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 5]
pyramid_1side_3__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 5]
pyramid_1side_3__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 4, 5]
pyramid_1side_3__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 4, 5]
pyramid_1side_3__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 5]
pyramid_1side_3__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
pyramid_1side_4__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 5]
pyramid_1side_4__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 5]
pyramid_1side_4__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3, 5]
pyramid_1side_4__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 5]
pyramid_1side_4__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 5]
pyramid_1side_4__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 5]
pyramid_1side_4__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 5]
pyramid_1side_4__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 3, 5]
pyramid_1side_4__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 5]
pyramid_1side_4__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 5]
pyramid_1side_4__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 5]
pyramid_1side_4__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 4, 5]
pyramid_1side_4__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 4, 5]
pyramid_1side_4__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 4, 5]
pyramid_1side_4__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 4, 5]
pyramid_1side_4__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 4, 5]
pyramid_1side_4__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 0, 0, 0, 0, 0, 0, 0, 1, 4, 4, 5]
pyramid_1side_4__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 2, 4, 4, 5]
pyramid_1side_4__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 0, 0, 0, 0, 0, 0, 0, 3, 4, 4, 5]
pyramid_1side_4__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
pyramid_1side_5__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 5]
pyramid_1side_5__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 2, 5]
pyramid_1side_5__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 3, 5]
pyramid_1side_5__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 3, 5]
pyramid_1side_5__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 3, 5]
pyramid_1side_5__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 3, 5]
pyramid_1side_5__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 2, 5]
pyramid_1side_5__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 3, 5]
pyramid_1side_5__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 3, 5]
pyramid_1side_5__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 3, 5]
pyramid_1side_5__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 5]
pyramid_1side_5__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 3, 5]
pyramid_1side_5__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 3, 5]
pyramid_1side_5__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 3, 5]
pyramid_1side_5__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 5]
pyramid_1side_5__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 4, 5]
pyramid_1side_5__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 4, 5]
pyramid_1side_5__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 4, 5]
pyramid_1side_5__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 4, 5]
pyramid_1side_5__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5]
pyramid_1side_5__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 4, 5]
pyramid_1side_5__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 4, 5]
pyramid_1side_5__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 4, 5]
pyramid_1side_5__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 4, 5]
pyramid_1side_5__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 0, 0, 0, 0, 0, 1, 1, 4, 4, 5]
pyramid_1side_5__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 0, 0, 0, 0, 0, 1, 2, 4, 4, 5]
pyramid_1side_5__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 0, 0, 0, 0, 0, 1, 3, 4, 4, 5]
pyramid_1side_5__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 0, 0, 0, 0, 0, 2, 2, 4, 4, 5]
pyramid_1side_5__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 0, 0, 0, 0, 0, 2, 3, 4, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 0, 0, 0, 0, 0, 3, 3, 4, 4, 5]
pyramid_1side_5__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 0, 0, 0, 0, 0, 1, 4, 4, 4, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 0, 0, 0, 0, 0, 2, 4, 4, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 0, 0, 0, 0, 0, 3, 4, 4, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 0, 0, 0, 0, 0, 4, 4, 4, 4, 5]
# 1 3 6 10 15 "21" 28 36 45 55
# side6 OK 56
pyramid_1side_6__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 5]
pyramid_1side_6__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 2, 5]
pyramid_1side_6__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 3, 5]
pyramid_1side_6__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 2, 5]
pyramid_1side_6__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 3, 5]
pyramid_1side_6__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 3, 5]
pyramid_1side_6__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 2, 5]
pyramid_1side_6__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 3, 5]
pyramid_1side_6__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 3, 5]
pyramid_1side_6__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 3, 5]
pyramid_1side_6__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 2, 5]
pyramid_1side_6__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 3, 5]
pyramid_1side_6__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 3, 5]
pyramid_1side_6__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 3, 5]
pyramid_1side_6__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 3, 5]
pyramid_1side_6__2side_6__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 5]
pyramid_1side_6__2side_6__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 3, 5]
pyramid_1side_6__2side_6__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 3, 5]
pyramid_1side_6__2side_6__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 3, 5]
pyramid_1side_6__2side_6__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 3, 5]
pyramid_1side_6__2side_6__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 3, 5]
pyramid_1side_6__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 4, 5]
pyramid_1side_6__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 4, 5]
pyramid_1side_6__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 4, 5]
pyramid_1side_6__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 4, 5]
pyramid_1side_6__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 4, 5]
pyramid_1side_6__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 4, 5]
pyramid_1side_6__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 4, 5]
pyramid_1side_6__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 4, 5]
pyramid_1side_6__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 4, 5]
pyramid_1side_6__2side_6__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 4, 5]
pyramid_1side_6__2side_6__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 4, 5]
pyramid_1side_6__2side_6__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 4, 5]
pyramid_1side_6__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 1, 0, 0, 0, 1, 1, 1, 4, 4, 5]
pyramid_1side_6__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 1, 0, 0, 0, 1, 1, 2, 4, 4, 5]
pyramid_1side_6__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 1, 0, 0, 0, 1, 1, 3, 4, 4, 5]
pyramid_1side_6__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 1, 0, 0, 0, 1, 2, 2, 4, 4, 5]
pyramid_1side_6__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 1, 0, 0, 0, 1, 2, 3, 4, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 1, 0, 0, 0, 1, 3, 3, 4, 4, 5]
pyramid_1side_6__2side_6__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 0, 0, 0, 2, 2, 2, 4, 4, 5]
pyramid_1side_6__2side_6__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 0, 0, 0, 2, 2, 3, 4, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 0, 0, 0, 2, 3, 3, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 0, 0, 0, 3, 3, 3, 4, 4, 5]
pyramid_1side_6__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 1, 0, 0, 0, 1, 1, 4, 4, 4, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 1, 0, 0, 0, 1, 2, 4, 4, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 1, 0, 0, 0, 1, 3, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 0, 0, 0, 2, 2, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 0, 0, 0, 2, 3, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 0, 0, 0, 3, 3, 4, 4, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 1, 0, 0, 0, 1, 4, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 0, 0, 0, 2, 4, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 0, 0, 0, 3, 4, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 4, 5]
# 1 3 6 10 15 21 "28" 36 45 55
# side7 OK 84
pyramid_1side_7__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 5]
pyramid_1side_7__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 2, 5]
pyramid_1side_7__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 3, 5]
pyramid_1side_7__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 5]
pyramid_1side_7__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 3, 5]
pyramid_1side_7__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 1, 1, 0, 1, 1, 1, 1, 3, 3, 5]
pyramid_1side_7__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 2, 5]
pyramid_1side_7__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 3, 5]
pyramid_1side_7__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 1, 1, 0, 1, 1, 1, 2, 3, 3, 5]
pyramid_1side_7__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 1, 1, 0, 1, 1, 1, 3, 3, 3, 5]
pyramid_1side_7__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 2, 5]
pyramid_1side_7__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 3, 5]
pyramid_1side_7__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 1, 1, 0, 1, 1, 2, 2, 3, 3, 5]
pyramid_1side_7__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 1, 1, 0, 1, 1, 2, 3, 3, 3, 5]
pyramid_1side_7__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 1, 1, 0, 1, 1, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_6__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 2, 5]
pyramid_1side_7__2side_6__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 3, 5]
pyramid_1side_7__2side_6__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 1, 0, 1, 2, 2, 2, 3, 3, 5]
pyramid_1side_7__2side_6__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 1, 0, 1, 2, 2, 3, 3, 3, 5]
pyramid_1side_7__2side_6__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_6__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 1, 0, 1, 3, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 5]
pyramid_1side_7__2side_7__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 3, 5]
pyramid_1side_7__2side_7__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 2, 0, 2, 2, 2, 2, 3, 3, 5]
pyramid_1side_7__2side_7__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 2, 0, 2, 2, 2, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 2, 0, 2, 2, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 2, 0, 2, 3, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_7_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 4, 5]
pyramid_1side_7__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 4, 5]
pyramid_1side_7__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 1, 1, 0, 1, 1, 1, 1, 3, 4, 5]
pyramid_1side_7__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 4, 5]
pyramid_1side_7__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 1, 1, 0, 1, 1, 1, 2, 3, 4, 5]
pyramid_1side_7__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 1, 1, 0, 1, 1, 1, 3, 3, 4, 5]
pyramid_1side_7__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 4, 5]
pyramid_1side_7__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 1, 1, 0, 1, 1, 2, 2, 3, 4, 5]
pyramid_1side_7__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 1, 1, 0, 1, 1, 2, 3, 3, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 1, 1, 0, 1, 1, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_6__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 4, 5]
pyramid_1side_7__2side_6__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 1, 0, 1, 2, 2, 2, 3, 4, 5]
pyramid_1side_7__2side_6__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 1, 0, 1, 2, 2, 3, 3, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 1, 0, 1, 3, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 4, 5]
pyramid_1side_7__2side_7__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 2, 0, 2, 2, 2, 2, 3, 4, 5]
pyramid_1side_7__2side_7__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 2, 0, 2, 2, 2, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 2, 0, 2, 2, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 2, 0, 2, 3, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 1, 1, 0, 1, 1, 1, 1, 4, 4, 5]
pyramid_1side_7__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 1, 1, 0, 1, 1, 1, 2, 4, 4, 5]
pyramid_1side_7__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 1, 1, 0, 1, 1, 1, 3, 4, 4, 5]
pyramid_1side_7__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 1, 1, 0, 1, 1, 2, 2, 4, 4, 5]
pyramid_1side_7__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 1, 1, 0, 1, 1, 2, 3, 4, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 1, 1, 0, 1, 1, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_6__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 1, 0, 1, 2, 2, 2, 4, 4, 5]
pyramid_1side_7__2side_6__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 1, 0, 1, 2, 2, 3, 4, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 1, 0, 1, 2, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 1, 0, 1, 3, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 2, 0, 2, 2, 2, 2, 4, 4, 5]
pyramid_1side_7__2side_7__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 2, 0, 2, 2, 2, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 2, 0, 2, 2, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 2, 0, 2, 3, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 0, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 1, 1, 0, 1, 1, 1, 4, 4, 4, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 1, 1, 0, 1, 1, 2, 4, 4, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 1, 1, 0, 1, 1, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 1, 0, 1, 2, 2, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 1, 0, 1, 2, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 1, 0, 1, 3, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 2, 0, 2, 2, 2, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 2, 0, 2, 2, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 2, 0, 2, 3, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 0, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 1, 1, 0, 1, 1, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 1, 0, 1, 2, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 1, 0, 1, 3, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 2, 0, 2, 2, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 2, 0, 2, 3, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 0, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 1, 0, 1, 4, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 2, 0, 2, 4, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 0, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 5]
# 1 3 6 10 15 21 28 "36" 45 55
# side8 OK 120
pyramid_1side_8__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5]
pyramid_1side_8__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 5]
pyramid_1side_8__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 5]
pyramid_1side_8__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 5]
pyramid_1side_8__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 5]
pyramid_1side_8__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 5]
pyramid_1side_8__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 5]
pyramid_1side_8__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 5]
pyramid_1side_8__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 3, 5]
pyramid_1side_8__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 5]
pyramid_1side_8__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 1, 1, 1, 1, 1, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 1, 1, 1, 1, 1, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_6__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_6__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_6__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 1, 1, 1, 2, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_6__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 1, 1, 1, 2, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_6__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 1, 1, 1, 2, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_6__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_7__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_7__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_7__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 2, 1, 2, 2, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 2, 1, 2, 2, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 2, 1, 2, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_7_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_8__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_8__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_8__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_7_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_8_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 5]
pyramid_1side_8__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 4, 5]
pyramid_1side_8__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 4, 5]
pyramid_1side_8__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 4, 5]
pyramid_1side_8__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 4, 5]
pyramid_1side_8__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 4, 5]
pyramid_1side_8__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 1, 1, 1, 1, 1, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 1, 1, 1, 1, 1, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_6__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_6__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 1, 1, 1, 2, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_6__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 1, 1, 1, 2, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 1, 1, 1, 2, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_7__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_7__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 2, 1, 2, 2, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 2, 1, 2, 2, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 2, 1, 2, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_8__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_8__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 4, 5]
pyramid_1side_8__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 1, 1, 1, 1, 1, 1, 2, 4, 4, 5]
pyramid_1side_8__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 1, 1, 1, 1, 1, 1, 3, 4, 4, 5]
pyramid_1side_8__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 1, 1, 1, 1, 1, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 1, 1, 1, 1, 1, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 1, 1, 1, 1, 1, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_6__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 1, 1, 1, 2, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_6__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 1, 1, 1, 2, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 1, 1, 1, 2, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 1, 1, 1, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_7__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 2, 1, 2, 2, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 2, 1, 2, 2, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 2, 1, 2, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 1, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_8__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 2, 2, 2, 2, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 2, 2, 2, 2, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 2, 2, 2, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 2, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 1, 1, 1, 1, 1, 1, 4, 4, 4, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 1, 1, 1, 1, 1, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 1, 1, 1, 1, 1, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 1, 1, 1, 2, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 1, 1, 1, 2, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 1, 1, 1, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 2, 1, 2, 2, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 2, 1, 2, 2, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 2, 1, 2, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 1, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 2, 2, 2, 2, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 2, 2, 2, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 2, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 1, 1, 1, 1, 1, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 1, 1, 1, 2, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 1, 1, 1, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 2, 1, 2, 2, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 2, 1, 2, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 1, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 2, 2, 2, 2, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 2, 2, 2, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 2, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 1, 1, 1, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 2, 1, 2, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 1, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 2, 2, 2, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 2, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 3, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 1, 4, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 2, 4, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s1 = [5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5]
##################################
### 5side2
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side2 OK 1
pyramid_1side_2__2side_2__3side_2_4side_2_5s2 = [5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side3 OK 4
pyramid_1side_3__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 5, 5]
pyramid_1side_3__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5, 5]
pyramid_1side_3__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 5, 5]
pyramid_1side_3__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side4 OK 10
pyramid_1side_4__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 5, 5]
pyramid_1side_4__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 5, 5]
pyramid_1side_4__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 5, 5]
pyramid_1side_4__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 5, 5]
pyramid_1side_4__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 5, 5]
pyramid_1side_4__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 0, 0, 0, 0, 0, 0, 0, 1, 4, 5, 5]
pyramid_1side_4__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 0, 0, 0, 0, 0, 0, 0, 2, 4, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 0, 0, 0, 0, 0, 0, 0, 3, 4, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 4, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side5 OK 20
pyramid_1side_5__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 5, 5]
pyramid_1side_5__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 5, 5]
pyramid_1side_5__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 5, 5]
pyramid_1side_5__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 5, 5]
pyramid_1side_5__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 5, 5]
pyramid_1side_5__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 5, 5]
pyramid_1side_5__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 5, 5]
pyramid_1side_5__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 0, 0, 0, 0, 0, 1, 1, 4, 5, 5]
pyramid_1side_5__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 0, 0, 0, 0, 0, 1, 2, 4, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 0, 0, 0, 0, 0, 1, 3, 4, 5, 5]
pyramid_1side_5__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 0, 0, 0, 0, 0, 2, 2, 4, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 0, 0, 0, 0, 0, 2, 3, 4, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 0, 0, 0, 0, 0, 3, 3, 4, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 0, 0, 0, 0, 0, 1, 4, 4, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 2, 4, 4, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 0, 0, 0, 0, 0, 3, 4, 4, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 0, 0, 0, 0, 0, 4, 4, 4, 5, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side6 OK 35
pyramid_1side_6__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 5, 5]
pyramid_1side_6__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 5, 5]
pyramid_1side_6__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 5, 5]
pyramid_1side_6__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 5, 5]
pyramid_1side_6__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 5, 5]
pyramid_1side_6__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 5, 5]
pyramid_1side_6__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 5, 5]
pyramid_1side_6__2side_6__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 5, 5]
pyramid_1side_6__2side_6__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 5, 5]
pyramid_1side_6__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 1, 0, 0, 0, 1, 1, 1, 4, 5, 5]
pyramid_1side_6__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 1, 0, 0, 0, 1, 1, 2, 4, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 1, 0, 0, 0, 1, 1, 3, 4, 5, 5]
pyramid_1side_6__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 1, 0, 0, 0, 1, 2, 2, 4, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 1, 0, 0, 0, 1, 2, 3, 4, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 1, 0, 0, 0, 1, 3, 3, 4, 5, 5]
pyramid_1side_6__2side_6__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 0, 0, 0, 2, 2, 2, 4, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 0, 0, 0, 2, 2, 3, 4, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 0, 0, 0, 2, 3, 3, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 0, 0, 0, 3, 3, 3, 4, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 1, 0, 0, 0, 1, 1, 4, 4, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 1, 0, 0, 0, 1, 2, 4, 4, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 1, 0, 0, 0, 1, 3, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 0, 0, 0, 2, 2, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 0, 0, 0, 2, 3, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 0, 0, 0, 3, 3, 4, 4, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 1, 0, 0, 0, 1, 4, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 0, 0, 0, 2, 4, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 0, 0, 0, 3, 4, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 5, 5]
# 1 3 6 10 15 "21" 28 36 45 55
# side7 OK 56
pyramid_1side_7__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 5, 5]
pyramid_1side_7__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 5, 5]
pyramid_1side_7__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 1, 1, 0, 1, 1, 1, 1, 3, 5, 5]
pyramid_1side_7__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 5, 5]
pyramid_1side_7__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 1, 1, 0, 1, 1, 1, 2, 3, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 1, 1, 0, 1, 1, 1, 3, 3, 5, 5]
pyramid_1side_7__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 5, 5]
pyramid_1side_7__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 1, 1, 0, 1, 1, 2, 2, 3, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 1, 1, 0, 1, 1, 2, 3, 3, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 1, 1, 0, 1, 1, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_6__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 5, 5]
pyramid_1side_7__2side_6__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 1, 0, 1, 2, 2, 2, 3, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 1, 0, 1, 2, 2, 3, 3, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 1, 0, 1, 3, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 5, 5]
pyramid_1side_7__2side_7__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 2, 0, 2, 2, 2, 2, 3, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 2, 0, 2, 2, 2, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 2, 0, 2, 2, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 2, 0, 2, 3, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 1, 1, 0, 1, 1, 1, 1, 4, 5, 5]
pyramid_1side_7__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 1, 1, 0, 1, 1, 1, 2, 4, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 1, 1, 0, 1, 1, 1, 3, 4, 5, 5]
pyramid_1side_7__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 1, 1, 0, 1, 1, 2, 2, 4, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 1, 1, 0, 1, 1, 2, 3, 4, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 1, 1, 0, 1, 1, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_6__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 1, 0, 1, 2, 2, 2, 4, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 1, 0, 1, 2, 2, 3, 4, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 1, 0, 1, 2, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 1, 0, 1, 3, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 2, 0, 2, 2, 2, 2, 4, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 2, 0, 2, 2, 2, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 2, 0, 2, 2, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 2, 0, 2, 3, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 0, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 1, 1, 0, 1, 1, 1, 4, 4, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 1, 1, 0, 1, 1, 2, 4, 4, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 1, 1, 0, 1, 1, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 1, 0, 1, 2, 2, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 1, 0, 1, 2, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 1, 0, 1, 3, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 2, 0, 2, 2, 2, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 2, 0, 2, 2, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 2, 0, 2, 3, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 0, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 1, 1, 0, 1, 1, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 1, 0, 1, 2, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 1, 0, 1, 3, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 2, 0, 2, 2, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 2, 0, 2, 3, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 0, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 1, 0, 1, 4, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 2, 0, 2, 4, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 0, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 5, 5]
# 1 3 6 10 15 21 "28" 36 45 55
# side8 OK 84
pyramid_1side_8__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 5]
pyramid_1side_8__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 5, 5]
pyramid_1side_8__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 5, 5]
pyramid_1side_8__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 5, 5]
pyramid_1side_8__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 5, 5]
pyramid_1side_8__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 1, 1, 1, 1, 1, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 1, 1, 1, 1, 1, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_6__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_6__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 1, 1, 1, 2, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 1, 1, 1, 2, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 1, 1, 1, 2, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_7__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 2, 1, 2, 2, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 2, 1, 2, 2, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 2, 1, 2, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_8__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 5, 5]
pyramid_1side_8__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 1, 1, 1, 1, 1, 1, 2, 4, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 1, 1, 1, 1, 1, 1, 3, 4, 5, 5]
pyramid_1side_8__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 1, 1, 1, 1, 1, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 1, 1, 1, 1, 1, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 1, 1, 1, 1, 1, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_6__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 1, 1, 1, 2, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 1, 1, 1, 2, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 1, 1, 1, 2, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 1, 1, 1, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 2, 1, 2, 2, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 2, 1, 2, 2, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 2, 1, 2, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 1, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 2, 2, 2, 2, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 2, 2, 2, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 2, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 1, 1, 1, 1, 1, 1, 4, 4, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 1, 1, 1, 1, 1, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 1, 1, 1, 1, 1, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 1, 1, 1, 2, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 1, 1, 1, 2, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 1, 1, 1, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 2, 1, 2, 2, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 2, 1, 2, 2, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 2, 1, 2, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 1, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 2, 2, 2, 2, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 2, 2, 2, 2, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 2, 2, 2, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 2, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 1, 1, 1, 1, 1, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 1, 1, 1, 2, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 1, 1, 1, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 2, 1, 2, 2, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 2, 1, 2, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 1, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 2, 2, 2, 2, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 2, 2, 2, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 2, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 1, 1, 1, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 2, 1, 2, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 1, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 2, 2, 2, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 2, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 3, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 1, 4, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 2, 4, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s2 = [5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5]
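# NOTE (added reading guide; inferred from the tables in this file, not from an original
# docstring): within each "### 5sideK" block the groups are keyed by the 1side value.  The
# commented sequence "1 3 6 10 15 21 28 36 45 55" lists the triangular numbers C(n+1, 2), with
# quotes marking the current group's position, and the "OK n" tallies (1, 4, 10, 20, 35,
# 56, ...) are the tetrahedral numbers C(n+2, 3), i.e. how many weakly ordered
# (2side, 3side, 4side) combinations the group enumerates.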
##################################
### 5side3
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side3 OK 1
pyramid_1side_3__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side4 OK 4
pyramid_1side_4__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 0, 0, 0, 0, 0, 0, 0, 1, 5, 5, 5]
pyramid_1side_4__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 0, 0, 0, 0, 0, 0, 0, 2, 5, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 0, 0, 0, 0, 0, 0, 0, 3, 5, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 0, 0, 0, 0, 0, 0, 0, 4, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side5 OK 10
pyramid_1side_5__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 0, 0, 0, 0, 0, 1, 1, 5, 5, 5]
pyramid_1side_5__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 0, 0, 0, 0, 0, 1, 2, 5, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 0, 0, 0, 0, 0, 1, 3, 5, 5, 5]
pyramid_1side_5__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 0, 0, 0, 0, 0, 2, 2, 5, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 0, 0, 0, 0, 0, 2, 3, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 0, 0, 0, 0, 0, 3, 3, 5, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 0, 0, 0, 0, 0, 1, 4, 5, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 0, 0, 0, 0, 0, 2, 4, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 0, 0, 0, 0, 0, 3, 4, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 0, 0, 0, 0, 0, 4, 4, 5, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side6 OK 20
pyramid_1side_6__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 1, 0, 0, 0, 1, 1, 1, 5, 5, 5]
pyramid_1side_6__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 1, 0, 0, 0, 1, 1, 2, 5, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 1, 0, 0, 0, 1, 1, 3, 5, 5, 5]
pyramid_1side_6__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 1, 0, 0, 0, 1, 2, 2, 5, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 1, 0, 0, 0, 1, 2, 3, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 1, 0, 0, 0, 1, 3, 3, 5, 5, 5]
pyramid_1side_6__2side_6__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 0, 0, 0, 2, 2, 2, 5, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 0, 0, 0, 2, 2, 3, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 0, 0, 0, 2, 3, 3, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 0, 0, 0, 3, 3, 3, 5, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 1, 0, 0, 0, 1, 1, 4, 5, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 1, 0, 0, 0, 1, 2, 4, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 1, 0, 0, 0, 1, 3, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 0, 0, 0, 2, 2, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 0, 0, 0, 2, 3, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 0, 0, 0, 3, 3, 4, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 1, 0, 0, 0, 1, 4, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 0, 0, 0, 2, 4, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 0, 0, 0, 3, 4, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 0, 0, 0, 4, 4, 4, 5, 5, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side7 OK 35
pyramid_1side_7__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 1, 1, 0, 1, 1, 1, 1, 5, 5, 5]
pyramid_1side_7__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 1, 1, 0, 1, 1, 1, 2, 5, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 1, 1, 0, 1, 1, 1, 3, 5, 5, 5]
pyramid_1side_7__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 1, 1, 0, 1, 1, 2, 2, 5, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 1, 1, 0, 1, 1, 2, 3, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 1, 1, 0, 1, 1, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_6__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 1, 0, 1, 2, 2, 2, 5, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 1, 0, 1, 2, 2, 3, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 1, 0, 1, 2, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 1, 0, 1, 3, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 2, 0, 2, 2, 2, 2, 5, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 2, 0, 2, 2, 2, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 2, 0, 2, 2, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 2, 0, 2, 3, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 0, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 1, 1, 0, 1, 1, 1, 4, 5, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 1, 1, 0, 1, 1, 2, 4, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 1, 1, 0, 1, 1, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 1, 0, 1, 2, 2, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 1, 0, 1, 3, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 2, 0, 2, 2, 2, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 2, 0, 2, 2, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 2, 0, 2, 3, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 0, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 1, 1, 0, 1, 1, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 1, 0, 1, 2, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 1, 0, 1, 3, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 2, 0, 2, 2, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 2, 0, 2, 3, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 0, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 1, 0, 1, 4, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 2, 0, 2, 4, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 0, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 0, 4, 4, 4, 4, 5, 5, 5]
# 1 3 6 10 15 "21" 28 36 45 55
# side8 OK 56
pyramid_1side_8__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 5, 5]
pyramid_1side_8__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 1, 1, 1, 1, 1, 1, 2, 5, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 1, 1, 1, 1, 1, 1, 3, 5, 5, 5]
pyramid_1side_8__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 1, 1, 1, 1, 1, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 1, 1, 1, 1, 1, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 1, 1, 1, 1, 1, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_6__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 1, 1, 1, 2, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 1, 1, 1, 2, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 1, 1, 1, 2, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 1, 1, 1, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 2, 1, 2, 2, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 2, 1, 2, 2, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 2, 1, 2, 2, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 2, 1, 2, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 1, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 2, 2, 2, 2, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 2, 2, 2, 2, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 2, 2, 2, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 2, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 1, 1, 1, 1, 1, 1, 4, 5, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 1, 1, 1, 1, 1, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 1, 1, 1, 1, 1, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 1, 1, 1, 2, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 1, 1, 1, 2, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 1, 1, 1, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 2, 1, 2, 2, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 2, 1, 2, 2, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 2, 1, 2, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 1, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 2, 2, 2, 2, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 2, 2, 2, 2, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 2, 2, 2, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 2, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 1, 1, 1, 1, 1, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 1, 1, 1, 2, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 1, 1, 1, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 2, 1, 2, 2, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 2, 1, 2, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 1, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 2, 2, 2, 2, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 2, 2, 2, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 2, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 3, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 1, 1, 1, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 2, 1, 2, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 1, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 2, 2, 2, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 2, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 3, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 1, 4, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 2, 4, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 3, 4, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s3 = [5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5]
##################################
### 5side4
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side4 OK 1
pyramid_1side_4__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side5 OK 4
pyramid_1side_5__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 0, 0, 0, 0, 0, 1, 5, 5, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 0, 0, 0, 0, 0, 2, 5, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 0, 0, 0, 0, 0, 3, 5, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 0, 0, 0, 0, 0, 4, 5, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side6 OK 10
pyramid_1side_6__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 1, 0, 0, 0, 1, 1, 5, 5, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 1, 0, 0, 0, 1, 2, 5, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 1, 0, 0, 0, 1, 3, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 0, 0, 0, 2, 2, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 0, 0, 0, 2, 3, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 0, 0, 0, 3, 3, 5, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 1, 0, 0, 0, 1, 4, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 0, 0, 0, 2, 4, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 0, 0, 0, 3, 4, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 0, 0, 0, 4, 4, 5, 5, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side7 OK 20
pyramid_1side_7__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 1, 1, 0, 1, 1, 1, 5, 5, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 1, 1, 0, 1, 1, 2, 5, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 1, 1, 0, 1, 1, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 1, 0, 1, 2, 2, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 1, 0, 1, 2, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 1, 0, 1, 3, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 2, 0, 2, 2, 2, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 2, 0, 2, 2, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 2, 0, 2, 3, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 0, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 1, 1, 0, 1, 1, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 1, 0, 1, 2, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 1, 0, 1, 3, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 2, 0, 2, 2, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 2, 0, 2, 3, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 0, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 1, 0, 1, 4, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 2, 0, 2, 4, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 0, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 0, 4, 4, 4, 5, 5, 5, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side8 OK 35
pyramid_1side_8__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 1, 1, 1, 1, 1, 1, 5, 5, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 1, 1, 1, 1, 1, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 1, 1, 1, 1, 1, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 1, 1, 1, 2, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 1, 1, 1, 2, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 1, 1, 1, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 2, 1, 2, 2, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 2, 1, 2, 2, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 2, 1, 2, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 1, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 2, 2, 2, 2, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 2, 2, 2, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 2, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 3, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 1, 1, 1, 1, 1, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 1, 1, 1, 2, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 1, 1, 1, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 2, 1, 2, 2, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 2, 1, 2, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 1, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 2, 2, 2, 2, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 2, 2, 2, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 2, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 3, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 1, 1, 1, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 2, 1, 2, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 1, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 2, 2, 2, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 2, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 3, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 1, 4, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 2, 4, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 3, 4, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s4 = [5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5]
##################################
### 5side5
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side5 OK 1
pyramid_1side_5__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 5, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side6 OK 4
pyramid_1side_6__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 1, 0, 0, 0, 1, 5, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 0, 0, 0, 2, 5, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 0, 0, 0, 3, 5, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 0, 0, 0, 4, 5, 5, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side7 OK 10
pyramid_1side_7__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 1, 1, 0, 1, 1, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 1, 0, 1, 2, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 1, 0, 1, 3, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 2, 0, 2, 2, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 2, 0, 2, 3, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 0, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 1, 0, 1, 4, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 2, 0, 2, 4, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 0, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 0, 4, 4, 5, 5, 5, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side8 OK 20
pyramid_1side_8__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 1, 1, 1, 1, 1, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 1, 1, 1, 2, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 1, 1, 1, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 2, 1, 2, 2, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 2, 1, 2, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 1, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 2, 2, 2, 2, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 2, 2, 2, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 2, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 1, 1, 1, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 2, 1, 2, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 1, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 2, 2, 2, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 2, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 3, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 1, 4, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 2, 4, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 3, 4, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s5 = [5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5]
##################################
### 5side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side6 OK 1
pyramid_1side_6__2side_6__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 0, 0, 0, 5, 5, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side7 OK 4
pyramid_1side_7__2side_6__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 1, 0, 1, 5, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 2, 0, 2, 5, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 0, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 0, 4, 5, 5, 5, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side8 OK 10
pyramid_1side_8__2side_6__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 1, 1, 1, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 2, 1, 2, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 1, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 2, 2, 2, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 2, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 3, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 1, 4, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 2, 4, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 3, 4, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s6 = [5, 5, 5, 5, 5, 5, 4, 4, 4, 5, 5, 5, 5, 5, 5]
##################################
### 5side7
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side7 OK 1
pyramid_1side_7__2side_7__3side_7_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 0, 5, 5, 5, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side8 OK 4
pyramid_1side_8__2side_7__3side_7_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 1, 5, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 2, 5, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 3, 5, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s7 = [5, 5, 5, 5, 5, 5, 5, 4, 5, 5, 5, 5, 5, 5, 5]
##################################
### 5side8
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side8 OK 1
pyramid_1side_8__2side_8__3side_8_4side_8_5s8 = [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5]
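# ------------------------------------------------------------------------------------------- #
# Illustrative sketch (added; a hypothetical helper, not part of the original enumeration):
# every pyramid_* list above follows one rule, so build_pyramid() below can reproduce any of
# them.  The 15 slots mirror depth_level=7 (7 down levels, 1 bottleneck, 7 up levels): slot i
# of the first half counts how many of the five side parameters exceed i, and the up path
# mirrors the down path.
def build_pyramid(s1, s2, s3, s4, s5):
    sides = (s1, s2, s3, s4, s5)                                # 1side .. 5side
    half = [sum(1 for s in sides if s > i) for i in range(8)]   # slots 0..7 (7 = bottleneck)
    return half + half[-2::-1]                                  # mirror back up -> 15 slots
assert build_pyramid(8, 6, 5, 3, 2) == pyramid_1side_8__2side_6__3side_5_4side_3_5s2
assert build_pyramid(8, 8, 8, 8, 8) == pyramid_1side_8__2side_8__3side_8_4side_8_5s8
# ------------------------------------------------------------------------------------------- #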
###############################################################################################################################################################################################
###############################################################################################################################################################################################
###############################################################################################################################################################################################
##################################
### 1side1
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_1__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
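# Illustrative sketch (added assumption: the ch032_* builders in this file differ only in
# conv_block_num, which the lines themselves show).  A hypothetical factory like build_ch032()
# would stamp out the same configuration; the explicit one-assignment-per-model style below is
# kept as-is for grep-ability.
def build_ch032(conv_block_num):
    return (KModel_builder().set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3,
                       padding="valid", hid_ch=32, depth_level=7, out_ch=1,
                       unet_acti="sigmoid", conv_block_num=conv_block_num,
                       ch_upper_bound=2 ** 14)
            .set_gen_op(use_gen_op).set_train_step(use_train_step))
# e.g. build_ch032(pyramid_1side_1__2side_1__3side_1_4side_1_5s1) matches the line above.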
##################################
### 1side2
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side3
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side4
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side5
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
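# Illustrative sanity check (an inference from the naming scheme above, not part of
# the original pipeline): for 2side = n the builders enumerate every nested
# combination 3side <= n, 4side <= 3side, 5side <= 4side, so each "OK" total is
# the tetrahedral number C(n + 2, 3) -- 1, 4, 10, 20, 35, 56 for n = 1..6.
if __debug__:
    import math
    assert [math.comb(n + 2, 3) for n in range(1, 7)] == [1, 4, 10, 20, 35, 56]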
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
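# A minimal refactoring sketch (assumptions: the pyramid_* combination lists are
# module-level globals, and KModel_builder / MODEL_NAME / use_gen_op /
# use_train_step are in scope as used above). The hand-enumerated ch032_*
# builders for one 1side/2side pair could instead be produced by nested loops
# mirroring the 3side >= 4side >= 5side rule. `_build_ch032` and
# `make_ch032_pyramids` are hypothetical helpers, not part of the original code.
def _build_ch032(conv_block_num):
    # Same chained configuration as every ch032_* line in this file.
    return (KModel_builder()
            .set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True,
                       kernel_size=3, padding="valid", hid_ch=32,
                       depth_level=7, out_ch=1, unet_acti="sigmoid",
                       conv_block_num=conv_block_num, ch_upper_bound=2 ** 14)
            .set_gen_op(use_gen_op)
            .set_train_step(use_train_step))

def make_ch032_pyramids(side1, side2):
    # Enumerate 3side <= side2, 4side <= 3side, 5side <= 4side and build each.
    built = {}
    for side3 in range(1, side2 + 1):
        for side4 in range(1, side3 + 1):
            for side5 in range(1, side4 + 1):
                name = (f"pyramid_1side_{side1}__2side_{side2}"
                        f"__3side_{side3}_4side_{side4}_5s{side5}")
                built[f"ch032_{name}"] = _build_ch032(globals()[name])
    return built
# e.g. globals().update(make_ch032_pyramids(5, 5)) would reproduce the 35
# builders of the 2side5 block directly above.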
##################################
### 5side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
ch032_pyramid_1side_6__2side_6__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
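# --- end of the 1side_6 pyramid builders; the 1side_7 family below follows the
#     same enumeration scheme (see the note after its header) ---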
##################################
### 1side7
##################################
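# NOTE (editor's sketch, not an original helper): every builder in this family
# shares the same set_unet3 keyword arguments and differs only in the
# conv_block_num pyramid passed in. The names encode a non-increasing chain
# 2side >= 3side >= 4side >= 5s, so for a given 2side = n the quoted
# triangular number in the comments below counts the 3side/4side pairs,
# while the "OK" tally counts the full 3side/4side/5s triples, i.e. the
# tetrahedral number C(n + 2, 3). A minimal sanity check of those tallies,
# assuming that enumeration holds (math.comb needs Python >= 3.8; the helper
# name _expected_pyramid_count is hypothetical, not used elsewhere here):
import math

def _expected_pyramid_count(two_side):
    """Tetrahedral number C(n + 2, 3): 1, 4, 10, 20, 35, 56, ... matching
    the "OK n" tallies annotated throughout this section."""
    return math.comb(two_side + 2, 3)

assert [_expected_pyramid_count(n) for n in range(1, 7)] == [1, 4, 10, 20, 35, 56]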
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_7__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_7__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_7__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_7__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_7__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
ch032_pyramid_1side_7__2side_6__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 15 21 "28" 36 45 55
# 2side7 OK 84
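# Note on the bookkeeping comments above (an interpretation, not from the
# original author): the row 1 3 6 10 15 21 28 36 45 55 lists the triangular
# numbers T_k = k*(k+1)/2, i.e. the number of (4side, 5side) pairs with
# 5side <= 4side <= 3side = k; the quoted entry marks T_{2side}.  The "OK"
# value is the running total, the tetrahedral number C(2side+2, 3) counting
# all (3side, 4side, 5side) combos.  A minimal sanity check follows
# (hypothetical helper, not used by the builder pipeline):
from math import comb
def _n_pyramid_combos(two_side):
    """Count (3side, 4side, 5side) triples with 5side <= 4side <= 3side <= two_side."""
    return comb(two_side + 2, 3)
assert _n_pyramid_combos(7) == 84   # matches "2side7 OK 84" above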
ch032_pyramid_1side_7__2side_7__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
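# The 84 definitions above differ only in which pyramid_* list they reference;
# everything else is one fixed template.  Below is a sketch of the equivalent
# loop, for illustration only -- it assumes the pyramid_* combo lists are plain
# module-level names (reachable via globals()), and it is never called here;
# the explicit lines above remain the source of truth.
def _build_ch032_family(prefix):
    """Hypothetical generator: rebuild ch032_<combo> for every pyramid list
    whose name starts with `prefix`, using the same builder template."""
    for combo_name in [k for k in list(globals()) if k.startswith(prefix)]:
        globals()["ch032_" + combo_name] = (
            KModel_builder()
            .set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3,
                       padding="valid", hid_ch=32, depth_level=7, out_ch=1,
                       unet_acti="sigmoid", conv_block_num=globals()[combo_name],
                       ch_upper_bound=2 ** 14)
            .set_gen_op(use_gen_op)
            .set_train_step(use_train_step))
# e.g. _build_ch032_family("pyramid_1side_7__2side_7__")  (illustration; not executed)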
##################################
### 1side8
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_8__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_8__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_8__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_8__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_8__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
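# --- editorial sketch (not part of the original build path): every ch032_pyramid_* line
# in this block repeats the same builder chain and differs only in conv_block_num, so the
# shared part could be expressed once. `_ch032_flow_unet2` is a hypothetical helper name
# introduced here for illustration; all other identifiers (KModel_builder,
# MODEL_NAME.flow_unet2, use_gen_op, use_train_step) come from the surrounding module.
def _ch032_flow_unet2(pyramid_cfg):
    """Build one ch032 flow_unet2 model spec for a given pyramid conv_block_num (sketch)."""
    return (KModel_builder()
            .set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True,
                       kernel_size=3, padding="valid", hid_ch=32, depth_level=7,
                       out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_cfg,
                       ch_upper_bound=2 ** 14)
            .set_gen_op(use_gen_op)
            .set_train_step(use_train_step))
# e.g. _ch032_flow_unet2(pyramid_1side_8__2side_5__3side_5_4side_5_5s5) builds the same
# chain as the ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s5 line above.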
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
ch032_pyramid_1side_8__2side_6__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
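# --- editorial sanity-check sketch (commented out; assumes the naming pattern above and
# that this module defines no other names starting with "ch032_pyramid_1side_8__2side_6__"):
# the 2side=6 block above should contain exactly 56 builders, matching its header comment.
# import re
# assert len([k for k in globals()
#             if re.match(r"ch032_pyramid_1side_8__2side_6__", k)]) == 56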
# 1 3 6 10 15 21 "28" 36 45 55
# 2side7 OK 84
ch032_pyramid_1side_8__2side_7__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# Triangular numbers T(n): 1 3 6 10 15 21 28 "36" 45 55  (the quoted "36" = T(8) is the largest slice used in this block)
# 2side8 OK: 120 builders in total
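# (Aside, not part of the original list: a hedged sanity check of the tally above.
#  Assuming the nesting visible in the variable names — 3side <= 2side, 4side <= 3side,
#  5s <= 4side — each 3side=k contributes T(k) = k*(k+1)//2 (4side, 5s) pairs, so the
#  2side_8 block should hold sum(T(1)..T(8)) = 120 builders.)
assert sum(k * (k + 1) // 2 for k in range(1, 9)) == 120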
ch032_pyramid_1side_8__2side_8__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s8 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s8, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
###############################################################################################################################################################################################
###############################################################################################################################################################################################
if(__name__ == "__main__"):
import numpy as np
print("build_model cost time:", time.time() - start_time)
data = np.zeros(shape=(1, 512, 512, 1))
use_model = ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1
use_model = use_model.build()
result = use_model.generator(data)
print(result.shape)
from kong_util.tf_model_util import Show_model_weights
Show_model_weights(use_model.generator)
use_model.generator.summary()
print(use_model.model_describe)
|
[
"s89334roy@yahoo.com.tw"
] |
s89334roy@yahoo.com.tw
|
e34c717eb62a620f52cb209c03274c86b346ba74
|
ccb17eaa277838efd23f8bd2522b5e69fda6ec5b
|
/hello_world/hola.py
|
ec5c538b8d0b8973338934effe253ab5f1235aca
|
[] |
no_license
|
danilozte/learningC
|
3c145869a3853c7ef56720ba076547ee3d6bb9ad
|
4a994ac3f28e78023142a539c47afd4c0968dc1b
|
refs/heads/master
| 2023-03-30T12:47:19.922288
| 2021-03-11T22:07:24
| 2021-03-11T22:07:24
| 346,822,840
| 0
| 1
| null | 2021-03-11T20:51:06
| 2021-03-11T20:03:25
|
C
|
UTF-8
|
Python
| false
| false
| 100
|
py
|
# this is the only .py file
print("Python is the best")
print("Python is better than C")
input(" ")
|
[
"felipeagq99@gmail.com"
] |
felipeagq99@gmail.com
|
b1d84ff6d8719c6d1cb346458bafaa88df886d86
|
0facb323be8a76bb4c168641309972fa77cbecf2
|
/Configurations/HWWSemiLepHighMass/nanoAODv5/v6_production/2017/NJET_biined_WJets/SKIM10/HMVar10_Full_ALL_var/MassPoints/structure_M1500.py
|
006d035cd83abd3e70ffc306361571ee477e383b
|
[] |
no_license
|
bhoh/SNuAnalytics
|
ef0a1ba9fa0d682834672a831739dfcfa1e7486b
|
34d1fc062e212da152faa83be50561600819df0e
|
refs/heads/master
| 2023-07-06T03:23:45.343449
| 2023-06-26T12:18:28
| 2023-06-26T12:18:28
| 242,880,298
| 0
| 1
| null | 2020-02-25T01:17:50
| 2020-02-25T01:17:49
| null |
UTF-8
|
Python
| false
| false
| 1,725
|
py
|
#['WW', 'ggHWWlnuqq_M1500', 'DY', 'DATA', 'WZ', 'ggHWWlnuqq_M125', 'ZZZ', 'ggHWWlnuqq_M900', 'vbfHWWlnuqq_M500', 'Wjets1j', 'QCD_MU', 'WZZ', 'vbfHWWlnuqq_M900', 'QCD_bcToE', 'Wjets2j', 'QCD_EM', 'ggHWWlnuqq_M500', 'ZZ', 'WWW', 'vbfHWWlnuqq_M1500', 'vbfHWWlnuqq_M125', 'WWZ', 'Wjets0j', 'top']
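# Note: `structure` is not defined in this file; the analysis framework that
# loads this configuration is expected to provide it (an assumption, based on
# how the dict is filled below).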
QCD_MU=['QCD_Pt-15to20_MuEnrichedPt5',
'QCD_Pt-20to30_MuEnrichedPt5',
'QCD_Pt-30to50_MuEnrichedPt5',
'QCD_Pt-50to80_MuEnrichedPt5',
'QCD_Pt-80to120_MuEnrichedPt5',
'QCD_Pt-120to170_MuEnrichedPt5',
'QCD_Pt-170to300_MuEnrichedPt5',
'QCD_Pt-300to470_MuEnrichedPt5',
'QCD_Pt-470to600_MuEnrichedPt5',
'QCD_Pt-600to800_MuEnrichedPt5',
'QCD_Pt-800to1000_MuEnrichedPt5',
'QCD_Pt-1000toInf_MuEnrichedPt5',
]
QCD_EM=[
'QCD_Pt-20to30_EMEnriched',
'QCD_Pt-30to50_EMEnriched',
'QCD_Pt-50to80_EMEnriched',
'QCD_Pt-80to120_EMEnriched',
'QCD_Pt-120to170_EMEnriched',
'QCD_Pt-170to300_EMEnriched',
'QCD_Pt-300toInf_EMEnriched'
]
QCD_bcToE=[
'QCD_Pt_20to30_bcToE',
'QCD_Pt_30to80_bcToE',
'QCD_Pt_80to170_bcToE',
'QCD_Pt_170to250_bcToE',
'QCD_Pt_250toInf_bcToE',
]
for name in [ 'DY', 'WZZ', 'WWZ','WWW','ZZZ', 'ZZ', 'WZ', 'WW', 'WpWmJJ_EWK_QCD_noHiggs', 'top', 'Wjets0j', 'Wjets1j', 'Wjets2j','vbfHWWlnuqq_M125','ggHWWlnuqq_M125'] + ['QCD_MU','QCD_EM','QCD_bcToE']:
structure[name] = {
'isSignal' : 0,
'isData' : 0
}
#ggHWWlnuqq_M1500_S_B_I
structure['ggHWWlnuqq_M1500'] = {
'isSignal' : 1,
'isData' : 0
}
structure['vbfHWWlnuqq_M1500'] = {
'isSignal' : 1,
'isData' : 0
}
structure['PseudoData'] = {
'isSignal' : 0,
'isData' : 1
}
|
[
"soarnsoar@gmail.com"
] |
soarnsoar@gmail.com
|
a7a4d6e5592f92cb3623341644968b8963217700
|
a80916c83c67cacb33d7d1a66ad3c6acb9b3cc32
|
/SVM-examples/breast-SVM.py
|
514cd4bcc742df07dcdd7b82e5822109d7ba86d6
|
[] |
no_license
|
brynelee/gk_dataanalysis_practices
|
3f82255abd253fcdca0fedcc2fffba124ac55b70
|
723dac7592a55fe6b422c74c03684a11d869e72d
|
refs/heads/master
| 2020-07-22T21:36:22.935095
| 2020-04-14T16:21:56
| 2020-04-14T16:21:56
| 207,335,160
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,090
|
py
|
# -*- coding: utf-8 -*-
# Breast cancer diagnosis classification
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn import svm
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
# Load the dataset; put the data file in this directory first
data = pd.read_csv("./SVM-examples/data.csv")
# Data exploration
# The dataset has many columns, so make pandas display all DataFrame columns
pd.set_option('display.max_columns', None)
print(data.columns)
print(data.head(5))
print(data.describe())
# Split the feature columns into 3 groups
features_mean= list(data.columns[2:12])
features_se= list(data.columns[12:22])
features_worst=list(data.columns[22:32])
# Data cleaning
# The id column is useless, drop it
data.drop("id",axis=1,inplace=True)
# Replace B (benign) with 0 and M (malignant) with 1
data['diagnosis']=data['diagnosis'].map({'M':1,'B':0})
# Visualize the tumor diagnosis counts
sns.countplot(data['diagnosis'],label="Count")
plt.show()
# Use a heatmap to show the correlations between the features_mean columns
corr = data[features_mean].corr()
plt.figure(figsize=(14,14))
# annot=True prints the value inside each cell
sns.heatmap(corr, annot=True)
plt.show()
# Feature selection
features_remain = ['radius_mean','texture_mean', 'smoothness_mean','compactness_mean','symmetry_mean', 'fractal_dimension_mean']
# Hold out 30% of the data as the test set and use the rest for training
train, test = train_test_split(data, test_size = 0.3)  # here the main data is split into train and test
# Use the selected features as the training and test data
train_X = train[features_remain]
train_y=train['diagnosis']
test_X= test[features_remain]
test_y =test['diagnosis']
# Z-Score normalization: give every feature dimension mean 0 and variance 1
ss = StandardScaler()
train_X = ss.fit_transform(train_X)
test_X = ss.transform(test_X)
# Create the SVM classifier
model = svm.SVC()
# Train on the training set
model.fit(train_X,train_y)
# Predict on the test set
prediction=model.predict(test_X)
print('Accuracy: ', metrics.accuracy_score(prediction,test_y))
|
[
"bryne_lxd@sina.com"
] |
bryne_lxd@sina.com
|
f1d925c52d0d73f77dbb39812e5db0b68f59cf6d
|
84a65bb80441dea2e3b5d0e8b957e68762bd6c60
|
/snowmass/snowmassSubmit.py
|
ba35c48d0c4dd000c9bb12338e9f1509a6d21285
|
[] |
no_license
|
jstupak/UserCode
|
cc559c04ebe074e6c3add08e1da95da08e73ff20
|
4a720642914b5f779de2bb02eaa41ef2927161a0
|
refs/heads/master
| 2021-01-19T18:10:56.212998
| 2013-06-29T20:14:34
| 2013-06-29T20:14:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,284
|
py
|
#!/usr/bin/python
import os, sys
from datetime import datetime
from snowmassSamples import allSamples as theSamples
relBase = os.environ['CMSSW_BASE']
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#Job config
doBackground=True
doSignal=True
analysisOutputDir='/uscms_data/d1/jstupak/2hdm'
condorJobTempl=relBase+"/src/JohnStupak/snowmass/twoHiggsDoublet.templ.job"
condorScriptTempl=relBase+"/src/JohnStupak/snowmass/twoHiggsDoublet.templ.csh"
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#Analysis config
################################################################################################################################################
cTime=datetime.now()
date=str(cTime.year)+'_'+str(cTime.month)+'_'+str(cTime.day)
condorDir=analysisOutputDir+'/'+date
#Make sure twoHiggsDoublet.cpp is pre-compiled
#os.system('root -l -b -q compile.C')
if len(sys.argv)==2:
submissionID=sys.argv[1]
condorDir+='/'+submissionID
rc=os.system('mkdir -p '+condorDir)
if rc!=0: raise Exception('condorDir already exists - '+condorDir)
################################################################################################################################################
################################################################################################################################################
################################################################################################################################################
def submitJobs():
print '#################################################'
print 'Condor Job Submission'
print
print 'Condor Work Area:',condorDir
print 'Condor Job File Template:',condorJobTempl
print 'Condor Script Template:',condorScriptTempl
print
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
for sample in theSamples:
if (sample.isBackground and doBackground) or (sample.isSignal and doSignal):
print '-------------------------------------------------'
jobID=sample.name
jobDir=condorDir+'/'+jobID
os.system('mkdir '+jobDir)
print 'Sample Name:',sample.name
print 'Number Of Input Files:',len(sample.inputList)
jobNo=1
firstFile=0
lastFile=firstFile+sample.filesPerJob-1
while firstFile<len(sample.inputList):
print '- - - - - - - - - - - - - - - - - - - - - - - - -'
if lastFile>=len(sample.inputList): lastFile=len(sample.inputList)-1
files=sample.inputList[firstFile:lastFile+1]
fileNamesBase=jobDir+'/'+sample.name+'_'+str(jobNo)
fileList=open(fileNamesBase+'.txt','w')
for file in files:
fileList.write(file+'\n')
fileList.close()
condorJobFile=fileNamesBase+'.job'
condorScriptFile=fileNamesBase+'.csh'
multiSed(condorJobTempl,condorJobFile,[['DIRECTORY',jobDir],
['PREFIX',jobID],
['JOBID',jobNo]])
multiSed(condorScriptTempl,condorScriptFile,[['CMSSWBASE',relBase],
['DIRECTORY',jobDir],
['PREFIX',jobID],
['JOBID',jobNo],
['INPUTS',fileNamesBase+'.txt'],
['OUTPUT',sample.name+'_'+str(jobNo)+'.root']])
os.system('chmod u+x '+condorScriptFile)
submitCommand='condor_submit '+condorJobFile
print submitCommand
os.system('cd '+jobDir+'; '+submitCommand+'; cd -')
jobNo+=1
firstFile=lastFile+1
lastFile=firstFile+sample.filesPerJob-1
os.system('tar -czvf '+condorDir+'/backup.tar.gz --exclude="*.log" --exclude="*.root" --exclude="*.pdf" --exclude="*.eps" --exclude=".backup" '+relBase+'/src/JohnStupak/snowmass/*')
################################################################################################################################################
def multiSed(oldFileName,newFileName,replacements):
os.system('cp '+oldFileName+' '+newFileName)
for replacement in replacements:
if len(replacement)>2: raise Exception("Invalid argument to multiSed")
old=replacement[0]
new=str(replacement[1])
command='sed "s#'+old+'#'+new+'#" '+newFileName+' --in-place'
#print command
os.system(command)
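
# Usage sketch (illustrative values): fill a template's placeholders.
#   multiSed('job.templ', 'run1.job', [['JOBID', 1], ['PREFIX', 'ttbar']])
# copies job.templ to run1.job, then replaces every literal 'JOBID' and
# 'PREFIX' occurrence in run1.job in place via sed.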
################################################################################################################################################
#NOW FINALLY DO THE SUBMISSION
submitJobs()
|
[
""
] | |
ce724e74bd828e57ea1364efbcb346377f4226af
|
da76317b35fa2848589ffd5b38fe0182c9de6f73
|
/annotations/views.py
|
f3f7465ced5ba5e077a957dcc21598127eef84f5
|
[] |
no_license
|
hemagso/pokefind
|
d5a03d1767c70466c32d6c6577ebc4ea169681cf
|
59bb8bb7e47771416a808cb1f828b85e64a4e46e
|
refs/heads/master
| 2020-04-19T12:44:13.153254
| 2019-01-30T20:40:43
| 2019-01-30T20:40:43
| 168,198,847
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,559
|
py
|
# Django boilerplate
from django.http import HttpResponse
from django.shortcuts import render
from django.db.models import Max
# Data models
from .models import Pokemon, Annotation, AreaAnnotation, Image, FAQItem, FAQGroup
# Standard library
from datetime import datetime
import json
import random
def index(request):
"""" index view
Renders the main webpage of the application
:param request: The HTTP request sent by the client
:return Rendered template of the application
"""
faq_groups = FAQGroup.objects.all().order_by("priority")
faq_questions = FAQItem.objects.all().order_by("priority")
faq_items = {}
for group in faq_groups:
faq_items[group.name] = []
for question in faq_questions:
faq_items[question.group.name].append((question.question, question.answer))
context = {
"faq_items": faq_items
}
return render(request, 'annotations/index.html', context)
def get_pokemon_list(request):
pokemon_list = {pokemon.id: pokemon.name for pokemon in Pokemon.objects.all()}
return HttpResponse(json.dumps(pokemon_list))
def make(request):
"""" make view
Submit a new annotation to the database.
:param request: The HTTP request sent by the client
:return String containing "OK"
todo: Add error handling and feedback to the client
todo: Use built-in timezone support
"""
areas = json.loads(request.POST["annotations"])
frame_id = request.POST["frame_id"]
img = Image.objects.get(pk=frame_id)
annotation = Annotation()
annotation.image = img
annotation.timestamp = datetime.now()
annotation.save()
for area in areas:
new_area = AreaAnnotation()
new_area.annotation = annotation
new_area.width = area["bbox"]["width"]
new_area.height = area["bbox"]["height"]
new_area.x = area["bbox"]["x"]
new_area.y = area["bbox"]["y"]
new_area.comment = area["comment"]
if area["id"]:
new_area.pokemon = Pokemon.objects.get(id=area["id"])
new_area.save()
return HttpResponse("OK")
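
# Illustrative request payload for the make view (hypothetical values, matching
# the fields parsed above):
#   POST frame_id=42
#        annotations='[{"bbox": {"x": 10, "y": 20, "width": 64, "height": 48},
#                       "comment": "partially occluded", "id": 25}]'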
def frame_image(request, id):
"""" frame_image view
Serve one frame of an specific Pokemon Episode
:param request: The HTTP Request sent by the client
:param id: ID of the frame requested
:return HttpResponse object containing the image
todo: Add error handling and feedback to the client
"""
img = Image.objects.get(id=id)
img_path = "annotations/data/frames/season_{season:02d}/episode_{episode:03d}/frame_{frame:09d}.jpg".format(
season=img.season,
episode=img.episode,
frame=img.frame
)
with open(img_path, "rb") as f:
img_data = f.read()
return HttpResponse(img_data, content_type="image/jpg")
all_frames_id = [image.id for image in Image.objects.all()]
def get_frame(request):
"""" get_frame view
Select one random frame ID to be sent over to the client
:param request: The HTTP Request sent by the client
:return HttpResponse object containing the JSON representation of a frame and its metadata
todo: Do a better random frame selection
todo: Integrate with login so as not to serve repeated images to the same user (Low-priority)
"""
frame_id = random.choice(all_frames_id)
frame = Image.objects.get(id=frame_id)
ret_data = {
"id": str(frame.pk),
"season": frame.season,
"episode": frame.episode,
"frame": frame.frame
}
return HttpResponse(json.dumps(ret_data))
|
[
"hemagso@gmail.com"
] |
hemagso@gmail.com
|
6f89a5f67208f94eeef6c4be009c08ce1b707e3c
|
7262436a2fbb5645318b8cca080572a6deb507ca
|
/lexical/tokenize.py
|
a9911d9fb8fb3e55b0b906aa0035725539fa731d
|
[] |
no_license
|
lysine1217/lyspy
|
2047ff117f7c778cd80d876a8d2f9e5e051d25e0
|
c5356ea7c64b0b57773831c966aeb7c44ae895a6
|
refs/heads/master
| 2021-01-17T17:07:50.774709
| 2014-06-19T15:22:35
| 2014-06-19T15:22:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
# -*- coding: utf-8 -*-
"""
simple sentence tokenizer
"""
from .string_tokenize import *
def tokenize(string_or_listofstring, tolower=False, remove_punctions=False):
    if isinstance(string_or_listofstring, str):
        return string_tokenize(string_or_listofstring, tolower, remove_punctions)
    else:
        # forward the same options for every sentence in the list
        return [string_tokenize(sentence, tolower, remove_punctions)
                for sentence in string_or_listofstring]
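
# Usage sketch (illustrative; the exact tokens depend on string_tokenize's rules):
#   tokenize("Hello World", tolower=True)        -> one token list, lowercased
#   tokenize(["One sentence.", "Another one."])  -> one token list per input sentence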
|
[
"lixinjian@roo.nii.ac.jp"
] |
lixinjian@roo.nii.ac.jp
|
c3c0fe953162e894aab721bf5e7817ed8b7c94ce
|
16acbf7c73b172cf993a66ff8aa84376d12d1f19
|
/migrations/versions/18552074ce1a_.py
|
d5cc3ea58f68331ac846a8a26c9167aafc1545dd
|
[] |
no_license
|
apexkid/realive
|
e5c18c57f7aafd7f485db14a7055cac17b41db00
|
eb31655478308195716e744b2531dcbc0e1510a6
|
refs/heads/master
| 2020-04-09T02:15:42.686539
| 2015-04-25T12:10:21
| 2015-04-25T12:10:21
| 34,567,090
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,248
|
py
|
"""empty message
Revision ID: 18552074ce1a
Revises: None
Create Date: 2015-04-25 17:38:17.506401
"""
# revision identifiers, used by Alembic.
revision = '18552074ce1a'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('campaign',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('isactive', sa.Boolean(), nullable=False),
sa.Column('isdeleted', sa.Boolean(), nullable=False),
sa.Column('added_on', sa.DateTime(), nullable=False),
sa.Column('modified_on', sa.DateTime(), nullable=False),
sa.Column('city', sa.String(length=30), nullable=False),
sa.Column('officeLocation', sa.String(length=100), nullable=False),
sa.Column('localityPref', sa.String(length=100), nullable=False),
sa.Column('poi', sa.String(length=100), nullable=False),
sa.Column('livingCost', sa.Integer(), nullable=False),
sa.Column('priorities', sa.String(length=100), nullable=False),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('campaign')
### end Alembic commands ###
|
[
"aadi@0c4de9c6553e.ant.amazon.com"
] |
aadi@0c4de9c6553e.ant.amazon.com
|
b4bf75c5fddbcf8887a7ebd127e00e1c3a3fcbfd
|
df190642c10d93a1ded605748245d2cff9daf337
|
/MxNet/5-predict.py
|
ab191064e90eb33b462c6a2c24b04894a63c34e1
|
[] |
no_license
|
philloidin/AdvancedPythonForBio
|
fcb7a52bfc4f1fad85b17d89ee52790db81d6714
|
5d50664c78bc02228c59e8ccfe7a395d64f14a4e
|
refs/heads/master
| 2021-09-22T17:45:59.915389
| 2018-09-12T21:40:11
| 2018-09-12T21:40:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 918
|
py
|
# Note: this snippet continues earlier steps of the tutorial, which define
# `model`, `val_img`, and `val_iter`; the imports below are the ones this
# part itself needs.
import numpy as np
import matplotlib.pyplot as plt

plt.imshow(val_img[0], cmap='Greys_r')
plt.axis('off')
plt.show()
prob = model.predict(val_img[0:1].astype(np.float32)/255)[0]
assert max(prob) > 0.99, "Low prediction accuracy."
print 'Classified as %d with probability %f' % (prob.argmax(), max(prob))
valid_acc = model.score(val_iter)
print 'Validation accuracy: %f%%' % (valid_acc *100,)
assert valid_acc > 0.95, "Low validation accuracy."
from IPython.display import HTML
import cv2
import numpy as np
def classify(img):
img = img[len('data:image/png;base64,'):].decode('base64')
img = cv2.imdecode(np.fromstring(img, np.uint8), -1)
img = cv2.resize(img[:,:,3], (28,28))
img = img.astype(np.float32).reshape((1,1,28,28))/255.0
return model.predict(img)[0].argmax()
'''
To see the model in action, run the demo notebook at
https://github.com/dmlc/mxnet-notebooks/blob/master/python/tutorials/mnist.ipynb.
'''
HTML(filename="mnist_demo.html")
|
[
"lynnlangit@live.com"
] |
lynnlangit@live.com
|
9ea362e7e23645d74ada3c82eae8bd8ed6962067
|
6f6b7e1a9837fb581cc5fed92b66b4ad12ea30f5
|
/19-05-161_STOCK_profit_AIC_BIC_L500_github/4plot_profit_nh6.py
|
4d358b74123f153c55f5113d35716f99896e3ac4
|
[
"MIT"
] |
permissive
|
danhtaihoang/stock
|
f7b4f4989ff0c2b267766761d402adc599fc893d
|
9c3a3e467839dda095a0152055e347254abaf271
|
refs/heads/master
| 2020-06-10T01:35:59.136032
| 2019-06-24T17:13:35
| 2019-06-24T17:13:35
| 193,546,744
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,052
|
py
|
import numpy as np
import matplotlib.pyplot as plt
#=========================================================================================
# average:
p1 = np.loadtxt('profit_cost_nhmax6.dat')
p2 = np.loadtxt('profit_AIC_nhmax6.dat')
p3 = np.loadtxt('profit_BIC_nhmax6.dat')
tmax = np.shape(p1)[0]
t = np.arange(0,tmax,1)
plt.figure(figsize=(20,16))
plt.subplot(2,2,1)
#plt.figure(figsize=(5,4))
plt.title('trade everyday')
plt.plot(t, p1[:,0],'k-',label='cost')
plt.plot(t, p2[:,0],'b-',label='AIC')
plt.plot(t, p3[:,0],'r-',label='BIC')
plt.legend()
plt.xlabel('time')
plt.ylabel('cumulative profit')
plt.ylim([-1,4])
plt.grid(linestyle='dotted')
plt.subplot(2,2,2)
plt.title('not trade everyday')
plt.plot(t, p1[:,1],'k-',label='cost')
plt.plot(t, p2[:,1],'b-',label='AIC')
plt.plot(t, p3[:,1],'r-',label='BIC')
plt.legend()
plt.xlabel('time')
plt.ylabel('cumulative profit')
plt.ylim([-1,4])
plt.grid(linestyle='dotted')
#plt.tight_layout(h_pad=0.8, w_pad=1.2)
plt.savefig('profit_cost_AIC_BIC_nhmax6.pdf', format='pdf', dpi=300)
|
[
"hoangdanhtai@gmail.com"
] |
hoangdanhtai@gmail.com
|
c3ca98e5d8c7a2b7a60bee0667267c57f753f0a6
|
6bf97e57103b9ddd639a91a0c371f86b3bce60d8
|
/pure_ee/earth_engine_start.py
|
48154028788ff39dfead3649a9176c9cbc69a261
|
[
"Apache-2.0"
] |
permissive
|
geosconsulting/gee_wapor
|
655372722eed9e1c00ef7880870bd85c7953d64a
|
c3c451fcb21664172a74647fe5d9e56f312aa1df
|
refs/heads/master
| 2021-01-13T05:01:37.233979
| 2017-03-01T14:09:08
| 2017-03-01T14:09:08
| 81,427,303
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 327
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 3 04:59:03 2017
@author: fabio
"""
# Import the Earth Engine Python Package
import ee
# Initialize the Earth Engine object, using the authentication credentials.
ee.Initialize()
# Print the information for an image asset.
image = ee.Image('srtm90_v4')
print(image.getInfo())
|
[
"geos-consulting@fastwebnet.it"
] |
geos-consulting@fastwebnet.it
|
650e55b0150684558af4365f5f79147c34428123
|
3a3c3dd0265d9627857a8a1618a253ac74689bfc
|
/Bebras.py
|
c3903827c934863dd6e3fcd70fb38d5d4cc34889
|
[] |
no_license
|
YoEugene/BeBras
|
5c8ca4fa7b2e9629d38393359d1447ea774eebd8
|
49a033ab1ce8e514470e13c5b59f7fc2a3c96017
|
refs/heads/master
| 2020-04-17T12:54:53.845048
| 2016-09-09T09:20:12
| 2016-09-09T09:20:12
| 67,782,356
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 730
|
py
|
#-*- coding: UTF-8 -*-
import requests
import urllib
from bs4 import BeautifulSoup
saved = []
while len(saved) != 45:
r = requests.get('http://bebras.csie.ntnu.edu.tw/main/?page=try')
soup = BeautifulSoup(r.text.encode("utf-8"), 'html.parser')
prob_id = soup.find(id="subform").attrs['action'].replace("?page=try_ans&id=","")
imgs = soup.findAll("img")
if prob_id not in saved:
for img in imgs:
src = img.get('src').encode('ascii')
url = 'http://bebras.csie.ntnu.edu.tw/main/' + src
urllib.urlretrieve(url, src)
saved.append(prob_id)
with open(prob_id+'.html', 'w') as file:
file.write(r.text.encode("utf-8"))
print(prob_id)
|
[
"dreammacer.yo@gmail.com"
] |
dreammacer.yo@gmail.com
|
dc88abb3406c5bcfebb348f98a3bedeae0c3782b
|
f183e0624a02f02d3d4e225863a0357a5bf25a16
|
/Assignment_3/lambda/Assignment_3/index-photos.py
|
b72073b98d4d7c374fbeafaefd20f67007a8c677
|
[] |
no_license
|
Wangwei0223/AWS-PhotoAlbum
|
37611a892e8e4d6cd3c89258b79e759f6e98c26c
|
57bf82c022fe7e4da5e987fb9fdf68b88e96b5f2
|
refs/heads/master
| 2020-03-31T20:05:18.974679
| 2018-12-06T20:49:57
| 2018-12-06T20:49:57
| 152,524,758
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,805
|
py
|
import boto3
import re
import requests
from requests_aws4auth import AWS4Auth
import time
region = 'us-east-1' # e.g. us-west-1
service = 'es'
credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)
host = 'https://vpc-photo-hvy4wfq763nmzdhjwbku6nmtyu.us-east-1.es.amazonaws.com' # the Amazon ES domain, including https://
index = 'photo'
type = 'image'
url = host + '/' + index + '/' + type
headers = { "Content-Type": "application/json" }
s3 = boto3.client('s3')
# Regular expressions used to parse some simple log lines
ip_pattern = re.compile('(\d+\.\d+\.\d+\.\d+)')
time_pattern = re.compile('\[(\d+\/\w\w\w\/\d\d\d\d:\d\d:\d\d:\d\d\s-\d\d\d\d)\]')
message_pattern = re.compile('\"(.+)\"')
# Lambda execution starts here
def lambda_handler(event, context):
print event
client=boto3.client('rekognition')
for record in event['Records']:
# Get the bucket name and key for the new file
bucket = record['s3']['bucket']['name']
key = record['s3']['object']['key']
        timestamp = record['eventTime']
response = client.detect_labels(Image={'S3Object':{'Bucket':bucket,'Name':key}}, MaxLabels = 10, MinConfidence = 80)
labels = []
for label in response['Labels']:
labels.append(label['Name'])
print (label['Name'] + ' : ' + str(label['Confidence']))
#localtime = time.asctime( time.localtime(time.time()) )
document = {
"objectKey": key,
"bucket": bucket,
"createdTimestamp": timestampe,
"labels": labels
}
r = requests.post(url, auth=awsauth, json=document, headers=headers)
|
[
"291978313@qq.com"
] |
291978313@qq.com
|
ab0cab89b2414d9573a94e530d85774ff67f6f5b
|
7d8e74a400927fb3c9b9a37adb5802107d6184d9
|
/airports/utils.py
|
ca7a3a757a903d1421454cebcff37aa9dc89af0c
|
[] |
no_license
|
garytouchsoft/airports
|
4c17dfd0cd5fb695e13feac33585bb2fb0d26828
|
46559a31f9e92de1e7303b830f653bbe6a6445fd
|
refs/heads/master
| 2023-07-01T02:36:02.119495
| 2021-07-29T17:26:45
| 2021-07-29T17:26:45
| 390,801,609
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 616
|
py
|
from math import cos, asin, sqrt, pi
def find_airport(airports, latitude, longitude):
found = None
nearest = 100000
for a in airports:
lat1 = float(latitude)
lng1 = float(longitude)
d = distance(lat1, lng1, float(a.latitude), float(a.longitude))
if d < nearest:
nearest = d
found = a
return (found, nearest)
def distance(
lat1,
lon1,
lat2,
lon2,
):
p = pi / 180
a = 0.5 - cos((lat2 - lat1) * p) / 2 + cos(lat1 * p) * cos(lat2
* p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a))
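
# distance() implements the haversine formula and returns kilometres
# (12742 km is Earth's mean diameter). Minimal usage sketch below; `_Airport`
# is a hypothetical stand-in for whatever airport objects the caller supplies:
if __name__ == '__main__':
    class _Airport:
        def __init__(self, name, latitude, longitude):
            self.name = name
            self.latitude = latitude
            self.longitude = longitude

    _airports = [_Airport('LHR', '51.4700', '-0.4543'),
                 _Airport('JFK', '40.6413', '-73.7781')]
    _found, _km = find_airport(_airports, '51.5074', '-0.1278')
    print(_found.name, round(_km, 1))  # nearest airport and its distance in km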
|
[
"extgarykennedy@gmail.com"
] |
extgarykennedy@gmail.com
|
ee42c62ee368606b853eb33596799a966b0bffed
|
eedbd94d616246d8dbf5d3244101a3eb4f82a222
|
/bicyclepartsproject/pipelines.py
|
034b651caab27581430249bae8e982425be745fe
|
[] |
no_license
|
dkasarov/bicycle_parts_spider
|
0bc344c1e7e195d8139817092f1918fb900e1853
|
0288a2c3238026514c7fe564d3de9d2da73dcde0
|
refs/heads/master
| 2020-05-02T09:23:29.498541
| 2019-03-27T19:38:19
| 2019-03-27T19:38:19
| 177,870,115
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 300
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
class BicyclepartsprojectPipeline(object):
def process_item(self, item, spider):
return item
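
# To activate this pipeline, add it to settings.py (standard Scrapy
# configuration; the priority value 300 is illustrative):
# ITEM_PIPELINES = {
#     'bicyclepartsproject.pipelines.BicyclepartsprojectPipeline': 300,
# }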
|
[
"31469124+dkasarov@users.noreply.github.com"
] |
31469124+dkasarov@users.noreply.github.com
|
4248e63135d6e7f10c126f4a9b1d2bf034255994
|
829e69a4184e3be9e18ce4fbbfdb4939b8d028bb
|
/archdaily.py
|
3e9a3d96fc17a68e6624698504e541cd8b6c2bd8
|
[
"MIT"
] |
permissive
|
busster/Archdaily_bg_info
|
7c2eaf9a4ed99c33481fcdb0ed09e73a625a861e
|
c106d8dac62bd4b76739e6b29eac5fd5a35dad77
|
refs/heads/master
| 2021-01-20T20:14:26.878855
| 2016-08-09T22:56:56
| 2016-08-09T22:56:56
| 65,316,696
| 0
| 0
| null | 2016-08-09T22:56:57
| 2016-08-09T17:57:49
|
Python
|
UTF-8
|
Python
| false
| false
| 3,157
|
py
|
import requests, os, webbrowser, bs4
import urllib.request
import ctypes
import datetime
import time
from apscheduler.scheduler import Scheduler
import re
def downloadimage():
# Download archdaily's general projects page
res = requests.get('http://www.archdaily.com/search/projects')
    res.raise_for_status()
# Parse the page to find the first project
site = bs4.BeautifulSoup(res.text, "html.parser")
#project_link = site.findAll('ul',{'class':'afd-search-list'})
project_link = site.findAll('li',{'class':'afd-search-list__item'})
project_link = project_link[1]
one = project_link.find('a',href=True)
two = one['href']
res2 = requests.get('http://www.archdaily.com' + two)
    res2.raise_for_status()
site_project = bs4.BeautifulSoup(res2.text, "html.parser")
image_link = site_project.find('div',{'class':'image-bookmark'})
image_car = image_link.find('a',href=True)
image_car = image_car['href']
specs = site_project.find('ul',{'class':'char-list char-list-box '})
os.chdir(os.path.join(os.getenv('userprofile'),'Desktop'))
location = 'archdaily_project'
dir = os.path.dirname(location)
if not os.path.exists(location):
os.makedirs(location)
os.chdir(os.path.join(os.getenv('userprofile'),'Desktop','archdaily_project'))
location = open('project_info.txt','w')
location.write(specs.text)
location.close()
res3 = requests.get(image_car)
    res3.raise_for_status()
site_car = bs4.BeautifulSoup(res3.text, "html.parser")
theimage = site_car.find('div',{'class':'table-display'})
theimage = str(theimage)
try:
image = re.search('"url_large":"(.+?)"', str(theimage)).group(1)
except AttributeError:
image = 'Sorry dunno what happened'
data = urllib.request.urlretrieve((image), os.path.join(os.getenv('userprofile'),'Desktop','archdaily_project','project.jpg'))
#print (specs.text)
# # first project's page extension
# project_link_ref = project_link[0].get('href')
# # Download the projects page
# res2 = requests.get(project_link_ref)
# res2.raise_for_status
# # Parse the page and find the image
# devart_image = bs4.BeautifulSoup(res2.text)
# image_link = devart_image.select('div.dev-view-main-content img')
# image = image_link[0].get('src')
# # Download image
# data = urllib.request.urlretrieve((image), 'C:/Users/jason/Desktop/background/001.jpg')
downloadimage()
# def setbackground():
# # Set image as background
# SPI_SETDESKWALLPAPER = 0x14
# SPIF_UPDATEINFILE = 0x2
# src = 'C:/Users/jason/Desktop/background/001.jpg'
# print(ctypes.windll.user32.SystemParametersInfoW(SPI_SETDESKWALLPAPER, 0, src, SPIF_UPDATEINFILE))
# def interval():
# downloadimage()
# setbackground()
# print(datetime.datetime.now())
# time.sleep(20)
# return
# sched = Scheduler()
# sched.daemonic = False
# sched.start()
# sched.add_cron_job(interval, minute='0-59')
|
[
"jasonmbuss@gmail.com"
] |
jasonmbuss@gmail.com
|
6d32a6f67ce35aef72c121b3210ad72f27a32c9e
|
98d34b4c9dec318f783c37bd3612a0f9a5a9b16d
|
/credit/migrations/0009_auto_20170929_1320.py
|
6d099d0b2302c5c48c2ce44f0d99c2ef2f2ece34
|
[] |
no_license
|
Toweringweed/db
|
5718792f3647c99232c3c48426b49b392a32e9c2
|
18109ce6ba15233b9ca27c1c2ea7e4ba21b92b21
|
refs/heads/master
| 2021-09-08T10:42:43.135989
| 2017-12-14T06:34:48
| 2017-12-14T06:34:48
| 107,344,117
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,332
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-29 05:20
from __future__ import unicode_literals
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('credit', '0008_auto_20170928_1628'),
    ]

    operations = [
        migrations.CreateModel(
            name='luresult',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('luru', models.BooleanField(default=False, verbose_name='entry completed')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='update time')),
            ],
            options={
                'verbose_name': 'entry result',
                'verbose_name_plural': 'entry results',
            },
        ),
        migrations.AlterModelOptions(
            name='basic',
            options={'verbose_name': 'customer info', 'verbose_name_plural': 'customer info'},
        ),
        migrations.AlterModelOptions(
            name='card',
            options={'verbose_name': 'credit card info', 'verbose_name_plural': 'credit card info'},
        ),
        migrations.AlterModelOptions(
            name='chaxun',
            options={'verbose_name': 'credit inquiry', 'verbose_name_plural': 'credit inquiries'},
        ),
        migrations.AlterModelOptions(
            name='loan',
            options={'verbose_name': 'loan info', 'verbose_name_plural': 'loan info'},
        ),
        migrations.AlterModelOptions(
            name='summary',
            options={'verbose_name': 'info summary', 'verbose_name_plural': 'info summaries'},
        ),
        migrations.RemoveField(
            model_name='basic',
            name='luru',
        ),
        migrations.RemoveField(
            model_name='chaxun',
            name='c_date',
        ),
        migrations.AlterField(
            model_name='basic',
            name='IDcard',
            field=models.CharField(default='', max_length=19, verbose_name='ID card number'),
        ),
        migrations.AlterField(
            model_name='basic',
            name='adress',
            field=models.CharField(max_length=20, verbose_name='credit report photocopy'),
        ),
        migrations.AlterField(
            model_name='basic',
            name='name',
            field=models.CharField(default='', max_length=10, verbose_name='customer name'),
        ),
        migrations.AlterField(
            model_name='basic',
            name='order_id',
            field=models.CharField(max_length=30, primary_key=True, serialize=False, verbose_name='order serial number'),
        ),
        migrations.AlterField(
            model_name='loan',
            name='account_category',
            field=models.CharField(default='', max_length=10, verbose_name='loan purpose'),
        ),
        migrations.AlterField(
            model_name='summary',
            name='card_90overdue',
            field=models.IntegerField(null=True, verbose_name='number of credit card accounts overdue more than 90 days'),
        ),
        migrations.AlterField(
            model_name='summary',
            name='card_notsettled',
            field=models.IntegerField(null=True, verbose_name='number of credit card accounts not settled / not closed'),
        ),
    ]
|
[
"xiaohou09@gmail.com"
] |
xiaohou09@gmail.com
|
b2516c9040789df5a0e98f754aab40508283b38c
|
c834c1b7ef5d0039a706f174ed3f7b0ab82fa2e5
|
/optOnMysql/data2mysql.py
|
5903606b3171c597649676ce4e1d13f00e79309e
|
[] |
no_license
|
yangze01/Laws-Search-Project
|
126ffc5ec1ad1c2e9d95c2490104e8e37e766ad4
|
d1fff57a9298aa0d883a1b988aa98804d0ab00c1
|
refs/heads/master
| 2021-08-14T15:26:27.455518
| 2017-11-16T03:59:58
| 2017-11-16T03:59:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,597
|
py
|
#coding=utf8
import sys
import time
reload(sys)
sys.setdefaultencoding('utf8')
from optOnMysql.DocumentsOnMysql import *
from optOnMysql.DocumentUnit import *
import json
BasePath = sys.path[0]
def is_valid_date(str):
    '''Check whether the string is a valid "%Y-%m-%d" date string'''
    try:
        time.strptime(str, "%Y-%m-%d")
        return True
    except:
        return False
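
# e.g. is_valid_date("2017-11-16") -> True; is_valid_date("16/11/2017") -> False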
def document_format(line, criminal):
line = json.loads(line.decode('utf8'))
document_unit = dict()
document_unit["title"] = line['title']
# print(len(document_unit['title']))
document_unit["court"] = line['court']
document_unit["url"] = line['url']
document_unit["content"] = '|'.join(line['content']).encode('utf8')
# print(len(document_unit["content"]))
document_unit["criminal"] = criminal
if(is_valid_date(line["date"])):
document_unit["date"] = line['date']
else:
document_unit["date"] = "0000-00-00"
return document_unit
def save_document2mysql(file_path, criminal):
opt = DocumentsOnMysql()
i = 0
for line in open(file_path):
print(i)
i = i + 1
document_unit = document_format(line, criminal)
opt.insertOneDocuments(document_unit)
opt.connClose()
print(u"finished")
if __name__ == "__main__":
opt = DocumentsOnMysql()
# opt.insertOneDocuments(document_unit)
# print(opt)
opt.findById("1")
a = opt.findall()
for i in a :
print(i)
opt.connClose()
# file_path = BasePath + "/../data/judgment_trafficking.txt"
    # save_document2mysql(file_path, u"crime of trafficking women and children")
|
[
"858848101@qq.com"
] |
858848101@qq.com
|
8ead5ead4b013cc7d4c232e05fdbb87bc51f7ce4
|
8fb60261b33abf1da575faa0ee4eac8e18f6a517
|
/service1/app.py
|
f9757623edead27339cc1c17a848833096eda8d4
|
[] |
no_license
|
AndrewBarrett182/DevOps-Practical-Project
|
6a17fa36c279d18a5b19b09ae7fec8c21c1e61e8
|
7962674352d8d1c779aee3c429d9c09ffb2c8122
|
refs/heads/main
| 2023-06-06T04:47:00.185664
| 2021-06-13T19:51:21
| 2021-06-13T19:51:21
| 374,668,538
| 0
| 0
| null | 2021-06-14T10:11:49
| 2021-06-07T13:04:29
|
Python
|
UTF-8
|
Python
| false
| false
| 107
|
py
|
from application import app

if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=5000)
|
[
"ABarrett@qa.com"
] |
ABarrett@qa.com
|
f98b81c01d75af857e62bdb4215ea2e8bd610be9
|
f3d7cdf664cd4dd17acb600871bcfab8d38dc6ba
|
/01_MCNN_Result/00_AES_HD/MCNN(org,ma100,pca).py
|
450fac2c2f33b3ba0ab9e756ae2278193216d9c9
|
[] |
no_license
|
mitMathe/SCA-MCNN
|
1bf2a8c6ec792bb96a6c1beae4ce4e06703c5fd0
|
0dafbfc1f9d57ff264bc961d31b092995e488117
|
refs/heads/main
| 2023-03-16T06:03:05.872179
| 2021-03-03T06:51:06
| 2021-03-03T06:51:06
| 343,676,120
| 6
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 67,175
|
py
|
import os.path
import sys
import h5py
import numpy as np
from numpy import *
import random
from tqdm import tqdm
import matplotlib.pyplot as plt
import struct
from ctypes import *
import tensorflow as tf
import os
import time
import shutil
import sys
import binascii
import pickle
from keras.models import Model
from keras.layers import Concatenate, Flatten, Dense, Input, Conv1D, AveragePooling1D, BatchNormalization
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint
from keras.utils import to_categorical
from sklearn import preprocessing
import warnings
from keras.callbacks import Callback
from keras import backend as K
import sklearn
from sklearn.decomposition import PCA, KernelPCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis as QDA
from operator import itemgetter
from keras.utils import plot_model
from IPython.display import SVG
from tensorflow.python.keras.layers import Lambda
###################################################################
########################## PARAMETER ############################
###################################################################
G_IV_PRINT = False
G_INFO_PRINT = False
G_RESULT_PRINT = True
G_RESULT_SAVE = True
# "aes_hd" "ascad100" "ascad50" "ascad0" "aes_rd" "aes_hd_mm"
G_OPEN_DATASET = "aes_hd"
# "original" "moving_average" "pca"
G_PREPROCESS = "original"
G_DATA_ROOT_PATH = "../../SCA_DATA/AES_HD"
G_TRAIN_DATA_FILE = G_DATA_ROOT_PATH + "/" + "AES_HD_profiling_50000tr_1250pt.npy"
G_TRAIN_PLAIN_FILE = G_DATA_ROOT_PATH + "/" + "AES_HD_profiling_50000tr_1250pt_cipher.npy"
G_VALID_DATA_FILE = G_DATA_ROOT_PATH + "/" + "AES_HD_validation_25000tr_1250pt.npy"
G_VALID_PLAIN_FILE = G_DATA_ROOT_PATH + "/" + "AES_HD_validation_25000tr_1250pt_cipher.npy"
G_GEN_RESULT_PATH = "."
G_TRAIN_NO = 45000
G_VALID_NO = 5000
G_ATTACK_NO = 5000
G_PLAIN_NO = 16
G_BIT_DEPTH = 8
G_OUT_SIZE = 256
G_PT_ST = 0
G_PT_ED = 1249
G_LEARN_RATE = 0.01
G_IN_SIZE = G_PT_ED - G_PT_ST + 1
G_LEARN_RATE_ST = G_LEARN_RATE
G_LEARN_RATE_ED = G_LEARN_RATE / 100000
# MASSIVE HYPERPARAMETER
G_EPOCH = 50
G_BATCH = 256
G_LAYER_CNN = 2
G_LAYER = 3
G_LAYER_NO = [20, 20, 20]
class C_SFT_HEADER(Structure):
_fields_ = [
("ucVariable", c_uint8),
("ucTypeofTrace", c_uint8),
("ucReserved_1", c_uint8),
("ucReserved_2", c_uint8),
("strID_1", c_int32),
("strID_2", c_int32),
("nFrequency", c_uint32),
("nTraceNum", c_uint32),
("nTraceLength", c_uint32),
("fOffset", c_float),
("fGain", c_float)
]
class C_MPL_HYPERPARAMETER(Structure):
_fields_ = [
("learn_rate", c_float),
("epoch_size", c_uint32),
("batch_size", c_uint32),
("layer_size", c_uint32),
("p_layer_net_size", POINTER(c_uint32)),
("layer_size_cnn", c_uint32),
("local_layer_size_cnn", c_uint32),
("train_no", c_uint32),
("train_size", c_uint32),
("valid_no", c_uint32),
("valid_size", c_uint32),
("attack_no", c_uint32),
("in_size", c_uint32),
("out_size", c_uint32)
]
def COPY_HYPER(DST_HYPER, DEP_HYPER):
DST_HYPER.learn_rate = DEP_HYPER.learn_rate
DST_HYPER.epoch_size = DEP_HYPER.epoch_size
DST_HYPER.batch_size = DEP_HYPER.batch_size
DST_HYPER.layer_size = DEP_HYPER.layer_size
layer_no = (c_uint32 * DEP_HYPER.layer_size)()
for i in range(DEP_HYPER.layer_size):
layer_no[i] = DEP_HYPER.p_layer_net_size[i]
    DST_HYPER.p_layer_net_size = layer_no
DST_HYPER.layer_size_cnn = DEP_HYPER.layer_size_cnn
DST_HYPER.train_no = DEP_HYPER.train_no
DST_HYPER.train_size = DEP_HYPER.train_size
DST_HYPER.valid_no = DEP_HYPER.valid_no
DST_HYPER.valid_size = DEP_HYPER.valid_size
DST_HYPER.attack_no = DEP_HYPER.attack_no
DST_HYPER.in_size = DEP_HYPER.in_size
DST_HYPER.out_size = DEP_HYPER.out_size
def GET_TODAY():
now = time.localtime()
s = "%04d-%02d-%02d_%02d-%02d-%02d" % (
now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec)
return s
def MAKE_FOLDER(folder_name):
work_dir = G_GEN_RESULT_PATH + "/" + folder_name
if not os.path.isdir(folder_name):
os.mkdir(work_dir)
return work_dir
def DEBUG_PRINT(s, print_on_off):
if print_on_off:
print(s)
def SHUFFLE_SCA_DATA(profiling_x,label_y):
l = list(zip(profiling_x,label_y))
random.shuffle(l)
shuffled_x,shuffled_y = list(zip(*l))
shuffled_x = np.array(shuffled_x)
shuffled_y = np.array(shuffled_y)
return (shuffled_x, shuffled_y)
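
# Quick check (illustrative): shuffling keeps each trace aligned with its label.
#   _x, _y = SHUFFLE_SCA_DATA(np.arange(10).reshape(5, 2), np.arange(5))
#   after shuffling, _x[i] is still the row [2*_y[i], 2*_y[i] + 1] for every i.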
def INV_CAL(PLAIN, PLAIN_NO, GUESS_POS, GUESS_VALUE, INTERMEDIATE):
AES_SBOX = [0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]
AES_SBOX_INV = np.array([0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38,
0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87,
0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d,
0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2,
0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16,
0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda,
0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a,
0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02,
0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea,
0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85,
0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89,
0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20,
0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31,
0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d,
0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0,
0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26,
0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d
])
for i in range(PLAIN_NO):
if G_OPEN_DATASET == 'aes_hd':
INTERMEDIATE[i] = AES_SBOX_INV[int(PLAIN[i][11]) ^ GUESS_VALUE] ^ int(PLAIN[i][7])
else:
INTERMEDIATE[i] = AES_SBOX[PLAIN[i][GUESS_POS] ^ GUESS_VALUE]
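
# Sanity sketch (illustrative; generic branch only, i.e. G_OPEN_DATASET != 'aes_hd'):
# the intermediate value is Sbox[plaintext_byte ^ key_guess], so a zero plaintext
# byte with key guess 0x00 must give AES_SBOX[0x00] == 0x63:
#   _plain = np.zeros((1, G_PLAIN_NO), dtype=np.uint8)
#   _inter = np.zeros(1, dtype=np.uint8)
#   INV_CAL(_plain, 1, 0, 0x00, _inter)   # -> _inter[0] == 0x63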
def LOAD_TRACE(data_type, path, tr_no, pt_st, pt_ed):
if data_type == 'npy':
train_data = np.load(path)
return train_data[:tr_no, pt_st:pt_ed + 1]
def LOAD_PLAIN(data_type, path):
if data_type == 'npy':
plain = np.load(path)
return plain
# Code implemented by https://github.com/titu1994/keras-one-cycle
# Code is ported from https://github.com/fastai/fastai
class OneCycleLR(Callback):
def __init__(self,
max_lr,
end_percentage=0.1,
scale_percentage=None,
maximum_momentum=0.95,
minimum_momentum=0.85,
verbose=True):
""" This callback implements a cyclical learning rate policy (CLR).
This is a special case of Cyclic Learning Rates, where we have only 1 cycle.
        After the completion of 1 cycle, the learning rate decreases rapidly to
        one hundredth of its initial (lowest) value.
# Arguments:
max_lr: Float. Initial learning rate. This also sets the
starting learning rate (which will be 10x smaller than
this), and will increase to this value during the first cycle.
end_percentage: Float. The percentage of all the epochs of training
that will be dedicated to sharply decreasing the learning
rate after the completion of 1 cycle. Must be between 0 and 1.
scale_percentage: Float or None. If float, must be between 0 and 1.
If None, it will compute the scale_percentage automatically
based on the `end_percentage`.
maximum_momentum: Optional. Sets the maximum momentum (initial)
value, which gradually drops to its lowest value in half-cycle,
then gradually increases again to stay constant at this max value.
Can only be used with SGD Optimizer.
minimum_momentum: Optional. Sets the minimum momentum at the end of
the half-cycle. Can only be used with SGD Optimizer.
verbose: Bool. Whether to print the current learning rate after every
epoch.
# Reference
- [A disciplined approach to neural network hyper-parameters: Part 1 -- learning rate, batch size, weight_decay, and weight decay](https://arxiv.org/abs/1803.09820)
- [Super-Convergence: Very Fast Training of Residual Networks Using Large Learning Rates](https://arxiv.org/abs/1708.07120)
"""
super(OneCycleLR, self).__init__()
if end_percentage < 0. or end_percentage > 1.:
raise ValueError("`end_percentage` must be between 0 and 1")
if scale_percentage is not None and (scale_percentage < 0. or scale_percentage > 1.):
raise ValueError("`scale_percentage` must be between 0 and 1")
self.initial_lr = max_lr
self.end_percentage = end_percentage
self.scale = float(scale_percentage) if scale_percentage is not None else float(end_percentage)
self.max_momentum = maximum_momentum
self.min_momentum = minimum_momentum
self.verbose = verbose
if self.max_momentum is not None and self.min_momentum is not None:
self._update_momentum = True
else:
self._update_momentum = False
self.clr_iterations = 0.
self.history = {}
self.epochs = None
self.batch_size = None
self.samples = None
self.steps = None
self.num_iterations = None
self.mid_cycle_id = None
def _reset(self):
"""
Reset the callback.
"""
self.clr_iterations = 0.
self.history = {}
def compute_lr(self):
"""
Compute the learning rate based on which phase of the cycle it is in.
- If in the first half of training, the learning rate gradually increases.
- If in the second half of training, the learning rate gradually decreases.
- If in the final `end_percentage` portion of training, the learning rate
is quickly reduced to near 100th of the original min learning rate.
# Returns:
the new learning rate
"""
if self.clr_iterations > 2 * self.mid_cycle_id:
current_percentage = (self.clr_iterations - 2 * self.mid_cycle_id)
current_percentage /= float((self.num_iterations - 2 * self.mid_cycle_id))
new_lr = self.initial_lr * (1. + (current_percentage *
(1. - 100.) / 100.)) * self.scale
elif self.clr_iterations > self.mid_cycle_id:
current_percentage = 1. - (
self.clr_iterations - self.mid_cycle_id) / self.mid_cycle_id
new_lr = self.initial_lr * (1. + current_percentage *
(self.scale * 100 - 1.)) * self.scale
else:
current_percentage = self.clr_iterations / self.mid_cycle_id
new_lr = self.initial_lr * (1. + current_percentage *
(self.scale * 100 - 1.)) * self.scale
if self.clr_iterations == self.num_iterations:
self.clr_iterations = 0
return new_lr
def compute_momentum(self):
"""
Compute the momentum based on which phase of the cycle it is in.
- If in the first half of training, the momentum gradually decreases.
- If in the second half of training, the momentum gradually increases.
- If in the final `end_percentage` portion of training, the momentum value
is kept constant at the maximum initial value.
# Returns:
the new momentum value
"""
if self.clr_iterations > 2 * self.mid_cycle_id:
new_momentum = self.max_momentum
elif self.clr_iterations > self.mid_cycle_id:
current_percentage = 1. - ((self.clr_iterations - self.mid_cycle_id) / float(
self.mid_cycle_id))
new_momentum = self.max_momentum - current_percentage * (
self.max_momentum - self.min_momentum)
else:
current_percentage = self.clr_iterations / float(self.mid_cycle_id)
new_momentum = self.max_momentum - current_percentage * (
self.max_momentum - self.min_momentum)
return new_momentum
def on_train_begin(self, logs={}):
logs = logs or {}
self.epochs = self.params['epochs']
self.batch_size = self.params['batch_size']
self.samples = self.params['samples']
self.steps = self.params['steps']
if self.steps is not None:
self.num_iterations = self.epochs * self.steps
else:
if (self.samples % self.batch_size) == 0:
remainder = 0
else:
remainder = 1
self.num_iterations = (self.epochs + remainder) * self.samples // self.batch_size
self.mid_cycle_id = int(self.num_iterations * ((1. - self.end_percentage)) / float(2))
self._reset()
K.set_value(self.model.optimizer.lr, self.compute_lr())
if self._update_momentum:
if not hasattr(self.model.optimizer, 'momentum'):
raise ValueError("Momentum can be updated only on SGD optimizer !")
new_momentum = self.compute_momentum()
K.set_value(self.model.optimizer.momentum, new_momentum)
def on_batch_end(self, epoch, logs=None):
logs = logs or {}
self.clr_iterations += 1
new_lr = self.compute_lr()
self.history.setdefault('lr', []).append(
K.get_value(self.model.optimizer.lr))
K.set_value(self.model.optimizer.lr, new_lr)
if self._update_momentum:
if not hasattr(self.model.optimizer, 'momentum'):
raise ValueError("Momentum can be updated only on SGD optimizer !")
new_momentum = self.compute_momentum()
self.history.setdefault('momentum', []).append(
K.get_value(self.model.optimizer.momentum))
K.set_value(self.model.optimizer.momentum, new_momentum)
for k, v in logs.items():
self.history.setdefault(k, []).append(v)
def on_epoch_end(self, epoch, logs=None):
if self.verbose:
if self._update_momentum:
print(" - lr: %0.5f - momentum: %0.2f " %
(self.history['lr'][-1], self.history['momentum'][-1]))
else:
print(" - lr: %0.5f " % (self.history['lr'][-1]))
class LRFinder(Callback):
def __init__(self,
num_samples,
batch_size,
minimum_lr=1e-5,
maximum_lr=10.,
lr_scale='exp',
validation_data=None,
validation_sample_rate=5,
stopping_criterion_factor=4.,
loss_smoothing_beta=0.98,
save_dir=None,
verbose=True):
"""
This class uses the Cyclic Learning Rate history to find a
set of learning rates that can be good initializations for the
One-Cycle training proposed by Leslie Smith in the paper referenced
below.
A port of the Fast.ai implementation for Keras.
# Note
This requires that the model be trained for exactly 1 epoch. If the model
is trained for more epochs, then the metric calculations are only done for
the first epoch.
# Interpretation
Upon visualizing the loss plot, check where the loss starts to increase
rapidly. Choose a learning rate at somewhat prior to the corresponding
position in the plot for faster convergence. This will be the maximum_lr lr.
Choose the max value as this value when passing the `max_val` argument
to OneCycleLR callback.
Since the plot is in log-scale, you need to compute 10 ^ (-k) of the x-axis
# Arguments:
num_samples: Integer. Number of samples in the dataset.
batch_size: Integer. Batch size during training.
minimum_lr: Float. Initial learning rate (and the minimum).
maximum_lr: Float. Final learning rate (and the maximum).
lr_scale: Can be one of ['exp', 'linear']. Chooses the type of
scaling for each update to the learning rate during subsequent
batches. Choose 'exp' for large range and 'linear' for small range.
validation_data: Requires the validation dataset as a tuple of
(X, y) belonging to the validation set. If provided, will use the
validation set to compute the loss metrics. Else uses the training
batch loss. Will warn if not provided to alert the user.
validation_sample_rate: Positive or Negative Integer. Number of batches to sample from the
validation set per iteration of the LRFinder. Larger number of
samples will reduce the variance but will take longer time to execute
per batch.
                If positive (> 0), will sample that many batches from the validation dataset.
                If negative, will use the entire validation dataset.
stopping_criterion_factor: Integer or None. A factor which is used
to measure large increase in the loss value during training.
Since callbacks cannot stop training of a model, it will simply
stop logging the additional values from the epochs after this
stopping criterion has been met.
If None, this check will not be performed.
loss_smoothing_beta: Float. The smoothing factor for the moving
average of the loss function.
save_dir: Optional, String. If passed a directory path, the callback
will save the running loss and learning rates to two separate numpy
arrays inside this directory. If the directory in this path does not
exist, they will be created.
verbose: Whether to print the learning rate after every batch of training.
# References:
- [A disciplined approach to neural network hyper-parameters: Part 1 -- learning rate, batch size, weight_decay, and weight decay](https://arxiv.org/abs/1803.09820)
"""
super(LRFinder, self).__init__()
if lr_scale not in ['exp', 'linear']:
raise ValueError("`lr_scale` must be one of ['exp', 'linear']")
if validation_data is not None:
self.validation_data = validation_data
self.use_validation_set = True
            if validation_sample_rate != 0:
                self.validation_sample_rate = validation_sample_rate
            else:
                raise ValueError("`validation_sample_rate` must be a positive or negative integer other than 0")
else:
self.use_validation_set = False
self.validation_sample_rate = 0
self.num_samples = num_samples
self.batch_size = batch_size
self.initial_lr = minimum_lr
self.final_lr = maximum_lr
self.lr_scale = lr_scale
self.stopping_criterion_factor = stopping_criterion_factor
self.loss_smoothing_beta = loss_smoothing_beta
self.save_dir = save_dir
self.verbose = verbose
self.num_batches_ = num_samples // batch_size
self.current_lr_ = minimum_lr
if lr_scale == 'exp':
self.lr_multiplier_ = (maximum_lr / float(minimum_lr)) ** (
1. / float(self.num_batches_))
else:
extra_batch = int((num_samples % batch_size) != 0)
self.lr_multiplier_ = np.linspace(
minimum_lr, maximum_lr, num=self.num_batches_ + extra_batch)
# If negative, use entire validation set
if self.validation_sample_rate < 0:
self.validation_sample_rate = self.validation_data[0].shape[0] // batch_size
self.current_batch_ = 0
self.current_epoch_ = 0
self.best_loss_ = 1e6
self.running_loss_ = 0.
self.history = {}
def on_train_begin(self, logs=None):
self.current_epoch_ = 1
K.set_value(self.model.optimizer.lr, self.initial_lr)
warnings.simplefilter("ignore")
def on_epoch_begin(self, epoch, logs=None):
self.current_batch_ = 0
if self.current_epoch_ > 1:
warnings.warn(
"\n\nLearning rate finder should be used only with a single epoch. "
"Hereafter, the callback will not measure the losses.\n\n")
def on_batch_begin(self, batch, logs=None):
self.current_batch_ += 1
def on_batch_end(self, batch, logs=None):
if self.current_epoch_ > 1:
return
if self.use_validation_set:
X, Y = self.validation_data[0], self.validation_data[1]
            # sample `validation_sample_rate` random batches from the validation
            # set for a fast approximation of the loss
num_samples = self.batch_size * self.validation_sample_rate
if num_samples > X.shape[0]:
num_samples = X.shape[0]
idx = np.random.choice(X.shape[0], num_samples, replace=False)
x = X[idx]
y = Y[idx]
values = self.model.evaluate(x, y, batch_size=self.batch_size, verbose=False)
loss = values[0]
else:
loss = logs['loss']
        # smooth the loss with an exponential moving average, then bias-correct it
        self.running_loss_ = self.loss_smoothing_beta * self.running_loss_ + (
            1. - self.loss_smoothing_beta) * loss
        running_loss = self.running_loss_ / (
            1. - self.loss_smoothing_beta ** self.current_batch_)
# stop logging if loss is too large
if self.current_batch_ > 1 and self.stopping_criterion_factor is not None and (
running_loss >
self.stopping_criterion_factor * self.best_loss_):
if self.verbose:
print(" - LRFinder: Skipping iteration since loss is %d times as large as best loss (%0.4f)"
% (self.stopping_criterion_factor, self.best_loss_))
return
if running_loss < self.best_loss_ or self.current_batch_ == 1:
self.best_loss_ = running_loss
current_lr = K.get_value(self.model.optimizer.lr)
self.history.setdefault('running_loss_', []).append(running_loss)
if self.lr_scale == 'exp':
self.history.setdefault('log_lrs', []).append(np.log10(current_lr))
else:
self.history.setdefault('log_lrs', []).append(current_lr)
# compute the lr for the next batch and update the optimizer lr
if self.lr_scale == 'exp':
current_lr *= self.lr_multiplier_
else:
current_lr = self.lr_multiplier_[self.current_batch_ - 1]
K.set_value(self.model.optimizer.lr, current_lr)
# save the other metrics as well
for k, v in logs.items():
self.history.setdefault(k, []).append(v)
if self.verbose:
if self.use_validation_set:
print(" - LRFinder: val_loss: %1.4f - lr = %1.8f " %
(values[0], current_lr))
else:
print(" - LRFinder: lr = %1.8f " % current_lr)
def on_epoch_end(self, epoch, logs=None):
if self.save_dir is not None and self.current_epoch_ <= 1:
if not os.path.exists(self.save_dir):
os.makedirs(self.save_dir)
losses_path = os.path.join(self.save_dir, 'losses.npy')
lrs_path = os.path.join(self.save_dir, 'lrs.npy')
np.save(losses_path, self.losses)
np.save(lrs_path, self.lrs)
if self.verbose:
print("\tLR Finder : Saved the losses and learning rate values in path : {%s}"
% (self.save_dir))
self.current_epoch_ += 1
warnings.simplefilter("default")
def plot_schedule(self, clip_beginning=None, clip_endding=None):
"""
Plots the schedule from the callback itself.
# Arguments:
clip_beginning: Integer or None. If positive integer, it will
remove the specified portion of the loss graph to remove the large
loss values in the beginning of the graph.
clip_endding: Integer or None. If negative integer, it will
remove the specified portion of the ending of the loss graph to
remove the sharp increase in the loss values at high learning rates.
"""
try:
import matplotlib.pyplot as plt
plt.style.use('seaborn-white')
except ImportError:
print(
"Matplotlib not found. Please use `pip install matplotlib` first."
)
return
if clip_beginning is not None and clip_beginning < 0:
clip_beginning = -clip_beginning
if clip_endding is not None and clip_endding > 0:
clip_endding = -clip_endding
losses = self.losses
lrs = self.lrs
if clip_beginning:
losses = losses[clip_beginning:]
lrs = lrs[clip_beginning:]
if clip_endding:
losses = losses[:clip_endding]
lrs = lrs[:clip_endding]
plt.plot(lrs, losses)
plt.title('Learning rate vs Loss')
plt.xlabel('learning rate')
plt.ylabel('loss')
plt.show()
@classmethod
def restore_schedule_from_dir(cls,
directory,
clip_beginning=None,
clip_endding=None):
"""
Loads the training history from the saved numpy files in the given directory.
# Arguments:
directory: String. Path to the directory where the serialized numpy
arrays of the loss and learning rates are saved.
clip_beginning: Integer or None. If positive integer, it will
remove the specified portion of the loss graph to remove the large
loss values in the beginning of the graph.
clip_endding: Integer or None. If negative integer, it will
remove the specified portion of the ending of the loss graph to
remove the sharp increase in the loss values at high learning rates.
Returns:
tuple of (losses, learning rates)
"""
if clip_beginning is not None and clip_beginning < 0:
clip_beginning = -clip_beginning
if clip_endding is not None and clip_endding > 0:
clip_endding = -clip_endding
losses_path = os.path.join(directory, 'losses.npy')
lrs_path = os.path.join(directory, 'lrs.npy')
if not os.path.exists(losses_path) or not os.path.exists(lrs_path):
print("%s and %s could not be found at directory : {%s}" %
(losses_path, lrs_path, directory))
losses = None
lrs = None
else:
losses = np.load(losses_path)
lrs = np.load(lrs_path)
if clip_beginning:
losses = losses[clip_beginning:]
lrs = lrs[clip_beginning:]
if clip_endding:
losses = losses[:clip_endding]
lrs = lrs[:clip_endding]
return losses, lrs
@classmethod
def plot_schedule_from_file(cls,
directory,
clip_beginning=None,
clip_endding=None):
"""
Plots the schedule from the saved numpy arrays of the loss and learning
rate values in the specified directory.
# Arguments:
directory: String. Path to the directory where the serialized numpy
arrays of the loss and learning rates are saved.
clip_beginning: Integer or None. If positive integer, it will
remove the specified portion of the loss graph to remove the large
loss values in the beginning of the graph.
clip_endding: Integer or None. If negative integer, it will
remove the specified portion of the ending of the loss graph to
remove the sharp increase in the loss values at high learning rates.
"""
try:
import matplotlib.pyplot as plt
plt.style.use('seaborn-white')
except ImportError:
print("Matplotlib not found. Please use `pip install matplotlib` first.")
return
losses, lrs = cls.restore_schedule_from_dir(
directory,
clip_beginning=clip_beginning,
clip_endding=clip_endding)
if losses is None or lrs is None:
return
else:
plt.plot(lrs, losses)
plt.title('Learning rate vs Loss')
plt.xlabel('learning rate')
plt.ylabel('loss')
plt.show()
@property
def lrs(self):
return np.array(self.history['log_lrs'])
@property
def losses(self):
return np.array(self.history['running_loss_'])
###################################################################
######################## LOADING DATA ###########################
###################################################################
AES_SBOX = np.array([
0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16
])
AES_SBOX_INV = np.array([0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38,
0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87,
0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d,
0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2,
0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16,
0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda,
0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a,
0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02,
0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea,
0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85,
0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89,
0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20,
0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31,
0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d,
0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0,
0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26,
0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d
])
###################################################################
########################## FUNCTIONS ############################
###################################################################
# Compute the rank of a given key hypothesis amongst all hypotheses
def rk_key(rank_array,key):
key_val = rank_array[key]
return np.where(np.sort(rank_array)[::-1] == key_val)[0][0]
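# Worked example (values are illustrative): with rank_array = [0.1, 0.9, 0.4]
# and key = 2, the scores sorted in descending order are [0.9, 0.4, 0.1], so
# key 2's score of 0.4 sits at rank 1 (rank 0 means the key is ranked first):
#   rk_key(np.array([0.1, 0.9, 0.4]), 2)  # -> 1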
# Compute the evolution of rank
def rank_compute(prediction, att_plt, key, byte):
"""
- prediction : predictions of the NN
- att_plt : plaintext of the attack traces
- key : Key used during encryption
- byte : byte to attack
"""
(nb_trs, nb_hyp) = prediction.shape
idx_min = nb_trs
min_rk = 255
key_log_prob = np.zeros(nb_hyp)
rank_evol = np.full(nb_trs,255)
prediction = np.log(prediction+1e-40)
for i in range(nb_trs):
for k in range(nb_hyp):
if G_OPEN_DATASET == 'aes_hd':
#Computes the hypothesis values
key_log_prob[k] += prediction[i,AES_SBOX_INV[k^int(att_plt[i,11])]^int(att_plt[i,7])]
else:
#Computes the hypothesis values
key_log_prob[k] += prediction[i,AES_SBOX[k^att_plt[i, byte]]]
rank_evol[i] = rk_key(key_log_prob,key[byte])
return rank_evol
# Performs attack
def perform_attacks(nb_traces, predictions, nb_attacks, plt, key, byte=0, shuffle=True, savefig=True, filename='fig'):
"""
    Performs the given number of attacks and averages the resulting key-rank curves
- nb_traces : number of traces used to perform the attack
- predictions : array containing the values of the prediction
    - nb_attacks : number of attacks to perform
- plt : the plaintext used to obtain the consumption traces
- key : the key used to obtain the consumption traces
- byte : byte to attack
- shuffle (boolean, default = True)
"""
(nb_total, nb_hyp) = predictions.shape
all_rk_evol = np.zeros((nb_attacks, nb_traces))
for i in tqdm(range(nb_attacks)):
if shuffle:
l = list(zip(predictions,plt))
random.shuffle(l)
sp,splt = list(zip(*l))
sp = np.array(sp)
splt = np.array(splt)
att_pred = sp[:nb_traces]
att_plt = splt[:nb_traces]
else:
att_pred = predictions[:nb_traces]
att_plt = plt[:nb_traces]
rank_evolution = rank_compute(att_pred,att_plt,key,byte=byte)
all_rk_evol[i] = rank_evolution
rk_avg = np.mean(all_rk_evol,axis=0)
return (rk_avg)
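# Minimal usage sketch (variable names are placeholders; `predictions` would
# come from model.predict on the attack traces):
#   avg_rank = perform_attacks(nb_traces=500, predictions=predictions,
#                              nb_attacks=100, plt=attack_plaintexts,
#                              key=correct_key, byte=2)
#   # the attack is considered successful once avg_rank reaches 0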
def MASSIVE_SCA_DL(RUN_FUNCTION=None, BACKUP_FILE=None, DATA_TYPE='npy', GPU_CONFIG=None):
# Creating the work folder based on current time
if (G_RESULT_SAVE == 1):
st_t = time.time()
work_dir = MAKE_FOLDER(GET_TODAY())
    # Load the training data
train_data = LOAD_TRACE(DATA_TYPE, G_TRAIN_DATA_FILE, G_TRAIN_NO, G_PT_ST, G_PT_ED)
if DATA_TYPE == 'npy':
train_plain = LOAD_PLAIN(DATA_TYPE, G_TRAIN_PLAIN_FILE)
else:
exit()
    # Load the validation data
valid_data = LOAD_TRACE(DATA_TYPE, G_VALID_DATA_FILE, G_VALID_NO + G_ATTACK_NO, G_PT_ST, G_PT_ED)
if DATA_TYPE == 'npy':
valid_plain = LOAD_PLAIN(DATA_TYPE, G_VALID_PLAIN_FILE)
else:
exit()
if G_RESULT_SAVE:
final_work_file = work_dir + "/" + "final_result.txt"
fp_r = open(final_work_file, 'w')
# Generating hyperparameter
hyperparameter = C_MPL_HYPERPARAMETER()
# Initializing hyperparameter
hyperparameter.train_no = c_uint32(train_data.shape[0])
hyperparameter.train_size = c_uint32(train_data.shape[1])
hyperparameter.valid_no = c_uint32(G_VALID_NO)
hyperparameter.valid_size = c_uint32(valid_data.shape[1])
hyperparameter.attack_no = c_uint32(G_ATTACK_NO)
hyperparameter.out_size = G_OUT_SIZE
hyperparameter.learn_rate = G_LEARN_RATE
hyperparameter.in_size = G_IN_SIZE
    # Allocate the per-layer neuron counts used by the SCA network
layer_no = (c_uint32 * G_LAYER)()
for i in range(G_LAYER):
layer_no[i] = 20
hyperparameter.batch_size = G_BATCH
hyperparameter.layer_size = G_LAYER
hyperparameter.epoch_size = G_EPOCH
hyperparameter.layer_size_cnn = G_LAYER_CNN
hyperparameter.p_layer_net_size = layer_no
if G_RESULT_SAVE:
fp_r.write("#####")
fp_r.write("batch_size: %d, layer_size: %d, epoch_size: %d, " % (hyperparameter.batch_size, hyperparameter.layer_size, hyperparameter.epoch_size))
fp_r.write("#####")
if G_OPEN_DATASET == 'aes_rd':
byte = 0
key = 0x2B
elif G_OPEN_DATASET == 'aes_hd':
byte = 0
key = 0x00
else:
byte = 2
key = 0xE0
RUN_FUNCTION("log_archive", fp_r, (work_dir + "/"), hyperparameter, byte, key, train_data, train_plain, valid_data, valid_plain, GPU_CONFIG)
else:
RUN_FUNCTION("log_archive", "", (work_dir + "/"), hyperparameter, byte, key, train_data, train_plain, valid_data, valid_plain, GPU_CONFIG)
if G_RESULT_SAVE:
fp_r.close()
ed_t = time.time()
time_file = work_dir + "/elapsed_time.txt"
fp_t = open(time_file, 'w')
fp_t.write("elasped time: %f\n" % (ed_t - st_t))
fp_t.close()
def CHES2020_CNN_SCA(LOG_FILE, FP_RESULT, FINAL_PATH, HYPERPARAMETER, GUESS_POS, GUESS_KEY, TRAIN_DATA, TRAIN_PLAIN, VALID_DATA, VALID_PLAIN, GPU_CONFIG):
if G_OPEN_DATASET == 'aes_rd':
correct_key = [0x2b, 0x7E, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c] #AES_RD
elif G_OPEN_DATASET == 'aes_hd':
correct_key = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] #AES_HD
else:
correct_key = [0x4D, 0xFB, 0xE0, 0xF2, 0x72, 0x21, 0xFE, 0x10, 0xA7, 0x8D, 0x4A, 0xDC, 0x8E, 0x49, 0x04, 0x69] #ASCAD
train_data = TRAIN_DATA.astype('float32')
valid_data = VALID_DATA.astype('float32')
    # Standardization and normalization (between 0 and 1)
scaler = preprocessing.StandardScaler()
train_data = scaler.fit_transform(train_data)
valid_data = scaler.transform(valid_data)
scaler = preprocessing.MinMaxScaler(feature_range=(0,1))
train_data = scaler.fit_transform(train_data)
valid_data = scaler.fit_transform(valid_data)
(train_data, TRAIN_PLAIN) = SHUFFLE_SCA_DATA(train_data, TRAIN_PLAIN)
if G_PREPROCESS == 'original':
TRAIN_DATA = train_data
VALID_DATA = valid_data
elif G_PREPROCESS == 'moving_average':
###### Calculating the moving average
ma_base, ma_step, ma_no = 100, 1, 1
(ma_train, ma_len) = MOVING_AVG(TRAIN_DATA, ma_base, ma_step, ma_no)
(ma_valid, ma_len) = MOVING_AVG(VALID_DATA, ma_base, ma_step, ma_no)
        # Standardization and normalization (between 0 and 1)
scaler = preprocessing.StandardScaler()
ma_train = scaler.fit_transform(ma_train)
ma_valid = scaler.transform(ma_valid)
scaler = preprocessing.MinMaxScaler(feature_range=(0,1))
ma_train = scaler.fit_transform(ma_train)
ma_valid = scaler.fit_transform(ma_valid)
TRAIN_DATA = ma_train
VALID_DATA = ma_valid
HYPERPARAMETER.in_size = ma_train.shape[1]
HYPERPARAMETER.train_size = ma_train.shape[1]
HYPERPARAMETER.valid_size = ma_valid.shape[1]
elif G_PREPROCESS == 'pca':
###### Calculating the pca
(pc_train, pc_len) = PCA_REDUCTION(TRAIN_DATA)
(pc_valid, pc_len) = PCA_REDUCTION(VALID_DATA)
        # Standardization and normalization (between 0 and 1)
scaler = preprocessing.StandardScaler()
pc_train = scaler.fit_transform(pc_train)
pc_valid = scaler.transform(pc_valid)
scaler = preprocessing.MinMaxScaler(feature_range=(0,1))
pc_train = scaler.fit_transform(pc_train)
pc_valid = scaler.fit_transform(pc_valid)
TRAIN_DATA = pc_train
VALID_DATA = pc_valid
HYPERPARAMETER.in_size = pc_train.shape[1]
HYPERPARAMETER.train_size = pc_train.shape[1]
HYPERPARAMETER.valid_size = pc_valid.shape[1]
else:
print("Type is wrong")
exit()
valid_data = VALID_DATA[:HYPERPARAMETER.valid_no]
valid_plain = VALID_PLAIN[:HYPERPARAMETER.valid_no]
attack_data = VALID_DATA[HYPERPARAMETER.valid_no:HYPERPARAMETER.valid_no+HYPERPARAMETER.attack_no]
attack_plain = VALID_PLAIN[HYPERPARAMETER.valid_no:HYPERPARAMETER.valid_no+HYPERPARAMETER.attack_no]
reshape_valid_data = valid_data.reshape((valid_data.shape[0], valid_data.shape[1], 1))
reshape_attack_data = attack_data.reshape((attack_data.shape[0], attack_data.shape[1], 1))
model = CHES2020_CNN_ARCHI(HYPERPARAMETER)
model_name = G_OPEN_DATASET
print("Model Name = " + model_name)
print(model.summary())
st_t = time.time()
history = CHES2020_CNN_TRAIN(LOG_FILE, FP_RESULT, HYPERPARAMETER, GUESS_POS, GUESS_KEY, TRAIN_DATA, TRAIN_PLAIN, valid_data, valid_plain, GPU_CONFIG, model)
ed_t = time.time()
time_file = FINAL_PATH + "train_time.txt"
fp_t = open(time_file, 'w')
fp_t.write("elasped time: %f\n" % (ed_t - st_t))
fp_t.close()
predictions = model.predict(reshape_attack_data)
    if True:  # dump and plot each layer's activations for the first attack trace
for layer in model.layers:
inv_layer = Model(inputs=model.input, outputs=model.get_layer(layer.name).output)
inv_out = inv_layer.predict(reshape_attack_data)
            # average the first attack trace's activations over the channel axis
            avg = [0] * inv_out.shape[1]
if inv_out.ndim == 3:
for idx2 in range(inv_out.shape[2]):
for idx1 in range(inv_out.shape[1]):
avg[idx1] += inv_out[0][idx1][idx2]
for idx1 in range(inv_out.shape[1]):
avg[idx1] /= inv_out.shape[2]
else:
for idx1 in range(inv_out.shape[1]):
avg[idx1] = inv_out[0][idx1]
INV_PATH = (FINAL_PATH + '%s' + '.npy') % (layer.name)
np.save(INV_PATH, avg)
fig = plt.figure(figsize=(20, 10))
plt.rcParams["figure.figsize"] = (20,10)
plt.title(layer.name)
plt.plot(avg)
plt.show()
FIG_PATH = (FINAL_PATH + '%s' + '.png') % (layer.name)
fig.savefig(FIG_PATH, dpi=fig.dpi, bbox_inches="tight")
st_t = time.time()
avg_rank = perform_attacks(HYPERPARAMETER.attack_no, predictions, 100, plt=attack_plain, key=correct_key, byte=GUESS_POS, filename=model_name)
ed_t = time.time()
time_file = FINAL_PATH + "attack_time.txt"
fp_t = open(time_file, 'w')
fp_t.write("elasped time: %f\n" % (ed_t - st_t))
fp_t.close()
print("\n t_GE = ")
print(avg_rank)
print(np.where(avg_rank<=0))
if G_RESULT_SAVE:
for idx in range(avg_rank.shape[0]):
FP_RESULT.write("%f " % avg_rank[idx])
FP_RESULT.write("\n")
FP_RESULT.write("%d" % TRAIN_DATA.shape[0])
INV_PATH = (FINAL_PATH + 'GE_result' + '.npy')
np.save(INV_PATH, avg_rank)
fig = plt.figure(figsize=(20, 10))
plt.plot(avg_rank, label=(G_PREPROCESS + ' Result against ' + G_OPEN_DATASET))
plt.rcParams["figure.figsize"] = (20,10)
plt.legend(fontsize='x-large')
FIG_PATH = (FINAL_PATH + 'GE_result' + '.png')
fig.savefig(FIG_PATH, dpi=fig.dpi, bbox_inches="tight")
plt.show()
trace = np.load(FINAL_PATH + 'GE_result' + ".npy")
plt.plot(trace)
plt.rcParams["figure.figsize"] = (20,10)
plt.show()
model.save((FINAL_PATH + 'ORIGINAL_RESULT' + '.hdf5'))
def CHES2020_CNN_ARCHI(HYPERPARAMETER):
input_shape = (HYPERPARAMETER.in_size, 1)
img_input = Input(shape=input_shape)
BN_IDX = [1] * HYPERPARAMETER.layer_size_cnn
COV_NO_FILTER = [32, 64, 128]
COV_SIZE_FILTER = [1, 50, 3]
POOL_FILTER = [2, 50, 2]
LAYER_NO = [20, 20, 20]
x = img_input
for array_idx in range(HYPERPARAMETER.layer_size_cnn):
x = Conv1D(COV_NO_FILTER[array_idx], COV_SIZE_FILTER[array_idx], kernel_initializer='he_uniform', activation='selu', padding='same', name='block1_conv%d' % array_idx)(x)
if BN_IDX[array_idx] == 1:
x = BatchNormalization()(x)
x = AveragePooling1D(POOL_FILTER[array_idx], strides=POOL_FILTER[array_idx], name='block%d_pool' % array_idx)(x)
x = Flatten(name='flatten')(x)
for array_idx in range(HYPERPARAMETER.layer_size):
x = Dense(LAYER_NO[array_idx], kernel_initializer='he_uniform', activation='selu', name='fc%d' % array_idx)(x)
# Logits layer
x = Dense(HYPERPARAMETER.out_size, activation='softmax', name='predictions')(x)
# Create model
inputs = img_input
model = Model(inputs, x, name=G_OPEN_DATASET)
optimizer = Adam(lr=HYPERPARAMETER.learn_rate)
model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
return model
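# Illustrative instantiation (C_MPL_HYPERPARAMETER is defined elsewhere in
# this project; the concrete field values below are placeholders):
#   hp = C_MPL_HYPERPARAMETER()
#   hp.in_size, hp.out_size = 700, 256
#   hp.layer_size_cnn, hp.layer_size = 3, 3
#   hp.learn_rate = 5e-3
#   model = CHES2020_CNN_ARCHI(hp)
#   model.summary()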
def CHES2020_CNN_TRAIN(LOG_FILE, FP_RESULT, HYPERPARAMETER, GUESS_POS, GUESS_KEY, TRAIN_DATA, TRAIN_PLAIN, VALID_DATA, VALID_PLAIN, GPU_CONFIG, MODEL):
# Save model every epoch
save_model = ModelCheckpoint(LOG_FILE)
train_inv = [0] * HYPERPARAMETER.train_no
INV_CAL(TRAIN_PLAIN, HYPERPARAMETER.train_no, GUESS_POS, GUESS_KEY, train_inv)
# Calculating the intermediate variables
train_inv_np = np.array(train_inv)
train_inv_np = reshape(train_inv_np, (HYPERPARAMETER.train_no, 1))
valid_inv = [0] * HYPERPARAMETER.valid_no
INV_CAL(VALID_PLAIN, HYPERPARAMETER.valid_no, GUESS_POS, GUESS_KEY, valid_inv)
valid_inv_np = np.array(valid_inv)
valid_inv_np = reshape(valid_inv_np, (HYPERPARAMETER.valid_no, 1))
# Get the input layer shape
input_layer_shape = MODEL.get_layer(index=0).input_shape
# Sanity check
if input_layer_shape[1] != len(TRAIN_DATA[0]):
print("Input layer error")
sys.exit(-1)
# Reshape the train and valid data
reshape_train_data = TRAIN_DATA.reshape((TRAIN_DATA.shape[0], TRAIN_DATA.shape[1], 1))
reshape_valid_data = VALID_DATA.reshape((VALID_DATA.shape[0], VALID_DATA.shape[1], 1))
lr_manager = OneCycleLR(max_lr=HYPERPARAMETER.learn_rate, end_percentage=0.2, scale_percentage=0.1, maximum_momentum=None, minimum_momentum=None,verbose=True)
callbacks = [save_model, lr_manager]
history = MODEL.fit(x=reshape_train_data, y=to_categorical(train_inv_np, num_classes=HYPERPARAMETER.out_size), validation_data=(reshape_valid_data, to_categorical(valid_inv_np, num_classes=HYPERPARAMETER.out_size)), batch_size=HYPERPARAMETER.batch_size, verbose = 1, epochs=HYPERPARAMETER.epoch_size, callbacks=callbacks)
return history
def MOVING_AVG_SUB(DATA_X, WINDOW_SIZE):
no = DATA_X.shape[0]
len = DATA_X.shape[1]
out_len = len - WINDOW_SIZE + 1
output = np.zeros((no, out_len))
for i in range(out_len):
output[:,i]=np.mean(DATA_X[:,i : i + WINDOW_SIZE], axis=1)
return output
def MOVING_AVG(DATA_X, WINDOW_BASE, STEP_SIZE, NO):
if NO == 0:
return (None, [])
out = MOVING_AVG_SUB(DATA_X, WINDOW_BASE)
data_len = [out.shape[1]]
for i in range(1, NO):
window_size = WINDOW_BASE + STEP_SIZE * i
if window_size > DATA_X.shape[1]:
continue
new_series = MOVING_AVG_SUB(DATA_X, window_size)
data_len.append(new_series.shape[1])
out = np.concatenate([out, new_series], axis=1)
return (out, data_len)
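# Worked example (illustrative): a window of 3 over a length-5 trace yields
# 5 - 3 + 1 = 3 averaged points per trace:
#   x = np.array([[1., 2., 3., 4., 5.]])
#   MOVING_AVG(x, WINDOW_BASE=3, STEP_SIZE=1, NO=1)  # -> ([[2., 3., 4.]], [3])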
def SCA_PCA(IN_TRAIN):
pca_result = PCA(n_components=20)
return pca_result.fit_transform(IN_TRAIN)
def PCA_REDUCTION(DATA_X):
pca_data = SCA_PCA(DATA_X)
return (pca_data, [pca_data.shape[1]])
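# Illustrative call (shapes are placeholders): PCA keeps the 20 strongest
# components, so a (1000, 700) trace matrix is reduced to (1000, 20):
#   traces = np.random.rand(1000, 700)
#   reduced, lens = PCA_REDUCTION(traces)  # reduced.shape == (1000, 20), lens == [20]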
def MCNN_SCA(LOG_FILE, FP_RESULT, FINAL_PATH, HYPERPARAMETER, GUESS_POS, GUESS_KEY, TRAIN_DATA, TRAIN_PLAIN, VALID_DATA, VALID_PLAIN, GPU_CONFIG):
if G_OPEN_DATASET == 'aes_rd':
correct_key = [0x2b, 0x7E, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c] #AES_RD
elif G_OPEN_DATASET == 'aes_hd':
correct_key = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] #AES_HD
else:
correct_key = [0x4D, 0xFB, 0xE0, 0xF2, 0x72, 0x21, 0xFE, 0x10, 0xA7, 0x8D, 0x4A, 0xDC, 0x8E, 0x49, 0x04, 0x69] #ASCAD
# Generating hyperparameter
hyperparameter_1 = C_MPL_HYPERPARAMETER()
hyperparameter_2 = C_MPL_HYPERPARAMETER()
hyperparameter_3 = C_MPL_HYPERPARAMETER()
COPY_HYPER(hyperparameter_1, HYPERPARAMETER)
COPY_HYPER(hyperparameter_2, HYPERPARAMETER)
COPY_HYPER(hyperparameter_3, HYPERPARAMETER)
TRAIN_DATA = TRAIN_DATA.astype('float32')
VALID_DATA = VALID_DATA.astype('float32')
    # Compute the intermediate variables for the training set
train_inv = [0] * HYPERPARAMETER.train_no
INV_CAL(TRAIN_PLAIN, HYPERPARAMETER.train_no, GUESS_POS, GUESS_KEY, train_inv)
train_inv_np = np.array(train_inv)
train_inv_np = reshape(train_inv_np, (HYPERPARAMETER.train_no, 1))
    # Compute the intermediate variables for the validation set
valid_inv = [0] * (HYPERPARAMETER.valid_no + HYPERPARAMETER.attack_no)
INV_CAL(VALID_PLAIN, HYPERPARAMETER.valid_no, GUESS_POS, GUESS_KEY, valid_inv)
valid_inv_np = np.array(valid_inv)
valid_inv_np = reshape(valid_inv_np, ((HYPERPARAMETER.valid_no+ HYPERPARAMETER.attack_no), 1))
    # Standardization and normalization (between 0 and 1)
scaler = preprocessing.StandardScaler()
TRAIN_DATA = scaler.fit_transform(TRAIN_DATA)
VALID_DATA = scaler.transform(VALID_DATA)
scaler = preprocessing.MinMaxScaler(feature_range=(0,1))
TRAIN_DATA = scaler.fit_transform(TRAIN_DATA)
VALID_DATA = scaler.fit_transform(VALID_DATA)
(TRAIN_DATA, TRAIN_PLAIN) = SHUFFLE_SCA_DATA(TRAIN_DATA, TRAIN_PLAIN)
train_data_1 = TRAIN_DATA
reshape_valid_data_1 = VALID_DATA.reshape((VALID_DATA.shape[0], VALID_DATA.shape[1], 1))
###### The setting for second CNN Layer
ma_base, ma_step, ma_no = 100, 1, 1
(ma_train, ma_len) = MOVING_AVG(TRAIN_DATA, ma_base, ma_step, ma_no)
(ma_valid, ma_len) = MOVING_AVG(VALID_DATA, ma_base, ma_step, ma_no)
    # Standardization and normalization (between 0 and 1)
scaler = preprocessing.StandardScaler()
ma_train = scaler.fit_transform(ma_train)
ma_valid = scaler.transform(ma_valid)
scaler = preprocessing.MinMaxScaler(feature_range=(0,1))
ma_train = scaler.fit_transform(ma_train)
ma_valid = scaler.fit_transform(ma_valid)
train_data_2 = ma_train
reshape_valid_data_2 = ma_valid.reshape((ma_valid.shape[0], ma_valid.shape[1], 1))
hyperparameter_2.in_size = ma_train.shape[1]
hyperparameter_2.train_size = ma_train.shape[1]
hyperparameter_2.valid_size = ma_valid.shape[1]
###### The setting for third CNN Layer
(pc_train, pc_len) = PCA_REDUCTION(TRAIN_DATA)
(pc_valid, pc_len) = PCA_REDUCTION(VALID_DATA)
    # Standardization and normalization (between 0 and 1)
scaler = preprocessing.StandardScaler()
pc_train = scaler.fit_transform(pc_train)
pc_valid = scaler.transform(pc_valid)
scaler = preprocessing.MinMaxScaler(feature_range=(0,1))
pc_train = scaler.fit_transform(pc_train)
pc_valid = scaler.fit_transform(pc_valid)
hyperparameter_3.in_size = pc_train.shape[1]
hyperparameter_3.train_size = pc_train.shape[1]
hyperparameter_3.valid_size = pc_valid.shape[1]
train_data_3 = pc_train
reshape_valid_data_3 = pc_valid.reshape((pc_valid.shape[0], pc_valid.shape[1], 1))
# Split to validation data and attack data
valid_data_1 = VALID_DATA[:HYPERPARAMETER.valid_no]
valid_data_2 = ma_valid[:HYPERPARAMETER.valid_no]
valid_data_3 = pc_valid[:HYPERPARAMETER.valid_no]
valid_plain = VALID_PLAIN[:HYPERPARAMETER.valid_no]
attack_data_1 = VALID_DATA[HYPERPARAMETER.valid_no:HYPERPARAMETER.valid_no+HYPERPARAMETER.attack_no]
attack_data_2 = ma_valid[HYPERPARAMETER.valid_no:HYPERPARAMETER.valid_no+HYPERPARAMETER.attack_no]
attack_data_3 = pc_valid[HYPERPARAMETER.valid_no:HYPERPARAMETER.valid_no+HYPERPARAMETER.attack_no]
attack_plain = VALID_PLAIN[HYPERPARAMETER.valid_no:HYPERPARAMETER.valid_no+HYPERPARAMETER.attack_no]
reshape_valid_data_1 = valid_data_1.reshape((valid_data_1.shape[0], valid_data_1.shape[1], 1))
reshape_valid_data_2 = valid_data_2.reshape((valid_data_2.shape[0], valid_data_2.shape[1], 1))
reshape_valid_data_3 = valid_data_3.reshape((valid_data_3.shape[0], valid_data_3.shape[1], 1))
reshape_attack_data_1 = attack_data_1.reshape((attack_data_1.shape[0], attack_data_1.shape[1], 1))
reshape_attack_data_2 = attack_data_2.reshape((attack_data_2.shape[0], attack_data_2.shape[1], 1))
reshape_attack_data_3 = attack_data_3.reshape((attack_data_3.shape[0], attack_data_3.shape[1], 1))
model = MCNN_ARCHI(hyperparameter_1, hyperparameter_2, hyperparameter_3)
model_name = 'MCNN_' + G_OPEN_DATASET
print("Model Name = " + model_name)
print(model.summary())
st_t = time.time()
history = MCNN_TRAIN(LOG_FILE, FP_RESULT, HYPERPARAMETER, GUESS_POS, GUESS_KEY, train_data_1, train_data_2, train_data_3, TRAIN_PLAIN, reshape_valid_data_1, reshape_valid_data_2, reshape_valid_data_3, valid_plain, GPU_CONFIG, model)
ed_t = time.time()
time_file = FINAL_PATH + "train_time.txt"
fp_t = open(time_file, 'w')
fp_t.write("elasped time: %f\n" % (ed_t - st_t))
fp_t.close()
predictions = model.predict([reshape_attack_data_1, reshape_attack_data_2, reshape_attack_data_3])
    if True:  # dump and plot each layer's activations for the first attack trace
for layer in model.layers:
inv_layer = Model(inputs=model.input, outputs=model.get_layer(layer.name).output)
inv_out = inv_layer.predict([reshape_attack_data_1, reshape_attack_data_2, reshape_attack_data_3])
            # average the first attack trace's activations over the channel axis
            avg = [0] * inv_out.shape[1]
if inv_out.ndim == 3:
for idx2 in range(inv_out.shape[2]):
for idx1 in range(inv_out.shape[1]):
avg[idx1] += inv_out[0][idx1][idx2]
for idx1 in range(inv_out.shape[1]):
avg[idx1] /= inv_out.shape[2]
else:
for idx1 in range(inv_out.shape[1]):
avg[idx1] = inv_out[0][idx1]
INV_PATH = (FINAL_PATH + '%s' + '.npy') % (layer.name)
np.save(INV_PATH, avg)
fig = plt.figure(figsize=(20, 10))
plt.rcParams["figure.figsize"] = (20,10)
plt.title(layer.name)
plt.plot(avg)
plt.show()
FIG_PATH = (FINAL_PATH + '%s' + '.png') % (layer.name)
fig.savefig(FIG_PATH, dpi=fig.dpi, bbox_inches="tight")
st_t = time.time()
avg_rank = perform_attacks(HYPERPARAMETER.attack_no, predictions, 100, plt=attack_plain, key=correct_key, byte=GUESS_POS, filename=model_name)
ed_t = time.time()
time_file = FINAL_PATH + "attack_time.txt"
fp_t = open(time_file, 'w')
fp_t.write("elasped time: %f\n" % (ed_t - st_t))
fp_t.close()
print("\n t_GE = ")
print(avg_rank)
print(np.where(avg_rank<=0))
if G_RESULT_SAVE:
for idx in range(avg_rank.shape[0]):
FP_RESULT.write("%f " % avg_rank[idx])
FP_RESULT.write("\n")
FP_RESULT.write("%d" % TRAIN_DATA.shape[0])
INV_PATH = (FINAL_PATH + 'GE_result' + '.npy')
np.save(INV_PATH, avg_rank)
fig = plt.figure(figsize=(20, 10))
plt.plot(avg_rank, label=('MCNN Result against ') + G_OPEN_DATASET)
plt.rcParams["figure.figsize"] = (20,10)
plt.legend(fontsize='x-large')
FIG_PATH = (FINAL_PATH + 'GE_result' + '.png')
fig.savefig(FIG_PATH, dpi=fig.dpi, bbox_inches="tight")
plt.show()
trace = np.load(FINAL_PATH + 'GE_result' + ".npy")
plt.plot(trace)
plt.rcParams["figure.figsize"] = (20,10)
plt.show()
model.save((FINAL_PATH + 'MCNN_RESULT' + '.hdf5'))
def MCNN_ARCHI(HYPERPARAMETER_PRE_1, HYPERPARAMETER_PRE_2, HYPERPARAMETER_PRE_3):
HYPERPARAMETER_PRE_1.layer_size_cnn = 2
HYPERPARAMETER_PRE_2.layer_size_cnn = 2
HYPERPARAMETER_PRE_3.layer_size_cnn = 2
in_1 = (HYPERPARAMETER_PRE_1.in_size, 1)
ig_1 = Input(shape=in_1)
in_2 = (HYPERPARAMETER_PRE_2.in_size, 1)
ig_2 = Input(shape=in_2)
in_3 = (HYPERPARAMETER_PRE_3.in_size, 1)
ig_3 = Input(shape=in_3)
COV_NO_1 = [32, 64]
COV_SZ_1 = [1, 50]
PL_FIL_1 = [2, 50]
COV_NO_2 = [32, 64]
COV_SZ_2 = [1, 50]
PL_FIL_2 = [2, 50]
COV_NO_3 = [32, 64]
COV_SZ_3 = [1, 1]
PL_FIL_3 = [2, 1]
COV_NO = [128]
COV_SZ = [3]
PL_FIL = [2]
LAY_NO = [20, 20, 20]
x1 = ig_1
for array_idx in range(HYPERPARAMETER_PRE_1.layer_size_cnn):
x1 = Conv1D(COV_NO_1[array_idx], COV_SZ_1[array_idx], kernel_initializer='he_uniform', activation='selu', padding='same', name='first_block%d_conv' % array_idx)(x1)
x1 = BatchNormalization()(x1)
x1 = AveragePooling1D(PL_FIL_1[array_idx], strides=PL_FIL_1[array_idx], name='first_block%d_pool' % array_idx)(x1)
x1 = BatchNormalization()(x1)
x1 = Model(inputs=ig_1, outputs=x1)
x2 = ig_2
for array_idx in range(HYPERPARAMETER_PRE_2.layer_size_cnn):
x2 = Conv1D(COV_NO_2[array_idx], COV_SZ_2[array_idx], kernel_initializer='he_uniform', activation='selu', padding='same', name='second_block%d_conv' % array_idx)(x2)
x2 = BatchNormalization()(x2)
x2 = AveragePooling1D(PL_FIL_2[array_idx], strides=PL_FIL_2[array_idx], name='second_block%d_pool' % array_idx)(x2)
x2 = BatchNormalization()(x2)
x2 = Model(inputs=ig_2, outputs=x2)
x3 = ig_3
for array_idx in range(HYPERPARAMETER_PRE_3.layer_size_cnn):
x3 = Conv1D(COV_NO_3[array_idx], COV_SZ_3[array_idx], kernel_initializer='he_uniform', activation='selu', padding='same', name='third_block%d_conv' % array_idx)(x3)
x3 = BatchNormalization()(x3)
        x3 = AveragePooling1D(PL_FIL_3[array_idx], strides=PL_FIL_3[array_idx], name='third_block%d_pool' % array_idx)(x3)
x3 = BatchNormalization()(x3)
x3 = Model(inputs=ig_3, outputs=x3)
x4 = Concatenate(axis=1)([x1.output, x2.output, x3.output])
x4 = BatchNormalization()(x4)
for array_idx in range(1):
x4 = Conv1D(COV_NO[array_idx], COV_SZ[array_idx], kernel_initializer='he_uniform', activation='selu', padding='same', name='fourth_block%d_conv' % array_idx)(x4)
x4 = BatchNormalization()(x4)
x4 = AveragePooling1D(PL_FIL[array_idx], strides=PL_FIL[array_idx], name='fourth_block%d_pool' % array_idx)(x4)
x4 = Flatten(name='flatten_4')(x4)
for array_idx in range(HYPERPARAMETER_PRE_1.layer_size):
x4 = Dense(LAY_NO[array_idx], kernel_initializer='he_uniform', activation='selu', name='fc%d' % array_idx)(x4)
# Logits layer
x4 = Dense(HYPERPARAMETER_PRE_1.out_size, activation='softmax', name='predictions')(x4)
# Create model
model = Model(inputs=[x1.input, x2.input, x3.input], outputs=x4, name='mcnn')
optimizer = Adam(lr=HYPERPARAMETER_PRE_1.learn_rate)
model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
return model
def MCNN_TRAIN(LOG_FILE, FP_RESULT, HYPERPARAMETER, GUESS_POS, GUESS_KEY, TRAIN_DATA_1, TRAIN_DATA_2, TRAIN_DATA_3, TRAIN_PLAIN, VALID_DATA_1, VALID_DATA_2, VALID_DATA_3, VALID_PLAIN, GPU_CONFIG, MODEL):
# Save model every epoch
save_model = ModelCheckpoint(LOG_FILE)
    # Compute the intermediate variables for the training set
train_inv = [0] * HYPERPARAMETER.train_no
INV_CAL(TRAIN_PLAIN, HYPERPARAMETER.train_no, GUESS_POS, GUESS_KEY, train_inv)
train_inv = train_inv[:HYPERPARAMETER.train_no]
train_inv_np = np.array(train_inv)
train_inv_np = reshape(train_inv_np, (HYPERPARAMETER.train_no, 1))
    # Compute the intermediate variables for the validation set
valid_inv = [0] * HYPERPARAMETER.valid_no
INV_CAL(VALID_PLAIN, HYPERPARAMETER.valid_no, GUESS_POS, GUESS_KEY, valid_inv)
valid_inv = valid_inv[:HYPERPARAMETER.valid_no]
valid_inv_np = np.array(valid_inv)
valid_inv_np = reshape(valid_inv_np, (HYPERPARAMETER.valid_no, 1))
    # Convert to 3-dimensional shape
reshape_train_data_1 = TRAIN_DATA_1.reshape((TRAIN_DATA_1.shape[0], TRAIN_DATA_1.shape[1], 1))
reshape_valid_data_1 = VALID_DATA_1.reshape((VALID_DATA_1.shape[0], VALID_DATA_1.shape[1], 1))
reshape_train_data_2 = TRAIN_DATA_2.reshape((TRAIN_DATA_2.shape[0], TRAIN_DATA_2.shape[1], 1))
reshape_valid_data_2 = VALID_DATA_2.reshape((VALID_DATA_2.shape[0], VALID_DATA_2.shape[1], 1))
reshape_train_data_3 = TRAIN_DATA_3.reshape((TRAIN_DATA_3.shape[0], TRAIN_DATA_3.shape[1], 1))
reshape_valid_data_3 = VALID_DATA_3.reshape((VALID_DATA_3.shape[0], VALID_DATA_3.shape[1], 1))
lr_manager = OneCycleLR(max_lr=HYPERPARAMETER.learn_rate, end_percentage=0.2, scale_percentage=0.1, maximum_momentum=None, minimum_momentum=None,verbose=True)
callbacks = [save_model, lr_manager]
history = MODEL.fit(x=[reshape_train_data_1, reshape_train_data_2, reshape_train_data_3], y=to_categorical(train_inv_np, num_classes=HYPERPARAMETER.out_size), validation_data=([reshape_valid_data_1, reshape_valid_data_2, reshape_valid_data_3], to_categorical(valid_inv_np, num_classes=HYPERPARAMETER.out_size)), batch_size=HYPERPARAMETER.batch_size, verbose = 1, epochs=HYPERPARAMETER.epoch_size, callbacks=callbacks)
return history
#########################################
############## MAIN SOURCE ##############
#########################################
# CHES2020_CNN_SCA
# MCNN_SCA
MASSIVE_SCA_DL(RUN_FUNCTION=MCNN_SCA, BACKUP_FILE=None, DATA_TYPE='npy', GPU_CONFIG=None)
# ========== /freqbased.py  (repo: Amarjyotkaur/Stemly-Automatic-Exploratory-Data-Analysis) ==========
import pandas as pd
from .builtin import quantile, iqr
"""
Functions:
1. min_freq
2. max_freq
3. mean_freq
4. median_freq
5. variance_freq
6. stdev_freq
7. range_freq
8. mean_abs_dev_freq
9. coef_of_var_freq
10. outliers_freq
11. outlier_c_freq"""
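# Minimal usage sketch (assuming this module is imported as `ft`, as in the
# docstring examples below):
#   import pandas as pd
#   s = pd.Series([2, 3, 3, 4, 3, 3, 2, 1])
#   ft.range_freq(s)  # -> 3 (highest count 4 minus lowest count 1)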
#Minimum
def min_freq(colData,**kwargs):
"""
Return the minimum frequency count of the values in the requested axis.
If you want the *index* of the minimum, use ``idxmin``. This is
the equivalent of the ``numpy.ndarray`` method ``argmin``.
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
axis : {index (0)}
Axis for the function to be applied on.
skipna : bool, default True
Exclude NA/null values when computing the result.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a scalar.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
**kwargs
Additional keyword arguments to be passed to the function.
Returns
-------
scalar or Series (if level specified)
Examples
--------
>>>s=pd.Series([2,3,3,4,3,3,2,1])
>>ft.min_freq()
1
"""
freq=pd.Series.value_counts(colData)
return pd.Series.min(freq,**kwargs)
#Maximum
def max_freq(colData,**kwargs):
    """
    Return the maximum frequency count of the values in the requested axis.
    If you want the *index* of the maximum, use ``idxmax``. This is
    the equivalent of the ``numpy.ndarray`` method ``argmax``.
    Args:
        colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
        axis : {index (0)}
            Axis for the function to be applied on.
        skipna : bool, default True
            Exclude NA/null values when computing the result.
        level : int or level name, default None
            If the axis is a MultiIndex (hierarchical), count along a
            particular level, collapsing into a scalar.
        numeric_only : bool, default None
            Include only float, int, boolean columns. If None, will attempt to use
            everything, then use only numeric data. Not implemented for Series.
        **kwargs
            Additional keyword arguments to be passed to the function.
    Returns:
        scalar or Series (if level specified)
    Examples:
        >>> s = pd.Series([2,3,3,4,3,3,2,1])
        >>> ft.max_freq(s)
        4
    """
    freq = pd.Series.value_counts(colData)
    return pd.Series.max(freq,**kwargs)
#Mean
def mean_freq(colData,**kwargs):
    """
    Return the mean frequency count of the values in the requested axis.
    Args:
        colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
        axis : {index (0)}
            Axis for the function to be applied on.
        skipna : bool, default True
            Exclude NA/null values when computing the result.
        level : int or level name, default None
            If the axis is a MultiIndex (hierarchical), count along a
            particular level, collapsing into a scalar.
        numeric_only : bool, default None
            Include only float, int, boolean columns. If None, will attempt to use
            everything, then use only numeric data. Not implemented for Series.
        **kwargs
            Additional keyword arguments to be passed to the function.
    Returns:
        scalar or Series (if level specified)
    Examples:
        >>> a = pd.Series([1,2,3,4,5,4,3,2,2,1])
        >>> ft.mean_freq(a)
        2
    """
    freq = pd.Series.value_counts(colData)
    return pd.Series.mean(freq,**kwargs)
#Median
def median_freq(colData,**kwargs):
"""
Return the median of the frequency values of the input series
Args:
axis : {index (0)}
Axis for the function to be applied on.
skipna : bool, default True
Exclude NA/null values when computing the result.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a scalar.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
**kwargs
Additional keyword arguments to be passed to the function.
Returns:
scalar or Series (if level specified)
Example:
>>>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>ft.median_freq(a)
2
"""
freq = pd.Series.value_counts(colData)
return pd.Series.median(freq,**kwargs)
#Variance
def variance_freq(colData, **kwargs):
"""
Return unbiased variance over frequency values of input series.
Normalized by N-1 by default. This can be changed using the ddof argument
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
axis : {index (0)}
skipna : bool, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a scalar.
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations is N - ddof,
where N represents the number of elements.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
Returns:
scalar or Series (if level specified)
Example:
>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>ft.variance_freq(a)
0.5
"""
freq = pd.Series.value_counts(colData)
return pd.Series.var(freq,**kwargs)
#Standard Deviation
def stdev_freq(colData,**kwargs):
"""Return sample standard deviation over frequency values of input series.
Normalized by N-1 by default. This can be changed using the ddof argument
Args:
colData (array_like, 1D) : Pandas Series of Data or Dataframe Column for function to be applied on
axis : {index (0)}
skipna : bool, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a scalar.
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations is N - ddof,
where N represents the number of elements.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
Returns:
scalar or Series (if level specified)
Example:
>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>ft.stdev_freq(a)
0.707107
"""
freq = pd.Series.value_counts(colData)
return pd.Series.std(freq,**kwargs)
#Range
def range_freq(colData):
"""
Range of Frequency
Returns the difference between the highest frequency value and lowest frequency value in the input series object.
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
Returns:
range : int
range = Maximum value - Minimum value
Example:
>>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>>ft.range_freq(a)
2"""
freq = pd.Series.value_counts(colData)
return pd.Series.max(freq)-pd.Series.min(freq)
def mean_abs_dev_freq(colData):
"""Return the mean absolute deviation of the values for the requested axis.
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
axis : {index (0)}
Axis for the function to be applied on.
skipna : bool, default True
Exclude NA/null values when computing the result.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a scalar.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
**kwargs
Additional keyword arguments to be passed to the function.
Returns:
scalar or Series (if level specified)
Example:
>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>ft.mean_abs_dev_freq(colData)
0.4
"""
freq = pd.Series.value_counts(colData)
return pd.Series.mad(freq)
# Coefficient Of Variation
def coef_of_var_freq(colData):
"""Coefficient of Variance of Frequency
Returns the coefficient of variance of frequency values from the data in input series.
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
Returns:
Coefficient of Variance: float
coefficient of variance = standard deviation/mean
Example:
>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>ft.coef_of_var(a)
0.353553
"""
freq=pd.Series.value_counts(colData)
return pd.Series.std(freq)/pd.Series.mean(freq)
def outliers_freq(colData):
"""Frequency Outliers
Returns list of outliers of frequency values from input series.
Outliers are determined using the quartiles of the data. Upper and lower bounds are calculated which are 1.5*IQR higher and lower than the 3rd and 1st quartiles respectively. Data values higher than the upper bound or lower than the lower bound are considered outliers.
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
Returns:
list of outliers : list
Example:
>>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>>ft.outliers_freq(c)
[]
"""
freq = pd.Series.value_counts(colData)
    lowerbound = quantile(freq, 0.25) - (1.5 * iqr(freq))
    upperbound = quantile(freq, 0.75) + (1.5 * iqr(freq))
    result = []
    for x in freq:
        if (x < lowerbound) or (x > upperbound):
            result.append(x)
    return result
#Number of Outliers
def outlier_c_freq(colData):
"""
Number of Outliers of Frequency
Returns the number of outliers in frequency values of input series.
Outliers are determined using the quartiles of the data. Upper and lower bounds are calculated which are 1.5*IQR higher and lower than the 3rd and 1st quartiles respectively. Data values higher than the upper bound or lower than the lower bound are considered outliers.
Args:
colData (array_like, 1D):Pandas Series of Data or Dataframe Column for function to be applied on
Returns:
Number of Outliers : int
Example:
>>>a=pd.Series([1,2,3,4,5,4,3,2,2,1])
>>>ft.outlier_c_freq(a)
0
"""
    return len(outliers_freq(colData))
# ========== /Project/backend/manage.py  (repo: faisalsial/2018latestversion) ==========
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gratelancer.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
# ========== /dao/influence_dao.py  (repo: seraphlnWu/weibo_dao) ==========
# coding=utf8
from utils import MONGODB_INSTANCE
from utils import today_datetime
from datetime import timedelta
def get_cur_influence(uid):
    '''Get the user's mutable attributes, e.g. influence, followers count, statuses count'''
inf_list = MONGODB_INSTANCE.influence.find(
{'id': uid}
).sort('date', -1).limit(10)
for cur_inf in inf_list:
if any([
cur_inf.get('followers_count'),
cur_inf.get('influence'),
cur_inf.get('followrs_activeness_distr'),
cur_inf.get('friends_count'),
cur_inf.get('statuses_count'),
]):
return cur_inf
return get_last_influence(uid)
def get_last_influence(uid):
    ''' Get the last stored influence record '''
return MONGODB_INSTANCE.influence.find_one({'id': uid}) or {}
def get_influence_history(uid, period=10, reftime=None):
    ''' Get a list of historical influence records '''
today = today_datetime()
if reftime and reftime < today:
pass
else:
reftime = today
from_date = reftime - timedelta(period)
result = MONGODB_INSTANCE.influence.find({
'id': uid,
'date': {'$gt': from_date, '$lte': reftime},
}).sort('date', -1)
return check_influence_list(result)
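# Illustrative call (the uid value is a placeholder):
#   records = get_influence_history(uid=123456, period=30)
#   # -> at most one valid influence document per day from the last 30 days,
#   #    sorted newest first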
def check_influence_list(histories):
    ''' Check whether the records in the given influence list are valid '''
his_list = []
for his in histories:
if any([
his.get('account_activeness', 0),
his.get('followers_quality', 0),
his.get('followers_activeness', 0)
]):
if len(his_list) == 0:
his_list.append(his)
else:
                # keep at most one record per day
                if his['date'].day != his_list[-1]['date'].day:
                    his_list.append(his)
return his_list
# ========== /analyser/complex_type_analyser/text/text_utils.py  (repo: sebneu/csvprofiler) ==========
# -*- coding: utf-8 -*-
#!/usr/bin/env python
'''
Created on May 19, 2014
@author: max
'''
import re
import unittest
import unicodedata
import urlparse
SERVER_URL = 'http://spotlight.dbpedia.org/rest/annotate'
# 'http://localhost/rest/annotate'
date_regex = '^(?:(?:31(\/|-|\.)(?:0?[13578]|1[02]))\1|(?:(?:29|30)(\/|-|\.)(?:0?[1,3-9]|1[0-2])\2))(?:(?:1[6-9]|[2-9]\d)?\d{2})$|^(?:29(\/|-|\.)0?2\3(?:(?:(?:1[6-9]|[2-9]\d)?(?:0[48]|[2468][048]|[13579][26])|(?:(?:16|[2468][048]|[3579][26])00))))$|^(?:0?[1-9]|1\d|2[0-8])(\/|-|\.)(?:(?:0?[1-9])|(?:1[0-2]))\4(?:(?:1[6-9]|[2-9]\d)?\d{2})$'
date_pattern = re.compile(date_regex)
date_regex2 = '^[1|2][0-9][0-9][0-9][\/-]?[0-3][0-9][\/-]?[0-3][0-9]$'
date_pattern2 = re.compile(date_regex2)
date_regex3 = '((0[1-9])|(1[0-2]))[\/-]((0[1-9])|(1[0-9])|(2[0-9])|(3[0-1]))[\/-](\d{4})'
date_pattern3 = re.compile(date_regex3)
date_regex4 = '^([0-9]?[0-9][\.\/-])?([0-3]?[0-9][\.\/-])\s?[1-9][0-9]([0-9][0-9])?$'
date_pattern4 = re.compile(date_regex4)
email_regex = '[a-zA-Z0-9_\.\+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-\.]+'
email_pattern = re.compile(email_regex)
phone_regex = '^\(?\+?\d+\)?(\s?\d+)+$'
phone_pattern = re.compile(phone_regex)
address_regex = ''
address_pattern = re.compile(address_regex)
url_regex = 'https?\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,}'
url_pattern = re.compile(url_regex)
places = ['street', 'strasse', 'rue', 'str', 'str.', 'platz', 'allee', 'gasse', 'g.', 'blvd', 'ave', 'road']
special_symbols = ['+', ' ', ';', '[', '/', ']', '\\', '-']
commas = [',', '.']
resource = ['://', 'www.', '.jpg', '.png', '.gif', '.html', '.htm', '.mp3', '.doc', '.pdf', '.ps', '.docx']
yes_no = ['yes', 'no', 'y', 'n', 'j', 'ja', 'nein', 'si', 'oui', 'da', 'njet']
# units =
#camelCase regex
first_cap_re = re.compile('(.)([A-Z][a-z]+)')
all_cap_re = re.compile('([a-z0-9])([A-Z])')
lookup = []
def contains_number(inputString):
return any(char.isdigit() for char in inputString)
def contains_alpha(inputString):
return any(char.isalpha() for char in inputString)
def contains_special(inputString):
return any(char in special_symbols for char in inputString)
def contains_commas(inputString):
return any(char in commas for char in inputString)
def contains_ampersand(inputString):
return any(char == '@' for char in inputString)
def contains_unit_symbol(inputString):
    return any(char in [u'%', u'$', u'€'] for char in inputString)
def contains_resource(inputString):
for item in resource:
if item in inputString:
return True
return False
def is_yes_no(cell):
return cell in yes_no
def is_alpha(cell):
for c in cell:
if not c.isalpha() and c <> ' ':
return False
return True
def is_alphanum(cell):
is_digit = False
is_alpha = False
for c in cell:
if c.isalpha():
is_alpha = True
elif c.isdigit():
is_digit = True
return is_digit and is_alpha
def is_street(cell):
    # address_regex is empty, so the regex check is skipped; only the
    # keyword lookup below is meaningful
    if address_regex and address_pattern.match(cell):
        return True
    for place in places:
        if place in cell:
            return True
    return False
def is_phone(cell):
return phone_pattern.match(cell)
def is_email(cell):
return email_pattern.match(cell)
def is_url(cell):
return url_pattern.match(cell)
def is_digitsep(cell):
is_digit = False
is_sep = False
separators = [':', ',', '-', '/']
for c in cell:
if c in separators:
is_sep = True
elif c.isdigit():
is_digit = True
return is_digit and is_sep
def is_date(cell):
if date_pattern.match(cell):
return True
if date_pattern2.match(cell):
return True
if date_pattern3.match(cell):
return True
if date_pattern4.match(cell):
return True
return False
def is_year(cell):
    try:
        int_val = int(cell)
        return 1400 <= int_val <= 2100
    except Exception:
        return False
def is_year_month(cell):
    try:
        int_val = int(cell)
        return 197000 <= int_val < 210000
    except Exception:
        return False
def is_numeric(text):
    pattern = re.compile(r"^\d*\.?\d*$")
    return re.match(pattern, text)
def is_categorial(text):
return text.isnumeric()
def list_to_set(list):
seen = set()
seen_add = seen.add
return [ x for x in list if not (x in seen or seen_add(x))]
def safe_unicode(obj, *args):
""" return the unicode representation of obj """
try:
return unicode(obj, *args)
except UnicodeDecodeError:
# obj is byte string
ascii_text = str(obj).encode('string_escape')
return unicode(ascii_text)
def safe_str(obj):
""" return the byte string representation of obj """
try:
return str(obj)
except UnicodeEncodeError:
# obj is unicode
return unicode(obj).encode('unicode_escape')
def file_to_ascii(filename, num_lines=-1):
lines = []
with open(filename, 'r+') as f:
line_num = 1
for line in f:
if num_lines > -1 and line_num > num_lines:
break
lines.append(removeNonAscii(line))
text = '\n'.join(lines)
f.seek(0)
f.write(text)
f.truncate()
f.close()
def removeNonAscii(s): return "".join(filter(lambda x: ord(x) < 128, s))
def to_ascii(text):
return unicodedata.normalize('NFKD', text).encode('ascii', 'ignore')
def to_unicode(value):
if type(value) is not str and type(value) is not unicode:
return str(value)
try:
value = value.encode('utf8', 'replace')
str(value)
return value
except UnicodeEncodeError as e:
print type(e), e.encoding
value = value.decode('utf-8').encode("utf-8")
# value = value.encode('ascii', 'ignore').encode('utf8')
str(value)
return value
except UnicodeDecodeError as e:
print type(e), e.encoding, e
# value = unicode(value)
# iso-8859-1
value = value.decode('utf8', 'replace').encode("utf-8")
str(value)
return value
def uncamel(text):
s1 = first_cap_re.sub(r'\1_\2', text)
return all_cap_re.sub(r'\1_\2', s1).lower()
def humanize_text(text):
s1 = dequote(text)
s1 = uncamel(s1)
s1 = s1.replace("_", " ")
s1 = s1.replace("/", " / ")
if (contains_alpha(s1)):
s1 = ' '.join(re.findall('(\d+|\w+)', s1))
s1 = ' '.join(s1.split())
return s1
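# Illustrative behaviour:
#   humanize_text('StatBezirk')  # -> 'stat bezirk'
#   humanize_text('105mm')       # -> '105 mm'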
def extract_info_from_string(text):
result = {}
text = humanize_text(text)
for word in text.split(" "):
if is_date(word):
result['date'] = word
elif is_year(word):
result['year'] = word
return result
def dequote(s):
"""
If a string has single or double quotes around it, remove them.
If a matching pair of quotes is not found, return the string unchanged.
"""
if (
s.startswith(("'", '"')) and s.endswith(("'", '"'))
and (s[0] == s[-1]) # make sure the pair of quotes match
):
s = s[1:-1]
return s
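# Illustrative behaviour:
#   dequote('"hello"')    # -> 'hello'
#   dequote("'world'")    # -> 'world'
#   dequote('"unmatched') # -> '"unmatched' (no matching pair, left unchanged)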
def get_country_from_url(url):
url_elements = urlparse(url).netloc.split(".")
tld = ".".join(url_elements[-2:])
if tld in all:
return all[tld]
elif url_elements[-1] in all:
return all[url_elements[-1]]
else:
return "unknown"
class HumanizeTest(unittest.TestCase):
def test_humanize(self):
cell = '105mm'
cleaned = humanize_text(cell)
print cell, cleaned
cell = '12.25'
cleaned = humanize_text(cell)
print cell, cleaned
# print cleaned
cell = 'StatBezirk'
cleaned = humanize_text(cell)
print cell, cleaned
class DatatypeTest(unittest.TestCase):
# def test_unit(self):
# cell = '105mm'
# type = extract_datatype(cell)
# assert type=='UNIT'
# cell = 'Gebiet/Distrikt'
# cleaned = humanize_text(cell)
# print cleaned
# type = extract_datatype(cell)
# type = query_concept(cell)
# type = query_concept(cell)
# print type
# assert type=='UNIT'
# def test_concept(self):
# cell = 'Geschlecht'
# type = extract_datatype(cell, lang=None)
# print type
# def test_person(self):
# cell = 'Major Disaster'
# type = extract_datatype(cell)
# print type
#
# def test_dictionary(self):
# pass
#
# def test_date(self):
# cell = '20110101'
# type = extract_datatype(cell)
# assert type=='DATE'
#
# cell = '2011/01/01'
# type = extract_datatype(cell)
# assert type=='DATE'
#
# cell = '2011-01-01'
# type = extract_datatype(cell)
# assert type=='DATE'
#
# return True
pass
# parsing functions
def parse_float(cell):
"""
Float parser, used for converting strings to float values using the type classification of ComplexTypeAnalyser
:param cell: A string which is considered as NUMBER or FLOAT by the ComplexTypeAnalyser
:return: 0.0 on an empty input, the parsed float, or throws a ValueError
"""
try:
value = float(cell)
return value
except Exception as e:
pass
cell = str(cell).replace(" ", "")
if "," in cell:
if "." in cell:
if cell.rfind(".") > cell.rfind(", "):
cell = cell.replace(".", "")
cell = cell.replace(",", ".")
return parse_float(cell)
else:
cell = cell.replace(",", "")
return parse_float(cell)
else:
cell = cell.replace(",", ".")
return parse_float(cell)
raise ValueError(cell + ': cannot convert to numeric')
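# A minimal sketch of parse_float on locale-style inputs (values are
# illustrative, not from the original source):
#   parse_float("1,234.56")  ->  1234.56   ("," as thousands separator)
#   parse_float("1.234,56")  ->  1234.56   ("." as thousands separator)
#   parse_float("3,14")      ->  3.14      (lone comma read as a decimal point)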
def is_none_type(cell):
return cell is None or len(cell) == 0 or cell == 'null' or cell == 'None'
|
[
"seb.neumaier@gmail.com"
] |
seb.neumaier@gmail.com
|
5fe9b2191e2862a97b4b0500d3c4777c88eab68c
|
56e96acad654d7480d17d5cae7402a2bc6cbaa76
|
/share/py_module/dataset.py
|
fc4a162fa0c59a4f2c53f521c749910a52a91ef4
|
[] |
no_license
|
LitingLin/VehicleDC
|
641b1e25c22cac2ffb1dcba519b1af5ac7d9f2c8
|
2ac0b8ad708f033b59c0bc924ca7ec169e86b063
|
refs/heads/master
| 2020-05-17T19:30:00.556691
| 2019-07-12T16:21:12
| 2019-07-12T16:21:12
| 183,916,512
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,885
|
py
|
# coding: utf-8
import os
import re
import numpy as np
import torch
from torch.utils import data
from torchvision import transforms as T
from PIL import Image
color_attrs = ['Black', 'Blue', 'Brown',
'Gray', 'Green', 'Pink',
'Red', 'White', 'Yellow']
direction_attrs = ['Front', 'Rear']
type_attrs = ['passengerCar', 'saloonCar',
'shopTruck', 'suv', 'trailer', 'truck', 'van', 'waggon']
class Vehicle(data.Dataset):
"""
ๅฑๆงๅ้ๅคๆ ็ญพ:้
ๅcross entropy loss็ไฝฟ็จ
ไฝฟ็จๅค็่ฟ็ๆฐๆฎ: ๅปๆๆๆ็unknown
"""
def __init__(self,
root,
transform=None,
is_train=True):
"""
:return:
"""
if not os.path.exists(root):
print('=> [Err]: root not exists.')
return
if is_train:
print('=> train data root: ', root)
else:
print('=> test data root: ', root)
        # collect the non-empty sub-directories; their names are the class names
self.img_dirs = [os.path.join(root, x) for x in os.listdir(root) \
if os.path.isdir(os.path.join(root, x))]
self.img_dirs = [x for x in self.img_dirs if len(os.listdir(x)) != 0]
if len(self.img_dirs) == 0:
print('=> [Err]: empty sub-dirs.')
return
        self.img_dirs.sort()  # natural (lexicographic) sort by default, ascending
# print('=> total {:d} classes for training'.format(len(self.img_dirs)))
        # split the combined multi-label into its attribute vocabularies
self.color_attrs = color_attrs
self.direction_attrs = direction_attrs
self.type_attrs = type_attrs
        # file paths, ordered by sub-directory (class)
self.imgs_path = []
self.labels = []
for x in self.img_dirs:
match = re.match('([a-zA-Z]+)_([a-zA-Z]+)_([a-zA-Z]+)', os.path.split(x)[1])
            color = match.group(1)      # vehicle body color
            direction = match.group(2)  # vehicle facing direction
            type = match.group(3)       # vehicle type
# print('=> color: %s, direction: %s, type: %s' % (color, direction, type))
for y in os.listdir(x):
                # append the file path
self.imgs_path.append(os.path.join(x, y))
                # build the label
color_idx = int(np.where(self.color_attrs == np.array(color))[0])
direction_idx = int(np.where(self.direction_attrs == np.array(direction))[0])
type_idx = int(np.where(self.type_attrs == np.array(type))[0])
label = np.array([color_idx, direction_idx, type_idx], dtype=int)
label = torch.Tensor(label) # torch.from_numpy(label)
self.labels.append(label) # Tensor(label)
# print(label)
if is_train:
print('=> total {:d} samples for training.'.format(len(self.imgs_path)))
else:
print('=> total {:d} samples for testing.'.format(len(self.imgs_path)))
        # image transform used when loading
if transform is not None:
self.transform = transform
else: # default image transformation
self.transform = T.Compose([
T.Resize(448),
T.CenterCrop(448),
T.ToTensor(),
T.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
])
# --------------------- serialize imgs_path to disk
# root_parent = os.path.abspath(os.path.join(root, '..'))
# print('=> parent dir: ', root_parent)
# if is_train:
# imgs_path = os.path.join(root_parent, 'train_imgs_path.pkl')
# else:
        #     imgs_path = os.path.join(root_parent, 'test_imgs_path.pkl')
# print('=> dump imgs path: ', imgs_path)
# pickle.dump(self.imgs_path, open(imgs_path, 'wb'))
def __getitem__(self, idx):
"""
:param idx:
:return:
"""
image = Image.open(self.imgs_path[idx])
        # convert grayscale images to 'RGB'
        if image.mode == 'L' or image.mode == 'I':  # 8-bit or 32-bit grayscale
image = image.convert('RGB')
if self.transform is not None:
image = self.transform(image)
label = self.labels[idx]
f_path = os.path.split(self.imgs_path[idx])[0].split('/')[-2] + \
'/' + os.path.split(self.imgs_path[idx])[0].split('/')[-1] + \
'/' + os.path.split(self.imgs_path[idx])[1]
return image, label, f_path
def __len__(self):
"""os.path.split(self.imgs_path[idx])[0].split('/')[-2]
:return:
"""
return len(self.imgs_path)
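# A minimal usage sketch (illustrative only; the root path below is an
# assumption, not from the original source):
#
#   from torch.utils.data import DataLoader
#   train_set = Vehicle(root='./data/vehicle/train', is_train=True)
#   loader = DataLoader(train_set, batch_size=32, shuffle=True)
#   for image, label, f_path in loader:
#       color_idx, direction_idx, type_idx = label[0]  # one sample's attribute labels
#       break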
|
[
"linliting06@live.com"
] |
linliting06@live.com
|
47ede167ba6a6ed1d51b168cb720119680f3ce58
|
b4de314adaebdc238b05ff1e81c5f1d8304c4a86
|
/.history/functionality_20211022152543.py
|
6ca297f72695f8861201c7633ef2612b0bc81f67
|
[] |
no_license
|
Imprasna/signapp-login-automation
|
b36616de7a583ff4bdf35d907c30615b3ac48128
|
f39fdd7b34966f263ea47da41d1e8aa0f57b95d8
|
refs/heads/main
| 2023-08-29T19:13:07.208590
| 2021-10-22T10:05:06
| 2021-10-22T10:05:06
| 387,170,705
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,937
|
py
|
from selenium import webdriver #we need to install webdriver #pip install selenium
from getpass import getpass #inbuild function
import pyautogui as pt
import os
import platform
from datetime import datetime
from threading import Timer
# import Scheduler
from apscheduler.schedulers.blocking import BlockingScheduler
def login_automate():
username = 'prasanna.signatures1@gmail.com'
password = '123456'
username_textbox = driver.find_element_by_id('email')
username_textbox.send_keys(username)
password_textbox = driver.find_element_by_id('password')
password_textbox.send_keys(password)
login_button = driver.find_element_by_class_name('btn')
login_button.submit()
# if login_automate is True:
click_login = driver.find_element_by_id('timecard-clock-out')
click_login.click()
print('Hello')
if platform.system() == 'Windows':
    print(platform.system())
    print(datetime.today())
    x = datetime.today()
    y = x.replace(hour=15, minute=25, second=0, microsecond=0)
    print("hello world")
    driver = webdriver.Chrome('C:\\Program Files\\chromedriver_win32\\chromedriver.exe')
    driver.get('http://fibroinbeta.com/signapp_new')
    # Create the scheduler and add the job before starting it:
    # BlockingScheduler.start() blocks the calling thread, so any code after it
    # would never run. (add_date_job was the APScheduler 2.x API; the 3.x
    # BlockingScheduler imported above schedules one-off jobs with a 'date' trigger.)
    sched = BlockingScheduler()
    # Store the job in a variable in case we want to cancel it
    job = sched.add_job(login_automate, 'date', run_date=datetime(2021, 10, 22, 15, 25, 0))
    sched.start()
elif platform.system() == 'Linux':
print (platform.system());
driver = webdriver.Chrome();
driver.get('http://fibroinbeta.com/signapp_new')
# driver = webdriver.Chrome()
else:
print ("Unsupported browser bro....:(")
# username_textbox = driver.find_element_by_id('email')
# username_textbox.send_keys(username)
# password_textbox = driver.find_element_by_id('password')
# password_textbox.send_keys(password)
# login_button = driver.find_element_by_class_name('btn')
# login_button.submit()
# # if login_automate is True:
# click_login = driver.find_element_by_id('timecard-clock-out')
# click_login.click()
# print('Hello')
position1 = pt.locateOnScreen("close.png", confidence = .8)
if position1 is not None:  # locateOnScreen can return None when the image is not found
    x = position1[0]
    y = position1[1]
    pt.moveTo(x + 165, y + 20, duration = .3)
    pt.click()
close_dialog = driver.find_element_by_class_name('btn-default')
login_automate()
# if platform.system() == 'Windows':
# print (platform.system());
# driver = webdriver.Chrome('C:\\\Program Files\\\chromedriver_win32\\\chromedriver.exe');
# elif platform.system() == 'Linux':
# print (platform.system());
# driver = webdriver.Chrome();
# login_automate()
# else:
# print ("It is not Windows neither linux Bro....:(")
# print(os.name)
# print(platform.system())
# print(platform.release())
# def login_automate():
# username = 'prasanna.signatures1@gmail.com'
# password = '123456'
# driver = webdriver.Chrome()
# driver.get('http://fibroinbeta.com/signapp_new')
# username_textbox = driver.find_element_by_id('email')
# username_textbox.send_keys(username)
# password_textbox = driver.find_element_by_id('password')
# password_textbox.send_keys(password)
# login_button = driver.find_element_by_class_name('btn')
# login_button.submit()
# # if login_automate is True:
# click_login = driver.find_element_by_id('timecard-clock-out')
# click_login.click()
# print('Hello')
# position1 = pt.locateOnScreen("close.png", confidence = .8)
# x = position1[0]
# y = position1[1]
# pt.moveTo(x + 165, y + 20, duration = .3)
# pt.click()
# close_dialog = driver.find_element_by_class_name('btn-default')
# login_automate()
|
[
"prasanna@sibbc.org"
] |
prasanna@sibbc.org
|
f1e23193458c9b501d74f917876d60949e706428
|
09cbf5ce3a600e8475971223acc2fce565ac24bb
|
/count_swear/helper_functions.py
|
b1c108a197e83419666a3f05b35d941df0eb3317
|
[] |
no_license
|
fednem/python_reddit_test
|
e28e201ef567a921bef627b7b110b152cc5247a7
|
7e94af1d43326351b9e302ba66dff451cdf7456c
|
refs/heads/master
| 2021-03-19T05:59:37.205929
| 2018-04-12T15:02:32
| 2018-04-12T15:02:32
| 123,020,771
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,835
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 14 09:16:27 2018
@author: federico nemmi
"""
import praw
import regex
from nltk.corpus import stopwords
#for the moment it only take the top submission
def create_list_of_submission_from_subreddit(reddit_instance,subreddit):
list_of_submission = [submission for submission in reddit_instance.subreddit(subreddit).hot(limit = 20)]
return list_of_submission
#create a list that has every entry (title, body, comments) of every post
def from_sub_to_list(list_of_submission):
final_result = []
for submission in list_of_submission:
final_result.append(submission.title)
final_result.append(submission.selftext)
submission.comments.replace_more(limit = None)
for comment in submission.comments.list():
final_result.append(comment.body)
return final_result
# returns the regex matches as output
def count_swear_words_in_text(text, swear_words_list, error = "{e<=1}"):
non_stopword = [word for word in text.split() if word not in stopwords.words("english") ]
n_swear = [regex.match("(?:" + swear_word + ")" + error, word.lower()) for swear_word
in swear_words_list for word in non_stopword]
return(n_swear)
# returns the number of swear words instead; the match-returning version above
# was preferred, but this is kept for future reference
#def count_swear_words_in_text(text, swear_words_list, error = "{e<=1}"):
# non_stopword = [word for word in text.split() if word not in stopwords.words("english") ]
# n_swear = sum([bool(regex.match("(?:" + swear_word + ")" + error, word)) for swear_word
# in swear_words_list for word in non_stopword])
# return(n_swear)
def count_words_in_text(text):
non_stopword = [word for word in text.split() if word not in stopwords.words("english") ]
return len(non_stopword)
def swear_ratio(list_of_post, swear_words_list, error = ""):
from itertools import chain
match = []
for text in list_of_post:
        local_count = count_swear_words_in_text(text, swear_words_list, error = error)
match.append(local_count)
only_matched = [element for element in chain(*match) if bool(element)]
n_of_match = len(only_matched)
tot_n = 0
for text in list_of_post:
n = count_words_in_text(text)
tot_n += n
swear_ratio = n_of_match/tot_n
return only_matched, n_of_match, swear_ratio
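# A minimal sketch of how these helpers compose (the subreddit name and swear
# list below are illustrative assumptions, not from the original source):
#
#   reddit_instance = praw.Reddit(client_id=..., client_secret=..., user_agent=...)
#   submissions = create_list_of_submission_from_subreddit(reddit_instance, "news")
#   posts = from_sub_to_list(submissions)
#   matches, n_matches, ratio = swear_ratio(posts, ["darn", "heck"])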
# subreddit names are passed as *args; kwargs could carry the fuzzy-search error
def compare_subreddits(*args):
    # assumes a module-level praw.Reddit `reddit_instance` is available
    from itertools import chain
    output_dict = {}
    for subreddit in args:
        submissions = create_list_of_submission_from_subreddit(reddit_instance, subreddit)
        submissions_list = from_sub_to_list(submissions)
        flattened_list = [i for i in chain(*submissions_list)]
|
[
"federico.nemmi@gmail.com"
] |
federico.nemmi@gmail.com
|
fb01c0af695a570da4062568c7325f57d59a9a2a
|
943a4976ff506dc674b685aaf0525405b0a92f1b
|
/kinguilahoje.py
|
538517e5a0f90293133f665bac572de7e66c9020
|
[] |
no_license
|
maapinho/webscraping_play
|
c68ea119dbb5bdbe83cc180250b0c5d746b117a1
|
eb05e75e3870956be0a84eb2e3e30693896c8e78
|
refs/heads/master
| 2021-05-26T18:00:54.405772
| 2020-04-13T01:26:36
| 2020-04-13T01:44:32
| 254,143,096
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 734
|
py
|
from gazpacho import get,Soup
from pprint import pprint
import json
from datetime import datetime
#current date time object
dt=datetime.now().replace(microsecond=0)
print(dt)
print(dt.isoformat())
# Datetime in ISO 8601
# datetime.datetime.now().replace(microsecond=0).isoformat()
URL='http://www.kinguilahoje.com/'
html=get(URL)
soup=Soup(html)
quotations=soup.find('span',{'class':'quotation'})
dolar=quotations[0].text
euro=quotations[1].text
#pprint(quotations)
print('dolar:',dolar)
print('euro:',euro)
dolar_integer=int(dolar.split()[1])
euro_integer=int(euro.split()[1])
# data to JSON
json_data={'Dolar':dolar_integer,'Euro':euro_integer}
#print(json_data)
#output JSON data as a string
print(json.dumps(json_data))
|
[
"maapinho@hotmail.com"
] |
maapinho@hotmail.com
|
f67a9f32b5bc88c51b23c4f94c7ca1674316a0c1
|
f6577ac3fd9f96ddba43560a03edae40556ea010
|
/socket/chatbot.py
|
638785e818fd714db60e1d1420ee47bdb48f3d95
|
[] |
no_license
|
UndergraduateProject/IotServer
|
02379ac5c0544381b0a0c9a90f87b7ee74d3d8bf
|
ac3baba112c85c2ac81d841ff37f07c4e2d33e57
|
refs/heads/master
| 2023-08-23T17:21:11.822574
| 2021-10-30T14:47:34
| 2021-10-30T14:47:34
| 352,904,724
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,084
|
py
|
import os
import dialogflow
from google.api_core.exceptions import InvalidArgument
import requests as rq
import socketio
src = 'http://140.117.71.98:8000/api/Humidtemp/'
#socket
sio = socketio.Client()
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = './reactpageagent-rehl-e8f6c376b8ef.json'
DIALOGFLOW_PROJECT_ID = 'reactpageagent-rehl'
DIALOGFLOW_LANGUAGE_CODE = 'en'
SESSION_ID = 'ni_chatbot'
#text_to_be_analyzed = "hi"
session_client = dialogflow.SessionsClient()
session = session_client.session_path(DIALOGFLOW_PROJECT_ID, SESSION_ID)
#text_input = dialogflow.types.TextInput(text=text_to_be_analyzed, language_code=DIALOGFLOW_LANGUAGE_CODE)
def get_response(text_to_be_analyzed="ni_chatbot"):
text_input = dialogflow.types.TextInput(text=text_to_be_analyzed, language_code=DIALOGFLOW_LANGUAGE_CODE)
query_input = dialogflow.types.QueryInput(text=text_input) # dialogflow database
try:
response = session_client.detect_intent(session=session, query_input=query_input)
#print(response)
except InvalidArgument as e:
# print(e)
raise
return response
# display
def temperature():
res = rq.get(src)
data = res.json()
count = data["count"]
url = src + str(count)
res = rq.get(url)
data = res.json()
temperature = data['temperature']
msg = response.query_result.fulfillment_text + str(temperature)
sio.emit('chatbot', msg)
# print(response.query_result.fulfillment_text, temperature)
def humidity():
res = rq.get(src)
data = res.json()
count = data["count"]
url = src + str(count)
res = rq.get(url)
data = res.json()
humidity = data['humidity']
msg = response.query_result.fulfillment_text + str(humidity)
sio.emit('chatbot', msg)
# print(response.query_result.fulfillment_text, humidity)
# action
def action_watering():
pass
def action_light():
pass
def action_fan():
pass
@sio.on('connect')
def on_connect():
# print('connection established')
pass
@sio.on("chatbot")
def on_message(data):
# print("message" ,data)
global response
response = get_response(data)
# get_output(response)
# print("keyword: ", response.query_result.intent.display_name)
#command+shift+p -> interpreter-> copy bin/python
keyword = response.query_result.intent.display_name
confidence = response.query_result.intent_detection_confidence
if keyword == "temperature":
temperature()
elif keyword == "lighting":
humidity()
elif keyword == "open lighting":
action_light()
elif keyword == "open fan":
action_fan()
elif keyword == "open watering":
action_watering()
elif keyword == "Default Fallback Intent":
sio.emit("chatbot", "Sorry, what was that?")
# print("Sorry, what was that?")
else:
sio.emit('chatbot', response.query_result.fulfillment_text)
@sio.on('disconnect')
def on_disconnect():
# print('disconnected from server')
pass
sio.connect("http://140.117.71.98:4001")
sio.wait()  # block the main thread and keep handling Socket.IO events
|
[
"s2012439@yes.my"
] |
s2012439@yes.my
|
b3db901f568db3d311f39131427c789d20d2b786
|
9a1a0b47b59e55e3f2043ad32d5d58455e69425d
|
/0708/listas/ej233.py
|
9089219b5ce46da511c1c457bd8a33c211a40db2
|
[] |
no_license
|
piranna/asi-iesenlaces
|
dcabc0213791c04a8b6b4ccb850d5bda78292ae1
|
cf35cbea732065e09a58604a93538a9b9dca875f
|
refs/heads/master
| 2016-08-06T05:09:44.270637
| 2008-06-15T09:00:27
| 2008-06-15T09:00:27
| 32,416,588
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 717
|
py
|
# -*- coding: utf-8 -*-
# $Id$
# removes even numbers from a list
"""
We remove elements from a list with del().
Be careful when deleting elements from a list:
if we remove an element, we are changing the list's size.
"""
lista = [1,2,3,4,5,6,7,8,9]
i = 0  # index used to traverse the list
while i < len(lista):
    # re-evaluate the size of the list on every iteration
    if lista[i] % 2 == 0:
        del lista[i]
    else:
        i += 1
        # only advance the index when no element was removed;
        # after a deletion it is not necessary, because the index now
        # points to the next element (the one occupying the deleted one's place)
print lista
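# An equivalent, safer idiom (a sketch, not from the original): build a new
# list instead of mutating while iterating.
#   lista = [x for x in lista if x % 2 != 0]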
|
[
"morillas@f86dea77-7e2e-0410-97ea-a74e350978e6"
] |
morillas@f86dea77-7e2e-0410-97ea-a74e350978e6
|
57cec0f730cf6763d39090ab33bed1567ec463f9
|
4df98b871e8bdf94d8841ec1f6d7a3b4150b4dcc
|
/adaline-classifier/adalineSGD.py
|
998031d6324fdf5a573c0eb9251c19969f9d79aa
|
[] |
no_license
|
samrod13/Machine-Learning
|
ee026067710e8798befb332bd18a097acbd79775
|
080de188a73703c49dd83bbb3212699f36a348f0
|
refs/heads/master
| 2021-01-18T04:22:30.269977
| 2016-11-06T20:14:26
| 2016-11-06T20:14:26
| 67,274,971
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,068
|
py
|
from numpy.random import seed
import numpy as np
class AdalineSGD(object):
"""ADAptive LInear NEuron classifier.
Parameters
------------
eta : float
Learning rate (between 0.0 and 1.0)
n_iter : int
Passes over the training dataset.
Attributes
-----------
w_ : 1d-array
Weights after fitting.
errors_ : list
Number of misclassifications in every epoch.
shuffle : bool (default: True)
Shuffles training data every epoch
if True to prevent cycles.
random_state : int (default: None)
Set random state for shuffling
and initializing the weights.
"""
def __init__(self, eta=0.01, n_iter=10, shuffle=True, random_state=None):
self.eta = eta
self.n_iter = n_iter
self.w_initialized = False
self.shuffle = shuffle
if random_state:
seed(random_state)
def fit(self, X, y):
""" Fit training data.
Parameters
----------
X : {array-like}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of
samples and n_features is the number of features.
y : array-like, shape = [n_samples]
Target values.
Returns
-------
self : object
"""
self._initialize_weights(X.shape[1])
self.cost_ = []
for i in range(self.n_iter):
if self.shuffle:
X, y = self._shuffle(X, y)
cost = []
for xi, target in zip(X, y):
cost.append(self._update_weights(xi, target))
avg_cost = sum(cost) / len(y)
self.cost_.append(avg_cost)
return self
def partial_fit(self, X, y):
"""Fit training data without reinitializing the weights"""
if not self.w_initialized:
self._initialize_weights(X.shape[1])
if y.ravel().shape[0] > 1:
for xi, target in zip(X, y):
self._update_weights(xi, target)
else:
self._update_weights(X, y)
return self
def _shuffle(self, X, y):
"""Shuffle training data"""
r = np.random.permutation(len(y))
return X[r], y[r]
def _initialize_weights(self, m):
"""Initialize weights to zeros"""
self.w_ = np.zeros(1 + m)
self.w_initialized = True
def _update_weights(self, xi, target):
"""Apply Adaline learning rule to update the weights"""
output = self.net_input(xi)
error = (target - output)
self.w_[1:] += self.eta * xi.dot(error)
self.w_[0] += self.eta * error
cost = 0.5 * error**2
return cost
def net_input(self, X):
"""Calculate net input"""
return np.dot(X, self.w_[1:]) + self.w_[0]
def activation(self, X):
"""Compute linear activation"""
return self.net_input(X)
def predict(self, X):
"""Return class label after unit step"""
return np.where(self.activation(X) >= 0.0, 1, -1)
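# A minimal usage sketch on toy, linearly separable data (the data below is
# an illustrative assumption, not from the original source):
#
#   X = np.array([[1.0, 2.0], [2.0, 3.0], [4.0, 1.0], [5.0, 2.0]])
#   y = np.array([-1, -1, 1, 1])
#   ada = AdalineSGD(eta=0.01, n_iter=15, random_state=1)
#   ada.fit(X, y)
#   ada.predict(np.array([[4.5, 1.5]]))  # -> typically array([1]) on this toy set
#   ada.partial_fit(X[0, :], y[0])       # online update without re-initializing weights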
|
[
"rodriguezs466@gmail.com"
] |
rodriguezs466@gmail.com
|
105ccae1d666281b62ba6b9043fac68fcb1651e2
|
83b41f8ba0959f3ab3094869670920bdef92d0db
|
/df_test.py
|
99b6eda66b317c936570c534d41f9875f5ca88ff
|
[] |
no_license
|
kumarchintu/aws-trainning
|
9157da4496d0894eefe39bb4303c32c362af517f
|
34ff156c74216abd039f24267107b60f0586460f
|
refs/heads/master
| 2021-01-26T07:40:38.693976
| 2020-05-01T04:06:57
| 2020-05-01T04:06:57
| 243,369,342
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 473
|
py
|
import pandas as pd
dfObj = pd.DataFrame(columns=['User_ID', 'UserName', 'Action'])
print("Empty Dataframe ", dfObj, sep='\n')
dfObj = dfObj.append({'User_ID': 23, 'UserName': 'Riti', 'Action': 'Login'}, ignore_index=True)
dfObj = dfObj.append({'User_ID': 24, 'UserName': 'Aadi', 'Action': 'Logout'}, ignore_index=True)
dfObj = dfObj.append({'User_ID': 25, 'UserName': 'Jack', 'Action': 'Login'}, ignore_index=True)
print("Dataframe Contens ", dfObj, sep='\n')
|
[
"noreply@github.com"
] |
kumarchintu.noreply@github.com
|
77d0baf93da1adb6871816e657d76373586457b6
|
e888171a028d297dca5120fc748d5816d47b3be6
|
/cnn_aenc_genome_tr_seq_ld.py
|
f308c777e60d45798dccb97140661fecf67188a6
|
[] |
no_license
|
AmirUCR/CRISPER-CAS9
|
f130a3a2c1df1f6f7e7082ed05b869d0421ffce0
|
4207b794662acfefa82077a88be5fcd3afd0ef41
|
refs/heads/master
| 2023-04-27T01:24:51.956009
| 2021-05-27T23:45:26
| 2021-05-27T23:45:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,249
|
py
|
from collections import OrderedDict
import os
import sys
import warnings
import argparse
import logging
import h5py as h5
import numpy as np
import pandas as pd
import scipy.io
import six
from six.moves import range
import matplotlib.pyplot as plt
#from dna import *
from sklearn.metrics import roc_auc_score, confusion_matrix
from keras.preprocessing import sequence
from keras.optimizers import RMSprop,Adam, SGD
from keras.models import Sequential, Model
from keras.layers.core import Dropout, Activation, Flatten
from keras.regularizers import l1,l2,l1_l2
from keras.constraints import maxnorm
#from keras.layers.recurrent import LSTM, GRU
from keras.callbacks import ModelCheckpoint, EarlyStopping
from keras.layers import Conv1D, MaxPooling1D, Dense, LSTM, Bidirectional, BatchNormalization, MaxPooling2D, AveragePooling1D, Input, Multiply, Add, UpSampling1D
from sklearn.metrics import mean_squared_error as mse
import scipy.stats as st
#from keras.utils import plot_model
#from keras.utils.layer_utils import print_layer_shapes
# fix random seed for reproducibility
from random import shuffle
np.random.seed(1369)
def PREPROCESS(lines):
data_n = len(lines) - 1
SEQ = np.zeros((data_n, 40, 4), dtype=int)
#CA = np.zeros((data_n, 1), dtype=float)
#Score = np.zeros((data_n, 1), dtype=float)
#lines = lines[1:]
shuffle(lines)
    # one row per line, indexed from 0; the trailing line (lines[data_n]) is skipped
    for l in range(0, data_n):
        data = lines[l]
        seq = data
        #Score[l] = float(data[6])
        #CA[l] = float(data[5])
        for i in range(40):
            if seq[i] in "Aa":
                SEQ[l, i, 0] = 1
            elif seq[i] in "Cc":
                SEQ[l, i, 1] = 1
            elif seq[i] in "Gg":
                SEQ[l, i, 2] = 1
            elif seq[i] in "Tt":
                SEQ[l, i, 3] = 1
        #CA[l,0] = int(data[2])*100
    return SEQ
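# A minimal sketch of the one-hot layout (illustrative sequences, not from the
# original data): each of the 40 positions maps to a 4-vector in A,C,G,T order.
#
#   lines = ["ACGT" * 10, "TTTT" * 10, "GGGG" * 10]
#   seq_mat = PREPROCESS(lines)   # shuffles `lines` in place, keeps len(lines)-1 rows
#   seq_mat.shape                 # -> (2, 40, 4)
#   # an 'A' at position i yields the row [1, 0, 0, 0]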
if __name__ == '__main__':
print ("Loading train data")
FILE = open("sequence_SFLI.txt", "r")
data = FILE.readlines()
print(len(data))
SEQ_in = PREPROCESS(data)
#score = st.zscore(score)
print(SEQ_in.shape)
FILE.close()
# model for seq
SEQ = Input(shape=(40,4))
conv_1 = Conv1D(activation="relu", padding="same", strides=1, filters=20, kernel_size=5, kernel_regularizer = l2(0.0001))(SEQ)
bat_norm1 = BatchNormalization()(conv_1)
pool = MaxPooling1D(pool_size=(2))(bat_norm1)
conv_2 = Conv1D(activation="relu", padding="same", strides=1, filters=40, kernel_size=8, kernel_regularizer = l2(0.0001))(pool)
bat_norm2 = BatchNormalization()(conv_2)
pool_1 = AveragePooling1D(pool_size=(2))(bat_norm2)
flatten = Flatten()(pool_1)
dropout_1 = Dropout(0.5)(flatten)
dense_1 = Dense(80, activation='relu', kernel_initializer='glorot_uniform')(dropout_1)
dropout_2 = Dropout(0.5)(dense_1)
dense_2 = Dense(units=40, activation="relu",kernel_initializer='glorot_uniform')(dropout_2)
dropout_3 = Dropout(0.3)(dense_2)
dense_3 = Dense(units=40, activation="relu",kernel_initializer='glorot_uniform')(dropout_3)
out = Dense(units=1, activation="linear")(dense_3)
model = Model(inputs = SEQ, outputs= out)
model.summary()
model.load_weights("seqonly_wtt.h5")
pred_y = model.predict(SEQ_in)
np.savetxt("activity_score_SFLI.csv", pred_y, delimiter= ",")
|
[
"dbais001@dipankar.cs.ucr.edu"
] |
dbais001@dipankar.cs.ucr.edu
|
b270564b58d3fcdb665fa602738083d04173c420
|
2dfa1822a5d3006187c47f383bb67ab4e202e417
|
/GraphicsView.py
|
ea34878b4a0a92f448f3cf3ec2a7c6addf7005b4
|
[
"MIT"
] |
permissive
|
amandashack/QDmapping
|
254c2754634b072161e0f1232089a25440c5228d
|
ee93dc693ebc8e6cfd378d5b69367c5293d232be
|
refs/heads/master
| 2020-04-16T16:14:55.137660
| 2020-03-26T23:46:27
| 2020-03-26T23:46:27
| 156,276,056
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,661
|
py
|
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt, QPoint, pyqtSignal, QRect
from PyQt5.QtWidgets import QMainWindow, QApplication, QGraphicsScene, QGraphicsView, QRubberBand
from PyQt5.QtGui import QPixmap, QPainter, QPen
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from main import *
from GraphicsView import *
from collections import defaultdict
import sys
import time
import cv2
class photoViewer(object):
def __init__(self, ogImage, ogImageScene, pixmapItem, width, height):
self.ogImage = ogImage
self.ogImageScene = ogImageScene
self.pixmapItem = pixmapItem
self._zoom = 0
self._width = width
self._height = height
def setDefaultImage(self, image):
self.image = image
pixmap = self.scale(self._width, self._height)
self.updatePixmap(pixmap)
return(pixmap)
def updatePixmap(self, pixmap):
self.ogImageScene.clear()
self.pixmapItem = QGraphicsPixmapItem()
self.pixmapItem.setPixmap(pixmap)
self.ogImageScene.addItem(self.pixmapItem)
self.ogImageScene.setSceneRect(QtCore.QRectF(0.0, 0.0, pixmap.width(), pixmap.height()))
def scale(self, width, height):
if (self.image.isNull()):
return(QPixmap())
return(self.image.scaled(width, height, QtCore.Qt.KeepAspectRatio))
def zoom(self, pixmap, factor):
pixmap = self.scale(pixmap.width()*factor, pixmap.height()*factor)
self.updatePixmap(pixmap)
return(pixmap)
def zoomIn(self, pixmap):
self._zoom += 1
factor = 1.25
return(self.zoom(pixmap, factor))
def zoomOut(self, pixmap):
if self._zoom == 0:
return(pixmap)
self._zoom -= 1
factor = 0.75
return(self.zoom(pixmap, factor))
def zeroZoom(self, pixmap):
if self._zoom == 0:
return(pixmap)
pixmap = self.scale(self._width, self._height) #### self.pixmap is used for scaling and anything else
self.updatePixmap(pixmap)
return (pixmap)
class photoManager():
def __init__(self):
pass
def editIm(self, editim, opDict, cur_mode, value):
'''
        The problem was that the image was edited to include the previous state
        of the image, and only then was the current slider position taken into
        account; the current state must instead be built from this slider plus
        the previously moved sliders.
        I believe this has been fixed, so the commented-out code below can be removed - 10/16
'''
im = editim
opDict[cur_mode].append(value)
for key in opDict.keys():
if key.upper() == "ERODE":
value = opDict[key][-1]
kernal = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (2, 2))
editim = cv2.erode(editim, kernal, iterations = value)
elif key.upper() == "DILATE":
value = opDict[key][-1]
kernal = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (2, 2))
editim = cv2.erode(editim, kernal, iterations = value)
elif key.upper() in ["OPEN", "CLOSE", "TOPHAT", "BLACKHAT"] and opDict[key]:
key_value = opDict[key][-1]
str_mode = 'ellipse'
str_name = f'MORPH_{str_mode.upper()}'
oper_name = f'MORPH_{key.upper()}'
st = cv2.getStructuringElement(getattr(cv2, str_name), (2, 2))
editim = cv2.morphologyEx(editim, getattr(cv2, oper_name), st, iterations = key_value)
elif key.upper() == "BLUR" and opDict[key]:
key_value = opDict[key][-1]
editim = cv2.GaussianBlur(editim, (3, 3), key_value)
elif key.upper() == "THRESHOLD" and opDict[key]:
key_value = opDict[key][-1]
#### types of thresholding
#### Threshold binary or binary inverted: if the intensity of the pixel is higher than the thresh,
#### Then the thresh is set to a MaxVal, otherwise the pixels are set to 0
#### truncate : the maxiumum intensity value for the pixels is thresh, if the intensity of a pixel
#### value is greater, then its value is truncated (set to the MaxVal)
#### threshold to zero or inverted: if the intensity of the pixel value is lower than the thresh,
#### then the new pixe value is zero or vice versa - I believe the other possible option is the TOZERO option
#### cv2.threshold(src, dst, *150*, 200, cv.THRESH_TOZERO) where the stared entry is the threshold and the value which would be the slider
#### adaptive thresholding: calculates the threshold for small regions of an image for when there are
#### different shadows
editim = cv2.adaptiveThreshold(editim, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 3, key_value - 7)
else: print('you have a wrong key on line 96 in graphicsview.py')
return(editim)
    def zoomByRect(self, editim, areaView):  # areaView: QRect - x, y, width, height
        # mapToScene/scene are QGraphicsView methods, so this method expects to
        # be bound to (or mixed into) a QGraphicsView subclass
        rect_scene = self.mapToScene(areaView).boundingRect()
        selected = self.scene().items(rect_scene)
|
[
"ajshack4@gmail.com"
] |
ajshack4@gmail.com
|
a039247a947488efd56abfaec5d0e514192661ef
|
5c34abe10630b23da8ba7d1cbce38bda53a4b6fa
|
/calibGenACD/python/ParseFileListNew.py
|
acf59a483c6f81fe4fa16b4b31a6de0eb090bd39
|
[] |
no_license
|
fermi-lat/GlastRelease-scons-old
|
cde76202f706b1c8edbf47b52ff46fe6204ee608
|
95f1daa22299272314025a350f0c6ef66eceda08
|
refs/heads/master
| 2021-07-23T02:41:48.198247
| 2017-05-09T17:27:58
| 2017-05-09T17:27:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,316
|
py
|
#!/usr/bin/env python
#
# Copyright 2007
# by
# The Board of Trustees of the
# Leland Stanford Junior University.
# All rights reserved.
#
__facility__ = "calibGenACD"
__abstract__ = "Extracts the DAC to PHA set point relationship of ACD veto"
__author__ = "E. Charles"
__date__ = "$Date$"
__version__ = "$Revision$, $Author$"
__release__ = "$Name$"
#import LATTE.copyright_SLAC
import os, sys
import time
from optparse import OptionParser
from py_mootCore import MootQuery, vectorOfConstitInfo, ConstitInfo
DATACATBIN = "/afs/slac/g/glast/ground/bin/datacat"
def getDateStamp():
"""
"""
return time.strftime("%y%m%d")
def callDatacat(group,dateStamp):
"""
"""
dataCatList = "%s_%s.list"%(group,dateStamp)
dataCatLine = "%s find --sort nMetStart --group %s /Data/Flight/Level1/LPA/ > %s"%(DATACATBIN,group,dataCatList)
print "Calling datacat for group %s on %s"%(group,dateStamp)
os.system(dataCatLine)
return dataCatList
def configInfo(metTime,mq):
"""
"""
acqInfo = mq.getAcqSummaryInfo( int(metTime[1:]) )
if acqInfo is None:
return ("None",0)
key = int(acqInfo.getConfigKey())
configInfo = mq.getConfigInfo(key)
if configInfo is None:
return ("None",key)
return (configInfo.getName(),key)
def fmxKeys(mKey):
"""
"""
mq = MootQuery(None)
constits = vectorOfConstitInfo()
ci = mq.getActiveFilters(mKey,constits,0)
for ci in constits:
print (ci.getKey(),ci.getFswId(),ci.getSchemaId(),ci.getSchemaVersionId(),ci.getInstanceId() )
def utcDayAndWeek(metTime):
"""
"""
unixSecs = float(metTime[1:])
missionEpoch = time.mktime( time.strptime("Sun Dec 31 16:00:00 2000") )
missionStart = time.mktime( time.strptime("Sun Jun 8 15:00:00 2008") )
utcTime = time.gmtime(unixSecs+missionEpoch)
launchSecs = unixSecs+missionEpoch-missionStart
week = int ( launchSecs / 604800 )
day = "%02d%02d%02d"%(utcTime[0]-2000,utcTime[1],utcTime[2])
return (day,week)
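# Sketch of the conversion: given a MET string like 'r0263000000' (illustrative
# run id, not from the original data), utcDayAndWeek strips the leading char,
# shifts the mission-elapsed seconds to UTC, and returns ('YYMMDD', week) where
# the week counts 604800-second intervals from the 2008-06-08 mission start.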
def parseNames(inFileName):
"""
"""
outFileName = inFileName.replace("list","table")
outFile = open(outFileName,'w')
mq = MootQuery(None)
inFile = open(inFileName)
inline = inFile.readline()
while inline<>'':
w = inline.find('/r0')
runNum = inline[w+2:w+12]
(uDay,mWeek) = utcDayAndWeek(runNum)
(configName,configKey) = configInfo(runNum,mq)
outFile.write("%s %s %03d %-4d %s %s\n"%(runNum,uDay,mWeek,configKey,configName,inline.strip()))
inline = inFile.readline()
inFile.close()
outFile.close()
return None
if __name__=='__main__':
# argument parsing
usage = 'ParseFileList.py type'
parser = OptionParser(usage)
if len(sys.argv) == 1 or sys.argv[1] == '-h':
parser.print_help()
sys.exit()
(options, args) = parser.parse_args(sys.argv[1:])
if len(args) < 1:
parser.print_help()
sys.exit()
dateStamp = getDateStamp()
for group in args:
dataCatList = callDatacat(group,dateStamp)
#Latch the time
parseNames(dataCatList)
|
[
""
] | |
6c4853e5d42f5a96aabcd2f6bac19abb11fe102f
|
7d5738e9713ddae056138217238e39eb093574dd
|
/deeplearning/imagenet_labels.py
|
67f92e88da0d85045b9e0274a723db2b1d4c046c
|
[] |
no_license
|
tanle2694/deploy_deeplearning_model
|
1093fa7f7e7567540e8ac9863477906666332b97
|
7c5473feb4cc5a67b5f3b9626ddcbcf5091e5ecc
|
refs/heads/master
| 2022-10-12T20:13:42.699632
| 2020-06-10T08:04:24
| 2020-06-10T08:04:24
| 270,533,225
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,201
|
py
|
labels = ["tench",
"goldfish",
"great white shark",
"tiger shark",
"hammerhead shark",
"electric ray",
"stingray",
"cock",
"hen",
"ostrich",
"brambling",
"goldfinch",
"house finch",
"junco",
"indigo bunting",
"American robin",
"bulbul",
"jay",
"magpie",
"chickadee",
"American dipper",
"kite",
"bald eagle",
"vulture",
"great grey owl",
"fire salamander",
"smooth newt",
"newt",
"spotted salamander",
"axolotl",
"American bullfrog",
"tree frog",
"tailed frog",
"loggerhead sea turtle",
"leatherback sea turtle",
"mud turtle",
"terrapin",
"box turtle",
"banded gecko",
"green iguana",
"Carolina anole",
"desert grassland whiptail lizard",
"agama",
"frilled-necked lizard",
"alligator lizard",
"Gila monster",
"European green lizard",
"chameleon",
"Komodo dragon",
"Nile crocodile",
"American alligator",
"triceratops",
"worm snake",
"ring-necked snake",
"eastern hog-nosed snake",
"smooth green snake",
"kingsnake",
"garter snake",
"water snake",
"vine snake",
"night snake",
"boa constrictor",
"African rock python",
"Indian cobra",
"green mamba",
"sea snake",
"Saharan horned viper",
"eastern diamondback rattlesnake",
"sidewinder",
"trilobite",
"harvestman",
"scorpion",
"yellow garden spider",
"barn spider",
"European garden spider",
"southern black widow",
"tarantula",
"wolf spider",
"tick",
"centipede",
"black grouse",
"ptarmigan",
"ruffed grouse",
"prairie grouse",
"peacock",
"quail",
"partridge",
"grey parrot",
"macaw",
"sulphur-crested cockatoo",
"lorikeet",
"coucal",
"bee eater",
"hornbill",
"hummingbird",
"jacamar",
"toucan",
"duck",
"red-breasted merganser",
"goose",
"black swan",
"tusker",
"echidna",
"platypus",
"wallaby",
"koala",
"wombat",
"jellyfish",
"sea anemone",
"brain coral",
"flatworm",
"nematode",
"conch",
"snail",
"slug",
"sea slug",
"chiton",
"chambered nautilus",
"Dungeness crab",
"rock crab",
"fiddler crab",
"red king crab",
"American lobster",
"spiny lobster",
"crayfish",
"hermit crab",
"isopod",
"white stork",
"black stork",
"spoonbill",
"flamingo",
"little blue heron",
"great egret",
"bittern",
"crane (bird)",
"limpkin",
"common gallinule",
"American coot",
"bustard",
"ruddy turnstone",
"dunlin",
"common redshank",
"dowitcher",
"oystercatcher",
"pelican",
"king penguin",
"albatross",
"grey whale",
"killer whale",
"dugong",
"sea lion",
"Chihuahua",
"Japanese Chin",
"Maltese",
"Pekingese",
"Shih Tzu",
"King Charles Spaniel",
"Papillon",
"toy terrier",
"Rhodesian Ridgeback",
"Afghan Hound",
"Basset Hound",
"Beagle",
"Bloodhound",
"Bluetick Coonhound",
"Black and Tan Coonhound",
"Treeing Walker Coonhound",
"English foxhound",
"Redbone Coonhound",
"borzoi",
"Irish Wolfhound",
"Italian Greyhound",
"Whippet",
"Ibizan Hound",
"Norwegian Elkhound",
"Otterhound",
"Saluki",
"Scottish Deerhound",
"Weimaraner",
"Staffordshire Bull Terrier",
"American Staffordshire Terrier",
"Bedlington Terrier",
"Border Terrier",
"Kerry Blue Terrier",
"Irish Terrier",
"Norfolk Terrier",
"Norwich Terrier",
"Yorkshire Terrier",
"Wire Fox Terrier",
"Lakeland Terrier",
"Sealyham Terrier",
"Airedale Terrier",
"Cairn Terrier",
"Australian Terrier",
"Dandie Dinmont Terrier",
"Boston Terrier",
"Miniature Schnauzer",
"Giant Schnauzer",
"Standard Schnauzer",
"Scottish Terrier",
"Tibetan Terrier",
"Australian Silky Terrier",
"Soft-coated Wheaten Terrier",
"West Highland White Terrier",
"Lhasa Apso",
"Flat-Coated Retriever",
"Curly-coated Retriever",
"Golden Retriever",
"Labrador Retriever",
"Chesapeake Bay Retriever",
"German Shorthaired Pointer",
"Vizsla",
"English Setter",
"Irish Setter",
"Gordon Setter",
"Brittany",
"Clumber Spaniel",
"English Springer Spaniel",
"Welsh Springer Spaniel",
"Cocker Spaniels",
"Sussex Spaniel",
"Irish Water Spaniel",
"Kuvasz",
"Schipperke",
"Groenendael",
"Malinois",
"Briard",
"Australian Kelpie",
"Komondor",
"Old English Sheepdog",
"Shetland Sheepdog",
"collie",
"Border Collie",
"Bouvier des Flandres",
"Rottweiler",
"German Shepherd Dog",
"Dobermann",
"Miniature Pinscher",
"Greater Swiss Mountain Dog",
"Bernese Mountain Dog",
"Appenzeller Sennenhund",
"Entlebucher Sennenhund",
"Boxer",
"Bullmastiff",
"Tibetan Mastiff",
"French Bulldog",
"Great Dane",
"St. Bernard",
"husky",
"Alaskan Malamute",
"Siberian Husky",
"Dalmatian",
"Affenpinscher",
"Basenji",
"pug",
"Leonberger",
"Newfoundland",
"Pyrenean Mountain Dog",
"Samoyed",
"Pomeranian",
"Chow Chow",
"Keeshond",
"Griffon Bruxellois",
"Pembroke Welsh Corgi",
"Cardigan Welsh Corgi",
"Toy Poodle",
"Miniature Poodle",
"Standard Poodle",
"Mexican hairless dog",
"grey wolf",
"Alaskan tundra wolf",
"red wolf",
"coyote",
"dingo",
"dhole",
"African wild dog",
"hyena",
"red fox",
"kit fox",
"Arctic fox",
"grey fox",
"tabby cat",
"tiger cat",
"Persian cat",
"Siamese cat",
"Egyptian Mau",
"cougar",
"lynx",
"leopard",
"snow leopard",
"jaguar",
"lion",
"tiger",
"cheetah",
"brown bear",
"American black bear",
"polar bear",
"sloth bear",
"mongoose",
"meerkat",
"tiger beetle",
"ladybug",
"ground beetle",
"longhorn beetle",
"leaf beetle",
"dung beetle",
"rhinoceros beetle",
"weevil",
"fly",
"bee",
"ant",
"grasshopper",
"cricket",
"stick insect",
"cockroach",
"mantis",
"cicada",
"leafhopper",
"lacewing",
"dragonfly",
"damselfly",
"red admiral",
"ringlet",
"monarch butterfly",
"small white",
"sulphur butterfly",
"gossamer-winged butterfly",
"starfish",
"sea urchin",
"sea cucumber",
"cottontail rabbit",
"hare",
"Angora rabbit",
"hamster",
"porcupine",
"fox squirrel",
"marmot",
"beaver",
"guinea pig",
"common sorrel",
"zebra",
"pig",
"wild boar",
"warthog",
"hippopotamus",
"ox",
"water buffalo",
"bison",
"ram",
"bighorn sheep",
"Alpine ibex",
"hartebeest",
"impala",
"gazelle",
"dromedary",
"llama",
"weasel",
"mink",
"European polecat",
"black-footed ferret",
"otter",
"skunk",
"badger",
"armadillo",
"three-toed sloth",
"orangutan",
"gorilla",
"chimpanzee",
"gibbon",
"siamang",
"guenon",
"patas monkey",
"baboon",
"macaque",
"langur",
"black-and-white colobus",
"proboscis monkey",
"marmoset",
"white-headed capuchin",
"howler monkey",
"titi",
"Geoffroy's spider monkey",
"common squirrel monkey",
"ring-tailed lemur",
"indri",
"Asian elephant",
"African bush elephant",
"red panda",
"giant panda",
"snoek",
"eel",
"coho salmon",
"rock beauty",
"clownfish",
"sturgeon",
"garfish",
"lionfish",
"pufferfish",
"abacus",
"abaya",
"academic gown",
"accordion",
"acoustic guitar",
"aircraft carrier",
"airliner",
"airship",
"altar",
"ambulance",
"amphibious vehicle",
"analog clock",
"apiary",
"apron",
"waste container",
"assault rifle",
"backpack",
"bakery",
"balance beam",
"balloon",
"ballpoint pen",
"Band-Aid",
"banjo",
"baluster",
"barbell",
"barber chair",
"barbershop",
"barn",
"barometer",
"barrel",
"wheelbarrow",
"baseball",
"basketball",
"bassinet",
"bassoon",
"swimming cap",
"bath towel",
"bathtub",
"station wagon",
"lighthouse",
"beaker",
"military cap",
"beer bottle",
"beer glass",
"bell-cot",
"bib",
"tandem bicycle",
"bikini",
"ring binder",
"binoculars",
"birdhouse",
"boathouse",
"bobsleigh",
"bolo tie",
"poke bonnet",
"bookcase",
"bookstore",
"bottle cap",
"bow",
"bow tie",
"brass",
"bra",
"breakwater",
"breastplate",
"broom",
"bucket",
"buckle",
"bulletproof vest",
"high-speed train",
"butcher shop",
"taxicab",
"cauldron",
"candle",
"cannon",
"canoe",
"can opener",
"cardigan",
"car mirror",
"carousel",
"tool kit",
"carton",
"car wheel",
"automated teller machine",
"cassette",
"cassette player",
"castle",
"catamaran",
"CD player",
"cello",
"mobile phone",
"chain",
"chain-link fence",
"chain mail",
"chainsaw",
"chest",
"chiffonier",
"chime",
"china cabinet",
"Christmas stocking",
"church",
"movie theater",
"cleaver",
"cliff dwelling",
"cloak",
"clogs",
"cocktail shaker",
"coffee mug",
"coffeemaker",
"coil",
"combination lock",
"computer keyboard",
"confectionery store",
"container ship",
"convertible",
"corkscrew",
"cornet",
"cowboy boot",
"cowboy hat",
"cradle",
"crane (machine)",
"crash helmet",
"crate",
"infant bed",
"Crock Pot",
"croquet ball",
"crutch",
"cuirass",
"dam",
"desk",
"desktop computer",
"rotary dial telephone",
"diaper",
"digital clock",
"digital watch",
"dining table",
"dishcloth",
"dishwasher",
"disc brake",
"dock",
"dog sled",
"dome",
"doormat",
"drilling rig",
"drum",
"drumstick",
"dumbbell",
"Dutch oven",
"electric fan",
"electric guitar",
"electric locomotive",
"entertainment center",
"envelope",
"espresso machine",
"face powder",
"feather boa",
"filing cabinet",
"fireboat",
"fire engine",
"fire screen sheet",
"flagpole",
"flute",
"folding chair",
"football helmet",
"forklift",
"fountain",
"fountain pen",
"four-poster bed",
"freight car",
"French horn",
"frying pan",
"fur coat",
"garbage truck",
"gas mask",
"gas pump",
"goblet",
"go-kart",
"golf ball",
"golf cart",
"gondola",
"gong",
"gown",
"grand piano",
"greenhouse",
"grille",
"grocery store",
"guillotine",
"barrette",
"hair spray",
"half-track",
"hammer",
"hamper",
"hair dryer",
"hand-held computer",
"handkerchief",
"hard disk drive",
"harmonica",
"harp",
"harvester",
"hatchet",
"holster",
"home theater",
"honeycomb",
"hook",
"hoop skirt",
"horizontal bar",
"horse-drawn vehicle",
"hourglass",
"iPod",
"clothes iron",
"jack-o'-lantern",
"jeans",
"jeep",
"T-shirt",
"jigsaw puzzle",
"pulled rickshaw",
"joystick",
"kimono",
"knee pad",
"knot",
"lab coat",
"ladle",
"lampshade",
"laptop computer",
"lawn mower",
"lens cap",
"paper knife",
"library",
"lifeboat",
"lighter",
"limousine",
"ocean liner",
"lipstick",
"slip-on shoe",
"lotion",
"speaker",
"loupe",
"sawmill",
"magnetic compass",
"mail bag",
"mailbox",
"tights",
"tank suit",
"manhole cover",
"maraca",
"marimba",
"mask",
"match",
"maypole",
"maze",
"measuring cup",
"medicine chest",
"megalith",
"microphone",
"microwave oven",
"military uniform",
"milk can",
"minibus",
"miniskirt",
"minivan",
"missile",
"mitten",
"mixing bowl",
"mobile home",
"Model T",
"modem",
"monastery",
"monitor",
"moped",
"mortar",
"square academic cap",
"mosque",
"mosquito net",
"scooter",
"mountain bike",
"tent",
"computer mouse",
"mousetrap",
"moving van",
"muzzle",
"nail",
"neck brace",
"necklace",
"nipple",
"notebook computer",
"obelisk",
"oboe",
"ocarina",
"odometer",
"oil filter",
"organ",
"oscilloscope",
"overskirt",
"bullock cart",
"oxygen mask",
"packet",
"paddle",
"paddle wheel",
"padlock",
"paintbrush",
"pajamas",
"palace",
"pan flute",
"paper towel",
"parachute",
"parallel bars",
"park bench",
"parking meter",
"passenger car",
"patio",
"payphone",
"pedestal",
"pencil case",
"pencil sharpener",
"perfume",
"Petri dish",
"photocopier",
"plectrum",
"Pickelhaube",
"picket fence",
"pickup truck",
"pier",
"piggy bank",
"pill bottle",
"pillow",
"ping-pong ball",
"pinwheel",
"pirate ship",
"pitcher",
"hand plane",
"planetarium",
"plastic bag",
"plate rack",
"plow",
"plunger",
"Polaroid camera",
"pole",
"police van",
"poncho",
"billiard table",
"soda bottle",
"pot",
"potter's wheel",
"power drill",
"prayer rug",
"printer",
"prison",
"projectile",
"projector",
"hockey puck",
"punching bag",
"purse",
"quill",
"quilt",
"race car",
"racket",
"radiator",
"radio",
"radio telescope",
"rain barrel",
"recreational vehicle",
"reel",
"reflex camera",
"refrigerator",
"remote control",
"restaurant",
"revolver",
"rifle",
"rocking chair",
"rotisserie",
"eraser",
"rugby ball",
"ruler",
"running shoe",
"safe",
"safety pin",
"salt shaker",
"sandal",
"sarong",
"saxophone",
"scabbard",
"weighing scale",
"school bus",
"schooner",
"scoreboard",
"CRT screen",
"screw",
"screwdriver",
"seat belt",
"sewing machine",
"shield",
"shoe store",
"shoji",
"shopping basket",
"shopping cart",
"shovel",
"shower cap",
"shower curtain",
"ski",
"ski mask",
"sleeping bag",
"slide rule",
"sliding door",
"slot machine",
"snorkel",
"snowmobile",
"snowplow",
"soap dispenser",
"soccer ball",
"sock",
"solar thermal collector",
"sombrero",
"soup bowl",
"space bar",
"space heater",
"space shuttle",
"spatula",
"motorboat",
"spider web",
"spindle",
"sports car",
"spotlight",
"stage",
"steam locomotive",
"through arch bridge",
"steel drum",
"stethoscope",
"scarf",
"stone wall",
"stopwatch",
"stove",
"strainer",
"tram",
"stretcher",
"couch",
"stupa",
"submarine",
"suit",
"sundial",
"sunglass",
"sunglasses",
"sunscreen",
"suspension bridge",
"mop",
"sweatshirt",
"swimsuit",
"swing",
"switch",
"syringe",
"table lamp",
"tank",
"tape player",
"teapot",
"teddy bear",
"television",
"tennis ball",
"thatched roof",
"front curtain",
"thimble",
"threshing machine",
"throne",
"tile roof",
"toaster",
"tobacco shop",
"toilet seat",
"torch",
"totem pole",
"tow truck",
"toy store",
"tractor",
"semi-trailer truck",
"tray",
"trench coat",
"tricycle",
"trimaran",
"tripod",
"triumphal arch",
"trolleybus",
"trombone",
"tub",
"turnstile",
"typewriter keyboard",
"umbrella",
"unicycle",
"upright piano",
"vacuum cleaner",
"vase",
"vault",
"velvet",
"vending machine",
"vestment",
"viaduct",
"violin",
"volleyball",
"waffle iron",
"wall clock",
"wallet",
"wardrobe",
"military aircraft",
"sink",
"washing machine",
"water bottle",
"water jug",
"water tower",
"whiskey jug",
"whistle",
"wig",
"window screen",
"window shade",
"Windsor tie",
"wine bottle",
"wing",
"wok",
"wooden spoon",
"wool",
"split-rail fence",
"shipwreck",
"yawl",
"yurt",
"website",
"comic book",
"crossword",
"traffic sign",
"traffic light",
"dust jacket",
"menu",
"plate",
"guacamole",
"consomme",
"hot pot",
"trifle",
"ice cream",
"ice pop",
"baguette",
"bagel",
"pretzel",
"cheeseburger",
"hot dog",
"mashed potato",
"cabbage",
"broccoli",
"cauliflower",
"zucchini",
"spaghetti squash",
"acorn squash",
"butternut squash",
"cucumber",
"artichoke",
"bell pepper",
"cardoon",
"mushroom",
"Granny Smith",
"strawberry",
"orange",
"lemon",
"fig",
"pineapple",
"banana",
"jackfruit",
"custard apple",
"pomegranate",
"hay",
"carbonara",
"chocolate syrup",
"dough",
"meatloaf",
"pizza",
"pot pie",
"burrito",
"red wine",
"espresso",
"cup",
"eggnog",
"alp",
"bubble",
"cliff",
"coral reef",
"geyser",
"lakeshore",
"promontory",
"shoal",
"seashore",
"valley",
"volcano",
"baseball player",
"bridegroom",
"scuba diver",
"rapeseed",
"daisy",
"yellow lady's slipper",
"corn",
"acorn",
"rose hip",
"horse chestnut seed",
"coral fungus",
"agaric",
"gyromitra",
"stinkhorn mushroom",
"earth star",
"hen-of-the-woods",
"bolete",
"ear",
"toilet paper"]
|
[
"tanlm@datascience.com.vn"
] |
tanlm@datascience.com.vn
|
6321392e8ea506f89ad8138de98709a7d3aeef3a
|
e5f1befb7c7ca0072747b33086fc6569a6befd01
|
/old/flaskold/json/008.py
|
f4caad8db1b8d37bfc3909c746dc3bac66210760
|
[] |
no_license
|
nepomnyashchii/TestGit
|
ae08d8bb1b7d2ab9389a309fd1dc9e24729b019c
|
c7abf4ab08ee3c2f3ea1fb09a1938bff7a3e0e5c
|
refs/heads/master
| 2020-04-28T23:41:51.053547
| 2020-01-24T12:22:40
| 2020-01-24T12:22:40
| 175,666,093
| 0
| 1
| null | 2019-03-15T13:44:03
| 2019-03-14T17:08:58
| null |
UTF-8
|
Python
| false
| false
| 147
|
py
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return "test"
if __name__ =="__main__":
app.run(debug=True)
|
[
"nepomnyashchii@gmail.com"
] |
nepomnyashchii@gmail.com
|
cf1f21a13c39529187755f3f9be3a26fdbd963c2
|
413cb01c0fa276ebf0263b5f853fac704d2b4a38
|
/scripts/notebook-hash.py
|
6943c47a564af788273a54525322b64d9d41a7d6
|
[] |
no_license
|
dergachev/iruby-docker
|
15f5824f5e6ce90194c5b1e5512b509db46b8dd4
|
987905ea6342749aa5ae5111d262f17137086001
|
refs/heads/master
| 2021-01-10T11:56:05.652262
| 2016-01-21T17:51:25
| 2016-01-21T17:51:25
| 50,068,353
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 669
|
py
|
# adapted from https://github.com/jupyter/notebook/blob/master/notebook/auth/security.py
import getpass
import hashlib
import random
from sys import argv
# Length of the salt in nr of hex chars, which implies salt_len * 4
# bits of randomness.
salt_len = 12
def passwd(passphrase):
    h = hashlib.new('sha1')
    salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
    # hashlib requires bytes on Python 3
    h.update((passphrase + salt).encode('utf-8'))
    return ':'.join(("sha1", salt, h.hexdigest()))
text="""
c.NotebookApp.ip = '*'
c.NotebookApp.password = u'{password}'
c.NotebookApp.open_browser = False
c.NotebookApp.port = {port}
"""
print(text.format(password=passwd(argv[1]), port='9999'))
|
[
"alex@evolvingweb.ca"
] |
alex@evolvingweb.ca
|
fee4e4dfa383d20a8d34a381be6e30b2f7fb0bd2
|
468378e1e85033b790c980f0c89f93907caadd86
|
/deepspeed/runtime/state_dict_factory.py
|
46efed7dc219930bc4668828714ab72a6cf04672
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
Distributed-AI/DeepSpeed
|
0e2e7f17a0a35e5a89416eaf7cba18d6356b36f8
|
edb6964603b04cdd8c8fe2d1134ee83e110f13e6
|
refs/heads/master
| 2023-07-31T12:34:06.227771
| 2021-09-26T21:42:59
| 2021-09-26T21:42:59
| 409,327,992
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,703
|
py
|
'''
Copyright 2020 The Microsoft DeepSpeed Team
'''
import torch
import os
import copy
import collections
import json
from abc import ABC, abstractmethod
from deepspeed.utils import logger
from .weight_quantizer import WeightQuantization
AUTO_MODULE_KEY = 'auto'
class SDLoaderFactory:
@staticmethod
def get_sd_loader_json(json_file):
with open(json_file) as f:
data = json.load(f)
sd_type = data['type']
ckpt_list = data['checkpoints']
version = data['version']
return SDLoaderFactory.get_sd_loader(ckpt_list, sd_type, version)
@staticmethod
def get_sd_loader(ckpt_list, sd_type='Megatron', version=None):
if sd_type == 'Megatron':
return MegatronSDLoader(ckpt_list, version)
else:
assert False, '{} checkpoint type is not supported'.format(sd_type)
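# A sketch of the JSON descriptor consumed by get_sd_loader_json (file names
# are illustrative; only the 'type', 'checkpoints' and 'version' keys are read):
#
#   {
#     "type": "Megatron",
#     "version": 2.0,
#     "checkpoints": ["mp_rank_00_model_states.pt", "mp_rank_01_model_states.pt"]
#   }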
class SDLoaderBase(ABC):
def __init__(self, ckpt_list, version):
self.module_key = None
self.ckpt_list = ckpt_list
self.check_ckpt_list()
self.version = version
def load(self,
mp_world_size,
mp_rank,
module_key=AUTO_MODULE_KEY,
is_pipe_parallel=False,
quantize=False,
quantize_bits=8,
quantize_groups=64,
mlp_extra_grouping=True):
self.module_key = module_key
num_ckpt = len(self.ckpt_list)
idx = mp_rank * num_ckpt // mp_world_size
logger.info(
f'mp_world_size: {mp_world_size}, mp_rank: {mp_rank}, module_key: {module_key}'
)
""" We have multiple cases to handle here for both training and inference:
1. PipeModule loading mp_rank_*.pt files, is_pipe_parallel=True, module_key is not None
a. if no mp_size/pp_size resizing occurs, for both training & inference, loading
the mp_rank related checkpoint directly.
b. if has mp_size/pp_size resizing, only Megatron model inference is supported,
in this case each mp_rank_*.pt have same content, we will load the first checkpoint
file (idx=0), to avoid idx exceeding file list boundary.
2. PipeModule loading layer_*.pt files, is_pipe_parallel=True, module_key is None
a. if no mp_size resizing occurs, for both training & inference, loading
the mp_rank related checkpoint directly.
b. if has mp_size resizing, only Megatron model inference is supported,
               checkpoint file(s) will be merged/split according to mp_rank, mp_world_size and
checkpoint file list.
3. Non-PipeModule loading mp_rank_*.pt files, is_pipe_parallel=False
Same with case (2).
"""
if is_pipe_parallel and module_key is not None and mp_world_size != num_ckpt:
mp_world_size = num_ckpt
idx = 0
load_path = self.ckpt_list[idx]
merge_count = 1
if num_ckpt == mp_world_size:
assert os.path.exists(load_path)
logger.info(f'rank: {mp_rank} loading checkpoint: {load_path}')
sd = torch.load(load_path, map_location=lambda storage, loc: storage)
if quantize:
quantizer = WeightQuantization(mlp_extra_grouping=mlp_extra_grouping,
mp_size=mp_world_size)
sd_module, all_scales = quantizer.sd_quantize_megatron(self.get_module(sd), quantize_bits, quantize_groups)
self.set_module(sd, sd_module)
else:
all_scales = None
elif num_ckpt > mp_world_size:
sd, all_scales, merge_count = self.merge_state_dict(mp_world_size, mp_rank, quantize, \
quantize_bits, quantize_groups, mlp_extra_grouping)
else:
sd, all_scales = self.split_state_dict(mp_world_size, mp_rank, quantize, quantize_bits, \
quantize_groups, mlp_extra_grouping)
return load_path, sd, (all_scales, merge_count)
def get_merge_state_dicts(self, mp_world_size, mp_rank):
num_ckpt = len(self.ckpt_list)
assert num_ckpt % mp_world_size == 0, 'Invalid checkpoints and world size for sd merge'
num_to_merge = num_ckpt // mp_world_size
ckpt_list = [
self.ckpt_list[i] for i in range(num_to_merge * mp_rank,
num_to_merge * (mp_rank + 1))
]
logger.info(f"mp_rank: {mp_rank}, ckpt_list: {ckpt_list}")
sd_list = [
torch.load(ckpt,
map_location=lambda storage,
loc: storage) for ckpt in ckpt_list
]
return sd_list
def get_split_state_dict(self, mp_world_size, mp_rank):
num_ckpt = len(self.ckpt_list)
assert mp_world_size % num_ckpt == 0, 'Invalid checkpoints and world size for sd split'
num_to_split = mp_world_size // num_ckpt
ckpt_index = mp_rank // num_to_split
ckpt_offset = mp_rank % num_to_split
logger.info(
f"mp_rank: {mp_rank}, ckpt_list: {self.ckpt_list[ckpt_index]}, offset: {ckpt_offset}"
)
sd = torch.load(self.ckpt_list[ckpt_index],
map_location=lambda storage,
loc: storage)
return sd, num_to_split, ckpt_offset
def _choose_module_key(self, sd):
assert not ('module' in sd and 'model' in sd), "checkpoint has both 'model' and 'module' keys, not sure how to proceed"
assert 'module' in sd or 'model' in sd, "checkpoint contains neither 'model' or 'module' keys, not sure how to proceed"
if 'module' in sd:
return 'module'
elif 'model' in sd:
return 'model'
def get_module(self, sd):
if self.module_key is None:
return sd
elif self.module_key == AUTO_MODULE_KEY:
return sd[self._choose_module_key(sd)]
else:
return sd[self.module_key]
def set_module(self, sd, module):
if self.module_key is None:
sd = module
elif self.module_key == AUTO_MODULE_KEY:
sd[self._choose_module_key(sd)] = module
else:
sd[self.module_key] = module
return sd
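    # Illustrative sketch (the names below are made up, not part of this file's
    # API): how module_key selects the client state_dict inside a loaded checkpoint.
    #
    #   sd = {'model': {'w': ...}, 'mp_world_size': 4}
    #   loader.module_key = AUTO_MODULE_KEY
    #   client = loader.get_module(sd)      # -> sd['model'] via _choose_module_key
    #   sd = loader.set_module(sd, client)  # writes back under the same key
    #
    # With module_key=None the whole checkpoint dict itself is treated as the
    # client state_dict (cases 2/3 in the load docstring above).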
def check_ckpt_list(self):
logger.info(f'checkpoint file list: {self.ckpt_list}')
assert len(self.ckpt_list) > 0
sd = torch.load(self.ckpt_list[0], map_location=lambda storage, loc: storage)
# check checkpoint count is same with saved mp_world_size
if 'mp_world_size' in sd.keys():
assert len(self.ckpt_list) == sd['mp_world_size'], f"checkpoint count {len(self.ckpt_list)} is different from saved mp_world_size {sd['mp_world_size']}"
@abstractmethod
def merge_state_dict(self,
mp_world_size,
mp_rank,
quantize,
quantize_bits,
groups,
mlp_extra_grouping):
pass
@abstractmethod
def split_state_dict(self,
mp_world_size,
mp_rank,
quantize,
quantize_bits,
groups,
mlp_extra_grouping):
pass
@abstractmethod
def sanity_check(self, ckpt_file_name):
pass
class MegatronSDLoader(SDLoaderBase):
def __init__(self, ckpt_list, version):
super().__init__(ckpt_list, version)
"""
    ## Q/K/V data needs special processing
key: transformer.layers.0.attention.query_key_value.weight, shape: torch.Size([3192, 4256])
key: transformer.layers.0.attention.query_key_value.bias, shape: torch.Size([3192])
## merge or split on axis=0
key: word_embeddings.weight, shape: torch.Size([12672, 4256])
key: transformer.layers.0.mlp.dense_h_to_4h.bias, shape: torch.Size([4256])
key: transformer.layers.0.mlp.dense_h_to_4h.weight, shape: torch.Size([4256, 4256])
## merge or split on axis=1
key: transformer.layers.0.attention.dense.weight, shape: torch.Size([4256, 1064])
key: transformer.layers.0.mlp.dense_4h_to_h.weight, shape: torch.Size([4256, 4256])
## no change required
key: transformer.layers.0.mlp.dense_4h_to_h.bias, shape: torch.Size([4256])
key: transformer.final_layernorm.weight, shape: torch.Size([4256])
key: transformer.final_layernorm.bias, shape: torch.Size([4256])
key: transformer.layers.0.attention.dense.bias, shape: torch.Size([4256])
key: transformer.layers.0.post_attention_layernorm.weight, shape: torch.Size([4256])
key: transformer.layers.0.post_attention_layernorm.bias, shape: torch.Size([4256])
key: transformer.layers.0.input_layernorm.weight, shape: torch.Size([4256])
key: transformer.layers.0.input_layernorm.bias, shape: torch.Size([4256])
key: position_embeddings.weight, shape: torch.Size([1024, 4256])
"""
def merge_query_key_value(self, param_list, ckpt_ver):
"""
        So far we have found 3 Q/K/V parameter formats across different Megatron checkpoint versions:
1. version 0, there is no version information saved in checkpoint.
format: [(3 * np * hn), h]
2. version 1.0
format: [(np * hn * 3), h]
3. version 2.0
format: [(np * 3 * hn), h]
h: hidden size
n: number of attention heads
p: number of model parallel partitions
np: n/p
hn: h/n
"""
new_qkv = None
if ckpt_ver == 0:
# [(3 * np * hn), h]
assert param_list[0].shape[0] % 3 == 0
size_qkv = param_list[0].shape[0] // 3
split_tensors = [torch.split(param, size_qkv, dim=0) for param in param_list]
tensors = []
for i in range(3):
tensor_tuple = [t[i] for t in split_tensors]
tensors.append(torch.cat(tensor_tuple, axis=0))
new_qkv = torch.cat(tensors, axis=0)
elif ckpt_ver == 1.0 or ckpt_ver == 2.0:
# [(np * hn * 3), h] or [(np * 3 * hn), h]
new_qkv = torch.cat(param_list, axis=0)
else:
assert False, f'checkpoint version: {ckpt_ver} is not supported'
return new_qkv
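    # Worked example for version 0 (illustrative shapes only): each rank holds
    # its Q/K/V chunks stacked as [Q_i; K_i; V_i] with shape [3*s, h]. Merging
    # two ranks splits each tensor into thirds and regroups by Q, K, V:
    #
    #   rank0 = [Q0; K0; V0], rank1 = [Q1; K1; V1]
    #   merged = [Q0; Q1; K0; K1; V0; V1]   # shape [6*s, h]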
def split_query_key_value(self, param, num_to_split, offset, ckpt_ver):
"""
        So far we have found 3 Q/K/V parameter formats across different Megatron checkpoint versions:
1. version 0, there is no version information saved in checkpoint.
format: [(3 * np * hn), h]
2. version 1.0
format: [(np * hn * 3), h]
3. version 2.0
format: [(np * 3 * hn), h]
h: hidden size
n: number of attention heads
p: number of model parallel partitions
np: n/p
hn: h/n
"""
new_qkv = None
if ckpt_ver == 0:
# [(3 * np * hn), h]
assert param.shape[0] % 3 == 0
size_qkv = param.shape[0] // 3
split_tensors = torch.split(param, size_qkv, dim=0)
assert split_tensors[0].shape[0] % num_to_split == 0
split_size = split_tensors[0].shape[0] // num_to_split
tensors = []
for i in range(3):
tensors.append(torch.split(split_tensors[i], split_size, dim=0)[offset])
new_qkv = torch.cat(tensors, axis=0)
elif ckpt_ver == 1.0 or ckpt_ver == 2.0:
# [(np * hn * 3), h] or [(np * 3 * hn), h]
assert param.shape[0] % num_to_split == 0
size_qkv = param.shape[0] // num_to_split
split_tensors = torch.split(param, size_qkv, dim=0)
new_qkv = split_tensors[offset]
else:
assert False, f'checkpoint version: {ckpt_ver} is not supported'
return new_qkv
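    # Sketch of the inverse for version 0 with num_to_split=2 (illustrative):
    #   param = [Q0; Q1; K0; K1; V0; V1]
    #   offset 0 -> [Q0; K0; V0], offset 1 -> [Q1; K1; V1]
    # i.e. each third is split again and the offset-th slice of Q, K and V is
    # re-concatenated for that model-parallel rank.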
def merge_state_dict(self,
mp_world_size,
mp_rank,
quantize=False,
quantize_bits=8,
groups=64,
mlp_extra_grouping=True):
self.sanity_check(self.ckpt_list[0])
sd_list = self.get_merge_state_dicts(mp_world_size, mp_rank)
ds_sd = copy.deepcopy(sd_list[0])
new_client_sd = collections.OrderedDict()
client_sd_list = [self.get_module(sd) for sd in sd_list]
keys = client_sd_list[0].keys()
ckpt_ver = self.get_checkpoint_version(ds_sd)
logger.info(f"checkpoint version: {ckpt_ver}")
if quantize:
quantizer = WeightQuantization(mlp_extra_grouping=mlp_extra_grouping,
mp_size=mp_world_size)
for key in keys:
value_list = [sd[key] for sd in client_sd_list]
if "attention.dense.weight" in key or "mlp.dense_4h_to_h.weight" in key:
if quantize:
value_list = quantizer.Quantize(value_list,
quantize_bits,
groups,
key=key,
merge_dim=1)
new_client_sd[key] = torch.cat(value_list, axis=1)
elif "attention.query_key_value" in key:
if quantize and "attention.query_key_value.weight" in key:
value_list = quantizer.Quantize(value_list,
quantize_bits,
groups,
key=key)
new_client_sd[key] = torch.cat(value_list, axis=0)
else:
if quantize:
new_client_sd[key] = torch.cat(value_list, axis=0)
else:
new_client_sd[key] = self.merge_query_key_value(
value_list,
ckpt_ver)
elif "mlp.dense_h_to_4h.weight" in key or "word_embeddings.weight" in key or "mlp.dense_h_to_4h.bias" in key:
if quantize and "mlp.dense_h_to_4h.weight" in key:
value_list = quantizer.Quantize(value_list,
quantize_bits,
groups,
key=key)
new_client_sd[key] = torch.cat(value_list, axis=0)
else:
new_client_sd[key] = value_list[0]
if quantize:
all_scales = quantizer.merge_scales()
ds_sd = self.set_module(ds_sd, new_client_sd)
return ds_sd, (all_scales if quantize else None), len(client_sd_list)
def split_state_dict(self,
mp_world_size,
mp_rank,
quantize=False,
quantize_bits=8,
groups=64,
mlp_extra_grouping=True):
self.sanity_check(self.ckpt_list[0])
sd, num_to_split, ckpt_offset = self.get_split_state_dict(mp_world_size, mp_rank)
ds_sd = copy.deepcopy(sd)
new_client_sd = collections.OrderedDict()
client_sd = self.get_module(sd)
ckpt_ver = self.get_checkpoint_version(ds_sd)
logger.info(f"checkpoint version: {ckpt_ver}")
if quantize:
quantizer = WeightQuantization(mlp_extra_grouping=mlp_extra_grouping,
mp_size=mp_world_size)
for key in client_sd.keys():
value = client_sd[key]
if "attention.dense.weight" in key or "mlp.dense_4h_to_h.weight" in key:
assert value.shape[1] % num_to_split == 0
split_size = value.shape[1] // num_to_split
if quantize:
q_vals = quantizer.Quantize([value], quantize_bits, groups, key)
value = q_vals[0]
new_client_sd[key] = torch.split(value, split_size, dim=1)[ckpt_offset]
elif "attention.query_key_value" in key:
if quantize and "attention.query_key_value.weight" in key:
q_vals = quantizer.Quantize([value], quantize_bits, groups, key)
value = q_vals[0]
new_client_sd[key] = self.split_query_key_value(
value,
num_to_split,
ckpt_offset,
ckpt_ver)
elif "mlp.dense_h_to_4h.weight" in key or "word_embeddings.weight" in key or "mlp.dense_h_to_4h.bias" in key:
assert value.shape[0] % num_to_split == 0
split_size = value.shape[0] // num_to_split
if quantize and "mlp.dense_h_to_4h.weight" in key:
q_vals = quantizer.Quantize([value], quantize_bits, groups, key)
value = q_vals[0]
new_client_sd[key] = torch.split(value, split_size, dim=0)[ckpt_offset]
else:
new_client_sd[key] = value
if quantize:
all_scales = quantizer.merge_scales_split(num_to_split)
ds_sd = self.set_module(ds_sd, new_client_sd)
return ds_sd, (all_scales if quantize else None)
def sanity_check(self, ckpt_file_name):
keys_to_check = [
"attention.dense.weight",
"mlp.dense_4h_to_h.weight",
"attention.query_key_value",
"mlp.dense_h_to_4h.weight",
"mlp.dense_h_to_4h.bias"
]
sd = torch.load(ckpt_file_name, map_location=lambda storage, loc: storage)
        # partial_key is a sub-string of one key in the sd
def check_key_exist(partial_key, sd):
keys = sd.keys()
found = False
for k in keys:
if partial_key in k:
found = True
break
return found
for key in keys_to_check:
assert check_key_exist(key, self.get_module(sd)), f'key: {key} is not found in the checkpoint {ckpt_file_name}'
def get_checkpoint_version(self, state_dict):
# Use 0 if version info doesn't exist
return self.version if self.version is not None else state_dict.get(
'checkpoint_version',
0)
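# Minimal usage sketch of the loader above. The checkpoint paths are
# hypothetical placeholders; everything else uses only methods defined here.
#
#   ckpts = ['ckpt/mp_rank_00.pt', 'ckpt/mp_rank_01.pt']  # made-up file names
#   loader = MegatronSDLoader(ckpts, version=2.0)
#   loader.check_ckpt_list()
#   # split 2 saved partitions across 4 model-parallel ranks, e.g. for rank 1:
#   sd, num_to_split, offset = loader.get_split_state_dict(mp_world_size=4, mp_rank=1)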
|
[
"noreply@github.com"
] |
Distributed-AI.noreply@github.com
|
f4a850cce56f0f8cf0a4527768d60ba75d2eb5df
|
e06ff08424324ac5d6c567ae9cd6954290ff9bd4
|
/Yudi TANG/axe/KNN/KNN_dating.py
|
ba4684b05e29ddc86468b5905cf90baf69208d11
|
[
"Apache-2.0"
] |
permissive
|
JKChang2015/Machine_Learning
|
b1bdfcf9ea43a98fc7efd5c0624bbaf5d9dbf495
|
f8b46bf23e4d1972de6bd652dd4286e9322ed62f
|
refs/heads/master
| 2021-06-06T19:18:16.596549
| 2020-05-03T22:28:18
| 2020-05-03T22:28:18
| 119,390,891
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,671
|
py
|
# KNN_dating
# Created by JKChang
# 29/01/2020, 10:20
# Tag:
# Description: dating people recommendation
# Feature: 1. Number of frequent flyer miles earned per year
# 2. Percentage of time spent playing video games
# 3. Liters of ice cream consumed per week
# classes: 1. doesn't like
# 2. small like
# 3. large like
import operator
import matplotlib.pyplot as plt
# from mpl_toolkits import mplot3d
import numpy as np
def viewMatrix(matrix, labels, arg1, arg2):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(matrix[:, arg1 - 1], matrix[:, arg2 - 1], 15.0 * np.array(labels), 15.0 * np.array(labels))
plt.show()
def view3DMatrix(matrix, labels):
fig = plt.figure()
ax = plt.axes(projection='3d')
# Data for a three-dimensional line
zline = np.linspace(0, 1, 1000)
xline = np.sin(zline)
yline = np.cos(zline)
ax.plot3D(xline, yline, zline, 'gray')
# Data for three-dimensional scattered points
zdata = matrix[:, 0]
xdata = matrix[:, 1]
ydata = matrix[:, 2]
ax.scatter3D(xdata, ydata, zdata, c=labels)
fig.show()
def kNNClassify(newInput, dataSet, labels, k):
numSamples = dataSet.shape[0] # shape[0] stands for the number of rows
# Step 1: calculate Euclidean distance
diff = np.tile(newInput, (numSamples, 1)) - dataSet
squareDiff = diff ** 2
squareSum = squareDiff.sum(axis=1)
distance = squareSum ** 0.5
# Step 2: Sort distance
    # argsort() returns the indices that would sort an array in ascending order
sortedDistIndicies = distance.argsort()
    classCount = {}  # key: label, value: label count
for i in range(k):
# Step 3: choose the min k distance
voteLabel = labels[sortedDistIndicies[i]]
# Step 4: count the label frequency
classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
# Step 5: the max voted class label will return
# Sort the dictionary according to the values
sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
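# Illustrative call with made-up points: classify (0, 0) against two clusters.
#   group = np.array([[1.0, 1.1], [1.0, 1.0], [0.0, 0.0], [0.0, 0.1]])
#   labels = ['A', 'A', 'B', 'B']
#   kNNClassify(np.array([0, 0]), group, labels, 3)   # -> 'B' (two of the
#   three nearest neighbours are labelled 'B')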
def file2matrix(filename):
with open(filename, 'r') as f:
resMatrix = np.zeros((1, 3))
labels = []
for line in f.readlines():
content = line.split('\t')
lineVector = np.asfarray([content[:3]])
resMatrix = np.r_[resMatrix, lineVector]
labels.append(int(content[-1]))
DataMatrix = np.delete(resMatrix, (0), axis=0)
return DataMatrix, labels
def autoNorm(dataSet):
# normalization:
# nor_value = (old_Value - minimum_value) / (max - min)
# get list of minimum value for each col
minValue = dataSet.min(0)
# get list of maximum value for each col
maxValue = dataSet.max(0)
normDataSet = np.zeros(np.shape(dataSet))
m = dataSet.shape[0]
# copy the minValue to size(m,1) matrix
normDataSet = dataSet - np.tile(minValue, (m, 1))
normDataSet = normDataSet / np.tile(maxValue - minValue, (m, 1))
return normDataSet, maxValue - minValue, minValue
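# Worked example of the scaling above (illustrative values): a column
# [10, 20, 40] has min=10 and range=30, so it normalizes to [0.0, 0.333, 1.0].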
def datingClassTest(filename):
hoRatio = 0.1
dataMatrix, labels = file2matrix(filename)
norm_matrix, ranges, min = autoNorm(dataMatrix)
# row number
m = norm_matrix.shape[0]
# number of test row
numTestVecs = int(m * hoRatio)
errorCount = 0.0
for i in range(numTestVecs):
res = kNNClassify(norm_matrix[i, :], norm_matrix[numTestVecs:m, :], labels[numTestVecs:m], 3)
print('The classifier came back with: %d, the real answer is %d' % (res, labels[i]))
if (res != labels[i]):
errorCount += 1.0
print('the total error rate is: %f' % (errorCount / float(numTestVecs)))
def classifypersion(testSetName):
resultList = ['not at all', 'in small doses', 'in large doses']
percentTats = float(input('percentage of time spent playing video games? '))
ffMiles = float(input('frequent flier miles earned per year? '))
    iceCream = float(input('liters of ice cream consumed per week? '))
datingDataMat, datingLabels = file2matrix(testSetName)
normMat, ranges, minVals = autoNorm(datingDataMat)
inArr = np.array([ffMiles, percentTats, iceCream])
    # normalize the input the same way as the training data: (x - min) / range
    classifierResult = kNNClassify((inArr - minVals) / ranges, normMat, datingLabels, 3)
    print('You will probably like this person : %s' % resultList[int(classifierResult) - 1])
filename = '../resource/dating/datingTestSet2.txt'
# matrix, labels = file2matrix(filename)
# norm_matrix, ranges, min = autoNorm(matrix)
# view3DMatrix(norm_matrix, labels)
# datingClassTest(filename)
classifypersion(filename)
|
[
"jkchang2015@gmail.com"
] |
jkchang2015@gmail.com
|
2a2f0eadfad2350dc050d895f631d7c1ddbcd4f2
|
c12bb52375c3c6938f55488c66abe991e0d5c456
|
/app3/urls.py
|
0a2359c59156413fca51dd1bb1f65e0efc1a1db8
|
[] |
no_license
|
kamleshn19/navy1
|
a011178e02ceac9282096bed8f087b7c4e3ae710
|
8ea3e1d8104408aa090245066cb2d5ed328c4b3e
|
refs/heads/master
| 2021-01-02T07:56:41.286090
| 2020-02-10T17:13:57
| 2020-02-10T17:13:57
| 239,557,672
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 815
|
py
|
"""hello_world URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from .import views
urlpatterns = [
path('admin/', admin.site.urls),
path('login',views.login,name='login')
]
|
[
"kamleshn19@yahoo.co.in"
] |
kamleshn19@yahoo.co.in
|
306520d5560d414aa3f79d54ccb155169016b9d7
|
78da0ab05436ac61232e6cc8b055ad992f42cbf0
|
/placidity/interpreter.py
|
1ea774d0a2476734bd84e01cd080726410b74a5c
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
bebraw/Placidity
|
82dc0710b41b1148f48eec826a31a0ad3bd8bc2f
|
944b69dadb613fc8c2f7e10c0b84facd6363b404
|
refs/heads/master
| 2023-07-10T03:54:47.517107
| 2011-11-16T08:14:21
| 2011-11-16T08:14:21
| 462,544
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,215
|
py
|
import inspect
class Context:
owner = None
def claim_for(self, owner):
self.owner = owner
def release(self):
self.owner = None
class Commands(list):
def __init__(self, commands=None):
commands = commands if commands else []
if not hasattr(commands, '__iter__'):
commands = [commands]
super(Commands, self).__init__(commands)
def match(self, expression):
priorities = ('high', 'normal', 'low')
for priority in priorities:
commands = self.find(priority=priority)
for command in commands:
if command.matches(expression):
return command
def find(self, name=None, priority=None):
if name:
for command in self:
class_name = command.__class__.__name__
if class_name.lower() == name:
return command
if priority:
return filter(lambda command: command.priority == priority, self)
class Interpreter:
def __init__(self, commands=None):
self.context = Context()
self.commands = Commands(commands)
self.variables = {}
def interpret(self, expression):
possible_parameters = {'context': self.context,
'commands': self.commands, 'expression': expression,
'variables': self.variables}
if expression is None:
return
try:
if self.context.owner:
command = self.context.owner
else:
command = self.commands.match(expression)
args = self._get_args(command.execute)
params = self._find_parameters(possible_parameters, args)
return command.execute(**params)
except SystemExit, e:
raise e
# DEBUG!
#except:
# return 'null'
def _get_args(self, method):
return inspect.getargspec(method).args
def _find_parameters(self, possible_parameters, args):
ret = {}
for name, value in possible_parameters.items():
if name in args:
ret[name] = value
return ret
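# Minimal sketch of a command the Interpreter could dispatch to (hypothetical
# class; the project's real commands live in their own modules):
#
#   class Echo:
#       priority = 'normal'
#       def matches(self, expression):
#           return expression.startswith('echo ')
#       def execute(self, expression):
#           return expression[len('echo '):]
#
#   Interpreter(commands=Echo()).interpret('echo hi')   # -> 'hi'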
|
[
"bebraw@gmail.com"
] |
bebraw@gmail.com
|
7db676908bf5ec1405df15a1265dc9b8d577bb96
|
378d4be9048dab93a130489a74e82fda02e53de3
|
/ml_algorithm/demo_data.py
|
636433197e56c018c4a1979de55bebb75ae794b1
|
[] |
no_license
|
geekieo/iMpLement
|
167f86fd4ff24efad0f503256c31cc1f342b5d2d
|
f3df6664e4115a0301b4a8ba5e72d9e6f63b6f98
|
refs/heads/master
| 2023-07-24T08:13:43.091659
| 2023-07-14T02:20:07
| 2023-07-14T02:20:07
| 87,257,617
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 731
|
py
|
# -*- coding: utf-8 -*-
import numpy as np
import utils
import matplotlib.pyplot as plt
import time
def create_train_data1(size):
mu,sigma=0,2.4
rarray=np.random.normal(mu,sigma,size*2).reshape(size,2)*10
return rarray
def create_train_data2(size):
mu,sigma=5,1.0
rarray=np.random.normal(mu,sigma,size*2).reshape(size,2)*10
return rarray
def create_train_data3(size):
mu,sigma=-5,1.0
rarray=np.random.normal(mu,sigma,size*2).reshape(size,2)*10
return rarray
def load_model(i,dim,model_file):
others=None
for j in range(10):
current=utils.load_matrix(model_file)
if j==i:
target=current
elif others is None:
others=current
else:
temp=np.vstack((others,current))
others=temp
return target,others
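# Usage sketch (hypothetical file name): with a model file holding 10 stacked
# class matrices, load_model separates class i from the rest, e.g. for
# one-vs-rest training:
#   target, others = load_model(3, dim=2, model_file='model.txt')
#   # target: rows of class 3; others: the other 9 classes vstacked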
|
[
"geekieo@hotmail.com"
] |
geekieo@hotmail.com
|
50e9870739673efcfa7b101e2a5fab4d46cee95a
|
e0b7fb64e57823d24ad6b8ca4e130c657ba437a4
|
/analysis/yields/plot.py
|
1c98b8833d00a74347fe5b76ba3b506ff8435f4a
|
[] |
no_license
|
cfangmeier/FTAnalysis
|
66644189f02ddf43dadb8e029e4709950572e7cf
|
6612f40b67689d6d946866710ad2e0256b790821
|
refs/heads/master
| 2021-09-11T00:16:35.222837
| 2018-01-09T22:26:50
| 2018-01-09T22:26:50
| 106,859,187
| 0
| 0
| null | 2017-10-13T18:23:23
| 2017-10-13T18:23:23
| null |
UTF-8
|
Python
| false
| false
| 24,136
|
py
|
import os
import sys
import ROOT as r
sys.path.insert(0,'../../')
from common.Software.dataMCplotMaker.dataMCplotMaker import dataMCplot
from analysis.limits.runLimits import get_lims
from analysis.limits.singleBinLimits import get_singlebin_limits
from analysis.limits.makeScan import make_scan
from analysis.limits.getPostFit import get_postfit_dict
def reduce_bins(h_in, ndrop=2):
# drop first [ndrop] bins
nbins_reduced = h_in.GetNbinsX() - ndrop
h_out = r.TH1F(h_in.GetName()+"_reduced"+str(ndrop), h_in.GetTitle(), nbins_reduced, 0.5, nbins_reduced+0.5)
binvals = list(h_in) # includes under and overflow, so bin 1 is index 1
for ibin,val in enumerate(binvals):
if ibin <= ndrop: continue
h_out.SetBinContent(ibin-ndrop,val)
h_out.SetBinError(ibin-ndrop,h_in.GetBinError(ibin))
return h_out
def scale_hist(h_in, scale=1.):
# return scaled histogram
h_out = h_in.Clone(h_in.GetName()+"_scaled")
h_out.Scale(scale)
return h_out
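# Sketch (illustrative): dropping the two CR bins from a 10-bin SRCR histogram
# leaves an 8-bin SR-only histogram, with contents and errors shifted down:
#   h_sr = reduce_bins(h_srcr, ndrop=2)   # bins 3..10 of h_srcr become 1..8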
if __name__ == "__main__":
os.system("mkdir -p plots")
r.gROOT.SetBatch(1)
bginfo = [
("flips", "Charge misid.", r.kGray+2, 0.2),
("rares", "Rare", r.kMagenta-7, 0.5),
("xg", "X#gamma", r.kViolet+2, 0.5),
("ttvv", "t#bar{t}VV", r.kAzure-4, 0.5),
("ttz", "t#bar{t}Z", r.kGreen-6, 0.40),
("fakes", "Nonprompt lep.", 18, 0.30),
("tth", "t#bar{t}H", r.kBlue-5, 0.50),
("ttw", "t#bar{t}W", r.kGreen+3, 0.40),
]
bgnames, titles, colors, systs = map(list,zip(*bginfo))
f1 = r.TFile("histos.root")
cards_dir = "../limits/{0}".format(f1.Get("metadata").GetTitle())
d_postfit, fitratios = get_postfit_dict("{}/mlfit.root".format(cards_dir))
# d_postfit, fitratios = get_postfit_dict("../limits/v0.10_Jul20/mlfit.root".format(cards_dir))
# print d_postfit
# print fitratios
for proc,h1 in d_postfit.items():
if not h1: continue
vals,errs = zip(*[[h1.GetBinContent(ib),h1.GetBinError(ib)] for ib in range(1,h1.GetNbinsX()+1)])
# print proc, zip(vals,errs)
# print d_postfit, fitratios
commonopts = "--darkColorLines --lumi 35.9 --topYaxisTitle Data/Pred. --type Preliminary --poissonErrorsNoZeros --dataName Data --outOfFrame --systInclStat --systFillStyle 3344 "
d_opts_br = {
# "SR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp .03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --xAxisBinLabels SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
# "SRCR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp .03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --xAxisBinLabels CRZ,CRW,SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
# "ht" : [("ttzcr","ttwcr","sr","br"), commonopts+" --ratioUpperBound 4 --xAxisLabel H_{T} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --yAxisLabel Events "],
# "met" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel p_{T}^{miss} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --yAxisLabel Events "],
# "njets" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel N_{jets} --noXaxisUnit --nDivisions 6 --noDivisionLabel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --yAxisLabel Events / bin "],
# "nbtags" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --xAxisLabel N_{b} --nDivisions 4 --noXaxisUnit --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --makeTable --yAxisLabel Events / bin "],
"SR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.15 --makeTable --xAxisBinLabels SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
"SRCR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.10 --legendRight -0.08 --legendTaller 0.20 --yTitleOffset -0.00 --makeTable --xAxisBinLabels CRZ,CRW,SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
"ht" : [("ttzcr","ttwcr","sr","br"), commonopts+" --ratioUpperBound 4 --xAxisLabel #it{H}_{T} --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.1 --yAxisLabel Events "],
"met" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel #it{p}_{T}^{miss} --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.1 --yAxisLabel Events "],
"njets" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel #it{N}_{jets} --noXaxisUnit --nDivisions 6 --noDivisionLabel --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.1 --yAxisLabel Events / bin "],
"nbtags" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --xAxisLabel #it{N}_{b} --nDivisions 4 --noXaxisUnit --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.16 --yTitleOffset -0.1 --makeTable --yAxisLabel Events / bin "],
# "SR_TOTAL" : [("",), commonopts+" --xAxisLabel SR --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --percentageInBox --xAxisBinLabels SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 "],
# "SRCR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --percentageInBox --xAxisBinLabels CRZ,CRW,SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 "],
# "ht" : [("ttzcr","ttwcr","sr","br"), commonopts+" --ratioUpperBound 4 --xAxisLabel H_{T} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "met" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel E_{T}^{miss} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mvis" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m^{vis} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# "mtvis" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m_{T}^{vis} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# "njets" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel N_{jets} --noXaxisUnit --nDivisions 6 --noDivisionLabel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "nbtags" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --xAxisLabel N_{b} --nDivisions 4 --noXaxisUnit --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --makeTable "],
# "mtmin" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m_{T}^{min} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mll" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m_{ll} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mllos" : [("ttzcr",), commonopts+" --xAxisLabel Z cand m_{ll} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "type" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --xAxisBinLabels #mu#mu,#mu e,e#mu,ee "],
# "charge" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel charge --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "nleps" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel Nleps --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l1pt" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel ordered l1pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l2pt" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel ordered l2pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l3pt" : [("ttzcr",), commonopts+" --xAxisLabel ordered l3pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mtop1" : [("sr",), commonopts+" --xAxisLabel m_{top,1} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# "mtop2" : [("sr",), commonopts+" --xAxisLabel m_{top,2} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# # "mva" : [("sr","br"), commonopts+" --xAxisLabel lep1,2 el MVA --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_mu_lep1" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_mu_lep2" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mu_l1pt" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mu_l2pt" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "mu_l3pt" : [("sr","br"), commonopts+" --xAxisLabel lep3 mu pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l1eta_mu" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l2eta_mu" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "l3eta_mu" : [("sr","br"), commonopts+" --xAxisLabel lep3 mu eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_mu_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_mu_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_mu_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_mu_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_el_lep1" : [("sr","br"), commonopts+" --xAxisLabel lep1 el sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_el_lep2" : [("sr","br"), commonopts+" --xAxisLabel lep2 el sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "el_l1pt" : [("sr","br"), commonopts+" --xAxisLabel lep1 el pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "el_l2pt" : [("sr","br"), commonopts+" --xAxisLabel lep2 el pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "el_l3pt" : [("sr","br"), commonopts+" --xAxisLabel lep3 el pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l1eta_el" : [("sr","br"), commonopts+" --xAxisLabel lep1 el eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l2eta_el" : [("sr","br"), commonopts+" --xAxisLabel lep2 el eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "l3eta_el" : [("sr","br"), commonopts+" --xAxisLabel lep3 el eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_el_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep1 el miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_el_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep2 el miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_el_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep1 el ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_el_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep2 el ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "bjetpt" : [("sr","br"), commonopts+" --xAxisLabel p_{T}(bjets) --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "jetpt" : [("sr","br"), commonopts+" --xAxisLabel p_{T}(jets) --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "disc" : [("br",), commonopts+" --isLinear --xAxisLabel disc --legendUp .0 --legendRight -0.08 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable "],
# "disc2" : [("br",), commonopts+" --isLinear --xAxisLabel disc2 --legendUp .0 --legendRight -0.08 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable "],
# "SRDISC_TOTAL" : [("",), commonopts+" --xAxisLabel SR_{disc} --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --percentageInBox "],
# "ntops" : [("sr",), commonopts+" --xAxisLabel N_{tops} --noXaxisUnit --nDivisions 5 --noDivisionLabel --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset 0.1 --makeTable "],
# "ntopness" : [("sr",), commonopts+" --xAxisLabel N_{tops}ness --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
}
do_stats = True
for key in d_opts_br.keys():
types, opts_str = d_opts_br[key]
for typ in types:
if len(typ) == 0:
name = key[:]
else:
name = "{}_{}".format(typ,key)
oname = "plots/%s.pdf" % name.replace("_TOTAL","")
# title = typ.upper()
title = ""
subtitle = ""
d_newopts = {
"outputName": oname,
}
# if key == "njets" and typ == "ttwcr": subtitle = "(a)"
# if key == "nbtags" and typ == "ttwcr": subtitle = "(b)"
# if key == "njets" and typ == "ttzcr": subtitle = "(c)"
# if key == "nbtags" and typ == "ttzcr": subtitle = "(d)"
# if key == "njets" and typ == "sr": subtitle = "(a)"
# if key == "nbtags" and typ == "sr": subtitle = "(b)"
# if key == "ht" and typ == "sr": subtitle = "(c)"
# if key == "met" and typ == "sr": subtitle = "(d)"
# if key == "SRCR_TOTAL": subtitle = "(a)"
# if key == "SR_TOTAL": subtitle = "(b)"
if key == "njets" and typ == "ttwcr": subtitle = "CRW"
if key == "nbtags" and typ == "ttwcr": subtitle = "CRW"
if key == "njets" and typ == "ttzcr": subtitle = "CRZ"
if key == "nbtags" and typ == "ttzcr": subtitle = "CRZ"
if key == "njets" and typ == "sr": subtitle = ""
if key == "nbtags" and typ == "sr": subtitle = ""
if key == "ht" and typ == "sr": subtitle = ""
if key == "met" and typ == "sr": subtitle = ""
if key == "SRCR_TOTAL": subtitle = ""
if key == "SR_TOTAL": subtitle = ""
if typ in ["ttzcr","sr"] and ("njets" in name or "nbtags" in name or "met" in name):
d_newopts["ratioUpperBound"] = 4.0
if key in ["njets","nbtags","ht","met"] and typ == "sr":
d_newopts["ratioUpperBound"] = 5.0
print name, typ
bgs = map(lambda x: f1.Get("{0}_{1}".format(name,x)), ["data", "tttt"]+bgnames)
h_data,h_tttt,bgs = bgs[0], bgs[1], bgs[2:]
h_data_empty = h_data.Clone("empty")
h_data_empty.Reset()
h_tttt.Sumw2()
tttt_sf = 5.0
h_tttt.Scale(tttt_sf)
do_unblind = True
d_newopts["noDataWidth"] = True
# if do_stats and key == "SRCR_TOTAL":
# # if key == "SRCR_TOTAL":
# make_scan(cards_dir, do_blind=not do_unblind)
# os.system("cp scan.pdf plots/scan.pdf")
# if do_stats and key in ["SRCR_TOTAL"]:
# regions="srcr"
# if "DISC" in key: regions="srdisc"
# d_lims = get_lims(card=cards_dir, regions=regions, redocard=True, redolimits=True, domcfakes=False)
# exp, expp1, expm1 = d_lims["exp"], d_lims["sp1"]-d_lims["exp"], d_lims["exp"]-d_lims["sm1"]
# subtitle = "#sigma^{UL}_{exp} = %.2f^{+%.1f}_{-%.1f} fb" % (exp, expp1, expm1)
# do_unblind = typ in ["ttwcr","ttzcr", "sr"]
do_blind = not do_unblind
if do_unblind:
if "l3eta_el" not in name and "el_l3pt" not in name:
d_newopts["noTextBetweenPads"] = True
d_newopts["noGrass"] = True
dataMCplot(h_data, bgs=bgs, sigs=[h_tttt], sigtitles=["t#bar{t}t#bar{t} x 5"], systs=systs, titles=titles, title=title, subtitle=subtitle, colors=colors, opts=d_newopts, opts_str=opts_str)
new_d_newopts = d_newopts.copy()
new_h_tttt = h_tttt.Clone("new_tttt")
new_h_tttt.Scale(1.0/tttt_sf) # undo above scaling
new_bgs = bgs+[new_h_tttt]
new_colors = colors+[r.kPink-1]
new_systs = systs+[0.1]
new_titles = titles+["t#bar{t}t#bar{t}"]
new_d_newopts["poissonErrorsNoZeros"] = False
new_d_newopts["noTextBetweenPads"] = False
new_d_newopts["preserveBackgroundOrder"] = True
def get_name(hist):
return hist.GetName().rsplit("_",1)[-1]
if do_stats and key == "SR_TOTAL":
# new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
# dataMCplot(h_data_empty, bgs=new_bgs, systs=new_systs, titles=new_titles, title="Prefit", subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str)
new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
new_d_newopts["noTextBetweenPads"] = True
del new_d_newopts["noGrass"]
postfit_bgs = [reduce_bins(d_postfit[get_name(bg)],2) for bg in new_bgs]
h_totalsyst = reduce_bins(d_postfit["total"],2) # total_background is tot bg, total is totbg+sig
dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title="", subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, total_syst=h_totalsyst)
if do_stats and key == "SRCR_TOTAL":
new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
new_d_newopts["noTextBetweenPads"] = True
del new_d_newopts["noGrass"]
this_opts_str = opts_str.replace("--isLinear","--setMinimum 0.1")
# this_opts_str = this_opts_str.replace("--legendUp -.05","--legendUp .00")
postfit_bgs = [reduce_bins(d_postfit[get_name(bg)],0) for bg in new_bgs]
h_totalsyst = reduce_bins(d_postfit["total"],0) # total_background is tot bg, total is totbg+sig
dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title="", subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=this_opts_str, total_syst=h_totalsyst)
if do_stats and key not in ["SR_TOTAL","SRCR_TOTAL"]:
new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
new_d_newopts["noGrass"] = True
postfit_bgs = [scale_hist(bg,scale=fitratios[get_name(bg)]) for bg in new_bgs]
# dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title="Postfit "+title, subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, systs=new_systs)
dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title=""+title, subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, systs=new_systs)
if do_stats and key not in ["SR_TOTAL","SRCR_TOTAL"]:
new_d_newopts["noGrass"] = True
new_d_newopts["outputName"] = oname.replace(".pdf","_stacked.pdf")
dataMCplot(h_data, bgs=new_bgs, titles=new_titles, title=title, subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, systs=new_systs)
# if do_blind:
# d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_blind.pdf")
# d_newopts["poissonErrorsNoZeros"] = False
# d_newopts["noTextBetweenPads"] = False
# # For SRCR, "blind" is actually partially blind (first two bins -- CRZ,CRW -- are unblinded)
# # make data with only CR unblinded (first two bins)
# h_data_cronly = h_data.Clone("cronly")
# for i in range(1,h_data.GetNbinsX()+1):
# if i in [1,2]: h_data_cronly.SetBinContent(i, h_data.GetBinContent(i))
# else: h_data_cronly.SetBinContent(i, 0)
# if key == "SRCR_TOTAL":
# dataMCplot(h_data_cronly, bgs=bgs, sigs=[h_tttt], sigtitles=["t#bar{t}t#bar{t} x 5"], systs=systs, titles=titles, title=title, subtitle=subtitle, colors=colors, opts=d_newopts, opts_str=opts_str)
# else:
# dataMCplot(h_data_empty, bgs=bgs, sigs=[h_tttt], sigtitles=["t#bar{t}t#bar{t} x 5"], systs=systs, titles=titles, title=title, subtitle=subtitle, colors=colors, opts=d_newopts, opts_str=opts_str)
# os.system("ic plots/SRCR_postfit.pdf")
# os.system("niceplots plots plots_tttt_Jul20_unblind")
# os.system("niceplots plots plots_tttt_Aug1_sr4")
# os.system("niceplots plots plots_tttt_Aug8")
# os.system("niceplots plots plots_tttt_Sep11")
os.system("niceplots plots plots_tttt_Oct9")
|
[
"amin.nj@gmail.com"
] |
amin.nj@gmail.com
|
6766670e972169da5ec57df6ba4c07ab94b8415f
|
d5eee852fafc803ed24353f59ac0d2b8d0538200
|
/django_backend/notifications_react/migrations/0001_initial.py
|
ade7b003c869215b4a80574c7d6174272e17e907
|
[] |
no_license
|
jande48/ComeWithNotificationFeatures
|
ca69edf2fb5e36fcf64b81864f6f6ef1cc818d09
|
13a24165ff33829b8ffe505b31163eec13130d72
|
refs/heads/master
| 2023-03-04T06:34:18.009327
| 2021-02-15T09:31:26
| 2021-02-15T09:31:26
| 337,786,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 716
|
py
|
# Generated by Django 3.1.6 on 2021-02-10 21:26
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Notifications',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('email', models.EmailField(max_length=100, unique=True)),
('message', models.CharField(blank=True, max_length=500)),
('created_at', models.DateTimeField(auto_now_add=True)),
],
),
]
|
[
"jacob.anderson10@gmail.com"
] |
jacob.anderson10@gmail.com
|
0cff05c6b6bf735c8bf8d40bc3c5f8a0e2528232
|
8110196da9c11de7fd6cac1be3f8f17efbda126a
|
/pastSlavePi/slavePi3/pastFiles/manualPic_capturePhotos.py
|
469e9657b840b3be0f8ddfc94a9e8e44cf606245
|
[] |
no_license
|
msit18/UrbanFlows
|
d57a5f2e0cbc8e804df163ef85f22b7f62309911
|
1f2e98a324def24838d1ab1dd6bf6d60e6e74f63
|
refs/heads/master
| 2020-05-21T12:27:56.429774
| 2017-07-24T19:21:36
| 2017-07-24T19:21:36
| 34,413,160
| 0
| 1
| null | 2015-10-26T20:46:01
| 2015-04-22T20:09:51
|
C
|
UTF-8
|
Python
| false
| false
| 4,756
|
py
|
#!/usr/bin/python
#Written by Michelle Sit
#Work in progress
#Edited from manualPic4.py. Takes pictures on
#one thread and on another thread moves/removes them to the server. Picture resolution,
#fps, and time are controlled by inputs
#Update: also provides updates every twenty minutes (CURRENTLY SET TO EVERY 10 MINUTES) on
#the program's fps progress while the program is running
#listServerArgs[0] = totalTime duration (in seconds)
#listServerArgs[1] = resolution width
#listServerArgs[2] = resolution height
#listServerArgs[3] = number of pictures to take (fps)
#listServerArgs[4] = time interval (seconds) for frames to be taken in (fps)
#listServerArgs[5] = framerate of picamera
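#Example (hypothetical values): args = "600 1296 972 5 1 30" requests a
#10 minute run at 1296x972, capturing 5 frames per 1 second interval with a
#camera framerate of 30.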
import time
import picamera
import datetime
import os
import string
import sys
import numpy as np
#Takes pictures based on inputted fps options (while loops control total run time
#and how many pictures are taken in the specified time frame (fps)).
#Time is also updated on each run through.
class takePictures():
def run (self, args):
try:
#print "running RUN TAKE PICTURES"
serverArgs = args
#print serverArgs
listServerArgs = [args for args in args.split()]
#print listServerArgs
resW = int(listServerArgs[1])
resH = int(listServerArgs[2])
numPics = int(listServerArgs[3])
timeInterval = int(listServerArgs[4])
frameRate = int(listServerArgs[5])
timeStart = time.time() #When the program began
totalTimeSec = int(listServerArgs[0])
totalTimeMin = int(listServerArgs[0])/60
timeNow = time.time() #Used to keep track of current time
timeEnd = totalTimeSec+timeNow #When the program ends
timePlusInt = timeNow #Keeps track of time increments
timePlusTwentyMins = timeNow+600
# print "Capturing {0}p for a total time of {1} min ({2} secs) at {3} "\
# "frames per {4} second (({5} mins) at {6} framerate ".format(str(resH), \
# str(totalTimeMin), str(totalTimeSec), str(numPics), str(timeInterval), \
# str(float(timeInterval/60)), str(frameRate) )
#print "TimePlusTwenty = {0}".format(str(timePlusTwentyMins) )
numPicArray = []
fpsArray = []
timeAvg = []
while timeNow < timePlusTwentyMins and timeNow < timeEnd:
timeNow = time.time()
if timeNow >= timePlusTwentyMins:
endTwenty = time.time()
twentyTime = endTwenty-timeStart
twentyFPS = sum(numPicArray)/twentyTime
#print "10.2 Twenty Min Update: Total number of pictures is {0},"\
#" total time elapsed is {1}, totalFPS is {2}".format(str(sum(numPicArray)),\
# str(twentyTime), str(twentyFPS) )
timePlusTwentyMins = time.time()+600
else:
while timeNow > timePlusInt:
timePlusInt = timeNow + timeInterval
start=time.time()
with picamera.PiCamera() as camera:
camera.resolution = (resW, resH)
camera.framerate = frameRate
camera.capture_sequence([
datetime.datetime.now().strftime ('%M_%S_%f') + '.jpg'
# datetime.datetime.now().strftime ('%d-%m-%Y-%H_%M_%S_%f') + '_TT'\
# + str(listServerArgs[0]) + '_RES' + str(resH) + '_PIC' + str(numPics) +\
# '_TI' + str(timeInterval) + '_FR' + str(frameRate) + '.jpg'
for i in range(numPics)
], use_video_port=True)
finish = time.time()
#Analyzing time and frames
fpsTime = (finish-start)
fps = numPics/fpsTime
numPicArray.append(numPics)
fpsArray.append(fps)
timeAvg.append(fpsTime)
#print 'Captured {0} frames at {1}fps in {2}secs'\
#.format( str(sum(numPicArray)), str(numPics/(finish-start)), str(finish-start))
self.numPicsTaken = numPicArray
print self.numPicsTaken
endTime = time.time()
totalTime = endTime-timeStart
totalFPS = sum(numPicArray)/totalTime
#print "10.2: Captured {0} total pictures. Total time was {1}, total FPS is {2}"\
#.format(str(sum(numPicArray)), str(totalTime), str(totalFPS) )
camera.close()
print "CAMERA IS FINISHED. RETURN TRUE"
return "True"
except:
print "noooooooooooooo break"
print sys.exc_info()[0]
raise
if __name__ == '__main__':
t = takePictures()
#camLog = open('CamLog-{0}.txt'.format(time.strftime("%Y-%m-%d-%H:%M:%S")), 'w')
    # run() requires a space-separated argument string (see the key above);
    # the values here are only illustrative placeholders.
    t.run("600 1296 972 5 1 30")
#Error handling can be handled in callbackClient class
# except (picamera.exc.PiCameraError, picamera.exc.PiCameraMMALError):
# print >>self.f, "PiCameraError or MMALError"
# self.queue.put('exit')
# time.sleep(1)
# os.system("sshpass -p 'raspberry' ssh pi@10.0.0.1 -o StrictHostKeyChecking=no python"\
# "flash.py camError 2")
# except:
# print >>self.f, "other error"
# self.queue.put('exit')
# time.sleep(1)
# os.system("sshpass -p 'raspberry' ssh pi@10.0.0.1 -o StrictHostKeyChecking=no python"\
# " flash.py error 2")
|
[
"msit@wellesley.edu"
] |
msit@wellesley.edu
|
37e4054dcb4b679729433a6b236355c800064f7c
|
774dc27fe5192e81dfbcbf6ac9ddfa6a68ee06ae
|
/__temp_migrations/FW_disaster/0001_initial.py
|
ff2a5fcb6ad90e0646f8e93a98dc055d29d5bcb2
|
[] |
no_license
|
akrgt/gotree
|
1540ebca65c7372489622d81b15943db1b20c383
|
93e9ed432b23518fd1dfde6d6f9761d313c06611
|
refs/heads/master
| 2021-06-19T00:12:32.769202
| 2020-01-27T03:18:50
| 2020-01-27T03:18:50
| 149,593,711
| 0
| 0
| null | 2021-06-10T20:49:49
| 2018-09-20T10:41:50
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 25,161
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-06-29 17:00
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import otree.db.models
import otree_save_the_change.mixins
class Migration(migrations.Migration):
initial = True
dependencies = [
('otree', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('id_in_subsession', otree.db.models.PositiveIntegerField(db_index=True, null=True)),
('round_number', otree.db.models.PositiveIntegerField(db_index=True, null=True)),
('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fw_disaster_group', to='otree.Session')),
],
options={
'db_table': 'FW_disaster_group',
},
bases=(otree_save_the_change.mixins.SaveTheChange, models.Model),
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('id_in_group', otree.db.models.PositiveIntegerField(db_index=True, null=True)),
('_payoff', otree.db.models.CurrencyField(default=0, null=True)),
('round_number', otree.db.models.PositiveIntegerField(db_index=True, null=True)),
('_gbat_arrived', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
('_gbat_grouped', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
                ('DisasterExp_1', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ๅฐ้')),
                ('DisasterExp_2', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ๆดฅๆณข')),
                ('DisasterExp_3', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ๅด็ซ')),
                ('DisasterExp_4', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ๅ็ ็ฝๅฎณ๏ผๅดๅดฉใใปๅ็ณๆตใปๅฐๆปใ๏ผ')),
                ('DisasterExp_5', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='่ฑช้จ')),
                ('DisasterExp_6', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ๆดชๆฐด')),
                ('DisasterExp_7', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ๆด้ขจใป็ซๅทป')),
                ('DisasterExp_8', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='่ฑช้ช')),
                ('DisasterExp_9', otree.db.models.StringField(choices=[('็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅคง่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผ่ขซๅฎณใๅใใ', '็ต้จใ๏ผ่ขซๅฎณใๅใใ'), ('็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ', '็ต้จใ๏ผๅฐ่ฆๆจกใช่ขซๅฎณใๅใใ'), ('็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ', '็ต้จใใใ๏ผ่ขซๅฎณใฏๅ
จใๅใใชใใฃใ'), ('็ต้จใใใใจใฏใชใ', '็ต้จใใใใจใฏใชใ')], max_length=10000, null=True, verbose_name='ใใฎไปใฎ็ฐๅธธใช่ช็ถ็ฝๅฎณ')),
('crt_HAP', otree.db.models.StringField(choices=[('0', '0'), ('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9', '9'), ('10', '10')], max_length=10000, null=True, verbose_name='ใใชใใฏ็พๅจ๏ผใฉใฎ็จๅบฆๅนธใใงใใ๏ผ0-10ๆบ็นใง่ฉไพกใใฆใใ ใใ๏ผ')),
('crt_1st', otree.db.models.StringField(choices=[('ใใฆใฏใพใใชใ', 'ใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใใชใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใ'), ('ใใฆใฏใพใ', 'ใใฆใฏใพใ')], max_length=10000, null=True, verbose_name='ๆฅๅธธ็ๆดปใฎไธญใง๏ผ่ชๅใฎ่กๅใฏใ่ชๅ่ช่บซใใซ่ฆใใใฆใใใจๆใใใจใใใ๏ผ')),
('crt_2nd', otree.db.models.StringField(choices=[('ใใฆใฏใพใใชใ', 'ใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใใชใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใ'), ('ใใฆใฏใพใ', 'ใใฆใฏใพใ')], max_length=10000, null=True, verbose_name='ๆฅๅธธ็ๆดปใฎไธญใง๏ผ่ชๅใฎ่กๅใฏใ็ดๆฅ่ชฐใ๏ผไบบ้๏ผใใซ่ฆใใใฆใใใจๆใใใจใใใ๏ผ')),
('crt_3rd', otree.db.models.StringField(choices=[('ใใฆใฏใพใใชใ', 'ใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใใชใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใ'), ('ใใฆใฏใพใ', 'ใใฆใฏใพใ')], max_length=10000, null=True, verbose_name='ๆฅๅธธ็ๆดปใฎไธญใง๏ผ่ชๅใฎ่กๅใฏใ็ฃ่ฆใซใกใฉ็ญใ้ใใฆ่ชฐใ๏ผไบบ้๏ผใใซ้ๆฅ็ใซ่ฆใใใฆใใใจๆใใใจใใใ๏ผ')),
                ('crt_sup', otree.db.models.StringField(choices=[('ใใฆใฏใพใใชใ', 'ใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใใชใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใใชใ'), ('ใฉใกใใใจใใใฐใใฆใฏใพใ', 'ใฉใกใใใจใใใฐใใฆใฏใพใ'), ('ใใฆใฏใพใ', 'ใใฆใฏใพใ')], max_length=10000, null=True, verbose_name='ๆฅๅธธ็ๆดปใฎไธญใง๏ผ่ชๅใฎ่กๅใฏใใๅคฉ้ๆงใ็ฅๆง๏ผไปๆงใชใฉใฎ่ถ
่ช็ถ็ใชๅญๅจใใซ่ฆใใใฆใใใจๆใใใจใใใ๏ผ')),
('FW1', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ๆชๆฅใฏใใคใ้ๅฝใซใใฃใฆๆฑบใใใใฆใใใจไฟกใใฆใใ')),
('FW2', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฎๆงๆ ผใๆ่ฝใฏ๏ผ๏ผ่ณใฎใคใใใชใฉใฎ๏ผ็็ฉๅญฆ็ใชๆง้ ใซใใฃใฆๆฑบใพใฃใฆใใ')),
('FW3', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฎๆญดๅฒใฎๅคง้จๅใฏๅถ็ถใฎๅบๆฅไบใฎ็ฉใฟ้ใญใงใใ')),
('FW4', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฏ่ชๅใฎๆๅฟใงๆฑบๅฎใไธใใใจใใงใใ')),
('FW5', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ใฉใใชใซๅชๅใใฆใ๏ผ่ชๅใฎ้ๅฝใฏๅคใใใใชใ')),
                ('FW6', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ๅฟ็ๅญฆ่
ใ็ฒพ็ฅ็งๅปใฏใใใฆไบบใฎใตใใพใใฎๅ
จใฆใ่งฃๆใใใ ใใ')),
('FW7', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='่ชฐใใใใใ่ตทใใใใจใไบๆธฌใงใใชใ')),
('FW8', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฏ่ชๅใไธใใ่ชคใฃใ้ธๆใซๅฏพใใฆใฏ๏ผไธๅใฎ่ฒฌไปปใ่ฒ ใใชใใฆใฏใชใใชใ')),
                ('FW9', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ๅ
จใฆใฎไบบใฎไบบ็ใฏ๏ผๆๅใใ้ๅฝใซใใฃใฆๆฑบใใใใฆใใ')),
('FW10', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='่ชๅใฎๅฐๆฅใฏ๏ผ้บไผๅญใซใใฃใฆๆฑบใใใใฆใใ')),
('FW11', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ใตใคใณใญใฎ็ฎใใณใคใณใในใฎใใใซ๏ผไบบ็ใฏไบๆธฌใงใใชใ')),
('FW12', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฏๅฟใใๆใใฐ๏ผใฉใใช้ๅฎณใงใไนใ่ถใใใใ')),
('FW13', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='็ฉไบใฏใชใใใใซใใใชใใ๏ผ่ชๅใซใงใใใใจใฏๅฐใชใ')),
('FW14', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='้ๅปใฎ็ต้จใใฉใฎใใใซ็พๅจใฎ่ชๅใฎ็ฅๆงใๆงๆ ผใๅฝขไฝใฃใฆใใใใ๏ผ็งๅญฆใฏ็คบใใฆใใใ')),
('FW15', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฏ่ชฐใใไบๆธฌใงใใชใใใใชใตใใพใใใใ')),
('FW16', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='็ฏ็ฝช่ใซใฏ่ชๅใฎ่กใฃใๆชไบใซๅฏพใใ๏ผๅจ้ข็ใช่ฒฌไปปใใใ')),
('FW17', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ใใฎ่ใๆนใๅฅฝใใใฉใใใฏๅฅใจใใฆ๏ผไบบ็ใฏ่ชฌๆใงใใชใๅใซๅใใใใฆใใใใใซๆใ')),
('FW18', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบ้ใฎ่กๅใฏไปใฎๅ็ฉใใกใจๅใใใใซ๏ผใใคใ่ช็ถใฎๆ็ใซๅพใฃใฆใใ')),
('FW19', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ๆฅใใฎๅบๆฅไบใฏๅจใใใฃใฆไธ่ฒซๆงใๆใใชใใใ๏ผๅใไบๆธฌใใใใจใฏ้ฃใใ')),
('FW20', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบ็ใฏ้ใซ่ฒ ใใจใใใๅคงใใ')),
('FW21', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฏ่ชใใฎ่ช็ฑใชๆๅฟใๆ่ใง่กๅใใฆใใ')),
('FW22', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='่ฆชใฎๆใคๆง่ณชใฏ๏ผๅญใฉใใฎๆง่ณชใๆฑบใใฆใใ')),
('FW23', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฏ๏ผ่ชๅใฎใใใพใกใซใใคใ่ฒฌไปปใ่ฒ ใฃใฆใใ')),
('FW24', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ๅคงไบบใซใชใฃใฆใใๆๅใใใใฏๅญใฉใใฎ้ ใฎ็ฐๅขใงๆฑบใพใ')),
('FW25', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใซ่ตทใใๅบๆฅไบใฏ๏ผๅถ็ถใฎ็ฃ็ฉใงใใ')),
('FW26', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='็ฒพ็ฅๅใๅผทใใใฐ๏ผ่ชๅใซ็ใใๆฌฒๆใใใคใๆใใใใจใใงใใ')),
('FW27', otree.db.models.StringField(choices=[('1.ใใๆใใชใ', '1.ใใๆใใชใ'), ('2.ใฉใกใใใจใใใฐใใๆใใชใ', '2.ใฉใกใใใจใใใฐใใๆใใชใ'), ('3.ใฉใกใใงใใชใ', '3.ใฉใกใใงใใชใ'), ('4.ใฉใกใใใจใใใฐใใๆใ', '4.ใฉใกใใใจใใใฐใใๆใ'), ('5.ใใๆใ', '5.ใใๆใ')], max_length=10000, null=True, verbose_name='ไบบใฎๅฐๆฅใฏไบๆธฌใใใใจใใงใใชใ')),
('group', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='FW_disaster.Group')),
('participant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fw_disaster_player', to='otree.Participant')),
('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fw_disaster_player', to='otree.Session')),
],
options={
'db_table': 'FW_disaster_player',
},
bases=(otree_save_the_change.mixins.SaveTheChange, models.Model),
),
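# The FW2-FW27 fields above record the items of the questionnaire; each
# answer is stored as a StringField whose five choices pair the saved
# value with an identical display label, giving a 5-point Likert-style
# response scale.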
migrations.CreateModel(
name='Subsession',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('round_number', otree.db.models.PositiveIntegerField(db_index=True, null=True)),
('session', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='fw_disaster_subsession', to='otree.Session')),
],
options={
'db_table': 'FW_disaster_subsession',
},
bases=(otree_save_the_change.mixins.SaveTheChange, models.Model),
),
migrations.AddField(
model_name='player',
name='subsession',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FW_disaster.Subsession'),
),
migrations.AddField(
model_name='group',
name='subsession',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FW_disaster.Subsession'),
),
]
|
[
"goaki@me.com"
] |
goaki@me.com
|
c819076050d722cded9659553a0f62ca4aa3fd36
|
2e80eb0e44bc3ce05373b047a22bfb220ce6ff7b
|
/name/foo.py
|
e55411fd75037a2eddbaaf03dd96423e7f72827f
|
[] |
no_license
|
Jorge-Hoyos/python-workshop
|
07fcda929270604827148b871a6352a92a0b21b4
|
a182f2b77d372ae31a324ba8c498729bbb26a08a
|
refs/heads/master
| 2023-02-09T02:03:21.722897
| 2021-01-04T16:19:32
| 2021-01-04T16:19:32
| 284,469,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 359
|
py
|
# Suppose this is foo.py.
print("before import")
import math
print(__name__)
print("before functionA")
def functionA():
print("Function A")
print("before functionB")
def functionB():
print("Function B {}".format(math.sqrt(100)))
print("before __name__ guard")
if __name__ == '__main__':
functionA()
functionB()
print("after __name__ guard")
|
[
"jorhoyos@bancolombia.com.co"
] |
jorhoyos@bancolombia.com.co
|
5bc0c700daa377d54f09a5f669fc0520f96b3bd7
|
20d1787feaf42b8aee5aeabad9b295d29c837152
|
/kiwitrains/__init__.py
|
afc72c30259bf270ce6493711a19cca3d1573b5e
|
[] |
no_license
|
simonblack/kiwitrains
|
c2d41a04b6c8802db7481780ec3581bc52feff99
|
2f4b6999d75def75d8b38f3e17e421965d675992
|
refs/heads/master
| 2021-04-15T10:49:08.365913
| 2018-03-25T06:12:30
| 2018-03-25T06:12:30
| 126,666,121
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 195
|
py
|
#!/usr/bin/env python3
"""
Kiwiland Railroad Transit library
"""
from .lib import *
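# The wildcard import above re-exports the package's public API from
# lib.py; if lib defines __all__, only those names are exposed here
# (an assumption about lib's layout -- its contents are not shown in
# this file).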
__version__ = '1.0.0'
__author__ = 'Simon Black'
__email__ = "mail@simon.black"
__status__ = "Production"
|
[
"simon@cerneo.org"
] |
simon@cerneo.org
|
669334c48eb971bb65dac2a7beec7cfd408c959f
|
38686e0da040e3aef0bba63ebe99ee3e7dceea4f
|
/doc/conf.py
|
ef17133c0332186ec287ae1fbc16bc566fb0496d
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
oubiwann/bundes
|
ae6daf042c1d8738007063f9acd40ffe1e987113
|
4388e1935c822517969b40e796d16e0838caba37
|
refs/heads/master
| 2021-04-15T15:02:17.617919
| 2016-02-17T10:06:53
| 2016-02-17T10:06:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,201
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# bundes documentation build configuration file, created by
# sphinx-quickstart on Thu May 28 16:23:40 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'bundes'
copyright = '2015, Pierre-Yves Ritschard'
author = 'Pierre-Yves Ritschard'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
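# Note: this assumes the sphinx_rtd_theme package is installed (e.g.
# `pip install sphinx_rtd_theme`); recent releases register the theme
# with Sphinx automatically, so no html_theme_path entry is required.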
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'bundesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'bundes.tex', 'bundes Documentation',
'Pierre-Yves Ritschard', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'bundes', 'bundes Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'bundes', 'bundes Documentation',
author, 'bundes', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
[
"pyr@spootnik.org"
] |
pyr@spootnik.org
|
2b1e005dac468057714ac833d3332fca0078eef9
|
0975b4d581c4f6364f541b299b639dcc6f65a9f6
|
/apps/dataviz/migrations/0001_initial.py
|
5d7708031478be2ae0526a8f4e925a5efeb5903d
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Sasha-P/dataviz
|
40af1f440a66e1c8454a812d40f5caa8be570d4f
|
ccef27b41636ce30bc9abbe8ae47a5ec807e2712
|
refs/heads/master
| 2021-01-11T06:03:54.308225
| 2016-10-06T22:31:26
| 2016-10-06T22:31:26
| 70,165,861
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,280
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-02 11:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Country',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('value', models.IntegerField(default=0)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Region',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
options={
'ordering': ('name',),
},
),
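# The Region foreign key is attached to Country in a separate AddField
# step because CreateModel('Country') runs before CreateModel('Region');
# declaring the ForeignKey inline would reference a model that does not
# yet exist at that point in the migration.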
migrations.AddField(
model_name='country',
name='region',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dataviz.Region'),
),
]
|
[
"sasha.pazyuk@gmail.com"
] |
sasha.pazyuk@gmail.com
|