blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
af0935f71aa7ec410a3efca1dfe1563cbdb5e895 | acd1bec2b5f574aac9c91fa88cb2ad812c820066 | /Memorization Tool/task/table.py | dee73d297d70a4ac949778217a5f28dbdf5d3639 | [] | no_license | TogrulAga/Memorization-Tool | ec89006351947ea20fe6562024de750a2e1e0af9 | e1773ff8062ac4cff1018e06bf852f6315b414ab | refs/heads/master | 2023-07-05T21:14:18.522215 | 2021-08-08T10:17:47 | 2021-08-08T10:17:47 | 393,927,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
Base = declarative_base()
class FlashCard(Base):
    """ORM model for a single flashcard of the memorization tool."""
    __tablename__ = "flashcard"
    # Surrogate primary key, assigned by the database.
    id = Column(Integer, primary_key=True)
    # Front of the card (the prompt shown to the user).
    question = Column(String)
    # Back of the card (the expected answer).
    answer = Column(String)
    # Spaced-repetition box the card currently sits in -- presumably a
    # Leitner-system stage; confirm against the code that updates it.
    box_number = Column(Integer)
| [
"toghrul.aghakishiyev@ericsson.com"
] | toghrul.aghakishiyev@ericsson.com |
5b189b2f2f1e90e3245a51923b9761e2b2d19619 | 5ca2cca7762dcfcf9dd4b6a646fb8d39b9c2718f | /manager/migrations/0004_auto_20201213_2242.py | 4293fc64255a2037a96563320da320c050a0649d | [] | no_license | trilong0610/WebThoiTrang-Old | 372b99036f160b9eb9c503f7b1987177855f5d6a | ae04a43019916e87099edb614d0d155f139f0d09 | refs/heads/main | 2023-02-02T23:22:39.938038 | 2020-12-16T08:30:55 | 2020-12-16T08:30:55 | 321,085,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | # Generated by Django 3.1.3 on 2020-12-13 15:42
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: alters PurchaseProduct.time_create.
    dependencies = [
        ('manager', '0003_auto_20201213_2240'),
    ]
    operations = [
        migrations.AlterField(
            model_name='purchaseproduct',
            name='time_create',
            # NOTE(review): the default is a datetime fixed at generation time
            # (2020-12-13 22:42), a common artifact of using
            # default=datetime.now() instead of auto_now_add in the model --
            # verify the model definition.
            field=models.DateTimeField(default=datetime.datetime(2020, 12, 13, 22, 42, 23, 127128)),
        ),
    ]
| [
"trilong0610@gmail.com"
] | trilong0610@gmail.com |
b85551098ac4c2584a9d741b04153b773e5537fd | 9b0b0fbc5b5a9865108552eb8fe58a04f9cc0cc3 | /fabtools/icanhaz/python.py | 404644ba8740994e207506d9b4030fdc2e83c472 | [
"BSD-2-Clause"
] | permissive | thoas/fabtools | 8f77a24ed21f318e3bf7c8817a9b77a6def8ceb8 | 8cd63767384e589629e5f02e67bd498a8473b99c | refs/heads/master | 2021-01-18T06:36:29.737133 | 2011-09-05T15:55:36 | 2011-09-05T15:55:36 | 2,327,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | """
Idempotent API for managing python packages
"""
from fabtools.python import *
def package(pkg_name, virtualenv=None, use_sudo=False):
    """
    Ensure that the Python package ``pkg_name`` is installed.

    Installation is attempted only when the package is missing, so the
    call is idempotent.
    """
    already_present = is_installed(pkg_name)
    if not already_present:
        install(pkg_name, virtualenv, use_sudo)
def packages(pkg_list, virtualenv=None, use_sudo=False):
    """
    Ensure that every Python package in ``pkg_list`` is installed.

    Packages already present are filtered out first; ``install`` is only
    invoked when at least one package is missing (idempotent behaviour).
    """
    missing = []
    for pkg in pkg_list:
        if not is_installed(pkg):
            missing.append(pkg)
    if missing:
        install(missing, virtualenv, use_sudo)
| [
"ronan.amicel@gmail.com"
] | ronan.amicel@gmail.com |
1b40a16b02b23dda2bf80039eea52b5c4399e387 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/FSAF_for_Pytorch/mmdetection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py | 34c195d94bfefb4f5edace4eeca67ed63fff2f2c | [
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 985 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Inherit the LVIS v0.5 Mask R-CNN R-50 multi-scale 2x schedule and only
# swap the backbone for ResNeXt-101 64x4d.
_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py'
model = dict(
    # ImageNet-pretrained weights fetched from the open-mmlab model zoo.
    pretrained='open-mmlab://resnext101_64x4d',
    backbone=dict(
        type='ResNeXt',
        depth=101,
        groups=64,          # cardinality of the ResNeXt blocks
        base_width=4,       # width per group
        num_stages=4,
        out_indices=(0, 1, 2, 3),  # feed all four stages to the FPN neck
        frozen_stages=1,    # freeze stem + stage 1 during fine-tuning
        norm_cfg=dict(type='BN', requires_grad=True),
        style='pytorch'))
| [
"wangjiangben@huawei.com"
] | wangjiangben@huawei.com |
864cd0f813eea94b8cd02c8f890eb24dfdfa3cc8 | c6e744658cf9633f6571f349fff444d84634a754 | /umweltbank/spiders/umweltbank.py | f1324715bba82c34f32859f99a3a6acc7d91df01 | [] | no_license | daniel-kanchev/umweltbank | a76bd0f2d86e7cfac691a0dcc8064b4d0cfc358f | 17dbe445c85583d298249a084ec828b4ef607338 | refs/heads/main | 2023-03-19T18:12:03.420926 | 2021-03-15T12:54:35 | 2021-03-15T12:54:35 | 347,968,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,263 | py | import scrapy
from scrapy.loader import ItemLoader
from itemloaders.processors import TakeFirst
from datetime import datetime
from umweltbank.items import Article
class UmweltbankSpider(scrapy.Spider):
    """Scrapy spider for bankundumwelt.de: follows article links from the
    front page and extracts title, date, URL and body into Article items."""
    name = 'umweltbank'
    start_urls = ['https://bankundumwelt.de/']
    def parse(self, response):
        # Collect every article link on the landing page and crawl each one.
        links = response.xpath('//a[@class="post-link link"]/@href').getall()
        yield from response.follow_all(links, self.parse_article)
    def parse_article(self, response):
        # Some links point at PDF documents; those cannot be parsed as HTML.
        if 'pdf' in response.url:
            return
        item = ItemLoader(Article())
        # Return scalar values rather than lists from the loader.
        item.default_output_processor = TakeFirst()
        title = response.xpath('//h1/text()').get()
        if title:
            title = title.strip()
        date = response.xpath('//div[@class="post-meta-date meta-info"]/text()').get()
        if date:
            date = date.strip()
        # Join all non-empty text fragments of the article body.
        content = response.xpath('//div[@class="post-content"]//text()').getall()
        content = [text for text in content if text.strip()]
        content = "\n".join(content).strip()
        item.add_value('title', title)
        item.add_value('date', date)
        item.add_value('link', response.url)
        item.add_value('content', content)
        return item.load_item()
| [
"daniel.kanchev@adata.pro"
] | daniel.kanchev@adata.pro |
139bfda90e79fcdbf1bdb18feb466787fc6c150e | 2a54e8d6ed124c64abb9e075cc5524bb859ba0fa | /.history/1-Python-Basics/20-list-method_20200413040951.py | f5bdbdbd3425864c68451d96e168feb776b6cffd | [] | no_license | CaptainStorm21/Python-Foundation | 01b5fbaf7a913506518cf22e0339dd948e65cea1 | a385adeda74f43dd7fb2d99d326b0be23db25024 | refs/heads/master | 2021-05-23T01:29:18.885239 | 2020-04-23T19:18:06 | 2020-04-23T19:18:06 | 253,171,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | basket = [21, 12,33, 35, 99]
print(basket)
print(len(basket))
# pop(2) removes and returns the element at index 2 (the third item, 33)
print(basket.pop(2))
print(basket)
# extend - append all elements of another list; extend() returns None,
# so this print shows None
basket1 = [1000, 2000, 3000]
print(basket.extend(basket1))
print(basket)
# append - add a single element at the end; append() returns None
print(basket.append(700))
print(basket)
# index - position of the first occurrence of 21
print(basket.index(21))
print(basket)
# sort() sorts in place and returns None
print(basket.sort())
# insert(5, 1020) places 1020 at index 5; insert() returns None
print(basket.insert(5, 1020))
| [
"tikana4@yahoo.com"
] | tikana4@yahoo.com |
e70be8b0d16cb7fe3b8690933269906f2d221d46 | 345529a5ae9ac4831e1a04066612c929a2a8ad7e | /ayush_crowdbotics_378/wsgi.py | 51606165e32cea776f0b3de315377a6dfb613e20 | [] | no_license | payush/ayush-crowdbotics-378 | 29f7552e5d91c21f530af982bc16477fb11df28a | aaec9093eec6408df059172033ed8b4bf3be3c97 | refs/heads/master | 2020-03-23T22:35:05.533833 | 2018-07-24T16:17:07 | 2018-07-24T16:17:07 | 142,183,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 420 | py | """
WSGI config for ayush_crowdbotics_378 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ayush_crowdbotics_378.settings")
application = get_wsgi_application()
| [
"ayushpuroheet@gmail.com"
] | ayushpuroheet@gmail.com |
c24714f252c9778e9b5426070d57d02130f12ed8 | 97124dfacdb78a9301aebf5a3a4ecad7a5d0116b | /bp/__init__.py | f953eecb7fa5f1feab26592a1ee4475b728301cb | [] | permissive | esayyari/improved-octo-waddle | 982a5bd156f76415bf5dba875309190acea77b9a | 7aca988734cc1107b2d991eb9379347a08a32792 | refs/heads/master | 2022-11-20T20:16:43.370024 | 2020-07-21T19:13:35 | 2020-07-21T19:13:35 | 281,470,015 | 0 | 0 | BSD-3-Clause | 2020-07-21T18:39:11 | 2020-07-21T18:07:52 | Jupyter Notebook | UTF-8 | Python | false | false | 580 | py | # ----------------------------------------------------------------------------
# Copyright (c) 2013--, BP development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from ._bp import BP
from ._io import parse_newick
from ._conv import to_skbio_treenode, from_skbio_treenode, to_skbio_treearray
__all__ = ['BP', 'parse_newick', 'to_skbio_treenode', 'from_skbio_treenode',
'to_skbio_treearray']
| [
"mcdonadt@colorado.edu"
] | mcdonadt@colorado.edu |
2f4dc3f75c0f2edf8682570208ae32a207355e65 | 2a3157ccb5376ffb03b13df4721afa405fbfc95d | /bin/pasteurize | 5619d590f2c93e22517a17ce63712cdccccd7fec | [] | no_license | bopopescu/DemoDjango | 694501259322590d2959ef65cb6231ba1b1cf128 | b5ea252f0293ea63905a72045703b50815fbd673 | refs/heads/master | 2022-11-20T23:25:41.737807 | 2018-09-17T09:49:28 | 2018-09-17T09:49:28 | 282,543,262 | 0 | 0 | null | 2020-07-25T23:44:16 | 2020-07-25T23:44:16 | null | UTF-8 | Python | false | false | 249 | #!/home/jinesh/Documents/djangoproj/bin/python
# -*- coding: utf-8 -*-
# Console-script shim for the `pasteurize` tool from python-future.
import re
import sys
from libpasteurize.main import main
if __name__ == '__main__':
    # Strip setuptools' "-script.py"/".exe" suffix from argv[0] so the tool
    # reports a clean program name, then exit with the tool's return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"malaviajinesh@gmail.com"
] | malaviajinesh@gmail.com | |
4be7a5039044527d2980a0bf1b7338f67739259d | 5fe72bb13baf3649058ebe11aa86ad4fc56c69ed | /hard-gists/5702d57eb4cb6ef6e7e8/snippet.py | 5ba8fa69b8f139f132805c7e897a493b1561e969 | [
"Apache-2.0"
] | permissive | dockerizeme/dockerizeme | 8825fed45ff0ce8fb1dbe34959237e8048900a29 | 408f3fa3d36542d8fc1236ba1cac804de6f14b0c | refs/heads/master | 2022-12-10T09:30:51.029846 | 2020-09-02T13:34:49 | 2020-09-02T13:34:49 | 144,501,661 | 24 | 20 | Apache-2.0 | 2022-11-21T12:34:29 | 2018-08-12T21:21:04 | Python | UTF-8 | Python | false | false | 1,624 | py | #!/usr/bin/env python2.7
import argparse
import os
import qrcode
import qrcode.image.pil
import sqlite3
import sys
import urllib
class AuthenticatorAccount(object):
    """Value object for one TOTP account: issuer name, label and secret."""

    def __init__(self, account_name, account_desc, secret):
        # Store the three fields verbatim; no validation is performed.
        self.account_name = account_name
        self.account_desc = account_desc
        self.secret = secret

    def __repr__(self):
        # Debug form: class name, object id in hex (without "0x"), then the
        # instance attribute dict.
        identity = hex(id(self))[2:]
        return "AuthenticatorAccount@%s%s" % (identity, self.__dict__)
def __main__():
    # Export every Google Authenticator account found in the given SQLite
    # database as a QR-code image (otpauth:// URI).
    # NOTE: this is Python 2 code (print statement below).
    parser = argparse.ArgumentParser()
    parser.add_argument("database", help="The SQLite database file.")
    args = parser.parse_args()
    if not os.path.isfile(args.database):
        sys.stderr.write("Unable to open %s.\n" % (args.database,))
        sys.stderr.flush()
        sys.exit(1)
    conn = sqlite3.connect(args.database)
    # Row factory gives name-based access to columns (row['issuer'], ...).
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM accounts ORDER BY _id;")
    row = None
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        account = AuthenticatorAccount(row['issuer'] or row['original_name'], row['email'],
            row['secret'])
        # NOTE(review): the message says ".svg" but the file written below is
        # a PNG -- the message text looks stale; confirm and align.
        print """Saving "%s" to "qrcode-account-%02d.svg" """[:-1] % (account.account_desc,
            row['_id'])
        # Standard otpauth provisioning URI understood by authenticator apps.
        qr = qrcode.make("otpauth://totp/%s?secret=%s&issuer=%s" % (account.account_desc,
            account.secret, account.account_name), image_factory=qrcode.image.pil.PilImage)
        with open("qrcode-account-%02d.png" % (row['_id'],), "wb") as f:
            qr.save(f)
    if __name__ == "__main__":
        __main__()
| [
"42325807+dockerizeme@users.noreply.github.com"
] | 42325807+dockerizeme@users.noreply.github.com |
2c96f4f13099c50837c449c11ffc54903872c64b | 49eb0d08311529b1d2375429a9cbb5582d77fd2d | /contrib/devtools/update-translations.py | 0c50a8d0119ff2f3649c919b59ee98af37c00308 | [
"MIT",
"LicenseRef-scancode-public-domain"
] | permissive | mario1987/deimos | bcbaa7b4ed617a70c37047e6590264941b94a170 | 72cb8c33b5a6d4e09e4019602db7cea8c686d505 | refs/heads/master | 2020-03-19T23:11:08.313125 | 2018-06-11T23:06:30 | 2018-06-11T23:06:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,074 | py | #!/usr/bin/env python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'deimos_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
    """Abort with exit code 1 unless the current directory is a repo root."""
    # The repository root is identified by the presence of a .git entry.
    at_root = os.path.exists('.git')
    if at_root:
        return
    print('No .git directory found')
    print('Execute this script at the root of the repository', file=sys.stderr)
    exit(1)
def fetch_all_translations():
    """Pull every translation from Transifex; abort on tool failure."""
    # -f forces overwriting local files, -a fetches all languages.
    status = subprocess.call([TX, 'pull', '-f', '-a'])
    if status:
        print('Error while fetching translations', file=sys.stderr)
        exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.'''
    # Scan left to right; the character following each '%' is recorded as the
    # specifier. A lone trailing '%' raises IndexError, which the caller
    # deliberately treats as a parse error in the translation.
    specifiers = []
    start = 0
    idx = s.find('%', start)
    while idx >= 0:
        specifiers.append(s[idx + 1])
        start = idx + 2
        idx = s.find('%', start)
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf)'''
    digits = {'1', '2', '3', '4', '5', '6', '7', '8', '9'}
    numeric = [c for c in specifiers if c in digits]
    other = [c for c in specifiers if c not in digits]
    # A message that mixes numeric specifiers with anything else is assumed to
    # be Qt-formatted: Qt only substitutes the numeric ones (see
    # QString::arg), so e.g. the '%)' in "(percentage: %1%)" is not a real
    # printf specifier and the "others" are discarded.
    if numeric:
        other = []
    # Qt specifiers may appear in any order (hence a set); strprintf ones are
    # positional, so their order is preserved.
    return set(numeric), other
def sanitize_string(s):
    '''Sanitize string for printing'''
    # Keep diagnostics on a single line by collapsing newlines to spaces.
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors, numerus):
    """Return True when *translation* uses the same format specifiers as
    *source*; otherwise append a diagnostic to *errors* and return False."""
    source_f = split_format_specifiers(find_format_specifiers(source))
    # Source messages must never mix Qt and strprintf styles; if this fires,
    # the source string itself needs fixing.
    assert(not(source_f[0] and source_f[1]))
    try:
        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
        return False
    if source_f == translation_f:
        return True
    # Numerus translations may legitimately drop the %n specifier entirely
    # (common when the language has only one plural form).
    omits_n = (numerus and source_f == (set(), ['n'])
               and translation_f == (set(), []) and translation.find('%') == -1)
    if omits_n:
        return True
    errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
    return False
def all_ts_files(suffix=''):
    """Yield (filename, filepath) for each translation file in LOCALE_DIR.

    Only files ending in '.ts' + *suffix* are considered, and the source
    language file is skipped. When *suffix* is given it is stripped from
    both the yielded filename and the joined filepath.
    """
    wanted_ending = '.ts' + suffix
    skipped = SOURCE_LANG + suffix
    for entry in os.listdir(LOCALE_DIR):
        if not entry.endswith(wanted_ending):
            continue
        if entry == skipped:
            continue
        name = entry[:-len(suffix)] if suffix else entry
        yield (name, os.path.join(LOCALE_DIR, name))
# All C0 control bytes except \n (\x0a) and \r (\x0d): these are not valid
# in XML 1.0 documents and would make the parser fail.
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
    '''Remove invalid characters from translation string'''
    # One regex pass strips every disallowed control byte.
    cleaned = FIX_RE.sub(b'', s)
    return cleaned
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None
def escape_cdata(text):
    # Delegate to the saved ElementTree implementation first, then add the
    # two extra entities Qt Linguist emits for quotes.
    text = _orig_escape_cdata(text)
    for raw, entity in (("'", '&apos;'), ('"', '&quot;')):
        text = text.replace(raw, entity)
    return text
def postprocess_translations(reduce_diff_hacks=False):
    """Validate and clean every fetched .ts file in place.

    Each file is renamed to '<file>.orig', parsed, stripped of invalid
    translations/location tags, and written back. Files with fewer than
    MIN_NUM_MESSAGES valid messages are dropped. Returns True when any
    format-specifier error was found.
    """
    print('Checking and postprocessing...')
    if reduce_diff_hacks:
        # Swap in our quote-escaping serializer so output matches Qt's.
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata
    for (filename,filepath) in all_ts_files():
        os.rename(filepath, filepath+'.orig')
    have_errors = False
    for (filename,filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)
        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]
                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors, numerus)
                    for error in errors:
                        print('%s: %s' % (filename, error))
                    if not valid: # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True
                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)
                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)
        # check if document is (virtually) empty, and remove it if so
        num_messages = 0
        for context in root.findall('context'):
            for message in context.findall('message'):
                num_messages += 1
        if num_messages < MIN_NUM_MESSAGES:
            print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
            continue
        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    # Fetch from Transifex, then validate/clean; must run at the repo root.
    check_at_repository_root()
    fetch_all_translations()
    postprocess_translations()
| [
"support@lipcoins.org"
] | support@lipcoins.org |
e21b5f5350a57068a2fab6ad42da5cb8ff4b1390 | 928dcef8e8d682f3e4062b0cb3d49e6151383138 | /setup.py | acf65858f3b5704078624f20c38d4740c8f5a803 | [
"MIT"
] | permissive | giserh/hug | 3aa5724aadf5c06dd8c7b5f867ea40ea057e2e78 | a15bb9497d23398dc82c496352d91e32ff183f13 | refs/heads/master | 2021-01-18T08:41:32.402648 | 2015-09-15T02:12:21 | 2015-09-15T02:12:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,387 | py | #!/usr/bin/env python
import subprocess
import sys
try:
    # Preferred path: setuptools provides a real `test` command hook.
    from setuptools import setup
    from setuptools.command.test import test as TestCommand
    class PyTest(TestCommand):
        # Run the suite via pytest when `python setup.py test` is invoked.
        extra_kwargs = {'tests_require': ['pytest', 'mock']}
        def finalize_options(self):
            TestCommand.finalize_options(self)
            self.test_args = []
            self.test_suite = True
        def run_tests(self):
            import pytest
            sys.exit(pytest.main(self.test_args))
except ImportError:
    # Fallback for plain distutils: delegate to the runtests.py script.
    from distutils.core import setup, Command
    class PyTest(Command):
        extra_kwargs = {}
        user_options = []
        def initialize_options(self):
            pass
        def finalize_options(self):
            pass
        def run(self):
            raise SystemExit(subprocess.call([sys.executable, 'runtests.py']))
try:
    # Convert the Markdown README to reST for PyPI; fall back to an empty
    # long_description when pandoc/pypandoc is unavailable.
    import pypandoc
    readme = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError, OSError, RuntimeError):
    readme = ''
setup(name='hug',
      version='1.4.0',
      description='A Python framework that makes developing APIs as simple as possible, but no simpler.',
      long_description=readme,
      author='Timothy Crosley',
      author_email='timothy.crosley@gmail.com',
      url='https://github.com/timothycrosley/hug',
      license="MIT",
      entry_points={
          'console_scripts': [
              'hug = hug:run.terminal',
          ]
      },
      packages=['hug'],
      requires=['falcon'],
      install_requires=['falcon'],
      cmdclass={'test': PyTest},
      keywords='Web, Python, Python3, Refactoring, REST, Framework, RPC',
      classifiers=['Development Status :: 6 - Mature',
                   'Intended Audience :: Developers',
                   'Natural Language :: English',
                   'Environment :: Console',
                   'License :: OSI Approved :: MIT License',
                   'Programming Language :: Python',
                   'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.2',
                   'Programming Language :: Python :: 3.3',
                   'Programming Language :: Python :: 3.4',
                   'Programming Language :: Python :: 3.5',
                   'Topic :: Software Development :: Libraries',
                   'Topic :: Utilities'],
      **PyTest.extra_kwargs)
| [
"timothy.crosley@gmail.com"
] | timothy.crosley@gmail.com |
d013dd9eb4c44f3e18ece5550ce2a994787e3aaa | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_deployment/online/resource_requirements_schema.py | 7f43d91fc60142f1307ebcac5a55c78d2d8f447f | [
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"PSF-2.0",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"Python-2.0",
"MPL-2.0",
"LicenseRef-scancode-other-copyleft",
"HPND",... | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 899 | py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=unused-argument
import logging
from typing import Any
from marshmallow import post_load
from azure.ai.ml._schema.core.fields import NestedField
from azure.ai.ml._schema.core.schema import PatchedSchemaMeta
from .resource_settings_schema import ResourceSettingsSchema
module_logger = logging.getLogger(__name__)
class ResourceRequirementsSchema(metaclass=PatchedSchemaMeta):
requests = NestedField(ResourceSettingsSchema)
limits = NestedField(ResourceSettingsSchema)
@post_load
def make(self, data: Any, **kwargs: Any) -> "ResourceRequirementsSettings":
from azure.ai.ml.entities import ResourceRequirementsSettings
return ResourceRequirementsSettings(**data)
| [
"noreply@github.com"
] | Azure.noreply@github.com |
24e20f6e355cde10540c8b7eaacd974cbceb2334 | 5330918e825f8d373d3907962ba28215182389c3 | /CMGTools/Common/python/factories/cmgDiPFCandidate_cfi.py | e5a22266d42400df25dda74197c0de4f68b1198c | [] | no_license | perrozzi/cmg-cmssw | 31103a7179222c7aa94f65e83d090a5cf2748e27 | 1f4cfd936da3a6ca78f25959a41620925c4907ca | refs/heads/CMG_PAT_V5_18_from-CMSSW_5_3_22 | 2021-01-16T23:15:58.556441 | 2017-05-11T22:43:15 | 2017-05-11T22:43:15 | 13,272,641 | 1 | 0 | null | 2017-05-11T22:43:16 | 2013-10-02T14:05:21 | C++ | UTF-8 | Python | false | false | 415 | py | import FWCore.ParameterSet.Config as cms
from CMGTools.Common.factories.cmgDiObject_cfi import diObjectFactory
cmgDiPFCandidateFactory = diObjectFactory.clone(
leg1Collection = cms.InputTag("particleFlow"),
leg2Collection = cms.InputTag("particleFlow"),
)
cmgDiPFCandidate = cms.EDFilter(
"DiPFCandidatePOProducer",
cfg = cmgDiPFCandidateFactory.clone(),
cuts = cms.PSet(
),
)
| [
"colin.bernet@cern.ch"
] | colin.bernet@cern.ch |
cffd3f09c260d4a20c639febd7d8c9d868a3c8bb | e71fa62123b2b8f7c1a22acb1babeb6631a4549b | /xlsxwriter/test/comparison/test_chart_layout03.py | 8edd7c7b233330d0dc5e1166cecf00a1a5e839f5 | [
"BSD-2-Clause"
] | permissive | timgates42/XlsxWriter | 40480b6b834f28c4a7b6fc490657e558b0a466e5 | 7ad2541c5f12b70be471b447ab709c451618ab59 | refs/heads/main | 2023-03-16T14:31:08.915121 | 2022-07-13T23:43:45 | 2022-07-13T23:43:45 | 242,121,381 | 0 | 0 | NOASSERTION | 2020-02-21T11:14:55 | 2020-02-21T11:14:55 | null | UTF-8 | Python | false | false | 1,650 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """
    def setUp(self):
        # Golden file produced by Excel that the generated output must match.
        self.set_filename('chart_layout03.xlsx')
    def test_create_file(self):
        """Test the creation of an XlsxWriter file with user defined layout."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'column'})
        # Pin the axis ids so the XML matches the Excel-generated file.
        chart.axis_ids = [68312064, 69198592]
        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        worksheet.write_column('C1', data[2])
        chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
        chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
        chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
        # User-defined legend layout: fractions of the chart area, matching
        # the values Excel stored in the golden file.
        chart.set_legend({
            'position': 'overlay_right',
            'layout': {
                'x': 0.80197353455818043,
                'y': 0.3744240303295423,
                'width': 0.12858202099737534,
                'height': 0.25115157480314959,
            }
        })
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
c9f2b0fb720c3590de12e05c6c7e8e3c9323feb6 | a60e81b51935fb53c0900fecdadba55d86110afe | /LeetCode/python/76___________hard_Minimum Window Substring.py | 168c878e0eb107b71a2577dc7bb95e39d86f3841 | [] | no_license | FrankieZhen/Lookoop | fab6855f5660467f70dc5024d9aa38213ecf48a7 | 212f8b83d6ac22db1a777f980075d9e12ce521d2 | refs/heads/master | 2020-07-27T08:12:45.887814 | 2019-09-16T11:48:20 | 2019-09-16T11:48:20 | 209,021,915 | 1 | 0 | null | 2019-09-17T10:10:46 | 2019-09-17T10:10:46 | null | UTF-8 | Python | false | false | 1,422 | py | """
Given a string S and a string T, find the minimum window in S which will contain all the characters in T in complexity O(n).
Example:
Input: S = "ADOBECODEBANC", T = "ABC"
Output: "BANC"
Note:
If there is no such window in S that covers all characters in T, return the empty string "".
If there is such window, you are guaranteed that there will always be only one unique minimum window in S.
"""
import collections
# 2018-6-26
# Minimum Window Substring
# https://leetcode.com/problems/minimum-window-substring/discuss/26804/12-lines-Python
class Solution:
    def minWindow(self, s, t):
        """
        Return the minimum window of s that contains every character of t
        (with multiplicity), or "" if no such window exists.

        Sliding-window algorithm, O(len(s) + len(t)) time.

        Fix: removed leftover debug print statements (several ran inside
        the per-character loop, spamming output and slowing the scan).

        :type s: str
        :type t: str
        :rtype: str
        """
        need, missing = collections.Counter(t), len(t)
        i = I = J = 0
        for j, c in enumerate(s, 1):
            # c only satisfies an outstanding requirement while need[c] > 0;
            # surplus occurrences drive need[c] negative.
            missing -= need[c] > 0
            need[c] -= 1
            if not missing:
                # All of t is covered: shrink from the left while the left
                # character is surplus (need < 0).
                while i < j and need[s[i]] < 0:
                    need[s[i]] += 1
                    i += 1
                # Record the window if it is the first found or no larger
                # than the best so far.
                if not J or j - i <= J - I:
                    I, J = i, j
        return s[I:J]
# test
S = "ADOBECODEBANC"
T = "ABC"
test = Solution()
res = test.minWindow(S,T)
print(res)  # expected output: BANC
"33798487+YangXiaoo@users.noreply.github.com"
] | 33798487+YangXiaoo@users.noreply.github.com |
4ca435cfa6ce25915bcf7f9dcbc0502161f91cb8 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/cloud/dialogflow/cx/v3beta1/dialogflow-cx-v3beta1-py/google/cloud/dialogflowcx_v3beta1/services/webhooks/async_client.py | 7521843b6472b19abef5590131241d04a5c45d22 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,477 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.dialogflowcx_v3beta1.services.webhooks import pagers
from google.cloud.dialogflowcx_v3beta1.types import webhook
from google.cloud.dialogflowcx_v3beta1.types import webhook as gcdc_webhook
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import WebhooksTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import WebhooksGrpcAsyncIOTransport
from .client import WebhooksClient
class WebhooksAsyncClient:
"""Service for managing
[Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
"""
_client: WebhooksClient
DEFAULT_ENDPOINT = WebhooksClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = WebhooksClient.DEFAULT_MTLS_ENDPOINT
service_path = staticmethod(WebhooksClient.service_path)
parse_service_path = staticmethod(WebhooksClient.parse_service_path)
webhook_path = staticmethod(WebhooksClient.webhook_path)
parse_webhook_path = staticmethod(WebhooksClient.parse_webhook_path)
common_billing_account_path = staticmethod(WebhooksClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(WebhooksClient.parse_common_billing_account_path)
common_folder_path = staticmethod(WebhooksClient.common_folder_path)
parse_common_folder_path = staticmethod(WebhooksClient.parse_common_folder_path)
common_organization_path = staticmethod(WebhooksClient.common_organization_path)
parse_common_organization_path = staticmethod(WebhooksClient.parse_common_organization_path)
common_project_path = staticmethod(WebhooksClient.common_project_path)
parse_common_project_path = staticmethod(WebhooksClient.parse_common_project_path)
common_location_path = staticmethod(WebhooksClient.common_location_path)
parse_common_location_path = staticmethod(WebhooksClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
WebhooksAsyncClient: The constructed client.
"""
return WebhooksClient.from_service_account_info.__func__(WebhooksAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
WebhooksAsyncClient: The constructed client.
"""
return WebhooksClient.from_service_account_file.__func__(WebhooksAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> WebhooksTransport:
"""Returns the transport used by the client instance.
Returns:
WebhooksTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(type(WebhooksClient).get_transport_class, type(WebhooksClient))
def __init__(self, *,
credentials: ga_credentials.Credentials = None,
transport: Union[str, WebhooksTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the webhooks client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.WebhooksTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = WebhooksClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_webhooks(self,
request: webhook.ListWebhooksRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListWebhooksAsyncPager:
r"""Returns the list of all webhooks in the specified
agent.
Args:
request (:class:`google.cloud.dialogflowcx_v3beta1.types.ListWebhooksRequest`):
The request object. The request message for
[Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3beta1.Webhooks.ListWebhooks].
parent (:class:`str`):
Required. The agent to list all webhooks for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.services.webhooks.pagers.ListWebhooksAsyncPager:
The response message for
[Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3beta1.Webhooks.ListWebhooks].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = webhook.ListWebhooksRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_webhooks,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListWebhooksAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def get_webhook(self,
request: webhook.GetWebhookRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> webhook.Webhook:
r"""Retrieves the specified webhook.
Args:
request (:class:`google.cloud.dialogflowcx_v3beta1.types.GetWebhookRequest`):
The request object. The request message for
[Webhooks.GetWebhook][google.cloud.dialogflow.cx.v3beta1.Webhooks.GetWebhook].
name (:class:`str`):
Required. The name of the webhook. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/webhooks/<Webhook ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.Webhook:
Webhooks host the developer's
business logic. During a session,
webhooks allow the developer to use the
data extracted by Dialogflow's natural
language processing to generate dynamic
responses, validate collected data, or
trigger actions on the backend.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = webhook.GetWebhookRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_webhook,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def create_webhook(self,
request: gcdc_webhook.CreateWebhookRequest = None,
*,
parent: str = None,
webhook: gcdc_webhook.Webhook = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_webhook.Webhook:
r"""Creates a webhook in the specified agent.
Args:
request (:class:`google.cloud.dialogflowcx_v3beta1.types.CreateWebhookRequest`):
The request object. The request message for
[Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3beta1.Webhooks.CreateWebhook].
parent (:class:`str`):
Required. The agent to create a webhook for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
webhook (:class:`google.cloud.dialogflowcx_v3beta1.types.Webhook`):
Required. The webhook to create.
This corresponds to the ``webhook`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.Webhook:
Webhooks host the developer's
business logic. During a session,
webhooks allow the developer to use the
data extracted by Dialogflow's natural
language processing to generate dynamic
responses, validate collected data, or
trigger actions on the backend.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, webhook])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = gcdc_webhook.CreateWebhookRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if webhook is not None:
request.webhook = webhook
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_webhook,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def update_webhook(self,
request: gcdc_webhook.UpdateWebhookRequest = None,
*,
webhook: gcdc_webhook.Webhook = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_webhook.Webhook:
r"""Updates the specified webhook.
Args:
request (:class:`google.cloud.dialogflowcx_v3beta1.types.UpdateWebhookRequest`):
The request object. The request message for
[Webhooks.UpdateWebhook][google.cloud.dialogflow.cx.v3beta1.Webhooks.UpdateWebhook].
webhook (:class:`google.cloud.dialogflowcx_v3beta1.types.Webhook`):
Required. The webhook to update.
This corresponds to the ``webhook`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
The mask to control which fields get
updated. If the mask is not present, all
fields will be updated.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.Webhook:
Webhooks host the developer's
business logic. During a session,
webhooks allow the developer to use the
data extracted by Dialogflow's natural
language processing to generate dynamic
responses, validate collected data, or
trigger actions on the backend.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([webhook, update_mask])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = gcdc_webhook.UpdateWebhookRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if webhook is not None:
request.webhook = webhook
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_webhook,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("webhook.name", request.webhook.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_webhook(self,
request: webhook.DeleteWebhookRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified webhook.
Args:
request (:class:`google.cloud.dialogflowcx_v3beta1.types.DeleteWebhookRequest`):
The request object. The request message for
[Webhooks.DeleteWebhook][google.cloud.dialogflow.cx.v3beta1.Webhooks.DeleteWebhook].
name (:class:`str`):
Required. The name of the webhook to delete. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/webhooks/<Webhook ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = webhook.DeleteWebhookRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_webhook,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Rebind DEFAULT_CLIENT_INFO (overriding the one imported from
# .transports.base) so it carries the installed google-cloud-dialogflowcx
# version; fall back to a version-less ClientInfo when the distribution
# metadata is not installed (NOTE(review): presumably a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-dialogflowcx",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"WebhooksAsyncClient",
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
bb80ed73e3d7d90ea18dfe174b196ba572578e48 | bf2d87cc14f983a6c563ebe1bd49c48a7474ddff | /2018年力扣高频算法面试题汇总/完全平方数 my_dp.py | ec40fb66f459e16640f69cbea7ef8735b0aac1de | [] | no_license | iamkissg/leetcode | 6bd1c87c67ffc6f5d231cac3224c928e22f62af3 | 99a3abf1774933af73a8405f9b59e5e64906bca4 | refs/heads/master | 2020-05-05T11:21:05.900296 | 2019-10-27T10:51:51 | 2019-10-27T10:51:51 | 179,986,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,347 | py | from math import sqrt, floor
class Solution:
# 递归不可行, 因为包含一个 1, 在事先不知道四平方数定理的情况下, 最常的路径会导致栈被撑爆.
# def numSquares(self, n: int) -> int:
# if n in self.memo:
# return self.memo[n]
# result = min([self.numSquares(n-i**2) for i in range(1, floor(sqrt(n))+1)])+1
# self.memo[n] = result
# return result
# 将 memo 从 `def __init__(self)` 中取出来是关键, 从实例变量变成了类变量, 作弊啊
# 176 ms
memo = {0: 0, 1: 1}
def numSquares(self, n: int) -> int:
if n in self.memo:
return self.memo[n]
i = 1
while n not in self.memo:
i += 1
if i in self.memo:
continue
self.memo[i] = min((self.memo[i-j*j] for j in range(1, floor(sqrt(i))+1))) + 1
return self.memo[n]
if __name__ == "__main__":
sol = Solution()
print(sol.memo)
for i in [12,2,3,4,5,6,7,8,9,10,5373,5374]:
print(sol.numSquares(i))
print('='*80)
print(sol.numSquares(2))
print(sol.numSquares(8))
print(sol.numSquares(10))
print(sol.numSquares(11))
print(sol.numSquares(12))
print(sol.numSquares(13))
print(sol.numSquares(5673))
print(sol.numSquares(5674))
| [
"enginechen07@gmail.com"
] | enginechen07@gmail.com |
8d49ac34a3e825f622adeb42a001aa171bd8c13f | b9fd9ed02312be96e05ef23243c4dfac1392be08 | /tensorflow/contrib/py2tf/utils/multiple_dispatch_test.py | 8d89b6898a366fe90ee1d43a55d0a7f10690224b | [
"Apache-2.0"
] | permissive | RLeili/tensorflow | 9e5650b5d02771da94a345ceb97b4f3293638e1e | 42ee949d022d8665cf2e908e800f1ef1594c6abf | refs/heads/master | 2021-04-09T11:51:32.393739 | 2019-10-16T16:44:23 | 2019-10-16T16:44:23 | 125,318,700 | 0 | 0 | Apache-2.0 | 2018-03-15T05:50:05 | 2018-03-15T05:50:04 | null | UTF-8 | Python | false | false | 3,748 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multiple_dispatch."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.py2tf.utils import multiple_dispatch
from tensorflow.python.client.session import Session
from tensorflow.python.framework.constant_op import constant
from tensorflow.python.platform import test
class MultipleDispatchTest(test.TestCase):
  """Tests for multiple_dispatch's dual Python/TensorFlow implementations.

  Each operation is exercised twice: once with plain Python/NumPy values
  (the result is usable immediately) and once with TF tensors (the result
  is a graph op that must be evaluated inside a session).
  """

  def test_dynamic_is_python(self):
    # With NumPy arrays the results are plain booleans, asserted directly.
    a = np.eye(3)
    also_a = a
    not_actually_a = np.eye(3)  # equal contents, but a distinct object
    should_be_true1 = multiple_dispatch.dynamic_is(a, also_a)
    should_be_false1 = multiple_dispatch.dynamic_is_not(a, also_a)
    should_be_true2 = multiple_dispatch.dynamic_is_not(a, not_actually_a)
    should_be_false2 = multiple_dispatch.dynamic_is(a, not_actually_a)
    self.assertTrue(should_be_true1)
    self.assertTrue(should_be_true2)
    self.assertFalse(should_be_false1)
    self.assertFalse(should_be_false2)

  def test_dynamic_is_tf(self):
    # With tensors the results must be evaluated via eval() in a session.
    with Session().as_default():
      a = constant([2.0])
      also_a = a
      not_actually_a = constant([2.0])  # same value, different tensor object
      should_be_true1 = multiple_dispatch.dynamic_is(a, also_a)
      should_be_false1 = multiple_dispatch.dynamic_is_not(a, also_a)
      should_be_true2 = multiple_dispatch.dynamic_is_not(a, not_actually_a)
      should_be_false2 = multiple_dispatch.dynamic_is(a, not_actually_a)
      self.assertTrue(should_be_true1.eval())
      self.assertTrue(should_be_true2.eval())
      self.assertFalse(should_be_false1.eval())
      self.assertFalse(should_be_false2.eval())

  def test_run_cond_python(self):
    # A Python boolean predicate selects a branch eagerly.
    true_fn = lambda: 2.0
    false_fn = lambda: 3.0
    self.assertEqual(multiple_dispatch.run_cond(True, true_fn, false_fn), 2.0)
    self.assertEqual(multiple_dispatch.run_cond(False, true_fn, false_fn), 3.0)

  def test_run_cond_tf(self):
    # A tensor predicate produces a conditional op; evaluate with sess.run.
    true_fn = lambda: constant([2.0])
    false_fn = lambda: constant([3.0])
    with Session() as sess:
      out = multiple_dispatch.run_cond(constant(True), true_fn, false_fn)
      self.assertEqual(sess.run(out), 2.0)
      out = multiple_dispatch.run_cond(constant(False), true_fn, false_fn)
      self.assertEqual(sess.run(out), 3.0)

  def test_run_while_python(self):
    # Halve x (s=0.5) while it exceeds the threshold t: 3.0 -> 1.5 -> 0.75.
    cond_fn = lambda x, t, s: x > t
    body_fn = lambda x, t, s: (x * s, t, s)
    x, _, _ = multiple_dispatch.run_while(cond_fn, body_fn, [3.0, 1.0, 0.5])
    self.assertEqual(x, 0.75)
    # Condition is false up front (3.0 <= 4.0), so the body never runs.
    x, _, _ = multiple_dispatch.run_while(cond_fn, body_fn, [3.0, 4.0, 0.5])
    self.assertEqual(x, 3.0)

  def test_run_while_tf(self):
    # Same two loops, driven by a tensor state and evaluated in a session.
    cond_fn = lambda x, t, s: x > t
    body_fn = lambda x, t, s: (x * s, t, s)
    with Session() as sess:
      x, _, _ = multiple_dispatch.run_while(cond_fn, body_fn,
                                            [constant(3.0), 1.0, 0.5])
      self.assertEqual(sess.run(x), 0.75)
      x, _, _ = multiple_dispatch.run_while(cond_fn, body_fn,
                                            [constant(3.0), 4.0, 0.5])
      self.assertEqual(sess.run(x), 3.0)
if __name__ == '__main__':
  test.main()  # run the test suite when this module is executed directly
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
8bff32a3731de4f870e8a5a3d26bb063abc9e9ad | 453956e3a66b417a0c6f999951c44e474a81af7e | /19.05.2021/exc0/tests.py | 968d0baef2c3e2d47c85b11f324d27e2ce8fe288 | [] | no_license | conradylx/Python_Course | aa7db9671a30034fe8cf65d22304e76ef2b4c4ab | b8f813c077a61bd3321638f90633529fbda756f0 | refs/heads/main | 2023-06-02T07:24:57.073365 | 2021-06-13T17:41:59 | 2021-06-13T17:41:59 | 337,829,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 813 | py | import unittest
from exc0 import triangle, trapezoid
class FieldsTestCase(unittest.TestCase):
    """Unit tests for the `triangle` and `trapezoid` area helpers from exc0."""

    def setUp(self):
        # Shared fixture: bases a/b and height h used across the tests.
        self.a, self.b, self.h = 2, 3, 5

    def test_triangle_with_correct_result(self):
        """triangle(2, 5) should equal base * height / 2."""
        self.assertEqual(triangle(self.a, self.h), 5)

    def test_triangle_with_incorrect_values(self):
        """A non-numeric base must raise TypeError."""
        self.assertRaises(TypeError, triangle, "*", self.h)

    def test_trapezoid_with_correct_result(self):
        """trapezoid(2, 3, 5) should equal (a + b) / 2 * h."""
        self.assertEqual(trapezoid(self.a, self.b, self.h), 12.5)

    def test_trapezoid_with_incorrect_value(self):
        """A non-numeric side must raise TypeError."""
        self.assertRaises(TypeError, trapezoid, '**', self.b, self.h)

    def tearDown(self):
        # Matches the original fixture teardown (b is intentionally kept).
        del self.a, self.h
if __name__ == '__main__':
    unittest.main()  # run the test suite when this module is executed directly
| [
"50596942+conradylx@users.noreply.github.com"
] | 50596942+conradylx@users.noreply.github.com |
43b5830be6db5503e0c7a5b34a5a8d7940745656 | 22a5d684341cee8f1095c3fe193f01f40f8121db | /2021/Qualification/E. Cheating Detection/2021-q-e.py | 7b93ef36bb2a94a0202dad9af0142a53f0e34eaf | [] | no_license | andy1li/codejam | 161b1db6faab372a4c2c4ce5956942387c650bed | 3aa6ab1673064b8c80b5f56422bd496b372b30f3 | refs/heads/master | 2022-06-28T02:42:53.980149 | 2022-06-27T20:15:11 | 2022-06-27T20:15:11 | 53,395,936 | 6 | 4 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | # 2021 Qualification Round - E. Cheating Detection
# https://codingcompetitions.withgoogle.com/codejam/round/000000000043580a/00000000006d12d7
import numpy as np
#------------------------------------------------------------------------------#
def solve(data):
    """Return the 1-based index of the suspected cheater.

    ``data`` is a boolean matrix: data[player][question] is True when that
    player answered the question correctly.  The player whose set of
    correctly-answered questions has the widest spread of empirical
    difficulties is flagged.
    """
    # Fraction of players who answered each question correctly.
    per_question = np.mean(data, axis=0)
    # Std-dev of the difficulties of the questions each player got right.
    spreads = np.array([np.std(per_question[answers]) for answers in data])
    return np.argmax(spreads) + 1
#------------------------------------------------------------------------------#
num_cases = int(input())
input()  # second header value is unused
for case_no in range(1, num_cases + 1):
    # 100 players, one 0/1 answer string per player.
    answers = [[bool(int(ch)) for ch in input()] for _ in range(100)]
    print('Case #{}:'.format(case_no), solve(answers))
"li.chenxing@gmail.com"
] | li.chenxing@gmail.com |
68d8523fe078e7be65b1c937304f635861598508 | 3740de0d6e43ea140fc09ab314e4c492603ba185 | /scripts/sources/S_EvaluationCornishFisherLogN.py | ab21f423595458d1481180db8baab78c58386dad | [
"MIT"
] | permissive | s0ap/arpmRes | 29c60c65fd3e11be1cc31d46494e5b3ebf6e05ab | ddcc4de713b46e3e9dcb77cc08c502ce4df54f76 | refs/heads/master | 2022-02-16T05:01:22.118959 | 2019-08-20T16:45:02 | 2019-08-20T16:45:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,661 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.4'
# jupytext_version: 1.1.4
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# # S_EvaluationCornishFisherLogN [<img src="https://www.arpm.co/lab/icons/icon_permalink.png" width=30 height=30 style="display: inline;">](https://www.arpm.co/lab/redirect.php?code=S_EvaluationCornishFisherLogN&codeLang=Python)
# For details, see [here](https://www.arpm.co/lab/redirect.php?permalink=EBCornishFisherEvaluation).
# ## Prepare the environment
# +
import os.path as path
import sys
sys.path.append(path.abspath('../../functions-legacy'))
from numpy import array
import matplotlib.pyplot as plt
plt.style.use('seaborn')
from PortfolioMomentsLogN import PortfolioMomentsLogN
from CornishFisher import CornishFisher
# parameters
# NOTE(review): v_tnow appears to be current instrument values (2x1), with
# mu/sigma2 the log-normal model parameters and h the holdings -- confirm
# against PortfolioMomentsLogN's expected inputs.
v_tnow = array([[2], [1.5]])
mu = array([[0.5], [-0.3]])
sigma2 = array([[0.55, 0.82],
                [0.82, 1.05]])
h = array([[2], [1]])
c = 0.95  # confidence level for the quantile-based index
# -
# ## Computation of the expectation, the standard deviation and the skewness
# ## of the portfolio's P&L using function PortfolioMomentsLogN
muPL_h, sdPL_h, skPL_h = PortfolioMomentsLogN(v_tnow, h, mu, sigma2)
# ## Using the skewness computed at the previous step, compute the third central
# ## moment of the portfolio's P&L
# third central moment = skewness * sd^3; NOTE(review): the `@` operator
# assumes the moments are returned as arrays -- verify the output type of
# PortfolioMomentsLogN (a scalar would require `*` instead).
third_central = skPL_h@(sdPL_h) ** 3
# ## Computation of the Cornish-Fisher expansion of the quantile based-index
# ## with confidence c=0.95 using function CornishFisher
q = CornishFisher(muPL_h, sdPL_h, skPL_h, 1 - c)
| [
"dario.popadic@yahoo.com"
] | dario.popadic@yahoo.com |
d8e9124430e8d00df512614fb61d1275470e6dff | b8fe1fbe36bff3f05cceecbe9811699ba1fb6765 | /python_processThreading/asynico_oo/coroutines_asyncio.py | 4b9d73a2e3374a9d178965c6d8ce5adda1366e3a | [] | no_license | xiaotiankeyi/PythonBase | c2edfeac9fe8d116a68725a784bcb183b1308af9 | 8f1377eb03135e8ee9c047f6e7762a0d69601ca1 | refs/heads/master | 2023-03-16T19:43:45.589440 | 2023-03-13T06:05:53 | 2023-03-13T06:05:53 | 219,412,971 | 0 | 0 | null | 2022-02-27T18:57:30 | 2019-11-04T03:51:29 | JavaScript | UTF-8 | Python | false | false | 727 | py | # 概念:asyncio是3.4以后的协程模块,是python实现并发重要的包,使用事件循环驱动实现并发,实现异步io
import asyncio
async def aunt():
    """Print the aunt's line ten times, sleeping 1 s after each print so the
    event loop can interleave other coroutines."""
    i = 0
    while i < 10:
        print(f'{i}婶婶说,python是世界上最好的语言')
        await asyncio.sleep(1)  # yield control back to the event loop
        i += 1
async def uncle():
    """Print the uncle's line ten times, sleeping 0.5 s after each print so
    the event loop can interleave other coroutines."""
    i = 0
    while i < 10:
        print(f'{i}叔叔说,python是世界上最好的语言')
        await asyncio.sleep(0.5)  # yield control back to the event loop
        i += 1
if __name__ == "__main__":
    async def main():
        # Run both speakers concurrently; finishes when the slower one does.
        await asyncio.gather(aunt(), uncle())

    # asyncio.run() creates the event loop, drives the coroutine to
    # completion and always closes the loop (even on error), replacing the
    # deprecated get_event_loop()/run_until_complete()/close() sequence,
    # which also leaked the loop if an exception occurred before close().
    asyncio.run(main())
"laizhitian163@163.com"
] | laizhitian163@163.com |
29858121c108ccddea63d18579cac7770cfd723a | 31b3ac7cc2f0cf43a4979e53d43002a9c5fb2038 | /find the duplicate number.py | 9c5c5e678c6f0e93eadb36086de26bd63e1a9662 | [] | no_license | shreyansh-tyagi/leetcode-problem | ed31ada9608a1526efce6178b4fe3ee18da98902 | f8679a7b639f874a52cf9081b84e7c7abff1d100 | refs/heads/master | 2023-08-26T13:50:27.769753 | 2021-10-29T17:39:41 | 2021-10-29T17:39:41 | 378,711,844 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 968 | py | '''
Given an array of integers nums containing n + 1 integers where each integer is in the range [1, n] inclusive.
There is only one repeated number in nums, return this repeated number.
You must solve the problem without modifying the array nums and uses only constant extra space.
Example 1:
Input: nums = [1,3,4,2,2]
Output: 2
Example 2:
Input: nums = [3,1,3,4,2]
Output: 3
Example 3:
Input: nums = [1,1]
Output: 1
Example 4:
Input: nums = [1,1,2]
Output: 1
Constraints:
1 <= n <= 10^5
nums.length == n + 1
1 <= nums[i] <= n
All the integers in nums appear only once except for precisely one integer which appears two or more times.
Follow up:
How can we prove that at least one duplicate number must exist in nums?
Can you solve the problem in linear runtime complexity?
'''
class Solution:
    def findDuplicate(self, nums: List[int]) -> int:
        """Return the single repeated value in ``nums``.

        ``nums`` holds n + 1 integers drawn from 1..n, so exactly one value
        repeats.  Viewing i -> nums[i] as a linked list, the duplicate is
        the entrance of a cycle, which Floyd's tortoise-and-hare finds in
        O(n) time and O(1) extra space without modifying ``nums`` — exactly
        the stated constraints.  (The previous adjacent-pair scan assumed a
        sorted array and returned None for inputs such as [3,1,3,4,2].)
        """
        # Phase 1: pointers advance at different speeds until they meet
        # somewhere inside the cycle.
        slow = fast = nums[0]
        while True:
            slow = nums[slow]
            fast = nums[nums[fast]]
            if slow == fast:
                break
        # Phase 2: restart one pointer from the head; the next meeting
        # point is the cycle entrance, i.e. the duplicated value.
        slow = nums[0]
        while slow != fast:
            slow = nums[slow]
            fast = nums[fast]
        return slow
"sunnytyagi886@gmail.com"
] | sunnytyagi886@gmail.com |
9f4cfac14ac62d7112c411ced01e0372d6b107e1 | 37594c48dfb4c80b3c07a9dfb5a2eac8aa4b69f3 | /guitool/__PYQT__/QtCore.py | cde2345e15eae523efd9e94f5b138c454072c907 | [
"Apache-2.0"
] | permissive | SU-ECE-18-7/guitool | 45e0246feedeebb82950f166305f23dd308f5937 | 4d7e09e3318de92456912e84436d6ce8e1cf8e47 | refs/heads/master | 2021-06-01T09:51:56.452050 | 2016-06-24T02:28:20 | 2016-06-24T02:28:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | from __future__ import absolute_import, division, print_function
# flake8:noqa
# Wrapper around PyQt4/5
from PyQt4.QtCore import *
| [
"crallj@rpi.edu"
] | crallj@rpi.edu |
8a42862ec234fb7a7fa0b4d5e8ff400e32f4f800 | dd87194dee537c2291cf0c0de809e2b1bf81b5b2 | /k8sclient/models/v1alpha1_certificate_signing_request_spec.py | a896c60a5b40a38a2b092ea2a8d5c370cc10488a | [
"Apache-2.0"
] | permissive | Arvinhub/client-python | 3ea52640ab02e4bf5677d0fd54fdb4503ecb7768 | d67df30f635231d68dc4c20b9b7e234c616c1e6a | refs/heads/master | 2023-08-31T03:25:57.823810 | 2016-11-02T22:44:36 | 2016-11-02T22:44:36 | 73,865,578 | 1 | 0 | Apache-2.0 | 2018-10-10T12:16:45 | 2016-11-15T23:47:17 | Python | UTF-8 | Python | false | false | 5,867 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: unversioned
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class V1alpha1CertificateSigningRequestSpec(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, groups=None, request=None, uid=None, username=None):
"""
V1alpha1CertificateSigningRequestSpec - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'groups': 'list[str]',
'request': 'str',
'uid': 'str',
'username': 'str'
}
self.attribute_map = {
'groups': 'groups',
'request': 'request',
'uid': 'uid',
'username': 'username'
}
self._groups = groups
self._request = request
self._uid = uid
self._username = username
@property
def groups(self):
"""
Gets the groups of this V1alpha1CertificateSigningRequestSpec.
:return: The groups of this V1alpha1CertificateSigningRequestSpec.
:rtype: list[str]
"""
return self._groups
@groups.setter
def groups(self, groups):
"""
Sets the groups of this V1alpha1CertificateSigningRequestSpec.
:param groups: The groups of this V1alpha1CertificateSigningRequestSpec.
:type: list[str]
"""
self._groups = groups
@property
def request(self):
"""
Gets the request of this V1alpha1CertificateSigningRequestSpec.
Base64-encoded PKCS#10 CSR data
:return: The request of this V1alpha1CertificateSigningRequestSpec.
:rtype: str
"""
return self._request
@request.setter
def request(self, request):
"""
Sets the request of this V1alpha1CertificateSigningRequestSpec.
Base64-encoded PKCS#10 CSR data
:param request: The request of this V1alpha1CertificateSigningRequestSpec.
:type: str
"""
if request is None:
raise ValueError("Invalid value for `request`, must not be `None`")
self._request = request
@property
def uid(self):
"""
Gets the uid of this V1alpha1CertificateSigningRequestSpec.
:return: The uid of this V1alpha1CertificateSigningRequestSpec.
:rtype: str
"""
return self._uid
@uid.setter
def uid(self, uid):
"""
Sets the uid of this V1alpha1CertificateSigningRequestSpec.
:param uid: The uid of this V1alpha1CertificateSigningRequestSpec.
:type: str
"""
self._uid = uid
@property
def username(self):
"""
Gets the username of this V1alpha1CertificateSigningRequestSpec.
Information about the requesting user (if relevant) See user.Info interface for details
:return: The username of this V1alpha1CertificateSigningRequestSpec.
:rtype: str
"""
return self._username
@username.setter
def username(self, username):
"""
Sets the username of this V1alpha1CertificateSigningRequestSpec.
Information about the requesting user (if relevant) See user.Info interface for details
:param username: The username of this V1alpha1CertificateSigningRequestSpec.
:type: str
"""
self._username = username
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"mehdy@google.com"
] | mehdy@google.com |
2eae9fbaf32e79fe20d43bf241575f4ac6f685d9 | 5e67301779cc6f685018e4db6f2605a306d53be8 | /prognoz/migrations/0026_settlements_description.py | c11b2b4a7dd88b430f10512ad51aa6681f4d1b23 | [] | no_license | avpakh/recon | 905e93374ec73501b5002bf0ef823b00715d7da8 | 4a99c0c2d2644f5847ebdf9bdfd03217cd0269f3 | refs/heads/master | 2021-01-09T06:11:37.389010 | 2017-02-04T19:32:34 | 2017-02-04T19:32:34 | 80,936,459 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 580 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('prognoz', '0025_auto_20151110_0727'),
]
operations = [
migrations.AddField(
model_name='settlements',
name='description',
field=models.CharField(default=1, max_length=60, verbose_name=b'\xd0\x9e\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5 \xd1\x80\xd0\xb8\xd1\x81\xd0\xba\xd0\xb0'),
preserve_default=False,
),
]
| [
"aliaksandr.pakhomau@gmail.com"
] | aliaksandr.pakhomau@gmail.com |
9fffd6c798905d34cf98c2c3c44ace889d640fe0 | 88fcb04d4bafb1745ae4b86807b96198d06d6709 | /bigml/constants.py | d68049527e070f3acc840db04fafe34c57d8c299 | [
"Apache-2.0"
] | permissive | gnib/python | dad9501460a866a9cfa23dfe581d89bd03ca1a5d | 185fd030706992766f54cc7ec5e914df57f5a29f | refs/heads/master | 2021-01-03T13:09:10.302254 | 2017-07-28T22:56:57 | 2017-07-28T22:56:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,711 | py | # -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright 2015-2017 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common auxiliary constants for all resources
"""
import re
# Basic resources
SOURCE_PATH = 'source'
DATASET_PATH = 'dataset'
MODEL_PATH = 'model'
PREDICTION_PATH = 'prediction'
EVALUATION_PATH = 'evaluation'
ENSEMBLE_PATH = 'ensemble'
BATCH_PREDICTION_PATH = 'batchprediction'
CLUSTER_PATH = 'cluster'
CENTROID_PATH = 'centroid'
BATCH_CENTROID_PATH = 'batchcentroid'
ANOMALY_PATH = 'anomaly'
ANOMALY_SCORE_PATH = 'anomalyscore'
BATCH_ANOMALY_SCORE_PATH = 'batchanomalyscore'
PROJECT_PATH = 'project'
SAMPLE_PATH = 'sample'
CORRELATION_PATH = 'correlation'
STATISTICAL_TEST_PATH = 'statisticaltest'
LOGISTIC_REGRESSION_PATH = 'logisticregression'
ASSOCIATION_PATH = 'association'
ASSOCIATION_SET_PATH = 'associationset'
TOPIC_MODEL_PATH = 'topicmodel'
TOPIC_DISTRIBUTION_PATH = 'topicdistribution'
BATCH_TOPIC_DISTRIBUTION_PATH = 'batchtopicdistribution'
TIME_SERIES_PATH = 'timeseries'
FORECAST_PATH = 'forecast'
SCRIPT_PATH = 'script'
EXECUTION_PATH = 'execution'
LIBRARY_PATH = 'library'
# Resource Ids patterns
ID_PATTERN = '[a-f0-9]{24}'
SHARED_PATTERN = '[a-zA-Z0-9]{24,30}'
SOURCE_RE = re.compile(r'^%s/%s$' % (SOURCE_PATH, ID_PATTERN))
DATASET_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
DATASET_PATH, ID_PATTERN, DATASET_PATH, SHARED_PATTERN))
MODEL_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
MODEL_PATH, ID_PATTERN, MODEL_PATH, SHARED_PATTERN))
PREDICTION_RE = re.compile(r'^%s/%s$' % (PREDICTION_PATH, ID_PATTERN))
EVALUATION_RE = re.compile(r'^%s/%s$' % (EVALUATION_PATH, ID_PATTERN))
ENSEMBLE_RE = re.compile(r'^%s/%s$' % (ENSEMBLE_PATH, ID_PATTERN))
BATCH_PREDICTION_RE = re.compile(r'^%s/%s$' % (BATCH_PREDICTION_PATH,
ID_PATTERN))
CLUSTER_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
CLUSTER_PATH, ID_PATTERN, CLUSTER_PATH, SHARED_PATTERN))
CENTROID_RE = re.compile(r'^%s/%s$' % (CENTROID_PATH, ID_PATTERN))
BATCH_CENTROID_RE = re.compile(r'^%s/%s$' % (BATCH_CENTROID_PATH,
ID_PATTERN))
ANOMALY_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
ANOMALY_PATH, ID_PATTERN, ANOMALY_PATH, SHARED_PATTERN))
ANOMALY_SCORE_RE = re.compile(r'^%s/%s$' % (ANOMALY_SCORE_PATH, ID_PATTERN))
BATCH_ANOMALY_SCORE_RE = re.compile(r'^%s/%s$' % (BATCH_ANOMALY_SCORE_PATH,
ID_PATTERN))
PROJECT_RE = re.compile(r'^%s/%s$' % (PROJECT_PATH, ID_PATTERN))
SAMPLE_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % (
SAMPLE_PATH, ID_PATTERN, SAMPLE_PATH, SHARED_PATTERN))
CORRELATION_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % (
CORRELATION_PATH, ID_PATTERN, CORRELATION_PATH, SHARED_PATTERN))
STATISTICAL_TEST_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(STATISTICAL_TEST_PATH, ID_PATTERN, STATISTICAL_TEST_PATH, SHARED_PATTERN))
LOGISTIC_REGRESSION_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(LOGISTIC_REGRESSION_PATH, ID_PATTERN,
LOGISTIC_REGRESSION_PATH, SHARED_PATTERN))
ASSOCIATION_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(ASSOCIATION_PATH, ID_PATTERN, ASSOCIATION_PATH, SHARED_PATTERN))
ASSOCIATION_SET_RE = re.compile(r'^%s/%s$' % \
(ASSOCIATION_SET_PATH, ID_PATTERN))
TOPIC_MODEL_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
TOPIC_MODEL_PATH, ID_PATTERN, TOPIC_MODEL_PATH, SHARED_PATTERN))
TOPIC_DISTRIBUTION_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
TOPIC_DISTRIBUTION_PATH, ID_PATTERN, TOPIC_DISTRIBUTION_PATH,
SHARED_PATTERN))
BATCH_TOPIC_DISTRIBUTION_RE = re.compile(r'^(public/)?%s/%s$|^shared/%s/%s$' % (
BATCH_TOPIC_DISTRIBUTION_PATH, ID_PATTERN, BATCH_TOPIC_DISTRIBUTION_PATH,
SHARED_PATTERN))
TIME_SERIES_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(TIME_SERIES_PATH, ID_PATTERN, TIME_SERIES_PATH, SHARED_PATTERN))
FORECAST_RE = re.compile(r'^%s/%s$' % \
(FORECAST_PATH, ID_PATTERN))
SCRIPT_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(SCRIPT_PATH, ID_PATTERN, SCRIPT_PATH, SHARED_PATTERN))
EXECUTION_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(EXECUTION_PATH, ID_PATTERN, EXECUTION_PATH, SHARED_PATTERN))
LIBRARY_RE = re.compile(r'^%s/%s|^shared/%s/%s$' % \
(LIBRARY_PATH, ID_PATTERN, LIBRARY_PATH, SHARED_PATTERN))
RESOURCE_RE = {
SOURCE_PATH: SOURCE_RE,
DATASET_PATH: DATASET_RE,
MODEL_PATH: MODEL_RE,
PREDICTION_PATH: PREDICTION_RE,
EVALUATION_PATH: EVALUATION_RE,
ENSEMBLE_PATH: ENSEMBLE_RE,
BATCH_PREDICTION_PATH: BATCH_PREDICTION_RE,
CLUSTER_PATH: CLUSTER_RE,
CENTROID_PATH: CENTROID_RE,
BATCH_CENTROID_PATH: BATCH_CENTROID_RE,
ANOMALY_PATH: ANOMALY_RE,
ANOMALY_SCORE_PATH: ANOMALY_SCORE_RE,
BATCH_ANOMALY_SCORE_PATH: BATCH_ANOMALY_SCORE_RE,
PROJECT_PATH: PROJECT_RE,
SAMPLE_PATH: SAMPLE_RE,
CORRELATION_PATH: CORRELATION_RE,
STATISTICAL_TEST_PATH: STATISTICAL_TEST_RE,
LOGISTIC_REGRESSION_PATH: LOGISTIC_REGRESSION_RE,
ASSOCIATION_PATH: ASSOCIATION_RE,
ASSOCIATION_SET_PATH: ASSOCIATION_SET_RE,
TOPIC_MODEL_PATH: TOPIC_MODEL_RE,
TOPIC_DISTRIBUTION_PATH: TOPIC_DISTRIBUTION_RE,
BATCH_TOPIC_DISTRIBUTION_PATH: BATCH_TOPIC_DISTRIBUTION_RE,
TIME_SERIES_PATH: TIME_SERIES_RE,
FORECAST_PATH: FORECAST_RE,
SCRIPT_PATH: SCRIPT_RE,
EXECUTION_PATH: EXECUTION_RE,
LIBRARY_PATH: LIBRARY_RE}
RENAMED_RESOURCES = {
BATCH_PREDICTION_PATH: 'batch_prediction',
BATCH_CENTROID_PATH: 'batch_centroid',
ANOMALY_SCORE_PATH: 'anomaly_score',
BATCH_ANOMALY_SCORE_PATH: 'batch_anomaly_score',
STATISTICAL_TEST_PATH: 'statistical_test',
LOGISTIC_REGRESSION_PATH: 'logistic_regression',
ASSOCIATION_SET_PATH: 'association_set',
TOPIC_MODEL_PATH: 'topic_model',
TOPIC_DISTRIBUTION_PATH: 'topic_distribution',
BATCH_TOPIC_DISTRIBUTION_PATH: 'batch_topic_distribution',
TIME_SERIES_PATH: 'time_series'
}
# Resource status codes
WAITING = 0
QUEUED = 1
STARTED = 2
IN_PROGRESS = 3
SUMMARIZED = 4
FINISHED = 5
UPLOADING = 6
FAULTY = -1
UNKNOWN = -2
RUNNABLE = -3
# Minimum query string to get model fields
TINY_RESOURCE = "full=false"
| [
"merce@bigml.com"
] | merce@bigml.com |
1b037fe0c41ff59b93d4a24f7f450057b1cf6e7a | 1ef164682fee7298732bce1037d91d895c108a02 | /testapp/urls.py | 9aede7bd9c0113635a94aeabd1f944180ca31580 | [] | no_license | qwertypool/Django-location-locator | 3ecf45a9bcc42fcbf587ee3ba9ff8637492c3460 | e162fe61715b4d954a25a8af9592b3d64a357818 | refs/heads/master | 2022-09-29T17:44:03.260904 | 2020-06-10T08:34:13 | 2020-06-10T08:34:13 | 271,224,122 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | from django.urls import path
from . import views
urlpatterns = [
path('',views.location_generator,name="ipstack"),
] | [
"deepapandey364@gmail.com"
] | deepapandey364@gmail.com |
d9fc547ef77070538f454815fa5e74d3c62c1312 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03611/s329630070.py | 2da7de28a0f7899a21bdeddfa3f4520533bf8ad0 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | import sys
from collections import Counter
read = sys.stdin.read
readline = sys.stdin.readline
readlines = sys.stdin.readlines
sys.setrecursionlimit(10 ** 9)
INF = 1 << 60
MOD = 1000000007
def main():
N, *A = map(int, read().split())
counter = Counter()
for a in A:
counter[a - 1] += 1
counter[a] += 1
counter[a + 1] += 1
ans = max(counter.values())
print(ans)
return
if __name__ == '__main__':
main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
e9165487a666303d57c21ab719a94938efc5b12d | 63d37b990c194a68fbb8d2d288de5faae374ed26 | /main/migrations/0010_photo_isavatar.py | c396985b39a95cdfa776c6e3981996e77b117cce | [] | no_license | Dostoyewski/MH_Back | bf16cd83ff5103e65a5f3fe7d866a2b6dbe66624 | 6de68e1a14643a23e268f1c313224cf7bea89c75 | refs/heads/master | 2022-03-30T18:04:51.340938 | 2020-04-05T08:06:04 | 2020-04-05T08:06:04 | 251,396,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # Generated by Django 2.2.1 on 2020-04-04 19:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0009_hero_stage'),
]
operations = [
migrations.AddField(
model_name='photo',
name='isAvatar',
field=models.BooleanField(default=False),
),
]
| [
"dostoyewski@yandex.ru"
] | dostoyewski@yandex.ru |
04cc8dff2a874e4be609a6ed7784fee98d47ec7d | ed850eb63494b61e84ff81e35eaf3ed6b96d1208 | /autobahn/autobahn/wamp/tests/test_uri_pattern.py | 1d1098b4949ace709dea7e65a6d283aa0b65a84c | [
"Apache-2.0"
] | permissive | Shareed2k/AutobahnPython | 4e501fce496b7cd54529d1c181ee9d6b7d6be438 | 530bab04ae420db566a636b4f73e6cd2210b0c90 | refs/heads/master | 2021-01-20T16:35:02.773818 | 2014-04-01T10:40:35 | 2014-04-01T10:40:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,049 | py | ###############################################################################
##
## Copyright (C) 2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
from twisted.trial import unittest
#import unittest
#from autobahn import wamp2 as wamp
from autobahn import wamp
from autobahn.wamp.uri import Pattern
class TestUris(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_invalid_uris(self):
for u in ["",
"123",
"com.myapp.<product:foo>.update",
"com.myapp.<123:int>.update",
"com.myapp.<:product>.update",
"com.myapp.<product:>.update",
"com.myapp.<int:>.update",
]:
self.assertRaises(Exception, Pattern, u, Pattern.URI_TARGET_ENDPOINT)
def test_valid_uris(self):
for u in ["com.myapp.proc1",
"com.myapp.<product:int>.update",
]:
p = Pattern(u, Pattern.URI_TARGET_ENDPOINT)
self.assertIsInstance(p, Pattern)
def test_parse_uris(self):
tests = [
("com.myapp.<product:int>.update", [
("com.myapp.0.update", {'product': 0}),
("com.myapp.123456.update", {'product': 123456}),
("com.myapp.aaa.update", None),
("com.myapp..update", None),
("com.myapp.0.delete", None),
]
),
("com.myapp.<product:string>.update", [
("com.myapp.box.update", {'product': 'box'}),
("com.myapp.123456.update", {'product': '123456'}),
("com.myapp..update", None),
]
)
]
for test in tests:
pat = Pattern(test[0], Pattern.URI_TARGET_ENDPOINT)
for ptest in test[1]:
uri = ptest[0]
kwargs_should = ptest[1]
if kwargs_should is not None:
args_is, kwargs_is = pat.match(uri)
self.assertEqual(kwargs_is, kwargs_should)
else:
self.assertRaises(Exception, pat.match, uri)
class TestDecorators(unittest.TestCase):
def test_decorate_endpoint(self):
@wamp.procedure("com.calculator.square")
def square(x):
pass
self.assertTrue(hasattr(square, '_wampuris'))
self.assertTrue(type(square._wampuris) == list)
self.assertEqual(len(square._wampuris), 1)
self.assertIsInstance(square._wampuris[0], Pattern)
self.assertTrue(square._wampuris[0].is_endpoint())
self.assertFalse(square._wampuris[0].is_handler())
self.assertFalse(square._wampuris[0].is_exception())
self.assertEqual(square._wampuris[0].uri(), "com.calculator.square")
self.assertEqual(square._wampuris[0]._type, Pattern.URI_TYPE_EXACT)
@wamp.procedure("com.myapp.product.<product:int>.update")
def update_product(product = None, label = None):
pass
self.assertTrue(hasattr(update_product, '_wampuris'))
self.assertTrue(type(update_product._wampuris) == list)
self.assertEqual(len(update_product._wampuris), 1)
self.assertIsInstance(update_product._wampuris[0], Pattern)
self.assertTrue(update_product._wampuris[0].is_endpoint())
self.assertFalse(update_product._wampuris[0].is_handler())
self.assertFalse(update_product._wampuris[0].is_exception())
self.assertEqual(update_product._wampuris[0].uri(), "com.myapp.product.<product:int>.update")
self.assertEqual(update_product._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD)
@wamp.procedure("com.myapp.<category:string>.<id:int>.update")
def update(category = None, id = None):
pass
self.assertTrue(hasattr(update, '_wampuris'))
self.assertTrue(type(update._wampuris) == list)
self.assertEqual(len(update._wampuris), 1)
self.assertIsInstance(update._wampuris[0], Pattern)
self.assertTrue(update._wampuris[0].is_endpoint())
self.assertFalse(update._wampuris[0].is_handler())
self.assertFalse(update._wampuris[0].is_exception())
self.assertEqual(update._wampuris[0].uri(), "com.myapp.<category:string>.<id:int>.update")
self.assertEqual(update._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD)
def test_decorate_handler(self):
@wamp.topic("com.myapp.on_shutdown")
def on_shutdown():
pass
self.assertTrue(hasattr(on_shutdown, '_wampuris'))
self.assertTrue(type(on_shutdown._wampuris) == list)
self.assertEqual(len(on_shutdown._wampuris), 1)
self.assertIsInstance(on_shutdown._wampuris[0], Pattern)
self.assertFalse(on_shutdown._wampuris[0].is_endpoint())
self.assertTrue(on_shutdown._wampuris[0].is_handler())
self.assertFalse(on_shutdown._wampuris[0].is_exception())
self.assertEqual(on_shutdown._wampuris[0].uri(), "com.myapp.on_shutdown")
self.assertEqual(on_shutdown._wampuris[0]._type, Pattern.URI_TYPE_EXACT)
@wamp.topic("com.myapp.product.<product:int>.on_update")
def on_product_update(product = None, label = None):
pass
self.assertTrue(hasattr(on_product_update, '_wampuris'))
self.assertTrue(type(on_product_update._wampuris) == list)
self.assertEqual(len(on_product_update._wampuris), 1)
self.assertIsInstance(on_product_update._wampuris[0], Pattern)
self.assertFalse(on_product_update._wampuris[0].is_endpoint())
self.assertTrue(on_product_update._wampuris[0].is_handler())
self.assertFalse(on_product_update._wampuris[0].is_exception())
self.assertEqual(on_product_update._wampuris[0].uri(), "com.myapp.product.<product:int>.on_update")
self.assertEqual(on_product_update._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD)
@wamp.topic("com.myapp.<category:string>.<id:int>.on_update")
def on_update(category = None, id = None, label = None):
pass
self.assertTrue(hasattr(on_update, '_wampuris'))
self.assertTrue(type(on_update._wampuris) == list)
self.assertEqual(len(on_update._wampuris), 1)
self.assertIsInstance(on_update._wampuris[0], Pattern)
self.assertFalse(on_update._wampuris[0].is_endpoint())
self.assertTrue(on_update._wampuris[0].is_handler())
self.assertFalse(on_update._wampuris[0].is_exception())
self.assertEqual(on_update._wampuris[0].uri(), "com.myapp.<category:string>.<id:int>.on_update")
self.assertEqual(on_update._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD)
def test_decorate_exception(self):
@wamp.error("com.myapp.error")
class AppError(Exception):
pass
self.assertTrue(hasattr(AppError, '_wampuris'))
self.assertTrue(type(AppError._wampuris) == list)
self.assertEqual(len(AppError._wampuris), 1)
self.assertIsInstance(AppError._wampuris[0], Pattern)
self.assertFalse(AppError._wampuris[0].is_endpoint())
self.assertFalse(AppError._wampuris[0].is_handler())
self.assertTrue(AppError._wampuris[0].is_exception())
self.assertEqual(AppError._wampuris[0].uri(), "com.myapp.error")
self.assertEqual(AppError._wampuris[0]._type, Pattern.URI_TYPE_EXACT)
@wamp.error("com.myapp.product.<product:int>.product_inactive")
class ProductInactiveError(Exception):
pass
self.assertTrue(hasattr(ProductInactiveError, '_wampuris'))
self.assertTrue(type(ProductInactiveError._wampuris) == list)
self.assertEqual(len(ProductInactiveError._wampuris), 1)
self.assertIsInstance(ProductInactiveError._wampuris[0], Pattern)
self.assertFalse(ProductInactiveError._wampuris[0].is_endpoint())
self.assertFalse(ProductInactiveError._wampuris[0].is_handler())
self.assertTrue(ProductInactiveError._wampuris[0].is_exception())
self.assertEqual(ProductInactiveError._wampuris[0].uri(), "com.myapp.product.<product:int>.product_inactive")
self.assertEqual(ProductInactiveError._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD)
@wamp.error("com.myapp.<category:string>.<product:int>.inactive")
class ObjectInactiveError(Exception):
pass
self.assertTrue(hasattr(ObjectInactiveError, '_wampuris'))
self.assertTrue(type(ObjectInactiveError._wampuris) == list)
self.assertEqual(len(ObjectInactiveError._wampuris), 1)
self.assertIsInstance(ObjectInactiveError._wampuris[0], Pattern)
self.assertFalse(ObjectInactiveError._wampuris[0].is_endpoint())
self.assertFalse(ObjectInactiveError._wampuris[0].is_handler())
self.assertTrue(ObjectInactiveError._wampuris[0].is_exception())
self.assertEqual(ObjectInactiveError._wampuris[0].uri(), "com.myapp.<category:string>.<product:int>.inactive")
self.assertEqual(ObjectInactiveError._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD)
def test_match_decorated_endpoint(self):
@wamp.procedure("com.calculator.square")
def square(x):
return x
args, kwargs = square._wampuris[0].match("com.calculator.square")
self.assertEqual(square(666, **kwargs), 666)
@wamp.procedure("com.myapp.product.<product:int>.update")
def update_product(product = None, label = None):
return product, label
args, kwargs = update_product._wampuris[0].match("com.myapp.product.123456.update")
kwargs['label'] = "foobar"
self.assertEqual(update_product(**kwargs), (123456, "foobar"))
@wamp.procedure("com.myapp.<category:string>.<id:int>.update")
def update(category = None, id = None, label = None):
return category, id, label
args, kwargs = update._wampuris[0].match("com.myapp.product.123456.update")
kwargs['label'] = "foobar"
self.assertEqual(update(**kwargs), ("product", 123456, "foobar"))
def test_match_decorated_handler(self):
@wamp.topic("com.myapp.on_shutdown")
def on_shutdown():
pass
args, kwargs = on_shutdown._wampuris[0].match("com.myapp.on_shutdown")
self.assertEqual(on_shutdown(**kwargs), None)
@wamp.topic("com.myapp.product.<product:int>.on_update")
def on_product_update(product = None, label = None):
return product, label
args, kwargs = on_product_update._wampuris[0].match("com.myapp.product.123456.on_update")
kwargs['label'] = "foobar"
self.assertEqual(on_product_update(**kwargs), (123456, "foobar"))
@wamp.topic("com.myapp.<category:string>.<id:int>.on_update")
def on_update(category = None, id = None, label = None):
return category, id, label
args, kwargs = on_update._wampuris[0].match("com.myapp.product.123456.on_update")
kwargs['label'] = "foobar"
self.assertEqual(on_update(**kwargs), ("product", 123456, "foobar"))
def test_match_decorated_exception(self):
@wamp.error("com.myapp.error")
class AppError(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.args == other.args
args, kwargs = AppError._wampuris[0].match("com.myapp.error")
self.assertEqual(AppError("fuck", **kwargs), AppError("fuck"))
@wamp.error("com.myapp.product.<product:int>.product_inactive")
class ProductInactiveError(Exception):
def __init__(self, msg, product = None):
Exception.__init__(self, msg)
self.product = product
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.args == other.args and \
self.product == other.product
args, kwargs = ProductInactiveError._wampuris[0].match("com.myapp.product.123456.product_inactive")
self.assertEqual(ProductInactiveError("fuck", **kwargs), ProductInactiveError("fuck", 123456))
@wamp.error("com.myapp.<category:string>.<product:int>.inactive")
class ObjectInactiveError(Exception):
def __init__(self, msg, category = None, product = None):
Exception.__init__(self, msg)
self.category = category
self.product = product
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.args == other.args and \
self.category == other.category and \
self.product == other.product
args, kwargs = ObjectInactiveError._wampuris[0].match("com.myapp.product.123456.inactive")
self.assertEqual(ObjectInactiveError("fuck", **kwargs), ObjectInactiveError("fuck", "product", 123456))
class KwException(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args)
self.kwargs = kwargs
# what if the WAMP error message received
# contains args/kwargs that cannot be
# consumed by the constructor of the exception
# class defined for the WAMP error URI?
# 1. we can bail out (but we are already signaling an error)
# 2. we can require a generic constructor
# 3. we can map only unconsumed args/kwargs to generic attributes
# 4. we can silently drop unconsumed args/kwargs
def getargs(fun):
try:
argspec = inspect.getargspec(fun)
except:
if fun == Exception.__init__:
# `inspect.getargspec(Exception.__init__)` does work on PyPy, but not
# on CPython, since `Exception.__init__` is C code in CPython that
# cannot be reflected upon.
argspec = inspect.ArgSpec(args = ['self'], varargs = 'args', keywords = None, defaults = None)
else:
raise Exception("could not inspect function {}".format(fun))
args = argspec.args[:-len(argspec.defaults)]
kwargs = argspec.args[-len(argspec.defaults):]
return args, kwargs, argspec.varargs, argspec.keywords
class MockSession:
def __init__(self):
self._ecls_to_uri_pat = {}
self._uri_to_ecls = {}
def define(self, exception, error = None):
if error is None:
assert(hasattr(exception, '_wampuris'))
self._ecls_to_uri_pat[exception] = exception._wampuris
self._uri_to_ecls[exception._wampuris[0].uri()] = exception
else:
assert(not hasattr(exception, '_wampuris'))
self._ecls_to_uri_pat[exception] = [Pattern(error, Pattern.URI_TARGET_HANDLER)]
self._uri_to_ecls[error] = exception
def map_error(self, error, args = [], kwargs = {}):
# FIXME:
# 1. map to ecls based on error URI wildcard/prefix
# 2. extract additional args/kwargs from error URI
if self._uri_to_ecls.has_key(error):
ecls = self._uri_to_ecls[error]
try:
## the following might fail, eg. TypeError when
## signature of exception constructor is incompatible
## with args/kwargs or when the exception constructor raises
if kwargs:
if args:
exc = ecls(*args, **kwargs)
else:
exc = ecls(**kwargs)
else:
if args:
exc = ecls(*args)
else:
exc = ecls()
except Exception as e:
## FIXME: log e
exc = KwException(error, *args, **kwargs)
else:
## this never fails
exc = KwException(error, *args, **kwargs)
return exc
class TestDecoratorsAdvanced(unittest.TestCase):
def test_decorate_exception_non_exception(self):
def test():
@wamp.error("com.test.error")
class Foo:
pass
self.assertRaises(Exception, test)
def test_decorate_endpoint_multiple(self):
@wamp.procedure("com.oldapp.oldproc")
@wamp.procedure("com.calculator.square")
def square(x):
pass
self.assertTrue(hasattr(square, '_wampuris'))
self.assertTrue(type(square._wampuris) == list)
self.assertEqual(len(square._wampuris), 2)
for i in range(2):
self.assertIsInstance(square._wampuris[i], Pattern)
self.assertTrue(square._wampuris[i].is_endpoint())
self.assertFalse(square._wampuris[i].is_handler())
self.assertFalse(square._wampuris[i].is_exception())
self.assertEqual(square._wampuris[i]._type, Pattern.URI_TYPE_EXACT)
self.assertEqual(square._wampuris[0].uri(), "com.calculator.square")
self.assertEqual(square._wampuris[1].uri(), "com.oldapp.oldproc")
def test_marshal_decorated_exception(self):
@wamp.error("com.myapp.error")
class AppError(Exception):
pass
try:
raise AppError("fuck")
except Exception as e:
self.assertEqual(e._wampuris[0].uri(), "com.myapp.error")
@wamp.error("com.myapp.product.<product:int>.product_inactive")
class ProductInactiveError(Exception):
def __init__(self, msg, product = None):
Exception.__init__(self, msg)
self.product = product
try:
raise ProductInactiveError("fuck", 123456)
except Exception as e:
self.assertEqual(e._wampuris[0].uri(), "com.myapp.product.<product:int>.product_inactive")
class AppErrorUndecorated(Exception):
pass
session = MockSession()
session.define(AppError)
def test_define_exception_undecorated(self):
session = MockSession()
class AppError(Exception):
pass
## defining an undecorated exception requires
## an URI to be provided
self.assertRaises(Exception, session.define, AppError)
session.define(AppError, "com.myapp.error")
exc = session.map_error("com.myapp.error")
self.assertIsInstance(exc, AppError)
def test_define_exception_decorated(self):
session = MockSession()
@wamp.error("com.myapp.error")
class AppError(Exception):
pass
## when defining a decorated exception
## an URI must not be provided
self.assertRaises(Exception, session.define, AppError, "com.myapp.error")
session.define(AppError)
exc = session.map_error("com.myapp.error")
self.assertIsInstance(exc, AppError)
def test_map_exception_undefined(self):
session = MockSession()
exc = session.map_error("com.myapp.error")
self.assertIsInstance(exc, Exception)
def test_map_exception_args(self):
session = MockSession()
@wamp.error("com.myapp.error")
class AppError(Exception):
pass
@wamp.error("com.myapp.error.product_inactive")
class ProductInactiveError(Exception):
def __init__(self, product = None):
self.product = product
## define exceptions in mock session
session.define(AppError)
session.define(ProductInactiveError)
for test in [
#("com.myapp.foo.error", [], {}, KwException),
("com.myapp.error", [], {}, AppError),
("com.myapp.error", ["you are doing it wrong"], {}, AppError),
("com.myapp.error", ["you are doing it wrong", 1, 2, 3], {}, AppError),
("com.myapp.error.product_inactive", [], {}, ProductInactiveError),
("com.myapp.error.product_inactive", [], {"product": 123456}, ProductInactiveError),
]:
error, args, kwargs, ecls = test
exc = session.map_error(error, args, kwargs)
self.assertIsInstance(exc, ecls)
self.assertEqual(list(exc.args), args)
if __name__ == '__main__':
unittest.main()
| [
"tobias.oberstein@tavendo.de"
] | tobias.oberstein@tavendo.de |
350249cd29157e132cd1e6549a7530a9fc74ae0f | da386754e12ed3e251d5fb9091d9416b9f97edc7 | /examples/large_deformation/active_fibres.py | c4e19db861b1c9544510221fd8f0f2e5268df656 | [
"BSD-3-Clause"
] | permissive | nasseralkmim/sfepy | 5b5642f084b62632c1ca48035e510f27728e25ab | 647f1754bcd4fd103cd19a03ed36cb10ebc8fd15 | refs/heads/master | 2020-04-06T04:57:21.589694 | 2016-08-03T12:38:31 | 2016-08-03T12:38:31 | 65,736,316 | 2 | 1 | null | 2016-08-15T13:58:01 | 2016-08-15T13:58:01 | null | UTF-8 | Python | false | false | 6,335 | py | # -*- coding: utf-8 -*-
r"""
Nearly incompressible hyperelastic material model with active fibres.
Large deformation is described using the total Lagrangian formulation.
Models of this kind can be used in biomechanics to model biological
tissues, e.g. muscles.
Find :math:`\ul{u}` such that:
.. math::
\intl{\Omega\suz}{} \left( \ull{S}\eff(\ul{u})
+ K(J-1)\; J \ull{C}^{-1} \right) : \delta \ull{E}(\ul{v}) \difd{V}
= 0
\;, \quad \forall \ul{v} \;,
where
.. list-table::
:widths: 20 80
* - :math:`\ull{F}`
- deformation gradient :math:`F_{ij} = \pdiff{x_i}{X_j}`
* - :math:`J`
- :math:`\det(F)`
* - :math:`\ull{C}`
- right Cauchy-Green deformation tensor :math:`C = F^T F`
* - :math:`\ull{E}(\ul{u})`
- Green strain tensor :math:`E_{ij} = \frac{1}{2}(\pdiff{u_i}{X_j} +
\pdiff{u_j}{X_i} + \pdiff{u_m}{X_i}\pdiff{u_m}{X_j})`
* - :math:`\ull{S}\eff(\ul{u})`
- effective second Piola-Kirchhoff stress tensor
The effective stress :math:`\ull{S}\eff(\ul{u})` incorporates also the
effects of the active fibres in two preferential directions:
.. math::
\ull{S}\eff(\ul{u}) = \mu J^{-\frac{2}{3}}(\ull{I}
- \frac{1}{3}\tr(\ull{C}) \ull{C}^{-1})
+ \sum_{k=1}^2 \tau^k \ull{\omega}^k
\;.
The first term is the neo-Hookean term and the sum add contributions of
the two fibre systems. The tensors :math:`\ull{\omega}^k =
\ul{d}^k\ul{d}^k` are defined by the fibre system direction vectors
:math:`\ul{d}^k` (unit).
For the one-dimensional tensions :math:`\tau^k` holds simply (:math:`^k`
omitted):
.. math::
\tau = A f_{\rm max} \exp{\left\{-(\frac{\epsilon - \varepsilon_{\rm
opt}}{s})^2\right\}} \mbox{ , } \epsilon = \ull{E} : \ull{\omega}
\;.
"""
from __future__ import print_function
from __future__ import absolute_import
import numpy as nm
from sfepy import data_dir
filename_mesh = data_dir + '/meshes/3d/cylinder.mesh'
vf_matrix = 0.5
vf_fibres1 = 0.2
vf_fibres2 = 0.3
options = {
'nls' : 'newton',
'ls' : 'ls',
'ts' : 'ts',
'save_steps' : -1,
'post_process_hook' : 'stress_strain',
}
fields = {
'displacement': (nm.float64, 3, 'Omega', 1),
}
materials = {
'solid' : ({
'K' : vf_matrix * 1e3, # bulk modulus
'mu' : vf_matrix * 20e0, # shear modulus of neoHookean term
},),
'f1' : 'get_pars_fibres1',
'f2' : 'get_pars_fibres2',
}
def get_pars_fibres(ts, coors, mode=None, which=0, vf=1.0, **kwargs):
"""
Parameters
----------
ts : TimeStepper
Time stepping info.
coors : array_like
The physical domain coordinates where the parameters shound be defined.
mode : 'qp' or 'special'
Call mode.
which : int
Fibre system id.
vf : float
Fibre system volume fraction.
"""
if mode != 'qp': return
fmax = 10.0
eps_opt = 0.01
s = 1.0
tt = ts.nt * 2.0 * nm.pi
if which == 0: # system 1
fdir = nm.array([1.0, 0.0, 0.0], dtype=nm.float64)
act = 0.5 * (1.0 + nm.sin(tt - (0.5 * nm.pi)))
elif which == 1: # system 2
fdir = nm.array([0.0, 1.0, 0.0], dtype=nm.float64)
act = 0.5 * (1.0 + nm.sin(tt + (0.5 * nm.pi)))
else:
raise ValueError('unknown fibre system! (%d)' % which)
fdir.shape = (3, 1)
fdir /= nm.linalg.norm(fdir)
print(act)
shape = (coors.shape[0], 1, 1)
out = {
'fmax' : vf * nm.tile(fmax, shape),
'eps_opt' : nm.tile(eps_opt, shape),
's' : nm.tile(s, shape),
'fdir' : nm.tile(fdir, shape),
'act' : nm.tile(act, shape),
}
return out
functions = {
'get_pars_fibres1' : (lambda ts, coors, mode=None, **kwargs:
get_pars_fibres(ts, coors, mode=mode, which=0,
vf=vf_fibres1, **kwargs),),
'get_pars_fibres2' : (lambda ts, coors, mode=None, **kwargs:
get_pars_fibres(ts, coors, mode=mode, which=1,
vf=vf_fibres2, **kwargs),),
}
variables = {
'u' : ('unknown field', 'displacement', 0),
'v' : ('test field', 'displacement', 'u'),
}
regions = {
'Omega' : 'all',
'Left' : ('vertices in (x < 0.001)', 'facet'),
'Right' : ('vertices in (x > 0.099)', 'facet'),
}
##
# Dirichlet BC.
ebcs = {
'l' : ('Left', {'u.all' : 0.0}),
}
##
# Balance of forces.
integral_1 = {
'name' : 'i',
'order' : 1,
}
equations = {
'balance'
: """dw_tl_he_neohook.i.Omega( solid.mu, v, u )
+ dw_tl_bulk_penalty.i.Omega( solid.K, v, u )
+ dw_tl_fib_a.i.Omega( f1.fmax, f1.eps_opt, f1.s, f1.fdir, f1.act,
v, u )
+ dw_tl_fib_a.i.Omega( f2.fmax, f2.eps_opt, f2.s, f2.fdir, f2.act,
v, u )
= 0""",
}
def stress_strain(out, problem, state, extend=False):
from sfepy.base.base import Struct, debug
ev = problem.evaluate
strain = ev('dw_tl_he_neohook.i.Omega( solid.mu, v, u )',
mode='el_avg', term_mode='strain')
out['green_strain'] = Struct(name='output_data',
mode='cell', data=strain, dofs=None)
stress = ev('dw_tl_he_neohook.i.Omega( solid.mu, v, u )',
mode='el_avg', term_mode='stress')
out['neohook_stress'] = Struct(name='output_data',
mode='cell', data=stress, dofs=None )
stress = ev('dw_tl_bulk_penalty.i.Omega( solid.K, v, u )',
mode='el_avg', term_mode= 'stress')
out['bulk_stress'] = Struct(name='output_data',
mode='cell', data=stress, dofs=None)
return out
##
# Solvers etc.
solver_0 = {
'name' : 'ls',
'kind' : 'ls.scipy_direct',
}
solver_1 = {
'name' : 'newton',
'kind' : 'nls.newton',
'i_max' : 7,
'eps_a' : 1e-10,
'eps_r' : 1.0,
'macheps' : 1e-16,
'lin_red' : 1e-2, # Linear system error < (eps_a * lin_red).
'ls_red' : 0.1,
'ls_red_warp': 0.001,
'ls_on' : 1.1,
'ls_min' : 1e-5,
'check' : 0,
'delta' : 1e-6,
}
solver_2 = {
'name' : 'ts',
'kind' : 'ts.simple',
't0' : 0,
't1' : 1,
'dt' : None,
'n_step' : 21, # has precedence over dt!
}
| [
"cimrman3@ntc.zcu.cz"
] | cimrman3@ntc.zcu.cz |
a6424fab008cf23584c46e17e822782a4c3d473f | bb824786f095fbf6e6cf627ef9c04afdb5152477 | /snippets/jinjaglobals.py | 94483a9b22739b8556c77c61ada4f9ce77f30ce1 | [] | no_license | Emilnurg/anas.ru | 19000063c7db98c15261056bb9590382db362d42 | 20cee1aee02da192c9c79a51bd0898c1dba0c98f | refs/heads/master | 2021-05-20T12:09:08.155749 | 2017-12-26T13:49:12 | 2017-12-26T13:49:12 | 252,287,670 | 0 | 0 | null | 2021-03-31T19:34:29 | 2020-04-01T21:10:48 | JavaScript | UTF-8 | Python | false | false | 10,649 | py | # -*- coding: utf-8 -*-
import os
import random
import re
import time
from django.conf import settings
from django.contrib.humanize.templatetags.humanize import intcomma as int_comma
from django.template.defaultfilters import floatformat as float_format
from django.utils import formats
from django.utils.dateformat import format as date_format
from django.utils.timezone import template_localtime
from django.utils.translation import ugettext_lazy as _
from easy_thumbnails.conf import settings as thumbnailer_settings
from easy_thumbnails.exceptions import InvalidImageFormatError
from easy_thumbnails.files import get_thumbnailer
from easy_thumbnails.templatetags.thumbnail import RE_SIZE, VALID_OPTIONS
from jinja2 import nodes
from jinja2.exceptions import TemplateSyntaxError
from jinja2.ext import Extension
from markupsafe import escape
from snippets.db_config import db_vars
from snippets.template_backends.jinja2 import jinjaglobal, jinjafilter
class SpacelessExtension(Extension):
"""
Removes whitespace between HTML tags at compile time, including tab and newline characters.
It does not remove whitespace between jinja2 tags or variables.
Neither does it remove whitespace between tags and their text content.
Adapted from coffin:
https://github.com/coffin/coffin/blob/master/coffin/template/defaulttags.py
"""
tags = {'spaceless'}
def parse(self, parser):
lineno = parser.stream.next().lineno
body = parser.parse_statements(['name:endspaceless'], drop_needle=True)
return nodes.CallBlock(
self.call_method('_strip_spaces', [], [], None, None),
[], [], body,
).set_lineno(lineno)
def _strip_spaces(self, caller=None):
return re.sub(r'>\s+<', '><', caller().strip())
class CacheExtension(Extension):
"""Exactly like Django's own tag, but supports full Jinja2
expressiveness for all arguments.
{% cache gettimeout()*2 "foo"+options.cachename %}
...
{% endcache %}
This actually means that there is a considerable incompatibility
to Django: In Django, the second argument is simply a name, but
interpreted as a literal string. This tag, with Jinja2 stronger
emphasis on consistent syntax, requires you to actually specify the
quotes around the name to make it a string. Otherwise, allowing
Jinja2 expressions would be very hard to impossible (one could use
a lookahead to see if the name is followed by an operator, and
evaluate it as an expression if so, or read it as a string if not.
TODO: This may not be the right choice. Supporting expressions
here is probably not very important, so compatibility should maybe
prevail. Unfortunately, it is actually pretty hard to be compatibly
in all cases, simply because Django's per-character parser will
just eat everything until the next whitespace and consider it part
of the fragment name, while we have to work token-based: ``x*2``
would actually be considered ``"x*2"`` in Django, while Jinja2
would give us three tokens: ``x``, ``*``, ``2``.
General Syntax:
{% cache [expire_time] [fragment_name] [var1] [var2] .. %}
.. some expensive processing ..
{% endcache %}
Available by default (does not need to be loaded).
Partly based on the ``FragmentCacheExtension`` from the Jinja2 docs.
TODO: Should there be scoping issues with the internal dummy macro
limited access to certain outer variables in some cases, there is a
different way to write this. Generated code would look like this:
internal_name = environment.extensions['..']._get_cache_value():
if internal_name is not None:
yield internal_name
else:
internal_name = "" # or maybe use [] and append() for performance
internalname += "..."
internalname += "..."
internalname += "..."
environment.extensions['..']._set_cache_value(internalname):
yield internalname
In other words, instead of using a CallBlock which uses a local
function and calls into python, we have to separate calls into
python, but put the if-else logic itself into the compiled template.
"""
tags = {'cache'}
def parse(self, parser):
lineno = parser.stream.next().lineno
expire_time = parser.parse_expression()
fragment_name = parser.parse_expression()
vary_on = []
while not parser.stream.current.test('block_end'):
vary_on.append(parser.parse_expression())
body = parser.parse_statements(['name:endcache'], drop_needle=True)
return nodes.CallBlock(
self.call_method('_cache_support',
[expire_time, fragment_name,
nodes.List(vary_on), nodes.Const(lineno)]),
[], [], body).set_lineno(lineno)
def _cache_support(self, expire_time, fragm_name, vary_on, lineno, caller):
from hashlib import md5
from django.core.cache import cache # delay depending in settings
from django.utils.http import urlquote
try:
expire_time = int(expire_time)
except (ValueError, TypeError):
raise TemplateSyntaxError(
'"%s" tag got a non-integer timeout value: %r' % (list(self.tags)[0], expire_time),
lineno
)
args_string = ':'.join([urlquote(v) for v in vary_on])
args_md5 = md5(args_string)
cache_key = 'template.cache.%s.%s' % (fragm_name, args_md5.hexdigest())
value = cache.get(cache_key)
if value is None:
value = caller()
cache.set(cache_key, value, expire_time)
return value
@jinjafilter
def cropped_thumbnail(instance, field_name, width=None, height=None, scale=None, **opts):
"""Cropper"""
ratiofield = instance._meta.get_field(field_name)
image = getattr(instance, ratiofield.image_field)
if ratiofield.image_fk_field:
image = getattr(image, ratiofield.image_fk_field)
if not image:
return ''
size = (int(ratiofield.width), int(ratiofield.height))
box = getattr(instance, field_name)
if scale:
scale = float(scale)
width = size[0] * scale
height = size[1] * scale
elif width and height:
width = float(width)
h = size[1] * width / size[0]
if h > height:
width = height * size[0] / size[1]
else:
height = h
elif width:
width = float(width)
height = size[1] * width / size[0]
elif height:
height = float(height)
width = height * size[0] / size[1]
if width and height:
size = (int(width), int(height))
if ratiofield.adapt_rotation:
if (image.height > image.width) != (size[1] > size[0]):
size = (size[1], size[0])
thumbnailer = get_thumbnailer(image)
thumbnail_options = {
'size': size,
'box': box,
**opts
}
try:
return thumbnailer.get_thumbnail(thumbnail_options).url
except InvalidImageFormatError:
return ''
@jinjafilter
def date(value, arg, use_l10n=True):
value = template_localtime(value)
if value in (None, ''):
return ''
if arg is None:
arg = settings.DATE_FORMAT
if arg == 'timestamp':
return str(int(time.mktime(value.timetuple())))
try:
return formats.date_format(value, arg, use_l10n=use_l10n)
except AttributeError:
try:
return date_format(value, arg)
except AttributeError:
return ''
@jinjaglobal
def get_language_href(request, lang):
url = request.get_full_path()
parts = url.split('/')
parts[1] = lang
url = '/'.join(parts)
return url if url.endswith('/') else url + '/'
@jinjaglobal
def get_languages():
return [x for x in settings.LANGUAGES if x[0] in settings.LANGUAGE_CODES_PUBLIC]
@jinjafilter
def floatformat(value, digits):
"""Порт floatformat"""
return float_format(value, digits)
@jinjafilter
def intcomma(value, use_l10n=True):
return int_comma(value, use_l10n=use_l10n)
phone_re = re.compile(r'(\.|\s|-|\)|\()+')
@jinjafilter
def phone_url(val):
val = strip_whitescapes(val, phone_re)
# если не 8 800
if not val.startswith('8'):
if not val.startswith('+'):
val = '+7' + val
return val
@jinjaglobal
def random_int():
return random.randint(1, 9999999)
@jinjafilter
def rjust(value, width, fillchar):
return str(value).rjust(width, fillchar)
@jinjaglobal
def site_name():
return settings.SITE_NAME
@jinjaglobal
def site_url():
return settings.SITE_URL
@jinjaglobal
def static_with_mtime(file_path):
filemtime = int(
os.path.getmtime(os.path.join(settings.STATIC_ROOT, file_path))
)
return '%s%s?v=%s' % (settings.STATIC_URL, file_path, filemtime)
whitespace_re = re.compile(r'(\s|-|\)|\()+', re.MULTILINE)
@jinjafilter
def strip_whitescapes(val, re_obj=whitespace_re):
return re_obj.sub('', val)
@jinjafilter
def thumbnail_obj(source, size, **opts):
"""Make thumbnail from source image"""
if not source:
return None
raise_errors = thumbnailer_settings.THUMBNAIL_DEBUG
accepted_opts = {}
for key, value in opts.items():
if key in VALID_OPTIONS:
accepted_opts[key] = value
opts = accepted_opts
m = RE_SIZE.match(size)
if m:
opts['size'] = (int(m.group(1)), int(m.group(2)))
else:
if raise_errors:
raise TemplateSyntaxError('%r is not a valid size.' % size, 1)
if 'quality' in opts:
try:
opts['quality'] = int(opts['quality'])
except (TypeError, ValueError):
if raise_errors:
raise TemplateSyntaxError('%r is an invalid quality.' % opts['quality'], 1)
try:
curr_thumbnail = get_thumbnailer(source).get_thumbnail(opts)
except Exception as e:
if raise_errors:
raise TemplateSyntaxError('Couldn\'t get the thumbnail %s: %s' % (source, e), 1)
else:
return None
return curr_thumbnail
@jinjafilter
def thumbnail(source, size, **opts):
thumb = thumbnail_obj(source, size, **opts)
return escape(thumb.url) if thumb else ''
@jinjaglobal
def ugettext(value):
return _(value)
@jinjaglobal
def var(key, request, **kwargs):
result = db_vars.get(key, request.LANGUAGE_CODE, '')
if kwargs:
result = result.format(**kwargs)
return result
| [
"wizzzet@gmail.com"
] | wizzzet@gmail.com |
fba58c1d1d2a511970d286307437a86f18d54932 | 8a3f10cd8f178e8452b0f1ecf747bec40ee87b31 | /orders.py | 2d627de6e0a1a078793a69db21cc83a1d14f48eb | [] | no_license | Panda0229/flasky | 70887be89b182f458ed6f1434e2a93afac048899 | d6adb2dfe998a6836bc24443d23d3cb4ed8b8e58 | refs/heads/master | 2020-12-02T18:50:41.230430 | 2019-12-31T13:04:24 | 2019-12-31T13:04:24 | 231,085,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | from flask import Blueprint
# 创建一个蓝图的对象,蓝图就是一个小模块的抽象的概念,等号左边是蓝图对,等号右边是为这个对象所起的名字,可以不同
app_orders = Blueprint("app_orders", __name__)
@app_orders.route("/get_orders")
def get_orders():
return "get orders page"
@app_orders.route("/post_orders")
def post_orders():
return "post orders page"
| [
"zhanghaining0229@163.com"
] | zhanghaining0229@163.com |
b73253fd7fdc82aacabc674fd9ab679a7a0f6a51 | 1daf07aa6e1a602d69ab2a786dca43d093803a04 | /module/module_using_sys.py | ecbb9b3638943e95bf22a6a2a0bf07396123cacf | [] | no_license | luteresa/python | 89491c90788ccfcd49f554cd8e8db8f9d0ab715f | 652dc34990f179094df64ef760fc03cc980556cd | refs/heads/master | 2020-12-02T16:15:20.813439 | 2017-07-26T08:52:07 | 2017-07-26T08:52:07 | 96,525,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | import sys
import os
print('the command line arguments are:')
for i in sys.argv:
print(i)
print('\n\nThe PYTHONPATH is', sys.path,'\n')
print(os.getcwd())
for item in sys.path:
print(item)
from math import sqrt
print('Squre root of 16 is ',sqrt(16))
| [
"luteresa@163.com"
] | luteresa@163.com |
868cbe6be728a3cb624a86979fd0c364286f5f63 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AlipayMerchantComplainReconciliationCloseModel.py | 0ef574bea5b34d603c969410707030ff70deb5cb | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,508 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayMerchantComplainReconciliationCloseModel(object):
def __init__(self):
self._merchant_id = None
self._merchant_type = None
@property
def merchant_id(self):
return self._merchant_id
@merchant_id.setter
def merchant_id(self, value):
self._merchant_id = value
@property
def merchant_type(self):
return self._merchant_type
@merchant_type.setter
def merchant_type(self, value):
self._merchant_type = value
def to_alipay_dict(self):
params = dict()
if self.merchant_id:
if hasattr(self.merchant_id, 'to_alipay_dict'):
params['merchant_id'] = self.merchant_id.to_alipay_dict()
else:
params['merchant_id'] = self.merchant_id
if self.merchant_type:
if hasattr(self.merchant_type, 'to_alipay_dict'):
params['merchant_type'] = self.merchant_type.to_alipay_dict()
else:
params['merchant_type'] = self.merchant_type
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayMerchantComplainReconciliationCloseModel()
if 'merchant_id' in d:
o.merchant_id = d['merchant_id']
if 'merchant_type' in d:
o.merchant_type = d['merchant_type']
return o
| [
"jishupei.jsp@alibaba-inc.com"
] | jishupei.jsp@alibaba-inc.com |
168476b32a6333cbb5bb67e465d815cd3a211e1e | 76c8a2593316a74078e5ebe3c280d393b058ff67 | /vai/commands/BreakLineCommand.py | f5ae1c5829502cfe19b2fd6aa2d35ee9be32a0ea | [] | no_license | gavd89/vai | b7f746c3ba31397e8d85f477af9b9b71d01795fb | afa3a31b74ee81f9be8ab2c06cd8bdaebae1baad | refs/heads/master | 2021-01-16T22:04:05.131998 | 2014-10-31T22:35:37 | 2014-10-31T22:35:37 | 26,130,434 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,830 | py | from .BufferCommand import BufferCommand
from .CommandResult import CommandResult
from .NewLineCommand import NewLineCommand
from .NewLineAfterCommand import NewLineAfterCommand
from ..models.TextDocument import LineMeta
class BreakLineCommand(BufferCommand):
def execute(self):
cursor = self._cursor
document = self._document
pos = cursor.pos
self.saveCursorPos()
if pos[1] == document.lineLength(pos[0]):
command = NewLineAfterCommand(self._buffer)
result = command.execute()
if result.success:
self._sub_command = command
return result
if pos[1] == 1:
command = NewLineCommand(self._buffer)
result = command.execute()
if result.success:
self._sub_command = command
cursor.toPos((pos[0]+1, 1))
return result
self.saveLineMemento(pos[0], BufferCommand.MEMENTO_REPLACE)
current_text = document.lineText(pos[0])
current_indent = len(current_text) - len(current_text.lstrip(' '))
document.breakLine(pos)
document.insertChars( (pos[0]+1, 1), ' '*current_indent )
cursor.toPos((pos[0]+1, current_indent+1))
line_meta = document.lineMeta(pos[0])
if line_meta.get(LineMeta.Change) == None:
document.updateLineMeta(pos[0], {LineMeta.Change: "modified"})
document.updateLineMeta(pos[0]+1, {LineMeta.Change: "added"})
return CommandResult(success=True, info=None)
def undo(self):
self.restoreCursorPos()
if self._sub_command is not None:
self._sub_command.undo()
self._sub_command = None
return
self.restoreLineMemento()
self._document.deleteLine(self._cursor.pos[0]+1)
| [
"stefano.borini@gmail.com"
] | stefano.borini@gmail.com |
d88e83ce61f5c1cfefddc7edc3506b16a9221549 | d8d0de926ac1a1de1abfca06aaed843d59f3674a | /python/easy/Solution_53.py | 3823a1f61d12b196772daf89164680284c13e3b6 | [] | no_license | nickest14/Leetcode-python | 75b4919d1ac45c6a7b008e6336db38a06e337bc7 | 435deadfe2b3936dd7848a384d2d9a364352268c | refs/heads/master | 2023-08-31T07:23:14.595745 | 2023-08-30T13:52:10 | 2023-08-30T13:52:10 | 188,695,066 | 0 | 0 | null | 2019-07-20T12:29:09 | 2019-05-26T14:43:12 | Python | UTF-8 | Python | false | false | 643 | py | # 53. Maximum Subarray
from typing import List
class Solution:
def maxSubArray(self, nums: List[int]) -> int:
ans = nums[0]
total = 0
for n in nums:
total += n
ans = max(ans, total)
if total < 0:
total = 0
return ans
# def maxSubArray(self, nums: List[int]) -> int:
# length = len(nums)
# f = [None for _ in range(length+1)]
# f[0] = 0
# for i in range(length):
# f[i+1] = max(f[i]+nums[i], nums[i])
# return max(f[1:])
ans = Solution().maxSubArray([-2, 1, -3, 4, -1, 2, 1, -5, 4])
print(ans)
| [
"nickest14@gmail.com"
] | nickest14@gmail.com |
d8f4e1dab4f396180784cd27edd3f97818f00ff4 | 275a96a33ae1f89e7b2ee0ecdbac7d78abe6d6cc | /test/test_agreement.py | a23d4faa65adb95e44786545cc1594e00cee38cd | [] | no_license | cascadiarc/cyclos-python-client | 8029ce07174f2fe92350a92dda9a60976b2bb6c2 | a2e22a30e22944587293d51be2b8268bce808d70 | refs/heads/main | 2023-04-03T16:52:01.618444 | 2021-04-04T00:00:52 | 2021-04-04T00:00:52 | 354,419,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | # coding: utf-8
"""
Cyclos 4.11.5 API
The REST API for Cyclos 4.11.5 # noqa: E501
OpenAPI spec version: 4.11.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.agreement import Agreement # noqa: E501
from swagger_client.rest import ApiException
class TestAgreement(unittest.TestCase):
"""Agreement unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAgreement(self):
"""Test Agreement"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.agreement.Agreement() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"dan@leftcoastfs.com"
] | dan@leftcoastfs.com |
33daa2f6e6384682405e79bf9780044ac51c8720 | d914604923dffab7a612b95f7e1b7ebcab0e9bb3 | /insta/migrations/0001_initial.py | 86409c50e4e9b1efd73aabc3b1f744ec7c895782 | [] | no_license | MaryMbugua/Insta | 8ca4adaa356d249a0d945cec653cf70fd2fe1589 | a238cda86616cbfc26e95611ad63b7a42011ffc0 | refs/heads/master | 2020-03-18T03:27:33.141311 | 2018-05-23T16:05:06 | 2018-05-23T16:05:06 | 134,241,994 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,618 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-23 06:31
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import tinymce.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pic', models.ImageField(blank=True, upload_to='images/')),
('caption', models.CharField(max_length=60, null=True)),
('likes', models.ManyToManyField(blank=True, related_name='likes', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('profile_pic', models.ImageField(blank=True, upload_to='avatar/')),
('bio', tinymce.models.HTMLField()),
('first_name', models.CharField(max_length=30, null=True)),
('last_name', models.CharField(max_length=30, null=True)),
('following', models.ManyToManyField(blank=True, related_name='followed_by', to=settings.AUTH_USER_MODEL)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='image',
name='profile',
field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='insta.Profile'),
),
migrations.AddField(
model_name='comment',
name='image',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='insta.Image'),
),
migrations.AddField(
model_name='comment',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| [
"marymbugua.nm@gmail.com"
] | marymbugua.nm@gmail.com |
d593edb29f5801f87b35e1ea86e83429d51735a0 | 215fd5c4f9893d9f38e4e48199ea16d7d6ef9430 | /9.Dynmaic_Programming/9.6_L91_Decode_Ways.py | 8f21ead3257f41860d9cc46e3d11e2e5d4e81dbb | [] | no_license | fztest/Classified | fd01622c097ca21b2e20285b06997ff0e9792dd1 | b046d94657c0d04f3803ca15437dfe9a6f6f3252 | refs/heads/master | 2020-03-25T06:34:07.885108 | 2017-05-04T17:22:36 | 2017-05-04T17:22:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | """
Description
______________
A message containing letters from A-Z is being encoded to numbers using the following mapping:
'A' -> 1
'B' -> 2
...
'Z' -> 26
Given an encoded message containing digits, determine the total number of ways to decode it.
For example,
Given encoded message "12", it could be decoded as "AB" (1 2) or "L" (12).
The number of ways decoding "12" is 2.
"""
class Solution:
# @param s, a string
# @return an integer
def numDecodings(self, s):
if s is None or len(s) == 0:
return 0
DP = [0 for _ in xrange(len(s))]
DP[0] = 0 if s[0] == '0' else 1
for i in xrange(1, len(s)):
first = s[i:i + 1]
second = s[i - 1:i + 1]
if int(first) != 0:
DP[i] += DP[i - 1]
if int(second) >= 10 and int(second) <= 26:
DP[i] += DP[i - 2] if i >= 2 else 1
return DP[-1]
| [
"cdzengpeiyun@gmail.com"
] | cdzengpeiyun@gmail.com |
c9ee749e1278aca262b9a42c087dbe576822bd2e | bfaf89bdb222b5b1f31aa4ef2a6466ca0125e225 | /students/view/logs.py | 1ebabdab75d4327d83f3c4782c224b271a81e1d8 | [] | no_license | smolynets/studentsdb13 | ab70506571150745753f04923c1a62457368ee03 | a76f9afa62ac6176a2f4dcea0098b6dd3986c3fc | refs/heads/master | 2021-01-17T22:03:14.145146 | 2017-03-07T08:33:38 | 2017-03-07T08:33:38 | 84,186,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.core.urlresolvers import reverse
from ..models.monthjournal import logentry
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from datetime import datetime
from django.contrib import messages
from datetime import datetime
from django.contrib.auth.decorators import login_required
@login_required
def logs(request):
logs = logentry.objects.order_by('asctime').reverse()
# paginate logs
paginator = Paginator(logs, 5)
page = request.GET.get('page')
try:
logs = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
logs = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver
# last page of results.
logs = paginator.page(paginator.num_pages)
return render(request, 'students/logs.html',
{'logs': logs}) | [
"smolynets@gmail.com"
] | smolynets@gmail.com |
b363b9ba1433e55ffe49fdb19adb2c5bade1ef05 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5738606668808192_0/Python/macieck9/c.py | 272d253ddc792afadcf3d720f6bab5d5693a2d4b | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 869 | py | def to_bin(n):
l = []
while n != 0:
l.append(n % 2)
n /= 2
l.reverse()
res = 0
for d in l:
res = 10 * res + d
return res
def to_dec(n, b):
l = str(n)
res = 0
for d in l:
res = b * res + int(d)
return res
def divisor(n):
for p in primes:
if n % p == 0:
return p
if p * p > n:
return -1
return -1
N = 1<<17
T = [0] * (N + 5)
primes = []
i = 2
while i*i <= N:
if T[i] == 1:
i += 1
continue
for j in range(i * i, N + 1, i):
T[j] = 1
i += 1
for i in range(2, N + 1):
if T[i] == 0:
primes.append(i)
ans = []
for i in range((1<<15) + 1, 1<<16, 2):
n = to_bin(i)
div = []
for j in range(2, 11):
x = to_dec(n, j)
d = divisor(x)
if d == -1:
break
div.append(d)
if len(div) == 9:
ans.append((n, div))
if len(ans) == 50:
break
print "Case #1:"
for (n, l) in ans:
print n,
for x in l:
print x,
print
| [
"alexandra1.back@gmail.com"
] | alexandra1.back@gmail.com |
0901a8decebfd2e16adbeb8a5654d44c7e9cc093 | a140fe192fd643ce556fa34bf2f84ddbdb97f091 | /.history/예외처리_20200709152901.py | 0f0b02c3f92435483b13b818496c0572d693295e | [] | no_license | sangha0719/py-practice | 826f13cb422ef43992a69f822b9f04c2cb6d4815 | 6d71ce64bf91cc3bccee81378577d84ba9d9c121 | refs/heads/master | 2023-03-13T04:40:55.883279 | 2021-02-25T12:02:04 | 2021-02-25T12:02:04 | 342,230,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | try:
print("나누기 전용 계산기입니다.")
nums = []
nums.append(int(input("첫 번째 숫자를 입력하세요 : ")))
nums.append(int(input("두 번째 숫자를 입력하세요 : ")))
nums.append(int(nums[0] / nums[1]))
print("{0} / {1} = {2}".format(num1, num2, int(num1/num2)))
except ValueError:
print("에러! 잘못된 값을 입력하였습니다.")
except ZeroDivisionError as err:
print(err)
| [
"sangha0719@gmail.com"
] | sangha0719@gmail.com |
91cb47500489f1a197928970caee8353d4676186 | 347299b495e1417dd4748c86f7696fb849c79c4f | /analisis_proyectos/servicios/configurador.py | 5fd1e564e54d15c1808c25abe4961a5136ee9eb8 | [] | no_license | vvalotto/python_uner | 6a379cc7ab4cb6380c193863403409e5559e1cd6 | 8fbeb9fde0883427695b5065232ace452160034f | refs/heads/master | 2020-07-25T23:32:50.829101 | 2019-10-18T18:02:16 | 2019-10-18T18:02:16 | 208,457,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,099 | py | """
Clase que inicializa y arma las instancias necesarias iniciales de la aplicaccion:
Contextos de datos
Respositorios
Gestores
"""
from analisis_proyectos.infraestructura.persistencia.contexto.contexto_database_sqlite import *
from analisis_proyectos.infraestructura.persistencia.repositorios.DB_repositorio_proyecto import *
from analisis_proyectos.infraestructura.persistencia.mapeador.proyecto import *
from analisis_proyectos.aplicacion.gestores.gestor_proyecto import *
from analisis_proyectos.aplicacion.gestores.gestor_componente import *
from analisis_proyectos.aplicacion.gestores.gestor_elemento import *
from analisis_proyectos.dominio.analitico.muestra import *
from analisis_proyectos.dominio.analitico.analizador import *
import os
# Resolve the SQLite database file relative to this module's directory.
directorio_base = os.path.abspath(os.path.dirname(__file__))
URI_DATABASE = 'sqlite:///' + os.path.join(directorio_base, 'proyectos.sqlite')
class Configurador:
    """Composition root: wires up the data context, repositories, managers
    and the analytic sample at class-definition (import) time."""
    # shared SQLite data context and one repository per aggregate
    contexto = ContextoDBSQLite(URI_DATABASE)
    repositorio_proyecto = DBRepositorioProyecto(contexto, MapeadorDatosProyecto(contexto))
    repositorio_componente = DBRepositorioComponente(contexto, MapeadorDatosComponente(contexto))
    repositorio_elemento = DBRepositorioElemento(contexto, MapeadorDatosElemento(contexto))
    # each manager gets its matching repository injected
    gestor_proyecto = GestorProyecto()
    gestor_proyecto.asignar_repositorio(repositorio_proyecto)
    gestor_componente = GestorComponente()
    gestor_componente.asignar_repositorio(repositorio_componente)
    gestor_elemento = GestorElemento()
    gestor_elemento.asignar_repositorio(repositorio_elemento)
    # analytic sample preloaded from the project-measurements table
    muestra_proyectos=Muestra()
    analizador_proyecto = Analizador(muestra_proyectos)
    repositorio = "proyectos.sqlite"
    datos_origen = "SELECT * FROM mediciones_proyecto;"
    muestra_proyectos.cargar_valores_de_muestra(repositorio, datos_origen)
# Smoke test: print the context resource and, if the sample project
# exists, fetch and print it.
if __name__ == '__main__':
    print(Configurador.contexto.recurso)
    if Configurador.gestor_proyecto.existe_proyecto("Sistema de Gestión de Flota"):
        proyecto = Configurador.gestor_proyecto.recuperar_proyecto_por_nombre("Sistema de Gestión de Flota")
        print(proyecto)
| [
"vvalotto@gmail.com"
] | vvalotto@gmail.com |
9a4060829fa3fbbf0940f5bfb96aa01245a3d0c5 | 1bccf7d57c7aa8d48b84fff187de4b6ff2599cb6 | /pandora_rqt_gui/scripts/pandora_rqt_gui.py | c324b72f85afebafedfbb68e996684964493b158 | [] | no_license | skohlbr/pandora_ros_pkgs | 733ed34edb5b6d46e59df4acb01288f28ef3b50f | eecaf082b47e52582c5f009eefbf46dd692aba4f | refs/heads/indigo-devel | 2021-01-21T18:06:14.967943 | 2015-11-04T15:08:03 | 2015-11-04T15:08:03 | 53,413,573 | 0 | 1 | null | 2016-03-08T13:19:40 | 2016-03-08T13:19:40 | null | UTF-8 | Python | false | false | 140 | py | #!/usr/bin/env python
import sys
from rqt_gui.main import Main
# Launch the rqt GUI with the pandora_rqt_gui plugin running standalone;
# the process exits with rqt's return code.
main = Main()
sys.exit(main.main(sys.argv, standalone='pandora_rqt_gui'))
| [
"pandora@ee.auth.gr"
] | pandora@ee.auth.gr |
4fff3230f52f4f0a934e4f4430ae04de4a2e3c0a | 43b6bffc820d26dfd223728bed71241fb3d54983 | /abc/223/c.py | b26920383e241d8bef73911853f1633fb651f49a | [] | no_license | kiccho1101/atcoder | 3a163b6a38a62c578dad6d15ccb586d0fcd1e004 | c86cb8e08b881a0a01dc2ef538f0699f3951e897 | refs/heads/master | 2023-03-02T13:27:17.747402 | 2022-05-30T13:51:00 | 2022-05-30T13:51:00 | 223,152,693 | 1 | 0 | null | 2023-02-11T01:29:47 | 2019-11-21T10:52:49 | Python | UTF-8 | Python | false | false | 345 | py | N = int(input())
# N is read on the (metadata-fused) line above.  Each row is (length a,
# burn rate b) of a fuse segment; secs[i] is its burn time.
ab = [list(map(int, input().split())) for _ in range(N)]
secs = [a / b for a, b in ab]
# `middle` is half the total burn time; walk segments until the halfway
# instant falls inside one, then add the partial length burned there.
middle = sum(secs) / 2
ans = 0
curr = 0
for i, (a, b) in enumerate(ab):
    diff = middle - (curr + secs[i])
    if diff > 0:
        curr += secs[i]
        ans += a
    else:
        ans += (middle - curr) * b
        break
print(ans)
"yodai.a.kishimoto@rakuten.com"
] | yodai.a.kishimoto@rakuten.com |
c125b0cb7fc1c7088739d00ba172ced46b39efe7 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02909/s936658151.py | b112400ac8521188a69b312edffaf79281fe8a75 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | S = input()
# S is read on the (metadata-fused) line above.  Print the next state in
# the fixed cycle Sunny -> Cloudy -> Rainy -> Sunny.
weather = ['Sunny', 'Cloudy','Rainy']
if S == weather[0]:
    print(weather[1])
elif S == weather[1]:
    print(weather[2])
else:
    print(weather[0])
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
274bceda1d0097d2ed46e0f61982776854ed733c | de5eafa17b8c1e3a8aedb2848e03282eae775334 | /augment/augment_intersect_Brent9060_v10.py | 6ade04d52ef7f70c94a83767fdf168eedfbac5a4 | [] | no_license | ekourkchi/HI | cba6601fbbb96231d457cc6a0a379c2fdb7a3e4d | 796ff5b9826d9d64e2a761e3ef587411df8edd80 | refs/heads/master | 2020-05-25T19:21:05.663026 | 2020-03-14T04:08:47 | 2020-03-14T04:08:47 | 187,949,260 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52,866 | py | #!/usr/bin/python
# encoding=utf8
import sys
# Python 2 idiom: force UTF-8 as the default string encoding so the
# non-ASCII galaxy names below do not raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding('utf8')

import os
import subprocess
import math
import matplotlib.pyplot as plt
import numpy as np
import pylab as py
from astropy.table import Table, Column
import random
from scipy.optimize import curve_fit
from astropy.stats import sigma_clip
import sqlcl
import urllib2

# Global WISE cross-match table: maps PGC numbers to WISE source names,
# used by QA_WISE_DONE below.
inFile = 'wise_all.csv'
table = np.genfromtxt( inFile , delimiter=',', filling_values=None, names=True, dtype=None)
wise_name = table['ID']
wise_pgc = table['PGC']
######################################
def xcmd(cmd, verbose):
    """Run *cmd* in a shell and return everything it wrote to stdout.

    The pipe is closed (exit status discarded) unless the output contains
    'abort'.  The original error branch was permanently disabled with
    ``if False:`` and the ``failure`` flag was never consulted, so both
    have been removed; the captured output is always returned.
    """
    if verbose:
        print('\n' + cmd)
    pipe = os.popen(cmd)
    output = ''
    for chunk in pipe:
        output += chunk
    if 'abort' not in output:
        pipe.close()
    return output
######################################
#################
class cd:
    """Context manager for changing the current working directory"""
    def __init__(self, newPath):
        # expand '~' so callers may pass home-relative paths
        self.newPath = os.path.expanduser(newPath)

    def __enter__(self):
        # remember the original cwd so __exit__ can restore it
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)

    def __exit__(self, etype, value, traceback):
        # always restore the previous cwd, even after an exception
        os.chdir(self.savedPath)
###############################
def get_scales(photometry, nskip=29):
    """Read a '.scales.dat' photometry table and return its key quantities.

    Any field that is missing or unparsable comes back as the sentinel
    -9.99.  Returns the tuple
    (ebv, a_gal, central_mu, mu_50, mu_90, m_255, disc_mu0,
     scale_length_h, R_50, R_90, R_255, concentration, d_m_ext).
    """
    table = np.genfromtxt(photometry, delimiter=',', filling_values=None,
                          names=True, dtype=None, skip_header=nskip)

    def _field(key):
        # bare except mirrors the original behavior: any failure
        # (missing column, bad value) maps to the sentinel
        try:
            return float(table[key])
        except:
            return -9.99

    return (_field('ebv'), _field('a_gal'), _field('central_mu'),
            _field('mu_50'), _field('mu_90'), _field('m_255'),
            _field('disc_mu0'), _field('scale_length_h'), _field('R_50'),
            _field('R_90'), _field('R_255'), _field('concentration'),
            _field('d_m_ext'))
###############################
### For a given photometry file, this returns the magnitude value
def get_mag(photometry, index=2, header=None):
    """Read a value from an SDSS photometry file.

    With ``header`` given (e.g. 'A_Gal:'), scan the '# key value' comment
    lines and return the value whose key matches; return 0 when the key
    is absent or the file does not exist.  Otherwise parse line 14 and
    return its ``index``-th non-empty space-separated token as a float
    (the asymptotic magnitude sits in column 2).  Returns None when the
    file is missing in that mode.

    Fix: ``np.float`` (removed in NumPy >= 1.24; it was an alias of the
    builtin) replaced by ``float`` — behavior is identical.
    """
    if header is not None:
        if os.path.exists(photometry):
            with open(photometry) as f:
                for line in f:
                    foundit = False
                    once = True
                    if line.split(" ")[0] == '#':
                        line_split = line.split(" ")
                        not_void = 0
                        key = None
                        for thing in line_split:
                            if thing != '':
                                not_void += 1
                            # 2nd non-empty token is the key ...
                            if not_void == 2 and once:
                                key = thing
                                once = False
                            # ... and the 3rd is its value
                            if not_void == 3 and key == header:
                                foundit = True
                                break
                    if foundit:
                        return float(thing)
        return 0
    if os.path.exists(photometry):
        with open(photometry) as f:
            counter = 1
            for line in f:
                if counter == 14:
                    line_split = line.split(" ")
                    not_void = 0
                    for thing in line_split:
                        if thing != '':
                            not_void += 1
                        if not_void == index:
                            break
                    return float(thing)
                counter += 1
###############################
###############################
### For a given photometry file, this returns the magnitude value
def get_mag_f(photometry, index=2, header=None):
    """Read a value from a photometry file (first-data-line variant).

    With ``header`` given, behave exactly like get_mag's header mode:
    scan '# key value' comment lines and return the matching value, or 0
    when absent / file missing.  Otherwise return the ``index``-th
    non-empty token of the FIRST non-comment line as a float (instead of
    the fixed line 14 used by get_mag).  Returns None if the file is
    missing in that mode.

    Fix: ``np.float`` (removed in NumPy >= 1.24; alias of the builtin)
    replaced by ``float`` — behavior is identical.  The unused
    ``set_param`` local was dropped.
    """
    if header is not None:
        if os.path.exists(photometry):
            with open(photometry) as f:
                for line in f:
                    foundit = False
                    once = True
                    if line.split(" ")[0] == '#':
                        line_split = line.split(" ")
                        not_void = 0
                        key = None
                        for thing in line_split:
                            if thing != '':
                                not_void += 1
                                # 2nd non-empty token is the key,
                                # the 3rd is its value
                                if not_void == 2 and once:
                                    key = thing
                                    once = False
                                if not_void == 3 and key == header:
                                    foundit = True
                                    break
                    if foundit:
                        return float(thing)
        return 0
    if os.path.exists(photometry):
        with open(photometry) as f:
            for line in f:
                # first line that is not a comment carries the data
                if line.split(" ")[0] != '#' and line.split(" ")[0] != '#\n':
                    line_split = line.split(" ")
                    not_void = 0
                    for thing in line_split:
                        if thing != '':
                            not_void += 1
                            if not_void == index:
                                break
                    return float(thing)
def get_mag_wise(photometry, index=2):
    """Wrapper around get_mag_f that maps a missing value (None) to -1000."""
    mag = get_mag_f(photometry, index=index)
    if mag is None:
        return -1000
    return mag
###############################
def get_semimajor(filename):
    """Return the first token on line 14 of *filename* (the semi-major
    axis, in the ellipse-parameter file layout) as a float.

    Fix: ``np.float`` (removed in NumPy >= 1.24) replaced by the
    equivalent builtin ``float``.
    """
    with open(filename) as f:
        counter = 1
        for line in f:
            if counter == 14:
                line_split = line.split(" ")
                not_void = 0
                for thing in line_split:
                    if thing != '':
                        not_void += 1
                    if not_void == 1:
                        break
                return float(thing)
            counter += 1
def get_ellipse(filename):
    """Parse line 14 of an ellipse-parameter file.

    Returns (ra_cen, dec_cen, semimajor, semiminor, PA) read from the
    first five whitespace-separated tokens; any field not present stays
    at its -1 default.

    Fix: ``np.float`` (removed in NumPy >= 1.24) replaced by the
    equivalent builtin ``float``.
    """
    ra_cen = -1
    dec_cen = -1
    semimajor = -1
    semiminor = -1
    PA = -1
    with open(filename) as f:
        counter = 1
        for line in f:
            if counter == 14:
                line_split = line.split(" ")
                not_void = 0
                set_param = False
                for thing in line_split:
                    if thing != '':
                        not_void += 1
                        set_param = True
                    # set_param guards each slot so it is filled exactly
                    # once, by the token that advanced the count
                    if not_void == 1 and set_param:
                        set_param = False
                        ra_cen = float(thing)
                    if not_void == 2 and set_param:
                        dec_cen = float(thing)
                        set_param = False
                    if not_void == 3 and set_param:
                        semimajor = float(thing)
                        set_param = False
                    if not_void == 4 and set_param:
                        semiminor = float(thing)
                        set_param = False
                    if not_void == 5 and set_param:
                        PA = float(thing)
                        break
                return ra_cen, dec_cen, semimajor, semiminor, PA
            counter += 1
#################################
###############################
def isInSDSS_DR12(ra, dec):
    """Ask the SDSS SQL service whether (ra, dec) lies in the DR12
    footprint; returns 1 for yes, 0 for no."""
    querry = "select dbo.fInFootprintEq("+str(ra)+","+str(dec)+", 1)"
    lines = sqlcl.query(querry).readlines()
    # the third response line carries the boolean answer
    return 1 if lines[2] == "True\n" else 0
###############################
###############################
def get_ellipse_wise(filename):
    """Parse the first non-comment line of a WISE ellipse-parameter file.

    Returns (ra_cen, dec_cen, semimajor, semiminor, PA) from the first
    five whitespace-separated tokens; any field not present stays at its
    -1 default.

    Fix: ``np.float`` (removed in NumPy >= 1.24) replaced by the
    equivalent builtin ``float``.
    """
    ra_cen = -1
    dec_cen = -1
    semimajor = -1
    semiminor = -1
    PA = -1
    with open(filename) as f:
        for line in f:
            # unlike get_ellipse, the data line is the first line that is
            # not a '#' comment
            if line.split(" ")[0] != '#' and line.split(" ")[0] != '#\n':
                line_split = line.split(" ")
                not_void = 0
                set_param = False
                for thing in line_split:
                    if thing != '':
                        not_void += 1
                        set_param = True
                    if not_void == 1 and set_param:
                        set_param = False
                        ra_cen = float(thing)
                    if not_void == 2 and set_param:
                        dec_cen = float(thing)
                        set_param = False
                    if not_void == 3 and set_param:
                        semimajor = float(thing)
                        set_param = False
                    if not_void == 4 and set_param:
                        semiminor = float(thing)
                        set_param = False
                    if not_void == 5 and set_param:
                        PA = float(thing)
                        break
                return ra_cen, dec_cen, semimajor, semiminor, PA
#################################
#################################
def get_quality(filename, nline=40):
    """Return the integer value on line *nline* of *filename*.

    The line is expected to look like ``key = value`` (space-separated);
    -1 is returned when that line has no second column.
    """
    for line_no, line in enumerate(open(filename, 'r'), start=1):
        columns = line.split(' ')
        if line_no == nline and len(columns) >= 2:
            # skip padding and the '=' separator to reach the value
            j = 1
            while columns[j] in ('', '='):
                j += 1
            return int(columns[j])
    return -1
#################################
def read_note(filename):
    """Return the text after '=' on line 11 of *filename*, with the
    trailing newline removed; ' ' when the file does not exist."""
    note = ' '
    if os.path.exists(filename):
        with open(filename) as f:
            for lineno, line in enumerate(f, start=1):
                if lineno == 11:
                    # keep everything after the first '=', minus the
                    # final character (the newline)
                    note = line.split("=")[1]
                    note = note[:-1]
    return note
#################################
#################################
def ra_db(ra):
    """Map an RA (degrees) to its zero-padded degree-bin name,
    e.g. 7.3 -> '007D', 123.9 -> '123D'."""
    label = str(int(np.floor(ra)))
    if ra < 10:
        pad = '00'
    elif ra < 100:
        pad = '0'
    else:
        pad = ''
    return pad + label + 'D'
#################################
#################
def QA_SDSS_DONE(pgc, ra):
    """True when the SDSS QA file for galaxy *pgc* already exists in the
    local database tree (binned by RA degree)."""
    databse = '/home/ehsan/db_esn/'+'/cf4_sdss/data/'
    qa_file = databse + ra_db(ra) + '/sdss/fits/' + 'pgc' + str(pgc) + '_qa.txt'
    return os.path.exists(qa_file)
#################
def QA_WISE_DONE(pgc, ra):
    """True when a WISE QA file exists for *pgc*, looked up first under
    its WISE catalog name, then under its plain 'pgcNNN' name."""
    global wise_name, wise_pgc
    databse = '/home/ehsan/db_esn/'+'/cf4_wise/data/'
    qa_dir = databse + ra_db(ra) + '/wise/fits/'
    if pgc in wise_pgc:
        match = np.where(pgc == wise_pgc)
        if os.path.exists(qa_dir + wise_name[match][0] + '_qa.txt'):
            return True
    return os.path.exists(qa_dir + 'pgc' + str(pgc) + '_qa.txt')
#######################################
######################################
def rnd_inc(inc):
    """Round an inclination to the nearest whole degree, breaking exact
    .5 ties randomly.

    None passes through; anything above 90 clamps to 90., anything below
    45 collapses to 0.
    """
    if inc is None:
        return None
    if inc > 90:
        return 90.
    if inc < 45:
        return 0.
    whole = int(inc)
    frac = inc - whole
    if frac > 0.5:
        return whole + 1
    if frac < 0.5:
        return whole
    # exact tie: one random draw, just like the original
    return whole + random.randint(0, 1)
######################################
def inc_append(incs, email, inc):
    """Append *inc* to *incs* n times, where n is the per-user weight
    (experienced measurers count more); unknown users count once.
    Returns the same (mutated) list."""
    weights = {
        'rbtully1@gmail.com': 4,
        'ekourkchi@gmail.com': 4,
        's.eftekharzadeh@gmail.com': 2,
        'mokelkea@hawaii.edu': 3,
        'chasemu@hawaii.edu': 2,
        'jrl2014@hawaii.edu': 3,
        'dschoen@hawaii.edu': 4,
        'adholtha@hawaii.edu': 4,
        'chuangj@hawaii.edu': 2,
        'mi24@hawaii.edu': 2,
        'mka7@hawaii.edu': 1,
        'a.danesh61@gmail.com': 1,
        'cgrubner0@gmail.com': 1,
        'pascal.jouve@free.fr': 2,
        'dlsaintsorny@gmail.com': 2,
        'arnaud.ohet@gmail.com': 1,
        'hawaii@udrea.fr': 1,
        'helenecourtois33@gmail.com': 4,
        'claude.rene21@gmail.com': 1,
        'fredwallet@gmail.com': 2,
        'henri140860@wanadoo.fr': 2,
        'joannin.lycee@free.fr': 2,
        'bevig434@gmail.com': 2,
        'echarraix69@gmail.com': 2,
    }
    incs.extend([inc] * weights.get(email, 1))
    return incs
######################################
def correction(i, email):
    """Undo a measurer's linear bias model inc_measured = a*inc + b.

    For users with a calibrated (a, b) pair this returns (i - b) / a,
    computed exactly as the original did (a -> 1/a, b -> -b/a, then
    a*i + b); everyone else gets *i* back unchanged.
    """
    params = {
        'chuangj@hawaii.edu': (0.9698391552105461, 3.582543838111245),
        'mi24@hawaii.edu': (0.9819724300214063, 2.485648837307963),
        'arnaud.ohet@gmail.com': (0.8925968302721691, 8.021973390519326),
        'cgrubner0@gmail.com': (0.8957026107782403, 9.076420810780814),
        'jrl2014@hawaii.edu': (0.9350710901954157, 5.178922022569104),
    }
    a, b = params.get(email, (1, 0))
    a = 1. / a
    b = -1. * b * a
    return a * i + b
######################################
def fitFunc(x, a, b):
    """Straight line a*x + b, used as the model for curve_fit."""
    return b + a * x
######################################
def addNote(note, text):
    """Append '[text]' to *note* (space separated); an empty *text*
    leaves *note* untouched."""
    if text == '':
        return note
    tagged = '[' + text + ']'
    return tagged if note == '' else note + ' ' + tagged
def addConcern(note, cncrn):
    """Append a bracketed tag to *note* for each concern counter in
    *cncrn* that is positive (index order matches the tag list)."""
    labels = ['not_sure', 'better_image', 'bad_TF', 'ambiguous',
              'disturbed', 'HI', 'face_on', 'not_spiral', 'multiple']
    for count, label in zip(cncrn, labels):
        if count > 0:
            note = addNote(note, label)
    return note
######################################
#######################################
#######################################
def getINC(include_Email=None, exclude_Email=None):
    """Collect inclination measurements from the Manoa and Guest tables.

    Parameters
    ----------
    include_Email : list of str, optional
        Only measurements by these users are kept; defaults to the core
        team listed below.
    exclude_Email : list of str, optional
        Users whose measurements are always dropped (default: none).

    Returns
    -------
    dict mapping each PGC id to a list of
    [email, corrected_inc, flag, note, [not_sure, better_image, bad_TF,
    ambiguous, disturbed, HI, face_on, not_spiral, multiple]] entries.
    """
    # avoid the original mutable default argument ([])
    if exclude_Email is None:
        exclude_Email = []
    if include_Email == None:
        emails = ['rbtully1@gmail.com', 'ekourkchi@gmail.com', 'mokelkea@hawaii.edu', 'jrl2014@hawaii.edu', 'dschoen@hawaii.edu', 'adholtha@hawaii.edu']
    else:
        emails = include_Email

    #### Manoa table
    inFile = 'EDD.inclination.All.Manoa.11Sep2018135319.txt'
    table = np.genfromtxt(inFile, delimiter='|', filling_values=None, names=True, dtype=None)
    pgc_incout = table['pgcID']
    inc_incout = table['inc']
    flag_incout = table['flag']
    # collapse internal whitespace in free-text columns
    note = [' '.join(dummy.split()) for dummy in table['note']]
    email = [' '.join(dummy.split()) for dummy in table['email']]
    NS = table['not_sure']
    BI = table['better_image']
    TF = table['bad_TF']
    AM = table['ambiguous']
    DI = table['disturbed']
    HI = table['HI']
    FO = table['face_on']
    NP = table['not_spiral']
    MU = table['multiple']

    #### Guest table
    inFile = 'EDD.inclination.All.Guest.11Sep2018135307.txt'
    table = np.genfromtxt(inFile, delimiter='|', filling_values=None, names=True, dtype=None)
    pgc_incout_ = table['pgcID']
    inc_incout_ = table['inc']
    flag_incout_ = table['flag']
    note_ = [' '.join(dummy.split()) for dummy in table['note']]
    email_ = [' '.join(dummy.split()) for dummy in table['email']]
    NS_ = table['not_sure']
    BI_ = table['better_image']
    TF_ = table['bad_TF']
    AM_ = table['ambiguous']
    DI_ = table['disturbed']
    HI_ = table['HI']
    FO_ = table['face_on']
    NP_ = table['not_spiral']
    MU_ = table['multiple']

    # unique PGC ids across both tables, in order of first appearance
    PGC = []
    for i in range(len(pgc_incout)):
        if not pgc_incout[i] in PGC:
            PGC.append(pgc_incout[i])
    for i in range(len(pgc_incout_)):
        if not pgc_incout_[i] in PGC:
            PGC.append(pgc_incout_[i])

    incDict = {}
    for i in range(len(PGC)):
        data = []
        indx = np.where(PGC[i] == pgc_incout)
        for j in indx[0]:
            if email[j] in emails and not email[j] in exclude_Email:
                inc_incout[j] = correction(inc_incout[j], email[j])
                data.append([email[j], inc_incout[j], flag_incout[j], note[j], [NS[j], BI[j], TF[j], AM[j], DI[j], HI[j], FO[j], NP[j], MU[j]]])
        indx = np.where(PGC[i] == pgc_incout_)
        for j in indx[0]:
            # Fix: the Guest rows were indexed with the Manoa email list
            # (email[j]); use the Guest list email_[j] for the filter,
            # the bias correction and the recorded measurer.
            if email_[j] in emails and not email_[j] in exclude_Email:
                inc_incout_[j] = correction(inc_incout_[j], email_[j])
                data.append([email_[j], inc_incout_[j], flag_incout_[j], note_[j], [NS_[j], BI_[j], TF_[j], AM_[j], DI_[j], HI_[j], FO_[j], NP_[j], MU_[j]]])
        incDict[PGC[i]] = data
    return incDict
###########################################################
######################################
def incMedian(incDic):
    """Combine one galaxy's inclination measurements into a single value.

    *incDic* is the per-galaxy list produced by getINC: each item is
    [email, inc, flag, note, concerns[9]].  A "boss" measurer (Tully if
    present, else Kourkchi) decides whether the galaxy is rejected; the
    kept measurements are weighted (inc_append), 2-sigma clipped, and
    their median/scatter returned as (inc, stdev, flag, note, n).
    """
    boss = 'ekourkchi@gmail.com'
    Keypeople = []
    for item in incDic:
        Keypeople.append(item[0])
        if item[0] == 'rbtully1@gmail.com':
            boss = 'rbtully1@gmail.com'
    flag = 0
    inc = 0
    note = ''
    stdev = 0
    n = 0 # number of good measurements
    concerns = np.zeros(9)
    if boss in Keypeople:
        # find the boss's entry; his flag decides accept/reject
        poss_i = 0
        for ppl in Keypeople:
            if ppl==boss: break
            poss_i+=1
        if incDic[poss_i][2] != 0: # boss has flagged it
            flag = 1
            # only aggregate notes/concerns from the other rejections
            for item in incDic:
                if item[2]==1:
                    note = addNote(note, item[3])
                    concerns+=np.asarray(item[4])
                    n+=1
        else: # boss has NOT flagged it
            flag = 0
            incs = []
            incs2 = []
            for item in incDic:
                if item[2]==0:
                    incs.append(item[1])
                    # incs2 repeats each value by the measurer's weight
                    incs2 = inc_append(incs2, item[0], item[1])
                    note = addNote(note, item[3])
                    n+=1
            # scatter from the unweighted, sigma-clipped values
            # NOTE(review): `iters=` is the old astropy keyword (renamed
            # `maxiters` in astropy >= 3.1) — confirm the pinned version.
            incs = np.asarray(incs)
            filtered_data = sigma_clip(incs, sigma=2, iters=5, copy=False)
            incs = filtered_data.data[np.logical_not(filtered_data.mask)]
            stdev = np.std(incs)
            # median from the weighted, sigma-clipped values
            incs2 = np.asarray(incs2)
            filtered_data = sigma_clip(incs2, sigma=2, iters=5, copy=False)
            incs2 = filtered_data.data[np.logical_not(filtered_data.mask)]
            inc = np.median(incs2)
    else:
        # no boss measured this galaxy: majority vote on the flag
        flag = []
        for item in incDic:
            flag.append(item[2])
        flag = np.median(flag)
        if flag > 0: flag =1
        incs = []
        incs2 = []
        for item in incDic:
            if item[2]==flag:
                incs.append(item[1])
                incs2 = inc_append(incs2, item[0], item[1])
                note = addNote(note, item[3])
                concerns+=np.asarray(item[4])
                n+=1
        incs = np.asarray(incs)
        filtered_data = sigma_clip(incs, sigma=2, iters=5, copy=False)
        incs = filtered_data.data[np.logical_not(filtered_data.mask)]
        stdev = np.std(incs)
        incs2 = np.asarray(incs2)
        filtered_data = sigma_clip(incs2, sigma=2, iters=5, copy=False)
        incs2 = filtered_data.data[np.logical_not(filtered_data.mask)]
        inc = np.median(incs2)
    note = addConcern(note, concerns)
    inc = rnd_inc(inc)
    # inclination-dependent floor on the quoted uncertainty; anything
    # below 45 degrees is rejected outright
    if inc>=89:
        err = 1.
    elif inc>=85:
        err = 2.
    elif inc>=69:
        err = 3.
    elif inc>=50:
        err = 4.
    elif inc>=45:
        err = 6.
    else:
        err = 0
        flag = 1
        inc = 0
        stdev = 0
    stdev = np.max([stdev, err])
    stdev = np.round(stdev)
    return inc, stdev, flag, note, n
#######################################
#################
def query_leda_lyon(pgc):
    """Query the HyperLEDA (Lyon) SQL gateway for one PGC object.

    Returns [pgc, ra(deg), dec, l, b, sgl, sgb, logd25, logr25, pa, ty,
    type]; every field is None when the object is not found.  The field
    indices (5, 6, 7, ... 24) assume the column order of LEDA's
    'select *' output — TODO confirm against the current service.
    """
    leda = []
    query=""
    if True:
        # build the URL-encoded ' or pgc=NNN' clause ...
        query=query+"%20or%20pgc%3D"+str(pgc)
    if True:
        # ... then strip the leading '%20or' before appending to the URL
        query=query[5:]
        url='http://leda.univ-lyon1.fr/leda/fullsqlmean.cgi?Query=select%20*%20where'+query
        result=urllib2.urlopen(url)
        for myline in result:
            # skip HTML markup and blank lines of the response
            if "<" in myline:
                continue
            if myline=="":
                continue
            # columns are '|'-separated; '-' means a missing value
            elements=myline.replace(" ","").split("|")
            elements=[x if x!="-" else None for x in elements]
            if ("pgc" in elements[0]):
                continue
            if (len(elements)<2):
                continue
            elements.pop()
            if (elements):
                #print elements[:3]
                leda.append((elements))
        query=""
    pgc_leda = None
    ra_leda = None
    dec_leda = None
    l_leda = None
    b_leda = None
    sgl_leda = None
    sgb_leda = None
    logd25_leda = None
    logr25_leda = None
    pa_leda = None
    ty_leda = None
    type_leda = None
    if (leda):
        # first data row only; RA comes back in hours, convert to degrees
        leda = leda[0]
        pgc_leda = int(leda[0])
        ra_leda = float(leda[5])*15.
        dec_leda = float(leda[6])
        l_leda = float(leda[7])
        b_leda = float(leda[8])
        sgl_leda = float(leda[9])
        sgb_leda = float(leda[10])
        logd25_leda = float(leda[20])
        logr25_leda = float(leda[22])
        pa_leda = float(leda[24])
        ty_leda = float(leda[17])
        type_leda = (leda[12])
    return([pgc_leda, ra_leda, dec_leda, l_leda, b_leda, sgl_leda, sgb_leda, logd25_leda, logr25_leda, pa_leda, ty_leda, type_leda])
    #return leda
###############################
########################################################### TEST
#print get_mag_wise('/run/media/ehsan/6ccd3c78-12e8-4f00-815d-faf200b314cf/ehsan/db_esn/cf4_wise/data/001D/photometry/NGC7821_w1_asymptotic.dat', index=1)
#print get_mag_f('/run/media/ehsan/6ccd3c78-12e8-4f00-815d-faf200b314cf/ehsan/db_esn/cf4_wise/data/001D/photometry/NGC7821_w1_asymptotic.dat', header='A_Gal:')
#print get_mag('/run/media/ehsan/6ccd3c78-12e8-4f00-815d-faf200b314cf/ehsan/db_esn/cf4_sdss/data/018D/photometry/pgc1264576_g_asymptotic.dat')
#print get_mag('/run/media/ehsan/6ccd3c78-12e8-4f00-815d-faf200b314cf/ehsan/db_esn/cf4_sdss/data/018D/photometry/pgc1264576_g_asymptotic.dat', index=1)
#print get_mag('/run/media/ehsan/6ccd3c78-12e8-4f00-815d-faf200b314cf/ehsan/db_esn/cf4_sdss/data/018D/photometry/pgc1264576_g_asymptotic.dat', header='A_Gal:')
#sys.exit()
########################################################### Begin
# Master CF4 distance catalog: one row per galaxy observation.
inFile = 'EDD_distance_cf4_v21.csv'
table = np.genfromtxt(inFile , delimiter='|', filling_values=None, names=True, dtype=None)
pgc = table['pgc']
ra = table['ra']
dec = table['dec']
gl = table['gl']
gb = table['gb']
sgl = table['sgl']
sgb = table['sgb']
d25 = table['d25']
b_a = table['b_a']
pa = table['pa']
ty = table['ty']
type = table['type']
sdss = table['sdss']
alfa100_ = table['alfa100']
QA_sdss = table['QA_sdss']
QA_wise = table['QA_wise']
############################################################
# Full LEDA dump, used to fill positions/axis ratios for galaxies that
# are missing from the CF4 table.
inFile = 'All_LEDA_EDD.csv'
table = np.genfromtxt( inFile , delimiter=',', filling_values=None, names=True, dtype=None)
pgc_leda = table['pgc']
ra_leda = table['al2000']
# LEDA stores RA in hours; convert to degrees
ra_leda *= 15.
dec_leda = table['de2000']
l_leda = table['l2']
b_leda = table['b2']
sgl_leda = table['sgl']
sgb_leda = table['sgb']
logd25_leda = table['logd25']
# d25 is log10 of 0.1 arcmin units; b_a follows from logr25
d25_leda = 0.1*(10**logd25_leda)
logr25_leda = table['logr25']
b_a_leda = 1./(10**logr25_leda)
pa_leda = table['pa']
ty_leda = table['t']
type_leda = table['type']
############################################################
# Build per-galaxy (unique PGC) parallel lists from the CF4 table, and
# record whether SDSS/WISE QA files already exist on disk.
pgc_ = []
ra_ = []
dec_ = []
l_ = []
b_ = []
sgl_ = []
sgb_ = []
sdss_ = []
d25_ = []
alfa100 = []
QA = []
QA_wise = []
pa_ = []
b_a_ = []
ty_ = []
type_ = []

for i in range(len(pgc)): # range(100):
    if not pgc[i] in pgc_:
        pgc_.append(pgc[i])
        ra_.append(ra[i])
        dec_.append(dec[i])
        l_.append(gl[i])
        b_.append(gb[i])
        sgl_.append(sgl[i])
        sgb_.append(sgb[i])
        d25_.append(d25[i])
        sdss_.append(sdss[i])
        pa_.append(pa[i])
        b_a_.append(b_a[i])
        ty_.append(ty[i])
        alfa100.append(alfa100_[i])
        if QA_SDSS_DONE(pgc[i], ra[i]):
            QA.append(1)
        else: QA.append(0)
        if QA_WISE_DONE(pgc[i], ra[i]):
            QA_wise.append(1)
        else: QA_wise.append(0)
        type_.append(type[i])
#####################################################
# Membership lists: ALFALFA-100 PGC ids and PGCs with SDSS coverage.
inFile = 'Alfa100_EDD.csv'
table = np.genfromtxt( inFile , delimiter='|', filling_values=None, names=True, dtype=None)
pgc_edd_alfa100 = table['PGC']

inFile = 'tmp1'
table = np.genfromtxt( inFile , delimiter='|', filling_values=None, names=True, dtype=None)
pgc_sdss = table['pgc']
############################################################
# Extra TF galaxies from Alexandra's result list; IDs look like
# 'pgcNNNNNNN', so characters 3:10 hold the PGC number.
inFile = 'output_1_Results_alexandra.csv'
table = np.genfromtxt(inFile , delimiter='|', filling_values=None, names=True, dtype=None)
id_lexi = table['ID']
pgc_add = []
q = 0
for i in range(len(id_lexi)):
    new_pgc = int(id_lexi[i][3:10])
    if not new_pgc in pgc_:
        pgc_add.append(new_pgc)
        q+=1
print "New TF gals: #", q
# For each newly-added PGC: take its parameters from the local LEDA dump
# when present, otherwise query the HyperLEDA web service directly.
for i in range(len(pgc_add)):
    if not pgc_add[i] in pgc_leda:
        # not in the local dump — fetch from Lyon
        leda_lexi = query_leda_lyon(pgc_add[i])
        pgc_.append(pgc_add[i])
        ra_.append(leda_lexi[1])
        dec_.append(leda_lexi[2])
        ra000 = leda_lexi[1]
        l_.append(leda_lexi[3])
        b_.append(leda_lexi[4])
        sgl_.append(leda_lexi[5])
        sgb_.append(leda_lexi[6])
        d25_.append(0.1*(10**leda_lexi[7]))
        b_a_.append(1./(10**leda_lexi[8]))
        pa_.append(leda_lexi[9])
        ty_.append(leda_lexi[10])
        type_.append(leda_lexi[11])
        if pgc_add[i] in pgc_sdss:
            sdss_.append(1)
        else:
            sdss_.append(0)
        if pgc_add[i] in pgc_edd_alfa100:
            alfa100.append(1)
        else:
            alfa100.append(0)
        if QA_SDSS_DONE(pgc_add[i], ra000):
            QA.append(1)
        else: QA.append(0)
        if QA_WISE_DONE(pgc_add[i], ra000):
            QA_wise.append(1)
        else: QA_wise.append(0)
    else:
        # linear scan of the local LEDA arrays for the matching row
        for j in range(len(pgc_leda)):
            if pgc_leda[j] == pgc_add[i]:
                if not pgc_add[i] in pgc_:
                    pgc_.append(pgc_leda[j])
                    if True:
                        ra000 = ra_leda[j]
                        ra_.append(ra_leda[j])
                        dec_.append(dec_leda[j])
                        b_a_.append(1./(10**logr25_leda[j]))
                        ty_.append(ty_leda[j])
                        l_.append(l_leda[j])
                        b_.append(b_leda[j])
                        sgl_.append(sgl_leda[j])
                        sgb_.append(sgb_leda[j])
                        d25_.append(0.1*(10**logd25_leda[j]))
                        if pgc_add[i] in pgc_sdss:
                            sdss_.append(1)
                        else:
                            sdss_.append(0)
                        pa_.append(pa_leda[j])
                        type_.append(type_leda[j])
                        if pgc_leda[j] in pgc_edd_alfa100:
                            alfa100.append(1)
                        else:
                            alfa100.append(0)
                        if QA_SDSS_DONE(pgc_leda[j], ra000):
                            QA.append(1)
                        else:
                            QA.append(0)
                        if QA_WISE_DONE(pgc_leda[j], ra000):
                            QA_wise.append(1)
                        else:
                            QA_wise.append(0)
                break
#####################################################
print "Adding Types from the LEDA catalog"
# Convert all the parallel lists to numpy arrays ...
pgc_ = np.asarray(pgc_)
ra_ = np.asarray(ra_)
dec_ = np.asarray(dec_)
l_ = np.asarray(l_)
b_ = np.asarray(b_)
sgl_ = np.asarray(sgl_)
sgb_ = np.asarray(sgb_)
d25_ = np.asarray(d25_)
b_a_ = np.asarray(b_a_)
pa_ = np.asarray(pa_)
ty_ = np.asarray(ty_)
type_ = np.asarray(type_)
sdss_ = np.asarray(sdss_)
alfa100 = np.asarray(alfa100)
QA = np.asarray(QA)
QA_wise = np.asarray(QA_wise)
# ... and re-order everything by ascending PGC number.
index = np.argsort(pgc_)
pgc_ = pgc_[index]
ra_ = ra_[index]
dec_ = dec_[index]
l_ = l_[index]
b_ = b_[index]
sgl_ = sgl_[index]
sgb_ = sgb_[index]
d25_ = d25_[index]
b_a_ = b_a_[index]
pa_ = pa_[index]
ty_ = ty_[index]
type_ = type_[index]
sdss_ = sdss_[index]
alfa100 = alfa100[index]
QA = QA[index]
QA_wise = QA_wise[index]

# Manual blacklist: these galaxies must not be treated as SDSS targets.
for i in range(len(pgc_)):
    gal = pgc_[i]
    if gal in [58411,58239,17170,1977897,9476]:
        sdss_[i] = 0
#####################################################################
print "Taking Care of inclinations ..."
# Measurer cohorts; note henri140860 appears in both C and D on purpose
# (TODO confirm) — getINC only tests membership, so duplication is harmless.
A_emails = ['rbtully1@gmail.com', 'mokelkea@hawaii.edu', 'jrl2014@hawaii.edu', 'dschoen@hawaii.edu', 'mi24@hawaii.edu', 'chuangj@hawaii.edu']
B_emails = ['ekourkchi@gmail.com', 's.eftekharzadeh@gmail.com', 'chasemu@hawaii.edu', 'adholtha@hawaii.edu', 'mka7@hawaii.edu', 'a.danesh61@gmail.com', 'helenecourtois33@gmail.com']
C_emails = ['cgrubner0@gmail.com', 'pascal.jouve@free.fr', 'dlsaintsorny@gmail.com', 'arnaud.ohet@gmail.com', 'hawaii@udrea.fr', 'henri140860@wanadoo.fr']
D_emails = ['henri140860@wanadoo.fr', 'claude.rene21@gmail.com', 'fredwallet@gmail.com', 'joannin.lycee@free.fr', 'bevig434@gmail.com', 'echarraix69@gmail.com']
incDic = getINC(include_Email=A_emails+B_emails+C_emails+D_emails)

print "Taking Care of flags ..."
# Root directories of the local SDSS and WISE data trees.
location_sdss = '/home/ehsan/db_esn/cf4_sdss/data/'
location_wise = '/home/ehsan/db_esn/cf4_wise/data/'
# Pre-allocate one output slot per galaxy.  'a1'/'a4'/'a100' are
# fixed-width byte-string dtypes (QA flags, source tag, free-text notes).
N = len(pgc_)
Squality = np.zeros(N)
Wquality = np.zeros(N)
disturbed = np.zeros((N,), dtype='a1')
trail = np.zeros((N,), dtype='a1')
not_spiral = np.zeros((N,), dtype='a1')
face_on = np.zeros((N,), dtype='a1')
faint = np.zeros((N,), dtype='a1')
crowded = np.zeros((N,), dtype='a1')
over_masked = np.zeros((N,), dtype='a1')
fov = np.zeros((N,), dtype='a1')
multiple = np.zeros((N,), dtype='a1')
bright_star = np.zeros((N,), dtype='a1')
uncertain = np.zeros((N,), dtype='a1')
note = np.zeros((N,), dtype='a100')
source = np.zeros((N,), dtype='a4')

# SDSS u,g,r,i,z: asymptotic magnitudes, fitted axis ratio / PA,
# asymptotic radii and Galactic extinctions.
uu_mag = np.zeros((N,))
gg_mag = np.zeros((N,))
rr_mag = np.zeros((N,))
ii_mag = np.zeros((N,))
zz_mag = np.zeros((N,))
Sba = np.zeros((N,))
Spa = np.zeros((N,))
u_Rasy = np.zeros((N,))
g_Rasy = np.zeros((N,))
r_Rasy = np.zeros((N,))
i_Rasy = np.zeros((N,))
z_Rasy = np.zeros((N,))
A_u = np.zeros((N,))
A_g = np.zeros((N,))
A_r = np.zeros((N,))
A_i = np.zeros((N,))
A_z = np.zeros((N,))
ebv = np.zeros((N,))

# SDSS per-band surface-brightness / structural quantities
# (see get_scales for the field meanings).
mu0_u = np.zeros((N,))
mu0_g = np.zeros((N,))
mu0_r = np.zeros((N,))
mu0_i = np.zeros((N,))
mu0_z = np.zeros((N,))
mu50_u = np.zeros((N,))
mu50_g = np.zeros((N,))
mu50_r = np.zeros((N,))
mu50_i = np.zeros((N,))
mu50_z = np.zeros((N,))
mu90_u = np.zeros((N,))
mu90_g = np.zeros((N,))
mu90_r = np.zeros((N,))
mu90_i = np.zeros((N,))
mu90_z = np.zeros((N,))
m255_u = np.zeros((N,))
m255_g = np.zeros((N,))
m255_r = np.zeros((N,))
m255_i = np.zeros((N,))
m255_z = np.zeros((N,))
disc_mu0_u = np.zeros((N,))
disc_mu0_g = np.zeros((N,))
disc_mu0_r = np.zeros((N,))
disc_mu0_i = np.zeros((N,))
disc_mu0_z = np.zeros((N,))
SLh_u = np.zeros((N,))
SLh_g = np.zeros((N,))
SLh_r = np.zeros((N,))
SLh_i = np.zeros((N,))
SLh_z = np.zeros((N,))
R50_u = np.zeros((N,))
R50_g = np.zeros((N,))
R50_r = np.zeros((N,))
R50_i = np.zeros((N,))
R50_z = np.zeros((N,))
R90_u = np.zeros((N,))
R90_g = np.zeros((N,))
R90_r = np.zeros((N,))
R90_i = np.zeros((N,))
R90_z = np.zeros((N,))
R255_u = np.zeros((N,))
R255_g = np.zeros((N,))
R255_r = np.zeros((N,))
R255_i = np.zeros((N,))
R255_z = np.zeros((N,))
Cntion_u = np.zeros((N,))
Cntion_g = np.zeros((N,))
Cntion_r = np.zeros((N,))
Cntion_i = np.zeros((N,))
Cntion_z = np.zeros((N,))
d_m_ext_u = np.zeros((N,))
d_m_ext_g = np.zeros((N,))
d_m_ext_r = np.zeros((N,))
d_m_ext_i = np.zeros((N,))
d_m_ext_z = np.zeros((N,))

# WISE W1/W2 counterparts of the same quantities.
w1_mag = np.zeros((N,))
w2_mag = np.zeros((N,))
Wba = np.zeros((N,))
Wpa = np.zeros((N,))
w1_Rasy = np.zeros((N,))
w2_Rasy = np.zeros((N,))
A_w1 = np.zeros((N,))
A_w2 = np.zeros((N,))
mu0_w1 = np.zeros((N,))
mu50_w1 = np.zeros((N,))
mu90_w1 = np.zeros((N,))
m255_w1 = np.zeros((N,))
disc_mu0_w1 = np.zeros((N,))
SLh_w1 = np.zeros((N,))
R50_w1 = np.zeros((N,))
R90_w1 = np.zeros((N,))
R255_w1 = np.zeros((N,))
Cntion_w1 = np.zeros((N,))
mu0_w2 = np.zeros((N,))
mu50_w2 = np.zeros((N,))
mu90_w2 = np.zeros((N,))
m255_w2 = np.zeros((N,))
disc_mu0_w2 = np.zeros((N,))
SLh_w2 = np.zeros((N,))
R50_w2 = np.zeros((N,))
R90_w2 = np.zeros((N,))
R255_w2 = np.zeros((N,))
Cntion_w2 = np.zeros((N,))
d_m_ext_w1 = np.zeros((N,))
d_m_ext_w2 = np.zeros((N,))

# Combined inclination results (filled from incDic / incMedian below).
inc = np.zeros((N,))
inc_e = np.zeros((N,))
inc_flg = np.zeros((N,))
inc_note = np.zeros((N,), dtype='a100')
inc_n = np.zeros((N,))
for i in range(len(pgc_)):
## inclination
if pgc_[i] in incDic:
inc[i], inc_e[i], inc_flg[i], inc_note[i], inc_n[i]= incMedian(incDic[pgc_[i]])
radb = ra_db(ra_[i])
pgcname = 'pgc'+str(pgc_[i])
qa_txt_sdss = location_sdss + radb + '/sdss/fits/' + pgcname+'_qa.txt'
photometry_sdss = location_sdss + radb +'/photometry/'+pgcname
##################################################################### Taking care of photometry results
if os.path.exists(qa_txt_sdss):
ebv[i] = -9.99
Squality[i] = get_quality(qa_txt_sdss)
if os.path.exists(photometry_sdss+'_u_asymptotic.dat'):
uu_mag[i] = get_mag(photometry_sdss+'_u_asymptotic.dat')
u_Rasy[i] = get_mag(photometry_sdss+'_u_asymptotic.dat', index=1)/60.
A_u[i] = get_mag(photometry_sdss+'_u_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_sdss+'_u.scales.dat'):
ebv[i], A_u[i], mu0_u[i], mu50_u[i], mu90_u[i], m255_u[i], disc_mu0_u[i], SLh_u[i], R50_u[i], R90_u[i], R255_u[i], Cntion_u[i], d_m_ext_u[i] = get_scales(photometry_sdss+'_u.scales.dat')
if os.path.exists(photometry_sdss+'_g_asymptotic.dat'):
gg_mag[i] = get_mag(photometry_sdss+'_g_asymptotic.dat')
g_Rasy[i] = get_mag(photometry_sdss+'_g_asymptotic.dat', index=1)/60.
A_g[i] = get_mag(photometry_sdss+'_g_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_sdss+'_g.scales.dat'):
ebv[i], A_g[i], mu0_g[i], mu50_g[i], mu90_g[i], m255_g[i], disc_mu0_g[i], SLh_g[i], R50_g[i], R90_g[i], R255_g[i], Cntion_g[i], d_m_ext_g[i] = get_scales(photometry_sdss+'_g.scales.dat')
if os.path.exists(photometry_sdss+'_r_asymptotic.dat'):
rr_mag[i] = get_mag(photometry_sdss+'_r_asymptotic.dat')
r_Rasy[i] = get_mag(photometry_sdss+'_r_asymptotic.dat', index=1)/60.
A_r[i] = get_mag(photometry_sdss+'_r_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_sdss+'_r.scales.dat'):
ebv[i], A_r[i], mu0_r[i], mu50_r[i], mu90_r[i], m255_r[i], disc_mu0_r[i], SLh_r[i], R50_r[i], R90_r[i], R255_r[i], Cntion_r[i], d_m_ext_r[i] = get_scales(photometry_sdss+'_r.scales.dat')
if os.path.exists(photometry_sdss+'_i_asymptotic.dat'):
ii_mag[i] = get_mag(photometry_sdss+'_i_asymptotic.dat')
i_Rasy[i] = get_mag(photometry_sdss+'_i_asymptotic.dat', index=1)/60.
A_i[i] = get_mag(photometry_sdss+'_i_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_sdss+'_i.scales.dat'):
ebv[i], A_i[i], mu0_i[i], mu50_i[i], mu90_i[i], m255_i[i], disc_mu0_i[i], SLh_i[i], R50_i[i], R90_i[i], R255_i[i], Cntion_i[i], d_m_ext_i[i] = get_scales(photometry_sdss+'_i.scales.dat')
if os.path.exists(photometry_sdss+'_z_asymptotic.dat'):
zz_mag[i] = get_mag(photometry_sdss+'_z_asymptotic.dat')
z_Rasy[i] = get_mag(photometry_sdss+'_z_asymptotic.dat', index=1)/60.
A_z[i] = get_mag(photometry_sdss+'_z_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_sdss+'_z.scales.dat'):
ebv[i], A_z[i], mu0_z[i], mu50_z[i], mu90_z[i], m255_z[i], disc_mu0_z[i], SLh_z[i], R50_z[i], R90_z[i], R255_z[i], Cntion_z[i], d_m_ext_z[i] = get_scales(photometry_sdss+'_z.scales.dat')
ellipsefile = location_sdss + radb +'/photometry/'+pgcname+'_g_ellipsepar.dat'
if os.path.exists(ellipsefile):
ra_cen, dec_cen, semimajor, semiminor, PA = get_ellipse(ellipsefile)
Sba[i] = min([semimajor,semiminor])/max([semiminor,semimajor])
Spa[i] = PA
ellipsefile = location_sdss + radb +'/photometry/'+pgcname+'_r_ellipsepar.dat'
if os.path.exists(ellipsefile):
ra_cen, dec_cen, semimajor, semiminor, PA = get_ellipse(ellipsefile)
Sba[i] = min([semimajor,semiminor])/max([semiminor,semimajor])
Spa[i] = PA
ellipsefile = location_sdss + radb +'/photometry/'+pgcname+'_i_ellipsepar.dat'
if os.path.exists(ellipsefile):
ra_cen, dec_cen, semimajor, semiminor, PA = get_ellipse(ellipsefile)
Sba[i] = min([semimajor,semiminor])/max([semiminor,semimajor])
Spa[i] = PA
if pgc_[i] in wise_pgc:
i_lst = np.where(wise_pgc == pgc_[i])
galname = wise_name[i_lst][0]
qa_txt_wise = location_wise + radb + '/wise/fits/' + galname+'_qa.txt'
if not os.path.exists(qa_txt_wise):
galname = 'pgc'+str(pgc_[i])
else:
galname = 'pgc'+str(pgc_[i])
qa_txt_wise = location_wise + radb + '/wise/fits/' + galname+'_qa.txt'
photometry_wise = location_wise + radb +'/photometry/'+galname
if os.path.exists(qa_txt_wise):
tmp = -9.99
Wquality[i] = get_quality(qa_txt_wise)
if os.path.exists(photometry_wise+'_w1_asymptotic.dat'):
w1_mag[i] = get_mag_wise(photometry_wise+'_w1_asymptotic.dat')
w1_Rasy[i] = get_mag_wise(photometry_wise+'_w1_asymptotic.dat', index=1)/60.
A_w1[i] = get_mag_f(photometry_wise+'_w1_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_wise+'_w1.scales.dat'):
tmp, A_w1[i], mu0_w1[i], mu50_w1[i], mu90_w1[i], m255_w1[i], disc_mu0_w1[i], SLh_w1[i], R50_w1[i], R90_w1[i], R255_w1[i], Cntion_w1[i], d_m_ext_w1[i] = get_scales(photometry_wise+'_w1.scales.dat')
if os.path.exists(photometry_wise+'_w2_asymptotic.dat'):
w2_mag[i] = get_mag_wise(photometry_wise+'_w2_asymptotic.dat')
w2_Rasy[i] = get_mag_wise(photometry_wise+'_w2_asymptotic.dat', index=1)/60.
A_w2[i] = get_mag_f(photometry_wise+'_w2_asymptotic.dat', header='A_Gal:')
if os.path.exists(photometry_wise+'_w2.scales.dat'):
tmp, A_w2[i], mu0_w2[i], mu50_w2[i], mu90_w2[i], m255_w2[i], disc_mu0_w2[i], SLh_w2[i], R50_w2[i], R90_w2[i], R255_w2[i], Cntion_w2[i], d_m_ext_w2[i] = get_scales(photometry_wise+'_w2.scales.dat')
if ebv[i]<0 and tmp>0:
ebv[i] = tmp
ellipsefile = location_wise + radb +'/photometry/'+galname+'_w2_ellipsepar.dat'
if os.path.exists(ellipsefile):
ra_cen, dec_cen, semimajor, semiminor, PA = get_ellipse_wise(ellipsefile)
Wba[i] = min([semimajor,semiminor])/max([semiminor,semimajor])
Wpa[i] = PA
ellipsefile = location_wise + radb +'/photometry/'+galname+'_w1_ellipsepar.dat'
if os.path.exists(ellipsefile):
ra_cen, dec_cen, semimajor, semiminor, PA = get_ellipse_wise(ellipsefile)
Wba[i] = min([semimajor,semiminor])/max([semiminor,semimajor])
Wpa[i] = PA
##################################################################### Taking care of flags
found = False
if os.path.exists(qa_txt_sdss):
qa_txt = qa_txt_sdss
found = True
source[i] = 'SDSS'
else:
if QA_wise[i]==1:
if pgc_[i] in wise_pgc:
i_lst = np.where(wise_pgc == pgc_[i])
galname = wise_name[i_lst][0]
qa_txt_wise = location_wise + radb + '/wise/fits/' + galname+'_qa.txt'
if not os.path.exists(qa_txt_wise):
galname = 'pgc'+str(pgc_[i])
else:
galname = 'pgc'+str(pgc_[i])
qa_txt_wise = location_wise + radb + '/wise/fits/' + galname+'_qa.txt'
if os.path.exists(qa_txt_wise):
qa_txt = qa_txt_wise
found = True
source[i] = 'WISE'
else:
#print galname
#print galname, ra[i], dec[i], d25[i], d25[i]*b_a[i], PA[i], Ty[i]
source[i] = 'NONE'
if found:
if get_quality(qa_txt, nline=41)==1: disturbed[i]='D'
if get_quality(qa_txt, nline=42)==1: trail[i]='L'
if get_quality(qa_txt, nline=43)==1: not_spiral[i]='P'
if get_quality(qa_txt, nline=44)==1: face_on[i]='F'
if get_quality(qa_txt, nline=45)==1: faint[i]='N'
if get_quality(qa_txt, nline=46)==1: crowded[i]='C'
if get_quality(qa_txt, nline=47)==1: over_masked[i]='O'
if get_quality(qa_txt, nline=20)==1: fov[i]='V'
if get_quality(qa_txt, nline=19)==1: multiple[i]='M'
if get_quality(qa_txt, nline=18)==1: bright_star[i]='B'
if get_quality(qa_txt, nline=17)==1: uncertain[i]='U'
note[i]= read_note(qa_txt)
#####################################################################
myTable = Table()
myTable.add_column(Column(data=pgc_, name='pgc'))
myTable.add_column(Column(data=ra_, name='ra', format='%0.4f'))
myTable.add_column(Column(data=dec_, name='dec', format='%0.4f'))
myTable.add_column(Column(data=l_, name='gl', format='%0.4f'))
myTable.add_column(Column(data=b_, name='gb', format='%0.4f'))
myTable.add_column(Column(data=sgl_, name='sgl', format='%0.4f'))
myTable.add_column(Column(data=sgb_, name='sgb', format='%0.4f'))
myTable.add_column(Column(data=d25_, name='d25', format='%0.2f'))
myTable.add_column(Column(data=b_a_, name='b_a', format='%0.2f'))
myTable.add_column(Column(data=pa_, name='pa', format='%0.1f'))
myTable.add_column(Column(data=ty_, name='ty', format='%0.1f'))
myTable.add_column(Column(data=type_, name='type'))
myTable.add_column(Column(data=sdss_, name='sdss'))
myTable.add_column(Column(data=alfa100, name='alfa100'))
myTable.add_column(Column(data=QA, name='QA_sdss'))
myTable.add_column(Column(data=QA_wise, name='QA_wise'))
myTable.add_column(Column(data=ebv, name='ebv', format='%0.3f'))
myTable.add_column(Column(data=uu_mag, name='u_mag', format='%0.2f'))
myTable.add_column(Column(data=gg_mag, name='g_mag', format='%0.2f'))
myTable.add_column(Column(data=rr_mag, name='r_mag', format='%0.2f'))
myTable.add_column(Column(data=ii_mag, name='i_mag', format='%0.2f'))
myTable.add_column(Column(data=zz_mag, name='z_mag', format='%0.2f'))
myTable.add_column(Column(data=u_Rasy, name='u_Rasy', format='%0.2f'))
myTable.add_column(Column(data=g_Rasy, name='g_Rasy', format='%0.2f'))
myTable.add_column(Column(data=r_Rasy, name='r_Rasy', format='%0.2f'))
myTable.add_column(Column(data=i_Rasy, name='i_Rasy', format='%0.2f'))
myTable.add_column(Column(data=z_Rasy, name='z_Rasy', format='%0.2f'))
myTable.add_column(Column(data=A_u, name='A_u', format='%0.3f'))
myTable.add_column(Column(data=A_g, name='A_g', format='%0.3f'))
myTable.add_column(Column(data=A_r, name='A_r', format='%0.3f'))
myTable.add_column(Column(data=A_i, name='A_i', format='%0.3f'))
myTable.add_column(Column(data=A_z, name='A_z', format='%0.3f'))
myTable.add_column(Column(data=Sba, name='Sba', format='%0.2f'))
myTable.add_column(Column(data=Spa, name='Spa', format='%0.2f'))
myTable.add_column(Column(data=mu0_u, name='mu0_u', format='%0.2f'))
myTable.add_column(Column(data=mu0_g, name='mu0_g', format='%0.2f'))
myTable.add_column(Column(data=mu0_r, name='mu0_r', format='%0.2f'))
myTable.add_column(Column(data=mu0_i, name='mu0_i', format='%0.2f'))
myTable.add_column(Column(data=mu0_z, name='mu0_z', format='%0.2f'))
myTable.add_column(Column(data=mu50_u, name='mu50_u', format='%0.2f'))
myTable.add_column(Column(data=mu50_g, name='mu50_g', format='%0.2f'))
myTable.add_column(Column(data=mu50_r, name='mu50_r', format='%0.2f'))
myTable.add_column(Column(data=mu50_i, name='mu50_i', format='%0.2f'))
myTable.add_column(Column(data=mu50_z, name='mu50_z', format='%0.2f'))
myTable.add_column(Column(data=mu90_u, name='mu90_u', format='%0.2f'))
myTable.add_column(Column(data=mu90_g, name='mu90_g', format='%0.2f'))
myTable.add_column(Column(data=mu90_r, name='mu90_r', format='%0.2f'))
myTable.add_column(Column(data=mu90_i, name='mu90_i', format='%0.2f'))
myTable.add_column(Column(data=mu90_z, name='mu90_z', format='%0.2f'))
myTable.add_column(Column(data=m255_u, name='m255_u', format='%0.2f'))
myTable.add_column(Column(data=m255_g, name='m255_g', format='%0.2f'))
myTable.add_column(Column(data=m255_r, name='m255_r', format='%0.2f'))
myTable.add_column(Column(data=m255_i, name='m255_i', format='%0.2f'))
myTable.add_column(Column(data=m255_z, name='m255_z', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_u, name='disc_mu0_u', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_g, name='disc_mu0_g', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_r, name='disc_mu0_r', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_i, name='disc_mu0_i', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_z, name='disc_mu0_z', format='%0.2f'))
myTable.add_column(Column(data=SLh_u, name='h_u', format='%0.2f'))
myTable.add_column(Column(data=SLh_g, name='h_g', format='%0.2f'))
myTable.add_column(Column(data=SLh_r, name='h_r', format='%0.2f'))
myTable.add_column(Column(data=SLh_i, name='h_i', format='%0.2f'))
myTable.add_column(Column(data=SLh_z, name='h_z', format='%0.2f'))
myTable.add_column(Column(data=R50_u, name='R50_u', format='%0.2f'))
myTable.add_column(Column(data=R50_g, name='R50_g', format='%0.2f'))
myTable.add_column(Column(data=R50_r, name='R50_r', format='%0.2f'))
myTable.add_column(Column(data=R50_i, name='R50_i', format='%0.2f'))
myTable.add_column(Column(data=R50_z, name='R50_z', format='%0.2f'))
myTable.add_column(Column(data=R90_u, name='R90_u', format='%0.2f'))
myTable.add_column(Column(data=R90_g, name='R90_g', format='%0.2f'))
myTable.add_column(Column(data=R90_r, name='R90_r', format='%0.2f'))
myTable.add_column(Column(data=R90_i, name='R90_i', format='%0.2f'))
myTable.add_column(Column(data=R90_z, name='R90_z', format='%0.2f'))
myTable.add_column(Column(data=R255_u, name='R255_u', format='%0.2f'))
myTable.add_column(Column(data=R255_g, name='R255_g', format='%0.2f'))
myTable.add_column(Column(data=R255_r, name='R255_r', format='%0.2f'))
myTable.add_column(Column(data=R255_i, name='R255_i', format='%0.2f'))
myTable.add_column(Column(data=R255_z, name='R255_z', format='%0.2f'))
myTable.add_column(Column(data=Cntion_u, name='C82_u', format='%0.2f'))
myTable.add_column(Column(data=Cntion_g, name='C82_g', format='%0.2f'))
myTable.add_column(Column(data=Cntion_r, name='C82_r', format='%0.2f'))
myTable.add_column(Column(data=Cntion_i, name='C82_i', format='%0.2f'))
myTable.add_column(Column(data=Cntion_z, name='C82_z', format='%0.2f'))
myTable.add_column(Column(data=d_m_ext_u, name='d_m_ext_u', format='%0.4f'))
myTable.add_column(Column(data=d_m_ext_g, name='d_m_ext_g', format='%0.4f'))
myTable.add_column(Column(data=d_m_ext_r, name='d_m_ext_r', format='%0.4f'))
myTable.add_column(Column(data=d_m_ext_i, name='d_m_ext_i', format='%0.4f'))
myTable.add_column(Column(data=d_m_ext_z, name='d_m_ext_z', format='%0.4f'))
myTable.add_column(Column(data=w1_mag, name='w1_mag', format='%0.2f'))
myTable.add_column(Column(data=w2_mag, name='w2_mag', format='%0.2f'))
myTable.add_column(Column(data=w1_Rasy, name='w1_Rasy', format='%0.2f'))
myTable.add_column(Column(data=w2_Rasy, name='w2_Rasy', format='%0.2f'))
myTable.add_column(Column(data=A_w1, name='A_w1', format='%0.3f'))
myTable.add_column(Column(data=A_w2, name='A_w2', format='%0.3f'))
myTable.add_column(Column(data=Wba, name='Wba', format='%0.2f'))
myTable.add_column(Column(data=Wpa, name='Wpa', format='%0.2f'))
myTable.add_column(Column(data=mu0_w1, name='mu0_w1', format='%0.2f'))
myTable.add_column(Column(data=mu0_w2, name='mu0_w2', format='%0.2f'))
myTable.add_column(Column(data=mu50_w1, name='mu50_w1', format='%0.2f'))
myTable.add_column(Column(data=mu50_w2, name='mu50_w2', format='%0.2f'))
myTable.add_column(Column(data=mu90_w1, name='mu90_w1', format='%0.2f'))
myTable.add_column(Column(data=mu90_w2, name='mu90_w2', format='%0.2f'))
myTable.add_column(Column(data=m255_w1, name='m255_w1', format='%0.2f'))
myTable.add_column(Column(data=m255_w2, name='m255_w2', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_w1, name='disc_mu0_w1', format='%0.2f'))
myTable.add_column(Column(data=disc_mu0_w2, name='disc_mu0_w2', format='%0.2f'))
myTable.add_column(Column(data=SLh_w1, name='h_w1', format='%0.2f'))
myTable.add_column(Column(data=SLh_w2, name='h_w2', format='%0.2f'))
myTable.add_column(Column(data=R50_w1, name='R50_w1', format='%0.2f'))
myTable.add_column(Column(data=R50_w2, name='R50_w2', format='%0.2f'))
myTable.add_column(Column(data=R90_w1, name='R90_w1', format='%0.2f'))
myTable.add_column(Column(data=R90_w2, name='R90_w2', format='%0.2f'))
myTable.add_column(Column(data=R255_w1, name='R255_w1', format='%0.2f'))
myTable.add_column(Column(data=R255_w2, name='R255_w2', format='%0.2f'))
myTable.add_column(Column(data=Cntion_w1, name='C82_w1', format='%0.2f'))
myTable.add_column(Column(data=Cntion_w2, name='C82_w2', format='%0.2f'))
myTable.add_column(Column(data=d_m_ext_w1, name='d_m_ext_w1', format='%0.4f'))
myTable.add_column(Column(data=d_m_ext_w2, name='d_m_ext_w2', format='%0.4f'))
myTable.add_column(Column(data=Squality, name='Sqlt', dtype=np.dtype(int)))
myTable.add_column(Column(data=Wquality, name='Wqlt', dtype=np.dtype(int)))
#myTable.add_column(Column(data=source, name='source', dtype='S4'))
myTable.add_column(Column(data=disturbed, name='dst', dtype='S1'))
myTable.add_column(Column(data=trail, name='trl', dtype='S1'))
myTable.add_column(Column(data=not_spiral, name='nsp', dtype='S1'))
myTable.add_column(Column(data=face_on, name='fon', dtype='S1'))
myTable.add_column(Column(data=faint, name='fnt', dtype='S1'))
myTable.add_column(Column(data=crowded, name='cwd', dtype='S1'))
myTable.add_column(Column(data=over_masked, name='ovm', dtype='S1'))
myTable.add_column(Column(data=fov, name='fov', dtype='S1'))
myTable.add_column(Column(data=multiple, name='mlp', dtype='S1'))
myTable.add_column(Column(data=bright_star, name='bts', dtype='S1'))
myTable.add_column(Column(data=uncertain, name='unc', dtype='S1'))
myTable.add_column(Column(data=note, name='note', dtype='S100'))
myTable.add_column(Column(data=inc, name='inc', dtype=np.dtype(int)))
myTable.add_column(Column(data=inc_e, name='inc_e', dtype=np.dtype(int)))
myTable.add_column(Column(data=inc_flg, name='inc_flg', format='%1d'))
myTable.add_column(Column(data=inc_n, name='inc_n', format='%2d'))
myTable.add_column(Column(data=inc_note, name='inc_note', dtype='S100'))
myTable.write('EDD_distance_cf4_v22.csv', format='ascii.fixed_width',delimiter='|', bookend=False, overwrite=True)
| [
"ekourkchi@gmail.com"
] | ekourkchi@gmail.com |
1e92b2d38e080ce7e20b2e929c60746d9140b4ed | a18db39e7c392d9529f2cc4889e188310706b14f | /amadon_django/apps/amadon_app/migrations/0001_initial.py | 10c9629f6f602333a30d5560925895a7900a50db | [] | no_license | LeoKnox/amadon | d9f9d6234a3ebba12576115a02ee8ae222addb89 | f522f2c2e7ba655f8f11fe1984d49b9db799fbd3 | refs/heads/master | 2020-04-11T05:45:05.352715 | 2018-12-12T23:46:15 | 2018-12-12T23:46:15 | 161,558,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 595 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-12-12 20:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Shop',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('item', models.CharField(max_length=255)),
('price', models.FloatField()),
],
),
]
| [
"noreply@github.com"
] | LeoKnox.noreply@github.com |
df112d0a858197f8660c215b773b0b219f73d5c7 | 0c9e35012baf61ee678bc719588b8cb2ccbe449e | /product/migrations/0228_auto_20180502_0957.py | 8a9605683776d8c9b4c9a98061df35772b9ed2bd | [] | no_license | rickyakilimali/approeco | 6f0f62d57b6e5361b5c5dd473038f2999bac1413 | fd96ca6d70dabf20668d2a582c67e5d409a4a097 | refs/heads/master | 2018-09-21T12:44:27.414394 | 2018-06-06T16:35:40 | 2018-06-06T16:35:40 | 113,836,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 600 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-05-02 09:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0227_auto_20180502_0954'),
]
operations = [
migrations.AlterField(
model_name='productiontournage',
name='nombre_minute',
field=models.CharField(choices=[('0-10', '0-10'), ('10-20', '10-20'), ('20-40', '20-40'), ('40-80', '40-80')], max_length=20, verbose_name='DUREE DU TOURNAGE(MINUTE)'),
),
]
| [
"ricky.akilimali@approeco.net"
] | ricky.akilimali@approeco.net |
6160f5c334c7db26952d00d3e77126d97da0f263 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/SevenTeV/RSGravitonToWW_kMpl01_M_2000_TuneZ2_7TeV_pythia6_cff.py | 44240dee8666db40ad085eb0bf7cb53b891f710d | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 1,223 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.PythiaUEZ2Settings_cfi import *
generator = cms.EDFilter("Pythia6GeneratorFilter",
pythiaHepMCVerbosity = cms.untracked.bool(False),
maxEventsToPrint = cms.untracked.int32(0),
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(1),
comEnergy = cms.double(7000.0),
crossSection = cms.untracked.double(1.83e-3),
PythiaParameters = cms.PSet(
pythiaUESettingsBlock,
processParameters = cms.vstring(
'MSEL = 0',
'MSUB(391) = 1',
'MSUB(392) = 1',
'PMAS(347,1) = 2000',
'PARP(50) = 0.54', #0.54
'5000039:ALLOFF',
'5000039:ONIFANY 24',
),
parameterSets = cms.vstring(
'pythiaUESettings',
'processParameters')
)
)
configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('\$Revision: 1.1 $'),
name = cms.untracked.string('\$Source: /cvs/CMSSW/UserCode/hinzmann/production/RSGravitonToWW_kMpl01_M_2000_TuneZ2_7TeV_pythia6_cff.py,v $'),
annotation = cms.untracked.string('Fall2011 sample with PYTHIA6: RSG -> WW, TuneZ2')
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"sha1-45889d0b422ced4b08fb223eae59c585c331ccec@cern.ch"
] | sha1-45889d0b422ced4b08fb223eae59c585c331ccec@cern.ch |
a274df74a04971717b3273284e2577410940beae | cb4d2629eadfafb4ffbcea8087399acb4f35cf00 | /mycalendar/serializers.py | a44370c64247b8d713cd71a8eb00701e76792d99 | [] | no_license | rdahal35/django_fullcalendar | 576023fa348391082ee82b50e27772b9c11c7b47 | 35623f562642816b9f501ea1390a03d96c0d188a | refs/heads/master | 2022-12-13T12:32:33.098739 | 2018-08-13T10:28:35 | 2018-08-13T10:28:35 | 142,756,016 | 0 | 1 | null | 2022-12-08T02:19:24 | 2018-07-29T11:40:13 | JavaScript | UTF-8 | Python | false | false | 182 | py | from rest_framework import serializers
from .models import Event
class eventSerializer(serializers.ModelSerializer):
class Meta:
model = Event
fields = '__all__' | [
"rdahal35@gmail.com"
] | rdahal35@gmail.com |
a79bfdb21a8f6a40a5c9ace790f401e9e1725607 | 942ee5e8d54e8ebe9c5c841fbfdd1da652946944 | /1501-2000/1678.Goal Parser Interpretation.py | 4653442e30b404a724cb952adacb2f49bdd3b8e1 | [] | no_license | kaiwensun/leetcode | 0129c174457f32887fbca078fb448adce46dd89d | 6b607f4aae3a4603e61f2e2b7480fdfba1d9b947 | refs/heads/master | 2023-08-31T07:30:50.459062 | 2023-08-27T07:59:16 | 2023-08-27T07:59:16 | 57,526,914 | 69 | 9 | null | 2023-08-20T06:34:41 | 2016-05-01T05:37:29 | Python | UTF-8 | Python | false | false | 552 | py | class Solution(object):
def interpret(self, command):
"""
:type command: str
:rtype: str
"""
def tokenize(command):
i = 0
while i < len(command):
if command[i] == "G":
yield "G"
i += 1
elif command[i + 1] == ")":
yield "o"
i += 2
else:
yield "al"
i += 4
return "".join(token for token in tokenize(command))
| [
"skw_kevin@126.com"
] | skw_kevin@126.com |
68bc3d1726f4472b4ef0697e1b642b17f383590c | 9b77f1e31d5901924431a2a3164312cc346bde4f | /ADI31/ADI31/urls.py | 1fb5748d61a307a6ab1b1cf6b5f99e50ac4cc372 | [] | no_license | Adi19471/Djnago_Code-Daily | c2184bf21db5c8d4b3c4098fbd593e4949375ae8 | 03b1b70d3e187fe85eb24e88b7ef3391b14aa98c | refs/heads/master | 2023-08-14T14:36:36.144243 | 2021-09-20T12:52:46 | 2021-09-20T12:52:46 | 375,690,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | """ADI31 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('admin/', admin.site.urls),
path('',include('enroll.urls')),
]
| [
"akumatha@gmail.com"
] | akumatha@gmail.com |
1a7cb4284d1fd36b31dba154ea268d380e9ed4f4 | 0d1c1a216b01f6773e751691e9d3e10cc4f27d09 | /tensorflow/contrib/cudnn_rnn/python/kernel_tests/cudnn_rnn_ops_benchmark.py | 8d5ff341acd737162334c2f2a8f4c81db9db82d2 | [
"Apache-2.0"
] | permissive | abdo5520/tensorflow | 13c1496e7aa115bba06cda5fc9dc73ba9e4b1694 | 55b01593515817992821423fec19733bca91c918 | refs/heads/master | 2021-01-13T04:05:38.763884 | 2017-01-01T13:10:05 | 2017-01-01T13:10:05 | 77,894,045 | 0 | 1 | null | 2017-01-03T07:28:02 | 2017-01-03T07:28:02 | null | UTF-8 | Python | false | false | 6,958 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for Cudnn RNN models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
from tensorflow.contrib.rnn.python.ops import core_rnn
from tensorflow.contrib.rnn.python.ops import core_rnn_cell_impl
from tensorflow.contrib.rnn.python.ops import lstm_ops
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
flags.DEFINE_integer("batch_size", 64, "batch size.")
FLAGS = flags.FLAGS
class CudnnRNNBenchmark(test.Benchmark):
"""Benchmarks Cudnn LSTM and other related models.
"""
def _GetTestConfig(self):
return {
"large": {
"num_layers": 4,
"num_units": 1024,
"seq_length": 40,
"batch_size": 64,
},
"medium": {
"num_layers": 4,
"num_units": 512,
"seq_length": 30,
"batch_size": 64,
},
"small": {
"num_layers": 4,
"num_units": 128,
"seq_length": 20,
"batch_size": 64,
},
}
def _GetConfigDesc(self, config):
num_layers = config["num_layers"]
num_units = config["num_units"]
batch_size = config["batch_size"]
seq_length = config["seq_length"]
return "y%d_u%d_b%d_q%d" % (num_layers, num_units, batch_size, seq_length)
def _BenchmarkOp(self, op, desc):
burn_in_steps = 10
benchmark_steps = 40
with session.Session() as sess:
sess.run(variables.global_variables_initializer())
for i in xrange(burn_in_steps + benchmark_steps):
if i == burn_in_steps:
start_time = time.time()
sess.run(op)
total_time = time.time() - start_time
step_time = total_time / benchmark_steps
print("%s takes %.4f sec/step" % (desc, step_time))
self.report_benchmark(
name=desc, iters=benchmark_steps, wall_time=total_time)
def benchmarkCudnnLSTMTraining(self):
test_configs = self._GetTestConfig()
for config_name, config in test_configs.items():
config = test_configs[config_name]
num_layers = config["num_layers"]
num_units = config["num_units"]
batch_size = config["batch_size"]
seq_length = config["seq_length"]
with ops.Graph().as_default(), ops.device("/gpu:0"):
model = cudnn_rnn_ops.CudnnLSTM(num_layers, num_units, num_units)
params_size_t = model.params_size()
input_data = variables.Variable(
array_ops.ones([seq_length, batch_size, num_units]))
input_h = variables.Variable(
array_ops.ones([num_layers, batch_size, num_units]))
input_c = variables.Variable(
array_ops.ones([num_layers, batch_size, num_units]))
params = variables.Variable(
array_ops.ones([params_size_t]), validate_shape=False)
output, output_h, output_c = model(
is_training=True,
input_data=input_data,
input_h=input_h,
input_c=input_c,
params=params)
all_grads = gradients_impl.gradients(
[output, output_h, output_c],
[params, input_data, input_h, input_c])
training_op = control_flow_ops.group(*all_grads)
self._BenchmarkOp(training_op, "cudnn_lstm %s %s" %
(config_name, self._GetConfigDesc(config)))
def benchmarkTfRNNLSTMTraining(self):
test_configs = self._GetTestConfig()
for config_name, config in test_configs.items():
num_layers = config["num_layers"]
num_units = config["num_units"]
batch_size = config["batch_size"]
seq_length = config["seq_length"]
with ops.Graph().as_default(), ops.device("/gpu:0"):
inputs = seq_length * [
array_ops.zeros([batch_size, num_units], dtypes.float32)
]
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = core_rnn_cell_impl.LSTMCell(
num_units=num_units, initializer=initializer, state_is_tuple=True)
multi_cell = core_rnn_cell_impl.MultiRNNCell([cell] * num_layers)
outputs, final_state = core_rnn.static_rnn(
multi_cell, inputs, dtype=dtypes.float32)
trainable_variables = ops.get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients([outputs, final_state],
trainable_variables)
training_op = control_flow_ops.group(*gradients)
self._BenchmarkOp(training_op, "tf_rnn_lstm %s %s" %
(config_name, self._GetConfigDesc(config)))
def benchmarkTfRNNLSTMBlockCellTraining(self):
test_configs = self._GetTestConfig()
for config_name, config in test_configs.items():
num_layers = config["num_layers"]
num_units = config["num_units"]
batch_size = config["batch_size"]
seq_length = config["seq_length"]
with ops.Graph().as_default(), ops.device("/gpu:0"):
inputs = seq_length * [
array_ops.zeros([batch_size, num_units], dtypes.float32)
]
cell = lstm_ops.LSTMBlockCell(num_units=num_units)
multi_cell = core_rnn_cell_impl.MultiRNNCell([cell] * num_layers)
outputs, final_state = core_rnn.static_rnn(
multi_cell, inputs, dtype=dtypes.float32)
trainable_variables = ops.get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients([outputs, final_state],
trainable_variables)
training_op = control_flow_ops.group(*gradients)
self._BenchmarkOp(training_op, "tf_rnn_lstm_block_cell %s %s" %
(config_name, self._GetConfigDesc(config)))
if __name__ == "__main__":
test.main()
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
f1e42f777f4166260daa99290c2416e05ac6c882 | 4a81e33fe6d214f2efaeb97b03b5b05fae12b0d8 | /demos/great-expectations/venv/lib/python3.8/site-packages/notebook/services/kernels/tests/test_kernels_api.py | 38a9dfb00c7393a15d2daeec1fa2ce9f94e46e1d | [] | no_license | franciscojavierarceo/Python | 29aaea28642dde151255c5b4a813158e975a073d | 02715ca6f19fd3c76cefa12de92deeae4ddf9684 | refs/heads/main | 2023-08-27T14:23:04.376095 | 2023-08-27T10:30:37 | 2023-08-27T10:30:37 | 33,146,755 | 7 | 9 | null | 2023-02-16T06:40:35 | 2015-03-30T20:38:00 | Jupyter Notebook | UTF-8 | Python | false | false | 9,675 | py | """Test the kernels service API."""
import json
import time
from requests import HTTPError
from traitlets.config import Config
from tornado.httpclient import HTTPRequest
from tornado.ioloop import IOLoop
from tornado.websocket import websocket_connect
from unittest import SkipTest
from jupyter_client.kernelspec import NATIVE_KERNEL_NAME
from notebook.utils import url_path_join
from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error
try:
from jupyter_client import AsyncMultiKernelManager
async_testing_enabled = True
except ImportError:
async_testing_enabled = False
class KernelAPI:
"""Wrapper for kernel REST API requests"""
def __init__(self, request, base_url, headers):
self.request = request
self.base_url = base_url
self.headers = headers
def _req(self, verb, path, body=None):
response = self.request(verb,
url_path_join('api/kernels', path), data=body)
if 400 <= response.status_code < 600:
try:
response.reason = response.json()['message']
except:
pass
response.raise_for_status()
return response
def list(self):
return self._req('GET', '')
def get(self, id):
return self._req('GET', id)
def start(self, name=NATIVE_KERNEL_NAME):
body = json.dumps({'name': name})
return self._req('POST', '', body)
def shutdown(self, id):
return self._req('DELETE', id)
def interrupt(self, id):
return self._req('POST', url_path_join(id, 'interrupt'))
def restart(self, id):
return self._req('POST', url_path_join(id, 'restart'))
def websocket(self, id):
loop = IOLoop()
loop.make_current()
req = HTTPRequest(
url_path_join(self.base_url.replace('http', 'ws', 1), 'api/kernels', id, 'channels'),
headers=self.headers,
)
f = websocket_connect(req)
return loop.run_sync(lambda : f)
class KernelAPITest(NotebookTestBase):
    """Test the kernels web service API"""

    def setUp(self):
        """Create a KernelAPI client bound to this test server."""
        self.kern_api = KernelAPI(self.request,
                                  base_url=self.base_url(),
                                  headers=self.auth_headers(),
                                  )

    def tearDown(self):
        """Shut down every kernel a test left running."""
        for k in self.kern_api.list().json():
            self.kern_api.shutdown(k['id'])

    def test_no_kernels(self):
        """Make sure there are no kernels running at the start"""
        kernels = self.kern_api.list().json()
        self.assertEqual(kernels, [])

    def test_default_kernel(self):
        # POST request with no body: the server falls back to the default
        # kernelspec.
        r = self.kern_api._req('POST', '')
        kern1 = r.json()
        self.assertEqual(r.headers['location'], url_path_join(self.url_prefix, 'api/kernels', kern1['id']))
        self.assertEqual(r.status_code, 201)
        self.assertIsInstance(kern1, dict)

        # The response must carry the restrictive CSP header.
        report_uri = url_path_join(self.url_prefix, 'api/security/csp-report')
        expected_csp = '; '.join([
            "frame-ancestors 'self'",
            'report-uri ' + report_uri,
            "default-src 'none'"
        ])
        self.assertEqual(r.headers['Content-Security-Policy'], expected_csp)

    def test_main_kernel_handler(self):
        # POST request
        r = self.kern_api.start()
        kern1 = r.json()
        self.assertEqual(r.headers['location'], url_path_join(self.url_prefix, 'api/kernels', kern1['id']))
        self.assertEqual(r.status_code, 201)
        self.assertIsInstance(kern1, dict)

        report_uri = url_path_join(self.url_prefix, 'api/security/csp-report')
        expected_csp = '; '.join([
            "frame-ancestors 'self'",
            'report-uri ' + report_uri,
            "default-src 'none'"
        ])
        self.assertEqual(r.headers['Content-Security-Policy'], expected_csp)

        # GET request
        r = self.kern_api.list()
        self.assertEqual(r.status_code, 200)
        assert isinstance(r.json(), list)
        self.assertEqual(r.json()[0]['id'], kern1['id'])
        self.assertEqual(r.json()[0]['name'], kern1['name'])

        # create another kernel and check that they both are added to the
        # list of kernels from a GET request
        kern2 = self.kern_api.start().json()
        assert isinstance(kern2, dict)
        r = self.kern_api.list()
        kernels = r.json()
        self.assertEqual(r.status_code, 200)
        assert isinstance(kernels, list)
        self.assertEqual(len(kernels), 2)

        # Interrupt a kernel
        r = self.kern_api.interrupt(kern2['id'])
        self.assertEqual(r.status_code, 204)

        # Restart a kernel
        r = self.kern_api.restart(kern2['id'])
        rekern = r.json()
        # A restart keeps both the kernel id and its name.
        self.assertEqual(rekern['id'], kern2['id'])
        self.assertEqual(rekern['name'], kern2['name'])

    def test_kernel_handler(self):
        # GET kernel with given id
        kid = self.kern_api.start().json()['id']
        r = self.kern_api.get(kid)
        kern1 = r.json()
        self.assertEqual(r.status_code, 200)
        assert isinstance(kern1, dict)
        self.assertIn('id', kern1)
        self.assertEqual(kern1['id'], kid)

        # Request a bad kernel id and check that a JSON
        # message is returned!
        bad_id = '111-111-111-111-111'
        with assert_http_error(404, 'Kernel does not exist: ' + bad_id):
            self.kern_api.get(bad_id)

        # DELETE kernel with id
        r = self.kern_api.shutdown(kid)
        self.assertEqual(r.status_code, 204)
        kernels = self.kern_api.list().json()
        self.assertEqual(kernels, [])

        # Request to delete a non-existent kernel id
        bad_id = '111-111-111-111-111'
        with assert_http_error(404, 'Kernel does not exist: ' + bad_id):
            self.kern_api.shutdown(bad_id)

    def test_connections(self):
        """The kernel model must track the number of open websocket connections."""
        kid = self.kern_api.start().json()['id']
        model = self.kern_api.get(kid).json()
        self.assertEqual(model['connections'], 0)

        ws = self.kern_api.websocket(kid)
        model = self.kern_api.get(kid).json()
        self.assertEqual(model['connections'], 1)
        ws.close()

        # give it some time to close on the other side:
        for i in range(10):
            model = self.kern_api.get(kid).json()
            if model['connections'] > 0:
                time.sleep(0.1)
            else:
                break
        model = self.kern_api.get(kid).json()
        self.assertEqual(model['connections'], 0)
class AsyncKernelAPITest(KernelAPITest):
    """Run the kernel REST API tests against the AsyncMappingKernelManager."""

    @classmethod
    def setup_class(cls):
        # Async kernel management needs jupyter_client >= 6.1; without it,
        # skip the whole class instead of failing at server start-up.
        if async_testing_enabled:
            super().setup_class()
        else:
            raise SkipTest("AsyncKernelAPITest tests skipped due to down-level jupyter_client!")

    @classmethod
    def get_argv(cls):
        base_argv = super().get_argv()
        # Only request the async kernel manager when a capable
        # jupyter_client is installed; otherwise the sync-based tests
        # simply repeat unchanged.
        if async_testing_enabled:
            base_argv.append('--NotebookApp.kernel_manager_class='
                             'notebook.services.kernels.kernelmanager.AsyncMappingKernelManager')
        return base_argv
class KernelFilterTest(NotebookTestBase):
    """Notebook server where only ``kernel_info_request`` messages pass."""

    # Restrict the kernel message types the server will relay.
    config = Config({
        'NotebookApp': {
            'MappingKernelManager': {
                'allowed_message_types': ['kernel_info_request'],
            },
        },
    })

    def test_config(self):
        # Sanity check verifying that the configurable was properly set.
        allowed = self.notebook.kernel_manager.allowed_message_types
        self.assertEqual(allowed, ['kernel_info_request'])
# Culling parameters (seconds) shared by KernelCullingTest and the server
# command-line arguments it builds.
CULL_TIMEOUT = 5
CULL_INTERVAL = 1
class KernelCullingTest(NotebookTestBase):
    """Test kernel culling """

    @classmethod
    def get_argv(cls):
        argv = super().get_argv()

        # Enable culling with 5s timeout and 1s intervals
        argv.extend([f'--MappingKernelManager.cull_idle_timeout={CULL_TIMEOUT}',
                     f'--MappingKernelManager.cull_interval={CULL_INTERVAL}',
                     '--MappingKernelManager.cull_connected=False'])
        return argv

    def setUp(self):
        """Create a KernelAPI client bound to this test server."""
        self.kern_api = KernelAPI(self.request,
                                  base_url=self.base_url(),
                                  headers=self.auth_headers(),
                                  )

    def tearDown(self):
        """Shut down any kernels left running."""
        for k in self.kern_api.list().json():
            self.kern_api.shutdown(k['id'])

    def test_culling(self):
        """A kernel with no websocket connection should be culled; a connected one not."""
        kid = self.kern_api.start().json()['id']
        ws = self.kern_api.websocket(kid)
        model = self.kern_api.get(kid).json()
        self.assertEqual(model['connections'], 1)
        assert not self.get_cull_status(kid)  # connected, should not be culled
        ws.close()
        assert self.get_cull_status(kid)  # not connected, should be culled

    def get_cull_status(self, kid):
        """Poll until kernel *kid* is culled (GET returns 404) or the window elapses."""
        frequency = 0.5
        culled = False
        for _ in range(int((CULL_TIMEOUT + CULL_INTERVAL)/frequency)):  # Timeout + Interval will ensure cull
            try:
                self.kern_api.get(kid)
            except HTTPError as e:
                # A 404 means the kernel manager removed the kernel.
                assert e.response.status_code == 404
                culled = True
                break
            else:
                time.sleep(frequency)
        return culled
| [
"arceofrancisco@gmail.com"
] | arceofrancisco@gmail.com |
d8d3b761b789cce584a071b57e2e9664d41b9c7b | 7911da973079f325a515cd2ee66f7590a9f32e48 | /asci_char.py | 440a8d3234ddf3ac903b9fb83e313e80717b0991 | [] | no_license | Ponkiruthika112/Guvi | 5d2ff3dcf55d6c52c0f09a1e577d8b11632c7a92 | 319e5b4dab5654fabc25ef15c1d528f76d833c15 | refs/heads/master | 2020-04-21T06:05:03.581658 | 2018-08-02T05:53:48 | 2018-08-02T05:53:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | ip=input("Enter any character:")
print(ord(ip))  # show the code point (ASCII/Unicode ordinal) of the input
| [
"noreply@github.com"
] | Ponkiruthika112.noreply@github.com |
102be23096fba5b7196ac3e0c929f4ac9631cd95 | eb82022c0cfc7c8747661cff9624ad2099fa1c3f | /dev_bc/models/product_uom.py | 77461ba74a7add5a24d3a1415a7831e4131454f2 | [] | no_license | dadysuarsa/Odoo | 8d026a066c390cc8f72805d2672212e61260c1cb | c9becd0c192fa239520ad3e1a11d81f70832eddf | refs/heads/master | 2023-03-11T06:02:06.011575 | 2021-02-26T02:17:37 | 2021-02-26T02:17:37 | 276,346,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | from odoo import models, fields, api, _
import openerp.addons.decimal_precision as dp
from odoo.exceptions import UserError
class productuom(models.Model):
_inherit = 'product.uom'
name_bc = fields.Char('Unit of bc') | [
"dads02_zetti@yahoo.com"
] | dads02_zetti@yahoo.com |
294939bed03799b74ad9da32b6d03e81286ce9ed | 8a83bb7acb9b62183fca817e1f196dd8075630a4 | /24_fourthFolder/18_lazy_propagation.py | 4435a51def47f2397589dc377acdb639e7fffb40 | [] | no_license | sandeepkumar8713/pythonapps | ff5ad3da854aa58e60f2c14d27359f8b838cac57 | 5dcb5ad4873124fed2ec3a717bfa379a4bbd197d | refs/heads/main | 2023-09-01T04:12:03.865755 | 2023-08-31T07:04:58 | 2023-08-31T07:04:58 | 234,762,925 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,404 | py | # https://www.geeksforgeeks.org/lazy-propagation-in-segment-tree/
# Question : Write a function which takes start and end index and return their range sum. We
# have used the same "Sum of given Range" problem to explain Lazy propagation. He then asked
# range update in best optimised way. I told him Lazy propagation. I had to write code for
# segment tree with lazy propagation.
#
# Question Type : Generic
# Used : updateRange(us, ue)
# 1) If current segment tree node has any pending update, then first add that
# pending update to current node.
# 2) If current node's range lies completely in update query range.
# a) Update current node
# b) Postpone updates to children by setting lazy value for children nodes.
# 3) If current node's range overlaps with update range, follow the same approach as above simple update.
# a) Recur for left and right children.
# b) Update current node using results of left and right calls.
# Logic :
# def getSumUtil(self, segStart, segEnd, queryStart, queryEnd, index):
# if self.lazy[index] != 0:
# self.tree[index] += (segEnd - segStart + 1) * self.lazy[index]
# if segStart != segEnd:
# self.lazy[index * 2 + 1] += self.lazy[index]
# self.lazy[index * 2 + 2] += self.lazy[index]
# self.lazy[index] = 0
# if segStart > segEnd or segStart > queryEnd or segEnd < queryStart: return 0
# if segStart >= queryStart and segEnd <= queryEnd: return self.tree[index]
# mid = (segStart + segEnd) / 2
# return self.getSumUtil(segStart, mid, queryStart, queryEnd, index * 2 + 1) + \
# self.getSumUtil(mid + 1, segEnd, queryStart, queryEnd, index * 2 + 2)
#
# def updateRangeUtil(self, segStart, segEnd, queryStart, queryEnd, index, diff):
# if self.lazy[index] != 0:
# self.tree[index] += (segEnd - segStart + 1) * self.lazy[index]
# if segStart != segEnd:
# self.lazy[index * 2 + 1] += self.lazy[index]
# self.lazy[index * 2 + 2] += self.lazy[index]
# self.lazy[index] = 0
# No overlap b/w segment and query range
# if segStart > segEnd or segStart > queryEnd or segEnd < queryStart:
# return 0
# Query start/end is more than segment range
# if segStart >= queryStart and segEnd <= queryEnd:
# self.tree[index] += (segEnd - segStart + 1) * diff
# if segStart != segEnd:
# self.lazy[index * 2 + 1] += diff
# self.lazy[index * 2 + 2] += diff
# return
# mid = (segStart + segEnd) // 2
# self.updateRangeUtil(segStart, mid, queryStart, queryEnd, index * 2 + 1, diff)
# self.updateRangeUtil(mid + 1, segEnd, queryStart, queryEnd, index * 2 + 2, diff)
# self.tree[index] = self.tree[index * 2 + 1] + self.tree[index * 2 + 2]
# Complexity : Tree construction O(n) Update O(log n) Sum O(log n)
MAX = 1000  # capacity of the flattened tree/lazy arrays


class LazySegmentTree:
    """Sum segment tree supporting lazy range increments and range-sum queries."""

    def __init__(self):
        self.tree = [0] * MAX  # node sums
        self.lazy = [0] * MAX  # pending increments not yet pushed to children
        self.size = -1         # number of leaves; set by construct()

    def constructUtil(self, inpArr, segStart, segEnd, index):
        """Recursively build the node at *index* covering inpArr[segStart..segEnd]."""
        if segStart > segEnd:
            return

        if segStart == segEnd:
            self.tree[index] = inpArr[segStart]
            return

        mid = (segStart + segEnd) // 2
        self.constructUtil(inpArr, segStart, mid, index * 2 + 1)
        self.constructUtil(inpArr, mid + 1, segEnd, index * 2 + 2)
        # Bug fix: the parent's sum belongs at tree[index]; the previous
        # code stored it at tree[segStart], corrupting internal nodes.
        self.tree[index] = self.tree[index * 2 + 1] + self.tree[index * 2 + 2]

    def construct(self, inpArr):
        """Build the tree over *inpArr*."""
        self.size = len(inpArr)
        # Bug fix: build from the parameter, not the module-level 'arr'.
        self.constructUtil(inpArr, 0, self.size - 1, 0)

    def getSumUtil(self, segStart, segEnd, queryStart, queryEnd, index):
        """Return the sum over the overlap of [queryStart, queryEnd] with this node."""
        # Apply any pending update to this node before reading it, and
        # defer the same update to the children via their lazy slots.
        if self.lazy[index] != 0:
            self.tree[index] += (segEnd - segStart + 1) * self.lazy[index]
            if segStart != segEnd:
                self.lazy[index * 2 + 1] += self.lazy[index]
                self.lazy[index * 2 + 2] += self.lazy[index]
            self.lazy[index] = 0

        # No overlap between this segment and the query range.
        if segStart > segEnd or segStart > queryEnd or segEnd < queryStart:
            return 0

        # This segment lies completely inside the query range.
        if segStart >= queryStart and segEnd <= queryEnd:
            return self.tree[index]

        mid = (segStart + segEnd) // 2
        return self.getSumUtil(segStart, mid, queryStart, queryEnd, index * 2 + 1) + \
            self.getSumUtil(mid + 1, segEnd, queryStart, queryEnd, index * 2 + 2)

    def getSum(self, queryStart, queryEnd):
        """Return the sum of elements in [queryStart, queryEnd], or -1 if invalid."""
        if queryStart < 0 or queryEnd > self.size - 1 or queryStart > queryEnd:
            print("Invalid Input")
            return -1
        return self.getSumUtil(0, self.size - 1, queryStart, queryEnd, 0)

    def updateRangeUtil(self, segStart, segEnd, queryStart, queryEnd, index, diff):
        """Add *diff* to every element of [queryStart, queryEnd] under this node."""
        # Settle any pending update on this node first.
        if self.lazy[index] != 0:
            self.tree[index] += (segEnd - segStart + 1) * self.lazy[index]
            if segStart != segEnd:
                self.lazy[index * 2 + 1] += self.lazy[index]
                self.lazy[index * 2 + 2] += self.lazy[index]
            self.lazy[index] = 0

        # No overlap between this segment and the update range.
        if segStart > segEnd or segStart > queryEnd or segEnd < queryStart:
            return

        # Fully covered: update this node and postpone the children's
        # updates by marking their lazy slots.
        if segStart >= queryStart and segEnd <= queryEnd:
            self.tree[index] += (segEnd - segStart + 1) * diff
            if segStart != segEnd:
                self.lazy[index * 2 + 1] += diff
                self.lazy[index * 2 + 2] += diff
            return

        mid = (segStart + segEnd) // 2
        self.updateRangeUtil(segStart, mid, queryStart, queryEnd, index * 2 + 1, diff)
        self.updateRangeUtil(mid + 1, segEnd, queryStart, queryEnd, index * 2 + 2, diff)
        self.tree[index] = self.tree[index * 2 + 1] + self.tree[index * 2 + 2]

    def updateRange(self, queryStart, queryEnd, diff):
        """Add *diff* to every element in [queryStart, queryEnd]."""
        self.updateRangeUtil(0, self.size - 1, queryStart, queryEnd, 0, diff)
if __name__ == "__main__":
    # Demo: range-sum query before and after a lazy range update.
    arr = [1, 3, 5, 7, 9, 11]
    lazySegmentTree = LazySegmentTree()
    lazySegmentTree.construct(arr)
    print(lazySegmentTree.getSum(1, 3))
    lazySegmentTree.updateRange(1, 5, 10)
    print(lazySegmentTree.getSum(1, 3))
| [
"sandeepkumar8713@gmail.com"
] | sandeepkumar8713@gmail.com |
570f37233f49a95dccabcfab99c73b34aed7b8a1 | fe488ec29223d32d0d94295e838517b7e8cf9c7d | /ghidra/scripts/find_duplicate_functions.py | eefaf1e3448e6d38d520887ec12f49284de1acca | [
"MIT"
] | permissive | qeedquan/debug | 4ad1fd9c2f484190a0a64725653e47172e7595c6 | aadeb3351f832bbd7210f0512037c93e48153062 | refs/heads/master | 2023-05-10T17:06:50.463693 | 2023-05-01T01:33:39 | 2023-05-01T01:33:39 | 87,041,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | #@author
#@category _NEW_
#@keybinding
#@menupath
#@toolbar
class Func:
    """A Ghidra function together with the data used for duplicate detection."""

    def __init__(self):
        # All fields are filled in later by getfuncs().
        self.hash = ""    # concatenated instruction mnemonics
        self.body = []    # code units making up the function body
        self.func = None  # the underlying Ghidra Function
def getfuncs():
    """Collect every function in the program with a mnemonic-only signature.

    Relies on the Ghidra script globals (currentProgram, getFirstFunction,
    getFunctionAfter).  Each Func's 'hash' is the concatenation of the
    mnemonic (first token) of every code unit in the function body, so
    functions differing only in operands compare equal.
    """
    listing = currentProgram.getListing()
    result = []
    func = getFirstFunction()
    while func is not None:
        f = Func()
        f.func = func
        f.body = list(listing.getCodeUnits(func.getBody(), True))
        for op in f.body:
            # Keep only the mnemonic, dropping operands.
            f.hash += op.toString().split(" ")[0]
        result.append(f)
        func = getFunctionAfter(func)
    return result
def getdiffs(funcs):
    """Group functions by mnemonic signature and print each duplicate group."""
    groups = {}
    for entry in funcs:
        groups.setdefault(entry.hash, []).append(entry.func)
    for signature in groups:
        matches = groups[signature]
        if len(matches) > 1:
            print(matches)
# Script entry point: report every group of functions sharing a signature.
getdiffs(getfuncs())
| [
"qeed.quan@gmail.com"
] | qeed.quan@gmail.com |
2c8eb5561dfa8bcd89c70ae82192ec5011775a7f | 1f190e0290513ede543c370b0428dff8079e32ed | /clusters/haswell/submit_dalton.py | 7010c4b656d761c5a57f7b5154d2253d5287f643 | [] | no_license | Computational-Chemistry-Research/personal_scripts | 535062bc402088d1fd0ccbabae906eb6e7a65e84 | 0b51032582a2ee55b06a150009bb898e2b976606 | refs/heads/master | 2023-02-03T10:02:18.612244 | 2020-12-25T15:20:37 | 2020-12-25T15:20:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,882 | py | #!/usr/bin/env python
"""submit_dalton.py: A standalone script for submitting DALTON jobs to
Haswell's SLURM scheduler.
"""
def template_slurmfile_dalton(inpfile, ppn, time, extrafiles):
    """The template for a SLURM jobfile that calls DALTON."""
    copy_string_template = 'cp "$SLURM_SUBMIT_DIR"/{} "$LOCAL"\n'
    # Normalize *extrafiles* (None, a single name, or a list of names) into
    # the block of `cp` lines spliced into the job script.
    if extrafiles is None:
        joined_extrafiles = ""
    elif isinstance(extrafiles, list):
        joined_extrafiles = "".join(
            copy_string_template.format(extrafile) for extrafile in extrafiles)
    else:
        joined_extrafiles = copy_string_template.format(extrafiles)
    module = 'dalton/2016.2-i2017.1-mkl_parallel-omp'
    return '''#!/bin/bash
#SBATCH --job-name={inpfile}
#SBATCH --output={inpfile}.slurmout
#SBATCH --nodes=1
#SBATCH --ntasks-per-node={ppn}
#SBATCH --time=0-{time}:00:00
module purge
module load intel/2017.1.132
module load mkl/2017.1.132
module load {module}
mkdir -p "$LOCAL"
cp "$SLURM_SUBMIT_DIR"/{inpfile}.dal "$LOCAL"
{extrafiles}cd "$LOCAL"
run_on_exit() {{
    set -v
    find "$LOCAL" -type f -exec chmod 644 '{{}}' \;
    cp -v -R "$LOCAL"/DALTON_scratch_{username}/* "$SLURM_SUBMIT_DIR"
}}
trap run_on_exit EXIT
$(which dalton) -omp {ppn} -noarch -nobackup -d -ow -w "$SLURM_SUBMIT_DIR" {inpfile}.dal
chmod 644 "$SLURM_SUBMIT_DIR"/{inpfile}.out
'''.format(inpfile=inpfile,
           ppn=ppn,
           time=time,
           module=module,
           username=os.environ['USER'],
           extrafiles=joined_extrafiles)
if __name__ == "__main__":
    import argparse
    import os.path

    parser = argparse.ArgumentParser()
    parser.add_argument('inpfilename',
                        help='the DALTON input file to submit',
                        nargs='*')
    parser.add_argument('--ppn',
                        type=int,
                        default=12,
                        help='number of cores to run on (max 12)')
    parser.add_argument('--time',
                        type=int,
                        default=24,
                        help='walltime to reserve (max 144 hours)')
    parser.add_argument('--extrafiles',
                        help='An arbitrary number of files to copy to $LOCAL.',
                        nargs='*')
    args = parser.parse_args()

    for inpfilename in args.inpfilename:
        # Strip the extension so the .dal and .slurm files share a stem.
        inpfilename = os.path.splitext(inpfilename)[0]
        slurmfilename = inpfilename + '.slurm'
        with open(slurmfilename, 'w') as slurmfile:
            slurmfile.write(template_slurmfile_dalton(inpfilename,
                                                      args.ppn,
                                                      args.time,
                                                      args.extrafiles))
        # Echo the jobfile name so callers can pipe it to sbatch.
        print(slurmfilename)
| [
"eric.berquist@gmail.com"
] | eric.berquist@gmail.com |
a963122d803d9c95d2f4da26529d3c3263e17c97 | 8935286746ba7d98e69f28343498a20303b8fbef | /tests/problem_difference_operators/test_Dcd.py | 11d01a7860d1dd2af0fcf3104146558a36b2498c | [] | no_license | ASU-CompMethodsPhysics-PHY494/Activity_11_differential_operators | dceac220f1a68addc8a4b1720793a62bdf805038 | bec1e9064d24364fa6e7013b4719c3d48e9e3529 | refs/heads/main | 2023-03-12T15:45:59.381399 | 2021-03-04T10:09:50 | 2021-03-04T10:09:50 | 344,430,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | from ..base import _TestDxx
class TestDcd(_TestDxx):
    """Exercise the central-difference approximation D_cd."""

    name = "D_cd"

    @staticmethod
    def op(f, x, dx):
        # Central difference: (f(x + dx/2) - f(x - dx/2)) / dx
        half_step = dx / 2
        return (f(x + half_step) - f(x - half_step)) / dx
| [
"orbeckst@gmail.com"
] | orbeckst@gmail.com |
ca62d5843d222ded6598b0f8b2af7ca737d437d6 | 12579725d1c1e51a436136f465a8a7e60a76248b | /apps/almacen/views.py | a054d12a7d23f2f3f1b6d09e0dc0930306a1e707 | [] | no_license | kiritodeveloper/optica | f4f43b9ead720e0708974736ba6a798e7980ccb8 | 6491c4b6b074a1e1f6d7a5b2d73f85e7ed11bedd | refs/heads/master | 2023-01-06T14:00:20.337601 | 2019-07-07T00:42:21 | 2019-07-07T00:42:21 | 176,391,884 | 3 | 0 | null | 2022-12-26T20:15:18 | 2019-03-19T00:33:16 | TSQL | UTF-8 | Python | false | false | 6,978 | py | # -*- encoding: utf-8 -*-
from django.shortcuts import render,redirect, HttpResponse
from django.views.generic import View, ListView, DeleteView
from apps.cliente.models import Cliente
from apps.facturacion.models import Venta
from apps.receta.models import Receta
from .models import Producto,Proveedor
from .forms import ProductoForm,IngresoProductosForm
from django.contrib import messages
from apps.usuarios.views import LoginRequiredMixin
from decimal import Decimal
import sys
# Python 2 only: reload() re-exposes sys.setdefaultencoding so the default
# encoding can be forced to UTF-8 for the Spanish text used below.
reload(sys)
sys.setdefaultencoding("utf-8")
class Index(LoginRequiredMixin,View):
    """Dashboard landing page; currently only shows today's client birthdays."""
    template_name = 'index.html'

    def get(self,request):
        """Render the dashboard.

        The large commented-out block below is disabled legacy code that
        computed per-month sales totals for a chart; it is kept for
        reference until the dashboard feature returns.
        """
        # Computing the previous 6 months and their totals (disabled):
        import datetime
        # nombre_meses = { 1:"Enero",2:"Febrero",3:"Marzo",4:"Abril",5:"Mayo",6:"Junio",7:"Julio",8:"Agosto",9:"Setiembre",10:"Octubre", 11:"Noviembre",12:"Diciembre" }
        # contador = [0,0,0,0,0,0]
        # totales = [Decimal(0),Decimal(0),Decimal(0),Decimal(0),Decimal(0),Decimal(0)]
        # receta = Receta.objects.all().order_by('-fecha')
        # lista = []
        # for item in receta:
        #     diff = (datetime.date.today() - item.fecha).days
        #     if(int(diff/30) >= 8):
        #         lista.append(item)
        # receta = lista
        # ventas = Venta.objects.all()
        # suma_mes = Decimal(0)
        # suma_dia = Decimal(0)
        # flag = False
        # meses = [[datetime.date.today().month,datetime.date.today().year],]
        # #Obtener los ultimos 6 meses
        # if (datetime.date.today().month - 1 >0 and flag == False):
        #     meses.append([datetime.date.today().month-1,datetime.date.today().year])
        # else:
        #     meses.append([12,datetime.date.today().year-1])
        #     flag = True
        # if (datetime.date.today().month - 2 >0 and flag == False):
        #     meses.append([datetime.date.today().month-2,datetime.date.today().year])
        # else:
        #     meses.append([11,datetime.date.today().year-1])
        #     flag = True
        # if (datetime.date.today().month - 3 >0 and flag == False):
        #     meses.append([datetime.date.today().month-3,datetime.date.today().year])
        # else:
        #     meses.append([10,datetime.date.today().year-1])
        #     flag = True
        # if (datetime.date.today().month - 4 >0 and flag == False):
        #     meses.append([datetime.date.today().month-4,datetime.date.today().year])
        # else:
        #     meses.append([9,datetime.date.today().year-1])
        #     flag = True
        # if (datetime.date.today().month - 5 >0 and flag == False):
        #     meses.append([datetime.date.today().month-5,datetime.date.today().year])
        # else:
        #     meses.append([8,datetime.date.today().year-1])
        #     flag = True
        #
        # for item in ventas:#Calcular totales del dia y del mes
        #     if item.fecha == datetime.date.today():
        #         suma_dia += Decimal(item.total or 0)
        #     if (item.fecha.month == datetime.date.today().month) and (item.fecha.year == datetime.date.today().year):
        #         suma_mes += Decimal(item.total or 0)
        #     #Cacular totales para los 6 meses
        #     if (item.fecha.month == meses[0][0]) and (item.fecha.year == meses[0][1]):
        #         totales[0] += Decimal(item.total or 0)
        #         contador[0] += 1
        #     if (item.fecha.month == meses[1][0]) and (item.fecha.year == meses[1][1]):
        #         totales[1] += Decimal(item.total or 0)
        #         contador[1] += 1
        #     if (item.fecha.month == meses[2][0]) and (item.fecha.year == meses[2][1]):
        #         totales[2] += Decimal(item.total or 0)
        #         contador[2] += 1
        #     if (item.fecha.month == meses[3][0]) and (item.fecha.year == meses[3][1]):
        #         totales[3] += Decimal(item.total or 0)
        #         contador[3] += 1
        #     if (item.fecha.month == meses[4][0]) and (item.fecha.year == meses[4][1]):
        #         totales[4] += Decimal(item.total or 0)
        #         contador[4] += 1
        #     if (item.fecha.month == meses[5][0]) and (item.fecha.year == meses[5][1]):
        #         totales[5] += Decimal(item.total or 0)
        #         contador[5] += 1
        #
        # #Renderizando datos a json
        # import json
        # index = 0
        # for item in meses:
        #     meses[index] = "%s - %s" %(str(nombre_meses[item[0]]),str(item[1]))
        #     index +=1
        # meses = json.dumps(meses)
        # contador = json.dumps(contador)
        # index = 0
        # for item in totales:
        #     totales[index] = float(totales[index])
        #     index+=1
        # totales = json.dumps(totales)
        # Clients whose birthday is today (month and day match):
        clientes = Cliente.objects.filter(fecha_nacimiento__month=datetime.date.today().month, fecha_nacimiento__day=datetime.date.today().day)
        return render(request,self.template_name,locals())
class Productos(LoginRequiredMixin,View):
    """Product list page combining a create-product form and a restock form."""
    template_name = 'productos/index.html'

    def get(self,request):
        """Show the product table with empty create/restock forms."""
        productos = Producto.objects.all()
        producto_form = ProductoForm()
        ingreso_form = IngresoProductosForm()
        return render(request,self.template_name,locals())

    def post(self,request):
        """Handle whichever of the two forms validates.

        Both forms are bound to the same POST; the product form creates a
        new product, the ingreso form records a stock entry and bumps the
        product's running stock.  All user-facing messages are in Spanish.
        """
        productos = Producto.objects.all()
        producto_form = ProductoForm(request.POST)
        ingreso_form = IngresoProductosForm(request.POST)
        if producto_form.is_valid():
            producto = producto_form.save()
            messages.success(request, unicode('El producto '+unicode(producto.descripcion)+' de código '+unicode(producto.codigo)+' fue registrado con exito'))
            # Re-query and reset both forms so the page shows fresh state.
            productos = Producto.objects.all()
            producto_form = ProductoForm()
            ingreso_form = IngresoProductosForm()
            return render(request,self.template_name,locals())
        elif(ingreso_form.is_valid()):
            historial = ingreso_form.save()
            # Apply the received quantity to the product's stock counter.
            producto = Producto.objects.get(pk=historial.producto.id)
            producto.stock_actual += int(request.POST['cantidad'])
            producto.save()
            messages.success(request, 'Se ingresaron '+request.POST['cantidad']+' unidades de '+producto.descripcion)
            productos = Producto.objects.all()
            producto_form = ProductoForm()
            ingreso_form = IngresoProductosForm()
            return render(request,self.template_name,locals())
        else:
            messages.error(request, 'No se pudo registrar la operación, porfavor intente denuevo.')
            return render(request,self.template_name,locals())
import json
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt
def ObtenerProducto(request,nro):
    """AJAX endpoint: return price and available stock of product *nro* as JSON."""
    item = Producto.objects.get(pk=nro)
    return HttpResponse(json.dumps({"precio":float(item.precio_sugerido),"max_value":item.stock_actual}),content_type='application/json')
| [
"admin@example.com"
] | admin@example.com |
6cb58c3d103adce06bc3212805179da117d0586a | 87d0de6a06451d7aa561b72d908d06e68074f650 | /core/arxiv/submission/auth.py | 2bc77e6859adcdde806a42175c0522339435c9cd | [
"MIT"
] | permissive | arXiv/arxiv-submission-core | 3e79085ee408fd83b4dd4c0c1e8ccc53dd282230 | 6077ce4e0685d67ce7010800083a898857158112 | refs/heads/develop | 2022-01-21T02:11:56.384920 | 2020-07-31T18:16:17 | 2020-07-31T18:16:17 | 106,854,828 | 14 | 8 | MIT | 2022-01-06T22:29:31 | 2017-10-13T17:36:51 | Python | UTF-8 | Python | false | false | 1,330 | py |
from typing import List
import uuid
from datetime import datetime, timedelta
from pytz import UTC
from arxiv.users import auth, domain
from arxiv.base.globals import get_application_config
from .domain.agent import User, Agent, Client
def get_system_token(name: str, agent: Agent, scopes: List[str]) -> str:
    """Mint a short-lived (10 hour) service JWT acting as *agent*.

    *name* identifies the internal service and is used as both client id
    and client name; *scopes* become the session's authorizations.
    """
    start = datetime.now(tz=UTC)
    end = start + timedelta(seconds=36000)
    if isinstance(agent, User):
        user = domain.User(
            username=agent.username,
            email=agent.email,
            user_id=agent.identifier,
            name=agent.name,
            verified=True
        )
    else:
        # Non-User agents (e.g. services) have no user in the session.
        user = None
    session = domain.Session(
        session_id=str(uuid.uuid4()),
        # Bug fix: use the timezone-aware `start` captured above; the old
        # code passed a second, *naive* datetime.now() here, mixing naive
        # and aware datetimes within one session.
        start_time=start, end_time=end,
        user=user,
        client=domain.Client(
            owner_id='system',
            client_id=name,
            name=name
        ),
        authorizations=domain.Authorizations(scopes=scopes)
    )
    secret = get_application_config()['JWT_SECRET']
    return str(auth.tokens.encode(session, secret))
def get_compiler_scopes(resource: str) -> List[str]:
    """Get minimal auth scopes necessary for compilation integration."""
    wanted = (auth.scopes.READ_COMPILE, auth.scopes.CREATE_COMPILE)
    return [scope.for_resource(resource) for scope in wanted]
| [
"brp53@cornell.edu"
] | brp53@cornell.edu |
f9f5f1c810bd6768490f56eed7d994bb84fc244d | d5682d2ef13ad63c68d59d3d0706853a88035ff1 | /week4/netmiko_test.py | 7d33f05d13548fa2bd2280fdb6c9095940601267 | [
"Apache-2.0"
] | permissive | mikealford/ktbyers_automation | 66467f5352a3fbb111fc18f9c90b83cf97a75e79 | d8b30e7ddbe27b4bc62b74bfc051b6d1c099f7f9 | refs/heads/master | 2020-04-17T19:37:42.365653 | 2019-02-19T01:16:41 | 2019-02-19T01:16:41 | 166,872,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 779 | py | from netmiko import ConnectHandler
from getpass import getpass
# Prompt once; the same credential is shared by all three switches.
password = getpass()

switch1 = {
    'device_type': 'cisco_ios',
    'ip': '192.168.122.172',
    'username': 'malford',
    'password': password,
}
switch2 = {
    'device_type': 'cisco_ios',
    'ip': '192.168.122.173',
    'username': 'malford',
    'password': password,
}
switch3 = {
    'device_type': 'cisco_ios',
    'ip': '192.168.122.174',
    'username': 'malford',
    'password': password,
}

# Open an SSH session to each device.
ssh_switch1 = ConnectHandler(**switch1)
ssh_switch2 = ConnectHandler(**switch2)
ssh_switch3 = ConnectHandler(**switch3)

#output = ssh_switch1.send_command("show ip int brief")

# Push a single config line to switch1 and echo the session transcript.
config_commands = ['logging buffered 20000']
output = ssh_switch1.send_config_set(config_commands)
print(output)
| [
"mike.alford13@gmail.com"
] | mike.alford13@gmail.com |
debf5d538dd470447e69bf1ceafc4368d95d2702 | a276d03f34457c174d2e79fc4fdb17c90299e843 | /projects/buttons/lib/markdown/__version__.py | 65edeeddbe2024b9e9a9513b5ec0deaec794cfdb | [
"MIT"
] | permissive | lucidworks/streams | effba3bc55df10431fb505937180b30d72e248b8 | 89aaf02382494cf09041ca5dadb41dddb86cf9d8 | refs/heads/master | 2021-06-02T04:09:27.626504 | 2020-01-07T01:28:09 | 2020-01-07T01:28:09 | 106,742,467 | 8 | 6 | null | 2020-03-17T21:17:48 | 2017-10-12T20:34:28 | Python | UTF-8 | Python | false | false | 907 | py | #
# markdown/__version__.py
#
# version_info should conform to PEP 386
# (major, minor, micro, alpha/beta/rc/final, #)
# (1, 1, 2, 'alpha', 0) => "1.1.2.dev"
# (1, 2, 0, 'beta', 2) => "1.2b2"
version_info = (2, 4, 0, 'final', 0)
def _get_version():
" Returns a PEP 386-compliant version number from version_info. "
assert len(version_info) == 5
assert version_info[3] in ('alpha', 'beta', 'rc', 'final')
parts = 2 if version_info[2] == 0 else 3
main = '.'.join(map(str, version_info[:parts]))
sub = ''
if version_info[3] == 'alpha' and version_info[4] == 0:
# TODO: maybe append some sort of git info here??
sub = '.dev'
elif version_info[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version_info[3]] + str(version_info[4])
return str(main + sub)
# Public version string computed once at import time, e.g. "2.4" or "2.4b2".
version = _get_version()
| [
"kordless@gmail.com"
] | kordless@gmail.com |
a68ab708a3dd64fa6df53028e3ae4f92b71cde57 | 5d0edf31b17c5375faf6126c1a7be8e79bfe2ab8 | /buildout-cache/eggs/collective.siterss-0.4-py2.7.egg/collective/siterss/tests.py | 504d02d5c4f70d7c823545bd4735b78c9bb7ae7c | [] | no_license | renansfs/Plone_SP | 27cba32ebd9fc03dae3941ec23cf1bf0a7b6667a | 8a7bdbdb98c3f9fc1073c6061cd2d3a0ec80caf5 | refs/heads/master | 2021-01-15T15:32:43.138965 | 2016-08-24T15:30:19 | 2016-08-24T15:30:19 | 65,313,812 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,490 | py | import unittest
#from zope.testing import doctestunit
#from zope.component import testing
#from Testing import ZopeTestCase as ztc
from Products.Five import zcml
from Products.Five import fiveconfigure
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import PloneSite
ptc.setupPloneSite()
import collective.siterss
class TestCase(ptc.PloneTestCase):
    """Plone test case whose layer loads this package's ZCML once."""

    class layer(PloneSite):
        """Test layer: registers collective.siterss configuration."""

        @classmethod
        def setUp(cls):
            # Load the package ZCML with Five debug mode on so errors
            # surface during the load instead of being swallowed.
            fiveconfigure.debug_mode = True
            zcml.load_config('configure.zcml',
                             collective.siterss)
            fiveconfigure.debug_mode = False

        @classmethod
        def tearDown(cls):
            pass
def test_suite():
    """Assemble the package test suite (currently empty).

    The doctest/integration suites that used to be listed here are all
    disabled; add unittest or doctest suites to the list below as the
    package grows real tests.
    """
    return unittest.TestSuite([])
if __name__ == '__main__':
    # Allow running the module directly; executes the (empty) suite above.
    unittest.main(defaultTest='test_suite')
| [
"renansfs@gmail.com"
] | renansfs@gmail.com |
9da24d7be31634385f5297b6853fdebc8cd2a1a6 | 308765557d3a850175658fc2a6ec7332b91a85bc | /restapi/restapi/urls.py | 27696e1100556c7527bec61cfe4032595d1e9508 | [] | no_license | kagxin/recipes | 3358a79267a18ba499145e2de64f44c6e14ddaaf | 17a106478859cd8e82513456ecd03873e1fe9391 | refs/heads/master | 2021-05-06T08:13:14.851114 | 2018-07-21T11:50:48 | 2018-07-21T11:50:48 | 114,010,028 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 797 | py | """restapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# All REST endpoints live under /api/; the Django admin stays at /admin/.
urlpatterns = [
    path('api/', include('app.urls')),
    path('admin/', admin.site.urls),
]
| [
"k506855660@163.com"
] | k506855660@163.com |
447a68a7c95e7246409e5e6a6d769ae3909d7314 | b03a7b92cef9cbee31918b0608ce58669b92df73 | /jd_home1.py | 014257d838a833709c28bf6bb2c6adc3e7d5d364 | [] | no_license | luobodage/- | 5cbc3f7900867cddb53bf347da57716cd7917481 | affbaa854a031819a74c944d3a95f4dc5d90d08f | refs/heads/master | 2022-12-30T23:49:31.205151 | 2020-10-25T09:02:01 | 2020-10-25T09:02:01 | 297,016,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,052 | py | import requests
import UserAgent
import lxml.etree as le
import re
import os
def spider_home():
"""
获取物品的url以及标题价格
:return: 返回物品编码
"""
shop = input("请输入你要搜索的商品:")
global headers
headers = UserAgent.get_headers() # 随机获取一个headers
url = 'https://search.jd.com/Search?keyword={shop}&enc=utf-8&wq=%E5%B0%8F&pvid=469d5d51a3184cc9a053124dc020b31f'.format(
shop=shop
)
try:
r = requests.get(
url=url,
headers=headers
).content
content = le.HTML(r)
href = content.xpath('//*[@id="J_goodsList"]/ul/li[1]/div/div[1]/a/@href')
price = content.xpath('//*[@id="J_goodsList"]/ul/li[1]/div/div/strong/i/text()')
title_01 = content.xpath('//*[@id="J_goodsList"]/ul/li[1]/div/div/a/em/text()')
title = [x.strip() for x in title_01 if x.strip() != ''] # 提取标题 将多余空格和\n去除
re_01 = re.compile(r'\d+')
number = re_01.findall(str(href))
shop_price_01 = "".join(price)
print("商品价格:" + shop_price_01)
# for shop_price in price:
#
# print("商品价格:" + shop_price)
global shop_title # 全局定义商品题目 进行文件改标题
shop_title_01 = "".join(title)
print("商品标题:" + shop_title_01)
# for shop_title in title:
# print("商品标题:" + shop_title)
for index in href:
global href_shop
href_shop = 'http:' + index
print(href_shop)
for num in number:
# print(num)
return num
# file_rename()
except:
print('爬取失败')
def file_rename():
file_srcFile = 'id.txt'
file_dstFile = shop_title + '.txt'
os.rename(file_srcFile, file_dstFile) # 改标题
img_srcFile = 'ciyun.png'
img_dstFile = shop_title + '.png'
os.rename(img_srcFile, img_dstFile)
if __name__ == '__main__':
spider_home()
| [
"fuyu16032001@gmail.com"
] | fuyu16032001@gmail.com |
39c457d336d955950a124abbfb9682486e1bbab8 | 90115eeb4d60c1dc26deb1c124d42039d214195c | /ixl/management/commands/createchallenges.py | dacb82b399b6bd7265dc6ed66a89f577c65727cc | [] | no_license | waffle-iron/Newton | ce60a8ccc66bbc23aa764742b197add4cfb4d2d3 | f8f3df4127e88428db0cc73207ac51582db7cd42 | refs/heads/master | 2021-01-02T08:59:39.948797 | 2017-08-02T12:52:52 | 2017-08-02T12:52:52 | 99,116,161 | 0 | 0 | null | 2017-08-02T12:52:52 | 2017-08-02T12:52:51 | null | UTF-8 | Python | false | false | 7,674 | py | # commands/createchallenges.py
# Full path to your django project directory
your_djangoproject_home="/home/alex/newton/"
import django
import datetime
import sys,os
import requests
from variables import second_teachers as assigned_teachers
from variables import mastery_skills, cbaExercises
sys.path.append(your_djangoproject_home)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "newton.settings")
django.setup()
from django.core.management.base import BaseCommand, CommandError
from brain.models import StudentRoster, CurrentClass, Teacher
from ixl.models import ChallengeAssignment, Challenge, ChallengeExercise, IXLSkillScores
from libs.functions import nwea_recommended_skills_list as nwea_skills
date = datetime.date.today()
date = date.strftime('%-m/%-d')
todays_date = datetime.date.today()
class Command(BaseCommand):
help = 'Assigns a custom challenge to all students'
def add_arguments(self, parser):
pass
def make_mastery_challenge(self, student, current_challenge, exercise_count):
for addition in mastery_skills:
try:
skill_score = IXLSkillScores.objects.get(student_id=student, ixl_skill_id__skill_id=addition)
if skill_score.score < 96:
exercise_count += 1
challenge_exercise = ChallengeExercise.objects.create(challenge=current_challenge,
exercise_id=addition,
required_score=100, )
except:
exercise_count += 1
challenge_exercise = ChallengeExercise.objects.create(challenge=current_challenge,
exercise_id=addition,
required_score=100)
if exercise_count == 1:
return exercise_count
return exercise_count
def make_cba_challenge(self, student, current_challenge, exercise_count):
for addition in cbaExercises:
try:
skill_score = IXLSkillScores.objects.get(student_id=student, ixl_skill_id__skill_id=addition)
if skill_score.score < 78:
exercise_count += 1
challenge_exercise = ChallengeExercise.objects.create(challenge=current_challenge,
exercise_id=addition,
required_score=80, )
else:
print("Could not add {}".format(addition))
except:
try:
challenge_exercise = ChallengeExercise.objects.create(challenge=current_challenge,
exercise_id=addition,
required_score=80)
exercise_count += 1
except:
pass
if exercise_count == 3:
return exercise_count
print("Ran out of cba exercises for {}!".format(student))
return exercise_count
def make_nwea_challenge(self, student, current_challenge, exercise_count):
skill_list = nwea_skills(student, "recommended_skill_list")
domain_list = []
waiting_list = []
for skill in skill_list:
previously_assigned = ChallengeExercise.objects.filter(challenge__challengeassignment__student_id=student, exercise_id=skill[0] )
pal = len(previously_assigned)
print("{} Previously Assigned {} times".format(skill[0],pal))
if pal>3:
continue
if skill[3] in domain_list:
waiting_list.append(skill[0])
elif exercise_count >=5:
waiting_list.append(skill[0])
else:
domain_list.append(skill[3]) # Add this domain to the list
# Create a Challenge Exercise object with the challenge and skill
try:
challenge_exercise = ChallengeExercise.objects.create(challenge=current_challenge,
exercise_id=skill[0])
exercise_count += 1
except:
continue
if exercise_count<5:
for skill in waiting_list:
try:
challenge_exercise = ChallengeExercise.objects.create(challenge=current_challenge,
exercise_id=skill[0])
exercise_count += 1
except:
continue
if exercise_count ==5:
return exercise_count
return exercise_count
def handle(self, *args, **options):
for teacher in assigned_teachers:
try: #Get the class
current_class = CurrentClass.objects.get(teacher__last_name=teacher)
except:
print('Teacher {} could not be found.'.format(teacher))
break
student_list = StudentRoster.objects.filter(current_class=current_class)
print("Got student list. Creating Challenges.")
for student in student_list: #Go through one student at a time
title = "{} {}'s {} Challenge".format(student.first_name, student.last_name[0],date)
current_challenge = Challenge.objects.create(title=title, date=todays_date)
exercise_count = 0
exercise_count = self.make_mastery_challenge(student, current_challenge, exercise_count)
exercise_count = self.make_cba_challenge(student, current_challenge, exercise_count)
exercise_count = self.make_nwea_challenge(student, current_challenge, exercise_count)
print("Assigning {} to {}, length: {}".format(title,student, exercise_count))
obj, created = ChallengeAssignment.objects.get_or_create(
student_id=student, challenge=current_challenge,
)
#TODO: Email teachers previous week's scores
# TODO: Add Bonus Exercises
# IXL Challenge Creation
# Create 5 main challenges
# 2 for CBA
# Map the CBAs to IXL Exercises for each of the three.
# Make it change depending on the date
# 2 for NWEA
# 1 for Mastery - based on the current or passed curriculum - 100 Smart Score
# Create 5 Bonus Challenges
from django.core.mail import EmailMessage
def send_an_email():
email = EmailMessage(
subject='Hello',
body='''Body goes here.
How are you?
I hope this email works!''',
from_email='newton@newtonthinks.com',
to=['ins-dauaprqb@isnotspam.com'],
reply_to=['alextrostbtwa@gmail.com.com'],
headers={'Content-Type': 'text/plain'},
)
email.send()
def send_simple_message():
return requests.post(
"https://api.mailgun.net/v3/sandbox791822b6aeca4aee8007134fecd331ec.mailgun.org/messages",
auth=("api", "key-cedb9e331a1be78e57582e4e13cac442"),
data={"from": "Mailgun Sandbox <postmaster@sandbox791822b6aeca4aee8007134fecd331ec.mailgun.org>",
"to": "Alex <alextrostbtwa@gmail.com>",
"subject": "Hello Alex",
"text": "Congratulations Alex, you just sent an email with Mailgun! You are truly awesome!"})
send_simple_message() | [
"alexrtrost@gmail.com"
] | alexrtrost@gmail.com |
9b27acf8e7217a6d9531f6f8b2b0b06fc5734d47 | 282d0a84b45b12359b96bbf0b1d7ca9ee0cb5d19 | /Malware1/venv/Lib/site-packages/numpy/ma/core.py | 2a76d165ece3eafac172eaa1d54d982e3d5957f7 | [] | no_license | sameerakhtar/CyberSecurity | 9cfe58df98495eac6e4e2708e34e70b7e4c055d3 | 594973df27b4e1a43f8faba0140ce7d6c6618f93 | refs/heads/master | 2022-12-11T11:53:40.875462 | 2020-09-07T23:13:22 | 2020-09-07T23:13:22 | 293,598,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | version https://git-lfs.github.com/spec/v1
oid sha256:5152f7d1fc4275d76c9cc94b3a0702c65f9c6bff88aa6eaec8df101e7733707e
size 256431
| [
"46763165+sameerakhtar@users.noreply.github.com"
] | 46763165+sameerakhtar@users.noreply.github.com |
fa1d83669f1cd8d74b20eed7dc51f9d3d01228d4 | 0009c76a25c89a0d61d3bc9e10071da58bdfaa5a | /py/ztools/lib/Title.py | 3357c3c94a5ba9f68af20cfed4050b70afee62ce | [
"MIT"
] | permissive | julesontheroad/NSC_BUILDER | 84054e70a80b572088b0806a47ceb398302451b5 | e9083e83383281bdd9e167d3141163dcc56b6710 | refs/heads/master | 2023-07-05T05:23:17.114363 | 2021-11-15T19:34:47 | 2021-11-15T19:34:47 | 149,040,416 | 1,249 | 143 | MIT | 2022-12-15T03:19:33 | 2018-09-16T22:18:01 | Python | UTF-8 | Python | false | false | 19,434 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import re
import json
import Titles
import Print
from bs4 import BeautifulSoup
import requests
import time
import datetime
import calendar
import threading
import Nsps
import urllib.request
import Config
try:
from PIL import Image
except ImportError:
import Image
global grabUrlInit
global urlCache
global urlLock
grabUrlInit = False
urlCache = {}
urlLock = threading.Lock()
if os.path.isfile('titledb/redirectCache.json'):
with open('titledb/redirectCache.json', encoding="utf-8-sig") as f:
urlCache = json.loads(f.read())
if os.path.isfile('titledb/titleRedirects.json'):
with open('titledb/titleRedirects.json', encoding="utf-8-sig") as f:
titleRedirects = json.loads(f.read())
def grabCachedRedirectUrl(url, cookies = None):
global grabUrlInit
global urlCache
global urlLock
try:
if url in urlCache:
if not urlCache[url]:
return None
result = requests.get(urlCache[url], cookies=cookies)
#result.url = urlCache[url]
return result
urlLock.acquire()
# we need to slow this down so we dont get banned
#Print.info('hitting ' + url)
#time.sleep(0.1)
result = requests.get(url, cookies=cookies)
if result.status_code == 404:
urlCache[url] = None
elif result.status_code == 200:
urlCache[url] = result.url
else:
#not sure but dont cache it
return result
with open('titledb/redirectCache.json', 'w') as outfile:
json.dump(urlCache, outfile)
urlLock.release()
return result
except:
urlLock.release()
raise
def getBaseId(id):
if not id:
return None
titleIdNum = int(id, 16)
return '{:02X}'.format(titleIdNum & 0xFFFFFFFFFFFFE000).zfill(16)
class Title:
def __init__(self):
self.id = None
self.rightsId = None
self.name = None
self.isDLC = False
self.isUpdate = False
self.idExt = None
self.updateId = None
self.version = None
self.key = None
self.isDemo = None
self.region = None
self.isModified = False
self.retailOnly = None
self.releaseDate = None
self.nsuId = None
self.category = None
self.ratingContent = None
self.numberOfPlayers = None
self.rating = None
self.developer = None
self.publisher = None
self.frontBoxArt = None
self.iconUrl = None
self.screenshots = None
self.bannerUrl = None
self.intro = None
self.description = None
self.languages = None
self.size = 0
def __lt__(self, other):
if not self.name:
return True
if not other.name:
return False
return str(self.name) < str(other.name)
def loadCsv(self, line, map = ['id', 'key', 'name']):
split = line.split('|')
for i, value in enumerate(split):
if i >= len(map):
Print.info('invalid map index: ' + str(i) + ', ' + str(len(map)))
continue
i = str(map[i])
methodName = 'set' + i[0].capitalize() + i[1:]
method = getattr(self, methodName, lambda x: None)
method(value.strip())
#self.setId(split[0].strip())
#self.setName(split[2].strip())
#self.setKey(split[1].strip())
def dict(self, map = ['id', 'rightsId', 'key', 'isUpdate', 'isDLC', 'isDemo', 'name', 'version', 'region', 'retailOnly']):
r = {}
for i in map:
methodName = 'get' + i[0].capitalize() + i[1:]
method = getattr(self, methodName, lambda: methodName)
r[i] = method()
return r
def serialize(self, map = ['id', 'rightsId', 'key', 'isUpdate', 'isDLC', 'isDemo', 'name', 'version', 'region', 'retailOnly']):
r = []
for i in map:
methodName = 'get' + i[0].capitalize() + i[1:]
method = getattr(self, methodName, lambda: methodName)
r.append(str(method()))
return '|'.join(r)
def getFiles(self):
files = []
for path, f in Nsps.files.items():
if(f.titleId == self.id):
files.append(f)
return files
def getLatestFile(self):
highest = None
for nsp in self.getFiles():
if not highest or nsp.version > highest.version:
highest = nsp
return highest
def isUpdateAvailable(self):
nsp = self.getLatestFile()
if not nsp:
return True
try:
if int(nsp.version) < int(self.lastestVersion()):
return True
except:
pass
return False
def getIsDLC(self):
return self.isDLC*1
def setIsDLC(self, v):
try:
v = int(v, 10)
if v == 1:
self.isDLC = True
elif v == 0:
self.isDLC = False
except:
pass
def getIsUpdate(self):
return self.isUpdate*1
def setIsUpdate(self, v):
try:
v = int(v, 10)
if v == 1:
self.isUpdate = True
elif v == 0:
self.isUpdate = False
except:
pass
def getRetailOnly(self):
try:
return self.retailOnly*1
except:
return 0
def setRetailOnly(self, v):
try:
self.retailOnly = bool(int(v, 10))
except:
pass
def getIsDemo(self):
try:
return self.isDemo*1
except:
return 0
def setIsDemo(self, v):
try:
v = int(v, 10)
if v == 1:
self.isDemo = True
elif v == 0:
self.isDemo = False
except:
pass
def setRightsId(self, rightsId):
if not id:
self.setId(rightsId)
if rightsId and len(rightsId) == 32 and rightsId != '00000000000000000000000000000000':
self.rightsId = rightsId.upper()
def getRightsId(self):
return self.rightsId or '00000000000000000000000000000000'
def setId(self, id):
if not id or self.id:
return
id = id.upper();
try:
i = int(id, 16)
except:
return
if len(id) == 32:
self.id = id[:16]
self.setRightsId(id)
elif len(id) == 16:
self.id = id[:16]
else:
return
titleIdNum = int(self.id, 16)
if self.id:
self.baseId = '{:02X}'.format(titleIdNum & 0xFFFFFFFFFFFFE000).zfill(16)
else:
self.baseId = None
self.isDLC = (titleIdNum & 0xFFFFFFFFFFFFE000) != (titleIdNum & 0xFFFFFFFFFFFFF000)
#self.isBase = self.id == titleIdNum & 0xFFFFFFFFFFFFE000
self.idExt = titleIdNum & 0x0000000000000FFF
if self.isDLC:
# dlc
pass
elif self.idExt == 0:
# base
self.updateId = '%s800' % self.id[:-3]
else:
# update
self.isUpdate = True
pass
@staticmethod
def baseDlcId(id):
titleIdNum = int(id, 16)
return (titleIdNum & 0xFFFFFFFFFFFFE000) + 0x1000
#return hex(dlcId)
def getId(self):
return self.id or '0000000000000000'
def getBaseId(self):
return self.baseId or '0000000000000000'
def setRegion(self, region):
if re.match('[A-Z]{2}', region):
self.region = region
def getRegion(self):
return self.region or ''
def setName(self, name):
if not name:
return
self.name = name
if self.isDemo == None:
if re.match('.*\s[\(\[]?Demo[\)\]]?\s*$', self.name, re.I) or re.match('.*\s[\(\[]?Demo[\)\]]?\s+.*$', self.name, re.I):
self.isDemo = True
else:
self.isDemo = False
def getName(self):
baseId = getBaseId(self.id)
if self.isUpdate and Titles.contains(baseId):
return Titles.get(baseId).name
return self.name or ''
def setKey(self, key):
if not key:
return
key = key.upper()
if len(key) != 32:
return
try:
i = int(key, 16)
if i <= 0:
return
except:
return
self.key = key
def getKey(self):
return self.key or '00000000000000000000000000000000'
def setVersion(self, version, force = False):
if version != None:
try:
n = int(str(version), 10)
except:
return
try:
o = int(str(self.version), 10)
except:
o = None
if not o or n > o or force:
self.version = version
def getVersion(self):
return self.version or ''
def lastestVersion(self, force = False):
#if self.isDLC:
# return '0'
if not self.id:
return None
if self.version and self.version.lower() == 'none':
self.version = None
if not self.version or force:
self.version = Title.getCdnVersion(self.id)
Print.info('Grabbed %s [%s] version, %s' % (str(self.name), str(self.id), str(self.version)))
#Print.info('version: ' + str(self.version))
return self.version
def isValid(self):
if self.id:
return True
else:
return False
@staticmethod
def getCdnVersion(id):
r = CDNSP.get_version(id)
#if len(r) == 0 or r[0] == 'none':
# return ['0']
return r
def download(self, base, fileName, url):
path = os.path.join(base, fileName)
if os.path.isfile(path):
return path
os.makedirs(base, exist_ok=True)
urllib.request.urlretrieve(url, path)
return path
def getResizedImage(self, filePath, width = None, height = None):
if not width and not height:
return filePath
base, name = os.path.split(filePath)
path = os.path.join(base, '.' + str(width) + 'x' + str(height) + '_' + name)
if not os.path.isfile(path):
os.makedirs(base, exist_ok=True)
im = Image.open(filePath)
ar = im.size[0] / im.size[1]
if height == None:
height = int(width / ar)
elif width == None:
width = int(height * ar)
out = im.resize((width, height), Image.ANTIALIAS)
out.save(path, quality=100)
return path
def bannerFile(self, width = None, height = None):
if not self.bannerUrl or self.bannerUrl.startswith('cocoon:/'):
return None
baseName, ext = os.path.splitext(self.bannerUrl)
return self.getResizedImage(self.download(Config.paths.titleImages + self.id, 'banner' + ext, self.bannerUrl), width, height)
def frontBoxArtFile(self, width = None, height = None):
if not self.frontBoxArt or self.frontBoxArt.startswith('cocoon:/'):
return None
baseName, ext = os.path.splitext(self.frontBoxArt)
return self.getResizedImage(self.download(Config.paths.titleImages + self.id, 'frontBoxArt' + ext, self.frontBoxArt), width, height)
def iconFile(self, width = None, height = None):
if not 'iconUrl' in self.__dict__:
self.iconUrl = None
if not self.iconUrl or self.iconUrl.startswith('cocoon:/'):
return None
baseName, ext = os.path.splitext(self.iconUrl)
return self.getResizedImage(self.download(Config.paths.titleImages + self.id, 'icon' + ext, self.iconUrl), width, height)
def screenshotFile(self, i, width = None, height = None):
if not self.screenshots[i] or self.screenshots[i].startswith('cocoon:/'):
return None
baseName, ext = os.path.splitext(self.screenshots[i])
return self.getResizedImage(self.download(Config.paths.titleImages + self.id, 'screenshot' + str(i) + ext, self.screenshots[i]), width, height)
def screenshotFiles(self):
if not self.screenshots:
return []
r = []
for i,u in enumerate(self.screenshots):
r.append(self.screenshotFile(i))
return r
def scrape(self, delta = True):
if self.isUpdate or self.isDLC:
return
try:
if (not delta or not self.bannerUrl):
id = self.id
if id in titleRedirects:
id = titleRedirects[id]
cookies = {'esrb.verified': 'true'}
for region in ['JP', 'AU']:
result = grabCachedRedirectUrl("https://ec.nintendo.com/apps/%s/%s" % (id, region), cookies=cookies)
_json = ''
if not result or result.status_code != 200:
continue
_json = json.loads(result.text.split('NXSTORE.titleDetail.jsonData = ')[1].split('NXSTORE.titleDetail')[0].replace(';',''))
if _json == '' or _json == None:
Print.error('Failed to parse json for ' + "https://ec.nintendo.com/apps/%s/%s" % (id, region))
continue
if 'hero_banner_url' in _json:
self.bannerUrl = _json['hero_banner_url']
if "release_date_on_eshop" in _json:
self.releaseDate = int(_json["release_date_on_eshop"].replace('-',''))
if "id" in _json:
self.nsuId = int("%s" % _json["id"])
if "formal_name" in _json:
self.name = _json["formal_name"].strip()
if 'screenshots' in _json:
self.screenshots = []
for i, k in enumerate(_json["screenshots"]):
self.screenshots.append(k["images"][0]["url"])
if "demos" in _json:
for demo in _json["demos"]:
if "id" in demo:
if id[0:12] != _json['applications'][0]['id'][0:12]:
self.nsuId = int(demo["id"])
if "name" in demo:
self.name = demo["name"].strip()
if "languages" in _json:
self.languages = []
for language in _json["languages"]:
self.languages.append(language['iso_code'])
if "genre" in _json:
self.category = _json["genre"].split(' / ')
if "total_rom_size" in _json:
self.size = _json["total_rom_size"]
if "rating_info" in _json:
if "rating" in _json["rating_info"]:
if "age" in _json["rating_info"]['rating']:
self.rating = _json["rating_info"]['rating']['age']
if "content_descriptors" in _json["rating_info"]:
content = []
for descriptor in _json["rating_info"]["content_descriptors"]:
content.append(descriptor['name'])
self.ratingContent = content
if "player_number" in _json:
if 'local_max' in _json["player_number"]:
self.numberOfPlayers = _json["player_number"]["local_max"]
if 'offline_max' in _json["player_number"]:
self.numberOfPlayers = _json["player_number"]["offline_max"]
if "publisher" in _json:
if 'name' in _json["publisher"]:
self.publisher = _json["publisher"]["name"]
if 'title' in _json["publisher"]:
self.publisher = _json["publisher"]["title"]
if "applications" in _json:
if "image_url" in _json["applications"][0]:
self.iconUrl = _json["applications"][0]['image_url']
if "catch_copy" in _json:
intro = re.sub('(?<!\n)\n(?!\n)', ' ',_json["catch_copy"])
intro = re.sub(' ', ' ', intro)
self.intro = intro
if "description" in _json:
desc = re.sub('(?<!\n)\n(?!\n)', ' ',_json["description"])
desc = re.sub(' ', ' ', desc)
self.description = desc
#<img aria-hidden="true" data-src="https://media.nintendo.com/nintendo/bin/ZppwWK6BnjH5twBNvE5wEEI9aeMGR0XX/hQGr97SGMnlXBWoqOBtgtGX5noK3tNtD.jpg"/>
result = grabCachedRedirectUrl("https://ec.nintendo.com/apps/%s/US" % id, cookies=cookies)
if result and result.status_code == 200:
if result.url != 'https://www.nintendo.com/games/':
soup = BeautifulSoup(result.text, "html.parser")
if not self.bannerUrl:
m = re.search(r"#hero\s*{\s*background(-image)?:\s*url\('([^)]+)'\)", result.text, re.DOTALL | re.UNICODE | re.MULTILINE | re.IGNORECASE)
if m:
banner = m.group(2)
if banner[0] == '/':
banner = 'https://www.nintendo.com' + banner
self.bannerUrl = banner
rem = re.finditer('<img aria-hidden="true" data-src="([^"]+)"', result.text)
if rem:
ss = []
for m in rem:
ss.append(m.group(1))
if len(ss) > 0:
self.screenshots = ss
if soup.find("meta", {"property": "og:url"}) != None:
slug = soup.find("meta", {"property": "og:url"})["content"].split('/')[-1]
infoJson = json.loads(requests.get("https://www.nintendo.com/json/content/get/game/%s" % slug, cookies=cookies).text)["game"]
if "release_date" in infoJson:
self.releaseDate = int(datetime.datetime.strftime(datetime.datetime.strptime(infoJson["release_date"], "%b %d, %Y"),'%Y%m%d'))
if "name" in infoJson:
self.name = infoJson["name"].strip()
if "nsuid" in infoJson:
self.nsuId = int(infoJson["nsuid"])
catagories = []
if "game_category_ref" in infoJson:
catindex = 0
if "name" in infoJson["game_category_ref"]:
catagories.append(infoJson["game_category_ref"]["name"])
elif "title" in infoJson["game_category_ref"]:
catagories.append(infoJson["game_category_ref"]["title"])
else:
try:
for game_category in infoJson["game_category_ref"]:
catagories.append(infoJson["game_category_ref"][catindex]["name"])
catindex += 1
except:
pass
self.category = catagories
esrbcontent = []
if "esrb_content_descriptor_ref" in infoJson:
esrbindex = 0
if "name" in infoJson["esrb_content_descriptor_ref"]:
esrbcontent.append(infoJson["esrb_content_descriptor_ref"]["name"])
elif "title" in infoJson["esrb_content_descriptor_ref"]:
esrbcontent.append(infoJson["esrb_content_descriptor_ref"]["title"])
else:
try:
for descriptor in infoJson["esrb_content_descriptor_ref"]:
if 'name' in descriptor:
esrbcontent.append(descriptor["name"])
if 'title' in descriptor:
esrbcontent.append(descriptor["title"])
except:
pass
self.ratingContent = esrbcontent
if "number_of_players" in infoJson:
self.numberOfPlayers = re.sub('[^0-9]', '', infoJson["number_of_players"])
if "esrb_rating_ref" in infoJson:
if "esrb_rating" in infoJson["esrb_rating_ref"]:
if "short_description" in infoJson["esrb_rating_ref"]["esrb_rating"]:
self.rating = infoJson["esrb_rating_ref"]["esrb_rating"]["short_description"]
'''
if not self.screenshots:
try:
ss = []
for s in infoJson["screenshot_gallery_ref"]["screenshot_gallery"]["screenshots"]:
ss.append(s['image']['large_image']['include']['src'].replace('cocoon:/', ''))
self.screenshots = ss
except:
pass
'''
if "developer_ref" in infoJson:
if "name" in infoJson["developer_ref"]:
self.developer = infoJson["developer_ref"]["name"]
if "publisher_ref" in infoJson:
if "name" in infoJson["publisher_ref"]:
self.publisher = infoJson["publisher_ref"]["name"]
if 'title' in infoJson["publisher_ref"]:
self.publisher = infoJson["publisher_ref"]["title"]
if "front_box_art" in infoJson:
if "image" in infoJson["front_box_art"]:
if "image" in infoJson["front_box_art"]["image"]:
if "url" in infoJson["front_box_art"]["image"]["image"]:
self.frontBoxArt = infoJson["front_box_art"]["image"]["image"]["url"]
if "intro" in infoJson:
try:
details = BeautifulSoup(infoJson["intro"][0],"html.parser")
try:
details = details.decode(formatter=None)
except:
details = details.decode()
details = re.sub('<[^<]+?>', '', details).strip()
details = re.sub(' +', ' ', details)
details = re.sub('\n ', '\n', details)
details = re.sub('\n\n+', '\n\n', details)
details = re.sub('(?<!\n)\n(?!\n)', ' ',details)
details = re.sub(' ', ' ', details)
self.intro = details
except Exception as e:
pass
if "game_overview_description" in infoJson:
details = BeautifulSoup(infoJson["game_overview_description"][0],"html.parser")
try:
details = details.decode(formatter=None)
except:
details = details.decode()
details = re.sub('<[^<]+?>', '', details).strip()
details = re.sub(' +', ' ', details)
details = re.sub('\n ', '\n', details)
details = re.sub('\n\n+', '\n\n', details)
details = re.sub('(?<!\n)\n(?!\n)', ' ',details)
details = re.sub(' ', ' ', details)
self.description = details
#else:
#f = open("missing.txt", 'a', encoding="utf8")
#f.write(rid+"|title doesn't exist at ec.nintendo.com"+'\n')
#f.close()
except BaseException as e:
pass
print(repr(e) + ' ' + self.id)
self.bannerFile()
self.frontBoxArtFile()
self.iconFile()
self.screenshotFiles()
| [
"42461174+julesontheroad@users.noreply.github.com"
] | 42461174+julesontheroad@users.noreply.github.com |
984cdfba99e9f183944bffc8080c34e4f54c0e66 | 94ed2113af11ba8b716fb959c5ac0a32c5549c18 | /templates/plexus/{project.name}/actions/About.py | cd853ead8059cee02158904d7f5a4fd013c94cd8 | [
"BSD-3-Clause"
] | permissive | avalentino/pyre | 85ba21388514dc8c206d5136760e23b39aba1cae | 7e1f0287eb7eba1c6d1ef385e5160079283ac363 | refs/heads/main | 2023-03-23T04:58:02.903369 | 2021-03-09T17:37:11 | 2021-03-09T17:37:11 | 347,723,195 | 0 | 0 | NOASSERTION | 2021-03-14T18:43:34 | 2021-03-14T18:43:33 | null | UTF-8 | Python | false | false | 4,184 | py | # -*- coding: utf-8 -*-
#
# {project.authors}
# {project.affiliations}
# (c) {project.span} all rights reserved
#
# externals
import {project.name}
# declaration
class About({project.name}.command, family='{project.name}.actions.about'):
"""
Display information about this application
"""
# user configurable state
root = {project.name}.properties.str(default='/')
root.tip = "specify the portion of the namespace to display"
# commands
@{project.name}.export(tip="the name of the app for configuration purposes")
def name(self, plexus, **kwds):
"""
Print the name of the app for configuration purposes
"""
# show me
plexus.info.log("{{!r}}".format(plexus.pyre_name) or "unknown")
# all done
return
@{project.name}.export(tip="the application home directory")
def home(self, plexus, **kwds):
"""
Print the application home directory
"""
# show me
plexus.info.log("{{}}".format(plexus.home))
# all done
return
@{project.name}.export(tip="the application installation directory")
def prefix(self, plexus, **kwds):
"""
Print the application installation directory
"""
# show me
plexus.info.log("{{}}".format(plexus.prefix))
# all done
return
@{project.name}.export(tip="the application configuration directory")
def defaults(self, plexus, **kwds):
"""
Print the application configuration directory
"""
# show me
plexus.info.log("{{}}".format(plexus.defaults))
# all done
return
@{project.name}.export(tip="print the version number")
def version(self, plexus, **kwds):
"""
Print the version of the {project.name} package
"""
# make some space
plexus.info.log({project.name}.meta.header)
# all done
return
@{project.name}.export(tip="print the copyright note")
def copyright(self, plexus, **kwds):
"""
Print the copyright note of the {project.name} package
"""
# show the copyright note
plexus.info.log({project.name}.meta.copyright)
# all done
return
@{project.name}.export(tip="print out the acknowledgments")
def credits(self, plexus, **kwds):
"""
Print out the license and terms of use of the {project.name} package
"""
# make some space
plexus.info.log({project.name}.meta.header)
# all done
return
@{project.name}.export(tip="print out the license and terms of use")
def license(self, plexus, **kwds):
"""
Print out the license and terms of use of the {project.name} package
"""
# make some space
plexus.info.log({project.name}.meta.license)
# all done
return
@{project.name}.export(tip='dump the application configuration namespace')
def nfs(self, plexus, **kwds):
"""
Dump the application configuration namespace
"""
# get the prefix
prefix = self.root or '{project.name}'
# show me
plexus.pyre_nameserver.dump(prefix)
# all done
return
@{project.name}.export(tip='dump the application private filesystem')
def pfs(self, plexus, **kwds):
"""
Dump the application private filesystem
"""
# build the report
report = '\n'.join(plexus.pfs.dump())
# sign in
plexus.info.line('pfs:')
# dump
plexus.info.log(report)
# all done
return
@{project.name}.export(tip='dump the application virtual filesystem')
def vfs(self, plexus, **kwds):
"""
Dump the application virtual filesystem
"""
# get the prefix
prefix = self.root or '/{project.name}'
# build the report
report = '\n'.join(plexus.vfs[prefix].dump())
# sign in
plexus.info.line('vfs: root={{!r}}'.format(prefix))
# dump
plexus.info.log(report)
# all done
return
# end of file
| [
"michael.aivazis@orthologue.com"
] | michael.aivazis@orthologue.com |
f97683759d994ffa651b9fe04556b5fe7227fbec | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/90d8d0df916085db80d40220965daa4a3b7ba311-<test_qz_single>-bug.py | 2282f4798ce0ca3c2d1f5cf68bc695667672a3e3 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | def test_qz_single(self):
n = 5
A = random([n, n]).astype(float32)
B = random([n, n]).astype(float32)
(AA, BB, Q, Z) = qz(A, B)
assert_array_almost_equal(dot(dot(Q, AA), Z.T), A)
assert_array_almost_equal(dot(dot(Q, BB), Z.T), B)
assert_array_almost_equal(dot(Q, Q.T), eye(n))
assert_array_almost_equal(dot(Z, Z.T), eye(n))
assert_(all((diag(BB) >= 0))) | [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
923ba6a2cc6cf3ac0399432f8258fbbb7ef66708 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_01_01/aio/operations/_express_route_circuit_peerings_operations.py | 6b22064ab34016b86fc6d0c3e2c2f45fd16e18ff | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 21,703 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitPeeringsOperations:
    """ExpressRouteCircuitPeeringsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    NOTE: this module is auto-generated by AutoRest; hand edits will be lost
    when the code is regenerated.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_01_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _delete_initial(
        self,
        resource_group_name: str,
        circuit_name: str,
        peering_name: str,
        **kwargs
    ) -> None:
        # Issues the raw DELETE request once; long-running polling is driven
        # by begin_delete, which wraps this coroutine.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are all valid first responses for an async ARM delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'}  # type: ignore

    async def begin_delete(
        self,
        resource_group_name: str,
        circuit_name: str,
        peering_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified peering from the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # With a continuation token the initial request is skipped and the
        # poller is rebuilt from the saved state below.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                peering_name=peering_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        circuit_name: str,
        peering_name: str,
        **kwargs
    ) -> "_models.ExpressRouteCircuitPeering":
        """Gets the specified peering from the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteCircuitPeering, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_01_01.models.ExpressRouteCircuitPeering
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitPeering"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        accept = "application/json, text/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        circuit_name: str,
        peering_name: str,
        peering_parameters: "_models.ExpressRouteCircuitPeering",
        **kwargs
    ) -> "_models.ExpressRouteCircuitPeering":
        # Issues the raw PUT request once; long-running polling is driven by
        # begin_create_or_update, which wraps this coroutine.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitPeering"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json, text/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(peering_parameters, 'ExpressRouteCircuitPeering')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = updated existing peering, 201 = created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'}  # type: ignore

    async def begin_create_or_update(
        self,
        resource_group_name: str,
        circuit_name: str,
        peering_name: str,
        peering_parameters: "_models.ExpressRouteCircuitPeering",
        **kwargs
    ) -> AsyncLROPoller["_models.ExpressRouteCircuitPeering"]:
        """Creates or updates a peering in the specified express route circuits.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :param peering_parameters: Parameters supplied to the create or update express route circuit
         peering operation.
        :type peering_parameters: ~azure.mgmt.network.v2018_01_01.models.ExpressRouteCircuitPeering
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitPeering or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_01_01.models.ExpressRouteCircuitPeering]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitPeering"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # With a continuation token the initial request is skipped and the
        # poller is rebuilt from the saved state below.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                peering_name=peering_name,
                peering_parameters=peering_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'}  # type: ignore

    def list(
        self,
        resource_group_name: str,
        circuit_name: str,
        **kwargs
    ) -> AsyncIterable["_models.ExpressRouteCircuitPeeringListResult"]:
        """Gets all peerings in a specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRouteCircuitPeeringListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_01_01.models.ExpressRouteCircuitPeeringListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitPeeringListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-01-01"
        accept = "application/json, text/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service supplies a fully-formed next_link.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ExpressRouteCircuitPeeringListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings'}  # type: ignore
| [
"noreply@github.com"
] | scbedd.noreply@github.com |
32d24dea52ce64fed1b3599ceacbc71988bc0232 | 1310ca784c1b0b9238f2407eb59d0704b8ae5a08 | /NextGen/circuitpython/adafruit-circuitpython-bundle-6.x-mpy-20201114/examples/requests_simpletest_cellular.py | 6727815ba8df84aa15bd0a164f457469bad79927 | [] | no_license | RyannDaGreat/LightWave | 6b89838bfd48dba010eb5229b84b206be4e8ccbb | d055b0c01b01b3795d9e6c28b6b70f969893ed97 | refs/heads/master | 2023-07-20T08:23:47.526629 | 2023-07-18T00:25:02 | 2023-07-18T00:25:02 | 123,113,725 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,411 | py | # pylint: disable=unused-import
# pylint: disable=unused-import
# Adafruit FONA cellular example: attach to a GPRS network and exercise
# adafruit_requests with GET/POST calls against httpbin.
# NOTE(review): assumes a CircuitPython board wired to a FONA module on the
# default UART pins and a `secrets.py` with apn credentials -- hardware only.
import time
import board
import busio
import digitalio
from adafruit_fona.adafruit_fona import FONA
from adafruit_fona.fona_3g import FONA3G
import adafruit_fona.adafruit_fona_network as network
import adafruit_fona.adafruit_fona_socket as cellular_socket
import adafruit_requests as requests

# Get GPRS details and more from a secrets.py file
try:
    from secrets import secrets
except ImportError:
    print("GPRS secrets are kept in secrets.py, please add them there!")
    raise

# Create a serial connection for the FONA connection
uart = busio.UART(board.TX, board.RX)
rst = digitalio.DigitalInOut(board.D4)

# Use this for FONA800 and FONA808
fona = FONA(uart, rst)
# Use this for FONA3G
# fona = FONA3G(uart, rst)

# Initialize cellular data network
network = network.CELLULAR(
    fona, (secrets["apn"], secrets["apn_username"], secrets["apn_password"])
)

# Block until the modem has registered with the carrier.
while not network.is_attached:
    print("Attaching to network...")
    time.sleep(0.5)
print("Attached!")

# Then bring up the data (GPRS) connection.
while not network.is_connected:
    print("Connecting to network...")
    network.connect()
    time.sleep(0.5)
print("Network Connected!")

# Initialize a requests object with a socket and cellular interface
requests.set_socket(cellular_socket, fona)

TEXT_URL = "http://wifitest.adafruit.com/testwifi/index.html"
JSON_GET_URL = "http://httpbin.org/get"
JSON_POST_URL = "http://httpbin.org/post"

print("Fetching text from %s" % TEXT_URL)
response = requests.get(TEXT_URL)
print("-" * 40)

print("Text Response: ", response.text)
print("-" * 40)
response.close()

print("Fetching JSON data from %s" % JSON_GET_URL)
response = requests.get(JSON_GET_URL)
print("-" * 40)

print("JSON Response: ", response.json())
print("-" * 40)
response.close()

data = "31F"
print("POSTing data to {0}: {1}".format(JSON_POST_URL, data))
response = requests.post(JSON_POST_URL, data=data)
print("-" * 40)

json_resp = response.json()
# Parse out the 'data' key from json_resp dict.
print("Data received from server:", json_resp["data"])
print("-" * 40)
response.close()

json_data = {"Date": "July 25, 2019"}
print("POSTing data to {0}: {1}".format(JSON_POST_URL, json_data))
response = requests.post(JSON_POST_URL, json=json_data)
print("-" * 40)

json_resp = response.json()
# Parse out the 'json' key from json_resp dict.
print("JSON Data received from server:", json_resp["json"])
print("-" * 40)
response.close()
"sqrtryan@gmail.com"
] | sqrtryan@gmail.com |
31b20c33f85cf51e5f5a85fc2e154cd5e696c05c | 622a4baffb2c1e47aa9f1ac10eedeaf97e16c2a4 | /DataFreaksSchool/apps/school/admin.py | 0cb291f1c034da2523b31ef46444010cdfe6b23a | [] | no_license | Noeuclides/DataFreaks | 81bff087ee813bff4529245a27c09ea5ff6086d8 | 4739316223e31feffe5a020505727be983001be0 | refs/heads/master | 2023-04-27T18:06:49.788374 | 2020-02-05T00:50:36 | 2020-02-05T00:50:36 | 237,767,894 | 0 | 1 | null | 2023-04-21T20:47:07 | 2020-02-02T12:28:54 | Python | UTF-8 | Python | false | false | 269 | py | from django.contrib import admin
from .models import CustomUser, Student, Teacher, Course, Note
# Register your models here.
# Expose the school app's models in the Django admin site.
for model in (CustomUser, Student, Teacher, Course, Note):
    admin.site.register(model)
"euclidesnoeuclides@gmail.com"
] | euclidesnoeuclides@gmail.com |
8c5a37c5f4bd04e4bdee20dfee9587c03cbae32c | 3e05276c6562bbca2c46daec0bf30d765bb6c8d5 | /jobseeker/forms.py | 7ce3e3e0bb7a3ade0d8400cacafb8924bcf73ddf | [] | no_license | mrpal39/portflioWebapp | 762571a74979ddcd4abf90c8ab8684dcd2afa6fa | 898023c0b528557d4ab5ece6c48707f5e61ea296 | refs/heads/master | 2023-08-07T06:17:28.081727 | 2021-10-05T16:30:27 | 2021-10-05T16:30:27 | 414,057,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,814 | py |
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django.db import models
from django.db.models import fields
from password_reset.forms import PasswordRecoveryForm, PasswordResetForm
from django import forms
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.utils.translation import ugettext_lazy as _
from .models import Profile
from django.db import transaction
from django.contrib.auth.forms import UserCreationForm
class UserForm(forms.ModelForm):
    """Base ``ModelForm`` that attaches Bootstrap CSS classes to Profile widgets.

    Subclasses declare which Profile fields they expose via ``Meta.fields``;
    this base styles whichever of those fields are actually present.
    """

    # Field name -> CSS class applied to that field's widget.
    _WIDGET_CLASSES = {
        'bio': 'form-control',
        'image': 'image',
        'email': 'form-control',
        'phone': 'form-control',
        'occupation': 'form-control',
        'social_account': 'form-control',
        'experence': 'form-control',
        'skills': 'form-control',
        'age': 'form-control',
        'status': 'form-control',
        'mobile': 'form-control',
        'gender': 'form-control',
    }

    def __init__(self, *args, **kwargs):
        super(UserForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        # Only style fields the concrete subclass actually declares.
        # ProfileForm's Meta omits image/email/phone/occupation/social_account,
        # so the original unconditional self.fields[...] lookups raised
        # KeyError as soon as that subclass was instantiated.
        for name, css_class in self._WIDGET_CLASSES.items():
            field = self.fields.get(name)
            if field is not None:
                field.widget.attrs.update({"class": css_class})
class ProfilUpdateForm(UserForm):
    """Edit an existing jobseeker :class:`Profile` (all public fields)."""

    class Meta:
        model = Profile
        fields = (
            'bio',
            'image',
            'email',
            'phone',
            'social_account',
            'experence',
            'skills',
            'occupation',
            'age',
            'status',
            'mobile',
            'gender',
        )

    def __init__(self, *args, **kwargs):
        super(ProfilUpdateForm, self).__init__(*args, **kwargs)
        # UserForm.__init__ already created self.helper; the original
        # re-created it here for no reason. Just add the submit button.
        self.helper.add_input(Submit("submit", _("Save Changes")))

    def save(self, commit=True):
        """Persist the bound Profile and return it.

        The original override was ``save(self)``, dropping ModelForm's
        ``commit`` parameter, so callers using ``save(commit=False)`` (e.g.
        to set extra attributes before saving) raised TypeError. The default
        ``commit=True`` preserves the old behavior.
        """
        user = super().save(commit=False)
        if commit:
            user.save()
        return user
class ProfileForm(UserForm):
    """Create/edit a jobseeker :class:`Profile` with a reduced field set."""

    class Meta:
        model = Profile
        fields = (
            'bio',
            'experence',
            'skills',
            'age',
            'status',
            'mobile',
            'gender',
        )

    def __init__(self, *args, **kwargs):
        super(ProfileForm, self).__init__(*args, **kwargs)
        # UserForm.__init__ already created self.helper; just add the button.
        self.helper.add_input(Submit("submit", _("Save Changes")))

    def save(self, commit=True):
        """Persist the bound Profile and return it.

        Restores ModelForm's ``commit`` contract: the original override was
        ``save(self)``, so ``save(commit=False)`` raised TypeError. The
        default ``commit=True`` preserves the old behavior.
        """
        user = super().save(commit=False)
        if commit:
            user.save()
        return user
| [
"rp9545416@gmail.com"
] | rp9545416@gmail.com |
b321ddea753491e4d4c5b8d334c61a4c303b5cd0 | 96148bf17555c028f5650d51f496f349c89e8c79 | /build/cob_driver/cob_utilities/catkin_generated/pkg.develspace.context.pc.py | a5e228897d169a8b0fdaa67c1507b846b574a730 | [] | no_license | kerekare/ros_hydra_libphidgetsupdated | 239daed94a95f60743c5659f1102183641761240 | e05e58417fb03a14d627bc80d09af3b2a0fcceab | refs/heads/master | 2016-09-05T23:35:43.792883 | 2014-03-25T16:32:01 | 2014-03-25T16:32:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 582 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin pkg-config context for the cob_utilities package;
# regenerated by the build -- do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
# Exported header search paths (the generator emits [] when the template
# value is the empty string).
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/kerekare/workspace/care-o-bot/src/cob_driver/cob_utilities/common/include".split(';') if "/home/kerekare/workspace/care-o-bot/src/cob_driver/cob_utilities/common/include" != "" else []
# Catkin run dependencies, space separated.
PROJECT_CATKIN_DEPENDS = "roscpp".replace(';', ' ')
# Linker flags exported by this package.
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lcob_utilities".split(';') if "-lcob_utilities" != "" else []
PROJECT_NAME = "cob_utilities"
# Devel-space prefix this context was generated for.
PROJECT_SPACE_DIR = "/home/kerekare/workspace/care-o-bot/devel"
PROJECT_VERSION = "0.5.0"
| [
"kerekare@i60sr2.(none)"
] | kerekare@i60sr2.(none) |
986b1f3251a5308712b090a8167d90ed2cdb9b59 | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/uhd_restpy/testplatform/sessions/ixnetwork/statistics/view/layer23trafficflowdetectivefilter/layer23trafficflowdetectivefilter.py | 55ef9c2492cdff0c2a063ba59e7c480e514620c1 | [] | no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,939 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class Layer23TrafficFlowDetectiveFilter(Base):
"""Filters associated with layer23TrafficFlowDetective view.
The Layer23TrafficFlowDetectiveFilter class encapsulates a list of layer23TrafficFlowDetectiveFilter resources that are managed by the user.
A list of resources can be retrieved from the server using the Layer23TrafficFlowDetectiveFilter.find() method.
The list can be managed by using the Layer23TrafficFlowDetectiveFilter.add() and Layer23TrafficFlowDetectiveFilter.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'layer23TrafficFlowDetectiveFilter'
_SDM_ATT_MAP = {
'DeadFlowsCount': 'deadFlowsCount',
'DeadFlowsThreshold': 'deadFlowsThreshold',
'FlowFilterType': 'flowFilterType',
'PortFilterIds': 'portFilterIds',
'ShowEgressFlows': 'showEgressFlows',
'TrafficItemFilterId': 'trafficItemFilterId',
'TrafficItemFilterIds': 'trafficItemFilterIds',
}
_SDM_ENUM_MAP = {
'flowFilterType': ['allFlows', 'deadFlows', 'liveFlows'],
}
    def __init__(self, parent, list_op=False):
        # parent: owning node in the REST resource hierarchy;
        # list_op: presumably True when built as part of a list operation --
        # TODO confirm against Base.
        super(Layer23TrafficFlowDetectiveFilter, self).__init__(parent, list_op)
    @property
    def AllFlowsFilter(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.allflowsfilter.allflowsfilter.AllFlowsFilter): An instance of the AllFlowsFilter class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Imported locally (generated accessors avoid module-level cycles);
        # reuse the cached child accessor when one exists for this node.
        from uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.allflowsfilter.allflowsfilter import AllFlowsFilter
        if self._properties.get('AllFlowsFilter', None) is not None:
            return self._properties.get('AllFlowsFilter')
        else:
            return AllFlowsFilter(self)
    @property
    def DeadFlowsFilter(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.deadflowsfilter.deadflowsfilter.DeadFlowsFilter): An instance of the DeadFlowsFilter class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Imported locally (generated accessors avoid module-level cycles);
        # reuse the cached child accessor when one exists for this node.
        from uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.deadflowsfilter.deadflowsfilter import DeadFlowsFilter
        if self._properties.get('DeadFlowsFilter', None) is not None:
            return self._properties.get('DeadFlowsFilter')
        else:
            return DeadFlowsFilter(self)
    @property
    def LiveFlowsFilter(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.liveflowsfilter.liveflowsfilter.LiveFlowsFilter): An instance of the LiveFlowsFilter class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Imported locally (generated accessors avoid module-level cycles);
        # reuse the cached child accessor when one exists for this node.
        from uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.liveflowsfilter.liveflowsfilter import LiveFlowsFilter
        if self._properties.get('LiveFlowsFilter', None) is not None:
            return self._properties.get('LiveFlowsFilter')
        else:
            return LiveFlowsFilter(self)
    @property
    def StatisticFilter(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.statisticfilter.statisticfilter.StatisticFilter): An instance of the StatisticFilter class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Imported locally (generated accessors avoid module-level cycles);
        # reuse the cached child accessor when one exists for this node.
        from uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.statisticfilter.statisticfilter import StatisticFilter
        if self._properties.get('StatisticFilter', None) is not None:
            return self._properties.get('StatisticFilter')
        else:
            return StatisticFilter(self)
    @property
    def TrackingFilter(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.trackingfilter.trackingfilter.TrackingFilter): An instance of the TrackingFilter class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Imported locally (generated accessors avoid module-level cycles);
        # reuse the cached child accessor when one exists for this node.
        from uhd_restpy.testplatform.sessions.ixnetwork.statistics.view.layer23trafficflowdetectivefilter.trackingfilter.trackingfilter import TrackingFilter
        if self._properties.get('TrackingFilter', None) is not None:
            return self._properties.get('TrackingFilter')
        else:
            return TrackingFilter(self)
    @property
    def DeadFlowsCount(self):
        # type: () -> int
        """Read-only server-side counter (no setter is generated).

        Returns
        -------
        - number: The number of flows declared dead. A flow is declared dead if no traffic is received for a specified number of seconds. To change this threshold use the deadFlowsThreshold attribute.
        """
        return self._get_attribute(self._SDM_ATT_MAP['DeadFlowsCount'])
@property
def DeadFlowsThreshold(self):
    # type: () -> int
    """Seconds without packets before a flow is declared dead.

    Returns
    -------
    - number: Threshold in seconds after which the flows are declared dead if there are no packets received for a specified number of seconds. This is a global attribute, so the latest value entered takes precedence over previous values in all the custom views.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DeadFlowsThreshold'])
@DeadFlowsThreshold.setter
def DeadFlowsThreshold(self, value):
    # type: (int) -> None
    """Set the dead-flow threshold (seconds); applies globally across all custom views."""
    self._set_attribute(self._SDM_ATT_MAP['DeadFlowsThreshold'], value)
@property
def FlowFilterType(self):
    # type: () -> str
    """Which flows the detective filter shows.

    Returns
    -------
    - str(allFlows | deadFlows | liveFlows): Indicates the flow detective filter settings.
    """
    return self._get_attribute(self._SDM_ATT_MAP['FlowFilterType'])
@FlowFilterType.setter
def FlowFilterType(self, value):
    # type: (str) -> None
    """Set the filter mode: one of 'allFlows', 'deadFlows', 'liveFlows'."""
    self._set_attribute(self._SDM_ATT_MAP['FlowFilterType'], value)
@property
def PortFilterIds(self):
    # type: () -> List[str]
    """Selected port filter hrefs.

    Returns
    -------
    - list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availablePortFilter]): Selected port filters from the availablePortFilter list.
    """
    return self._get_attribute(self._SDM_ATT_MAP['PortFilterIds'])
@PortFilterIds.setter
def PortFilterIds(self, value):
    # type: (List[str]) -> None
    """Set the selected port filters (hrefs from the availablePortFilter list)."""
    self._set_attribute(self._SDM_ATT_MAP['PortFilterIds'], value)
@property
def ShowEgressFlows(self):
    # type: () -> bool
    """Presumably controls display of egress-tracked flows — upstream documentation says 'NOT DEFINED'; confirm against the IxNetwork API reference.

    Returns
    -------
    - bool: NOT DEFINED
    """
    return self._get_attribute(self._SDM_ATT_MAP['ShowEgressFlows'])
@ShowEgressFlows.setter
def ShowEgressFlows(self, value):
    # type: (bool) -> None
    """Set ShowEgressFlows (semantics undocumented upstream: 'NOT DEFINED')."""
    self._set_attribute(self._SDM_ATT_MAP['ShowEgressFlows'], value)
@property
def TrafficItemFilterId(self):
    # type: () -> str
    """DEPRECATED — use TrafficItemFilterIds instead.

    Returns
    -------
    - str(None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter): Selected traffic flow detective filter from the availableTrafficItemFilter list.
    """
    return self._get_attribute(self._SDM_ATT_MAP['TrafficItemFilterId'])
@TrafficItemFilterId.setter
def TrafficItemFilterId(self, value):
    # type: (str) -> None
    """Set the (deprecated) single traffic item filter href."""
    self._set_attribute(self._SDM_ATT_MAP['TrafficItemFilterId'], value)
@property
def TrafficItemFilterIds(self):
    # type: () -> List[str]
    """Selected traffic item filter hrefs.

    Returns
    -------
    - list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter]): Selected traffic item filters from the availableTrafficItemFilter list.
    """
    return self._get_attribute(self._SDM_ATT_MAP['TrafficItemFilterIds'])
@TrafficItemFilterIds.setter
def TrafficItemFilterIds(self, value):
    # type: (List[str]) -> None
    """Set the selected traffic item filters (hrefs from the availableTrafficItemFilter list)."""
    self._set_attribute(self._SDM_ATT_MAP['TrafficItemFilterIds'], value)
def update(self, DeadFlowsThreshold=None, FlowFilterType=None, PortFilterIds=None, ShowEgressFlows=None, TrafficItemFilterId=None, TrafficItemFilterIds=None):
    # type: (int, str, List[str], bool, str, List[str]) -> Layer23TrafficFlowDetectiveFilter
    """Updates layer23TrafficFlowDetectiveFilter resource on the server.

    Args
    ----
    - DeadFlowsThreshold (number): Seconds without packets before a flow is declared dead (global; the latest value wins across all custom views).
    - FlowFilterType (str(allFlows | deadFlows | liveFlows)): Flow detective filter setting.
    - PortFilterIds (list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availablePortFilter])): Selected port filters from the availablePortFilter list.
    - ShowEgressFlows (bool): NOT DEFINED
    - TrafficItemFilterId (str(None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter)): Selected traffic flow detective filter from the availableTrafficItemFilter list.
    - TrafficItemFilterIds (list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter])): Selected traffic item filters from the availableTrafficItemFilter list.

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() captures the caller's keyword arguments; _map_locals translates
    # them to SDM attribute names before sending the update.
    return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, DeadFlowsThreshold=None, FlowFilterType=None, PortFilterIds=None, ShowEgressFlows=None, TrafficItemFilterId=None, TrafficItemFilterIds=None):
    # type: (int, str, List[str], bool, str, List[str]) -> Layer23TrafficFlowDetectiveFilter
    """Adds a new layer23TrafficFlowDetectiveFilter resource on the server and adds it to the container.

    Args
    ----
    - DeadFlowsThreshold (number): Seconds without packets before a flow is declared dead (global; the latest value wins across all custom views).
    - FlowFilterType (str(allFlows | deadFlows | liveFlows)): Flow detective filter setting.
    - PortFilterIds (list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availablePortFilter])): Selected port filters from the availablePortFilter list.
    - ShowEgressFlows (bool): NOT DEFINED
    - TrafficItemFilterId (str(None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter)): Selected traffic flow detective filter from the availableTrafficItemFilter list.
    - TrafficItemFilterIds (list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter])): Selected traffic item filters from the availableTrafficItemFilter list.

    Returns
    -------
    - self: This instance with all currently retrieved layer23TrafficFlowDetectiveFilter resources using find and the newly added layer23TrafficFlowDetectiveFilter resources available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() carries the keyword arguments into the SDM attribute mapping.
    return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
    """Deletes all the contained layer23TrafficFlowDetectiveFilter resources in this instance from the server.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    self._delete()
def find(self, DeadFlowsCount=None, DeadFlowsThreshold=None, FlowFilterType=None, PortFilterIds=None, ShowEgressFlows=None, TrafficItemFilterId=None, TrafficItemFilterIds=None):
    # type: (int, int, str, List[str], bool, str, List[str]) -> Layer23TrafficFlowDetectiveFilter
    """Finds and retrieves layer23TrafficFlowDetectiveFilter resources from the server.

    All named parameters are evaluated on the server using regex; to retrieve an
    exact match ensure the parameter value starts with ^ and ends with $.
    With no parameters, all layer23TrafficFlowDetectiveFilter resources are retrieved.

    Args
    ----
    - DeadFlowsCount (number): The number of flows declared dead (read-only attribute; see DeadFlowsThreshold).
    - DeadFlowsThreshold (number): Seconds without packets before a flow is declared dead (global; the latest value wins across all custom views).
    - FlowFilterType (str(allFlows | deadFlows | liveFlows)): Flow detective filter setting.
    - PortFilterIds (list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availablePortFilter])): Selected port filters from the availablePortFilter list.
    - ShowEgressFlows (bool): NOT DEFINED
    - TrafficItemFilterId (str(None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter)): Selected traffic flow detective filter from the availableTrafficItemFilter list.
    - TrafficItemFilterIds (list(str[None | /api/v1/sessions/1/ixnetwork/statistics/.../availableTrafficItemFilter])): Selected traffic item filters from the availableTrafficItemFilter list.

    Returns
    -------
    - self: This instance with matching layer23TrafficFlowDetectiveFilter resources retrieved from the server available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # _select issues the server-side regex match over the mapped attributes.
    return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
    """Retrieves a single instance of layer23TrafficFlowDetectiveFilter data from the server.

    Args
    ----
    - href (str): An href to the instance to be retrieved

    Returns
    -------
    - self: This instance with the layer23TrafficFlowDetectiveFilter resources from the server available through an iterator or index

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._read(href)
| [
"pdobrinskiy@yahoo.com"
] | pdobrinskiy@yahoo.com |
1519cd3690074f07ddfb744acb91bcd6f0e5a6a8 | 26e4bea46942b9afa5a00b9cde9a84f2cc58e3c9 | /pygame/Astar/pathfinding/Graph_old.py | bb30a06775a80a8af4ce9dfcaed88b9af6e2cc5d | [] | no_license | MeetLuck/works | 46da692138cb9741a913d84eff6822f107510dc7 | ab61175bb7e2ed5c5113bf150e0541ae18eb04c4 | refs/heads/master | 2020-04-12T05:40:25.143075 | 2017-08-21T17:01:06 | 2017-08-21T17:01:06 | 62,373,576 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,690 | py | from random import random
from colors import *
#bgcolor = lightgray
# Default search map: ' ' = walkable cell, '*' = wall.
# NOTE(review): row widths look uneven here — whitespace was likely lost in
# transit; Graph assumes every row has len(grid[0]) columns. Confirm widths.
grid = [" * ",
        " *** ",
        " ",
        "* ** ",
        "* "]
def findNode(node, lst):
    """Return True when *node* already occurs in *lst*, else False."""
    for existing in lst:
        # Mirror the semantics of the 'in' operator: identity short-circuit,
        # then equality.
        if existing is node or existing == node:
            return True
    return False
class Node:
    """A single walkable grid cell in the search graph."""

    # Numeric infinity instead of the string 'Infinity': a string sentinel
    # breaks any numeric cost comparison (e.g. Dijkstra/A* relaxation).
    INFINITY = float('inf')

    def __init__(self):
        self.adjacent = []          # neighbouring open nodes (UP/DOWN/LEFT/RIGHT)
        self.previous = None        # back-pointer filled in during search
        self.label = ""             # display name assigned by Graph
        # Previously 'cost' only existed after clear() was called; initialise
        # it here so the attribute is always present.
        self.cost = Node.INFINITY

    def clear(self):
        """Reset per-search state so the node can be reused by a new search."""
        self.previous = None
        self.cost = Node.INFINITY

    def __str__(self):
        return self.label
class Graph:
    """Graph built from a rectangular ASCII grid.

    Each cell becomes one Node; open cells (' ') are linked to their open
    N/S/W/E neighbours, blocked cells ('*') receive no outgoing edges.
    """

    def __init__(self, grid):
        # Keep the grid on the instance: the original createNodes() read the
        # module-level global 'grid' and silently ignored this argument.
        self.grid = grid
        self.rows = len(grid)
        self.cols = len(grid[0])
        self.createLabels()
        self.createNodes()

    def createLabels(self):
        """Generate at least rows*cols labels: A..Z, then AA..AZ, BA.., ...

        The original produced exactly 26 labels, which raised IndexError for
        any grid with more than 26 cells.
        """
        self.labels = list()
        alphabet = [chr(i) for i in range(65, 91)]
        prefixes = ['']
        while len(self.labels) < self.rows * self.cols:
            next_prefixes = []
            for prefix in prefixes:
                for letter in alphabet:
                    label = prefix + letter
                    self.labels.append(label)
                    next_prefixes.append(label)
            prefixes = next_prefixes

    def createNodes(self):
        """Create one Node per cell and wire adjacency between open cells."""
        grid = self.grid  # local alias; the original wrongly used the module global
        self.nodes = list()
        for i in range(self.rows * self.cols):
            node = Node()
            node.label = self.labels[i]
            self.nodes.append(node)
        # Add edges to adjacent open nodes.
        for r in range(self.rows):
            for c in range(self.cols):
                node = self.nodes[self.cols * r + c]
                # Blocked squares get no outgoing edges.
                if grid[r][c] == '*':
                    continue
                if r > 0 and grid[r - 1][c] == ' ':              # UP
                    node.adjacent.append(self.nodes[self.cols * (r - 1) + c])
                if r < self.rows - 1 and grid[r + 1][c] == ' ':  # DOWN
                    node.adjacent.append(self.nodes[self.cols * (r + 1) + c])
                if c > 0 and grid[r][c - 1] == ' ':              # LEFT
                    node.adjacent.append(self.nodes[self.cols * r + c - 1])
                if c < self.cols - 1 and grid[r][c + 1] == ' ':  # RIGHT
                    node.adjacent.append(self.nodes[self.cols * r + c + 1])

    def findNodeByLabel(self, label):
        """Return the first node carrying *label*, or None when absent."""
        for node in self.nodes:
            if node.label == label:
                return node
        return None
# Search
class Search:
    """Incremental graph search (random node choice) from start to goal label.

    Python 2 module: print statements below are intentional.
    """

    def __init__(self, graph, start, goal):
        self.graph = graph
        self.reachable = list()   # frontier: discovered but unexplored nodes
        self.explored = list()    # nodes already expanded
        self.path = list()        # filled (goal -> start order) once found
        self.start_label = start
        self.goal_label = goal

    def reset(self):
        """Prepare for a fresh search: seed the frontier and clear all nodes."""
        self.reachable = [ self.graph.findNodeByLabel(self.start_label) ]
        self.goal = self.graph.findNodeByLabel(self.goal_label)
        self.explored = list()
        self.path = list()
        self.iteration = 0
        for i, node in enumerate(self.graph.nodes):
            self.graph.nodes[i].clear()
        #self.reachable[0].cost = 0
        #self.render()

    def step(self):
        """Advance the search by expanding one frontier node."""
        if len(self.path) > 0: # is the search already done ?
            return
        # if there are no more nodes to consider, we're done
        # NOTE(review): 'finished' is only set here, never initialised in
        # __init__ — reading it before exhaustion raises AttributeError.
        if len(self.reachable) == 0:
            self.finished = True
            return
        self.iteration += 1
        # choose a node to examine next
        node = self.chooseNode()
        # are we done yet? If so, walk the back-pointers to build the path.
        if node == self.goal:
            while node:
                self.path.append(node)
                node = node.previous
            print '------------- find path ----------------'
            self.render()
            return
        # do not repeat
        self.reachable.remove(node)
        self.explored.append(node)
        # where can we get from here?
        # self.render()
        # if node is None: return
        for adjnode in node.adjacent:
            self.addAdjacent(node, adjnode)
        self.render()

    def chooseNode(self):
        """Pick a frontier node uniformly at random."""
        return self.reachable[ int(random() * len(self.reachable)) ]

    def addAdjacent(self, node, adjacent):
        """Add *adjacent* to the frontier unless already seen; record its parent."""
        if findNode(adjacent, self.explored) or findNode(adjacent, self.reachable):
            return
        adjacent.previous = node
        self.reachable.append(adjacent)

    def render(self):
        """Dump frontier, explored set and path to stdout (debug aid)."""
        print '================== render =============='
        print 'reachable ==>'
        for rnode in self.reachable:
            print rnode.label,
        print
        print 'explored ==>'
        for enode in self.explored:
            print enode.label,
        print
        print 'path ==>'
        print self.path
if __name__ == '__main__':
    # Demo: run up to 40 random-choice search steps from 'A' towards 'T'.
    g = Graph(grid)
    search = Search(g, 'A', 'T')
    search.reset()
    search.render()
    for i in range(40):
        search.step()
    # search.step()
    # print g.labels
    # for node in g.nodes:
    #     print node
| [
"withpig1994@hanmail.net"
] | withpig1994@hanmail.net |
67473f9f435beec220fa067cf392d561ef7b110b | d8a766184f7d2e4379a9578b6bd01451f4434fd8 | /waynes_world/server.py | a8b013a456acdc8013c43b1f65b05507c9a6c9c7 | [] | no_license | YaoQ/zmq_examples | 1e9c386f3d8b51e04208bcededb8f64938a5200e | 631867073b79087c4bf94dff7ff3c57c113fc9a1 | refs/heads/master | 2020-11-30T13:03:08.111975 | 2011-06-03T21:16:59 | 2011-06-03T21:16:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | #!/usr/bin/env python
import zmq
import time

ctx = zmq.Context()

# First publisher socket, bound over TCP on the loopback interface.
s1 = ctx.socket(zmq.PUB)
s1.bind("tcp://127.0.0.1:5566")

# Second publisher socket.
# NOTE(review): ipc:// endpoints are filesystem paths; "ipc://*:5567" is a
# tcp-style address and will likely fail or create an oddly named socket file —
# confirm whether "tcp://*:5567" was intended.
s2 = ctx.socket(zmq.PUB)
s2.bind("ipc://*:5567")

# Publish alternately on each socket, one message per second, forever.
# (Python 2 print statements are intentional for this module.)
while True:
    print 'Camera 1?'
    s1.send("Camera 1")
    time.sleep(1)
    print 'Camera 2?'
    s2.send("Camera 2")
    time.sleep(1)
| [
"jd@j2labs.net"
] | jd@j2labs.net |
dce067bc85e5dd7a18bcdd69eac1544a142aeea7 | bc441bb06b8948288f110af63feda4e798f30225 | /micro_app_sdk/model/cmdb/import_result_pb2.py | 8d037a5e63e6e6059b267b9e37c5d661f6e72b1b | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 11,538 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: import_result.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from micro_app_sdk.model.cmdb import import_status_pb2 as micro__app__sdk_dot_model_dot_cmdb_dot_import__status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='import_result.proto',
package='cmdb',
syntax='proto3',
serialized_options=_b('Z>go.easyops.local/contracts/protorepo-models/easyops/model/cmdb'),
serialized_pb=_b('\n\x13import_result.proto\x12\x04\x63mdb\x1a,micro_app_sdk/model/cmdb/import_status.proto\"\x83\x04\n\x0cImportResult\x12\x10\n\x08objectId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x03 \x01(\t\x12\x0c\n\x04memo\x18\x04 \x01(\t\x12\x11\n\tprotected\x18\x05 \x01(\x08\x12\x0e\n\x06system\x18\x06 \x01(\t\x12\x0c\n\x04\x63ode\x18\x07 \x01(\x05\x12\x0f\n\x07message\x18\x08 \x01(\t\x12\'\n\x0binfo_result\x18\t \x03(\x0b\x32\x12.cmdb.ImportStatus\x12\x31\n\x15relation_group_result\x18\n \x03(\x0b\x32\x12.cmdb.ImportStatus\x12,\n\x10\x61ttr_list_result\x18\x0b \x03(\x0b\x32\x12.cmdb.ImportStatus\x12\x30\n\x14relation_list_result\x18\x0c \x03(\x0b\x32\x12.cmdb.ImportStatus\x12=\n\x11index_list_result\x18\r \x03(\x0b\x32\".cmdb.ImportResult.IndexListResult\x12\x11\n\tis_create\x18\x0e \x01(\x08\x1a\x63\n\x0fIndexListResult\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x13\n\x0bpropertyIds\x18\x04 \x03(\t\x12\x0e\n\x06unique\x18\x05 \x01(\x08\x42@Z>go.easyops.local/contracts/protorepo-models/easyops/model/cmdbb\x06proto3')
,
dependencies=[micro__app__sdk_dot_model_dot_cmdb_dot_import__status__pb2.DESCRIPTOR,])
_IMPORTRESULT_INDEXLISTRESULT = _descriptor.Descriptor(
name='IndexListResult',
full_name='cmdb.ImportResult.IndexListResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='cmdb.ImportResult.IndexListResult.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='cmdb.ImportResult.IndexListResult.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='cmdb.ImportResult.IndexListResult.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='propertyIds', full_name='cmdb.ImportResult.IndexListResult.propertyIds', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='unique', full_name='cmdb.ImportResult.IndexListResult.unique', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=492,
serialized_end=591,
)
_IMPORTRESULT = _descriptor.Descriptor(
name='ImportResult',
full_name='cmdb.ImportResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='objectId', full_name='cmdb.ImportResult.objectId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='cmdb.ImportResult.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='category', full_name='cmdb.ImportResult.category', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memo', full_name='cmdb.ImportResult.memo', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='protected', full_name='cmdb.ImportResult.protected', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='system', full_name='cmdb.ImportResult.system', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='cmdb.ImportResult.code', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='cmdb.ImportResult.message', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='info_result', full_name='cmdb.ImportResult.info_result', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='relation_group_result', full_name='cmdb.ImportResult.relation_group_result', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attr_list_result', full_name='cmdb.ImportResult.attr_list_result', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='relation_list_result', full_name='cmdb.ImportResult.relation_list_result', index=11,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index_list_result', full_name='cmdb.ImportResult.index_list_result', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_create', full_name='cmdb.ImportResult.is_create', index=13,
number=14, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_IMPORTRESULT_INDEXLISTRESULT, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=76,
serialized_end=591,
)
_IMPORTRESULT_INDEXLISTRESULT.containing_type = _IMPORTRESULT
_IMPORTRESULT.fields_by_name['info_result'].message_type = micro__app__sdk_dot_model_dot_cmdb_dot_import__status__pb2._IMPORTSTATUS
_IMPORTRESULT.fields_by_name['relation_group_result'].message_type = micro__app__sdk_dot_model_dot_cmdb_dot_import__status__pb2._IMPORTSTATUS
_IMPORTRESULT.fields_by_name['attr_list_result'].message_type = micro__app__sdk_dot_model_dot_cmdb_dot_import__status__pb2._IMPORTSTATUS
_IMPORTRESULT.fields_by_name['relation_list_result'].message_type = micro__app__sdk_dot_model_dot_cmdb_dot_import__status__pb2._IMPORTSTATUS
_IMPORTRESULT.fields_by_name['index_list_result'].message_type = _IMPORTRESULT_INDEXLISTRESULT
DESCRIPTOR.message_types_by_name['ImportResult'] = _IMPORTRESULT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ImportResult = _reflection.GeneratedProtocolMessageType('ImportResult', (_message.Message,), {
'IndexListResult' : _reflection.GeneratedProtocolMessageType('IndexListResult', (_message.Message,), {
'DESCRIPTOR' : _IMPORTRESULT_INDEXLISTRESULT,
'__module__' : 'import_result_pb2'
# @@protoc_insertion_point(class_scope:cmdb.ImportResult.IndexListResult)
})
,
'DESCRIPTOR' : _IMPORTRESULT,
'__module__' : 'import_result_pb2'
# @@protoc_insertion_point(class_scope:cmdb.ImportResult)
})
_sym_db.RegisterMessage(ImportResult)
_sym_db.RegisterMessage(ImportResult.IndexListResult)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"service@easyops.cn"
] | service@easyops.cn |
f5fd8782e0019eca47a63780daeffe1cc3d8151a | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/DescribeAccountStatRequest.py | 441a4ba9f7ebe95b0c92ae6eceecabc3eb80f86b | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 1,546 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvs.endpoint import endpoint_data
class DescribeAccountStatRequest(RpcRequest):
    """RPC request wrapper for the Video Surveillance (vs) DescribeAccountStat API."""

    def __init__(self):
        # Product 'vs', API version 2018-12-12, action DescribeAccountStat.
        RpcRequest.__init__(self, 'vs', '2018-12-12', 'DescribeAccountStat')
        self.set_method('POST')
        # Endpoint resolution data is only attached when the base class
        # exposes the corresponding attributes.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_Id(self):
        """Return the 'Id' query parameter (None when unset)."""
        return self.get_query_params().get('Id')

    def set_Id(self, Id):
        """Set the 'Id' query parameter."""
        self.add_query_param('Id', Id)

    def get_OwnerId(self):
        """Return the 'OwnerId' query parameter (None when unset)."""
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):
        """Set the 'OwnerId' query parameter."""
        self.add_query_param('OwnerId', OwnerId)
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
1dc9d34098975e3ff9b1f7711301a6527338634f | ed3fe7635eee9216e5a10ba6c2d13a79e1be7063 | /backend/mcgill_cc_22904/settings.py | e4d4ab42b8574080a66526ddcaf89cf696d230ce | [] | no_license | crowdbotics-apps/mcgill-cc-22904 | ce47ef45ccc84bedb11e869d6119c77dcc98eda7 | d24767a048d901d534f9006e2bcc8b3a0bde1495 | refs/heads/master | 2023-01-15T22:20:41.446718 | 2020-11-23T02:30:55 | 2020-11-23T02:30:55 | 315,179,346 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,017 | py | """
Django settings for mcgill_cc_22904 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mcgill_cc_22904.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mcgill_cc_22904.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config — media storage switches to S3 only when all four settings
# below are supplied via the environment; otherwise local MEDIA_ROOT is used.
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
# Truthy only when every required credential/setting is non-empty.
USE_S3 = (
    AWS_ACCESS_KEY_ID and
    AWS_SECRET_ACCESS_KEY and
    AWS_STORAGE_BUCKET_NAME and
    AWS_STORAGE_REGION
)
if USE_S3:
    AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
    # Cache served objects for one day (86400 s) at the browser/CDN level.
    AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
    AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
    AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
    AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
    DEFAULT_FILE_STORAGE = env.str(
        "DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
    )
# Local media fallback (also the upload target when USE_S3 is false).
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
beab8a1339bc20303692c55f07feb5952073ef6f | 4e8ac215b672b333f19da87787c0d8768fee439e | /MIDI Remote Scripts/ableton/v2/control_surface/components/clip_slot.py | 59ee3f7c50483dd23ab00fb497f6df85cd15fa2f | [
"MIT"
] | permissive | aarkwright/ableton_devices | 593f47293c673aa56f6e0347ca6444b7fce2812a | fe5df3bbd64ccbc136bba722ba1e131a02969798 | refs/heads/master | 2020-07-02T08:11:21.137438 | 2019-08-09T13:48:06 | 2019-08-09T13:48:06 | 201,467,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,371 | py | # uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\ableton\v2\control_surface\components\clip_slot.py
# Compiled at: 2019-02-18 13:43:58
from __future__ import absolute_import, print_function, unicode_literals
import Live
from ...base import listens, liveobj_valid
from ..component import Component
from ..control import ButtonControl
def find_nearest_color(rgb_table, src_hex_color):
def hex_to_channels(color_in_hex):
return (
(color_in_hex & 16711680) >> 16,
(color_in_hex & 65280) >> 8,
color_in_hex & 255)
def squared_distance(color):
return sum([ (a - b) ** 2 for a, b in zip(hex_to_channels(src_hex_color), hex_to_channels(color[1]))
])
return min(rgb_table, key=squared_distance)[0]
def is_button_pressed(button):
if button:
return button.is_pressed()
return False
class ClipSlotComponent(Component):
"""
Component representing a ClipSlot within Live.
"""
launch_button = ButtonControl()
def __init__(self, *a, **k):
super(ClipSlotComponent, self).__init__(*a, **k)
self._clip_slot = None
self._triggered_to_play_color = b'Session.ClipTriggeredPlay'
self._triggered_to_record_color = b'Session.ClipTriggeredRecord'
self._started_value = b'Session.ClipStarted'
self._recording_color = b'Session.ClipRecording'
self._stopped_value = b'Session.ClipStopped'
self._clip_palette = []
self._clip_rgb_table = None
self._record_button_color = b'Session.RecordButton'
self._empty_slot_color = b'Session.ClipEmpty'
self._delete_button = None
self._select_button = None
self._duplicate_button = None
return
def set_clip_slot(self, clip_slot):
self._clip_slot = clip_slot
self._update_clip_property_slots()
self.__on_slot_triggered_changed.subject = clip_slot
self.__on_slot_playing_state_changed.subject = clip_slot
self.__on_clip_state_changed.subject = clip_slot
self.__on_controls_other_clips_changed.subject = clip_slot
self.__on_has_stop_button_changed.subject = clip_slot
self.__on_clip_slot_color_changed.subject = clip_slot
track = clip_slot.canonical_parent if clip_slot else None
if track and track.can_be_armed:
self.__on_arm_value_changed.subject = track
self.__on_implicit_arm_value_changed.subject = track
self.update()
return
def set_launch_button(self, button):
self.launch_button.set_control_element(button)
self.update()
def set_delete_button(self, button):
self._delete_button = button
def set_select_button(self, button):
self._select_button = button
def set_duplicate_button(self, button):
self._duplicate_button = button
def set_clip_palette(self, palette):
assert palette != None
self._clip_palette = palette
return
def set_clip_rgb_table(self, rgb_table):
""" A list of velocity, hex-rgb color pairs that is used, if the color could not
be matched to the clip palette """
self._clip_rgb_table = rgb_table
def has_clip(self):
assert liveobj_valid(self._clip_slot)
return self._clip_slot.has_clip
def update(self):
super(ClipSlotComponent, self).update()
self._update_launch_button_color()
def _update_launch_button_color(self):
if self.is_enabled():
value_to_send = self._empty_slot_color
if liveobj_valid(self._clip_slot):
track = self._clip_slot.canonical_parent
slot_or_clip = self._clip_slot.clip if self.has_clip() else self._clip_slot
value_to_send = self._feedback_value(track, slot_or_clip)
self.launch_button.color = value_to_send
def _color_value(self, slot_or_clip):
color = slot_or_clip.color
try:
return self._clip_palette[color]
except (KeyError, IndexError):
if self._clip_rgb_table != None:
return find_nearest_color(self._clip_rgb_table, color)
else:
return self._stopped_value
return
def _track_is_armed(self, track):
return liveobj_valid(track) and track.can_be_armed and any([track.arm, track.implicit_arm])
def _feedback_value(self, track, slot_or_clip):
if slot_or_clip.is_triggered:
if slot_or_clip.will_record_on_start:
return self._triggered_to_record_color
return self._triggered_to_play_color
else:
if slot_or_clip.is_playing:
if slot_or_clip.is_recording:
return self._recording_color
return self._started_value
if slot_or_clip.color != None:
return self._color_value(slot_or_clip)
if getattr(slot_or_clip, b'controls_other_clips', True):
return self._stopped_value
if self._track_is_armed(track) and self._clip_slot.has_stop_button and self._record_button_color != None:
return self._record_button_color
return self._empty_slot_color
def _update_clip_property_slots(self):
clip = self._clip_slot.clip if self._clip_slot else None
self.__on_clip_playing_state_changed.subject = clip
self.__on_recording_state_changed.subject = clip
self.__on_clip_color_changed.subject = clip
return
@listens(b'has_clip')
def __on_clip_state_changed(self):
self._update_clip_property_slots()
self._update_launch_button_color()
@listens(b'controls_other_clips')
def __on_controls_other_clips_changed(self):
self._update_clip_property_slots()
self._update_launch_button_color()
@listens(b'color')
def __on_clip_color_changed(self):
self._update_launch_button_color()
@listens(b'color')
def __on_clip_slot_color_changed(self):
self._update_launch_button_color()
@listens(b'playing_status')
def __on_slot_playing_state_changed(self):
self._update_launch_button_color()
@listens(b'playing_status')
def __on_clip_playing_state_changed(self):
self._update_launch_button_color()
@listens(b'is_recording')
def __on_recording_state_changed(self):
self._update_launch_button_color()
@listens(b'arm')
def __on_arm_value_changed(self):
self._update_launch_button_color()
@listens(b'implicit_arm')
def __on_implicit_arm_value_changed(self):
self._update_launch_button_color()
@listens(b'has_stop_button')
def __on_has_stop_button_changed(self):
self._update_launch_button_color()
@listens(b'is_triggered')
def __on_slot_triggered_changed(self):
if not self.has_clip():
self._update_launch_button_color()
@launch_button.pressed
def launch_button(self, button):
self._on_launch_button_pressed()
def _on_launch_button_pressed(self):
if is_button_pressed(self._select_button):
self._do_select_clip(self._clip_slot)
elif liveobj_valid(self._clip_slot):
if is_button_pressed(self._duplicate_button):
self._do_duplicate_clip()
elif is_button_pressed(self._delete_button):
self._do_delete_clip()
else:
self._do_launch_clip(True)
self._show_launched_clip_as_highlighted_clip()
@launch_button.released
def launch_button(self, button):
self._on_launch_button_released()
def _on_launch_button_released(self):
if self.launch_button.is_momentary and not is_button_pressed(self._select_button) and liveobj_valid(self._clip_slot) and not is_button_pressed(self._duplicate_button) and not is_button_pressed(self._delete_button):
self._do_launch_clip(False)
def _do_delete_clip(self):
if self._clip_slot and self._clip_slot.has_clip:
self._clip_slot.delete_clip()
def _do_select_clip(self, clip_slot):
if liveobj_valid(self._clip_slot):
if self.song.view.highlighted_clip_slot != self._clip_slot:
self.song.view.highlighted_clip_slot = self._clip_slot
def _do_duplicate_clip(self):
if self._clip_slot and self._clip_slot.has_clip:
try:
track = self._clip_slot.canonical_parent
track.duplicate_clip_slot(list(track.clip_slots).index(self._clip_slot))
except Live.Base.LimitationError:
pass
except RuntimeError:
pass
def _do_launch_clip(self, fire_state):
object_to_launch = self._clip_slot
if self.has_clip():
object_to_launch = self._clip_slot.clip
object_to_launch.set_fire_button_state(fire_state)
def _show_launched_clip_as_highlighted_clip(self):
song = self.song
if song.select_on_launch and self._clip_slot != song.view.highlighted_clip_slot:
self.song.view.highlighted_clip_slot = self._clip_slot | [
"apollo.arkwright@gmail.com"
] | apollo.arkwright@gmail.com |
dcf370a1bd6932ff3c4ff9bb217104dc2ff6961a | 2fc849ee16732463779d4445954941538828879a | /source/webapp/migrations/0001_initial.py | 04cf199a27d95189a00f4255ac850a68e5143e58 | [] | no_license | Aisuluu1405/python_group_3_homework_48_Aisulu_Dzhusupova | 83da037fc9200a1a213fdf0eb5b09ed2febae79a | d4272516816fb83ff4d8a1c64645a508aecf37ee | refs/heads/master | 2023-05-02T16:13:24.948710 | 2019-09-23T12:30:14 | 2019-09-23T12:30:14 | 210,325,979 | 0 | 0 | null | 2023-04-21T20:38:01 | 2019-09-23T10:19:38 | Python | UTF-8 | Python | false | false | 1,036 | py | # Generated by Django 2.2 on 2019-09-20 05:09
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Product name')),
('description', models.TextField(blank=True, max_length=2000, null=True, verbose_name='Description')),
('category', models.CharField(choices=[('other', 'Other'), ('clothes', 'Clothes'), ('shoes', 'Shoes'), ('accessories', 'Accessories'), ('beauty', 'Beauty')], default='other', max_length=30, verbose_name='Category')),
('count', models.FloatField(verbose_name='Count')),
('price', models.DecimalField(decimal_places=2, max_digits=7, verbose_name='Price')),
],
),
]
| [
"aisuluueco2009@yandex.ru"
] | aisuluueco2009@yandex.ru |
e432d89a1b17d0c1572e3c78718079a42a30ce0d | bc233c24523f05708dd1e091dca817f9095e6bb5 | /bitmovin_api_sdk/encoding/encodings/muxings/packed_audio/customdata/customdata_api.py | d4a1fef29ca90072435cc6650196696f08976147 | [
"MIT"
] | permissive | bitmovin/bitmovin-api-sdk-python | e3d6cf8eb8bdad62cb83ec77c0fc4950b06b9cdd | b0860c0b1be7747cf22ad060985504da625255eb | refs/heads/main | 2023-09-01T15:41:03.628720 | 2023-08-30T10:52:13 | 2023-08-30T10:52:13 | 175,209,828 | 13 | 14 | MIT | 2021-04-29T12:30:31 | 2019-03-12T12:47:18 | Python | UTF-8 | Python | false | false | 1,484 | py | # coding: utf-8
from __future__ import absolute_import
from bitmovin_api_sdk.common import BaseApi, BitmovinApiLoggerBase
from bitmovin_api_sdk.common.poscheck import poscheck_except
from bitmovin_api_sdk.models.custom_data import CustomData
from bitmovin_api_sdk.models.response_envelope import ResponseEnvelope
from bitmovin_api_sdk.models.response_error import ResponseError
class CustomdataApi(BaseApi):
@poscheck_except(2)
def __init__(self, api_key, tenant_org_id=None, base_url=None, logger=None):
# type: (str, str, str, BitmovinApiLoggerBase) -> None
super(CustomdataApi, self).__init__(
api_key=api_key,
tenant_org_id=tenant_org_id,
base_url=base_url,
logger=logger
)
def get(self, encoding_id, muxing_id, **kwargs):
# type: (string_types, string_types, dict) -> CustomData
"""Packed Audio muxing Custom Data
:param encoding_id: Id of the encoding.
:type encoding_id: string_types, required
:param muxing_id: Id of the Packed Audio muxing
:type muxing_id: string_types, required
:return: Packed Audio muxing custom data
:rtype: CustomData
"""
return self.api_client.get(
'/encoding/encodings/{encoding_id}/muxings/packed-audio/{muxing_id}/customData',
path_params={'encoding_id': encoding_id, 'muxing_id': muxing_id},
type=CustomData,
**kwargs
)
| [
"openapi@bitmovin.com"
] | openapi@bitmovin.com |
f9552e5fb9ea367cd1fb32326b63cd871d695afb | 2b4668ba8ff74aa03d031786956c4d4802bfe02b | /util/samm_resolver.py | 5b5a982d47eede73c9a4c109120b5b22b97f4f1d | [
"BSD-3-Clause"
] | permissive | ioggstream/dsomm-orm | a8397ab6f73d46a0acfb8928ad7e835bef9b759a | 52c2040b1cb7263d568af548ab18acdcc3700292 | refs/heads/main | 2023-07-12T21:52:52.988267 | 2021-08-13T22:56:30 | 2021-08-13T22:56:30 | 360,102,120 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,211 | py | #
# Parses yaml files contained in https://github.com/OWASP/samm/tree/master/Supporting%20Resources/v2.0/Datamodel/Datafiles
#
from pathlib import Path
import yaml
def read_yaml(f):
return yaml.safe_load(Path(f).read_text())
class SammResolver:
def __init__(self, basepath="."):
self.basepath = Path(basepath)
self.functions = [
f.name[:-4].split(" ")[1] for f in self.basepath.glob("Function *yml")
]
self.functions_map = {x[0]: x for x in self.functions}
self.practices_map = self._parse_practices()
self.streams = self._parse_streams()
self.activities = list(
set([f.name[:-4].split(" ")[1] for f in self.basepath.glob("Activit*.yml")])
)
def _parse_practices(self):
practices = {}
for f in self.basepath.glob("Practice *yml"):
p = read_yaml(f)
if "shortDescription" not in p:
continue
practices[p["shortName"]] = p["name"]
return practices
def _parse_streams(self):
streams = {}
for f in self.basepath.glob("Stream *yml"):
s = read_yaml(f)
s_id = f.name[7:-4]
s_name = s["name"]
streams[s_id] = s_name
return streams
def parse_activity(self, a):
function, practice, maturity, stream = a.split("-")
stream_id = f"{function}-{practice}-{stream}"
return {
"id": a,
"function": self.functions_map[function],
"practice": self.practices_map[practice],
"maturity": maturity,
"stream": self.streams[stream_id],
}
def test_parse_activities():
fpath = "downloads/Datafiles"
samm = SammResolver(fpath)
for a in samm.activities:
print(samm.parse_activity(a))
def test_samm_to_csv():
fpath = "downloads/Datafiles"
samm = SammResolver(fpath)
import pandas as pd
df = pd.DataFrame([samm.parse_activity(a) for a in samm.activities])
df.to_csv("samm_activities.csv")
from sqlalchemy import create_engine
engine = create_engine("mysql+mysqlconnector://root:root@127.0.0.1/dsomm")
df.to_sql("samm", con=engine)
| [
"robipolli@gmail.com"
] | robipolli@gmail.com |
dca2aacac1572c1ca6111998fa21040bca5af015 | bfc25f1ad7bfe061b57cfab82aba9d0af1453491 | /data/external/repositories_2to3/208513/kaggle-liberty-hazard-prediction-master/models/neighbors.py | 72c4ced2d31ec580b44e987575d068641d85f2de | [
"MIT"
] | permissive | Keesiu/meta-kaggle | 77d134620ebce530d183467202cf45639d9c6ff2 | 87de739aba2399fd31072ee81b391f9b7a63f540 | refs/heads/master | 2020-03-28T00:23:10.584151 | 2018-12-20T19:09:50 | 2018-12-20T19:09:50 | 147,406,338 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,990 | py | # import pandas as pd
# import numpy as np
# import pickle
# from sklearn import preprocessing
# from sklearn.cross_validation import train_test_split
# from sklearn.grid_search import RandomizedSearchCV
# from sklearn.neighbors import KNeighborsRegressor
# from sklearn.neighbors import KNeighborsClassifier
# ##################################################################################
# # cal metric
# def gini(solution, submission):
# df = zip(solution, submission, range(len(solution)))
# df = sorted(df, key=lambda x: (x[1],-x[2]), reverse=True)
# rand = [float(i+1)/float(len(df)) for i in range(len(df))]
# totalPos = float(sum([x[0] for x in df]))
# cumPosFound = [df[0][0]]
# for i in range(1,len(df)):
# cumPosFound.append(cumPosFound[len(cumPosFound)-1] + df[i][0])
# Lorentz = [float(x)/totalPos for x in cumPosFound]
# Gini = [Lorentz[i]-rand[i] for i in range(len(df))]
# return sum(Gini)
# def normalized_gini(solution, submission):
# normalized_gini = gini(solution, submission)/gini(solution, solution)
# return normalized_gini
# ##################################################################################
# #load train and test
# train = pd.read_csv('./data/train.csv', index_col=0)
# test = pd.read_csv('./data/test.csv', index_col=0)
# train_y = train.Hazard
# # drop train_y -> train_y
# train.drop('Hazard', axis=1, inplace=True)
# # drop noisy features
# train.drop('T2_V10', axis=1, inplace=True)
# train.drop('T2_V7', axis=1, inplace=True)
# train.drop('T1_V13', axis=1, inplace=True)
# train.drop('T1_V10', axis=1, inplace=True)
# test.drop('T2_V10', axis=1, inplace=True)
# test.drop('T2_V7', axis=1, inplace=True)
# test.drop('T1_V13', axis=1, inplace=True)
# test.drop('T1_V10', axis=1, inplace=True)
# # columns and index for later use
# columns = train.columns
# test_ind = test.index
# train = np.array(train)
# test = np.array(test)
# # label encode the categorical variables
# for i in range(train.shape[1]):
# lbl = preprocessing.LabelEncoder()
# lbl.fit(list(train[:,i]) + list(test[:,i]))
# train[:,i] = lbl.transform(train[:,i])
# test[:,i] = lbl.transform(test[:,i])
# train = train.astype(np.float32)
# test = test.astype(np.float32)
# ##################################################################################
# with open('./data/train_denoise.vec', 'rb') as f:
# train = pickle.load(f)
# with open('./data/test_denoise.vec', 'rb') as f:
# test = pickle.load(f)
# with open('./data/train_y.vec', 'rb') as f:
# train_y = pickle.load(f)
# train_x_sp, test_x_sp, train_y_sp, test_y_sp = train_test_split(train, train_y, train_size=0.8, random_state=50)
# rgrs = KNeighborsRegressor(n_neighbors =100)
# rgrs.fit(train_x_sp, train_y_sp)
# pred = rgrs.predict(test_x_sp)
# score = normalized_gini(test_y_sp, pred)
# print '{:.6f}'.format(score)
| [
"keesiu.wong@gmail.com"
] | keesiu.wong@gmail.com |
a37cdc5d8eead8699cb0d2c774558b30f8df8ebc | a1a43879a2da109d9fe8d9a75f4fda73f0d7166b | /api/json/select_configs.py | 6bc4e323b127bc99231a8b7ccf6570f228d5d613 | [] | no_license | PaddlePaddle/benchmark | a3ed62841598d079529c7440367385fc883835aa | f0e0a303e9af29abb2e86e8918c102b152a37883 | refs/heads/master | 2023-09-01T13:11:09.892877 | 2023-08-21T09:32:49 | 2023-08-21T09:32:49 | 173,032,424 | 78 | 352 | null | 2023-09-14T05:13:08 | 2019-02-28T03:14:16 | Python | UTF-8 | Python | false | false | 19,416 | py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import warnings
from operator import mul
from functools import reduce
import random
import copy
import os, sys
package_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(package_path)
from common.api_param import parse_list
def select_configs_by_json(args, origin_configs, configs_without_input,
all_shapes, input_type):
"""
Select configs according to json config and save the selected
configs to the sepcified json file.
"""
ignored_params = []
if args.ignored_params:
ignored_params += args.ignored_params
all_config_info = []
for config in configs_without_input:
config_info = ""
for param in config["param_info"]:
if param not in ignored_params:
value = config["param_info"][param]["value"]
config_info += param + "=" + value + " "
all_config_info.append(config_info)
config_groups = grouping_configs(all_config_info, all_shapes, input_type)
all_selected_ids = select_and_print_configs(config_groups, all_shapes)
out_dir = os.path.dirname(args.output_json_file)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
configs = []
for index in all_selected_ids:
if args.similar_api:
filename = os.path.basename(args.output_json_file)
origin_configs[index]["op"] = os.path.splitext(filename)[0]
configs.append(origin_configs[index])
with open(args.output_json_file, 'w') as f:
json.dump(configs, f, indent=4, sort_keys=True)
def select_configs_by_log(args, forward_logs, backward_logs):
"""
Select configs according to forward logs and backward logs and save the selected
configs to the sepcified json file.
Args:
args(object): An object to take the attributes.
forward_logs(list): A list of forward logs.
backward_logs(list): A list of backward logs.
"""
ignored_params = ["op"]
input_shape = ["input_shape"]
if args.input_shape:
input_shape = args.input_shape
if args.ignored_params:
ignored_params += args.ignored_params
shapes_list = get_input_shapes(forward_logs, input_shape)
removed_params = ignored_params + input_shape
combined_logs = combine_logs_with_key_params(removed_params, forward_logs,
backward_logs)
config_groups = grouping_configs(combined_logs, shapes_list)
all_selected_ids = select_and_print_configs(config_groups, shapes_list)
with open(args.input_json_file, 'r') as f:
all_configs = json.load(f)
out_dir = os.path.dirname(args.output_json_file)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
configs = []
for index in all_selected_ids:
configs.append(all_configs[index])
with open(args.output_json_file, 'w') as f:
json.dump(configs, f, indent=4, sort_keys=True)
def select_and_print_configs(config_groups, shapes_list):
print("=" * 30 + "config_groups" + "=" * 30)
all_selected_ids = []
i = 0
for key in config_groups:
print("config {0}: {1}, total: {2}.".format(
i, key, len(config_groups[key]['ids'])))
shape_groups = config_groups[key]['shape_groups']
if not shape_groups:
selected_ids = random.sample(config_groups[key]['ids'], 1)
all_selected_ids.extend(selected_ids)
select_ids_info = " Select {0} config_ids: {1}.".format(
len(selected_ids), selected_ids)
print(select_ids_info)
else:
all_selected_ids.extend(
select_from_shape_groups(shape_groups, shapes_list))
i += 1
return all_selected_ids
def select_from_shape_groups(shape_groups, shapes):
"""
Select configs from shape groups. The small, middle and large input will
be selected.
args:
shape_groups(dict): A dict that groups input shapes.
shapes(list): A list of input shapes.
Returns: A list of selected config ids.
"""
all_selected_ids = []
j = 0
for label in shape_groups:
candidate_ids = []
selected_ids = []
ids = shape_groups[label]['ids']
ids = rearrange_ids(shape_groups[label]['sizes'], ids)
if len(ids) <= 3:
candidate_ids = ids
else:
candidate_ids = [ids[0], ids[int(len(ids) / 2)], ids[-1]]
selected_shapes = []
for idx in candidate_ids:
if shapes[idx] not in selected_shapes:
selected_shapes.append(shapes[idx])
selected_ids.append(idx)
all_selected_ids += selected_ids
selected_shapes_info = " The shapes are: "
for shape in selected_shapes:
selected_shapes_info += "{} ".format(shape)
shape_groups_info = " " * 2 + "shape {0}: {1}, total: {2}.".format(
j, label, len(ids))
select_ids_info = " Select {0} config_ids: {1}.".format(
len(selected_ids), selected_ids)
print(shape_groups_info + select_ids_info + selected_shapes_info)
j += 1
return all_selected_ids
def combine_logs_with_key_params(removed_params, forward_logs, backward_logs):
"""
Combine each forward log with the corresponding backward log. First, some params in
forward and backward logs are removed. Second, the union of each forward and
corresponding backward log is computed.
Args:
removed_params(list): A list of removed params. It usually contains "op" and "input_shape".
forward_logs(list): A list of forward logs.
backward_logs(list): A list og backward logs.
Returns: A list of combined logs.
"""
for logs in [forward_logs, backward_logs]:
for i in range(len(logs)):
logs[i] = remove_params(logs[i], removed_params)
combined_logs = forward_logs
for i in range(len(forward_logs)):
if forward_logs[i] != backward_logs[i]:
intersection = list(
set(forward_logs[i]).intersection(set(backward_logs[i])))
difference = list(
set(forward_logs[i]).symmetric_difference(
set(backward_logs[i])))
combined_logs[i] = intersection + difference
combined_logs[i] = ' '.join(combined_logs[i])
return combined_logs
def grouping_configs(logs, shapes, input_type="Variable"):
"""
Groups all configs according to the logs. First, all configs are grouped by key params
without input shape. Second, the results of first step are grouped by input shape.
Args:
logs(list): A list of logs in which each item combines forward and backward log.
shapes(list): A list of input shapes.
Returns: A 2-D dict of config groups.
"""
config_groups = dict()
# group all configs by key params without input shape.
for i in range(len(logs)):
if logs[i] not in config_groups.keys():
config_groups[logs[i]] = {'ids': [i]}
else:
config_groups[logs[i]]['ids'] += [i]
# group config_groups by input shape.
for key in config_groups:
config_ids = config_groups[key]['ids']
shape_groups = group_input_shapes(shapes, config_ids, input_type)
config_groups[key]['shape_groups'] = shape_groups
return config_groups
def remove_params(log, removed_params):
"""
Remove params from logs according to the names of removed params.
Args:
log(list): A list of logs in which each item is a string.
removed_params(list): The names of removed params.
Example:
los=['op=conv data_format=NCHW filter_size=[1, 1]']
removed_params=['op']
result=['data_format=NCHW filter_size=[1, 1]']
Returns: A list of logs.
"""
result = list(log)
if removed_params:
for item in log:
param_val = item.split("=")
if param_val[0] in removed_params:
result.remove(item)
return result
def get_input_shapes(logs, input_shape):
"""
Get input shapes from logs and parse them into lists.
Args:
logs(list): A list of forward logs or backward logs. It is used to extract
input shapes.
input_shape(list): The name of input shape in logs. For example, if input_shape=[10, 10]
or x_shape=[10, 10] in logs. Then the name of input shape is "input_shape" or "x_shape".
Returns: A list of input shapes and each input shape is also a list.
"""
all_shapes = []
for log in logs:
shapes = []
for item in log:
param_val = item.split("=")
if param_val[0] in input_shape:
shape = parse_list(param_val[1])
shapes.append(list(shape))
all_shapes.append(shapes)
return all_shapes
def group_input_shapes(shapes, config_ids, input_type):
"""
Group the input shapes according to the shape's label.
Args:
shapes(list): A list of input shapes.
config_ids(list): A list of config ids in one group.
Returns: A 2-D dict of shape groups.
"""
shape_groups = dict()
if len(shapes) == 0:
warnings.warn("Group configs regardless of input shape.")
return shape_groups
for index in config_ids:
shape = shapes[index]
label, size = label_shape(shape, input_type)
if label not in shape_groups.keys():
shape_groups[label] = {'ids': [index], 'sizes': [size]}
else:
shape_groups[label]['ids'] += [index]
shape_groups[label]['sizes'] += [size]
return shape_groups
def get_input_shapes_from_json(args, origin_configs):
configs_without_input = []
all_shapes = []
input_type = "Variable"
input_name = ["input", "x", "y"]
for config in origin_configs:
config_res = copy.deepcopy(config)
input_shapes = []
var_shapes = []
for name, value in config["param_info"].items():
if args.ignored_params and name in args.ignored_params:
continue
if value["type"] in ["Variable", "numpy.ndarray"]:
if value["type"] == "Variable":
shape = list(parse_list(value["shape"]))
else:
shape = list(parse_list(value["value"]))
for i in range(len(shape)):
if shape[i] == -1:
shape[i] = 16
if name in input_name:
input_shapes.append(shape)
else:
var_shapes.append(shape)
del config_res["param_info"][name]
elif value["type"] == "list<Variable>":
input_type = "list<Variable>"
for key, var in value.items():
if key != "type":
shape = list(parse_list(var["shape"]))
for i in range(len(shape)):
if shape[i] == -1:
shape[i] = 16
input_shapes.append(shape)
del config_res["param_info"][name]
configs_without_input.append(config_res)
if len(input_shapes) != 0:
all_shapes.append(input_shapes)
elif len(var_shapes) != 0 and len(var_shapes) <= 2:
all_shapes.append(var_shapes)
return configs_without_input, all_shapes, input_type
def label_shape(shape, input_type):
"""
Label shape with the features. When only one shape is found, label the shape
according to the number of dimensions and whether the size is a power of 2.
When two shape is found, if the shapes are the same, label the shapes according
to the rule of one input, if the shapes are not the same, label the shapes
according to the number of dimensions.
Args:
shape: A list of input shapes. Each item is also a list containing 1 or 2 items.
Returns: A label of shapes that is a string and the size of the input with the most
dimensions.
"""
if input_type == "list<Variable>":
is_same_shape = 'T'
for s in shape:
if s != shape[0]:
is_same_shape = 'F'
break
label = 'is_same_shape=' + is_same_shape
all_sizes = [reduce(mul, shape[i]) for i in range(len(shape))]
size = max(all_sizes)
elif len(shape) == 1:
size = reduce(mul, shape[0])
is_power_of_2 = 'T' if size & (size - 1) == 0 else 'F'
label = str(len(shape[0])) + '-D' + ' is_power_of_2=' + is_power_of_2
elif len(shape) == 2:
if (shape[0] == shape[1]):
is_same_shape = 'T'
size = reduce(mul, shape[0])
is_power_of_2 = 'T' if size & (size - 1) == 0 else 'F'
label = 'is_same_shape=' + is_same_shape + ' ' + str(
len(shape[0])) + '-D-' + str(len(shape[
1])) + '-D' + ' is_power_of_2=' + is_power_of_2
else:
is_same_shape = 'F'
size1 = reduce(mul, shape[0])
size2 = reduce(mul, shape[0])
label = 'is_same_shape=' + is_same_shape + ' ' + str(
len(shape[0])) + '-D-' + str(len(shape[1])) + '-D'
size = size1 if (len(shape[0]) > len(shape[1])) else size2
return label, size
def rearrange_ids(sizes, ids):
"""
This function will sort sizes by ascending order and use the index of sorted sizes
to rearrange ids.
Example:
sizes = [400, 256, 1000, 512]
ids = [0, 3, 16, 10]
sorted_sizes = [256, 400, 512, 1000]
sorted_ids = [1, 0, 3, 2]
return: [3, 0, 10, 16]
Args:
sizes(list): A list of sizes to be sorted.
ids(list): A list of config ids to be rearrange by index of sorted sizes.
Returns: A list of rearranged ids.
"""
sorted_nums = sorted(enumerate(sizes), key=lambda x: x[1])
sorted_ids = [i[0] for i in sorted_nums]
return [ids[idx] for idx in sorted_ids]
def get_logs(op_name, log_file):
"""
Given the name of the OP and the path of the log, split forward and backward logs.
Args:
op_name(str): The OP's name that is used to extract key logs.
log_file(str): The path of Op's log.
Returns: Two lists of forward and backward logs.
"""
forward_logs = []
backward_logs = []
forward_name = "op=" + op_name
backward_name = forward_name + '_grad'
with open(log_file, 'r') as f:
for line in f.readlines():
line = line.strip()
if backward_name in line:
line = line.replace(', ', ',').split(' ')
backward_logs.append(line)
elif forward_name in line:
line = line.replace(', ', ',').split(' ')
forward_logs.append(line)
if not forward_logs:
raise ValueError("Could not find {0} in {1}.".format(forward_name,
log_file))
if not backward_logs:
warnings.warn("Only forward logs are used to select configs.")
else:
if len(forward_logs) != len(backward_logs):
raise ValueError(
"There are {0} logs containing {1}, but {2} logs containing {3}. They should be equal".
format(
len(forward_logs), forward_name,
len(backward_logs), backward_name))
return forward_logs, backward_logs
def parse_json_config(args):
if not os.path.exists(args.input_json_file):
raise ValueError(
"The input_json_file {} is not found. Please check the path of json file.".
format(args.input_json_file))
origin_configs = []
if os.path.isdir(args.input_json_file):
if args.similar_api:
for api in args.similar_api:
json_path = os.path.join(args.input_json_file, api + '.json')
if os.path.exists(json_path):
with open(json_path, 'r') as f:
api_configs = json.load(f)
origin_configs.extend(api_configs)
else:
raise ValueError(
"When input_json_file is a directory, the args similar_api should be set."
)
elif os.path.isfile(args.input_json_file):
with open(args.input_json_file, 'r') as f:
origin_configs = json.load(f)
configs_without_input, all_shapes, input_type = get_input_shapes_from_json(
args, origin_configs)
print("Total config of input json: {}".format(len(origin_configs)))
return origin_configs, configs_without_input, all_shapes, input_type
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--op_name', type=str, default=None, help='Specify the operator name.')
parser.add_argument(
'--log_file',
type=str,
default=None,
help='Specify the path of log file.')
parser.add_argument(
'--input_json_file',
type=str,
default=None,
required=True,
help='Specify the path of input json file.')
parser.add_argument(
'--output_json_file',
type=str,
default=None,
required=True,
help='Specify the path of output json file.')
parser.add_argument(
'--input_shape',
nargs='*',
help='Specify the name of input shape. If None, ["input_shape"] will be used.'
)
parser.add_argument(
'--ignored_params',
nargs='*',
help='Specify the ignored param list, the configs will be filtered according to the other params.'
)
parser.add_argument(
'--parse_runtime_log',
action='store_true',
help='Whether selecting configs by parsing runtime log. '
'Default: False, selecting configs by parsing json config.')
parser.add_argument(
'--similar_api',
nargs='*',
help='Specify the similar APIs, the output config will be '
'selected from the json file of these APIs.')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
print("ignored_params: {0}.".format(args.ignored_params))
if args.parse_runtime_log:
forward_logs, backward_logs = get_logs(args.op_name, args.log_file)
select_configs_by_log(args, forward_logs, backward_logs)
else:
origin_configs, configs_without_input, all_shapes, input_type = parse_json_config(
args)
select_configs_by_json(args, origin_configs, configs_without_input,
all_shapes, input_type)
| [
"noreply@github.com"
] | PaddlePaddle.noreply@github.com |
24454c08c7717f816db15594fcbc51f9901da313 | c81377ee1e27d00f797fcf2ad68317ba42429ca5 | /LIZA_DAMIAN_CARLOS/PARA/bucle_para01.py | bf3b86c64c3c7a11735a0aed52813614fb771917 | [] | no_license | CARLOSC10/T07_LIZA.DAMIAN_ROJAS.CUBAS | 39c55f08a1178b611125979741a3538276fa5d40 | ad736fb83de76f6342f8d53b4b5acfe1ecc88b7f | refs/heads/master | 2020-09-16T14:24:40.571198 | 2019-12-19T23:41:21 | 2019-12-19T23:41:21 | 223,798,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | #REPETITIVAS "PARA" QUE MUESTRA LOS N PRIMEROS NUMEROS, N SE ESCRIBE POR TECLADO
import os
n=0
#ARGUMENTOS
n=int(os.sys.argv[1])
#INPUT VALIDA LOS DATOS
datos_incorectos=(n<0)
#WHILE
#MIESTRAS LOS DATOS SEAN INCORECTOS A LA CONDICION ENTRA EN WHILE
while(datos_incorectos==True):
n=int(input("DATOS INGRESADOS INVALIDOS:Ingrese nuevamente los datos:"))
datos_incorectos=(n<0)
#fin_while
print("FIN DEL BUCLE")
#PROCESSING DE LA ESTRUCTURA "PARA"
i=0
while(i<=n):
print(i)
i+=1
#fin_while
| [
"clizad@unprg.edu.pe"
] | clizad@unprg.edu.pe |
fcba8acddc2b4aef40d9b982a26b8447898804b1 | 86fcd7e56f7409dc05fb1cc07496a38e39ef2607 | /vispy/testing/tests/test_testing.py | 8877a55354fc9b791914fe58aadeaef4537d012d | [
"BSD-3-Clause",
"LicenseRef-scancode-public-domain"
] | permissive | alexjc/vispy | a1622f7920df5f0ddd11acf56302896fabd5cb37 | 2e528cf3915c8274848d9f3662809485f3dbcf3f | refs/heads/master | 2021-01-15T08:57:01.427072 | 2014-12-08T20:02:11 | 2014-12-08T20:02:11 | 26,930,099 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 813 | py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
from nose.tools import assert_raises
from vispy.testing import (assert_in, assert_not_in, assert_is,
run_tests_if_main)
def test_testing():
    """Exercise the assertion helpers re-exported by vispy.testing."""
    # assert_in / assert_not_in: substring (membership) checks, plus the
    # mirrored failure case for each.
    assert_in('foo', 'foobar')
    assert_raises(AssertionError, assert_in, 'foo', 'bar')
    assert_not_in('foo', 'bar')
    assert_raises(AssertionError, assert_not_in, 'foo', 'foobar')
    # assert_is: identity check (``is``), not equality.
    assert_is(None, None)
    assert_raises(AssertionError, assert_is, None, 0)
run_tests_if_main()
| [
"larson.eric.d@gmail.com"
] | larson.eric.d@gmail.com |
6b73a7ebd7255118b70d146fd4edfd385a9c5be8 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/I_to_M_Gk3_no_pad/pyramid_5side/bce_s001_tv_s0p1_L5/step10_a.py | 4c89cff43070f6c44cce8dc50a6ab3d7dbcdf9ed | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135,209 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
code_exe_path = os.path.realpath(__file__) ### 目前執行 step10_b.py 的 path
code_exe_path_element = code_exe_path.split("\\") ### 把 path 切分 等等 要找出 kong_model 在第幾層
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### 找出 kong_model2 在第幾層
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### 定位出 kong_model2 的 dir
import sys ### 把 kong_model2 加入 sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### 中間 -1 是為了長度轉index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] 是為了去掉 step1x_, 後來覺得好像改有意義的名字不去掉也行所以 改 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] 是為了去掉 mask_ ,前面的 mask_ 是為了python 的 module 不能 數字開頭, 隨便加的這樣子, 後來覺得 自動排的順序也可以接受, 所以 改0
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print(" template_dir:", template_dir) ### 舉例: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
# Project imports: dataset objects, the L5 five-side pyramid models,
# loss-info builders, and the Exp_builder used to assemble experiments.
from step06_a_datas_obj import *
from step09_5side_L5 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
# Remove this script's own directory from sys.path again after importing.
# NOTE(review): presumably so sibling step-files in other experiment dirs
# don't resolve to this dir's modules — confirm against the other step10 scripts.
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
# Drop every cached "step09" module from sys.modules.
# NOTE(review): looks intended to force other scripts to re-import their own
# step09 variant instead of reusing this one's — confirm.
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
# NOTE: the triple-quoted note below is kept verbatim (it is a runtime string
# literal).  It says: exp_dir is the *parent* folder name that decides where
# result_dir goes, and exp_dir may be nested, e.g. exp_dir = "6_mask_unet/your_name"
# puts every result under 6_mask_unet/your_name/result_a, .../result_b, ...
'''
exp_dir 是 決定 result_dir 的 "上一層"資料夾 名字喔! exp_dir要巢狀也沒問題~
比如:exp_dir = "6_mask_unet/自己命的名字",那 result_dir 就都在:
    6_mask_unet/自己命的名字/result_a
    6_mask_unet/自己命的名字/result_b
    6_mask_unet/自己命的名字/...
'''
# Shared database object and loss objects used by every Exp_builder below.
use_db_obj = type9_mask_flow_have_bg_dtd_hdr_mix_and_paper
use_loss_obj = [G_bce_s001_loss_info_builder.set_loss_target("UNet_Mask").copy()]  ### the z, y, x order follows step07_b_0b_Multi_UNet
#############################################################
### An "empty" Exp_builder so result_analyze can draw blank placeholder plots.
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
##################################
### 1side1 — experiment configs for pyramid models with 1side=1.
### Each line wires one ch032_pyramid_* model into an Exp_builder (60 epochs,
### in/gt ranges normalized to [0, 1]); result_name pins the existing trained
### result directory (an empty string means not trained/named yet).
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_1__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr__1s1__2s1__3s1__4s1__5s1-20220401_144959")
##################################
### 1side2 — experiment configs for pyramid models with 1side=2.
### The "# N M ..." comment rows appear to track the combination counts per
### 2side level (the quoted number marks the current level) — TODO confirm.
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_2__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr__1s2__2s1__3s1__4s1__5s1-20220401_162033")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_2__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr__1s2__2s2__3s1__4s1__5s1-20220401_175427")
ch032_1side_2__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr__1s2__2s2__3s2__4s1__5s1-20220401_193152")
ch032_1side_2__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr__1s2__2s2__3s2__4s2__5s1-20220401_211317")
ch032_1side_2__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr__1s2__2s2__3s2__4s2__5s2-20220401_225740")
##################################
### 1side3 — experiment configs for pyramid models with 1side=3.
### result_name="" marks a config that has no trained result directory yet.
### Trailing "#.change_result_name_*" fragments are one-shot migration calls
### kept commented out for reference.
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_3__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_3__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s2_3s1_4s1_5s1-20220316_205936") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s2_3s2_4s1_5s1-20220316_221259") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s2_3s2_4s2_5s1-20220316_232850") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s2_3s2_4s2_5s2-20220317_004708") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_3__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s1_4s1_5s1-20220317_020758") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s2_4s1_5s1-20220317_032229") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s2_4s2_5s1-20220317_043933") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s2_4s2_5s2-20220317_055929") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s3_4s1_5s1-20220317_072133") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s3_4s2_5s1-20220317_084039") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s3_4s2_5s2-20220317_100201") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s3_4s3_5s1-20220317_112544") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s3_4s3_5s2-20220317_124820") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_3__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s3_2s3_3s3_4s3_5s3-20220317_141326") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
##################################
### 1side4 — experiment configs for pyramid models with 1side=4.
### Same pattern as the sections above: one Exp_builder per model combination,
### 60 epochs, in/gt ranges [0, 1]; result_name pins the trained result dir.
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_4__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_4__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s2_3s1_4s1_5s1-20220317_072910") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s2_3s2_4s1_5s1-20220317_084410") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s2_3s2_4s2_5s1-20220317_100139") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s2_3s2_4s2_5s2-20220317_112155") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_4__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s1_4s1_5s1-20220317_124420") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s2_4s1_5s1-20220317_140023") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s2_4s2_5s1-20220317_151925") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s2_4s2_5s2-20220317_164045") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s3_4s1_5s1-20220317_180440") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s3_4s2_5s1-20220317_192450") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s3_4s2_5s2-20220317_152510") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s3_4s3_5s1-20220317_165545") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s3_4s3_5s2-20220317_182514") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s3_3s3_4s3_5s3-20220317_195710") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_4__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s1_4s1_5s1-20220316_210316") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s2_4s1_5s1-20220316_222451") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s2_4s2_5s1-20220316_234909") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s2_4s2_5s2-20220317_011612") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s3_4s1_5s1-20220317_024555") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s3_4s2_5s1-20220317_041133") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s3_4s2_5s2-20220317_054003") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s3_4s3_5s1-20220317_071106") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s3_4s3_5s2-20220317_084107") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s3_4s3_5s3-20220317_101422") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s1_5s1-20220316_212016") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s2_5s1-20220316_224118") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s2_5s2-20220317_073559") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s3_5s1-20220317_090140") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s3_5s2-20220317_102548") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s3_5s3-20220317_115214") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s4_5s1-20220317_132021") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s4_5s2-20220317_144528") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s4_5s3-20220317_161321") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_4__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s4_2s4_3s4_4s4_5s4-20220317_174224") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
##################################
### 1side5
##################################
# 1side_5 grid: same Exp_builder recipe as above ("train", 60 epochs, in/gt
# ranges [0, 1]); only the pyramid variant and its recorded result_name differ.
# NOTE(review): the "1 3 6 10 ..." rows are presumably triangular-number counts
# of variant combinations per 2side level (quoted value = this sub-section's
# count, matching the "OK n" tally) — confirm against the generator script.
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_5__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s1_3s1_4s1_5s1-20220311_113503") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_5__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s2_3s1_4s1_5s1-20220311_131440") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s2_3s2_4s1_5s1-20220311_145742") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s2_3s2_4s2_5s1-20220311_164409") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s2_3s2_4s2_5s2-20220311_183355") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_5__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s1_4s1_5s1-20220311_202700") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s2_4s1_5s1-20220311_221325") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s2_4s2_5s1-20220312_000137") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s2_4s2_5s2-20220312_101242") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s3_4s1_5s1-20220312_120714") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s3_4s2_5s1-20220312_135625") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s3_4s2_5s2-20220312_154925") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s3_4s3_5s1-20220312_174618") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s3_4s3_5s2-20220312_194134") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s3_3s3_4s3_5s3-20220312_214024") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
# 1side_5 / 2side_4 sub-grid. Same builder recipe as the rest of the file.
# Entries with an empty result_name ("") have not been trained yet — no
# timestamped result folder exists for them; fill in after the run completes.
ch032_1side_5__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s1_4s1_5s1-20220316_210417") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s2_4s1_5s1-20220316_224954") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s2_4s2_5s1-20220317_003909") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s2_4s2_5s2-20220317_023150") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s3_4s1_5s1-20220317_042739") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s3_4s2_5s1-20220317_061855") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s3_4s2_5s2-20220317_081340") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s3_4s3_5s1-20220317_101153") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s3_4s3_5s2-20220317_120829") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_5__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s5_2s4_3s3_4s3_5s3-20220317_140844") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# Not yet trained (empty result_name) from here to the end of this sub-grid.
ch032_1side_5__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
# 1side_5 / 2side_5 sub-grid. Same builder recipe as the rest of the file
# ("train", 60 epochs, in/gt ranges [0, 1]). All entries here still have an
# empty result_name — none of these variants has a completed training run yet.
ch032_1side_5__2side_5__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 5side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_6__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_6__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s2_3s2_4s1_5s1-20220317_191218") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s2_3s2_4s2_5s1-20220317_203019") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s2_3s2_4s2_5s2-20220317_215051") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_6__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s1_4s1_5s1-20220317_161536") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s2_4s1_5s1-20220317_180235") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s2_4s2_5s1-20220317_195259") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s2_4s2_5s2-20220317_214651") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s3_4s1_5s1-20220317_154014") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s3_4s2_5s1-20220317_170201") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s3_4s2_5s2-20220317_182620") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s3_4s3_5s1-20220317_195311") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s3_4s3_5s2-20220317_211848") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6_2s3_3s3_4s3_5s3-20220317_224646") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_6__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6__2s4__3s1__4s1__5s1_-20220317_234833") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6__2s4__3s2__4s1__5s1_-20220318_013601") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6__2s4__3s2__4s2__5s1_-20220318_032721") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6__2s4__3s2__4s2__5s2_-20220318_052252") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="L5_ch032_bl_pyr_1s6__2s4__3s3__4s1__5s1_-20220318_072114") #.change_result_name_v4_Remove_sig_out(run_change=True, print_msg=True) #.change_result_name_v3_to_v4_Remove_db_name(run_change=True, print_msg=True)
ch032_1side_6__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_1side_6__2side_5__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# The 28 assignments this replaces were fully mechanical: one Exp_builder per
# (3side, 4side, 5s) combination of the ch032 1side_6 / 2side_6 pyramid models,
# identical except for the model object.  Generate them in a loop (DRY); the
# resulting module-level names (ch032_1side_6__2side_5__3side_*_4side_*_5s*)
# are exactly the ones the hand-written lines defined, in the same order.
def _build_ch032_1side_6__2side_5_exps():
    """Create the remaining ch032_1side_6__2side_5 experiment objects at module level."""
    # (3side, 4side) pairs still missing at this point in the file:
    # 3side_3/4side_3, then every 4side <= 3side pair for 3side 4 and 5.
    combos = [(3, 3)] + [(t3, t4) for t3 in (4, 5) for t4 in range(1, t3 + 1)]
    for t3, t4 in combos:
        for s in range(1, t4 + 1):          # 5s runs 1..4side
            suffix = "1side_6__2side_5__3side_%d_4side_%d_5s%d" % (t3, t4, s)
            model = globals()["ch032_pyramid_" + suffix]
            globals()["ch032_" + suffix] = Exp_builder().set_basic("train", use_db_obj, model, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=model.kong_model.model_describe).set_train_args(epochs=60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")

_build_ch032_1side_6__2side_5_exps()
# 1 3 6 10 15 "21" 28 36 45 55   (triangular numbers; 3side=6 contributes 21 combos)
# 2side6 OK 56                   (1 + 3 + 6 + 10 + 15 + 21 = 56 experiments)
# The 56 assignments this replaces were fully mechanical: one Exp_builder per
# (3side, 4side, 5s) combination with 4side <= 3side and 5s <= 4side, identical
# except for the model object.  Generate them in a loop (DRY); the resulting
# module-level names (ch032_1side_6__2side_6__3side_*_4side_*_5s*) are exactly
# the ones the hand-written lines defined, in the same order.
def _build_ch032_1side_6__2side_6_exps():
    """Create the ch032_1side_6__2side_6 experiment objects at module level."""
    for t3 in range(1, 7):                  # 3side: 1..6
        for t4 in range(1, t3 + 1):         # 4side: 1..3side
            for s in range(1, t4 + 1):      # 5s:    1..4side
                suffix = "1side_6__2side_6__3side_%d_4side_%d_5s%d" % (t3, t4, s)
                model = globals()["ch032_pyramid_" + suffix]
                globals()["ch032_" + suffix] = Exp_builder().set_basic("train", use_db_obj, model, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=model.kong_model.model_describe).set_train_args(epochs=60).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")

_build_ch032_1side_6__2side_6_exps()
#############################################################
if __name__ == "__main__":
    print("build exps cost time:", time.time() - start_time)
    if len(sys.argv) < 2:
        ############################################################################################################
        ### Run directly (F5, or `python step10_b1_exp_obj_load_and_train_and_test.py`) with NO extra
        ### arguments, so we do not fall through to the code below meant for step10_b_subprocess.py.
        ch032_1side_1__2side_1__3side_1_4side_1_5s1.build().run()
        # print('no argument')
        sys.exit()
    ### The line below serves step10_b_subprocess.py; it is equivalent to running
    ### `python step10_b1_exp_obj_load_and_train_and_test.py "<some_exp>.build().run()"` from the cmd line.
    # NOTE(security): eval() executes arbitrary code taken from argv.  This is intentional for the
    # subprocess workflow described above, but never expose this entry point to untrusted input.
    eval(sys.argv[1])
| [
"s89334roy@yahoo.com.tw"
] | s89334roy@yahoo.com.tw |
760cb2fd39f86a9b39f87005d16dbdd0b0dc1846 | 2bdedcda705f6dcf45a1e9a090377f892bcb58bb | /src/main/output/water/java_level_reason/lot/president.py | 0dc9e9aba884377bf07220292becfd6d869e08ff | [] | no_license | matkosoric/GenericNameTesting | 860a22af1098dda9ea9e24a1fc681bb728aa2d69 | 03f4a38229c28bc6d83258e5a84fce4b189d5f00 | refs/heads/master | 2021-01-08T22:35:20.022350 | 2020-02-21T11:28:21 | 2020-02-21T11:28:21 | 242,123,053 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,144 | py | # -*- coding: utf-8 -*-
import http.client, urllib.parse
# **********************************************
# *** Update or verify the following values. ***
# **********************************************
# Replace the subscriptionKey string value with your valid subscription key.
host = 'api.microsofttranslator.com'
path = '/V2/Http.svc/TranslateArray'
params = ''
ns = "http://schemas.microsoft.com/2003/10/Serialization/Arrays";
# NOTE: AppId is required, but it can be empty because we are sending the Ocp-Apim-Subscription-Key header.
body = """
<TranslateArrayRequest>
<AppId />
<Texts>
<string xmlns=\"%s\">Hello</string>
<string xmlns=\"%s\">Goodbye</string>
</Texts>
<To>fr-fr</To>
</TranslateArrayRequest>
""" % (ns, ns)
def TranslateArray ():
    """POST the prepared XML `body` to the TranslateArray endpoint and return
    the raw response bytes (XML).

    Uses the module-level `host`, `path`, `params` and `body` values.
    """
    # SECURITY NOTE: the subscription key is hard-coded; prefer reading it from
    # an environment variable or a secrets store in a real deployment.
    subscriptionKey = '95de3c2322800cad9803e2d338616d8b'
    headers = {
        # Azure Cognitive Services expects the key in the 'Ocp-Apim-Subscription-Key'
        # header (see the NOTE above the request body); the previous code sent it
        # under a garbled header name, so the request was never authenticated.
        'Ocp-Apim-Subscription-Key': subscriptionKey,
        'Content-type': 'text/xml'
    }
    conn = http.client.HTTPSConnection(host)
    try:
        conn.request ("POST", path + params, body, headers)
        response = conn.getresponse ()
        return response.read ()
    finally:
        conn.close()  # always release the socket, even if the request fails
# Issue the request and print the UTF-8 decoded XML response.
result = TranslateArray ()
print (result.decode("utf-8"))
| [
"soric.matko@gmail.com"
] | soric.matko@gmail.com |
9b1be2255716dd81859aaf4f32c15e6443397f0a | a33a5d7d2a9b0b6030f39803553f2689f3d2c743 | /ml/vcwiz/training/trainer.py | 4024824cb15983e56849955a17f6b33eeebf0686 | [] | no_license | yasyf/vc | 6c1a4224d56049aff44f2ffa1c57922ccb7907ab | bd12d8b1248b008516b1547a693008428085de78 | refs/heads/master | 2023-01-10T08:39:34.285545 | 2019-07-29T10:20:31 | 2019-07-29T14:58:33 | 59,700,244 | 11 | 4 | null | 2023-01-09T20:04:34 | 2016-05-25T21:52:18 | Ruby | UTF-8 | Python | false | false | 1,826 | py | from abc import ABC, abstractmethod
import os, sys, tempfile
from google.cloud import storage
class Trainer(ABC):
    """Abstract base class for trainers whose data and models live in Google Cloud Storage.

    Subclasses implement the framework-specific hooks ``_train``, ``_save``,
    ``_test`` and ``_metrics``; this base class handles downloading training
    data from, and uploading trained models to, the configured GCS bucket.
    """

    def __init__(self):
        self.model = None        # trained model object, populated by train()
        self.output_path = None  # local filesystem path of the last saved model
        self.upload_path = None  # bucket path of the last uploaded model
        # Project and bucket come from the environment (GC_PROJECT_ID, GOOGLE_MODEL_BUCKET).
        self.client = storage.Client(os.environ['GC_PROJECT_ID'])
        self.bucket = self.client.bucket(os.environ['GOOGLE_MODEL_BUCKET'])

    @abstractmethod
    def _train(self, filename):
        """Train and return a model from the local training file *filename* (subclass hook)."""
        raise NotImplementedError

    def train(self, filename):
        """Train on the local file *filename* and keep the result on ``self.model``."""
        self.model = self._train(filename)

    def remote_train(self, path):
        """Download the blob at *path* from the bucket to a temp file and train on it."""
        fd, filename = tempfile.mkstemp(suffix='.csv')
        os.close(fd)  # we only need the path; download_to_filename reopens the file itself
        try:
            blob = self.bucket.blob(path)
            blob.download_to_filename(filename)
            self.train(filename)
        finally:
            os.remove(filename)  # always clean up the temp file (it was previously leaked)

    @abstractmethod
    def _save(self, model, path):
        """Serialize *model* to the local *path* (subclass hook)."""
        raise NotImplementedError

    def save(self, path):
        """Serialize the trained model to the local *path* and remember that path."""
        assert self.model
        self._save(self.model, path)
        self.output_path = path

    def upload(self, path):
        """Upload the last saved model file to *path* in the bucket.

        Returns the uploaded blob's generation number.
        """
        assert self.output_path
        blob = self.bucket.blob(path)
        blob.upload_from_filename(self.output_path)
        self.upload_path = path
        return blob.generation

    def remote_save(self, path):
        """Serialize the model to a temp file and upload it to *path* in the bucket."""
        fd, filename = tempfile.mkstemp(suffix='.model')
        os.close(fd)  # close before _save writes through the path (required on Windows)
        # NOTE: the temp file is intentionally kept on disk afterwards, because
        # self.output_path keeps pointing at it and upload() may be called again.
        self.save(filename)
        return self.upload(path)

    @abstractmethod
    def _test(self, model, *args):
        """Evaluate *model* (subclass hook)."""
        raise NotImplementedError

    def test(self, *args):
        """Evaluate the trained model with the given extra arguments."""
        assert self.model
        self._test(self.model, *args)

    @abstractmethod
    def _metrics(self, model, *args):
        """Compute and return evaluation metrics for *model* (subclass hook)."""
        raise NotImplementedError

    def metrics(self, *args):
        """Return the metrics of the trained model."""
        return self._metrics(self.model, *args)

    @classmethod
    def _train_and_test(cls, filename, args):
        """Instantiate the trainer, train on *filename*, then evaluate with *args*."""
        instance = cls()
        instance.train(filename)
        instance.test(*args)

    @classmethod
    def train_and_test(cls):
        """CLI entry point: ``prog <training-file> [test args...]``."""
        filename = sys.argv[1]
        args = sys.argv[2:]
        cls._train_and_test(filename, args)
| [
"yasyfm@gmail.com"
] | yasyfm@gmail.com |
98f34f3c1c4d17b6fb254028c9ec9441791c06b0 | 2bdee262d31ecb1e76f46096268dbd56cef6b100 | /Algorithms/tf2algos/ppo.py | 260f8d8b3e33d4bb3ac8b37fe11f677e6019b279 | [
"Apache-2.0"
] | permissive | hititan/RLs | 369addd20fb8fa38f33b03fb163afbaf686e10eb | 27cac450d5a8e1ac08f2a0573d87f7663c3d0468 | refs/heads/master | 2020-10-01T10:13:45.128686 | 2019-12-11T14:58:34 | 2019-12-11T14:58:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,768 | py | import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
import Nn
from utils.sth import sth
from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy
from Algorithms.tf2algos.base.on_policy import On_Policy
class PPO(On_Policy):
    """Proximal Policy Optimization (clipped surrogate objective) with
    GAE(lambda) advantage estimation.

    Supports either a shared actor-critic network (``share_net=True``) or
    separate actor/critic networks, and both continuous (Gaussian policy with
    a state-independent log-std) and discrete (categorical) action spaces.
    """

    def __init__(self,
                 s_dim,
                 visual_sources,
                 visual_resolution,
                 a_dim_or_list,
                 is_continuous,
                 epoch=5,
                 beta=1.0e-3,
                 lr=5.0e-4,
                 lambda_=0.95,
                 epsilon=0.2,
                 share_net=True,
                 actor_lr=3e-4,
                 critic_lr=1e-3,
                 hidden_units={
                     'share': {
                         'continuous': {
                             'share': [32, 32],
                             'mu': [32, 32],
                             'v': [32, 32]
                         },
                         'discrete': {
                             'share': [32, 32],
                             'logits': [32, 32],
                             'v': [32, 32]
                         }
                     },
                     'actor_continuous': [32, 32],
                     'actor_discrete': [32, 32],
                     'critic': [32, 32]
                 },
                 **kwargs):
        # beta: entropy bonus coefficient; epoch: optimization passes per rollout;
        # lambda_: GAE trace-decay; epsilon: PPO clipping range.
        # NOTE(review): hidden_units is a mutable default argument -- safe only
        # as long as no caller mutates it.
        super().__init__(
            s_dim=s_dim,
            visual_sources=visual_sources,
            visual_resolution=visual_resolution,
            a_dim_or_list=a_dim_or_list,
            is_continuous=is_continuous,
            **kwargs)
        self.beta = beta
        self.epoch = epoch
        self.lambda_ = lambda_
        self.epsilon = epsilon
        self.share_net = share_net
        if self.share_net:
            # One network with both a policy head and a value head.
            self.TensorSpecs = get_TensorSpecs([self.s_dim], self.visual_dim, [self.a_counts], [1], [1], [1])
            if self.is_continuous:
                self.net = Nn.a_c_v_continuous(self.s_dim, self.visual_dim, self.a_counts, 'ppo_net', hidden_units['share']['continuous'])
            else:
                self.net = Nn.a_c_v_discrete(self.s_dim, self.visual_dim, self.a_counts, 'ppo_net', hidden_units['share']['discrete'])
            # Learning rate decays polynomially to ~0 over max_episode episodes.
            self.lr = tf.keras.optimizers.schedules.PolynomialDecay(lr, self.max_episode, 1e-10, power=1.0)
            self.optimizer = tf.keras.optimizers.Adam(learning_rate=self.lr(self.episode))
        else:
            # Separate actor and critic networks with independent optimizers/schedules.
            self.actor_TensorSpecs = get_TensorSpecs([self.s_dim], self.visual_dim, [self.a_counts], [1], [1])
            self.critic_TensorSpecs = get_TensorSpecs([self.s_dim], self.visual_dim, [1])
            if self.is_continuous:
                self.actor_net = Nn.actor_mu(self.s_dim, self.visual_dim, self.a_counts, 'actor_net', hidden_units['actor_continuous'])
            else:
                self.actor_net = Nn.actor_discrete(self.s_dim, self.visual_dim, self.a_counts, 'actor_net', hidden_units['actor_discrete'])
            self.critic_net = Nn.critic_v(self.s_dim, self.visual_dim, 'critic_net', hidden_units['critic'])
            self.actor_lr = tf.keras.optimizers.schedules.PolynomialDecay(actor_lr, self.max_episode, 1e-10, power=1.0)
            self.critic_lr = tf.keras.optimizers.schedules.PolynomialDecay(critic_lr, self.max_episode, 1e-10, power=1.0)
            self.optimizer_actor = tf.keras.optimizers.Adam(learning_rate=self.actor_lr(self.episode))
            self.optimizer_critic = tf.keras.optimizers.Adam(learning_rate=self.critic_lr(self.episode))
        if self.is_continuous:
            # State-independent log standard deviation for the Gaussian policy.
            self.log_std = tf.Variable(initial_value=-0.5 * np.ones(self.a_counts, dtype=np.float32), trainable=True)
        self.recorder.logger.info('''
　　ｘｘｘｘｘｘｘｘ　　　　ｘｘｘｘｘｘｘｘ　　　　　ｘｘｘｘｘ　　　　
　　　　ｘｘ　　ｘｘ　　　　　ｘｘ　　ｘｘ　　　　　ｘｘｘ　ｘｘｘ　　
　　　　ｘ　　　ｘｘｘ　　　　ｘ　　　ｘｘｘ　　　　ｘｘ　　　ｘｘ　　
　　　　ｘ　　　ｘｘｘ　　　　ｘ　　　ｘｘｘ　　　　ｘｘ　　　ｘｘｘ　
　　　　ｘｘｘｘｘｘ　　　　　ｘｘｘｘｘｘ　　　　　ｘｘｘ　　　ｘｘｘ
　　　　ｘ　　　　　　　　　　ｘ　　　　　　　　　　ｘｘｘ　　　ｘｘｘ
　　　　ｘ　　　　　　　　　　ｘ　　　　　　　　　　　ｘｘ　　　ｘｘ　
　　　　ｘ　　　　　　　　　　ｘ　　　　　　　　　　　ｘｘｘ　ｘｘｘ　
　　　ｘｘｘｘｘ　　　　　　ｘｘｘｘｘ　　　　　　　　　ｘｘｘｘｘ　　
        ''')

    def choose_action(self, s, visual_s, evaluation=False):
        """Sample actions for a batch of agents; for discrete spaces the flat
        sampled index is converted back to per-branch action indices."""
        a = self._get_action(s, visual_s, evaluation).numpy()
        return a if self.is_continuous else sth.int2action_index(a, self.a_dim_or_list)

    @tf.function
    def _get_action(self, s, visual_s, evaluation):
        """Graph-mode sampling: clipped Gaussian sample for continuous actions,
        categorical sample over logits for discrete actions.

        NOTE(review): `evaluation` is accepted but never used here -- sampling
        is stochastic in both modes; confirm whether greedy eval was intended.
        """
        s, visual_s = self.cast(s, visual_s)
        with tf.device(self.device):
            if self.is_continuous:
                if self.share_net:
                    mu, _ = self.net(s, visual_s)
                else:
                    mu = self.actor_net(s, visual_s)
                sample_op, _ = gaussian_clip_rsample(mu, self.log_std)
            else:
                if self.share_net:
                    logits, _ = self.net(s, visual_s)
                else:
                    logits = self.actor_net(s, visual_s)
                norm_dist = tfp.distributions.Categorical(logits)
                sample_op = norm_dist.sample()
        return sample_op

    def store_data(self, s, visual_s, a, r, s_, visual_s_, done):
        """Append one transition to the on-policy buffer ``self.data``, along
        with the value estimate and the behavior policy's log-prob, which the
        PPO update needs later; remember s_/visual_s_ for bootstrapping."""
        assert isinstance(a, np.ndarray), "store_data need action type is np.ndarray"
        assert isinstance(r, np.ndarray), "store_data need reward type is np.ndarray"
        assert isinstance(done, np.ndarray), "store_data need done type is np.ndarray"
        if not self.is_continuous:
            # Discrete actions are stored one-hot so log-probs reduce via a sum.
            a = sth.action_index2one_hot(a, self.a_dim_or_list)
        self.data = self.data.append({
            's': s,
            'visual_s': visual_s,
            'a': a,
            'r': r,
            'done': done,
            'value': np.squeeze(self._get_value(s, visual_s).numpy()),
            # NOTE(review): the epsilon is added to a *log*-probability, not a
            # probability -- verify this is the intended numerical guard.
            'log_prob': self._get_log_prob(s, visual_s, a).numpy() + 1e-10
        }, ignore_index=True)
        self.s_ = s_
        self.visual_s_ = visual_s_

    @tf.function
    def _get_value(self, s, visual_s):
        """Return the critic's value estimate V(s) for a batch of states."""
        s, visual_s = self.cast(s, visual_s)
        with tf.device(self.device):
            if self.share_net:
                _, value = self.net(s, visual_s)
            else:
                value = self.critic_net(s, visual_s)
            return value

    @tf.function
    def _get_log_prob(self, s, visual_s, a):
        """Return the current policy's log-probability of actions `a`
        (Gaussian likelihood for continuous, summed log-softmax for discrete,
        where `a` is one-hot)."""
        s, visual_s, a = self.cast(s, visual_s, a)
        with tf.device(self.device):
            if self.is_continuous:
                if self.share_net:
                    mu, _ = self.net(s, visual_s)
                else:
                    mu = self.actor_net(s, visual_s)
                new_log_prob = gaussian_likelihood_sum(mu, a, self.log_std)
            else:
                if self.share_net:
                    logits, _ = self.net(s, visual_s)
                else:
                    logits = self.actor_net(s, visual_s)
                logp_all = tf.nn.log_softmax(logits)
                new_log_prob = tf.reduce_sum(a * logp_all, axis=1, keepdims=True)
            return new_log_prob

    def calculate_statistics(self):
        """Compute per-step returns, TD errors and normalized GAE advantages
        for the stored rollout, bootstrapping from V(s_) of the final state."""
        init_value = np.squeeze(self._get_value(self.s_, self.visual_s_).numpy())
        self.data['total_reward'] = sth.discounted_sum(self.data.r.values, 1, init_value, self.data.done.values)
        self.data['discounted_reward'] = sth.discounted_sum(self.data.r.values, self.gamma, init_value, self.data.done.values)
        self.data['td_error'] = sth.discounted_sum_minus(
            self.data.r.values,
            self.gamma,
            init_value,
            self.data.done.values,
            self.data.value.values
        )
        # GAE: discounted sum of TD errors with decay gamma*lambda.
        adv = np.asarray(sth.discounted_sum(
            self.data.td_error.values,
            self.lambda_ * self.gamma,
            0,
            self.data.done.values
        ))
        # Normalize advantages to zero mean / unit variance across the rollout.
        self.data['advantage'] = [i for i in (adv - adv.mean()) / adv.std()]
        # self.data.to_excel(self.recorder.excel_writer, sheet_name=f'test{self.episode}', index=True)
        # self.recorder.excel_writer.save()

    def get_sample_data(self, index):
        """Slice one contiguous minibatch starting at `index` out of the rollout
        buffer and stack each column into a float32 ndarray."""
        i_data = self.data.iloc[index:index + self.batch_size]
        s = np.vstack(i_data.s.values).astype(np.float32)
        visual_s = np.vstack(i_data.visual_s.values).astype(np.float32)
        a = np.vstack(i_data.a.values).astype(np.float32)
        dc_r = np.vstack(i_data.discounted_reward.values).reshape(-1, 1).astype(np.float32)
        old_log_prob = np.vstack(i_data.log_prob.values).astype(np.float32)
        advantage = np.vstack(i_data.advantage.values).reshape(-1, 1).astype(np.float32)
        return s, visual_s, a, dc_r, old_log_prob, advantage

    # @show_graph(name='ppo_net')
    def learn(self, **kwargs):
        """Run `epoch` optimization passes of contiguous minibatch updates over
        the stored rollout, write per-update summaries, then clear the buffer."""
        assert self.batch_size <= self.data.shape[0], "batch_size must less than the length of an episode"
        self.episode = kwargs['episode']
        self.calculate_statistics()
        for _ in range(self.epoch):
            for index in range(0, self.data.shape[0], self.batch_size):
                s, visual_s, a, dc_r, old_log_prob, advantage = [tf.convert_to_tensor(i) for i in self.get_sample_data(index)]
                if self.share_net:
                    actor_loss, critic_loss, entropy, kl = self.train_share.get_concrete_function(
                        *self.TensorSpecs)(s, visual_s, a, dc_r, old_log_prob, advantage)
                else:
                    actor_loss, entropy, kl = self.train_actor.get_concrete_function(
                        *self.actor_TensorSpecs)(s, visual_s, a, old_log_prob, advantage)
                    # if kl > 1.5 * 0.01:
                    #     break
                    critic_loss = self.train_critic.get_concrete_function(
                        *self.critic_TensorSpecs)(s, visual_s, dc_r)
                self.global_step.assign_add(1)
                summaries = dict([
                    ['LOSS/actor_loss', actor_loss],
                    ['LOSS/critic_loss', critic_loss],
                    ['Statistics/entropy', entropy]
                ])
                if self.share_net:
                    summaries.update(dict([['LEARNING_RATE/lr', self.lr(self.episode)]]))
                else:
                    summaries.update(dict([
                        ['LEARNING_RATE/actor_lr', self.actor_lr(self.episode)],
                        ['LEARNING_RATE/critic_lr', self.critic_lr(self.episode)]
                    ]))
                self.write_training_summaries(self.episode, summaries)
        self.clear()

    @tf.function(experimental_relax_shapes=True)
    def train_share(self, s, visual_s, a, dc_r, old_log_prob, advantage):
        """One joint gradient step for the shared network: clipped PPO surrogate
        for the policy head plus squared TD error for the value head, with an
        entropy bonus. Returns (actor_loss, value_loss, entropy, approx_kl)."""
        s, visual_s, a, dc_r, old_log_prob, advantage = self.cast(s, visual_s, a, dc_r, old_log_prob, advantage)
        with tf.device(self.device):
            with tf.GradientTape() as tape:
                if self.is_continuous:
                    mu, value = self.net(s, visual_s)
                    new_log_prob = gaussian_likelihood_sum(mu, a, self.log_std)
                    entropy = gaussian_entropy(self.log_std)
                else:
                    logits, value = self.net(s, visual_s)
                    logp_all = tf.nn.log_softmax(logits)
                    new_log_prob = tf.reduce_sum(a * logp_all, axis=1, keepdims=True)
                    entropy = -tf.reduce_mean(tf.reduce_sum(tf.exp(logp_all) * logp_all, axis=1, keepdims=True))
                # Importance ratio of new to old policy; kl is a cheap KL estimate.
                ratio = tf.exp(new_log_prob - old_log_prob)
                kl = tf.reduce_mean(old_log_prob - new_log_prob)
                surrogate = ratio * advantage
                td_error = dc_r - value
                # Clipped surrogate objective: min(r*A, clip(r)*A).
                actor_loss = tf.reduce_mean(
                    tf.minimum(
                        surrogate,
                        tf.clip_by_value(ratio, 1.0 - self.epsilon, 1.0 + self.epsilon) * advantage
                    ))
                value_loss = tf.reduce_mean(tf.square(td_error))
                # Maximize surrogate + entropy, minimize value error (negated for the minimizer).
                loss = -(actor_loss - 1.0 * value_loss + self.beta * entropy)
            if self.is_continuous:
                loss_grads = tape.gradient(loss, self.net.trainable_variables + [self.log_std])
                self.optimizer.apply_gradients(
                    zip(loss_grads, self.net.trainable_variables + [self.log_std])
                )
            else:
                loss_grads = tape.gradient(loss, self.net.trainable_variables)
                self.optimizer.apply_gradients(
                    zip(loss_grads, self.net.trainable_variables)
                )
            return actor_loss, value_loss, entropy, kl

    @tf.function(experimental_relax_shapes=True)
    def train_actor(self, s, visual_s, a, old_log_prob, advantage):
        """One gradient step on the separate actor: clipped PPO surrogate (using
        the min_adv formulation) plus entropy bonus.
        Returns (actor_loss, entropy, approx_kl)."""
        s, visual_s, a, old_log_prob, advantage = self.cast(s, visual_s, a, old_log_prob, advantage)
        with tf.device(self.device):
            with tf.GradientTape() as tape:
                if self.is_continuous:
                    mu = self.actor_net(s, visual_s)
                    new_log_prob = gaussian_likelihood_sum(mu, a, self.log_std)
                    entropy = gaussian_entropy(self.log_std)
                else:
                    logits = self.actor_net(s, visual_s)
                    logp_all = tf.nn.log_softmax(logits)
                    new_log_prob = tf.reduce_sum(a * logp_all, axis=1, keepdims=True)
                    entropy = -tf.reduce_mean(tf.reduce_sum(tf.exp(logp_all) * logp_all, axis=1, keepdims=True))
                ratio = tf.exp(new_log_prob - old_log_prob)
                kl = tf.reduce_mean(old_log_prob - new_log_prob)
                surrogate = ratio * advantage
                # Equivalent clipped objective: min(r*A, min_adv) with the clip bound
                # chosen per the sign of the advantage.
                min_adv = tf.where(advantage > 0, (1 + self.epsilon) * advantage, (1 - self.epsilon) * advantage)
                actor_loss = -(tf.reduce_mean(tf.minimum(surrogate, min_adv)) + self.beta * entropy)
            if self.is_continuous:
                actor_grads = tape.gradient(actor_loss, self.actor_net.trainable_variables + [self.log_std])
                self.optimizer_actor.apply_gradients(
                    zip(actor_grads, self.actor_net.trainable_variables + [self.log_std])
                )
            else:
                actor_grads = tape.gradient(actor_loss, self.actor_net.trainable_variables)
                self.optimizer_actor.apply_gradients(
                    zip(actor_grads, self.actor_net.trainable_variables)
                )
            return actor_loss, entropy, kl

    @tf.function(experimental_relax_shapes=True)
    def train_critic(self, s, visual_s, dc_r):
        """One gradient step on the separate critic: mean squared error between
        discounted returns and V(s). Returns the value loss."""
        s, visual_s, dc_r = self.cast(s, visual_s, dc_r)
        with tf.device(self.device):
            with tf.GradientTape() as tape:
                value = self.critic_net(s, visual_s)
                td_error = dc_r - value
                value_loss = tf.reduce_mean(tf.square(td_error))
            critic_grads = tape.gradient(value_loss, self.critic_net.trainable_variables)
            self.optimizer_critic.apply_gradients(
                zip(critic_grads, self.critic_net.trainable_variables)
            )
            return value_loss
| [
"271668153@qq.com"
] | 271668153@qq.com |
b679b28b5411bf945455ca4c62ff77c700dcf922 | 84b05857cbe74d190bdbee18d442d0c720b1b84d | /AlgoExpert_algorithms/Easy/FindThreeLargestNumbers/test_FindThreeLargestNumbersd.py | 1ab120e3c5754042ad18d7723c6535b5dafe7308 | [] | no_license | JakubKazimierski/PythonPortfolio | 1c8c7e7b0f1358fc42a2295b807d0afafd8e88a3 | 3aa62ad36c3b06b2a3b05f1f8e2a9e21d68b371f | refs/heads/master | 2023-06-01T01:16:22.897097 | 2023-05-15T01:05:22 | 2023-05-15T01:05:22 | 311,473,524 | 9 | 1 | null | null | null | null | UTF-8 | Python | false | false | 692 | py | '''
Unittests for FindThreeLargestNumbers.py
January 2021 Jakub Kazimierski
'''
import unittest
import FindThreeLargestNumbers
class test_FindThreeLargestNumbers(unittest.TestCase):
    """Unit tests for the FindThreeLargestNumbers module."""

    # region Unittests
    def test_ExpectedOutput(self):
        """The three largest values are returned, in ascending order."""
        result = FindThreeLargestNumbers.findThreeLargestNumbers([10, 5, 9, 10, 12])
        self.assertEqual(result, [10, 10, 12])
    # endregion
# endregion
if __name__ == "__main__":
    # Entry point: run all test cases via the unittest CLI when executed directly.
    unittest.main()
"j.m.kazimierski@gmail.com"
] | j.m.kazimierski@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.