blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ab3c9b1fb05f624ff51e8cff414dc625f66e4f8e | 00d7824d2699fc7a90de167e04ff49a210458f2c | /tests/trainer/logging_tests/test_eval_loop_logging_1_0.py | bce4a23dda15785300b0bb95e0fb1af5454f1605 | [
"Apache-2.0",
"LicenseRef-scancode-proprietary-license"
] | permissive | jtamir/pytorch-lightning | 867feab3062ed2e3357b640588220efde349f97b | 9b89a24b04dff50c0595c5399e9ba61b39745def | refs/heads/master | 2021-07-10T19:40:53.410989 | 2020-11-04T05:59:16 | 2020-11-04T06:00:28 | 213,468,663 | 1 | 0 | Apache-2.0 | 2019-10-07T19:28:07 | 2019-10-07T19:28:06 | null | UTF-8 | Python | false | false | 11,727 | py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests to ensure that the training loop works with a dict (1.0)
"""
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning import Trainer
from pytorch_lightning import callbacks, seed_everything
from tests.base.deterministic_model import DeterministicModel
from tests.base import SimpleModule, BoringModel
import os
import torch
import pytest
def test__validation_step__log(tmpdir):
    """
    Tests that ``validation_step`` can log, and that the logged metrics show
    up in ``trainer.logged_metrics`` and ``trainer.callback_metrics``.
    """
    # enable the dev debugger so logged metrics are captured for inspection
    os.environ['PL_DEV_DEBUG'] = '1'

    class TestModel(DeterministicModel):
        def training_step(self, batch, batch_idx):
            acc = self.step(batch, batch_idx)
            acc = acc + batch_idx
            self.log('a', acc, on_step=True, on_epoch=True)
            self.log('a2', 2)
            self.training_step_called = True
            return acc

        def validation_step(self, batch, batch_idx):
            acc = self.step(batch, batch_idx)
            acc = acc + batch_idx
            self.log('b', acc, on_step=True, on_epoch=True)
            # bug fix: this previously set ``training_step_called``; the
            # validation hook should record that validation_step ran
            self.validation_step_called = True

        def backward(self, loss, optimizer, optimizer_idx):
            return LightningModule.backward(self, loss, optimizer, optimizer_idx)

    model = TestModel()
    # disable the optional validation hooks so only validation_step logs
    model.validation_step_end = None
    model.validation_epoch_end = None

    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=2,
        limit_val_batches=2,
        max_epochs=2,
        log_every_n_steps=1,
        weights_summary=None,
    )
    trainer.fit(model)

    # make sure all the metrics are available for callbacks
    expected_logged_metrics = {
        'a2',
        'a_step',
        'a_epoch',
        'b_step/epoch_0',
        'b_step/epoch_1',
        'b_epoch',
        'epoch',
    }
    logged_metrics = set(trainer.logged_metrics.keys())
    assert expected_logged_metrics == logged_metrics

    # we don't want to enable val metrics during steps because it is not something that users should do
    # on purpose DO NOT allow step_b... it's silly to monitor val step metrics
    callback_metrics = set(trainer.callback_metrics.keys())
    callback_metrics.remove('debug_epoch')
    expected_cb_metrics = {'a', 'a2', 'b', 'a_epoch', 'b_epoch', 'a_step'}
    assert expected_cb_metrics == callback_metrics
def test__validation_step__step_end__epoch_end__log(tmpdir):
    """
    Tests that ``validation_step``, ``validation_step_end`` and
    ``validation_epoch_end`` can all log, and that on_step/on_epoch variants
    are reduced and exposed under the expected metric keys.
    """
    os.environ['PL_DEV_DEBUG'] = '1'

    class TestModel(DeterministicModel):
        def training_step(self, batch, batch_idx):
            acc = self.step(batch, batch_idx)
            acc = acc + batch_idx
            self.log('a', acc)
            self.log('b', acc, on_step=True, on_epoch=True)
            self.training_step_called = True
            return acc

        def validation_step(self, batch, batch_idx):
            acc = self.step(batch, batch_idx)
            acc = acc + batch_idx
            self.log('c', acc)
            self.log('d', acc, on_step=True, on_epoch=True)
            self.validation_step_called = True
            return acc

        def validation_step_end(self, acc):
            self.validation_step_end_called = True
            self.log('e', acc)
            self.log('f', acc, on_step=True, on_epoch=True)
            # deliberately replace the step output to show that epoch_end
            # receives whatever step_end returns
            return ['random_thing']

        def validation_epoch_end(self, outputs):
            self.log('g', torch.tensor(2, device=self.device), on_epoch=True)
            self.validation_epoch_end_called = True

        def backward(self, loss, optimizer, optimizer_idx):
            return LightningModule.backward(self, loss, optimizer, optimizer_idx)

    model = TestModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=2,
        limit_val_batches=2,
        max_epochs=2,
        log_every_n_steps=1,
        weights_summary=None,
    )
    trainer.fit(model)

    # make sure all the metrics are available for callbacks
    logged_metrics = set(trainer.logged_metrics.keys())
    expected_logged_metrics = {
        'epoch',
        'a',
        'b_step',
        'b_epoch',
        'c',
        'd_step/epoch_0',
        'd_step/epoch_1',
        'd_epoch',
        'e',
        'f_step/epoch_0',
        'f_step/epoch_1',
        'f_epoch',
        'g',
    }
    assert expected_logged_metrics == logged_metrics

    # nothing was logged with prog_bar=True, so the progress bar stays empty
    progress_bar_metrics = set(trainer.progress_bar_metrics.keys())
    expected_pbar_metrics = set()
    assert expected_pbar_metrics == progress_bar_metrics

    # we don't want to enable val metrics during steps because it is not something that users should do
    callback_metrics = set(trainer.callback_metrics.keys())
    callback_metrics.remove('debug_epoch')
    expected_cb_metrics = {'a', 'b', 'c', 'd', 'e', 'b_epoch', 'd_epoch', 'f_epoch', 'f', 'g', 'b_step'}
    assert expected_cb_metrics == callback_metrics
@pytest.mark.parametrize(['batches', 'log_interval', 'max_epochs'], [(1, 1, 1), (64, 32, 2)])
def test_eval_epoch_logging(tmpdir, batches, log_interval, max_epochs):
    """
    Tests that ``validation_epoch_end`` can log (including slash-separated
    metric names) and that the logger receives exactly one entry per epoch.
    """
    os.environ['PL_DEV_DEBUG'] = '1'

    class TestModel(BoringModel):
        def validation_epoch_end(self, outputs):
            self.log('c', torch.tensor(2), on_epoch=True, prog_bar=True, logger=True)
            self.log('d/e/f', 2)

    model = TestModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=batches,
        limit_val_batches=batches,
        max_epochs=max_epochs,
        log_every_n_steps=log_interval,
        weights_summary=None,
    )
    trainer.fit(model)

    # make sure all the metrics are available for callbacks
    logged_metrics = set(trainer.logged_metrics.keys())
    expected_logged_metrics = {
        'c',
        'd/e/f',
        'epoch',
    }
    assert logged_metrics == expected_logged_metrics

    # only 'c' was logged with prog_bar=True
    pbar_metrics = set(trainer.progress_bar_metrics.keys())
    expected_pbar_metrics = {'c'}
    assert pbar_metrics == expected_pbar_metrics

    # callback metrics are the union of logged and progress-bar metrics,
    # minus the bookkeeping 'epoch' entry
    callback_metrics = set(trainer.callback_metrics.keys())
    callback_metrics.remove('debug_epoch')
    expected_callback_metrics = set()
    expected_callback_metrics = expected_callback_metrics.union(logged_metrics)
    expected_callback_metrics = expected_callback_metrics.union(pbar_metrics)
    expected_callback_metrics.remove('epoch')
    assert callback_metrics == expected_callback_metrics

    # assert the loggers received the expected number
    assert len(trainer.dev_debugger.logged_metrics) == max_epochs
def test_eval_float_logging(tmpdir):
    """
    Tests that ``validation_step`` can log a plain Python float
    (not only tensors).
    """
    os.environ['PL_DEV_DEBUG'] = '1'

    class TestModel(BoringModel):
        def validation_step(self, batch, batch_idx):
            output = self.layer(batch)
            loss = self.loss(batch, output)
            self.log('a', 12.0)
            return {"x": loss}

    model = TestModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=2,
        limit_val_batches=2,
        max_epochs=1,
        log_every_n_steps=1,
        weights_summary=None,
    )
    trainer.fit(model)

    # make sure all the metrics are available for callbacks
    logged_metrics = set(trainer.logged_metrics.keys())
    expected_logged_metrics = {
        'a',
        'epoch',
    }
    assert logged_metrics == expected_logged_metrics
def test_eval_logging_auto_reduce(tmpdir):
    """
    Tests that per-step validation metrics are automatically reduced to an
    epoch mean, and that the reduced value matches a manually computed mean.
    """
    seed_everything(1234)
    os.environ['PL_DEV_DEBUG'] = '1'

    class TestModel(BoringModel):
        def on_pretrain_routine_end(self) -> None:
            # manual bookkeeping of per-step losses, reset each val epoch
            self.seen_vals = []
            self.manual_epoch_end_mean = None

        def on_validation_epoch_start(self) -> None:
            self.seen_vals = []

        def validation_step(self, batch, batch_idx):
            output = self.layer(batch)
            loss = self.loss(batch, output)
            self.seen_vals.append(loss)
            self.log('val_loss', loss, on_epoch=True, on_step=True, prog_bar=True)
            return {"x": loss}

        def validation_epoch_end(self, outputs) -> None:
            # epoch_end must receive exactly the per-step outputs, in order
            for passed_in, manually_tracked in zip(outputs, self.seen_vals):
                assert passed_in['x'] == manually_tracked
            self.manual_epoch_end_mean = torch.stack([x['x'] for x in outputs]).mean()

    model = TestModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=3,
        limit_val_batches=3,
        max_epochs=1,
        log_every_n_steps=1,
        weights_summary=None,
        # NOTE(review): dirpath='val_loss' creates a literal "val_loss"
        # directory in the CWD -- looks like monitor='val_loss' and/or
        # dirpath=tmpdir was intended; confirm before changing.
        checkpoint_callback=callbacks.ModelCheckpoint(dirpath='val_loss')
    )
    trainer.fit(model)

    # make sure all the metrics are available for callbacks
    manual_mean = model.manual_epoch_end_mean
    callback_metrics = set(trainer.callback_metrics.keys())
    assert callback_metrics == {'debug_epoch', 'val_loss', 'val_loss_epoch'}

    # make sure values are correct
    assert trainer.logged_metrics['val_loss_epoch'] == manual_mean
    assert trainer.callback_metrics['val_loss'] == trainer.logged_metrics['val_loss_step/epoch_0']

    # make sure correct values were logged
    logged_val = trainer.dev_debugger.logged_metrics

    # sanity check (presumably the two pre-fit sanity-val batches; both at
    # global_step 0 -- TODO confirm against the dev debugger's semantics)
    assert logged_val[0]['global_step'] == 0
    assert logged_val[1]['global_step'] == 0

    # 3 val batches
    assert logged_val[2]['val_loss_step/epoch_0'] == model.seen_vals[0]
    assert logged_val[3]['val_loss_step/epoch_0'] == model.seen_vals[1]
    assert logged_val[4]['val_loss_step/epoch_0'] == model.seen_vals[2]

    # epoch mean
    assert logged_val[5]['val_loss_epoch'] == model.manual_epoch_end_mean

    # only those logged
    assert len(logged_val) == 6
@pytest.mark.parametrize(['batches', 'log_interval', 'max_epochs'], [(1, 1, 1), (64, 32, 2)])
def test_eval_epoch_only_logging(tmpdir, batches, log_interval, max_epochs):
    """
    Tests that only test_epoch_end can be used to log, and we return them in the results.
    """
    os.environ['PL_DEV_DEBUG'] = '1'

    class TestModel(BoringModel):
        def test_epoch_end(self, outputs):
            self.log('c', torch.tensor(2), on_epoch=True, prog_bar=True, logger=True)
            self.log('d/e/f', 2)

    model = TestModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=batches,
        limit_val_batches=batches,
        max_epochs=max_epochs,
        log_every_n_steps=log_interval,
        weights_summary=None,
    )
    trainer.fit(model)
    results = trainer.test(model)

    # metrics logged in test_epoch_end must appear in every returned result dict
    expected_result_metrics = {
        'c': torch.tensor(2),
        'd/e/f': 2,
    }
    for result in results:
        assert result == expected_result_metrics
def test_monitor_val_epoch_end(tmpdir):
    """Fit a model with a checkpoint callback that monitors a metric
    produced in ``validation_epoch_end`` (``avg_val_loss``)."""
    min_loss_epoch = 0
    checkpointing = callbacks.ModelCheckpoint(
        dirpath=tmpdir,
        save_top_k=1,
        monitor="avg_val_loss",
    )
    model = SimpleModule()
    trainer = Trainer(
        max_epochs=min_loss_epoch + 2,
        logger=False,
        checkpoint_callback=checkpointing,
    )
    trainer.fit(model)
| [
"noreply@github.com"
] | jtamir.noreply@github.com |
def generateSumArray(lst):
    """Return the prefix-sum array of *lst*: result[i] == sum(lst[:i]).

    The result has len(lst) + 1 entries and always starts with 0.
    """
    sums = [0] * (len(lst) + 1)
    for idx, value in enumerate(lst):
        sums[idx + 1] = sums[idx] + value
    return sums
"tianyawen201209@hotmail.com"
] | tianyawen201209@hotmail.com |
43ec620f796bd7583adf61a5d35f884a7ed0a131 | 13c5b9fc590954a4a25b9d38e8140eb83a63c9a1 | /src/bxutils/encoding/json_encoder.py | f19ef44071daf049b5ca02db5826cf4b49fe4c99 | [
"MIT"
] | permissive | aspin/bxcommon | f746c405c693f4efb8af815cf4f9408284299e50 | 325a0844e3fc16176e90ea574eb45fff1177c527 | refs/heads/master | 2020-09-10T16:26:55.814270 | 2019-11-07T21:53:23 | 2019-11-07T21:53:23 | 221,758,675 | 0 | 0 | null | 2019-11-14T18:08:11 | 2019-11-14T18:08:10 | null | UTF-8 | Python | false | false | 2,974 | py | import json
import os
import traceback
from datetime import date, time, datetime
from enum import Enum
from inspect import istraceback
from typing import Union, Any, Iterable, Collection, Optional, Dict
from bxutils import logging
logger = logging.get_logger(__name__)
# dict views iterate lazily but are sized Collections; treat them like
# plain lazy iterables for encoding purposes.
SPECIAL_ITERABLE_TYPES = (type({}.values()), type({}.keys()))


def is_iterable_no_collection(o):
    """Return True for lazily-iterable objects (generators, dict views)
    that are not materialized Collections such as list/tuple/set/str."""
    if isinstance(o, SPECIAL_ITERABLE_TYPES):
        return True
    return isinstance(o, Iterable) and not isinstance(o, Collection)
def to_json(obj: Any, remove_nulls: bool = False) -> str:
    """Serialize *obj* to a JSON string using :class:`EnhancedJSONEncoder`.

    When ``remove_nulls`` is True, *obj* must have a ``__dict__`` and any
    attribute with a falsy value (None, 0, "", empty containers) is dropped
    before encoding.
    """
    if not remove_nulls:
        return json.dumps(obj, cls=EnhancedJSONEncoder)
    pruned = {name: value for name, value in obj.__dict__.items() if value}
    return json.dumps(pruned, cls=EnhancedJSONEncoder)
def to_dict(obj: Any) -> Dict[str, Any]:
    """Convert *obj* into JSON-compatible primitives using the encoder's rules."""
    encoder = EnhancedJSONEncoder()
    return encoder.as_dict(obj)
def load_json_from_file(json_file_path: str) -> Optional[Union[list, dict]]:
    """Load and parse a JSON file.

    Returns the parsed list/dict, or None when the file exists but cannot be
    read or parsed (the failure is logged at debug level).
    Raises ValueError when the path does not point to a file.
    """
    node_json = None
    if not os.path.isfile(json_file_path):
        # bug fix: the path is now interpolated into the message; previously
        # it was passed as a second ValueError argument and never formatted
        raise ValueError("Could not locate json file: %s" % json_file_path)
    try:
        with open(json_file_path) as json_file:
            node_json = json.load(json_file)
    except ValueError as e:
        # json.load raises ValueError (JSONDecodeError) on malformed content
        logger.debug("Failed to parse json: %s", e)
    except OSError as e:
        logger.debug("Failed trying to check for a json file: %s", e)
    return node_json
class EnhancedJSONEncoder(json.JSONEncoder):
    """
    JSON encoder that serializes objects the stock encoder cannot: lazy
    iterables, Enums, bytes-like objects, date/time objects, objects with a
    ``__dict__``, hash/digest objects, and tracebacks. Unlike the base
    class, ``default`` is also applied to dict keys and nested containers
    via :meth:`_encode`.
    """

    def default(self, o: Any) -> Any:
        # materialize lazy iterables (generators, dict views) into lists so
        # the base encoder can serialize them
        if is_iterable_no_collection(o):
            o = list(o)
        elif isinstance(o, (bytearray, memoryview)):
            o = bytes(o)
        if isinstance(o, Enum):
            return str(o)
        if hasattr(o, "__dict__"):
            # some objects expose a non-dict ``__dict__`` (e.g. mappingproxy);
            # fall back to their string form
            if isinstance(o.__dict__, dict):
                return o.__dict__
            else:
                return str(o)
        if isinstance(o, (date, datetime, time)):
            return o.isoformat()  # pyre-ignore
        if isinstance(o, bytes):
            try:
                return o.decode("utf-8")
            except UnicodeDecodeError:
                # not valid UTF-8 -- keep the repr instead of raising
                return str(o)
        if hasattr(o, "hexdigest"):
            # hashlib-style digest objects
            return o.hexdigest()  # pyre-ignore
        if hasattr(o, "hex_string"):
            return o.hex_string()  # pyre-ignore
        if istraceback(o):
            return "".join(traceback.format_tb(o)).strip()
        return o

    def _encode(self, obj):
        # recursively apply ``default`` to keys, values and elements; the
        # base encoder only invokes ``default`` for unserializable values
        obj = self.default(obj)
        if isinstance(obj, dict):
            return {self.default(self._encode(k)): self._encode(v) for k, v in obj.items()}
        elif isinstance(obj, list) or isinstance(obj, set):
            return [self._encode(l) for l in obj]
        else:
            return obj

    def encode(self, o) -> str:
        """Encode *o* to a JSON string after pre-processing with ``_encode``."""
        return super(EnhancedJSONEncoder, self).encode(self._encode(o))

    def as_dict(self, obj) -> Dict[str, Any]:
        """Return *obj* converted to JSON-compatible primitives (no string dump)."""
        return self._encode(obj)
| [
"vc.shane@gmail.com"
] | vc.shane@gmail.com |
d34717fa89b9334c16bbce45114f375f90df6212 | 5f86944bdf1b810a84c63adc6ed01bbb48d2c59a | /kubernetes/client/models/v1_host_path_volume_source.py | 3c20e047e90416302ea98ba790825974c4a38243 | [
"Apache-2.0"
] | permissive | m4ttshaw/client-python | 384c721ba57b7ccc824d5eca25834d0288b211e2 | 4eac56a8b65d56eb23d738ceb90d3afb6dbd96c1 | refs/heads/master | 2021-01-13T06:05:51.564765 | 2017-06-21T08:31:03 | 2017-06-21T08:31:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,264 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1HostPathVolumeSource(object):
    """Swagger model for a Kubernetes ``hostPath`` volume source.

    NOTE: originally produced by the swagger code generator; the public
    shape (``swagger_types``, ``attribute_map``, ``path`` property) is part
    of the generated-client contract.
    """

    def __init__(self, path=None):
        """Create the model.

        :param path: path of the directory on the host.
        """
        # attribute name -> swagger type, and attribute name -> JSON key
        self.swagger_types = {
            'path': 'str'
        }
        self.attribute_map = {
            'path': 'path'
        }
        self._path = path

    @property
    def path(self):
        """Path of the directory on the host.

        More info: http://kubernetes.io/docs/user-guide/volumes#hostpath

        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """Set the host directory path; a value is required."""
        if path is None:
            raise ValueError("Invalid value for `path`, must not be `None`")
        self._path = path

    def to_dict(self):
        """Return the model properties as a plain dict."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is the same model type with equal state."""
        if not isinstance(other, V1HostPathVolumeSource):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
| [
"mehdy@google.com"
] | mehdy@google.com |
337555d0ffdb156debb715b400abbd43a9f4ed40 | 9c5f36b72323090b9f0254938923a04b436fd3be | /main/collect_data_X.py | a37f4e8d9f31c29e69b2f9b8a93a7076b5e83221 | [] | no_license | mdlaskey/IL_ROS_HSR | 7c7233905e6a1fc8388661236bade3862da0fc90 | d12f8397249acea4fae71d12c74074314a8a005e | refs/heads/master | 2021-01-20T18:30:11.815581 | 2018-04-07T01:14:41 | 2018-04-07T01:14:41 | 90,918,344 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,777 | py | from hsrb_interface import geometry
import hsrb_interface
from geometry_msgs.msg import PoseStamped, Point, WrenchStamped
import geometry_msgs
import controller_manager_msgs.srv
import cv2
from cv_bridge import CvBridge, CvBridgeError
import IPython
from numpy.random import normal
import time
#import listener
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Joy
from il_ros_hsr.core.sensors import RGBD, Gripper_Torque, Joint_Positions
from il_ros_hsr.core.joystick_X import JoyStick_X
import matplotlib.pyplot as plt
import numpy as np
import numpy.linalg as LA
from tf import TransformListener
import rospy
from il_ros_hsr.p_pi.safe_corl.com import Safe_COM as COM
from il_ros_hsr.p_pi.safe_corl.bottle_detector import Bottle_Detect
class Collect_Demos():
    """Joystick-teleoperated demonstration collector for the Toyota HSR.

    Records (image, action) trajectories while the operator drives the robot
    with the joystick, then optionally saves the trajectory via COM.
    NOTE(review): this module is Python 2 (print statements, py2 input()).
    """

    def __init__(self, user_name=None, inject_noise=False, noise_scale=1.0):
        # hardware interfaces: base, arm/body and gripper handles from hsrb
        self.robot = hsrb_interface.Robot()
        self.noise = 0.1
        self.omni_base = self.robot.get('omni_base')
        self.whole_body = self.robot.get('whole_body')
        self.gripper = self.robot.get('gripper')
        self.tl = TransformListener()
        self.cam = RGBD()
        # give the camera subscribers time to start publishing
        time.sleep(5)
        self.b_d = Bottle_Detect(self.cam.read_info_data())
        # recording state machine flags, toggled by joystick buttons in run()
        self.start_recording = False
        self.stop_recording = False
        self.com = COM()
        if(not user_name == None):
            # route saved recordings into the per-user directory
            self.com.Options.setup(self.com.Options.root_dir, user_name)
        #self.com.go_to_initial_state(self.whole_body,self.gripper)
        #self.whole_body.move_to_joint_positions({'head_tilt_joint':-0.3})
        self.joystick = JoyStick_X(self.com, inject_noise=inject_noise, noise_scale=noise_scale)
        self.torque = Gripper_Torque()
        self.joints = Joint_Positions()

    def proess_state(self):
        # NOTE(review): method name is a typo of "process_state" but is part
        # of the public interface -- renaming would break callers.
        """Capture one (RGB, depth, action, pose) sample and append it to
        the in-progress trajectory."""
        img_rgb = self.cam.read_color_data()
        img_depth = self.cam.read_depth_data()
        cur_action, noise_action, time_pub = self.joystick.apply_control()
        state = self.com.format_data(img_rgb, img_depth)
        #cv2.imwrite('frame_'+str(self.count)+'.png',state[0])
        #Save all data
        data = {}
        data['action'] = cur_action
        data['color_img'] = state[0]
        data['depth_img'] = state[1]
        data['noise_action'] = noise_action
        # end-effector position as a plain [x, y, z] array
        pose = self.whole_body.get_end_effector_pose().pos
        pose = np.array([pose.x, pose.y, pose.z])
        data['robot_pose'] = pose
        # data['action_time'] = time_pub
        # data['image_time'] = self.cam.color_time_stamped
        print "ACTION AT COUNT ", self.count
        print cur_action
        self.count += 1
        self.trajectory.append(data)
        # if(LA.norm(cur_action) > 1e-3):
        # print "GOT ACCEPTED"
        # self.trajectory.append(data)

    def check_success(self):
        """Run the bottle detector on the current frames; return its verdict."""
        img_rgb = self.cam.read_color_data()
        img_depth = self.cam.read_depth_data()
        success = self.b_d.detect_bottle(img_rgb, img_depth)
        print "BOTTLE FOUND ", success
        return success

    def run(self):
        """One control cycle: poll the joystick, and once recording is
        triggered, capture frames until the stop button is pressed, then
        prompt the operator to save the trajectory."""
        self.joystick.apply_control()
        cur_recording = self.joystick.get_record_actions_passive()
        if(cur_recording[0] < -0.1):
            # left trigger past threshold starts a recording session
            print "BEGIN DATA COLLECTION"
            self.start_recording = True
            count = 0
        if(self.start_recording):
            self.count = 0
            self.trajectory = []
            while not self.stop_recording:
                #while count < 20:
                # only sample when the camera has a fresh frame
                if(self.cam.is_updated):
                    self.proess_state()
                    self.cam.is_updated = False
                cur_recording = self.joystick.get_record_actions()
                if(cur_recording[1] < -0.1):
                    print "END DATA COLLECTION"
                    self.stop_recording = True
                count += 1
            # result is printed by check_success but not used to gate saving
            self.check_success()
            q = input('SAVE DATA: y or n: ')
            if(q == 'y'):
                self.com.save_recording(self.trajectory)
            # reset the state machine for the next session
            self.start_recording = False
            self.stop_recording = False
# Script entry point: collect teleoperated demonstration data on the HSR
# robot until the process is killed (infinite loop; stop with Ctrl-C).
if __name__=='__main__':
    user_name = 'corl_anne_n1/'  # per-user subdirectory for saved recordings
    noise_scale = 4.0  # magnitude of injected control noise
    inject_noise = True
    cd = Collect_Demos(user_name,inject_noise=inject_noise,noise_scale = noise_scale)
    time.sleep(5)  # give ROS publishers/subscribers time to connect
    while True:
        cd.run() | [
"mdlaskey@umich.edu"
] | mdlaskey@umich.edu |
25d0c925d1c1adec666c8545cd7d97549bb8a2e0 | 905f40a4ad8e17bb4871cf87b6ee184a76a77c2a | /products/views/products.py | bdd3a00c62a20bc010f464125babe72232cdb220 | [] | no_license | sai9912/mypyton | 5e1f7ca278051d5f588af1d9accae5fd1780020b | 338fd6396dbdce971bc542718fbb9608bdcfc2a7 | refs/heads/master | 2022-12-16T05:04:34.590818 | 2019-04-18T09:18:06 | 2019-04-18T09:18:06 | 176,324,427 | 0 | 0 | null | 2022-12-08T02:31:10 | 2019-03-18T16:16:56 | Python | UTF-8 | Python | false | false | 38,457 | py | import json
import logging
import os
import trml2pdf
from django.conf import settings
from django.core.paginator import Paginator, InvalidPage
from django.core.serializers import serialize
from django.http import HttpResponse, Http404
from django.shortcuts import render, redirect, reverse
from django.template.loader import get_template
from django.utils import translation
from django.utils.translation import gettext as _
from BCM.helpers.pdf_export import render_to_pdf
from barcodes.utilities import normalize
from core import flash, flash_get_messages, jsonify
from products.helpers.product_helper import get_static_file_full_path, is_valid_url
from products.models import TargetMarket
from services import prefix_service
from services import sub_product_service
from services import country_of_origin_service, target_market_service, language_service
from services import gtin_target_market_service
from users.helpers import user_agreement_required
from ..forms import PackageLevelForm, PackageTypeForm, FilterForm
from ..forms import ProductDetailForm
from ..forms import ProductForm, ProductCaseForm
from ..helpers import product_helper, subproduct_helper
from ..models.package_level import PackageLevel
from ..models.package_type import PackageType
from ..models.product import Product
from ..models.sub_product import SubProduct
from ..utilities import delete_product_image, get_image_dimensions, upload_image
from member_organisations.models import ProductTemplate
@user_agreement_required
def add_product(request):
    """
    GET/POST for adding a new base or case product (wizard step 1: package level).

    Resolves which prefix to use (explicit ``prefix`` request parameter or
    the user's active prefix), validates that it can allocate numbers, and
    on a valid POST stores the chosen package level in
    ``request.session['new_product']`` before redirecting to the
    package-type step.
    """
    subproduct_helper.subproducts_reset(request.session)  # remove subproducst from session if any
    user_active_prefix = request.user.profile.product_active_prefix

    # the prefix may arrive via POST or GET; otherwise use the active one
    prefix = request.POST.get('prefix', None)
    if prefix is None:
        prefix = request.GET.get('prefix', None)
    if prefix:
        prefix = prefix_service.find_item(user=request.user, prefix=prefix)
        if prefix and prefix != user_active_prefix:
            prefix_service.make_active(prefix.prefix, request.user)
            # switching prefixes invalidates any in-progress product wizard
            if request.session.get('new_product', None):
                del request.session['new_product']
    else:
        prefix = user_active_prefix

    # the prefix must exist, have a starting number, and be writable
    if not prefix:
        flash(request, 'You must have an active prefix set to enter new product. Please choose one', 'danger')
        return redirect(reverse('prefixes:prefixes_list'))
    if not prefix.starting_from:
        flash(request, 'You must have a starting number set to enter new product. Please set one', 'danger')
        return redirect(reverse('prefixes:prefixes_list'))
    if prefix.is_special == 'READ-ONLY':
        flash(request, 'You can not add a new product in this range. It\'s a suspended read-only range', 'danger')
        return redirect(reverse('products:products_list'))

    package_rows = PackageLevel.service.all()
    # express mode may be requested via either POST or GET
    express = True if request.POST.get('express') or request.GET.get('express') else False
    title = 'New Product'
    if express:
        title = 'Express Allocation'

    if request.method == 'POST':
        form = PackageLevelForm(request.POST)
        form.set_package_levels(package_rows)
        if form.is_valid():
            try:
                prefix.make_starting_from()
            except Exception:
                flash(request, 'not allowed to create products for this prefix.', 'danger')
                return redirect(reverse('prefixes:prefixes_list'))
            # (re)initialise the wizard state when it is absent or refers to
            # a different (stale) GTIN
            if not request.session.get('new_product', None):
                request.session['new_product'] = {
                    'gtin': prefix.starting_from,
                    'package_level': form.data['package_level']
                }
            elif request.session['new_product'].get('gtin') != prefix.starting_from:
                request.session['new_product'] = {
                    'gtin': prefix.starting_from,
                    'package_level': form.data['package_level']
                }
            else:
                request.session['new_product']['package_level'] = form.data['package_level']
            if express:
                request.session['new_product'].update({'express': True})
            elif 'express' in request.session['new_product']:
                del request.session['new_product']['express']
            return redirect(reverse('products:add_product_package_type'))
            # if session['new_product']['package_level'] == str(models.BASE_PACKAGE_LEVEL):
            #     if session['new_product'].get('express'):
            #         return redirect(url_for('products.add_product_express'))
            #     else:
            #         return redirect(url_for('products.add_product_package_type'))
            # else:
            #     return redirect(url_for('products.subproduct_add_case'))
        else:
            flash(request, 'You must choose a level to proceed', 'danger')
    else:
        form = PackageLevelForm()
        form.set_package_levels(package_rows)
        # pre-select the level from an in-progress wizard for the same GTIN
        if (request.session.get('new_product', None) and
                request.session['new_product'].get('gtin') == prefix.starting_from):
            form.data['package_level'] = request.session.get('new_product')['package_level']

    templates = ProductTemplate.objects.filter(
        member_organisation=request.user.profile.member_organisation
    ).order_by('order')
    context = {'title': title,
               'prefix': prefix,
               'templates': templates
               }
    return render(request, 'products/package_level_form.html', context)
@user_agreement_required
def add_product_package_type(request):
    """
    Package type selector (wizard step between package level and details).

    Requires ``request.session['new_product']`` with a ``gtin``; on a valid
    POST stores the chosen package type and barcode placement, then
    redirects to the details step (base units, level '70') or the case
    sub-product step (all other levels).
    """
    session = request.session.get('new_product', None)
    if not session:
        raise Http404('Session does not exist')
    gtin = session.get('gtin', None)
    if not gtin:
        raise Http404('No gtin in session')
    prefix = prefix_service.find_item(user=request.user, starting_from=str(gtin))
    if not prefix:
        # NOTE(review): `prefix` is None on this path, so the message renders
        # as "(None)" -- presumably `gtin` was intended; confirm before fixing.
        raise Http404('Starting prefix (%s) not found' % prefix)

    if request.method == 'POST':
        form = PackageTypeForm(request.POST, prefix=prefix)
        if form.is_valid():
            request.session['new_product'].update({
                'package_type': form.cleaned_data['package_type'],
                'bar_placement': form.cleaned_data['bar_placement']
            })
            # '70' == base unit; everything else goes through the
            # sub-product (case) flow
            if session.get('package_level') == '70':
                return redirect(reverse('products:add_product_base_details'))
            else:
                return redirect(reverse('products:subproduct_add_case'))
    else:
        form = PackageTypeForm(prefix=prefix)
        # defaults: base units and cases use different artwork / type codes
        if session.get('package_level') == '70':
            form.initial['bar_placement'] = settings.STATIC_URL + 'products/site/wizard/proddesc/BG.png'
            form.initial['package_type'] = '1'
        else:
            form.initial['bar_placement'] = settings.STATIC_URL + 'products/site/wizard/proddesc/CS.png'
            form.initial['package_type'] = '34'

    # human-readable label for the current package level
    if session.get('package_level') == '70':
        package_level = 'Base Unit / Each'
    elif session.get('package_level') == '60':
        package_level = 'Pack or inner pack'
    elif session.get('package_level') == '50':
        package_level = 'Case or mixed case'
    elif session.get('package_level') == '40':
        package_level = 'Display unit'
    else:
        package_level = 'Pallet'  # 30
    context = {
        'form': form,
        'prefix': prefix,
        'package_types': PackageType.service.filter(ui_enabled=True).order_by('id'),
        'package_level': package_level
    }
    return render(request, 'products/package_type_form.html', context=context)
@user_agreement_required
def add_product_base_details(request):
    """
    -- used for the NEW (Step 2 - EACH)
    GET / POST for adding a base level item.

    Validates the wizard session, then either renders the pre-filled
    details form (GET) or creates the product, uploads its image and
    advances the prefix's starting number (valid POST).
    :template_name: products/product_details_form.html
    :return:
    """
    session = request.session.get('new_product', None)
    if not session:
        raise Http404()
    for k in ['package_type', 'package_level', 'gtin', 'bar_placement']:  # Check session and restart if missing
        if k not in session.keys():
            del request.session['new_product']
            flash(request, 'Add new product restarted #010', 'danger')
            return redirect(reverse('products:add_product'))

    gtin = session.get('gtin', '0')
    prefix = prefix_service.find_item(user=request.user, starting_from=gtin)
    if not prefix:
        raise Http404()
    # barcode symbology depends on the prefix family
    if prefix.is_upc():
        kind = 'UPCA'
    else:
        kind = 'EAN13'

    if request.method == 'POST':
        context_is_new = 0
        post_data = request.POST.dict()
        form = ProductDetailForm(data=post_data)
        verified = True
        # the submitted GTIN (sans leading digit) must belong to this prefix
        if not form.data.get('gtin', '')[1:14].startswith(prefix.prefix):
            flash(request, 'You entered a non valid GTIN number (error #001)', 'danger')
            verified = False
        if not form.is_valid(request):
            verified = False
        if verified:
            # copy only non-empty submitted fields into the create() kwargs
            form_data = {}
            for formfield in form.cleaned_data:
                try:
                    if formfield == 'csrfmiddlewaretoken':
                        continue
                    if form.data[formfield] != '':
                        form_data[formfield] = form.cleaned_data[formfield]
                    else:
                        pass
                except Exception as e:
                    # NOTE(review): silently skips fields present in
                    # cleaned_data but absent from raw form.data
                    pass
            try:
                ### PRODUCT CREATE UI
                # create under the product's own language so translated
                # fields are stored correctly
                with translation.override(form_data.get('language', 'en')):
                    product = Product.service.create(
                        owner=request.user,
                        company_organisation=prefix.company_organisation,
                        prefix=prefix,
                        **form_data
                    )
            except Exception as e:
                flash(request, str(e), 'danger')
                return redirect(reverse('products:add_product_base_details'))
            # Load image
            if request.FILES:
                upload_image(request, product)
            # Update prefix
            try:
                prefix.increment_starting_from()
                prefix_service.save(prefix)
            except Exception as e:
                # product already exists; surface the error but continue
                flash(request, str(e), 'danger')
            if request.session.get('new_product'):
                del request.session['new_product']
            return redirect(reverse('products:view_product_summary', args=(product.id,)))
        else:
            logging.debug('ProductDetailFormOptions error: %s' % str(form.errors))
    else:
        context_is_new = 1
        form = ProductDetailForm()
        # default values - new product GET
        form.initial['gln_of_information_provider'] = normalize('EAN13', prefix.prefix)
        form.initial['is_bunit'] = True
        form.initial['company'] = prefix.company_organisation.company
        form.initial['gtin'] = '0' + session.get('gtin')
        form.initial['bar_placement'] = session.get('bar_placement')
        form.initial['package_level'] = session.get('package_level')
        form.initial['package_type'] = session.get('package_type')
        form.initial['image'] = session.get('image', settings.NO_IMAGE)
        # derive origin / target market / language defaults from the user's
        # member organisation and profile
        country = request.user.profile.member_organisation.country
        country_of_origin = country_of_origin_service.find_by_country(country)
        if country_of_origin:
            form.initial['country_of_origin'] = country_of_origin.code
        target_market = target_market_service.find_by_country(country)
        if target_market:
            form.initial['target_market'] = target_market.code
        language_slug = request.user.profile.language
        language = language_service.find_by_slug(language_slug)
        if language:
            form.initial['language'] = language.slug
        # NOTE(review): 'asdfasdf' looks like leftover debug/placeholder
        # data for the category default; confirm before removing.
        form.initial['category'] = 'asdfasdf'

    context = {'title': 'New Base Unit / Each (Step 2 of 2: Details)',
               'is_new': context_is_new,
               'prefix': prefix,
               'gtin0': '0',
               'gtin13': session['gtin'],
               'kind': kind,
               'product_package_level_id': int(session['package_level']),
               'leading_gln': normalize('EAN13', prefix.prefix),
               'form': form,
               'flashed_messages': flash_get_messages(request)}
    return render(request, 'products/product_details_form.html', context=context)
@user_agreement_required
def view_product_summary(request, product_id):
    """Render the read-only summary page for one of the user's products.

    Looks up the product (scoped to the requesting user), its licensed
    prefix and associated sub-products, then renders either the full page
    or an XHR fragment depending on the ``xhr`` query parameter.
    Raises Http404 when the product's prefix is not licensed to the user.
    """
    product = Product.service.get_my_product(request.user, product_id)
    prefix = prefix_service.find_item(user=request.user, prefix=product.gs1_company_prefix)
    if not prefix:
        raise Http404()
    sub_products = sub_product_service.get_associated(product)
    nop = None
    if product.package_level.id == PackageLevel.BASE:
        title = 'New Base Unit / Each (Summary)'
    else:
        # Non-base packaging levels also report the number of sub-products.
        title = 'New item (Summary)'
        nop = len(sub_products)
    # Coerce the 'xhr' query flag to a real boolean (missing/'' -> False).
    is_xhr = request.GET.get('xhr', False) and True
    if is_xhr:
        template_name = 'products/product_summary_xhr.html'
    else:
        template_name = 'products/product_summary.html'
    # Dump every model field as {name, value} pairs for the template.
    # NOTE(review): _meta.get_fields() also returns relation fields;
    # presumably getattr() succeeds for all of them here - confirm.
    product_fields = []
    product_fields_list = product._meta.get_fields()
    for field in product_fields_list:
        product_fields.append({'name': field.name, 'value': getattr(product, field.name)})
    context = {
        'title': title,
        'prefix': prefix,
        'product': product,
        'product_fields': product_fields,
        'nop': nop,
        'sub_products': sub_products,
        'debug': False
    }
    return render(request, template_name, context=context)
# deprecated (original note) - presumably superseded by products_list_js
# below; confirm before removing.
@user_agreement_required
def products_list(request):
    """Render the filterable, sortable, paginated product list page.

    Resolves the active prefix (optionally switching it via ``?prefix=``),
    applies the session-persisted FilterForm, sorting and pagination, and
    renders products/list.html.  POST submits new filter criteria.
    """
    user_active_prefix = request.user.profile.product_active_prefix
    if request.GET.get('prefix'):
        prefix = prefix_service.find_item(user=request.user, prefix=request.GET.get('prefix'))
        if prefix and prefix != user_active_prefix:
            # Switch the user's active prefix to the requested one.
            prefix_service.make_active(prefix.prefix, user=request.user)
        elif not prefix:
            flash(request, 'Incorrect active prefix. Please choose one', 'danger')
            return redirect(reverse('prefixes:prefixes_list'))
    else:
        prefix = user_active_prefix
        if not prefix:
            flash(request, 'You must have an active prefix to see products. Please choose one', 'danger')
            return redirect(reverse('prefixes:prefixes_list'))
    # Pagination parameters: ?page and ?per_page (persisted in the session).
    try:
        page = int(request.GET.get('page', '1'))
    except (ValueError, TypeError):
        page = 1
    try:
        settings_per_page = settings.PRODUCTS_PER_PAGE
    except:
        # Setting may be absent; fall back to a default page size.
        settings_per_page = 10
    try:
        per_page = int(request.GET.get('per_page'))
    except (ValueError, TypeError):
        per_page = None
    if per_page:
        request.session['per_page'] = per_page
    else:
        per_page = request.session.get('per_page', settings_per_page)
    # All of this user's products under the selected prefix.
    all_in_range = Product.objects.filter(
        company_organisation=request.user.profile.company_organisation,
        gs1_company_prefix=prefix.prefix
    )
    # Build the target-market filter choices from markets actually in use,
    # starting with an empty "any" entry and skipping consecutive duplicates.
    target_market_ids = all_in_range.values_list('target_market', flat=True).distinct()
    target_markets = TargetMarket.objects.filter(id__in=target_market_ids)
    target_market_choices = [['', '']]
    for target_market in target_markets:
        try:
            if target_market_choices[-1][0] == target_market.code:
                continue
        except Exception:
            pass
        target_market_choices.append([target_market.code, target_market.market])
    completeness = product_helper.get_completeness(all_in_range)
    if request.method == 'POST':
        # New filter criteria submitted: apply them and persist to session.
        form = FilterForm(request.POST)
        if form.is_valid():
            all_in_range = product_helper.filter_list(all_in_range, form)
            request.session['list_filter'] = product_helper.make_filter(form)
        else:
            logging.getLogger().debug(str(form.errors))
    else:
        if request.GET.get('clear_filter'):
            if request.session.get('list_filter'):
                del request.session['list_filter']
        if request.session.get('list_filter'):
            # Re-apply the filter stored in the session; drop it when it
            # can no longer be applied.
            try:
                form = FilterForm(request.session['list_filter'])
                all_in_range = product_helper.filter_list(all_in_range, form)
            except:
                del request.session['list_filter']
                form = FilterForm()
        else:
            form = FilterForm()
    # if sort arguments are provided in GET we will override them
    # see https://github.com/tbikeev/robot-gs1/issues/30
    if request.GET.get('sort_mode') and request.GET.get('sort_field'):
        form.data['sort_field'] = request.GET.get('sort_field')
        form.data['sort_mode'] = request.GET.get('sort_mode')
    sort_field = form.data.get('sort_field', form.fields['sort_field'].initial)
    sort_mode = form.data.get('sort_mode', form.fields['sort_mode'].initial)
    if sort_mode == 'desc':
        sort_order = '-%s' % sort_field
        sort_mode = 'Descending'
    else:
        sort_order = sort_field
        sort_mode = 'Ascending'
    # Replace the sort-field key with its human-readable label for display.
    for key, value in form.fields['sort_field'].choices:
        if sort_field == key:
            sort_field = value
            break
    products = all_in_range.order_by(sort_order)
    paginator = Paginator(products, per_page)
    try:
        paginator_page = paginator.page(page)
    except InvalidPage:
        paginator_page = paginator.page(1)
    # Inject the computed market choices everywhere the form caches them.
    form.fields['target_market'].choices = target_market_choices
    form.base_fields['target_market'].choices = target_market_choices
    form.declared_fields['target_market'].choices = target_market_choices
    # Package levels for which this member organisation has a template.
    templates = {}
    for item in ProductTemplate.objects.filter(member_organisation=request.user.profile.member_organisation):
        templates[ item.package_level_id ] = True
    # Collect sub-products for every listed product whose package level is
    # not 70 (presumably the base level - confirm against PackageLevel).
    assoc_products = []
    for p in paginator_page.object_list:
        if p.package_level.id != 70:
            subproducts = SubProduct.objects.filter(product=p).all()
            sub_p = []
            for subproduct in subproducts:
                sub_product = subproduct.sub_product
                sub_p.append({'id': sub_product.id,
                              'package_level': {'id': sub_product.package_level_id},
                              'brand': sub_product.brand,
                              'description': sub_product.description,
                              'gtin': sub_product.gtin,
                              'gs1_company_prefix': sub_product.gs1_company_prefix,
                              'bar_type': sub_product.bar_type,
                              'quantity': subproduct.quantity})
            assoc_products.append({'p_id': p.id,
                                   'sub_p': sub_p})
    context = {'prefix': prefix,
               'latest_product_list': paginator_page.object_list,
               'assoc_products': assoc_products,
               'pagination': paginator_page,
               'per_page': per_page,
               'ppp': settings_per_page,
               'sorted': {'field': sort_field, 'mode': sort_mode},
               'form': form,
               'enable_leading': True,  # request.user.enable_leading
               'templates': templates,
               'completeness': completeness}
    return render(request, 'products/list.html', context=context)
@user_agreement_required
def products_list_js(request):
    """Render the list_js.html variant of the product list.

    Resolves the active prefix exactly like products_list(), but the view
    itself only supplies the prefix, the template-availability map and the
    completeness stats; the product rows come from elsewhere (the page's
    scripts, presumably - confirm).
    """
    user_active_prefix = request.user.profile.product_active_prefix
    if request.GET.get('prefix'):
        prefix = prefix_service.find_item(user=request.user, prefix=request.GET.get('prefix'))
        if prefix and prefix != user_active_prefix:
            # Switch the user's active prefix to the requested one.
            prefix_service.make_active(prefix.prefix, user=request.user)
        elif not prefix:
            flash(request, 'Incorrect active prefix. Please choose one', 'danger')
            return redirect(reverse('prefixes:prefixes_list'))
    else:
        prefix = user_active_prefix
        if not prefix:
            flash(request, 'You must have an active prefix to see products. Please choose one', 'danger')
            return redirect(reverse('prefixes:prefixes_list'))
    # Package levels for which this member organisation has a template.
    templates = {}
    for item in ProductTemplate.objects.filter(member_organisation=request.user.profile.member_organisation):
        templates[item.package_level_id] = True
    all_in_range = Product.objects.filter(
        company_organisation=request.user.profile.company_organisation,
        gs1_company_prefix=prefix.prefix
    )
    completeness = product_helper.get_completeness(all_in_range)
    context = {
        'prefix': prefix,
        'templates': templates,
        'completeness': completeness
    }
    return render(request, 'products/list_js.html', context=context)
@user_agreement_required
def ajax_product_mark(request, product_id):
    """Flag the given product as marked and report success as JSON."""
    item = Product.service.get(id=product_id)
    item.mark = 1
    item.save()
    return jsonify(success=True)
@user_agreement_required
def ajax_product_unmark(request, product_id):
    """Clear the marked flag on the given product and report success as JSON."""
    item = Product.service.get(id=product_id)
    item.mark = 0
    item.save()
    return jsonify(success=True)
@user_agreement_required
def ajax_get_package_type(request, package_type_id):
    """Return one PackageType serialized as JSON, or 404 when unknown."""
    try:
        pkg = PackageType.objects.get(id=package_type_id)
    except PackageType.DoesNotExist:
        raise Http404()
    # Serialize via Django's JSON serializer, then overlay the translated
    # type/description attributes on the raw field dict.
    fields = json.loads(serialize('json', [pkg]))[0]['fields']
    fields['type'] = pkg.type
    fields['description'] = pkg.description
    return jsonify(package_type=fields)
@user_agreement_required
def product_print_summary(request, product_id):
    """Render a one-page PDF summary of a product.

    Collects the product, its sub-products (labelled via the member
    organisation's per-package-level templates), the organisation logo and
    scaled-down bar-placement / product images, renders the ODT template
    through render_to_pdf() and returns it inline as application/pdf.
    """
    product = Product.service.get(id=product_id)
    sub_products = sub_product_service.get_associated(product)
    nop = len(sub_products)
    # Map package_level_id -> localised UI label, preferring the user's
    # language and falling back to English; levels with neither are simply
    # absent from the map.
    templates = dict()
    product_templates = ProductTemplate.objects.filter(
        member_organisation=request.user.profile.member_organisation
    ).all()
    for product_template in product_templates:
        ui_label_i18n = json.loads(product_template.ui_label_i18n)
        try:
            templates[product_template.package_level_id] = ui_label_i18n[request.user.profile.language]
        except:
            try:
                templates[product_template.package_level_id] = ui_label_i18n['en']
            except:
                pass
    for sub_product in sub_products:
        try:
            sub_product.ui_label = templates[sub_product.sub_product.package_level_id]
        except:
            # No template label: fall back to the raw package level name.
            sub_product.ui_label = sub_product.sub_product.package_level.level
    # Organisation logo; default GS1 logo when none is configured.  Stored
    # paths start with a separator that is stripped, and the PDF variant
    # lives under logo/pdf/.
    logo = request.user.profile.member_organisation.gs1_logo_path
    if not logo:
        logo = 'static/site/logo/gs1-logo.png'
    else:
        logo = logo[1:]
        logo = logo.replace('logo/', 'logo/pdf/')
    product_template = ProductTemplate.objects.filter(
        package_level=product.package_level,
        member_organisation=product.member_organisation
    ).first()
    context = {
        'logo': logo,
        'verdana': 'static/site/fonts/verdana.ttf',
        'verdana_bold': 'static/site/fonts/verdanab.ttf',
        'product': product,
        'sub_products': sub_products,
        'nop': nop,
        # ui configuration for the main product (to show/hide required fields and so on)
        'ui_attributes': getattr(product_template, 'attributes_dict', None),
    }
    # Bar-placement illustration, scaled to fit a 40x40 box while keeping
    # the aspect ratio; falls back to the generic image for the package
    # level when the product-specific one cannot be read.
    try:
        p_width, p_height = get_image_dimensions('products' + product.bar_placement)
        k_width = p_width / 40
        k_height = p_height / 40
        if k_width > k_height:
            p_width = int(p_width / k_width)
            p_height = int(p_height / k_width)
        else:
            p_width = int(p_width / k_height)
            p_height = int(p_height / k_height)
        context.update({'placement': 'products' + product.bar_placement,
                        'p_height': p_height,
                        'p_width': p_width})
    except Exception as e:
        package_level = product.package_level.id
        bar_placement = PackageLevel.BAR_PLACEMENT[package_level]
        p_width, p_height = get_image_dimensions(bar_placement)
        k_width = p_width / 40
        k_height = p_height / 40
        if k_width > k_height:
            p_width = int(p_width / k_width)
            p_height = int(p_height / k_width)
        else:
            p_width = int(p_width / k_height)
            p_height = int(p_height / k_height)
        context.update({'placement': bar_placement,
                        'p_height': p_height,
                        'p_width': p_width})
    # Product photo, scaled the same way; best effort - products without a
    # readable image are simply rendered without one.
    try:
        if is_valid_url(product.image):
            image_path = product.image
        else:
            image_path = get_static_file_full_path(product.image)
        i_width, i_height = get_image_dimensions(image_path)
        k_width = i_width / 40
        # Bug fix: the divisor was 402 (typo), which broke the aspect-ratio
        # scaling; both axes must use the same 40-unit box as above.
        k_height = i_height / 40
        if k_width > k_height:
            i_width = int(i_width / k_width)
            i_height = int(i_height / k_width)
        else:
            i_width = int(i_width / k_height)
            i_height = int(i_height / k_height)
        context.update({'image': image_path,
                        'i_width': i_width,
                        'i_height': i_height})
    except Exception as e:
        pass
    # reportlab variant
    # template_name = 'products/product_print_summary.rml'
    # pdf_template = str(get_template(template_name).render(context))
    # pdf = trml2pdf.parseString(pdf_template.encode('utf-8'))
    # secretary variant
    pdf = render_to_pdf(
        template_name='products/templates/products/product_print_summary.odt',
        context=context
    )
    response = HttpResponse(pdf, content_type='application/pdf')
    # display pdf in place
    response['Content-Disposition'] = 'inline; filename="%s.pdf"' % product.gtin
    # as attachment ("download as.."):
    # response['Content-Disposition'] = 'attachment; file name="%s.pdf"' % product.gtin
    return response
@user_agreement_required
def fulledit(request, product_id):
    """
    Full product editor: GET renders the edit form, POST applies an update.

    GET shows the form pre-filled from the product; POST validates the
    submitted ProductForm/ProductCaseForm (chosen by package level),
    uploads a new image when provided, switches the target market when it
    changed, and hands the cleaned data to Product.service.update().
    :param product_id: primary key of the product to edit
    :return: rendered edit page, or a redirect to the product list
    """
    product = Product.service.get_my_product(request.user, product_id)
    if not product:
        raise Http404()
    prefix = prefix_service.find_item(user=request.user, prefix=product.gs1_company_prefix)
    user_active_prefix = request.user.profile.product_active_prefix
    if not prefix:
        raise Http404()
    if prefix != user_active_prefix:
        flash(request, 'This product is not in your active prefix', 'danger')
        return redirect(reverse('products:products_list'))
    barcodes = {}
    # for bc in product.barcodes:
    #     barcodes.update({bc.kind: bc})
    # Which tab to open, driven by query flags.
    if request.GET.get('barcodes'):
        active_tab = 'barcodes'
        barcodes['EAN13'] = True
    elif request.GET.get('cloud'):
        active_tab = 'cloud'
    else:
        active_tab = 'details'
    if prefix.is_upc():
        kind = 'UPCA'
    else:
        kind = 'EAN13'
    template_name = 'products/product_fulledit_form.html'
    context = { 'product': product,
                'barcodes': barcodes,
                'active_tab': active_tab,
                'product_id': product_id,
                'pkg_level': product.package_level_id,
                'prefix': prefix,
                'agreed': request.user.profile.agreed,
                'product_image': product.website_url,
                'gtin': product.gtin,
                'gtin0': product.gtin[0:1],
                'gtin13': product.gtin[1:14],
                'product_package_level_id': product.package_level_id,
                'advanced_tab': product.owner.profile.advanced_tab,
                'kind': kind
    }
    # if sub_products:
    #     context.update({'nop': product.number_of_products()})
    #     context['sub_products'] = _get_subprods_from_obj(product)
    if request.method == 'POST':
        # Package level 70 uses the base-unit form, anything else the case form.
        if product.package_level_id == 70:
            form = ProductForm(request.POST)
        else:
            form = ProductCaseForm(request.POST)
        # NOTE(review): is_valid() taking the request is non-standard Django;
        # these must be custom form classes - confirm.
        form_valid = form.is_valid(request)
        if request.FILES:
            upload_image(request, product)
        '''
        _add_field_descriptions(form)
        sub_prods = _get_subprods_from_form(request.form)
        context['sub_products'] = sub_prods
        # if product.package_level_id != 70:
        #     subs_valid = _validate_subprods(sub_prods)
        #     if not subs_valid: form.errors['subProducts'] = ['invalid subproducts'] # FIXME
        # else:
        # we allow for packs without children
        '''
        subs_valid = True
        if form_valid and subs_valid:
            # form_errors = check_values(template_name, form, **context)
            form_errors = None
            if form_errors is not None:
                return form_errors
            # Refresh the GTIN shown in the form header from the submission.
            gtin = form.data.get('gtin', '')
            context['gtin'] = gtin
            context['gtin0'] = gtin[0:1]
            context['gtin13'] = gtin[1:14]
            # The 13-digit part must belong to the active prefix.
            if not gtin[1:14].startswith(prefix.prefix):
                flash(request, 'You entered a non valid GTIN number (error #005)', 'danger')
                context['form'] = form
                return render(request, template_name, context=context)
            # Keep only non-empty cleaned fields for the update call.
            form_data = {}
            for formfield in form.cleaned_data:
                try:
                    if form.cleaned_data[formfield] != '':
                        form_data[formfield] = form.cleaned_data[formfield]
                    else:
                        pass
                except Exception as e:
                    pass
            # validate presence of subproducts
            # if product.package_level_id != 70 and not sub_prods:
            #     flash('Consider adding sub-products', 'info')
            if product.target_market.code != form_data['target_market']:
                gtin_target_market_service.change_target_market(product, form_data['target_market'])
            try:
                ### PRODUCT UPDATE UI
                product = Product.service.update(product=product,
                                                 owner=request.user,
                                                 prefix=prefix,
                                                 **form_data)
            except Exception as e:
                flash(request, str(e), 'danger')
                context['form'] = form
                return render(request, template_name, context=context)
            '''
            # subproducts -- update
            for sub_p, quantity, _valid in sub_prods:
                if int(quantity) > 0:
                    sub_p.quantity = quantity
                    services.sub_product_service.save(sub_p)
                else:
                    services.sub_product_service.delete(sub_p)
            flash(_("All changes saved sucessfully"), 'success')
            return redirect(url_for('.fulledit', product_id=product.id))
            '''
            return redirect(reverse('products:products_list'))
        else:
            if product.package_level_id == 70:
                print('ProductForm, errors:', form.errors)
            else:
                print('ProductCaseForm, errors:', form.errors)
    else:  # GET
        # NOTE(review): Django forms normally take data/instance kwargs;
        # confirm ProductForm accepts the product instance positionally.
        form = ProductForm(product)
        context['form'] = form
    target_markets = gtin_target_market_service.get_target_markets_other(product)
    context['target_markets'] = target_markets
    return render(request, template_name, context=context)
@user_agreement_required
def delete_target_market(request, product_id):
    """Drop the product's current target market and switch to another one.

    Raises Http404 when the product has no alternative markets to fall
    back to; otherwise flashes a summary and returns to the product list.
    """
    product = Product.service.get_my_product(request.user, product_id)
    target_markets = gtin_target_market_service.get_target_markets_other(product)
    if not target_markets:
        raise Http404()
    gtin_target_market_service.delete_target_market(product, product.target_market)
    product.target_market = target_markets[0].target_market
    product.save()
    url = reverse('products:fulledit', args=(product_id,))
    target_markets_names = ', '.join(
        entry.target_market.market for entry in target_markets
    )
    flash(request,
          'Product <a href="%s" style="text-decoration:underline">%s</a> exist with %s target markets' %
          (url, product.label_description, target_markets_names),
          'success')
    return redirect(reverse('products:products_list'))
@user_agreement_required
def delete_product(request, product_id):
    """Delete one of the user's products (plus its uploaded image).

    Refuses when the product lies outside the active prefix or the prefix
    is read-only.  With ``?set=1`` the prefix's next-available number is
    rewound to the deleted product's GTIN so the number can be reused.
    """
    product = Product.service.get_my_product(request.user, product_id)
    user_active_prefix = request.user.profile.product_active_prefix
    prefix = prefix_service.find_item(user=request.user, prefix=product.gs1_company_prefix)
    if not prefix:
        raise Http404()
    if prefix != user_active_prefix:
        flash(request, 'This product is not in your active prefix', 'danger')
        return redirect(reverse('products:products_list'))
    if prefix.is_special == 'READ-ONLY':
        flash(request, 'This product belongs to read only range', 'danger')
        return redirect(reverse('products:products_list'))
    #if product.associated_products:
    #    flash(request, 'This product is part of a container and cannot be deleted', 'danger')
    #    return redirect(request.referrer)
    gtin = product.gtin
    # Work out the uploaded image's file name (if any) so it can be removed.
    if product.image != settings.NO_IMAGE:
        try:
            image = os.path.split(product.image)[1]
        except:
            image = None
    else:
        image = None
    if image:
        delete_product_image(image, request.user.id)
    '''
    barcodes = Barcodes.service.find(product_id=product_id, user_id=request.user.id).all()
    for barcode in barcodes:
        delete_barcode_images(gtin[1:14], current_user.id)
        services.barcode_service.delete(barcode)
    '''
    '''
    sub_product_entries = services.sub_product_service.find(product_id=product_id).all()
    try:
        services.product_service.delete(product)
    except Exception as e:
        logging.getLogger().error('Delete product error: ' + str(e))
        flash('An error happened while trying to delete this product', 'danger')
        return redirect(request.referrer)
    for sbe in sub_product_entries:
        services.sub_product_service.delete(sbe)
    '''
    extra = ''
    if request.GET.get('set'):
        # Reuse the freed number: point the prefix back at this GTIN.
        prefix.starting_from = gtin[1:14]
        prefix_service.save(user=request.user, prefix=prefix)
        extra = " Prefix's starting number set to deleted product's GTIN"
    product.delete()
    flash(request, 'Product deleted successfully.' + extra, 'success')
    return redirect(reverse('products:products_list'))
@user_agreement_required
def duplicate_product(request, product_id, target_market):
    """Clone a product into another target market, or just switch markets.

    Behaviour depends on how *target_market* relates to the product:
    * not attached yet -> attach it and make it current;
    * attached but not current -> make it current;
    * already current -> clone the whole product (base units only) onto
      the prefix's next available GTIN and copy its market links.
    Redirects to the full-edit page, or the product list on access errors.
    """
    product = Product.service.get_my_product(request.user, product_id)
    user_active_prefix = request.user.profile.product_active_prefix
    prefix = prefix_service.find_item(user=request.user, prefix=product.gs1_company_prefix)
    if not prefix:
        raise Http404()
    if prefix != user_active_prefix:
        flash(request, 'This product is not in your active prefix', 'danger')
        # Bug fix: URL names use the 'namespace:name' form everywhere else
        # in this module; the dotted form raises NoReverseMatch.
        return redirect(reverse('products:products_list_js'))
    if prefix.is_special == 'READ-ONLY':
        flash(request, 'This product belongs to read only range', 'danger')
        return redirect(reverse('products:products_list_js'))
    # Is the requested market already attached to this product?
    clone_fl = True
    target_markets = gtin_target_market_service.get_target_markets_all(product)
    for item in target_markets:
        if item.target_market.code == target_market:
            clone_fl = False
            break
    # Add new target market
    if clone_fl:
        gtin_target_market_service.add_target_market(product, target_market)
        target_market_record = target_market_service.find_by_code(target_market)
        product.target_market = target_market_record
        product.save()
        return redirect(reverse('products:fulledit', args=(product_id,)))
    # Swap target market form <-> others
    if product.target_market.code != target_market:
        target_market_record = target_market_service.find_by_code(target_market)
        product.target_market = target_market_record
        product.save()
        return redirect(reverse('products:fulledit', args=(product_id,)))
    # Market already current: clone the product onto the next free number.
    if not prefix.starting_from:
        flash(request,
              'The next available number is not available or you have exhausted this prefix.'
              ' Product not cloned. To licence an additional company prefix please'
              ' go to the <a href="http://www.gs1ie.organisation/Members-Area">Members Area</a>'
              ' of the GS1 Ireland website.',
              'danger')
        return redirect(reverse('products:fulledit', args=(product_id,)))
    if product.package_level_id != PackageLevel.BASE:
        flash(request, 'You can only clone base unit/each products', 'danger')
        return redirect(reverse('products:fulledit', args=(product_id,)))
    # Re-use the fetched instance as the clone: clearing the pk makes the
    # subsequent save INSERT a new row.
    product.id = None
    product.barcodes = []
    product.gtin = product.gtin[0:1] + prefix.starting_from
    product.description = '[CLONED] ' + product.description
    try:
        Product.service.save(product)
    except Exception as e:
        logging.getLogger().error('Product clone error: ' + str(e))
        # Bug fix: message capitalisation ('AN' -> 'An').
        flash(request, 'An error occurred while trying to clone this product', 'danger')
        return redirect(reverse('products:fulledit', args=(product_id,)))
    # Update prefix
    try:
        prefix.increment_starting_from()
        # NOTE(review): other call sites pass user=request.user to
        # prefix_service.save(); confirm this positional form is intended.
        prefix_service.save(prefix)
    except Exception as e:
        flash(request, str(e), 'danger')
    # product.id now holds the clone's pk, so re-fetch the original by id.
    product_orig = Product.service.get_my_product(request.user, product_id)
    gtin_target_market_service.clone_product(product_orig, product)
    if request.GET.get('fulledit_js'):
        return redirect(reverse('products:fulledit_js', args=(product.id,)))
    else:
        return redirect(reverse('products:fulledit', args=(product.id,)))
| [
"root@ip-172-31-29-10.ap-south-1.compute.internal"
] | root@ip-172-31-29-10.ap-south-1.compute.internal |
a458fcba14a9b526fea72c863cbaf95925bb15fd | 5c5b34f6f598a43ddfbd473228737a27c26d1d8e | /contest/第 16 场双周赛/5153. 层数最深叶子节点的和.py | 9c625b10b34a8d937c7c013c9da14a932474e7f8 | [] | no_license | lovehhf/LeetCode | 34a1bc140b10dc83a32ef9a70f9c73176948a9c4 | 5d3574ccd282d0146c83c286ae28d8baaabd4910 | refs/heads/master | 2021-11-04T04:52:34.518621 | 2021-10-26T15:34:47 | 2021-10-26T15:34:47 | 173,673,492 | 0 | 0 | null | 2020-03-03T14:54:09 | 2019-03-04T04:26:15 | Python | UTF-8 | Python | false | false | 2,252 | py | # -*- coding:utf-8 -*-
"""
给你一棵二叉树,请你返回层数最深的叶子节点的和。
示例:
输入:root = [1,2,3,4,5,null,6,7,null,null,null,null,8]
输出:15
提示:
树中节点数目在 1 到 10^4 之间。
每个节点的值在 1 到 100 之间。
mid
dl的dfs解法:
class Solution {
public:
int md, res;
void dfs(TreeNode* x , int dep) {
if (x == NULL) return;
if (x->left == NULL && x->right == NULL) {
if (dep > md) {
res = 0;
md = dep;
}
if (dep == md) {
res += x->val;
}
} else {
dfs(x->left, dep + 1);
dfs(x->right, dep + 1);
}
}
int deepestLeavesSum(TreeNode* root) {
md = -1;
res = 0;
dfs(root , 0);
return res;
}
};
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from utils.TreeNode import TreeNode
class Solution:
    """Iterative BFS solution for LeetCode 1302 (deepest leaves sum)."""

    def deepestLeavesSum(self, root: "TreeNode") -> int:
        """Return the sum of the values of the deepest leaves.

        Level-order traversal using collections.deque (O(1) pops instead
        of the O(n) list.pop(0) of the original), with per-level totals in
        a dict so there is no fixed depth limit.  An empty tree sums to 0.
        """
        from collections import deque
        if root is None:
            return 0
        level_sums = {}
        level = 0
        queue = deque([(root, 0)])
        while queue:
            node, level = queue.popleft()
            if node.left:
                queue.append((node.left, level + 1))
            if node.right:
                queue.append((node.right, level + 1))
            if not node.left and not node.right:
                level_sums[level] = level_sums.get(level, 0) + node.val
        # The last node dequeued sits on the deepest level and, having no
        # children, is a leaf - so its level is guaranteed to be in the map.
        return level_sums[level]
class DFS_Solution:
    """Recursive DFS solution: track the max depth and a running sum of
    the leaves found at that depth."""

    def __init__(self):
        self.max_depth = 0   # deepest level seen so far
        self.res = 0         # sum of leaf values at self.max_depth

    def dfs(self, node, depth):
        """Walk the subtree rooted at *node*, updating max_depth/res."""
        if not node:
            return
        if not node.left and not node.right:
            # Leaf: the parent already raised max_depth to cover this depth,
            # so equality means this leaf sits on the deepest known level.
            if depth == self.max_depth:
                self.res += node.val
        else:
            if depth + 1 > self.max_depth:
                # Descending past the previous record depth invalidates
                # the sum accumulated for the shallower level.
                self.max_depth = depth + 1
                self.res = 0
            self.dfs(node.left, depth + 1)
            self.dfs(node.right, depth + 1)

    def deepestLeavesSum(self, root: "TreeNode") -> int:
        """Return the sum of the deepest leaves' values.

        Bug fix: reset the accumulated state first, so the same instance
        can be reused for more than one tree (previously a second call
        kept the stale max_depth/res and returned a wrong answer).
        """
        self.max_depth = 0
        self.res = 0
        self.dfs(root, 0)
        return self.res
| [
"853885165@qq.com"
] | 853885165@qq.com |
6ccb5f9dd768379c7dbc65f8f3782ebb2724ba65 | ecd4b06d5d5368b71fd72a1c2191510a03b728fd | /10 - python-data-science-toolbox-part-2/13 - write your own generator expressions.py | d2c273692fdfcb03da16650e1b4cafb4b11fe05a | [
"MIT"
] | permissive | Baidaly/datacamp-samples | 86055db5e326b59bfdce732729c80d76bf44629e | 37b4f78a967a429e0abca4a568da0eb9d58e4dff | refs/heads/master | 2022-07-27T01:18:00.700386 | 2022-07-18T19:27:23 | 2022-07-18T19:27:23 | 123,827,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,029 | py | '''
You are familiar with what generators and generator expressions are, as well as its difference from list comprehensions. In this exercise, you will practice building generator expressions on your own.
Recall that generator expressions basically have the same syntax as list comprehensions, except that it uses parentheses () instead of brackets []; this should make things feel familiar! Furthermore, if you have ever iterated over a dictionary with .items(), or used the range() function, for example, you have already encountered and used generators before, without knowing it! When you use these functions, Python creates generators for you behind the scenes.
Now, you will start simple by creating a generator object that produces numeric values.
'''
# Generator expression yielding the numbers 0..30 lazily.
result = (num for num in range(31))

# Pull the first five values off the generator one at a time.
for _ in range(5):
    print(next(result))

# The generator remembers its position: exhaust whatever is left.
for value in result:
    print(value)
| [
"daulet.urazalinov@uptake.com"
] | daulet.urazalinov@uptake.com |
1f1c0fa57670131ce843fb8fd1fff22ae434970c | 8cc862aa51d3fec95d094dc4bd3151e1155d240a | /PythonSpider/toutiao/jiepai.py | 6112f808f2392f33cd7d22605d53420e6247c8a4 | [] | no_license | activehuahua/python | bcbf3a2190025e2315399bfd0c725f598211632b | cc36a93c01c53f856426ccf2724848142524d9c0 | refs/heads/master | 2023-04-14T10:23:21.590765 | 2019-08-12T06:52:15 | 2019-08-12T06:52:15 | 160,277,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,174 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
'''
@Author : zhaojianghua
@File : jiepai.py
@Time : 2019/1/2 10:45
@desc :
街拍网址: https://www.toutiao.com/search/?keyword=%E8%A1%97%E6%8B%8D
'''
import requests
from urllib.parse import urlencode
import pprint,os
from hashlib import md5
from multiprocessing import Pool
def get_page(offset):
    """Fetch one page of Toutiao street-photo search results.

    Returns the parsed JSON body on HTTP 200, otherwise None (including
    on connection failures).
    """
    query = {
        'offset': offset,
        'format': 'json',
        'keyword': '街拍',
        'autoload': 'true',
        'count': 20,
        'cur_tab': 1,
    }
    try:
        response = requests.get('https://www.toutiao.com/search_content/?' + urlencode(query))
    except requests.ConnectionError:
        return None
    if response.status_code == 200:
        return response.json()
def get_images(json):
    """Yield {'image', 'title'} dicts for every image on a result page.

    Items without an image_list are skipped; protocol-relative URLs are
    prefixed with 'http:'.
    """
    for item in json.get('data') or []:
        caption = item.get('title')
        for image in item.get('image_list') or []:
            yield {
                'image': 'http:' + image.get('url'),
                'title': caption,
            }
def save_image(item):
    """Download item['image'] into ./img/<title>/<md5>.jpg, best effort."""
    # NOTE(review): concatenating inside a single-argument os.path.join
    # defeats its purpose; this works only because the pieces already
    # carry their separators.
    dir= os.path.join(os.getcwd()+'/img/'+item.get('title'))
    print(dir)
    try:
        if not os.path.exists(dir) :
            os.makedirs(dir)
        response =requests.get(item.get('image'))
        if response.status_code == 200 :
            # Name the file after the MD5 of its bytes to deduplicate.
            file_path ='{0}/{1}.{2}'.format(dir,md5(response.content).hexdigest(),'jpg')
            if not os.path.exists(file_path):
                with open(file_path,'wb') as f :
                    f.write(response.content)
            else :
                print('Already Download ',file_path)
    except requests.ConnectionError:
        print('Failed to save Image')
    except Exception :
        # Swallow anything else (e.g. titles with characters invalid in
        # paths) so the crawl keeps going.
        pass
def main(offset):
    """Download every street-photo image found on one result page."""
    page = get_page(offset)
    for entry in get_images(page):
        print(entry)
        save_image(entry)
# Page range to crawl; offsets are multiples of 20 (the per-request count).
GROUP_START =1
GROUP_END=20
if __name__ == '__main__':
    # Fan the offsets out over a worker pool, one page per task.
    pool=Pool()
    groups =([x * 20 for x in range(GROUP_START,GROUP_END+1)])
    pool.map(main,groups)
    pool.close()
    pool.join()
| [
"zhaojianghua@pretang.com"
] | zhaojianghua@pretang.com |
8c77c556534fee53c2d8b3f8323b07fa4aa34f7a | 17b9f098d783b58a65a2f4a2d51c7d1ae19285cf | /Mayordomo.py | 88dc5fde4a0f1885bfec2efdae1dc685064bf827 | [
"MIT"
] | permissive | elenajimenezm/Mayordomo | ea17a3168f25f4648910a71aece478155dffabd3 | da5e8746ee41906eb60c8626b5de2db8e111ad83 | refs/heads/master | 2021-07-25T23:32:15.382348 | 2017-11-09T22:51:38 | 2017-11-09T22:51:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,650 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import paho.mqtt.client as paho
import json
import time
import uuid
import Queue
import subprocess
import unicodedata
# MQTT broker and topic layout for the voice-butler services.
MQTT_SERVER = 'localhost'
MAYORDOMO_TOPIC = 'rcr/Mayordomo'   # inbound commands for this process
SPEAK_TOPIC = 'rcr/Speak'           # text-to-speech service
PLAYER_TOPIC = 'rcr/MusicPlayer'    # music player service
DRONE_TOPIC = 'rcr/RollerSpider'    # Rolling Spider drone controller
DHT22_TOPIC = 'rcr/DHT22'           # temperature/humidity sensor readings
MINDSET_TOPIC = 'rcr/MindSet'       # MindSet neural-sensor services
MAX7219_TOPIC = 'rcr/Max7219'       # LED matrix display
NOISE_TOPIC = 'rcr/Ruido'           # noise-analysis service
S2_TOPIC = 'rcr/S2'                 # S2 robot controller
# Single-slot queue: only the newest unprocessed command is kept.
messages = Queue.Queue( 1 )
# Latest raw DHT22 payload (JSON string), or None before the first reading.
data_dht22 = None
def _publish( topic, msg ):
    """Publish *msg* to *topic* on the shared MQTT client (set up in main())."""
    mqtt_client.publish( topic, msg )

# The seven senders below all followed the same copy-pasted pattern;
# they now delegate to _publish() while keeping their public names.

def sendToSpeak( msg ):
    """Send a phrase to the text-to-speech service."""
    _publish( SPEAK_TOPIC, msg )

def sendToMusicPlayer( msg ):
    """Send a command to the music player service."""
    _publish( PLAYER_TOPIC, msg )

def sendToDrone( msg ):
    """Send a command to the Rolling Spider drone controller."""
    _publish( DRONE_TOPIC, msg )

def sendToMindSet( msg ):
    """Send a command to the MindSet neural-sensor services."""
    _publish( MINDSET_TOPIC, msg )

def sendToMax7219( msg ):
    """Send a digit to the LED matrix display."""
    _publish( MAX7219_TOPIC, msg )

def sendToNoise( msg ):
    """Send a command to the noise-analysis service."""
    _publish( NOISE_TOPIC, msg )

def sendToS2( msg ):
    """Send a command to the S2 robot controller."""
    _publish( S2_TOPIC, msg )
def mqtt_on_message( client, userdata, message ):
    """MQTT callback: record DHT22 readings, queue commands for main()."""
    global messages, data_dht22
    # DHT22 sensor reading: just remember the latest payload.
    if( message.topic == DHT22_TOPIC ):
        data_dht22 = message.payload
        return
    # Butler command.
    # If the previous message was never processed, discard it first
    # (the queue holds a single slot and the newest command wins).
    try:
        messages.get_nowait()
    except Queue.Empty:
        pass
    # Enqueue the new message; Full can still race, in which case drop it.
    try:
        messages.put_nowait( message )
    except Queue.Full:
        pass
def mqtt_on_connect( client, arg1, arg2, arg3 ):
    """MQTT callback: subscribe to the command and sensor topics on connect."""
    for topic in ( MAYORDOMO_TOPIC, DHT22_TOPIC ):
        client.subscribe( topic )
    print( "[Mayordomo] Esperando en %s - %s" % ( MQTT_SERVER, MAYORDOMO_TOPIC ) )
def main():
    """Start helper services, connect to MQTT and run the command loop.

    Blocks on the single-slot ``messages`` queue; each payload is
    normalised (lower-cased, accents stripped, verb forms unified) and
    dispatched to the matching service.  Loops until the command
    'finaliza sistema' arrives.
    """
    global mqtt_client, MQTT_SERVER, messages, data_dht22
    print( '[Mayordomo] Iniciando sistema' )
    subprocess.Popen( '/bin/sh ./Speak.sh', shell=True )
    subprocess.Popen( '/usr/bin/python ./MusicPlayer/MusicPlayer.py', shell=True )
    mqtt_client = paho.Client( 'Mayordomo-' + uuid.uuid4().hex )
    mqtt_client.on_connect = mqtt_on_connect
    mqtt_client.on_message = mqtt_on_message
    mqtt_client.connect( MQTT_SERVER, 1883 )
    mqtt_client.loop_start()
    time.sleep( 2 )
    sendToSpeak( ' ' )
    sendToSpeak( ' Sistema inicializado' )
    abort = False
    while( not abort ):
        message = messages.get()
        # Normalise the payload (assumed UTF-8): lower-case, strip accents
        # (NFD decomposition, drop combining marks) and unify verb forms
        # so the comparisons below only need a single spelling.
        cmd = message.payload.decode('utf-8').lower()
        cmd = ''.join((c for c in unicodedata.normalize('NFD', cmd) if unicodedata.category(c) != 'Mn'))
        cmd = cmd.replace( 'mary', 'mari' )
        cmd = cmd.replace( 'detener', 'deten' )
        cmd = cmd.replace( 'tocar', 'toca' )
        cmd = cmd.replace( 'pausar', 'pausa' )
        cmd = cmd.replace( 'iniciar', 'inicia' )
        cmd = cmd.replace( 'finalizar', 'finaliza' )
        cmd = cmd.replace( 'mostrar', 'muestra' )
        cmd = cmd.replace( 'robots', 'robot' )
        cmd = cmd.replace( 'conectar', 'conecta' )
        cmd = cmd.replace( 'desconectar', 'desconecta' )
        print( "[Mayordomo] Mensaje recibido:", message.payload, "<<" + cmd + ">>" )
        # Local / built-in commands
        if( cmd == 'finaliza sistema' ):
            abort = True
        elif( cmd == 'mari' ):
            sendToSpeak( 'Dime Padre' )
        elif( cmd == 'que hora es' ):
            now = time.localtime()
            sendToSpeak( 'son las %d horas con %d minutos' % (now.tm_hour, now.tm_min) )
        elif( cmd == 'conversemos' ):
            now = time.localtime()
            sendToSpeak( 'de que deseas conversar?' )
        # MusicPlayer
        elif( cmd == 'toca musica' ):
            sendToMusicPlayer( 'play' )
        elif( cmd == 'deten musica' ):
            sendToMusicPlayer( 'stop' )
        elif( cmd == 'pausa musica' ):
            sendToMusicPlayer( 'pause' )
        elif( cmd == 'tema siguiente' ):
            sendToMusicPlayer( 'next' )
        elif( cmd == 'tema anterior' ):
            sendToMusicPlayer( 'previous' )
        elif( cmd == 'quien canta' ):
            sendToMusicPlayer( 'songtitle' )
        # DroneRollerSpider
        elif( cmd == 'inicia spider' ):
            subprocess.Popen( '/usr/bin/python ./DroneRollerSpider/DroneRollerSpider.py', shell=True )
        elif( cmd == 'finaliza spider' ):
            sendToDrone( 'exit' )
        elif( cmd == 'conecta spider' ):
            sendToDrone( 'connect' )
        # NOTE(review): the second comparison can never match - the
        # replace() above already rewrote 'desconectar' to 'desconecta'.
        elif( cmd == 'desconecta spider' or cmd =='desconectar spyder' ):
            sendToDrone( 'disconnect' )
        elif( cmd == 'sube spider' ):
            sendToDrone( 'takeoff' )
        elif( cmd == 'baja spider' ):
            sendToDrone( 'land' )
        elif( cmd == 'gira spider' ):
            # Ten small left turns, spaced 100 ms apart.
            for i in range( 10 ):
                sendToDrone( 'turn_left' )
                time.sleep( 0.100 )
        # MindSet
        elif( cmd == 'inicia sensor neuronal' ):
            subprocess.Popen( '/usr/bin/python ./MindSet/MindSetPub.py', shell=True )
            subprocess.Popen( '/usr/bin/python ./MindSet/MindSetGraphics.py', shell=True )
            subprocess.Popen( '/usr/bin/python ./MindSet/MindSetMusic.py', shell=True )
        elif( cmd == 'finaliza sensor neuronal' ):
            sendToMindSet( 'exit' )
        # DHT22 (data arrives asynchronously via mqtt_on_message)
        elif( cmd == 'temperatura' ):
            if( data_dht22 == None ):
                sendToSpeak( 'No tengo datos de temperatura' )
            else:
                d = data_dht22
                d = json.loads( d )
                sendToSpeak( 'La Temperatura es de %3.1f grados' % ( d["temperatura"] ) )
        elif( cmd == 'humedad' ):
            if( data_dht22 == None ):
                sendToSpeak( 'No tengo datos de humedad' )
            else:
                d = data_dht22
                d = json.loads( d )
                sendToSpeak( 'La humedad es de un %3.1f por ciento' % ( d["humedad"] ) )
        # Max7219 LED matrix: 'muestra <digit>' (exactly one character)
        elif( cmd.startswith( 'muestra ' ) and len( cmd ) == 9 ):
            try:
                digit = int( cmd[8] )
                sendToSpeak( "Mostrando un %d en la matriz" % digit )
                sendToMax7219( str( digit ) )
            except Exception as e:
                # Non-numeric character after 'muestra ': ignore silently.
                pass
        # Noise sensor
        elif( cmd == 'inicia analisis de ruido' ):
            subprocess.Popen( '/usr/bin/python ./Noise/NoiseGraphics.py', shell=True )
        elif( cmd == 'finaliza analisis de ruido' ):
            sendToNoise( 'exit' )
        # S2 robot
        elif( cmd == 'inicia control de robot' ):
            subprocess.Popen( '/usr/bin/python ./S2/S2.py', shell=True )
        elif( cmd == 'nombre de robot' ):
            sendToS2( 'name' )
        elif( cmd == 'robot izquierda' ):
            sendToS2( 'left 1' )
        elif( cmd == 'robot derecha' ):
            sendToS2( 'right 1' )
        elif( cmd == 'robot avanza' ):
            sendToS2( 'forward 5' )
        elif( cmd == 'robot retrocede' ):
            sendToS2( 'backward 5' )
        elif( cmd == 'finaliza control de robot' ):
            sendToS2( 'exit' )
    sendToSpeak( 'Sistema finalizado' )
    time.sleep( 2 )
    mqtt_client.loop_stop()
    print( '[Mayordomo] Sistema finalizado' )
# -- Script entry point: runs until a 'finaliza sistema' command arrives.
main()
| [
"titos.carrasco@gmail.com"
] | titos.carrasco@gmail.com |
f8849681bf0f73b561cd28da56bec644274b35b2 | 0e99d2efff685a66869d5a7cd4a68de8955f498c | /baseproblems/maxSerise.py | 59e500dd6acc33c4ffc189b1fcf29faf76b97e71 | [] | no_license | supercp3/code_leetcode | f303109c70ccdd0baa711cf606d402158b212525 | 1dc6260e229a012111ec4d5e60071c2458ce5002 | refs/heads/master | 2020-03-26T11:33:28.741405 | 2018-10-15T02:18:24 | 2018-10-15T02:18:24 | 144,848,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | '''
最大连续子序列的和
'''
def maxlist(data):
    """Return the maximum sum over all contiguous subarrays of ``data``.

    Matches the original brute-force semantics: the result is floored at 0,
    so an empty input or an all-negative input yields 0.

    This replaces the accidental O(n^2) double loop with Kadane's algorithm,
    which is O(n) and produces identical results.

    :param data: sequence of numbers
    :return: largest contiguous-subarray sum, or 0 if no positive sum exists
    """
    best = 0      # best sum seen so far (floored at 0, like the original)
    current = 0   # best sum of a subarray ending at the current element
    for value in data:
        current += value
        if current < 0:
            # a negative running sum can never help a later subarray
            current = 0
        if current > best:
            best = current
    return best
if __name__=="__main__":
data=[1,2,3,-2,3,-10,3]
res=maxlist(data)
print(res) | [
"13281099@bjtu.edu.cn"
] | 13281099@bjtu.edu.cn |
6ba0d69d629dad7ff9c362d2aaa88f0ed544dfe5 | 2fe1cc0cca927276a1f936e57c6427aa4210265a | /flasky/app/__init__.py | 33d1d993450feb757a5d81cf9576e679b99e4a20 | [] | no_license | D-Mbithi/flask-web-development-code | 57b29488f87ff76a0775f16965f8df906d517b5f | 98d24e498372be74a17b7451b46ed1bb22093a8d | refs/heads/master | 2022-12-12T17:13:11.337107 | 2019-12-02T16:42:47 | 2019-12-02T16:42:47 | 225,421,531 | 0 | 0 | null | 2022-09-16T18:14:07 | 2019-12-02T16:36:23 | JavaScript | UTF-8 | Python | false | false | 974 | py | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_debugtoolbar import DebugToolbarExtension
from config import config
bootstrap = Bootstrap()
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
moment = Moment()
db = SQLAlchemy()
mail = Mail()
toolbar = DebugToolbarExtension()
def create_app(config_name):
    """Application factory: build and return a configured Flask app.

    :param config_name: key into the ``config`` dict from ``config.py``
                        (e.g. 'development', 'testing', 'production')
    :return: a fully initialized Flask instance
    """
    app = Flask(__name__)
    # load the selected configuration object, then let it run any extra setup
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    # bind the module-level extension singletons to this app instance
    bootstrap.init_app(app)
    login_manager.init_app(app)
    mail.init_app(app)
    moment.init_app(app)
    db.init_app(app)
    toolbar.init_app(app)

    # blueprints are imported here (not at module level) to avoid circular imports
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    from .auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')
    return app
| [
"jonas@devlieghere.com"
] | jonas@devlieghere.com |
e47367f80b9ab84975e9b2f5f3ecdfcd1a28d9e8 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_135/562.py | 3640e88ff78b2a491484cb555a0a0c269267b59a | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,695 | py | from sys import *
from heapq import *
from time import time
from multiprocessing import Pool
from collections import *
import itertools
from copy import deepcopy
from bisect import *
setrecursionlimit(10000)
from math import *
def readint():
    # Read one line from the global input file handle `fi` and parse it as an int.
    return int(fi.readline())
def readints():
    # Read one line from the global `fi` and parse it as whitespace-separated ints.
    return [int(X) for X in fi.readline().split()]
def readstr():
    # Read one line from the global `fi` without its trailing newline/whitespace.
    return fi.readline().rstrip()
def read_case():
    # One test case of the "magic trick" problem: a chosen row number (1-4)
    # followed by a 4x4 grid, given twice (two arrangements of the cards).
    a1 = readint()
    g1 = [readints() for X in range(4)]
    a2 = readint()
    g2 = [readints() for X in range(4)]
    return (a1, g1, a2, g2)
def solve_case(a1, g1, a2, g2):
    """Resolve one round of the card trick.

    The chosen card must lie in row ``a1`` of the first grid and row ``a2``
    of the second, so it is in the intersection of those two rows:
    empty -> the volunteer lied, one card -> that is the answer,
    several -> the magician cannot decide.
    """
    candidates = set(g1[a1 - 1]).intersection(g2[a2 - 1])
    if not candidates:
        return "Volunteer cheated!"
    if len(candidates) == 1:
        return candidates.pop()
    return "Bad magician!"
def print_solution(case):
    # `case` is a (1-based case index, case-tuple) pair.  Solve it, echo the
    # formatted line to stdout for progress, and return it so the caller can
    # join all lines into the output file.  (Python 2 print statement.)
    val = solve_case(*case[1])
    msg = "Case #{}: {}".format(case[0], val)
    print msg
    return msg
# --- Code Jam driver (Python 2) ---------------------------------------------
# Times an (empty) initialisation phase, then reads "<argv1>-<argv2>-<argv3>.in"
# and writes the solved cases to the matching ".out" file.
t0 = time()
# Initialisation here
t1 = time()
print "Intialisation took %f seconds" % (t1 - t0)
# raw_input("Press enter when the input file has been downloaded: ")
if __name__ == '__main__':
    fni = "%s-%s-%s.in" % (argv[1], argv[2], argv[3])
    fno = "%s-%s-%s.out" % (argv[1], argv[2], argv[3])
    fi = open(fni, 'r')
    fo = open(fno, 'w')
    numcases = readint()
    # pair each case with its 1-based index for "Case #i:" formatting
    cases = [(I, read_case()) for I in range(1, 1 + numcases)]
    mt = False  # flip to True to fan the cases out over a process pool
    if mt:
        print "Running multi-threaded"
        p = Pool(8)
        fo.write('\n'.join(p.map(print_solution, cases)))
    else:
        print "Running single-threaded"
        fo.write('\n'.join(map(print_solution, cases)))
    print "Elapsed time %f seconds " % (time() - t1)
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
2f6984f86dbb761041f432d70be05ec86d3e84f6 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_12_01/aio/operations/_domain_registration_provider_operations.py | 1c663f674b0cbfd5b8e1d5930de10fb86e0d22d9 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 5,710 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._domain_registration_provider_operations import build_list_operations_request
from .._vendor import MixinABC
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DomainRegistrationProviderOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.web.v2020_12_01.aio.WebSiteManagementClient`'s
        :attr:`domain_registration_provider` attribute.
    """
    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # Wiring injected by the generated client: the pipeline client, the
        # client configuration and the (de)serializers, passed either
        # positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list_operations(
        self,
        **kwargs: Any
    ) -> AsyncIterable[_models.CsmOperationCollection]:
        """Implements Csm operations Api to exposes the list of available Csm Apis under the resource
        provider.

        Implements Csm operations Api to exposes the list of available Csm Apis under the resource
        provider.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CsmOperationCollection or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.CsmOperationCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
        cls = kwargs.pop('cls', None)  # type: ClsType[_models.CsmOperationCollection]

        # map well-known status codes to typed exceptions; callers may extend
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}) or {})

        def prepare_request(next_link=None):
            # First page uses the templated URL; later pages follow the
            # server-supplied next_link verbatim (forced to GET below).
            if not next_link:
                request = build_list_operations_request(
                    api_version=api_version,
                    template_url=self.list_operations.metadata['url'],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
            else:
                request = build_list_operations_request(
                    api_version=api_version,
                    template_url=next_link,
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next-page link, items).
            deserialized = self._deserialize("CsmOperationCollection", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping HTTP errors to typed ARM exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
                request,
                stream=False,
                **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_operations.metadata = {'url': "/providers/Microsoft.DomainRegistration/operations"}  # type: ignore
| [
"noreply@github.com"
] | kurtzeborn.noreply@github.com |
a2bc95d947a2921c249b500c16c2edac2b571ce7 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R4/benchmark/startQiskit_QC109.py | a8e43bd444135748ce1444508cc41f1ed30f64a6 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,565 | py | # qubit number=3
# total number=10
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n: int) -> QuantumCircuit:
    """Build the benchmark circuit on an n-qubit register.

    NOTE(review): the QAOA-style loop below reads the module-level names
    E, V, gamma and beta, which are only assigned inside the __main__ block,
    so this function only works when called after those globals exist --
    confirm before reusing it elsewhere.  The "# number=..." comments appear
    to be gate identifiers from the generating tool and are kept verbatim.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    prog = QuantumCircuit(input_qubit)
    prog.h(input_qubit[0]) # number=1
    prog.h(input_qubit[1]) # number=2
    prog.rx(2.9845130209103035,input_qubit[2]) # number=7
    prog.h(input_qubit[2]) # number=3
    prog.x(input_qubit[2]) # number=6
    prog.h(input_qubit[3]) # number=4
    prog.y(input_qubit[3]) # number=5

    # cost layer: phase gates for every weighted graph edge (globals E, gamma)
    for edge in E:
        k = edge[0]
        l = edge[1]
        prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
        prog.p(gamma, k)
        prog.p(gamma, l)

    # mixer layer over all vertices (globals V, beta)
    prog.rx(2 * beta, range(len(V)))

    # two identical CX gates back-to-back -- presumably injected by the
    # mutation tooling; their net effect is the identity
    prog.cx(input_qubit[1],input_qubit[0]) # number=8
    prog.cx(input_qubit[1],input_qubit[0]) # number=9
    # circuit end
    return prog
if __name__ == '__main__':
    # Build a small weighted graph and grid-search the (gamma, beta) pair
    # that maximizes the analytic objective F1, then run the circuit on the
    # real IBMQ "yorktown" backend and dump the counts to a CSV file.
    n = 4
    V = np.arange(0, n, 1)
    E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]

    G = nx.Graph()
    G.add_nodes_from(V)
    G.add_weighted_edges_from(E)

    step_size = 0.1

    a_gamma = np.arange(0, np.pi, step_size)
    a_beta = np.arange(0, np.pi, step_size)
    a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)

    F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
        1 + np.cos(4 * a_gamma) ** 2)

    # pick the first grid point attaining the maximum of F1
    result = np.where(F1 == np.amax(F1))
    a = list(zip(result[0], result[1]))[0]

    gamma = a[0] * step_size
    beta = a[1] * step_size

    prog = make_circuit(4)
    sample_shot =5600
    writefile = open("../data/startQiskit_QC109.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    IBMQ.load_account()
    provider = IBMQ.get_provider(hub='ibm-q')
    provider.backends()
    backend = provider.get_backend("ibmq_5_yorktown")

    # transpile against the fake backend's coupling map, then measure all qubits
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.measure_all()
    prog = circuit1

    info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()

    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
de6926e063452533074ff6429ef970fc7829e000 | 035730cf12c43f59b76d9809e444b9070c3e5732 | /BOJ_16197.py | 8b217b46d4491a74e861471fd3f2c11ac628f378 | [] | no_license | kimhaggie/Coding_practice | e18153838425874b80a683094369a6dfb8836c93 | a4f2732e5d7a63adae990226073333b88324765a | refs/heads/master | 2023-08-01T11:33:54.071564 | 2021-09-07T14:40:56 | 2021-09-07T14:40:56 | 310,264,349 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,333 | py | #16197
import sys
from collections import deque
import math
dx = [1,-1,0,0]
dy = [0,0,1,-1]
def move(direction,ball1,ball2,m):
    """Tilt the board once, sliding both coins one cell.

    The original spelled out four nearly identical 18-line branches (one per
    direction); they are collapsed here into a single offset-table loop with
    identical behaviour.

    :param direction: 0 = right, 1 = left, 2 = up, 3 = down.  (The
        module-level ``dx``/``dy`` lists are NOT used by this function; its
        direction mapping is self-contained.)
    :param ball1: [y, x] position of the first coin
    :param ball2: [y, x] position of the second coin
    :param m: board grid; ``'#'`` cells are walls, anything else is passable
    :return: ``[fell1, fell2, [[y1, x1], [y2, x2]]]`` where ``fellN`` is True
        when coin N moved off the board (its returned coordinates are then
        the out-of-bounds cell, matching the original), and a coin blocked
        by a wall keeps its old position.
    """
    N = len(m)
    M = len(m[0])
    # (row delta, col delta) for right, left, up, down
    deltas = ((0, 1), (0, -1), (-1, 0), (1, 0))
    dy_step, dx_step = deltas[direction]

    flags = []
    positions = []
    for y, x in (ball1, ball2):
        ny, nx = y + dy_step, x + dx_step
        fell = not (0 <= ny < N and 0 <= nx < M)
        if not fell and m[ny][nx] == '#':
            # a wall blocks the slide: the coin stays where it was
            ny, nx = y, x
        flags.append(fell)
        positions.append([ny, nx])
    return [flags[0], flags[1], positions]
def BFS(ball1,ball2,m):
    """Breadth-first search over (coin1, coin2) position pairs.

    Tries all four tilt directions from every frontier state.  Prints the
    number of tilts needed to drop exactly one coin off the board and exits;
    prints -1 and exits if that cannot happen within 10 tilts.  Relies on
    the module-level ``move()`` helper and terminates via ``sys.exit``.
    """
    target = [[ball1,ball2]]  # frontier: list of [pos1, pos2] states
    step = 1
    while target:
        if step==11:
            # more than 10 tilts would be required -> answer is -1
            print(-1)
            sys.exit()
        new_target = []
        while target:
            cur = target.pop()
            cur1 = cur[0]
            cur2 = cur[1]
            for idx in range(4):
                result = move(idx,cur1,cur2,m)
                # success: exactly one of the two coins fell off
                if (result[0] and not result[1]) or (not result[0] and result[1]):
                    print(step)
                    sys.exit()
                # keep the state only if both coins survived and did not
                # land on the same cell; de-duplicate the next frontier
                if result[2][0]!=result[2][1] and not result[0] and not result[1]:
                    if not result[2] in new_target:
                        new_target.append(result[2])
        target = new_target
        step+=1
# --- input parsing -----------------------------------------------------------
# First line: board height N and width M.  Next N lines: the board itself
# ('#' wall, '.' empty, 'o' coin -- the board contains exactly two coins).
N,M = map(int,sys.stdin.readline().rstrip('\n').split(' '))
m = []
for _ in range(N):
    m.append(list(sys.stdin.readline().rstrip('\n')))
# locate the two coins: first one found becomes ball1, the second ball2
ball1 = []
ball2 = []
for i in range(N):
    for j in range(M):
        if m[i][j] == 'o':
            if len(ball1)==0:
                ball1 = [i,j]
            else:
                ball2 = [i,j]
BFS(ball1,ball2,m) | [
"kimhaggie@gmail.com"
] | kimhaggie@gmail.com |
3f2d538e2a917016702fee2504b2099156eb05df | eeeb3e85de712e71417630035417e6cf0d3a1da4 | /LSTM-GRU/gen-queries.py | df92915b1371e5e6b9078642d04d5215b7a98769 | [] | no_license | VNGResearch/RNN---QA | 21029647940cac97d628b8d5a25ae68dcbd226b7 | dc54c5d99e2a56cc981d53d89e0fcdaf6804dba9 | refs/heads/master | 2020-04-05T10:39:58.896752 | 2017-07-21T01:27:24 | 2017-07-21T01:27:24 | 81,531,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 776 | py | from argparse import ArgumentParser
import json
import numpy as np
# --- CLI: how many questions to sample and where to write them ---------------
parser = ArgumentParser()
parser.add_argument('-l', '--length', help='The number of generated queries. Defaults to 100',
                    dest='len', default=100, type=int)
parser.add_argument('-o', help='The output file. Defaults to queries.txt',
                    dest='outfile', default='queries.txt')
args = parser.parse_args()

# load the question/answer dump (nfL6 dataset)
infile = "data/nfL6.json"
with open(infile, 'r') as fi:
    data = json.load(fi)
    fi.close()  # NOTE(review): redundant -- the with-block already closes fi

# collect every question string, then keep a random sample of args.len of them
questions = []
for qa in data:
    questions.append(qa['question'])
questions = np.random.permutation(questions)[:args.len]

print('Questions generated:\n')
print('\n'.join(questions))

# write one question per line
with open(args.outfile, 'wt') as fo:
    fo.write('\n'.join(questions))
    fo.close()  # NOTE(review): redundant for the same reason
| [
"phan.ngoclan58@gmail.com"
] | phan.ngoclan58@gmail.com |
462cf61c1760e9be0261bc1c37b152eabaa6e850 | a66460a46611483dfbdc94c7996893f427e60d97 | /ansible/my_env/lib/python2.7/site-packages/ansible/module_utils/network/onyx/onyx.py | ad667e74dfd188880633b7c22c46ebaf4554c496 | [
"MIT"
] | permissive | otus-devops-2019-02/yyashkin_infra | 06b57807dde26f94f501828c07503d6bf1d70816 | 0cd0c003884155ac922e3e301305ac202de7028c | refs/heads/master | 2020-04-29T02:42:22.056724 | 2019-05-15T16:24:35 | 2019-05-15T16:24:35 | 175,780,718 | 0 | 0 | MIT | 2019-05-15T16:24:36 | 2019-03-15T08:37:35 | HCL | UTF-8 | Python | false | false | 6,984 | py | # -*- coding: utf-8 -*-
#
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.module_utils.network.common.utils import to_list, EntityCollection
_DEVICE_CONFIGS = {}
_CONNECTION = None
_COMMAND_SPEC = {
'command': dict(key=True),
'prompt': dict(),
'answer': dict()
}
def get_connection(module):
    # Lazily create -- and cache in the module-level _CONNECTION -- the
    # persistent connection to the device behind the module's socket path.
    global _CONNECTION
    if _CONNECTION:
        return _CONNECTION
    _CONNECTION = Connection(module._socket_path)
    return _CONNECTION
def to_commands(module, commands):
    # Normalize a list of raw command strings/dicts into the canonical
    # {command, prompt, answer} entities described by _COMMAND_SPEC.
    if not isinstance(commands, list):
        raise AssertionError('argument must be of type <list>')
    transform = EntityCollection(module, _COMMAND_SPEC)
    commands = transform(commands)
    return commands
def run_commands(module, commands, check_rc=True):
    # Execute each command on the device and return the textual responses.
    # NOTE(review): check_rc is accepted for API compatibility with other
    # network platforms but is not used here.
    connection = get_connection(module)
    commands = to_commands(module, to_list(commands))
    responses = list()
    for cmd in commands:
        out = connection.get(**cmd)
        responses.append(to_text(out, errors='surrogate_then_replace'))
    return responses
def get_config(module, source='running'):
    # Fetch the device configuration ('running' by default) as stripped text.
    conn = get_connection(module)
    out = conn.get_config(source)
    cfg = to_text(out, errors='surrogate_then_replace').strip()
    return cfg
def load_config(module, config):
    # Push configuration lines to the device; convert transport errors into
    # a standard Ansible module failure instead of a raw traceback.
    try:
        conn = get_connection(module)
        conn.edit_config(config)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
def _parse_json_output(out):
    """Trim CLI noise surrounding a JSON payload in command output.

    Scans forward for the first line that opens a JSON array/object and
    backward for the last line starting with the matching closer, then joins
    that span (without separators).  Returns "null" when no opener exists,
    and a bare "[]"/"{}" when an opener has no closing line.
    """
    lines = out.split('\n')
    total = len(lines)

    # forward scan: first line whose stripped text opens a JSON value
    start = 0
    opener = None
    while start < total:
        head = lines[start].strip()
        if head and head[0] in ("[", "{"):
            opener = head[0]
            break
        start += 1
    if opener is None:
        return "null"

    closer = "]" if opener == "[" else "}"

    # backward scan: last line whose stripped text begins with the closer
    end = total - 1
    while end > start:
        tail = lines[end].strip()
        if tail and tail[0] == closer:
            break
        end -= 1
    else:
        # no closing line found below the opener
        return opener + closer

    return "".join(lines[start:end + 1])
def show_cmd(module, cmd, json_fmt=True, fail_on_error=True):
    # Run a single "show ..." command.  With json_fmt the device is asked to
    # emit JSON ("| json-print"), which is cleaned up and parsed below.
    if json_fmt:
        cmd += " | json-print"
    conn = get_connection(module)
    command_obj = to_commands(module, to_list(cmd))[0]
    try:
        out = conn.get(**command_obj)
    except ConnectionError:
        if fail_on_error:
            raise
        # caller opted into best-effort behaviour: signal failure with None
        return None
    if json_fmt:
        # strip banner noise around the JSON body before parsing
        out = _parse_json_output(out)
        try:
            cfg = json.loads(out)
        except ValueError:
            module.fail_json(
                msg="got invalid json",
                stderr=to_text(out, errors='surrogate_then_replace'))
    else:
        cfg = to_text(out, errors='surrogate_then_replace').strip()
    return cfg
def get_interfaces_config(module, interface_type, flags=None, json_fmt=True):
    # Convenience wrapper for "show interfaces <type> [<flags>]".
    cmd = "show interfaces %s" % interface_type
    if flags:
        cmd += " %s" % flags
    return show_cmd(module, cmd, json_fmt)
def show_version(module):
    # Device version information, parsed from JSON output.
    return show_cmd(module, "show version")
def get_bgp_summary(module):
    # BGP running-config as plain text; returns None instead of raising when
    # the command is unavailable (fail_on_error=False).
    cmd = "show running-config protocol bgp"
    return show_cmd(module, cmd, json_fmt=False, fail_on_error=False)
class BaseOnyxModule(object):
    """Template base class for Onyx network modules.

    Subclasses implement init_module/get_required_config/load_current_config/
    generate_commands; ``run()`` drives that sequence and applies the
    resulting command list to the device.
    """
    ONYX_API_VERSION = "3.6.6000"

    def __init__(self):
        self._module = None            # AnsibleModule instance (set by init_module)
        self._commands = list()        # config lines produced by generate_commands
        self._current_config = None
        self._required_config = None
        self._os_version = None

    def init_module(self):
        # Hook: subclasses create and store the AnsibleModule here.
        pass

    def load_current_config(self):
        # Hook: subclasses read the device state into _current_config.
        pass

    def get_required_config(self):
        # Hook: subclasses derive _required_config from the module params.
        pass

    def _get_os_version(self):
        # Query the device and extract the firmware release string.
        version_data = show_version(self._module)
        return self.get_config_attr(
            version_data, "Product release")

    # pylint: disable=unused-argument
    def check_declarative_intent_params(self, result):
        # Hook: return failed wait-for conditions, or None when satisfied.
        return None

    def _validate_key(self, param, key):
        # Dispatch to a validate_<key> method; raises AttributeError when
        # no such validator exists (swallowed by the caller below).
        validator = getattr(self, 'validate_%s' % key)
        if callable(validator):
            validator(param.get(key))

    def validate_param_values(self, obj, param=None):
        if param is None:
            param = self._module.params
        for key in obj:
            # validate the param value (if validator func exists)
            try:
                self._validate_key(param, key)
            except AttributeError:
                pass

    @classmethod
    def get_config_attr(cls, item, arg):
        # Plain dict lookup kept as a hook so subclasses can override it.
        return item.get(arg)

    @classmethod
    def get_mtu(cls, item):
        # Parse the leading integer out of the device's "MTU" field;
        # returns None when the field is not numeric.
        mtu = cls.get_config_attr(item, "MTU")
        mtu_parts = mtu.split()
        try:
            return int(mtu_parts[0])
        except ValueError:
            return None

    def _validate_range(self, attr_name, min_val, max_val, value):
        # None is treated as "not provided" and passes validation.
        if value is None:
            return True
        if not min_val <= int(value) <= max_val:
            msg = '%s must be between %s and %s' % (
                attr_name, min_val, max_val)
            self._module.fail_json(msg=msg)

    def validate_mtu(self, value):
        # MTU bounds for Onyx switches.
        self._validate_range('mtu', 1500, 9612, value)

    def generate_commands(self):
        # Hook: subclasses diff required vs current config into _commands.
        pass

    def run(self):
        # Standard module lifecycle: gather configs, compute commands,
        # apply them (unless check mode), then report the result.
        self.init_module()

        result = {'changed': False}

        self.get_required_config()
        self.load_current_config()
        self.generate_commands()
        result['commands'] = self._commands

        if self._commands:
            if not self._module.check_mode:
                load_config(self._module, self._commands)
            result['changed'] = True

        failed_conditions = self.check_declarative_intent_params(result)

        if failed_conditions:
            msg = 'One or more conditional statements have not been satisfied'
            self._module.fail_json(msg=msg,
                                   failed_conditions=failed_conditions)

        self._module.exit_json(**result)

    @classmethod
    def main(cls):
        # Entry point used by concrete module scripts.
        app = cls()
        app.run()
| [
"theyashkins@gmail.com"
] | theyashkins@gmail.com |
2bfd9ee81df21fab82139938d2c26ec709bab772 | 0b03746cf78477b88032616110aa92f85db7eb69 | /law/config.py | 7ab741d8879c14798b5da5edae3e98eaa1b77925 | [
"MIT"
] | permissive | erikgeiser/law | ad3a7361cb057b5022aaf81898da46e2655bc52a | 2c8a0d5161ea2b44063a79860f2bebdf66ff67d4 | refs/heads/master | 2020-03-21T05:48:45.748030 | 2018-07-13T14:47:04 | 2018-07-13T14:47:04 | 138,182,389 | 1 | 1 | MIT | 2018-06-21T14:34:57 | 2018-06-21T14:34:57 | null | UTF-8 | Python | false | false | 9,681 | py | # -*- coding: utf-8 -*-
"""
law config parser implementation.
"""
__all__ = ["Config"]
import os
import tempfile
import logging
import luigi
import six
from six.moves.configparser import ConfigParser
from law.util import law_home_path, check_bool_flag
logger = logging.getLogger(__name__)
class Config(ConfigParser):
    """
    Custom law configuration parser with a few additions on top of the standard python
    ``ConfigParser``. Most notably, this class adds config *inheritance* via :py:meth:`update` and
    :py:meth:`include`, as well as a mechanism to synchronize with the luigi configuration parser.

    When *config_file* is set, it is loaded during setup. When empty, and *skip_fallbacks* is
    *False*, the default config file locations defined in :py:attr:`_config_files` are checked. By
    default, the default configuration :py:attr:`_default_config` is loaded, which can be prevented
    by setting *skip_defaults* to *True*.

    .. py:classattribute:: _instance
       type: Config

       Global instance of this class.

    .. py:classattribute:: _default_config
       type: dict

       Default configuration.

    .. py:classattribute:: _config_files
       type: list

       List of configuration files that are checked during setup (unless *skip_fallbacks* is
       *True*). When a file exists, the check is stopped. Therefore, the order is important here.
    """

    _instance = None

    _default_config = {
        "core": {
            "db_file": os.getenv("LAW_DB_FILE", law_home_path("db")),
            "software_dir": law_home_path("software"),
            "inherit_configs": "",
            "extend_configs": "",
            "sync_luigi_config": check_bool_flag(os.getenv("LAW_SYNC_LUIGI_CONFIG", "yes")),
        },
        "logging": {
            "law": os.getenv("LAW_LOG_LEVEL", "WARNING"),
        },
        "target": {
            "tmp_dir": os.getenv("LAW_TARGET_TMP_DIR", tempfile.gettempdir()),
            "tmp_dir_permission": 0o0770,
            "gfal2_log_level": "WARNING",
            # contrib
            "default_dropbox_fs": "dropbox_fs",
            "default_wlcg_fs": "wlcg_fs",
        },
        "job": {
            "job_file_dir": tempfile.gettempdir(),
        },
        "modules": {},
        "bash_env": {},
        "docker": {
            "forward_dir": "/law_forward",
            "python_dir": "py",
            "bin_dir": "bin",
            "stagein_dir": "stagein",
            "stageout_dir": "stageout",
        },
        "docker_env": {},
        "docker_volumes": {},
        "singularity": {
            "forward_dir": "/law_forward",
            "python_dir": "py",
            "bin_dir": "bin",
            "stagein_dir": "stagein",
            "stageout_dir": "stageout",
        },
        "singularity_env": {},
        "singularity_volumes": {},
        "notifications": {
            "mail_recipient": "",
            "mail_sender": "",
            "mail_smtp_host": "127.0.0.1",
            "mail_smtp_port": 25,
            # contrib
            "slack_token": "",
            "slack_channel": "",
            "slack_mention_user": "",
        },
    }

    _config_files = ["$LAW_CONFIG_FILE", "law.cfg", law_home_path("config"), "etc/law/config"]

    @classmethod
    def instance(cls, *args, **kwargs):
        """
        Creates an instance of this class with all *args* and *kwargs*, saves it in
        :py:attr:`_instance`, and returns it. When :py:attr:`_instance` was already set before, no
        new instance is created.
        """
        if cls._instance is None:
            cls._instance = cls(*args, **kwargs)
        return cls._instance

    def __init__(self, config_file="", skip_defaults=False, skip_fallbacks=False):
        ConfigParser.__init__(self, allow_no_value=True)

        self.config_file = None  # path of the file actually loaded, if any

        # load defaults
        if not skip_defaults:
            self.update(self._default_config)

        # read from files; the first existing candidate wins
        files = [config_file]
        if not skip_fallbacks:
            files += self._config_files
        for f in files:
            f = os.path.expandvars(os.path.expanduser(f))
            f = os.path.normpath(os.path.abspath(f))
            if os.path.isfile(f):
                self.read(f)
                self.config_file = f
                logger.debug("config instance created from '{}'".format(f))
                break
        else:
            logger.debug("config instance created without a file")

        # inherit from and/or extend by other configs
        # NOTE(review): this reads the "include_configs" option, but the
        # shipped defaults above only define "inherit_configs" -- one of the
        # two names looks stale; confirm which is intended.
        for option, overwrite_options in [("include_configs", False), ("extend_configs", True)]:
            for filename in self.get_default("core", option, "").split(","):
                filename = filename.strip()
                if filename:
                    # resolve filename relative to the main config file
                    if self.config_file:
                        basedir = os.path.dirname(self.config_file)
                        filename = os.path.normpath(os.path.join(basedir, filename))
                    self.include(filename, overwrite_options=overwrite_options)

        # sync with luigi configuration
        if self.getboolean("core", "sync_luigi_config"):
            self.sync_luigi_config()

    def optionxform(self, option):
        """Keep option names case-sensitive (ConfigParser lowercases by default)."""
        return option

    def get_default(self, section, option, default=None):
        """
        Returns the config value defined by *section* and *option*. When either the section or the
        option does not exist, the *default* value is returned instead.
        """
        if self.has_section(section) and self.has_option(section, option):
            return self.get(section, option)
        else:
            return default

    def get_expanded(self, section, option, default=None, expand_user=True):
        """
        Same as :py:meth:`get_default`, but also expands environment and user variables when the
        returned config value is a string. When *expand_user* is *False*, user variables are not
        expanded.
        """
        value = self.get_default(section, option, default=default)
        if isinstance(value, six.string_types):
            if expand_user:
                value = os.path.expanduser(value)
            value = os.path.expandvars(value)
        return value

    def update(self, data, overwrite=None, overwrite_sections=True, overwrite_options=True):
        """
        Updates the currently stored configuration with new *data*, given as a dictionary. When
        *overwrite_sections* is *False*, sections in *data* that are already present in the current
        config are skipped. When *overwrite_options* is *False*, existing options are not
        overwritten. When *overwrite* is not *None*, both *overwrite_sections* and
        *overwrite_options* are set to its value.
        """
        if overwrite is not None:
            overwrite_sections = overwrite
            overwrite_options = overwrite

        for section, _data in six.iteritems(data):
            if not self.has_section(section):
                self.add_section(section)
            elif not overwrite_sections:
                continue

            for option, value in six.iteritems(_data):
                if overwrite_options or not self.has_option(section, option):
                    self.set(section, option, str(value))

    def include(self, filename, *args, **kwargs):
        """
        Updates the current config with the config found in *filename*. All *args* and *kwargs* are
        forwarded to :py:meth:`update`.
        """
        # parse the other file in isolation (no defaults, no fallbacks)
        p = self.__class__(filename, skip_defaults=True, skip_fallbacks=True)
        self.update(p._sections, *args, **kwargs)

    def keys(self, section):
        """
        Returns all keys of a *section* in a list.
        """
        return [key for key, _ in self.items(section)]

    def sync_luigi_config(self, push=True, pull=True, expand=True):
        """
        Synchronizes sections starting with ``"luigi_"`` with the luigi configuration parser. First,
        when *push* is *True*, options that exist in law but **not** in luigi are stored as defaults
        in the luigi config. Then, when *pull* is *True*, all luigi-related options in the law
        config are overwritten with those from luigi. This way, options set via luigi defaults
        (environment variables, global configuration files, `LUIGI_CONFIG_PATH`) always have
        precendence. When *expand* is *True*, environment variables are expanded before pushing them
        to the luigi config.
        """
        prefix = "luigi_"
        lparser = luigi.configuration.LuigiConfigParser.instance()

        if push:
            for section in self.sections():
                if not section.startswith(prefix):
                    continue
                lsection = section[len(prefix):]

                if not lparser.has_section(lsection):
                    lparser.add_section(lsection)

                for option in self.options(section):
                    if not lparser.has_option(lsection, option):
                        if expand:
                            value = self.get_expanded(section, option)
                        else:
                            value = self.get(section, option)
                        lparser.set(lsection, option, value)

        if pull:
            for lsection in lparser.sections():
                section = prefix + lsection

                if not self.has_section(section):
                    self.add_section(section)

                for option, value in lparser.items(lsection):
                    self.set(section, option, value)
| [
"marcelrieger@me.com"
] | marcelrieger@me.com |
419490d9be79e02a31e8b3e89e0ac816c5f69f66 | 58141d7fc37854efad4ad64c74891a12908192ed | /setup/delete_queue.py | 8061e4d967956926ae2cb113935d91110889b166 | [] | no_license | stanleylio/fishie | b028a93b2093f59a8ceee4f78b55a91bb1f69506 | 0685045c07e4105934d713a0fd58c4bc28821ed6 | refs/heads/master | 2022-08-14T13:08:55.548830 | 2022-07-29T01:32:28 | 2022-07-29T01:32:28 | 30,433,819 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | import sys,pika,argparse
from os.path import expanduser
sys.path.append(expanduser('~'))  # make the per-user cred module importable
from cred import cred

# CLI: single positional argument naming the RabbitMQ queue to delete
parser = argparse.ArgumentParser(description='')
parser.add_argument('queue_name',metavar='daq',type=str,
                    help='name of queue to be deleted')
args = parser.parse_args()
print(args.queue_name)

# connect to the local broker with the stored credential and delete the queue
credentials = pika.PlainCredentials('nuc',cred['rabbitmq'])
connection = pika.BlockingConnection(pika.ConnectionParameters('localhost',5672,'/',credentials))
channel = connection.channel()
channel.queue_delete(queue=args.queue_name)
connection.close()
| [
"stanleylio@gmail.com"
] | stanleylio@gmail.com |
23f114695e9b28063697275cac18aad4d1d253a4 | c0681769775e760d9ecf10e5803a26046bc7f45c | /Doctor/migrations/0011_remove_session_doctor.py | d7fa30395af0b3f7e22b3f7c8e70efc8a8d6ad5d | [] | no_license | enasmohmed/DoctorAPI | 34545ea4c36308363d358a493356271ac9a316ba | 2179f691243f418a48bc1d12d1a1dba7779dbcc2 | refs/heads/main | 2023-02-14T17:40:28.787673 | 2021-01-14T01:16:18 | 2021-01-14T01:16:18 | 329,144,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | # Generated by Django 3.1.5 on 2021-01-13 23:22
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: removes the `doctor` foreign key
    # from the Session model (follows migration 0010 of the Doctor app).

    dependencies = [
        ('Doctor', '0010_auto_20210113_2318'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='session',
            name='doctor',
        ),
    ]
| [
"enasm2477@gmail.com"
] | enasm2477@gmail.com |
22fac5fbea4918dcebbfee98f0d3cea8e13e2d5b | a54007706a09b387690f79fd7ffd889decad42f1 | /day04/code/20_默认参数易错点.py | 9a0ab1a389c3755a35c8877d74e819a5746b3dac | [] | no_license | lvah/201903python | d425534544a1f91e5b80b5ff0de5ca34037fe6e9 | 1415fcb7697dfa2884d94dcd8963477e12fe0624 | refs/heads/master | 2020-07-06T16:45:37.882819 | 2019-09-08T10:13:07 | 2019-09-08T10:13:07 | 203,082,401 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py |
# Important: a default parameter's value must always be an immutable object;
def listOperator(li=None):
    """Append the sentinel string 'End' to *li* and return the list.

    The default is ``None`` instead of ``[]`` on purpose: a mutable
    default value would be shared between calls, so every call without
    an argument must get its own fresh list.

    :param li: list to extend, or None for a new empty list
    :return: the (possibly newly created) list with 'End' appended
    """
    result = [] if li is None else li
    result.append('End')
    return result
# Demonstration: each no-argument call must print a fresh ['End'],
# not an ever-growing shared list.
# print(listOperator([1, 2, 3]))
# print(listOperator([]))
# print(listOperator([]))
print(listOperator())
print(listOperator())
print(listOperator())
"root@foundation0.ilt.example.com"
] | root@foundation0.ilt.example.com |
d10c2dba1cb876d1b31f3b6cb6ede0f7fa374b75 | e2c5dd7020b0613852bf16606d6de48159f56a7e | /ensemble/test/test_gbdt.py | ee73d36ba89bb65b78540a47b1a1c3e847d0603c | [] | no_license | bannima/MachineLearninginAction | 7830cb6bcda43e62e937b86198e4e5bbeef28275 | 872a24e25f8ef7768b2f8b37c9d1ab39504f0c33 | refs/heads/master | 2021-08-08T08:15:43.403854 | 2021-07-25T09:37:37 | 2021-07-25T09:37:37 | 238,710,052 | 23 | 5 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
FileName: test_gbdt.py
Description:
Author: Barry Chow
Date: 2020/3/3 4:15 PM
Version: 0.1
"""
from numpy import *
from sklearn.datasets import load_iris
from ensemble import GradientBoostingClassifier
from ensemble import GradientBoostingRegressor
from utils import accuracy_score, mean_square_error
iris = load_iris()
class Test_GBDT(object):
def test_gbregressor(self):
gbregressor = GradientBoostingRegressor()
gbregressor.fit(mat(iris.data), mat(iris.target))
preds = gbregressor.predict(mat(iris.data))
assert mean_square_error(preds, iris.target) < 5e-2
def test_gbclassifier(self):
gbclassifier = GradientBoostingClassifier()
gbclassifier.fit(mat(iris.data), mat(iris.target))
preds = gbclassifier.predict(mat(iris.data))
assert accuracy_score(preds, iris.target) > 0.95
| [
"zhouenguo@163.com"
] | zhouenguo@163.com |
class Parametrisable(object):
    """
    Class where keywords given to the constructor become attributes.
    Required parameters can be specified through overriding `_required_pars`.
    """
    _required_pars = []

    def __init__(self, **pars):
        # Collect any required parameter that the caller did not supply.
        missing = [name for name in self._required_pars if name not in pars]
        if missing:
            listing = ', '.join('"{}"'.format(name) for name in missing)
            raise RuntimeError('must specify {}'.format(listing))

        # Expose every keyword as an attribute and keep the raw dict around.
        for name, value in pars.items():
            setattr(self, name, value)
        self._pars = pars
| [
"wessel.p.bruinsma@gmail.com"
] | wessel.p.bruinsma@gmail.com |
54abfd415e752c13460f79a21713413b9ab14fcc | abc89af3d78537266421803072727561111a9b2f | /rename.py | 9d4fab667a2c8f11f4e56f3abebe0b0414e633d6 | [] | no_license | Iverance/leetcode | 41b34ff847d77dfa84a0f8f656889b9d4bf125d7 | 0127190b27862ec7e7f4f2fcce5ce958d480cdac | refs/heads/master | 2021-01-22T02:28:39.498315 | 2017-12-23T01:36:51 | 2017-12-23T01:36:51 | 102,247,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | import os
# Zero-pad numeric filename prefixes so they sort lexicographically:
# "1.foo" -> "001.foo", "12.foo" -> "012.foo"; 3-digit names are untouched.
for entry in os.listdir("."):
    if entry[0] not in '0123456789':
        continue
    if '.' in entry[:2]:
        # single-digit prefix (dot at position 1)
        os.rename(entry, '00' + entry)
    elif '.' in entry[:3]:
        # two-digit prefix (dot at position 2)
        os.rename(entry, '0' + entry)
| [
"jeremhh@gmail.com"
] | jeremhh@gmail.com |
1d0850ecf7db575ff30bc3999e2fb93b4e55b457 | 81c344b8df43ed550cb9496c664a8de2687eda3e | /venv/lib/python3.8/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_authentication_rule.py | 2c4078a4610b6502d0b24ab1531a0271f27d7a36 | [] | no_license | anhdoan-ntt/cisco-aci | dc0e52b6d19ee0bafb2b24e0febe955952bf39ef | 185be6d6f13eabd65fb0ff328ea54f6507ccf0d4 | refs/heads/main | 2022-12-20T00:07:27.465096 | 2020-10-05T08:15:29 | 2020-10-05T08:15:29 | 300,500,699 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 16,189 | py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_authentication_rule
short_description: Configure Authentication Rules in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify authentication feature and rule category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.8"
author:
- Link Zheng (@chillancezen)
- Hongbin Lu (@fgtdev-hblu)
- Frank Shen (@frankshen01)
- Jie Xue (@JieX19)
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
authentication_rule:
description:
- Configure Authentication Rules.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
active_auth_method:
description:
- Select an active authentication method. Source authentication.scheme.name.
type: str
comments:
description:
- Comment.
type: str
ip_based:
description:
- Enable/disable IP-based authentication. Once a user authenticates all traffic from the IP address the user authenticated from is allowed.
type: str
choices:
- enable
- disable
name:
description:
- Authentication rule name.
required: true
type: str
protocol:
description:
- Select the protocol to use for authentication . Users connect to the FortiGate using this protocol and are asked to authenticate.
type: str
choices:
- http
- ftp
- socks
- ssh
srcaddr:
description:
- Select an IPv4 source address from available options. Required for web proxy authentication.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address.name firewall.addrgrp.name firewall.proxy-address.name firewall.proxy-addrgrp.name.
required: true
type: str
srcaddr6:
description:
- Select an IPv6 source address. Required for web proxy authentication.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
type: str
sso_auth_method:
description:
- Select a single-sign on (SSO) authentication method. Source authentication.scheme.name.
type: str
status:
description:
- Enable/disable this authentication rule.
type: str
choices:
- enable
- disable
transaction_based:
description:
- Enable/disable transaction based authentication .
type: str
choices:
- enable
- disable
web_auth_cookie:
description:
- Enable/disable Web authentication cookies .
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: fortigates
collections:
- fortinet.fortios
connection: httpapi
vars:
vdom: "root"
ansible_httpapi_use_ssl: yes
ansible_httpapi_validate_certs: no
ansible_httpapi_port: 443
tasks:
- name: Configure Authentication Rules.
fortios_authentication_rule:
vdom: "{{ vdom }}"
state: "present"
authentication_rule:
active_auth_method: "<your_own_value> (source authentication.scheme.name)"
comments: "<your_own_value>"
ip_based: "enable"
name: "default_name_6"
protocol: "http"
srcaddr:
-
name: "default_name_9 (source firewall.address.name firewall.addrgrp.name firewall.proxy-address.name firewall.proxy-addrgrp.name)"
srcaddr6:
-
name: "default_name_11 (source firewall.address6.name firewall.addrgrp6.name)"
sso_auth_method: "<your_own_value> (source authentication.scheme.name)"
status: "enable"
transaction_based: "enable"
web_auth_cookie: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Authenticate against the FortiGate using the legacy fortiosapi flow.

    Only used in legacy mode (host/username/password given in the task
    arguments); the httpapi path in main() authenticates through Ansible's
    persistent connection instead.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    # HTTPS is on by default; only an explicit https: false disables it.
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')

    fos.login(host, username, password, verify=ssl_verify)
def filter_authentication_rule_data(json):
    """Project *json* onto the option names accepted by the
    'authentication rule' API endpoint, dropping unset (None) values."""
    option_list = ['active_auth_method', 'comments', 'ip_based',
                   'name', 'protocol', 'srcaddr',
                   'srcaddr6', 'sso_auth_method', 'status',
                   'transaction_based', 'web_auth_cookie']

    return {option: json[option]
            for option in option_list
            if json.get(option) is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from snake_case to hyphen-case.

    Dicts are rebuilt (new object), lists are converted in place, and
    every other value is returned unchanged.
    """
    if isinstance(data, dict):
        return {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    if isinstance(data, list):
        for index in range(len(data)):
            data[index] = underscore_to_hyphen(data[index])
    return data
def authentication_rule(data, fos):
    """Create/update or delete an 'authentication rule' object on the
    FortiGate, depending on the requested state."""
    vdom = data['vdom']
    # 'state' may be given at the top level (preferred since Ansible 2.9)
    # or, for backward compatibility, inside the authentication_rule dict.
    if 'state' in data and data['state']:
        state = data['state']
    elif 'state' in data['authentication_rule'] and data['authentication_rule']['state']:
        state = data['authentication_rule']['state']
    else:
        # NOTE(review): this fallback value matches neither branch below,
        # so the function would return None when no state is supplied --
        # presumably callers always provide a state; confirm.
        state = True
    authentication_rule_data = data['authentication_rule']
    # The API expects hyphenated keys and no null-valued options.
    filtered_data = underscore_to_hyphen(filter_authentication_rule_data(authentication_rule_data))

    if state == "present":
        return fos.set('authentication',
                       'rule',
                       data=filtered_data,
                       vdom=vdom)

    elif state == "absent":
        return fos.delete('authentication',
                          'rule',
                          mkey=filtered_data['name'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True when the API reply indicates success.

    A 404 answer to a DELETE also counts as success: the object is
    already gone, which is the desired end state.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_authentication(data, fos):
    """Dispatch to the resource handler and normalise its result.

    Returns the (is_error, has_changed, result) tuple consumed by main().

    Bug fix: previously `resp` was only assigned when an
    'authentication_rule' payload was present, so a task without one
    crashed with UnboundLocalError instead of reporting a module failure.
    Now a synthetic error response is returned, which main() turns into
    a regular fail_json.
    """
    if data['authentication_rule']:
        resp = authentication_rule(data, fos)
    else:
        resp = {'status': 'error',
                'http_method': 'GET',
                'http_status': 500,
                'message': 'no authentication_rule payload given'}

    return not is_successful_status(resp), \
        resp['status'] == "success" and \
        (resp['revision_changed'] if 'revision_changed' in resp else True), \
        resp
def main():
    """Ansible entry point: declare the argument spec, talk to the
    FortiGate (httpapi by default, legacy fortiosapi when host
    credentials are supplied) and report the outcome to Ansible."""
    # Argument spec mirrors the DOCUMENTATION block above; the nested
    # 'authentication_rule' dict carries the actual API payload.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "authentication_rule": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "active_auth_method": {"required": False, "type": "str"},
                "comments": {"required": False, "type": "str"},
                "ip_based": {"required": False, "type": "str",
                             "choices": ["enable",
                                         "disable"]},
                "name": {"required": True, "type": "str"},
                "protocol": {"required": False, "type": "str",
                             "choices": ["http",
                                         "ftp",
                                         "socks",
                                         "ssh"]},
                "srcaddr": {"required": False, "type": "list",
                            "options": {
                                "name": {"required": True, "type": "str"}
                            }},
                "srcaddr6": {"required": False, "type": "list",
                             "options": {
                                 "name": {"required": True, "type": "str"}
                             }},
                "sso_auth_method": {"required": False, "type": "str"},
                "status": {"required": False, "type": "str",
                           "choices": ["enable",
                                       "disable"]},
                "transaction_based": {"required": False, "type": "str",
                                      "choices": ["enable",
                                                  "disable"]},
                "web_auth_cookie": {"required": False, "type": "str",
                                    "choices": ["enable",
                                                "disable"]}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    versions_check_result = None
    if not legacy_mode:
        # HTTPAPI path: reuse Ansible's persistent connection socket.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_authentication(module.params, fos)
            versions_check_result = connection.get_system_version()
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: direct connection via the optional fortiosapi package.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_authentication(module.params, fos)
        fos.logout()

    # Warn (but do not fail) on a FortiOS/collection version mismatch.
    if versions_check_result and versions_check_result['matched'] is False:
        module.warn("Ansible has detected version mismatch between FortOS system and galaxy, see more details by specifying option -vvv")

    if not is_error:
        if versions_check_result and versions_check_result['matched'] is False:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
        else:
            module.exit_json(changed=has_changed, meta=result)
    else:
        if versions_check_result and versions_check_result['matched'] is False:
            module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
        else:
            module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| [
"dc.anh.doan@gmail.com"
] | dc.anh.doan@gmail.com |
6bedc9f9b3637f1ae471d084de0b925ba9c41d68 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /dockerized-gists/35c3d539cd5a30aaf580/snippet.py | 0cd09dc0b9c464eea7925d4248aad941e0a53623 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 2,720 | py | # this is how to highlight code for powerpoint: `pbpaste | highlight --syntax=py -O rtf | pbcopy`
# api_jsonify()
from decimal import Decimal
from uuid import UUID
from datetime import datetime
from flask import Response
from json import JSONEncoder, dumps
class JsonApiEncoder(JSONEncoder):
    """JSON encoder that also understands Decimal, UUID, datetime and
    any object exposing an ``api_dump()`` method."""

    def default(self, o):
        if isinstance(o, (Decimal, UUID)):
            # Serialise exact numerics and identifiers as strings.
            return str(o)
        if isinstance(o, datetime):
            return o.isoformat()
        dumper = getattr(o, 'api_dump', None)
        if dumper is not None:
            return dumper()
        return JSONEncoder.default(self, o)
def api_dumps(obj):
    """Serialise *obj* to pretty-printed, key-sorted JSON using JsonApiEncoder."""
    return dumps(obj, cls=JsonApiEncoder, sort_keys=True, indent=4)
# role_aware_api_jsonify()
class RoleAwareEncoder(JsonApiEncoder):
    """JsonApiEncoder that augments User dumps with the caller's permissions."""

    def default(self, o):
        if not isinstance(o, User):
            return super().default(o)
        dumped = o.api_dump()
        # Underscore-prefixed keys carry per-request permission flags.
        dumped['_can_update'] = can_update_user(o)
        dumped['_can_send_funds'] = can_send_funds_to_user(o)
        return dumped
def role_aware_dumps(obj):
    """Serialise *obj* to pretty-printed JSON with role-aware encoding.

    Bug fix: the original called ``json.dumps``, but this module only does
    ``from json import JSONEncoder, dumps`` -- the ``json`` name itself is
    never imported, so every call raised NameError. Use the imported
    ``dumps``, matching api_dumps() above.
    """
    encoder_config = dict(cls=RoleAwareEncoder, sort_keys=True,
                          indent=4)
    return dumps(obj, **encoder_config)
def role_aware_jsonify(obj):
    """Wrap role-aware JSON output in an application/json Flask response."""
    payload = role_aware_dumps(obj)
    return Response(payload, mimetype='application/json')
def api_jsonify(obj):
    """Wrap api_dumps() output in an application/json Flask response."""
    payload = api_dumps(obj)
    return Response(payload, mimetype='application/json')
# validation
# JSON-schema fragment validating the POST body for card funding events;
# `St` presumably supplies shared subschemas (uuid / dollar formats) -- confirm.
card_funding_event_schema = {
    "properties": {
        "user_uuid": St.uuid,
        "amount": St.dollar_value,
    },
    "required": ["user_uuid", "amount"],
}
@bp.route("/card_funding_events", methods=["POST"])
@validate(card_funding_event_schema)
def new_funding_event_for_user():
    """Create a card funding event for the user named in the payload."""
    # `validated` is the schema-checked request body (see the @validate
    # decorator / LocalProxy below).
    user = get_user(key=validated['user_uuid'])
    fe = CardFundingEvent()
    fe.card = user.card
    fe.load_amount = Decimal(validated['amount'])
    db.session.add(fe)
    db.session.commit()
    return role_aware_jsonify(fe)
# how it works
# Request-scoped proxy: resolves to the payload that the @validate decorator
# stores on flask.g, so views can read `validated[...]` directly.
validated = LocalProxy(lambda: g.validated)

def check_schema(schema):
    # Raises jsonschema.SchemaError if the schema itself is malformed.
    jsonschema.Draft4Validator.check_schema(schema)
def validate(schema):
    """Decorator factory: validate request.json against *schema* before
    the wrapped view runs, exposing the payload as g.validated."""
    # Fail fast at decoration time if the schema itself is invalid.
    check_schema(schema)
    def get_errors(data):
        v = jsonschema.Draft4Validator(schema)
        # Sort by path so error output is deterministic.
        return sorted(v.iter_errors(data), key=lambda e: e.path)
    def validate_payload(data):
        errors = get_errors(data)
        if len(errors) > 0:
            raise ValidationException(errors)
        return data
    def validate_decorator(fn):
        @wraps(fn)
        def wrapped(*args, **kwargs):
            # Store the validated body on flask.g for the LocalProxy above.
            g.validated = validate_payload(request.json)
            return fn(*args, **kwargs)
        return wrapped
    return validate_decorator
| [
"gistshub@gmail.com"
] | gistshub@gmail.com |
52512e9d370a51bcf092d65fc589ab94b7004799 | 51036d0ef641fc455eb643e3a6b942136b20e697 | /rdmo/conditions/tests/test_commands.py | a41483e4986a3d63a27ed948f34598a93c75d051 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
] | permissive | ItsNotYou/rdmo | 43ab02a340ae6c6f50e19cd728f5c1311117abfc | eba2056b376107e817a4080fc12245095a907429 | refs/heads/master | 2022-12-02T04:18:48.117562 | 2020-08-16T10:13:11 | 2020-08-16T10:13:11 | 287,918,765 | 0 | 0 | Apache-2.0 | 2020-08-16T10:10:19 | 2020-08-16T10:10:18 | null | UTF-8 | Python | false | false | 374 | py | import io
import os
from django.core.management import call_command
def test_import(db, settings):
    """Smoke-test the RDMO ``import`` management command on conditions.xml."""
    source = os.path.join(settings.BASE_DIR, 'xml', 'conditions.xml')
    out, err = io.StringIO(), io.StringIO()
    call_command('import', source, '--user=user', stdout=out, stderr=err)
    # A clean import writes nothing to stdout or stderr.
    assert not out.getvalue()
    assert not err.getvalue()
| [
"mail@jochenklar.de"
] | mail@jochenklar.de |
67c201f4428c123d2ed9ddac81f5d2a0ab16168b | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2791/60688/288720.py | fc2b23ce74d5ae4260c05243b1fa88e6f9d45963 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | times=int(input())
numslist=input().split(" ")
numslist=list(int(a) for a in numslist)
first=0
second=""
for i in range(0,times):
if(numslist[i]==1):
first=first+1
if(i!=0):
second=second+str(numslist[i-1])+" "
second=second+str(numslist[times-1])
print(first)
print(second) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
667a48100b02d4fc5b11046794c639f95c081a88 | 78537ca73fd61c5d8fd6cbb326d4ba95eadc7219 | /CircuitPython_on_Linux_and_Raspberry_Pi/PWM_motor_servo_control.py | 06f9a6ad4a2b9e7e04c7ee055bc8db5a116658ed | [
"MIT"
] | permissive | FoamyGuy/Adafruit_Learning_System_Guides | a31f7c5ef49125ad25e5bdc4d0e50aa43513ce82 | 6cb04635ce47d2292a2ea09d196fbffdad534168 | refs/heads/master | 2023-08-16T21:34:30.856046 | 2020-11-10T00:51:16 | 2020-11-10T00:51:16 | 254,224,051 | 1 | 0 | MIT | 2020-11-10T00:51:18 | 2020-04-08T23:32:18 | C | UTF-8 | Python | false | false | 498 | py | import time
import board
import pulseio
from adafruit_motor import servo
# create a PWMOut object on Pin D5.
# 50 Hz is the standard hobby-servo frame rate; the initial duty_cycle is
# presumably just a placeholder managed by the servo library -- confirm.
pwm = pulseio.PWMOut(board.D5, duty_cycle=2 ** 15, frequency=50)

# Create a servo object.
# NOTE: this rebinds the imported `servo` module name to the instance.
servo = servo.Servo(pwm)
# Sweep back and forth forever, 5 degrees every 50 ms.
while True:
    for angle in range(0, 180, 5):  # 0 - 180 degrees, 5 degrees at a time.
        servo.angle = angle
        time.sleep(0.05)
    for angle in range(180, 0, -5):  # 180 - 0 degrees, 5 degrees at a time.
        servo.angle = angle
        time.sleep(0.05)
| [
"kattni@adafruit.com"
] | kattni@adafruit.com |
70e13aea5531ab49d0ca4b969bca632e629ae87c | 495531870c08ea3495bb45393b05f907366f052e | /x7-src/dashboard/steer/steer/dashboards/settings/user/urls.py | 63cb0f42cb258a06941b7f2af993a654416bde1e | [
"Apache-2.0"
] | permissive | wendy-king/x7_venv | 5fcb326cf3ecaa26d3b839af743b027d23af29e0 | d8266c1dc474935c54126ce36d1a6410a7e452f5 | refs/heads/master | 2021-01-01T06:33:24.605851 | 2012-01-19T15:54:44 | 2012-01-19T15:54:44 | 3,209,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls.defaults import patterns, url
# Map the dashboard root URL ("") to the user-settings index view.
urlpatterns = patterns('steer.dashboards.settings.user.views',
    url(r'^$', 'index', name='index'))
| [
"king_wendy@sina.com"
] | king_wendy@sina.com |
51c0bc39cdee200e82b120630c333d4307f1c756 | 61efd764ae4586b6b2ee5e6e2c255079e2b01cfc | /azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/virtual_network_peering.py | 610944b9edf6986e9ce86ed411f1f8f342ddc79b | [
"MIT"
] | permissive | AutorestCI/azure-sdk-for-python | a3642f53b5bf79d1dbb77851ec56f4cc0c5b3b61 | 60b0726619ce9d7baca41f6cd38f741d74c4e54a | refs/heads/master | 2021-01-21T02:23:59.207091 | 2018-01-31T21:31:27 | 2018-01-31T21:31:27 | 55,251,306 | 4 | 3 | null | 2017-11-13T17:57:46 | 2016-04-01T17:48:48 | Python | UTF-8 | Python | false | false | 4,677 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class VirtualNetworkPeering(SubResource):
    """Peerings in a virtual network resource.
    :param id: Resource ID.
    :type id: str
    :param allow_virtual_network_access: Whether the VMs in the linked virtual
     network space would be able to access all the VMs in local Virtual network
     space.
    :type allow_virtual_network_access: bool
    :param allow_forwarded_traffic: Whether the forwarded traffic from the VMs
     in the remote virtual network will be allowed/disallowed.
    :type allow_forwarded_traffic: bool
    :param allow_gateway_transit: If gateway links can be used in remote
     virtual networking to link to this virtual network.
    :type allow_gateway_transit: bool
    :param use_remote_gateways: If remote gateways can be used on this virtual
     network. If the flag is set to true, and allowGatewayTransit on remote
     peering is also true, virtual network will use gateways of remote virtual
     network for transit. Only one peering can have this flag set to true. This
     flag cannot be set if virtual network already has a gateway.
    :type use_remote_gateways: bool
    :param remote_virtual_network: The reference of the remote virtual
     network. The remote virtual network can be in the same or different region
     (preview). See here to register for the preview and learn more
     (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
    :type remote_virtual_network:
     ~azure.mgmt.network.v2017_10_01.models.SubResource
    :param remote_address_space: The reference of the remote virtual network
     address space.
    :type remote_address_space:
     ~azure.mgmt.network.v2017_10_01.models.AddressSpace
    :param peering_state: The status of the virtual network peering. Possible
     values are 'Initiated', 'Connected', and 'Disconnected'. Possible values
     include: 'Initiated', 'Connected', 'Disconnected'
    :type peering_state: str or
     ~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeeringState
    :param provisioning_state: The provisioning state of the resource.
    :type provisioning_state: str
    :param name: The name of the resource that is unique within a resource
     group. This name can be used to access the resource.
    :type name: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    """

    # Auto-generated mapping from Python attribute names to the REST
    # payload's JSON paths and msrest serialisation types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
        'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
        'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
        'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
        'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
        'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'},
        'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, id=None, allow_virtual_network_access=None, allow_forwarded_traffic=None, allow_gateway_transit=None, use_remote_gateways=None, remote_virtual_network=None, remote_address_space=None, peering_state=None, provisioning_state=None, name=None, etag=None):
        # All parameters are optional; each one is mirrored verbatim onto an
        # instance attribute. `id` is handled by the SubResource base class.
        super(VirtualNetworkPeering, self).__init__(id=id)
        self.allow_virtual_network_access = allow_virtual_network_access
        self.allow_forwarded_traffic = allow_forwarded_traffic
        self.allow_gateway_transit = allow_gateway_transit
        self.use_remote_gateways = use_remote_gateways
        self.remote_virtual_network = remote_virtual_network
        self.remote_address_space = remote_address_space
        self.peering_state = peering_state
        self.provisioning_state = provisioning_state
        self.name = name
        self.etag = etag
| [
"laurent.mazuel@gmail.com"
] | laurent.mazuel@gmail.com |
b1b45fa60f7e0e003895684d3f195f184d53d6c8 | aa1972e6978d5f983c48578bdf3b51e311cb4396 | /nitro-python-1.0/nssrc/com/citrix/netscaler/nitro/resource/config/aaa/aaapreauthenticationpolicy_vpnvserver_binding.py | 9d1d503c57d03e7c475d01047f132fe13a288def | [
"Python-2.0",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | MayankTahil/nitro-ide | 3d7ddfd13ff6510d6709bdeaef37c187b9f22f38 | 50054929214a35a7bb19ed10c4905fffa37c3451 | refs/heads/master | 2020-12-03T02:27:03.672953 | 2017-07-05T18:09:09 | 2017-07-05T18:09:09 | 95,933,896 | 2 | 5 | null | 2017-07-05T16:51:29 | 2017-07-01T01:03:20 | HTML | UTF-8 | Python | false | false | 5,608 | py | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class aaapreauthenticationpolicy_vpnvserver_binding(base_resource) :
""" Binding class showing the vpnvserver that can be bound to aaapreauthenticationpolicy.
"""
	def __init__(self) :
		# Backing fields for the read/write properties below; filled in when
		# a NITRO GET response is parsed into this object.
		self._boundto = None
		self._priority = None
		self._activepolicy = None
		self._name = None
		# Triple underscore is intentional -- presumably the payload
		# formatter maps "count" responses onto this attribute; confirm.
		self.___count = 0
	@property
	def name(self) :
		r"""Name of the preauthentication policy whose properties you want to view.<br/>Minimum length = 1.
		"""
		try :
			return self._name
		except Exception as e:
			raise e
	@name.setter
	def name(self, name) :
		r"""Name of the preauthentication policy whose properties you want to view.<br/>Minimum length = 1
		"""
		try :
			# No client-side validation; the appliance enforces constraints.
			self._name = name
		except Exception as e:
			raise e
	@property
	def boundto(self) :
		r"""The entity name to which policy is bound.
		"""
		try :
			return self._boundto
		except Exception as e:
			raise e
	@boundto.setter
	def boundto(self, boundto) :
		r"""The entity name to which policy is bound.
		"""
		try :
			self._boundto = boundto
		except Exception as e:
			raise e
	@property
	def priority(self) :
		r"""Priority of the binding (read-only: no setter; value comes from
		the fetched binding data).
		"""
		try :
			return self._priority
		except Exception as e:
			raise e
	@property
	def activepolicy(self) :
		r"""Active-policy indicator of the binding (read-only: no setter;
		value comes from the fetched binding data).
		"""
		try :
			return self._activepolicy
		except Exception as e:
			raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(aaapreauthenticationpolicy_vpnvserver_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.aaapreauthenticationpolicy_vpnvserver_binding
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name="", option_="") :
r""" Use this API to fetch aaapreauthenticationpolicy_vpnvserver_binding resources.
"""
try :
if not name :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
response = obj.get_resources(service, option_)
else :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
r""" Use this API to fetch filtered set of aaapreauthenticationpolicy_vpnvserver_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
r""" Use this API to count aaapreauthenticationpolicy_vpnvserver_binding resources configued on NetScaler.
"""
try :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
r""" Use this API to count the filtered set of aaapreauthenticationpolicy_vpnvserver_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = aaapreauthenticationpolicy_vpnvserver_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class aaapreauthenticationpolicy_vpnvserver_binding_response(base_response) :
	""" NITRO response wrapper: status fields plus a list of binding resources. """
	def __init__(self, length=1) :
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate `length` empty binding objects for the payload
		# formatter to populate from the appliance response.
		self.aaapreauthenticationpolicy_vpnvserver_binding = [aaapreauthenticationpolicy_vpnvserver_binding() for _ in range(length)]
| [
"Mayank@Mandelbrot.local"
] | Mayank@Mandelbrot.local |
54c6a7f4607a6c69890642eb976d35540a9e23ec | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.0_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=74/sched.py | cc5e0ffd8b66aec58cf680cf03fc01953f535a07 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | -X FMLP -Q 0 -L 3 116 400
-X FMLP -Q 0 -L 3 59 300
-X FMLP -Q 1 -L 2 49 300
-X FMLP -Q 1 -L 2 44 250
-X FMLP -Q 2 -L 2 44 250
-X FMLP -Q 3 -L 2 37 150
37 175
27 175
23 175
22 175
18 175
4 175
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
63c27f2302b74cddff7e9b0a07332d38e3562e9b | 5b0a4ee4543275e130c32e27a0d046b72e0894e4 | /tensorflow/python/keras/engine/distributed_training_utils.py | 0185e87cb068eb1f8102af77705eab785fbbfd9e | [
"Apache-2.0"
] | permissive | pkit/tensorflow | f0426b2c4f8dc4e1630e7c139eb23c697200f9fb | 1d1aeb64b91effd4329965cc7954a3e21820c88b | refs/heads/master | 2020-04-16T05:20:22.828901 | 2019-01-11T19:16:53 | 2019-01-11T19:22:16 | 165,300,428 | 1 | 0 | Apache-2.0 | 2019-01-11T19:48:20 | 2019-01-11T19:48:19 | null | UTF-8 | Python | false | false | 36,127 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities related to distributed training."""
# pylint:disable=protected-access
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session as session_module
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.distribute import distribute_coordinator_context as dc_context
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import callbacks
from tensorflow.python.keras import metrics as metrics_module
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.mode_keys import ModeKeys
from tensorflow.python.util import nest
def set_weights(distribution_strategy, dist_model, weights):
  """Copies `weights` from the original model onto the replicated model.

  The replicated model's weights are Mirrored variables. In eager mode each
  variable is assigned directly; in graph mode the per-replica assign ops are
  collected and run in a single session call.

  Args:
    distribution_strategy: DistributionStrategy used to distribute training
        and validation.
    dist_model: The replicated models on the different devices.
    weights: The weights of the original model.
  """
  pending_assigns = []
  remaining = weights
  for layer in dist_model.layers:
    layer_vars = layer.weights
    current = remaining[:len(layer_vars)]
    remaining = remaining[len(layer_vars):]
    for variable, value in zip(layer_vars, current):
      if ops.executing_eagerly_outside_functions():
        variable.assign(value)
      else:
        pending_assigns.append(
            distribution_strategy.unwrap(variable.assign(value)))
  if not ops.executing_eagerly_outside_functions():
    K.get_session().run(pending_assigns)
def unwrap_values(distribution_strategy, grouped_inputs, grouped_outputs,
                  grouped_updates=None, grouped_session_args=None,
                  with_loss_tensor=False):
  """Unwrap and return the list of values contained in the PerDevice parameters.

  This function calls `flatten_perdevice_values` to parse each of the input
  parameters into a list of values on the different devices. If we set
  `with_loss_tensor` to be True, we also call `reduce` on the list of losses on
  the different devices to give us one loss tensor.

  Args:
    distribution_strategy: DistributionStrategy used to distribute training and
        validation.
    grouped_inputs: PerDevice inputs returned from the train or test function
        that we ran on each device.
    grouped_outputs: PerDevice outputs returned from the train or test function
        that we ran on each device.
    grouped_updates: PerDevice updates returned from the train or test function
        that we ran on each device.
    grouped_session_args: PerDevice session args returned from the train or
        test function that we ran on each device.
    with_loss_tensor: Boolean that indicates if we need to add the reduced loss
        tensor as one of the outputs.

  Returns:
    Values of each of the PerDevice parameters, as the 4-tuple
    (all_inputs, all_outputs, all_updates, all_session_args).
  """
  # Unwrap per device values returned from each model's train function.
  # This will be used to construct the main train function.
  all_inputs = flatten_perdevice_values(distribution_strategy,
                                        grouped_inputs)
  if with_loss_tensor:
    # reduce loss tensor before adding it to the list of fetches
    # NOTE: the first grouped output is assumed to be the loss; the reduced
    # loss is re-prepended so callers still see it at index 0.
    loss = distribution_strategy.reduce(distribute_lib.get_loss_reduction(),
                                        grouped_outputs[0])
    all_outputs = flatten_perdevice_values(distribution_strategy,
                                           grouped_outputs[1:])
    all_outputs = [loss] + all_outputs
  else:
    all_outputs = flatten_perdevice_values(distribution_strategy,
                                           grouped_outputs)

  if grouped_updates:
    all_updates = flatten_perdevice_values(distribution_strategy,
                                           grouped_updates)
  else:
    all_updates = None

  # Only 'feed_dict' and 'fetches' session args are unwrapped; other keys in
  # grouped_session_args are dropped.
  all_session_args = {}
  if grouped_session_args:
    grouped_feed_dict = grouped_session_args.get('feed_dict')
    if grouped_feed_dict:
      all_session_args['feed_dict'] = flatten_perdevice_values(
          distribution_strategy, grouped_feed_dict)

    grouped_fetches = grouped_session_args.get('fetches')
    if grouped_fetches:
      all_session_args['fetches'] = flatten_perdevice_values(
          distribution_strategy, grouped_fetches)

  # TODO(priyag): Return only non empty/None values
  return all_inputs, all_outputs, all_updates, all_session_args
def flatten_perdevice_values(distribution_strategy, perdevice_values):
  """Unwraps and flattens a nest of PerDevice parameters.

  Each PerDevice value holds one value per device. This walks the (possibly
  nested) structure of PerDevice values and returns every per-device value in
  one flat list.

  Args:
    distribution_strategy: DistributionStrategy used to distribute training and
        validation.
    perdevice_values: List of PerDevice object or a single PerDevice object.

  Returns:
    List of values of all the PerDevice objects.
  """
  flat_values = []
  for per_device in nest.flatten(perdevice_values):
    flat_values.extend(distribution_strategy.unwrap(per_device))
  return flat_values
def validate_callbacks(input_callbacks, optimizer):
  """Validate whether given callbacks are supported by DistributionStrategy.

  Args:
    input_callbacks: List of callbacks passed by the user to fit.
    optimizer: Optimizer instance used to train the model.

  Raises:
    ValueError: If `LearningRateScheduler` or `ReduceLROnPlateau` is one of the
        callbacks passed and the optimizer is not a Keras V2 optimizer.
  """
  if input_callbacks:
    for callback in input_callbacks:
      # BUG FIX: the original used `callback not in [<classes>]`, comparing a
      # callback *instance* against *classes* with `==`. That is never a
      # match, so the warning fired for every callback, including supported
      # ones. isinstance is the correct membership test here.
      if not isinstance(callback,
                        (callbacks.TensorBoard, callbacks.ReduceLROnPlateau,
                         callbacks.LearningRateScheduler, callbacks.CSVLogger,
                         callbacks.EarlyStopping, callbacks.ModelCheckpoint,
                         callbacks.TerminateOnNaN, callbacks.ProgbarLogger,
                         callbacks.History, callbacks.RemoteMonitor)):
        logging.warning('Your input callback is not one of the predefined '
                        'Callbacks that supports DistributionStrategy. You '
                        'might encounter an error if you access one of the '
                        'model\'s attributes as part of the callback since '
                        'these attributes are not set. You can access each of '
                        'the individual distributed models using the '
                        '`_grouped_model` attribute of your original model.')
      if isinstance(callback, (callbacks.LearningRateScheduler,
                               callbacks.ReduceLROnPlateau)):
        # These callbacks mutate the optimizer's learning rate, which is only
        # supported on V2 optimizers under DistributionStrategy.
        if not isinstance(optimizer, optimizer_v2.OptimizerV2):
          raise ValueError('You must specify a Keras Optimizer V2 when using '
                           '%s callback with DistributionStrategy.' % callback)

      # If users want to use the TensorBoard callback they cannot use certain
      # features of the callback that involve accessing model attributes and
      # running ops.
      if isinstance(callback, callbacks.TensorBoard):
        if callback.__getattribute__('histogram_freq'):
          logging.warning(
              UserWarning(
                  '`histogram_freq` in the TensorBoard callback is not '
                  'supported when using DistributionStrategy. Setting '
                  '`histogram_freq` to `0`.'))
          callback.histogram_freq = 0
        if callback.__getattribute__('write_grads'):
          logging.warning(
              UserWarning(
                  '`write_grads` in the TensorBoard callback is not supported '
                  'when using DistributionStrategy. Setting `write_grads` '
                  'to `False`.'))
          # BUG FIX: the original reset `histogram_freq` here, while the
          # warning (and the intent) is to disable `write_grads`.
          callback.write_grads = False
def validate_distributed_dataset_inputs(distribution_strategy, x, y,
                                        sample_weights=None):
  """Validate all the components of a DistributedValue Dataset input.

  Args:
    distribution_strategy: The current DistributionStrategy used to call
        `fit`/`evaluate`.
    x: Input Dataset DistributedValue object (e.g. a PerDevice object under
        `MirroredStrategy`). May also be a tuple or dict whose keys match the
        model's input layer names.
    y: Target Dataset DistributedValue object, same structure rules as `x`,
        keyed by output layer names. May be None.
    sample_weights: Optional sample weights Dataset DistributedValue object.

  Returns:
    The unwrapped values lists of the x, y and sample-weight DistributedValues
    inputs (None for y / sample_weights when not provided).

  Raises:
    ValueError: If x and y do not have support for being evaluated as tensors.
        or if x and y contain elements that are not tensors or if x and y
        contain elements that have a shape or dtype mismatch.
  """
  # Inputs and targets must come from a `tf.data.Dataset` when using a
  # DistributionStrategy; each component is validated element-by-element.
  def _validated_or_none(value):
    if value is None:
      return None
    return validate_per_device_inputs(distribution_strategy, value)

  # Return the unwrapped values to avoid calling `unwrap` a second time.
  x_values_list = validate_per_device_inputs(distribution_strategy, x)
  y_values_list = _validated_or_none(y)
  sample_weights_list = _validated_or_none(sample_weights)
  return x_values_list, y_values_list, sample_weights_list
def validate_per_device_inputs(distribution_strategy, x):
  """Validates PerDevice dataset input list.

  Args:
    distribution_strategy: The current DistributionStrategy used to call
        `fit`, `evaluate` and `predict`.
    x: A list of PerDevice objects that represent the input or target values.

  Returns:
    List containing the first element of each of the PerDevice objects in
    the input list.

  Raises:
    ValueError: If any of the objects in the `per_device_list` is not a tensor.
  """
  first_values = []
  # Flatten the (possibly nested) input into a list of PerDevice objects.
  for per_device in nest.flatten(x):
    if not tensor_util.is_tensor(per_device):
      raise ValueError('Dataset input to the model should be tensors instead '
                       'they are of type {}'.format(type(per_device)))

    # Each entry is a `DistributedValues` structure; all of its per-device
    # tensors must agree on shape and dtype.
    unwrapped = distribution_strategy.unwrap(per_device)
    validate_all_tensor_shapes(per_device, unwrapped)
    validate_all_tensor_types(per_device, unwrapped)

    first_values.append(unwrapped[0])
  return first_values
def validate_all_tensor_types(x, x_values):
  """Raises ValueError unless every tensor in `x_values` shares one dtype."""
  expected_dtype = x_values[0].dtype
  for value in x_values[1:]:
    if value.dtype != expected_dtype:
      raise ValueError('Input tensor dtypes do not match for distributed tensor'
                       ' inputs {}'.format(x))
def validate_all_tensor_shapes(x, x_values):
  """Raises ValueError unless every tensor in `x_values` has the same shape."""
  expected_shape = x_values[0].get_shape().as_list()
  for value in x_values[1:]:
    if value.get_shape().as_list() != expected_shape:
      raise ValueError('Input tensor shapes do not match for distributed tensor'
                       ' inputs {}'.format(x))
def _wait_for_variable_initialization(session):
  """Utility to wait for variables to be initialized.

  Polls the given session until every Keras variable in the current graph
  that has not yet been marked `_keras_initialized` reports as initialized.
  Intended for workers that must wait for a chief to run initialization.
  """
  all_variables = K._get_variables(K.get_graph())  # pylint: disable=protected-access
  candidate_vars = []
  for v in all_variables:
    # Skip variables already confirmed initialized on a previous call.
    if not getattr(v, '_keras_initialized', False):
      candidate_vars.append(v)

  if not candidate_vars:
    return

  while True:
    is_initialized = session.run(
        [variables.is_variable_initialized(v) for v in candidate_vars])
    uninitialized_vars = []
    for flag, v in zip(is_initialized, candidate_vars):
      if not flag:
        uninitialized_vars.append(v)
      # NOTE(review): the marker is set unconditionally, even for variables
      # still uninitialized in this pass; the loop below keeps polling the
      # full candidate list until none remain uninitialized, but the flag
      # ordering looks suspicious — confirm this is intentional.
      v._keras_initialized = True  # pylint: disable=protected-access
    if not uninitialized_vars:
      break
def init_restore_or_wait_for_variables():
  """Initialize or restore variables or wait for variables to be initialized."""
  session = K._get_session()  # pylint: disable=protected-access
  worker_context = dc_context.get_current_worker_context()
  if worker_context and not worker_context.experimental_should_init:
    # Another worker is responsible for initialization; block until it's done.
    _wait_for_variable_initialization(session)
  else:
    # TODO(yuefengz): if checkpoints exit, restore from checkpoint.
    K._initialize_variables(session)  # pylint: disable=protected-access
def configure_and_create_session(distribution_strategy):
  """Configure session config and create a session with it.

  The created session is installed as the Keras backend session. Three cases:
  TPU strategies get a session targeting the TPU master; under a distribute
  coordinator the coordinator's session config and master target are used;
  otherwise a plain local session is created.
  """
  # TODO(priyag): Throw error if a session already exists.
  session_config = K.get_default_session_config()

  if is_tpu_strategy(distribution_strategy):
    # TODO(priyag, yuefengz): Remove this workaround when Distribute
    # Coordinator is integrated with keras and we can create a session from
    # there.
    distribution_strategy.configure(session_config)
    master = distribution_strategy.extended._tpu_cluster_resolver.master()  # pylint: disable=protected-access
    session = session_module.Session(config=session_config, target=master)
  else:
    worker_context = dc_context.get_current_worker_context()
    if worker_context:
      dc_session_config = worker_context.session_config
      # Merge the default session config to the one from distribute coordinator,
      # which is fine for now since they don't have conflicting configurations.
      dc_session_config.MergeFrom(session_config)
      session = session_module.Session(
          config=dc_session_config, target=worker_context.master_target)
    else:
      distribution_strategy.configure(session_config)
      session = session_module.Session(config=session_config)

  K.set_session(session)
def validate_inputs(x, y, distribution_strategy):
  """Validate inputs when using DistributionStrategy.

  Args:
    x: Model Inputs.
    y: Model Targets.
    distribution_strategy: The DistributionStrategy with which the model is
        compiled.

  Raises:
    ValueError: if input is not a Dataset or a numpy array(when we use
      MirroredStrategy).
  """
  # Dicts and raw Iterators are rejected outright; only Datasets or numpy
  # arrays are accepted as model input under a DistributionStrategy.
  if isinstance(x, dict) or isinstance(y, dict):
    raise ValueError('`DistributionStrategy` does not support inputs of type '
                     'dict. You must pass a `tf.data.Dataset` object or a '
                     'numpy array as input.')

  if (isinstance(x, iterator_ops.Iterator) or
      isinstance(y, iterator_ops.Iterator)):
    raise ValueError('`DistributionStrategy` does not support inputs of type '
                     'Iterator. You must pass a `tf.data.Dataset` object or a '
                     'numpy array as input.')

  if is_tpu_strategy(distribution_strategy):
    # TPU compilation requires static shapes for every dataset output.
    for dataset in [x, y]:
      if not isinstance(dataset, dataset_ops.DatasetV2):
        continue
      for shape in nest.flatten(dataset.output_shapes):
        if not shape.is_fully_defined():
          raise ValueError(
              'Using TPUs currently requires fully defined shapes. Either use '
              'set_shape() on the input tensors or use '
              'dataset.batch(..., drop_remainder=True).'
              'Found unknown shape {} in input {}.'.format(shape, dataset))
# TODO(b/118776054): Currently we support global batch size for TPUStrategy and
# core MirroredStrategy only. Remove this check when contrib MirroredStrategy is
# no longer needed.
def global_batch_size_supported(distribution_strategy):
  """Returns whether the strategy works with global (not per-replica) batches."""
  extended = distribution_strategy.extended
  return extended._global_batch_size  # pylint: disable=protected-access
# TODO(sourabhbajaj): Remove this once we use the same API for all strategies.
def is_tpu_strategy(strategy):
  """Returns True iff `strategy` is a TPUStrategy (matched by class name)."""
  if strategy is None:
    return False
  return strategy.__class__.__name__ == 'TPUStrategy'
def get_input_params(distribution_strategy, first_x_value, steps, batch_size,
                     is_training=False):
  """Calculate the number of batches and steps/steps_per_epoch.

  Args:
    distribution_strategy: The DistributionStrategy used to compile the model.
    first_x_value: This is the first input numpy array that is passed in as the
      model input.
    steps: The specified number of steps.
    batch_size: The specified batch_size.
    is_training: Boolean to relax the constraints on consuming all the training
      samples to keep compatibility till we support partial batches.

  Returns:
    steps: The steps or steps_per_epoch argument depending on if a user is
        calling `fit`, `evaluate` or `predict`. If the is_training flag is set
        we don't require the number of samples to be used completely.
    batch_size: The batch size to be used in model iterations.

  Raises:
    ValueError: If the number of batches or steps evaluates to 0.
  """
  num_samples = first_x_value.shape[0]
  # TODO(b/118776054): Use global batch size for Keras/DS support.
  # Currently this is only supported in TPUStrategy and CoreMirroredStrategy.
  use_per_replica_batch = not global_batch_size_supported(
      distribution_strategy)

  if steps is None:
    if batch_size is None:
      # If neither the batch size or number of steps are set. We choose the
      # global batch size as the minimum of number of samples and 32. 32 is
      # chosen to provide backward compatibility.
      global_batch_size = min(num_samples, 32)
    else:
      # If the user provided the batch size we need to handle the case
      # between different strategies that use the global/per-replica batch size
      global_batch_size = batch_size
      if use_per_replica_batch:
        global_batch_size *= distribution_strategy.num_replicas_in_sync
    # Outside training, partial final batches are not supported, so the
    # sample count must divide evenly.
    if not is_training and num_samples % global_batch_size:
      raise ValueError('The number of samples %s is not divisible by '
                       'batch size %s.' % (num_samples, global_batch_size))
    steps = num_samples // global_batch_size
  else:
    if batch_size is None:
      # We calculate the batch size based on the number of steps specified
      if num_samples % steps:
        raise ValueError('The number of samples %s is not divisible by '
                         'steps %s. Please change the number of steps to a '
                         'value that can consume all the samples' % (
                             num_samples, steps))
      global_batch_size = num_samples // steps
    else:
      # If the user provided the batch size we need to handle the case
      # between different strategies that use the global/per-replica batch size
      global_batch_size = batch_size
      if use_per_replica_batch:
        global_batch_size *= distribution_strategy.num_replicas_in_sync

      # When both steps and batch_size are given, there must be enough
      # samples to cover steps full global batches.
      if num_samples < (global_batch_size * steps):
        raise ValueError('Number of samples %s is less than samples required '
                         'for specified batch_size %s and steps %s' % (
                             num_samples, global_batch_size, steps))

  # We need to return the per replica or global batch size based on the strategy
  if use_per_replica_batch:
    if global_batch_size % distribution_strategy.num_replicas_in_sync:
      raise ValueError(
          'The batch size (%s) could not be sharded evenly across the sync '
          'replicas (%s) in the distribution strategy.' % (
              global_batch_size, distribution_strategy.num_replicas_in_sync))
    batch_size = global_batch_size // distribution_strategy.num_replicas_in_sync
  else:
    batch_size = global_batch_size

  return steps, batch_size
def get_batch_dimension(iterator):
  """Returns the batch (first) dimension of the iterator's outputs, or None."""
  # The batch size is assumed identical across all outputs, so inspecting the
  # first flattened shape is sufficient.
  first_shape = nest.flatten(iterator.output_shapes)[0]
  dims = first_shape.dims
  return dims[0] if dims else None
def list_to_tuple(maybe_list):
  """Datasets treat lists specially, so switch them to tuples."""
  return tuple(maybe_list) if isinstance(maybe_list, list) else maybe_list
def get_iterator(dataset, distribution_strategy):
  """Creates a distributed dataset iterator, initializing it in graph mode."""
  with distribution_strategy.scope():
    iterator = distribution_strategy.make_dataset_iterator(dataset)
  initializer = iterator.initialize()
  if not context.executing_eagerly():
    # Graph mode: the initializer op must be run explicitly in the session.
    K.get_session().run(initializer)
  return iterator
def _get_input_from_iterator(iterator, model):
  """Get elements from the iterator and verify the input shape and type."""
  next_element = iterator.get_next()

  # The number of flattened elements determines whether the dataset yields
  # inputs only, (inputs, targets), or (inputs, targets, sample_weights).
  num_elements = len(nest.flatten(next_element))
  if num_elements == len(model.inputs):
    x, y, sample_weights = next_element, None, None
  elif num_elements == len(model.inputs) + len(model.outputs):
    x, y = next_element
    sample_weights = None
  else:
    x, y, sample_weights = next_element

  # Validate that all the elements in x and y are of the same type and shape.
  validate_distributed_dataset_inputs(
      model._distribution_strategy, x, y, sample_weights)
  return x, y, sample_weights
def _prepare_feed_values(model, inputs, targets, sample_weights, mode):
  """Prepare feed values to the model execution function.

  Arguments:
    model: Model to prepare feed values for.
    inputs: List or dict of model inputs.
    targets: Optional list of model targets.
    sample_weights: Optional list of sample weight arrays.
    mode: One of ModeKeys.TRAIN/ModeKeys.TEST/ModeKeys.PREDICT.

  Returns:
    Feed values for the model in the given mode, ordered as
    inputs + targets + sample_weights (+ learning-phase flag for TRAIN).
  """
  strategy = model._distribution_strategy
  # `inputs` is actually an iterator here; pull a concrete element from it.
  inputs, targets, sample_weights = _get_input_from_iterator(inputs, model)
  inputs = flatten_perdevice_values(strategy, inputs)
  targets = flatten_perdevice_values(strategy, targets)
  if mode == ModeKeys.PREDICT:
    sample_weights = []
    targets = []
  else:
    # One None placeholder per model output per replica; the incoming
    # sample_weights argument is not fed through this path.
    sample_weights = [
        None for _ in range(len(model.outputs) * strategy.num_replicas_in_sync)
    ]
  ins = inputs + targets + sample_weights
  # Append the learning-phase flag only when it is a symbolic tensor (an int
  # value means the phase is fixed and needs no feed).
  if mode == ModeKeys.TRAIN and not isinstance(K.symbolic_learning_phase(),
                                               int):
    ins += [True]
  return ins
def _custom_compile_for_predict(model):
"""Custom compile for TPU predict mode."""
if not model.built:
# Model is not compilable because it does not know its number of inputs
# and outputs, nor their shapes and names. We will compile after the first
# time the model gets called on training data.
return
model._is_compiled = True
model.total_loss = None
model._fit_function = None
model._eval_function = None
model.train_function = None
model.test_function = None
model.predict_function = None
def _build_network_on_replica(model, inputs=None, targets=None, mode=None):
  """Build an updated model on replicas.

  We create a new Keras model while sharing the variables from the old graph.
  Building a new sub-graph is required since the original keras model creates
  placeholders for the input and the output that are not accessible till we
  call iterator.get_next() inside the step_fn for `fit`/`evaluate`/`predict`.

  The sharing of weights and layers between the old and the new model gaurantee
  that we're using Strategy variables and any updates on either model are
  reflected correctly in callbacks and loop iterations.

  We need to make sure we share the optimizers between the old and the new model
  as well so that optimizer state is not lost if the user is running fit
  multiple times.

  Args:
    model: Model to be replicated across Replicas
    inputs: Input variables to be passed to the model
    targets: Target tensor to be passed to model.compile
    mode: Which of fit/eval/predict is building the distributed network

  Returns:
    A new model with shared layers with the old model.
  """
  # Need to do imports here since we run into a circular dependency error.
  from tensorflow.python.keras import models  # pylint: disable=g-import-not-at-top
  from tensorflow.python.keras.engine import sequential  # pylint: disable=g-import-not-at-top

  # We rely on the internal methods to avoid having share_weights weights in the
  # public API.
  if isinstance(model, sequential.Sequential):
    updated_model = models._clone_sequential_model(model, input_tensors=inputs,
                                                   share_weights=True)
  else:
    updated_model = models._clone_functional_model(model, input_tensors=inputs,
                                                   share_weights=True)

  # Recast all low precision outputs back to float32 since we only casted
  # the inputs to bfloat16 and not targets. This is done so that we can preserve
  # precision when calculating the loss value.
  def _upcast_low_precision_outputs(output):
    if output.dtype == dtypes.bfloat16:
      return math_ops.cast(output, dtypes.float32)
    else:
      return output
  updated_model.outputs = [_upcast_low_precision_outputs(o)
                           for o in updated_model.outputs]

  if isinstance(targets, tuple):
    targets = nest.flatten(targets)

  # Predict mode skips a real compile; see _custom_compile_for_predict.
  if mode == ModeKeys.PREDICT:
    _custom_compile_for_predict(updated_model)
  else:
    updated_model.compile(
        model.optimizer,
        model.loss,
        metrics=metrics_module.clone_metrics(model._compile_metrics),
        loss_weights=model.loss_weights,
        sample_weight_mode=model.sample_weight_mode,
        weighted_metrics=metrics_module.clone_metrics(
            model._compile_weighted_metrics),
        target_tensors=targets)
  return updated_model
def _build_distributed_network(model, strategy, inputs=None, targets=None,
                               mode=None):
  """Create a cloned model on each replica."""
  with K.get_graph().as_default(), strategy.scope():
    distributed_model = strategy.extended.call_for_each_replica(
        _build_network_on_replica,
        args=(model, inputs, targets, mode))
  # Cache the replicated model on a mode-specific attribute (identity
  # comparison on the mode key, matching the rest of this module).
  if mode is ModeKeys.TRAIN:
    attr = '_distributed_model_train'
  elif mode is ModeKeys.TEST:
    attr = '_distributed_model_test'
  elif mode is ModeKeys.PREDICT:
    attr = '_distributed_model_predict'
  else:
    attr = '_distributed_model'
  setattr(model, attr, distributed_model)
def _clone_and_build_model(model, inputs=None, targets=None, mode=None):
  """Clone and build the given keras_model.

  Unlike `_build_network_on_replica`, this creates a full clone (no shared
  layers) and re-creates the optimizer from its config, except for wrapped
  `TFOptimizer` instances which are reused as-is.

  Args:
    model: Model to be cloned and compiled.
    inputs: Optional input tensors to build the cloned model on.
    targets: Optional target tensors passed to compile.
    mode: One of ModeKeys.TRAIN/TEST/PREDICT.

  Returns:
    The cloned, compiled model.
  """
  # We need to set the import here since we run into a circular dependency
  # error.
  from tensorflow.python.keras import models  # pylint: disable=g-import-not-at-top
  cloned_model = models.clone_model(model, input_tensors=inputs)

  # Compile and build model.
  if isinstance(model.optimizer, optimizers.TFOptimizer):
    optimizer = model.optimizer
  else:
    optimizer_config = model.optimizer.get_config()
    optimizer = model.optimizer.__class__.from_config(optimizer_config)

  # Recast all low precision outputs back to float32 since we only casted
  # the inputs to bfloat16 and not targets. This is done so that we can preserve
  # precision when calculating the loss value.
  def _upcast_low_precision_outputs(output):
    if output.dtype == dtypes.bfloat16:
      return math_ops.cast(output, dtypes.float32)
    else:
      return output
  cloned_model.outputs = [_upcast_low_precision_outputs(o)
                          for o in cloned_model.outputs]

  if isinstance(targets, tuple):
    targets = nest.flatten(targets)
  # Predict mode skips a real compile; see _custom_compile_for_predict.
  if mode == ModeKeys.PREDICT:
    _custom_compile_for_predict(cloned_model)
  else:
    cloned_model.compile(
        optimizer,
        model.loss,
        metrics=metrics_module.clone_metrics(model._compile_metrics),
        loss_weights=model.loss_weights,
        sample_weight_mode=model.sample_weight_mode,
        weighted_metrics=metrics_module.clone_metrics(
            model._compile_weighted_metrics),
        target_tensors=targets)
  return cloned_model
def clone_model_on_replicas(model, strategy, make_callback_model=False,
                            inputs=None, targets=None, mode=None):
  """Create a cloned model on each replica."""
  with K.get_graph().as_default(), strategy.scope():
    distributed_model = strategy.extended.call_for_each_replica(
        _clone_and_build_model, args=(model, inputs, targets, mode))
  # Cache the replicated model on a mode-specific attribute (identity
  # comparison on the mode key, matching the rest of this module).
  if mode is ModeKeys.TRAIN:
    attr = '_distributed_model_train'
  elif mode is ModeKeys.TEST:
    attr = '_distributed_model_test'
  elif mode is ModeKeys.PREDICT:
    attr = '_distributed_model_predict'
  else:
    attr = '_distributed_model'
  setattr(model, attr, distributed_model)
  if make_callback_model:
    model._make_callback_model(distributed_model)
def _make_execution_function(model, mode):
"""Makes function to run one step of distributed model execution."""
if context.executing_eagerly():
return _make_eager_execution_function(model, mode)
strategy = model._distribution_strategy
if not model._distributed_model:
if model._compile_distribution:
clone_model_on_replicas(
model, strategy, make_callback_model=(mode == ModeKeys.TRAIN))
else:
_build_distributed_network(model, strategy)
def _per_device_function(model):
f = model._make_execution_function(mode)
return (f.inputs, f.outputs, f.updates_op, f.session_kwargs)
with strategy.scope():
# Create train ops on each of the devices when we call
# `_per_device_fit_function`.
(grouped_inputs, grouped_outputs, grouped_updates,
grouped_session_args) = strategy.extended.call_for_each_replica(
_per_device_function, args=(model._distributed_model,))
if mode == ModeKeys.TRAIN:
# Initialize the variables in the replicated model. This is necessary for
# multi-worker training because on some workers, initialization is not
# needed. This method does initialization or waiting for initialization
# according to the context object of distribute coordinator.
init_restore_or_wait_for_variables()
# Unwrap all the per device values returned from `call_for_each_replica`.
# Unwrapping per device values gives you a list of values that can be
# used to construct a new train function that is composed of update ops on
# all the devices over which the model is distributed.
(all_inputs, all_outputs, all_updates, all_session_args) = unwrap_values(
strategy,
grouped_inputs,
grouped_outputs,
grouped_updates,
grouped_session_args,
with_loss_tensor=(mode != ModeKeys.PREDICT))
return K.function(
all_inputs,
all_outputs,
updates=all_updates,
name='distributed_{}_function'.format(mode),
**all_session_args)
def _make_eager_execution_function(model, mode):
"""Makes function to run one step of distributed model eager execution."""
strategy = model._distribution_strategy
if not model._distributed_model:
if model._compile_distribution:
clone_model_on_replicas(
model, strategy, make_callback_model=(mode == ModeKeys.TRAIN))
else:
_build_distributed_network(model, strategy)
def _per_device_function(model):
f = model._make_execution_function(mode)
return (f.inputs, f.outputs)
# NOTE(priyag): Try creating a new FuncGraph within DS scope instead of using
# the global one.
with K.get_graph().as_default(), strategy.scope():
# Create train ops on each of the devices when we call
# `_per_device_fit_function`.
(grouped_inputs, grouped_outputs) = strategy.call_for_each_replica(
_per_device_function, args=(model._distributed_model,))
# Unwrap all the per device values returned from `call_for_each_replica`.
# Unwrapping per device values gives you a list of values that can be
# used to construct a new train function that is composed of inptus/outputs
# on all the devices over which the model is distributed.
(all_inputs, all_outputs, _, _) = unwrap_values(
strategy,
grouped_inputs,
grouped_outputs,
with_loss_tensor=(mode != ModeKeys.PREDICT))
return K.function(
all_inputs,
all_outputs,
name='eager_distributed_{}_function'.format(mode))
def _copy_weights_to_distributed_model(original_model, grouped_model):
"""Copies weights from original model to distributed models."""
strategy = original_model._distribution_strategy
if strategy:
# Copy the weights from the original model to each of the replicated
# models.
orig_model_weights = original_model.get_weights()
distributed_model = strategy.unwrap(grouped_model)[0]
set_weights(strategy, distributed_model, orig_model_weights)
def _copy_weights_to_original_model(model, grouped_model, mode):
"""Copies weights from first distributed model back to original model."""
if model._distribution_strategy and mode == ModeKeys.TRAIN:
updated_weights = model._distribution_strategy.unwrap(
grouped_model)[0].get_weights()
model.set_weights(updated_weights)
def _per_device_aggregate_batch(batch_outs, model, mode):
"""Aggregates the per-device batch-level outputs from a distributed step."""
if model._distribution_strategy is not None and mode == ModeKeys.PREDICT:
total_batch_outs = []
for i in range(len(model.outputs)):
num_replicas = model._distribution_strategy.num_replicas_in_sync
nested_outs = batch_outs[i * num_replicas:i * num_replicas + num_replicas]
total_batch_outs.append(np.concatenate(nest.flatten(nested_outs)))
return total_batch_outs
return batch_outs
def _reset_metrics(model, distributed_model=None):
if model._distribution_strategy:
distributed_model = (
distributed_model or
model._distribution_strategy.unwrap(model._distributed_model)[0])
distributed_model.reset_metrics()
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
8188758945628ec846274bd177af33c4087ef8a3 | d7ad696cd1b550bb41d20f87b83c984ec7f19aa7 | /atcoder/python/_old/beginner/contest/201_300/212/b_Weak_Password.py | 98882642828e1ac3790b880ab5ca4f94c7213b6a | [] | no_license | mida-hub/hobby | 2947d10da7964d945e63d57b549c1dcb90ef7305 | 6e6f381e59fc2b0429fab36474d867aa3855af77 | refs/heads/master | 2022-12-21T23:33:14.857931 | 2022-12-19T16:30:34 | 2022-12-19T16:30:34 | 147,890,434 | 0 | 0 | null | 2021-03-20T04:31:58 | 2018-09-08T01:31:59 | Jupyter Notebook | UTF-8 | Python | false | false | 236 | py | x = input()
x1, x2, x3, x4 = map(int, list(x))
# print(x1, x2, x3, x4)
if x1 == x2 == x3 == x4:
print('Weak')
elif x4 == (x3+1)%10 and \
x3 == (x2+1)%10 and \
x2 == (x1+1)%10:
print('Weak')
else:
print('Strong')
| [
"rusuden0106@gmail.com"
] | rusuden0106@gmail.com |
44d9017c2f7d723b7a212c2c0c90c9ecd07f3814 | 1a87ac9522591f25b03e6912ba3af3cca115abae | /inventory/migrations/0008_auto_20210323_1552.py | 9f861fbe025a9c3a0b6f37ed307a6f83f8ec55b3 | [
"MIT"
] | permissive | jyywong/InventoryMS | c67fdb0a051be5d136d9509e63b7fc0aeadcc324 | 9aac1324742730ce980e638f2156ece9eb44a593 | refs/heads/master | 2023-04-01T15:38:44.448813 | 2021-04-05T19:59:45 | 2021-04-05T19:59:45 | 350,162,598 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | # Generated by Django 2.2.10 on 2021-03-23 19:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('inventory', '0007_auto_20210323_1528'),
]
operations = [
migrations.AlterField(
model_name='item',
name='bar_code',
field=models.BigIntegerField(blank=True, null=True),
),
]
| [
"wong.jonathan1@gmail.com"
] | wong.jonathan1@gmail.com |
ccbe1eb7db398e37dcf02cb0576aa88a28663115 | 45185a2c65924ed01cdc222ccc42e71391e5a1f4 | /tt/tests/utils.py | 4baa12a29ca5658108cc95656aef682479cb6851 | [
"MIT"
] | permissive | parsamz/tt | 5cb0db124fd9ed5ec3fe24e0e807c72d33f9aebb | 0d2a286d46cfe1ca01b340d710ba5a1921a9b66e | refs/heads/master | 2020-04-01T13:47:37.380990 | 2016-05-07T01:41:42 | 2016-05-07T01:41:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,156 | py | """Utility methods/classes used in the testing pipeline.
"""
import sys
import unittest
from contextlib import contextmanager
from tt.core import main
if sys.version_info < (3, 0):
from io import BytesIO as StringIO
else:
from io import StringIO
# === stdout/stderr interaction ===============================================
@contextmanager
def redirected_stream(stream_name):
orig_stream = getattr(sys, stream_name)
setattr(sys, stream_name, StringIO())
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stream)
# === Generalized test cases ==================================================
class FunctionalTestAssertions(object):
pass
class FunctionalTestCase(unittest.TestCase):
def functional_test_helper(self, cl_args=[],
expected_stdout='', expected_stderr=''):
with redirected_stream('stdout') as _stdout:
with redirected_stream('stderr') as _stderr:
main(args=cl_args)
self.assertEqual(expected_stdout, _stdout.getvalue())
self.assertEqual(expected_stderr, _stderr.getvalue())
| [
"welch18@vt.edu"
] | welch18@vt.edu |
003c4f1f23e0854df92932089c015e72820a1d9e | 62248ce4ce8f11d24c089b54d4c02bdec51df565 | /Stars_Web/settings.py | e85e844b2bf965f39af337d25c11917f7278db24 | [] | no_license | challeger/Starts_Web | 0b7231bbdf0e6f6350c928e13f9b67c6f1c4af84 | 1b372013706f8d082e9feab5c73fd690a10b7286 | refs/heads/master | 2022-12-24T21:32:31.231838 | 2020-09-30T05:04:21 | 2020-09-30T05:04:21 | 299,798,716 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,993 | py | """
Django settings for Stars_Web project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%a^a4*!h^d*_%9&bspa(vu^^uawil9uzm62c0zu_19otx0+02g'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'Users',
'rest_framework',
'rest_framework_jwt',
'django_filters',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders', # 解决跨域问题
]
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_ALLOW_ALL = True
# 允许所有的请求头
CORS_ALLOW_HEADERS = ('*', )
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Stars_Web.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Stars_Web.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stars_web',
'USER': 'root',
'PASSWORD': 'T85568397',
'HOST': '127.0.0.1',
'PORT': '3306',
'CONN_MAX_AGE': 5 * 60,
'OPTIONS': {
'charset': 'utf8mb4'
}
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, STATIC_URL)
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'), )
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.qq.com'
EMAIL_PORT = 465
EMAIL_USE_SSL = True
EMAIL_HOST_USER = '799613500@qq.com'
EMAIL_HOST_PASSWORD = 'pqtssodfmxysbcdh'
EMAIL_FROM = '群星小说网<799613500@qq.com>'
# 验证码过期时间为十分钟
EMAIL_EXP_DELTA = 600
| [
"799613500@qq.com"
] | 799613500@qq.com |
f3ba2d5ebe159e659d05d9840939a38c42042d12 | 70e81f00b600057464fdccaef2d82f238c8f08dc | /apps/utils/yunpian.py | 89e5b0098e2c0e410c73f6bff53474e77df75b25 | [] | no_license | wujialaoer/shop | 1bbd905369260ce1df9822027649655b7a909657 | 5fc8b02ba63cea96172f30520b553dab3ec5fe8a | refs/heads/master | 2020-05-07T09:06:49.935942 | 2019-04-16T09:03:38 | 2019-04-16T09:03:38 | 180,359,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | import json
import requests
class YunPian(object):
def __init__(self, api_key):
self.api_key = api_key
self.single_send_url = "https://sms.yunpian.com/v2/sms/single_send.json"
def send_sms(self, code, mobile):
parmas = {
"apikey": self.api_key,
"mobile": mobile,
"text": "您的验证码是{code}。如非本人操作,请忽略本短信".format(code=code)
}
response = requests.post(self.single_send_url, data=parmas)
re_dict = json.loads(response.text)
return re_dict
if __name__ == "__main__":
yun_pian = YunPian("")
yun_pian.send_sms("2017", "")
| [
"624334922@qq.com"
] | 624334922@qq.com |
76eb91fe6bfb0a872f54e9b9920fc6ef2255dffa | 43b72b9e4d81ffab6e4d79f324034cbb4b7413a3 | /challenge/NOTDONEsmallestWindowHavingOneInAnother.py | 07d1a5408b509bd0143be6df6ca2f1db94677de3 | [] | no_license | devathul/prepCode | bfe0ad44f68c8c9d4a48f76dde9a1bb8af165373 | eb0751bda3066ac2f1a2890cf63b28ee63a6dd89 | refs/heads/master | 2023-01-08T20:59:52.214333 | 2020-11-01T00:13:55 | 2020-11-01T00:13:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | """
smallestWindowHavingOneInAnother
Given a string S and text T. Output the smallest window in the string S having all characters of the text T. Both the string S and text T contains lowercase english alphabets
"""
no_of_chars=256
def findSubString(string,pat):
len1=len(string)
len2=len(pat)
if len1<len2:
print("No such window")
return None
| [
"42695433+bsofcs@users.noreply.github.com"
] | 42695433+bsofcs@users.noreply.github.com |
3a5fa4638e3ee7de74813129b2b4c3231d061a33 | b4fdd022b45751cfaf2a8770152bf7ca6aeb2b9a | /putcall/formulas/interest_rate_options/__init__.py | 6a8b9a40e055ef017c195e8815cf99e64e93500c | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | fagan2888/putcall | 7547727b48e52f8a5ef325f02952778a93d8acb4 | a5984b52cb7bae33cfd48490439acea4844af0f9 | refs/heads/master | 2021-03-16T21:22:16.024168 | 2019-09-18T13:03:41 | 2019-09-18T13:03:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | # -*- coding: utf-8 -*-
# putcall
# -------
# Collection of classical option pricing formulas.
#
# Author: sonntagsgesicht, based on a fork of Deutsche Postbank [pbrisk]
# Version: 0.2, copyright Wednesday, 18 September 2019
# Website: https://github.com/sonntagsgesicht/putcall
# License: Apache License 2.0 (see LICENSE file)
from .black76 import *
from .bachelier import *
from .hullwhite import *
from .sabr import *
| [
"sonntagsgesicht@icloud.com"
] | sonntagsgesicht@icloud.com |
2ed766c1a90f661f262b30118b23197f0eafba1e | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYGluGluToHToTauTau_M-160_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467578/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_32/run_cfg.py | d66375515b236b61c9c96404622b8782b49cb4d5 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,181 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYGluGluToHToTauTau_M-160_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467578/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-160_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_95_1_MM2.root',
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-160_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_96_1_vAc.root',
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-160_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_97_1_PJw.root')
)
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
c2015eb031cf21250cdc1a1d91c26336fdb072a9 | 27aaadf435779c29012233cb1dacf27bd9dd0d0f | /bailian-20230601/alibabacloud_bailian20230601/models.py | 4066d5da76d1e451f94d8218582e22a4663151de | [
"Apache-2.0"
] | permissive | aliyun/alibabacloud-python-sdk | afadedb09db5ba6c2bc6b046732b2a6dc215f004 | e02f34e07a7f05e898a492c212598a348d903739 | refs/heads/master | 2023-08-22T20:26:44.695288 | 2023-08-22T12:27:39 | 2023-08-22T12:27:39 | 288,972,087 | 43 | 29 | null | 2022-09-26T09:21:19 | 2020-08-20T10:08:11 | Python | UTF-8 | Python | false | false | 58,692 | py | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import Dict, List
class CancelFineTuneJobRequest(TeaModel):
    """Request parameters for cancelling a fine-tune job."""

    def __init__(
        self,
        agent_key: str = None,
        job_id: str = None,
    ):
        # Wire field 'AgentKey'.
        self.agent_key = agent_key
        # Wire field 'JobId': the fine-tune job to cancel.
        self.job_id = job_id

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a request dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('AgentKey', self.agent_key),
            ('JobId', self.job_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        for attr, key in (('agent_key', 'AgentKey'), ('job_id', 'JobId')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CancelFineTuneJobResponseBody(TeaModel):
    """Response body returned after cancelling a fine-tune job."""

    def __init__(
        self,
        job_id: str = None,
        request_id: str = None,
    ):
        # Wire field 'JobId'.
        self.job_id = job_id
        # Wire field 'RequestId': server-side request identifier.
        self.request_id = request_id

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('JobId', self.job_id),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        for attr, key in (('job_id', 'JobId'), ('request_id', 'RequestId')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CancelFineTuneJobResponse(TeaModel):
    """Full API response: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CancelFineTuneJobResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """Require headers, status code and body; then validate the body model."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, recursing into the body model."""
        data = super().to_map()
        if data is not None:
            return data
        data = {}
        if self.headers is not None:
            data['headers'] = self.headers
        if self.status_code is not None:
            data['statusCode'] = self.status_code
        if self.body is not None:
            data['body'] = self.body.to_map()
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict, deserializing the nested body model."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CancelFineTuneJobResponseBody().from_map(m['body'])
        return self
class CreateFineTuneJobRequestHyperParameters(TeaModel):
    """Training hyper-parameters for a fine-tune job."""

    def __init__(
        self,
        batch_size: int = None,
        epochs: int = None,
        learning_rate: str = None,
        prompt_loss_weight: float = None,
    ):
        # Wire field 'BatchSize'.
        self.batch_size = batch_size
        # Wire field 'Epochs'.
        self.epochs = epochs
        # Wire field 'LearningRate' (string-typed on the wire).
        self.learning_rate = learning_rate
        # Wire field 'PromptLossWeight'.
        self.prompt_loss_weight = prompt_loss_weight

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('BatchSize', self.batch_size),
            ('Epochs', self.epochs),
            ('LearningRate', self.learning_rate),
            ('PromptLossWeight', self.prompt_loss_weight),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        mapping = (
            ('batch_size', 'BatchSize'),
            ('epochs', 'Epochs'),
            ('learning_rate', 'LearningRate'),
            ('prompt_loss_weight', 'PromptLossWeight'),
        )
        for attr, key in mapping:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateFineTuneJobRequest(TeaModel):
    """Request parameters for creating a fine-tune job."""

    def __init__(
        self,
        agent_key: str = None,
        base_model: str = None,
        hyper_parameters: CreateFineTuneJobRequestHyperParameters = None,
        model_name: str = None,
        training_files: List[str] = None,
        training_type: str = None,
        validation_files: List[str] = None,
    ):
        # Wire field 'AgentKey'.
        self.agent_key = agent_key
        # Wire field 'BaseModel': base model to fine-tune from.
        self.base_model = base_model
        # Wire field 'HyperParameters': nested hyper-parameter model.
        self.hyper_parameters = hyper_parameters
        # Wire field 'ModelName'.
        self.model_name = model_name
        # Wire field 'TrainingFiles'.
        self.training_files = training_files
        # Wire field 'TrainingType'.
        self.training_type = training_type
        # Wire field 'ValidationFiles'.
        self.validation_files = validation_files

    def validate(self):
        """Validate the nested hyper-parameters model when present."""
        if self.hyper_parameters:
            self.hyper_parameters.validate()

    def to_map(self):
        """Serialize to a dict, recursing into the hyper-parameters model."""
        data = super().to_map()
        if data is not None:
            return data
        data = {}
        if self.agent_key is not None:
            data['AgentKey'] = self.agent_key
        if self.base_model is not None:
            data['BaseModel'] = self.base_model
        if self.hyper_parameters is not None:
            data['HyperParameters'] = self.hyper_parameters.to_map()
        if self.model_name is not None:
            data['ModelName'] = self.model_name
        if self.training_files is not None:
            data['TrainingFiles'] = self.training_files
        if self.training_type is not None:
            data['TrainingType'] = self.training_type
        if self.validation_files is not None:
            data['ValidationFiles'] = self.validation_files
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict, deserializing the nested model."""
        m = m or {}
        if m.get('AgentKey') is not None:
            self.agent_key = m.get('AgentKey')
        if m.get('BaseModel') is not None:
            self.base_model = m.get('BaseModel')
        if m.get('HyperParameters') is not None:
            self.hyper_parameters = CreateFineTuneJobRequestHyperParameters().from_map(m['HyperParameters'])
        if m.get('ModelName') is not None:
            self.model_name = m.get('ModelName')
        if m.get('TrainingFiles') is not None:
            self.training_files = m.get('TrainingFiles')
        if m.get('TrainingType') is not None:
            self.training_type = m.get('TrainingType')
        if m.get('ValidationFiles') is not None:
            self.validation_files = m.get('ValidationFiles')
        return self
class CreateFineTuneJobShrinkRequest(TeaModel):
    """Shrunk (pre-serialized) variant of CreateFineTuneJobRequest.

    Complex fields are carried as JSON strings instead of nested models.
    """

    def __init__(
        self,
        agent_key: str = None,
        base_model: str = None,
        hyper_parameters_shrink: str = None,
        model_name: str = None,
        training_files_shrink: str = None,
        training_type: str = None,
        validation_files_shrink: str = None,
    ):
        # Wire field 'AgentKey'.
        self.agent_key = agent_key
        # Wire field 'BaseModel'.
        self.base_model = base_model
        # Wire field 'HyperParameters', already serialized to a string.
        self.hyper_parameters_shrink = hyper_parameters_shrink
        # Wire field 'ModelName'.
        self.model_name = model_name
        # Wire field 'TrainingFiles', already serialized to a string.
        self.training_files_shrink = training_files_shrink
        # Wire field 'TrainingType'.
        self.training_type = training_type
        # Wire field 'ValidationFiles', already serialized to a string.
        self.validation_files_shrink = validation_files_shrink

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('AgentKey', self.agent_key),
            ('BaseModel', self.base_model),
            ('HyperParameters', self.hyper_parameters_shrink),
            ('ModelName', self.model_name),
            ('TrainingFiles', self.training_files_shrink),
            ('TrainingType', self.training_type),
            ('ValidationFiles', self.validation_files_shrink),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        mapping = (
            ('agent_key', 'AgentKey'),
            ('base_model', 'BaseModel'),
            ('hyper_parameters_shrink', 'HyperParameters'),
            ('model_name', 'ModelName'),
            ('training_files_shrink', 'TrainingFiles'),
            ('training_type', 'TrainingType'),
            ('validation_files_shrink', 'ValidationFiles'),
        )
        for attr, key in mapping:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateFineTuneJobResponseBody(TeaModel):
    """Response body returned after creating a fine-tune job."""

    def __init__(
        self,
        job_id: str = None,
        request_id: str = None,
        status: str = None,
    ):
        # Wire field 'JobId'.
        self.job_id = job_id
        # Wire field 'RequestId'.
        self.request_id = request_id
        # Wire field 'Status'.
        self.status = status

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('JobId', self.job_id),
            ('RequestId', self.request_id),
            ('Status', self.status),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        mapping = (
            ('job_id', 'JobId'),
            ('request_id', 'RequestId'),
            ('status', 'Status'),
        )
        for attr, key in mapping:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateFineTuneJobResponse(TeaModel):
    """Full API response: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateFineTuneJobResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """Require headers, status code and body; then validate the body model."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, recursing into the body model."""
        data = super().to_map()
        if data is not None:
            return data
        data = {}
        if self.headers is not None:
            data['headers'] = self.headers
        if self.status_code is not None:
            data['statusCode'] = self.status_code
        if self.body is not None:
            data['body'] = self.body.to_map()
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict, deserializing the nested body model."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateFineTuneJobResponseBody().from_map(m['body'])
        return self
class CreateServiceRequest(TeaModel):
    """Request parameters for creating a model service."""

    def __init__(
        self,
        agent_key: str = None,
        model: str = None,
    ):
        # Wire field 'AgentKey'.
        self.agent_key = agent_key
        # Wire field 'Model': the model to deploy as a service.
        self.model = model

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('AgentKey', self.agent_key),
            ('Model', self.model),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        for attr, key in (('agent_key', 'AgentKey'), ('model', 'Model')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateServiceResponseBody(TeaModel):
    """Response body returned after creating a model service."""

    def __init__(
        self,
        model_service_id: str = None,
        request_id: str = None,
    ):
        # Wire field 'ModelServiceId'.
        self.model_service_id = model_service_id
        # Wire field 'RequestId'.
        self.request_id = request_id

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('ModelServiceId', self.model_service_id),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        mapping = (
            ('model_service_id', 'ModelServiceId'),
            ('request_id', 'RequestId'),
        )
        for attr, key in mapping:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateServiceResponse(TeaModel):
    """Full API response: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """Require headers, status code and body; then validate the body model."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, recursing into the body model."""
        data = super().to_map()
        if data is not None:
            return data
        data = {}
        if self.headers is not None:
            data['headers'] = self.headers
        if self.status_code is not None:
            data['statusCode'] = self.status_code
        if self.body is not None:
            data['body'] = self.body.to_map()
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict, deserializing the nested body model."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateServiceResponseBody().from_map(m['body'])
        return self
class CreateTextEmbeddingsRequest(TeaModel):
    """Request parameters for creating text embeddings."""

    def __init__(
        self,
        agent_key: str = None,
        input: List[str] = None,
        text_type: str = None,
    ):
        # Wire field 'AgentKey'.
        self.agent_key = agent_key
        # Wire field 'Input': texts to embed.
        self.input = input
        # Wire field 'TextType'.
        self.text_type = text_type

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('AgentKey', self.agent_key),
            ('Input', self.input),
            ('TextType', self.text_type),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        mapping = (
            ('agent_key', 'AgentKey'),
            ('input', 'Input'),
            ('text_type', 'TextType'),
        )
        for attr, key in mapping:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateTextEmbeddingsShrinkRequest(TeaModel):
    """Shrunk (pre-serialized) variant of CreateTextEmbeddingsRequest.

    The input list is carried as a JSON string instead of a list.
    """

    def __init__(
        self,
        agent_key: str = None,
        input_shrink: str = None,
        text_type: str = None,
    ):
        # Wire field 'AgentKey'.
        self.agent_key = agent_key
        # Wire field 'Input', already serialized to a string.
        self.input_shrink = input_shrink
        # Wire field 'TextType'.
        self.text_type = text_type

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('AgentKey', self.agent_key),
            ('Input', self.input_shrink),
            ('TextType', self.text_type),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        mapping = (
            ('agent_key', 'AgentKey'),
            ('input_shrink', 'Input'),
            ('text_type', 'TextType'),
        )
        for attr, key in mapping:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateTextEmbeddingsResponseBodyDataEmbeddings(TeaModel):
    """A single embedding vector paired with the index of its source text."""

    def __init__(
        self,
        embedding: List[float] = None,
        text_index: int = None,
    ):
        # Wire field 'Embedding': the embedding vector.
        self.embedding = embedding
        # Wire field 'TextIndex': index of the input text this vector belongs to.
        self.text_index = text_index

    def validate(self):
        """No field constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        data = super().to_map()
        if data is not None:
            return data
        pairs = (
            ('Embedding', self.embedding),
            ('TextIndex', self.text_index),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict; absent or None keys leave defaults."""
        m = m or {}
        for attr, key in (('embedding', 'Embedding'), ('text_index', 'TextIndex')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateTextEmbeddingsResponseBodyData(TeaModel):
    """Container for the list of embedding results."""

    def __init__(
        self,
        embeddings: List[CreateTextEmbeddingsResponseBodyDataEmbeddings] = None,
    ):
        # Wire field 'Embeddings': list of nested embedding models.
        self.embeddings = embeddings

    def validate(self):
        """Validate each nested embedding model in the list."""
        for item in (self.embeddings or []):
            if item:
                item.validate()

    def to_map(self):
        """Serialize to a dict.

        Note: 'Embeddings' is always emitted (as [] when unset), unlike the
        optional scalar fields on other models in this file.
        """
        data = super().to_map()
        if data is not None:
            return data
        serialized = [
            item.to_map() if item else None
            for item in (self.embeddings or [])
        ]
        return {'Embeddings': serialized}

    def from_map(self, m: dict = None):
        """Rebuild the embeddings list from a dict; always resets the list."""
        m = m or {}
        self.embeddings = []
        if m.get('Embeddings') is not None:
            for item in m.get('Embeddings'):
                self.embeddings.append(
                    CreateTextEmbeddingsResponseBodyDataEmbeddings().from_map(item))
        return self
class CreateTextEmbeddingsResponseBody(TeaModel):
    """Deserialized body of a CreateTextEmbeddings API response."""
    def __init__(
        self,
        code: str = None,
        data: CreateTextEmbeddingsResponseBodyData = None,
        http_status_code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        # Service-level result code.
        self.code = code
        # Nested payload holding the embeddings.
        self.data = data
        # HTTP status code (carried as a string on the wire).
        self.http_status_code = http_status_code
        # Human-readable result message.
        self.message = message
        # Server-assigned ID of this request.
        self.request_id = request_id
        # Whether the call succeeded.
        self.success = success
    def validate(self):
        """Recursively validate the nested data payload, if present."""
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.http_status_code is not None:
            result['HttpStatusCode'] = self.http_status_code
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.success is not None:
            result['Success'] = self.success
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            temp_model = CreateTextEmbeddingsResponseBodyData()
            self.data = temp_model.from_map(m['Data'])
        if m.get('HttpStatusCode') is not None:
            self.http_status_code = m.get('HttpStatusCode')
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class CreateTextEmbeddingsResponse(TeaModel):
    """HTTP-level wrapper for a CreateTextEmbeddings response: headers,
    status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateTextEmbeddingsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = CreateTextEmbeddingsResponseBody().from_map(source['body'])
        return self
class CreateTokenRequest(TeaModel):
    """Request model for the CreateToken API."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('agent_key', 'AgentKey'),
    )

    def __init__(
        self,
        agent_key: str = None,
    ):
        self.agent_key = agent_key

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateTokenResponseBodyData(TeaModel):
    """Payload of a CreateToken response: the token and its expiry."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('expired_time', 'ExpiredTime'),
        ('token', 'Token'),
    )

    def __init__(
        self,
        expired_time: int = None,
        token: str = None,
    ):
        self.expired_time = expired_time
        self.token = token

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateTokenResponseBody(TeaModel):
    """Deserialized body of a CreateToken API response."""
    def __init__(
        self,
        code: str = None,
        data: CreateTokenResponseBodyData = None,
        http_status_code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        # Service-level result code.
        self.code = code
        # Nested payload holding the token data.
        self.data = data
        # HTTP status code (carried as a string on the wire).
        self.http_status_code = http_status_code
        # Human-readable result message.
        self.message = message
        # Id of the request
        self.request_id = request_id
        # Whether the call succeeded.
        self.success = success
    def validate(self):
        """Recursively validate the nested data payload, if present."""
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.http_status_code is not None:
            result['HttpStatusCode'] = self.http_status_code
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.success is not None:
            result['Success'] = self.success
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            temp_model = CreateTokenResponseBodyData()
            self.data = temp_model.from_map(m['Data'])
        if m.get('HttpStatusCode') is not None:
            self.http_status_code = m.get('HttpStatusCode')
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class CreateTokenResponse(TeaModel):
    """HTTP-level wrapper for a CreateToken response."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateTokenResponseBody = None,
    ):
        # Raw response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body
    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            temp_model = CreateTokenResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DeleteFineTuneJobRequest(TeaModel):
    """Request model for the DeleteFineTuneJob API."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('agent_key', 'AgentKey'),
        ('job_id', 'JobId'),
    )

    def __init__(
        self,
        agent_key: str = None,
        job_id: str = None,
    ):
        self.agent_key = agent_key
        self.job_id = job_id

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteFineTuneJobResponseBody(TeaModel):
    """Deserialized body of a DeleteFineTuneJob API response."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('job_id', 'JobId'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        job_id: str = None,
        request_id: str = None,
    ):
        self.job_id = job_id
        self.request_id = request_id

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteFineTuneJobResponse(TeaModel):
    """HTTP-level wrapper for a DeleteFineTuneJob response."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteFineTuneJobResponseBody = None,
    ):
        # Raw response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body
    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            temp_model = DeleteFineTuneJobResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DeleteServiceRequest(TeaModel):
    """Request model for the DeleteService API."""
    def __init__(
        self,
        agent_key: str = None,
        model_service_id: str = None,
    ):
        # Serialized under the wire key 'AgentKey'.
        self.agent_key = agent_key
        # Identifier of the model service; wire key 'ModelServiceId'.
        self.model_service_id = model_service_id
    def validate(self):
        """Nothing to validate for this model."""
        pass
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.agent_key is not None:
            result['AgentKey'] = self.agent_key
        if self.model_service_id is not None:
            result['ModelServiceId'] = self.model_service_id
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('AgentKey') is not None:
            self.agent_key = m.get('AgentKey')
        if m.get('ModelServiceId') is not None:
            self.model_service_id = m.get('ModelServiceId')
        return self
class DeleteServiceResponseBody(TeaModel):
    """Deserialized body of a DeleteService API response."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('model_service_id', 'ModelServiceId'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        model_service_id: str = None,
        request_id: str = None,
    ):
        self.model_service_id = model_service_id
        self.request_id = request_id

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteServiceResponse(TeaModel):
    """HTTP-level wrapper for a DeleteService response."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteServiceResponseBody = None,
    ):
        # Raw response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body
    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            temp_model = DeleteServiceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeFineTuneJobRequest(TeaModel):
    """Request model for the DescribeFineTuneJob API."""
    def __init__(
        self,
        agent_key: str = None,
        job_id: str = None,
    ):
        # Serialized under the wire key 'AgentKey'.
        self.agent_key = agent_key
        # Identifier of the fine-tune job; wire key 'JobId'.
        self.job_id = job_id
    def validate(self):
        """Nothing to validate for this model."""
        pass
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.agent_key is not None:
            result['AgentKey'] = self.agent_key
        if self.job_id is not None:
            result['JobId'] = self.job_id
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('AgentKey') is not None:
            self.agent_key = m.get('AgentKey')
        if m.get('JobId') is not None:
            self.job_id = m.get('JobId')
        return self
class DescribeFineTuneJobResponseBodyHyperParameters(TeaModel):
    """Hyper-parameters reported for a fine-tune job."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('batch_size', 'BatchSize'),
        ('epochs', 'Epochs'),
        ('learning_rate', 'LearningRate'),
        ('prompt_loss_weight', 'PromptLossWeight'),
    )

    def __init__(
        self,
        batch_size: int = None,
        epochs: int = None,
        learning_rate: str = None,
        prompt_loss_weight: float = None,
    ):
        self.batch_size = batch_size
        self.epochs = epochs
        self.learning_rate = learning_rate
        self.prompt_loss_weight = prompt_loss_weight

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeFineTuneJobResponseBody(TeaModel):
    """Deserialized body of a DescribeFineTuneJob API response."""
    def __init__(
        self,
        base_model: str = None,
        fine_tuned_model: str = None,
        hyper_parameters: DescribeFineTuneJobResponseBodyHyperParameters = None,
        job_id: str = None,
        message: str = None,
        model_name: str = None,
        request_id: str = None,
        status: str = None,
        training_files: List[str] = None,
        training_type: str = None,
        validation_files: List[str] = None,
    ):
        # Name of the base model the job was started from.
        self.base_model = base_model
        # Name of the resulting fine-tuned model.
        self.fine_tuned_model = fine_tuned_model
        # Nested hyper-parameter model.
        self.hyper_parameters = hyper_parameters
        # Identifier of the fine-tune job.
        self.job_id = job_id
        # Human-readable status/result message.
        self.message = message
        # Display name of the model.
        self.model_name = model_name
        # Server-assigned ID of this request.
        self.request_id = request_id
        # Current job status string.
        self.status = status
        # Training data file references.
        self.training_files = training_files
        # Type of training performed.
        self.training_type = training_type
        # Validation data file references.
        self.validation_files = validation_files
    def validate(self):
        """Recursively validate the nested hyper-parameters, if present."""
        if self.hyper_parameters:
            self.hyper_parameters.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.base_model is not None:
            result['BaseModel'] = self.base_model
        if self.fine_tuned_model is not None:
            result['FineTunedModel'] = self.fine_tuned_model
        if self.hyper_parameters is not None:
            result['HyperParameters'] = self.hyper_parameters.to_map()
        if self.job_id is not None:
            result['JobId'] = self.job_id
        if self.message is not None:
            result['Message'] = self.message
        if self.model_name is not None:
            result['ModelName'] = self.model_name
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.status is not None:
            result['Status'] = self.status
        if self.training_files is not None:
            result['TrainingFiles'] = self.training_files
        if self.training_type is not None:
            result['TrainingType'] = self.training_type
        if self.validation_files is not None:
            result['ValidationFiles'] = self.validation_files
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('BaseModel') is not None:
            self.base_model = m.get('BaseModel')
        if m.get('FineTunedModel') is not None:
            self.fine_tuned_model = m.get('FineTunedModel')
        if m.get('HyperParameters') is not None:
            temp_model = DescribeFineTuneJobResponseBodyHyperParameters()
            self.hyper_parameters = temp_model.from_map(m['HyperParameters'])
        if m.get('JobId') is not None:
            self.job_id = m.get('JobId')
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('ModelName') is not None:
            self.model_name = m.get('ModelName')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Status') is not None:
            self.status = m.get('Status')
        if m.get('TrainingFiles') is not None:
            self.training_files = m.get('TrainingFiles')
        if m.get('TrainingType') is not None:
            self.training_type = m.get('TrainingType')
        if m.get('ValidationFiles') is not None:
            self.validation_files = m.get('ValidationFiles')
        return self
class DescribeFineTuneJobResponse(TeaModel):
    """HTTP-level wrapper for a DescribeFineTuneJob response."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeFineTuneJobResponseBody = None,
    ):
        # Raw response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body
    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            temp_model = DescribeFineTuneJobResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeServiceRequest(TeaModel):
    """Request model for the DescribeService API."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('agent_key', 'AgentKey'),
        ('model_service_id', 'ModelServiceId'),
    )

    def __init__(
        self,
        agent_key: str = None,
        model_service_id: str = None,
    ):
        self.agent_key = agent_key
        self.model_service_id = model_service_id

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeServiceResponseBody(TeaModel):
    """Deserialized body of a DescribeService API response."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('app_id', 'AppId'),
        ('model_service_id', 'ModelServiceId'),
        ('request_id', 'RequestId'),
        ('status', 'Status'),
    )

    def __init__(
        self,
        app_id: str = None,
        model_service_id: str = None,
        request_id: str = None,
        status: str = None,
    ):
        self.app_id = app_id
        self.model_service_id = model_service_id
        self.request_id = request_id
        self.status = status

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeServiceResponse(TeaModel):
    """HTTP-level wrapper for a DescribeService response."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeServiceResponseBody = None,
    ):
        # Raw response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body
    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            temp_model = DescribeServiceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ListFineTuneJobsRequest(TeaModel):
    """Paged request model for the ListFineTuneJobs API."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('agent_key', 'AgentKey'),
        ('page_no', 'PageNo'),
        ('page_size', 'PageSize'),
    )

    def __init__(
        self,
        agent_key: str = None,
        page_no: int = None,
        page_size: int = None,
    ):
        self.agent_key = agent_key
        self.page_no = page_no
        self.page_size = page_size

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ListFineTuneJobsResponseBodyJobsHyperParameters(TeaModel):
    """Hyper-parameters of one fine-tune job in a ListFineTuneJobs response."""
    def __init__(
        self,
        batch_size: int = None,
        epochs: int = None,
        learning_rate: str = None,
        prompt_loss_weight: float = None,
    ):
        # Training batch size.
        self.batch_size = batch_size
        # Number of training epochs.
        self.epochs = epochs
        # Learning rate (carried as a string on the wire).
        self.learning_rate = learning_rate
        # Weight applied to the prompt portion of the loss.
        self.prompt_loss_weight = prompt_loss_weight
    def validate(self):
        """Nothing to validate for this model."""
        pass
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.batch_size is not None:
            result['BatchSize'] = self.batch_size
        if self.epochs is not None:
            result['Epochs'] = self.epochs
        if self.learning_rate is not None:
            result['LearningRate'] = self.learning_rate
        if self.prompt_loss_weight is not None:
            result['PromptLossWeight'] = self.prompt_loss_weight
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('BatchSize') is not None:
            self.batch_size = m.get('BatchSize')
        if m.get('Epochs') is not None:
            self.epochs = m.get('Epochs')
        if m.get('LearningRate') is not None:
            self.learning_rate = m.get('LearningRate')
        if m.get('PromptLossWeight') is not None:
            self.prompt_loss_weight = m.get('PromptLossWeight')
        return self
class ListFineTuneJobsResponseBodyJobs(TeaModel):
    """One fine-tune job entry in a ListFineTuneJobs response."""
    def __init__(
        self,
        base_model: str = None,
        fine_tuned_model: str = None,
        hyper_parameters: ListFineTuneJobsResponseBodyJobsHyperParameters = None,
        job_id: str = None,
        message: str = None,
        model_name: str = None,
        status: str = None,
        training_files: List[str] = None,
        training_type: str = None,
        validation_files: List[str] = None,
    ):
        # Name of the base model the job was started from.
        self.base_model = base_model
        # Name of the resulting fine-tuned model.
        self.fine_tuned_model = fine_tuned_model
        # Nested hyper-parameter model.
        self.hyper_parameters = hyper_parameters
        # Identifier of the fine-tune job.
        self.job_id = job_id
        # Human-readable status/result message.
        self.message = message
        # Display name of the model.
        self.model_name = model_name
        # Current job status string.
        self.status = status
        # Training data file references.
        self.training_files = training_files
        # Type of training performed.
        self.training_type = training_type
        # Validation data file references.
        self.validation_files = validation_files
    def validate(self):
        """Recursively validate the nested hyper-parameters, if present."""
        if self.hyper_parameters:
            self.hyper_parameters.validate()
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.base_model is not None:
            result['BaseModel'] = self.base_model
        if self.fine_tuned_model is not None:
            result['FineTunedModel'] = self.fine_tuned_model
        if self.hyper_parameters is not None:
            result['HyperParameters'] = self.hyper_parameters.to_map()
        if self.job_id is not None:
            result['JobId'] = self.job_id
        if self.message is not None:
            result['Message'] = self.message
        if self.model_name is not None:
            result['ModelName'] = self.model_name
        if self.status is not None:
            result['Status'] = self.status
        if self.training_files is not None:
            result['TrainingFiles'] = self.training_files
        if self.training_type is not None:
            result['TrainingType'] = self.training_type
        if self.validation_files is not None:
            result['ValidationFiles'] = self.validation_files
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('BaseModel') is not None:
            self.base_model = m.get('BaseModel')
        if m.get('FineTunedModel') is not None:
            self.fine_tuned_model = m.get('FineTunedModel')
        if m.get('HyperParameters') is not None:
            temp_model = ListFineTuneJobsResponseBodyJobsHyperParameters()
            self.hyper_parameters = temp_model.from_map(m['HyperParameters'])
        if m.get('JobId') is not None:
            self.job_id = m.get('JobId')
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('ModelName') is not None:
            self.model_name = m.get('ModelName')
        if m.get('Status') is not None:
            self.status = m.get('Status')
        if m.get('TrainingFiles') is not None:
            self.training_files = m.get('TrainingFiles')
        if m.get('TrainingType') is not None:
            self.training_type = m.get('TrainingType')
        if m.get('ValidationFiles') is not None:
            self.validation_files = m.get('ValidationFiles')
        return self
class ListFineTuneJobsResponseBody(TeaModel):
    """Deserialized, paged body of a ListFineTuneJobs API response."""
    def __init__(
        self,
        jobs: List[ListFineTuneJobsResponseBodyJobs] = None,
        page_no: int = None,
        page_size: int = None,
        request_id: str = None,
        total: int = None,
    ):
        # Jobs on the current page.
        self.jobs = jobs
        # Current page number.
        self.page_no = page_no
        # Page size used for this listing.
        self.page_size = page_size
        # Server-assigned ID of this request.
        self.request_id = request_id
        # Total number of jobs across all pages.
        self.total = total
    def validate(self):
        """Recursively validate every non-empty nested job entry."""
        if self.jobs:
            for k in self.jobs:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a dict.

        NOTE: the 'Jobs' key is always present (possibly as an empty list)
        even when the attribute is None — generated wire convention.
        """
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        result['Jobs'] = []
        if self.jobs is not None:
            for k in self.jobs:
                result['Jobs'].append(k.to_map() if k else None)
        if self.page_no is not None:
            result['PageNo'] = self.page_no
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.total is not None:
            result['Total'] = self.total
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        self.jobs = []
        if m.get('Jobs') is not None:
            for k in m.get('Jobs'):
                temp_model = ListFineTuneJobsResponseBodyJobs()
                self.jobs.append(temp_model.from_map(k))
        if m.get('PageNo') is not None:
            self.page_no = m.get('PageNo')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Total') is not None:
            self.total = m.get('Total')
        return self
class ListFineTuneJobsResponse(TeaModel):
    """HTTP-level wrapper for a ListFineTuneJobs response: headers, status
    code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListFineTuneJobsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ListFineTuneJobsResponseBody().from_map(source['body'])
        return self
class ListServicesRequest(TeaModel):
    """Paged request model for the ListServices API."""
    def __init__(
        self,
        agent_key: str = None,
        page_no: int = None,
        page_size: int = None,
    ):
        # Serialized under the wire key 'AgentKey'.
        self.agent_key = agent_key
        # Page number to fetch; wire key 'PageNo'.
        self.page_no = page_no
        # Number of entries per page; wire key 'PageSize'.
        self.page_size = page_size
    def validate(self):
        """Nothing to validate for this model."""
        pass
    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.agent_key is not None:
            result['AgentKey'] = self.agent_key
        if self.page_no is not None:
            result['PageNo'] = self.page_no
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        if m.get('AgentKey') is not None:
            self.agent_key = m.get('AgentKey')
        if m.get('PageNo') is not None:
            self.page_no = m.get('PageNo')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        return self
class ListServicesResponseBodyModelServices(TeaModel):
    """One model-service entry in a ListServices response."""

    # (python attribute, wire key) pairs handled by to_map/from_map.
    _FIELDS = (
        ('app_id', 'AppId'),
        ('model_service_id', 'ModelServiceId'),
        ('status', 'Status'),
    )

    def __init__(
        self,
        app_id: str = None,
        model_service_id: str = None,
        status: str = None,
    ):
        self.app_id = app_id
        self.model_service_id = model_service_id
        self.status = status

    def validate(self):
        """Nothing to validate for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        for attr, key in self._FIELDS:
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ListServicesResponseBody(TeaModel):
    """Deserialized, paged body of a ListServices API response."""
    def __init__(
        self,
        model_services: List[ListServicesResponseBodyModelServices] = None,
        page_no: int = None,
        page_size: int = None,
        request_id: str = None,
        total: int = None,
    ):
        # Model services on the current page.
        self.model_services = model_services
        # Current page number.
        self.page_no = page_no
        # Page size used for this listing.
        self.page_size = page_size
        # Server-assigned ID of this request.
        self.request_id = request_id
        # Total number of services across all pages.
        self.total = total
    def validate(self):
        """Recursively validate every non-empty nested service entry."""
        if self.model_services:
            for k in self.model_services:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a dict.

        NOTE: the 'ModelServices' key is always present (possibly as an
        empty list) even when the attribute is None — generated wire
        convention.
        """
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        result['ModelServices'] = []
        if self.model_services is not None:
            for k in self.model_services:
                result['ModelServices'].append(k.to_map() if k else None)
        if self.page_no is not None:
            result['PageNo'] = self.page_no
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.total is not None:
            result['Total'] = self.total
        return result
    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        m = m or dict()
        self.model_services = []
        if m.get('ModelServices') is not None:
            for k in m.get('ModelServices'):
                temp_model = ListServicesResponseBodyModelServices()
                self.model_services.append(temp_model.from_map(k))
        if m.get('PageNo') is not None:
            self.page_no = m.get('PageNo')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Total') is not None:
            self.total = m.get('Total')
        return self
class ListServicesResponse(TeaModel):
    """HTTP-level wrapper for a ListServices response: headers, status code
    and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListServicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three attributes are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, omitting attributes that are None."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from the mapping *m* and return self."""
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ListServicesResponseBody().from_map(source['body'])
        return self
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
bc8690891dbddd0b15fa87a22253b93d5d9e3b63 | c1e13dabefcfa873b136f36d464f2bf5094ee5ba | /manubot/cite/tests/test_csl_item.py | 70e974fa9fcef5cb621a11f879e591ef1f7b6126 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | epogrebnyak/manubot | 6001c3271d2c6bc059e09dfcb786c74efcb03df7 | 77920eca091995184684e7b6b0d12266917c3aa4 | refs/heads/master | 2020-07-27T00:11:36.221293 | 2019-10-09T06:35:23 | 2019-10-09T06:35:23 | 208,805,899 | 1 | 0 | NOASSERTION | 2019-10-30T09:20:17 | 2019-09-16T13:24:26 | Python | UTF-8 | Python | false | false | 1,959 | py | import copy
import pytest
from manubot.cite.csl_item import (
csl_item_set_standard_id)
@pytest.mark.parametrize(
['csl_item', 'standard_citation'],
[
(
{'id': 'my-id', 'standard_citation': 'doi:10.7554/elife.32822'},
'doi:10.7554/elife.32822',
),
(
{'id': 'doi:10.7554/elife.32822'},
'doi:10.7554/elife.32822',
),
(
{'id': 'doi:10.7554/ELIFE.32822'},
'doi:10.7554/elife.32822',
),
(
{'id': 'my-id'},
'raw:my-id',
),
],
ids=[
'from_standard_citation',
'from_doi_id',
'from_doi_id_standardize',
'from_raw_id',
]
)
def test_csl_item_set_standard_id(csl_item, standard_citation):
output = csl_item_set_standard_id(csl_item)
assert output is csl_item
assert output['id'] == standard_citation
def test_csl_item_set_standard_id_repeated():
csl_item = {
'id': 'pmid:1',
'type': 'article-journal',
}
# csl_item_0 = copy.deepcopy(csl_item)
csl_item_1 = copy.deepcopy(csl_item_set_standard_id(csl_item))
assert 'standard_citation' not in 'csl_item'
csl_item_2 = copy.deepcopy(csl_item_set_standard_id(csl_item))
assert csl_item_1 == csl_item_2
def test_csl_item_set_standard_id_note():
"""
Test extracting standard_id from a note and setting additional
note fields.
"""
csl_item = {
'id': 'original-id',
'type': 'article-journal',
'note': 'standard_id: doi:10.1371/journal.PPAT.1006256',
}
csl_item_set_standard_id(csl_item)
assert csl_item['id'] == 'doi:10.1371/journal.ppat.1006256'
from manubot.cite.citeproc import parse_csl_item_note
note_dict = parse_csl_item_note(csl_item['note'])
assert note_dict['original_id'] == 'original-id'
assert note_dict['original_standard_id'] == 'doi:10.1371/journal.PPAT.1006256'
| [
"daniel.himmelstein@gmail.com"
] | daniel.himmelstein@gmail.com |
17b3fb728b6f6d680a80c7565e7a37f431aa7e6d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_dithers.py | eef9fd87e5c87980635cc5c430ce7f137bbb0de8 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py |
#calss header
class _DITHERS():
def __init__(self,):
self.name = "DITHERS"
self.definitions = dither
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['dither']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
0647d51b728b8cfd6cb1d6995238a196bb8a110a | 9f91cc7389d212720ab16ab9d0a60d62e5cf7088 | /astropy/io/votable/tests/table_test.py | 2d6564672e783912b47978ef48505dd4910c5021 | [] | no_license | MQQ/astropy | 47d6889e54dfa714bcbe9a6a572bb6c4c56427fd | 67c8ce053d075399d22b4a674bf1df3e441ad125 | refs/heads/master | 2021-01-18T07:48:04.107638 | 2012-11-04T22:04:33 | 2012-11-04T22:04:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,591 | py | """
Test the conversion to/from astropy.table
"""
import os
import shutil
import tempfile
from ....config import get_data_filename
from ..table import parse, writeto
from .. import tree
TMP_DIR = None
def setup_module():
global TMP_DIR
TMP_DIR = tempfile.mkdtemp()
def teardown_module():
shutil.rmtree(TMP_DIR)
def test_table():
# Read the VOTABLE
votable = parse(
get_data_filename('data/regression.xml'),
pedantic=False)
table = votable.get_first_table()
astropy_table = table.to_table()
votable2 = tree.VOTableFile.from_table(astropy_table)
t = votable2.get_first_table()
field_types = [
('string_test', {'datatype': 'char', 'arraysize': '*'}),
('string_test_2', {'datatype': 'char', 'arraysize': '10'}),
('unicode_test', {'datatype': 'unicodeChar', 'arraysize': '*'}),
('fixed_unicode_test', {'datatype': 'unicodeChar', 'arraysize': '10'}),
('string_array_test', {'datatype': 'char', 'arraysize': '4'}),
('unsignedByte', {'datatype': 'unsignedByte'}),
('short', {'datatype': 'short'}),
('int', {'datatype': 'int'}),
('long', {'datatype': 'long'}),
('double', {'datatype': 'double'}),
('float', {'datatype': 'float'}),
('array', {'datatype': 'long', 'arraysize': '2*'}),
('bit', {'datatype': 'bit'}),
('bitarray', {'datatype': 'bit', 'arraysize': '3x2'}),
('bitvararray', {'datatype': 'bit', 'arraysize': '*'}),
('bitvararray2', {'datatype': 'bit', 'arraysize': '3x2*'}),
('floatComplex', {'datatype': 'floatComplex'}),
('doubleComplex', {'datatype': 'doubleComplex'}),
('doubleComplexArray', {'datatype': 'doubleComplex', 'arraysize': '*'}),
('doubleComplexArrayFixed', {'datatype': 'doubleComplex', 'arraysize': '2'}),
('boolean', {'datatype': 'bit'}),
('booleanArray', {'datatype': 'bit', 'arraysize': '4'}),
('nulls', {'datatype': 'int'}),
('nulls_array', {'datatype': 'int', 'arraysize': '2x2'}),
('precision1', {'datatype': 'double'}),
('precision2', {'datatype': 'double'}),
('doublearray', {'datatype': 'double', 'arraysize': '*'}),
('bitarray2', {'datatype': 'bit', 'arraysize': '16'})]
for field, type in zip(t.fields, field_types):
name, d = type
assert field.ID == name
assert field.datatype == d['datatype']
if 'arraysize' in d:
assert field.arraysize == d['arraysize']
writeto(votable2, os.path.join(TMP_DIR, "through_table.xml"))
| [
"mdboom@gmail.com"
] | mdboom@gmail.com |
e39117bcbd65c3e815a82d6f118abff791f48236 | 7275226803632a73466214cf14ad37908d9cc5db | /blog/migrations/0016_auto_20201129_1253.py | eeba89141db5baf1444c5141c4394109d8dd3bb9 | [] | no_license | GBrachetta/new-blog | dc01a7b6289cce4106f03954d6bfe49e04bca740 | cf25dbbcd54e5c309664cd7eec2488e344c0d41d | refs/heads/master | 2023-01-24T11:06:43.473885 | 2020-11-30T20:21:08 | 2020-11-30T20:21:08 | 316,278,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 604 | py | # Generated by Django 3.1.3 on 2020-11-29 12:53
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0015_auto_20201129_0206'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_profile', to=settings.AUTH_USER_MODEL),
),
]
| [
"brachetta@me.com"
] | brachetta@me.com |
5dacf768ff89c1dc776f4f6b3e3d1a4da857d88c | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /n5Ar5F2CJMpGRXz3o_9.py | 025d1f238f707bb2767ad5f637fa17a022f0f106 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py |
def mineral_formation(cave):
if sum(cave[0]) == 0:
return 'stalagmites'
elif sum(cave[-1]) == 0:
return 'stalactites'
else:
return 'both'
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
517bba59873cc2cd775a41b827679849d8e8e5e0 | 0da06e5a4d57540ef8e5118e0b6725de7465af62 | /src/stompy_ros/python/stompy_ros/leg/__init__.py | 4a588b15bfbee3fd7f55e9b6a579218cd277b6b1 | [] | no_license | braingram/stompy_simulation | 8a6308f43d23d3f369dd4cddb59eff6167fdf6ed | 1c9f15fa9f067c04f6f19b68988467578c50ef9e | refs/heads/master | 2020-04-16T02:18:07.866729 | 2019-08-04T01:28:58 | 2019-08-04T01:28:58 | 39,320,124 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | #!/usr/bin/env python
import node
import plans
__all__ = ['node', 'plans']
| [
"brettgraham@gmail.com"
] | brettgraham@gmail.com |
3d124d10463956a711c2a2fe609afda4bbaeb03f | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/RMM2-MIB.py | 92062c89832a84419b15fa4bd9de6d52e1d98723 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 9,062 | py | #
# PySNMP MIB module RMM2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RMM2-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:58:01 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Counter32, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, iso, MibIdentifier, Bits, enterprises, NotificationType, ModuleIdentity, TimeTicks, Counter64, Integer32, Unsigned32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "iso", "MibIdentifier", "Bits", "enterprises", "NotificationType", "ModuleIdentity", "TimeTicks", "Counter64", "Integer32", "Unsigned32", "IpAddress")
TextualConvention, MacAddress, DateAndTime, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "MacAddress", "DateAndTime", "DisplayString")
intelRmm2 = ModuleIdentity((1, 3, 6, 1, 4, 1, 343))
if mibBuilder.loadTexts: intelRmm2.setLastUpdated('200612130000Z')
if mibBuilder.loadTexts: intelRmm2.setOrganization('Intel Corporation')
if mibBuilder.loadTexts: intelRmm2.setContactInfo(' Intel Corporation ')
if mibBuilder.loadTexts: intelRmm2.setDescription('This mib describes the SNMP functions of the KVM by Intel Corporation.')
rmm2 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1))
board = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1))
host = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 2))
common = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 3))
traps = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 4))
info = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 1))
users = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 2))
actions = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3))
hostInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 2, 1))
hostActions = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 2, 2))
firmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: firmwareVersion.setStatus('current')
if mibBuilder.loadTexts: firmwareVersion.setDescription('The current firmware version')
serialNumber = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialNumber.setStatus('current')
if mibBuilder.loadTexts: serialNumber.setDescription('The serial number.')
ip = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ip.setStatus('current')
if mibBuilder.loadTexts: ip.setDescription('The current IP address. A value of 0.0.0.0 indicates an error or an unset option.')
netmask = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netmask.setStatus('current')
if mibBuilder.loadTexts: netmask.setDescription('The current Netmask. A value of 0.0.0.0 indicates an error or an unset option.')
gateway = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gateway.setStatus('current')
if mibBuilder.loadTexts: gateway.setDescription('The current Gateway. A value of 0.0.0.0 indicates an error or an unset option.')
mac = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 6), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mac.setStatus('current')
if mibBuilder.loadTexts: mac.setDescription('The current MAC address.')
hardwareRev = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hardwareRev.setStatus('current')
if mibBuilder.loadTexts: hardwareRev.setDescription('The hardware revision number.')
eventType = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 8), DisplayString())
if mibBuilder.loadTexts: eventType.setStatus('current')
if mibBuilder.loadTexts: eventType.setDescription('The name of a generic log event')
eventDesc = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 9), DisplayString())
if mibBuilder.loadTexts: eventDesc.setStatus('current')
if mibBuilder.loadTexts: eventDesc.setDescription('The description text of a generic log event')
userLoginName = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 10), DisplayString())
if mibBuilder.loadTexts: userLoginName.setStatus('current')
if mibBuilder.loadTexts: userLoginName.setDescription('The login of a user.')
remoteHost = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 1, 11), IpAddress())
if mibBuilder.loadTexts: remoteHost.setStatus('current')
if mibBuilder.loadTexts: remoteHost.setDescription('The IP of the remote host from which a user is logged in.')
checkHostPower = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("hasPower", 1), ("hasnoPower", 2), ("error", 3), ("notsupported", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: checkHostPower.setStatus('current')
if mibBuilder.loadTexts: checkHostPower.setDescription('The Power status of the host.')
hostReset = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 2, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hostReset.setStatus('current')
if mibBuilder.loadTexts: hostReset.setDescription('This virtually presses the Reset button of the host.')
hostPower = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 2, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("longPress", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hostPower.setStatus('current')
if mibBuilder.loadTexts: hostPower.setDescription('This virtually presses the Power button of the host with a short or long press.')
resetBoard = MibScalar((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: resetBoard.setStatus('current')
if mibBuilder.loadTexts: resetBoard.setDescription('Resets the board.')
dummyTrap = NotificationType((1, 3, 6, 1, 4, 1, 343, 1, 4, 1))
if mibBuilder.loadTexts: dummyTrap.setStatus('current')
if mibBuilder.loadTexts: dummyTrap.setDescription('Dummy Trap for testing')
loginfailed = NotificationType((1, 3, 6, 1, 4, 1, 343, 1, 4, 2)).setObjects(("RMM2-MIB", "userLoginName"), ("RMM2-MIB", "remoteHost"))
if mibBuilder.loadTexts: loginfailed.setStatus('current')
if mibBuilder.loadTexts: loginfailed.setDescription('Failed login try.')
loginsuccess = NotificationType((1, 3, 6, 1, 4, 1, 343, 1, 4, 3)).setObjects(("RMM2-MIB", "userLoginName"), ("RMM2-MIB", "remoteHost"))
if mibBuilder.loadTexts: loginsuccess.setStatus('current')
if mibBuilder.loadTexts: loginsuccess.setDescription('Success login.')
securityViolation = NotificationType((1, 3, 6, 1, 4, 1, 343, 1, 4, 4)).setObjects(("RMM2-MIB", "userLoginName"), ("RMM2-MIB", "remoteHost"))
if mibBuilder.loadTexts: securityViolation.setStatus('current')
if mibBuilder.loadTexts: securityViolation.setDescription('Security violation.')
generic = NotificationType((1, 3, 6, 1, 4, 1, 343, 1, 4, 5)).setObjects(("RMM2-MIB", "eventType"), ("RMM2-MIB", "eventDesc"))
if mibBuilder.loadTexts: generic.setStatus('current')
if mibBuilder.loadTexts: generic.setDescription('This trap is used for any other notification message.')
logout = NotificationType((1, 3, 6, 1, 4, 1, 343, 1, 4, 6)).setObjects(("RMM2-MIB", "userLoginName"), ("RMM2-MIB", "remoteHost"))
if mibBuilder.loadTexts: logout.setStatus('current')
if mibBuilder.loadTexts: logout.setDescription('User logout.')
mibBuilder.exportSymbols("RMM2-MIB", securityViolation=securityViolation, eventDesc=eventDesc, users=users, hostActions=hostActions, netmask=netmask, board=board, PYSNMP_MODULE_ID=intelRmm2, userLoginName=userLoginName, hostReset=hostReset, eventType=eventType, resetBoard=resetBoard, traps=traps, remoteHost=remoteHost, intelRmm2=intelRmm2, hostInfo=hostInfo, info=info, common=common, host=host, serialNumber=serialNumber, ip=ip, checkHostPower=checkHostPower, hostPower=hostPower, firmwareVersion=firmwareVersion, generic=generic, dummyTrap=dummyTrap, logout=logout, actions=actions, mac=mac, rmm2=rmm2, gateway=gateway, loginsuccess=loginsuccess, hardwareRev=hardwareRev, loginfailed=loginfailed)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
511a293ebbc8e556034f32c06027531a837e841f | e21599d08d2df9dac2dee21643001c0f7c73b24f | /practice/lib/tenacity_sample/try_with_exception.py | c78f7317fc738abd2475f9d6e07562ac6938c80a | [] | no_license | herolibra/PyCodeComplete | c7bf2fb4ce395737f8c67749148de98a36a71035 | 4ef7d2c3aec6d28a53eed0e649cdeb74df3d783b | refs/heads/master | 2022-07-17T05:39:03.554760 | 2020-05-03T07:00:14 | 2020-05-03T07:00:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | #!/usr/bin/env python
# coding=utf-8
import random
def do_something_unreliable(retry=10):
for i in range(retry):
try:
if random.randint(0, 10) > 1:
raise IOError("Unstable status, try again")
else:
print("Get stable result")
return
except Exception as e:
print(e.message)
if __name__ == "__main__":
do_something_unreliable(3) | [
"zengyuetian@cloutropy.com"
] | zengyuetian@cloutropy.com |
faa141d3b4652d9d6c57baca5a76f24a41679132 | 068d271e241d8cdb46dbf4243166e4b8ee7025b2 | /web前端/day54/day54/00今日面试题.py | 6c84dbb775fe3b3bec0b220ebe9472d71968acd0 | [
"MIT"
] | permissive | caiqinxiong/python | f6e226e76cb62aac970bcfbcb6c8adfc64858b60 | 9029f6c528d2cb742b600af224e803baa74cbe6a | refs/heads/master | 2023-05-26T19:41:34.911885 | 2020-05-15T09:02:08 | 2020-05-15T09:02:08 | 195,261,757 | 1 | 0 | null | 2021-06-10T23:33:33 | 2019-07-04T15:01:42 | JavaScript | UTF-8 | Python | false | false | 420 | py | """
问:执行完下面的代码后, l,m的内容分别是什么?
"""
def func(m):
for k,v in m.items():
m[k+2] = v+2
m = {1: 2, 3: 4}
l = m # 浅拷贝
from copy import deepcopy
l2 = deepcopy(m)
l[9] = 10
l2[90] = 100
# func(l)
m[7] = 8
# 1. 在Python中遍历字典的时候能不能对字典本身做涉及键(key)的操作
# 2. 深浅拷贝的理解
print("l:", l)
print("l2:", l2)
print("m:", m)
| [
"13269469526@163.com"
] | 13269469526@163.com |
9098aa7602446e7e7f4947ab86dfbfb99f6a4c3f | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_w_M_to_Wxyz_focus_Z_ok/wiColorJ/pyr_Tcrop255_pad20_jit15/Sob_k09_s001_Mae_s001_good/pyr_4s/L5/step10_a.py | d9093007ddb27b02a98163b7fa2d4e2e8fb5fecc | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71,077 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
code_exe_path = os.path.realpath(__file__) ### 目前執行 step10_b.py 的 path
code_exe_path_element = code_exe_path.split("\\") ### 把 path 切分 等等 要找出 kong_model 在第幾層
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### 找出 kong_model2 在第幾層
# --------------------------------------------------------------------------- #
# Bootstrap: locate the kong_model2 root directory and put it (plus this
# script's own directory) on sys.path so the step** modules below import.
# NOTE(review): depends on `code_exe_path_element`, `kong_layer` and
# `code_dir` computed earlier in this file; the "\\" separators suggest a
# Windows-split path — confirm behaviour on other platforms.
# --------------------------------------------------------------------------- #
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])    ### locate the kong_model2 dir
import sys    ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print("    code_exe_path:", code_exe_path)
# print("    code_exe_path_element:", code_exe_path_element)
# print("    code_dir:", code_dir)
# print("    kong_layer:", kong_layer)
# print("    kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer    ### the -1 in the middle converts the length into an index
# print("    kong_to_py_layer:", kong_to_py_layer)
# NOTE(review): there is no branch for kong_to_py_layer == 1, so `template_dir`
# would be undefined in that case — presumably scripts always sit at least two
# levels below kong_model2; confirm.
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:]  ### [7:] originally stripped the "step1x_" prefix; meaningful names are kept now, so it was changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:]  ### [5:] originally stripped the "mask_" prefix (added because a python module cannot start with a digit); the automatic ordering turned out acceptable, so it was changed to 0
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print("    template_dir:", template_dir)    ### example: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir  # exp_dir mirrors the template directory layout (see docstring below)
#############################################################################################################################################################################################################
from step06_a_datas_obj import *          # dataset objects (e.g. type8_blender_... used below)
from step09_4side_L5 import *             # ch032_pyramid_* model builders used below
from step10_a2_loss_info_obj import Loss_info_builder
from step10_b2_exp_builder import Exp_builder
# Remove this code dir from sys.path and purge all step09 modules from the
# import cache — presumably so sibling scripts can later import their own
# step09 variant without hitting the cached one; TODO confirm.
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
'''
exp_dir 是 決定 result_dir 的 "上一層"資料夾 名字喔! exp_dir要巢狀也沒問題~
比如:exp_dir = "6_mask_unet/自己命的名字",那 result_dir 就都在:
6_mask_unet/自己命的名字/result_a
6_mask_unet/自己命的名字/result_b
6_mask_unet/自己命的名字/...
'''
### Dataset: blender kong doc3d, input I, ground truth W (channel-normalized v2).
use_db_obj = type8_blender_kong_doc3d_in_I_gt_W_ch_norm_v2
### Shared loss recipe for every experiment: MAE (scale 1) + Sobel edge loss
### (kernel size 9, kernel scale 1).
Mae_s001_Sob_k09_s001 = Loss_info_builder().set_loss_type("mae+sobel" , mae_scale= 1, sobel_kernel_size= 9, sobel_kernel_scale= 1)
### One loss entry per output head; the z, y, x order follows step07_b_0b_Multi_UNet.
### .copy() snapshots the builder right after each set_loss_target so the three
### list entries keep distinct targets despite sharing one builder instance.
use_loss_obj = [Mae_s001_Sob_k09_s001.set_loss_target("UNet_z").copy(), Mae_s001_Sob_k09_s001.set_loss_target("UNet_y").copy(), Mae_s001_Sob_k09_s001.set_loss_target("UNet_x").copy()]
#############################################################
### Build an "empty" Exp_builder so result_analyze can draw blank figures.
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
ch032_1side_1__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
ch032_1side_2__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
ch032_1side_3__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
ch032_1side_4__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
ch032_1side_5__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# Experiment builders for the 1side_5 / 2side_5 pyramid family.
#
# The original code spelled out one identical Exp_builder chain per
# (3side, 4side) combination; the combinations form a triangle
# (4side <= 3side, 3side in 1..5, 15 entries), so they are generated
# data-driven instead.  Each iteration:
#   * looks up the module-level model  ch032_pyramid_1side_5__2side_5__3side_{s3}_4side_{s4}
#   * builds an Exp_builder with the shared train / iter / range settings
#   * binds the result to the module-level name ch032_1side_5__2side_5__3side_{s3}_4side_{s4}
# so every downstream reference to those names keeps working unchanged.
# NOTE(review): assumes this code executes at module top level (the original
# assignments sit at column 0), so globals() is the correct namespace for
# both the model lookup and the result binding — confirm if this file is
# ever exec'd inside a function scope.
for _s3 in range(1, 5 + 1):           # 3side: 1..5
    for _s4 in range(1, _s3 + 1):     # 4side: 1.._s3 (triangular)
        _tag = f"1side_5__2side_5__3side_{_s3}_4side_{_s4}"
        _model = globals()[f"ch032_pyramid_{_tag}"]
        globals()[f"ch032_{_tag}"] = (
            Exp_builder()
            .set_basic("train", use_db_obj, _model, use_loss_obj,
                       exp_dir=exp_dir, code_exe_path=code_exe_path,
                       describe_end=_model.kong_model.model_describe)
            .set_train_args(epochs=1)
            .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
                                 it_down_step="half", it_down_fq=900)
            .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                       use_gt_range=Range(0, 1))
            .set_result_name(result_name="")
        )
del _s3, _s4, _tag, _model            # keep the module namespace clean
# Triangular numbers 1, 3, 6, 10, 15, "21", 28, 36, 45, 55: number of
# (3side, 4side) combinations contributed by each 2side level.
# side6: 1+3+6+10+15+21 = 56 experiment builders follow.
# Experiment builders for the full 1side_6 pyramid family.
#
# The original code spelled out 56 identical Exp_builder chains, one per
# (2side, 3side, 4side) combination with 2side in 1..6, 3side <= 2side and
# 4side <= 3side (sum of the first six triangular numbers = 56).  They are
# generated data-driven here.  Each iteration:
#   * looks up the module-level model  ch032_pyramid_1side_6__2side_{s2}__3side_{s3}_4side_{s4}
#   * builds an Exp_builder with the shared train / iter / range settings
#   * binds the result to the module-level name ch032_1side_6__2side_{s2}__3side_{s3}_4side_{s4}
# so every downstream reference to those names keeps working unchanged.
# NOTE(review): assumes this code executes at module top level (the original
# assignments sit at column 0), so globals() is the correct namespace for
# both the model lookup and the result binding — confirm if this file is
# ever exec'd inside a function scope.
for _s2 in range(1, 6 + 1):               # 2side: 1..6
    for _s3 in range(1, _s2 + 1):         # 3side: 1.._s2
        for _s4 in range(1, _s3 + 1):     # 4side: 1.._s3 (triangular)
            _tag = f"1side_6__2side_{_s2}__3side_{_s3}_4side_{_s4}"
            _model = globals()[f"ch032_pyramid_{_tag}"]
            globals()[f"ch032_{_tag}"] = (
                Exp_builder()
                .set_basic("train", use_db_obj, _model, use_loss_obj,
                           exp_dir=exp_dir, code_exe_path=code_exe_path,
                           describe_end=_model.kong_model.model_describe)
                .set_train_args(epochs=1)
                .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
                                     it_down_step="half", it_down_fq=900)
                .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                           use_gt_range=Range(0, 1))
                .set_result_name(result_name="")
            )
del _s2, _s3, _s4, _tag, _model           # keep the module namespace clean
#############################################################
if(__name__ == "__main__"):
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
### 直接按 F5 或打 python step10_b1_exp_obj_load_and_train_and_test.py,後面沒有接東西喔!才不會跑到下面給 step10_b_subprocss.py 用的程式碼~~~
ch032_1side_1__2side_1__3side_1_4side_1.build().run()
# print('no argument')
sys.exit()
### 以下是給 step10_b_subprocess.py 用的,相當於cmd打 python step10_b1_exp_obj_load_and_train_and_test.py 某個exp.build().run()
eval(sys.argv[1])
| [
"s89334roy@yahoo.com.tw"
] | s89334roy@yahoo.com.tw |
62d659bd368096cf43711563ed604f8fd3f7a6bc | f62ed4c130e6ecad19c606bac2e5aa561e18a6d5 | /week2/integer_ex.py | d5b87cfd33608f37b644ca1513287cab3ade1731 | [] | no_license | jimyeong22/2021ddangil2 | d17c7d9fd6c7c3f369a01a20317ccb6a4ea05678 | d2016a33e4ceba3ffd12fef9cace4cdceb6b1bcb | refs/heads/master | 2023-07-23T20:05:22.321927 | 2021-08-19T11:12:40 | 2021-08-19T11:12:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | # 정수형
a=154
print(type(a))
a = 0
print(type(a))
a = -25
print(type(a))
# 실수형
a= 181.34
print(type(a))
b=-22.22
print(type(b))
# 복소수
c = 1 + 4j
print(type(c))
print(c.real)
print(c.imag)
print(c.conjugate())
print(abs(c))
# 예제: 스스로 사칙연산을 활용해 확인해보자
a = 5
b = 3.14
c = 3 + 4j
18.28 + 16j
print(2b + 4c) | [
"choikm3847@gmail.com"
] | choikm3847@gmail.com |
8b8a4a5922810c13bac90fde993d966756422d37 | 3ef448a6067467da5c45dce5f3ecc87afb0a5c32 | /backend/manage.py | ed94a381e3e674696349373cd7aefcf4b0672c88 | [] | no_license | crowdbotics-apps/test-25611 | 282505ec43d68d87a8257f58bc2bc23ba6ddcecf | 92fc1490def7ce2c5cf94704551d893b57a3fa86 | refs/heads/master | 2023-04-10T16:05:31.694647 | 2021-04-10T00:42:12 | 2021-04-10T00:42:12 | 356,431,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_25611.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
7a93dcce8e5dd4da656af24ae6655e7ab8129503 | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/ads/googleads/v4/googleads-py/google/ads/googleads/v4/errors/types/date_range_error.py | 59776602752bb20e8036c015e4d72d256b7a6b13 | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,299 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v4.errors',
marshal='google.ads.googleads.v4',
manifest={
'DateRangeErrorEnum',
},
)
class DateRangeErrorEnum(proto.Message):
r"""Container for enum describing possible date range errors."""
class DateRangeError(proto.Enum):
r"""Enum describing possible date range errors."""
UNSPECIFIED = 0
UNKNOWN = 1
INVALID_DATE = 2
START_DATE_AFTER_END_DATE = 3
CANNOT_SET_DATE_TO_PAST = 4
AFTER_MAXIMUM_ALLOWABLE_DATE = 5
CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED = 6
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
f3b0b5741ace2b644e0178e50b90dcfaeb5ec3fd | b4ecc9c5a74f11958e7a49999d0299e7bb883d2e | /train.py | 5ebc5af6153bd0e810cbb34969e6e53072f76913 | [] | no_license | teja0508/AcronymLookup | 6edea8ab9bc27824b961563f5bf968b499490094 | ea5b812c41f138b5dccabbe2c474e2da0f85ce9e | refs/heads/main | 2022-12-20T08:00:30.161858 | 2020-10-18T06:01:32 | 2020-10-18T06:01:32 | 305,030,809 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,327 | py | # Machine-Learning Approach for Cross-Domain Acronym Definition Identification
# Maya Varma and Rachel Gardner
# Autumn 2017
# Train Machine Learning Classifier
import sys
sys.path.append('postgres-database/')
from urllib.request import urlopen
import re
import csv
import os
from collections import defaultdict, Counter
import operator
import random
from dbFunctions import AcronymDatabase
from sklearn.feature_extraction import text, DictVectorizer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.linear_model import SGDClassifier
from sklearn.model_selection import GridSearchCV
from sklearn import tree, metrics, svm
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import matplotlib.pyplot as plt
#from sklearn.externals import joblib
#import sklearn.externals.joblib as extjoblib
#import joblib
import pickle
#Load in csv data (contains list of HTML urls)
def loadHTMLData():
urls = []
with open('data/data.csv', 'rU') as data:
reader = csv.reader(data, dialect=csv.excel_tab)
for row in reader:
if(len((row[0].split(','))[1]) > 0): urls.append((row[0].split(','))[1])
return urls
def loadDuplicateData():
train = []
test = []
with open('data/duplicatedata.csv', 'rU') as data:
reader = csv.reader(data, dialect=csv.excel_tab)
count=0
for row in reader:
if(len((row[0].split(','))[1]) > 0): train.append((row[0].split(','))[2])
if(count%2 == 0 and len((row[0].split(','))[1]) > 0): train.append((row[0].split(','))[3])
elif(count%2 == 1 and len((row[0].split(','))[1]) > 0): test.append((row[0].split(','))[3])
count+=1
return (train, test)
urls = loadHTMLData()
trainingUrlsDuplicates = loadDuplicateData()[0]
testingUrlsDuplicates = loadDuplicateData()[1]
trainingUrls = trainingUrlsDuplicates + urls[:int(0.7*len(urls))]
testingUrls = testingUrlsDuplicates + urls[int(0.7*len(urls)):]
print ('Size of Training Dataset: ', len(trainingUrls))
print ('Size of Testing Dataset: ', len(testingUrls))
#Adapted from NLTK package. Removes HTML markup from given string.
def clean_html(html):
# First we remove inline JavaScript/CSS:
cleaned = re.sub(r"(?is)<(script|style).*?>.*?(</\1>)", "", html.strip())
# Then we remove html comments. This has to be done before removing regular
# tags since comments can contain '>' characters.
cleaned = re.sub(r"(?s)<!--(.*?)-->[\n]?", "", cleaned)
# Next we can remove the remaining tags:
cleaned = re.sub(r"(?s)<.*?>", " ", cleaned)
# Finally, we deal with whitespace
cleaned = re.sub(r" ", " ", cleaned)
cleaned = re.sub(r" ", " ", cleaned)
cleaned = re.sub(r" ", " ", cleaned)
return (cleaned.strip()).split()
#Takes url as input. Returns list of all acronyms in webpage
def identifyAcronyms(rawText):
acronyms = []
#words commonly misidentified as acronyms are manually blacklisted
blacklist = ['ABSTRACT', 'INTRODUCTION', 'CONCLUSION', 'CONCLUSIONS', 'ACKNOWLEDGEMENTS', 'RESULTS']
for i in range(1,len(rawText)-1):
word = rawText[i]
word = re.sub(r'[^\w\s]','',word)
'''
characteristics of an acronym: all capital letters, length > 2,
contains only alphabet characters, not in blacklist, and not part
of a header (identified by determining if surrounding words are in all-caps)
'''
nextIndex = i+1
prevIndex = i-1
if(len(word)>2 and word[:-1].isupper() and word.isalpha() and word not in blacklist and not(rawText[i-1].isupper()) and not(rawText[i+1].isupper())):
acronyms.append((word, i))
return acronyms
# Extracting Features
db = AcronymDatabase()
#Convert training data to sparse vectors
tokenize = CountVectorizer().build_tokenizer()
true_defs = []
def features(cad):
acronym = cad[0]
context = cad[1]
if(len(cad)==3): true_defs.append(cad[2])
terms = tokenize(context)
d = {acronym: 10}
for t in terms:
if(t not in text.ENGLISH_STOP_WORDS):
d[t] = d.get(t, 0) + 1
return d
cadList = db.getContextAcronymList()
vect = DictVectorizer()
X_train = vect.fit_transform(features(d) for d in cadList)
joblib.dump(vect, 'trained-models/vectorizer.pkl')
print (X_train.toarray())
# Train Machine Learning Classifier
clf1 = MultinomialNB(alpha=0.09).fit(X_train, true_defs)
print ('Trained Model 1')
clf2 = svm.LinearSVC(C=1).fit(X_train, true_defs)
print ('Trained Model 2')
clf3 = tree.DecisionTreeClassifier(min_samples_leaf=1).fit(X_train, true_defs)
print ('Trained Model 3')
clf4 = RandomForestClassifier().fit(X_train, true_defs)
print ('Trained Model 4')
#joblib.dump(clf1, 'trained-models/naivebayes.pkl')
#joblib.dump(clf2, 'trained-models/svc.pkl')
#joblib.dump(clf3, 'trained-models/decisiontree.pkl')
#joblib.dump(clf4, 'trained-models/randomforest.pkl')
pickle.dump(clf1, open('trained-models/naivebayes.pkl', "wb"))
pickle.dump(clf2, open('trained-models/svc.pkl', "wb"))
pickle.dump(clf3, open('trained-models/decisiontree.pkl', "wb"))
pickle.dump(clf4, open('trained-models/randomforest.pkl', "wb"))
db.close()
| [
"lchandratejareddy@gmail.com"
] | lchandratejareddy@gmail.com |
26a6d470417d511695f873a58b8d5db99dc91e16 | 6351221d588668804e2df01936732eede4d96ed0 | /leetcode-cn/Python/783.二叉搜索树节点最小距离.py | 56c0fda33433e32c094361d70f8113486693b1ec | [] | no_license | LogicJake/code-for-interview | 8e4ec9e24ec661a443ad42aa2496d78a1fbc8a3f | 5990b09866696c2f3e845047c755fa72553dd421 | refs/heads/master | 2021-09-20T20:19:17.118333 | 2021-09-14T13:46:30 | 2021-09-14T13:46:30 | 102,202,212 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | #
# @lc app=leetcode.cn id=783 lang=python3
#
# [783] 二叉搜索树节点最小距离
#
# @lc code=start
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def minDiffInBST(self, root: TreeNode) -> int:
pre = 9999
ans = 9999
def help(root):
if root is None:
return
nonlocal ans, pre
help(root.left)
val = root.val
ans = min(ans, abs(val - pre))
pre = val
help(root.right)
help(root)
return ans
# @lc code=end
| [
"835410808@qq.com"
] | 835410808@qq.com |
d3780d93e32e9a6c4afe5d8cb86c7ee35f8c4279 | 101abd4b1c615ea87087f093fa3bdc292cd23c70 | /setup.py | 8e99c8c211bae6b7c2c8bc2852814a171d238cad | [
"MIT"
] | permissive | jwilk/pydiatra | 0bd40acb9b642730d6b639f03ef7a67765ae62d8 | 2eb53012ff90c6475a15011ac5b4006aa801a1d0 | refs/heads/master | 2023-08-15T06:37:02.823444 | 2023-08-10T07:17:10 | 2023-08-10T07:17:10 | 58,876,803 | 24 | 4 | null | null | null | null | UTF-8 | Python | false | false | 8,984 | py | # encoding=UTF-8
# Copyright © 2016-2022 Jakub Wilk <jwilk@jwilk.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function
import functools
import io
import os
import re
import sys
# pylint: disable=deprecated-module
import distutils.core
from distutils.command.build import build as distutils_build
from distutils.command.install import install as distutils_install
from distutils.command.install_data import install_data as distutils_install_data
from distutils.command.sdist import sdist as distutils_sdist
# pylint: enable=deprecated-module
# pylint: disable=import-error
if sys.version_info >= (3, 0):
import configparser
else:
import ConfigParser as configparser
# pylint: enable=import-error
try:
from wheel.bdist_wheel import bdist_wheel
except ImportError:
bdist_wheel = None
try:
import distutils644
except ImportError:
pass
else:
distutils644.install()
type({0}) # Python >= 2.7 is required
def uopen(*args):
if str is bytes:
return open(*args) # pylint: disable=consider-using-with,unspecified-encoding
else:
return open(*args, encoding='UTF-8') # pylint: disable=consider-using-with
def get_readme():
with io.open('doc/README', encoding='ASCII') as file:
content = file.read()
content = re.compile('^[.][.] vim:.*', re.MULTILINE).sub('', content)
return '\n' + content.strip() + '\n'
def get_version():
with io.open('doc/changelog', encoding='UTF-8') as file:
line = file.readline()
return line.split()[1].strip('()')
class cmd_build_doc(distutils_build):
description = 'build documentation'
def make_tags_rst(self, data_path, rst_path):
cp = configparser.RawConfigParser()
options = {}
if str is not bytes:
options.update(encoding='UTF-8')
cp.read(data_path, **options)
with uopen(rst_path + '.tmp', 'w') as rst_file:
self._make_tags_rst(cp, print=functools.partial(print, file=rst_file))
os.rename(rst_path + '.tmp', rst_path)
def _make_tags_rst(self, data, print): # pylint: disable=redefined-builtin
_strip_leading_dot = functools.partial(
re.compile('^[.]', re.MULTILINE).sub,
''
)
def _parse_multiline(value):
for s in value.splitlines():
if not s or s.isspace():
continue
yield _strip_leading_dot(s)
def parse_multiline(tag, key):
value = tag.get(key, '')
return _parse_multiline(value)
for tagname in data.sections():
tag = dict(data.items(tagname))
print(tagname)
print('~' * len(tagname))
description = str.join('\n',
parse_multiline(tag, 'description')
)
if not description.strip():
raise ValueError('missing description for {0}'.format(tagname))
print(description)
print()
references = list(parse_multiline(tag, 'references'))
if references:
print('References:', end='\n\n')
for ref in references:
match = re.match(r'\A(?P<name>[\w-]+)[(](?P<section>[0-9])[)]\Z', ref)
if match is not None:
ref = r'**{name}**\ ({section})'.format(**match.groupdict())
print(' |', ref)
print()
print('Severity, certainty:', end='\n\n')
print(' {0}, {1}'.format(tag['severity'], tag['certainty']))
print()
def make_man(self, rst_path, man_path):
# pylint: disable=import-outside-toplevel
import docutils.core
import docutils.writers.manpage
# pylint: enable=import-outside-toplevel
if str is bytes:
tmp_file = io.BytesIO()
else:
tmp_file = io.StringIO()
tmp_file.close = object # prevent docutils from closing the file
docutils.core.publish_file(
source_path=rst_path,
destination=tmp_file,
writer=docutils.writers.manpage.Writer(),
settings_overrides=dict(input_encoding='UTF-8', halt_level=1),
)
tmp_file.seek(0)
with uopen(man_path, 'w') as man_file:
for line in tmp_file:
if line.startswith(r'.\" vim:'):
continue
if line.startswith('.BI'):
# work-around for <https://bugs.debian.org/806601>:
line = line.replace(r'\fP', r'\fR')
line = re.sub(r'(?<=[a-zA-Z])\\\(aq(?=[a-z])', "'", line)
man_file.write(line)
def run(self):
data_path = 'pydiatra/data/tags'
tags_rst_path = 'doc/tags.rst'
man_rst_path = 'doc/manpage.rst'
man_path = 'doc/pydiatra.1'
self.make_file([data_path], tags_rst_path, self.make_tags_rst, [data_path, tags_rst_path])
self.make_file([tags_rst_path, man_rst_path], man_path, self.make_man, [man_rst_path, man_path])
distutils_build.sub_commands[:0] = [('build_doc', None)]
class cmd_install_doc(distutils_install_data):
description = 'install documentation'
def run(self):
man_dir = os.path.join(self.install_dir, 'share/man/man1')
self.mkpath(man_dir)
path = '{dir}/{prog}.1'.format(dir=man_dir, prog=script_name)
msg = 'writing {path}'.format(path=path)
data = ['.so pydiatra.1']
self.execute(distutils.file_util.write_file, (path, data), msg)
self.outfiles += [path] # pylint: disable=no-member
(path, _) = self.copy_file('doc/pydiatra.1', man_dir)
self.outfiles += [path] # pylint: disable=no-member
distutils_install.sub_commands[:0] = [('install_doc', None)]
class cmd_sdist(distutils_sdist):
def run(self):
self.run_command('build_doc')
return distutils_sdist.run(self)
def maybe_move_file(self, base_dir, src, dst):
src = os.path.join(base_dir, src)
dst = os.path.join(base_dir, dst)
if os.path.exists(src):
self.move_file(src, dst)
def make_release_tree(self, base_dir, files):
distutils_sdist.make_release_tree(self, base_dir, files)
self.maybe_move_file(base_dir, 'LICENSE', 'doc/LICENSE')
# distutils doesn't seem to handle symlinks-to-directories
# out of the box, so let's take care of them manually:
target = os.readlink('data')
dest = os.path.join(base_dir, 'data')
distutils.log.info('linking %s -> %s', dest, target)
if not self.dry_run:
os.symlink(target, dest)
classifiers = '''
Development Status :: 4 - Beta
Environment :: Console
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Quality Assurance
'''.strip().splitlines()
pkg_name = 'pydiatra'
script_name = 'py{0}diatra'.format(*sys.version_info)
def d(**kwargs):
return {
k: v for k, v in kwargs.items()
if v is not None
}
setup_options = dict(
name='pydiatra',
version=get_version(),
license='MIT',
description='yet another static checker for Python code',
long_description=get_readme(),
classifiers=classifiers,
url='https://jwilk.net/software/pydiatra',
author='Jakub Wilk',
author_email='jwilk@jwilk.net',
packages=[pkg_name],
package_dir={pkg_name: 'pydiatra'},
package_data={pkg_name: ['data/*']},
py_modules=['pydiatra'],
scripts=[script_name],
cmdclass=d(
bdist_wheel=bdist_wheel,
build_doc=cmd_build_doc,
install_doc=cmd_install_doc,
sdist=cmd_sdist,
),
)
if __name__ == '__main__':
distutils.core.setup(**setup_options)
# vim:ts=4 sts=4 sw=4 et
| [
"jwilk@jwilk.net"
] | jwilk@jwilk.net |
f5b172c6b6c06efd0d3b35027922b0150ee7ce06 | a990bd26d3a69d1ea6699c85efa2cea99452c3df | /problems/leetcode/minCostClimbingStairs746.py | 87821e07472154af738e6ceddc4526aa197158b3 | [] | no_license | abecus/DS-and-Algorithms | 5f1a948a085465ae165090ec957a9d5307ce729d | 3259e8183382265a27cf8c91e37d0086175a5703 | refs/heads/master | 2022-05-05T07:07:08.194243 | 2022-04-05T16:23:39 | 2022-04-05T16:23:39 | 193,111,610 | 11 | 6 | null | 2020-11-18T16:19:18 | 2019-06-21T14:27:25 | Python | UTF-8 | Python | false | false | 1,357 | py | """
_________________________746. Min Cost Climbing Stairs_________________________
Difficulty: Easy Likes: 1564 Dislikes: 354 Solution: Available
Total Accepted: 124.5K Total Submission: 254K Acceptance Rate: 49.0%
Tags: Dynamic Programming, Array
On a staircase, the i-th step has some non-negative cost cost[i] assigned
(0 indexed).
Once you pay the cost, you can either climb one or two
steps. You need to find minimum cost to reach the top of the floor, and you
can either start from the step with index 0, or the step with index 1.
Example 1:
Input: cost = [10, 15, 20]
Output: 15
Example 2:
Input: cost = [1, 100, 1, 1, 1, 100, 1, 1, 100, 1]
Output: 6
Note:
cost will have a length in the range [2, 1000].Every cost[i] will be an integer in the range [0, 999].
"""
from functools import lru_cache
def minCostClimbingStairs(cost):
# cost.append(0)
# @lru_cache(None)
# def helper(i):
# if i<2:
# if i<0:
# return 0
# return cost[i]
# return cost[i]+min(helper(i-1), helper(i-2))
# return helper(len(cost)-1)
a=cost[0]
b=cost[1]
for i in range(2,len(cost)):
b,a=cost[i]+min(a,b),b
return min(a,b)
if __name__ == "__main__":
cost = [0,0,0,0]
# cost = [10, 15, 20]
# cost = [1, 100, 1, 1, 1, 100, 1, 1, 100, 1]
print(minCostClimbingStairs(cost,))
"""
similarQuestions::
Climbing Stairs: Easy
"""
| [
"insaaone@gmail.com"
] | insaaone@gmail.com |
157812639222afbc8892cf7265e7bf8d671b44f7 | 1342fd58a6003ec8bf90db051f6c9c12fd5af403 | /report/image_processing/constant.py | b20c3f8ae15c519b136e2c68c64f57d2b2bcf55d | [] | no_license | PomeloX-Team/pomelox-report | 623d49c9a8e817c9ca8a45a340799dfb52b69afb | 503b9a25b06436580597b691533f5cba19630aad | refs/heads/master | 2021-08-22T20:34:56.807028 | 2017-12-01T06:52:28 | 2017-12-01T06:52:28 | 110,214,746 | 0 | 0 | null | 2017-11-25T17:23:42 | 2017-11-10T07:07:57 | Python | UTF-8 | Python | false | false | 92 | py | IMG_PATH = 'images/'
IMG_SAVE_PATH = 'images_result/'
RESULT_WIDTH = 300
RESULT_HEIGHT = 300 | [
"supakit.kr@gmail.com"
] | supakit.kr@gmail.com |
aacae8a6adbe5fe07565b2aa5b5d61ddacca7f29 | 755f1fa3d56340d64b72c261bf6c738d9fa5f1b5 | /httpInterface/get_series.py | 013e9595dab6efa8a28ed166f5bdd2f2f10c0ea1 | [] | no_license | piotrmaslanka/morda | ad3e78b3ab94129b551b4205b5a77367734d2ea6 | 4fbd1024b6e75c62c79bb15cc72bf111c53ce5a2 | refs/heads/master | 2018-12-31T21:56:59.237613 | 2014-06-01T16:31:21 | 2014-06-01T16:31:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | py | from yzero import Zero
import struct
from morda.settings import ZERO_CONNECT
def get_series(sername, from_, to, callback, zero=None):
if zero == None: zero = Zero(ZERO_CONNECT)
class Receiver(object):
def __init__(self, zero, from_, to, callback):
self.zero = zero
self.serdef = None
self.data = []
self.from_ = from_
self.to = to
self.fin_callback = callback
def on_got_serdef(self, serdef):
self.serdef = serdef
zero.readSeries(serdef, self.from_, self.to, self.on_got_data, self.on_end)
def on_got_data(self, dat):
self.data.extend(dat)
def on_end(self, suc):
deco = lambda x: struct.unpack('d' if self.serdef.recordsize == 8 else 'f', x)[0]
for i in range(0, len(self.data)):
ts, dat = self.data[i]
self.data[i] = (ts, deco(dat))
self.fin_callback(self.data)
rec = Receiver(zero, from_, to, callback)
zero.getDefinition(sername, rec.on_got_serdef)
| [
"piotr.maslanka@henrietta.com.pl"
] | piotr.maslanka@henrietta.com.pl |
2790e613abe27c5ba178b8e93d4e74818149712a | b3c93ef42b9ee529218f086cbf32535ac2e75f0b | /tests/test_version.py | c9560061ff1acad2224516f939120252a1cb0b54 | [
"MIT"
] | permissive | thumby/smartoptim | c34f85a9b83ee9194232e037ca2906b5db7fa221 | e65839dbb1fbcd985552a9a23e3a73e1cfc58d1a | refs/heads/master | 2021-01-10T02:38:03.011856 | 2015-11-23T16:35:16 | 2015-11-23T16:35:16 | 46,456,075 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of smartoptim.
# https://github.com/thumby/smartoptim
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2015, Thumby <dev@thumby.io>
from preggy import expect
from smartoptim import __version__
from tests.base import TestCase
class VersionTestCase(TestCase):
def test_has_proper_version(self):
expect(__version__).to_equal('0.1.0')
| [
"heynemann@gmail.com"
] | heynemann@gmail.com |
58ee8fa2946ceeab6382b00f21d4c439fc798613 | b31ff20af39eb96f5c78a3e41d4a7727a32bc309 | /collection/list/examples/list/list_comprehension/exercise2.py | 16501aaabe64db47d5a4e04fa83ac2ab25aa876f | [] | no_license | abhi15sep/Python-Course | 42b74c2f3f016c960edcc091808066f7d1411054 | 482bd7fdb32df54d97d1e6dd76fc807bcab70e9a | refs/heads/master | 2020-04-27T20:28:25.448692 | 2019-08-04T07:00:12 | 2019-08-04T07:00:12 | 174,659,260 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | #1. Given two lists [1,2,3,4] and [3,4,5,6], create a variable called answer, which is a new list that is the intersection of the two. Your output should be [3,4] . Hint: use the in operator to test whether an element is in a list. For example: 5 in [1,5,2] is True. 3 in [1,5,2] is False.
#2. Given a list of words ["Elie", "Tim", "Matt"] answer2, which is a new list with each word reversed and in lower case (use a slice to do the reversal!) Your output should be ['eile', 'mit', 'ttam']
#Using list comprehensions(the more Pythonic way):
answer = [val for val in [1,2,3,4] if val in [3,4,5,6]]
#the slice [::-1] is a quick way to reverse a string
answer2 = [val[::-1].lower() for val in ["Elie", "Tim", "Matt"]]
#Without list comprehensions, things are a bit longer:
answer = []
for x in [1,2,3,4]:
if x in [3,4,5,6]:
answer.append(x)
answer2 = []
for name in ["Elie", "Tim", "Matt"]:
answer2.append(name[::-1].lower())
| [
"abhaypratap3536@gmail.com"
] | abhaypratap3536@gmail.com |
cc920b16c95ac819b236fa84be0b4223fe58683a | 96602eeaa034e3e7b36df4ed10fba9bc9c9ed5c8 | /01-15/day08-2/文件操作.py | 57723e900a692f4a8c9c3817e4781677663b0e4e | [] | no_license | microease/Old-boy-Python-knight-project-1 | f4b12fe6f46bd159c6dc8151b1d28c6520042441 | dc32749e29cc63b44849d40af345d4bb7817d624 | refs/heads/master | 2020-09-20T18:00:34.821769 | 2019-12-11T14:47:44 | 2019-12-11T14:47:44 | 224,553,833 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | # coding:utf-8
# File Name: 文件操作
# Description :
# Author : micro
# Date: 2019/12/3
f = open("./测试.txt", encoding="utf-8", mode="w")
| [
"microease@163.com"
] | microease@163.com |
2c4f87b94aa0d96dd697d8229a9c6e151d976104 | 7a09af404f29389504742a3d5f1727bfbe562750 | /TrekBot2_WS/build/tf2_eigen/catkin_generated/pkg.develspace.context.pc.py | f5c5a1a6d5db48c50693d36ed16e7a9aa2ab7b74 | [
"MIT"
] | permissive | Rafcin/TrekBot | 4baa2ed93b90920b36adba0b72384ac320d2de01 | d3dc63e6c16a040b16170f143556ef358018b7da | refs/heads/master | 2020-03-30T02:15:35.361254 | 2018-12-14T03:30:25 | 2018-12-14T03:30:25 | 150,622,252 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 565 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/xavier_ssd/TrekBot/TrekBot2_WS/src/geometry2/tf2_eigen/include;/usr/include/eigen3".split(';') if "/xavier_ssd/TrekBot/TrekBot2_WS/src/geometry2/tf2_eigen/include;/usr/include/eigen3" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "tf2_eigen"
PROJECT_SPACE_DIR = "/xavier_ssd/TrekBot/TrekBot2_WS/devel/.private/tf2_eigen"
PROJECT_VERSION = "0.6.3"
| [
"Rafcin.s@gmail.com"
] | Rafcin.s@gmail.com |
1ec1f56bd18f8a82568356fe621e4593be8a368a | 2565970a2461fec97c0b0972eed161d9bd9e268f | /test_finetuning.py | 74bcbec628e6f36adb5ccd96b187b105430735d5 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | sskram/nn-toolbox | ee40e2d0f8792444a6d46bd477ffc69b144691a1 | b998d61800311d788bf3c4c5f517f1fd6d9c2e66 | refs/heads/master | 2020-07-01T02:55:22.318592 | 2019-07-21T06:46:07 | 2019-07-21T06:46:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,678 | py | import torchvision
from torch.nn import *
from torchvision.datasets import ImageFolder, CIFAR10
from torchvision.models import resnet18
from torchvision.transforms import *
from torch.optim import *
from torch.optim.lr_scheduler import CosineAnnealingLR
from nntoolbox.optim import AdamW
from torch.utils.data import random_split
# from adabound import AdaBound
from nntoolbox.vision.components import *
from nntoolbox.vision.learner import SupervisedImageLearner
from nntoolbox.utils import load_model, LRFinder, get_first_batch, get_device
from nntoolbox.callbacks import *
from nntoolbox.metrics import Accuracy, Loss
from nntoolbox.vision.transforms import Cutout
from nntoolbox.vision.models import ImageClassifier, EnsembleImageClassifier
from nntoolbox.losses import SmoothedCrossEntropy
from nntoolbox.init import lsuv_init
import math
torch.backends.cudnn.benchmark=True
pretrained_model = resnet18()
# print(modules)
from nntoolbox.utils import cut_model, get_trainable_parameters
feature, head = cut_model(pretrained_model)
for param in feature.parameters():
param.requires_grad = False
model = nn.Sequential(
feature,
FeedforwardBlock(
in_channels=512,
out_features=10,
pool_output_size=2,
hidden_layer_sizes=(256, 128)
)
)
# print(model._modules['0']._modules[str(0)])
from typing import List
def unfreeze(module: Sequential, optimizer: Optimizer, unfreeze_from: int, unfreeze_to: int):
"""
Unfreeze a model from ind
:param module:
:param optimizer
:param unfreeze_from:
:param unfreeze_to:
:return:
"""
for ind in range(len(module)):
submodule = module._modules[str(ind)]
if ind < unfreeze_from:
for param in submodule.parameters():
param.requires_grad = False
elif ind < unfreeze_to:
for param in submodule.parameters():
param.requires_grad = True
optimizer.add_param_group({'params': submodule.parameters()})
class GradualUnfreezing(Callback):
    """
    Training callback that progressively unfreezes slices of the feature
    extractor (``learner._model._modules['0']``) as training proceeds.

    ``freeze_inds`` lists slice boundaries in decreasing order (e.g.
    ``[6, 4, 2, 0]``); every ``unfreeze_every`` epochs the next slice
    ``[freeze_inds[k], freeze_inds[k - 1])`` is thawed via :func:`unfreeze`
    and handed to the learner's optimizer as a fresh parameter group.
    """

    def __init__(self, freeze_inds: List[int], unfreeze_every: int):
        """
        :param freeze_inds: child indices delimiting the slices to thaw,
            ordered from the deepest boundary down to 0
        :param unfreeze_every: number of epochs between two unfreeze steps
        """
        self._freeze_inds = freeze_inds
        self._unfreeze_every = unfreeze_every

    def on_epoch_end(self, logs: Dict[str, Any]) -> bool:
        """
        Thaw the next slice when an unfreeze epoch is reached.

        :param logs: training log dict; only ``logs['epoch']`` is read
        :return: False, so this callback never stops training
        """
        epoch = logs['epoch']
        step = epoch // self._unfreeze_every
        # Skip epoch 0 and in-between epochs; stop once every slice listed
        # in freeze_inds has been processed.
        if epoch % self._unfreeze_every == 0 \
                and epoch > 0 \
                and step < len(self._freeze_inds):
            unfreeze_from = self._freeze_inds[step]
            unfreeze_to = self._freeze_inds[step - 1]
            unfreeze(self.learner._model._modules['0'], self.learner._optimizer,
                     unfreeze_from, unfreeze_to)
            print("Unfreeze feature after " + str(unfreeze_from))
        return False
unfreezer = GradualUnfreezing([6, 4, 2, 0], 10)
# data = CIFAR10('data/', train=True, download=True, transform=ToTensor())
# train_size = int(0.8 * len(data))
# val_size = len(data) - train_size
# train_dataset, val_dataset = torch.utils.data.random_split(data, [train_size, val_size])
# train_dataset.dataset.transform = Compose(
# [
# RandomHorizontalFlip(),
# RandomResizedCrop(size=32, scale=(0.95, 1.0)),
# # Cutout(length=16, n_holes=1),
# ToTensor()
# ]
# )
#
# test_dataset = torchvision.datasets.CIFAR10('data/', train=False, download=True, transform=ToTensor())
train_val_dataset = ImageFolder(
'data/imagenette-160/train',
transform=Compose([
Resize((128, 128)),
ToTensor()
])
)
test_dataset = ImageFolder(
'data/imagenette-160/val',
transform=Compose([
Resize((128, 128)),
ToTensor()
])
)
train_size = int(0.8 * len(train_val_dataset))
val_size = len(train_val_dataset) - train_size
train_dataset, val_dataset = random_split(train_val_dataset, [train_size, val_size])
train_dataset.dataset.transform = Compose(
[
RandomHorizontalFlip(),
RandomResizedCrop(size=(128, 128), scale=(0.95, 1.0)),
# Cutout(length=16, n_holes=1),
ToTensor()
]
)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=128, shuffle=True)
val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=128, shuffle=False)
test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=128, shuffle=False)
# print(count_trainable_parameters(model)) # 14437816 3075928
optimizer = SGD(get_trainable_parameters(model), weight_decay=0.0001, lr=0.30, momentum=0.9)
learner = SupervisedImageLearner(
train_data=train_loader,
val_data=val_loader,
model=model,
criterion=SmoothedCrossEntropy().to(get_device()),
optimizer=optimizer,
mixup=True
)
# lr_finder = LRFinder(
# model=model,
# train_data=train_loader,
# criterion=SmoothedCrossEntropy(),
# optimizer=partial(SGD, lr=0.074, weight_decay=0.0001, momentum=0.9),
# device=get_device()
# )
# lr_finder.find_lr(warmup=100, callbacks=[ToDeviceCallback()])
swa = StochasticWeightAveraging(learner, average_after=5025, update_every=670)
callbacks = [
# ManifoldMixupCallback(learner=learner, modules=[layer_1, block_1]),
ToDeviceCallback(),
# MixedPrecisionV2(),
# InputProgressiveResizing(initial_size=80, max_size=160, upscale_every=10, upscale_factor=math.sqrt(2)),
# unfreezer,
Tensorboard(),
# ReduceLROnPlateauCB(optimizer, monitor='accuracy', mode='max', patience=10),
LRSchedulerCB(CosineAnnealingLR(optimizer, eta_min=0.10, T_max=335)),
swa,
LossLogger(),
ModelCheckpoint(learner=learner, filepath="weights/model.pt", monitor='accuracy', mode='max'),
]
metrics = {
"accuracy": Accuracy(),
"loss": Loss()
}
final = learner.learn(
n_epoch=500,
callbacks=callbacks,
metrics=metrics,
final_metric='accuracy'
)
print(final)
load_model(model=model, path="weights/model.pt")
classifier = ImageClassifier(model, tta_transform=Compose([
ToPILImage(),
RandomHorizontalFlip(),
RandomResizedCrop(size=(128, 128), scale=(0.95, 1.0)),
ToTensor()
]))
print(classifier.evaluate(test_loader))
print("Test SWA:")
model = swa.get_averaged_model()
classifier = ImageClassifier(model, tta_transform=Compose([
ToPILImage(),
RandomHorizontalFlip(),
RandomResizedCrop(size=(128, 128), scale=(0.95, 1.0)),
ToTensor()
]))
print(classifier.evaluate(test_loader)) | [
"nhatsmrt@uw.edu"
] | nhatsmrt@uw.edu |
8dffd484828558ccc0f9cc76142d46da927d7d7a | f390de67e6dd2ca8c6e369460084cb4d9c8c4a0c | /coursebuilder/common/safe_dom.py | 8b20c79a16c6a622524c55376c0f9e26cdb3f246 | [
"CC-BY-2.5",
"CC-BY-3.0",
"Apache-2.0"
] | permissive | andredias/course-builder | d2170076ef982444d55883b06b87f1a0da46a33b | ac4aa3131228a260c0a53b5d050dc78108126b88 | refs/heads/master | 2020-04-06T03:51:42.666679 | 2015-02-24T19:33:25 | 2015-02-24T19:33:25 | 33,021,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,342 | py | """Classes to build sanitized HTML."""
__author__ = 'John Orr (jorr@google.com)'
import cgi
import re
def escape(strg):
    """Return *strg* with HTML-special characters replaced by entities.

    Builds on cgi.escape (escapes ``&``, ``<``, ``>`` and, with quote=1,
    ``"``) and additionally encodes single quotes and backticks so the
    result is safe inside single- or double-quoted attribute values.
    NOTE(review): cgi.escape was removed in Python 3.8; this module appears
    to target Python 2 (see ``unicode`` usage below) -- confirm before
    porting.
    """
    return cgi.escape(strg, quote=1).replace("'", '&#39;').replace('`', '&#96;')
class Node(object):
    """Abstract base for every member of the sanitized DOM tree.

    Concrete subclasses must implement the ``sanitized`` property; the
    base class only tracks the owning parent node.
    """

    def __init__(self):
        # No parent until the node is attached to a container.
        self._parent = None

    def _set_parent(self, parent):
        # Called by containers when they adopt this node.
        self._parent = parent

    @property
    def parent(self):
        """The container this node was attached to, or None."""
        return self._parent

    @property
    def sanitized(self):
        """Escaped string form of the node; subclasses must override."""
        raise NotImplementedError()

    def __str__(self):
        # Stringifying a node yields its sanitized representation.
        return self.sanitized
# pylint: disable=incomplete-protocol
class NodeList(object):
    """Mutable sequence of Nodes that sanitizes its members in bulk."""

    def __init__(self):
        self.list = []
        self._parent = None

    def __len__(self):
        return len(self.list)

    def _set_parent(self, parent):
        # A list must never become its own parent.
        assert self != parent
        self._parent = parent

    @property
    def parent(self):
        """Container that adopted this list, or None."""
        return self._parent

    def append(self, node):
        """Append *node* and adopt it; returns self for chaining."""
        assert node is not None, 'Cannot add an empty value to the node list'
        self.list.append(node)
        node._set_parent(self)  # pylint: disable=protected-access
        return self

    def insert(self, index, node):
        """Insert *node* at *index* and adopt it; returns self for chaining."""
        assert node is not None, 'Cannot add an empty value to the node list'
        self.list.insert(index, node)
        node._set_parent(self)  # pylint: disable=protected-access
        return self

    @property
    def children(self):
        # Shallow copy so callers cannot mutate the internal list.
        return [] + self.list

    def empty(self):
        """Drop every member; returns self for chaining."""
        self.list = []
        return self

    def delete(self, node):
        """Remove every occurrence of *node* from the list."""
        self.list = [child for child in self.list if child != node]

    @property
    def sanitized(self):
        """Concatenation of the sanitized form of every member."""
        return ''.join(node.sanitized for node in self.list)

    def __str__(self):
        return self.sanitized
class Text(Node):
    """Holds untrusted text which will be sanitized when accessed."""

    def __init__(self, unsafe_string):
        super(Text, self).__init__()
        # Coerce to unicode up front (Python 2 API) so escaping behaves
        # the same regardless of the input type.
        self._value = unicode(unsafe_string)

    @property
    def sanitized(self):
        # The raw value never leaves this class unescaped.
        return escape(self._value)
class Comment(Node):
    """An HTML comment."""

    def __init__(self, unsafe_string=''):
        super(Comment, self).__init__()
        self._value = unicode(unsafe_string)

    def get_value(self):
        # Raw, unescaped comment text.
        return self._value

    @property
    def sanitized(self):
        # Escape the body so a crafted value cannot close the comment
        # context (the '>' of a '-->' is escaped).
        return '<!--%s-->' % escape(self._value)

    def add_attribute(self, **attr):
        # Comments have no attributes; accepted and ignored for API
        # parity with Element.
        pass

    def add_text(self, unsafe_string):
        # Append more (still untrusted) text to the comment body.
        self._value += unicode(unsafe_string)
class Element(Node):
    """Embodies an HTML element which will be sanitized when accessed."""

    # Tag and attribute names must be plain identifiers (letters, digits,
    # '_' and '-'), which rules out injection via the name itself.
    _ALLOWED_NAME_PATTERN = re.compile(r'^[a-zA-Z][_\-a-zA-Z0-9]*$')

    # HTML void elements are serialized self-closed ('<br/>') instead of
    # with an explicit end tag.
    _VOID_ELEMENTS = frozenset([
        'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
        'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'])

    def __init__(self, tag_name, **attr):
        """Initializes an element with given tag name and attributes.

        Tag name will be restricted to alpha chars, attribute names
        will be quote-escaped.

        Args:
            tag_name: the name of the element, which must match
                _ALLOWED_NAME_PATTERN.
            **attr: the names and value of the attributes. Names must match
                _ALLOWED_NAME_PATTERN and values will be quote-escaped.
        """
        assert Element._ALLOWED_NAME_PATTERN.match(tag_name), (
            'tag name %s is not allowed' % tag_name)
        for attr_name in attr:
            assert Element._ALLOWED_NAME_PATTERN.match(attr_name), (
                'attribute name %s is not allowed' % attr_name)
        super(Element, self).__init__()
        self._tag_name = tag_name
        self._children = []
        # Attribute names are stored lower-cased; a later duplicate wins.
        self._attr = {}
        for _name, _value in attr.items():
            self._attr[_name.lower()] = _value

    def has_attribute(self, name):
        # Case-insensitive, mirroring the lower-cased store.
        return name.lower() in self._attr

    @property
    def attributes(self):
        # The stored (lower-cased) attribute names.
        return self._attr.keys()

    def set_attribute(self, name, value):
        self._attr[name.lower()] = value
        return self

    def get_escaped_attribute(self, name):
        # Escaped on the way out; raises KeyError for unknown names.
        return escape(self._attr[name.lower()])

    def add_attribute(self, **attr):
        # Same validation as the constructor; returns self for chaining.
        for attr_name, value in attr.items():
            assert Element._ALLOWED_NAME_PATTERN.match(attr_name), (
                'attribute name %s is not allowed' % attr_name)
            self._attr[attr_name.lower()] = value
        return self

    def add_child(self, node):
        # Adopt the node; returns self for chaining.
        node._set_parent(self)  # pylint: disable=protected-access
        self._children.append(node)
        return self

    def append(self, node):
        # Alias for add_child.
        return self.add_child(node)

    def add_children(self, node_list):
        for child in node_list.list:
            self.add_child(child)
        return self

    def empty(self):
        # Drop all children; returns self for chaining.
        self._children = []
        return self

    def add_text(self, text):
        # Untrusted text is wrapped in a Text node so it gets escaped.
        return self.add_child(Text(text))

    def can_have_children(self):
        return True

    @property
    def children(self):
        # Shallow copy so callers cannot mutate the internal list.
        return [] + self._children

    @property
    def tag_name(self):
        return self._tag_name

    @property
    def sanitized(self):
        """Sanitize the element and its descendants."""
        assert Element._ALLOWED_NAME_PATTERN.match(self._tag_name), (
            'tag name %s is not allowed' % self._tag_name)
        buff = '<' + self._tag_name
        # Attributes are emitted sorted for deterministic output.
        for attr_name, value in sorted(self._attr.items()):
            # 'className' is the DOM-style spelling of the 'class'
            # attribute; data_* maps to the HTML data-* convention.
            if attr_name == 'classname':
                attr_name = 'class'
            elif attr_name.startswith('data_'):
                attr_name = attr_name.replace('_', '-')
            if value is None:
                value = ''
            buff += ' %s="%s"' % (
                attr_name, escape(value))
        if self._children:
            buff += '>'
            for child in self._children:
                buff += child.sanitized
            buff += '</%s>' % self._tag_name
        elif self._tag_name.lower() in Element._VOID_ELEMENTS:
            buff += '/>'
        else:
            # Non-void empty elements still need an explicit end tag.
            buff += '></%s>' % self._tag_name
        return buff
class A(Element):
    """Embodies an 'a' tag. Just a convenience wrapper on Element."""

    def __init__(self, href, **attr):
        """Initialize an 'a' tag to a given target.

        Args:
            href: The value to put in the 'href' tag of the 'a' element.
            **attr: the names and value of the attributes. Names must match
                _ALLOWED_NAME_PATTERN and values will be quote-escaped.
        """
        super(A, self).__init__('a', **attr)
        # href is applied after the generic attributes so it always wins.
        self.add_attribute(href=href)
class ScriptElement(Element):
    """Represents an HTML <script> element."""

    def __init__(self, **attr):
        super(ScriptElement, self).__init__('script', **attr)

    def can_have_children(self):
        # A script holds a body, not child elements.
        return False

    def add_child(self, unused_node):
        raise ValueError()

    def add_children(self, unused_nodes):
        raise ValueError()

    def empty(self):
        raise ValueError()

    def add_text(self, text):
        """Add the script body."""

        class Script(Text):
            # Unlike a plain Text child, the script body is emitted
            # verbatim (not HTML-escaped); only a closing </script> tag
            # is rejected, at serialization time.

            def __init__(self, script):
                # Pylint is just plain wrong about warning here; suppressing.
                # pylint: disable=bad-super-call
                # The Text base is initialised with a placeholder; its
                # _value is never used because sanitized is overridden.
                super(Script, self).__init__(None)
                self._script = script

            @property
            def sanitized(self):
                if '</script>' in self._script:
                    raise ValueError('End script tag forbidden')
                return self._script

        self._children.append(Script(text))
class Entity(Node):
    """Holds an XML entity."""

    # Named (&amp;), decimal (&#65;) or hexadecimal (&#x41;) entities only.
    ENTITY_PATTERN = re.compile('^&([a-zA-Z]+|#[0-9]+|#x[0-9a-fA-F]+);$')

    def __init__(self, entity):
        assert Entity.ENTITY_PATTERN.match(entity)
        super(Entity, self).__init__()
        self._entity = entity

    @property
    def sanitized(self):
        # Re-validate in case the value was mutated after construction.
        assert Entity.ENTITY_PATTERN.match(self._entity)
        return self._entity
| [
"psimakov@google.com"
] | psimakov@google.com |
c302bd0f7915622567d722cecc72a0fa8d7a454e | 7f57c12349eb4046c40c48acb35b0f0a51a344f6 | /2015/AddTwoNumbers_v2.py | d543f46cb1cd4d03c07c46e63c81d2e789227d84 | [] | no_license | everbird/leetcode-py | 0a1135952a93b93c02dcb9766a45e481337f1131 | b093920748012cddb77258b1900c6c177579bff8 | refs/heads/master | 2022-12-13T07:53:31.895212 | 2022-12-10T00:48:39 | 2022-12-10T00:48:39 | 11,116,752 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | #!/usr/bin/env python
# encoding: utf-8
# Definition for singly-linked list.
class ListNode:
    """A single node of a singly-linked list."""

    def __init__(self, x):
        # Payload of this node.
        self.val = x
        # Successor node; None marks the tail of the list.
        self.next = None
class Solution:
    """Adds two non-negative numbers stored as reversed-digit linked lists."""

    # @param {ListNode} l1
    # @param {ListNode} l2
    # @return {ListNode}
    def addTwoNumbers(self, l1, l2):
        # An absent operand means the other list already is the sum.
        if l1 is None:
            return l2
        if l2 is None:
            return l1
        head = tail = ListNode(0)
        carry = 0
        while l1 or l2 or carry:
            digit_sum = carry
            if l1:
                digit_sum += l1.val
                l1 = l1.next
            if l2:
                digit_sum += l2.val
                l2 = l2.next
            carry, tail.val = divmod(digit_sum, 10)
            # Only grow the result while another digit is still coming, so
            # the sum never ends with a spurious zero node.
            if l1 or l2 or carry:
                tail.next = ListNode(0)
                tail = tail.next
        return head
def print_list(list_head):
    # Print the whole list on one line, then a blank line (Python 2
    # print statements).
    print_l(list_head)
    print '\n'
def print_l(list_head):
    # Recursively print each value; the trailing comma keeps the output
    # on one line (Python 2 print statement).
    if list_head:
        print list_head.val,
        print_l(list_head.next)
if __name__ == '__main__':
    # Smoke test: 5 + 5 = 10, which prints as reversed digits "0 1".
    l1a = ListNode(5)
    l1 = l1a
    l2a = ListNode(5)
    l2 = l2a
    s = Solution()
    lr = s.addTwoNumbers(l1, l2)
    print_list(l1)
    print_list(l2)
    print_list(lr)
| [
"stephen.zhuang@gmail.com"
] | stephen.zhuang@gmail.com |
6bfb1f17c3ccc3769a46c7ec0e1de921b8d9f251 | 6e4e6b64c035881f1cff39db616b0a80e1568c51 | /ARC015/q1.py | 8625a990d5f7a2e9da866e9e6ec2cfe7063a5c0c | [] | no_license | Lischero/Atcoder | f7471a85ee553e3ae791e3e5670468aea1fa53cc | f674d6a20a56eebdafa6d50d5d2d0f4030e5eace | refs/heads/master | 2020-05-21T16:23:36.095929 | 2018-10-18T04:27:55 | 2018-10-18T04:27:55 | 60,671,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | # -*- coding:utf-8 -*-
# AtCoder ARC015 A: convert a Celsius reading to Fahrenheit (F = 9C/5 + 32).
n = int(input())
print((9/5*n)+32)
| [
"vermouth.lischero@gmail.com"
] | vermouth.lischero@gmail.com |
91a8cc0846d15cb77f1dac39e86a83ba81da4c66 | 9b162310e5db0f714dbd6019894eb5b04192b6aa | /src/windows-gam.spec | 658f6450028749ee0c70daae3de86bc5c86028b0 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0",
"LicenseRef-scancode-rsa-md4",
"HPND-sell-variant",
"LicenseRef-scancode-zeusbench",
"NTP",
"metamail",
"Beerware",
"LicenseRef-scancode-rsa-1990",
"RSA-MD",
"Spencer-94",
"LicenseRef-scancode-other-permissive",
"MIT"
] | permissive | xbl3/GAMADV-XTD | c1d68911f4116157173838856f49151e05cd5658 | a09efb7a10074dc052968ef82c1044f2a0b664b3 | refs/heads/master | 2022-04-08T22:30:39.715172 | 2020-02-22T18:05:00 | 2020-02-22T18:05:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 849 | spec | # -*- mode: python -*-
# PyInstaller spec: bundle GAM with its certificate and discovery files
# placed next to the executable ('.').
ssl_json_files = [
    ('cacerts.pem', '.'),
    ('cros-aue-dates.json', '.'),
    ('cloudprint-v2.json', '.'),
    ('contacts-v3.json', '.'),
    ('email-audit-v1.json', '.'),
    ('email-settings-v2.json', '.'),
    ('sites-v1.json', '.')
]
a = Analysis(['gam.py'],
             pathex=['C:\\GAMADV-XTD'],
             datas=ssl_json_files,
             hiddenimports=[],
             hookspath=None,
             excludes=['_tkinter'],
             runtime_hooks=None)
# Drop the bundled pyconfig data file; removing during iteration is safe
# here because the loop breaks immediately after the single removal.
for d in a.datas:
    if 'pyconfig' in d[0]:
        a.datas.remove(d)
        break
pyz = PYZ(a.pure)
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          name='gam.exe',
          debug=False,
          strip=None,
          upx=True,
          console=True )
| [
"ross.scroggs@gmail.com"
] | ross.scroggs@gmail.com |
80ceed4ac066ee786c77744bdc31d0acfd1fd6e0 | ca28f1535bb9a4b6504d5f6a5c5abf1a4569037f | /pos_umbrella/pos_umbrella/report/eod_report/eod_report.py | fc9a6b62af7844ad6a75cb96ea3541872cb50a0a | [
"MIT"
] | permissive | worldkingpradeep/pos_umbrella | 8b8f83cb7d638f15a1808e779656e250549c5e26 | 6fa7a51a9c019b533befcf85955fdd5e165c6a5c | refs/heads/master | 2023-04-20T06:30:37.054666 | 2021-05-14T17:19:59 | 2021-05-14T17:19:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,285 | py | # Copyright (c) 2013, jan and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
def execute(filters=None):
    """Build the EOD (end of day) sales report.

    Returns the (columns, data) pair expected by Frappe script reports.
    Rows cover submitted Sales Invoices posted between ``from_date`` and
    ``to_date``, optionally restricted to one POS profile.  When the
    ``with_details`` filter is set, only POS invoices are selected and
    every invoice item becomes its own row; otherwise there is one
    summary row per invoice.  One extra column is appended per mode of
    payment encountered (see check_mop).

    Fix: removed two leftover debug ``print`` calls that polluted the
    server log on every report run.

    :param filters: dict-like report filters (from_date, to_date,
        pos_profile, with_details)
    """
    columns, data = [], []
    from_date = filters.get("from_date")
    to_date = filters.get("to_date")
    pos_profile = filters.get("pos_profile")
    with_details = filters.get("with_details")
    if from_date > to_date:
        frappe.throw("From Date should be before To Date")
    else:
        columns.append({"fieldname": "store_name", "label": "Store Name", "fieldtype": "Data", "width": 150})
        if with_details:
            columns.append({"fieldname": "invoice_number", "label": "Invoice Number", "fieldtype": "Link", "options": "Sales Invoice", "width": 150})
            columns.append({"fieldname": "item_code", "label": "Item_code", "fieldtype": "Data", "width": 120})
            columns.append({"fieldname": "item_name", "label": "Item Name", "fieldtype": "Data", "width": 230})
            columns.append({"fieldname": "quantity", "label": "Quantity", "fieldtype": "Data", "width": 100})
            columns.append({"fieldname": "rate", "label": "Rate", "fieldtype": "Data", "width": 100})
            columns.append({"fieldname": "amount", "label": "Amount", "fieldtype": "Data", "width": 100})
        columns.append({"fieldname": "discount", "label": "Discount", "fieldtype": "Data", "width": 100})
        columns.append({"fieldname": "write_off", "label": "Write Off", "fieldtype": "Data", "width": 100})
        columns.append({"fieldname": "loyalty", "label": "Loyalty", "fieldtype": "Data", "width": 100})
        columns.append({"fieldname": "net_sale", "label": "Net Sale", "fieldtype": "Data", "width": 100})
        columns.append({"fieldname": "vat", "label": "VAT", "fieldtype": "Data", "width": 100})
        columns.append({"fieldname": "gross_sale", "label": "Gross Sale", "fieldtype": "Data", "width": 100})
        # NOTE(review): the WHERE clause is assembled with str.format, so a
        # crafted pos_profile could inject SQL -- confirm the filter value
        # is constrained upstream or switch to parameterized queries.
        condition = ""
        if pos_profile:
            condition += " and pos_profile='{0}' ".format(pos_profile)
        if with_details:
            condition += " and is_pos=1"
        condition += " ORDER By pos_profile ASC"
        query = """ SELECT * FROM `tabSales Invoice`
                WHERE docstatus=1 and posting_date BETWEEN '{0}' and '{1}' {2}""".format(from_date, to_date, condition)
        sales_invoices = frappe.db.sql(query, as_dict=True)
        for idx, i in enumerate(sales_invoices):
            if not with_details:
                # One summary row per invoice.
                obj = {
                    "invoice_number": i.name,
                    "store_name": i.pos_profile,
                    "discount": i.discount_amount,
                    "write_off": i.write_off_amount,
                    "loyalty": i.loyalty_amount,
                    "net_sale": i.total,
                    "gross_sale": i.grand_total,
                    "vat": i.total_taxes_and_charges,
                }
                mode_of_payments = frappe.db.sql(""" SELECT * FROM `tabSales Invoice Payment` WHERE parent=%s """, i.name, as_dict=True)
                for ii in mode_of_payments:
                    check_mop(columns, ii)
                    obj[ii.mode_of_payment] = ii.amount
                data.append(obj)
            else:
                # One row per invoice item; invoice-level figures and
                # payments are carried only on the first item's row.
                obj = {}
                obj["invoice_number"] = i.name
                obj["store_name"] = i.pos_profile
                invoice_items = frappe.db.sql(""" SELECT * FROM `tabSales Invoice Item` WHERE parent=%s""", i.name, as_dict=1)
                for idxx, x in enumerate(invoice_items):
                    if idxx == 0:
                        obj["item_code"] = x.item_code
                        obj["item_name"] = x.item_name
                        obj["quantity"] = x.qty
                        obj["rate"] = x.rate
                        obj["amount"] = x.amount
                        obj["discount"] = i.discount_amount
                        obj["write_off"] = i.write_off_amount
                        obj["loyalty"] = i.loyalty_amount
                        obj["net_sale"] = i.total
                        obj["gross_sale"] = i.grand_total
                        obj["vat"] = i.total_taxes_and_charges
                        mode_of_payments = frappe.db.sql(""" SELECT * FROM `tabSales Invoice Payment` WHERE parent=%s """,
                                                         i.name, as_dict=True)
                        for ii in mode_of_payments:
                            check_mop(columns, ii)
                            obj[ii.mode_of_payment] = ii.amount
                    else:
                        obj = {}
                        obj["item_code"] = x.item_code
                        obj["item_name"] = x.item_name
                        obj["quantity"] = x.qty
                        obj["rate"] = x.rate
                        obj["amount"] = x.amount
                    data.append(obj)
    return columns, data
def check_mop(columns, ii):
    """Ensure *columns* has a column for the payment entry's mode.

    Appends a 150px Data column labelled with ``ii.mode_of_payment``
    unless a column with that label already exists; *columns* is
    mutated in place.
    """
    mop = ii.mode_of_payment
    if any(col.get("label") == mop for col in columns):
        return
    columns.append({
        "fieldname": mop,
        "label": mop,
        "fieldtype": "Data",
        "width": 150
    })
"jangeles@bai.ph"
] | jangeles@bai.ph |
fabdcb016bb2945ce5a4420e58c20a8cc2070765 | 46afba4407a98ac564ed7a2e08aebfcec4fa1ba3 | /Project Euler/problem_20.py | 83947b08ce6336236edc3cd968c9bdea337af690 | [] | no_license | areebbeigh/CompetitiveProgramming | b28ffe99ac15cadfa3b54f9974beb77c280b2309 | 04044674ad0663181326649d0c14da94108e90da | refs/heads/master | 2021-07-15T07:48:42.338241 | 2021-07-13T10:36:11 | 2021-07-13T10:36:11 | 199,145,494 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | #!/usr/bin/python3.6
def factorial(n):
    """Return n! for a non-negative integer n.

    Fixes two defects of the recursive original: negative input now
    raises ValueError instead of recursing without bound, and the
    iterative form cannot hit the recursion limit for large n.

    :param n: non-negative integer
    :raises ValueError: if n is negative
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result
# Project Euler 20: print the sum of the decimal digits of 100!.
print(
    sum(map(lambda x: int(x), str(factorial(100))))
)
| [
"areebbeigh@gmail.com"
] | areebbeigh@gmail.com |
ba292c32cf83dce0c1b1f6d90d73548a523ad68b | 98e4dc41e3d994dfb55a2553c79d1b61590ecca6 | /LeetCode/Medium/Subarray Sum Equals K/sol.py | aa9bc85d1765a4c74100a8bfe8caf9119a4376d8 | [] | no_license | krohak/Project_Euler | b753c4f3bbf26a5eff3203e27482599d1e089fc6 | 1d8a2326543d69457f1971af9435b3e93ab32f52 | refs/heads/master | 2022-09-02T10:48:59.472111 | 2022-08-18T11:11:16 | 2022-08-18T11:11:16 | 111,204,162 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | class Solution:
def subarraySum(self, nums, target_sum):
cumulative_sum = {0:1}
counter = 0
summ = 0
for num in nums:
summ+=num
if (summ-target_sum) in cumulative_sum:
counter+=cumulative_sum[(summ-target_sum)]
cumulative_sum[summ] = cumulative_sum.get(summ, 0)+1
return counter
nums = [1,1,1,1,2,2,1,1]
sol = Solution().subarraySum(nums, 2)
print(sol) | [
"rohaksinghal14@gmail.com"
] | rohaksinghal14@gmail.com |
b4d04353919a7f6071d65606fccf5a77fe9f4b53 | 5bc59a84adc4854cdaa3af02be84fb4a70a85bb2 | /04-DRF-LEVEL-TWO/ebooksapi/ebooks/apps.py | ff3c9cb021a64b9a95e270a20b7cc6aafb2232cb | [] | no_license | furkalokbu/REST_API | be2a0a4c05dfca15b24420d1fa1d22524a851a0b | 55552d59a020ae969d4ef8dfda52207cf5c40c4c | refs/heads/main | 2023-05-05T21:36:16.856267 | 2021-05-25T19:37:18 | 2021-05-25T19:37:18 | 332,436,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from django.apps import AppConfig
class EbooksConfig(AppConfig):
    # Django application configuration for the "ebooks" app; the name is
    # what INSTALLED_APPS references.
    name = "ebooks"
| [
"furkalokbu@gmail.com"
] | furkalokbu@gmail.com |
4eff3ee86176474b0f5ada0af11864b69625c3c0 | ec551303265c269bf1855fe1a30fdffe9bc894b6 | /topic10_queue/T933_RecentCounter/interview.py | 8bd0e8689824bc5d8875b7a6fa5e0244cd77e011 | [] | no_license | GongFuXiong/leetcode | 27dbda7a5ced630ae2ae65e19d418ebbc65ae167 | f831fd9603592ae5bee3679924f962a3ebce381c | refs/heads/master | 2023-06-25T01:05:45.683510 | 2021-07-26T10:05:25 | 2021-07-26T10:05:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,331 | py | '''
933. 最近的请求次数
写一个 RecentCounter 类来计算最近的请求。
它只有一个方法:ping(int t),其中 t 代表以毫秒为单位的某个时间。
返回从 3000 毫秒前到现在的 ping 数。
任何处于 [t - 3000, t] 时间范围之内的 ping 都将会被计算在内,包括当前(指 t 时刻)的 ping。
保证每次对 ping 的调用都使用比之前更大的 t 值。
示例:
输入:inputs = ["RecentCounter","ping","ping","ping","ping"], inputs = [[],[1],[100],[3001],[3002]]
输出:[null,1,2,3,3]
提示:
每个测试用例最多调用 10000 次 ping。
每个测试用例会使用严格递增的 t 值来调用 ping。
每次调用 ping 都有 1 <= t <= 10^9。
'''
import collections
class RecentCounter:
    """Counts pings whose timestamps fall inside a sliding 3000 ms window."""

    def __init__(self):
        # Timestamps of recent pings, oldest first.
        self.deque = collections.deque()

    def ping(self, t):
        """Record a ping at time *t* (ms) and return how many pings fall
        in [t - 3000, t].  Callers guarantee t is strictly increasing, so
        expired entries can simply be dropped from the left end.
        """
        self.deque.append(t)
        cutoff = t - 3000
        while self.deque[0] < cutoff:
            self.deque.popleft()
        return len(self.deque)
if __name__ == "__main__":
    solution = RecentCounter()
    # Read one timestamp per line until a blank line is entered; echo the
    # count of pings inside the trailing 3000 ms window after each one.
    while 1:
        str1 = input()
        if str1 != "":
            num1 = int(str1)
            res = solution.ping(num1)
            print(res)
        else:
            break
| [
"958747457@qq.com"
] | 958747457@qq.com |
bfcc4f82ae5fd44b4414bb887094046c13bb3e10 | c0fad90611a6e943277c3d79eeb48ccd5f0d0a88 | /29divide.py | 6cee6413834a3e4bbc05b6458fb1114fdad5b765 | [] | no_license | lmb633/leetcode | e2da31984af07b9e16787f4d57f82dab2dcb551a | d91568d245dd8fb66f46ff73737cbad974f490a6 | refs/heads/master | 2021-07-19T16:07:40.864854 | 2021-02-24T10:57:40 | 2021-02-24T10:57:40 | 243,146,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | class Solution(object):
def divide(self, dividend, divisor):
if dividend == 0:
return 0
if dividend == -2 ** 31 and divisor == -1:
return 2 ** 31 - 1
flag = 1
if dividend ^ divisor < 0:
flag = -1
dividend = abs(dividend)
divisor = abs(divisor)
result = 0
for i in range(31, -1, -1):
if (dividend >> i) >= divisor:
result += (1 << i)
dividend -= divisor << i
return result if flag > 0 else -result
| [
"limingbo@focusmedia.cn"
] | limingbo@focusmedia.cn |
315e0a44b6c237ed7a6e9ed6807d3222de0857a3 | 7837cd1bee1a9abd623600cf30c2f462da48d558 | /aaa.py | 1123a79ae464804cd597a4e45e9d9c4e5f526712 | [] | no_license | hal1932/astor_test | 8285b3b8c1fa187b7cd3c8d147c8a75d8e4ba207 | e14c7de55bb6e947e41387d33fff5286bbea4570 | refs/heads/master | 2021-08-30T09:37:33.083995 | 2017-12-17T08:36:12 | 2017-12-17T08:36:12 | 114,521,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 734 | py | # encoding: utf-8
import functools
def deco1(func):
    # Plain decorator: logs before and after the wrapped call (Python 2
    # print statements).  NOTE: the wrapped function's return value is
    # discarded by the wrapper.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print 'deco1 start'
        func(*args, **kwargs)
        print 'deco1 end'
    return wrapper
def deco2(*arg, **kwarg):
    # Decorator factory: accepts arbitrary (currently unused) arguments
    # and returns the actual decorator.  As with deco1, the wrapper
    # discards the wrapped function's return value.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print 'deco2 start'
            func(*args, **kwargs)
            print 'deco2 end'
        return wrapper
    return decorator
def func1(arg1):
    # Undecorated baseline for comparison with func2/func3.
    print arg1
    x = 1
    print x

@deco1
def func2(arg):
    # Wrapped by the plain decorator.
    print arg

@deco2('hoge', 1, a=2.0)
def func3(arg):
    # Wrapped by the parameterized decorator factory.
    print arg

def main():
    func1('aaa')
    func2('bbb')
    func3('ccc')

if __name__ == '__main__':
    main()
| [
"yu.arai.19@gmail.com"
] | yu.arai.19@gmail.com |
760c46b1f182472c11a3cb0026781c521fddb142 | a943cb6da95ec1e06cb480887ba1062a5783527f | /2012-aqs/figures/plot-smh-norris.py | 262432f7ea056293f457ca60b89fe0a54119ed39 | [] | no_license | andycasey/papers | 1b2c882c20b0c65b5899d70dc95825ec53cc9fe2 | 3d585ad4b6b1c3b40227185fd7b22ea9bdeb8e02 | refs/heads/master | 2021-01-19T17:24:48.788580 | 2013-08-13T08:51:02 | 2013-08-13T08:51:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,155 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
wavelength, smh_ew, norris_ew = np.loadtxt('SMH-Norris-comparison.data', usecols=(0, 1, 2, ), unpack=True)
fig = plt.figure(figsize=(6,7))
fig.subplots_adjust(hspace=0.0, wspace=0.0)
gs = gridspec.GridSpec(2, 1, height_ratios=[1, 2])
ax1 = fig.add_subplot(gs[0])
#ax1 = plt.subplot2grid((3, 1), (0, 0))
ax1.scatter(smh_ew, smh_ew - norris_ew, facecolor='none', edgecolor='k', marker='+')
ax1.plot([0, 200], [0, 0], 'k-', zorder=-1)
A = np.vstack([smh_ew, np.ones(len(norris_ew))]).T
m, c = np.linalg.lstsq(A, smh_ew - norris_ew)[0]
x = np.array([np.min(smh_ew), np.max(smh_ew)])
ax1.plot(x, m * x + c, 'k:')
ylim = np.max(np.abs(np.array(ax1.get_ylim())))
ax1.set_ylim(-15, 15)
ax1.xaxis.set_visible(False)
ax1.set_ylabel('$\Delta{}W_\lambda$ [m$\AA{}$]')
ax2 = fig.add_subplot(gs[1], sharex=ax1)
#ax2 = plt.subplot2grid((3, 1), (1, 0), rowspan=2)
ax2.scatter(smh_ew, norris_ew, facecolor='none', edgecolor='k', marker='+')
A = np.vstack([norris_ew, np.ones(len(norris_ew))]).T
m, c = np.linalg.lstsq(A, smh_ew)[0]
x = np.array([0, 200])
ax2.plot(x, x, 'k-', zorder=-1)
x = np.array([np.min(smh_ew), np.max(smh_ew)])
ax2.plot(x, m * x + c, 'k:')
# Plot an error cone
error = 10 # percent
bounds = np.array([0, 160])
#ax2.plot(bounds, bounds * (1 + error/100.), '-', c='#aaaaaa', zorder=-5)
#ax2.plot(bounds, bounds * (1 - error/100.), '-', c='#aaaaaa', zorder=-5)
ax1.set_xlim(bounds)
ax2.set_xlim(bounds)
ax2.set_ylim(bounds)
ax2.set_xlabel('$W_\lambda$ (This work, automatic) [m$\AA{}$]')
ax2.set_ylabel('$W_\lambda$ (Norris et al. 1996) [m$\AA{}$]')
ax2.get_yticklabels()[-1].set_visible(False)
ax1.get_yticklabels()[0].set_visible(False)
ax1.get_yticklabels()[-1].set_visible(False)
ax1.text(5, 10, '$\langle{}\Delta{}W_\lambda\\rangle{}\,=\,-0.64\,\pm\,2.78\,$m${\AA}$', color='k', verticalalignment='center')
ax2.text(5, 150, "$a_0\,=\,%1.2f$\n$a_1\,=\,%1.2f$\n$N\,=\,%i$" % (c, m, len(smh_ew)), verticalalignment='top')
#ax1.set_title('%i lines in HD 140283' % (len(smh_ew), ))
plt.savefig('smh-norris.pdf')
plt.savefig('smh-norris.eps')
| [
"andycasey@gmail.com"
] | andycasey@gmail.com |
6298875d8e11878aa23517f122c8a75e9d106d46 | 38fff7bdefd8d62a740d51329b50d0e1e49258bb | /projects/smart_open/fuzz_zip.py | 3a7f08c09ad89f312bad1a8251882d276259b866 | [
"Apache-2.0"
] | permissive | google/oss-fuzz | 026384c2ada61ef68b147548e830f60730c5e738 | f0275421f84b8f80ee767fb9230134ac97cb687b | refs/heads/master | 2023-08-31T23:30:28.157702 | 2023-08-31T21:49:30 | 2023-08-31T21:49:30 | 63,809,205 | 9,438 | 2,315 | Apache-2.0 | 2023-09-14T20:32:19 | 2016-07-20T19:39:50 | Shell | UTF-8 | Python | false | false | 1,605 | py | #!/usr/bin/python3
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atheris
import sys
import os
with atheris.instrument_imports():
from smart_open import open
import zipfile
import tempfile
def TestInput(data):
    # Atheris fuzz target: round-trip fuzzer-chosen member names and
    # content through a zip archive opened via smart_open's open().
    if len(data) < 10:
        return

    fdp = atheris.FuzzedDataProvider(data)
    tmp = tempfile.NamedTemporaryFile(prefix=fdp.ConsumeString(10), suffix=fdp.ConsumeString(4), delete=False)
    filestr = fdp.ConsumeString(100)

    # Write two members with fuzzed names but identical content.
    with open(tmp.name, 'wb') as f:
        with zipfile.ZipFile(f, 'w') as zip:
            zip.writestr(fdp.ConsumeString(10), filestr)
            zip.writestr(fdp.ConsumeString(10), filestr)

    # Read every member back and verify the content survived unchanged.
    with open(tmp.name, 'rb') as f:
        with zipfile.ZipFile(f) as zip:
            for info in zip.infolist():
                file_bytes = zip.read(info.filename)
                assert filestr == file_bytes.decode('utf-8')
    os.unlink(tmp.name)
def main():
    # Standard atheris boilerplate: register the fuzz target, instrument
    # everything, and start fuzzing.
    atheris.Setup(sys.argv, TestInput, enable_python_coverage=True)
    atheris.instrument_all()
    atheris.Fuzz()
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | google.noreply@github.com |
66b1007d1dabe0428cbe0ba4c2f82d9ad8aa4dec | cb20ef5b4048457a2e6dca4a4cb45c53c9843744 | /tests/RESTful/testcases/system/test01_usermanager.py | c8459e97ec6e5c06d18b505403753911f74efb0c | [] | no_license | rudecs/openvcloud | 5001b77e8d943427c1bed563f3dcc6b9467936e2 | 12ccce2a54034f5bf5842e000c2cc3d7e22836d8 | refs/heads/master | 2020-03-24T00:00:10.422677 | 2018-11-22T13:41:17 | 2018-11-22T13:41:17 | 142,267,808 | 2 | 1 | null | 2018-07-25T08:02:37 | 2018-07-25T08:02:36 | null | UTF-8 | Python | false | false | 4,341 | py | import time, random, unittest
from testcases import *
from nose_parameterized import parameterized
class UsersTests(TestcasesBase):
    """API tests for the system.usermanager user endpoints."""

    def setUp(self):
        # Every test starts from a freshly created user; the name is
        # registered in CLEANUP so the base class removes it afterwards.
        super().setUp()
        self.data, self.response = self.api.system.usermanager.create(provider=None)
        self.assertEqual(self.response.status_code, 200, self.response.content)
        self.username = self.data['username']
        self.CLEANUP['users'].append(self.username)

    @parameterized.expand([('exists', 200, 'true'), ('non-exist', 404, 'false')])
    def test01_userget_userexists(self, case, response_code, userexists):
        """ OVC-001
        #. Create user (U1), should succeed.
        #. Get user (U1), should succeed.
        #. Check if user (U1) exists, should return true.
        #. Get not existing user, should fail.
        #. Check if non-existing user exists, should return false.
        """
        if case == 'exists':
            username = self.username
        else:
            # A random name that was never created.
            username = self.utils.random_string()

        response = self.api.system.usermanager.userget(name=username)
        self.assertEqual(response.status_code, response_code, response.content)

        # userexists always answers 200; the truth value is in the body.
        response = self.api.system.usermanager.userexists(name=username)
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(response.text, userexists)

    @parameterized.expand([('exists', 200), ('non-exist', 404)])
    def test02_edit_user(self, case, response_code):
        """ OVC-002
        #. Create user (U1), should succeed.
        #. Edit user (U1), should succeed.
        #. Edit non-existing user, should fail.
        """
        if case == 'exists':
            username = self.username
        else:
            username = self.utils.random_string()

        data, response = self.api.system.usermanager.editUser(username=username)
        self.assertEqual(response.status_code, response_code, response.content)

    @parameterized.expand([('exists', 200), ('non-exist', 404)])
    def test03_delete_user(self, case, response_code):
        """ OVC-003
        #. Create user (U1), should succeed.
        #. Delete user (U1), should succeed.
        #. Delete none existing user, should fail.
        """
        if case == 'exists':
            username = self.username
        else:
            username = self.utils.random_string()

        response = self.api.system.usermanager.delete(username=username)
        self.assertEqual(response.status_code, response_code, response.content)

        # Whichever branch ran, the user must not exist afterwards.
        response = self.api.system.usermanager.userexists(name=username)
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(response.text, 'false')
class GroupsTests(TestcasesBase):
def setUp(self):
super().setUp()
self.data, self.response = self.api.system.usermanager.createGroup()
self.assertEqual(self.response.status_code, 200, self.response.content)
self.name = self.data['name']
def tearDown(self):
self.api.system.usermanager.deleteGroup(id=self.name)
super().tearDown()
@parameterized.expand([('exists', 200), ('non-exist', 404)])
def test01_edit_group(self, case, response_code):
""" OVC-001
#. Create group (G1), should succeed.
#. Edit group (G1), should succeed.
#. Edit non-existing group, should fail.
"""
if case == 'exists':
name = self.name
else:
name = self.utils.random_string()
data, response = self.api.system.usermanager.editGroup(name=name)
self.assertEqual(response.status_code, response_code, response.content)
@parameterized.expand([('exists', 200), ('non-exist', 404)])
@unittest.skip('https://github.com/0-complexity/openvcloud/issues/1367')
def test02_delete_group(self, case, response_code):
""" OVC-002
#. Create group (G1), should succeed.
#. Delete group (G1), should succeed.
#. Delete non-existing group, should fail.
"""
if case == 'exists':
name = self.name
else:
name = self.utils.random_string()
response = self.api.system.usermanager.deleteGroup(id=name)
self.assertEqual(response.status_code, response_code, response.content) | [
"deboeck.jo@gmail.com"
] | deboeck.jo@gmail.com |
0a58a94a0291c9eee74ec90033a491790733ec6e | 55e28e35db5bf6a844df3fb47080500b115a893e | /day10/select/select_server.py | 009bb5773e7561fb2d56689d463ea451aefcc9ee | [] | no_license | pylarva/Python | 5743ffa4a69db42b642d51b62f9e9b69ddbc1a72 | 71b484950e6dbdcf708726a68a3386d0d6ddc07f | refs/heads/master | 2020-04-19T09:11:11.195393 | 2017-11-16T07:32:59 | 2017-11-16T07:32:59 | 67,507,687 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,258 | py | # !/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:pylarva
# bolg:www.lichengbing.com
__author__ = 'Alex Li'
import select
import socket
import sys
import queue
server = socket.socket()
server.setblocking(0)
server_addr = ('localhost', 10000)
print('starting up on %s port %s' % server_addr)
server.bind(server_addr)
server.listen(5)
inputs = [server, ] #自己也要监测呀,因为server本身也是个fd
outputs = []
message_queues = {}
while True:
print("waiting for next event...")
readable, writeable, exeptional = select.select(inputs,outputs,inputs) #如果没有任何fd就绪,那程序就会一直阻塞在这里
for s in readable: #每个s就是一个socket
if s is server: #别忘记,上面我们server自己也当做一个fd放在了inputs列表里,传给了select,如果这个s是server,代表server这个fd就绪了,
#就是有活动了, 什么情况下它才有活动? 当然 是有新连接进来的时候 呀
#新连接进来了,接受这个连接
conn, client_addr = s.accept()
print("new connection from",client_addr)
conn.setblocking(0)
inputs.append(conn) #为了不阻塞整个程序,我们不会立刻在这里开始接收客户端发来的数据, 把它放到inputs里, 下一次loop时,这个新连接
#就会被交给select去监听,如果这个连接的客户端发来了数据 ,那这个连接的fd在server端就会变成就续的,select就会把这个连接返回,返回到
#readable 列表里,然后你就可以loop readable列表,取出这个连接,开始接收数据了, 下面就是这么干 的
message_queues[conn] = queue.Queue() #接收到客户端的数据后,不立刻返回 ,暂存在队列里,以后发送
else: #s不是server的话,那就只能是一个 与客户端建立的连接的fd了
#客户端的数据过来了,在这接收
data = s.recv(1024)
if data:
print("收到来自[%s]的数据:" % s.getpeername()[0], data)
message_queues[s].put(data) #收到的数据先放到queue里,一会返回给客户端
if s not in outputs:
outputs.append(s) #为了不影响处理与其它客户端的连接 , 这里不立刻返回数据给客户端
else:#如果收不到data代表什么呢? 代表客户端断开了呀
print("客户端断开了",s)
if s in outputs:
outputs.remove(s) #清理已断开的连接
inputs.remove(s) #清理已断开的连接
del message_queues[s] ##清理已断开的连接
for s in writeable:
try :
next_msg = message_queues[s].get_nowait()
except queue.Empty:
print("client [%s]" %s.getpeername()[0], "queue is empty..")
outputs.remove(s)
else:
print("sending msg to [%s]"%s.getpeername()[0], next_msg)
s.send(next_msg.upper())
for s in exeptional:
print("handling exception for ", s.getpeername())
inputs.remove(s)
if s in outputs:
outputs.remove(s)
s.close()
del message_queues[s] | [
"1326126359@qq.com"
] | 1326126359@qq.com |
6d71558f72f56b692f826f2c54b03347759f5030 | 66b220a4c8c0bfde435f29e3a18cf79f6e7a4c67 | /src/exemplos/01_Dados/02_Operadores/01-subtracao.py | 77f2c292956d5c74e2524a563e94f8fc4d5a83cb | [] | no_license | gnramos/CIC-APC | 089b6d0110394b4db97c23e032394eaefce0aeef | b94fe2dc4840064f1613d24e5d1447d49b9bb8bd | refs/heads/master | 2023-04-15T18:11:27.919896 | 2023-04-05T21:31:03 | 2023-04-05T21:31:03 | 31,514,265 | 42 | 30 | null | 2018-11-20T18:09:10 | 2015-03-01T22:57:39 | C | UTF-8 | Python | false | false | 964 | py | # -*- coding: utf-8 -*-
# @package: 01-subtracao.py
# @author: Guilherme N. Ramos (gnramos@unb.br)
# @disciplina: Algoritmos e Programação de Computadores
#
# Exemplos de utilização do operador de subtração. Em Python, só é possível
# subtrair valores numéricos.
print('Subtração (numéricos):')
# Escreva o resultado da operação 2 - 1. A subtração de valores inteiros também
# é um valor inteiro.
print(' 2 - 1 =', 2 - 1)
# Escreva o resultado da operação 1 - 2.
print(' 1 - 2 =', 1 - 2)
# Escreva o resultado da operação 2 - 1.0. A subtração de valores reais de
# inteiros é um valor real.
print(' 2 - 1.0 =', 2 - 1.0)
# Escreva o resultado da operação 2.0 - 1. A subtração de valores inteiros de
# reais é um valor real.
print(' 2.0 - 1 =', 2.0 - 1)
# Escreva o resultado da operação 2.0 - 1.0. A subtração de valores reais
# também é um valor real.
print(' 2.0 - 1.0 =', 2.0 - 1.0) | [
"ramos@gnramos.com"
] | ramos@gnramos.com |
2810d657e2aa3272c2d799f7b7ea8f265d83dd92 | 321afe9ca4a30ff655483901bdb6368cce1bd58b | /catalog/migrations/0019_biditems_time.py | f8acacc12ce34e72ef8a1a024598b0d27ff127b5 | [] | no_license | moses-mugoya/Auction-System | 75456a475a0a76a9c7143f2f039e059f841d204f | 42de3e68fd7a99bdb0598f820b5f8ae6359e972d | refs/heads/main | 2023-02-04T22:58:22.793934 | 2020-12-24T18:05:51 | 2020-12-24T18:05:51 | 324,211,000 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | # Generated by Django 2.1.4 on 2019-04-07 17:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('catalog', '0018_auto_20190407_1343'),
]
operations = [
migrations.AddField(
model_name='biditems',
name='time',
field=models.BooleanField(default=False),
),
]
| [
"mosesmugoya31@gmail.com"
] | mosesmugoya31@gmail.com |
efbe5cae3f768724158b26af2d52232b3009deaf | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02889/s800108978.py | f16d83d80d9585a9e51d77414e46a2135a05fdac | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,377 | py | import sys
import math
import heapq
sys.setrecursionlimit(10**7)
INTMAX = 9323372036854775807
INTMIN = -9223372036854775808
DVSR = 1000000007
def POW(x, y): return pow(x, y, DVSR)
def INV(x, m=DVSR): return pow(x, m - 2, m)
def DIV(x, y, m=DVSR): return (x * INV(y, m)) % m
def LI(): return [int(x) for x in sys.stdin.readline().split()]
def LF(): return [float(x) for x in sys.stdin.readline().split()]
def LS(): return sys.stdin.readline().split()
def II(): return int(sys.stdin.readline())
def FLIST(n):
res = [1]
for i in range(1, n+1): res.append(res[i-1]*i%DVSR)
return res
N,M,L=LI()
LG=10**15
DIST=[[LG for _ in range(N+1)] for _ in range(N+1)]
for i in range(M):
a,b,c = LI()
if c <= L:
DIST[a][b] = c
DIST[b][a] = c
for k in range(1, N+1):
for i in range(1, N+1):
for j in range(1, N+1):
if DIST[i][j] > DIST[i][k] + DIST[k][j]:
DIST[i][j] = DIST[i][k] + DIST[k][j]
for i in range(1, N+1):
for j in range(1, N+1):
DIST[i][j] = 1 if DIST[i][j] <= L else LG
for k in range(1, N+1):
for i in range(1, N+1):
for j in range(1, N+1):
if DIST[i][j] > DIST[i][k] + DIST[k][j]:
DIST[i][j] = DIST[i][k] + DIST[k][j]
for i in range(II()):
st, en = LI()
if DIST[st][en] >= LG:
print(-1)
else:
print(DIST[st][en] - 1)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
403050852dd2e8392f1e8610f4911bf3608ab119 | 9ee751382146d280c0105981e2e54fa900cb04de | /djblets/util/tests/test_compressed_tags.py | 1d1d87775890230056755ea9767bb66c60caefae | [] | no_license | lmyfzx/djblets | 25c3d3fb2478047eede05238b60b6d16598f9131 | 33b4475cfabe24644335093a028d7d2aabc4ab84 | refs/heads/master | 2023-02-03T18:20:46.873799 | 2020-12-22T10:58:35 | 2020-12-22T10:58:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,966 | py | """Unit tests for djblets.util.templatetags.djblets_forms."""
from __future__ import unicode_literals
import os
from django.conf import settings
from django.template import Context, Template
from pipeline.conf import settings as pipeline_settings
from djblets.testing.testcases import TestCase
class CompressedTagsTests(TestCase):
"""Unit tests for the {% compressed_* %} template tags."""
def test_compressed_css_tag(self):
"""Testing {% compressed_css %}"""
self._touch_files(['test.css', 'test.d41d8cd98f00.css'])
pipeline_settings.STYLESHEETS = {
'test': {
'source_filenames': [],
'output_filename': 'test.css',
}
}
t = Template('{% load compressed %}'
'{% compressed_css "test" %}')
self.assertHTMLEqual(
t.render(Context({'test': 'test'})),
'<link href="/test.d41d8cd98f00.css" rel="stylesheet"'
' type="text/css" />')
def test_compressed_js_tag(self):
"""Testing {% compressed_js %}"""
self._touch_files(['test.js', 'test.d41d8cd98f00.js'])
pipeline_settings.JAVASCRIPT = {
'test': {
'source_filenames': [],
'output_filename': 'test.js',
}
}
t = Template('{% load compressed %}'
'{% compressed_js "test" %}')
self.assertHTMLEqual(
t.render(Context({'test': 'test'})),
'<script type="text/javascript" src="/test.d41d8cd98f00.js"'
' charset="utf-8"></script>')
def _touch_files(self, filenames):
"""Create one or more empty static media files.
Args:
filenames (list of unicode):
The list of static media files to create.
"""
for filename in filenames:
with open(os.path.join(settings.STATIC_ROOT, filename), 'w'):
pass
| [
"christian@beanbaginc.com"
] | christian@beanbaginc.com |
edf6ec9094282214c247789c19af30388e1fb891 | cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1 | /xlsxwriter/test/styles/test_styles06.py | ecba383c9b2848fa80c25090f9d3c53d0e528278 | [
"BSD-2-Clause"
] | permissive | glasah/XlsxWriter | bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec | 1e8aaeb03000dc2f294ccb89b33806ac40dabc13 | refs/heads/main | 2023-09-05T03:03:53.857387 | 2021-11-01T07:35:46 | 2021-11-01T07:35:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,183 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, jmcnamara@cpan.org
#
import unittest
from io import StringIO
from ..helperfunctions import _xml_to_list
from ...styles import Styles
from ...workbook import Workbook
class TestAssembleStyles(unittest.TestCase):
"""
Test assembling a complete Styles file.
"""
def test_assemble_xml_file(self):
"""Test for border colour styles."""
self.maxDiff = None
fh = StringIO()
style = Styles()
style._set_filehandle(fh)
workbook = Workbook()
workbook.add_format({
'left': 1,
'right': 1,
'top': 1,
'bottom': 1,
'diag_border': 1,
'diag_type': 3,
'left_color': 'red',
'right_color': 'red',
'top_color': 'red',
'bottom_color': 'red',
'diag_color': 'red'})
workbook._set_default_xf_indices()
workbook._prepare_format_properties()
style._set_style_properties([
workbook.xf_formats,
workbook.palette,
workbook.font_count,
workbook.num_format_count,
workbook.border_count,
workbook.fill_count,
workbook.custom_colors,
workbook.dxf_formats,
workbook.has_comments,
])
style._assemble_xml_file()
workbook.fileclosed = 1
exp = _xml_to_list("""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<fonts count="1">
<font>
<sz val="11"/>
<color theme="1"/>
<name val="Calibri"/>
<family val="2"/>
<scheme val="minor"/>
</font>
</fonts>
<fills count="2">
<fill>
<patternFill patternType="none"/>
</fill>
<fill>
<patternFill patternType="gray125"/>
</fill>
</fills>
<borders count="2">
<border>
<left/>
<right/>
<top/>
<bottom/>
<diagonal/>
</border>
<border diagonalUp="1" diagonalDown="1">
<left style="thin">
<color rgb="FFFF0000"/>
</left>
<right style="thin">
<color rgb="FFFF0000"/>
</right>
<top style="thin">
<color rgb="FFFF0000"/>
</top>
<bottom style="thin">
<color rgb="FFFF0000"/>
</bottom>
<diagonal style="thin">
<color rgb="FFFF0000"/>
</diagonal>
</border>
</borders>
<cellStyleXfs count="1">
<xf numFmtId="0" fontId="0" fillId="0" borderId="0"/>
</cellStyleXfs>
<cellXfs count="2">
<xf numFmtId="0" fontId="0" fillId="0" borderId="0" xfId="0"/>
<xf numFmtId="0" fontId="0" fillId="0" borderId="1" xfId="0" applyBorder="1"/>
</cellXfs>
<cellStyles count="1">
<cellStyle name="Normal" xfId="0" builtinId="0"/>
</cellStyles>
<dxfs count="0"/>
<tableStyles count="0" defaultTableStyle="TableStyleMedium9" defaultPivotStyle="PivotStyleLight16"/>
</styleSheet>
""")
got = _xml_to_list(fh.getvalue())
self.assertEqual(got, exp)
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
816a9237dc7938a0b5c52aa4309713b2228816f7 | adf3076bd40e37f4a422e79f6efb938f13def6c6 | /objectModel/Python/cdm/storage/local.py | e8a47c16802f5603d8c1d96895365a7d49ac8a07 | [
"MIT",
"CC-BY-4.0"
] | permissive | assetdatasystems/CDM | 445d1b22f0071620f1eb2fd8d1b5f7d6152ec388 | 576ccfd07fc718b3d0911112e5041729a3ba8088 | refs/heads/master | 2020-09-29T00:54:18.350717 | 2019-12-06T22:49:02 | 2019-12-06T22:49:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,521 | py | # ----------------------------------------------------------------------
# Copyright (c) Microsoft Corporation.
# All rights reserved.
# ----------------------------------------------------------------------
from datetime import datetime
import json
import os
from typing import Any, List, Optional
from .base import StorageAdapterBase
class LocalAdapter(StorageAdapterBase):
"""Local file system storage adapter"""
def __init__(self, root: str = '') -> None:
self._root = os.path.abspath(root) # type: str
def can_read(self) -> bool:
return True
def can_write(self) -> bool:
return True
async def read_async(self, corpus_path: str) -> str:
adapter_path = self.create_adapter_path(corpus_path)
with open(adapter_path, 'r', encoding='utf-8') as file:
return file.read()
async def write_async(self, corpus_path: str, data: str) -> None:
adapter_path = self.create_adapter_path(corpus_path)
parent_dir = os.path.abspath(os.path.join(adapter_path, os.pardir))
os.makedirs(parent_dir, exist_ok=True)
with open(adapter_path, 'w', encoding='utf-8') as file:
file.write(data)
def create_adapter_path(self, corpus_path: str) -> str:
corpus_path = corpus_path[(corpus_path.find(':') + 1):].lstrip('\\/')
return os.path.normpath(os.path.join(self._root, corpus_path))
def create_corpus_path(self, adapter_path: str) -> Optional[str]:
if not adapter_path.startswith("http"):
normalized_adapter_path = os.path.abspath(adapter_path).replace('\\', '/')
normalized_root = self._root.replace('\\', '/')
if normalized_adapter_path.startswith(normalized_root):
return normalized_adapter_path[len(normalized_root):]
# Signal that we did not recognize path as one for this adapter.
return None
def clear_cache(self) -> None:
pass
async def compute_last_modified_time_async(self, adapter_path: str) -> Optional[datetime]:
if os.path.exists(adapter_path):
return datetime.fromtimestamp(os.path.getmtime(adapter_path))
return None
async def fetch_all_files_async(self, folder_corpus_path: str) -> List[str]:
adapter_folder = self.create_adapter_path(folder_corpus_path)
adapter_files = [os.path.join(dp, fn) for dp, dn, fns in os.walk(adapter_folder) for fn in fns]
return [self.create_corpus_path(file) for file in adapter_files]
| [
"nebanfic@microsoft.com"
] | nebanfic@microsoft.com |
15ac7a012158192d1c75ea2adf14451862b089f5 | c475cd8531a94ffae69cc92371d41531dbbddb6c | /Projects/bullet3-2.89/examples/pybullet/gym/pybullet_utils/util.py | 5a014c8ed07a8b530a0fc58ca5cf708b435bd654 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown",
"Zlib"
] | permissive | WolfireGames/overgrowth | 72d3dd29cbd7254337265c29f8de3e5c32400114 | 594a2a4f9da0855304ee8cd5335d042f8e954ce1 | refs/heads/main | 2023-08-15T19:36:56.156578 | 2023-05-17T08:17:53 | 2023-05-17T08:20:36 | 467,448,492 | 2,264 | 245 | Apache-2.0 | 2023-05-09T07:29:58 | 2022-03-08T09:38:54 | C++ | UTF-8 | Python | false | false | 218 | py | import random
import numpy as np
def set_global_seeds(seed):
try:
import tensorflow as tf
except ImportError:
pass
else:
tf.set_random_seed(seed)
np.random.seed(seed)
random.seed(seed)
return
| [
"max@autious.net"
] | max@autious.net |
b0dcca8c35d97cf96c6a426d4cd4e0c4f1757ab5 | b7125b27e564d2cc80a2ce8d0a6f934aa22c8445 | /.history/sudoku_20201031012809.py | 1e1bd31e61f1cca07054e01ff4e65dab7e7033db | [] | no_license | JensVL96/Puzzle-solver-for-fun | 4c15dcd570c3705b7ac555efb56b52913e81083c | 6d8a4378a480372213a596a336a4deca727a00fc | refs/heads/master | 2021-07-15T05:19:42.185495 | 2020-11-08T13:59:49 | 2020-11-08T13:59:49 | 224,855,888 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,215 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
from config import *
from create_board import *
from solve_bloard import *
from display_board import *
from string import *
import pygame as pg
import numpy as np
# For error highlighting
row_index = (0, 0)
col_index = (0, 0)
blk_index = (0, 0)
input_lock = 0
def reset_errors():
global input_lock
input_lock = 1
global row_index
row_index = (0, 0)
global col_index
col_index = (0, 0)
global blk_index
blk_index = (0, 0)
def get_cord(pos):
global box_index_x
box_index_x = (pos[0] - TOP_LX)//BLOCK_SIZE
global box_index_y
box_index_y = (pos[1] - TOP_LY)//BLOCK_SIZE
def valid(grid, x, y, val, increase):
global input_lock
for index in range(9):
# Check if value in column
if grid[x][index] == val:
print("in the same column")
global col_index
col_index = (x, index)
input_lock = 1
# Check if value in row
if grid[index][y] == val:
print("in the same row")
global row_index
row_index = (index, y)
input_lock = 1
# Finds the block
index_x = x // 3 # integer division
index_y = y // 3
# Check if value in block
for i in range(index_x * 3, index_x * 3 + 3):
for j in range (index_y * 3, index_y * 3 + 3):
if grid[i][j] == val:
print("in the same block")
global blk_index
blk_index = (i, j)
input_lock = 1
if input_lock == 1:
return False
return True
class Main():
def __init__(self):
self.board = []
self.run()
def run(self):
pg.init()
self.screen = pg.display.set_mode(SCREEN_RES)
pg.display.set_caption('Sudoku solver')
display = Display_board(self.screen)
flag1 = 0
val = 0
global input_lock
board = create_board().board
while 1:
for event in pg.event.get():
if event.type == pg.QUIT or (event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE):
exit()
if event.type == pg.MOUSEBUTTONDOWN:
flag1 = 1
pos = pg.mouse.get_pos()
get_cord(pos)
display.glow(pos)
if event.type == pg.KEYDOWN and input_lock != 1:
if event.key == pg.K_1:
val = 1
if event.key == pg.K_2:
val = 2
if event.key == pg.K_3:
val = 3
if event.key == pg.K_4:
val = 4
if event.key == pg.K_5:
val = 5
if event.key == pg.K_6:
val = 6
if event.key == pg.K_7:
val = 7
if event.key == pg.K_8:
val = 8
if event.key == pg.K_9:
val = 9
elif event.type == pg.KEYDOWN and input_lock == 1:
if event.key == pg.K_BACKSPACE:
val = 0
input_lock = 0
reset_errors()
if val != 0:
display.draw_val(val, box_index_x, box_index_y)
if valid(board, int(box_index_x), int(box_index_y), val, display):
board[int(box_index_x)][int(box_index_y)] = val
else:
board[int(box_index_x)][int(box_index_y)] = 0
val = 0
pg.draw.rect(self.screen, BLACK, (0, 0, self.screen.get_width(), self.screen.get_height()))
self.screen.fill(BEIGE)
display.draw(board)
if input_lock == 1:
display.update(board, row_index, col_index, blk_index)
# display.draw_box()
pg.display.update()
self.solution = solve_board(board)
self.solution.assign_flags(board)
if __name__ == '__main__':
Main()
| [
"jle040@uit.no"
] | jle040@uit.no |
249da4760ecd8254331c7befd8d0738778611bc5 | fc558ed0bccbbd0edf52e662b310168a1b97ab56 | /tests/pcie/test_pcie.py | 5d1ce746a21aa2e7bcc4a0d8b0844f115ebdeb15 | [
"MIT"
] | permissive | bigdot123456/cocotbext-pcie | 914f4ac458901d93edb4447e65c24a1b30f39ea1 | 19e891adf9c45226cbbe2184199be7d904d0901e | refs/heads/master | 2023-06-07T07:38:47.289302 | 2021-06-17T08:59:28 | 2021-06-17T08:59:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,020 | py | #!/usr/bin/env python
"""
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
import os
import cocotb_test.simulator
import cocotb
from cocotb.regression import TestFactory
from cocotbext.pcie.core import RootComplex, MemoryEndpoint, Device, Switch
from cocotbext.pcie.core.caps import MsiCapability
from cocotbext.pcie.core.utils import PcieId
class TestEndpoint(MemoryEndpoint):
    """Memory endpoint with an MSI capability and three test regions.

    Region registration order determines BAR assignment; the tests below
    access region 0 via BAR 0, region 1 via BAR 1, and region 3 via
    BAR 3 (see run_test_ep_mem).
    """

    # keep pytest from collecting this helper class as a test case
    __test__ = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # arbitrary IDs; verified against the enumerated tree in
        # run_test_enumerate
        self.vendor_id = 0x1234
        self.device_id = 0x5678

        # advertise 2**5 = 32 MSI vectors with 64-bit addressing and
        # per-vector masking
        self.msi_cap = MsiCapability()
        self.msi_cap.msi_multiple_message_capable = 5
        self.msi_cap.msi_64bit_address_capable = 1
        self.msi_cap.msi_per_vector_mask_capable = 1
        self.register_capability(self.msi_cap)

        # registration order matters: it fixes the region-to-BAR mapping
        self.add_mem_region(1024*1024)
        self.add_prefetchable_mem_region(1024*1024)
        self.add_io_region(1024)
class TB:
    """Testbench topology.

    Root complex with three downstream ports: one direct endpoint, a
    switch carrying two endpoints, and a second direct endpoint.  All
    endpoints are collected in ``self.ep`` in attachment order.
    """

    def __init__(self, dut):
        self.dut = dut

        self.log = logging.getLogger("cocotb.tb")
        self.log.setLevel(logging.DEBUG)

        self.rc = RootComplex()
        self.ep = []

        def new_device():
            # build one endpoint behind a gen3 x16 upstream port and
            # record the endpoint for the tests
            endpoint = TestEndpoint()
            device = Device(endpoint)
            device.upstream_port.max_link_speed = 3
            device.upstream_port.max_link_width = 16
            self.ep.append(endpoint)
            return device

        # first endpoint directly on the root complex
        self.dev = new_device()
        self.rc.make_port().connect(self.dev)

        # switch on the next root port
        self.sw = Switch()
        self.rc.make_port().connect(self.sw)

        # two endpoints behind the switch
        self.dev2 = new_device()
        self.sw.make_port().connect(self.dev2)

        self.dev3 = new_device()
        self.sw.make_port().connect(self.dev3)

        # second endpoint directly on the root complex
        self.dev4 = new_device()
        self.rc.make_port().connect(self.dev4)
async def run_test_rc_mem(dut):
    """Exercise the root complex's own memory and I/O regions directly."""
    tb = TB(dut)

    tb.rc.log.setLevel(logging.DEBUG)

    mem_base, mem_data = tb.rc.alloc_region(1024*1024)
    io_base, io_data = tb.rc.alloc_io_region(1024)

    # lengths cover 1..31 bytes plus one large burst; offsets cover both
    # edges of the first 4096-byte span to hit boundary handling
    for length in list(range(1, 32)) + [1024]:
        for offset in list(range(8)) + list(range(4096-8, 4096)):
            tb.log.info("Memory operation length: %d offset: %d", length, offset)
            addr = mem_base + offset
            test_data = bytearray(x % 256 for x in range(length))

            # write through the RC, then confirm via the backing region
            # and via a read through the RC
            await tb.rc.mem_write(addr, test_data)
            assert mem_data[offset:offset+length] == test_data

            assert await tb.rc.mem_read(addr, length) == test_data

    for length in range(1, 32):
        for offset in range(8):
            tb.log.info("IO operation length: %d offset: %d", length, offset)
            addr = io_base + offset
            test_data = bytearray(x % 256 for x in range(length))

            await tb.rc.io_write(addr, test_data)
            assert io_data[offset:offset+length] == test_data

            assert await tb.rc.io_read(addr, length) == test_data
async def run_test_config(dut):
    """Read, modify, and restore the first endpoint's config space."""
    tb = TB(dut)

    tb.rc.log.setLevel(logging.DEBUG)

    dev_id = PcieId(0, 1, 0)

    tb.log.info("Read complete config space")
    orig = await tb.rc.config_read(dev_id, 0x000, 256, 1000, 'ns')

    # poke the interrupt line register (offset 0x3c) and read it back
    tb.log.info("Read and write interrupt line register")
    await tb.rc.config_write(dev_id, 0x03c, b'\x12', 1000, 'ns')
    readback = await tb.rc.config_read(dev_id, 0x03c, 1, 1000, 'ns')

    assert readback == b'\x12'

    # restore the original contents so later operations are unaffected
    tb.log.info("Write complete config space")
    await tb.rc.config_write(dev_id, 0x000, orig, 1000, 'ns')
async def run_test_enumerate(dut):
    """Enumerate the bus and cross-check the enumerated tree.

    Verifies that each simulated device's registers (IDs, BARs, bridge
    forwarding windows, expansion ROM) match the corresponding entry in
    the tree built by enumeration, and that all assigned bus number, IO,
    memory, and prefetchable-memory regions nest inside their parent
    bridge's apertures without overlapping each other.
    """
    tb = TB(dut)

    # every function in the topology: root ports, the switch's bridges,
    # and all four test endpoints
    all_ep = tb.rc.endpoints+[tb.sw.upstream_bridge]+tb.sw.endpoints+tb.ep

    tb.rc.log.setLevel(logging.DEBUG)
    for ep in all_ep:
        ep.log.setLevel(logging.DEBUG)

    await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)

    # check that enumerated tree matches devices
    def check_dev(dev):
        # NOTE: every reference below uses the 'dev' parameter; earlier
        # versions referenced the enclosing loop variable 'd', which only
        # worked by accident (it always equaled the argument at call time)
        tb.log.info("Check device at %s", dev.pcie_id)

        # ensure ID was assigned to device
        assert dev.pcie_id != PcieId(0, 0, 0)

        # get tree item
        ti = tb.rc.tree.find_child_dev(dev.pcie_id)
        assert ti is not None

        # check informational registers
        tb.log.info("Header type: 0x%02x", ti.header_type)
        tb.log.info("Vendor ID: 0x%04x", ti.vendor_id)
        tb.log.info("Device ID: 0x%04x", ti.device_id)
        tb.log.info("Revision ID: 0x%02x", ti.revision_id)
        tb.log.info("Class code: 0x%06x", ti.class_code)

        # bit 7 of the header type register flags a multifunction device
        assert ti.header_type == dev.header_layout | (bool(dev.multifunction_device) << 7)
        assert ti.class_code == dev.class_code
        assert ti.revision_id == dev.revision_id

        assert ti.vendor_id == dev.vendor_id
        assert ti.device_id == dev.device_id

        if ti.header_type & 0x7f == 0x01:
            # type 1 (bridge) header: only two BARs, plus forwarding windows
            bar_cnt = 2

            # check bridge registers
            tb.log.info("Primary bus %d", ti.pri_bus_num)
            tb.log.info("Secondary bus %d", ti.sec_bus_num)
            tb.log.info("Subordinate bus %d", ti.sub_bus_num)
            tb.log.info("IO base 0x%08x", ti.io_base)
            tb.log.info("IO limit 0x%08x", ti.io_limit)
            tb.log.info("Mem base 0x%08x", ti.mem_base)
            tb.log.info("Mem limit 0x%08x", ti.mem_limit)
            tb.log.info("Prefetchable mem base 0x%016x", ti.prefetchable_mem_base)
            tb.log.info("Prefetchable mem limit 0x%016x", ti.prefetchable_mem_limit)

            assert ti.sec_bus_num == dev.sec_bus_num
            assert ti.sub_bus_num == dev.sub_bus_num

            assert ti.io_base == dev.io_base
            assert ti.io_limit == dev.io_limit
            assert ti.mem_base == dev.mem_base
            assert ti.mem_limit == dev.mem_limit
            assert ti.prefetchable_mem_base == dev.prefetchable_mem_base
            assert ti.prefetchable_mem_limit == dev.prefetchable_mem_limit
        else:
            # type 0 header: up to six BARs plus subsystem IDs
            bar_cnt = 6

            tb.log.info("Subsystem vendor ID: 0x%04x", ti.subsystem_vendor_id)
            tb.log.info("Subsystem ID: 0x%04x", ti.subsystem_id)

            assert ti.subsystem_vendor_id == dev.subsystem_vendor_id
            assert ti.subsystem_id == dev.subsystem_id

        # check BARs
        bar = 0
        while bar < bar_cnt:
            if dev.bar_mask[bar] == 0:
                # unused bar
                assert ti.bar[bar] is None
                assert ti.bar_raw[bar] == 0
                assert ti.bar_addr[bar] is None
                assert ti.bar_size[bar] is None
                bar += 1
            elif dev.bar[bar] & 1:
                # IO BAR (bit 0 set)
                tb.log.info("BAR%d: IO BAR addr 0x%08x, size %d", bar, ti.bar_addr[bar], ti.bar_size[bar])
                assert ti.bar[bar] == dev.bar[bar]
                assert ti.bar_raw[bar] == dev.bar[bar]
                assert ti.bar_addr[bar] == dev.bar[bar] & ~0x3
                assert ti.bar_size[bar] == (~dev.bar_mask[bar] & 0xfffffffc)+0x4
                bar += 1
            elif dev.bar[bar] & 4:
                # 64 bit memory BAR; consumes two BAR slots
                # (log label fixed: this branch previously said "32 bit")
                tb.log.info("BAR%d: Mem BAR (64 bit) addr 0x%08x, size %d", bar, ti.bar_addr[bar], ti.bar_size[bar])
                assert ti.bar[bar] == dev.bar[bar] | dev.bar[bar+1] << 32
                assert ti.bar_raw[bar] == dev.bar[bar]
                assert ti.bar_raw[bar+1] == dev.bar[bar+1]
                assert ti.bar_addr[bar] == (dev.bar[bar] | dev.bar[bar+1] << 32) & ~0xf
                assert ti.bar_size[bar] == (~(dev.bar_mask[bar] | dev.bar_mask[bar+1] << 32) & 0xfffffffffffffff0)+0x10
                bar += 2
            else:
                # 32 bit memory BAR
                # (log label fixed: this branch previously said "64 bit")
                tb.log.info("BAR%d: Mem BAR (32 bit) addr 0x%08x, size %d", bar, ti.bar_addr[bar], ti.bar_size[bar])
                assert ti.bar[bar] == dev.bar[bar]
                assert ti.bar_raw[bar] == dev.bar[bar]
                assert ti.bar_addr[bar] == dev.bar[bar] & ~0xf
                assert ti.bar_size[bar] == (~dev.bar_mask[bar] & 0xfffffff0)+0x10
                bar += 1

        if dev.expansion_rom_addr_mask == 0:
            # no expansion ROM implemented
            assert ti.expansion_rom_raw == 0
            assert ti.expansion_rom_addr is None
            assert ti.expansion_rom_size is None
        else:
            assert ti.expansion_rom_raw & 0xfffff800 == dev.expansion_rom_addr
            assert ti.expansion_rom_addr == dev.expansion_rom_addr
            assert ti.expansion_rom_size == (~dev.expansion_rom_addr_mask & 0xfffff800)+0x800

        # TODO capabilities

    for d in all_ep:
        check_dev(d)

    # check settings in enumerated tree
    def check_tree(ti):
        tb.log.info("Check bridge at %s", ti.pcie_id)

        tb.log.info("Header type: 0x%02x", ti.header_type)
        tb.log.info("Vendor ID: 0x%04x", ti.vendor_id)
        tb.log.info("Device ID: 0x%04x", ti.device_id)
        tb.log.info("Revision ID: 0x%02x", ti.revision_id)
        tb.log.info("Class code: 0x%06x", ti.class_code)

        tb.log.info("Primary bus: %d", ti.pri_bus_num)
        tb.log.info("Secondary bus: %d", ti.sec_bus_num)
        tb.log.info("Subordinate bus: %d", ti.sub_bus_num)
        tb.log.info("IO base: 0x%08x", ti.io_base)
        tb.log.info("IO limit: 0x%08x", ti.io_limit)
        tb.log.info("Mem base: 0x%08x", ti.mem_base)
        tb.log.info("Mem limit: 0x%08x", ti.mem_limit)
        tb.log.info("Prefetchable mem base: 0x%016x", ti.prefetchable_mem_base)
        tb.log.info("Prefetchable mem limit: 0x%016x", ti.prefetchable_mem_limit)

        # regions claimed by the children; checked for overlap below
        bus_regions = []
        io_regions = []
        mem_regions = []
        prefetchable_mem_regions = []

        for ci in ti:
            tb.log.info("Check device at %s", ci.pcie_id)

            tb.log.info("Header type: 0x%02x", ci.header_type)
            tb.log.info("Vendor ID: 0x%04x", ci.vendor_id)
            tb.log.info("Device ID: 0x%04x", ci.device_id)
            tb.log.info("Revision ID: 0x%02x", ci.revision_id)
            tb.log.info("Class code: 0x%06x", ci.class_code)

            if ci.header_type & 0x7f == 0x00:
                # type 0 header
                tb.log.info("Subsystem vendor ID: 0x%04x", ci.subsystem_vendor_id)
                tb.log.info("Subsystem ID: 0x%04x", ci.subsystem_id)

                # check that BARs are within our apertures
                for bar in range(6):
                    if ci.bar[bar] is None:
                        continue
                    if ci.bar[bar] & 1:
                        # IO BAR
                        tb.log.info("BAR%d: IO BAR addr 0x%08x, size %d", bar, ci.bar_addr[bar], ci.bar_size[bar])
                        assert (ti.io_base <= ci.bar_addr[bar] and ci.bar_addr[bar]+ci.bar_size[bar]-1 <= ti.io_limit)
                        io_regions.append((ci.bar_addr[bar], ci.bar_addr[bar]+ci.bar_size[bar]-1))
                    elif ci.bar[bar] > 0xffffffff:
                        # prefetchable BAR
                        tb.log.info("BAR%d: Mem BAR (prefetchable) addr 0x%08x, size %d",
                            bar, ci.bar_addr[bar], ci.bar_size[bar])
                        assert (ti.prefetchable_mem_base <= ci.bar_addr[bar]
                            and ci.bar_addr[bar]+ci.bar_size[bar]-1 <= ti.prefetchable_mem_limit)
                        prefetchable_mem_regions.append((ci.bar_addr[bar], ci.bar_addr[bar]+ci.bar_size[bar]-1))
                    else:
                        # non-prefetchable BAR
                        tb.log.info("BAR%d: Mem BAR (non-prefetchable) addr 0x%08x, size %d",
                            bar, ci.bar_addr[bar], ci.bar_size[bar])
                        assert (ti.mem_base <= ci.bar_addr[bar]
                            and ci.bar_addr[bar]+ci.bar_size[bar]-1 <= ti.mem_limit)
                        mem_regions.append((ci.bar_addr[bar], ci.bar_addr[bar]+ci.bar_size[bar]-1))

                if ci.expansion_rom_addr:
                    # expansion ROM BAR (always non-prefetchable memory)
                    tb.log.info("Expansion ROM BAR: Mem BAR (non-prefetchable) addr 0x%08x, size %d",
                        ci.expansion_rom_addr, ci.expansion_rom_size)
                    assert (ti.mem_base <= ci.expansion_rom_addr and
                        ci.expansion_rom_addr+ci.expansion_rom_size-1 <= ti.mem_limit)
                    mem_regions.append((ci.expansion_rom_addr, ci.expansion_rom_addr+ci.expansion_rom_size-1))

            if ci.header_type & 0x7f == 0x01:
                # type 1 header
                tb.log.info("Primary bus: %d", ci.pri_bus_num)
                tb.log.info("Secondary bus: %d", ci.sec_bus_num)
                tb.log.info("Subordinate bus: %d", ci.sub_bus_num)
                tb.log.info("IO base: 0x%08x", ci.io_base)
                tb.log.info("IO limit: 0x%08x", ci.io_limit)
                tb.log.info("Mem base: 0x%08x", ci.mem_base)
                tb.log.info("Mem limit: 0x%08x", ci.mem_limit)
                tb.log.info("Prefetchable mem base: 0x%016x", ci.prefetchable_mem_base)
                tb.log.info("Prefetchable mem limit: 0x%016x", ci.prefetchable_mem_limit)

                # check that child switch apertures are within our apertures
                assert ti.sec_bus_num <= ci.pri_bus_num <= ti.sub_bus_num
                assert ti.sec_bus_num <= ci.sec_bus_num and ci.sub_bus_num <= ti.sub_bus_num
                bus_regions.append((ci.sec_bus_num, ci.sub_bus_num))
                if ci.io_base:
                    assert ti.io_base <= ci.io_base and ci.io_limit <= ti.io_limit
                    io_regions.append((ci.io_base, ci.io_limit))
                if ci.mem_base:
                    assert ti.mem_base <= ci.mem_base and ci.mem_limit <= ti.mem_limit
                    mem_regions.append((ci.mem_base, ci.mem_limit))
                if ci.prefetchable_mem_base:
                    assert (ti.prefetchable_mem_base <= ci.prefetchable_mem_base and
                        ci.prefetchable_mem_limit <= ti.prefetchable_mem_limit)
                    prefetchable_mem_regions.append((ci.prefetchable_mem_base, ci.prefetchable_mem_limit))

        # check for assignment overlaps: after sorting, every region must
        # end before the next one starts
        for lst in [bus_regions, io_regions, mem_regions, prefetchable_mem_regions]:
            lst.sort()
            for m in range(1, len(lst)):
                assert lst[m-1][1] <= lst[m][0], "assigned regions overlap"

        # recurse into child nodes
        for ci in ti:
            if ci.header_type & 0x7f == 0x01:
                tb.log.info("Check bridge at %s (child of bridge at %s)", ci.pcie_id, ti.pcie_id)
                check_tree(ci)

    check_tree(tb.rc.tree)
async def run_test_ep_mem(dut, ep_index=0):
    """Exercise memory and IO BAR accesses to one endpoint from the root complex.

    Sweeps transfer lengths (1..31 bytes plus 1024) and offsets near both ends
    of a 4 KB region for the endpoint's 32-bit BAR (region 0), 64-bit BAR
    (region 1) and IO BAR (region 3), checking the data lands in the endpoint's
    backing region and reads back identically.

    Args:
        dut: the cocotb design-under-test handle.
        ep_index: which endpoint in the testbench to target.
    """
    tb = TB(dut)
    # Enumerate first so BARs are assigned and bus mastering/MSI are enabled.
    await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
    tb.rc.log.setLevel(logging.DEBUG)
    ep = tb.ep[ep_index]
    ep.log.setLevel(logging.DEBUG)
    ti = tb.rc.tree.find_child_dev(ep.pcie_id)
    for length in list(range(1, 32))+[1024]:
        for offset in list(range(8))+list(range(4096-8, 4096)):
            tb.log.info("Memory operation (32-bit BAR) length: %d offset: %d", length, offset)
            addr = ti.bar_addr[0]+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await tb.rc.mem_write(addr, test_data, 1000, 'ns')
            # wait for write to complete (posted write; a read flushes it)
            await tb.rc.mem_read(addr, 1, 1000, 'ns')
            assert await ep.read_region(0, offset, length) == test_data
            assert await tb.rc.mem_read(addr, length, 1000, 'ns') == test_data
    for length in list(range(1, 32))+[1024]:
        for offset in list(range(8))+list(range(4096-8, 4096)):
            tb.log.info("Memory operation (64-bit BAR) length: %d offset: %d", length, offset)
            addr = ti.bar_addr[1]+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await tb.rc.mem_write(addr, test_data, 1000, 'ns')
            # wait for write to complete (posted write; a read flushes it)
            await tb.rc.mem_read(addr, 1, 1000, 'ns')
            assert await ep.read_region(1, offset, length) == test_data
            assert await tb.rc.mem_read(addr, length, 1000, 'ns') == test_data
    # IO writes are non-posted, so no flush read is needed before checking.
    for length in list(range(1, 8)):
        for offset in list(range(8)):
            tb.log.info("IO operation length: %d offset: %d", length, offset)
            addr = ti.bar_addr[3]+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await tb.rc.io_write(addr, test_data, 1000, 'ns')
            assert await ep.read_region(3, offset, length) == test_data
            assert await tb.rc.io_read(addr, length, 1000, 'ns') == test_data
async def run_test_p2p_dma(dut, ep1_index=0, ep2_index=1):
    """Exercise peer-to-peer DMA: endpoint 1 reads/writes endpoint 2's BARs.

    Same length/offset sweep as the root-complex BAR test, but the traffic is
    mastered by `ep1` and targets the BAR apertures assigned to `ep2`,
    verifying P2P routing through the switch fabric.

    Args:
        dut: the cocotb design-under-test handle.
        ep1_index: endpoint that masters the transactions.
        ep2_index: endpoint whose BARs are targeted.
    """
    tb = TB(dut)
    await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
    tb.rc.log.setLevel(logging.DEBUG)
    ep1 = tb.ep[ep1_index]
    ep1.log.setLevel(logging.DEBUG)
    ep2 = tb.ep[ep2_index]
    ep2.log.setLevel(logging.DEBUG)
    # BAR addresses of the *target* endpoint, as assigned during enumeration.
    ti2 = tb.rc.tree.find_child_dev(ep2.pcie_id)
    for length in list(range(1, 32))+[1024]:
        for offset in list(range(8))+list(range(4096-8, 4096)):
            tb.log.info("Memory operation (32-bit BAR) length: %d offset: %d", length, offset)
            addr = ti2.bar_addr[0]+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await ep1.mem_write(addr, test_data, 1000, 'ns')
            # wait for write to complete (posted write; a read flushes it)
            await ep1.mem_read(addr, 1, 1000, 'ns')
            assert await ep2.read_region(0, offset, length) == test_data
            assert await ep1.mem_read(addr, length, 1000, 'ns') == test_data
    for length in list(range(1, 32))+[1024]:
        for offset in list(range(8))+list(range(4096-8, 4096)):
            tb.log.info("Memory operation (64-bit BAR) length: %d offset: %d", length, offset)
            addr = ti2.bar_addr[1]+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await ep1.mem_write(addr, test_data, 1000, 'ns')
            # wait for write to complete (posted write; a read flushes it)
            await ep1.mem_read(addr, 1, 1000, 'ns')
            assert await ep2.read_region(1, offset, length) == test_data
            assert await ep1.mem_read(addr, length, 1000, 'ns') == test_data
    # IO writes are non-posted, so no flush read is needed before checking.
    for length in list(range(1, 8)):
        for offset in list(range(8)):
            tb.log.info("IO operation length: %d offset: %d", length, offset)
            addr = ti2.bar_addr[3]+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await ep1.io_write(addr, test_data, 1000, 'ns')
            assert await ep2.read_region(3, offset, length) == test_data
            assert await ep1.io_read(addr, length, 1000, 'ns') == test_data
async def run_test_dma(dut, ep_index=0):
    """Exercise endpoint-mastered DMA into host memory and IO regions.

    Allocates a 1 MB host memory region and a 1 KB host IO region on the root
    complex, then has the endpoint write/read them and checks the host-side
    backing buffers directly.

    Args:
        dut: the cocotb design-under-test handle.
        ep_index: which endpoint masters the DMA.
    """
    tb = TB(dut)
    # Host-side regions; *_data are the backing buffers we can inspect.
    mem_base, mem_data = tb.rc.alloc_region(1024*1024)
    io_base, io_data = tb.rc.alloc_io_region(1024)
    await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
    tb.rc.log.setLevel(logging.DEBUG)
    ep = tb.ep[ep_index]
    ep.log.setLevel(logging.DEBUG)
    for length in list(range(1, 32))+[1024]:
        for offset in list(range(8))+list(range(4096-8, 4096)):
            tb.log.info("Memory operation (DMA) length: %d offset: %d", length, offset)
            addr = mem_base+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await ep.mem_write(addr, test_data, 1000, 'ns')
            # wait for write to complete (posted write; a read flushes it)
            await ep.mem_read(addr, 1, 1000, 'ns')
            assert mem_data[offset:offset+length] == test_data
            assert await ep.mem_read(addr, length, 1000, 'ns') == test_data
    # IO writes are non-posted, so no flush read is needed before checking.
    for length in list(range(1, 8)):
        for offset in list(range(8)):
            tb.log.info("IO operation (DMA) length: %d offset: %d", length, offset)
            addr = io_base+offset
            test_data = bytearray([x % 256 for x in range(length)])
            await ep.io_write(addr, test_data, 1000, 'ns')
            assert io_data[offset:offset+length] == test_data
            assert await ep.io_read(addr, length, 1000, 'ns') == test_data
async def run_test_msi(dut, ep_index=0):
    """Check delivery of all 32 MSI vectors from one endpoint.

    For each vector, issues the interrupt from the endpoint and waits on the
    root complex's per-vector event; the test hangs (and times out) if a
    vector is not delivered.
    """
    tb = TB(dut)
    await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
    tb.rc.log.setLevel(logging.DEBUG)
    ep = tb.ep[ep_index]
    ep.log.setLevel(logging.DEBUG)
    for k in range(32):
        tb.log.info("Send MSI %d", k)
        await ep.msi_cap.issue_msi_interrupt(k)
        # Clear any stale event state before waiting for this delivery.
        event = tb.rc.msi_get_event(ep.pcie_id, k)
        event.clear()
        await event.wait()
# Register the cocotb tests only when this module is imported inside a
# running simulator (cocotb.SIM_NAME is empty under plain pytest import).
if cocotb.SIM_NAME:
    # Tests with no parameters beyond the DUT.
    for test in [
        run_test_rc_mem,
        run_test_config,
        run_test_enumerate,
    ]:
        factory = TestFactory(test)
        factory.generate_tests()
    # Parametrised tests: one generated test per option combination.
    factory = TestFactory(run_test_ep_mem)
    factory.add_option("ep_index", range(4))
    factory.generate_tests()
    factory = TestFactory(run_test_p2p_dma)
    factory.add_option("ep1_index", [0, 1])
    factory.add_option("ep2_index", [2, 3])
    factory.generate_tests()
    factory = TestFactory(run_test_dma)
    factory.add_option("ep_index", range(4))
    factory.generate_tests()
    factory = TestFactory(run_test_msi)
    factory.add_option("ep_index", range(4))
    factory.generate_tests()
# cocotb-test
tests_dir = os.path.dirname(__file__)


def test_pcie(request):
    """pytest entry point: compile the Verilog top level and run this module's
    cocotb tests in a simulator via cocotb-test."""
    dut = "test_pcie"
    module = os.path.splitext(os.path.basename(__file__))[0]
    toplevel = dut
    verilog_sources = [
        os.path.join(os.path.dirname(__file__), f"{dut}.v"),
    ]
    # One build directory per parametrised pytest case; sanitise '[' / ']'
    # from the test id so the path is filesystem-safe.
    sim_build = os.path.join(tests_dir, "sim_build",
                             request.node.name.replace('[', '-').replace(']', ''))
    cocotb_test.simulator.run(
        python_search=[tests_dir],
        verilog_sources=verilog_sources,
        toplevel=toplevel,
        module=module,
        sim_build=sim_build,
    )
| [
"alex@alexforencich.com"
] | alex@alexforencich.com |
76ddc9c06c47c12598e6d3cefacae20640a6f2cb | 174edd84c3372e2f062dfb58848153391b44a9ea | /spec/logic.py | 75efefcd89862e45766efb341b03ef1d9a0d0645 | [] | no_license | psavine42/viper-server | 7803e3282c98dcc978f4f195aef87acc4861b8bd | dab58a979ab238d65735953c71cae6c288400144 | refs/heads/master | 2020-03-26T15:28:22.823335 | 2018-11-28T03:42:34 | 2018-11-28T03:42:34 | 145,045,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,533 | py | from unittest import TestCase
import numpy as np
from operator import eq
from pprint import pprint
from shapely.geometry import LineString, Point
from spec.seg_data import *
from src.rules.opers import *
from src import visualize, SystemFactory, RenderNodeSystem
from src.rules import RuleEngine, KB, heursitics
from src.rules.property import Property
from src import process, viper
from src import propogate as gp
from src.geom import rebuild_mls, to_mls
import src.structs as gr
import src.render.render_propogators as rpg
def read_props(node, k):
    """Return the value stored under key ``k`` on ``node``, or ``None``."""
    value = node.get(k, None)
    return value
_root = (2, 1, 0)


def node_line(line, prev=None):
    """Chain the points of ``line`` into connected Nodes.

    Each point becomes a Node connected to the previously created one
    (or to ``prev`` for the first point). Returns the last node created,
    or ``prev`` unchanged when ``line`` is empty.
    """
    for point in line:
        node = Node(point)
        if prev is not None:
            node.connect_to(prev)
        prev = node
    return prev
def node_tree(pts, prev=None):
    """Build a Node graph from a nested point list.

    Each entry of ``pts`` is either a plain point tuple (chained onto the
    previous node) or a ``(point, children)`` pair, in which case ``children``
    is recursed into as a branch rooted at the new node.  Returns the last
    node created along the top-level chain (or ``prev`` for empty input).
    """
    for i in range(len(pts)):
        pt = pts[i]
        if isinstance(pt[0], int):
            # Plain point: extend the current chain.
            n = Node(pt)
            if prev is None:
                prev = n
            else:
                n.connect_to(prev)
            prev = n
        elif isinstance(pt[0], tuple):
            # (point, children) pair: create the node, then recurse to hang
            # the child branch off it.
            pt, rest = pts[i]
            n = Node(pt)
            if prev is None:
                prev = n
            else:
                n.connect_to(prev)
            node_tree(rest, n)
            prev = n
    return prev
class TestProp(TestCase):
    """Tests for graph propagators (src.propogate) and geometry helpers."""

    def get_sys(self):
        """Build the small SEGMENTS system and return its root Node."""
        system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
        system = system.bake()
        return viper.nx_to_nodes(system)

    def test_dist_prop(self):
        """DistanceFromSource: each node is one further than its sole predecessor."""
        root = self.get_sys()
        propagator = gp.DistanceFromSource()
        propagator(root)
        for n in root.__iter__():
            pred = n.predecessors()
            if len(pred) == 1:
                assert pred[0].get(propagator.var) + 1 == n.get(propagator.var)

    def test_order_prop(self):
        """BuildOrder: every node gets a distinct order value."""
        root = self.get_sys()
        propagator = gp.BuildOrder()
        propagator(root)
        order = set()
        cnt = 0
        for n in root.__iter__():
            print(n)
            cnt += 1
            order.add(n.get(propagator.var))
        # set size == node count implies all order values were unique
        assert len(order) == cnt

    def test_dist_to_end(self):
        """DistanceFromEnd: leaf nodes (no successors) are at distance 0."""
        root = self.get_sys()
        propagator = gp.DistanceFromEnd()
        propagator(root)
        for n in root.__iter__():
            if len(n.successors()) == 0:
                assert n.get(propagator.var) == 0

    def test_loop_neg(self):
        """LoopDetector: the acyclic SEGMENTS system flags no node as in a loop."""
        root = self.get_sys()
        propagator = gp.LoopDetector()
        propagator(root, data=[])
        for n in root.__iter__():
            assert n.get(propagator.var) is not True

    def test_loop_pos(self):
        """LoopDetector: adding a closing segment makes its nodes loop-flagged.

        NOTE(review): this mutates the module-level SEGMENTS list, so tests
        running after it see the extra segment — confirm isolation is intended.
        """
        connect_loop = [(8., 8., 0), (4., 4., 0)]
        SEGMENTS.append(connect_loop)
        system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
        system = system.bake()
        root = viper.nx_to_nodes(system)
        propagator = gp.LoopDetector()
        propagator(root, data=[])
        for n in root.__iter__():
            if n.geom in connect_loop:
                assert n.get(propagator.var) is True

    def test_edge_det(self):
        """DirectionWriter: smoke test — runs and prints outgoing edges."""
        root = self.get_sys()
        propagator = gp.DirectionWriter()
        propagator(root)
        for n in root.__iter__():
            for e in n.successors(edges=True):
                print(e)

    def test_overlap_resolver(self):
        # TODO: not implemented yet
        pass

    def test_remover_sm(self):
        """Smoke test: bake + plot the small SEGMENTS system."""
        system = SystemFactory.from_segs(
            SEGMENTS, sys=viper.System, root=_root, lr='a')
        system.bake()
        system.gplot(fwd=True, bkwd=False)

    def test_remover_cl(self):
        """Smoke test: plot the collinear-segments system."""
        system = SystemFactory.from_segs(
            SEGMENTS_COL, sys=viper.System, root=_root, lr='a')
        system.aplot()

    def test_remover_lg(self):
        """Smoke test: bake + plot a large system loaded from serialized geometry."""
        segs = load_segs()
        system = SystemFactory.from_serialized_geom(
            segs, sys=viper.System, root=(-246, 45, 0))
        system.bake()
        system.gplot(fwd=True, bkwd=False)

    def test_reverse(self):
        """Edge.reverse swaps source and target."""
        n1 = Node(1)
        n2 = Node(2)
        edge = n1.connect_to(n2)
        edge.reverse()
        assert edge.target == n1
        assert edge.source == n2

    def test_merge_self(self):
        """Cluster propagator on a near-duplicate point chain; prints result."""
        n1 = [(1, 1), (1, 4.8), (1.2, 5), (1, 5.2), (1, 10)]
        prev = node_line(n1)
        gp.Cluster()(prev)
        for n in prev.__iter__(fwd=True, bkwd=True):
            print(n, *n.neighbors())

    def test_geom_sims(self):
        """Exploratory: shapely union behavior.

        NOTE(review): computes `l1.union(l1)` and never uses `l2` — looks like
        `l1.union(l2)` was intended; confirm.
        """
        l2 = LineString([(1, 2), (1, 4), (4, 6), (4, 8)])
        l1 = LineString([(1, 3), (1, 4), (4, 6), (1, 4)])
        print(l1)
        ds = l1.union(l1)
        print(ds)

    def test_adder(self):
        """PointAdder: insert a node at (1, 3) into a chain and plot the graph."""
        n1 = [(1, 2), (1, 4), (4, 6), (4, 8)]
        prev = node_line(n1)
        ndd = Node((1, 3))
        pa = gp.PointAdder(ndd)
        pa(prev)
        for n in prev.__iter__(fwd=True, bkwd=True):
            print(n)
        G = viper.nodes_to_nx(prev)
        visualize.gplot(G)

    def test_point(self):
        """rebuild_mls inserts a point into a MultiLineString at the right place."""
        point = Point(1, 8)
        l3 = [(1, 3), (1, 10), (10, 6)]
        r3 = to_mls(l3)
        print(r3)
        res = rebuild_mls(r3, point)
        print(res)
        tgt = to_mls([(1, 3), (1, 8), (1, 10), (10, 6)])
        assert res == tgt

    def test_254(self):
        """Build a SystemV3 from the x-slice (-270, -260) of the loaded geometry."""
        segs = load_segs()
        segs, syms = SystemFactory.to_segments(segs)
        fsg = []
        fsm = []
        print(syms[0])
        # keep only geometry whose endpoints fall in this x-coordinate window
        mx = -260
        mn = -270
        for seg in segs:
            sg = list(seg.coords)
            if mn < sg[0][0] < mx or mn < sg[1][0] < mx:
                fsg.append(seg)
        for seg in syms:
            sg = list(seg.coords)
            if mn < sg[0][0] < mx:
                fsm.append(seg)
        print(fsm[0])
        system = viper.SystemV3(segments=fsg, symbols=fsm, root=(-246, 45, 0))
        system.aplot()
class TestRenderProp(TestCase):
    """Tests for render propagators and the render pipeline."""

    def test_riser_fn(self):
        """End-to-end: build branch graph, run rule engine, render, plot 3D."""
        root = self.test_translate()
        rcp = viper.System.recipe()
        rcp(root)
        rules = heursitics.EngineHeurFP()
        Eng = RuleEngine(term_rule=rules.root)
        Kb = KB(rules.root)
        root = Eng.alg2(root, Kb)
        renderer = RenderNodeSystem()
        root = renderer.render(root)
        print('nodes ', len(root))
        visualize.print_iter(root)
        meta = Eng.annotate_type(root, rules.final_labels)
        visualize.plot3d(root, meta)

    def test_translate(self):
        """Build two vertical branches, translate the second by (8, 8, 0) and
        connect it to the node at (8, 6, 0) of the first; returns the root."""
        root = vertical_branch()
        end1 = gr.node_at(root, (8, 6, 0))
        root2 = vertical_branch()
        rpg.Translate()(root2, data=np.array([8, 8, 0]))
        end1.connect_to(root2)
        return root
        # visualize.plot3d(root2, {})
class TestLogic(TestCase):
    """Tests for rule operators (IF/AND/HAS), Property, and the RuleEngine.

    NOTE(review): several tests read ``self.term`` but nothing in this class
    assigns it before use (``tearDown`` only resets it to None afterwards) —
    those tests appear to depend on a setup step that is missing; confirm.
    """

    def tearDown(self):
        self.term = None

    def test_prop1(self):
        """Property caches its condition's result on the node under its name."""
        cond = IF('nsucs', eq, 0)
        isEnd = Property('IsEnd', cond)
        node1 = Node(1)
        assert cond(node1) is True
        res1 = isEnd(node1)
        assert res1 is True
        assert node1.get('IsEnd') is True
        # adding a successor invalidates the 'no successors' condition
        node2 = Node(2)
        node1.connect_to(node2)
        assert cond(node1) is False

    def test_and(self):
        """AND combines conditions; HAS tests attribute presence."""
        is_symbol = HAS('symbol')
        is_end = IF('nsucs', eq, 0)
        is_circle = IF('symbol', eq, GeomType.CIRCLE)
        is_drop_head = AND(is_end, is_circle)
        # setup Nodes
        n0 = Node(0)
        n1 = Node(1, symbol=GeomType.CIRCLE)
        n2 = Node(2, symbol=GeomType.CIRCLE)
        # graph: n0 -> n1 -> n2, so only n2 is terminal
        n0.connect_to(n1)
        n1.connect_to(n2)
        assert is_drop_head(n1) is False
        assert is_drop_head(n2) is True
        assert is_symbol(n0) is False
        assert is_symbol(n1) is True

    def test_itm(self):
        """Expected IsDrop/IsRiser labels on a three-node chain.

        NOTE(review): relies on properties written by an engine run that is
        commented out (``self.term``); as written the asserts depend on state
        set elsewhere — confirm.
        """
        n0 = Node(0, symbol=GeomType.CIRCLE)
        n1 = Node(1)
        n2 = Node(2, symbol=GeomType.CIRCLE)
        n0.connect_to(n1)
        n1.connect_to(n2)
        read_props(n2, 'IsDrop')
        # assert self.term(n0) is True
        read_props(n2, 'IsDrop')
        print('\n')
        print(n0, n0.tmps)
        print(n1, n1.tmps)
        print(n2, n2.tmps)
        assert read_props(n2, 'IsDrop') is True
        assert read_props(n0, 'IsRiser') is True
        assert not read_props(n2, 'IsRiser')

    def test_eng(self):
        """Smoke test: construct a RuleEngine and print its rule frequencies."""
        print('\n')
        rl = RuleEngine(term_rule=self.term)
        pprint(rl._freq)

    def test_eng2(self):
        """Run the FP heuristics engine (yield_queue) over SEGMENTS and plot."""
        from src.rules.heursitics import EngineHeurFP
        rules = EngineHeurFP()
        Eng = RuleEngine(term_rule=rules.root)
        system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
        system = system.bake()
        root = viper.nx_to_nodes(system)
        root = Eng.yield_queue(root)
        nxg = Eng.plot(root, rules.final_labels)

    def test_compile_eng3(self):
        """KB compilation smoke test: print variables and agenda."""
        rules = heursitics.EngineHeurFP()
        Eng = RuleEngine(term_rule=rules.root)
        Kb = KB(rules.root)
        print(Kb.get_vars())
        print(Kb.agenda)

    def test_eng3(self):
        """Run the alg2 engine over SEGMENTS with debug logging and plot."""
        rules = heursitics.EngineHeurFP()
        Eng = RuleEngine(term_rule=rules.root, mx=400, debug=True, nlog=1)
        _root = (2, 1, 0)
        system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
        system = system.bake()
        root = viper.nx_to_nodes(system)
        Kb = KB(rules.root)
        print(Kb)
        root = Eng.alg2(root, Kb, )
        nxg = Eng.plot(root, rules.final_labels)

    def test_eng4(self):
        """Run the alg2 engine over the large serialized geometry and plot."""
        system = SystemFactory.from_serialized_geom(load_segs(),
                                                    sys=viper.System,
                                                    root=(-246, 45, 0))
        system = system.bake()
        root = viper.nx_to_nodes(system)
        print(root)
        rules = heursitics.EngineHeurFP()
        Eng = RuleEngine(term_rule=rules.root, mx=2500, debug=False, nlog=20)
        Kb = KB(rules.root)
        root = Eng.alg2(root, Kb)
        nxg = Eng.plot(root, rules.final_labels)

    def test_eng5(self):
        """Engine + renderer over a revit-signal dataset, ending in a 3D plot."""
        data = load_segs(fl='1535158393.0-revit-signal')
        system = SystemFactory.from_serialized_geom(
            data, sys=viper.System, root=(-246, 45, 0))
        system = system.bake()
        root = system.root
        print(root)
        rules = heursitics.EngineHeurFP()
        Eng = RuleEngine(term_rule=rules.root, mx=2500, debug=False, nlog=20)
        Kb = KB(rules.root)
        root = Eng.alg2(root, Kb)
        print('nodes ', len(root))
        renderer = RenderNodeSystem()
        meta = Eng.annotate_type(root, rules.final_labels)
        root = renderer.render(root)
        print('nodes ', len(root))
        visualize.plot3d(root, meta)

    def test_eng_full(self):
        """
        Test the engine as executed by server
        """
        import time
        start = time.time()
        data = load_segs(fl='1535158393.0-revit-signal')
        points = [[-246.0000000012448, 45.31190012691635, 0.0]]
        proc = process.SystemProcessorV3()
        ds = proc.process(data, points, system_type='FP')
        [print(k, len(v)) for k, v in ds.items()]
        visualize.dump_data(ds)
        # no output segment may be degenerate (zero length in all axes)
        for g in ds['geom']:
            x1, y1, z1, x2, y2, z2 = g
            res = [x1 == x2, y1 == y2, z1 == z2]
            assert not all(res)
        end = time.time()
        print('time {} secs'.format(end - start))

    def test_loadsyms(self):
        """Smoke test: build a system from only the symbols that have children."""
        segs = load_segs()
        ds = [x for x in segs if x['children'] != []]
        system = SystemFactory.from_serialized_geom(ds, root=(-246, 45, 0))
| [
"psavine42@gmail.com"
] | psavine42@gmail.com |
b7723239c4a46c561258470ad64b96116357489b | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.5_rd=1_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=61/sched.py | d192899a28b503972848e0f3b23908f61c81e5b3 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | -X FMLP -Q 0 -L 3 124 400
-X FMLP -Q 0 -L 3 86 400
-X FMLP -Q 0 -L 3 52 200
-X FMLP -Q 1 -L 2 51 175
-X FMLP -Q 1 -L 2 47 150
-X FMLP -Q 1 -L 2 46 150
-X FMLP -Q 2 -L 1 27 200
-X FMLP -Q 2 -L 1 26 100
-X FMLP -Q 2 -L 1 18 125
-X FMLP -Q 3 -L 1 16 150
-X FMLP -Q 3 -L 1 14 100
-X FMLP -Q 3 -L 1 3 175
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
120f170cf018653194adb4e24abad7bac1b97950 | 5cbbeb11fb1400019690d10db24b4579f97e8896 | /mlkernels/kernels/derivative.py | a0d74a40a20655d7822642c6d104d4e38e97fc0c | [
"MIT"
] | permissive | darsh8200/mlkernels | b735c213f5cf590cabebcee166e3b4aea95c4e1e | cad223c422a32bc10375358fda076645efca62f1 | refs/heads/main | 2023-06-16T19:48:37.056247 | 2021-07-09T06:18:39 | 2021-07-09T06:18:39 | 384,340,711 | 0 | 0 | MIT | 2021-07-09T06:16:48 | 2021-07-09T06:16:47 | null | UTF-8 | Python | false | false | 6,853 | py | import lab as B
import numpy as np
from algebra import DerivativeFunction
from algebra.util import identical
from matrix import Dense
from plum import convert
from . import _dispatch
from .. import Kernel
from ..util import num_elements, uprank, expand
__all__ = ["perturb", "DerivativeKernel"]
def dkx(k_elwise, i):
    """Construct the derivative of a kernel with respect to its first
    argument.

    Args:
        k_elwise (function): Function that performs element-wise computation
            of the kernel.
        i (int): Dimension with respect to which to compute the derivative.

    Returns:
        function: Derivative of the kernel with respect to its first argument.
    """

    @uprank
    def _dkx(x, y):
        # TensorFlow is imported lazily so it is only a dependency when
        # derivative kernels are actually evaluated.
        import tensorflow as tf

        with tf.GradientTape() as t:
            # Get the numbers of inputs.
            nx = num_elements(x)
            ny = num_elements(y)

            # Copy the input `ny` times to efficiently compute many derivatives.
            xis = tf.identity_n([x[:, i : i + 1]] * ny)
            t.watch(xis)

            # Tile inputs for batched computation: pair every `x` with every `y`.
            x = B.tile(x, ny, 1)
            y = B.reshape(B.tile(y, 1, nx), ny * nx, -1)

            # Insert tracked dimension, which is different for every tile.
            xi = B.concat(*xis, axis=0)
            x = B.concat(x[:, :i], xi, x[:, i + 1 :], axis=1)

            # Perform the derivative computation.
            out = B.dense(k_elwise(x, y))
            grads = t.gradient(out, xis, unconnected_gradients="zero")
            # Columns correspond to the `ny` tracked copies.
            return B.concat(*grads, axis=1)

    return _dkx
def dkx_elwise(k_elwise, i):
    """Construct the element-wise derivative of a kernel with respect to
    its first argument.

    Args:
        k_elwise (function): Function that performs element-wise computation
            of the kernel.
        i (int): Dimension with respect to which to compute the derivative.

    Returns:
        function: Element-wise derivative of the kernel with respect to its
            first argument.
    """

    @uprank
    def _dkx_elwise(x, y):
        # TensorFlow is imported lazily; derivatives use GradientTape.
        import tensorflow as tf

        with tf.GradientTape() as t:
            # Track only dimension `i` of `x`; all other dims are constants.
            xi = x[:, i : i + 1]
            t.watch(xi)
            x = B.concat(x[:, :i], xi, x[:, i + 1 :], axis=1)
            out = B.dense(k_elwise(x, y))
            return t.gradient(out, xi, unconnected_gradients="zero")

    return _dkx_elwise
def dky(k_elwise, i):
    """Construct the derivative of a kernel with respect to its second
    argument.

    Args:
        k_elwise (function): Function that performs element-wise computation
            of the kernel.
        i (int): Dimension with respect to which to compute the derivative.

    Returns:
        function: Derivative of the kernel with respect to its second argument.
    """

    @uprank
    def _dky(x, y):
        # TensorFlow is imported lazily; derivatives use GradientTape.
        import tensorflow as tf

        with tf.GradientTape() as t:
            # Get the numbers of inputs.
            nx = num_elements(x)
            ny = num_elements(y)

            # Copy the input `nx` times to efficiently compute many derivatives.
            yis = tf.identity_n([y[:, i : i + 1]] * nx)
            t.watch(yis)

            # Tile inputs for batched computation: pair every `x` with every `y`.
            x = B.reshape(B.tile(x, 1, ny), nx * ny, -1)
            y = B.tile(y, nx, 1)

            # Insert tracked dimension, which is different for every tile.
            yi = B.concat(*yis, axis=0)
            y = B.concat(y[:, :i], yi, y[:, i + 1 :], axis=1)

            # Perform the derivative computation.
            out = B.dense(k_elwise(x, y))
            grads = t.gradient(out, yis, unconnected_gradients="zero")
            # Transpose so rows index `x` and columns index `y`.
            return B.transpose(B.concat(*grads, axis=1))

    return _dky
def dky_elwise(k_elwise, i):
    """Construct the element-wise derivative of a kernel with respect to
    its second argument.

    Args:
        k_elwise (function): Function that performs element-wise computation
            of the kernel.
        i (int): Dimension with respect to which to compute the derivative.

    Returns:
        function: Element-wise derivative of the kernel with respect to its
            second argument.
    """

    @uprank
    def _dky_elwise(x, y):
        # TensorFlow is imported lazily; derivatives use GradientTape.
        import tensorflow as tf

        with tf.GradientTape() as t:
            # Track only dimension `i` of `y`; all other dims are constants.
            yi = y[:, i : i + 1]
            t.watch(yi)
            y = B.concat(y[:, :i], yi, y[:, i + 1 :], axis=1)
            out = B.dense(k_elwise(x, y))
            return t.gradient(out, yi, unconnected_gradients="zero")

    return _dky_elwise
def perturb(x):
    """Slightly perturb a tensor.

    Args:
        x (tensor): Tensor to perturb.

    Returns:
        tensor: `x`, but perturbed.
    """
    # Relative nudge sized to the precision of the input's data type.
    relative_nudges = {np.float64: 1e-14, np.float32: 1e-7}
    dtype = convert(B.dtype(x), B.NPDType)
    if dtype not in relative_nudges:
        raise ValueError(f"Cannot perturb a tensor of data type {B.dtype(x)}.")
    return 1e-20 + x * (1 + relative_nudges[dtype])
class DerivativeKernel(Kernel, DerivativeFunction):
    """Derivative of kernel.

    The wrapped kernel is available as ``self[0]`` and the dimensions being
    differentiated as ``self.derivs`` (both provided by `DerivativeFunction`).
    """

    @property
    def _stationary(self):
        # NOTE: In the one-dimensional case, if derivatives with respect to both
        # arguments are taken, then the result is in fact stationary.
        return False

    @_dispatch
    def __eq__(self, other: "DerivativeKernel"):
        # Equal iff the underlying kernels match and the same derivative
        # dimensions were taken.
        identical_derivs = identical(expand(self.derivs), expand(other.derivs))
        return self[0] == other[0] and identical_derivs
@_dispatch
def pairwise(k: DerivativeKernel, x: B.Numeric, y: B.Numeric):
    """Pairwise matrix of a derivative kernel over inputs `x` and `y`."""
    i, j = expand(k.derivs)
    k = k[0]

    # Prevent that `x` equals `y` to stabilise nested gradients.
    y = perturb(y)

    if i is not None and j is not None:
        # Derivative with respect to both `x` and `y`.
        return Dense(dky(dkx_elwise(elwise(k), i), j)(x, y))
    elif i is not None and j is None:
        # Derivative with respect to `x`.
        return Dense(dkx(elwise(k), i)(x, y))
    elif i is None and j is not None:
        # Derivative with respect to `y`.
        return Dense(dky(elwise(k), j)(x, y))
    else:
        raise RuntimeError("No derivative specified.")
@_dispatch
def elwise(k: DerivativeKernel, x: B.Numeric, y: B.Numeric):
    """Element-wise evaluation of a derivative kernel over inputs `x` and `y`."""
    i, j = expand(k.derivs)
    k = k[0]

    # Prevent that `x` equals `y` to stabilise nested gradients.
    y = perturb(y)

    if i is not None and j is not None:
        # Derivative with respect to both `x` and `y`.
        return dky_elwise(dkx_elwise(elwise(k), i), j)(x, y)
    elif i is not None and j is None:
        # Derivative with respect to `x`.
        return dkx_elwise(elwise(k), i)(x, y)
    elif i is None and j is not None:
        # Derivative with respect to `y`.
        return dky_elwise(elwise(k), j)(x, y)
    else:
        raise RuntimeError("No derivative specified.")
| [
"wessel.p.bruinsma@gmail.com"
] | wessel.p.bruinsma@gmail.com |
cbd0426653d0bdcaf34cbdaf86cd071eb58163b8 | a6ab2735ff2f89adc64a4afcbfe013c1039198a1 | /scrapers/liverpool.py | 417b56688c24c6978ebc90919fea9056e20e3935 | [] | no_license | rehmanali1337/innvictus_scraper | 72e5049dd2c3d391f47d37e145edb2bf7c6a371d | bcb4e986c1922b20d61baca88e6ff03909bca518 | refs/heads/master | 2023-05-06T21:43:27.163117 | 2021-05-26T05:04:29 | 2021-05-26T05:04:29 | 341,820,871 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,929 | py | from selenium import webdriver
import asyncio
import json
from models.cache import ListCache
from models.products import LiverPoolProduct
from configs import global_vars
import logging
class LiverPoolNewProdsScraper:
    """Monitors liverpool.com.mx shoe category pages with Selenium.

    Polls the category pages forever; any product link not yet present in the
    ``ListCache`` is scraped into a ``LiverPoolProduct`` and pushed onto
    ``queue``, then remembered so it is emitted only once.
    """

    def __init__(self, queue):
        # Application config (contains e.g. WEBDRIVER_PATH).
        self.config = json.load(open(global_vars.MAIN_CONFIG_FILE_LOCATION))
        self.queue = queue
        # Bug fix: the original code assigned ``print = logging.getLogger(...).info``
        # to a *local* variable inside __init__, which was dead code — every later
        # ``print(...)`` still hit the builtin. Bind the logger to the instance so
        # progress messages actually go through the 'LiverpoolMonitor' logger.
        self.log = logging.getLogger(' LiverpoolMonitor ').info
        self.options = webdriver.ChromeOptions()
        self.options.add_argument('--no-sandbox')
        # self.options.add_argument('--headless')
        self.options.add_argument('--disable-dev-shm-usage')
        self.options.add_argument('start-maximized')
        self.options.add_argument('disable-infobars')
        self.webdriver_path = self.config.get("WEBDRIVER_PATH")
        self.loop = asyncio.new_event_loop()
        self.driver = None
        # Category pages polled for product links.
        self.URLs = [
            'https://www.liverpool.com.mx/tienda/zapatos/catst1105210',
            'https://www.liverpool.com.mx/tienda/zapatos/catst1010801',
            'https://www.liverpool.com.mx/tienda/zapatos/catst1011086'
        ]
        # Seconds to sleep between polling iterations.
        self.itter_time = 10

    def start(self):
        """Blocking entry point: create the cache and run the polling loop."""
        self.cache = ListCache('LiverPoolCache')
        self.loop.run_until_complete(self.main())

    async def main(self):
        """Poll category pages forever, emitting products not seen before."""
        self.driver = webdriver.Chrome(
            executable_path=self.webdriver_path, options=self.options)
        self.driver.implicitly_wait(10)
        # await self.create_cache()
        while True:
            try:
                all_links = await self.get_all_prod_links()
                self.log(f'[+] Got {len(all_links)} prod links!')
                for link in all_links:
                    if not self.cache.has_item(link):
                        prod = await self.get_prod_details(link)
                        self.queue.put(prod)
                        self.cache.add_item(link)
                await asyncio.sleep(self.itter_time)
            except Exception as e:
                # Keep the monitor alive across transient scraping/driver
                # errors, but record them instead of silently swallowing.
                self.log(e)

    async def create_cache(self):
        """Seed the cache with every currently listed product (no emits)."""
        self.log('[+] Creating cache ..')
        links = await self.get_all_prod_links()
        self.cache.replace_cache(links)
        self.log('[+] Created cache for prods')

    async def get_all_prod_links(self):
        """Return the product-detail URLs found on all category pages."""
        links = []
        for url in self.URLs:
            self.driver.get(url)
            prods_list = self.driver.find_elements_by_xpath(
                '//li[@class="m-product__card card-masonry"]')
            for prod in prods_list:
                link = prod.find_element_by_tag_name('a').get_attribute('href')
                links.append(link)
        return links

    async def get_prod_details(self, link):
        """Scrape one product page into a ``LiverPoolProduct``."""
        self.driver.get(link)
        prod = LiverPoolProduct()
        prod.name = self.driver.find_element_by_xpath(
            '//h1[@class="a-product__information--title"]').text
        prod.link = link
        out_of_stock_sizes = self.driver.find_elements_by_xpath(
            '//button[@class="a-btn a-btn--actionpdp -disabled"]')
        for size in out_of_stock_sizes:
            prod.out_of_stock_sizes.append(size.text)
        in_stock_sizes = self.driver.find_elements_by_xpath(
            '//button[@class="a-btn a-btn--actionpdp"]')
        for size in in_stock_sizes:
            prod.in_stock_sizes.append(size.text)
        prod.img_link = self.driver.find_element_by_xpath(
            '//img[@id="image-real"]').get_attribute('src')
        # Text looks like "Color: Negro" -> keep the part after the colon.
        prod.color = self.driver.find_element_by_xpath(
            '//p[@class="a-product__paragraphColor m-0 mt-2 mb-1"]').text.split(':')[-1].strip()
        # Price text looks like "$1,234\n..." -> strip currency formatting.
        prod.price = self.driver.find_element_by_xpath(
            '//p[@class="a-product__paragraphDiscountPrice m-0 d-inline "]').text.split('\n')[0].replace(',', '').replace('$', '')
        return prod
| [
"rehmanali.9442289@gmail.com"
] | rehmanali.9442289@gmail.com |
fdef72e6ed2b89d6e3312ca8d0abab76e55416d7 | 4f4d47d60e17f0e3b7120ebb26f3d83e0a1f8e66 | /tf_agents/bandits/environments/random_bandit_environment.py | 735af739c1d680f16bcb6a4df8ef9ba29e2bd8e5 | [
"Apache-2.0"
] | permissive | tfboyd/agents | 644ff1ee3961ac629671110c45f6c90234bd0ad1 | 858ee36aaaea6fbcf0e5ab1c12929c77bd17abae | refs/heads/master | 2020-11-28T15:46:31.635917 | 2020-06-26T06:05:57 | 2020-06-26T06:05:57 | 229,859,259 | 2 | 0 | Apache-2.0 | 2020-06-26T15:34:23 | 2019-12-24T02:56:28 | Python | UTF-8 | Python | false | false | 5,146 | py | # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bandit environment that returns random observations and rewards."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.bandits.environments import bandit_tf_environment as bte
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step
__all__ = ['RandomBanditEnvironment']
def _raise_batch_shape_error(distribution_name, batch_shape):
raise ValueError('`{distribution_name}` must have batch shape with length 1; '
'got {batch_shape}. Consider using '
'`tensorflow_probability.distributions.Independent` '
'to manipulate batch and event shapes.'.format(
distribution_name=distribution_name,
batch_shape=batch_shape))
class RandomBanditEnvironment(bte.BanditTFEnvironment):
  """Bandit environment that returns random observations and rewards."""

  def __init__(self,
               observation_distribution,
               reward_distribution,
               action_spec=None):
    """Initializes an environment that returns random observations and rewards.

    Note that `observation_distribution` and `reward_distribution` are expected
    to have batch rank 1. That is, `observation_distribution.batch_shape` should
    have length exactly 1. `tensorflow_probability.distributions.Independent` is
    useful for manipulating batch and event shapes. For example,

    ```python
    observation_distribution = tfd.Independent(tfd.Normal(tf.zeros([12, 3, 4]),
                                                          tf.ones([12, 3, 4])))
    env = RandomBanditEnvironment(observation_distribution, ...)
    env.observation_spec  # tensor_spec.TensorSpec(shape=[3, 4], ...)
    env.batch_size  # 12
    ```

    Args:
      observation_distribution: a `tensorflow_probability.Distribution`.
        Batches of observations will be drawn from this distribution. The
        `batch_shape` of this distribution must have length 1 and be the same as
        the `batch_shape` of `reward_distribution`.
      reward_distribution: a `tensorflow_probability.Distribution`.
        Batches of rewards will be drawn from this distribution. The
        `batch_shape` of this distribution must have length 1 and be the same as
        the `batch_shape` of `observation_distribution`.
      action_spec: a `TensorSpec` describing the expected action. Note that
        actions are ignored and do not affect rewards.
    """
    observation_batch_shape = observation_distribution.batch_shape
    reward_batch_shape = reward_distribution.batch_shape
    reward_event_shape = reward_distribution.event_shape

    if observation_batch_shape.rank != 1:
      _raise_batch_shape_error(
          'observation_distribution', observation_batch_shape)

    if reward_batch_shape.rank != 1:
      # Bug fix: the original reported `observation_batch_shape` here, which
      # produced a misleading error message for a bad reward distribution.
      _raise_batch_shape_error(
          'reward_distribution', reward_batch_shape)

    if reward_event_shape.rank != 0:
      raise ValueError('`reward_distribution` must have event_shape (); '
                       'got {}'.format(reward_event_shape))

    if reward_distribution.dtype != tf.float32:
      # Bug fix: the original formatted `reward_distribution.float32`, a
      # non-existent attribute, so this path raised AttributeError instead of
      # the intended ValueError with the offending dtype.
      raise ValueError('`reward_distribution` must have dtype float32; '
                       'got {}'.format(reward_distribution.dtype))

    if observation_batch_shape[0] != reward_batch_shape[0]:
      raise ValueError(
          '`reward_distribution` and `observation_distribution` must have the '
          'same batch shape; got {} and {}'.format(
              reward_batch_shape, observation_batch_shape))

    batch_size = tf.compat.dimension_value(observation_batch_shape[0])
    self._observation_distribution = observation_distribution
    self._reward_distribution = reward_distribution

    observation_spec = tensor_spec.TensorSpec(
        shape=self._observation_distribution.event_shape,
        dtype=self._observation_distribution.dtype,
        name='observation_spec')
    time_step_spec = time_step.time_step_spec(observation_spec)
    super(RandomBanditEnvironment, self).__init__(time_step_spec=time_step_spec,
                                                  action_spec=action_spec,
                                                  batch_size=batch_size)

  def _apply_action(self, action):
    """Returns a batch of rewards; the action is ignored by construction."""
    del action  # unused
    return self._reward_distribution.sample()

  def _observe(self):
    """Returns a batch of observations drawn from the observation distribution."""
    return self._observation_distribution.sample()
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
96254c047f5ab42412198c47ef93c0af1d2d97ba | e537b9b866c6533ef4c488b0104070a3f865d40e | /joerd/store/s3.py | d0d448ec3dabf9fd4d2a674e36689e3fb00c1ac6 | [
"MIT"
] | permissive | mohammadrezabk/joerd | 0c3a65ddb746578f9c06574601dc91ea6af2de2e | 0b86765156d0612d837548c2cf70376c43b3405c | refs/heads/master | 2023-02-14T16:08:59.103192 | 2017-11-21T17:22:22 | 2017-11-21T17:22:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,965 | py | import boto3
from boto3.s3.transfer import TransferConfig
from botocore.exceptions import ClientError
from os import walk
import os.path
from contextlib2 import contextmanager
from joerd.tmpdir import tmpdir
import traceback
import sys
import time
import logging
# extension to mime type mappings to help with serving the S3 bucket as
# a web site. if we add the content-type header on upload, then S3 will
# repeat it back when the tiles are accessed.
# NOTE: keys are lower-case extensions *including* the leading dot, as
# produced by os.path.splitext; files with other extensions are uploaded
# without an explicit Content-Type.
_MIME_TYPES = {
    '.png': 'image/png',
    '.tif': 'image/tif',
    '.xml': 'application/xml',
    '.gz': 'application/x-gzip',
}
# Stores files in S3
class S3Store(object):
    """Stores files in, and fetches them from, an S3 bucket.

    Config keys:
        bucket_name: name of the target S3 bucket (required).
        upload_config: dict of keyword arguments forwarded to boto3's
            TransferConfig (multipart thresholds, concurrency, ...).
    """

    def __init__(self, cfg):
        self.bucket_name = cfg.get('bucket_name')
        self.upload_config = cfg.get('upload_config')

        assert self.bucket_name is not None, \
            "Bucket name not configured for S3 store, but it must be."

        # cache the boto resource and s3 bucket - we don't know what this
        # contains, so it seems safe to assume we can't pass it across a
        # multiprocessing boundary.
        self.s3 = None
        self.bucket = None

    # This object is likely to get pickled to send it to other processes
    # for multiprocessing. However, the s3/boto objects are probably not
    # safe to be pickled, so we'll just set them to None and regenerate
    # them on the other side.
    def __getstate__(self):
        odict = self.__dict__.copy()
        del odict['s3']
        del odict['bucket']
        return odict

    def __setstate__(self, d):
        self.__dict__.update(d)
        self.s3 = None
        self.bucket = None

    def _get_bucket(self):
        # Lazily (re)create the boto3 resource and bucket handle. These are
        # dropped by __getstate__, so this runs again after unpickling.
        if self.s3 is None or self.bucket is None:
            self.s3 = boto3.resource('s3')
            self.bucket = self.s3.Bucket(self.bucket_name)
        return self.bucket

    def upload_all(self, d):
        """Recursively upload every file under directory `d`.

        The S3 key for each file is its path relative to `d`.
        """
        # ensure a trailing slash so that stripping `d` as a prefix below
        # never leaves the relative key starting with a '/'.
        if not d.endswith('/'):
            d = d + "/"

        transfer_config = TransferConfig(**self.upload_config)
        for dirpath, dirs, files in walk(d):
            if dirpath.startswith(d):
                suffix = dirpath[len(d):]
                self._upload_files(dirpath, suffix, files, transfer_config)

    def _upload_files(self, dirpath, suffix, files, transfer_config):
        # Upload each file in one directory, attaching a Content-Type header
        # for known extensions so S3 serves them back correctly.
        for f in files:
            src_name = os.path.join(dirpath, f)
            s3_key = os.path.join(suffix, f)
            ext = os.path.splitext(f)[1]
            mime = _MIME_TYPES.get(ext)

            extra_args = {}
            if mime:
                extra_args['ContentType'] = mime

            # retry up to 6 times, waiting 32 (=2^5) seconds before the final
            # attempt.
            tries = 6
            self.retry_upload_file(src_name, s3_key, transfer_config,
                                   extra_args, tries)

    def retry_upload_file(self, src_name, s3_key, transfer_config,
                          extra_args, tries, backoff=1):
        """Upload a single file with exponential backoff.

        Retries `tries` times after the initial failure, doubling the sleep
        between attempts, then re-raises the last error.
        """
        logger = logging.getLogger('s3')

        bucket = self._get_bucket()
        try_num = 0
        while True:
            try:
                bucket.upload_file(src_name, s3_key,
                                   Config=transfer_config,
                                   ExtraArgs=extra_args)
                break
            # NOTE: this previously caught StandardError, which was removed
            # in Python 3 and would itself raise a NameError there.
            except Exception:
                try_num += 1
                logger.warning("Try %d of %d: Failed to upload %s due to: %s" \
                               % (try_num, tries, s3_key,
                                  "".join(traceback.format_exception(
                                      *sys.exc_info()))))
                if try_num > tries:
                    raise
                time.sleep(backoff)
                backoff *= 2

    @contextmanager
    def upload_dir(self):
        """Yield a temporary directory; upload its contents on clean exit.

        Nothing is uploaded if the `with` body raises, and the temporary
        directory is removed either way.
        """
        with tmpdir() as t:
            yield t
            self.upload_all(t)

    def exists(self, filename):
        """Return True if `filename` exists as a key in the bucket."""
        bucket = self._get_bucket()
        exists = False
        try:
            obj = bucket.Object(filename)
            obj.load()
        except ClientError as e:
            code = e.response['Error']['Code']
            # 403 is returned instead of 404 when the bucket doesn't allow
            # LIST operations, so treat that as missing as well.
            if code == "404" or code == "403":
                exists = False
            else:
                raise e
        else:
            exists = True
        return exists

    def get(self, source, dest):
        """Download key `source` from the bucket to local path `dest`.

        Raises:
            RuntimeError: wrapping the underlying error, with a traceback
                of the original cause in the message.
        """
        try:
            bucket = self._get_bucket()
            obj = bucket.Object(source)
            obj.download_file(dest)
        # was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception.
        except Exception:
            raise RuntimeError("Failed to download %r, due to: %s"
                               % (source, "".join(traceback.format_exception(
                                   *sys.exc_info()))))
def create(cfg):
    """Factory entry point: build an S3Store from a config mapping."""
    return S3Store(cfg)
| [
"zerebubuth@gmail.com"
] | zerebubuth@gmail.com |
f934729068d063b44311238abbd5b002b0319ae6 | f042383cbc9f10837ebdb5b9033a0263f6a43698 | /python_modules/dagster/dagster/core/asset_defs/assets_job.py | b510853c43b3a6bf0cbb7206a30b3fd81781f850 | [
"Apache-2.0"
] | permissive | helloworld/dagster | 664e6636d68bafa5151418c9d4316a565717f5ee | 779e27faa3e46b7d043cb9624617e655a9ed570c | refs/heads/master | 2022-03-24T12:15:36.626783 | 2022-02-26T01:34:29 | 2022-02-26T01:34:29 | 464,019,094 | 0 | 0 | Apache-2.0 | 2022-03-05T20:23:14 | 2022-02-27T02:38:17 | null | UTF-8 | Python | false | false | 11,534 | py | from typing import AbstractSet, Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union, cast
from dagster import check
from dagster.core.definitions.config import ConfigMapping
from dagster.core.definitions.decorators.op import op
from dagster.core.definitions.dependency import (
DependencyDefinition,
IDependencyDefinition,
NodeInvocation,
)
from dagster.core.definitions.events import AssetKey
from dagster.core.definitions.executor_definition import ExecutorDefinition
from dagster.core.definitions.graph_definition import GraphDefinition
from dagster.core.definitions.job_definition import JobDefinition
from dagster.core.definitions.op_definition import OpDefinition
from dagster.core.definitions.output import Out, OutputDefinition
from dagster.core.definitions.partition import PartitionedConfig, PartitionsDefinition
from dagster.core.definitions.partition_key_range import PartitionKeyRange
from dagster.core.definitions.resource_definition import ResourceDefinition
from dagster.core.errors import DagsterInvalidDefinitionError
from dagster.core.execution.context.input import InputContext, build_input_context
from dagster.core.execution.context.output import build_output_context
from dagster.core.storage.fs_asset_io_manager import fs_asset_io_manager
from dagster.core.storage.root_input_manager import RootInputManagerDefinition, root_input_manager
from dagster.utils.backcompat import experimental
from dagster.utils.merger import merge_dicts
from .asset import AssetsDefinition
from .asset_partitions import get_upstream_partitions_for_partition_range
from .source_asset import SourceAsset
@experimental
def build_assets_job(
    name: str,
    assets: List[AssetsDefinition],
    source_assets: Optional[Sequence[Union[SourceAsset, AssetsDefinition]]] = None,
    resource_defs: Optional[Dict[str, ResourceDefinition]] = None,
    description: Optional[str] = None,
    config: Union[ConfigMapping, Dict[str, Any], PartitionedConfig] = None,
    tags: Optional[Dict[str, Any]] = None,
    executor_def: Optional[ExecutorDefinition] = None,
) -> JobDefinition:
    """Builds a job that materializes the given assets.

    The dependencies between the ops in the job are determined by the asset dependencies defined
    in the metadata on the provided asset nodes.

    Args:
        name (str): The name of the job.
        assets (List[AssetsDefinition]): A list of assets or
            multi-assets - usually constructed using the :py:func:`@asset` or :py:func:`@multi_asset`
            decorator.
        source_assets (Optional[Sequence[Union[SourceAsset, AssetsDefinition]]]): A list of
            assets that are not materialized by this job, but that assets in this job depend on.
        resource_defs (Optional[Dict[str, ResourceDefinition]]): Resource defs to be included in
            this job.
        description (Optional[str]): A description of the job.

    Raises:
        DagsterInvalidDefinitionError: If two assets share a key, if an input asset is neither
            produced by this job nor listed in ``source_assets``, or if partitioned assets have
            mismatched partitions definitions.

    Examples:

        .. code-block:: python

            @asset
            def asset1():
                return 5

            @asset
            def asset2(asset1):
                return my_upstream_asset + 1

            my_assets_job = build_assets_job("my_assets_job", assets=[asset1, asset2])

    Returns:
        JobDefinition: A job that materializes the given assets.
    """
    check.str_param(name, "name")
    check.list_param(assets, "assets", of_type=AssetsDefinition)
    check.opt_list_param(source_assets, "source_assets", of_type=(SourceAsset, AssetsDefinition))
    check.opt_str_param(description, "description")
    # index externally-produced assets so inputs can be resolved against them
    source_assets_by_key = build_source_assets_by_key(source_assets)

    # wire op-to-op dependencies from the asset dependency graph
    op_defs = build_op_deps(assets, source_assets_by_key.keys())
    # root manager loads source assets via their declared io managers
    root_manager = build_root_manager(source_assets_by_key)
    # derived partitioned config is used only when no explicit config is given
    partitioned_config = build_job_partitions_from_assets(assets)

    return GraphDefinition(
        name=name,
        node_defs=[asset.op for asset in assets],
        dependencies=op_defs,
        description=description,
        input_mappings=None,
        output_mappings=None,
        config=None,
    ).to_job(
        # caller-supplied resources may override the default io_manager but
        # not the root_manager, which is merged in last
        resource_defs=merge_dicts(
            {"io_manager": fs_asset_io_manager}, resource_defs or {}, {"root_manager": root_manager}
        ),
        config=config or partitioned_config,
        tags=tags,
        executor_def=executor_def,
    )
def build_job_partitions_from_assets(
    assets: Sequence[AssetsDefinition],
) -> Optional[PartitionedConfig]:
    """Derive a PartitionedConfig for a job from its partitioned assets.

    Returns None when no asset is partitioned. All partitioned assets must
    share the same partitions definition; otherwise a
    DagsterInvalidDefinitionError is raised.
    """
    assets_with_partitions_defs = [assets_def for assets_def in assets if assets_def.partitions_def]

    if len(assets_with_partitions_defs) == 0:
        return None

    # enforce that every partitioned asset uses the same partitions definition
    first_assets_with_partitions_def = assets_with_partitions_defs[0]
    for assets_def in assets_with_partitions_defs:
        if assets_def.partitions_def != first_assets_with_partitions_def.partitions_def:
            first_asset_key = next(iter(assets_def.asset_keys)).to_string()
            second_asset_key = next(iter(first_assets_with_partitions_def.asset_keys)).to_string()
            raise DagsterInvalidDefinitionError(
                "When an assets job contains multiple partitions assets, they must have the "
                f"same partitions definitions, but asset '{first_asset_key}' and asset "
                f"'{second_asset_key}' have different partitions definitions. "
            )

    assets_defs_by_asset_key = {
        asset_key: assets_def for assets_def in assets for asset_key in assets_def.asset_keys
    }

    def asset_partitions_for_job_partition(
        job_partition_key: str,
    ) -> Mapping[AssetKey, PartitionKeyRange]:
        # a job partition maps to the single-key range [key, key] on every
        # partitioned asset
        return {
            asset_key: PartitionKeyRange(job_partition_key, job_partition_key)
            for assets_def in assets
            for asset_key in assets_def.asset_keys
            if assets_def.partitions_def
        }

    def run_config_for_partition_fn(partition_key: str) -> Dict[str, Any]:
        # build per-op config describing which partition ranges each op's
        # inputs and outputs cover for this job partition
        ops_config: Dict[str, Any] = {}
        asset_partitions_by_asset_key = asset_partitions_for_job_partition(partition_key)

        for assets_def in assets:
            outputs_dict: Dict[str, Dict[str, Any]] = {}
            if assets_def.partitions_def is not None:
                for asset_key, output_def in assets_def.output_defs_by_asset_key.items():
                    asset_partition_key_range = asset_partitions_by_asset_key[asset_key]
                    outputs_dict[output_def.name] = {
                        "start": asset_partition_key_range.start,
                        "end": asset_partition_key_range.end,
                    }

            inputs_dict: Dict[str, Dict[str, Any]] = {}
            for in_asset_key, input_def in assets_def.input_defs_by_asset_key.items():
                upstream_assets_def = assets_defs_by_asset_key[in_asset_key]
                # inputs get a range only when both sides are partitioned
                # NOTE(review): asset_partition_key_range here carries over
                # from the outputs loop above; all of this asset's outputs
                # share one range, so this looks intentional — confirm.
                if (
                    assets_def.partitions_def is not None
                    and upstream_assets_def.partitions_def is not None
                ):
                    upstream_partition_key_range = get_upstream_partitions_for_partition_range(
                        assets_def, upstream_assets_def, in_asset_key, asset_partition_key_range
                    )
                    inputs_dict[input_def.name] = {
                        "start": upstream_partition_key_range.start,
                        "end": upstream_partition_key_range.end,
                    }

            ops_config[assets_def.op.name] = {
                "config": {
                    "assets": {
                        "input_partitions": inputs_dict,
                        "output_partitions": outputs_dict,
                    }
                }
            }

        return {"ops": ops_config}

    return PartitionedConfig(
        partitions_def=cast(PartitionsDefinition, first_assets_with_partitions_def.partitions_def),
        run_config_for_partition_fn=lambda p: run_config_for_partition_fn(p.name),
    )
def build_source_assets_by_key(
    source_assets: Optional[Sequence[Union[SourceAsset, AssetsDefinition]]]
) -> Mapping[AssetKey, Union[SourceAsset, OutputDefinition]]:
    """Index the given source assets by asset key.

    SourceAssets map directly from their key; AssetsDefinitions contribute
    one entry per (truthy) output asset key, mapping to the output def.
    Returns an empty mapping when source_assets is None or empty.
    """
    by_key: Dict[AssetKey, Union[SourceAsset, OutputDefinition]] = {}
    for source in source_assets or []:
        if isinstance(source, SourceAsset):
            by_key[source.key] = source
        elif isinstance(source, AssetsDefinition):
            for key, out_def in source.output_defs_by_asset_key.items():
                if not key:
                    continue
                by_key[key] = out_def
    return by_key
def build_op_deps(
    multi_asset_defs: List[AssetsDefinition], source_paths: AbstractSet[AssetKey]
) -> Dict[Union[str, NodeInvocation], Dict[str, IDependencyDefinition]]:
    """Build the graph dependency mapping from asset input/output keys.

    Raises DagsterInvalidDefinitionError if two definitions produce the
    same asset key, or if an input asset key is neither produced by one of
    the given ops nor present in ``source_paths``.
    """
    # first pass: record which (op, output) produces each asset key
    op_outputs_by_asset: Dict[AssetKey, Tuple[OpDefinition, str]] = {}
    for multi_asset_def in multi_asset_defs:
        for asset_key, output_def in multi_asset_def.output_defs_by_asset_key.items():
            if asset_key in op_outputs_by_asset:
                raise DagsterInvalidDefinitionError(
                    f"The same asset key was included for two definitions: '{asset_key.to_string()}'"
                )

            op_outputs_by_asset[asset_key] = (multi_asset_def.op, output_def.name)

    # second pass: wire each op input to its producing op's output
    op_deps: Dict[Union[str, NodeInvocation], Dict[str, IDependencyDefinition]] = {}
    for multi_asset_def in multi_asset_defs:
        op_name = multi_asset_def.op.name
        op_deps[op_name] = {}
        for asset_key, input_def in multi_asset_def.input_defs_by_asset_key.items():
            if asset_key in op_outputs_by_asset:
                op_def, output_name = op_outputs_by_asset[asset_key]
                op_deps[op_name][input_def.name] = DependencyDefinition(op_def.name, output_name)
            # Nothing-typed inputs carry no data, so they need no producer
            elif asset_key not in source_paths and not input_def.dagster_type.is_nothing:
                raise DagsterInvalidDefinitionError(
                    f"Input asset '{asset_key.to_string()}' for asset '{op_name}' is not "
                    "produced by any of the provided asset ops and is not one of the provided "
                    "sources"
                )

    return op_deps
def build_root_manager(
    source_assets_by_key: Mapping[AssetKey, Union[SourceAsset, OutputDefinition]]
) -> RootInputManagerDefinition:
    """Create a root input manager that loads source assets.

    The returned manager looks up the requested asset key, then delegates
    to that source asset's declared io manager resource to load the value.
    """
    # the manager must require every io manager any source asset uses
    source_asset_io_manager_keys = {
        source_asset.io_manager_key for source_asset in source_assets_by_key.values()
    }

    @root_input_manager(required_resource_keys=source_asset_io_manager_keys)
    def _root_manager(input_context: InputContext) -> Any:
        source_asset_key = cast(AssetKey, input_context.asset_key)
        source_asset = source_assets_by_key[source_asset_key]

        # synthesize a stand-in op so we can fabricate an "upstream output"
        # context for the io manager to load from
        @op(out={source_asset_key.path[-1]: Out(asset_key=source_asset_key)})
        def _op():
            pass

        output_context = build_output_context(
            name=source_asset_key.path[-1],
            step_key="none",
            solid_def=_op,
            metadata=source_asset.metadata,
        )
        input_context_with_upstream = build_input_context(
            name=input_context.name,
            metadata=input_context.metadata,
            config=input_context.config,
            dagster_type=input_context.dagster_type,
            upstream_output=output_context,
            op_def=input_context.op_def,
        )

        # delegate the actual load to the source asset's io manager resource
        io_manager = getattr(cast(Any, input_context.resources), source_asset.io_manager_key)
        return io_manager.load_input(input_context_with_upstream)

    return _root_manager
| [
"noreply@github.com"
] | helloworld.noreply@github.com |
a233d4e8b9afc6d98a3d8ee9809d4b0450623742 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /Ff84aGq6e7gjKYh8H_6.py | 44234b3c35513f59dda8ddfcdac696049dd11660 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py |
def minutes_to_seconds(time):
    """Convert a "MM:SS" duration string to a total number of seconds.

    Returns False when the seconds component is 60 or more (not a valid
    second count); otherwise returns minutes * 60 + seconds as an int.

    The original sliced fixed positions (time[-2:]), which crashed on
    single-digit seconds such as "3:5"; splitting on ':' handles any
    widths and parses each component only once.
    """
    minutes, _, seconds = time.partition(':')
    if int(seconds) >= 60:
        return False
    return int(minutes) * 60 + int(seconds)
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.