blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6d80a10c0149fda0b1fea3828782d1ac2411cf44 | 0cb98b92463ca7187e4c617b5bd634a9946f99b3 | /Arrays/VectorADT/tests.py | 7286c8b0b9a49e05ac1784dee7f62fc30cb911a0 | [] | no_license | Michael-Odhiambo/Data-Structures-And-Algorithms-using-Python | 73e828e1d3ca9cd5a3419cdb945babb6c8a4e54f | 29bdef4a43323bd5ff719a0082226ac11b929b39 | refs/heads/master | 2023-04-14T13:13:15.920594 | 2021-04-21T20:43:31 | 2021-04-21T20:43:31 | 291,930,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py |
# Sample data: list2 is created but never used in this snippet.
list1 = [ 1, 2, 3, 4]
list2 = []
# n starts at the last valid index of list1.
n = len( list1 ) - 1
index = 1
# Print elements from the end of the list backwards.
# NOTE(review): the loop stops once n < index (= 1), so list1[0] is never
# printed -- presumably intentional for the exercise, but worth confirming.
while n >= index :
    print( list1[n] )
    n -= 1 | [
"odhiambomichaelallan@gmail.com"
] | odhiambomichaelallan@gmail.com |
f7a384c4dd7aed86157ed6c844fbe54c92a49c25 | 221cada2354556fbb969f25ddd3079542904ef5d | /Leetcode/109.py | fb737687d1edaa018d6025e08430617852fcba01 | [] | no_license | syzdemonhunter/Coding_Exercises | 4b09e1a7dad7d1e3d4d4ae27e6e006732ffdcb1d | ca71572677d2b2a2aed94bb60d6ec88cc486a7f3 | refs/heads/master | 2020-05-24T11:19:35.019543 | 2019-11-22T20:08:32 | 2019-11-22T20:08:32 | 187,245,394 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 981 | py | # https://leetcode.com/problems/convert-sorted-list-to-binary-search-tree/
# T: O(n)
# S: O(n)
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def sortedListToBST(self, head: ListNode) -> TreeNode:
        """Build a height-balanced BST from a sorted singly linked list.

        Each subtree root is the middle node of its sublist, found with a
        two-pointer scan, giving O(n log n) time and O(log n) call depth.
        """
        if head is None:
            return None
        return self.to_bst(head, None)
    def to_bst(self, head, tail):
        """Convert the half-open node range [head, tail) into a balanced BST."""
        if head == tail:
            return None
        # `runner` advances two steps per one step of `mid`; when `runner`
        # hits the end of the range, `mid` sits on the middle node.
        mid = runner = head
        while runner != tail and runner.next != tail:
            runner = runner.next.next
            mid = mid.next
        # The middle value roots this subtree; recurse on the two halves.
        node = TreeNode(mid.val)
        node.left = self.to_bst(head, mid)
        node.right = self.to_bst(mid.next, tail)
        return node
| [
"syzuser60@gmail.com"
] | syzuser60@gmail.com |
31503b0c217f37c86eff34f3a100b8f183473606 | 8f75f8e91bb379cc05eded1a89a6c2f550bae6e6 | /jumpscale/data/idgenerator/idgenerator.py | dd7046fca43082a6850b88b15924070f380dcd47 | [] | no_license | despiegk/js-ng | 24c09f653ec4abfb1b997811a17c254eede89304 | e796f6ae31363e2f6daadd5ad377b5d3b116f657 | refs/heads/master | 2021-05-23T01:48:17.168454 | 2019-08-28T19:48:05 | 2019-08-28T19:48:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 847 | py | import random
import string
import uuid
def random_int(from_, to):
    """Return a random integer N with from_ <= N <= to (both ends inclusive)."""
    return random.randint(from_, to)
def incrementor_id():
    """Return a monotonically increasing id -- not implemented yet."""
    # Intended implementation: use redis INCR for a shared atomic counter.
    raise NotImplementedError()
def guid():
    """Return a fresh random UUID4 rendered in its canonical string form."""
    identifier = uuid.uuid4()
    return str(identifier)
def nfromchoices(n, choices):
    """Return a string of *n* characters, each drawn uniformly from *choices*."""
    return "".join(random.choice(choices) for _ in range(n))
def chars(nchars):
    """Return a random alphanumeric string (ASCII letters + digits) of length *nchars*."""
    alphabet = string.ascii_letters + string.digits
    return nfromchoices(nchars, alphabet)
def nbytes(nbytes):
    """Return a bytearray containing *nbytes* uniformly random byte values."""
    buf = bytearray()
    for _ in range(nbytes):
        buf.append(random_int(0, 255))
    return buf
def password(nchars):
    """Return a random string of *nchars* characters drawn from string.printable.

    Note that string.printable includes whitespace characters, so the result
    may contain spaces, tabs or newlines.
    """
    return nfromchoices(nchars, string.printable)
def capnp_id():
    """
    Generates a valid id for a capnp schema.
    """
    # the bitwise is for validating the id check capnp/parser.c++
    # Forcing the top bit on keeps the value >= 2**63, as capnp requires.
    return hex(random.randint(0, 2 ** 64) | 1 << 63) | [
"xmonader@gmail.com"
] | xmonader@gmail.com |
2af736a948b077e2294b0cb97cf0ee15aeca7972 | 13ea6fa027c8ae33852bde3335846cdaab78ee71 | /DataScienceWithPython/sample_python_code/statistic/stat7.py | d760c7ef157fc2e0048a71b6ec1b276ab8263ddd | [] | no_license | dmonisankar/pythonworks | c98de04b191135451556ca9d1ee513a0a69f2edb | 4f3a14460272ec959c2f2e6975814d9ac43cb90a | refs/heads/master | 2023-03-31T00:36:46.016403 | 2020-06-11T05:39:36 | 2020-06-11T05:39:36 | 271,455,493 | 0 | 0 | null | 2021-03-20T04:19:45 | 2020-06-11T05:00:00 | Jupyter Notebook | UTF-8 | Python | false | false | 1,132 | py | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
def ecdf(data):
    """Return (x, y) coordinates of the empirical CDF of *data*.

    x holds the measurements sorted ascending; y holds the fraction of
    points at or below each x value (1/n, 2/n, ..., 1).
    """
    size = len(data)
    sorted_values = np.sort(data)
    fractions = np.arange(1, size + 1) / size
    return sorted_values, fractions
# Plot the ECDF of versicolor petal lengths from iris.csv (expected in the
# working directory), overlaying selected percentiles as red diamonds.
df = pd.read_csv('iris.csv')
df1 = df.loc[df['species'] == 'versicolor']
versicolor_petal_length = df1['petal_length']
# Compute ECDF for versicolor data: x_vers, y_vers
x_vers, y_vers = ecdf(versicolor_petal_length)
# Generate plot
_ = plt.plot(x_vers, y_vers, marker='.', linestyle='none')
# Make the margins nice
plt.margins(0.02)
# Label the axes (BUG FIX: label previously read 'veriscolor')
_ = plt.xlabel('versicolor petal length')
_ = plt.ylabel('ECDF')
# Specify array of percentiles: percentiles
percentiles = np.array([2.5, 25, 50, 75, 97.5])
# Compute percentiles: ptiles_vers
ptiles_vers = np.percentile(versicolor_petal_length, percentiles)
# Print the result
print(ptiles_vers)
# Overlay the percentile points on the ECDF (y is the percentile as a fraction)
_ = plt.plot(ptiles_vers, percentiles / 100, marker='D', color='red',
             linestyle='none')
# Display the plot
plt.show()
| [
"das.monisankar@gmail.com"
] | das.monisankar@gmail.com |
a88f2074bcffc41af125c87593f07202ed0c0cfc | a1c9c55e1520356113a320be18e8fcb31654a944 | /archive/0.9/generated/seaborn-scatterplot-11.py | 7bdecc05f79286cfec7618fec2371fbbf86689a0 | [] | no_license | seaborn/seaborn.github.io | bac12a9255b41c7971e9e94ea393d372ef66ef62 | f70445bc3456f0216169806c2daf03452ca1eba4 | refs/heads/master | 2023-01-06T10:50:10.789810 | 2022-12-30T19:59:55 | 2022-12-30T19:59:55 | 70,731,605 | 16 | 5 | null | 2022-06-28T00:32:07 | 2016-10-12T18:56:12 | HTML | UTF-8 | Python | false | false | 170 | py | markers = {"Lunch": "s", "Dinner": "X"}
# Scatter total_bill vs. tip; the `time` column selects the marker shape
# from the `markers` mapping defined above.
# NOTE(review): assumes `sns` is seaborn and `tips` is the seaborn tips
# dataset loaded earlier in the example -- confirm in surrounding context.
ax = sns.scatterplot(x="total_bill", y="tip", style="time",
                     markers=markers,
                     data=tips)
| [
"mwaskom@nyu.edu"
] | mwaskom@nyu.edu |
e0471aadbd2d2558d2a7e7a2b9b57fc8388cda46 | f6a24e51b6012b582d76db0b2e1e27950729b7bb | /setup.py | acd29caffd3fcd37f13c6290681e0fec2b0f9b4c | [
"LicenseRef-scancode-cecill-b-en"
] | permissive | mathieubonnet/capsul | 391733a2391c1191b643e6847b5f757cf77c1255 | c9745e339c24fc6a27d0adcc1e0c91b355588cac | refs/heads/master | 2020-04-09T02:54:29.257904 | 2015-03-04T14:36:08 | 2015-03-04T14:36:08 | 31,950,724 | 0 | 0 | null | 2015-03-10T10:11:37 | 2015-03-10T10:11:37 | null | UTF-8 | Python | false | false | 2,326 | py | #! /usr/bin/env python
##########################################################################
# CAPSUL - Copyright (C) CEA, 2013
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
# System import
from ez_setup import use_setuptools
use_setuptools()
import os
from setuptools import find_packages, setup
import argparse
import sys
# Select which package is created: core or gui
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("--gui", help="Create the gui package.",
action="store_true")
options, unknown = parser.parse_known_args()
sys.argv = [sys.argv[0]] + unknown
# Select appropriate modules
modules = find_packages()
core_modules = []
gui_modules = ["capsul"]
for module in modules:
if module.startswith("capsul.wip"):
continue
if module.startswith(("capsul.qt_apps", "capsul.qt_gui")):
gui_modules.append(module)
else:
core_modules.append(module)
# Set selcted package options
if options.gui:
import capsul
name_suffix = "gui"
modules = gui_modules
scripts = ["capsul/qt_apps/capsulview"]
pkgdata = {"capsul.qt_apps.resources": ["*.ui", "*.png", "*.qrc", "*.txt"]}
release_info = {}
execfile(os.path.join(os.path.dirname(capsul.__file__), "info.py"),
release_info)
else:
name_suffix = "core"
modules = core_modules
scripts = []
pkgdata = {}
release_info = {}
execfile(os.path.join("capsul", "info.py"), release_info)
# Build the setup
setup(
name="{0}-{1}".format(release_info["NAME"], name_suffix),
description=release_info["DESCRIPTION"],
long_description=release_info["LONG_DESCRIPTION"],
license=release_info["LICENSE"],
classifiers=release_info["CLASSIFIERS"],
author=release_info["AUTHOR"],
author_email=release_info["AUTHOR_EMAIL"],
version=release_info["VERSION"],
url=release_info["URL"],
packages=modules,
package_data=pkgdata,
platforms=release_info["PLATFORMS"],
extras_require=release_info["EXTRA_REQUIRES"],
install_requires=release_info["REQUIRES"],
scripts=scripts
)
| [
"antoine.grigis@cea.fr"
] | antoine.grigis@cea.fr |
ea48f51e2344745afe21a09d81054b7b5ad65438 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-roma/huaweicloudsdkroma/v2/model/create_product_request.py | a8b36ec3e8dd9957b6f1471b71a6f6b4d646caf7 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 3,922 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class CreateProductRequest:
    """Auto-generated request model for the CreateProduct API.

    Attributes:
        openapi_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute names whose values must be masked as "****" by to_dict().
    sensitive_list = []
    openapi_types = {
        'instance_id': 'str',
        'body': 'CreateProductRequestBody'
    }
    attribute_map = {
        'instance_id': 'instance_id',
        'body': 'body'
    }
    def __init__(self, instance_id=None, body=None):
        """CreateProductRequest

        The model defined in huaweicloud sdk

        :param instance_id: Instance ID
        :type instance_id: str
        :param body: Body of the CreateProductRequest
        :type body: :class:`huaweicloudsdkroma.v2.CreateProductRequestBody`
        """
        self._instance_id = None
        self._body = None
        self.discriminator = None
        # Assign through the property setters so any validation they add applies.
        self.instance_id = instance_id
        if body is not None:
            self.body = body
    @property
    def instance_id(self):
        """Gets the instance_id of this CreateProductRequest.

        Instance ID

        :return: The instance_id of this CreateProductRequest.
        :rtype: str
        """
        return self._instance_id
    @instance_id.setter
    def instance_id(self, instance_id):
        """Sets the instance_id of this CreateProductRequest.

        Instance ID

        :param instance_id: The instance_id of this CreateProductRequest.
        :type instance_id: str
        """
        self._instance_id = instance_id
    @property
    def body(self):
        """Gets the body of this CreateProductRequest.

        :return: The body of this CreateProductRequest.
        :rtype: :class:`huaweicloudsdkroma.v2.CreateProductRequestBody`
        """
        return self._body
    @body.setter
    def body(self, body):
        """Sets the body of this CreateProductRequest.

        :param body: The body of this CreateProductRequest.
        :type body: :class:`huaweicloudsdkroma.v2.CreateProductRequestBody`
        """
        self._body = body
    def to_dict(self):
        """Returns the model properties as a dict (sensitive values masked)."""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize model objects held in lists.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize model objects held as dict values.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        # simplejson is imported lazily; it is a runtime dependency of repr().
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # NOTE: defining __eq__ without __hash__ leaves instances unhashable
        # in Python 3 (hash support is implicitly removed), so these models
        # cannot be used as dict keys or set members.
        if not isinstance(other, CreateProductRequest):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
5e180554a5c3e85632e5b64aed8b28f0e3a7121f | 8ef8e6818c977c26d937d09b46be0d748022ea09 | /cv/pose/alphapose/pytorch/trackers/utils/transform.py | 1017f4807614151a663a8f84628b8d568304a987 | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Deep-Spark/DeepSparkHub | eb5996607e63ccd2c706789f64b3cc0070e7f8ef | 9d643e88946fc4a24f2d4d073c08b05ea693f4c5 | refs/heads/master | 2023-09-01T11:26:49.648759 | 2023-08-25T01:50:18 | 2023-08-25T01:50:18 | 534,133,249 | 7 | 6 | Apache-2.0 | 2023-03-28T02:54:59 | 2022-09-08T09:07:01 | Python | UTF-8 | Python | false | false | 5,633 | py | # encoding: utf-8
import torchvision.transforms as T
import math
import random
import torch
import cv2
import numpy as np
#from .functional import to_tensor
#from .transforms import *
class RandomErasing(object):
    """ Randomly selects a rectangle region in an image and erases its pixels.
        'Random Erasing Data Augmentation' by Zhong et al.
        See https://arxiv.org/pdf/1708.04896.pdf
    Args:
        probability: The probability that the Random Erasing operation will be performed.
        sl: Minimum proportion of erased area against input image.
        sh: Maximum proportion of erased area against input image.
        r1: Minimum aspect ratio of erased area.
        mean: Per-channel fill values (0-255 scale) written into the erased region.
    """

    # BUG FIX: the previous default `255 * (0.49735, 0.4822, 0.4465)` multiplied
    # the *tuple* (repeating it 765 times) instead of scaling each channel, so
    # erased pixels were filled with ~0.5 rather than the intended channel means
    # on the 0-255 scale. Each channel is now scaled individually.
    def __init__(self, probability=0.5, sl=0.02, sh=0.4, r1=0.3,
                 mean=(255 * 0.49735, 255 * 0.4822, 255 * 0.4465)):
        self.probability = probability
        self.mean = mean
        self.sl = sl
        self.sh = sh
        self.r1 = r1

    def __call__(self, img):
        """Return a float32 copy of *img* (HWC ndarray) with one rectangle erased.

        With probability (1 - self.probability) the image is returned unchanged.
        """
        img = np.asarray(img, dtype=np.float32).copy()
        if random.uniform(0, 1) > self.probability:
            return img
        # Try up to 100 times to sample a rectangle that fits inside the image.
        for attempt in range(100):
            area = img.shape[0] * img.shape[1]
            target_area = random.uniform(self.sl, self.sh) * area
            aspect_ratio = random.uniform(self.r1, 1 / self.r1)
            h = int(round(math.sqrt(target_area * aspect_ratio)))
            w = int(round(math.sqrt(target_area / aspect_ratio)))
            if w < img.shape[1] and h < img.shape[0]:
                x1 = random.randint(0, img.shape[0] - h)
                y1 = random.randint(0, img.shape[1] - w)
                if img.shape[2] == 3:
                    img[x1:x1 + h, y1:y1 + w, 0] = self.mean[0]
                    img[x1:x1 + h, y1:y1 + w, 1] = self.mean[1]
                    img[x1:x1 + h, y1:y1 + w, 2] = self.mean[2]
                else:
                    img[x1:x1 + h, y1:y1 + w, 0] = self.mean[0]
                return img
        # No rectangle fit: return the (unmodified) copy.
        return img
def to_tensor(pic):
    """Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor.

    See ``ToTensor`` for more details.

    NOTE: unlike torchvision's reference implementation, byte inputs are cast
    to float but NOT divided by 255, so values keep their original 0-255 scale.

    Args:
        pic (PIL Image or numpy.ndarray): Image to be converted to tensor.
    Returns:
        Tensor: Converted image.
    """
    if isinstance(pic, np.ndarray):
        assert len(pic.shape) in (2, 3)
        # handle numpy array: HW is promoted to HW1, then HWC -> CHW
        if pic.ndim == 2:
            pic = pic[:, :, None]
        img = torch.from_numpy(pic.transpose((2, 0, 1)))
        # backward compatibility: byte images become float (no 0-1 rescale)
        if isinstance(img, torch.ByteTensor):
            return img.float()
        else:
            return img
    # handle PIL Image -- target dtype is chosen from the image mode
    if pic.mode == 'I':
        img = torch.from_numpy(np.array(pic, np.int32, copy=False))
    elif pic.mode == 'I;16':
        img = torch.from_numpy(np.array(pic, np.int16, copy=False))
    elif pic.mode == 'F':
        img = torch.from_numpy(np.array(pic, np.float32, copy=False))
    elif pic.mode == '1':
        # 1-bit images are expanded to 0/255 byte values
        img = 255 * torch.from_numpy(np.array(pic, np.uint8, copy=False))
    else:
        img = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes()))
    # PIL image mode: L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK
    if pic.mode == 'YCbCr':
        nchannel = 3
    elif pic.mode == 'I;16':
        nchannel = 1
    else:
        nchannel = len(pic.mode)
    # PIL size is (width, height); reshape the flat buffer to H x W x C
    img = img.view(pic.size[1], pic.size[0], nchannel)
    # put it from HWC to CHW format
    # yikes, this transpose takes 80% of the loading time/CPU
    img = img.transpose(0, 1).transpose(0, 2).contiguous()
    if isinstance(img, torch.ByteTensor):
        return img.float()
    else:
        return img
class ToTensor(object):
    """Callable transform converting a PIL Image or numpy.ndarray (H x W x C)
    to a torch tensor in C x H x W layout by delegating to :func:`to_tensor`.

    NOTE: byte inputs are cast to float WITHOUT dividing by 255, so pixel
    values keep their original 0-255 scale.
    """

    def __call__(self, pic):
        """Convert *pic* (PIL Image or numpy.ndarray) to a tensor."""
        tensor = to_tensor(pic)
        return tensor

    def __repr__(self):
        return "{}()".format(self.__class__.__name__)
def build_transforms(cfg, is_train=True):
    """Assemble the torchvision preprocessing pipeline described by *cfg*.

    Training: resize -> optional horizontal flip -> optional pad + random
    crop -> optional random erasing. Evaluation: resize only. Both modes
    start with ToPILImage and end with the local ToTensor transform.
    """
    pipeline = [T.ToPILImage(mode=None)]
    if is_train:
        size_train = cfg["SIZE_TRAIN"]
        # horizontal flip
        do_flip = cfg["DO_FLIP"]
        flip_prob = cfg["FLIP_PROB"]
        # padding
        do_pad = cfg["DO_PAD"]
        padding = cfg["PADDING"]
        padding_mode = cfg["PADDING_MODE"]
        # random erasing
        do_re = cfg["RE_ENABLED"]
        pipeline.append(T.Resize(size_train, interpolation=3))
        if do_flip:
            pipeline.append(T.RandomHorizontalFlip(p=flip_prob))
        if do_pad:
            pipeline.append(T.Pad(padding, padding_mode=padding_mode))
            pipeline.append(T.RandomCrop(size_train))
        if do_re:
            pipeline.append(RandomErasing())
    else:
        pipeline.append(T.Resize(cfg["TEST_SIZE"], interpolation=3))
    pipeline.append(ToTensor())
    return T.Compose(pipeline)
| [
"mingjiang.li@iluvatar.ai"
] | mingjiang.li@iluvatar.ai |
4ee955897b4bc2938f504c37b319d2641f545dc6 | 9d0195aa83cc594a8c61f334b90375961e62d4fe | /JTTest/SL7/CMSSW_10_2_15/src/dataRunA/nano2848.py | 9ab549a8f4b116299be1d6cd38355828910e48c6 | [] | no_license | rsk146/CMS | 4e49592fc64f6438051544c5de18598db36ed985 | 5f8dab8c59ae556598b9747b52b88205fffc4dbe | refs/heads/master | 2022-12-01T03:57:12.126113 | 2020-08-04T03:29:27 | 2020-08-04T03:29:27 | 284,863,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,293 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: nanoAOD_jetToolbox_cff -s NANO --data --eventcontent NANOAOD --datatier NANOAOD --no_exec --conditions 102X_dataRun2_Sep2018Rereco_v1 --era Run2_2018,run2_nanoAOD_102Xv1 --customise_commands=process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False))) --customise JMEAnalysis/JetToolbox/nanoAOD_jetToolbox_cff.nanoJTB_customizeMC --filein /users/h2/rsk146/JTTest/SL7/CMSSW_10_6_12/src/ttbarCutTest/dataReprocessing/0004A5E9-9F18-6B42-B31D-4206406CE423.root --fileout file:jetToolbox_nano_datatest.root
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('NANO',eras.Run2_2018,eras.run2_nanoAOD_102Xv1)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
process.load('PhysicsTools.NanoAOD.nano_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
# -1 means: process every event in the input file(s)
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)
# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:root://cms-xrd-global.cern.ch//store/data/Run2018A/EGamma/MINIAOD/17Sep2018-v2/100000/8F43FFFC-D696-C240-B15C-C2706D1141BD.root'),
    secondaryFileNames = cms.untracked.vstring()
)
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
    annotation = cms.untracked.string('nanoAOD_jetToolbox_cff nevts:1'),
    name = cms.untracked.string('Applications'),
    version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.NANOAODoutput = cms.OutputModule("NanoAODOutputModule",
    compressionAlgorithm = cms.untracked.string('LZMA'),
    compressionLevel = cms.untracked.int32(9),
    dataset = cms.untracked.PSet(
        dataTier = cms.untracked.string('NANOAOD'),
        filterName = cms.untracked.string('')
    ),
    fileName = cms.untracked.string('file:jetToolbox_nano_datatest2848.root'),
    outputCommands = process.NANOAODEventContent.outputCommands
)
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '102X_dataRun2_Sep2018Rereco_v1', '')
# Path and EndPath definitions
process.nanoAOD_step = cms.Path(process.nanoSequence)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.NANOAODoutput_step = cms.EndPath(process.NANOAODoutput)
# Schedule definition
process.schedule = cms.Schedule(process.nanoAOD_step,process.endjob_step,process.NANOAODoutput_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# customisation of the process.
# Automatic addition of the customisation function from PhysicsTools.NanoAOD.nano_cff
from PhysicsTools.NanoAOD.nano_cff import nanoAOD_customizeData
#call to customisation function nanoAOD_customizeData imported from PhysicsTools.NanoAOD.nano_cff
process = nanoAOD_customizeData(process)
# Automatic addition of the customisation function from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff import nanoJTB_customizeMC
#call to customisation function nanoJTB_customizeMC imported from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
# NOTE(review): an MC customisation (nanoJTB_customizeMC) is applied to a
# data workflow (nanoAOD_customizeData above); generated from the original
# cmsDriver command line, but worth confirming it is intentional.
process = nanoJTB_customizeMC(process)
# End of customisation functions
# Customisation from command line
process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False)))
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion | [
"rsk146@scarletmail.rutgers.edu"
] | rsk146@scarletmail.rutgers.edu |
7d9e4da203e7b0fe44f41edd27aba9153d1d6ac4 | 3cf0d750948a758d5771dd778fbb783d64a044ae | /src/pads/pads/smawk.py | 948ab4c241686f0469e09ee2a3ff4dddbd1bbb79 | [
"MIT",
"CC-BY-NC-SA-4.0",
"Apache-2.0"
] | permissive | hbulpf/pydemo | 6552a08b3c85721ac1b2ba335b030e234ad03b6c | ea3e9f9086116a86ecef803e9e3179a34c94c20f | refs/heads/master | 2022-11-30T21:06:29.933820 | 2022-01-15T17:05:16 | 2022-01-15T17:05:16 | 237,584,300 | 6 | 1 | Apache-2.0 | 2022-11-22T09:49:38 | 2020-02-01T08:20:43 | Python | UTF-8 | Python | false | false | 7,823 | py | """SMAWK.py
Totally monotone matrix searching algorithms.
The offline algorithm in ConcaveMinima is from Agarwal, Klawe, Moran,
Shor, and Wilbur, Geometric applications of a matrix searching algorithm,
Algorithmica 2, pp. 195-208 (1987).
The online algorithm in OnlineConcaveMinima is from Galil and Park,
A linear time algorithm for concave one-dimensional dynamic programming,
manuscript, 1989, which simplifies earlier work on the same problem
by Wilbur (J. Algorithms 1988) and Eppstein (J. Algorithms 1990).
D. Eppstein, March 2002, significantly revised August 2005
"""
def ConcaveMinima(RowIndices, ColIndices, Matrix):
"""
Search for the minimum value in each column of a matrix.
The return value is a dictionary mapping ColIndices to pairs
(value,rowindex). We break ties in favor of earlier rows.
The matrix is defined implicitly as a function, passed
as the third argument to this routine, where Matrix(i,j)
gives the matrix value at row index i and column index j.
The matrix must be concave, that is, satisfy the property
Matrix(i,j) > Matrix(i',j) => Matrix(i,j') > Matrix(i',j')
for every i<i' and j<j'; that is, in every submatrix of
the input matrix, the positions of the column minima
must be monotonically nondecreasing.
The rows and columns of the matrix are labeled by the indices
given in order by the first two arguments. In most applications,
these arguments can simply be integer ranges.
"""
# Base case of recursion
if not ColIndices:
return {}
# Reduce phase: make number of rows at most equal to number of cols
stack = []
for r in RowIndices:
while len(stack) >= 1 and \
Matrix(stack[-1], ColIndices[len(stack) - 1]) \
> Matrix(r, ColIndices[len(stack) - 1]):
stack.pop()
if len(stack) != len(ColIndices):
stack.append(r)
RowIndices = stack
# Recursive call to search for every odd column
minima = ConcaveMinima(RowIndices,
[ColIndices[i]
for i in range(1, len(ColIndices), 2)],
Matrix)
# Go back and fill in the even rows
r = 0
for c in range(0, len(ColIndices), 2):
col = ColIndices[c]
row = RowIndices[r]
if c == len(ColIndices) - 1:
lastrow = RowIndices[-1]
else:
lastrow = minima[ColIndices[c + 1]][1]
pair = (Matrix(row, col), row)
while row != lastrow:
r += 1
row = RowIndices[r]
pair = min(pair, (Matrix(row, col), row))
minima[col] = pair
return minima
class OnlineConcaveMinima:
"""
Online concave minimization algorithm of Galil and Park.
OnlineConcaveMinima(Matrix,initial) creates a sequence of pairs
(self.value(j),self.index(j)), where
self.value(0) = initial,
self.value(j) = min { Matrix(i,j) | i < j } for j > 0,
and where self.index(j) is the value of j that provides the minimum.
Matrix(i,j) must be concave, in the same sense as for ConcaveMinima.
We never call Matrix(i,j) until value(i) has already been computed,
so that the Matrix function may examine previously computed values.
Calling value(i) for an i that has not yet been computed forces
the sequence to be continued until the desired index is reached.
Calling iter(self) produces a sequence of (value,index) pairs.
Matrix(i,j) should always return a value, rather than raising an
exception, even for j larger than the range we expect to compute.
If j is out of range, a suitable value to return that will not
violate concavity is Matrix(i,j) = -i. It will not work correctly
to return a flag value such as None for large j, because the ties
formed by the equalities among such flags may violate concavity.
"""
def __init__(self, Matrix, initial):
"""Initialize a OnlineConcaveMinima object."""
# State used by self.value(), self.index(), and iter(self)
self._values = [initial] # tentative solution values...
self._indices = [None] # ...and their indices
self._finished = 0 # index of last non-tentative value
# State used by the internal algorithm
#
# We allow self._values to be nonempty for indices > finished,
# keeping invariant that
# (1) self._values[i] = Matrix(self._indices[i], i),
# (2) if the eventual correct value of self.index(i) < base,
# then self._values[i] is nonempty and correct.
#
# In addition, we keep a column index self._tentative, such that
# (3) if i <= tentative, and the eventual correct value of
# self.index(i) <= finished, then self._values[i] is correct.
#
self._matrix = Matrix
self._base = 0
self._tentative = 0
def __iter__(self):
"""Loop through (value,index) pairs."""
i = 0
while True:
yield self.value(i), self.index(i)
i += 1
def value(self, j):
"""Return min { Matrix(i,j) | i < j }."""
while self._finished < j:
self._advance()
return self._values[j]
def index(self, j):
"""Return argmin { Matrix(i,j) | i < j }."""
while self._finished < j:
self._advance()
return self._indices[j]
def _advance(self):
"""Finish another value,index pair."""
# First case: we have already advanced past the previous tentative
# value. We make a new tentative value by applying ConcaveMinima
# to the largest square submatrix that fits under the base.
i = self._finished + 1
if i > self._tentative:
rows = range(self._base, self._finished + 1)
self._tentative = self._finished + len(rows)
cols = range(self._finished + 1, self._tentative + 1)
minima = ConcaveMinima(rows, cols, self._matrix)
for col in cols:
if col >= len(self._values):
self._values.append(minima[col][0])
self._indices.append(minima[col][1])
elif minima[col][0] < self._values[col]:
self._values[col], self._indices[col] = minima[col]
self._finished = i
return
# Second case: the new column minimum is on the diagonal.
# All subsequent ones will be at least as low,
# so we can clear out all our work from higher rows.
# As in the fourth case, the loss of tentative is
# amortized against the increase in base.
diag = self._matrix(i - 1, i)
if diag < self._values[i]:
self._values[i] = diag
self._indices[i] = self._base = i - 1
self._tentative = self._finished = i
return
# Third case: row i-1 does not supply a column minimum in
# any column up to tentative. We simply advance finished
# while maintaining the invariant.
prev_row = self._matrix(i - 1, self._tentative)
tentative_value = self._values[self._tentative]
if prev_row >= tentative_value:
self._finished = i
return
# Fourth and final case: a new column minimum at self._tentative.
# This allows us to make progress by incorporating rows
# prior to finished into the base. The base invariant holds
# because these rows cannot supply any later column minima.
# The work done when we last advanced tentative (and undone by
# this step) can be amortized against the increase in base.
self._base = i - 1
self._tentative = self._finished = i
return
| [
"hudalpf@163.com"
] | hudalpf@163.com |
2b9fce0d1039592d118c8db220b31dfeda6b619d | 161fd6370ffa0b35ecd50719d6266224da597ee0 | /Python/Django/ninjaGold/apps/ninjagold/urls.py | 402d56e647982cf8b69ddf3579a56486e220d103 | [] | no_license | ebergstein/DojoAssignments | a30fd8b36442bff2a4253902a591ad11f191fc12 | 3ad9ac65073c733ead32b93ce4be19af5369fccf | refs/heads/master | 2021-06-19T09:48:23.100713 | 2017-06-30T04:24:35 | 2017-06-30T04:24:35 | 82,743,546 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | from django.conf.urls import url
from . import views
# URL routes for the Ninja Gold game: landing page plus one endpoint per
# gold-earning location, each handled by the matching view function.
urlpatterns = [
    url(r'^$', views.index),
    url(r'^farm$', views.farm),
    url(r'^cave$', views.cave),
    url(r'^house$', views.house),
    url(r'^casino$', views.casino)
] | [
"ebergstein@sbcglobal.net"
] | ebergstein@sbcglobal.net |
ce7f5d0af87d499a9e90d621f57bd18256c57e02 | b998f07d5be9a339ee9d93f4143209246fc0613e | /docs/histogram/blur1.py | 1a09122fad5db92539484274aed6f898a5adf0d4 | [] | no_license | s-cosseddu/opencv-tutorial | a48c48c08efad2746dc1ff8ca1d9ecd1ef9e80b2 | be6c1da81d1cfaf1b47f1873adf0fdb50a7ab84c | refs/heads/master | 2023-02-07T03:08:29.968456 | 2020-02-09T20:05:02 | 2020-02-09T20:05:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | # Blurring
import cv2 as cv
def trackbar(x):
    """Trackbar callback: re-blur the image with the current (x, y) kernel."""
    x = cv.getTrackbarPos('blur x', 'window')
    # BUG FIX: previously this read 'blur x' again, so the y slider was ignored.
    y = cv.getTrackbarPos('blur y', 'window')
    # cv.blur requires a kernel of at least 1x1, but the sliders start at 0.
    x = max(1, x)
    y = max(1, y)
    blurred = cv.blur(img, (x, y))
    cv.imshow('window', blurred)
    cv.displayOverlay('window', f'blur = ({x}, {y})')
# Load the demo image and show it before any blurring is applied.
img = cv.imread('lego.png')
cv.imshow('window', img)
# Two sliders (0-4) drive the blur kernel width/height via the `trackbar` callback.
cv.createTrackbar('blur x', 'window', 0, 4, trackbar)
cv.createTrackbar('blur y', 'window', 0, 4, trackbar)
# Block until a key is pressed, then tear down all OpenCV windows.
cv.waitKey(0)
cv.destroyAllWindows() | [
"raphael.holzer@citycable.ch"
] | raphael.holzer@citycable.ch |
0e1ddf5c4cfad22d936a9c52c81dc25d7c5a267e | ce083b3b29f8218854c5b5171949f257558edfd3 | /raterapi/views/game.py | 7a7a9dd7c41fbd4c9e2f5f4c7dea59ba45095254 | [] | no_license | MarkyAaronYoung/raterproject | 195082d63bfb063b34b65a27850fd5211b47e5cc | 0c6b675f09a786d88f341641fab513c998699ad7 | refs/heads/main | 2023-02-02T20:24:24.581964 | 2020-12-05T18:08:25 | 2020-12-05T18:08:25 | 313,803,061 | 0 | 0 | null | 2020-12-05T18:08:26 | 2020-11-18T02:38:51 | Python | UTF-8 | Python | false | false | 2,275 | py | from django.core.exceptions import ValidationError
from rest_framework import status
from django.http import HttpResponseServerError
# FIX: was "from rest_framework.viewsets import import ViewSet" (SyntaxError:
# a doubled "import" keyword), which made the whole module unimportable.
from rest_framework.viewsets import ViewSet
from rest_framework.response import Response
from rest_framework import serializers
from raterapi.models import Game, Player, Category, Review
class GamesViewSet(ViewSet):
    """ViewSet exposing list / create / retrieve operations for board games."""

    def list(self, request):
        """Handle GET requests for all games.

        Returns:
            Response -- JSON serialized list of games
        """
        games = Game.objects.all()
        serializer = GameSerializer(games, many=True, context={'request': request})
        return Response(serializer.data)

    def create(self, request):
        """Handle POST requests to create a new game.

        Returns the serialized game on success, or 400 with the validation
        message when the model rejects the data.
        """
        game = Game()
        game.title = request.data["title"]
        # FIX: description is exposed by GameSerializer but was never copied
        # from the payload, so created games silently lost it.
        game.description = request.data["description"]
        game.number_of_players = request.data["numberOfPlayers"]
        game.year_released = request.data["yearReleased"]
        game.age_rec = request.data["ageRec"]
        game.play_time = request.data["playTime"]
        game.game_pic = request.data["gamePic"]
        game.rating = request.data["rating"]
        game.designer = request.data["designer"]

        category = Category.objects.get(pk=request.data["categoryId"])
        game.category = category

        try:
            game.save()
            serializer = GameSerializer(game, context={'request': request})
            return Response(serializer.data)
        except ValidationError as ex:
            return Response({"reason": ex.message}, status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, pk=None):
        """Handle GET requests for single game.

        Returns:
            Response -- JSON serialized game instance, or 404 when the pk
            does not exist (previously this surfaced as a 500).
        """
        try:
            game = Game.objects.get(pk=pk)
            serializer = GameSerializer(game, context={'request': request})
            return Response(serializer.data)
        except Game.DoesNotExist:
            # FIX: a missing game is a client error, not a server failure.
            return Response(status=status.HTTP_404_NOT_FOUND)
        except Exception as ex:
            return HttpResponseServerError(ex)
class GameSerializer(serializers.HyperlinkedModelSerializer):
    """JSON serializer for games with a hyperlinked `url` identity field."""

    # FIX: the identity field was declared inside Meta, where DRF ignores it
    # (so the custom view_name/lookup_field never took effect). It must be a
    # serializer attribute.
    url = serializers.HyperlinkedIdentityField(
        view_name='game',
        lookup_field='id'
    )

    class Meta:
        model = Game
        fields = ('id', 'url', 'title', 'designer', 'description',
                  'year_released', 'number_of_players', 'play_time',
                  'age_rec', 'category', 'game_pic', 'rating')
        depth = 1
| [
"markyaaronyoung@gmail.com"
] | markyaaronyoung@gmail.com |
8fe3baf1366251c0f42785474df76f23f3704ed1 | da96d29b457eb123c01274efea562448df105fc6 | /chapter6/st8.py | 527f6d571dd909e8aa75709b917a4ccdabec9642 | [] | no_license | Alonsovau/sketches | a1336f1a7909ad059744c4613ab992c8361264f5 | dfb072086cc813d7409fa11393ebaad6e26db180 | refs/heads/master | 2021-01-19T22:29:15.827896 | 2017-10-19T15:37:28 | 2017-10-19T15:37:28 | 88,761,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 592 | py | # 与关系型数据库的交互
import sqlite3
# Sample rows: (ticker symbol, share count, price per share).
stocks = [
    ('GOOG', 100, 490.1),
    ('AAPL', 50, 545.75),
    ('FB', 150, 7.45),
    ('HPQ', 76, 33.2)
]

db = sqlite3.connect('database.db')
c = db.cursor()

# One-time setup: run once to create and populate the table, then keep
# commented out.
# print(c.execute('create table portfolio (symbol text, shares integer, price real)'))
# db.commit()
# c.executemany('insert into portfolio values(?,?,?)', stocks)
# db.commit()

try:
    # Dump every row in the table.
    for row in db.execute('select * from portfolio'):
        print(row)

    print('-----------------')

    # Parameterised query: rows at or above a minimum price.
    min_price = 100
    for row in db.execute('select * from portfolio where price >= ?', (min_price,)):
        print(row)
finally:
    # FIX: the connection was never closed (resource leak).
    db.close()
| [
"alonsovau@outlook.com"
] | alonsovau@outlook.com |
af0ea6669de535070a72eb729a27acc46e30001c | 12f006a0e5d75ef2349d4ae519c1c9cac5309761 | /Solution_30.py | aba2dee1902b3a33bd33369a0a08ad7a470b376b | [] | no_license | TimothySjiang/leetcodepy | c613db16282eade713e01b7d641c0f5b341ec84b | ef64e46b8833a684b8b0355ce576b767a0e03596 | refs/heads/master | 2020-07-01T14:48:35.953841 | 2020-01-12T06:19:44 | 2020-01-12T06:19:44 | 201,199,810 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | class Solution:
def findSubstring(self, s: str, words: List[str]) -> List[int]:
if not words: return []
dic = collections.Counter(words)
wlength = len(words[0])
res = []
for i in range(wlength):
window = collections.Counter()
count = 0
for j in range(i, len(s), wlength):
word = s[j:j + wlength]
if word in dic:
window[word] += 1
count += 1
while window[word] > dic[word]:
pos = j - wlength * (count - 1)
rword = s[pos:pos + wlength]
window[rword] -= 1
count -= 1
else:
window = collections.Counter()
count = 0
if count == len(words):
res.append(j - wlength * (count - 1))
return res | [
"shjiang@ucdavis.edu"
] | shjiang@ucdavis.edu |
4c1e3bcd5f2981b511d1a56a5dfeedeb6ff47a50 | 862af34d5a1ebb3eb700e40c5877e394ee845b5f | /src/core/src/tortuga/kit/actions/componentActions.py | 9f4f2300e8bce6ec3981a6041c92e30357532377 | [
"Apache-2.0"
] | permissive | ffxf/tortuga | 4e59617153de92cfc1a9b7bd95f8bae5ea8e1134 | a20ef7d0274be18bdaae6b9fbe879cd0473eaf1b | refs/heads/master | 2021-01-25T13:42:05.809188 | 2018-03-01T20:31:32 | 2018-03-01T20:31:32 | 123,608,729 | 0 | 0 | null | 2018-03-02T17:21:59 | 2018-03-02T17:21:59 | null | UTF-8 | Python | false | false | 11,275 | py | # Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=no-self-use,no-member
import os.path
import traceback
import functools
from tortuga.kit.actions.actionsBase import ActionsBase
from tortuga.exceptions.configurationError import ConfigurationError
from tortuga.db.softwareProfileDbApi import SoftwareProfileDbApi
class ComponentActions(ActionsBase): \
# pylint: disable=too-many-public-methods
'''
A component is an indivisible "feature" of an application;
e.g., a telnet kit might contain a client component and a server component.
These are the actions a component must perform on the Installer Node in
the context of the NODE-GROUP-EDITOR command:
enable Invoked on the Installer Node when enabling the
component within a given softwareProfileName.
disable Invoked on the Installer Node when disabling the
component within a given softwareProfileName.
These actions provides 3 hooks for a component to "hook-into" the
action at various point:
pre_<action> Called before 'action', this provides the
component with an opportunity to make decisions
about the work to perform (when 'action' is
called), before actually doing any work. The
'pre' method is provided with a list of all the
components being invoked so that it may base
decisions in the context of the other components.
<action> The action.
post_<action> Called after 'action', this provides the component
with an opportunity to perform any post-processing.
The 'post' method is provided with a list of all
the components being invoked so that it may base
decisions in the context of the other components.
Note that when processing a list of components, the 'pre' methods of
all components are invoked first, followed by the 'action' methods
of all components, finally the 'post' methods of all components.
These are the actions a component must perform on the Installer Node in
the context of the ADDHOST command:
add_host Invoked on the Installer Node when a new host is added
to the given softwareProfileName.
delete_host Invoked on the Installer Node when a host is deleted
from the given softwareProfileName.
These are the actions a component must perform on the Installer Node in
the context of the GENCONFIG command:
configure Invoked on the Installer Node when the component
may need to reconfigure itself within a given
softwareProfileName.
These are the actions a component must perform on the Compute Node(s)
in the context of the CFMCLIENT command:
pre_install Invoked on the Compute Node(s) before the native
package manager installs the component.
post_install Invoked on the Compute Node(s) after the native
package manager installs the component. This would
be the likely place to start a service, if applicable.
pre_remove Invoked on the Compute Node(s) before the native
package manager removes the component. This would
be the likely place to stop a service, if applicable.
post_remove Invoked on the Compute Node(s) after the native
package manager removes the component.
A logger instance is also added to the base class so all derived components
have a logger.
_logger A logger instance for creating log messages
'''
def __init__(self, kit):
'''
Arguments:
cname The name of the component as stored in the database.
If not given, defaults to the name of the class,
all lower case.
Attributes:
kit The containing kit
'''
super(ComponentActions, self).__init__()
if not hasattr(self, '__component_name__'):
raise Exception(
'Component class [{0}] does not have __component_name__ defined'.format(self.__class__.__name__))
# Set by KitActions.add_component()
self.kit = kit
def getConfigFile(self):
# Overridden from ActionsBase
return os.path.join(self.kit.getConfigBase(),
'%s-component.conf' % (self.__component_name__)) \
if self.kit else None
def getLogger(self):
return self._logger
# NODE-GROUP-EDITOR Hooks (Installer Node)
def pre_enable(self, softwareProfileName, *pargs, **kargs):
# pylint: disable=unused-argument
'''Invoked on the Installer Node before enabling the component'''
self.__trace(*pargs, **kargs)
def enable(self, softwareProfileName, *pargs, **kargs): \
# pylint: disable=unused-argument
'''Invoked on the Installer Node when enabling the component'''
self.__trace(*pargs, **kargs)
def post_enable(self, softwareProfileName, *pargs, **kargs): \
# pylint: disable=unused-argument
'''Invoked on the Installer Node after enabling the component'''
self.__trace(*pargs, **kargs)
def pre_disable(self, softwareProfileName, *pargs, **kargs): \
# pylint: disable=unused-argument
'''Invoked on the Installer Node before disabling the component'''
self.__trace(*pargs, **kargs)
def disable(self, softwareProfileName, *pargs, **kargs): \
# pylint: disable=unused-argument
'''Invoked on the Installer Node when disabling the component'''
self.__trace(*pargs, **kargs)
def post_disable(self, softwareProfileName, *pargs, **kargs): \
# pylint: disable=unused-argument
'''Invoked on the Installer Node after disabling the component'''
self.__trace(*pargs, **kargs)
def get_cloud_config(self, node, hwprofile, swprofile, user_data,
*pargs, **kargs): \
# pylint: disable=unused-argument
self.__trace(*pargs, **kargs)
def pre_add_host(self, hwprofilename, swprofilename, hostname, ip,
*pargs, **kargs): \
# pylint: disable=unused-argument
'''
This component action is typically called prior to committing new nodes
to database. It is intended to be able to do operations such as
updating DNS records prior to a bulk operation completing.
'''
self.__trace(*pargs, **kargs)
def add_host(self, hardwareProfileName, softwareProfileName, nodes,
*pargs, **kargs): \
# pylint: disable=unused-argument
'''
Invoked on the Installer Node when a new host is added to a
software profile
'''
self.__trace(*pargs, **kargs)
def pre_delete_host(self, hardwareProfileName, softwareProfileName, nodes,
*pargs, **kargs): \
# pylint: disable=unused-argument
'''
Invoked on the Installer Node when a host is deleted from a
software profile.
'''
self.__trace(*pargs, **kargs)
def delete_host(self, hardwareProfileName, softwareProfileName, nodes,
*pargs, **kargs): \
# pylint: disable=unused-argument
'''
Invoked on the Installer Node when a host is deleted from a
software profile.
'''
self.__trace(*pargs, **kargs)
def refresh(self, softwareProfiles, *pargs, **kargs): \
# pylint: disable=unused-argument
self.__trace(*pargs, **kargs)
# GENCONFIG Hooks (Installer Node)
def configure(self, softwareProfileName, *pargs, **kargs): \
# pylint: disable=unused-argument
'''Invoked on the Installer Node to configure the component'''
self.__trace(*pargs, **kargs)
def post_install(self, *pargs, **kargs):
'''
Invoked on the Compute Node(s) after the native package manager
installs the component.
This would be the likely place to start a service, if applicable.
'''
self.__trace(*pargs, **kargs)
def pre_remove(self, *pargs, **kargs):
'''
Invoked on the Compute Node(s) before the native package manager
removes the component.
This would be the likely place to stop a service, if applicable.
'''
self.__trace(*pargs, **kargs)
def post_remove(self, *pargs, **kargs):
'''
Invoked on the Compute Node(s) after the native package manager
removes the component.
'''
self.__trace(*pargs, **kargs)
# Private
def __trace(self, *pargs, **kargs):
stack = traceback.extract_stack()
funcname = stack[-2][2]
self._logger.debug('-- (pass) %s::%s %s %s' % (
self.__class__.__name__, funcname, pargs, kargs))
def get_puppet_args(self, dbSoftwareProfile, dbHardwareProfile): \
# pylint: disable=unused-argument
return {}
def installer_only(func):
"""Decorator function for Component.pre_enable() method to prevent
enabling on a non-installer software profile
"""
@functools.wraps(func)
def pre_enable_wrapper(cls, softwareProfileName, *pargs, **kargs):
swprofile = SoftwareProfileDbApi().getSoftwareProfile(
softwareProfileName)
if swprofile.getType() != 'installer':
raise ConfigurationError(
'Component [{0}] can only be enabled on Installer software'
' profile'.format(cls.__component_name__))
return func(cls, softwareProfileName, *pargs, **kargs)
return pre_enable_wrapper
def compute_only(func):
"""Decorator function for Component.pre_enable() method to prevent
enabling on a non-compute software profile"""
@functools.wraps(func)
def pre_enable_wrapper(cls, softwareProfileName, *pargs, **kargs):
swprofile = SoftwareProfileDbApi().getSoftwareProfile(
softwareProfileName)
if swprofile.getType() == 'installer':
raise ConfigurationError(
'Component [{0}] can only be enabled on compute software'
' profiles'.format(cls.__component_name__))
return func(cls, softwareProfileName, *pargs, **kargs)
return pre_enable_wrapper
| [
"mfrisch@univa.com"
] | mfrisch@univa.com |
ca29c795c51b528698ae8754a089f8d1bed72243 | e5e2b7da41fda915cb849f031a0223e2ac354066 | /sdk/python/pulumi_azure_native/compute/image.py | 0916c8433c77edb30f7a0b78e7bd1d983a6103ed | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | johnbirdau/pulumi-azure-native | b7d3bdddeb7c4b319a7e43a892ddc6e25e3bfb25 | d676cc331caa0694d8be99cb90b93fa231e3c705 | refs/heads/master | 2023-05-06T06:48:05.040357 | 2021-06-01T20:42:38 | 2021-06-01T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,809 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ImageArgs', 'Image']
@pulumi.input_type
class ImageArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
extended_location: Optional[pulumi.Input['ExtendedLocationArgs']] = None,
hyper_v_generation: Optional[pulumi.Input[Union[str, 'HyperVGenerationTypes']]] = None,
image_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
source_virtual_machine: Optional[pulumi.Input['SubResourceArgs']] = None,
storage_profile: Optional[pulumi.Input['ImageStorageProfileArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Image resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input['ExtendedLocationArgs'] extended_location: The extended location of the Image.
:param pulumi.Input[Union[str, 'HyperVGenerationTypes']] hyper_v_generation: Specifies the HyperVGenerationType of the VirtualMachine created from the image. From API Version 2019-03-01 if the image source is a blob, then we need the user to specify the value, if the source is managed resource like disk or snapshot, we may require the user to specify the property if we cannot deduce it from the source managed resource.
:param pulumi.Input[str] image_name: The name of the image.
:param pulumi.Input[str] location: Resource location
:param pulumi.Input['SubResourceArgs'] source_virtual_machine: The source virtual machine from which Image is created.
:param pulumi.Input['ImageStorageProfileArgs'] storage_profile: Specifies the storage settings for the virtual machine disks.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if extended_location is not None:
pulumi.set(__self__, "extended_location", extended_location)
if hyper_v_generation is not None:
pulumi.set(__self__, "hyper_v_generation", hyper_v_generation)
if image_name is not None:
pulumi.set(__self__, "image_name", image_name)
if location is not None:
pulumi.set(__self__, "location", location)
if source_virtual_machine is not None:
pulumi.set(__self__, "source_virtual_machine", source_virtual_machine)
if storage_profile is not None:
pulumi.set(__self__, "storage_profile", storage_profile)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="extendedLocation")
def extended_location(self) -> Optional[pulumi.Input['ExtendedLocationArgs']]:
"""
The extended location of the Image.
"""
return pulumi.get(self, "extended_location")
@extended_location.setter
def extended_location(self, value: Optional[pulumi.Input['ExtendedLocationArgs']]):
pulumi.set(self, "extended_location", value)
@property
@pulumi.getter(name="hyperVGeneration")
def hyper_v_generation(self) -> Optional[pulumi.Input[Union[str, 'HyperVGenerationTypes']]]:
"""
Specifies the HyperVGenerationType of the VirtualMachine created from the image. From API Version 2019-03-01 if the image source is a blob, then we need the user to specify the value, if the source is managed resource like disk or snapshot, we may require the user to specify the property if we cannot deduce it from the source managed resource.
"""
return pulumi.get(self, "hyper_v_generation")
@hyper_v_generation.setter
def hyper_v_generation(self, value: Optional[pulumi.Input[Union[str, 'HyperVGenerationTypes']]]):
pulumi.set(self, "hyper_v_generation", value)
@property
@pulumi.getter(name="imageName")
def image_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the image.
"""
return pulumi.get(self, "image_name")
@image_name.setter
def image_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="sourceVirtualMachine")
def source_virtual_machine(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The source virtual machine from which Image is created.
"""
return pulumi.get(self, "source_virtual_machine")
@source_virtual_machine.setter
def source_virtual_machine(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "source_virtual_machine", value)
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> Optional[pulumi.Input['ImageStorageProfileArgs']]:
"""
Specifies the storage settings for the virtual machine disks.
"""
return pulumi.get(self, "storage_profile")
@storage_profile.setter
def storage_profile(self, value: Optional[pulumi.Input['ImageStorageProfileArgs']]):
pulumi.set(self, "storage_profile", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class Image(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
extended_location: Optional[pulumi.Input[pulumi.InputType['ExtendedLocationArgs']]] = None,
hyper_v_generation: Optional[pulumi.Input[Union[str, 'HyperVGenerationTypes']]] = None,
image_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
source_virtual_machine: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
storage_profile: Optional[pulumi.Input[pulumi.InputType['ImageStorageProfileArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
The source user image virtual hard disk. The virtual hard disk will be copied before being attached to the virtual machine. If SourceImage is provided, the destination virtual hard drive must not exist.
API Version: 2020-12-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['ExtendedLocationArgs']] extended_location: The extended location of the Image.
:param pulumi.Input[Union[str, 'HyperVGenerationTypes']] hyper_v_generation: Specifies the HyperVGenerationType of the VirtualMachine created from the image. From API Version 2019-03-01 if the image source is a blob, then we need the user to specify the value, if the source is managed resource like disk or snapshot, we may require the user to specify the property if we cannot deduce it from the source managed resource.
:param pulumi.Input[str] image_name: The name of the image.
:param pulumi.Input[str] location: Resource location
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] source_virtual_machine: The source virtual machine from which Image is created.
:param pulumi.Input[pulumi.InputType['ImageStorageProfileArgs']] storage_profile: Specifies the storage settings for the virtual machine disks.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ImageArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The source user image virtual hard disk. The virtual hard disk will be copied before being attached to the virtual machine. If SourceImage is provided, the destination virtual hard drive must not exist.
API Version: 2020-12-01.
:param str resource_name: The name of the resource.
:param ImageArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ImageArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
extended_location: Optional[pulumi.Input[pulumi.InputType['ExtendedLocationArgs']]] = None,
hyper_v_generation: Optional[pulumi.Input[Union[str, 'HyperVGenerationTypes']]] = None,
image_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
source_virtual_machine: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
storage_profile: Optional[pulumi.Input[pulumi.InputType['ImageStorageProfileArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ImageArgs.__new__(ImageArgs)
__props__.__dict__["extended_location"] = extended_location
__props__.__dict__["hyper_v_generation"] = hyper_v_generation
__props__.__dict__["image_name"] = image_name
__props__.__dict__["location"] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["source_virtual_machine"] = source_virtual_machine
__props__.__dict__["storage_profile"] = storage_profile
__props__.__dict__["tags"] = tags
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute:Image"), pulumi.Alias(type_="azure-native:compute/v20160430preview:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20160430preview:Image"), pulumi.Alias(type_="azure-native:compute/v20170330:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20170330:Image"), pulumi.Alias(type_="azure-native:compute/v20171201:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20171201:Image"), pulumi.Alias(type_="azure-native:compute/v20180401:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20180401:Image"), pulumi.Alias(type_="azure-native:compute/v20180601:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20180601:Image"), pulumi.Alias(type_="azure-native:compute/v20181001:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20181001:Image"), pulumi.Alias(type_="azure-native:compute/v20190301:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20190301:Image"), pulumi.Alias(type_="azure-native:compute/v20190701:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20190701:Image"), pulumi.Alias(type_="azure-native:compute/v20191201:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20191201:Image"), pulumi.Alias(type_="azure-native:compute/v20200601:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20200601:Image"), pulumi.Alias(type_="azure-native:compute/v20201201:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20201201:Image"), pulumi.Alias(type_="azure-native:compute/v20210301:Image"), pulumi.Alias(type_="azure-nextgen:compute/v20210301:Image")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Image, __self__).__init__(
'azure-native:compute:Image',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Image':
"""
Get an existing Image resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ImageArgs.__new__(ImageArgs)
__props__.__dict__["extended_location"] = None
__props__.__dict__["hyper_v_generation"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["source_virtual_machine"] = None
__props__.__dict__["storage_profile"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
return Image(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="extendedLocation")
def extended_location(self) -> pulumi.Output[Optional['outputs.ExtendedLocationResponse']]:
"""
The extended location of the Image.
"""
return pulumi.get(self, "extended_location")
@property
@pulumi.getter(name="hyperVGeneration")
def hyper_v_generation(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the HyperVGenerationType of the VirtualMachine created from the image. From API Version 2019-03-01 if the image source is a blob, then we need the user to specify the value, if the source is managed resource like disk or snapshot, we may require the user to specify the property if we cannot deduce it from the source managed resource.
"""
return pulumi.get(self, "hyper_v_generation")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="sourceVirtualMachine")
def source_virtual_machine(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The source virtual machine from which Image is created.
"""
return pulumi.get(self, "source_virtual_machine")
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> pulumi.Output[Optional['outputs.ImageStorageProfileResponse']]:
"""
Specifies the storage settings for the virtual machine disks.
"""
return pulumi.get(self, "storage_profile")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
| [
"noreply@github.com"
] | johnbirdau.noreply@github.com |
8b5374fa65e8bcdc4bb966337665b01ce2426e3e | 836097c64ed9c1682961351641ab7f8ecf3438cc | /alphastarmini/core/arch/entity_encoder.py | 6df313f0fb4a2bdc680b5ca7a4b9ddfac946fd21 | [
"Apache-2.0"
] | permissive | liuruoze/Raw-vs-Human-in-AlphaStar | 144ac7dd0abf462adb34d526ef72a7a030b37e3e | 99acae772eb5c93000dca87b78d6acdf7699f331 | refs/heads/main | 2023-08-02T18:42:27.006195 | 2021-09-06T02:50:25 | 2021-09-06T02:50:25 | 403,462,800 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,321 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
" Entity Encoder."
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from alphastarmini.lib.alphastar_transformer import Transformer
from alphastarmini.lib import utils as L
from alphastarmini.lib.hyper_parameters import Arch_Hyper_Parameters as AHP
from alphastarmini.lib.hyper_parameters import MiniStar_Arch_Hyper_Parameters as MAHP
from alphastarmini.lib.hyper_parameters import StarCraft_Hyper_Parameters as SCHP
__author__ = "Ruo-Ze Liu"
debug = False
def dec2bin(x, bits):
    """Expand an integer tensor into `bits` binary digits along a new last
    axis, most-significant bit first, returned as a float tensor."""
    place_values = torch.arange(bits - 1, -1, -1).to(x.device, x.dtype)
    place_values = 2 ** place_values
    expanded = x.unsqueeze(-1)
    return (expanded.bitwise_and(place_values) != 0).float()
def bin2dec(b, bits):
    """Collapse the last axis of `b` (MSB-first binary digits) back into the
    integer values it encodes, in `b`'s dtype."""
    place_values = 2 ** torch.arange(bits - 1, -1, -1).to(b.device, b.dtype)
    return (b * place_values).sum(-1)
class EntityEncoder(nn.Module):
    '''
    Encodes the per-entity observation list into embeddings.

    Inputs: entity_list
    Outputs:
        embedded_entity - A 1D tensor of the embedded entities
        entity_embeddings - The embedding of each entity (as opposed to
            `embedded_entity`, which has one embedding for all entities)
    '''

    def __init__(self, dropout=0.1, original_256=AHP.original_256,
                 original_1024=AHP.original_1024,
                 original_128=AHP.original_128):
        """Build the encoder layers and record the one-hot size caps.

        Args:
            dropout: dropout probability for the output dropout layer.
            original_256 / original_1024 / original_128: channel widths taken
                from the architecture hyper-parameters (model dim, FFN inner
                dim, and per-head key/value dim respectively).
        """
        super().__init__()
        # Below are the maximum values used for one-hot encoding each entity
        # field, following the AlphaStar specification. Several caps are one
        # larger than AlphaStar's published value to avoid out-of-range
        # one-hot indices observed at runtime (noted inline).
        self.max_entities = AHP.max_entities
        self.max_unit_type = SCHP.max_unit_type  # default is 256
        self.max_alliance = 5
        self.max_health = 1500
        self.max_shield = 1000
        self.max_energy = 200
        self.max_cargo_space_used = 9
        self.max_cargo_space_maximum = 9
        self.max_display_type = 5  # AlphaStar: 4. RuntimeError: index 4 is out of bounds for dimension 1 with size 4
        self.max_cloakState = 5
        self.max_is_powered = 2
        self.max_is_hallucination = 2
        self.max_is_active = 2
        self.max_is_on_screen = 2
        self.max_is_in_cargo = 2
        self.max_current_minerals = 19
        self.max_current_vespene = 26
        self.max_mined_minerals = 1800
        self.max_mined_vespene = 2500
        self.max_assigned_harvesters = 25  # AlphaStar: 24. RuntimeError: index 24 is out of bounds for dimension 1 with size 24
        self.max_ideal_harvesters = 17
        self.max_weapon_cooldown = 32
        self.max_order_queue_length = 9
        self.max_order_progress = 10
        self.max_order_ids = SCHP.max_order_ids
        self.max_buffer_ids = SCHP.max_buffer_ids
        self.max_add_on_type = SCHP.max_add_on_type
        self.max_weapon_upgrades = 4
        self.max_armor_upgrades = 4
        self.max_shield_upgrades = 4
        self.max_was_selected = 2
        self.max_was_targeted = 2

        # Encoder layers: embed each raw entity vector to the model width,
        # run a small transformer over the entity sequence, then project.
        self.dropout = nn.Dropout(dropout)
        self.embedd = nn.Linear(AHP.embedding_size, original_256)
        # NOTE(review): the transformer uses a hard-coded dropout of 0.1
        # rather than the `dropout` argument -- confirm this is intended.
        self.transformer = Transformer(d_model=original_256, d_inner=original_1024,
                                       n_layers=3, n_head=2, d_k=original_128,
                                       d_v=original_128, dropout=0.1)
        # 1x1 conv over the entity axis, used to produce entity_embeddings.
        self.conv1 = nn.Conv1d(original_256, original_256, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.fc1 = nn.Linear(original_256, original_256)
        # Count of real (non-padding) entities; updated during preprocessing.
        self.real_entities_size = 0
# The fields of each entity in `entity_list` are first preprocessed and concatenated so that \
# there is a single 1D tensor for each entity. Fields are preprocessed as follows:
def preprocess(self, entity_list):
#all_entities_tensor = torch.zeros(self.max_entities, embedding_size)
entity_tensor_list = []
index = 0
for entity in entity_list:
field_encoding_list = []
# comments below have this style:
# A: alphastar description
# B: s2clientprotocol description
# C: my notes
# A: unit_type: One-hot with maximum self.max_unit_type (including unknown unit-type)
# B: optional uint32 unit_type = 4;
# C: with maximum self.max_unit_type
unit_type = entity.unit_type
print('unit_type:', unit_type) if debug else None
print('self.max_unit_type:', self.max_unit_type) if debug else None
unit_type_index = L.unit_tpye_to_unit_type_index(unit_type)
print('unit_type_index:', unit_type_index) if debug else None
assert unit_type_index >= 0 and unit_type_index <= self.max_unit_type
unit_type_encoding = L.to_one_hot(torch.tensor([unit_type_index]), self.max_unit_type).reshape(1, -1)
print('unit_type_encoding:', unit_type_encoding) if debug else None
field_encoding_list.append(unit_type_encoding)
# A: unit_attributes: One boolean for each of the 13 unit attributes
# B: not found
# C: lack
unit_attributes_encoding = torch.tensor(entity.unit_attributes, dtype=torch.float).reshape(1, -1)
print('unit_attributes_encoding:', unit_attributes_encoding) if debug else None
field_encoding_list.append(unit_attributes_encoding)
# A: alliance: One-hot with maximum 5 (including unknown alliance)
# B: optional Alliance alliance = 2; not max is 4, not 5
# C: use A
alliance_encoding = L.one_hot_embedding(torch.tensor([entity.alliance]), self.max_alliance).reshape(1, -1)
print('alliance_encoding:', alliance_encoding) if debug else None
field_encoding_list.append(alliance_encoding)
# A: build_progress: Float of build progress, in [0, 1]
# B: optional float build_progress = 9; // Range: [0.0, 1.0]
# C: None
build_progress_encoding = torch.tensor([entity.build_progress], dtype=torch.float).reshape(1, -1)
print('build_progress_encoding:', build_progress_encoding) if debug else None
field_encoding_list.append(build_progress_encoding)
# A: display_type: One-hot with maximum 5
# B: note: in s2clientprotocol raw.proto, display type only has 4 values, type of enum DisplayType,
# C: we keep in consistent with s2clientprotocol
display_type_encoding = L.to_one_hot(torch.tensor([entity.display_type]), self.max_display_type).reshape(1, -1)
print('display_type_encoding:', display_type_encoding) if debug else None
field_encoding_list.append(display_type_encoding)
# A: x_position: Binary encoding of entity x-coordinate, in game units
# B: optional Point pos = 6;
# C: use np.unpackbits
x = entity.x
print('x:', x) if debug else None
x_encoding = torch.tensor(np.unpackbits(np.array([x], np.uint8)), dtype=torch.float).reshape(1, -1)
print('x_encoding:', x_encoding) if debug else None
field_encoding_list.append(x_encoding)
# A: y_position: Binary encoding of entity y-coordinate, in game units
# B: optional Point pos = 6;
# C: use np.unpackbits
y = entity.y
print('y:', y) if debug else None
y_encoding = torch.tensor(np.unpackbits(np.array([y], np.uint8)), dtype=torch.float).reshape(1, -1)
print('y_encoding:', y_encoding) if debug else None
field_encoding_list.append(y_encoding)
# A: current_minerals: One-hot of (current_minerals / 100) with maximum 19, rounding down
# B: optional int32 mineral_contents = 18; (maybe)
# C: I am not sure mineral_contents corrseponds to current_minerals
print('entity.current_minerals:', entity.current_minerals) if debug else None
current_minerals = int(entity.current_minerals / 100)
print('current_minerals:', current_minerals) if debug else None
current_minerals_encoding = L.to_one_hot(torch.tensor([current_minerals]), self.max_current_minerals).reshape(1, -1)
print('current_minerals_encoding.shape:', current_minerals_encoding.shape) if debug else None
# field_encoding_list.append(current_minerals_encoding)
# A: current_vespene: One-hot of (current_vespene / 100) with maximum 26, rounding down
# B: optional int32 vespene_contents = 19; (maybe)
# C: I am not sure vespene_contents corrseponds to current_vespene
print('entity.current_vespene:', entity.current_vespene) if debug else None
current_vespene = int(entity.current_vespene / 100)
print('current_vespene:', current_vespene) if debug else None
current_vespene_encoding = L.to_one_hot(torch.tensor([current_vespene]), self.max_current_vespene).reshape(1, -1)
print('current_vespene_encoding.shape:', current_vespene_encoding.shape) if debug else None
# field_encoding_list.append(current_vespene_encoding)
# A: mined_minerals: One-hot of sqrt(min(mined_minerals, 1800)) with maximum sqrt(1800), rounding down
# B: not found
# C: wait to be resolved by other ways
print('entity.mined_minerals:', entity.mined_minerals) if debug else None
mined_minerals = int(min(entity.mined_minerals, self.max_mined_minerals) ** 0.5)
print('mined_minerals:', mined_minerals) if debug else None
mined_minerals_encoding = L.to_one_hot(torch.tensor([mined_minerals]), int(self.max_mined_minerals ** 0.5) + 1).reshape(1, -1)
print('mined_minerals_encoding.shape:', mined_minerals_encoding.shape) if debug else None
# field_encoding_list.append(mined_minerals_encoding)
# A: mined_vespene: One-hot of sqrt(min(mined_vespene, 2500)) with maximum sqrt(2500), rounding down
# B: not found
# C: wait to be resolved by other ways
print('entity.mined_vespene:', entity.mined_vespene) if debug else None
mined_vespene = int(min(entity.mined_vespene, self.max_mined_vespene) ** 0.5)
print('mined_vespene:', mined_vespene) if debug else None
mined_vespene_encoding = L.to_one_hot(torch.tensor([mined_vespene]), int(self.max_mined_vespene ** 0.5) + 1).reshape(1, -1)
print('mined_vespene_encoding.shape:', mined_vespene_encoding.shape) if debug else None
# field_encoding_list.append(mined_vespene_encoding)
# A: assigned_harvesters: One-hot with maximum 24
# B: optional int32 assigned_harvesters = 28;
# C: None
assigned_harvesters_encoding = L.to_one_hot(torch.tensor([min(entity.assigned_harvesters, 24)]), self.max_assigned_harvesters).reshape(1, -1)
print('assigned_harvesters_encoding:', assigned_harvesters_encoding) if debug else None
# field_encoding_list.append(assigned_harvesters_encoding)
# A: ideal_harvesters: One-hot with maximum 17
# B: optional int32 ideal_harvesters = 29;
# C: None
ideal_harvesters_encoding = L.to_one_hot(torch.tensor([entity.ideal_harvesters]), self.max_ideal_harvesters).reshape(1, -1)
print('ideal_harvesters_encoding:', ideal_harvesters_encoding) if debug else None
# field_encoding_list.append(ideal_harvesters_encoding)
# A: order_queue_length: One-hot with maximum 9
# B: repeated UnitOrder orders = 22; Not populated for enemies;
# C: equal to FeatureUnit.order_length
order_queue_length = entity.order_length
order_queue_length_encoding = L.to_one_hot(torch.tensor([order_queue_length]), self.max_order_queue_length).reshape(1, -1)
print('order_queue_length_encoding:', order_queue_length_encoding) if debug else None
# field_encoding_list.append(order_queue_length_encoding)
# A: order_1: One-hot across all order IDs
# B: below is the definition of order
'''
message UnitOrder {
optional uint32 ability_id = 1;
oneof target {
Point target_world_space_pos = 2;
uint64 target_unit_tag = 3;
}
optional float progress = 4; // Progress of train abilities. Range: [0.0, 1.0]
}
'''
# C: actually this is across all ability_ids in orders, lack: a vector for all ability_ids
order_1 = entity.order_id_1
print('order_1:', order_1) if debug else None
order_1_encoding = L.to_one_hot(torch.tensor([order_1]), self.max_order_ids).reshape(1, -1)
print('order_1_encoding:', order_1_encoding) if debug else None
# field_encoding_list.append(order_1_encoding)
# A: buffs: Boolean for each buff of whether or not it is active. Only the first two buffs are tracked
# B: None
# C: in mAS, we ingore buff_id_2
buff_id_1 = entity.buff_id_1
print('buff_id_1:', buff_id_1) if debug else None
buff_id_1_encoding = L.to_one_hot(torch.tensor([buff_id_1]), self.max_buffer_ids).reshape(1, -1)
print('buff_id_1_encoding:', buff_id_1_encoding) if debug else None
# field_encoding_list.append(buff_id_1_encoding)
order_progress_1_encoding = torch.zeros(1, 1, dtype=torch.float)
order_progress_1_encoding_2 = torch.zeros(1, self.max_order_progress, dtype=torch.float)
order_progress_1 = entity.order_progress_1
print('order_progress_1:', order_progress_1) if debug else None
if order_progress_1 is not None:
order_progress_1_encoding = torch.tensor([order_progress_1 / 100.], dtype=torch.float).reshape(1, -1)
order_progress_1_encoding_2 = L.to_one_hot(torch.tensor([order_progress_1 / 10]),
self.max_order_progress).reshape(1, -1)
print('order_progress_1_encoding:', order_progress_1_encoding) if debug else None
field_encoding_list.append(order_progress_1_encoding)
print('order_progress_1_encoding_2:', order_progress_1_encoding_2) if debug else None
# field_encoding_list.append(order_progress_1_encoding_2)
entity_tensor = torch.cat(field_encoding_list, dim=1)
print('entity_tensor.shape:', entity_tensor.shape) if debug else None
# There are up to 512 of these preprocessed entities, and any entities after 512 are ignored.
if index < self.max_entities:
entity_tensor_list.append(entity_tensor)
else:
break
index = index + 1
all_entities_tensor = torch.cat(entity_tensor_list, dim=0)
# count how many real entities we have
self.real_entities_size = all_entities_tensor.shape[0]
print('self.real_entities_size:', self.real_entities_size) if debug else None
# We use a bias of -1e9 for any of the 512 entries that doesn't refer to an entity.
if all_entities_tensor.shape[0] < self.max_entities:
bias_length = self.max_entities - all_entities_tensor.shape[0]
bias = torch.zeros([bias_length, AHP.embedding_size])
bias[:, :] = -1e9
print('bias:', bias) if debug else None
print('bias.shape:', bias.shape) if debug else None
all_entities_tensor = torch.cat([all_entities_tensor, bias], dim=0)
return all_entities_tensor
def forward(self, x):
# assert the input shape is : batch_seq_size x entities_size x embeding_size
# note: because the feature size of entity is not equal to 256, so it can not fed into transformer directly.
# thus, we add a embedding layer to transfer it to right size.
print('entity_input is nan:', torch.isnan(x).any()) if debug else None
x = self.embedd(x)
# x is batch_entities_tensor (dim = 3). Shape: batch_size x entities_size x embeding_size
# change: x is batch_seq_entities_tensor (dim = 4). Shape: batch_size x seq_size x entities_size x embeding_size
print('x.shape:', x.shape) if debug else None
out = self.transformer(x)
print('out.shape:', out.shape) if debug else None
entity_embeddings = F.relu(self.conv1(F.relu(out).transpose(1, 2))).transpose(1, 2)
print('entity_embeddings.shape:', entity_embeddings.shape) if debug else None
# note, dim=1 means the mean is across all entities in one timestep
# The mean of the transformer output across across the units
# is fed through a linear layer of size 256 and a ReLU to yield `embedded_entity`
# masked by the missing entries
print('out.shape:', out.shape) if debug else None
masked_out = out[:, :self.real_entities_size, :]
print('masked_out.shape:', masked_out.shape) if debug else None
embedded_entity = F.relu(self.fc1(torch.mean(masked_out, dim=1, keepdim=False)))
print('embedded_entity:', embedded_entity) if debug else None
print('embedded_entity.shape:', embedded_entity.shape) if debug else None
return entity_embeddings, embedded_entity
class Entity(object):
    """Plain-data record for a single game unit, as consumed by EntityEncoder.

    Note the deliberate index shift: 0-based constructor arguments
    (``order_id_0``, ``order_progress_0``, ``buff_id_0``, ...) are stored as
    1-based attributes (``self.order_id_1``, ``self.order_progress_1``,
    ``self.buff_id_1``, ...), matching the names read by
    ``EntityEncoder.preprocess``.
    """

    def __init__(self, unit_type=1,
                 unit_attributes=None, alliance=0,
                 health=10, shield=20, energy=50,
                 cargo_space_taken=0, cargo_space_max=0, build_progress=0,
                 current_health_ratio=0.4, current_shield_ratio=0.5, current_energy_ratio=0.7,
                 health_max=100, shield_max=50, energy_max=40,
                 display_type=1, x=123, y=218, is_cloaked=3, is_powered=True, is_hallucination=False, is_active=True,
                 is_on_screen=True, is_in_cargo=False, current_minerals=1000, current_vespene=1500, mined_minerals=500,
                 mined_vespene=300, assigned_harvesters=8, ideal_harvesters=14, weapon_cooldown=5.0, orders=None,
                 attack_upgrade_level=2, armor_upgrade_level=1, shield_upgrade_level=0, is_selected=True, is_targeted=False,
                 order_length=4, order_id_0=1, order_id_1=0, order_id_2=3, order_id_3=2, order_progress_0=50,
                 order_progress_1=95, buff_id_0=12, buff_id_1=8, addon_unit_type=4, tag=0):
        super().__init__()
        # None sentinels replace the previous mutable list defaults so every
        # instance gets its own fresh list (avoids the shared
        # mutable-default-argument pitfall).
        if unit_attributes is None:
            unit_attributes = [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0]
        if orders is None:
            orders = [0, 1, 3, 0]
        # --- identity ---
        self.unit_type = unit_type
        self.unit_attributes = unit_attributes
        self.alliance = alliance
        # --- vitals / capacities ---
        self.health = health
        self.shield = shield
        self.energy = energy
        self.cargo_space_taken = cargo_space_taken
        self.cargo_space_max = cargo_space_max
        self.build_progress = build_progress
        self.current_health_ratio = current_health_ratio
        self.current_shield_ratio = current_shield_ratio
        self.current_energy_ratio = current_energy_ratio
        self.health_max = health_max
        self.shield_max = shield_max
        self.energy_max = energy_max
        # --- display, position and state flags ---
        self.display_type = display_type
        self.x = x
        self.y = y
        self.is_cloaked = is_cloaked
        self.is_powered = is_powered
        self.is_hallucination = is_hallucination
        self.is_active = is_active
        self.is_on_screen = is_on_screen
        self.is_in_cargo = is_in_cargo
        # --- economy ---
        self.current_minerals = current_minerals
        self.current_vespene = current_vespene
        self.mined_minerals = mined_minerals
        self.mined_vespene = mined_vespene
        self.assigned_harvesters = assigned_harvesters
        self.ideal_harvesters = ideal_harvesters
        # --- combat and selection ---
        self.weapon_cooldown = weapon_cooldown
        self.attack_upgrade_level = attack_upgrade_level
        self.armor_upgrade_level = armor_upgrade_level
        self.shield_upgrade_level = shield_upgrade_level
        self.is_selected = is_selected
        self.is_targeted = is_targeted
        # --- orders and buffs (stored 1-based; see class docstring) ---
        # NOTE(review): the raw `orders` list itself is accepted but never
        # stored (only the split order_id_* fields are kept), matching the
        # original behavior — confirm this is intentional.
        self.order_length = order_length
        self.order_id_1 = order_id_0
        self.order_id_2 = order_id_1
        self.order_id_3 = order_id_2
        self.order_id_4 = order_id_3
        self.order_progress_1 = order_progress_0
        self.order_progress_2 = order_progress_1
        self.buff_id_1 = buff_id_0
        self.buff_id_2 = buff_id_1
        self.addon_unit_type = addon_unit_type
        self.tag = tag

    def __str__(self):
        return 'unit_type: ' + str(self.unit_type) + ', alliance: ' + str(self.alliance) + ', health: ' + str(self.health)
def test():
    """Ad-hoc smoke test: preprocess two hand-built entities and run them
    through the encoder as a batch of two identical samples."""
    print(torch.tensor(np.unpackbits(np.array([25], np.uint8))))

    batch_size = 2
    friendly = Entity(115, [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0], 0, 100, 60, 50, 4, 8, 95, 0.2, 0.0, 0.0, 140, 60, 100,
                      1, 123, 218, 3, True, False, True, True, False, 0, 0, 0, 0, 0, 0, 3.0, [2, 3], 2, 1, 0, True, False)
    hostile = Entity(1908, [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0], 2, 1500, 0, 200, 0, 4, 15, 0.5, 0.8, 0.5, 1500, 0, 250,
                     2, 69, 7, 3, True, False, False, True, False, 0, 0, 0, 0, 10, 16, 0.0, [1], 1, 1, 0, False, False)
    entities = [friendly, hostile]

    encoder = EntityEncoder()
    entities_tensor = encoder.preprocess(entities)
    if debug:
        print('entities_tensor:', entities_tensor)
        print('entities_tensor.shape:', entities_tensor.shape)

    # entities_tensor (dim = 2): entities_size x embeding_size; add a batch dim.
    entities_tensor = entities_tensor.unsqueeze(0)
    if batch_size == 2:
        duplicate = entities_tensor.detach().clone()
        batch_entities_tensor = torch.cat([entities_tensor, duplicate], dim=0)
    if debug:
        print('batch_entities_tensor.shape:', batch_entities_tensor.shape)

    entity_embeddings, embedded_entity = encoder.forward(batch_entities_tensor)
    if debug:
        print('entity_embeddings.shape:', entity_embeddings.shape)
        print('embedded_entity.shape:', embedded_entity.shape)
        print("This is a test!")
if __name__ == '__main__':
    # Manual entry point: run the encoder smoke test when executed directly.
    test()
| [
"liuruoze@163.com"
] | liuruoze@163.com |
8061a30617a92741c6620ee3fc796b7d0247231e | 180a3795a115c0da71078f81efbde45ab2025ca0 | /interview/头条/old/b.py | c64fd56725c2a7169db3defd92ff17ef9da526c9 | [] | no_license | lizhe960118/Machine-Learning | a7593e6788433408bcf072e5e25672debd931ee4 | 2d6fe2373839964645d632895ed2a7dcb9de48b0 | refs/heads/master | 2020-03-31T15:53:57.408037 | 2019-08-18T12:29:11 | 2019-08-18T12:29:11 | 152,355,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 877 | py | N = int(input())
# Read an N x N matrix of pairwise delays from stdin (N was read just above).
delay_time = [[int(a) for a in input().split()] for _ in range(N)]
# a, b presumably name two endpoint nodes and k a delay/time budget — the
# visible logic below only uses k; TODO confirm against the problem statement.
a, b, k = [int(a) for a in input().split()]
def flayAlgoritm(graph):
    """All-pairs shortest paths via the Floyd-Warshall algorithm.

    graph: square matrix of direct edge weights. Returns a new matrix of
    minimum path weights; the input is left unmodified. O(n^3) time.
    """
    node_count = len(graph)
    # Seed the result with the direct edge weights.
    dist = [[0 for _ in range(len(graph[0]))] for _ in range(node_count)]
    for src in range(node_count):
        for dst in range(node_count):
            dist[src][dst] = graph[src][dst]
    # Relax every pair through each candidate intermediate node.
    for via in range(node_count):
        for src in range(node_count):
            for dst in range(node_count):
                candidate = dist[src][via] + dist[via][dst]
                if candidate < dist[src][dst]:
                    dist[src][dst] = candidate
    return dist
minDistance = flayAlgoritm(delay_time)

# Smallest direct delay over the strict lower triangle (i > j) of the matrix.
temp = [delay_time[i][j] for i in range(len(delay_time)) for j in range(len(delay_time[0])) if i > j]
min_delay = min(temp)

# Remaining budget after the cheapest single hop.
# NOTE(review): minDistance, a and b are computed/read but never used below —
# the shortest-path check looks incomplete; confirm against the original task.
t = k - min_delay
if t <= 0:
    print(-1)
else:
    for i in range(t):
print(i) | [
"2957308424@qq.com"
] | 2957308424@qq.com |
e8de4748c98a1b75a39da0bd735e788a86756223 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/ospf/throttlestats1h.py | 30e8b996096943eca4c650ced3e0c61509c36231 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,269 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ThrottleStats1h(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.ospf.ThrottleStats1h", "Ospf Throttle Packets")
counter = CounterMeta("floodPktSendTokenThrottle", CounterCategory.COUNTER, "packets", "Flood Packet Send Token Throttle")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "floodPktSendTokenThrottleLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "floodPktSendTokenThrottleCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "floodPktSendTokenThrottlePer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "floodPktSendTokenThrottleMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "floodPktSendTokenThrottleMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "floodPktSendTokenThrottleAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "floodPktSendTokenThrottleSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "floodPktSendTokenThrottleBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "floodPktSendTokenThrottleThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "floodPktSendTokenThrottleTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "floodPktSendTokenThrottleTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "floodPktSendTokenThrottleRate"
meta._counters.append(counter)
counter = CounterMeta("floodPktSendIpThrottle", CounterCategory.COUNTER, "packets", "Flood Packet Send IP Throttle")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "floodPktSendIpThrottleLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "floodPktSendIpThrottleCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "floodPktSendIpThrottlePer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "floodPktSendIpThrottleMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "floodPktSendIpThrottleMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "floodPktSendIpThrottleAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "floodPktSendIpThrottleSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "floodPktSendIpThrottleBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "floodPktSendIpThrottleThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "floodPktSendIpThrottleTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "floodPktSendIpThrottleTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "floodPktSendIpThrottleRate"
meta._counters.append(counter)
meta.moClassName = "ospfThrottleStats1h"
meta.rnFormat = "CDospfThrottleStats1h"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current Ospf Throttle Packets stats in 1 hour"
meta.writeAccessMask = 0x8008020040001
meta.readAccessMask = 0x8008020040001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.ospf.IfStats")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.ospf.ThrottleStats")
meta.rnPrefixes = [
('CDospfThrottleStats1h', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "floodPktSendIpThrottleAvg", "floodPktSendIpThrottleAvg", 49352, PropCategory.IMPLICIT_AVG)
prop.label = "Flood Packet Send IP Throttle average value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleAvg", prop)
prop = PropMeta("str", "floodPktSendIpThrottleBase", "floodPktSendIpThrottleBase", 49347, PropCategory.IMPLICIT_BASELINE)
prop.label = "Flood Packet Send IP Throttle baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleBase", prop)
prop = PropMeta("str", "floodPktSendIpThrottleCum", "floodPktSendIpThrottleCum", 49348, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Flood Packet Send IP Throttle cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleCum", prop)
prop = PropMeta("str", "floodPktSendIpThrottleLast", "floodPktSendIpThrottleLast", 49346, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Flood Packet Send IP Throttle current value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleLast", prop)
prop = PropMeta("str", "floodPktSendIpThrottleMax", "floodPktSendIpThrottleMax", 49351, PropCategory.IMPLICIT_MAX)
prop.label = "Flood Packet Send IP Throttle maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleMax", prop)
prop = PropMeta("str", "floodPktSendIpThrottleMin", "floodPktSendIpThrottleMin", 49350, PropCategory.IMPLICIT_MIN)
prop.label = "Flood Packet Send IP Throttle minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleMin", prop)
prop = PropMeta("str", "floodPktSendIpThrottlePer", "floodPktSendIpThrottlePer", 49349, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Flood Packet Send IP Throttle periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottlePer", prop)
prop = PropMeta("str", "floodPktSendIpThrottleRate", "floodPktSendIpThrottleRate", 49357, PropCategory.IMPLICIT_RATE)
prop.label = "Flood Packet Send IP Throttle rate"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleRate", prop)
prop = PropMeta("str", "floodPktSendIpThrottleSpct", "floodPktSendIpThrottleSpct", 49353, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Flood Packet Send IP Throttle suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleSpct", prop)
prop = PropMeta("str", "floodPktSendIpThrottleThr", "floodPktSendIpThrottleThr", 49354, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Flood Packet Send IP Throttle thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("floodPktSendIpThrottleThr", prop)
prop = PropMeta("str", "floodPktSendIpThrottleTr", "floodPktSendIpThrottleTr", 49356, PropCategory.IMPLICIT_TREND)
prop.label = "Flood Packet Send IP Throttle trend"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleTr", prop)
prop = PropMeta("str", "floodPktSendIpThrottleTrBase", "floodPktSendIpThrottleTrBase", 49355, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Flood Packet Send IP Throttle trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendIpThrottleTrBase", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleAvg", "floodPktSendTokenThrottleAvg", 49373, PropCategory.IMPLICIT_AVG)
prop.label = "Flood Packet Send Token Throttle average value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleAvg", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleBase", "floodPktSendTokenThrottleBase", 49368, PropCategory.IMPLICIT_BASELINE)
prop.label = "Flood Packet Send Token Throttle baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleBase", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleCum", "floodPktSendTokenThrottleCum", 49369, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Flood Packet Send Token Throttle cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleCum", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleLast", "floodPktSendTokenThrottleLast", 49367, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Flood Packet Send Token Throttle current value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleLast", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleMax", "floodPktSendTokenThrottleMax", 49372, PropCategory.IMPLICIT_MAX)
prop.label = "Flood Packet Send Token Throttle maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleMax", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleMin", "floodPktSendTokenThrottleMin", 49371, PropCategory.IMPLICIT_MIN)
prop.label = "Flood Packet Send Token Throttle minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleMin", prop)
prop = PropMeta("str", "floodPktSendTokenThrottlePer", "floodPktSendTokenThrottlePer", 49370, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Flood Packet Send Token Throttle periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottlePer", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleRate", "floodPktSendTokenThrottleRate", 49378, PropCategory.IMPLICIT_RATE)
prop.label = "Flood Packet Send Token Throttle rate"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleRate", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleSpct", "floodPktSendTokenThrottleSpct", 49374, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Flood Packet Send Token Throttle suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleSpct", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleThr", "floodPktSendTokenThrottleThr", 49375, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Flood Packet Send Token Throttle thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("floodPktSendTokenThrottleThr", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleTr", "floodPktSendTokenThrottleTr", 49377, PropCategory.IMPLICIT_TREND)
prop.label = "Flood Packet Send Token Throttle trend"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleTr", prop)
prop = PropMeta("str", "floodPktSendTokenThrottleTrBase", "floodPktSendTokenThrottleTrBase", 49376, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Flood Packet Send Token Throttle trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("floodPktSendTokenThrottleTrBase", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        # Generated constructor for this managed-object class: the class has
        # no naming properties, so an empty naming-value list is forwarded to
        # the Mo base-class constructor along with the parent and any
        # creation-time property overrides.
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
6f35d96d98a3368b68951d18321d0ae5ca68ebb6 | 68cecfdf90585d8fe7a705c10521d2e2cec80b8a | /apps/courses/migrations/0005_auto_20180814_1824.py | d643a4ae760016f9b51d0ae226bd67a23268d94c | [] | no_license | balloontmz/mooc | e3b8759a76879f321c55c98c8e07b1200cd18c9a | 4f01f82445f4b5e85a700793828eb5f969875814 | refs/heads/master | 2020-03-25T11:31:21.953098 | 2018-08-20T05:21:25 | 2018-08-20T05:21:25 | 143,736,149 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,492 | py | # Generated by Django 2.0.1 on 2018-08-14 18:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: add teacher-related fields to Course and
    learn_times/url to Video (depends on organization 0006 and courses 0004).
    """

    dependencies = [
        ('organization', '0006_auto_20180812_1555'),
        ('courses', '0004_auto_20180813_2135'),
    ]

    operations = [
        migrations.AddField(
            model_name='course',
            name='teacher',
            # Nullable FK so existing Course rows migrate without a teacher.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='organization.Teacher', verbose_name='讲师'),
        ),
        migrations.AddField(
            model_name='course',
            name='teacher_tell',
            # NOTE(review): the leading '=' in this verbose_name looks like a
            # typo carried over from the model definition — confirm there.
            field=models.CharField(default='什么都可以学到,按时交作业,不然叫家长', max_length=300, verbose_name='=老师告诉你'),
        ),
        migrations.AddField(
            model_name='course',
            name='you_need_know',
            # NOTE(review): same suspicious '=' prefix as teacher_tell above.
            field=models.CharField(default='一颗勤学的心是本课程的必要前提', max_length=300, verbose_name='=课程须知'),
        ),
        migrations.AddField(
            model_name='video',
            name='learn_times',
            field=models.IntegerField(default=0, verbose_name='学习时长(分钟数)'),
        ),
        migrations.AddField(
            model_name='video',
            name='url',
            field=models.CharField(default='http://blog.mtianyan.cn/', max_length=200, verbose_name='视频地址'),
        ),
    ]
| [
"15111171986@163.com"
] | 15111171986@163.com |
544cfca42ef60962f6e782c20d5e90e3cc8a535c | 97f9e29696000f45330fcad4c6a8d26bb8231708 | /good_point.py | 88962fb5d7084ab23b692471fdcd8e1f33284ae5 | [] | no_license | Ceasar/lecture | 5c5419404b08c8cb8f5b37e069db40e9146059b9 | d1143a0405d9dd2432d5c0cf14cf3ac2f9c18441 | refs/heads/master | 2021-01-20T12:20:45.793808 | 2012-02-28T04:08:46 | 2012-02-28T04:08:46 | 3,562,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | from math import sin, cos, pi, atan2
class Point(object):
    """A 2-D point stored in polar form (radius ``r``, angle ``theta``).

    Cartesian ``x``/``y`` are properties derived from the polar state.  Every
    derived/stored value is passed through ``round()``, so coordinates are
    deliberately coarse (lecture demo behavior, preserved here).
    """

    def __init__(self, r, theta):
        self.r = r          # radius
        self.theta = theta  # angle in radians

    @property
    def x(self):
        # Cartesian x derived from polar form (rounded).
        return round(self.r * cos(self.theta))

    @x.setter
    def x(self, val):
        # Capture the current y BEFORE mutating r: the original assigned
        # self.r first and then read self.y, which derived y from the NEW
        # radius and the stale angle — a latent ordering bug.
        old_y = self.y
        self.r = round(((val * val) + (old_y * old_y)) ** 0.5)
        self.theta = round(atan2(old_y, val))

    @property
    def y(self):
        # Cartesian y derived from polar form (rounded).
        return round(self.r * sin(self.theta))

    @y.setter
    def y(self, val):
        # Missing counterpart to the x setter (the demo's final suggestion
        # "now try setting y" raised AttributeError without it).  Mirrors the
        # x setter: recompute polar state from the current x and the new y.
        old_x = self.x
        self.r = round(((old_x * old_x) + (val * val)) ** 0.5)
        self.theta = round(atan2(val, old_x))

    def rotate(self, theta):
        """Rotate the point about the origin by ``theta`` radians."""
        self.theta += theta

    def __str__(self):
        return "x = %s; y = %s; r = %s; theta = %s;" % (self.x, self.y, self.r, self.theta)
if __name__ == "__main__":
    # Demo script (Python 2 print statements) exercising the polar Point API:
    # construct at (r=1, theta=pi/2), rotate, then assign through the x setter.
    p = Point(1, pi / 2)
    print p
    p.rotate(pi / 2)
    print p
    # so far so good
    p.x = 10
    print p
    # right!
    # now try setting y...  (left for the reader to try)
| [
"cbautista2010@gmail.com"
] | cbautista2010@gmail.com |
9baff6f38d64c4d58a9e972830a5bb3cefa44344 | e4e79bb3bc69c89fbc0429df37ef26fef6a49592 | /testproject/testproject/urls.py | 1b2a05bb96c78a88199932a368c18ecf199109ea | [
"Apache-2.0"
] | permissive | jluttine/django-nyt | ee78a4f55fb7109a5e9dca40f3a69cc58ac6a1b6 | 660f9c387cc1c363ab26e3ab2812da098d086876 | refs/heads/master | 2020-12-28T21:17:00.547751 | 2014-10-15T10:33:33 | 2014-10-15T10:33:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
admin.autodiscover()
# Admin site and admin docs routes (Django 1.x `patterns` syntax).
urlpatterns = patterns('',
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls)),
)

# In DEBUG only: serve static files and user-uploaded media straight from
# Django (production is expected to serve these from the web server).
if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()
    urlpatterns += patterns('',
        url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.MEDIA_ROOT,
        }),
    )

# Mount django_nyt's notification URLs under /nyt/.
from django_nyt.urls import get_pattern as get_nyt_pattern
urlpatterns += patterns('',
    (r'^nyt/', get_nyt_pattern()),
)
| [
"benjaoming@gmail.com"
] | benjaoming@gmail.com |
9efc7e03791547d91a33989077adbe2056566a48 | 1afec7d1d3099138b5afe5fd73dfd3d24ff4eb15 | /test/functional/rpc_invalid_address_message.py | f9149e01f98e473b42a9825372cdf9eb1bccdde4 | [
"MIT"
] | permissive | republic-productions/finalcoin | 5c7c6b0734178fe22db63f0946ec555f59e8d0eb | 7c0f335ded1e5c662034c822ca2c474b8e62778f | refs/heads/main | 2023-09-04T17:04:32.683667 | 2021-10-14T17:45:22 | 2021-10-14T17:45:22 | 417,209,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,867 | py | #!/usr/bin/env python3
# Copyright (c) 2020 The Finalcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test error messages for 'getaddressinfo' and 'validateaddress' RPC commands."""
from test_framework.test_framework import FinalcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
BECH32_VALID = 'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv'
BECH32_INVALID_BECH32 = 'bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqdmchcc'
BECH32_INVALID_BECH32M = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7k35mrzd'
BECH32_INVALID_VERSION = 'bcrt130xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqynjegk'
BECH32_INVALID_SIZE = 'bcrt1s0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7v8n0nx0muaewav25430mtr'
BECH32_INVALID_V0_SIZE = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7kqqq5k3my'
BECH32_INVALID_PREFIX = 'bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx'
BASE58_VALID = 'mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJRfn'
BASE58_INVALID_PREFIX = '17VZNX1SN5NtKa8UQFxwQbFeFc3iqRYhem'
INVALID_ADDRESS = 'asfah14i8fajz0123f'
class InvalidAddressErrorMessageTest(FinalcoinTestFramework):
    """Check the exact error strings returned by `validateaddress` and
    `getaddressinfo` for each class of malformed address."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def test_validateaddress(self):
        # `validateaddress` never raises: it reports problems via the
        # 'isvalid' flag and an 'error' string.
        node = self.nodes[0]

        # Bech32
        info = node.validateaddress(BECH32_INVALID_SIZE)
        assert not info['isvalid']
        assert_equal(info['error'], 'Invalid Bech32 address data size')

        info = node.validateaddress(BECH32_INVALID_PREFIX)
        assert not info['isvalid']
        assert_equal(info['error'], 'Invalid prefix for Bech32 address')

        # v1+ addresses must use the Bech32m checksum, v0 must use Bech32.
        info = node.validateaddress(BECH32_INVALID_BECH32)
        assert not info['isvalid']
        assert_equal(info['error'], 'Version 1+ witness address must use Bech32m checksum')

        info = node.validateaddress(BECH32_INVALID_BECH32M)
        assert not info['isvalid']
        assert_equal(info['error'], 'Version 0 witness address must use Bech32 checksum')

        info = node.validateaddress(BECH32_INVALID_V0_SIZE)
        assert not info['isvalid']
        assert_equal(info['error'], 'Invalid Bech32 v0 address data size')

        # A valid address carries no 'error' key at all.
        info = node.validateaddress(BECH32_VALID)
        assert info['isvalid']
        assert 'error' not in info

        info = node.validateaddress(BECH32_INVALID_VERSION)
        assert not info['isvalid']
        assert_equal(info['error'], 'Invalid Bech32 address witness version')

        # Base58
        info = node.validateaddress(BASE58_INVALID_PREFIX)
        assert not info['isvalid']
        assert_equal(info['error'], 'Invalid prefix for Base58-encoded address')

        info = node.validateaddress(BASE58_VALID)
        assert info['isvalid']
        assert 'error' not in info

        # Invalid address format
        info = node.validateaddress(INVALID_ADDRESS)
        assert not info['isvalid']
        assert_equal(info['error'], 'Invalid address format')

    def test_getaddressinfo(self):
        # Unlike `validateaddress`, `getaddressinfo` raises RPC error -5
        # with the same message text.
        node = self.nodes[0]

        assert_raises_rpc_error(-5, "Invalid Bech32 address data size", node.getaddressinfo, BECH32_INVALID_SIZE)
        assert_raises_rpc_error(-5, "Invalid prefix for Bech32 address", node.getaddressinfo, BECH32_INVALID_PREFIX)
        assert_raises_rpc_error(-5, "Invalid prefix for Base58-encoded address", node.getaddressinfo, BASE58_INVALID_PREFIX)
        assert_raises_rpc_error(-5, "Invalid address format", node.getaddressinfo, INVALID_ADDRESS)

    def run_test(self):
        self.test_validateaddress()
        # getaddressinfo is a wallet RPC, so only run it when the build
        # includes wallet support.
        if self.is_wallet_compiled():
            self.init_wallet(0)
            self.test_getaddressinfo()
if __name__ == '__main__':
InvalidAddressErrorMessageTest().main()
| [
"republicproductions@protonmail.com"
] | republicproductions@protonmail.com |
013ab9d306c6cde353ef76978b48a2f6e11b8d30 | 1d892928c70ee9ddf66f2a37a8e083d2632c6e38 | /nova/db/sqlalchemy/migrate_repo/versions/076_remove_unique_constraints.py | 971bfbecc2e217ffdbb4d29f36abed2a9970e642 | [
"Apache-2.0"
] | permissive | usc-isi/essex-baremetal-support | 74196c3f1332ee3cdeba9c263faff0ac0567d3cf | a77daf8ef56cf41e38de36621eda25ed3f180156 | refs/heads/master | 2021-05-19T03:12:11.929550 | 2020-07-24T14:15:26 | 2020-07-24T14:15:26 | 4,702,421 | 0 | 1 | Apache-2.0 | 2020-07-24T14:15:27 | 2012-06-18T15:19:41 | null | UTF-8 | Python | false | false | 3,239 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.from sqlalchemy import *
from sqlalchemy import MetaData, Table
from migrate.changeset.constraint import UniqueConstraint
def _get_constraint_names(engine_name):
# NOTE(vish): These constraint names may be dependent on the backend, but
# there doesn't seem to be we a way to determine the proper
# name for existing constraints. These names are correct for
# mysql and postgres.
if engine_name == "mysql":
return {
"instance_types_name": ("name", "instance_types_name_key"),
"instance_types_flavorid": "instance_types_flavorid_str_key",
"volume_types_name": "name",
}
else:
return {
"instance_types_name": ("instance_types_name_key",),
"instance_types_flavorid": "instance_types_flavorid_str_key",
"volume_types_name": "volume_types_name_key",
}
def upgrade(migrate_engine):
    # Drop the unique constraints on instance_types.name, .flavorid and
    # volume_types.name, using the backend-specific constraint names.
    meta = MetaData()
    meta.bind = migrate_engine
    c_names = _get_constraint_names(migrate_engine.name)

    table = Table('instance_types', meta, autoload=True)
    # MySQL lists more than one candidate name for this constraint; drop each.
    for constraint_name in c_names['instance_types_name']:
        cons = UniqueConstraint('name',
                                name=constraint_name,
                                table=table)
        cons.drop()
    cons = UniqueConstraint('flavorid',
                            name=c_names['instance_types_flavorid'],
                            table=table)
    cons.drop()
    table = Table('volume_types', meta, autoload=True)
    cons = UniqueConstraint('name',
                            name=c_names['volume_types_name'],
                            table=table)
    cons.drop()
def downgrade(migrate_engine):
    # Inverse of upgrade(): recreate the unique constraints dropped there,
    # using the same backend-specific names.
    meta = MetaData()
    meta.bind = migrate_engine
    c_names = _get_constraint_names(migrate_engine.name)

    table = Table('instance_types', meta, autoload=True)
    for constraint_name in c_names['instance_types_name']:
        cons = UniqueConstraint('name',
                                name=constraint_name,
                                table=table)
        cons.create()
    # NOTE(review): this re-autoload of the same table looks redundant
    # (upgrade() reuses the first handle) — presumably harmless; confirm.
    table = Table('instance_types', meta, autoload=True)
    cons = UniqueConstraint('flavorid',
                            name=c_names['instance_types_flavorid'],
                            table=table)
    cons.create()
    table = Table('volume_types', meta, autoload=True)
    cons = UniqueConstraint('name',
                            name=c_names['volume_types_name'],
                            table=table)
    cons.create()
| [
"dkang@isi.edu"
] | dkang@isi.edu |
0b294d1d60dd4bdf4271c352d5336b20def7191a | 6b2e5e0d21601c61a84afb7164125fb1dc16c7aa | /docs/fonts.py | 09b4151ff1b2607cdb65c6d41b344e93565b57f9 | [
"MIT"
] | permissive | KOLANICH-libs/proplot | aa42b7e8bf4df5f425ea19d31fca2afd15d9a11f | a71e4f8fd57410eee96ba4ce701b8290541a3b28 | refs/heads/master | 2023-07-08T16:44:01.227438 | 2021-07-04T20:28:52 | 2021-07-04T20:28:52 | 313,345,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,502 | py | # -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.3.0
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %% [raw] raw_mimetype="text/restructuredtext"
#
# Font selection
# ==============
#
# ProPlot registers several new fonts and includes tools for adding
# your own fonts. These features are described below.
#
#
# %% [raw] raw_mimetype="text/restructuredtext"
# .. _ug_fonts:
#
# Included fonts
# --------------
#
# Matplotlib provides a `~matplotlib.font_manager` module for working with
# system fonts and classifies fonts into `five font families\
# <https://matplotlib.org/3.1.1/gallery/text_labels_and_annotations/fonts_demo.html>`__:
# :rcraw:`font.serif` :rcraw:`font.sans-serif`, :rcraw:`font.monospace`,
# :rcraw:`font.cursive`, and :rcraw:`font.fantasy`. The default font family
# is sans-serif, because sans-serif fonts are generally more suitable for
# figures than serif fonts, and the default font name belonging to this family
# is `DejaVu Sans <https://dejavu-fonts.github.io>`__, which comes packaged with
# matplotlib.
#
# Matplotlib uses DejaVu Sans in part because it includes glyphs for a very wide
# range of symbols, especially mathematical symbols. However DejaVu Sans is seldom
# used outside of matplotlib and (in our opinion) is not very aesthetically pleasing.
# To improve the font selection while keeping things consistent across different
# workstations, ProPlot comes packaged with the open-source
# `TeX Gyre font series <https://ctan.org/pkg/tex-gyre?lang=en>`__
# and adds them as the default entries for all of matplotlib's font famlies:
#
# * The `Century <https://en.wikipedia.org/wiki/Century_type_family>`__ lookalike
# :rcraw:`font.serif` = ``'TeX Gyre Schola'``.
# * The `Helvetica <https://en.wikipedia.org/wiki/Helvetica>`__ lookalike
# :rcraw:`font.sans-serif` = ``'TeX Gyre Heros'``.
# * The `Courier <https://en.wikipedia.org/wiki/Courier_(typeface)>`__ lookalike
# :rcraw:`font.monospace` = ``'TeX Gyre Cursor'``.
# * The `Chancery <https://en.wikipedia.org/wiki/ITC_Zapf_Chancery>`__ lookalike
# :rcraw:`font.cursive` = ``'TeX Gyre Chorus'``.
# * The `Avant Garde <https://en.wikipedia.org/wiki/ITC_Avant_Garde>`__ lookalike
# :rcraw:`font.fantasy` = ``'TeX Gyre Adventor'``.
#
# After importing ProPlot, the default matplotlib font will be
# `TeX Gyre Heros <https://ctan.org/pkg/tex-gyre-heros>`__,
# which emulates the more conventional and aesthetically pleasing font
# `Helvetica <https://en.wikipedia.org/wiki/Helvetica>`__. The
# full font priority lists for each family are displayed in the
# :ref:`default proplotrc file <ug_proplotrc>`.
#
# To compare different fonts, use the `~proplot.demos.show_fonts` command. By
# default, this displays the *sans serif* fonts available on your system and
# packaged with ProPlot. The sans serif table on the RTD server is shown
# below. The "¤" symbol appears where characters for a particular font are
# unavailable (when making plots, "¤" is replaced with the character from
# a fallback font). Since most TeX Gyre fonts have limited
# character sets, if your plots contain lots of mathematical symbols,
# you may want to set :rcraw:`font.family` to DejaVu Sans or
# `Fira Math <https://github.com/firamath/firamath>`__, which is packaged
# with ProPlot.
#
# .. note::
#
# Try to avoid ``.ttf`` files with ``Thin`` in the file name. Some versions of
# matplotlib interpret fonts with the "thin" style as having *normal* weight (see
# `this issue page <https://github.com/matplotlib/matplotlib/issues/8788>`__),
# causing them to override the correct normal weight versions. While ProPlot
# tries to filter out these files, this cannot be done systematically. In the
# below example, the "Roboto" font may be overridden by its "thin" version
# because the RTD server includes this style.
# %%
import proplot as plot

# Render the font table described above (sans-serif fonts by default).
fig, axs = plot.show_fonts()
# %% [raw] raw_mimetype="text/restructuredtext"
# .. _ug_fonts_user:
#
# Using your own fonts
# --------------------
#
# You can register your own fonts by adding files to the ``~/.proplot/fonts``
# directory and calling `~proplot.config.register_fonts`. This command is
# also called on import. To change the default font, use the
# `~proplot.config.rc` object or modify your ``~/.proplotrc``. See
# the :ref:`configuration section <ug_config>` for details.
#
# Sometimes the font you would like to use *is* installed, but the font file
# is not stored under the matplotlib-compatible ``.ttf``, ``.otf``, or ``.afm``
# formats. For example, several macOS fonts are unavailable because they are
# stored as ``.dfont`` collections. Also, while matplotlib nominally supports
# ``.ttc`` collections, ProPlot ignores them because figures with ``.ttc`` fonts
# `cannot be saved as PDFs <https://github.com/matplotlib/matplotlib/issues/3135>`__.
# You can get matplotlib to use ``.dfont`` and ``.ttc`` collections by
# expanding them into individual ``.ttf`` files with the
# `DFontSplitter application <https://peter.upfold.org.uk/projects/dfontsplitter>`__,
# then saving the files in-place or in the ``~/.proplot/fonts`` folder.
#
# To find font collections, check the paths listed in ``OSXFontDirectories``,
# ``X11FontDirectories``, ``MSUserFontDirectories``, and ``MSFontDirectories``
# under the `matplotlib.font_manager` module.
| [
"lukelbd@gmail.com"
] | lukelbd@gmail.com |
5ad9089e3a6c24447ab2f79b46959cfe3b4a7c7c | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_143/ch27_2020_03_23_13_19_25_574676.py | c03bdeee5a7baf9d85caf9cc133ea720c209e773 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | invalid= True
while invalid:
perg = str(input('Tem dúvidas?')
if perg == não:
invalid= False
print ('Até a próxima')
else:
print ('Pratique mais')
| [
"you@example.com"
] | you@example.com |
f5df3d660900f3cef91489b44b8804976af6e0f4 | e76f47d5e6752b838d5f7e23e22cfef65482b8e1 | /env/bin/gdalchksum.py | 765bd72c6a86cacb639d07cd13700de5e0db7da6 | [] | no_license | AmirIdris/Final-Project | b006adfc4074df6687abaac83942b1b151300a51 | 7b0e28d01b7d5b4e4825d5d8b98ba193bd3f49e8 | refs/heads/master | 2023-06-10T21:13:12.875771 | 2021-07-08T20:23:59 | 2021-07-08T20:23:59 | 362,912,491 | 0 | 1 | null | 2021-07-08T20:24:00 | 2021-04-29T18:34:24 | CSS | UTF-8 | Python | false | false | 333 | py | #!/home/amir/Documents/Projects/FinalProject/env/bin/python3
import sys
# import osgeo.utils.gdalchksum as a convenience to use as a script
from osgeo.utils.gdalchksum import * # noqa
from osgeo.utils.gdalchksum import main
from osgeo.gdal import deprecation_warn
deprecation_warn('gdalchksum', 'utils')
sys.exit(main(sys.argv))
| [
"you@example.com"
] | you@example.com |
43a8958ceb7903ceb9c6b6d5ccebaaebc00206dc | 2db6f646b23c1bdbdf0e6a7b9889725a7eda4e98 | /test/functional/nulldummy.py | d29132890ca14d06c43cfc54ecb99188b66d039a | [
"MIT"
] | permissive | wolfoxonly/bwb | 113964cbce9ae8ce048bfcd81437f7bcfdb22e11 | aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10 | refs/heads/master | 2021-04-30T08:00:28.465159 | 2018-05-21T14:47:53 | 2018-05-21T14:47:53 | 121,363,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,692 | py | #!/usr/bin/env python3
# Copyright (c) 2016-2017 The Bwbcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate 427 more blocks.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
"""
from test_framework.test_framework import BwbcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, network_thread_start
from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment
from test_framework.script import CScript
from io import BytesIO
import time
NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
def trueDummy(tx):
    # Replace the leading dummy element of the input's scriptSig (the empty
    # push that NULLDUMMY requires for OP_CHECKMULTISIG) with 0x51 (OP_TRUE),
    # producing a transaction that violates the NULLDUMMY rule.  Mutates
    # `tx` in place and refreshes its cached hash.
    scriptSig = CScript(tx.vin[0].scriptSig)
    newscript = []
    for i in scriptSig:
        if (len(newscript) == 0):
            # First element must currently be the empty dummy push.
            assert(len(i) == 0)
            newscript.append(b'\x51')
        else:
            newscript.append(i)
    tx.vin[0].scriptSig = CScript(newscript)
    tx.rehash()
class NULLDUMMYTest(BwbcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
# This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through
# normal segwit activation here (and don't use the default always-on behaviour).
self.extra_args = [['-whitelist=127.0.0.1', '-walletprematurewitness', '-vbparams=segwit:0:999999999999', '-addresstype=legacy']]
def run_test(self):
self.address = self.nodes[0].getnewaddress()
self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address])
self.wit_address = self.nodes[0].addwitnessaddress(self.address)
self.wit_ms_address = self.nodes[0].addwitnessaddress(self.ms_address)
network_thread_start()
self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
coinbase_txid = []
for i in self.coinbase_blocks:
coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
self.nodes[0].generate(427) # Block 429
self.lastblockhash = self.nodes[0].getbestblockhash()
self.tip = int("0x" + self.lastblockhash, 0)
self.lastblockheight = 429
self.lastblocktime = int(time.time()) + 429
self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)]
txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48))
txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49))
txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
self.block_submit(self.nodes[0], test1txs, False, True)
self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47)
trueDummy(test2tx)
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
self.block_submit(self.nodes[0], [test2tx], False, True)
self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46)
test6txs=[CTransaction(test4tx)]
trueDummy(test4tx)
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
self.block_submit(self.nodes[0], [test4tx])
self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
test6txs.append(CTransaction(test5tx))
test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
self.block_submit(self.nodes[0], [test5tx], True)
self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
for i in test6txs:
self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
self.block_submit(self.nodes[0], test6txs, True, True)
    def create_transaction(self, node, txid, to_address, amount):
        """Build and wallet-sign a 1-input/1-output transaction.

        Spends vout 0 of *txid* to *to_address* for *amount* via the node's
        createrawtransaction/signrawtransaction RPCs and returns the signed
        transaction deserialized into a CTransaction object.
        """
        inputs = [{ "txid" : txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        # 'hex' in the sign result holds the fully signed serialization.
        signresult = node.signrawtransaction(rawtx)
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(signresult['hex']))
        tx.deserialize(f)
        return tx
def block_submit(self, node, txs, witness = False, accept = False):
block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
block.nVersion = 4
for tx in txs:
tx.rehash()
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
witness and add_witness_commitment(block)
block.rehash()
block.solve()
node.submitblock(bytes_to_hex_str(block.serialize(True)))
if (accept):
assert_equal(node.getbestblockhash(), block.hash)
self.tip = block.sha256
self.lastblockhash = block.hash
self.lastblocktime += 1
self.lastblockheight += 1
else:
assert_equal(node.getbestblockhash(), self.lastblockhash)
if __name__ == '__main__':
    # Entry point when executed directly as a functional test script.
    NULLDUMMYTest().main()
| [
"415313577@qq.com"
] | 415313577@qq.com |
2994c87a400699c9154a25a17bbe59a61473a769 | dd9f40550afd05192f04d817fa7b31bbe5945f8a | /app_1/migrations/0006_auto_20210921_1828.py | 52d2866997337ab4f915b145c72565b16af42ed8 | [] | no_license | AbdurRahman111/Boomboom-Official-Project---Official | b8c0220c61e204b8482227ffec3fc0f5ebd69f37 | ff00b702494183e13bc00b634ed33a5203536166 | refs/heads/master | 2023-08-19T04:19:48.334410 | 2021-10-11T12:24:53 | 2021-10-11T12:24:53 | 415,915,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,523 | py | # Generated by Django 3.2.6 on 2021-09-21 12:28
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (makemigrations, 2021-09-21).

    NOTE(review): every altered field carries a *fixed* default of
    datetime.datetime(2021, 9, 21, 18, 28, 44, 951099) — this is the
    signature of a model default written as ``datetime.now()`` (called at
    makemigrations time) instead of passing the callable. The models likely
    intended a callable such as ``django.utils.timezone.now``; confirm
    against the model definitions before relying on these defaults.
    """
    dependencies = [
        ('app_1', '0005_auto_20210921_1635'),
    ]
    operations = [
        migrations.AlterField(
            model_name='campaign_table',
            name='end_time',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099), null=True),
        ),
        migrations.AlterField(
            model_name='campaign_table',
            name='start_time',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099), null=True),
        ),
        migrations.AlterField(
            model_name='customer_review',
            name='Review_Time',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='flash_sell',
            name='flash_sell_end_time',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='flash_sell',
            name='flash_sell_start_time',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='products',
            name='Time',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='products',
            name='flash_sell_end_time',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='products',
            name='flash_sell_start_time',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='staff_access',
            name='First_Register_Time',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
        migrations.AlterField(
            model_name='staff_access',
            name='Last_login_Time',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2021, 9, 21, 18, 28, 44, 951099)),
        ),
    ]
| [
"mdabdurrahmanchowdhury1122@gmail.com"
] | mdabdurrahmanchowdhury1122@gmail.com |
c91cb1d76c2ecf1d0a7725da338964e214663f33 | 77f7adb86eee479c32fde983996fa35267d76fd5 | /sap/adt/search.py | c7e5462f1c36c16306d91e5e1efe671e65d7702a | [
"Apache-2.0"
] | permissive | jfilak/sapcli | cabd2390de37f70bbe55f0f7e343de123138e8b1 | 2839463fc2e2e2c1f35aa2203e4880d4e54462e7 | refs/heads/master | 2023-08-31T11:15:38.005314 | 2023-08-29T14:03:43 | 2023-08-29T14:30:46 | 158,893,144 | 54 | 26 | Apache-2.0 | 2023-09-11T14:19:10 | 2018-11-24T01:42:11 | Python | UTF-8 | Python | false | false | 809 | py | """Wraps ADT search functionality"""
from sap.adt.objects import ADTObjectReferences
import sap.adt.marshalling
class ADTSearch:
    """Wrapper around the ADT repository information-system search."""

    def __init__(self, connection):
        self._connection = connection

    def quick_search(self, term: str, max_results: int = 5) -> ADTObjectReferences:
        """Run a quick object search and return the matching references."""

        query_args = {
            'operation': 'quickSearch',
            'maxResults': max_results,
            'query': term,
        }
        response = self._connection.execute(
            'GET',
            'repository/informationsystem/search',
            params=query_args,
        )

        references = ADTObjectReferences()
        sap.adt.marshalling.Marshal().deserialize(response.text, references)
        return references
| [
"jakub@thefilaks.net"
] | jakub@thefilaks.net |
93800dc28f160d79cf2aae36684a9306099188bd | 1064f70fefad3a49ad75276bc8638310eace6477 | /solution_scripts/serial_scripts/vdns/test_vdns.py | dd713a1d9973e487f10b1b9c3465df220a690380 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | gokulchandrap/contrail-test | d80e1dfe54d191372c2fe9db6bb1ec6997c5022c | 32d6fa43490a3234eb9560839b7d0de06e28b9bb | refs/heads/master | 2021-01-19T10:35:33.821533 | 2017-04-10T21:33:20 | 2017-04-10T21:33:20 | 87,881,182 | 1 | 0 | null | 2017-04-11T02:52:17 | 2017-04-11T02:52:17 | null | UTF-8 | Python | false | false | 4,358 | py | # Need to import path to test/fixtures and test/scripts/
# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'
#
# To run tests, you can do 'python -m testtools.run vdns_tests'. To run specific tests,
# You can do 'python -m testtools.run -l vdns_tests'
# Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD
#
import os
import unittest
import fixtures
import testtools
import traceback
from policy_test import *
from multiple_vn_vm_test import *
from tcutils.wrappers import preposttest_wrapper
from tcutils.pkgs.Traffic.traffic.core.stream import Stream
from tcutils.pkgs.Traffic.traffic.core.profile import create, ContinuousProfile
from tcutils.pkgs.Traffic.traffic.core.helpers import Host
from tcutils.pkgs.Traffic.traffic.core.helpers import Sender, Receiver
from base import BasevDNSRestartTest
from common import isolated_creds
import inspect
from vnc_api import vnc_api
from vnc_api.gen.resource_test import *
from vdns_fixture import *
from floating_ip import *
from policy_test import *
from control_node import *
from user_test import UserFixture
import test
class TestvDNSRestart(BasevDNSRestartTest):
    """Virtual-DNS resilience tests: each test launches a VN/VM with a vDNS
    backed IPAM, verifies name resolution, restarts one infrastructure
    component and verifies resolution again via
    self.vdns_with_cn_dns_agent_restart (defined in the base class)."""
    @classmethod
    def setUpClass(cls):
        super(TestvDNSRestart, cls).setUpClass()
    def runTest(self):
        pass
    #end runTest
    @preposttest_wrapper
    def test_vdns_controlnode_switchover(self):
        ''' This test tests control node switchover functionality
        1. Create VDNS server object
        2. Associate VDNS with IPAM
        3. Launch VN with IPAM
        4. Launch VM with VN Created above. This test verifies on launch of VM agent should update DNS 'A' and 'PTR' records
        5. Ping VMs using VM name
        6. Restart active control node
        7. Ping VMs using VM name
        Pass criteria: Step 4,5 and 7 should pass
        Maintainer: cf-test@juniper.net
        '''
        restart_process = 'ControlNodeRestart'
        self.vdns_with_cn_dns_agent_restart(restart_process)
        return True
    @preposttest_wrapper
    def test_vdns_dns_restart(self):
        ''' This test test dns process restart functionality
        1. Create VDNS server object
        2. Associate VDNS with IPAM
        3. Launch VN with IPAM
        4. Launch VM with VN Created above. This test verifies on launch of VM agent should update DNS 'A' and 'PTR' records
        5. Ping VMs using VM name
        6. Restart the dns process in the active control node
        7. Ping VMs using VM name
        Pass criteria: Step 4, 5 and 7 should pass
        Maintainer: cf-test@juniper.net
        '''
        restart_process = 'DnsRestart'
        self.vdns_with_cn_dns_agent_restart(restart_process)
        return True
    @preposttest_wrapper
    def test_vdns_agent_restart(self):
        '''This test tests agent process restart functionality
        1. Create VDNS server object
        2. Associate VDNS with IPAM
        3. Launch VN with IPAM
        4. Launch VM with VN Created above. This test verifies on launch of VM agent should update DNS 'A' and 'PTR' records
        5. Ping VMs using VM name
        6. Restart the agent process in the compute node
        7. Ping VMs using VM name
        Pass criteria: Step 4, 5 and 7 should pass
        Maintainer: cf-test@juniper.net
        '''
        restart_process = 'AgentRestart'
        self.vdns_with_cn_dns_agent_restart(restart_process)
        return True
    @preposttest_wrapper
    def test_vdns_named_restart(self):
        '''This test tests named process restart functionality
        1. Create VDNS server object
        2. Associate VDNS with IPAM
        3. Launch VN with IPAM
        4. Launch VM with VN Created above. This test verifies on launch of VM agent should update DNS 'A' and 'PTR' records
        5. Ping VMs using VM name
        6. Restart the named process in the active control node
        7. Ping VMs using VM name
        Pass criteria: Step 4, 5 and 7 should pass
        Maintainer: cf-test@juniper.net
        '''
        restart_process = 'NamedRestart'
        self.vdns_with_cn_dns_agent_restart(restart_process)
        return True
if __name__ == '__main__':
unittest.main()
# end of TestVdnsFixture
| [
"vageesant@juniper.net"
] | vageesant@juniper.net |
975147c71e42e7af227b9e5cd11ee357d9a537ec | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /StudentProblem/10.21.12.4/2/1569573746.py | 1174b860f0aa3c2ac736976482becce073bef245 | [] | no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,215 | py | import functools
import typing
import string
import random
import pytest
## Lösung Teil 1.
def is_palindromic(n: int) -> bool:
    """Return True if the integer *n* reads the same forwards and backwards.

    Negative numbers are never palindromic (the sign has no mirror digit).
    """
    if n < 0:
        return False
    digits = str(n)
    # Palindromic iff the decimal representation equals its own reverse.
    return digits == digits[::-1]
######################################################################
## Lösung Teil 2. (Tests)
# Fixed: "Frue" was a NameError, and 1993 reversed is 3991, so the
# correct expectation is False.
assert is_palindromic(1993) == False
######################################################################
## Lösung Teil 3.
## Lösung Teil 4.
######################################################################
## test code
# NOTE(review): this runs pytest at module level, before the Test* classes
# below are defined; collection behaviour depends on how the file is invoked.
pytest.main (["-v", "--assert=plain", "-p", "no:cacheprovider"])
from inspect import getfullargspec
class TestNames:
    """Grading harness: checks the required student functions exist and take
    a parameter named 'n'."""
    def test_is_palindromic(self):
        assert is_palindromic
        assert 'n' in getfullargspec(is_palindromic).args
    def test_gen_palindromic(self):
        # NOTE(review): gen_palindromic is not defined in this file; this
        # test fails with a NameError unless the student supplies it.
        assert gen_palindromic
        assert 'n' in getfullargspec(gen_palindromic).args
    def test_represent(self):
        # NOTE(review): represent is likewise expected from the student.
        assert represent
        assert 'n' in getfullargspec(represent).args
class TestGrades:
    """Grading harness: compares the student's functions against the
    *_oracle reference implementations defined below."""
    def test_docstring_present(self):
        assert is_palindromic.__doc__ is not None
        assert gen_palindromic.__doc__ is not None
        assert represent.__doc__ is not None
    def test_typing_present(self):
        # NOTE(review): functions have no "__hints__" attribute in CPython
        # (type hints live in __annotations__ / typing.get_type_hints), so
        # the first assertion raises AttributeError — confirm intent.
        assert is_palindromic.__hints__ == typing.get_type_hints(self.is_palindromic_oracle)
        assert typing.get_type_hints (gen_palindromic) == typing.get_type_hints (self.gen_palindromic_oracle)
        assert typing.get_type_hints (represent) == typing.get_type_hints (self.represent_oracle)
    def test_coverage(self):
        # NOTE(review): "coverage" is not defined anywhere in this file.
        assert coverage("achieved") == coverage("required")
    def is_palindromic_oracle(self, n:int)->list:
        # Reference implementation: peel matching outer digits.
        s = str(n)
        while len (s) > 1:
            if s[0] != s[-1]:
                return False
            s = s[1:-1]
        return True
    def gen_palindromic_oracle (self, n:int):
        # Yields palindromes from n+1 down to 1 (descending).
        return (j for j in range (n + 1, 0, -1) if self.is_palindromic_oracle (j))
    def represent_oracle (self, n:int) -> list:
        # Greedy search for a sum of at most three palindromes equal to n.
        for n1 in self.gen_palindromic_oracle (n):
            if n1 == n:
                return [n1]
            for n2 in self.gen_palindromic_oracle (n - n1):
                if n2 == n - n1:
                    return [n1, n2]
                for n3 in self.gen_palindromic_oracle (n - n1 - n2):
                    if n3 == n - n1 - n2:
                        return [n1, n2, n3]
        # failed to find a representation
        return []
    def test_is_palindromic(self):
        ## fill in
        # NOTE(review): self.check_divisors does not exist on this class;
        # these calls raise AttributeError as written.
        for i in range (100):
            self.check_divisors (i)
        n = random.randrange (10000)
        self.check_divisors (n)
    def test_gen_palindromic(self):
        ## fill in
        pass
    def test_represent (self):
        def check(n, r):
            for v in r:
                assert self.is_palindromic_oracle (v)
            assert n == sum (r)
        for n in range (1,100):
            r = represent (n)
            check (n, r)
        for i in range (100):
            n = random.randrange (10000)
            r = represent (n)
            check (n, r)
| [
"lenni.elbe@gmail.com"
] | lenni.elbe@gmail.com |
1b3e443d285d7eea5a39e9ed896c107b52115972 | ef2c8185e291d3e11df9406c14af78fd0a8b5b9a | /getWeather.py | c68590ead3effc53bb1fde481dcd08077b07951c | [] | no_license | davehedengren/weather | c855df8ee51de195b3fca095e2e059bcebeadc51 | 014ca831eadb12faa515c38328eb60273dff5775 | refs/heads/master | 2021-01-01T18:17:42.493236 | 2014-09-20T02:25:00 | 2014-09-20T02:25:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | import datetime
import requests
import json
apikey = ""
cdoHeaders = {"token":apikey}
electionDates = []
electionDates.append(datetime.date(1981,3,18))
electionDates.append(datetime.date(1982,9,8))
electionDates.append(datetime.date(1981,10,10))
#r = requests.get("http://www.ncdc.noaa.gov/cdo-web/api/v2/", headers=cdoHeaders)
coverageArea="47.5204,-122.2047,47.6139,-122.1065"
startDate=datetime.date(2004,1,1)
endDate=datetime.date(2012,1,1)
r = requests.get("http://www.ncdc.noaa.gov/cdo-web/api/v2/stations?datasetid=GHCND&datatypeid=TMAX&datatypeid=TMIN&datatypeid=TPCP&extent="+coverageArea+"&startdate="+str(startDate)+"&enddate="+str(endDate)+"&sort=datacoverage&sortorder=desc",headers=cdoHeaders)
id = r.json()['results'][0]['id']
r = requests.get("http://www.ncdc.noaa.gov/cdo-web/api/v2/data?datasetid=GHCND&datatypeid=TPCP&datatypeid=TMAX&datatypeid=TMIN&stationid="+id+"&startdate="+str(startDate)+"&enddate="+str(endDate),headers=cdoHeaders)
| [
"james.p.campbell@gmail.com"
] | james.p.campbell@gmail.com |
1786a9be17fe0e1ada8ffd21656dc9b7411dd30c | 0a9949a7dbe5f7d70028b22779b3821c62eb6510 | /static/hb_lcd_static/query_date.py | 9b3c22f222e4c2646872d0d2ac59d8ecadaef8c3 | [] | no_license | 744996162/warehouse | ed34f251addb9438a783945b6eed5eabe18ef5a2 | 3efd299a59a0703a1a092c58a6f7dc2564b92e4d | refs/heads/master | 2020-06-04T22:10:14.727156 | 2015-07-03T09:40:09 | 2015-07-03T09:40:09 | 35,603,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,225 | py | #coding=utf-8
__author__ = 'Administrator'
import datetime
def test():
    """Smoke-test helper: print today's date (YYYYMMDD) and week number.

    Removed dead code: locals computed from strftime('%w') and
    today.replace() were never used, and commented-out experiments are gone.
    """
    today = datetime.datetime.now()
    day_str = today.strftime('%Y%m%d')
    week = today.strftime("%U")  # week of year, Sunday as first day
    print(day_str, week)
def gt_getWeeklyDate(date=None):
    """Return the Sunday eight weeks before *date* as 'YYYYMMDD'.

    Fix: the old default ``date=datetime.datetime.now()`` was evaluated
    once at import time, freezing the reference date in long-running
    processes. ``None`` now defers the lookup to call time; behaviour
    with an explicit *date* is unchanged.
    """
    if date is None:
        date = datetime.datetime.now()
    # isoweekday(): Mon=1..Sun=7; stepping back 56 + weekday days always
    # lands on a Sunday (the one 8 weeks before the most recent Sunday).
    weekday = date.isoweekday()
    delta = 56 + weekday
    target = date + datetime.timedelta(days=-delta)
    return target.strftime('%Y%m%d')
def gt_getMonthlyDate(date=None):
    """Return the 1st of the month 32 days before *date* as 'YYYYMMDD'.

    Usually this is the previous month; from the 1st/2nd of March it can
    reach January (32 days crosses short February).

    Fixes: the old import-time default ``datetime.datetime.now()`` froze
    the reference date, and an unreachable ``pass`` after ``return`` has
    been removed. Behaviour with an explicit *date* is unchanged.
    """
    if date is None:
        date = datetime.datetime.now()
    previous = date + datetime.timedelta(days=-32)
    return previous.replace(day=1).strftime('%Y%m%d')
def hb_getWeeklyDate(date=None):
    """Return the Monday of the preceding ISO week as 'YYYY-MM-DD'.

    NOTE: the original (Chinese) comment claimed "two weeks ago", but
    ``6 + isoweekday`` only reaches the Monday of the previous week.

    Fix: the old default ``datetime.datetime.now()`` was evaluated once at
    import time; ``None`` defers it to call time. Explicit *date* calls
    behave exactly as before.
    """
    if date is None:
        date = datetime.datetime.now()
    weekday = date.isoweekday()
    delta = 6 + weekday
    target = date + datetime.timedelta(days=-delta)
    return target.strftime('%Y-%m-%d')
def hb_getMonthlyDate(date=None):
    """Return the 1st of the month *date* minus a 7-day grace period falls
    in, as 'YYYY-MM-DD' (the buffer guards against late-arriving data at
    month boundaries, per the original comment).

    Fix: import-time-evaluated default replaced with a call-time default.
    """
    if date is None:
        date = datetime.datetime.now()
    adjusted = date + datetime.timedelta(days=-7)
    return adjusted.replace(day=1).strftime('%Y-%m-%d')
def hb_getMonthlyDate_new(month_diff, date=None):
    """Return the 1st of the month roughly *month_diff* months before
    *date*, as 'YYYY-MM-DD'. A month is approximated as 30 days, so the
    result can drift for large *month_diff* or near month boundaries.

    Fix: import-time-evaluated default replaced with a call-time default.
    """
    if date is None:
        date = datetime.datetime.now()
    adjusted = date + datetime.timedelta(days=-month_diff * 30)
    return adjusted.replace(day=1).strftime('%Y-%m-%d')
def hb_getMonthlyDate_lcd(month_diff, date=None):
    """Like hb_getMonthlyDate_new, but wraps the 'YYYY-MM-DD' result in
    single quotes so it can be interpolated into a SQL statement.

    Fix: import-time-evaluated default replaced with a call-time default.
    """
    if date is None:
        date = datetime.datetime.now()
    adjusted = date + datetime.timedelta(days=-month_diff * 30)
    first_of_month = adjusted.replace(day=1).strftime('%Y-%m-%d')
    return "'" + first_of_month + "'"
if __name__=="__main__":
    # Manual smoke test: print the quoted first-of-current-month date.
    # today=datetime.datetime.now()
    # today=datetime.date(2014,9,21)
    # week=gt_getWeeklyDate(today)
    # month=gt_getMonthlyDate(today)
    # week=hb_getWeeklyDate(today)
    # month=hb_getMonthlyDate()
    # print(week,month)
    date1 = hb_getMonthlyDate_lcd(0)
    print(date1)
    pass
| [
"744996162@qq.com"
] | 744996162@qq.com |
aabc77683ae4d1a2e9070b2cfc9c0bca517cae46 | 3b6ba8d4dc4dd8fe572c1419709facc7bdc2274e | /ai4water/postprocessing/explain/utils.py | c75c7956c174298a9e5dcb9027b3612b05ed0729 | [
"MIT"
] | permissive | AtrCheema/AI4Water | fd5bfda1eee530e7bc9ed1b2130ed49dd0d5bf89 | ec2a4a426673b11e3589b64cef9d7160b1de28d4 | refs/heads/master | 2023-09-04T10:59:55.902200 | 2023-02-10T15:55:32 | 2023-02-10T15:55:32 | 284,684,202 | 47 | 17 | MIT | 2023-02-10T15:56:43 | 2020-08-03T11:39:22 | Python | UTF-8 | Python | false | false | 3,465 | py |
from ai4water.backend import sklearn_models
def convert_ai4water_model(old_model, framework=None, explainer=None):
    """Translate an ai4water ``Model`` into its underlying native model
    (sklearn/xgboost/lightgbm/catboost via ``to_native``, or a keras network
    via ``functional_to_keras``) and choose a matching framework/explainer.
    Any other object is returned untouched."""
    new_model = old_model
    model_name = old_model.__class__.__name__
    is_ai4water_model = (
        model_name == "Model" and "ai4water" in str(type(old_model)))

    if is_ai4water_model:
        if old_model.category == "ML":
            framework = "ML"
            model_name = next(iter(old_model.config['model'].keys()))
            new_model, native_explainer = to_native(old_model, model_name)
            explainer = explainer or native_explainer
        else:
            framework = "DL"
            explainer = explainer or "DeepExplainer"
            if 'functional' in str(type(old_model)):
                new_model = functional_to_keras(old_model)

    return new_model, framework, explainer, model_name
def to_native(model, model_name:str):
    """Wrap an ai4water Model in a subclass of the matching native model
    class (xgboost/lightgbm/catboost/sklearn) and pick a shap explainer.

    Returns a (DummyModel instance, explainer name) tuple.
    Raises ValueError when *model_name* matches no supported library.
    """
    # Transformations are applied inside ai4water's Model, but TreeExplainer
    # operates on the bare tree structure and would ignore them — so any
    # model with x/y transformations falls back to KernelExplainer.
    if model.config['x_transformation']or model.config['y_transformation']:
        explainer = "KernelExplainer"
    else:
        explainer = "TreeExplainer"
    if model_name.startswith("XGB"):
        import xgboost
        BaseModel = xgboost.__dict__[model_name]
    elif model_name.startswith("LGB"):
        import lightgbm
        BaseModel = lightgbm.__dict__[model_name]
    elif model_name.startswith("Cat"):
        import catboost
        BaseModel = catboost.__dict__[model_name]
    elif model_name in sklearn_models:
        BaseModel = sklearn_models[model_name]
        # sklearn models are not guaranteed tree-based, so always Kernel.
        explainer = "KernelExplainer"
    else:
        raise ValueError
    class DummyModel(BaseModel):
        """Attribute proxy: first tries ai4water's Model, then falls back to
        the underlying library model (model._model)."""
        def __getattribute__(self, item):
            return getattr(model, item)
        def __getattr__(self, item):
            return getattr(model._model, item)
    return DummyModel(), explainer
def get_features(features, features_to_explain):
    """Normalise *features_to_explain* to a list and verify every entry is a
    known feature. ``None`` selects all of *features*; a single string is
    wrapped in a one-element list."""
    if features_to_explain is None:
        selected = features
    elif isinstance(features_to_explain, str):
        selected = [features_to_explain]
    else:
        selected = features_to_explain

    assert isinstance(selected, list)
    for name in selected:
        assert name in features

    return selected
def functional_to_keras(old_model):
    """Convert an ai4water functional-API model into a plain keras Model
    whose output is flattened to shape (None, 1).

    Only valid when no x/y transformations are configured, since those are
    applied outside the keras graph.
    """
    assert old_model.config['x_transformation'] is None
    assert old_model.config['y_transformation'] is None
    from tensorflow.keras.models import Model
    from tensorflow.keras.layers import Flatten
    # keras model from functional api
    old_model = old_model._model
    old_m_outputs = old_model.outputs
    if isinstance(old_m_outputs, list):
        assert len(old_m_outputs) == 1
        old_m_outputs = old_m_outputs[0]
    if len(old_m_outputs.shape) > 2:  # (None, ?, ?)
        # Flatten higher-rank outputs; the result must still be a single unit.
        new_outputs = Flatten()(old_m_outputs)  # (None, ?)
        assert new_outputs.shape.as_list()[-1] == 1  # (None, 1)
        new_model = Model(old_model.inputs, new_outputs)
    else:  # (None, ?)
        assert old_m_outputs.shape.as_list()[-1] == 1  # (None, 1)
        new_model = old_model
    return new_model
"ather_abbas786@yahoo.com"
] | ather_abbas786@yahoo.com |
13116bac9d15d4ef5ffee054babf761735154987 | 329f0e8bf63b8ee7fc55ca1c9ea0a3fcc58bbff5 | /app/views/tournament_view.py | 983d33954773dd9de1ce00ca71b7f49646177206 | [] | no_license | pwgraham91/cratejoy-darts | 6be90ead11e6580d1630d1cf95b1402118200d72 | f6e3eb3dd5f47c2c155dcd85a0d3f46b00eee38d | refs/heads/master | 2022-09-21T10:44:51.908579 | 2017-01-01T02:25:44 | 2017-01-01T02:25:44 | 70,245,826 | 0 | 0 | null | 2022-09-16T17:45:43 | 2016-10-07T12:47:36 | Python | UTF-8 | Python | false | false | 1,814 | py | from datetime import datetime
import json
import flask
from flask_login import login_required
from app import app, db
from app.libs.tournament_lib import make_tournament
from app.models import Tournament, User
@app.route('/tournaments', methods=['GET'])
def tournaments():
    """List every tournament, newest first."""
    tournament_list = (db.session.query(Tournament)
                       .order_by(Tournament.date_started.desc())
                       .all())
    return flask.render_template(
        'tournaments/tournaments.html',
        user=flask.g.user,
        tournaments=tournament_list)
@app.route('/tournaments/<int:tournament_id>', methods=['GET'])
@login_required
def tournament_get(tournament_id):
    """Show a single tournament looked up by primary key."""
    found = db.session.query(Tournament).get(tournament_id)
    return flask.render_template(
        'tournaments/tournament.html',
        user=flask.g.user,
        tournament=found)
@app.route('/tournaments/add', methods=['GET'])
@login_required
def add_tournament_get():
    """Render the tournament-creation form with every user as a candidate."""
    candidates = db.session.query(User).all()
    return flask.render_template(
        'tournaments/add_tournament.html',
        user=flask.g.user,
        players=candidates)
@app.route('/tournaments/add', methods=['POST'])
@login_required
def add_tournament_post():
    """Create a tournament from the posted JSON payload and return its id."""
    payload = flask.request.json
    started = datetime.strptime(payload['date_started'], '%m/%d/%Y')
    new_tournament = make_tournament(
        db.session, started, payload['random_draw'], payload['player_ids'])
    db.session.commit()
    body = json.dumps({
        'id': new_tournament.id,
        'random_draw': new_tournament.random_draw,
    })
    return flask.Response(body, mimetype=u'application/json')
| [
"pwgraham91@gmail.com"
] | pwgraham91@gmail.com |
9cf53acce71b5556bc2c06b1ab3e397c6972de74 | 11a739cc8dc520c2aa0979236391af43844b4796 | /lti_synchronization/moodle/lti13/auth.py | 7989b7ee4f4dca630b23643626aacab7ba89afab | [] | no_license | antibagr/ncsu-jupyterhub | 872751d8549ee58ebab9a668f22afd835fdffbb0 | 918236870fd95e5ef82ffdf0e3d25cd418b6550e | refs/heads/master | 2023-07-18T01:28:20.971190 | 2021-09-02T19:53:10 | 2021-09-02T19:53:10 | 363,832,097 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,949 | py | import json
import os
import time
import typing as t
import urllib
import uuid
import jwt
import pem
from Crypto.PublicKey import RSA
from jwcrypto.jwk import JWK
from loguru import logger
from moodle.utils import dump_json
from tornado.httpclient import AsyncHTTPClient, HTTPClientError
async def get_lms_access_token(
    token_endpoint: str,
    private_key_path: str,
    client_id: str,
    scope: t.Optional[str] = None,
) -> str:
    '''
    Gets an access token from the LMS Token endpoint by signing a JWT
    client assertion with the tool's private key (client_credentials grant).
    Args:
        token_endpoint (str): The url that will be used to make the request
        private_key_path (str): specify where the pem is
        client_id (str): For LTI 1.3 the Client ID that was obtained with the tool setup
        scope (str, optional): space-separated OAuth scopes; defaults to the
            standard LTI Assignment and Grade Services scopes below.
    Returns:
        str: A json with the token value
    Raises:
        HTTPClientError: the token endpoint rejected the request.
    '''
    def _get_params() -> t.Generator:
        '''
        Formatted parameters to send to logger. A generator so the (costly)
        dump_json formatting is only done when a next() call actually logs.
        '''
        yield dump_json({k: str(v) for k, v in token_params.items()})
        yield token[-5:]
        yield dump_json({'scope': scope.split()})
        _dict = {**params, 'client_assertion': params['client_assertion'][-5:]}
        yield dump_json({k: str(v) for k, v in _dict.items()})
        # if 'f_exc' in globals():
        #     yield f_exc.response.body if f_exc.response else f_exc.message
        # else:
        #     yield json.loads(resp.body)
    logger.info('Token endpoint is: %s' % token_endpoint)
    # Standard JWT claims; iat is backdated 5s to tolerate clock skew.
    token_params = {
        'iss': client_id,
        'sub': client_id,
        'aud': token_endpoint,
        'iat': int(time.time()) - 5,
        'exp': int(time.time()) + 60,
        'jti': str(uuid.uuid4()),
    }
    _params = _get_params()
    logger.debug('Getting lms access token with parameters\n%s' % next(_params))
    # get the pem-encoded content
    private_key = get_pem_text_from_file(private_key_path)
    headers = get_headers_to_jwt_encode(private_key)
    token = jwt.encode(token_params, private_key,
                       algorithm='RS256', headers=headers)
    logger.debug('Obtaining token %s' % next(_params))
    scope: str = scope or ' '.join([
        'https://purl.imsglobal.org/spec/lti-ags/scope/score',
        'https://purl.imsglobal.org/spec/lti-ags/scope/lineitem',
        'https://purl.imsglobal.org/spec/lti-ags/scope/result.readonly',
        'https://purl.imsglobal.org/spec/lti-ags/scope/lineitem.readonly',
    ])
    logger.debug('Scope is %s' % next(_params))
    params = {
        'grant_type': 'client_credentials',
        'client_assertion_type': 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer',
        'client_assertion': token,
        'scope': scope,
    }
    logger.debug('OAuth parameters are:\n\n%s' % next(_params))
    client = AsyncHTTPClient()
    body = urllib.parse.urlencode(params)
    try:
        resp = await client.fetch(token_endpoint, method='POST', body=body, headers=None)
    except HTTPClientError as f_exc:
        # Log the LMS error body (if any) before re-raising to the caller.
        logger.info('Error by obtaining a token with lms. Detail: %s' % f_exc.response.body if f_exc.response else f_exc.message)
        raise
    else:
        logger.debug('Token response body is %s' % json.loads(resp.body))
        return json.loads(resp.body)
def get_jwk(public_key: str) -> dict:
    '''
    Export a PEM-encoded public key as a JWK dictionary.
    Args:
        public_key (str): PEM-encoded public key content
    Returns:
        dict: public JWK with RS256 signing metadata attached
    '''
    exported = JWK.from_pem(public_key).export_public()
    jwk_dict = json.loads(exported)
    jwk_dict.update(alg='RS256', use='sig')
    return jwk_dict
def get_headers_to_jwt_encode(private_key_text: str) -> t.Optional[dict]:
    '''
    Build the JWT headers (key id) for a given private key.
    Args:
        private_key_text (str): the PEM-encoded private key content
    Returns:
        t.Optional[dict]: ``{'kid': ...}`` when the public key can be
        exported and a JWK produced, otherwise None
    '''
    public_key = RSA.importKey(private_key_text).publickey().exportKey()
    if not public_key:
        return None

    jwk = get_jwk(public_key)
    if not jwk:
        return None

    return {'kid': jwk.get('kid')}
def get_pem_text_from_file(private_key_path: str) -> str:
    '''
    Read a PEM file and return its first PEM object as text.
    Args:
        private_key_path (str): Path to the private key file.
    Returns:
        str: Text of the first parsed PEM object
    Raises:
        PermissionError: the file is not readable
        ValueError: the file contains no PEM objects
    '''
    if not os.access(private_key_path, os.R_OK):
        raise PermissionError()

    parsed = pem.parse_file(private_key_path)
    if not parsed:
        raise ValueError('Invalid pem file.')

    first_object = parsed[0]
    return first_object.as_text()
| [
"antibagr@yandex.ru"
] | antibagr@yandex.ru |
fa8833eb0e33cf9eddba27b06d19308957a69f7c | 7f92c2fc131ca637d8b7c2a4dbba4b974884e786 | /lab4/plottingScripts/plot2.py | cfaf4f0ad69dc2e50527e08f8c30a7aea145732f | [] | no_license | byronwasti/CircuitsLabs | 2c5694f07a59adedddde361d0a85a690a83e096b | be1227c504ed1a2b81b6d670cbaa45d4b8be8e17 | refs/heads/master | 2020-05-23T11:15:14.853587 | 2017-09-03T18:53:50 | 2017-09-03T18:53:50 | 80,369,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,018 | py |
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import csv
# CSV measurement files for the four transistors of experiment 1.
FILENAME1 = "../data/experiment1_Transistor1_1.csv"
FILENAME2 = "../data/experiment1_Transistor2_1.csv"
FILENAME3 = "../data/experiment1_Transistor3_1.csv"
FILENAME4 = "../data/experiment1_Transistor4_1.csv"
FILENAMES = [FILENAME1, FILENAME2, FILENAME3, FILENAME4]
# Per-transistor data: column 0 (base voltage), column 1 (first current),
# and column 2 minus column 1.
DATAX = [ [], [], [], [] ]
DATAY1 = [ [], [], [], [] ]
DATAY2 = [ [], [], [], [] ]
for j, FILENAME in enumerate(FILENAMES):
    with open(FILENAME, 'r') as f:
        reader = csv.reader(f)
        print(j)
        for i, row in enumerate(reader):
            # Skip the header row and keep only 0.4 V..0.9 V samples.
            if i == 0 : continue
            if float(row[0]) > 0.9 : continue
            if float(row[0]) < 0.4 : continue
            DATAX[j].append(float(row[0]))
            DATAY1[j].append(float(row[1]))
            DATAY2[j].append(float(row[2]) - float(row[1]))
# Point-wise mean across the four transistors.
# NOTE(review): assumes all four filtered series have identical length and
# aligned sample points; a mismatch raises IndexError here.
AVGX = []
AVGY1 = []
AVGY2 = []
for i in range(len(DATAX[0])):
    x = 0
    y1 = 0
    y2 = 0
    for j in range(4):
        x += DATAX[j][i]
        y1 += DATAY1[j][i]
        y2 += DATAY2[j][i]
    AVGX.append(x/4)
    AVGY1.append(y1/4)
    AVGY2.append(y2/4)
# Percent difference of each transistor from the point-wise mean.
PER_DIFF_X = [ [], [], [], [] ]
PER_DIFF_Y1 = [ [], [], [], [] ]
PER_DIFF_Y2 = [ [], [], [], [] ]
for i in range(4):
    for j,x in enumerate(DATAX[i]):
        PER_DIFF_X[i].append( 100 * abs(AVGX[j] - x ) / AVGX[j] )
    for j,y in enumerate(DATAY1[i]):
        PER_DIFF_Y1[i].append( 100 * abs(AVGY1[j] - y ) / AVGY1[j] )
    for j,y in enumerate(DATAY2[i]):
        PER_DIFF_Y2[i].append( 100 * abs(AVGY2[j] - y ) / AVGY2[j] )
#for i in range(4):
    #plt.semilogy(DATAX[i], DATAY1[i], '.', label="Transistor %i" % i)
#plt.semilogy(AVGX, AVGY1, '.', label="Transistor AVG")
for i in range(4):
    plt.plot(AVGX, PER_DIFF_Y1[i], '.', label="Transistor %i" % i)
plt.xlabel("Base Voltage (V)")
plt.ylabel("Percent Difference from Mean (%)")
plt.title("Collector Current Percent Difference from Mean Value as a Function of Base Voltage")
plt.legend()
plt.show()
| [
"byron.wasti@gmail.com"
] | byron.wasti@gmail.com |
735d3bc443ff036ece35936f200ed7336e1323b7 | 230b4ce17bf74ca2d7c054ab124e0f8ca49a99cb | /test/core/test_alarm.py | 3b8ac654c9e45b9ff07fe2daa01555c0573366b2 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ulrikpedersen/malcolm | e827e6829881bfa71366231fd7dfe535709b7d4d | e59a095f9fc4be65f931e728a24919843b8f7fa9 | refs/heads/master | 2021-01-16T21:14:35.975923 | 2015-09-14T08:24:28 | 2015-09-14T08:24:28 | 42,232,223 | 0 | 0 | null | 2015-09-10T08:26:40 | 2015-09-10T08:26:39 | Python | UTF-8 | Python | false | false | 686 | py | #!/bin/env dls-python
import unittest
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
from malcolm.core.alarm import Alarm, AlarmSeverity, AlarmStatus
class AlarmTest(unittest.TestCase):
    """Unit tests for the Alarm.ok() convenience constructor."""

    def test_ok(self):
        alarm = Alarm.ok()
        # An "ok" alarm carries no status, no severity and a fixed message.
        self.assertEqual(alarm.status, AlarmStatus.noStatus)
        self.assertEqual(alarm.severity, AlarmSeverity.noAlarm)
        self.assertEqual(alarm.message, "No alarm")

    def test_eq(self):
        expected = Alarm(
            AlarmSeverity.noAlarm, AlarmStatus.noStatus, "No alarm")
        self.assertEqual(Alarm.ok(), expected)
if __name__ == '__main__':
    # Verbosity 2 lists each test method as it runs.
    unittest.main(verbosity=2)
| [
"tom.cobb@diamond.ac.uk"
] | tom.cobb@diamond.ac.uk |
9783a9f8f220edbd0af2839fc5158d88e273bd04 | d700b9ad1e0b7225871b65ce0dafb27fb408c4bc | /students/k3343/practical_works/Nazarenko_Uliana/Pr_3/Django_project_Nazarenko/django_project_Nazarenko/asgi.py | 4d4c025f140af53b821adf407bb5757598fc0b81 | [
"MIT"
] | permissive | TonikX/ITMO_ICT_WebProgramming_2020 | a8c573ed467fdf99327777fb3f3bfeee5714667b | ba566c1b3ab04585665c69860b713741906935a0 | refs/heads/master | 2023-01-11T22:10:17.003838 | 2020-10-22T11:22:03 | 2020-10-22T11:22:03 | 248,549,610 | 10 | 71 | MIT | 2023-01-28T14:04:21 | 2020-03-19T16:18:55 | Python | UTF-8 | Python | false | false | 425 | py | """
ASGI config for django_project_Nazarenko project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings before building the application.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_project_Nazarenko.settings')
# Module-level ASGI callable picked up by ASGI servers (daphne/uvicorn).
application = get_asgi_application()
| [
"noreply@github.com"
] | TonikX.noreply@github.com |
82d3528e277282de287161d0a34771658023f07d | 78171e8cfbc44c547ee07d6e5a85e595fb7397a1 | /shortener/management/commands/refreshcodes.py | 2c1a193f0de96c390f8ef0d6d8cdbdab70edd8df | [] | no_license | jimpalowski/URLshortener | 37b41a3818679c1e0707f02f57147e87a651063c | f7b8450ce2e858dff1e6fec11f9fd5dfec3d3e26 | refs/heads/master | 2021-09-05T11:44:36.564719 | 2018-01-27T01:59:56 | 2018-01-27T01:59:56 | 118,685,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | from django.core.management.base import BaseCommand, CommandError
from shortener.models import KirrURL
class Command(BaseCommand):
    """``manage.py refreshcodes [--items N]``: regenerate KirrURL shortcodes."""
    help = 'Refreshes all KirrURL short codes'

    def add_arguments(self, parser):
        # Optional cap on how many objects receive a fresh shortcode.
        parser.add_argument('--items', type=int)

    def handle(self, *args, **options):
        item_limit = options['items']
        return KirrURL.objects.refresh_shortcodes(items=item_limit)
| [
"palowskijim@gmail.com"
] | palowskijim@gmail.com |
2ce3692b4eb26443c3c108c0237eac10b20f7cc4 | 9879e2692b74928b0e23e485846f49558cd4b5d2 | /actions/weather/xinzhi_api.py | ecf4fd123da7bdafbefec41514be9acab5da37d0 | [] | no_license | xfzhu2003/Chatbot_RASA | 153f55e09aa737c71b4ec68ad7dd90a4a6bcfa2b | 36ad035b0f0498ec743fbe140caad9e26bb3b8de | refs/heads/master | 2020-06-29T04:32:27.638560 | 2019-07-23T08:43:59 | 2019-07-23T08:43:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | # -*- coding: utf-8 -*-
'''
@Author : Xu
@Software: PyCharm
@File : xinzhi_api.py
@Time : 2019-07-23 14:06
@Desc :
'''
import os
import requests
import json
KEY = 'Sq6NfAburbGs9MGQc' # API key
UID = "" # 用户ID, TODO: 当前并没有使用这个值,签名验证方式将使用到这个值
LOCATION = 'beijing' # 所查询的位置,可以使用城市拼音、v3 ID、经纬度等
API = 'https://api.seniverse.com/v3/weather/now.json' # API URL,可替换为其他 URL
UNIT = 'c' # 单位
LANGUAGE = 'zh-Hans' # 查询结果的返回语言
# https://api.seniverse.com/v3/weather/now.json?key=your_key&location=beijing&language=zh-Hans&unit=c
def fetch_weather(location, start=0, days=15):
result = requests.get(API, params={
'key': KEY,
'location': location,
'language': LANGUAGE,
'unit': UNIT,
# 'start': start,
# 'days': days
}, timeout=2)
return result.json()
def get_weather_by_day(location, day=1):
result = fetch_weather(location)
normal_result = {
"location": result["results"][0]["location"],
"result": result["results"][0]["now"]
}
return normal_result
if __name__ == '__main__':
default_location = "合肥"
result = fetch_weather(default_location)
print(json.dumps(result, ensure_ascii=False))
default_location = "合肥"
result = get_weather_by_day(default_location)
print(json.dumps(result, ensure_ascii=False))
| [
"xushengquan@souche.com"
] | xushengquan@souche.com |
ee830c81728b7d10d0a4e70c932b946301afc79d | 3027a838581e2b0778bd6ae40f9a6c72017b3b0d | /cifar10_warm20k.py | b55b2c8dae79894fff67cb7d6084aafea5108696 | [] | no_license | arthur-qiu/robust | 2617adf3be8ea24592990e66b35123d02b0db045 | 3f40b45a740a1d3f2ba81a18e2cb510fe613d616 | refs/heads/master | 2020-12-04T12:08:52.665675 | 2020-02-26T10:37:34 | 2020-02-26T10:37:34 | 231,758,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,349 | py | # -*- coding: utf-8 -*-
import numpy as np
import os
import argparse
import time
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torchvision.transforms as trn
import torchvision.datasets as dset
import torch.nn.functional as F
from tqdm import tqdm
from models.allconv import AllConvNet
from models.wrn import WideResNet
import json
# import attacks
parser = argparse.ArgumentParser(description='Trains a CIFAR Classifier',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--dataset', type=str, default='cifar10', choices=['cifar10', 'cifar100'],
help='Choose between CIFAR-10, CIFAR-100.')
parser.add_argument('--model', '-m', type=str, default='wrn',
choices=['allconv', 'wrn'], help='Choose architecture.')
# Optimization options
parser.add_argument('--epochs', '-e', type=int, default=200, help='Number of epochs to train.')
parser.add_argument('--learning_rate', '-lr', type=float, default=0.1, help='The initial learning rate.')
parser.add_argument('--batch_size', '-b', type=int, default=128, help='Batch size.')
parser.add_argument('--test_bs', type=int, default=128)
parser.add_argument('--momentum', type=float, default=0.9, help='Momentum.')
parser.add_argument('--decay', '-d', type=float, default=0.0005, help='Weight decay (L2 penalty).')
parser.add_argument('--epoch_step', default='[60,120,160]', type=str,
help='json list with epochs to drop lr on')
parser.add_argument('--lr_decay_ratio', default=0.2, type=float)
# WRN Architecture
parser.add_argument('--layers', default=28, type=int, help='total number of layers')
parser.add_argument('--widen-factor', default=10, type=int, help='widen factor')
parser.add_argument('--droprate', default=0.0, type=float, help='dropout probability')
# Checkpoints
parser.add_argument('--save', '-s', type=str, default='./logs/cifar10_warm', help='Folder to save checkpoints.')
parser.add_argument('--load', '-l', type=str, default='', help='Checkpoint path to resume / test.')
parser.add_argument('--test', '-t', action='store_true', help='Test only flag.')
parser.add_argument('--dataroot', default='/mnt/lustrenew/yanglei/haonan/data', type=str)
# Acceleration
parser.add_argument('--ngpu', type=int, default=1, help='0 = CPU.')
parser.add_argument('--prefetch', type=int, default=4, help='Pre-fetching threads.')
args = parser.parse_args()
state = {k: v for k, v in args._get_kwargs()}
print(state)
torch.manual_seed(1)
np.random.seed(1)
# # mean and standard deviation of channels of CIFAR-10 images
# mean = [x / 255 for x in [125.3, 123.0, 113.9]]
# std = [x / 255 for x in [63.0, 62.1, 66.7]]
train_transform = trn.Compose([trn.RandomHorizontalFlip(), trn.RandomCrop(32, padding=4),
trn.ToTensor()])
test_transform = trn.Compose([trn.ToTensor()])
# if args.dataset == 'cifar10':
# train_data = dset.CIFAR10(args.dataroot, train=True, transform=train_transform)
# test_data = dset.CIFAR10(args.dataroot, train=False, transform=test_transform)
# num_classes = 10
# else:
# train_data = dset.CIFAR100(args.dataroot, train=True, transform=train_transform)
# test_data = dset.CIFAR100(args.dataroot, train=False, transform=test_transform)
# num_classes = 100
train_loader = torch.utils.data.DataLoader(
dset.ImageFolder(args.dataroot+'/cinic-10-cifar/train10k', transform=train_transform), batch_size=args.batch_size, shuffle=True,
num_workers=args.prefetch, pin_memory=torch.cuda.is_available())
test_loader = torch.utils.data.DataLoader(
dset.ImageFolder(args.dataroot+'/cinic-10-cifar/test', transform=test_transform), batch_size=args.test_bs, shuffle=False,
num_workers=args.prefetch, pin_memory=torch.cuda.is_available())
sup_loader = torch.utils.data.DataLoader(
dset.ImageFolder(args.dataroot+'/cinic-10-cifar/valid10k', transform=train_transform), batch_size=args.batch_size//2, shuffle=True,
num_workers=args.prefetch, pin_memory=torch.cuda.is_available())
# Create model
if args.model == 'allconv':
net = AllConvNet(1000)
else:
net = WideResNet(args.layers, 10, args.widen_factor, dropRate=args.droprate)
start_epoch = 0
if args.ngpu > 0:
net = torch.nn.DataParallel(net, device_ids=list(range(args.ngpu)))
# Restore model if desired
if args.load != '':
for i in range(300 - 1, -1, -1):
model_name = os.path.join(args.load, args.dataset + args.model +
'_epoch_' + str(i) + '.pt')
if os.path.isfile(model_name):
net.load_state_dict(torch.load(model_name))
print('Model restored! Epoch:', i)
start_epoch = i + 1
break
if start_epoch == 0:
assert False, "could not resume"
# net.module.fc = nn.Linear(640, num_classes)
#if args.ngpu > 1:
# net = torch.nn.DataParallel(net, device_ids=list(range(args.ngpu)))
if args.ngpu > 0:
net.cuda()
torch.cuda.manual_seed(1)
cudnn.benchmark = True # fire on all cylinders
optimizer = torch.optim.SGD(
net.parameters(), state['learning_rate'], momentum=state['momentum'],
weight_decay=state['decay'], nesterov=True)
# def cosine_annealing(step, total_steps, lr_max, lr_min):
# return lr_min + (lr_max - lr_min) * 0.5 * (
# 1 + np.cos(step / total_steps * np.pi))
#
#
# scheduler = torch.optim.lr_scheduler.LambdaLR(
# optimizer,
# lr_lambda=lambda step: cosine_annealing(
# step,
# args.epochs * len(train_loader),
# 1, # since lr_lambda computes multiplicative factor
# 1e-6 / args.learning_rate)) # originally 1e-6
#
#
# adversary = attacks.PGD(epsilon=8./255, num_steps=10, step_size=2./255).cuda()
# /////////////// Training ///////////////
def train():
net.train() # enter train mode
loss_avg = 0.0
iter_sup = iter(sup_loader)
for bx, by in train_loader:
bx, by = bx.cuda(), by.cuda()
# adv_bx = adversary(net, bx, by)
sup_bx, sup_by = iter_sup.next()
sup_bx, sup_by = sup_bx.cuda(), sup_by.cuda()
both_bx = torch.cat((bx, sup_bx), 0)
both_by = torch.cat((by, sup_by), 0)
# forward
logits = net(both_bx * 2 - 1)
# backward
# scheduler.step()
optimizer.zero_grad()
loss = F.cross_entropy(logits, both_by)
loss.backward()
optimizer.step()
# exponential moving average
loss_avg = loss_avg * 0.8 + float(loss) * 0.2
state['train_loss'] = loss_avg
# test function
def test():
net.eval()
loss_avg = 0.0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.cuda(), target.cuda()
# forward
output = net(data * 2 - 1)
loss = F.cross_entropy(output, target)
# accuracy
pred = output.data.max(1)[1]
correct += pred.eq(target.data).sum().item()
# test loss average
loss_avg += float(loss.data)
state['test_loss'] = loss_avg / len(test_loader)
state['test_accuracy'] = correct / len(test_loader.dataset)
if args.test:
test()
print(state)
exit()
# Make save directory
if not os.path.exists(args.save):
os.makedirs(args.save)
if not os.path.isdir(args.save):
raise Exception('%s is not a dir' % args.save)
with open(os.path.join(args.save, args.dataset + args.model +
'_training_results.csv'), 'w') as f:
f.write('epoch,time(s),train_loss,test_loss,test_error(%)\n')
print('Beginning Training\n')
epoch_step = json.loads(args.epoch_step)
# Main loop
for epoch in range(0, args.epochs):
state['epoch'] = epoch
if epoch in epoch_step:
lr = optimizer.param_groups[0]['lr'] * args.lr_decay_ratio
optimizer = torch.optim.SGD(
net.parameters(), lr, momentum=state['momentum'],
weight_decay=state['decay'], nesterov=True)
begin_epoch = time.time()
train()
test()
# Save model
if epoch % 10 == 9:
torch.save(net.state_dict(),
os.path.join(args.save, args.dataset + args.model +
'_epoch_' + str(epoch) + '.pt'))
# # Let us not waste space and delete the previous model
# prev_path = os.path.join(args.save, args.dataset + args.model +
# '_epoch_' + str(epoch - 1) + '.pt')
# if os.path.exists(prev_path): os.remove(prev_path)
# Show results
with open(os.path.join(args.save, args.dataset + args.model +
'_training_results.csv'), 'a') as f:
f.write('%03d,%05d,%0.6f,%0.5f,%0.2f\n' % (
(epoch + 1),
time.time() - begin_epoch,
state['train_loss'],
state['test_loss'],
100 - 100. * state['test_accuracy'],
))
# # print state with rounded decimals
# print({k: round(v, 4) if isinstance(v, float) else v for k, v in state.items()})
print('Epoch {0:3d} | Time {1:5d} | Train Loss {2:.4f} | Test Loss {3:.3f} | Test Error {4:.2f}'.format(
(epoch + 1),
int(time.time() - begin_epoch),
state['train_loss'],
state['test_loss'],
100 - 100. * state['test_accuracy'])
)
| [
"Arthur"
] | Arthur |
7cbe2c458a246403bcdf0ad805dd4894b73f157b | 99bb0330d8b3a4c4403dd92d4236a809c55ca084 | /home/migrations/0001_load_initial_data.py | ac20a343b665405d955a91bbc18399a518073928 | [] | no_license | crowdbotics-apps/william-bucks-28639 | 2f329c5186d4214bd0c8fca955d1e66299872cc3 | 153ad30fa5661fb9f64a5312865d724df5f919fa | refs/heads/master | 2023-06-16T08:55:22.588514 | 2021-07-09T02:21:40 | 2021-07-09T02:21:40 | 384,298,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | from django.db import migrations
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "william-bucks-28639.botics.co"
site_params = {
"name": "william bucks",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_site),
]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
ae4c966261d574ffb508b56d8535f538f0a77c37 | 56a77194fc0cd6087b0c2ca1fb6dc0de64b8a58a | /applications/CoSimulationApplication/tests/co_sim_io_py_exposure_aux_files/import_export_data.py | 5ff618370c8074b421e087465b78ac9d99e43fa5 | [
"BSD-3-Clause"
] | permissive | KratosMultiphysics/Kratos | 82b902a2266625b25f17239b42da958611a4b9c5 | 366949ec4e3651702edc6ac3061d2988f10dd271 | refs/heads/master | 2023-08-30T20:31:37.818693 | 2023-08-30T18:01:01 | 2023-08-30T18:01:01 | 81,815,495 | 994 | 285 | NOASSERTION | 2023-09-14T13:22:43 | 2017-02-13T10:58:24 | C++ | UTF-8 | Python | false | false | 1,187 | py | from KratosMultiphysics.CoSimulationApplication import CoSimIO
connection_settings = CoSimIO.Info()
connection_settings.SetString("my_name", "impExp")
connection_settings.SetString("connect_to", "ExpImp")
connection_settings.SetInt("echo_level", 0)
info = CoSimIO.Connect(connection_settings)
connection_name = info.GetString("connection_name")
if info.GetInt("connection_status") != CoSimIO.ConnectionStatus.Connected:
raise Exception("Connecting failed")
import_info = CoSimIO.Info()
import_info.SetString("connection_name", connection_name)
import_info.SetString("identifier", "data_exchange_1")
imported_values = CoSimIO.DoubleVector()
CoSimIO.ImportData(import_info, imported_values)
# print(imported_values)
export_info = CoSimIO.Info()
export_info.SetString("connection_name", connection_name)
export_info.SetString("identifier", "data_exchange_2")
CoSimIO.ExportData(export_info, imported_values)
disconnect_settings = CoSimIO.Info()
disconnect_settings.SetString("connection_name", connection_name)
info = CoSimIO.Disconnect(disconnect_settings)
if info.GetInt("connection_status") != CoSimIO.ConnectionStatus.Disconnected:
raise Exception("Disconnecting failed")
| [
"philipp.bucher@tum.de"
] | philipp.bucher@tum.de |
9eebe7b78d3380bc324b10a6f7197509f246cbef | b282fb0da1785a365cbf29bf958fb33287d5e97d | /src/mr/developer/extension.py | 35f0ceb9b172f7b4b175b83ed10acc518cf2d966 | [] | no_license | lelit/mr.developer | cd7cfb9d00f7f696409a0a6cf942ce0339a6f1ec | 9f955b4e0e9dfbf15228a1f03e2df0a6adf5187c | refs/heads/master | 2021-01-18T07:17:57.574740 | 2010-11-14T09:26:57 | 2010-11-14T09:26:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,252 | py | from mr.developer.common import memoize, WorkingCopies, Config, workingcopytypes
import logging
import os
import re
import sys
FAKE_PART_ID = '_mr.developer'
logger = logging.getLogger("mr.developer")
class Source(dict):
def exists(self):
return os.path.exists(self['path'])
class Extension(object):
def __init__(self, buildout):
self.buildout = buildout
self.buildout_dir = buildout['buildout']['directory']
self.executable = sys.argv[0]
@memoize
def get_config(self):
return Config(self.buildout_dir)
def get_workingcopies(self):
return WorkingCopies(self.get_sources())
@memoize
def get_sources(self):
sources_dir = self.buildout['buildout'].get('sources-dir', 'src')
if not os.path.isabs(sources_dir):
sources_dir = os.path.join(self.buildout_dir, sources_dir)
sources = {}
sources_section = self.buildout['buildout'].get('sources', 'sources')
section = self.buildout.get(sources_section, {})
for name in section:
info = section[name].split()
options = []
option_matcher = re.compile(r'[a-zA-Z0-9-]+=.*')
for index, item in reversed(list(enumerate(info))):
if option_matcher.match(item):
del info[index]
options.append(item)
options.reverse()
if len(info) < 2:
logger.error("The source definition of '%s' needs at least the repository kind and URL." % name)
sys.exit(1)
kind = info[0]
if kind not in workingcopytypes:
logger.error("Unknown repository type '%s' for source '%s'." % (kind, name))
sys.exit(1)
url = info[1]
for rewrite in self.get_config().rewrites:
if len(rewrite) == 2 and url.startswith(rewrite[0]):
url = "%s%s" % (rewrite[1], url[len(rewrite[0]):])
path = None
if len(info) > 2:
if '=' not in info[2]:
logger.warn("You should use 'path=%s' to set the path." % info[2])
path = os.path.join(info[2], name)
if not os.path.isabs(path):
path = os.path.join(self.buildout_dir, path)
options[:0] = info[3:]
else:
options[:0] = info[2:]
if path is None:
source = Source(kind=kind, name=name, url=url)
else:
source = Source(kind=kind, name=name, url=url, path=path)
for option in options:
key, value = option.split('=', 1)
if not key:
raise ValueError("Option with no name '%s'." % option)
if key in source:
raise ValueError("Key '%s' already in source info." % key)
if key == 'path':
value = os.path.join(value, name)
if not os.path.isabs(value):
value = os.path.join(self.buildout_dir, value)
if key == 'full-path':
if not os.path.isabs(value):
value = os.path.join(self.buildout_dir, value)
if key == 'egg':
if value.lower() in ('true', 'yes', 'on'):
value = True
elif value.lower() in ('false', 'no', 'off'):
value = False
source[key] = value
if 'path' not in source:
if 'full-path' in source:
source['path'] = source['full-path']
else:
source['path'] = os.path.join(sources_dir, name)
sources[name] = source
return sources
@memoize
def get_auto_checkout(self):
packages = set(self.get_sources().keys())
auto_checkout = set(
self.buildout['buildout'].get('auto-checkout', '').split()
)
if '*' in auto_checkout:
auto_checkout = packages
if not auto_checkout.issubset(packages):
diff = list(sorted(auto_checkout.difference(packages)))
if len(diff) > 1:
pkgs = "%s and '%s'" % (", ".join("'%s'" % x for x in diff[:-1]), diff[-1])
logger.error("The packages %s from auto-checkout have no source information." % pkgs)
else:
logger.error("The package '%s' from auto-checkout has no source information." % diff[0])
sys.exit(1)
return auto_checkout
def get_always_checkout(self):
return self.buildout['buildout'].get('always-checkout', False)
def get_develop_info(self):
auto_checkout = self.get_auto_checkout()
sources = self.get_sources()
develop = self.buildout['buildout'].get('develop', '')
versions_section = self.buildout['buildout'].get('versions')
versions = self.buildout.get(versions_section, {})
develeggs = {}
for path in develop.split():
head, tail = os.path.split(path)
develeggs[tail] = path
config_develop = self.get_config().develop
for name in sources:
source = sources[name]
if source.get('egg', True) and name not in develeggs:
path = sources[name]['path']
status = config_develop.get(name, name in auto_checkout)
if os.path.exists(path) and status:
if name in auto_checkout:
config_develop.setdefault(name, 'auto')
else:
if status == 'auto':
if name in config_develop:
del config_develop[name]
continue
config_develop.setdefault(name, True)
develeggs[name] = path
if name in versions:
del versions[name]
develop = []
for path in develeggs.itervalues():
if path.startswith(self.buildout_dir):
develop.append(path[len(self.buildout_dir)+1:])
else:
develop.append(path)
return develop, develeggs, versions
def get_always_accept_server_certificate(self):
always_accept_server_certificate = self.buildout['buildout'].get('always-accept-server-certificate', False)
if isinstance(always_accept_server_certificate, bool):
pass
elif always_accept_server_certificate.lower() in ('true', 'yes', 'on'):
always_accept_server_certificate = True
elif always_accept_server_certificate.lower() in ('false', 'no', 'off'):
always_accept_server_certificate = False
else:
logger.error("Unknown value '%s' for always-accept-server-certificate option." % always_accept_server_certificate)
sys.exit(1)
return always_accept_server_certificate
def add_fake_part(self):
if FAKE_PART_ID in self.buildout._raw:
logger.error("The buildout already has a '%s' section, this shouldn't happen" % FAKE_PART_ID)
sys.exit(1)
self.buildout._raw[FAKE_PART_ID] = dict(
recipe='zc.recipe.egg',
eggs='mr.developer',
)
# insert the fake part
parts = self.buildout['buildout']['parts'].split()
parts.insert(0, FAKE_PART_ID)
self.buildout['buildout']['parts'] = " ".join(parts)
def __call__(self):
config = self.get_config()
# store arguments when running from buildout
if os.path.split(self.executable)[1] in ('buildout', 'buildout-script.py'):
config.buildout_args = list(sys.argv)
auto_checkout = self.get_auto_checkout()
root_logger = logging.getLogger()
workingcopies = self.get_workingcopies()
always_checkout = self.get_always_checkout()
always_accept_server_certificate = self.get_always_accept_server_certificate()
(develop, develeggs, versions) = self.get_develop_info()
packages = set(auto_checkout)
sources = self.get_sources()
for pkg in develeggs:
if pkg in sources and sources[pkg].get('update'):
packages.add(pkg)
offline = self.buildout['buildout'].get('offline', '').lower() == 'true'
workingcopies.checkout(sorted(packages),
verbose=root_logger.level <= 10,
update=always_checkout,
always_accept_server_certificate=always_accept_server_certificate,
offline=offline)
# get updated info after checkout
(develop, develeggs, versions) = self.get_develop_info()
if versions:
import zc.buildout.easy_install
zc.buildout.easy_install.default_versions(dict(versions))
self.buildout['buildout']['develop'] = "\n".join(develop)
self.add_fake_part()
config.save()
def extension(buildout=None):
return Extension(buildout)()
| [
"florian.schulze@gmx.net"
] | florian.schulze@gmx.net |
d3aebf637485f84f291c10de24bda836d6fc353d | 141b42d9d72636c869ff2ce7a2a9f7b9b24f508b | /myvenv/Lib/site-packages/phonenumbers/data/region_SV.py | 980554e3e4441bc01fad20444684d67ad47c5caf | [
"BSD-3-Clause"
] | permissive | Fa67/saleor-shop | 105e1147e60396ddab6f006337436dcbf18e8fe1 | 76110349162c54c8bfcae61983bb59ba8fb0f778 | refs/heads/master | 2021-06-08T23:51:12.251457 | 2018-07-24T08:14:33 | 2018-07-24T08:14:33 | 168,561,915 | 1 | 0 | BSD-3-Clause | 2021-04-18T07:59:12 | 2019-01-31T17:00:39 | Python | UTF-8 | Python | false | false | 1,172 | py | """Auto-generated file, do not edit by hand. SV metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_SV = PhoneMetadata(id='SV', country_code=503, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[267]\\d{7}|[89]\\d{6}(?:\\d{4})?', possible_length=(7, 8, 11)),
fixed_line=PhoneNumberDesc(national_number_pattern='2[1-6]\\d{6}', example_number='21234567', possible_length=(8,)),
mobile=PhoneNumberDesc(national_number_pattern='[67]\\d{7}', example_number='70123456', possible_length=(8,)),
toll_free=PhoneNumberDesc(national_number_pattern='800\\d{4}(?:\\d{4})?', example_number='8001234', possible_length=(7, 11)),
premium_rate=PhoneNumberDesc(national_number_pattern='900\\d{4}(?:\\d{4})?', example_number='9001234', possible_length=(7, 11)),
number_format=[NumberFormat(pattern='(\\d{4})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['[267]']),
NumberFormat(pattern='(\\d{3})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['[89]']),
NumberFormat(pattern='(\\d{3})(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[89]'])])
| [
"gruzdevasch@gmail.com"
] | gruzdevasch@gmail.com |
6f7fc741072e85fe7ff40725f2a06199a7566dad | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2429/60755/316888.py | 9b05f45702d6ac80a070303aa58fb869df6cda0a | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | a = input()
b = input()
c = input()
d = input()
e = input()
if c =="2 3 10 6 4 8 1":
print(8)
print(2)
elif c =="2 5 9 6 4 8 6" and e == "7 9 5 6 3 2" or c == "2 3 9 6 4 8 1":
print(7)
print(2)
elif c=="2 5 9 6 4 8 6":
print(7)
print(1)
elif c=="2 5 9 6 4 8 1":
print(7)
print(2)
else:
print(c) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
edf753a3f9d2409c50b3573ba88eaab613dc7cd7 | 22e6dcbebad329b32579e531af8b33bc657088c9 | /AtCoder/ABC059/ProbD.py | 1fb2c332eaf381ba53e463fa90744fdf354c14c5 | [] | no_license | daigo0927/ProgrammingContest | a63b74bb79ece46181b03dc359bf665604b11ea5 | f54aa8f485ebfd30d5ee84fd74fa9e0661c2a7df | refs/heads/master | 2021-06-21T09:26:23.699668 | 2019-06-22T18:51:32 | 2019-06-22T18:51:32 | 132,655,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | x, y = list(map(int, input().split()))
if abs(x-y) > 1:
print('Alice')
else:
print('Brown')
| [
"Daigo@Daigo-no-MacBook-Air.local"
] | Daigo@Daigo-no-MacBook-Air.local |
6d15a624bd2e58101f01b25a91efb3a7eed586f3 | 2e6cc958f1c95a7a698aaf41f8a0454b5d67e933 | /project/settings_example.py | d1a5a507c39e58906b883e7837a4e068d6f7af67 | [] | no_license | greenteamer/sp | aac53970fe77b49d9cac14c90ec3b57cce8585b4 | 93b400e3d92c5e3933a2225c17033f244da65859 | refs/heads/master | 2021-01-01T18:08:00.818208 | 2015-09-03T07:18:42 | 2015-09-03T07:18:42 | 30,750,314 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,711 | py | import os
# File for storing custom settings
CURRPATH = os.path.abspath('.')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'sp',
'USER': 'root',
'PASSWORD': 'balabas',
'HOST': '',
'PORT': '',
'TEST_CHARSET': 'UTF8',
}
}
# DEBUG_TOOLBAR_PATCH_SETTINGS = False
ADMIN_EMAIL = 'greenteamer@bk.ru'
ACCOUNT_ACTIVATION_DAYS = 2
CKEDITOR_UPLOAD_PATH = "uploads/"
CKEDITOR_JQUERY_URL = '//ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js'
CKEDITOR_CONFIGS = {
'default': {
'toolbar': [
['Source', '-', 'Save', 'NewPage', 'DocProps',
'Preview', 'Print', '-', 'Templates'],
['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord',
'-', 'Undo', 'Redo'],
['Find', 'Replace', '-', 'SelectAll', '-', 'SpellChecker'],
['Image', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar'],
['Bold', 'Italic', 'Underline', 'Strike', 'Subscript',
'Superscript', '-', 'RemoveFormat'],
['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-',
'Blockquote', '-', 'JustifyLeft', 'JustifyCenter',
'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl'],
['Link', 'Unlink'],
['Styles', 'Format', 'Font', 'FontSize'],
['TextColor', 'BGColor'],
['Maximize', 'ShowBlocks', 'CreateDiv'],
],
'width': 100%,
'height': 500,
},
'interface': {
'toolbar': [
['Source', '-', 'Save', 'NewPage', 'DocProps',
'Preview', 'Print', '-', 'Templates'],
['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord',
'-', 'Undo', 'Redo'],
['Find', 'Replace', '-', 'SelectAll', '-', 'SpellChecker'],
['Image', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar'],
['Bold', 'Italic', 'Underline', 'Strike', 'Subscript',
'Superscript', '-', 'RemoveFormat'],
['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-',
'Blockquote', '-', 'JustifyLeft', 'JustifyCenter',
'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl'],
['Link', 'Unlink'],
['Styles', 'Format', 'Font', 'FontSize'],
['TextColor', 'BGColor'],
['Maximize', 'ShowBlocks', 'CreateDiv'],
],
'width': 775,
'height': 500,
},
}
AUTH_USER_EMAIL_UNIQUE = True
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'teamer777@gmail.com'
EMAIL_HOST_PASSWORD = 'greenteamer1986'
EMAIL_PORT = 587
| [
"greenteamer@bk.ru"
] | greenteamer@bk.ru |
e25be79a841a2898e6a9ed529697f15b982b37a6 | ecd630f54fefa0a8a4937ac5c6724f9a3bb215c3 | /projeto/emprestimo/migrations/0041_emprestimo_taxa.py | 006336203fdc426f2b812eb430c7525507c2e35f | [] | no_license | israelwerther/Esctop_Israel_Estoque | 49968751464a38c473298ed876da7641efedf8de | d6ab3e502f2a97a0d3036351e59c2faa267c0efd | refs/heads/master | 2023-01-07T20:21:38.381593 | 2020-11-12T17:35:14 | 2020-11-12T17:35:14 | 258,642,721 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | # Generated by Django 3.0.7 on 2020-11-10 18:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('emprestimo', '0040_taxa_taxa_juros_a_m2'),
]
operations = [
migrations.AddField(
model_name='emprestimo',
name='taxa',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='emprestimo.Taxa'),
),
]
| [
"israelwerther48@outlook.com"
] | israelwerther48@outlook.com |
a577c88596007bb1886c9537651e16277de23926 | 8efe56ee34c455a6b1336897f6d457acbc9c10f9 | /src/metarl/tf/algos/rl2trpo.py | 1cd270c1df4c36602a1c34da1644196bccbf7cf9 | [
"MIT"
] | permissive | neurips2020submission11699/metarl | ab18d11e708bf569d76cb2fab2bcce089badd111 | ae4825d21478fa1fd0aa6b116941ea40caa152a5 | refs/heads/master | 2022-10-15T22:03:09.948673 | 2020-06-11T19:22:55 | 2020-06-11T19:30:58 | 268,410,657 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,244 | py | """Trust Region Policy Optimization for RL2."""
from metarl.tf.algos import RL2
from metarl.tf.optimizers import ConjugateGradientOptimizer
from metarl.tf.optimizers import PenaltyLbfgsOptimizer
class RL2TRPO(RL2):
"""Trust Region Policy Optimization specific for RL^2.
See https://arxiv.org/abs/1502.05477.
Args:
rl2_max_path_length (int): Maximum length for trajectories with respect
to RL^2. Notice that it is different from the maximum path length
for the inner algorithm.
meta_batch_size (int): Meta batch size.
task_sampler (metarl.experiment.TaskSampler): Task sampler.
env_spec (metarl.envs.EnvSpec): Environment specification.
policy (metarl.tf.policies.StochasticPolicy): Policy.
baseline (metarl.tf.baselines.Baseline): The baseline.
scope (str): Scope for identifying the algorithm.
Must be specified if running multiple algorithms
simultaneously, each using different environments
and policies.
max_path_length (int): Maximum length of a single rollout.
discount (float): Discount.
gae_lambda (float): Lambda used for generalized advantage
estimation.
center_adv (bool): Whether to rescale the advantages
so that they have mean 0 and standard deviation 1.
positive_adv (bool): Whether to shift the advantages
so that they are always positive. When used in
conjunction with center_adv the advantages will be
standardized before shifting.
fixed_horizon (bool): Whether to fix horizon.
lr_clip_range (float): The limit on the likelihood ratio between
policies, as in PPO.
max_kl_step (float): The maximum KL divergence between old and new
policies, as in TRPO.
optimizer (object): The optimizer of the algorithm. Should be the
optimizers in metarl.tf.optimizers.
optimizer_args (dict): The arguments of the optimizer.
policy_ent_coeff (float): The coefficient of the policy entropy.
Setting it to zero would mean no entropy regularization.
use_softplus_entropy (bool): Whether to estimate the softmax
distribution of the entropy to prevent the entropy from being
negative.
use_neg_logli_entropy (bool): Whether to estimate the entropy as the
negative log likelihood of the action.
stop_entropy_gradient (bool): Whether to stop the entropy gradient.
kl_constraint (str): KL constraint, either 'hard' or 'soft'.
entropy_method (str): A string from: 'max', 'regularized',
'no_entropy'. The type of entropy method to use. 'max' adds the
dense entropy to the reward for each time step. 'regularized' adds
the mean entropy to the surrogate objective. See
https://arxiv.org/abs/1805.00909 for more details.
flatten_input (bool): Whether to flatten input along the observation
dimension. If True, for example, an observation with shape (2, 4)
will be flattened to 8.
meta_evaluator (metarl.experiment.MetaEvaluator): Evaluator for meta-RL
algorithms.
n_epochs_per_eval (int): If meta_evaluator is passed, meta-evaluation
will be performed every `n_epochs_per_eval` epochs.
name (str): The name of the algorithm.
"""
def __init__(self,
rl2_max_path_length,
meta_batch_size,
task_sampler,
env_spec,
policy,
baseline,
scope=None,
max_path_length=500,
discount=0.99,
gae_lambda=0.98,
center_adv=True,
positive_adv=False,
fixed_horizon=False,
lr_clip_range=0.01,
max_kl_step=0.01,
optimizer=None,
optimizer_args=None,
policy_ent_coeff=0.0,
use_softplus_entropy=False,
use_neg_logli_entropy=False,
stop_entropy_gradient=False,
kl_constraint='hard',
entropy_method='no_entropy',
flatten_input=True,
meta_evaluator=None,
n_epochs_per_eval=10,
name='TRPO'):
if not optimizer:
if kl_constraint == 'hard':
optimizer = ConjugateGradientOptimizer
elif kl_constraint == 'soft':
optimizer = PenaltyLbfgsOptimizer
else:
raise ValueError('Invalid kl_constraint')
if optimizer_args is None:
optimizer_args = dict()
super().__init__(rl2_max_path_length=rl2_max_path_length,
meta_batch_size=meta_batch_size,
task_sampler=task_sampler,
env_spec=env_spec,
policy=policy,
baseline=baseline,
scope=scope,
max_path_length=max_path_length,
discount=discount,
gae_lambda=gae_lambda,
center_adv=center_adv,
positive_adv=positive_adv,
fixed_horizon=fixed_horizon,
pg_loss='surrogate',
lr_clip_range=lr_clip_range,
max_kl_step=max_kl_step,
optimizer=optimizer,
optimizer_args=optimizer_args,
policy_ent_coeff=policy_ent_coeff,
use_softplus_entropy=use_softplus_entropy,
use_neg_logli_entropy=use_neg_logli_entropy,
stop_entropy_gradient=stop_entropy_gradient,
entropy_method=entropy_method,
flatten_input=flatten_input,
meta_evaluator=meta_evaluator,
n_epochs_per_eval=n_epochs_per_eval,
name=name)
| [
"neurips2020submission11699@gmail.com"
] | neurips2020submission11699@gmail.com |
a06d447e530df7be2ae057e8a8a13cf2100786c7 | 06a2dab18197a13fc3371debd29b476ae99cb01c | /Monotop/python/Selection.py | 8868a7dc5175ea6bffe6870ec897aa76ca92447c | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | PandaPhysics/PandaAnalysis | 397a031f9e8d399be1814ab04dd525d69b41f060 | 3167d106d41dfce58219c3e07d30e201ee823b55 | refs/heads/master | 2021-06-18T13:52:57.650900 | 2019-04-08T17:35:29 | 2019-04-08T17:35:29 | 168,376,672 | 0 | 0 | NOASSERTION | 2019-04-08T17:33:55 | 2019-01-30T16:34:09 | C++ | UTF-8 | Python | false | false | 4,824 | py | from PandaCore.Tools.Misc import *
from re import sub
# Trigger-bit selections: the `trigger` branch is a bitmask; each entry
# tests one bit (1 = MET path, 2 = single-electron, 4 = single-photon).
triggers = {
	'met':'(trigger&1)!=0',
	'ele':'(trigger&2)!=0',
	'pho':'(trigger&4)!=0',
}
metFilter='metFilter==1'
# Top-tag scale factor expression: 1.007 for gen-matched fat jets, 1.02 otherwise.
topTagSF = '%f*(fj1IsMatched==1)+%f*(fj1IsMatched==0)'%(1.007,1.02)
# AK4 b-tag SF keyed on the number of b-tagged isolated jets (>1 -> unity).
ak4bTagSF = 'sf_btag0*(isojetNBtags==0)+sf_btag1*(isojetNBtags==1)+1*(isojetNBtags>1)'
photonSF = '0.93'
# Common preselection: exactly one boosted, top-tagged (tau32 + mSD window) fat jet.
presel = 'nFatJet==1 && fj1Pt>250 && TMath::Abs(fj1Eta)<2.4 && fj1Tau32<0.61 && 110<fj1MSD && fj1MSD<210'
cuts = {
	# analysis regions
	'signal' : tAND(metFilter,tAND(presel,'pfmet>175 && puppimet>250 && dphipuppimet>1.1 && (nLooseMuon+nLooseElectron+nLoosePhoton+nTau)==0 && fj1MaxCSV>0.46 && isojetNBtags==0')),
#	'signal_nomf' : tAND(presel,'met>175 && puppimet>250 && dphipuppimet>1.1 && (nLooseMuon+nLooseElectron+nLoosePhoton+nTau)==0 && fj1MaxCSV>0.46 && isojetNBtags==0 && fj1isTight==1 && TMath::Abs(met-calomet)/puppimet<0.5'),
	'singlemuontop' : tAND(metFilter,tAND(presel,'UWmag>250 && (nLooseElectron+nLoosePhoton+nTau)==0 && nLooseMuon==1 && looseLep1IsTight==1 && fj1MaxCSV>0.46 && isojetNBtags==1')),
	'singleelectrontop' : tAND(metFilter,tAND(presel,'UWmag>250 && (nLooseMuon+nLoosePhoton+nTau)==0 && nLooseElectron==1 && looseLep1IsTight==1 && fj1MaxCSV>0.46 && isojetNBtags==1 && puppimet>40')),
	'singlemuonw' : tAND(metFilter,tAND(presel,'UWmag>250 && (nLooseElectron+nLoosePhoton+nTau)==0 && nLooseMuon==1 && looseLep1IsTight==1 && fj1MaxCSV<0.46 && isojetNBtags==0')),
	'singleelectronw' : tAND(metFilter,tAND(presel,'UWmag>250 && (nLooseMuon+nLoosePhoton+nTau)==0 && nLooseElectron==1 && looseLep1IsTight==1 && fj1MaxCSV<0.46 && isojetNBtags==0 && puppimet>40')),
	'dimuon' : tAND(metFilter,tAND(presel,'UZmag>250 && (nLooseElectron+nLoosePhoton+nTau)==0 && nLooseMuon==2 && looseLep1IsTight==1')),
	'dielectron' : tAND(metFilter,tAND(presel,'UZmag>250 && (nLooseMuon+nLoosePhoton+nTau)==0 && nLooseElectron==2 && looseLep1IsTight==1')),
	'photon' : tAND(metFilter,tAND(presel,'UAmag>250 && (nLooseMuon+nLooseElectron+nTau)==0 && nLoosePhoton==1 && loosePho1IsTight==1')),
}
# Regions used to measure the top-tag efficiency (tag) and mistag rate (mistag):
# single-lepton-top selection without the tagging cuts, and the photon CR.
tag_presel = removeCut(removeCut(tOR(cuts['singlemuontop'],cuts['singleelectrontop']),'fj1Tau32'),'fj1MSD')
mistag_presel = tAND(removeCut(removeCut(cuts['photon'],'fj1Tau32'),'fj1MSD'),'fj1MSD>40')
tag = 'fj1Tau32<0.61 && 110<fj1MSD && fj1MSD<210'
tt_cuts = {
	'tag' : tag_presel,
	'tag_pass' : tAND(tag,tag_presel),
	'tag_fail' : tAND(tNOT(tag),tag_presel),
	'mistag' : mistag_presel,
	'mistag_pass' : tAND(tag,mistag_presel),
	'mistag_fail' : tAND(tNOT(tag),mistag_presel),
}
'''
	'signal' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_sjbtag1*sf_lepTrack',topTagSF),ak4bTagSF),
	'top' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_sjbtag1*sf_lepTrack',topTagSF),ak4bTagSF),
	'w' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_sjbtag0*sf_lepTrack',topTagSF),ak4bTagSF),
	'notag' : tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_lepTrack',topTagSF),
	'signal_sf' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_sjbtag1',topTagSF),ak4bTagSF),
	'top_sf' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_sjbtag1',topTagSF),ak4bTagSF),
	'w_sf' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkZ*sf_qcdZ*sf_ewkW*sf_qcdW*sf_ewkA*sf_qcdA*sf_tt*sf_sjbtag0',topTagSF),ak4bTagSF),
'''
# NOTE(review): the '%f' prefix in these weight strings is never filled in by a
# %-format here; presumably tTIMES / downstream tooling substitutes the overall
# normalization — confirm against PandaCore.Tools.Misc.
weights = {
	# analysis weights
	'signal' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_ewkV*sf_qcdV*sf_tt*sf_sjbtag1',topTagSF),ak4bTagSF),
	'top' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_lepTrack*sf_ewkV*sf_qcdV*sf_tt*sf_sjbtag1',topTagSF),ak4bTagSF),
	'w' : tTIMES(tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_lepTrack*sf_ewkV*sf_qcdV*sf_tt*sf_sjbtag0',topTagSF),ak4bTagSF),
	'notag' : tTIMES('%f*normalizedWeight*sf_pu*sf_lep*sf_lepTrack*sf_ewkV*sf_qcdV*sf_tt',topTagSF),
}
# Control regions reuse the corresponding base weights.
for x in ['singlemuontop','singleelectrontop']:
	weights[x] = weights['top']
for x in ['singlemuonw','singleelectronw']:
	weights[x] = weights['w']
for x in ['dimuon','dielectron']:
	weights[x] = weights['notag']
for x in ['photon']:
	weights[x] = tTIMES(photonSF,weights['notag'])
# Systematic variations: swap the central b-tag SF names for their shifted
# variants (B/M = bottom/mistag, Up/Down) via regex substitution.
for r in ['signal','top','w','singlemuontop','singleelectrontop','singlemuonw','singleelectronw']:
	for shift in ['BUp','BDown','MUp','MDown']:
		for cent in ['sf_btag','sf_sjbtag']:
			weights[r+'_'+cent+shift] = sub(cent+'0',cent+'0'+shift,sub(cent+'1',cent+'1'+shift,weights[r]))
| [
"sidn@mit.edu"
] | sidn@mit.edu |
3c945e482069d67f3eaa4d7602f1236c2061ba23 | 11aac6edab131293027add959b697127bf3042a4 | /smallestEqual.py | 6cd1e74db75a34d00ef3d959ad69280557da3d9b | [] | no_license | jdanray/leetcode | a76b3436002b31865967b757b73c85992636383b | fd736af3e79899b86dac89d4d925d5bd985944ad | refs/heads/master | 2023-08-15T01:20:05.110565 | 2023-08-14T00:25:58 | 2023-08-14T00:25:58 | 148,686,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | # https://leetcode.com/problems/smallest-index-with-equal-value/
class Solution(object):
    def smallestEqual(self, nums):
        """Return the smallest index i with i % 10 == nums[i], or -1 if none."""
        return next((idx for idx, value in enumerate(nums) if idx % 10 == value), -1)
| [
"jdanray@users.noreply.github.com"
] | jdanray@users.noreply.github.com |
1a2655fce7a52f99d61f3d7ec9774ac9aaf13d41 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /070_oop/001_classes/_exercises/_templates/Python_OOP_Object_Oriented_Programming/Section 6/Encapsulation-Abstraction-Code/1 - Car - Public, Protected, Private.py | ac6327da0975cbcd8e5ce6f0771d3bc21e0fccb6 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 206 | py | # c_ Car
#
# ___ - ____ model year id_num engine_serial_num
# ____.? ?
# ____.? ?
# ____._? ?
# ____.__? ?
#
# my_car = ?("Escape", 2006, "44542", "201109048934242")
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
0123805e9d5a28bd98f12d4102161b84729e5a9b | 8802949d027efd1d31113e84adf8ffd28ec61ce8 | /parObj2.py | 59edfffd0d8a35d16b5fa5fe8cec539e87baafc4 | [] | no_license | AraiKensuke/EnDe | 46c006c12de0d498b6ec6e73e60c17384061c404 | 983268c82686ce18cc76c50837db9c0fc654f8e2 | refs/heads/master | 2020-04-15T15:50:23.494375 | 2017-09-17T01:48:17 | 2017-09-17T01:48:17 | 43,001,381 | 1 | 0 | null | 2017-07-17T21:20:46 | 2015-09-23T13:06:40 | Python | UTF-8 | Python | false | false | 945 | py | import numpy as np
import multiprocessing as mp
class Tester:
    # Small stateful object used to exercise the multiprocessing helpers
    # below (Python 2 module: note the print statements).
    def __init__(self, tnum=-1):
        self.num = tnum
        self.num2 = 10*tnum
    def modme(self, nn, val2=None):
        # Increment `num` by nn; optionally overwrite `num2` when val2 given.
        self.num += nn
        if val2 is not None:
            print "Got non-None value for val2"
            self.num2 = val2
        #return self
        return (nn+5)
def modhelp(test, name, *args, **kwargs):
    # Invoke method `name` on `test` with the given arguments and return the
    # (mutated) object so a worker process can ship it back via apply_async.
    callme = getattr(test, name)
    callme(*args, **kwargs)#, kwargs)
    return test
def modhelpSP(test, nn, name, **kwargs):
    # Single-positional-argument variant of modhelp; does not return `test`.
    callme = getattr(test, name)
    callme(nn, **kwargs)#, kwargs)
# Demo: run Tester.modme on worker processes via a multiprocessing pool.
N = 2
p = mp.Pool(processes=N)
# BUG FIX: this module imports numpy as `np`, but the original script used
# the undefined name `_N` here, which raised a NameError at runtime.
tts = np.empty(N, dtype=object)
for nt in xrange(N):
    tts[nt] = Tester(tnum=nt)
    #modhelpSP(tts[nt], nt+5, "modme", val2=(nt*5))
results = np.empty(N, dtype=object)
for nt in xrange(N):
    kwds = {"val2" : (nt*5+1)}
    results[nt] = p.apply_async(modhelp, args=(tts[nt], "modme", nt+5, ), kwds=kwds)
# Collect the async results and release the pool (the original leaked the
# pool and never waited for the workers to finish).
for nt in xrange(N):
    results[nt] = results[nt].get()
p.close()
p.join()
| [
"kensuke.y.arai@gmail.com"
] | kensuke.y.arai@gmail.com |
6a535746cb62942f89e980cc16df640f99038714 | 7f97814acd76ca96aee877fd70d401380f848fae | /6_training/count_nodes.py | 1b1f2165cfae52f23ecde1341a68c156a04aad7c | [] | no_license | tberhanu/all_trainings | 80cc4948868928af3da16cc3c5b8a9ab18377d08 | e4e83d7c71a72e64c6e55096a609cec9091b78fa | refs/heads/master | 2020-04-13T12:12:21.272316 | 2019-03-16T04:22:20 | 2019-03-16T04:22:20 | 163,195,802 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | """ 222. Count Complete Tree Nodes
Given a complete binary tree, count the number of nodes.
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def countNodes(self, root):
        """Count the nodes of a binary tree with an explicit stack.

        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        total = 0
        pending = [root]
        while pending:
            node = pending.pop()
            total += 1
            if node.left is not None:
                pending.append(node.left)
            if node.right is not None:
                pending.append(node.right)
        return total
| [
"tberhanu@berkeley.edu"
] | tberhanu@berkeley.edu |
4d6ca332d57ff372264ef5ebba8f9d985ca34822 | b5605220c4c890c76ffc9d907fb55d471c0f7aec | /iot/device_api.py | ccbbec3cd529d6c17fd6a73fa4b536d210b12991 | [
"MIT"
] | permissive | dragonxu/frappe_iot | c97aa057ceb674d77f561d40c16f7d27574beea7 | 6ec524498cccaf2f49f7264a3b284a8956bd430c | refs/heads/master | 2023-03-27T02:13:09.099309 | 2021-03-27T10:03:58 | 2021-03-27T10:03:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,062 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Dirk Chang and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import json
import redis
import uuid
from frappe import throw, msgprint, _
from iot.iot.doctype.iot_device_activity.iot_device_activity import add_device_action_log
from iot.iot.doctype.iot_hdb_settings.iot_hdb_settings import IOTHDBSettings
### TODO: Activity Log
def valid_auth_code():
	'''Authenticate the current request from an authorization-code header.

	A logged-in (non-Guest) session is accepted as-is. Otherwise the user is
	resolved either from the HDB on-behalf code or from an IOT User Api code,
	and the session user is switched to that user. Throws when no valid code
	is supplied.
	'''
	if frappe.session.user != "Guest":
		return
	auth_code = frappe.get_request_header("HDB-AuthorizationCode")
	user = None
	if auth_code:
		frappe.logger(__name__).debug(_("HDB-AuthorizationCode as {0}").format(auth_code))
		user = IOTHDBSettings.get_on_behalf(auth_code)
	else:
		auth_code = frappe.get_request_header("AuthorizationCode")
		if auth_code:
			user = frappe.get_value("IOT User Api", {"authorization_code": auth_code}, "user")
		else:
			throw(_("Authorization Code/Login is required!"))
	if not user:
		throw(_("Authorization Code is incorrect!"))
	# form dict keeping
	# set_user resets frappe.local; preserve the parsed form data across it.
	form_dict = frappe.local.form_dict
	frappe.set_user(user)
	frappe.local.form_dict = form_dict
def get_post_json_data():
	'''Validate that the current request is a JSON-typed POST and return its parsed body.'''
	method = frappe.request.method
	if method != "POST":
		throw(_("Request Method Must be POST!"))
	content_type = frappe.get_request_header("Content-Type")
	if "json" not in content_type.lower():
		throw(_("Incorrect HTTP Content-Type found {0}").format(content_type))
	raw_body = frappe.request.get_data()
	if not raw_body:
		throw(_("JSON Data not found!"))
	return json.loads(raw_body.decode('utf-8'))
@frappe.whitelist(allow_guest=True)
def get_action_result(id):
	'''
	Get action result, result example:
	{
		"message": "Done",
		"timestamp_str": "Wed Aug 29 09:39:08 2018",
		"result": true,
		"timestamp": 1535535548.28,
		"device": "000C296CBED3",
		"id": "605063B4-AB6F-11E8-8C76-00163E06DD4A"
	}
	:return:
	'''
	valid_auth_code()
	# Action results are published by the device gateway into redis DB 7.
	client = redis.Redis.from_url(IOTHDBSettings.get_redis_server() + "/7", decode_responses=True)
	# Renamed from `str`, which shadowed the builtin type.
	result_json = client.get(id)
	if result_json:
		return json.loads(result_json)
def valid_app_permission(device, data):
	'''Check that the device owner may access the application named in `data`.

	Throws frappe.PermissionError when access is denied.
	'''
	# print("Valid Application Permission")
	owner_type = device.owner_type
	owner_id = device.owner_id
	app = data.get("name")
	ret = False
	if owner_type == 'User':
		# Imported lazily to avoid a module-level dependency cycle with app_center.
		from app_center.api import user_access
		ret = user_access(app, owner_id)
	else:
		from app_center.api import company_access
		ret = company_access(app, owner_id)
	if not ret:
		throw(_("Not permitted"), frappe.PermissionError)
# Per-channel/per-action validation hooks run by send_action before publishing;
# only app install/upgrade require an application-access check.
action_validation = {
	"app": {
		"install": valid_app_permission,
		"upgrade": valid_app_permission
	}
}
@frappe.whitelist(allow_guest=True)
def send_action(channel, action=None, id=None, device=None, data=None):
	'''Publish a device action onto the redis channel "device_<channel>".

	Verifies auth and write permission on the IOT Device document, runs any
	channel/action validation hook, logs the action, and returns the action id
	(generated from uuid1 when not supplied) for use with get_action_result.
	'''
	valid_auth_code()
	if data is None:
		data = get_post_json_data()
	if id is None:
		id = str(uuid.uuid1()).upper()
	if not device:
		throw(_("Device SN does not exits!"))
	doc = frappe.get_doc("IOT Device", device)
	if not doc.has_permission("write"):
		# Persist the failure log before raising (throw rolls back otherwise).
		add_device_action_log(doc, channel, action, id, data, "Failed", "Permission error")
		frappe.db.commit()
		throw(_("Not permitted"), frappe.PermissionError)
	valids = action_validation.get(channel)
	if valids:
		valid_func = valids.get(action)
		if valid_func:
			valid_func(doc, data)
	client = redis.Redis.from_url(IOTHDBSettings.get_redis_server(), decode_responses=True)
	args = {
		"id": id,
		"device": device,
		"data": data,
	}
	if action:
		args.update({
			"action": action,
		})
	# publish() returns the number of subscribers that received the message.
	r = client.publish("device_" + channel, json.dumps(args))
	if r <= 0:
		add_device_action_log(doc, channel, action, id, data, "Failed", "Redis error")
		frappe.db.commit()
		throw(_("Redis message published, but no listener!"))
	add_device_action_log(doc, channel, action, id, data)
	return id
# --- Application management wrappers: each parses the POSTed JSON body and
# --- forwards it to send_action on the "app" channel. ---
@frappe.whitelist(allow_guest=True)
def app_list():
	'''List installed application instances on the device.'''
	data = get_post_json_data()
	return send_action("app", action="list", id=data.get("id"), device=data.get("device"), data="1")
@frappe.whitelist(allow_guest=True)
def app_install():
	'''Install an application on the device (validated by valid_app_permission).'''
	data = get_post_json_data()
	return send_action("app", action="install", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_uninstall():
	'''Uninstall an application instance from the device.'''
	data = get_post_json_data()
	return send_action("app", action="uninstall", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_upgrade():
	'''Upgrade an application on the device (validated by valid_app_permission).'''
	data = get_post_json_data()
	return send_action("app", action="upgrade", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_conf():
	'''Push configuration to an application instance on the device.'''
	data = get_post_json_data()
	return send_action("app", action="conf", id=data.get("id"), device=data.get("device"), data=data.get("data"))
# --- Application lifecycle / diagnostics wrappers (all forward to the "app"
# --- channel via send_action; payload examples are given per function). ---
@frappe.whitelist(allow_guest=True)
def app_start():
	'''
	Start application, data example: {"inst": "bms", "conf": "{}"} conf is optional
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="start", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_stop():
	'''
	Stop application, data example: {"inst": "bms", "reason": "debug stop"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="stop", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_restart():
	'''
	Restart application, data example: {"inst": "bms", "reason": "debug restart"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="restart", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_query_log():
	'''
	Query application log, data example: {"inst": "bms"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="query_log", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_query_comm():
	'''
	Query application communication stream, data example: {"inst": "bms"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="query_comm", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_upload_comm():
	'''
	Upload application communication stream, data example: {"inst": "bms", "sec": 60}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="upload_comm", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_option():
	'''
	Set application option value, data example: {"inst": "bms", "option": "auto", "value": 1}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="option", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def app_rename():
	'''
	Rename application instance name, data example: {"inst": "bms", "new_name": "bms2"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("app", action="rename", id=data.get("id"), device=data.get("device"), data=data.get("data"))
# --- System-level wrappers (all forward to the "sys" channel via send_action). ---
@frappe.whitelist(allow_guest=True)
def sys_upgrade():
	'''
	Upgrade IOT System, data example: { "no_ack": 1, "version": 601, "skynet": { "version": 1666} }
	"skynet" is optional, and do not set it if you do not want to upgrade skynet
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="upgrade", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_upgrade_ack():
	'''
	IOT System upgrade ack. you need to call this when no_ack is not set in sys_upgrade(), data example: {}
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="upgrade/ack", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_ext_list():
	'''
	List System installed extensions, data example: {}
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="ext/list", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_ext_upgrade():
	'''
	Upgrade IOT System Extension, data example: {"name": "frpc", "version": "latest"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="ext/upgrade", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_data():
	'''
	Enable/Disable data upload, enable if data is 1
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/data", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_data_one_short():
	'''
	Enable/Disable data upload for one short, data is the duration for data uploading.
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/data_one_short", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_log():
	'''
	Enable log upload for specified time, data is the how long will log be uploaded
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/log", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_comm():
	'''
	Enable communication-stream upload for specified time, data is how long it will be uploaded
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/comm", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_stat():
	'''
	Enable/Disable statistics upload, enable if data is 1
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/stat", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_event():
	'''
	Enable/Disable event upload, disable if data is minus number or it is the minimum event level
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/event", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_enable_beta():
	'''
	Enable/Disable the beta release channel, enable if data is 1
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="enable/beta", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_batch_script():
	'''
	Run a batch script on the device (data carries the script payload)
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="batch_script", id=data.get("id"), device=data.get("device"), data=data.get("data"))
# --- System restart / configuration / data-control wrappers ("sys" channel). ---
@frappe.whitelist(allow_guest=True)
def sys_restart():
	'''
	Restart FreeIOE.
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="restart", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_reboot():
	'''
	Reboot device.
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="reboot", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_cloud_conf():
	'''
	Change IOT Device Cloud Settings, data example: {"ID": "IDIDIDIDIDID", "HOST": "ioe.symgrid.com", ...}
	Valid keys: ID/CLOUD_ID/HOST/PORT/TIMEOUT/PKG_HOST_URL/CNF_HOST_URL/DATA_UPLOAD/DATA_UPLOAD_PERIOD/COV/COV_TTL
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="cloud_conf", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_download_cfg():
	'''
	Download IOT Device CFG, data example: {"name": "deab2776ef", "host": "ioe.symgrid.com"} host is optional
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="cfg/download", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_upload_cfg():
	'''
	Upload IOT Device CFG to specified host, data example: {"host": "ioe.symgrid.com"} host is optional
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="cfg/upload", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_data_snapshot():
	'''
	Force device data snapshot data, data example: {}
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="data/snapshot", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_data_query():
	'''
	Force upload device input data, data is device sn (vsn)
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="data/query", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def sys_data_flush():
	'''
	Force flush buffered data, data example: {}
	:return:
	'''
	data = get_post_json_data()
	return send_action("sys", action="data/flush", id=data.get("id"), device=data.get("device"), data=data.get("data"))
# --- Device output/command wrappers and status query. ---
@frappe.whitelist(allow_guest=True)
def send_output():
	'''
	Send device output value, data example:{ "device": "{DeviceID}", "output": "aaaa", "value": "dddd", "prop": "int_value"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("output", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def send_command():
	'''
	Send device output value, data example:{ "device": "{DeviceID}", "cmd": "aaaa", "param": "eeee"}
	:return:
	'''
	data = get_post_json_data()
	return send_action("command", id=data.get("id"), device=data.get("device"), data=data.get("data"))
@frappe.whitelist(allow_guest=True)
def device_status(sn):
	'''
	Get device status
	:return: ONLINE/OFFLINE
	'''
	# Guests must present an authorization code; logged-in users pass through.
	if frappe.session.user == "Guest":
		valid_auth_code()
	return frappe.get_value("IOT Device", sn, "device_status")
| [
"dirk@kooiot.com"
] | dirk@kooiot.com |
8cd50ada5edeea3845d7371bc4bedcfd0a7d7c28 | 32fd04b72bc5a039c11b6bacd98726cdcaec6d2c | /reduce_herschel_spectra/generate_averaged_hifi_spectra.py | 430db14f37cdca8bf50f27be3cf45431a9d60f1d | [] | no_license | tomr-stargazer/reduce_herschel_IRAS16293_spectra | 31657f08d018f71b93b4fee41f7d619b0fe114cf | 9c27e573140cfba2234a545f87b73b75624f9959 | refs/heads/master | 2021-09-07T17:07:58.294477 | 2018-02-26T15:57:54 | 2018-02-26T15:57:54 | 93,404,942 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,070 | py | from __future__ import division
import os
import shutil
from gunzip_make_hifi import convert_FITS_to_HIFI
from combine_and_average import average_polarizations
# HIFI bands to process (note 5b and 7b are absent from the archive set).
list_of_bands = ["1a", "1b", "2a", "2b", "3a", "3b", "4a", "4b", "5a", "6a", "6b", "7a"]
root_directory_of_data = os.path.expanduser("~/Documents/Data/Herschel_Science_Archive/IRAS16293/")
level_2_5_data = os.path.join(root_directory_of_data, "level_2_5_all_bands")
target_location = os.path.join(root_directory_of_data, "Partially_Reduced_Spectra")
# For each band: convert the deconvolved H/V polarization FITS products to
# .hifi format, average the two polarizations, and copy the result out.
for band in list_of_bands:
    data_location = os.path.join(level_2_5_data, band, "level2_5/myDecon/")
    data_location_horizontal = os.path.join(data_location, "myDecon_WBS-H")
    data_location_vertical = os.path.join(data_location, "myDecon_WBS-V")
    convert_FITS_to_HIFI(data_location_horizontal, band+"-horizontal.hifi")
    convert_FITS_to_HIFI(data_location_vertical, band+"-vertical.hifi")
    averaged_file_fullpath = average_polarizations(data_location, band, clobber=True)
    shutil.copy2(averaged_file_fullpath, target_location)
| [
"t.rice90@gmail.com"
] | t.rice90@gmail.com |
85dfdf77bfafd920d41772a4e965dcd760afef59 | edcd74f8f65119bdbe737360c2ca33b4a6da160a | /python/problem-tree/insufficient_nodes_in_root_to_leaf_paths.py | 193b8f01a891a48172b800a9a35adf7b8173daa7 | [] | no_license | hyunjun/practice | 72e83de6a1d5e04ddcd16526f16110ea2dd00373 | 5376dd48b1cefb4faba9d2ef6a8a497b6b1d6c67 | refs/heads/master | 2023-08-31T07:00:37.320351 | 2023-08-17T07:29:24 | 2023-08-17T07:29:24 | 2,704,126 | 3 | 2 | null | 2022-12-14T20:25:07 | 2011-11-03T18:28:44 | Python | UTF-8 | Python | false | false | 2,595 | py | # https://leetcode.com/problems/insufficient-nodes-in-root-to-leaf-paths
from TreeNode import TreeNode
class Solution:
    # runtime; 92ms, 89.61%
    # memory; 15MB, 100.00%
    def sufficientSubset(self, root: TreeNode, limit: int) -> TreeNode:
        """Remove every node through which all root-to-leaf path sums fall below limit."""
        def prune(node, running):
            # `running` is the path sum down to and including `node`.
            running += node.val
            if node.left is None and node.right is None:
                return node if running >= limit else None
            if node.left is not None:
                node.left = prune(node.left, running)
            if node.right is not None:
                node.right = prune(node.right, running)
            # An internal node survives only if some child survived.
            if node.left is None and node.right is None:
                return None
            return node
        return prune(root, 0) if root is not None else root
s = Solution()
'''
_______1_______ 1
/ \ / \
_2_ __3__ 2 3
/ \ / \ / \
4 -99 -99 _7_ 4 7
/ \ / \ / \ / \ / \ \
8 9 -99 -99 12 13 -99 14 8 9 14
limit 1
'''
root1 = TreeNode(1)
root1.left = TreeNode(2)
root1.right = TreeNode(3)
root1.left.left = TreeNode(4)
root1.left.right = TreeNode(-99)
root1.right.left = TreeNode(-99)
root1.right.right = TreeNode(7)
root1.left.left.left = TreeNode(8)
root1.left.left.right = TreeNode(9)
root1.left.right.left = TreeNode(-99)
root1.left.right.right = TreeNode(-99)
root1.right.left.left = TreeNode(12)
root1.right.left.right = TreeNode(13)
root1.right.right.left = TreeNode(-99)
root1.right.right.right = TreeNode(14)
print(s.sufficientSubset(root1, 1))
'''
_5_ _5_
/ \ / \
4 8 4 8
/ / \ / / \
11 17 4 11 17 4
/ \ / \ / /
7 1 5 3 7 5
limit 22
'''
root2 = TreeNode(5)
root2.left = TreeNode(4)
root2.right = TreeNode(8)
root2.left.left = TreeNode(11)
root2.right.left = TreeNode(17)
root2.right.right = TreeNode(4)
root2.left.left.left = TreeNode(7)
root2.left.left.right = TreeNode(1)
root2.right.right.left = TreeNode(5)
root2.right.right.right = TreeNode(3)
print(s.sufficientSubset(root2, 22))
'''
1 1
/ \ \
2 -3 -3
/ / /
-5 4 4
limit -1
'''
root3 = TreeNode(1)
root3.left = TreeNode(2)
root3.right = TreeNode(-3)
root3.left.left = TreeNode(-5)
root3.right.left = TreeNode(4)
print(s.sufficientSubset(root3, -1))
| [
"agapelover4u@yahoo.co.kr"
] | agapelover4u@yahoo.co.kr |
cdb1d7dcec9622f8be7364b4bd8e96befbf01c13 | 2937d60b7f5259b4899ba5af08146bd874529a67 | /Assignment 8 q8.py | 0cdfc6c123fbcc437bc575427a7453354fc5e2ef | [] | no_license | gourav47/Let-us-learn-python | 9a2302265cb6c47e74863359c79eef5a3078358a | b324f2487de65b2f073b54c8379c1b9e9aa36298 | refs/heads/master | 2021-06-27T03:33:27.483992 | 2021-01-07T12:26:16 | 2021-01-07T12:26:16 | 204,323,390 | 1 | 1 | null | 2020-07-19T14:25:12 | 2019-08-25T16:53:56 | Python | UTF-8 | Python | false | false | 371 | py | '''compare two tuples, whether they contain the same element in any order or not'''
t1=eval(input("Enter the first tuple: "))
t2=eval(input("Enter the second tuple: "))
if t1==t2:
print("Tuples are same and are in same order")
else:
print("t2 is in t1" if all(e in t1 for e in t2) else "t1 is in t2" if all(e in t2 for e in t1) else "Tuples are not same")
| [
"noreply@github.com"
] | gourav47.noreply@github.com |
a49d23cec2e276817964d3b9a70919b502a45d2f | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/operations/_network_profiles_operations.py | 6f81af1e153a236527977189e397d8d21ea8080a | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 23,962 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkProfilesOperations(object):
"""NetworkProfilesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Wire up the shared pipeline client, (de)serializers, and client config
        # supplied by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        network_profile_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Issue the initial DELETE request of the long-running delete operation;
        # begin_delete polls until it completes. Autogenerated — do not hand-edit logic.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkProfileName': self._serialize.url("network_profile_name", network_profile_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are the accepted responses for an LRO delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkProfiles/{networkProfileName}'}  # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_profile_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network profile.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_profile_name: The name of the NetworkProfile.
:type network_profile_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_profile_name=network_profile_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkProfileName': self._serialize.url("network_profile_name", network_profile_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkProfiles/{networkProfileName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
network_profile_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkProfile"
"""Gets the specified network profile in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_profile_name: The name of the public IP prefix.
:type network_profile_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkProfile, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_02_01.models.NetworkProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkProfileName': self._serialize.url("network_profile_name", network_profile_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkProfiles/{networkProfileName}'} # type: ignore
def create_or_update(
self,
resource_group_name, # type: str
network_profile_name, # type: str
parameters, # type: "_models.NetworkProfile"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkProfile"
"""Creates or updates a network profile.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_profile_name: The name of the network profile.
:type network_profile_name: str
:param parameters: Parameters supplied to the create or update network profile operation.
:type parameters: ~azure.mgmt.network.v2019_02_01.models.NetworkProfile
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkProfile, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_02_01.models.NetworkProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkProfileName': self._serialize.url("network_profile_name", network_profile_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkProfile')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkProfile', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkProfiles/{networkProfileName}'} # type: ignore
def update_tags(
self,
resource_group_name, # type: str
network_profile_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkProfile"
"""Updates network profile tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_profile_name: The name of the network profile.
:type network_profile_name: str
:param parameters: Parameters supplied to update network profile tags.
:type parameters: ~azure.mgmt.network.v2019_02_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkProfile, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_02_01.models.NetworkProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkProfileName': self._serialize.url("network_profile_name", network_profile_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkProfiles/{networkProfileName}'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkProfileListResult"]
"""Gets all the network profiles in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkProfileListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_02_01.models.NetworkProfileListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkProfileListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkProfileListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkProfiles'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkProfileListResult"]
"""Gets all network profiles in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkProfileListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_02_01.models.NetworkProfileListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkProfileListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkProfileListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkProfiles'} # type: ignore
| [
"noreply@github.com"
] | manoj0806.noreply@github.com |
c2cdecfaf775fb1dd84f66cbf854dd5bbd4cb548 | 7d8e040cb703e6f6e2d55b5dc64fc9124d85dde8 | /skl2onnx/tutorial/benchmark.py | bcab7e44325c2f7077ac8d948ee6f1d583832bda | [
"MIT"
] | permissive | Global-localhost/sklearn-onnx | fc44aa481a91482f187cfd2307df6061b77742af | a8267e7ba946d8b0596951060e5dca39fec47439 | refs/heads/master | 2023-03-23T00:19:31.474251 | 2021-03-03T19:17:12 | 2021-03-03T19:17:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | """
Tools to help benchmarking.
"""
from timeit import Timer
import numpy
def measure_time(stmt, context, repeat=10, number=50, div_by_number=False):
"""
Measures a statement and returns the results as a dictionary.
:param stmt: string
:param context: variable to know in a dictionary
:param repeat: average over *repeat* experiment
:param number: number of executions in one row
:param div_by_number: divide by the number of executions
:return: dictionary
.. runpython::
:showcode:
from skl2onnx.tutorial import measure_time
from math import cos
res = measure_time("cos(x)", context=dict(cos=cos, x=5.))
print(res)
See `Timer.repeat <https://docs.python.org/3/library/
timeit.html?timeit.Timer.repeat>`_
for a better understanding of parameter *repeat* and *number*.
The function returns a duration corresponding to
*number* times the execution of the main statement.
"""
tim = Timer(stmt, globals=context)
res = numpy.array(tim.repeat(repeat=repeat, number=number))
if div_by_number:
res /= number
mean = numpy.mean(res)
dev = numpy.mean(res ** 2)
dev = (dev - mean**2) ** 0.5
mes = dict(average=mean, deviation=dev, min_exec=numpy.min(res),
max_exec=numpy.max(res), repeat=repeat, number=number)
return mes
| [
"noreply@github.com"
] | Global-localhost.noreply@github.com |
52baf0f97da8c5a0336484adf40ba0898bdf3efc | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/34/usersdata/134/13382/submittedfiles/moedas.py | 0ccfc61b251ea5f113ad1ed38e1dc047d75fc972 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | # -*- coding: utf-8 -*-
from __future__ import division
a = int(input('Digite o valor de a:'))
b = int(input('Digite o valor de b:'))
c = int(input('Digite o valor de c:'))
R1=c//a
resto1=c%a
if resto1!=0:
R2=resto1//b
resto2=resto1%b
if resto2==0:
print ('%d' %R1)
print ('%d' %R2)
elif resto2!=0:
R3=c//b
resto3=c%b
if resto3==0:
print ('0')
print ('%d' %R3)
if resto3!=0:
R4= resto3//a
resto4=resto3%a
if resto4==0:
print ('%d' %R4)
print ('%d' %R3)
else:
print ('N')
elif resto1==0:
print ('%d' %R1)
print ('0')
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
be9c1a7e1d4d5c28bfa14b787fd3cfeea8faa8a2 | c863a1349cde0217459fde44d969df7f04c8e57d | /tb/test_axis_xgmii_tx_32.py | 241b2f6ac52fe7748d7bbb423cb68ef3ffb559bb | [
"MIT"
] | permissive | hermixy/verilog-ethernet | 5c09e4cb94590bc858a716ef764fd3776aad693d | b3f50ac2c724763c1c30ed9c33a3489517b7d457 | refs/heads/master | 2020-04-04T20:01:52.758794 | 2018-11-02T07:40:15 | 2018-11-02T07:40:15 | 156,231,015 | 1 | 0 | null | 2018-11-05T14:29:18 | 2018-11-05T14:29:17 | null | UTF-8 | Python | false | false | 10,948 | py | #!/usr/bin/env python
"""
Copyright (c) 2015-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import axis_ep
import eth_ep
import xgmii_ep
module = 'axis_xgmii_tx_32'
testbench = 'test_%s' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("../rtl/lfsr.v")
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Parameters
ENABLE_PADDING = 1
MIN_FRAME_LENGTH = 64
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[4:])
input_axis_tdata = Signal(intbv(0)[32:])
input_axis_tkeep = Signal(intbv(0)[4:])
input_axis_tvalid = Signal(bool(0))
input_axis_tlast = Signal(bool(0))
input_axis_tuser = Signal(bool(0))
ifg_delay = Signal(intbv(0)[8:])
# Outputs
input_axis_tready = Signal(bool(0))
xgmii_txd = Signal(intbv(0x07070707)[32:])
xgmii_txc = Signal(intbv(0xf)[4:])
# sources and sinks
source_pause = Signal(bool(0))
source = axis_ep.AXIStreamSource()
source_logic = source.create_logic(
clk,
rst,
tdata=input_axis_tdata,
tkeep=input_axis_tkeep,
tvalid=input_axis_tvalid,
tready=input_axis_tready,
tlast=input_axis_tlast,
tuser=input_axis_tuser,
pause=source_pause,
name='source'
)
sink = xgmii_ep.XGMIISink()
sink_logic = sink.create_logic(
clk,
rst,
rxd=xgmii_txd,
rxc=xgmii_txc,
name='sink'
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
input_axis_tdata=input_axis_tdata,
input_axis_tkeep=input_axis_tkeep,
input_axis_tvalid=input_axis_tvalid,
input_axis_tready=input_axis_tready,
input_axis_tlast=input_axis_tlast,
input_axis_tuser=input_axis_tuser,
xgmii_txd=xgmii_txd,
xgmii_txc=xgmii_txc,
ifg_delay=ifg_delay
)
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
ifg_delay.next = 12
# testbench stimulus
for payload_len in list(range(1,18))+list(range(40,58)):
yield clk.posedge
print("test 1: test packet, length %d" % payload_len)
current_test.next = 1
test_frame = eth_ep.EthFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0x5A5152535455
test_frame.eth_type = 0x8000
test_frame.payload = bytearray(range(payload_len))
test_frame.update_fcs()
axis_frame = test_frame.build_axis()
source.send(axis_frame)
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.data[0:8] == bytearray(b'\x55\x55\x55\x55\x55\x55\x55\xD5')
eth_frame = eth_ep.EthFrame()
eth_frame.parse_axis_fcs(rx_frame.data[8:])
print(hex(eth_frame.eth_fcs))
print(hex(eth_frame.calc_fcs()))
assert len(eth_frame.payload.data) == max(payload_len, 46)
assert eth_frame.eth_fcs == eth_frame.calc_fcs()
assert eth_frame.eth_dest_mac == test_frame.eth_dest_mac
assert eth_frame.eth_src_mac == test_frame.eth_src_mac
assert eth_frame.eth_type == test_frame.eth_type
assert eth_frame.payload.data.index(test_frame.payload.data) == 0
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 2: back-to-back packets, length %d" % payload_len)
current_test.next = 2
test_frame1 = eth_ep.EthFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x8000
test_frame1.payload = bytearray(range(payload_len))
test_frame1.update_fcs()
test_frame2 = eth_ep.EthFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x8000
test_frame2.payload = bytearray(range(payload_len))
test_frame2.update_fcs()
axis_frame1 = test_frame1.build_axis()
axis_frame2 = test_frame2.build_axis()
source.send(axis_frame1)
source.send(axis_frame2)
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.data[0:8] == bytearray(b'\x55\x55\x55\x55\x55\x55\x55\xD5')
eth_frame = eth_ep.EthFrame()
eth_frame.parse_axis_fcs(rx_frame.data[8:])
print(hex(eth_frame.eth_fcs))
print(hex(eth_frame.calc_fcs()))
assert len(eth_frame.payload.data) == max(payload_len, 46)
assert eth_frame.eth_fcs == eth_frame.calc_fcs()
assert eth_frame.eth_dest_mac == test_frame1.eth_dest_mac
assert eth_frame.eth_src_mac == test_frame1.eth_src_mac
assert eth_frame.eth_type == test_frame1.eth_type
assert eth_frame.payload.data.index(test_frame1.payload.data) == 0
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.data[0:8] == bytearray(b'\x55\x55\x55\x55\x55\x55\x55\xD5')
eth_frame = eth_ep.EthFrame()
eth_frame.parse_axis_fcs(rx_frame.data[8:])
print(hex(eth_frame.eth_fcs))
print(hex(eth_frame.calc_fcs()))
assert len(eth_frame.payload.data) == max(payload_len, 46)
assert eth_frame.eth_fcs == eth_frame.calc_fcs()
assert eth_frame.eth_dest_mac == test_frame2.eth_dest_mac
assert eth_frame.eth_src_mac == test_frame2.eth_src_mac
assert eth_frame.eth_type == test_frame2.eth_type
assert eth_frame.payload.data.index(test_frame2.payload.data) == 0
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 3: tuser assert, length %d" % payload_len)
current_test.next = 3
test_frame1 = eth_ep.EthFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x8000
test_frame1.payload = bytearray(range(payload_len))
test_frame1.update_fcs()
test_frame2 = eth_ep.EthFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x8000
test_frame2.payload = bytearray(range(payload_len))
test_frame2.update_fcs()
axis_frame1 = test_frame1.build_axis()
axis_frame2 = test_frame2.build_axis()
axis_frame1.last_cycle_user = 1
source.send(axis_frame1)
source.send(axis_frame2)
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.data[0:8] == bytearray(b'\x55\x55\x55\x55\x55\x55\x55\xD5')
assert rx_frame.error[-1]
# bad packet
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.data[0:8] == bytearray(b'\x55\x55\x55\x55\x55\x55\x55\xD5')
eth_frame = eth_ep.EthFrame()
eth_frame.parse_axis_fcs(rx_frame.data[8:])
print(hex(eth_frame.eth_fcs))
print(hex(eth_frame.calc_fcs()))
assert len(eth_frame.payload.data) == max(payload_len, 46)
assert eth_frame.eth_fcs == eth_frame.calc_fcs()
assert eth_frame.eth_dest_mac == test_frame2.eth_dest_mac
assert eth_frame.eth_src_mac == test_frame2.eth_src_mac
assert eth_frame.eth_type == test_frame2.eth_type
assert eth_frame.payload.data.index(test_frame2.payload.data) == 0
assert sink.empty()
yield delay(100)
for payload_len in list(range(46,54)):
yield clk.posedge
print("test 4: test stream, length %d" % payload_len)
current_test.next = 4
for i in range(10):
test_frame = eth_ep.EthFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0x5A5152535455
test_frame.eth_type = 0x8000
test_frame.payload = bytearray(range(payload_len))
test_frame.update_fcs()
axis_frame = test_frame.build_axis()
source.send(axis_frame)
for i in range(10):
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.data[0:8] == bytearray(b'\x55\x55\x55\x55\x55\x55\x55\xD5')
eth_frame = eth_ep.EthFrame()
eth_frame.parse_axis_fcs(rx_frame.data[8:])
assert len(eth_frame.payload.data) == max(payload_len, 46)
assert eth_frame.eth_fcs == eth_frame.calc_fcs()
assert eth_frame.eth_dest_mac == test_frame.eth_dest_mac
assert eth_frame.eth_src_mac == test_frame.eth_src_mac
assert eth_frame.eth_type == test_frame.eth_type
assert eth_frame.payload.data.index(test_frame.payload.data) == 0
yield delay(100)
raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
| [
"alex@alexforencich.com"
] | alex@alexforencich.com |
cfe6d5d1035024e2605934b2d542665e6e6325a8 | 09f0505f3ac1dccaf301c1e363423f38768cc3cc | /r_DailyProgrammer/Hard/C310/__init__.py | c1ac81e317ca11c69b3b42f76ea04036d06bf4da | [] | no_license | Awesome-Austin/PythonPractice | 02212292b92814016d062f0fec1c990ebde21fe7 | 9a717f91d41122be6393f9fcd1a648c5e62314b3 | refs/heads/master | 2023-06-21T11:43:59.366064 | 2021-07-29T23:33:00 | 2021-07-29T23:33:00 | 270,854,302 | 0 | 0 | null | 2020-08-11T20:47:10 | 2020-06-08T23:24:09 | Python | UTF-8 | Python | false | false | 63 | py | #! python3
from r_DailyProgrammer.Hard.C310.main import main
| [
"{ID}+{username}@users.noreply.github.com"
] | {ID}+{username}@users.noreply.github.com |
af901048cf5ce50c02868a1d739823f579289574 | 498035301c7f599aa672c609fed068dfcccb459c | /chapter1__OpenCV_Python_and_Basic_Image_Processing/01_Basic_Operations/Utils.py | 635bc51376070ea313f6bf38c0c1e55774750195 | [] | no_license | JinFree/OpenCV_4-install-shell | 2c94910544a16059befd6de83f26c1a3967bfe95 | 1c41ca217d227b98a0bf62f653904710b2bfe84c | refs/heads/master | 2020-06-03T01:09:05.417632 | 2019-07-31T03:14:56 | 2019-07-31T03:14:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50 | py | from ImageUtils import *
from VideoUtils import *
| [
"ytrqwe12@gmail.com"
] | ytrqwe12@gmail.com |
c52253f69c5e40b0e961a6242e56af18848d6134 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_jackhammers.py | 0325943aafb2ceaa43fba5d39c112826adf81ec8 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py |
#calss header
class _JACKHAMMERS():
def __init__(self,):
self.name = "JACKHAMMERS"
self.definitions = jackhammer
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['jackhammer']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
a87d15b4256ce780218894d45596a139523ef045 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /q7rHnH9Jhf35NqSjG_5.py | 270b54f35a80dfcaf6b4bbd022cfd2962310a44c | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py |
def trailing_zeros(n):
c=0
while n:
n//=5
c+=n
return c
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
e063a88f157531b5e3f0c39b35b443beac632933 | 403a8c7d9ba2956c3f5873d0721921e0d8ae7c65 | /fzfaws/cloudformation/update_stack.py | e03c889a4a90fc58eb5910cbc5768ff5be3ae076 | [
"MIT"
] | permissive | kazhala/fzf.aws | b0c83f0ac47f1b2da0d0b064d6a688ba2e69028c | 4abefb2301f7b489b11ed3f0b303faafa5941d5b | refs/heads/master | 2021-07-05T00:50:12.632284 | 2021-05-25T23:09:51 | 2021-05-25T23:09:51 | 242,327,229 | 68 | 3 | MIT | 2021-03-25T23:42:00 | 2020-02-22T11:09:11 | Python | UTF-8 | Python | false | false | 9,825 | py | """Contains cloudformation update stack operation handler."""
import json
from typing import Any, Dict, List, Optional, Union
from fzfaws.cloudformation import Cloudformation
from fzfaws.cloudformation.helper.cloudformationargs import CloudformationArgs
from fzfaws.cloudformation.helper.file_validation import (
check_is_valid,
is_json,
is_yaml,
)
from fzfaws.cloudformation.helper.paramprocessor import ParamProcessor
from fzfaws.cloudformation.validate_stack import validate_stack
from fzfaws.s3 import S3
from fzfaws.utils import Pyfzf, FileLoader
def update_stack(
    profile: Optional[Union[str, bool]] = False,
    region: Optional[Union[str, bool]] = False,
    replace: bool = False,
    local_path: Union[str, bool] = False,
    root: bool = False,
    wait: bool = False,
    extra: bool = False,
    bucket: Optional[str] = None,
    version: Union[str, bool] = False,
    dryrun: bool = False,
    cloudformation: Optional[Cloudformation] = None,
) -> Union[None, dict]:
    """Handle the update of cloudformation stacks.

    This is also used by changeset_stack to create its argument.
    The dryrun and cloudformation arguments in the function are only
    used by changeset_stack.

    :param profile: use a different profile for this operation
    :type profile: Union[bool, str], optional
    :param region: use a different region for this operation
    :type region: Union[bool, str], optional
    :param replace: replace the template during update
    :type replace: bool, optional
    :param local_path: select a template from local machine
    :type local_path: Union[bool, str], optional
    :param root: search local file from root directory
    :type root: bool, optional
    :param wait: wait for stack to be completed before exiting the program
    :type wait: bool, optional
    :param extra: configure extra options for the stack (tags, IAM, termination protection etc..)
    :type extra: bool, optional
    :param bucket: specify a bucket/bucketpath to skip s3 selection
    :type bucket: str, optional
    :param version: use a previous version of the template on s3 bucket
    :type version: Union[str, bool], optional
    :param dryrun: don't update, rather return update information, used for changeset_stack()
    :type dryrun: bool, optional
    :param cloudformation: a cloudformation instance, when calling from changeset_stack(), pass cloudformation in
    :type cloudformation: Cloudformation, optional
    :return: if dryrun is set, return all the update details as dict {'Parameters': value, 'Tags': value...}
    :rtype: Union[None, dict]
    """
    if not cloudformation:
        cloudformation = Cloudformation(profile, region)
        cloudformation.set_stack()
    extra_args = CloudformationArgs(cloudformation)
    if not replace:
        # non replacing update, just update the parameters
        cloudformation_args = non_replacing_update(cloudformation)
    else:
        # replace the existing template
        if local_path:
            # template provided on the local machine
            if not isinstance(local_path, str):
                # no concrete path given; prompt the user with fzf file selection
                fzf = Pyfzf()
                local_path = str(
                    fzf.get_local_file(search_from_root=root, cloudformation=True)
                )
            cloudformation_args = local_replacing_update(
                cloudformation, str(local_path)
            )
        else:
            # template provided in s3
            cloudformation_args = s3_replacing_update(cloudformation, bucket, version)
    if extra:
        extra_args.set_extra_args(update=True, search_from_root=root, dryrun=dryrun)
        cloudformation_args.update(extra_args.extra_args)
    if dryrun:
        # changeset_stack() consumes the prepared arguments instead of executing
        return cloudformation_args
    response = cloudformation.execute_with_capabilities(**cloudformation_args)
    response.pop("ResponseMetadata", None)
    print(json.dumps(response, indent=4, default=str))
    print(80 * "-")
    print("Stack update initiated")
    if wait:
        # Typo fix: the wait message previously read "Wating".
        cloudformation.wait(
            "stack_update_complete", "Waiting for stack to be updated ..."
        )
        print("Stack updated")
def non_replacing_update(cloudformation: Cloudformation) -> Dict[str, Any]:
    """Build the boto3 argument dict for an update that keeps the template.

    Non-replacing update as in not replacing the template, only
    updating the parameters.

    :param cloudformation: Cloudformation instance bound to the target stack
    :type cloudformation: Cloudformation
    :return: formatted argument that's ready to be used by boto3
    :rtype: Dict[str, Any]
    """
    # Fetch the stack's current template body; templates may be stored as
    # JSON or YAML, so fall back to the YAML parser when JSON parsing fails.
    template_response = cloudformation.client.get_template(
        StackName=cloudformation.stack_name
    )
    loader = FileLoader(body=template_response.get("TemplateBody", ""))
    try:
        template_data: Dict[str, Any] = loader.process_json_body()
    except json.JSONDecodeError:
        template_data: Dict[str, Any] = loader.process_yaml_body()

    params: List[Dict[str, Any]] = []
    if template_data.get("Parameters"):
        # Re-prompt for each declared parameter, offering previous stack
        # values as a preview.
        processor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            template_data["Parameters"],
            cloudformation.stack_details.get("Parameters"),
        )
        processor.process_stack_params()
        params = processor.processed_params

    return {
        "cloudformation_action": cloudformation.client.update_stack,
        "StackName": cloudformation.stack_name,
        "UsePreviousTemplate": True,
        "Parameters": params,
    }
def local_replacing_update(
    cloudformation: Cloudformation, local_path: str
) -> Dict[str, Any]:
    """Build the boto3 argument dict for an update using a local template.

    Local replacing update as in using a template on the local machine
    to perform the stack update.  The new template is validated through
    cloudformation first, then its parameters are processed while previous
    stack values are offered as a preview.

    :param cloudformation: Cloudformation instance bound to the target stack
    :type cloudformation: Cloudformation
    :param local_path: local file path to the template
    :type local_path: str
    :return: formatted argument that's ready to be used by boto3
    :rtype: Dict[str, Any]
    """
    check_is_valid(local_path)
    validate_stack(
        cloudformation.profile,
        cloudformation.region,
        local_path=local_path,
        no_print=True,
    )

    # Parse the template according to its file extension.
    loader = FileLoader(path=local_path)
    template: Dict[str, Any] = {}
    if is_yaml(local_path):
        template = loader.process_yaml_file()
    elif is_json(local_path):
        template = loader.process_json_file()

    # Process params, previewing previous stack values where available.
    if "Parameters" in template["dictBody"]:
        processor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            template["dictBody"]["Parameters"],
            cloudformation.stack_details.get("Parameters"),
        )
        processor.process_stack_params()
        params = processor.processed_params
    else:
        params = []

    return {
        "cloudformation_action": cloudformation.client.update_stack,
        "StackName": cloudformation.stack_name,
        "TemplateBody": template["body"],
        "UsePreviousTemplate": False,
        "Parameters": params,
    }
def s3_replacing_update(
    cloudformation: Cloudformation, bucket: Optional[str], version: Union[str, bool]
) -> Dict[str, Any]:
    """Build the boto3 argument dict for a replacing update via a template on s3.

    Read the template from s3, comparing parameter names with the original
    stack to provide a preview of previous values where possible.

    :param bucket: bucket path, if set, skip fzf selection
    :type bucket: str, optional
    :param version: whether to use a versioned template in s3
    :type version: Union[str, bool]
    :return: formatted argument that's ready to be used by boto3
    :rtype: Dict[str, Any]
    """
    s3 = S3(profile=cloudformation.profile, region=cloudformation.region)
    s3.set_bucket_and_path(bucket)
    # Fall back to interactive selection when bucket/object were not supplied.
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if not s3.path_list[0]:
        s3.set_s3_object()
    check_is_valid(s3.path_list[0])

    if version == True:
        # Resolve the boolean flag to the object's most recent version id.
        version = s3.get_object_version(s3.bucket_name, s3.path_list[0])[0].get(
            "VersionId", False
        )

    validate_stack(
        cloudformation.profile,
        cloudformation.region,
        bucket="%s/%s" % (s3.bucket_name, s3.path_list[0]),
        version=version if version else False,
        no_print=True,
    )

    # Determine the parser to use from the object's extension.
    file_type: str = ""
    if is_yaml(s3.path_list[0]):
        file_type = "yaml"
    elif is_json(s3.path_list[0]):
        file_type = "json"
    file_data: Dict[str, Any] = s3.get_object_data(file_type)

    if "Parameters" in file_data:
        processor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            file_data["Parameters"],
            cloudformation.stack_details.get("Parameters"),
        )
        processor.process_stack_params()
        params = processor.processed_params
    else:
        params = []

    template_url = s3.get_object_url("" if not version else str(version))
    return {
        "cloudformation_action": cloudformation.client.update_stack,
        "StackName": cloudformation.stack_name,
        "TemplateURL": template_url,
        "UsePreviousTemplate": False,
        "Parameters": params,
    }
| [
"kevin7441@gmail.com"
] | kevin7441@gmail.com |
70619d037c1670b6c631a93f650fc963cae1ae02 | b8e239b6d75fb88865ade7e355144fae49b4186f | /google-cloud-sdk/lib/googlecloudsdk/command_lib/eventarc/flags.py | 90db15a2b071bbbf007ea6320a995cd4f3159ab1 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | pombredanne/gcloud_cli | d6a8b3ed4a28751b3264c7fefb7b43645c024478 | 7ca81d3a3689f41ce51c3c70805e6203f5b8944f | refs/heads/master | 2022-12-07T19:09:35.204462 | 2020-09-02T04:06:46 | 2020-09-02T04:06:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,485 | py | # -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags for Eventarc commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.calliope.concepts import deps
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import properties
_IAM_API_VERSION = 'v1'
def LocationAttributeConfig():
  """Builds an AttributeConfig for the location resource."""
  # Allow the location to fall through to the [eventarc/location] property
  # when it is not supplied on the command line.
  property_fallthrough = deps.PropertyFallthrough(
      properties.FromString('eventarc/location'))
  return concepts.ResourceParameterAttributeConfig(
      name='location',
      fallthroughs=[property_fallthrough],
      help_text='The location for the Eventarc resource. Alternatively, set '
      'the [eventarc/location] property.')
def TriggerAttributeConfig():
  """Builds an AttributeConfig for the trigger resource.

  Returns:
    A concepts.ResourceParameterAttributeConfig for the 'trigger' attribute;
    no fallthroughs, so the value must come from the command line.
  """
  return concepts.ResourceParameterAttributeConfig(name='trigger')
def ServiceAccountAttributeConfig():
  """Builds an AttributeConfig for the service account resource.

  Returns:
    A concepts.ResourceParameterAttributeConfig for the 'service-account'
    attribute; no fallthroughs, so the value must come from the command line.
  """
  return concepts.ResourceParameterAttributeConfig(name='service-account')
def AddLocationResourceArg(parser, group_help_text, required=False):
  """Adds a resource argument for an Eventarc location.

  Args:
    parser: the argparse parser the --location resource argument is added to.
    group_help_text: str, help text for the resource argument group.
    required: bool, whether the flag is mandatory.
  """
  spec = concepts.ResourceSpec(
      'eventarc.projects.locations',
      resource_name='location',
      locationsId=LocationAttributeConfig(),
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
  concept_parsers.ConceptParser.ForResource(
      '--location', spec, group_help_text, required=required).AddToParser(parser)
def AddTriggerResourceArg(parser, group_help_text, required=False):
  """Adds a resource argument for an Eventarc trigger.

  Args:
    parser: the argparse parser the 'trigger' resource argument is added to.
    group_help_text: str, help text for the resource argument group.
    required: bool, whether the positional is mandatory.
  """
  spec = concepts.ResourceSpec(
      'eventarc.projects.locations.triggers',
      resource_name='trigger',
      triggersId=TriggerAttributeConfig(),
      locationsId=LocationAttributeConfig(),
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
  concept_parsers.ConceptParser.ForResource(
      'trigger', spec, group_help_text, required=required).AddToParser(parser)
def AddServiceAccountResourceArg(parser, required=False):
  """Adds a resource argument for an IAM service account.

  Args:
    parser: the argparse parser the --service-account argument is added to.
    required: bool, whether the flag is mandatory.
  """
  resource_spec = concepts.ResourceSpec(
      'iam.projects.serviceAccounts',
      resource_name='service account',
      api_version=_IAM_API_VERSION,
      serviceAccountsId=ServiceAccountAttributeConfig(),
      projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
  concept_parser = concept_parsers.ConceptParser.ForResource(
      '--service-account',
      resource_spec,
      'The IAM service account associated with the trigger, specified with an '
      'email address or a uniqueId. If not specified, the default compute '
      'service account will be used. Unless a full resource name is provided, '
      'the service account is assumed to be in the same project as the '
      'trigger.',
      required=required)
  concept_parser.AddToParser(parser)
def AddMatchingCriteriaArg(parser, required=False):
  """Adds an argument for the trigger's matching criteria.

  Args:
    parser: the argparse parser the --matching-criteria argument is added to.
    required: bool, whether the flag is mandatory.
  """
  # ArgDict parses ATTRIBUTE=VALUE pairs; UpdateAction merges repeated flags.
  parser.add_argument(
      '--matching-criteria',
      action=arg_parsers.UpdateAction,
      type=arg_parsers.ArgDict(),
      required=required,
      help='The criteria by which events are filtered for the trigger, '
      'specified as a comma-separated list of CloudEvents attribute names and '
      'values. This flag can also be repeated to add more criteria to the '
      'list. Only events that match with this criteria will be sent to the '
      'destination. The criteria must include the `type` attribute, as well as '
      'any other attributes that are expected for the chosen type.',
      metavar='ATTRIBUTE=VALUE')
def AddDestinationRunServiceArg(parser, required=False):
  """Adds an argument for the trigger's destination Cloud Run service.

  Args:
    parser: the argparse parser the argument is added to.
    required: bool, whether the flag is mandatory.
  """
  parser.add_argument(
      '--destination-run-service',
      required=required,
      help='The name of the Cloud Run fully-managed service that receives the '
      'events for the trigger. The service must be in the same region as the '
      'trigger unless the trigger\'s location is `global`. The service must be '
      'in the same project as the trigger.')
def AddDestinationRunPathArg(parser, required=False):
  """Adds an argument for the trigger's destination path on the service.

  Args:
    parser: the argparse parser the argument is added to.
    required: bool, whether the flag is mandatory.
  """
  parser.add_argument(
      '--destination-run-path',
      required=required,
      help='The relative path on the destination Cloud Run service to which '
      'the events for the trigger should be sent. Examples: "/route", "route", '
      '"route/subroute".')
def AddDestinationRunRegionArg(parser, required=False):
  """Adds an argument for the trigger's destination service's region.

  Args:
    parser: the argparse parser the argument is added to.
    required: bool, whether the flag is mandatory.
  """
  parser.add_argument(
      '--destination-run-region',
      required=required,
      help='The region in which the destination Cloud Run service can be '
      'found. If not specified, it is assumed that the service is in the same '
      'region as the trigger.')
def AddClearServiceAccountArg(parser):
  """Adds an argument for clearing the trigger's service account.

  Args:
    parser: the argparse parser the --clear-service-account flag is added to.
  """
  parser.add_argument(
      '--clear-service-account',
      action='store_true',
      help='Clear the IAM service account associated with the trigger and use '
      'the default compute service account instead.')
def AddClearDestinationRunPathArg(parser):
  """Adds an argument for clearing the trigger's destination path.

  Args:
    parser: the argparse parser the --clear-destination-run-path flag is
      added to.
  """
  parser.add_argument(
      '--clear-destination-run-path',
      action='store_true',
      help='Clear the relative path on the destination Cloud Run service to '
      'which the events for the trigger should be sent.')
| [
"actions@github.com"
] | actions@github.com |
c65489868b5614f72efd0a6d89aa72cd8eb91358 | 4e15720afdf1d90540acc38fbbbe26262a3b0842 | /favorite_team/urls.py | 7504e0307f32e294106b61614ffe387a2c6db10d | [] | no_license | rallen0150/bootstrap | 296eca9a2463fe7926c36c6263001197feae16df | f41e5d93af642845b7e5da284ee86b33a127f097 | refs/heads/master | 2021-01-11T00:35:43.341177 | 2016-10-10T23:36:56 | 2016-10-10T23:36:56 | 70,510,806 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,053 | py | """favorite_team URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from app.views import index_view, about_view, record_view, player_view
# URL routing table: maps request paths to the views imported above.
urlpatterns = [
    url(r'^admin/', admin.site.urls),  # Django admin site
    url(r'^$', index_view, name="index_view"),  # home page
    url(r'^about/$', about_view, name="about_view"),
    url(r'^record/$', record_view, name="record_view"),
    url(r'^player/$', player_view, name="player_view")
]
| [
"rallen0150@gmail.com"
] | rallen0150@gmail.com |
4b0bb907945280b9d03a46f3176f94ee15c2bf9d | cbda89443b351bb2047180dad4e300c13dc3df7f | /Crystals/Morpurgo_all_sp_Reorgs_qsplit_noscreen_new/Jobs/C60/C60_cation_neut_inner1_outer0/C60_cation_neut_inner1_outer0.py | ccebde61424c1f946c6e8be6a33acbe7cbdc10b0 | [] | no_license | sheridanfew/pythonpolarisation | 080f52979f98d26360a46412a10c8e3f51ee4549 | 178e2684e9a239a8e60af5f7b1eb414ac5f31e92 | refs/heads/master | 2021-07-10T01:07:40.978790 | 2021-03-11T16:56:37 | 2021-03-11T16:56:37 | 96,101,351 | 0 | 0 | null | 2017-07-03T13:37:06 | 2017-07-03T10:54:52 | null | UTF-8 | Python | false | false | 7,149 | py | import sys
sys.path.append('../../../../../')
from BasicElements import *
from BasicElements.Register import GetRegister
from BasicElements.MoleculeFactory import ReadMoleculeType
from BasicElements.MoleculeFactory import GetMolecule
from BasicElements.Crystal import *
from Polarizability.GetDipoles import get_dipoles,split_dipoles_onto_atoms
from Polarizability import *
from Polarizability.GetEnergyFromDips import *
from Polarizability.JMatrix import JMatrix
import numpy as np
from math import *
from time import gmtime, strftime
import os
print strftime("%a, %d %b %Y %X +0000", gmtime())
qdict={"anion": -1.0, "neut": 0.0, "cation": 1.0}
name='C60_cation_neut_inner1_outer0'
#For crystals here, all cubic and centred at centre
insize=1
#number of TVs in each dir central mol is from edge of inner region
outsize=0
state='cation'
mols_cen=['sp_C60_mola_neut.xyz','sp_C60_molb_neut.xyz','sp_C60_molc_neut.xyz','sp_C60_mold_neut.xyz']
mols_sur=['sp_C60_mola_neut.xyz','sp_C60_molb_neut.xyz','sp_C60_molc_neut.xyz','sp_C60_mold_neut.xyz']
mols_outer=['sp_C60_mola_neut.xyz','sp_C60_molb_neut.xyz','sp_C60_molc_neut.xyz','sp_C60_mold_neut.xyz']
Natoms=60
#From cif:
'''
C60
_cell_length_a 14.052(5)
_cell_length_b 14.052(5)
_cell_length_c 14.052(5)
_cell_angle_alpha 90
_cell_angle_beta 90
_cell_angle_gamma 90
_cell_volume 2774.69
_cell_formula_units_Z 4
'''
#Get translation vectors:
a=14.0525/0.5291772109217
b=14.0525/0.5291772109217
c=14.0525/0.5291772109217
alpha=90*(pi/180)
beta=90*(pi/180)
gamma=90*(pi/180)
cif_unit_cell_volume=2774.69/(a*b*c*(0.5291772109217**3))
cell_volume=sqrt(1 - (cos(alpha)**2) - (cos(beta)**2) - (cos(gamma)**2) + (2*cos(alpha)*cos(beta)*cos(gamma)))
#Converts frac coords to carts
matrix_to_cartesian=np.matrix( [[a, b*cos(gamma), c*cos(beta)],
[0, b*sin(gamma), c*(cos(alpha) - cos(beta)*cos(gamma))/sin(gamma)],
[0, 0, c*cell_volume/sin(gamma)]])
#carts to frac
matrix_to_fractional=matrix_to_cartesian.I
#TVs, TV[0,1,2] are the three translation vectors.
TV=matrix_to_cartesian.T
cut=8.0
totsize=insize+outsize
#number of TVs in each dir nearest c inner mol is from edge of outer region
cenpos=[totsize,totsize,totsize]
length=[2*totsize+1,2*totsize+1,2*totsize+1]
maxTVs=insize
outer_maxTVs=insize+outsize
#for diamond outer, don't specify for cube and will fill to cube edges.
print 'name: ',name,'mols_cen: ', mols_cen,' mols_sur: ',mols_sur,' TVs: ', TV
# Place Molecules
prot_neut_cry=Crystal(name=name,mols_cen=mols_cen,mols_sur=mols_sur,cenpos=cenpos,length=length,TVs=TV,maxTVs=maxTVs,mols_outer=mols_outer,outer_maxTVs=outer_maxTVs)
#prot_neut_cry._mols contains all molecules.
#mols[0] contains a list of all molecules in position a, mols[1] all mols in pos'n b, etc.
#mols[0][x,y,z] contains molecule a in position x,y,z
#mols may as such be iterated over in a number of ways to consider different molecules.
print 'state',state
print 'q: ', qdict[state]
for atom in prot_neut_cry()._mols[0][prot_neut_cry()._cenpos[0]][prot_neut_cry()._cenpos[1]][prot_neut_cry()._cenpos[2]]():
atom()._crg=qdict[state]
prot_neut_cry().print_posns()
#Calculate Properties:
print strftime("%a, %d %b %Y %X +0000", gmtime())
E0 = np.matrix([0.,0.,0.])
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Calc jm'
jm = JMatrix(jmtype='Stern',cutoff=0.)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Calc dips:'
d = get_dipoles(E0=E0,jm=jm._m,cutoff=cut)
print strftime("%a, %d %b %Y %X +0000", gmtime())
Efield = get_electric_field(E0)
potential = get_potential()
print strftime("%a, %d %b %Y %X +0000", gmtime())
#print 'dips', d
print 'splitting dips onto atoms'
split_d = split_dipoles_onto_atoms(d)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'summing dips:'
tot = np.matrix([0.,0.,0.])
for dd in split_d:
tot += dd
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'total dip moment', tot
Uqq = np.multiply(get_U_qq(potential=potential),27.211)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Uqq', Uqq
Uqd = np.multiply(get_U_qdip(dips=d,Efield=Efield),27.211)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Uqd', Uqd
Udd = np.multiply(get_U_dipdip(jm=jm._m,dips=d.T),27.211)
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Udd', Udd
energyev = Udd+Uqd+Uqq
print 'energyev', energyev
energy=energyev/27.211
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'Making .dat cross sections for gnuplot'
# print TVs
if not os.path.exists('Dips_Posns_TVs'): os.makedirs('Dips_Posns_TVs')
f = open('Dips_Posns_TVs/%s_TVs.dat' % name, 'w')
TVstr=str(str(TV[0,0]) + ' ' + str(TV[0,1]) + ' ' + str(TV[0,2]) + '\n' + str(TV[1,0]) + ' ' + str(TV[1,1]) + ' ' + str(TV[1,2]) + '\n' + str(TV[2,0]) + ' ' + str(TV[2,1]) + ' ' + str(TV[2,2])+ '\n')
f.write(TVstr)
f.flush()
f.close()
# print dipoles
if not os.path.exists('Dips_Posns_TVs'): os.makedirs('Dips_Posns_TVs')
f = open('Dips_Posns_TVs/%s_dipoles.dat' % name, 'w')
for dd in split_d:
dstr=str(dd)
f.write(dstr)
f.write('\n')
f.flush()
f.close()
# print properties for charge in centrepos
time=strftime("%a, %d %b %Y %X +0000", gmtime())
f = open('%s_properties.csv' % name, 'w')
f.write ('time\tname\tmols_cen\tmols_sur\tmols_outer\tinsize\toutsize\tenergyev\tUqq\tUqd\tUdd\tTotdip_x\tTotdip_y\tTotdip_z')
f.write ('\n%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (time,name,mols_cen,mols_sur,mols_outer,insize,outsize,energyev,Uqq,Uqd,Udd,tot[0,0],tot[0,1],tot[0,2]))
f.flush()
f.close()
# print header for reorgs
f = open('reorg_energies_%s_properties.csv' % name, 'w')
f.write ('time\tname\tmols_cen\tmols_sur\tmols_outer\tinsize\toutsize\ta\tb\tc\tmolincell\tReorg(eV)')
f.flush()
f.close()
# REORGANISATION ENERGIES
#Note that this assumes a cube, and values for which
for dist in range(0,(length[0]/2)+1,1):
print '\n\nDIST: ', dist, '\n'
for a in range(prot_neut_cry()._cenpos[0]-dist,prot_neut_cry()._cenpos[0]+dist+1,1):
for b in range(prot_neut_cry()._cenpos[1]-dist,prot_neut_cry()._cenpos[1]+dist+1,1):
for c in range(prot_neut_cry()._cenpos[2]-dist,prot_neut_cry()._cenpos[2]+dist+1,1):
print strftime("%a, %d %b %Y %X +0000", gmtime())
print 'a,b,c',a,b,c
for molincell in range(0,len(prot_neut_cry()._mols),1):
prot_neut_cry().calc_reorg_shareq(a1=prot_neut_cry()._cenpos[0],b1=prot_neut_cry()._cenpos[1],c1=prot_neut_cry()._cenpos[2],molincell1=0,a2=a,b2=b,c2=c,molincell2=molincell,jm=jm._m,oldUqd=Uqd)
print 'Reorg: ', prot_neut_cry()._reorgs_shareq[molincell][a][b][c]
f = open('reorg_energies_%s_properties.csv' % name, 'a')
f.write ('\n%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (time,name,mols_cen,mols_sur,mols_outer,insize,outsize,a,b,c,molincell,prot_neut_cry()._reorgs_shareq[molincell][a][b][c]))
f.flush()
f.close()
# Redo this and overwrite after each set to ensure we have some even if not all reorgs complete
prot_neut_cry().print_reorgs_shareq()
print 'Job Completed Successfully.'
| [
"sheridan.few@gmail.com"
] | sheridan.few@gmail.com |
e8a386bad38ef6cc7d818de97995e643f560c1bd | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/DeviceTradeInfoList.py | cff9f1b8ce1fc1d0899c018d6cbef05ec5c7fbad | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 12,090 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
def _alipay_field_property(field_name):
    """Create a read/write property backed by the '_<field_name>' slot."""
    private_name = '_' + field_name

    def getter(self):
        return getattr(self, private_name)

    def setter(self, value):
        setattr(self, private_name, value)

    return property(getter, setter)


class DeviceTradeInfoList(object):
    """Value object holding per-device trade statistics.

    Every field defaults to None.  to_alipay_dict() serializes only truthy
    fields; from_alipay_dict() restores only the keys present in the input.
    """

    # Serialization walks this tuple, so dict insertion order matches the
    # original field declaration order.
    _FIELD_NAMES = (
        'biz_tid',
        'dau',
        'device_face_trade_dau',
        'device_face_trade_dau_d_value',
        'device_name',
        'device_sn',
        'device_status',
        'face_trade_cnt',
        'face_trd_amt',
        'face_trd_cnt_rate',
        'face_trd_user_cnt_rate',
        'face_trd_user_cnt_rate_d_value',
        'gmt_active',
        'iot_trd_up',
        'iot_trd_user_cnt',
        'iot_trd_user_cnt_d_value',
        'max_dt',
        'merchant_pid',
        'shop_id',
        'trade_amt',
        'trade_cnt',
    )

    def __init__(self):
        # All fields start out unset.
        for field_name in self._FIELD_NAMES:
            setattr(self, '_' + field_name, None)

    def to_alipay_dict(self):
        """Serialize truthy fields to a plain dict, recursing into any value
        that exposes its own to_alipay_dict()."""
        params = dict()
        for field_name in self._FIELD_NAMES:
            value = getattr(self, field_name)
            if value:  # falsy values (None, 0, '', ...) are omitted
                if hasattr(value, 'to_alipay_dict'):
                    params[field_name] = value.to_alipay_dict()
                else:
                    params[field_name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a dict, copying only the keys present.

        Returns None when the input dict is falsy (None or empty).
        """
        if not d:
            return None
        o = DeviceTradeInfoList()
        for field_name in DeviceTradeInfoList._FIELD_NAMES:
            if field_name in d:
                setattr(o, field_name, d[field_name])
        return o


# Attach a property for every declared field so that plain attribute access
# (e.g. obj.biz_tid) behaves exactly as the hand-written properties did.
for _field_name in DeviceTradeInfoList._FIELD_NAMES:
    setattr(DeviceTradeInfoList, _field_name, _alipay_field_property(_field_name))
del _field_name
| [
"jiandong.jd@antfin.com"
] | jiandong.jd@antfin.com |
1f5a76e3f6844a408820537f70abaf3f2edeccb9 | 5643f360a6f57e3d904bed3d63ada7d2eeda20b3 | /unsupervised_learning/PCA.py | a92a707375a6887db188d66628ffab5e4a28df3b | [] | no_license | Abhi551/Machine-learning | 6b0f0530c9b78fa8fdf4c0da5aff680e2f236bf5 | 6f134830d4a0b038698df183f71cd118a93e1844 | refs/heads/master | 2021-10-23T03:16:08.815210 | 2019-03-14T13:10:44 | 2019-03-14T13:10:44 | 125,890,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,887 | py | ## unsupervised model used for extracting important variable from large set of variables
## in a data set .
## It extracts low dimensinonal set of features from a high dimensinonal dataset
## to capture as much information as possible
## best when 3 or more features are present in dataset
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.datasets import load_breast_cancer
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from adspy_shared_utilities import plot_labelled_scatter
cancer_data = load_breast_cancer()
## returns data and target both using return_X_y
x_cancer , y_cancer = load_breast_cancer(return_X_y = True)
## performing preprocessing on the datasets
## so that each feature have zero mean and unit variance
scaler = StandardScaler()
x_fit = scaler.fit(x_cancer)
x_transform = x_fit.transform(x_cancer)
print (x_transform.shape)
## the final results will give the data which have zero mean and variance of data is unity
## specify the PCA object with 2 features to retain only
## and fitting the transformed data in PCA object
pca = PCA(n_components = 2).fit(x_transform)
print (pca)
## last step is to
## put the transformed data in the pca object to give the final transformed data
x_final = pca.transform(x_transform)
print (x_final.shape)
## using the same result on real world datasets
plot_labelled_scatter(x_final , y_cancer , ['malignant', 'benign'])
## creating a heatmap for each feature
## i.e. plotting the magnitude of each feature value for first 2 principal components
fig = plt.figure( figsize = (8,4) )
print (pca.components_.shape)
plt.imshow(pca.components_ , interpolation = 'none' , cmap = "plasma")
feature_names = list(cancer_data.feature_names)
plt.gca().set_xticks(np.arange(-.5 , len(feature_names)))
plt.gca().set_yticks(np.arange(.5 , 2 ))
plt.gca().set_xticklabels(feature_names , rotation = 90 , ha = "left" , fontsize = 12)
plt.gca().set_yticklabels(["First PC" , "Second PC"] , va = "bottom" , fontsize = 12)
plt.colorbar(orientation = "horizontal" , ticks = [pca.components_.min() , 0 ,
pca.components_.max()] , pad = .65)
plt.show()
## on fruits dataset
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from adspy_shared_utilities import plot_labelled_scatter
df = pd.read_csv('fruit_data_with_colors.txt', delimiter ="\t")
## preprocessing of data
x_fruits = df[['mass','width','height', 'color_score']]
y_fruits = df[['fruit_label']]
print (x_fruits.head())
scaler = StandardScaler()
x_fruits = scaler.fit(x_fruits).transform(x_fruits)
## using PCA
for i in range(2,5):
pca = PCA(n_components = 2).fit(x_fruits)
x_pca = pca.transform(x_fruits)
plot_labelled_scatter(x_pca , y_fruits , ["apple" , "mandarian" , "orange" , "lemon"])
| [
"abhichauhan551@gmail.com"
] | abhichauhan551@gmail.com |
62a8b9674aef0f3af6fd82b82dbf39558c49f35c | cc7d7f6128b81a959dffaf23627d7dfc95558209 | /ResNet50-2d/resnet.py | 667200d427262fecc78b6fa1025102ef5f07b55c | [] | no_license | abandonsea/M3D | 8fb7a9297789afab74dd3d0bb573583703932325 | 68afe5e79266caad9d9afa45fc9d754033b288d3 | refs/heads/master | 2022-11-23T07:51:34.680151 | 2020-07-22T00:59:33 | 2020-07-22T00:59:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,976 | py | import torch.nn as nn
import math, torch
import torch.utils.model_zoo as model_zoo
from torch.nn import init
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers, num_classes=1000, train=True):
self.inplanes = 64
super(ResNet, self).__init__()
self.istrain = train
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=1)
self.avgpool = nn.AvgPool2d((16,8), stride=1)
self.num_features = 128
self.feat = nn.Linear(512 * block.expansion, self.num_features)
self.feat_bn = nn.BatchNorm1d(self.num_features)
init.kaiming_normal(self.feat.weight, mode='fan_out')
init.constant(self.feat.bias, 0)
init.constant(self.feat_bn.weight, 1)
init.constant(self.feat_bn.bias, 0)
self.drop = nn.Dropout(0.5)
self.classifier = nn.Linear(self.num_features, num_classes)
init.normal(self.classifier.weight, std=0.001)
init.constant(self.classifier.bias, 0)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.feat(x)
if self.istrain:
x = self.feat_bn(x)
x = self.relu(x)
x = self.drop(x)
x = self.classifier(x)
return x
def resnet50(pretrained='True', num_classes=1000, train=True):
model = ResNet(Bottleneck, [3, 4, 6, 3], num_classes, train)
#if pretrained:
# model.load_state_dict('resnet50-19c8e357.pth')
weight = torch.load(pretrained)
static = model.state_dict()
for name, param in weight.items():
if name not in static:
print 'not load weight ', name
continue
if isinstance(param, nn.Parameter):
print 'load weight ', name, type(param)
param = param.data
static[name].copy_(param)
#model.load_state_dict(weight)
return model
| [
"noreply@github.com"
] | abandonsea.noreply@github.com |
974e09555217970e71c0379ec2eb46868bfa5cf8 | 865f4aa69ddd6574267cdde6f8c5dd5c8a6d6f31 | /supportClasses.py | 8c348c0e3bec1c2c2429f10dbb6764e91bb76f07 | [] | no_license | usuaero/PropulsionOptimization | 10fbcfec062f107107a976f0714d77a156087f6b | f637bb230168b5d3415fd78457e854b1e85eb711 | refs/heads/master | 2021-07-08T19:46:10.176858 | 2020-07-17T17:46:05 | 2020-07-17T17:46:05 | 149,319,160 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 17,537 | py | import numpy as np
import copy
from scipy import integrate
import scipy.interpolate as interp
import os
from os import path
import matplotlib.pyplot as plt
import polyFit as fit
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from std_atmos import *
import sqlite3 as sql
from random import randint
#Classes in this file are defined such that their information is retrieved from the database (a database cursor must be given).
#If the component's exact name or id are given, that component w_ill be selected. If the manufacturer is given,
#a random component from that manufacturer w_ill be selected. If nothing is specified, a random component is selected.
#The number of battery cells should be specified. If not, it w_ill be randomly selected.
#Converts rads per second to rpms
def toRPM(rads):
return rads*30/np.pi
#A class that defines a battery
class Battery:
#Initialize the class from database
def __init__(self, dbcur, name=None, manufacturer=None, dbid=None, numCells=None, capacity=None):
command = "select * from Batteries"
if name is not None:
if manufacturer is not None or dbid is not None:
raise ValueError("Too many battery parameters specified.")
command = command+" where Name = '"+name+"'"
elif manufacturer is not None:
if dbid is not None:
raise ValueError("Too many battery parameters specified.")
command = command+" where manufacturer = '"+manufacturer+"'"
elif dbid is not None:
command = command+" where id = "+str(dbid)
if capacity is not None:
command = command+" order by abs("+str(capacity)+"-Capacity)"
command = command+" order by RANDOM() limit 1"
dbcur.execute(command)
record = np.asarray(dbcur.fetchall())[0]
if numCells is None:
numCells = randint(1,8)
#Define members from inputs
self.n = int(numCells)
self.cellCap = float(record[4])
self.cellR = float(record[6])
self.name = record[1]
self.manufacturer = record[2]
self.cellWeight = float(record[5])
self.iMax = float(record[3])
self.cellV = float(record[7])
#Members derived from inputs
self.V0 = self.cellV * self.n
self.R = self.cellR * self.n
self.weight = self.cellWeight*self.n
def printInfo(self):
print("Battery:",self.name)
print("\tManufacturer:",self.manufacturer)
print("\tCapacity:",self.cellCap)
print("\tNum Cells:",self.n)
print("\tVoltage:",self.V0)
print("\tWeight:",self.weight)
#A class that defines an ESC (Electronic Speed Controller)
class ESC:
#Initialization of the class from database
def __init__(self, dbcur, name=None, manufacturer=None, dbid=None, I_max=None):
command = "select * from ESCs"
if name is not None:
if manufacturer is not None or dbid is not None:
raise ValueError("Too many esc parameters specified.")
command = command+" where Name = '"+name+"'"
elif manufacturer is not None:
if dbid is not None:
raise ValueError("Too many ESC parameters specified.")
command = command+" where manufacturer = '"+manufacturer+"'"
elif dbid is not None:
command = command+" where id = "+str(dbid)
if I_max is not None:
command = command+" order by abs("+str(I_max)+"-I_motorax)"
command = command+" order by RANDOM() limit 1"
dbcur.execute(command)
record = np.asarray(dbcur.fetchall())[0]
self.R = float(record[6])
self.name = record[1]
self.manufacturer = record[2]
self.iMax = float(record[3])
self.weight = float(record[5])
def printInfo(self):
print("ESC:",self.name)
print("\tManufacturer:",self.manufacturer)
print("\tMax Current:",self.iMax)
print("\tWeight:",self.weight)
#A class that defines an electric motor.
class Motor:
#Initialization of the class from the database
def __init__(self, dbcur, name=None, manufacturer=None, dbid=None, Kv=None):
command = "select * from Motors"
if name is not None:
if manufacturer is not None or dbid is not None:
raise ValueError("Too many motor parameters specified.")
command = command+" where Name = '"+name+"'"
elif manufacturer is not None:
if dbid is not None:
raise ValueError("Too many motor parameters specified.")
command = command+" where manufacturer = '"+manufacturer+"'"
elif dbid is not None:
command = command+" where id = "+str(dbid)
if Kv is not None:
command = command+" order by abs("+str(Kv)+"-kv)"
command = command+" order by RANDOM() limit 1"
dbcur.execute(command)
record = np.asarray(dbcur.fetchall())[0]
self.Kv = float(record[3])
self.Gr = float(record[4])
self.I0 = float(record[6])
self.R = float(record[5])
self.name = record[1]
self.manufacturer = record[2]
self.weight = float(record[7])
def printInfo(self):
print("Motor:",self.name)
print("\tManufacturer:",self.manufacturer)
print("\tKv:",self.Kv)
print("\tWeight:",self.weight)
#A class of propellers defined by database test files
class Propeller:
#Initializes the prop from the database
def __init__(self, dbcur, name=None, manufacturer=None, dbid=None, diameter=None, pitch=None):
command = "select * from Props"
if name is not None:
if manufacturer is not None or dbid is not None:
raise ValueError("Too many prop parameters specified.")
command = command+" where Name = '"+name+"'"
elif manufacturer is not None:
if dbid is not None:
raise ValueError("Too many prop parameters specified.")
command = command+" where manufacturer = '"+manufacturer+"'"
elif dbid is not None:
command = command+" where id = "+dbid
if diameter is not None:
command = command+" order by abs("+str(diameter)+"-Diameter)"
if pitch is not None:
command = command+" order by abs("+str(pitch)+"-Pitch)"
command = command+" order by RANDOM() limit 1"
dbcur.execute(command)
record = np.asarray(dbcur.fetchall())[0]
self.name = record[1]
self.manufacturer = record[2]
self.diameter = float(record[3])
self.pitch = float(record[4])
self.thrustFitOrder = int(record[5])
self.fitOfThrustFitOrder = int(record[6])
self.powerFitOrder = int(record[7])
self.fitOfPowerFitOrder = int(record[8])
numThrustCoefs = (self.thrustFitOrder+1)*(self.fitOfThrustFitOrder+1)
self.thrustCoefs = record[9:numThrustCoefs+9].reshape((self.thrustFitOrder+1,self.fitOfThrustFitOrder+1)).astype(np.float)
self.powerCoefs = record[numThrustCoefs+9:].reshape((self.powerFitOrder+1,self.fitOfPowerFitOrder+1)).astype(np.float)
#These parameters w_ill be set by later functions
self.v_inf = 0.0
self.angVel = 0.0
def printInfo(self):
print("Propeller:",self.name)
print("\tManufacturer:",self.manufacturer)
print("\tDiameter:",self.diameter)
print("\tPitch:",self.pitch)
def CalcTorqueCoef(self):
self.rpm = toRPM(self.angVel)
self.rps = self.rpm/60
if abs(self.rps)<1e-10:
self.J = 10000 #To prevent errors. Since angular velocity is 0, actual value w_ill also be 0.
else:
self.J = self.v_inf/(self.rps*self.diameter/12)
a = fit.poly_func(self.powerCoefs.T, self.rpm)
if(a[-1]>0):#Quadratic coefficient should always be non-positive
a[-1] = 0
self.Cl = fit.poly_func(a, self.J)/2*np.pi
def CalcThrustCoef(self):
self.rpm = toRPM(self.angVel)
self.rps = self.rpm/60
if abs(self.rps)<1e-10:
self.J = 10000 #To prevent errors. Since angular velocity is 0, actual value w_ill also be 0.
else:
self.J = self.v_inf/(self.rps*self.diameter/12)
a = fit.poly_func(self.thrustCoefs.T, self.rpm)
if(a[-1]>0):#Quadratic coefficient should always be non-positive
a[-1] = 0
self.Ct = fit.poly_func(a, self.J)
def PlotCoefs(self):
#Plot thrust and torque coefficients
rpms = np.linspace(0,35000,10)
Js = np.linspace(0,1.4,10)
fig = plt.figure(figsize=plt.figaspect(1.))
fig.suptitle(self.name)
ax = fig.add_subplot(1,2,1, projection='3d')
for rpm in rpms:
a = fit.poly_func(self.thrustCoefs.T, rpm)
if(a[-1]>0):#Quadratic coefficient should always be non-positive
a[-1] = 0
thrust = fit.poly_func(a, Js)
rpmForPlot = np.full(len(thrust),rpm)
ax.plot(Js,rpmForPlot,thrust, 'r-')
ax.set_title("Predicted Thrust")
ax.set_xlabel("Advance Ratio")
ax.set_ylabel("RPM")
ax.set_zlabel("Thrust Coefficient")
ax = fig.add_subplot(1,2,2, projection='3d')
for rpm in rpms:
a = fit.poly_func(self.powerCoefs.T, rpm)
if(a[-1]>0):#Quadratic coefficient should always be non-positive
a[-1] = 0
power = fit.poly_func(a, Js)
rpmForPlot = np.full(len(power),rpm)
ax.plot(Js,rpmForPlot,power, 'r-')
ax.set_title("Predicted Power")
ax.set_xlabel("Advance Ratio")
ax.set_ylabel("RPM")
ax.set_zlabel("Power Coefficient")
plt.show()
#A class that defines an entire electric propulsion unit
class PropulsionUnit:
#Initialize the class from subclasses which are previously initialized
def __init__(self, prop, motor, battery, esc, altitude):
self.prop = prop
self.motor = motor
self.batt = battery
self.esc = esc
_,_,_,self.airDensity = statee(altitude) # Converts kg/m^3 to slug/ft^3
#Initialize exterior parameters to be set later
self.prop.v_inf = 0
self.prop.angVel = 0
self.I_motor = 0 #Instantaneous current being drawn through the motor
#Computes motor torque (ft*lbf) given throttle setting and revolutions (rpm)
def CalcMotorTorque(self, throttle, revs):
etaS = 1 - 0.078*(1 - throttle)
self.I_motor = (etaS*throttle*self.batt.V0 - (self.motor.Gr/self.motor.Kv)*revs)/(etaS*throttle*self.batt.R + self.esc.R + self.motor.R)
# Note: the 7.0432 constant converts units [(Nm/ftlb)(min/s)(rad/rev)]^-1
return 7.0432*self.motor.Gr/self.motor.Kv * (self.I_motor - self.motor.I0)
#Computes thrust produced at a given cruise speed and throttle setting
def CalcCruiseThrust(self, v_cruise, throttle):
if v_cruise == 0 and throttle == 0:
self.prop.angVel = 0
return 0 #Don't even bother
self.prop.v_inf = v_cruise
#Determine the shaft angular velocity at which the motor torque and propeller torque are matched
#Uses a secant method
err_max = 0.000001
err_aprx = 1 + err_max #So that it executes at least once
w_0 = 950 #An initial guess of the prop's angular velocity
w_max = self.motor.Kv*self.batt.V0*throttle*(2*np.pi/60) # Theoretically the upper limit
self.prop.angVel = w_0
self.prop.CalcTorqueCoef()
f_0 = self.CalcMotorTorque(throttle, toRPM(w_0)) - self.prop.Cl*self.airDensity*(w_0/(2*np.pi))**2*(self.prop.diameter/12)**5
w_1 = w_0 * 1.1
iterations = 0
while err_aprx >= err_max and iterations < 1000:
iterations = iterations + 1
self.prop.angVel = w_1
self.prop.CalcTorqueCoef()
T_motor = self.CalcMotorTorque(throttle, toRPM(w_1))
T_prop = self.prop.Cl*self.airDensity*(w_1/(2*np.pi))**2*(self.prop.diameter/12)**5
f_1 = T_motor - T_prop
w_2 = w_1 - (f_1*(w_0 - w_1))/(f_0 - f_1)
if w_2 < 0: # Prop angular velocity will never be negative even if windmilling
w_2 = 0.00001
err_aprx = abs((w_2 - w_1)/w_2)
w_0 = w_1
f_0 = f_1
w_1 = w_2
if False: #iterations >= 1000:
w = np.linspace(0,30000,10000)
T_motor = np.zeros(10000)
T_prop = np.zeros(10000)
for i,w_i in enumerate(w):
self.prop.angVel = w_i
self.prop.CalcTorqueCoef()
T_motor[i] = self.CalcMotorTorque(throttle, toRPM(w_i))
T_prop[i] = self.prop.Cl*self.airDensity*(w_i/(2*np.pi))**2*(self.prop.diameter/12)**5
plt.plot(w,T_motor)
plt.plot(w,T_prop)
plt.title("Torques vs Angular Velocity")
plt.legend(["Motor Torque","Prop Torque"])
plt.show()
self.prop.angVel = w_2
self.prop.CalcThrustCoef()
_ = self.CalcMotorTorque(throttle, toRPM(w_2)) # To make sure member variables are fully updated
return self.prop.Ct*self.airDensity*(w_2/(2*np.pi))**2*(self.prop.diameter/12)**4
#Computes required throttle setting for a given thrust and cruise speed
def CalcCruiseThrottle(self, v_cruise, T_req):
#Uses a secant method
err_max = 0.000001
err_aprx = 1 + err_max
t_0 = 0.5
T_0 = self.CalcCruiseThrust(v_cruise, t_0)
t_1 = t_0*1.1
iterations = 0
while err_aprx >= err_max and iterations < 1000:
iterations = iterations + 1
T_1 = self.CalcCruiseThrust(v_cruise, t_1) - T_req
t_2 = t_1 - (T_1*(t_0 - t_1))/(T_0 - T_1)
err_aprx = abs((t_2 - t_1)/t_2)
if t_2 > 10:
t_2 = 1.1
elif t_2 < -10:
t_2 = -0.1
t_0 = t_1
T_0 = T_1
t_1 = t_2
#if iterations == 1000:
# t = np.linspace(0,1.0,100)
# T = np.zeros(100)
# for i in range(100):
# T[i] = self.CalcCruiseThrust(v_cruise, t[i]) - T_req
# plt.plot(t,T)
# plt.show()
if t_2 > 1 or t_2 < 0:
return None
self.CalcCruiseThrust(v_cruise,t_2) # To make sure member variables are fully updated
return t_2
#Plots thrust curves for propulsion unit up to a specified airspeed
def PlotThrustCurves(self, v_min, v_max, numVels, numThrSets):
vel = np.linspace(v_min, v_max, numVels)
thr = np.linspace(0, 1, numThrSets)
thrust = np.zeros((numVels, numThrSets))
rpm = np.zeros((numVels,numThrSets))
for i in range(numVels):
for j in range(numThrSets):
#print("Freestream Velocity: ", vel[i])
#print("Throttle Setting: ", thr[j])
thrust[i][j] = self.CalcCruiseThrust(vel[i], thr[j])
rpm[i][j] = toRPM(self.prop.angVel)
fig = plt.figure()
fig.suptitle("Components: " + str(self.prop.name) + ", " + str(self.motor.name) + ", and " + str(self.batt.name))
ax0 = fig.add_subplot(1,2,1)
for i in range(numVels):
ax0.plot(thr, thrust[i])
ax0.set_title("Thrust")
ax0.set_ylabel("Thrust [lbf]")
ax0.set_xlabel("Throttle Setting")
ax0.legend(list(vel), title="Airspeed [ft/s]")
ax1 = fig.add_subplot(1,2,2)
for i in range(numVels):
ax1.plot(thr, rpm[i])
ax1.set_title("Prop Speed")
ax1.set_ylabel("Speed [rpms]")
ax1.set_xlabel("Throttle Setting")
plt.show()
#Determines how long the battery w_ill last based on a required thrust and cruise speed
def CalcBattLife(self, v_cruise, T_req):
throttle = self.CalcCruiseThrottle(v_cruise, T_req)
if(throttle==None or self.I_motor > self.esc.iMax or self.I_motor > self.batt.iMax):
return None
#print("Throttle Setting:",throttle)
#print("Current Draw:",self.I_motor)
runTime = (self.batt.cellCap/1000)/self.I_motor*60 # Gives run time in minutes, assuming nominal cell capacity and constant battery votlage
if runTime < 0:
return None
return runTime
def GetWeight(self):#Returns weight of electrical components in pounds
return (self.batt.weight + self.motor.weight + self.esc.weight)/16
def printInfo(self):
print("----Propulsion Unit----")
self.prop.printInfo()
self.motor.printInfo()
self.esc.printInfo()
self.batt.printInfo()
| [
"cory.goates@aggiemail.usu.edu"
] | cory.goates@aggiemail.usu.edu |
cbe42e6c08a217ab9d3f9925b59403483b0cd28e | 929fc8dd47b91c963c8c2f81d88e3d995a9dfc7c | /src/data_structure/hash_table/set.py | 7b7f0026e90534a21d8a0dfa4479732d254fb1b3 | [] | no_license | 1325052669/leetcode | fe7571a9201f4ef54089c2e078810dad11205b14 | dca40686c6a280bd394feb8e6e78d40eecf854b9 | refs/heads/master | 2023-04-01T17:53:30.605822 | 2021-04-10T15:17:45 | 2021-04-10T15:17:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,538 | py | from typing import List
# https://leetcode.com/problems/happy-number/
class Solution202:
def isHappy(self, n: int) -> bool:
cycle = set()
while n != 1 and n not in cycle:
cycle.add(n)
n = sum(pow(int(i), 2) for i in str(n))
return n == 1
# https://leetcode.com/problems/longest-consecutive-sequence/
class Solution128:
def longestConsecutive(self, nums: List[int]) -> int:
nums_set = set(nums)
res = 0
for num in nums:
if num - 1 in nums_set:
continue
count = 0
curr = num
while curr in nums_set:
curr += 1
count += 1
res = max(res, count)
return res
# https://leetcode.com/problems/valid-sudoku/
class Solution36:
def isValidSudoku(self, board: List[List[str]]) -> bool:
rows = [set() for i in range(9)]
columns = [set() for i in range(9)]
boxes = [set() for i in range(9)]
# validate a board
for i in range(9):
for j in range(9):
num = board[i][j]
if num == '.': continue
num = int(num)
box_idx = (i // 3) * 3 + j // 3
if num in rows[i]: return False
rows[i].add(num)
if num in columns[j]: return False
columns[j].add(num)
if num in boxes[box_idx]: return False
boxes[box_idx].add(num)
return True | [
"js7995@nyu.edu"
] | js7995@nyu.edu |
88b52c6201b65b5762c9b91a6607157af7bc64bd | 548c18a693e4dd52765dcef0551e928a679aced7 | /practice prgms/prime numbers within an interval-simple program.py | e6d95912529a31cb3a0eed2030c83748d6a32837 | [] | no_license | iamsureshtumu/py-prgms | fd8517cd9f98b8b03bad358ac14f7abe58783428 | 56a619130d588356f9754d85339b6bdc3f645f5a | refs/heads/main | 2023-02-12T03:22:46.164020 | 2021-01-07T04:12:12 | 2021-01-07T04:12:12 | 327,499,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | # Python program to display all the prime numbers within an interval
start = 50
end = 100
#lower = int(input("Enter lower range: "))
#upper = int(input("Enter upper range: "))
print("Prime numbers between",start,"and",end,"are:")
for num in range(start,end + 1):
# prime numbers are greater than 1
if num > 1:
for i in range(2,num):
if (num % i) == 0:
break
else:
print(num)
| [
"sureshtumu3691@gmail.com"
] | sureshtumu3691@gmail.com |
b605952411c7c518079b629f18a9567374f734d1 | 7f98e3add3d755d81efa5becdf795532f886b119 | /datascraper/2-cleanDataset.py | 0881466924d6aaf19772fe0bf2947f3902cd42e7 | [] | no_license | fgolemo/steamGraph | 4e67d08bb111363def7e26c42ad1201a90ee9e9d | d4bd8e25d345ada6461fe94846ff303367313e66 | refs/heads/master | 2020-12-25T14:33:28.418661 | 2017-06-29T21:38:48 | 2017-06-29T21:38:48 | 67,958,645 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,124 | py | import json
from time import strftime
from tqdm import tqdm
nodes = []
edges = []
sids = []
tags = set()
movies = 0
software = 0
total = 0
lowRating = 0
preset = "1k"
gamesToGet = {"1k": 1000, "3k": 3000}
with open('../public/data/steamGraph6k3-170629.json') as data_file:
data = json.load(data_file)
data = sorted(data, key=lambda k: k['players'], reverse=True)
count = 0
for item in tqdm(data):
total += 1
tagsTmp = [t.encode('ascii', 'ignore') for t in item["tags"]]
tags = set(tagsTmp + list(tags))
if "Movie" in tagsTmp or "Documentary" in tagsTmp or item["name"] == "Kung Fury":
movies+=1
continue
if "Software" in tagsTmp or "Utilities" in tagsTmp \
or "Game Development" in tagsTmp or "Video Production" in tagsTmp \
or "Design & Illustration" in tagsTmp or item["name"] == "Tilt Brush":
software+=1
continue
if item["players"] < 100: # this is for the 3K graph
lowRating +=1
continue
if count == gamesToGet[preset]:
break
count += 1
rating = item["rating"].encode('ascii', 'ignore')
if rating != "":
rating = int(rating)
else:
rating = -1
sid = item["id"].encode('ascii', 'ignore')
try:
sid = int(sid)
except ValueError:
urlParts = item['link'].split('/')
sid = int(urlParts[-1].encode('ascii', 'ignore'))
if sid in sids:
print item
continue
# if item['rank'] > 1000:
# continue
sids.append(sid)
itemClean = {
'players': item['players'],
'tags': tagsTmp,
'rating': rating,
'label': item['name'],#.encode('ascii', 'ignore'),
# 'rank': item["rank"],
'id': sid,
'link': item["link"].encode('ascii', 'ignore'),
'value':0
}
# print itemClean
nodes.append(itemClean)
for edge in [int(e.encode('ascii', 'ignore')) for e in item['related']]:
edgeClean = {
'id': '{}-{}'.format(sid, edge),
'from': sid,
'to': edge,
'value': 0
}
edgeExists = False
for otherEdge in edges:
if otherEdge['to'] == sid and otherEdge['from'] == edge:
edgeExists = True
break
if not edgeExists:
edges.append(edgeClean)
#{id: '1-3', from: 1, to: 3, value: 0}
edgesClean = []
for e in edges:
if e['to'] in sids and e['from'] in sids:
edgesClean.append(e)
with open('../public/data/steamNet'+preset+"-"+strftime("%y%m%d")+'.json', 'w') as f:
json.dump({'nodes': nodes, 'edges': edgesClean}, f)
#
# for t in tags:
# print t+""
print "\n"
print total
print lowRating
print movies
print software
| [
"fgolemo@gmail.com"
] | fgolemo@gmail.com |
bc85cc771df7166db948934998075f139f7db7fc | 0228b665c61661b634f10afce2f76f2777fa29c2 | /live_examples/create_steam.py | c3c4414aa46621c367578858ab18ba828039c2f8 | [
"MIT"
] | permissive | bernieyangmh/pili-sdk-python | 18c9e99f5dac194228e9d7a40aee556e1db05356 | aeef24ad9629bb2247aa89dd7bcc3b8fb0d6a58c | refs/heads/master | 2021-09-11T09:47:28.859714 | 2018-04-06T13:04:42 | 2018-04-06T13:04:42 | 112,150,168 | 0 | 0 | null | 2017-11-27T05:10:23 | 2017-11-27T05:10:23 | null | UTF-8 | Python | false | false | 482 | py | # -*- coding: utf-8 -*-
"""
https://developer.qiniu.com/pili/api/2515/create-a-flow
创建流
"""
from pili import Mac, Hub
# 替换成自己 Qiniu 账号的 AccessKey
access_key = "..."
# 替换成自己 Qiniu 账号的 SecretKey
secret_key = "..."
hub_name = "..."
stream_name = "..."
mac = Mac(access_key, secret_key)
hub = Hub(mac, hub_name)
resp = hub.create(key=stream_name)
print(resp.status_code)
print(resp.headers)
print(resp.text)
print(hub.get(stream_name))
| [
"berniey@163.com"
] | berniey@163.com |
c5662e9a18a831b4409983cc1f3015092f92fc02 | ed0b963bebae72542eaf7ba4a0c72f3af7341dc3 | /plot.py | 2bb1639643e44a668b38d55853eb952a6063e8e3 | [] | no_license | csayres/aa544 | 791a7ca9ae67fc9e0d1933bdd7b47553f9686a42 | 863a45c6020ca5ac0766a48d66990995bfa16ad3 | refs/heads/master | 2021-01-01T19:42:37.739446 | 2014-06-11T18:08:58 | 2014-06-11T18:08:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,066 | py | # -*- coding: utf-8 -*-
import numpy
import itertools
import time
import scipy.interpolate
import numpy.fft
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.mlab as ml
import glob
import os
import bisect
import cPickle as pickle
# from matplotlib import rc
# rc('text', usetex=True)
# rc('font', family='serif')
class DataMuncher(object):
    def __init__(self, uvpFile, lagFile, xFile, yFile, residFile):
        """Object for interacting with fortran code output, various plotting methods, etc.

        @param[in] uvpFile: string, path to [uvpFile].dat (u, v, p field output from fortran routine)
        @param[in] lagFile: string, path to file containing lagrangian point positions
        @param[in] xFile: string, path to file of x grid-point coordinates
        @param[in] yFile: string, path to file of y grid-point coordinates
        @param[in] residFile: string, path to file of solver residuals
        """
        # (xMin, xMax, yMin, yMax) window downstream of the bluff body sampled
        # by getFFTSumAndFreq for the Strouhal-number analysis
        self.strouhalBox = (270,280,100,200)
        # lazily-loaded caches; populated by tryOrLoad() on first property access
        self._tVector = None
        self._u = None
        self._v = None
        self._p = None
        self._x = None
        self._y = None
        self._st = None # strohl number effectively frequency
        self.uvpFile = uvpFile
        # parseHeader (defined elsewhere) presumably sets self.gridsize,
        # self.bluffDim and self.re used below -- confirm against its definition
        self.parseHeader(uvpFile)
        self.figSuffix = "grid (%ix%i) box aspect (%s) reynolds (%.2f)" % (self.gridsize[0], self.gridsize[1], self.bluffDim[1]/self.bluffDim[0], self.re)
        # self.uvpFile2Mat(uvpFile)
        # +1.5 shifts the fortran coordinates; assumed grid-origin offset -- TODO confirm
        self.lagPoints = self.lagFile2Mat(lagFile) + 1.5
        self.xPts = numpy.loadtxt(xFile) + 1.5
        self.yPts = numpy.loadtxt(yFile) + 1.5
        self.residMat = self.resid2Mat(residFile)
        # grid files must match the grid size declared in the uvp header
        assert len(self.xPts)==self.gridsize[0]
        assert len(self.yPts)==self.gridsize[1]
        #boxLength = uvpFile.split("_")[-1].split(".")[0]
        # first grid column at/past the downstream edge of the body: where the
        # u-velocity profile is extracted
        self.uProfileIndex = bisect.bisect(self.xPts, numpy.max(self.lagPoints[:,1]))
        self.vProfileIndex = len(self.yPts)/2  # mid-height row (py2 integer division)
        # get first true index
def saveToPickle(self):
pickleDict = {
"_tVector": self._tVector,
"_u": self._u,
"_v": self._v,
"_p": self._p,
"_x": self._x,
"_y": self._y,
}
output = open(self.figSuffix + ".pk", "wb")
pickle.dump(pickleDict, output)
output.close()
def loadFromPickle(self):
pkFile = open("./"+self.figSuffix + ".pk", "rb")
pickleDict = pickle.load(pkFile)
pkFile.close()
self._tVector = pickleDict["_tVector"]
self._u = pickleDict["_u"]
self._v = pickleDict["_v"]
self._p = pickleDict["_p"]
self._x = pickleDict["_x"]
self._y = pickleDict["_y"]
print 'loaded from pickle'
def tryOrLoad(self):
try:
self.loadFromPickle()
except:
print "picklefile not found"
self.uvpFile2Mat(self.uvpFile)
self.saveToPickle()
@property
def tVector(self):
if not self._tVector:
self.tryOrLoad()
return self._tVector
@property
def u(self):
if not self._u:
self.tryOrLoad()
return self._u
@property
def v(self):
if not self._v:
self.tryOrLoad()
return self._v
@property
def p(self):
if not self._p:
self.tryOrLoad()
return self._p
@property
def x(self):
if not self._x:
self.tryOrLoad()
return self._x
@property
def y(self):
if not self._y:
self.tryOrLoad()
return self._y
def getFFTSumAndFreq(self):
xMin = self.strouhalBox[0]
xMax = self.strouhalBox[1]
yMin = self.strouhalBox[2]
yMax = self.strouhalBox[3]
xIndMin = numpy.argmin(numpy.abs(xMin-self.xPts))
xIndMax = numpy.argmin(numpy.abs(xMax-self.xPts))
xInds = range(xIndMin, xIndMax)
yIndMax = numpy.argmin(numpy.abs(yMax-self.yPts))
yIndMin = numpy.argmin(numpy.abs(yMin-self.yPts))
yInds = range(yIndMin,yIndMax)
# FFT stuff
N = len(self.tVector)
dt = self.tVector[1]
#freq = numpy.linspace(0.0, N/2, N/2)
freq = numpy.linspace(0.0, 1.0/(2.0*dt), N/2)
# ft = numpy.zeros(freq.shape, dtype="complex")
ft = numpy.zeros(freq.shape, dtype=float)
mid_ii = len(xInds)*len(yInds)//2
ii = 0
for i in xInds:
for j in yInds:
flatInd = j*len(self.xPts)+i
timeSeries = numpy.asarray([x[flatInd] for x in self.u])
if ii == mid_ii:
ts = timeSeries
# _ft = numpy.fft.fft(timeSeries)[0:N/2]
# ft = ft + _ft*numpy.conj(_ft)
ft = ft + numpy.abs(numpy.fft.fft(timeSeries)[0:N/2])
ii += 1
# set strohl number
trimmedFt = ft[2:]
trimmedFreq = freq[2:]
self._st = trimmedFreq[numpy.argmax(trimmedFt)]*self.bluffDim[0]
return freq, ft, ts
@property
def st(self):
if not self._st:
self.getFFTSumAndFreq()
return self._st
# def getTimeseries(self, u_v_or_p, xLoc, yLoc):
# # find closest index to xLoc, yLoc
# xInd = numpy.argmin(numpy.abs(xLoc-self.xPts))
# yInd = numpy.argmin(numpy.abs(yLoc-self.yPts))
# # find index in _p array corresponding to xInd, yInd
# flatInd = yInd*len(self.xPts)+xInd
# return numpy.asarray([x[flatInd] for x in getattr(self, u_v_or_p)])
# def getFFTAndFreq(self, dt, timeSeries):
# N = len(timeSeries)
# ft = (numpy.fft.fft(timeSeries)[0:N/2])**2
# freq = numpy.linspace(0.0, 1.0/(2.0*dt), N/2)
# # freq = numpy.fft.fftfreq(len(timeSeries), d=dt)
# return freq, ft
def plotFFTSum(self):
freq, ft, ts = self.getFFTSumAndFreq()
fig = plt.figure(figsize=(10,10))
ax = fig.add_subplot(211)
tv = numpy.asarray(self.tVector)
ax.set_xlabel("time")
ax.set_ylabel("pressure")
plt.plot(tv, ts, "k")
ax = fig.add_subplot(212)
plt.plot(freq[1:], ft[1:], "k")
puthere=numpy.argmax(ft[1:])
ax.text(freq[puthere] + .002, 0.95*numpy.max(ft[1:]), 'St = %.2f'%self.st)
ax.set_xlabel("frequency")
ax.set_ylabel("power")
plt.savefig(self.figSuffix + ' fftsum.png', format='png')
plt.close()
# def plotTimeseries(self, u_v_or_p, xLoc, yLoc, figNum):
# fig = plt.figure()
# ax = fig.add_subplot(211)
# x = self.getTimeseries(u_v_or_p, xLoc, yLoc)
# plt.plot(self._tVector, x, ".k")
# ax = fig.add_subplot(212)
# # print self._tVector[0], self._tVector[1]
# dt = self.tVector[1]
# freq, ft = self.getFFTAndFreq(dt, x)
# plt.plot(freq, ft, ".k")
# plt.savefig('timeseries_%i.png'%figNum, format='png')
# plt.close()
def sumFFT(self, xLoc, yRange):
pass
def getTimeFromLine(self, line):
"""@param[in] line from uvpFile containing a new time point
@return float, the time specified in the line
line looks like this: ZONE T="t = 0.1596000000000005E+00" F=POINT, I= 200 J= 80
"""
# keep the strin`g between the ""
return float(line.split('"')[1].split()[-1])
def parseHeader(self, uvpFile):
"""From a line determine the correct grid size
"""
with open(uvpFile, 'r') as f:
line = f.readline()
splitted = line.split()
self.gridsize = [int(splitted[2]), int(splitted[3])]
self.re = float(splitted[6])
self.bluffDim = [float(splitted[8]), float(splitted[10])]
self.aspectRatio = self.bluffDim[1] / self.bluffDim[0]
def resid2Mat(self, residualFile):
"""Convert an residual file (output from fortran code) into a 2D numpy matrix
@param[in] residualFile: string, path to [residualFile].dat, output from fortran routine
@return a 2D numpy array of shape n x [n, i, j, resid]
"""
outArray = []
with open(residualFile, 'r') as f:
lines = f.readlines()
for ind, line in enumerate(lines):
# skip first two lines which only contain header info
# split on whitespace, cast to floats
resid = float(line.split()[-1])
# append to outArray
outArray.append([ind, resid])
# return a numpy matrix
return numpy.asarray(outArray, dtype=float)
def plotResidSemiLog(self, figName):
"""Plot the residual vs number of steps
"""
plt.figure()
plt.plot(self.residMat[:,0], numpy.log(self.residMat[:,-1]), '.k')
plt.xlabel("Step Number")
plt.ylabel("Log Residual")
plt.savefig(figName + '.eps', format='eps')
plt.close()
def lagFile2Mat(self, lagFile):
"""Convert a lagFile into a 2D array
"""
return numpy.loadtxt(lagFile)
def uvpFile2Mat(self, uvpFile):
"""Convert an uvp file (output from fortran code) into a 2D numpy matrix
@param[in] uvpFile: string, path to [uvpFile].dat, output from fortran routine
@return outArray, tVector
out array: a 3D numpy array of shape timeSteps x nPoints x [x, y, u, v, p]
tVector: time vector of length timeSteps
"""
tVector = [] # will be 1D
uOut = [] # will be 2D
vOut = []
pOut = []
xArray = []
yArray = []
outArray = [] # will be 3D (will hold an array of gridArrays)
with open(uvpFile, 'r') as f:
line1 = f.readline()
line2 = f.readline() # ignored
line3 = f.readline()
tVector.append(self.getTimeFromLine(line3))
uArray, vArray, pArray = [], [], []
firstArray = True
while True:
line = f.readline()
if not line:
# end of file
if len(uArray) == self.gridsize[0] * self.gridsize[1]:
print 'got all data!'
uOut.append(numpy.asarray(uArray, dtype=float))
vOut.append(numpy.asarray(vArray, dtype=float))
pOut.append(numpy.asarray(pArray, dtype=float))
else:
# remove the last time point, we dont have
# all the data
tVector.pop(-1)
break
if "VARIABLES" in line:
continue
if "ZONE" in line:
# new time step encountered, parse the time, start a gridArray
uOut.append(numpy.asarray(uArray, dtype=float))
vOut.append(numpy.asarray(vArray, dtype=float))
pOut.append(numpy.asarray(pArray, dtype=float))
uArray, vArray, pArray = [], [], []
tVector.append(self.getTimeFromLine(line))
firstArray = False
else:
# split on whitespace, cast to floats
lineArray = [float(x) for x in line.split()]
if len(lineArray)<5:
break
if firstArray:
xArray.append(lineArray[0])
yArray.append(lineArray[1])
uArray.append(lineArray[2])
vArray.append(lineArray[3])
pArray.append(lineArray[4])
# return a numpy matrix
self._tVector = tVector
self._u = uOut
self._v = vOut
self._p = pOut
self._x = xArray
self._y = yArray
print 'loaded from uvp file'
def reshapeZ(self, Z):
Z = numpy.reshape(Z, (self.gridsize[0],self.gridsize[1]), order="F")
return Z
def getUVorP(self, u_v_or_p, timeStep):
assert u_v_or_p in ["u", "v", "p"]
# fig = plt.figure()
if u_v_or_p == "u":
z = self.u[timeStep]
elif u_v_or_p == "v":
z = self.v[timeStep]
else:
z = self.p[timeStep]
return z
def getGridData(self, timeStep, u_v_or_p):
z = self.getUVorP(u_v_or_p, timeStep)
X,Y = numpy.meshgrid(self.xPts,self.yPts)
Z = self.reshapeZ(z)
return X,Y,Z
def plotVelocityProfiles(self, timeStep):
fig = plt.figure()
x,y,zu = self.getGridData(timeStep, "u")
x,y,zv = self.getGridData(timeStep, "v")
ax = fig.add_subplot(1,2,1)
plt.plot(self.xPts, zu[:,self.uProfileIndex])
plt.title("u")
ax = fig.add_subplot(1,2,2)
plt.plot(self.yPts, zv[self.vProfileIndex,:])
plt.title("v")
plt.savefig('velocityprofiles.pdf', format='pdf')
plt.close()
# def makeColorMaps(self, fast=True, tVector=None, saveDir=""):
# if not tVector:
# tVector = self.tVector
# for i in range(len(tVector)):
# for j in ["u"]:#, "v", "p"]:
# plotStr = j + " velocity" + " frame(%i) "%i + self.figSuffix
# fig = plt.figure(figsize=(10, 5))
# ax = fig.add_subplot(111,aspect="equal")
# lims = (-.1, .1)
# self.plotColorMap(i, j, figTitle=plotStr, fast=fast, lims=lims)
# plt.savefig(saveDir+plotStr + '.png', format='png')
# plt.close()
def crop(self, data):
ds = data.shape
xMin = 150.
yMin = 100.
yMax = 200.
xIndMin = numpy.argmin(numpy.abs(xMin-self.xPts))
yIndMax = numpy.argmin(numpy.abs(yMax-self.yPts))
yIndMin = numpy.argmin(numpy.abs(yMin-self.yPts))
return data[yIndMin:yIndMax, xIndMin:]
def plotColorMap(self, timeStep, u_v_or_p, figTitle="", fast=True, lims=(None, None), zoom=False):
"""Plot a 2D color contour
@param[in] int, timeStep to use (-1 is last time step)
@param[in] u_v_or_p, one of 'u', 'v', 'p'
@param[in] figName: name of the figure
@param[in] figTitle: title for the figure
"""
vmin=lims[0]
vmax=lims[1]
# grab the 2D matrix to plot
plt.hold(True)
# reduce the array sizes by a factor of 10 to contain
# x = x[::100]
# y = y[::100]
# z = z[::100]
# xi = numpy.linspace(min(x), max(x), 1000)
# yi = numpy.linspace(min(y), max(y), 2000)
#cmap = plt.get_cmap('winter')
#cmap = plt.get_cmap('hot')
# X, Y = numpy.meshgrid(xi, yi)
# X, Y = numpy.meshgrid(x,y)
# Z = ml.griddata(x, y, z, xi, yi)
if fast:
z = self.getUVorP(u_v_or_p, timeStep)
Z = self.reshapeZ(z).T
if zoom:
Z = self.crop(Z)
plt.imshow(Z, vmin=vmin, vmax=vmax)
else:
X,Y,Z = self.getGridData(timeStep, u_v_or_p)
# Z = scipy.interpolate.griddata(x,y,z,(xi,yi))
Z = Z.T
if zoom:
X = self.crop(X)
Y = self.crop(Y)
Z = self.crop(Z)
# X, Y = numpy.meshgrid(x, y)
# z = numpy.sin(X)
# note
# plt.contourf(X, Y, Z, cmap=cmap, vmin=vmin, vmax=vmax)#, norm=norm)
# plt.imshow(Z.T, cmap=cmap, vmin=-.5, vmax=1.5)
plt.pcolormesh(X, Y, Z, vmin=vmin, vmax=vmax, norm=None)#, cmap=cmap)#, norm=norm)
# img = plot.imshow()
#self.plotLagPoints()
plt.colorbar()
plt.title(figTitle)
plt.xlabel("x location")
plt.ylabel("y location")
# im = plt.imshow(value)
def plotQuiver(self, timeStep, figName=None):
plt.quiver(self.x,self.y,self.u[timeStep],self.v[timeStep])
if figName:
plt.savefig(figName + '.png', format='png')
plt.close()
else:
plt.show()
def plotLagPoints(self):
# pressure plots require an offset
plt.plot(self.lagPoints[:,0], self.lagPoints[:,1], ".k-", alpha=0.8)
def plotAll(self, timestep):
fig = plt.figure();
ax = fig.add_subplot(111, aspect='equal');
plt.hold(True);
x.plotQuiver(timestep);
x.plotPressure(timestep);
x.plotLagPoints();
x.plotQuiverForce(timestep)
plt.show()
def createDataMuncher(gridsize, boxLength):
    """Build a DataMuncher from the conventionally named files in _output/.

    Grid sizes below 100 carry a leading space in the fortran-generated
    file names, hence the two format branches.

    Bug fix: the old version also passed a ``forceFile`` keyword argument,
    which DataMuncher.__init__ does not accept, so every call raised
    TypeError.

    @param[in] gridsize: int, grid resolution used in the run
    @param[in] boxLength: int, box-length identifier embedded in file names
    @return DataMuncher instance
    """
    if gridsize < 100:
        gs = ' %i' % gridsize
    else:
        gs = '%i' % gridsize
    return DataMuncher(
        uvpFile="_output/UVP_%s_%i.dat" % (gs, boxLength),
        lagFile="_output/lagrangian_points%s_%i.dat" % (gs, boxLength),
        xFile="_output/x_points%s_%i.dat" % (gs, boxLength),
        yFile="_output/y_points%s_%i.dat" % (gs, boxLength),
        residFile="_output/residual_%s_%i.dat" % (gs, boxLength),
    )
class elJefe(object):
    """Object for managing / plotting all runs!

    Scans an output directory, builds one DataMuncher per run, and provides
    batch plotting helpers.  NOTE(review): Python 2 code (itertools.izip).
    """
    def __init__(self, fileDir):
        self.fileDir = fileDir
        allFiles = glob.glob(fileDir + "/*")
        # bucket files by kind based on their name
        uvpFiles = []
        residFiles = []
        xFiles = []
        yFiles = []
        lagFiles = []
        for f in allFiles:
            if 'lagrangian' in f:
                lagFiles.append(f)
            elif 'UVP' in f:
                uvpFiles.append(f)
            elif 'residual' in f:
                residFiles.append(f)
            elif 'x_points' in f:
                xFiles.append(f)
            elif 'y_points' in f:
                yFiles.append(f)
        # sortem
        # sorting keeps the per-run files aligned for the izip below —
        # assumes each run produced exactly one file of each kind; TODO confirm
        uvpFiles.sort()
        residFiles.sort()
        xFiles.sort()
        yFiles.sort()
        lagFiles.sort()
        jefeList = []
        for uvp, lag, resid, xf, yf in itertools.izip(uvpFiles,lagFiles,residFiles,xFiles,yFiles):
            dm = DataMuncher(
                uvpFile=uvp,
                lagFile = lag,
                xFile = xf,
                yFile = yf,
                residFile = resid
            )
            jefeList.append(dm)
        self.jefeList = jefeList
        # shared color limits per field for consistent frames across runs
        self.lims = {
            "u": (-.5, 1.5),
            "v": (-.5, 1.5),
            "p": (-0.05, 0.05),
        }
    def plotUVP_resArray(self):
        """3x3 panel of u/v/p colormaps for three hard-coded runs (indices 22, 4, 7)."""
        inds = [22,4,7]
        fig = plt.figure()
        plotnum = 1
        for ii,ind in enumerate(inds):
            dm = self.jefeList[ind]
            for jj, j in enumerate(["u", "v", "p"]):
                plotStr = j
                ax = fig.add_subplot(3, 3, plotnum, aspect="equal")
                dm.plotColorMap(-1, j, figTitle=plotStr, fast=False)
                plotnum += 1
        plt.savefig('resarray.pdf', format='pdf')
        plt.close()
    def plotProfiles(self):
        """Velocity profiles for the first run at the last time step."""
        dm = self.jefeList[0]
        dm.plotVelocityProfiles(-1)
    def dump2dirs(self, base):
        """Create one directory per run ('<base><index>') and fill it with colormaps."""
        for int, dm in enumerate(self.jefeList):
            # NOTE(review): loop variable shadows the builtin ``int``
            dirName = base + str(int)
            os.mkdir(dirName)
            dm.makeColorMaps(saveDir=dirName+"/")
    def makeColorMaps(self, dm, j, fast=True, tVector=None, saveDir="", zoom=False, short=False):
        """Save one colormap frame per time step for field ``j`` of run ``dm``.

        ``short=True`` renders only the first third of the time steps.
        """
        if not tVector:
            tVector = dm.tVector
        if short:
            tVector = tVector[:len(tVector)//3]
        for i in range(len(tVector)):
            plotStr = j + " frame(%i) "%i + dm.figSuffix
            fig = plt.figure(figsize=(10, 5))
            ax = fig.add_subplot(111,aspect="equal")
            dm.plotColorMap(i, j, figTitle=plotStr, fast=fast, lims=self.lims[j], zoom=zoom)
            plt.savefig(saveDir+plotStr + '.png', format='png')
            plt.close()
    def plotDetector(self, dm, j, xMin, xMax, yMin, yMax, fast=True, tVector=None, saveDir="", zoom=False):
        """Colormap of the last time step with the detector rectangle overlaid."""
        plotStr = j + " detector region" + dm.figSuffix
        fig = plt.figure(figsize=(10, 5))
        ax = fig.add_subplot(111,aspect="equal")
        dm.plotColorMap(-1, j, figTitle=plotStr, fast=fast, lims=self.lims[j], zoom=zoom)
        # closed rectangle outlining the detector region
        box = numpy.asarray([
            [xMin, yMax],
            [xMax, yMax],
            [xMax, yMin],
            [xMin, yMin],
            [xMin, yMax],
            ])
        plt.plot(box[:,0], box[:,1], 'k')
        plt.savefig(saveDir+plotStr + '.png', format='png')
        plt.close()
    def movieReynoldsSweep(self, reRange, aspectRatio, u_v_or_p):
        """Stacked-panel frames (one per time step) across a sweep of Reynolds numbers."""
        dms = []
        dmsPrelim = self.filterDataMunchers(aspectRatio=aspectRatio)
        # throw out those outside of reRange
        for dm in dmsPrelim:
            if dm.re in reRange:
                dms.append(dm)
        fig = plt.figure()
        nPanels = len(dms)
        for t in range(len(dms[0].tVector)):
            for i, dm in enumerate(dms):
                ax = fig.add_subplot(nPanels, 1, i, aspect="equal")
                figTitle = "Aspect (%i) Reynolds (%.2f)" % (dm.aspectRatio, dm.re)
                dm.plotColorMap(t, u_v_or_p, figTitle=figTitle, fast=False, lims=self.lims[u_v_or_p], zoom=False)
            plt.savefig("reSweep_%i.png"%t, format="png")
            plt.close()
    def filterDataMunchers(self, gridsize=None, re=None, aspectRatio=None):
        """Select runs by gridsize / Reynolds / aspect ratio.

        Returns a single DataMuncher when exactly one matches, else a list.
        """
        outList = []
        for dm in self.jefeList:
            if gridsize:
                if tuple(dm.gridsize) != tuple(gridsize):
                    continue
            if re:
                if dm.re != re:
                    continue
            if aspectRatio:
                if dm.aspectRatio != aspectRatio:
                    continue
            outList.append(dm)
        return outList[0] if len(outList)==1 else outList
def cleanUpDir(d):
    """Normalize file names under directory *d*: strip spaces, collapse '..' to '.'.

    NOTE(review): os.rename will clobber an existing file if two names
    normalize to the same string — confirm names stay unique.
    """
    allFiles = glob.glob(d+"/*")
    for f in allFiles:
        fnew = f[:]
        fnew = fnew.replace(" ", "")
        fnew = fnew.replace("..", ".")
        print f, fnew
        os.rename(f, fnew)
if __name__ == "__main__":
    # Driver: load every run found in _output and render the standard plots.
    # Older one-off experiments are kept below, commented out, for reference.
    # makeFigures()
    # x = createDataMuncher(256, 5)
    # x = DataMuncher(
    #     uvpFile="_output/UVP.dat",
    #     lagFile="_output/lagrangian_points.dat",
    #     xFile="_output/x_points.dat",
    #     yFile="_output/y_points.dat",
    #     residFile="_output/residual.dat",
    # )
    # x.plotAll(-1)
    # x.makeColorMaps()
    # x.plotResidSemiLog("resid")
    # Bug fix: the instance used to be bound to the name ``elJefe``, which
    # shadowed the class itself; use a distinct variable name.
    jefe = elJefe("_output")
    # jefe.movieReynoldsSweep(reRange=[70, 100, 125, 150, 175, 200, 300], aspectRatio=2, u_v_or_p="u")
    dm = jefe.jefeList[0]
    jefe.makeColorMaps(dm, j="u", fast=False)
    dm.plotResidSemiLog("resid")
    # dm.plotFFTSum()
    # dm.plotTimeseries("p", 270, 160, 1)
    # jefe.plotDetector(dm, "p", 270, 280, 150, 180, fast=False, zoom=True)
    # jefe.makeColorMaps(dm, j="p", fast=False, zoom=True, short=False)
    # jefe.plotProfiles()
| [
"csayres@uw.edu"
] | csayres@uw.edu |
2b204f0044e3ad68a5f22d8b9018bb35e8deba5b | a5bbf6ece66a39f92706c807874870cc048391d9 | /menus/migrations/0001_initial.py | e56c6c147d99f7baa19af7351230107558d4bc78 | [] | no_license | IsaacMorzy/wagtailblog | f96e921c1d07522fe2519f33daa5b19c3facbadb | ef372b85daed423431a4283fa8b5859512b97979 | refs/heads/master | 2022-12-15T02:22:09.366893 | 2020-05-13T10:44:34 | 2020-05-13T10:44:34 | 225,391,854 | 1 | 0 | null | 2022-12-08T03:17:39 | 2019-12-02T14:19:12 | CSS | UTF-8 | Python | false | false | 1,841 | py | # Generated by Django 2.2.8 on 2019-12-10 12:42
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import modelcluster.fields
class Migration(migrations.Migration):
    # Initial migration for the menus app: creates Menu (title + auto slug)
    # and MenuItem (orderable link rows attached to a Menu via ParentalKey).
    initial = True
    dependencies = [
        ('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
    ]
    operations = [
        migrations.CreateModel(
            name='Menu',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                # slug is derived from title automatically and not editable
                ('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from='title')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='MenuItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # sort_order drives the 'ordering' option below (orderable rows)
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_title', models.CharField(blank=True, max_length=50, null=True)),
                ('link_url', models.CharField(blank=True, max_length=500)),
                ('open_in_new_tab', models.BooleanField(blank=True, default=False)),
                # optional internal link to a wagtail Page (alternative to link_url)
                ('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page')),
                ('page', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='menu_items', to='menus.Menu')),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
    ]
| [
"musyokaisaac98@gmail.com"
] | musyokaisaac98@gmail.com |
a4d6e4546903e3e9be31a70c20e61a8005a35805 | e0b607de0d1e91492b80369c5e8a6313372f9d29 | /app/views.py | 081983a94db7ca786b9542b6c0e4f8ec3c5089f1 | [] | no_license | surajkumarbhagat71/mathcalculation | 63a13473819657fa86136ce4593809f4129aa1f9 | 300850a574c60894a9bef57868816363f721775d | refs/heads/master | 2023-03-01T00:39:18.017426 | 2021-02-11T12:44:48 | 2021-02-11T12:44:48 | 338,026,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,807 | py | from django.shortcuts import render,redirect
from django.views.generic import View
from django.db.models import Q
from .forms import *
from .models import *
# Create your views here.
class Signup(View):
    """User registration view backed by UserForm.

    GET renders an empty form; POST validates and saves it, then redirects
    to the login page.
    """
    def get(self, request):
        form = UserForm()
        return render(request, 'signup.html', {"form": form})

    def post(self, request):
        form = UserForm(request.POST or None)
        if form.is_valid():
            form.save()
            return redirect('login')
        # Bug fix: an invalid form used to fall through and return None
        # (HTTP 500); re-render the bound form so errors are shown.
        return render(request, 'signup.html', {"form": form})
class LoginView(View):
    """Hand-rolled login: matches an email/password pair against User rows
    and stores the email in the session under 'login'.

    SECURITY NOTE(review): passwords are stored and compared in plain text;
    this should be migrated to Django's auth framework with hashed passwords.
    """
    def get(self, request):
        return render(request, 'login.html')

    def post(self, request, *args, **kwargs):
        # View.dispatch only routes POST requests here, so the old
        # ``if request.method == 'POST'`` guard was redundant and could
        # fall through returning None; every path now returns a response.
        username = request.POST.get('email')
        password = request.POST.get('password')
        cond = Q(email=username) & Q(password=password)
        if User.objects.filter(cond).count() == 1:
            request.session['login'] = username
            return redirect('cal')
        return redirect('login')
# def sum(x,n):
# total = 0
# for i in range(1,n+1):
# total+=1/(x**i)
# return total
#
# print(sum(1,3))
def Sum(x, n):
    """Return the partial sum 1/x + 1/x**2 + ... + 1/x**n.

    Rewritten iteratively: the original recursive version exhausted
    Python's recursion limit for large ``n``.  Results for valid inputs
    (``n >= 1``, ``x != 0``) are unchanged; ``n <= 0`` now returns 0.0
    (empty sum) instead of recursing forever.

    :param x: non-zero base of the series
    :param n: number of terms to sum
    :return: float partial sum
    """
    total = 0.0
    for i in range(1, n + 1):
        total += 1 / (x ** i)
    return total
class Calculation(View):
    """Collects ``x`` and ``n`` from the user and renders Sum(x, n).

    Requires a logged-in session (set by LoginView under the 'login' key).
    """
    def get(self, request, *args, **kwargs):
        # ``has_key`` was removed from dicts/sessions in Python 3 era
        # Django; membership test is the supported spelling.
        if 'login' not in request.session:
            return redirect('login')
        return render(request, 'getdata.html')

    def post(self, request, *args, **kwargs):
        if 'login' not in request.session:
            return redirect('login')
        try:
            x = int(request.POST.get('x'))
            n = int(request.POST.get('n'))
        except (TypeError, ValueError):
            # Missing or non-numeric input: ask again instead of a 500.
            return render(request, 'getdata.html')
        if x == 0 or n < 1:
            # Sum() divides by x and expects at least one term.
            return render(request, 'getdata.html')
        return render(request, 'result.html', {"result": Sum(x, n)})
| [
"surajkumarbhgat71@gmail.com"
] | surajkumarbhgat71@gmail.com |
fbbffd250cfe33d45e332eaa7c597c0cc338972e | ead82159a724b351e1c82d31e133f284db4d5d32 | /mymusic/models.py | 994b088f8ec81c252fb6b6b39ce9b64f73f7793f | [] | no_license | momentum-morehouse/django-music-genolajohnson | ff9d004aa556d5907be995f5257b57b312c10bc5 | 81beca64eed41fa454904fd4c3b44ae0092639b4 | refs/heads/master | 2022-11-29T15:37:14.711971 | 2020-07-17T00:49:43 | 2020-07-17T00:49:43 | 279,331,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | from django.db import models
# Create your models here.
# class User(AbstractUser):
# pass
class Album(models.Model):
    # A music album record; the text fields are nullable/blank so rows can
    # be created incrementally, while the release date is required.
    artist_name = models.CharField(max_length=255, null=True, blank=True)
    title = models.CharField(max_length=255,null=True, blank=True)
    released = models.DateField()
    img_url = models.TextField(null= True, blank= True)  # cover-art URL
    def __str__(self):
        # Human-readable label shown in the admin and shell.
        return f"{self.title} by {self.artist_name}"
        # return f'{self.release}"
| [
"replituser@example.com"
] | replituser@example.com |
8fd2b1e1def0f43706a694e1453f6cb64f82ea8d | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/f383826d76e7d7723b9e5eaee92778f5c7760d5d-<destination_to_network>-bug.py | 24707890dd6eefddbedc12c263c27707b5f7d95b | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | def destination_to_network(self):
destination = self._values['destination']
if destination.startswith('default%'):
destination = '0.0.0.0%{0}/0'.format(destination.split('%')[1])
elif destination.startswith('default-inet6%'):
destination = '::%{0}/::'.format(destination.split('%')[1])
elif destination.startswith('default-inet6'):
destination = '::/::'
elif destination.startswith('default'):
destination = '0.0.0.0/0'
return destination | [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
e86ffa15bdcd0373bf0c87c3468c1a69205de307 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/pki/csyncpolicy.py | e2f2771137084c404a13dafc1485c1815498a82a | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,431 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class CsyncPolicy(Mo):
    """
    Used to control csync timeout and enable/disable.
    """
    # NOTE: machine-generated class metadata (see the "do not modify" file
    # header); edit the generator, not this file.
    meta = ClassMeta("cobra.model.pki.CsyncPolicy")
    meta.moClassName = "pkiCsyncPolicy"
    meta.rnFormat = "csyncpolicy"
    meta.category = MoCategory.REGULAR
    meta.label = "File Synchronization Policy"
    meta.writeAccessMask = 0x3
    meta.readAccessMask = 0x3
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = False
    meta.isContextRoot = False
    # allowed child / parent / ancestor classes in the MIT
    meta.childClasses.add("cobra.model.tag.Tag")
    meta.childClasses.add("cobra.model.pki.CsyncElement")
    meta.childClasses.add("cobra.model.fault.Delegate")
    meta.childClasses.add("cobra.model.aaa.RbacAnnotation")
    meta.childClasses.add("cobra.model.tag.Annotation")
    meta.childNamesAndRnPrefix.append(("cobra.model.tag.Annotation", "annotationKey-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.pki.CsyncElement", "csyncelem-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.aaa.RbacAnnotation", "rbacDom-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.tag.Tag", "tagKey-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
    meta.parentClasses.add("cobra.model.pki.Ep")
    meta.superClasses.add("cobra.model.naming.NamedObject")
    meta.superClasses.add("cobra.model.pol.Obj")
    meta.superClasses.add("cobra.model.pol.Def")
    meta.superClasses.add("cobra.model.pki.Definition")
    meta.rnPrefixes = [
        ('csyncpolicy', False),
    ]
    # property metadata (one PropMeta per managed-object attribute)
    prop = PropMeta("str", "annotation", "annotation", 37511, PropCategory.REGULAR)
    prop.label = "Annotation. Suggested format orchestrator:value"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("annotation", prop)
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
    prop.label = "Description"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("descr", prop)
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    prop = PropMeta("str", "extMngdBy", "extMngdBy", 39650, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "undefined"
    prop._addConstant("msc", "msc", 1)
    prop._addConstant("undefined", "undefined", 0)
    meta.props.add("extMngdBy", prop)
    # csync interval in seconds (30-600, default 30)
    prop = PropMeta("str", "interval", "interval", 1212, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(30, 600)]
    prop.defaultValue = 30
    prop.defaultValueStr = "30"
    meta.props.add("interval", prop)
    prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "local"
    prop._addConstant("implicit", "implicit", 4)
    prop._addConstant("local", "local", 0)
    prop._addConstant("policy", "policy", 1)
    prop._addConstant("replica", "replica", 2)
    prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
    meta.props.add("lcOwn", prop)
    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)
    prop = PropMeta("str", "name", "name", 1221, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("name", prop)
    prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
    prop.label = "Name alias"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 63)]
    prop.regex = ['[a-zA-Z0-9_.-]+']
    meta.props.add("nameAlias", prop)
    prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerKey", prop)
    prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerTag", prop)
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    # enable/disable switch for csync (default enabled)
    prop = PropMeta("str", "state", "state", 1211, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 1
    prop.defaultValueStr = "enabled"
    prop._addConstant("disabled", "disabled", 0)
    prop._addConstant("enabled", "enabled", 1)
    meta.props.add("state", prop)
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("uid", prop)
    # Deployment Meta
    meta.deploymentQuery = True
    meta.deploymentType = "Fabric"
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        """Instantiate under *parentMoOrDn*; the rn has no naming properties."""
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
1399f116816cb65ea83f69485456d02af6137435 | 2b1a4f5eaad320d17159916b1229832948e3bea7 | /src/ebonite/core/objects/core.py | 3df2245f4beac6776c1640ccf75bfacc5731be5d | [
"Apache-2.0"
] | permissive | geffy/ebonite | cab00cb5f236f301509859d8f918a6c85e118061 | 2d85eeca44ac1799e743bafe333887712e325060 | refs/heads/master | 2020-09-15T09:13:20.663752 | 2019-11-22T12:33:27 | 2019-11-22T12:33:27 | 223,406,815 | 1 | 0 | Apache-2.0 | 2019-11-22T13:15:14 | 2019-11-22T13:15:14 | null | UTF-8 | Python | false | false | 15,875 | py | import datetime
import getpass
import tempfile
from copy import copy
from functools import wraps
from typing import Callable, List, Optional
from pyjackson.core import Comparable
from pyjackson.decorators import make_string
import ebonite.repository
from ebonite import client
from ebonite.core import errors
from ebonite.core.analyzer.dataset import DatasetAnalyzer
from ebonite.core.analyzer.model import ModelAnalyzer
from ebonite.core.objects.artifacts import ArtifactCollection, CompositeArtifactCollection
from ebonite.core.objects.dataset_type import DatasetType
from ebonite.core.objects.requirements import AnyRequirements, Requirements, resolve_requirements
from ebonite.core.objects.wrapper import ModelWrapper, WrapperArtifactCollection
from ebonite.repository.artifact import NoSuchArtifactError
from ebonite.utils.index_dict import IndexDict, IndexDictAccessor
from ebonite.utils.module import get_object_requirements
def _get_current_user():
    """Return the OS login name of the user running this process."""
    return getpass.getuser()
class EboniteObject(Comparable):
    """
    Base class for high level ebonite objects.
    These objects can be binded to metadata repository and/or to artifact repository

    :param id: object id
    :param name: object name
    :param author: user that created that object
    :param creation_date: date when this object was created
    """
    # class-level defaults: instances start unbound to any repository
    _meta: 'ebonite.repository.MetadataRepository' = None
    _art: 'ebonite.repository.ArtifactRepository' = None
    def __init__(self, id: str, name: str, author: str = None, creation_date: datetime.datetime = None):
        self._id = id
        self.name = name
        # author/creation_date default to the current user and UTC "now"
        self.author = author or _get_current_user()
        self.creation_date = creation_date or datetime.datetime.utcnow() # TODO local timezone
    def bind_meta_repo(self, repo: 'ebonite.repository.MetadataRepository'):
        """Attach this object to a metadata repository."""
        self._meta = repo
    def unbind_meta_repo(self):
        """Detach from the metadata repository; the id is reset as well."""
        del self._meta
        self._id = None
    @property
    def has_meta_repo(self):
        # True when bound to a metadata repository
        return self._meta is not None
    def bind_artifact_repo(self, repo: 'ebonite.repository.ArtifactRepository'):
        """Attach this object to an artifact repository."""
        self._art = repo
    def unbind_artifact_repo(self):
        """Detach from the artifact repository (id is kept)."""
        del self._art
    @property
    def has_artifact_repo(self):
        # True when bound to an artifact repository
        return self._art is not None
    def bind_client(self, cl: 'client.Ebonite'):
        """Bind both repositories from an Ebonite client in one call."""
        self.bind_artifact_repo(cl.artifact_repo)
        self.bind_meta_repo(cl.meta_repo)
    @property
    def id(self):
        # read-only; set via __init__ or cleared by unbind_meta_repo
        return self._id
def _with_meta(method):
    """
    Decorator for methods to check that object is binded to meta repo

    :param method: method to apply decorator
    :return: decorated method
    """
    @wraps(method)
    def wrapper(self: EboniteObject, *args, **kwargs):
        bound = self.id is not None and self.has_meta_repo
        if not bound:
            raise errors.UnboundObjectError('{} is not bound to meta repository'.format(self))
        return method(self, *args, **kwargs)
    return wrapper
def _with_artifact(method):
    """
    Decorator for methods to check that object is binded to artifact repo
    :param method: method to apply decorator
    :return: decorated method
    :raises errors.UnboundObjectError: when the object has no id or no artifact repo
    """
    @wraps(method)
    def inner(self: EboniteObject, *args, **kwargs):
        # Mirror of _with_meta, but guarding the artifact repository binding.
        bound = self.id is not None and self.has_artifact_repo
        if not bound:
            raise errors.UnboundObjectError('{} is not bound to artifact repository'.format(self))
        return method(self, *args, **kwargs)
    return inner
@make_string('id', 'name')
class Project(EboniteObject):
    """
    Project is a collection of tasks
    :param id: project id
    :param name: project name
    :param author: user that created that project
    :param creation_date: date when this project was created
    """
    def __init__(self, name: str, id: str = None, author: str = None, creation_date: datetime.datetime = None):
        super().__init__(id, name, author, creation_date)
        # Tasks are stored in an IndexDict so they can be looked up both by
        # id and by name; `tasks` is the public read accessor over it.
        self._tasks: IndexDict[Task] = IndexDict('id', 'name')
        self.tasks: IndexDictAccessor[Task] = IndexDictAccessor(self._tasks)
    @_with_meta
    def add_task(self, task: 'Task'):
        """
        Add task to project and save it to meta repo
        :param task: task to add
        :raises errors.MetadataError: if the task already belongs to another project
        """
        # Refuse to silently steal a task that belongs to a different project.
        if task.project_id is not None and task.project_id != self.id:
            raise errors.MetadataError('Task is already in project {}. Delete it first'.format(task.project_id))
        task.project_id = self.id
        # Persist first, then register locally, so the local index never
        # contains a task the repository rejected.
        self._meta.save_task(task)
        self._tasks.add(task)
    @_with_meta
    def add_tasks(self, tasks: List['Task']):
        """
        Add multiple tasks and save them to meta repo
        :param tasks: tasks to add
        """
        for t in tasks:
            self.add_task(t)
    @_with_meta
    def delete_task(self, task: 'Task'):
        """
        Remove task from this project and delete it from meta repo
        :param task: task to delete
        :raises errors.NonExistingTaskError: if the task is not in this project
        """
        if task.id not in self._tasks:
            raise errors.NonExistingTaskError(task)
        # Drop from the local index, then from the repository, then orphan
        # the task object so it can be re-added elsewhere.
        del self._tasks[task.id]
        self._meta.delete_task(task)
        task.project_id = None
    def __repr__(self):
        return """Project '{name}', {td} tasks""".format(name=self.name, td=len(self.tasks))
@make_string('id', 'name')
class Task(EboniteObject):
    """
    Task is a collection of models
    :param id: task id
    :param name: task name
    :param project_id: parent project id for this task
    :param author: user that created that task
    :param creation_date: date when this task was created
    """
    def __init__(self, name: str, id: str = None, project_id: str = None,
                 author: str = None, creation_date: datetime.datetime = None):
        super().__init__(id, name, author, creation_date)
        self.project_id = project_id
        # self.metrics = metrics TODO
        # self.sample_data = sample_data
        # Models indexed by both id and name, same scheme as Project._tasks.
        self._models: IndexDict[Model] = IndexDict('id', 'name')
        self.models: IndexDictAccessor[Model] = IndexDictAccessor(self._models)
    def __str__(self):
        return self.name
    @property
    def project(self):
        # Tasks only store project_id; resolving the Project object requires
        # a repository, so plain attribute access is forbidden.
        raise AttributeError('Cant access project of unbound task')
    @project.setter
    def project(self, project: Project):
        """Assign the parent project (only its id is kept)."""
        if not isinstance(project, Project):
            raise ValueError('{} is not Project'.format(project))
        self.project_id = project.id
    @_with_meta
    def add_model(self, model: 'Model'):
        """
        Add model to task and save it to meta repo
        :param model: model to add
        :raises errors.MetadataError: if the model already belongs to another task
        """
        if model.task_id is not None and model.task_id != self.id:
            raise errors.MetadataError('Model is already in task {}. Delete it first'.format(model.task_id))
        model.task_id = self.id
        # Persist before registering locally (see Project.add_task).
        self._meta.save_model(model)
        self._models.add(model)
    @_with_meta
    def add_models(self, models: List['Model']):
        """
        Add multiple models and save them to meta repo
        :param models: models to add
        """
        for m in models:
            self.add_model(m)
    @_with_meta
    def delete_model(self, model: 'Model'):
        """
        Remove model from this task and delete it from meta repo
        :param model: model to delete
        :raises errors.NonExistingModelError: if the model is not in this task
        """
        if model.id not in self._models:
            raise errors.NonExistingModelError(model)
        del self._models[model.id]
        self._meta.delete_model(model)
        # Artifact deletion is best-effort: a model may legitimately have no
        # stored artifacts, so a missing artifact is not an error here.
        if self.has_artifact_repo:
            try:
                self._art.delete_artifact(model)
            except NoSuchArtifactError:
                pass
        model.task_id = None
    # ##########API############
    @_with_meta
    @_with_artifact
    def create_and_push_model(self, model_object, input_data, model_name: str = None, **kwargs) -> 'Model':
        """
        Create :class:`Model` instance from model object and push it to repository
        :param model_object: model object to build Model from
        :param input_data: input data sample
        :param model_name: name for model
        :param kwargs: other :meth:`~Model.create` arguments
        :return: created :class:`Model`
        """
        model = Model.create(model_object, input_data, model_name, **kwargs)
        return self.push_model(model)
    @_with_meta
    @_with_artifact
    def push_model(self, model: 'Model') -> 'Model':
        """
        Push :class:`Model` instance to task repository
        :param model: :class:`Model` to push
        :return: same pushed :class:`Model`
        """
        # Delegates to the high-level client so metadata and artifacts are
        # stored through the same code path as the public API.
        return client.Ebonite(self._meta, self._art).push_model(model, self)
@make_string('id', 'name')
class Model(EboniteObject):
    """
    Model contains metadata for machine learning model
    :param name: model name
    :param wrapper: :class:`~ebonite.core.objects.wrapper.ModelWrapper` instance for this model
    :param artifact: :class:`~ebonite.core.objects.ArtifactCollection` instance with model artifacts
    :param input_meta: :class:`~ebonite.core.objects.DatasetType` instance for model input
    :param output_meta: :class:`~ebonite.core.objects.DatasetType` instance for model output
    :param requirements: :class:`~ebonite.core.objects.Requirements` instance with model requirements
    :param id: model id
    :param task_id: parent task_id
    :param author: user that created that model
    :param creation_date: date when this model was created
    """
    def __init__(self, name: str, wrapper: ModelWrapper,
                 artifact: 'ArtifactCollection' = None, input_meta: DatasetType = None,
                 output_meta: DatasetType = None, requirements: Requirements = None, id: str = None,
                 task_id: str = None,
                 author: str = None, creation_date: datetime.datetime = None):
        super().__init__(id, name, author, creation_date)
        self.wrapper = wrapper
        self.output_meta = output_meta
        self.input_meta = input_meta
        self.requirements = requirements
        self.transformer = None
        self.task_id = task_id
        # Artifacts live in two buckets: those already persisted to a
        # repository, and those attached but not yet persisted.
        self._persisted_artifacts = artifact
        self._unpersisted_artifacts: Optional[ArtifactCollection] = None
    def load(self):
        """
        Load model artifacts into wrapper
        """
        # Materialize persisted artifacts into a throwaway directory and let
        # the wrapper reconstruct the in-memory model object from them.
        with tempfile.TemporaryDirectory(prefix='ebonite_run_') as tmpdir:
            self.artifact.materialize(tmpdir)
            self.wrapper.load(tmpdir)
    def ensure_loaded(self):
        """
        Ensure that wrapper has loaded model object
        """
        if self.wrapper.model is None:
            self.load()
    # this property is needed for pyjackson to serialize model, it is coupled with __init__
    @property
    def artifact(self) -> 'ArtifactCollection':
        """
        :return: persisted artifacts if any
        """
        return self._persisted_artifacts
    @property
    def artifact_any(self) -> 'ArtifactCollection':
        """
        :return: artifacts in any state (persisted or not)
        """
        # Wrap in a composite only when both buckets are non-empty.
        arts = [a for a in [self._persisted_artifacts, self._unpersisted_artifacts] if a is not None]
        return CompositeArtifactCollection(arts) if len(arts) != 1 else arts[0]
    @property
    def artifact_req_persisted(self) -> 'ArtifactCollection':
        """
        Similar to `artifact` but checks that no unpersisted artifacts are left
        :return: persisted artifacts if any
        :raises ValueError: if unpersisted artifacts are still attached
        """
        if self._unpersisted_artifacts is not None:
            raise ValueError('Model has unpersisted artifacts')
        return self._persisted_artifacts
    def attach_artifact(self, artifact: 'ArtifactCollection'):
        """
        :param artifact: artifacts to attach to model in an unpersisted state
        """
        # Accumulate into the unpersisted bucket until persist_artifacts runs.
        if self._unpersisted_artifacts is not None:
            self._unpersisted_artifacts += artifact
        else:
            self._unpersisted_artifacts = artifact
    def persist_artifacts(self, persister: Callable[['ArtifactCollection'], 'ArtifactCollection']):
        """
        Model artifacts persisting workflow
        :param persister: external object which stores model artifacts
        :raises ValueError: if the model has no artifacts at all
        """
        artifact = self._persisted_artifacts
        if self._unpersisted_artifacts is None:
            if artifact is None:
                raise ValueError('Model has no artifacts')
        else:
            # Merge unpersisted into the (possibly empty) persisted set before
            # handing everything to the persister.
            if artifact is None:
                artifact = self._unpersisted_artifacts
            else:
                artifact += self._unpersisted_artifacts
        # The persister returns the stored representation, which becomes the
        # new persisted bucket; the unpersisted bucket is now empty.
        self._persisted_artifacts = persister(artifact)
        self._unpersisted_artifacts = None
    def without_artifacts(self) -> 'Model':
        """
        :return: copy of the model with no artifacts attached
        """
        # Shallow copy: metadata is shared, only artifact references are reset.
        no_artifacts = copy(self)
        no_artifacts._persisted_artifacts = None
        no_artifacts._unpersisted_artifacts = None
        return no_artifacts
    @classmethod
    def create(cls, model_object, input_data, model_name: str = None,
               additional_artifacts: ArtifactCollection = None, additional_requirements: AnyRequirements = None,
               custom_wrapper: ModelWrapper = None, custom_artifact: ArtifactCollection = None,
               custom_input_meta: DatasetType = None, custom_output_meta: DatasetType = None, custom_prediction=None,
               custom_requirements: AnyRequirements = None) -> 'Model':
        """
        Creates Model instance from arbitrary model objects and sample of input data
        :param model_object: The model object to analyze.
        :param input_data: Input data sample used to infer input/output metadata.
        :param model_name: The model name.
        :param additional_artifacts: Additional artifact.
        :param additional_requirements: Additional requirements.
        :param custom_wrapper: Custom model wrapper.
        :param custom_artifact: Custom artifact collection to replace all other.
        :param custom_input_meta: Custom input DatasetType.
        :param custom_output_meta: Custom output DatasetType.
        :param custom_prediction: Custom prediction output.
        :param custom_requirements: Custom requirements to replace all other.
        :returns: :py:class:`Model`
        """
        # Every "custom_*" argument short-circuits the corresponding analysis
        # step; otherwise the analyzers infer it from the given objects.
        wrapper: ModelWrapper = custom_wrapper or ModelAnalyzer.analyze(model_object)
        name = model_name or _generate_model_name(wrapper)
        artifact = custom_artifact or WrapperArtifactCollection(wrapper)
        if additional_artifacts is not None:
            artifact += additional_artifacts
        input_meta = custom_input_meta or DatasetAnalyzer.analyze(input_data)
        # A real prediction is run on the sample to derive the output schema.
        prediction = custom_prediction or wrapper.predict(input_data)
        output_meta = custom_output_meta or DatasetAnalyzer.analyze(prediction)
        if custom_requirements is not None:
            requirements = resolve_requirements(custom_requirements)
        else:
            # Requirements are the union over model, input and prediction.
            requirements = get_object_requirements(model_object)
            requirements += get_object_requirements(input_data)
            requirements += get_object_requirements(prediction)
        if additional_requirements is not None:
            requirements += additional_requirements
        # Artifacts start out unpersisted; persist_artifacts stores them later.
        model = Model(name, wrapper, None, input_meta, output_meta, requirements)
        model._unpersisted_artifacts = artifact
        return model
    @property
    def id(self):
        # Duplicates the base-class property (kept for serialization coupling).
        return self._id
    @property
    def task(self):
        # Only task_id is stored; resolving a Task requires a repository.
        raise AttributeError('Cant access task of unbound model')
    @task.setter
    def task(self, task: Task):
        """Assign the parent task (only its id is kept)."""
        if not isinstance(task, Task):
            raise ValueError('{} is not Task'.format(task))
        self.task_id = task.id
def _generate_model_name(wrapper: ModelWrapper):
    """
    Generates name for Model instance
    :param wrapper: model wrapper
    :return: str of the form "<wrapper type>_model_<timestamp>"
    """
    timestamp = datetime.datetime.now().strftime('%Y%m%d_%H_%M_%S')
    return '{}_model_{}'.format(wrapper.type, timestamp)
| [
"mike0sv@gmail.com"
] | mike0sv@gmail.com |
b4ac39cb2ae3416400dad040fd16a6489b3b62d9 | e7cd87117f195d7e6d7e45ade1d07384a3f42303 | /tests/test_util.py | 35b141c44c722dd3c256e9ba10ff6df2bda88c08 | [
"MIT"
] | permissive | zaabjuda/prometheus_async | e80d1921b16ab46a3d7781d6e29d2734c58a6c2a | 6e139f7ed18157aea015ac6b0fe52860446d5c2f | refs/heads/master | 2021-01-16T17:39:26.903205 | 2016-02-17T10:52:34 | 2016-02-17T10:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 844 | py | from __future__ import absolute_import, division, print_function
try:
import asyncio
except ImportError:
asyncio = None
import time
import pytest
import six
from prometheus_async import _util
# Version-gate markers: each test below runs on exactly one major Python line.
py2_only = pytest.mark.skipif(six.PY3, reason="Python 2-only test.")
py3_only = pytest.mark.skipif(six.PY2, reason="Python 3-only test.")
class TestMkTime(object):
    """Checks that _util selects the right monotonic clock per Python version."""
    @py2_only
    def test_py2(self):
        """
        Use monotonic.time on Python 2
        """
        import monotonic
        # Identity check (is, not ==): mk_get_time() must hand back the very
        # same callable the module exposes as get_time.
        assert (
            _util.get_time is
            monotonic.time is
            _util.mk_get_time()
        )
    @py3_only
    def test_py3(self):
        """
        Use time.perf_counter on Python 3
        """
        assert (
            _util.get_time is
            time.perf_counter is
            _util.mk_get_time()
        )
| [
"hs@ox.cx"
] | hs@ox.cx |
37894e2994e54b788169167d818e84ef23dd93b4 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03546/s628097135.py | 5e264a789848c60e87b361176cc71b0189cc755d | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | import heapq
INF = 10**10
def dijkstra(s, G):
    """Single-source shortest paths from node s.
    G is an adjacency list: G[v] is a list of (to, cost) edges.
    Returns a list of distances; unreachable nodes keep a large sentinel.
    """
    unreachable = 10 ** 10  # same sentinel value as the module-level INF
    dist = [unreachable for _ in G]
    dist[s] = 0
    pq = [(0, s)]
    while pq:
        current, v = heapq.heappop(pq)
        if current > dist[v]:
            continue  # stale queue entry, a shorter path was already found
        for to, cost in G[v]:
            candidate = dist[v] + cost
            if candidate < dist[to]:
                dist[to] = candidate
                heapq.heappush(pq, (candidate, to))
    return dist
H, W = map(int, input().split())
# G[j] collects edges of the REVERSED cost graph over the 10 digits.
G = [[] for _ in range(10)]
for i in range(10):
    adj = list(map(int, input().split()))
    for j, cost in enumerate(adj):
        # We need distances *to* digit 1, so store each edge i->j reversed
        # (as j->i) and run Dijkstra from node 1 on the reversed graph.
        G[j].append((i, cost))
shortest_d = dijkstra(1, G)
ans = 0
# Sum the minimal conversion cost of every non-empty (-1) cell in the grid.
for _ in range(H):
    for x in list(map(int, input().split())):
        if x == -1:
            continue
        ans += shortest_d[x]
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
6d10531aee49e9767663b286b0dedea028c51fe3 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog_tags/initial_4445.py | 10e2b1dd0f52ad72a98232ad27da4b44cc449e2d | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,328 | py | import _surface
import chimera
try:
    import chimera.runCommand
except:
    pass
from VolumePath import markerset as ms
try:
    # Newer Chimera/VolumePath builds expose Marker_Set directly.
    from VolumePath import Marker_Set, Link
    new_marker_set=Marker_Set
except:
    # Fallback for older builds: create marker sets via the volume path dialog.
    from VolumePath import volume_path_dialog
    d= volume_path_dialog(True)
    new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
# The original script repeated the same 5-line create/lookup/place pattern
# once per marker (~100 copy-pasted lines). All placements share one radius,
# so the data is tabulated as (marker set name, xyz position, rgb color) and
# placed in a single loop, preserving the original placement order.
_MARKER_RADIUS = 21.9005
_MARKER_SPECS = [
    ("Cog1_Anch", (271, 941, 717), (0, 0, 1)),
    ("Cog2_GFPN", (8, 644, 813), (1, 0.5, 0)),
    ("Cog2_GFPC", (316, 838, 442), (1, 0.5, 0)),
    ("Cog2_Anch", (741, 292, 304), (1, 0.5, 0)),
    ("Cog3_GFPN", (507, 483, 586), (1, 0.87, 0)),
    ("Cog3_GFPC", (646, 492, 368), (1, 0.87, 0)),
    ("Cog3_Anch", (899, 918, 572), (1, 0.87, 0)),
    ("Cog4_GFPN", (184, 328, 74), (0.97, 0.51, 0.75)),
    ("Cog4_GFPC", (983, 171, 422), (0.97, 0.51, 0.75)),
    ("Cog4_Anch", (29, 726, 312), (0.97, 0.51, 0.75)),
    ("Cog5_GFPN", (561, 783, 445), (0.39, 0.31, 0.14)),
    ("Cog5_GFPC", (9, 467, 476), (0.39, 0.31, 0.14)),
    ("Cog5_Anch", (71, 450, 182), (0.39, 0.31, 0.14)),
    ("Cog6_GFPN", (334, 50, 46), (0.6, 0.31, 0.64)),
    ("Cog6_GFPC", (854, 299, 186), (0.6, 0.31, 0.64)),
    ("Cog6_Anch", (534, 723, 309), (0.6, 0.31, 0.64)),
    ("Cog7_GFPN", (719, 189, 222), (0.89, 0.1, 0.1)),
    ("Cog7_GFPC", (201, 474, 690), (0.89, 0.1, 0.1)),
    ("Cog7_Anch", (147, 331, 734), (0.89, 0.1, 0.1)),
    ("Cog8_GFPC", (999, 507, 976), (0.3, 0.69, 0.29)),
    ("Cog8_Anch", (212, 142, 329), (0.3, 0.69, 0.29)),
]
for name, xyz, color in _MARKER_SPECS:
    # Create each marker set once; reuse it if it already exists.
    if name not in marker_sets:
        marker_sets[name] = new_marker_set(name)
    s = marker_sets[name]
    s.place_marker(xyz, color, _MARKER_RADIUS)
# Register any surface sets with Chimera's open models (none are created
# above; kept for parity with the generated script family).
for k in surf_sets.keys():
    chimera.openModels.add([surf_sets[k]])
| [
"batxes@gmail.com"
] | batxes@gmail.com |
ef4566801b729677ae25b0866bd8d8593802a4ee | d37a19ab3bcaba6e808a18df411c653c644d27db | /Year1/ca116/lab10/prefix-2.py | 0f780e706f8656cab0331852654b89ce2999a848 | [] | no_license | Andrew-Finn/DCU | 9e7009dac9a543aaade17e9e94116259dcc1de20 | 013789e8150d80d3b3ce2c0c7ba968b2c69a7ce0 | refs/heads/master | 2023-02-21T05:13:42.731828 | 2022-02-14T12:39:20 | 2022-02-14T12:39:20 | 157,438,470 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | #!/usr/bin/env python
def find_first_with_prefix(words, prefix):
    """Return the first word in *words* that starts with *prefix*, or None."""
    for word in words:
        if word.startswith(prefix):
            return word
    return None
if __name__ == "__main__":
    # The original version could never print a match: it used a Python-2
    # style `print` split over two lines (a no-op expression in Python 3)
    # and exited the loop via `i = len(a) + 1` index arithmetic, followed
    # by a dead trailing `if i < len(s)` block.
    words = []
    prefix = "mont"
    match = find_first_with_prefix(words, prefix)
    if match is not None:
        print(match)
| [
"git@afinn.me"
] | git@afinn.me |
49b43b95a19db5b65407a7b9cba11476f6bd9c45 | 9f2a0006322235db485912543565e090bccd0de7 | /pathConverter/pathConverter/wsgi.py | 6b29636d01ee8bc917ebf61cdc08d25f244b4307 | [] | no_license | xiaoxiaolulu/djangoConsolidate | 12aa1e0e50497eb3f58b47b9876074423c18e525 | 364bf9537112f4d39f7fb159a2eb6734e9540ec5 | refs/heads/master | 2021-01-02T03:49:40.176569 | 2020-02-17T17:21:05 | 2020-02-17T17:21:05 | 239,475,972 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
WSGI config for pathConverter project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before the application is built;
# setdefault keeps any value already present in the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pathConverter.settings')
# Module-level WSGI callable that servers (gunicorn, uwsgi, ...) import.
application = get_wsgi_application()
| [
"546464268@qq.com"
] | 546464268@qq.com |
bf1d371fe2caa2780a581f23ff2ed1a7b66d7e2f | 4e37adc003e3be064161cfa1521b477fa75649fd | /py/api/search/search/models/search_stores_get200_application_json_response_all_of.py | fff94f225200397ec60a959d7d98868e60043304 | [] | no_license | stgpetrovic/junction2019 | ff97a1fb38262c0ea47905c4c61c9550271168c5 | 227da9ac06f1cd708d4649652a8d79bf776e41b2 | refs/heads/master | 2020-09-11T07:42:03.613409 | 2019-11-17T08:27:43 | 2019-11-17T08:28:05 | 221,989,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,720 | py | # coding: utf-8
"""
Search API
Search API is a REST-like API which wraps the underlying ElasticSearch service for most common use cases. While this API is called the \"search\" service, in practice it acts as the main data engine for various Kesko services, providing high performance endpoints for fetching recipe, product, offer, store and article data. API requests are only served over HTTPS, using TLS 1.0, 1.1, and 1.2. Requests will not be honored over plaintext HTTP. Use of `accept: application/json` and `content-type: application/json` headers is required when applicable. The API uses UTF-8 character encoding for all responses. Some fields may include characters that are not in the ASCII range. As every other Kesko API service in this hackathon, authentication is accomplished by providing `Ocp-Apim-Subscription-Key` header with your subscription key as the value. Submitting excessive requests to the server may result in a HTTP 429 Too Many Requests status code and temporary limitations to your Subscription. We kindly ask that you to limit the concurrency of your requests and/or insert 50 – 100 milliseconds of delay between the requests you send to the server. (i.e., 10 requests per second on average), since this environment doesn't run with the same specs as the real production instance. # noqa: E501
The version of the OpenAPI document: 1.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from api.search.search.configuration import Configuration
class SearchStoresGet200ApplicationJsonResponseAllOf(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Holds the `results` list and `facets` object of a store-search response.
    """
    # Attribute name -> declared OpenAPI type.
    openapi_types = {
        'results': 'list[SearchGroupedGet200ApplicationJsonResponseStoresResults]',
        'facets': 'object'
    }
    # Attribute name -> JSON field name in the API payload.
    attribute_map = {
        'results': 'results',
        'facets': 'facets'
    }
    def __init__(self, results=None, facets=None, local_vars_configuration=None):  # noqa: E501
        """SearchStoresGet200ApplicationJsonResponseAllOf - a model defined in OpenAPI"""  # noqa: E501
        self.local_vars_configuration = (
            local_vars_configuration if local_vars_configuration is not None else Configuration()
        )
        self._results = None
        self._facets = None
        self.discriminator = None
        if results is not None:
            self.results = results
        if facets is not None:
            self.facets = facets
    @property
    def results(self):
        """List of store result objects, or None when absent."""
        return self._results
    @results.setter
    def results(self, results):
        """Set the store result list."""
        self._results = results
    @property
    def facets(self):
        """Raw facets object returned by the search backend."""
        return self._facets
    @facets.setter
    def facets(self, facets):
        """Set the facets object."""
        self._facets = facets
    def to_dict(self):
        """Recursively convert the model into plain dicts/lists."""
        def _convert(value):
            if isinstance(value, list):
                return [v.to_dict() if hasattr(v, "to_dict") else v for v in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in value.items()}
            return value
        return {attr: _convert(getattr(self, attr)) for attr in self.openapi_types}
    def to_str(self):
        """Pretty-printed string form of to_dict()."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Models compare equal when their dict representations match."""
        if not isinstance(other, SearchStoresGet200ApplicationJsonResponseAllOf):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Exact inverse of __eq__."""
        return not self.__eq__(other)
| [
"stgpetrovic@gmail.com"
] | stgpetrovic@gmail.com |
36b6d3dba6ad5687fc85821c8dd5ce78b2bddf17 | e81d274d6a1bcabbe7771612edd43b42c0d48197 | /Python高级/day39(UDP、TCP回顾)/demo/02_Tcp Udp通信和实践/tcp服务器.py | d7ced7ae4a8bb0ee7037b8f670fc0e95579beef8 | [
"MIT"
] | permissive | ChWeiking/PythonTutorial | 1259dc04c843382f2323d69f6678b9431d0b56fd | 1aa4b81cf26fba2fa2570dd8e1228fef4fd6ee61 | refs/heads/master | 2020-05-15T00:50:10.583105 | 2016-07-30T16:03:45 | 2016-07-30T16:03:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | from socket import *
# Minimal blocking TCP server demo: bind to port 5551 on all interfaces,
# accept a single client, print the connection info, then shut down.
tcpserver = socket(AF_INET,SOCK_STREAM)
tcpserver.bind(("",5551))
# Allow up to 5 queued, not-yet-accepted connections.
tcpserver.listen(5)
# accept() blocks until a peer connects; it returns (connected socket, peer address).
# NOTE(review): despite the names, `dat` is a socket object, not received data.
dat,ip = tcpserver.accept()
print(dat,ip)
tcpserver.close()
#<socket.socket fd=4, family=AddressFamily.AF_INET,
# type=SocketKind.SOCK_STREAM, proto=0, laddr=('192.168.14.85', 5551),
# raddr=('192.168.14.8', 52273)> ('192.168.14.8', 52273)
| [
"1025212779@qq.com"
] | 1025212779@qq.com |
054cb55739a73e35353e05b328c4cd6b763602ea | 7eea707a1d422b65353238c03a5a5d87c167cf64 | /urllibstart.py | c32bc78447f9fede675e763af539952e98792917 | [] | no_license | liberbell/py04 | 81eac41330ea7b4271661dc46d9888f74f17877c | 3118d5f19b1a5a356b215ec071642c3b97c61c88 | refs/heads/master | 2020-06-24T21:56:17.907409 | 2019-08-05T22:39:15 | 2019-08-05T22:39:15 | 199,102,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | import urllib.request
def main():
    """Fetch httpbin's sample XML and print status, headers and body."""
    response = urllib.request.urlopen("https://httpbin.org/xml")
    print("Result code: {0}".format(response.status))
    print("Headers:----------")
    print(response.getheaders())
    print("Returned data:----")
    body = response.read().decode('UTF-8')
    print(body)
if __name__ == '__main__':
    main()
| [
"liberbell@gmail.com"
] | liberbell@gmail.com |
63ddf3acbbe69b137f1917be9d57e96c5d6984be | 6a82d489d993269be1560af0317b3d9098b603f9 | /exe43.py | 77aefadeebb5029ddc8dd53b75f10894dd4d0b0d | [] | no_license | andreplacet/reinforcement-python-3 | a06df30b2bf4314da3d7cb200f0c1937ade65a2a | 3e2dd8da00c4a32f29d237004aa52c7710fe2169 | refs/heads/master | 2023-01-01T18:17:49.604566 | 2020-10-30T17:33:16 | 2020-10-30T17:33:16 | 308,700,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 969 | py | # Exercicio 43
# Snack-bar cash register: read (code, quantity) orders until code 0, then
# print a receipt with every order and the grand total.
codigos = [100, 101, 102, 103, 104, 105]
comidas = ['Cachorro Quente', 'Bauru Simples', 'Bauru com ovo', 'Hamburguer', 'ChesseBurguer', 'Refrigerante']
precos = [1.20, 1.30, 1.50, 1.20, 1.30, 1.0]
pedido = []  # total value of each completed order, in order
while True:
    print(f'Pedido n°{len(pedido) + 1}')
    codigo = int(input("Digite o código do alimento: "))
    if codigo == 0:
        break
    # Re-prompt until a valid product code is entered.
    while codigo not in codigos:
        print('[Este código não corresponde a nenhum alimento.]')
        codigo = int(input('Digite o código do alimento: '))
    indice = codigos.index(codigo)
    quantidade = int(input('Digite a quantidade: '))
    pedido.append(precos[indice] * quantidade)
# Receipt. BUG FIX: the original iterated range(n_pedido - 1) and therefore
# always omitted the last order (and printed nothing for a single order);
# iterate over every recorded order instead.
for numero, valor in enumerate(pedido, start=1):
    print(f'Pedido n°{numero} = R$ {valor:.2f}')
print(f'Total: R${sum(pedido):.2f}')
| [
"andreplacet@gmail.com"
] | andreplacet@gmail.com |
4b4f4c75b734be2e4e1d26389d83033b29ff6467 | add72f4d6f9f7af1f437d19213c14efb218b2194 | /icekit/plugins/links/migrations/0004_auto_20170314_1401.py | db3cd21e288b25c4f34168d17cda7de9e95d20b4 | [
"MIT"
] | permissive | ic-labs/django-icekit | 6abe859f97c709fcf51207b54778501b50436ff7 | c507ea5b1864303732c53ad7c5800571fca5fa94 | refs/heads/develop | 2022-08-08T21:26:04.144852 | 2018-01-08T02:55:17 | 2018-01-08T02:55:17 | 65,470,395 | 53 | 12 | MIT | 2022-07-06T19:59:39 | 2016-08-11T13:11:02 | Python | UTF-8 | Python | false | false | 924 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration.
    Narrows the `style` choices of the three link models to the single
    "Normal" option. Applied migration files must stay byte-stable, so this
    revision only adds documentation.
    """
    dependencies = [
        ('ik_links', '0003_auto_20161117_1810'),
    ]
    operations = [
        # Each AlterField rewrites the `style` CharField with one allowed
        # choice (stored value '' displayed as 'Normal'); blank stays allowed.
        migrations.AlterField(
            model_name='articlelink',
            name='style',
            field=models.CharField(choices=[(b'', b'Normal')], max_length=255, verbose_name=b'Link style', blank=True),
        ),
        migrations.AlterField(
            model_name='authorlink',
            name='style',
            field=models.CharField(choices=[(b'', b'Normal')], max_length=255, verbose_name=b'Link style', blank=True),
        ),
        migrations.AlterField(
            model_name='pagelink',
            name='style',
            field=models.CharField(choices=[(b'', b'Normal')], max_length=255, verbose_name=b'Link style', blank=True),
        ),
    ]
| [
"greg@interaction.net.au"
] | greg@interaction.net.au |
7ee890327d38e18ac084687320b2116e85b2cc0b | f281c9ecd48aedd30469cfbd556bc3319cd8419d | /web_framework/src/router3.py | f4071387e051161aebb4b39a1463f5cc96e91535 | [] | no_license | youerning/blog | 5d5edeb4f836d233a4119796f38fc4e33531714e | 59c3704cf5a77bba70a48a5d09db9b165ea59d4b | refs/heads/master | 2023-08-31T04:08:16.461923 | 2023-08-27T01:28:39 | 2023-08-27T01:28:39 | 114,074,235 | 183 | 105 | null | 2023-05-05T02:36:52 | 2017-12-13T04:35:00 | HTML | UTF-8 | Python | false | false | 2,895 | py | # -*- coding: UTF-8 -*-
# @author youerning
# @email 673125641@qq.com
# 主要参考于: https://github.com/sirMackk/diy_framework/blob/master/diy_framework/application.py
import re
from collections import namedtuple
from functools import partial
from functools import wraps
SUPPORTED_METHODS = {"GET", "POST"}  # HTTP verbs this mini-framework knows about
# One registered route: allowed verbs, compiled path regex, handler callable.
Route = namedtuple("Route", ["methods", "pattern", "handler"])
class View:
    # Placeholder for a future class-based view API.
    pass
class Router(object):
    """Maps (HTTP method, path) pairs onto handler callables.
    Route paths may contain `{name}` placeholders; the matched segments are
    bound to the handler as keyword arguments.
    """
    def __init__(self):
        self._routes = []
    @classmethod
    def build_route_regex(cls, regexp_str):
        """Compile a route path into an anchored regex.
        `/home` becomes `^/home$`; `/item/{name}` becomes
        `^/item/(?P<name>[a-zA-Z0-9_-]+)$`.
        """
        def named_groups(matchobj):
            return '(?P<{0}>[a-zA-Z0-9_-]+)'.format(matchobj.group(1))
        pattern_text = re.sub(r'{([a-zA-Z0-9_-]+)}', named_groups, regexp_str)
        return re.compile('^' + pattern_text + '$')
    @classmethod
    def match_path(cls, pattern, path):
        """Return the dict of captured path variables, or None on no match."""
        matched = pattern.match(path)
        if matched is None:
            return None
        return matched.groupdict()
    def add_route(self, path, handler, methods=None):
        """Register *handler* for *path*; *methods* defaults to {"GET"}."""
        methods = {"GET"} if methods is None else set(methods)
        compiled = self.build_route_regex(path)
        entry = Route(methods, compiled, handler)
        if entry in self._routes:
            raise Exception("路由重复了: {}".format(path))
        self._routes.append(entry)
    def get_handler(self, method, path):
        """Resolve a request to a handler with path variables pre-bound;
        falls back to the module-level not_found handler on no match."""
        for entry in self._routes:
            if method not in entry.methods:
                continue
            params = self.match_path(entry.pattern, path)
            if params is not None:
                return partial(entry.handler, **params)
        return not_found
    def route(self, path, methods=None):
        """Decorator form of add_route; returns the handler unchanged."""
        def wrapper(handler):
            # Assigned inside the closure, so declare it nonlocal explicitly.
            nonlocal methods
            if callable(handler):
                methods = {"GET"} if methods is None else set(methods)
                self.add_route(path, handler, methods)
            return handler
        return wrapper
route = Router()  # module-level router the demo handlers register against
@route.route("/home")
def home():
    """Static route demo (GET only by default)."""
    return "home"
@route.route("/item/{name}", methods=["GET", "POST"])
def item(name):
    """Dynamic route demo: {name} is captured from the path."""
    return name
def not_found():
    """Fallback handler returned by Router.get_handler when nothing matches."""
    return "not found"
# Smoke test: the second lookup (POST /home) and the last one (unknown path)
# both fall through to not_found.
print(route.get_handler("GET", "/home")())
print(route.get_handler("POST", "/home")())
print(route.get_handler("GET", "/item/item1")())
print(route.get_handler("POST", "/item/item1")())
print(route.get_handler("GET", "/xxxxxx")())
"673125641@qq.com"
] | 673125641@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.