blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
60498b2a36b3b029e68117c353e9d62505294008
|
9dab41a71bf19a9ad17ee3e9f77c0f58aebd1d6d
|
/python/uline/uline/uline/handlers/app/official/operations/form.py
|
54954fb7a2dabc703be3fa1e80e3641216ebec7a
|
[] |
no_license
|
apollowesley/Demo
|
f0ef8ec6c4ceb0aec76771da8dd9a62fb579eac8
|
471c4af95d3a7222d6933afc571a8e52e8fe4aee
|
refs/heads/master
| 2021-02-15T04:01:51.590697
| 2018-01-29T01:44:29
| 2018-01-29T01:44:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 526
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from wtforms import validators, fields
from uline.utils.form import BaseForm
class MessageSendSearch(BaseForm):
    """Search/filter form for the message-send listing; every filter is optional."""

    # Creation timestamp of the send record.
    create_at = fields.DateTimeField(validators=[validators.Optional()])
    # Text to match against the message body.
    message_content = fields.StringField(validators=[validators.Optional()])
    # Count of messages already sent.
    sended_count = fields.IntegerField(validators=[validators.Optional()])
    # Count of messages not yet sent.
    need_send_count = fields.IntegerField(validators=[validators.Optional()])
|
[
"36821277@qq.com"
] |
36821277@qq.com
|
87fcbdd19d932c6379de0cd46fd9d4a1f81b1b85
|
47ce68e1ff970318fd31ac43405d0e1fa3594bf6
|
/Models/Autoencoders/TransposeConvAutoencoderDeepExtraLLR.py
|
281d1a5861b11b1efe478d6108fd1c7b3310b5eb
|
[
"BSD-3-Clause"
] |
permissive
|
Midoriii/Anomaly_Detection_Diploma
|
7196da379f8aefbd4546ca23e8303d1829e059fb
|
11145e3e5210a4e45a33d98b138213edb7bc5d3d
|
refs/heads/master
| 2023-03-25T20:42:56.961210
| 2021-03-14T01:13:39
| 2021-03-14T01:13:39
| 261,205,472
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,910
|
py
|
'''
Copyright (c) 2021, Štěpán Beneš
Convolutional Autoencoder with learnable Conv2DTranspose layers,
especially deep, the encoding is really small
With even further decreased learning rate
'''
import numpy as np
from Models.Autoencoders.BaseModel import BaseModel
from keras.layers import Input, Reshape, Dense, Flatten
from keras.layers import Activation, Conv2D, MaxPooling2D, Conv2DTranspose, PReLU
from keras.initializers import Constant
from keras.models import Model
from keras.callbacks import History
from keras.optimizers import Adam
class TransposeConvAutoencoderDeepExtraLLR(BaseModel):
    """Deep convolutional autoencoder with learnable Conv2DTranspose upsampling.

    Eight Conv/PReLU/MaxPooling stages shrink the input to a very small
    encoding; eight mirrored Conv2DTranspose/PReLU stages rebuild it.
    Trained with an extra-low learning rate.
    """

    def __init__(self):
        """Set the model name used by the base class."""
        super().__init__()
        self.name = "TransposeConvAutoencoderDeepExtraLLR"
        return

    def compile_net(self):
        """Compile with Adam at a deliberately tiny learning rate and MSE loss."""
        optimizer = Adam(learning_rate=0.00001)
        self.model.compile(optimizer=optimizer, loss='mse')
        self.model.summary()
        return

    def create_net(self, input_shape):
        """Build the symmetric encoder/decoder graph on the instance.

        Sets `self.encoder` (input -> encoding), `self.encoded`,
        `self.decoded` and `self.model` (input -> reconstruction).
        """
        depth = 8  # number of downsampling stages (and matching upsampling ones)
        net_input = Input(shape=input_shape)

        # Encoder: Conv -> PReLU -> 2x2 pooling, repeated `depth` times.
        x = net_input
        for _ in range(depth):
            x = Conv2D(self.filters, (3, 3), padding='same')(x)
            x = PReLU(alpha_initializer=Constant(value=0.25))(x)
            x = MaxPooling2D((2, 2), padding='same')(x)
        self.encoded = x
        # Keep the encoder part as a stand-alone model.
        self.encoder = Model(net_input, self.encoded)

        # Decoder: learnable strided transposed convolutions mirror the pooling.
        for _ in range(depth):
            x = Conv2DTranspose(self.filters, (3, 3), strides=(2, 2), padding='same')(x)
            x = PReLU(alpha_initializer=Constant(value=0.25))(x)
        # Single-channel sigmoid output for image reconstruction.
        self.decoded = Conv2D(1, (3, 3), activation='sigmoid', padding='same')(x)
        self.model = Model(net_input, self.decoded)
        return
|
[
"stephen.Team24@gmail.com"
] |
stephen.Team24@gmail.com
|
3878130f0f9cf98c52c5c9089cbe81841cf7040e
|
20149c63930ebb2a950e45a7fafa12a17712695c
|
/ImageProcessing/camera.py
|
914751cb174bbbf7236d7c86bdf4630599682c6b
|
[] |
no_license
|
junyi1997/Final_OIT_projet
|
ffe7f1a1c61f8124ab72d5250e30c1e2cc49c1ab
|
98fc039378021c6db259dbe79d7980750aa91710
|
refs/heads/master
| 2023-01-03T05:24:31.133416
| 2020-11-01T14:12:15
| 2020-11-01T14:12:15
| 201,788,832
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 296
|
py
|
import picamera
initialized = False
class Camera:
    """Thin wrapper around a single Raspberry Pi camera device."""

    # Class-level default; replaced with a real device handle in __init__.
    camera = None

    def __init__(self):
        """Open the Pi camera."""
        self.camera = picamera.PiCamera()

    def takePhoto(self, filepath):
        """ Takes a photo and saves it to the /img directory. """
        self.camera.capture(filepath)

    def getPiCamera(self):
        """Expose the underlying PiCamera object for direct use."""
        return self.camera
|
[
"q5896799@gmail.com"
] |
q5896799@gmail.com
|
44da06eb51574b41af6813f5e6252a5a2750e648
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03767/s866081759.py
|
cf66b2ce220f7faa16d298b3290bd992eb740c88
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 187
|
py
|
import collections

# Read n, then the full list of numbers; greedily score the second-largest
# of the remaining values n times, discarding the current min and max each round.
n = int(input())
values = sorted(map(int, input().split()))
queue = collections.deque(values)
total = 0
for _ in range(n):
    queue.popleft()        # drop the current minimum
    queue.pop()            # drop the current maximum
    total += queue.pop()   # score the second-largest remaining value
print(total)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
fd2299064cdb5b26bd595348ca1902f794c5e968
|
66c68ab7a6b62c4a94ddf4cdda64dc7f2181f6f7
|
/samples/crater/crater.py
|
4c20dc702a93465e1b15bd734aff41608960e0f7
|
[
"MIT"
] |
permissive
|
ZhiangChen/Mask_RCNN
|
bfaeebbc32bdd2eef40f77ffe385437c6d9e60e0
|
040611909c140a98ab09ae09b511ce2a47b5f195
|
refs/heads/master
| 2020-03-27T10:04:52.300122
| 2019-02-25T00:42:17
| 2019-02-25T00:42:17
| 146,393,525
| 1
| 0
| null | 2018-08-28T04:51:42
| 2018-08-28T04:51:42
| null |
UTF-8
|
Python
| false
| false
| 4,235
|
py
|
"""
classes for lunar crater dataset
Zhiang Chen
Sep 13, 2018
zch@asu.edu
"""
import os
import sys
import numpy as np
import skimage.draw
import pickle
import argparse
import matplotlib.pyplot as plt
from mrcnn import visualize
from mrcnn.config import Config
from mrcnn import model as modellib, utils
ROOT_DIR = os.path.abspath("../../")
sys.path.append(ROOT_DIR) # To find local version of the library
# Path to trained weights file
COCO_WEIGHTS_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
############################################################
# Dataset config
############################################################
class CraterConfig(Config):
    """Mask R-CNN configuration overrides for the lunar crater dataset."""
    NAME = "crater"
    GPU_COUNT = 1  # cannot create model when setting gpu count as 2
    IMAGES_PER_GPU = 1
    NUM_CLASSES = 1 + 1  # Background + crater
    # Fixed 256x256 inputs.
    IMAGE_MIN_DIM = 256
    IMAGE_MAX_DIM = 256
    RPN_ANCHOR_SCALES = (8, 16, 32, 64, 128)
    # IMAGE_CHANNEL = 1 # wrong, the input will be automatically converted to 3 channels (if greyscale, rgb will be repeated)
    STEPS_PER_EPOCH = 100
    DETECTION_MIN_CONFIDENCE = 0.9
    # Allow many instances per image — craters can be dense.
    MAX_GT_INSTANCES = 500
    DETECTION_MAX_INSTANCES = 600
    TRAIN_ROIS_PER_IMAGE = 1000
############################################################
# Dataset
############################################################
class CraterDataset(utils.Dataset):
    """Lunar crater dataset: 50 JPEG images per sub-folder plus pickled masks."""

    def load_crater(self, datadir, subset, subsubset):
        """Register the 50 images of one sub-folder with the dataset.

        Args:
            datadir: dataset root directory.
            subset: "train" or "val".
            subsubset: numeric sub-folder name (as a string); also offsets
                the image ids by 50 per sub-folder.
        """
        self.add_class("lunar_crater", 1, "lunar_crater")
        assert subset in ["train", "val"]
        dataset_dir = os.path.join(datadir, subset, subsubset)
        annotation_path = os.path.join(dataset_dir, 'annotations.pickle')
        assert os.path.isfile(annotation_path)
        # latin1 keeps Python-2-era pickles loadable under Python 3.
        with open(annotation_path, "rb") as f:
            annotations = pickle.load(f, encoding='latin1')
        print('loading ' + subsubset)
        for i in range(50):
            image_path = os.path.join(dataset_dir, "img_{i:0{zp}d}.jpg".format(i=i, zp=2))
            assert os.path.isfile(image_path)
            image_id = int(subsubset) * 50 + i
            # NOTE(review): only `skimage.draw` is imported at the top of this
            # file; `skimage.io` may not be loaded — confirm the imports.
            image = skimage.io.imread(image_path)
            height, width = image.shape[:2]
            index = "{k:0{zp}d}".format(k=i, zp=2)
            # Reorder the stored mask axes so the instance axis is last.
            mask = annotations[index]['data']
            mask = np.swapaxes(mask, 0, 1)
            mask = np.swapaxes(mask, 1, 2)
            self.add_image(
                "lunar_crater",
                image_id=image_id,
                path=image_path,
                width=width,
                height=height,
                annotation_path=annotation_path,
                annotation=mask)

    def load_mask(self, image_id):
        """Return (boolean mask array, class-id array) for one image."""
        info = self.image_info[image_id]
        if info["source"] != "lunar_crater":
            return super(self.__class__, self).load_mask(image_id)
        mask = info["annotation"]
        # bug fix: np.bool was removed in NumPy 1.24 — use the builtin bool dtype.
        return mask.astype(bool), np.ones([mask.shape[-1]], dtype=np.int32)

    def image_reference(self, image_id):
        """Return the image path (or defer to the base class)."""
        info = self.image_info[image_id]
        if info["source"] == "lunar_crater":
            return info["path"]
        # bug fix: the base-class result was computed but never returned.
        return super(self.__class__, self).image_reference(image_id)

    def display_mask(self, image_id):
        """Show the union of all instance masks of one image."""
        masks, ids = self.load_mask(image_id)
        # bug fix: `mask` was undefined here; collapse instances over the last axis.
        mask = masks.max(2)
        plt.imshow(mask)
        plt.show()
############################################################
# Training
############################################################
if __name__ == '__main__':
    # Manual smoke test: build the config, register two training sub-folders
    # (50 images each), and decode the masks of one image.
    config = CraterConfig()
    config.display()
    dataset = CraterDataset()
    dataset.load_crater('../../dataset/lunar_craters', 'train', '0')
    dataset.load_crater('../../dataset/lunar_craters', 'train', '1')
    #dataset.load_crater('../../dataset/lunar_craters', 'train', '2')
    #dataset.load_crater('../../dataset/lunar_craters', 'train', '3')
    a,b = dataset.load_mask(65)
|
[
"zxc251@case.edu"
] |
zxc251@case.edu
|
1c839c157f06ac377b799639d8ae03cc25682b58
|
4235ae775f1061217504ea027ef32b533e8dea34
|
/0x04-pagination/0-simple_helper_function.py
|
83390a8b4302c52fe4e99a7266d5e60bc08eb3e8
|
[] |
no_license
|
Leidysalda/holbertonschool-web_back_end
|
892cda3361a5fc18693f645b5b8f058087f6b9fd
|
0b8bd14fa018d9480fd31cf300dad2a3ccd439d0
|
refs/heads/master
| 2023-08-23T18:19:17.226745
| 2021-10-18T05:26:30
| 2021-10-18T05:26:30
| 387,662,848
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 182
|
py
|
#!/usr/bin/env python3
"""Simple helper function
"""
def index_range(page: int, page_size: int) -> tuple:
    """Return the (start, end) slice indexes for a 1-indexed page.

    Args:
        page: 1-indexed page number.
        page_size: number of items per page.
    """
    start = (page - 1) * page_size
    return start, start + page_size
|
[
"leidysalda1@gmail.com"
] |
leidysalda1@gmail.com
|
43ae88a0b1629e2e7b1ee73877ac8e03e6ad97a0
|
107ebb6d25812d12718d3f0731770318fb0eb2d2
|
/study_code/Day_15/15展示表格Tree.py
|
d67b5c4e5773f4cf666b250e592e4924b4ca87ae
|
[] |
no_license
|
taoranzhishang/Python_codes_for_learning
|
0c5fa25a5da80e5cbca8cf0b9b3703d488e1af6f
|
23d12b8c7524f3672ff3baed94dbfed04df821b5
|
refs/heads/master
| 2023-03-09T12:16:02.749807
| 2021-02-26T13:04:08
| 2021-02-26T13:04:08
| 315,070,954
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 661
|
py
|
import tkinter
from tkinter import ttk

root = tkinter.Tk()
# A Treeview can render a tree or, as here, a flat table.
table = ttk.Treeview(root)
table["columns"] = ("Name", "Age", "Height")
# Fix each data column to 100px wide.
table.column("Name", width=100)
table.column("Age", width=100)
table.column("Height", width=100)
# Column headers (Chinese captions: name / age / height).
table.heading("Name", text="姓名")
table.heading("Age", text="年龄")
table.heading("Height", text="身高")
# Insert four demo rows at the top level.
for position, label in enumerate(("line1", "line2", "line3", "line4")):
    table.insert('', position, text=label, values=('1', '2', '3'))
table.pack()
root.mainloop()
|
[
"taoranzhishang@hotmail.com"
] |
taoranzhishang@hotmail.com
|
31b99f77c0ae6772cfdef2260dad07ec24ebc40a
|
e77a7cc1ed343a85662f0ad3c448a350ab776261
|
/data_structures/array/dutch_flag_problem.py
|
f898486c9edeca6b8bfaf519c0fddcb52c4d54b5
|
[
"MIT"
] |
permissive
|
M4cs/python-ds
|
9dcecab10291be6a274130c42450319dc112ac46
|
434c127ea4c49eb8d6bf65c71ff6ee10361d994e
|
refs/heads/master
| 2020-08-10T03:40:22.340529
| 2019-10-10T17:52:28
| 2019-10-10T17:52:28
| 214,247,733
| 2
| 0
|
MIT
| 2019-10-10T17:43:31
| 2019-10-10T17:43:30
| null |
UTF-8
|
Python
| false
| false
| 446
|
py
|
def dutch(arr):
    """Sort a list of 0/1/2 values in place (Dutch national flag problem).

    Single pass, O(1) extra space: everything left of `low` is 0,
    everything right of `high` is 2, and `mid` scans the unknown region.
    """
    low, mid = 0, 0
    high = len(arr) - 1
    while mid <= high:
        current = arr[mid]
        if current == 0:
            arr[low], arr[mid] = arr[mid], arr[low]
            low += 1
            mid += 1
        elif current == 1:
            mid += 1
        else:
            arr[mid], arr[high] = arr[high], arr[mid]
            high -= 1
# Demo: print the array before and after the in-place partition.
arr = [1,0,2,1,0,2,1,2,1,2,1,1,0,2,1,0,1,2,1,2,1,1,2,1,0,2,1,1]
print(arr)
dutch(arr)
print(arr)
|
[
"prabhupant09@gmail.com"
] |
prabhupant09@gmail.com
|
905bd2fa91b8f2f596253a2ea0da0afae3162704
|
73c5bc071e496b67677c55dfb9cd8621eac1f34e
|
/probabilistic_utils/gmm_utils.py
|
2887862615fb3b85f7da7651fe0e1736afc1d27f
|
[] |
no_license
|
salt-fly/unsup_temp_embed
|
55324ffba7b2858ff3c255f2a984d0253ea23dfb
|
be7fa1fdfb4466673e3d6953924194cdead281a5
|
refs/heads/master
| 2020-05-30T19:33:51.311017
| 2019-04-29T18:07:33
| 2019-04-29T18:07:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,763
|
py
|
#!/usr/bin/env python
"""
"""
__author__ = 'Anna Kukleva'
__date__ = 'September 2018'
import numpy as np
from utils.arg_pars import opt
from utils.logging_setup import logger
class AuxiliaryGMM:
    """Placeholder used where no real Gaussian mixture could be fitted.

    Mimics the attributes the surrounding code reads (`means_`,
    `covariances_`) and scores every sample as impossible (-inf).
    """

    def __init__(self):
        self.means_ = [0]
        self.covariances_ = [0]

    def score_samples(self, features):
        """Return an array of -inf, one entry per row of `features`."""
        return np.full(features.shape[0], -np.inf)
class GMM_trh:
    """Wrap a fitted GMM together with a background-score threshold."""

    def __init__(self, gmm):
        self._gmm = gmm
        self.trh = np.inf        # decision threshold; stays +inf for auxiliary models
        self.mean_score = 0      # score of the mixture mean; 0 until defined
        self.bg_trh_score = []   # collected background scores
        # Only a real (fitted) mixture can anchor a threshold.
        if not isinstance(gmm, AuxiliaryGMM):
            self._define_threshold()

    def _define_threshold(self):
        """Score the mixture mean — it anchors the background threshold."""
        center = self._gmm.means_[0]
        self.mean_score = self._gmm.score_samples(center.reshape(1, -1))
        logger.debug('mean: %f' % self.mean_score)

    def score_samples(self, features):
        """Delegate scoring to the wrapped model."""
        return self._gmm.score_samples(features)

    def append_bg_score(self, score):
        """Record one background score."""
        self.bg_trh_score.append(score)

    def update_trh(self, new_bg_trh=None):
        """Set the threshold `new_bg_trh` below the mean score.

        Falls back to `opt.bg_trh` when no margin is given. A zero
        mean score (auxiliary model) leaves the threshold untouched.
        """
        if self.mean_score != 0:
            margin = opt.bg_trh if new_bg_trh is None else new_bg_trh
            self.trh = self.mean_score - margin
|
[
"kuklevaanna@gmail.com"
] |
kuklevaanna@gmail.com
|
a8dc4dd41c5abc7755f49846ffa67574433c31ea
|
2f2e9cd97d65751757ae0a92e8bb882f3cbc5b5b
|
/287.寻找重复数.py
|
e810ef03eb114a003fdd86c50269732634088bba
|
[] |
no_license
|
mqinbin/python_leetcode
|
77f0a75eb29f8d2f9a789958e0120a7df4d0d0d3
|
73e0c81867f38fdf4051d8f58d0d3dc245be081e
|
refs/heads/main
| 2023-03-10T18:27:36.421262
| 2021-02-25T07:24:10
| 2021-02-25T07:24:10
| 314,410,703
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,150
|
py
|
#
# @lc app=leetcode.cn id=287 lang=python3
#
# [287] 寻找重复数
#
# @lc code=start
class Solution:
    def findDuplicate(self, nums: list) -> int:
        """Return the duplicated number via Floyd's cycle detection.

        Treats `nums` as a linked list (index -> nums[index]); the duplicate
        value is the entry point of the cycle. O(n) time, O(1) space, and the
        input list is not modified.

        Fixes: the original annotated with `List[int]` although `typing.List`
        was never imported (NameError when the class body runs), and left
        debug `print` calls in both phases.
        """
        # Phase 1: advance slow by one step and fast by two until they meet
        # somewhere inside the cycle.
        slow = fast = 0
        while True:
            slow = nums[slow]
            fast = nums[nums[fast]]
            if slow == fast:
                # Phase 2: restart one pointer from 0; moving both one step
                # at a time, they meet exactly at the cycle entry — the
                # duplicated value.
                fast = 0
                while True:
                    fast = nums[fast]
                    slow = nums[slow]
                    if fast == slow:
                        return fast
# @lc code=end
|
[
"mqinbin@gmail.com"
] |
mqinbin@gmail.com
|
5970e87ee2061596cca1ae837cf3d8a58cc9226c
|
ea637e5e28056d0a332a7892bd241aa6a154e57a
|
/recyclescrollview.py
|
c3528005902e474677d732e97bfd75ece4cf01c5
|
[
"MIT"
] |
permissive
|
JesusZerpa/kivy-recyclescrollview
|
85c4511569ce65e0cecf102238dd36858a6aa92c
|
50016c313216833b583066ec0a9ed7e5b271da01
|
refs/heads/master
| 2022-04-20T21:52:22.862800
| 2020-04-21T00:54:28
| 2020-04-21T00:54:28
| 257,435,919
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,534
|
py
|
from kivy.factory import Factory
from kivy.properties import *
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.boxlayout import BoxLayout
class RecycleScrollView(Factory.ScrollView):
    """Scroll view that fakes a long list by keeping at most `max_items`
    live instances of `viewclass`, repositioned to match the scroll offset.

    NOTE(review): `update()` creates a fresh container box on every call
    without removing the previous one — looks leaky; confirm intended.
    """
    viewclass=StringProperty("")  # Factory name of the row widget class
    data=ListProperty([])  # list of dicts: attribute name -> value per row
    #box
    orientation= 'vertical'
    default_height= 1000  # container height used before rows are measured
    cursor=0  # index of the first currently visible data item
    max_items=10  # number of row widgets kept alive at once
    widget_height=None  # height of one row widget, measured lazily in on_data
    def __init__(self,*args,**kwargs):
        """Create the vertical container box and enable vertical scrolling."""
        super(RecycleScrollView,self).__init__(*args,**kwargs)
        self.do_scroll_y=True
        self.box=BoxLayout(orientation="vertical",size_hint_y= None,height=self.default_height)
        self.add_widget(self.box)
    def on_parent(self,instance,value):
        pass
    def on_size(self,instance,value):
        # Fit the container height to the summed heights of its children.
        height=0
        for elem in self.children[0].children:
            height+=elem.height
        self.children[0].height=height
    def on_scroll_move(self,instance):
        # Translate the scroll position into a data index and re-render.
        if self.widget_height:
            # Pixels scrolled down from the top (scroll_y is 1 at the top).
            dx=self.box.height-(self.scroll_y*self.box.height)
            if dx>0:
                item_passed=dx/self.widget_height  # whole rows scrolled past
                self.cursor=int(item_passed)
                self.update()
        return super().on_scroll_move(instance)
    def on_scroll_stop(self,instance):
        # Same cursor/update logic as on_scroll_move, applied at gesture end.
        if self.widget_height:
            dx=self.box.height-(self.scroll_y*self.box.height)
            if dx>0:
                item_passed=dx/self.widget_height
                self.cursor=int(item_passed)
                self.update()
        return super().on_scroll_stop(instance)
    def update(self):
        # Rebuild the visible window [cursor, cursor + max_items).
        self.clear_widgets()
        widget=getattr(Factory,self.viewclass)
        # NOTE(review): this instance is created and never used — confirm needed.
        _widget=widget()
        self.box=FloatLayout(size_hint_y= None,height=self.default_height)
        super(RecycleScrollView,self).add_widget(self.box)
        self.box.top=self.top
        for k,item in enumerate(self.data[self.cursor:self.cursor+self.max_items]):
            widget=getattr(Factory,self.viewclass)
            _widget=widget()
            _widget.size_hint_y=None
            self.box.add_widget(_widget)
            # Absolute y position as if all len(data) rows actually existed.
            _widget.pos=(_widget.pos[0],(_widget.height*len(self.data))-(_widget.height*(self.cursor+k+1)))
            # Apply the data dict as widget attributes.
            for elem in item:
                setattr(_widget,elem,item[elem])
        # Pretend-full height so the scrollbar reflects the whole data set.
        self.box.height=self.widget_height*len(self.data)
    def on_classview(self,instance,value):
        instance.classview=value
    def on_data(self,instance,value):
        # Build the initial window of row widgets and measure the row height.
        #button
        #size_hint: (1, None)
        #height: 200
        self.data=value
        for k,item in enumerate(self.data[self.cursor:self.cursor+self.max_items]):
            widget=getattr(Factory,self.viewclass)
            _widget=widget()
            _widget.size_hint_y=None
            for elem in item:
                setattr(_widget,elem,item[elem])
            # Measure one row's height the first time through.
            if self.widget_height==None:
                self.widget_height=_widget.height
            self.box.add_widget(_widget)
|
[
"jesus26abraham1996@gmail.com"
] |
jesus26abraham1996@gmail.com
|
15b938d4100f7d789627cdd8c18f844a41b98930
|
836312e7ced2d5c5ed43d0e3ad34f2144cf9064e
|
/APIs/5_SecuringYourApi/models.py
|
9f1b833e0b49a78a074f3a8471d669f318a8cb51
|
[] |
no_license
|
muhammad-mamdouh/udacity-fullstack-tools
|
4342cdca75d88184a095d69b9484a4e50c87f905
|
a881845042f3706b5e37ea72df645859541c73ab
|
refs/heads/master
| 2022-12-15T23:33:20.813201
| 2019-07-11T21:05:53
| 2019-07-11T21:05:53
| 187,712,425
| 1
| 0
| null | 2022-12-08T05:51:53
| 2019-05-20T20:58:42
|
Python
|
UTF-8
|
Python
| false
| false
| 715
|
py
|
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from passlib.apps import custom_app_context as pwd_context
Base = declarative_base()
class User(Base):
    """Account record that stores only a salted hash of the password."""
    __tablename__ = 'user'

    id = Column(Integer, primary_key=True)
    username = Column(String(32), index=True)
    password_hash = Column(String(64))

    def hash_password(self, password):
        """Hash `password` and store it; the plaintext is discarded."""
        self.password_hash = pwd_context.hash(password)

    def verify_password(self, password):
        """Return True when `password` matches the stored hash."""
        return pwd_context.verify(password, self.password_hash)
# Create the SQLite database file (users.db) and all mapped tables at import time.
engine = create_engine('sqlite:///users.db')
Base.metadata.create_all(engine)
|
[
"mahammad.mamdouh@gmail.com"
] |
mahammad.mamdouh@gmail.com
|
a95cff2d9d860046fa74ac94747beb54443c7426
|
632099ac0d895943cbbeb9048a2cdfcd21102411
|
/LPD8/__init__.py
|
854fab1bea8584f563056348a8c3ca56979773b0
|
[] |
no_license
|
Toniigor/AbletonLive9_RemoteScripts
|
7f4bbf759a79629584413f6d1797005e8cd7f2ff
|
fed1e5ee61ea12ea6360107a65a6e666364353ff
|
refs/heads/master
| 2021-01-16T21:19:25.330221
| 2014-06-06T12:33:03
| 2014-06-06T12:33:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 778
|
py
|
#Embedded file name: /Users/versonator/Jenkins/live/Projects/AppLive/Resources/MIDI Remote Scripts/LPD8/__init__.py
from _Generic.GenericScript import GenericScript
import Live
from config import *
def create_instance(c_instance):
    """ The generic script can be customised by using parameters (see config.py). """
    # Both map modes are absolute; the control tables (DEVICE_CONTROLS, etc.)
    # come from the `config` star-import above.
    return GenericScript(c_instance, Live.MidiMap.MapMode.absolute, Live.MidiMap.MapMode.absolute, DEVICE_CONTROLS, TRANSPORT_CONTROLS, VOLUME_CONTROLS, TRACKARM_CONTROLS, BANK_CONTROLS, CONTROLLER_DESCRIPTION)
from _Framework.Capabilities import *
def get_capabilities():
    """Describe the LPD8 controller: USB vendor/product ids and MIDI port properties."""
    return {CONTROLLER_ID_KEY: controller_id(vendor_id=2536, product_ids=[117], model_name='LPD8'),
            PORTS_KEY: [inport(props=[NOTES_CC, REMOTE, SCRIPT]), outport(props=[SCRIPT])]}
|
[
"julien@julienbayle.net"
] |
julien@julienbayle.net
|
2a3e7dcabd45c50afa9aaec3c28922e67f7594b5
|
caceb60f71165772b6d6155f619e79189e7c80a9
|
/第一期/成都-MFC/task002/selflearn/advance02decorator/decorator/demo0.py
|
bcaf2abdb66537909eedbbbbfd2e83322255515b
|
[
"Apache-2.0"
] |
permissive
|
beidou9313/deeptest
|
ff41999bb3eb5081cdc8d7523587d7bc11be5fea
|
e046cdd35bd63e9430416ea6954b1aaef4bc50d5
|
refs/heads/master
| 2021-04-26T23:06:08.890071
| 2019-04-03T02:18:44
| 2019-04-03T02:18:44
| 123,931,080
| 0
| 0
|
Apache-2.0
| 2018-03-05T14:25:54
| 2018-03-05T14:25:53
| null |
UTF-8
|
Python
| false
| false
| 540
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'MFC'
__time__ = '18/1/24 15:14'
# (Chinese course notes below: "Python advanced 2 — generators, closures,
# decorators; decorator lesson 05, part 01".)
"""
01.python高级1
02.python高级2-生成器、闭包、装饰器
05-装饰器 01
"""
# example 1: a bare function name vs. an actual call
def foo():
    print('foo')
foo  # the bare name only references the function object (no call happens)
foo()  # the parentheses execute foo
# example 2: rebinding the name `foo`
def foo():
    print('foo')
foo = lambda x: x + 1  # `foo` now points to a different (anonymous) function
r = foo(3)  # runs the lambda, not the original foo — the name was rebound
print(r)
|
[
"moneyfromcat@foxmail.com"
] |
moneyfromcat@foxmail.com
|
cde54a09147bedf311abc8e1fe4d88900bc757e9
|
1fe8d4133981e53e88abf633046060b56fae883e
|
/venv/lib/python3.8/site-packages/scipy/optimize/tests/test_linprog.py
|
a245d4719d083c4e3e039a7d01fc03a1e56ecc28
|
[] |
no_license
|
Akira331/flask-cifar10
|
6c49db8485038731ce67d23f0972b9574746c7a7
|
283e7a2867c77d4b6aba7aea9013bf241d35d76c
|
refs/heads/master
| 2023-06-14T16:35:06.384755
| 2021-07-05T14:09:15
| 2021-07-05T14:09:15
| 382,864,970
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:f8b63b53bd829fbc07a8138950ebbc2e32d2b860e947b44b004584abe4170b6a
size 69667
|
[
"business030301@gmail.com"
] |
business030301@gmail.com
|
a06fc5c4de927107934a98e2926e54b640c22983
|
478de38a95c2729ee2ef8c77b1c5a81f23aedb59
|
/Programming-Algorithm/Palindrome Partitioning.py
|
a1578e415d692bf6775851f2802cd87fc34a869b
|
[] |
no_license
|
shuzhancnjx/leetcode-
|
0c711f720ef653ddff2af3af697a453122c28403
|
12093c92ef33707ad8ccdd59ad040c04cad1ee3b
|
refs/heads/master
| 2020-12-24T16:32:25.976747
| 2016-03-03T15:36:44
| 2016-03-03T15:36:44
| 37,101,621
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,381
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 16 14:08:57 2015
@author: ZSHU
"""
'''
1. the basic idea is to have a list 'temp' formed by each letter in the string, i.e., list(s)
2. combine the components of 'temp' when they are palindrome
3. 'pos' is used to record the center for determing the palindrome
'''
class Solution(object):
    def partition(self, s):
        """
        :type s: str
        :rtype: List[List[str]]

        Start from the list of single characters and recursively merge
        palindromic spans centred at each position, collecting every fully
        processed partition.
        """
        def explore(acc, parts, center):
            # Past the last position: `parts` is a finished partition.
            if center > len(parts) - 1:
                acc.append(parts)
                return
            # Odd-length palindromes centred at `center`.
            left, right = center - 1, center + 1
            while left >= 0 and right < len(parts):
                if parts[left] == parts[right]:
                    merged = parts[:left] + [''.join(parts[left:right + 1])] + parts[right + 1:]
                    explore(acc, merged, left + 1)
                    left -= 1
                    right += 1
                else:
                    break
            # Even-length palindromes centred between `center` and `center + 1`.
            left, right = center, center + 1
            while left >= 0 and right < len(parts):
                if parts[left] == parts[right]:
                    merged = parts[:left] + [''.join(parts[left:right + 1])] + parts[right + 1:]
                    explore(acc, merged, left + 1)
                    left -= 1
                    right += 1
                else:
                    break
            # No palindrome anchored here: advance the centre one step.
            explore(acc, parts, center + 1)

        results = []
        explore(results, list(s), 0)
        return results
|
[
"zshu@ets.org"
] |
zshu@ets.org
|
848614b9568aff2a658d60b2321afcea163f00e3
|
2d647ce5b0acf255b0713304acdb0f0c193df8fc
|
/doc/conf.py
|
d526459dffd408e28854f7bbfcccf8efeec1311c
|
[
"CC-BY-NC-4.0",
"MIT"
] |
permissive
|
tonybaloney/pycharm-security
|
a1ed540f47a0af4cd659c72643f63f800df01702
|
5c1ceeb1fb2a18478fa7076a81f9f47fd450e592
|
refs/heads/master
| 2023-08-17T20:09:22.924182
| 2023-08-16T02:12:57
| 2023-08-16T02:12:57
| 231,985,043
| 341
| 27
|
MIT
| 2023-09-14T19:03:27
| 2020-01-05T22:35:55
|
Kotlin
|
UTF-8
|
Python
| false
| false
| 2,438
|
py
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
project = 'PyCharm Python Security plugin'
copyright = '2020, Anthony Shaw'
author = 'Anthony Shaw'
# -- General configuration ---------------------------------------------------
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
    "sphinx.ext.githubpages",
    "sphinx_markdown_tables"
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# Accept both reStructuredText and Markdown source files.
source_suffix = [".rst", ".md"]
# NOTE(review): `source_parsers` was deprecated in Sphinx 1.8 and removed in
# Sphinx 3.0 (recommonmark is registered via `extensions` there) — confirm the
# Sphinx version this project pins.
source_parsers = {
    '.md': 'recommonmark.parser.CommonMarkParser',
}
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Alabaster options: branding, GitHub integration, and layout widths.
html_theme_options = {
    'logo': 'logo.png',
    'logo_name': True,
    'logo_text_align': "center",
    'github_user': 'tonybaloney',
    'github_repo': 'pycharm-security',
    'github_banner': True,
    'github_button': True,
    'fixed_sidebar': True,
    'sidebar_width': '330px',
    'page_width': '70%',
    'extra_nav_links': {
        'JetBrains Marketplace': "https://plugins.jetbrains.com/plugin/13609-python-security",
        "GitHub Marketplace": "https://github.com/marketplace/actions/pycharm-python-security-scanner",
        "Docker Hub": "https://hub.docker.com/r/anthonypjshaw/pycharm-security"
    },
    'show_powered_by': False
}
# Hide the default footer attribution lines.
html_show_copyright = False
html_show_sphinx = False
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_sidebars = {'**': ['about.html', 'navigation.html', 'searchbox.html'], }
master_doc = 'index'
|
[
"anthony.p.shaw@gmail.com"
] |
anthony.p.shaw@gmail.com
|
8247ddf7a63ae3cc954fc819bd2bfa9d3ed5c402
|
a46fc5187245f7ac79758ae475d4d865e24f482b
|
/33_search_in_rotated_array/search_in_rotated.py
|
18c24c01cea4ab774ce6a9b99cb74a3bf1bf59d2
|
[] |
no_license
|
narnat/leetcode
|
ae31f9321ac9a087244dddd64706780ea57ded91
|
20a48021be5e5348d681e910c843e734df98b596
|
refs/heads/master
| 2022-12-08T00:58:12.547227
| 2020-08-26T21:04:53
| 2020-08-26T21:04:53
| 257,167,879
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 861
|
py
|
#!/usr/bin/env python3
from typing import List
class Solution:
    def search(self, nums: List[int], target: int) -> int:
        """Find `target` in a rotated sorted array in O(log n).

        Phase 1 binary-searches for the rotation pivot (index of the
        smallest element); phase 2 picks the sorted half that could hold
        `target`; phase 3 is a plain binary search within that half.
        Returns the index of `target`, or -1 when absent.
        """
        left, right = 0, len(nums) - 1
        if not nums:
            return -1
        # Phase 1: locate the pivot (smallest element).
        while left < right:
            mid = left + (right - left) // 2
            if nums[mid] > nums[right]:
                left = mid + 1
            else:
                right = mid
        pivot = left
        # Phase 2: restrict the search range to the half containing target.
        left, right = 0, len(nums) - 1
        if nums[pivot] <= target <= nums[right]:
            left = pivot
        else:
            right = pivot
        # Phase 3: standard binary search in [left, right].
        while left <= right:
            mid = left + (right - left) // 2
            if nums[mid] == target:
                return mid
            if nums[mid] > target:
                right = mid - 1
            else:
                left = mid + 1
        return -1
|
[
"farruh1996@gmail.com"
] |
farruh1996@gmail.com
|
68ca9762fd012943155b4e292cef1c5cbd9fa5f5
|
6649efd4a95645938221eca58404db5663cd2491
|
/official/vision/dataloaders/tf_example_label_map_decoder_test.py
|
3ff9a8b3c14c70ea2289e9cf6783a02f93db5b72
|
[
"Apache-2.0"
] |
permissive
|
Dithn/models
|
8447866855959946358f2e5160b7d31aaafcfc98
|
36a140b8765eaa07525ac42a00cbd01a8b03b98e
|
refs/heads/master
| 2023-09-01T07:41:28.596877
| 2022-03-16T18:12:00
| 2022-03-16T18:13:23
| 228,201,096
| 1
| 0
|
Apache-2.0
| 2021-09-23T21:19:16
| 2019-12-15T14:52:24
|
Python
|
UTF-8
|
Python
| false
| false
| 7,746
|
py
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_example_label_map_decoder.py."""
import os
# Import libraries
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from official.vision.dataloaders import tf_example_label_map_decoder
from official.vision.dataloaders import tfexample_utils
LABEL_MAP_CSV_CONTENT = '0,class_0\n1,class_1\n2,class_2'
class TfExampleDecoderLabelMapTest(tf.test.TestCase, parameterized.TestCase):
  """Tests TfExampleDecoderLabelMap decoding of serialized detection examples."""

  @parameterized.parameters(
      (100, 100, 0),
      (100, 100, 1),
      (100, 100, 2),
      (100, 100, 0),
      (100, 100, 1),
      (100, 100, 2),
  )
  def test_result_shape(self, image_height, image_width, num_instances):
    """Checks that every decoded tensor has the expected shape."""
    # Write the id->name label map the decoder resolves class text against.
    label_map_dir = self.get_temp_dir()
    label_map_name = 'label_map.csv'
    label_map_path = os.path.join(label_map_dir, label_map_name)
    with open(label_map_path, 'w') as f:
      f.write(LABEL_MAP_CSV_CONTENT)
    decoder = tf_example_label_map_decoder.TfExampleDecoderLabelMap(
        label_map_path, include_mask=True)
    serialized_example = tfexample_utils.create_detection_test_example(
        image_height=image_height,
        image_width=image_width,
        image_channel=3,
        num_instances=num_instances).SerializeToString()
    decoded_tensors = decoder.decode(
        tf.convert_to_tensor(value=serialized_example))
    # Materialize the decoded tensors as numpy for the shape assertions.
    results = tf.nest.map_structure(lambda x: x.numpy(), decoded_tensors)
    self.assertAllEqual(
        (image_height, image_width, 3), results['image'].shape)
    self.assertEqual(tfexample_utils.DUMP_SOURCE_ID, results['source_id'])
    self.assertEqual(image_height, results['height'])
    self.assertEqual(image_width, results['width'])
    self.assertAllEqual(
        (num_instances,), results['groundtruth_classes'].shape)
    self.assertAllEqual(
        (num_instances,), results['groundtruth_is_crowd'].shape)
    self.assertAllEqual(
        (num_instances,), results['groundtruth_area'].shape)
    self.assertAllEqual(
        (num_instances, 4), results['groundtruth_boxes'].shape)
    self.assertAllEqual(
        (num_instances, image_height, image_width),
        results['groundtruth_instance_masks'].shape)
    self.assertAllEqual(
        (num_instances,), results['groundtruth_instance_masks_png'].shape)

  def test_result_content(self):
    """Checks decoded values against a hand-built two-instance example."""
    label_map_dir = self.get_temp_dir()
    label_map_name = 'label_map.csv'
    label_map_path = os.path.join(label_map_dir, label_map_name)
    with open(label_map_path, 'w') as f:
      f.write(LABEL_MAP_CSV_CONTENT)
    decoder = tf_example_label_map_decoder.TfExampleDecoderLabelMap(
        label_map_path, include_mask=True)
    # 4x4 image: black border around a 2x2 white center.
    image_content = [[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]],
                     [[0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0]],
                     [[0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0]],
                     [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]]
    image = tfexample_utils.encode_image(np.uint8(image_content), fmt='PNG')
    image_height = 4
    image_width = 4
    num_instances = 2
    # Normalized box coordinates for the two instances.
    xmins = [0, 0.25]
    xmaxs = [0.5, 1.0]
    ymins = [0, 0]
    ymaxs = [0.5, 1.0]
    # Class names; the label map resolves these to ids 2 and 0.
    labels = [b'class_2', b'class_0']
    areas = [
        0.25 * image_height * image_width, 0.75 * image_height * image_width
    ]
    is_crowds = [1, 0]
    # Per-instance binary masks, stored as PNG-encoded images below.
    mask_content = [[[255, 255, 0, 0],
                     [255, 255, 0, 0],
                     [0, 0, 0, 0],
                     [0, 0, 0, 0]],
                    [[0, 255, 255, 255],
                     [0, 255, 255, 255],
                     [0, 255, 255, 255],
                     [0, 255, 255, 255]]]
    masks = [
        tfexample_utils.encode_image(np.uint8(m), fmt='PNG')
        for m in list(mask_content)
    ]
    serialized_example = tf.train.Example(
        features=tf.train.Features(
            feature={
                'image/encoded': (tf.train.Feature(
                    bytes_list=tf.train.BytesList(value=[image]))),
                'image/source_id': (tf.train.Feature(
                    bytes_list=tf.train.BytesList(
                        value=[tfexample_utils.DUMP_SOURCE_ID]))),
                'image/height': (tf.train.Feature(
                    int64_list=tf.train.Int64List(value=[image_height]))),
                'image/width': (tf.train.Feature(
                    int64_list=tf.train.Int64List(value=[image_width]))),
                'image/object/bbox/xmin': (tf.train.Feature(
                    float_list=tf.train.FloatList(value=xmins))),
                'image/object/bbox/xmax': (tf.train.Feature(
                    float_list=tf.train.FloatList(value=xmaxs))),
                'image/object/bbox/ymin': (tf.train.Feature(
                    float_list=tf.train.FloatList(value=ymins))),
                'image/object/bbox/ymax': (tf.train.Feature(
                    float_list=tf.train.FloatList(value=ymaxs))),
                'image/object/class/text': (tf.train.Feature(
                    bytes_list=tf.train.BytesList(value=labels))),
                'image/object/is_crowd': (tf.train.Feature(
                    int64_list=tf.train.Int64List(value=is_crowds))),
                'image/object/area': (tf.train.Feature(
                    float_list=tf.train.FloatList(value=areas))),
                'image/object/mask': (tf.train.Feature(
                    bytes_list=tf.train.BytesList(value=masks))),
            })).SerializeToString()
    decoded_tensors = decoder.decode(
        tf.convert_to_tensor(value=serialized_example))
    results = tf.nest.map_structure(lambda x: x.numpy(), decoded_tensors)
    self.assertAllEqual(
        (image_height, image_width, 3), results['image'].shape)
    self.assertAllEqual(image_content, results['image'])
    self.assertEqual(tfexample_utils.DUMP_SOURCE_ID, results['source_id'])
    self.assertEqual(image_height, results['height'])
    self.assertEqual(image_width, results['width'])
    self.assertAllEqual(
        (num_instances,), results['groundtruth_classes'].shape)
    self.assertAllEqual(
        (num_instances,), results['groundtruth_is_crowd'].shape)
    self.assertAllEqual(
        (num_instances,), results['groundtruth_area'].shape)
    self.assertAllEqual(
        (num_instances, 4), results['groundtruth_boxes'].shape)
    self.assertAllEqual(
        (num_instances, image_height, image_width),
        results['groundtruth_instance_masks'].shape)
    self.assertAllEqual(
        (num_instances,), results['groundtruth_instance_masks_png'].shape)
    # b'class_2' -> 2 and b'class_0' -> 0 per LABEL_MAP_CSV_CONTENT.
    self.assertAllEqual(
        [2, 0], results['groundtruth_classes'])
    self.assertAllEqual(
        [True, False], results['groundtruth_is_crowd'])
    self.assertNDArrayNear(
        [0.25 * image_height * image_width, 0.75 * image_height * image_width],
        results['groundtruth_area'], 1e-4)
    # Boxes come back as [ymin, xmin, ymax, xmax].
    self.assertNDArrayNear(
        [[0, 0, 0.5, 0.5], [0, 0.25, 1.0, 1.0]],
        results['groundtruth_boxes'], 1e-4)
    self.assertNDArrayNear(
        mask_content, results['groundtruth_instance_masks'], 1e-4)
    self.assertAllEqual(
        masks, results['groundtruth_instance_masks_png'])
if __name__ == '__main__':
tf.test.main()
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
d0421b44a25e1116f523a219dbfed2dc534f5518
|
f5485d955fa942711f337286a86f05374ea58a72
|
/migrations/versions/cbe0f844650d_.py
|
95626d4f1467e756a549807f7e637093f64f7ef7
|
[] |
no_license
|
bingfengjiyu/flask_demo
|
f6245d7e8696b3bc492ed3f922948bd01319be30
|
3feef9fcee6e3c8f8fae46fb0fb5f5a8bdb82f4d
|
refs/heads/master
| 2020-03-24T11:04:03.012486
| 2018-07-28T12:16:01
| 2018-07-28T12:16:01
| 142,674,753
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 646
|
py
|
"""empty message
Revision ID: cbe0f844650d
Revises:
Create Date: 2018-07-07 12:10:18.303153
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cbe0f844650d'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Apply this revision: add a nullable 'email' column to tbl_authors."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tbl_authors', sa.Column('email', sa.String(length=64), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert this revision: drop the 'email' column from tbl_authors."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('tbl_authors', 'email')
    # ### end Alembic commands ###
|
[
"xwp_fullstack@163.com"
] |
xwp_fullstack@163.com
|
7ae7198fa354aec4c1775a03d319da0d45d323ea
|
0f07107b016d2aee64788966b9f0d322ac46b998
|
/moya/testprojects/scratch/blog/py/test.py
|
2b109b36a4486a66034ed4b3c5ad99d295a37b25
|
[
"MIT"
] |
permissive
|
fkztw/moya
|
35f48cdc5d5723b04c671947099b0b1af1c7cc7a
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
refs/heads/master
| 2023-08-09T09:20:21.968908
| 2019-02-03T18:18:54
| 2019-02-03T18:18:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 132
|
py
|
from __future__ import print_function
import moya
@moya.expose.macro("test")
def test():
    """Macro exposed to Moya as "test": print a success marker, return 10."""
    message = "Success! :-)"
    print(message)
    return 10
|
[
"willmcgugan@gmail.com"
] |
willmcgugan@gmail.com
|
c49f884ff1e502534a0cbabe7633c2134b201d34
|
b391498124fdcaef989bf3ebafffb0df43e3e07f
|
/pygccxml-0.8.2/unittests/declarations_cache_tester.py
|
10112018fdfee5e022b785ec191dc8df82e502cb
|
[
"BSL-1.0"
] |
permissive
|
glehmann/WrapITK-unstable
|
9a0dd9d387ecd59c9439465dcc32cca552e14576
|
402fc668f1f3c3dd57d0751a61efa3b1625d238b
|
refs/heads/master
| 2021-01-10T22:02:04.715926
| 2008-05-25T16:53:07
| 2008-05-25T16:53:07
| 3,272,767
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,319
|
py
|
# Copyright 2004 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os, sys, unittest, os.path
import autoconfig
import pygccxml.parser
from pygccxml.parser.config import config_t
from pygccxml.parser.declarations_cache import *
class decl_cache_tester(unittest.TestCase):
    """Tests for file signatures, configuration signatures and file_cache_t.

    Fix: the deprecated TestCase.assert_ alias (removed in Python 3.12)
    is replaced with assertTrue throughout; behavior is unchanged.
    """
    def __init__(self, *args ):
        unittest.TestCase.__init__(self, *args)
        # Cache files are written under build_dir; make sure it exists.
        if not os.path.exists( autoconfig.build_dir ):
            os.makedirs( autoconfig.build_dir )

    def test_file_signature(self):
        """Equal file contents give equal signatures; different contents differ."""
        file1 = os.path.join(autoconfig.data_directory, 'decl_cache_file1.txt')
        file1_dup = os.path.join(autoconfig.data_directory, 'decl_cache_file1_duplicate.txt')
        file2 = os.path.join(autoconfig.data_directory, 'decl_cache_file2.txt')
        sig1 = file_signature(file1)
        sig1_dup = file_signature(file1_dup)
        sig2 = file_signature(file2)
        self.assertTrue(sig1 == sig1_dup)
        self.assertTrue(sig1 != sig2)

    def test_config_signature(self):
        """Signature changes for meaningful config edits, not cosmetic ones."""
        diff_cfg_list = self.build_differing_cfg_list()
        def_cfg = diff_cfg_list[0]
        def_sig = configuration_signature(def_cfg)
        # Test changes that should cause sig changes
        for cfg in diff_cfg_list[1:]:
            self.assertTrue(configuration_signature(cfg) != def_sig)
        # Test changes that should not cause sig changes
        no_changes = def_cfg.clone()
        self.assertTrue(configuration_signature(no_changes) == def_sig)
        #start_decls_changed = def_cfg.clone()
        #start_decls_changed.start_with_declarations = "test object"
        #self.assert_(configuration_signature(start_decls_changed) == def_sig)
        ignore_changed = def_cfg.clone()
        ignore_changed.ignore_gccxml_output = True
        self.assertTrue(configuration_signature(ignore_changed) == def_sig)

    def test_cache_interface(self):
        """Exercises update/cached_value/flush, including the cull-on-flush path."""
        cache_file = os.path.join(autoconfig.build_dir, 'decl_cache_test.test_cache_read.cache')
        file1 = os.path.join(autoconfig.data_directory, 'decl_cache_file1.txt')
        file1_dup = os.path.join(autoconfig.data_directory, 'decl_cache_file1_duplicate.txt')
        file2 = os.path.join(autoconfig.data_directory, 'decl_cache_file2.txt')
        diff_cfg_list = self.build_differing_cfg_list()
        def_cfg = diff_cfg_list[0]
        if os.path.exists(cache_file):
            os.remove(cache_file)
        cache = file_cache_t(cache_file)
        self.assertTrue(len(cache._file_cache_t__cache) == 0)
        # test creating new entries for differing files
        cache.update(file1, def_cfg, 1,[])
        self.assertTrue(len(cache._file_cache_t__cache) == 1)
        cache.update(file1_dup, def_cfg, 2,[])
        self.assertTrue(len(cache._file_cache_t__cache) == 1)
        cache.update(file2, def_cfg, 3,[])
        self.assertTrue(len(cache._file_cache_t__cache) == 2)
        self.assertTrue(cache.cached_value(file1,def_cfg) == 2)
        self.assertTrue(cache.cached_value(file2,def_cfg) == 3)
        # Test reading again
        cache.flush()
        cache = file_cache_t(cache_file)
        self.assertTrue(len(cache._file_cache_t__cache) == 2)
        self.assertTrue(cache.cached_value(file1,def_cfg) == 2)
        self.assertTrue(cache.cached_value(file2,def_cfg) == 3)
        # Test flushing doesn't happen if we don't touch the cache
        cache = file_cache_t(cache_file)
        self.assertTrue(cache.cached_value(file1,def_cfg) == 2)  # Read from cache
        cache.flush()   # should not actually flush
        cache = file_cache_t(cache_file)
        self.assertTrue(len(cache._file_cache_t__cache) == 2)
        # Test flush culling
        cache = file_cache_t(cache_file)
        cache.update(file1_dup, def_cfg, 4,[])   # Modify cache
        cache.flush()   # should cull off one entry
        cache = file_cache_t(cache_file)
        self.assertTrue(len(cache._file_cache_t__cache) == 1)

    def build_differing_cfg_list(self):
        """ Return a list of configurations that all differ. """
        cfg_list = []
        def_cfg = config_t("gccxml_path",'.',['tmp'],['sym'],['unsym'],
                           None,False,"")
        cfg_list.append(def_cfg)
        # Test changes that should cause sig changes
        gccxml_changed = def_cfg.clone()
        gccxml_changed.gccxml_path = "other_path"
        cfg_list.append(gccxml_changed)
        wd_changed = def_cfg.clone()
        wd_changed.working_directory = "other_dir"
        cfg_list.append(wd_changed)
        # NOTE(review): fresh config_t objects are built below instead of
        # mutating clones (see the retained commented-out attempts) --
        # presumably mutating a clone did not affect the signature; confirm.
        #inc_changed = def_cfg.clone()
        #inc_changed.include_paths = ["/var/tmp"]
        #self.assert_(configuration_signature(inc_changed) != def_sig)
        inc_changed = config_t("gccxml_path",'.',['/var/tmp'],['sym'],['unsym'],
                               None,False,"")
        cfg_list.append(inc_changed)
        #def_changed = def_cfg.clone()
        #def_changed.define_symbols = ["symbol"]
        #self.assert_(configuration_signature(def_changed) != def_sig)
        def_changed = config_t("gccxml_path",'.',['/var/tmp'],['new-sym'],['unsym'],
                               None,False,"")
        cfg_list.append(def_changed)
        #undef_changed = def_cfg.clone()
        #undef_changed.undefine_symbols = ["symbol"]
        #self.assert_(configuration_signature(undef_changed) != def_sig)
        undef_changed = config_t("gccxml_path",'.',['/var/tmp'],['sym'],['new-unsym'],
                                 None,False,"")
        cfg_list.append(undef_changed)
        cflags_changed = def_cfg.clone()
        cflags_changed.cflags = "new flags"
        cfg_list.append(cflags_changed)
        return cfg_list
def create_suite():
    """Build the test suite for this module."""
    suite = unittest.TestSuite()
    # unittest.makeSuite was deprecated (and removed in Python 3.13);
    # TestLoader.loadTestsFromTestCase is the supported equivalent.
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(decl_cache_tester))
    return suite
def run_suite():
    """Run this module's suite with a verbose text runner."""
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(create_suite())
if __name__ == "__main__":
run_suite()
|
[
"gaetan.lehmann@jouy.inra.fr"
] |
gaetan.lehmann@jouy.inra.fr
|
25be6302bd9150151560453a17906af226789f01
|
904b0d81152649ccd3349f94f88e7b89a7b5c76a
|
/scripts/main/xicombNS_DA02.py
|
e2f17c6818b725c4127a2e6be411fb79ee8c98bd
|
[
"BSD-3-Clause"
] |
permissive
|
desihub/LSS
|
ec33538a0e7280ad1c6b257368cc009ed4b39cbb
|
5645461929172d327ed30389d76e7e887043c9bf
|
refs/heads/main
| 2023-08-18T23:17:13.123605
| 2023-08-18T20:08:22
| 2023-08-18T20:08:22
| 36,753,969
| 14
| 28
|
BSD-3-Clause
| 2023-09-13T18:37:35
| 2015-06-02T18:42:51
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,710
|
py
|
#!/usr/bin/env python
# coding: utf-8
import os
import argparse
import logging
import numpy as np
from astropy.table import Table, vstack
from matplotlib import pyplot as plt
from pycorr import TwoPointCorrelationFunction, TwoPointEstimator, KMeansSubsampler, utils, setup_logging
njack = '60'
trs = ['ELG_LOPnotqso','QSO','LRG','BGS_BRIGHT','QSO_ELG_LOPnotqso','LRG_QSO','LRG_ELG_LOPnotqso']
bsl = [1,2,4,5,10]
dirxi = '/global/cfs/cdirs/desi/survey/catalogs/DA02/LSS/guadalupe/LSScats/test/xi/smu/'
xit = 'poles'

# Redshift ranges to process, per tracer type.
zw_by_tracer = {
    'ELG_LOPnotqso': ['0.8_1.6', '0.8_1.1', '1.1_1.6'],
    'QSO_ELG_LOPnotqso': ['0.8_1.6', '0.8_1.1', '1.1_1.6'],
    'QSO': ['0.8_1.1', '0.8_2.1lowz', '1.1_1.6', '1.6_2.1', '2.1_3.5', '0.8_3.5'],
    'LRG': ['0.4_0.6', '0.6_0.8', '0.8_1.1', '0.4_1.1'],
    'BGS_BRIGHT': ['0.1_0.3', '0.3_0.5', '0.1_0.5'],
    'LRG_QSO': ['0.8_1.1'],
    'LRG_ELG_LOPnotqso': ['0.8_1.1'],
}

for tr in trs:
    for zw in zw_by_tracer[tr]:
        # Combine the normalized North and South pair counts into one result.
        result_N = TwoPointCorrelationFunction.load(dirxi+'allcounts_'+tr+'_N_'+zw+'_default_FKP_lin_njack'+njack+'.npy')
        result_S = TwoPointCorrelationFunction.load(dirxi+'allcounts_'+tr+'_S_'+zw+'_default_FKP_lin_njack'+njack+'.npy')
        result_NS = result_N.normalize() + result_S.normalize()
        fn = dirxi+'allcounts_'+tr+'_NScomb_'+zw+'_default_FKP_lin_njack'+njack+'.npy'
        result_NS.save(fn)
        # Export rebinned multipoles (ell = 0, 2, 4) for each bin size.
        for bs in bsl:
            rebinned = result_NS[:(result_NS.shape[0]//bs)*bs:bs]
            fn_txt = dirxi+'xi'+xit+'_'+tr+'_NScomb_'+zw+'_default_FKP_lin'+str(bs)+'_njack'+njack+'.txt'
            rebinned.save_txt(fn_txt, ells=(0, 2, 4))
|
[
"ashley.jacob.ross@gmail.com"
] |
ashley.jacob.ross@gmail.com
|
c15d4b3566815e61f9fc93ba6b974c34f358c170
|
e2ad93398194942c13c27b25aa868eda4ff4f46c
|
/sponsortracker/download.py
|
987305d12d281d57363d875f7b84883ec45f917e
|
[] |
no_license
|
Auzzy/bfig-sponsor-tracker
|
ab2fbcf6ba562f977263b5f91c3aca756e037302
|
cff466de6797ea276130335bdc368c706eed583d
|
refs/heads/master
| 2023-06-01T04:41:26.082120
| 2023-05-11T11:52:11
| 2023-05-11T11:52:11
| 25,780,177
| 0
| 0
| null | 2023-05-11T11:52:12
| 2014-10-26T16:14:46
|
Python
|
UTF-8
|
Python
| false
| false
| 2,989
|
py
|
import collections
import os
import shutil
import tempfile
from enum import Enum
from os.path import exists, expanduser, join, splitext
from sqlalchemy import or_
from sponsortracker import model, uploads
from sponsortracker.data import AssetType, Level
ZIPNAME = "sponsortracker-assets"
def all(level=None):
    """Download every sponsor asset, optionally limited to one level.

    NOTE: this shadows the builtin all() within this module.
    """
    return download(level=level)
def website_updates(start, level=None):
    """Download logo assets dated on/after *start*, zipped as 'updates'."""
    def recent_logos(deal):
        # Keep only logos whose date is on or after the cutoff.
        return [asset for asset in deal.assets_by_type[AssetType.LOGO] if asset.date >= start]
    return download('updates', asset_filter=recent_logos, level=level)
def logo_cloud(level=None):
    """Download all logos into a flat archive (no per-sponsor dirs, no info files)."""
    def logos(deal):
        return deal.assets_by_type[AssetType.LOGO]
    return download('logocloud', by_sponsor=False, info=False, asset_filter=logos, level=level)
def download(zipname=ZIPNAME, by_sponsor=True, info=True, asset_filter=lambda deal: deal.assets, level=None):
    """Assemble selected deal assets into a zip in the user's home directory.

    Deals are included when they have a qualifying level or received
    paperwork; *level* further restricts to one level name. Returns the
    path of the created archive.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        zipdir = join(tempdir, zipname)
        os.makedirs(zipdir)
        for deal in model.Deal.query.filter(model.Deal.level_name != ""):
            has_paperwork = deal.contract.received != None or deal.invoice.received != None
            qualifying_level = deal.level in (Level.SERVICE, Level.BRONZE, Level.BRONZE_BENEFITS, Level.SILVER, Level.GOLD, Level.PLATINUM)
            if not (qualifying_level or has_paperwork):
                continue
            if level and deal.level_name != level:
                continue
            # Layout: <level>/ or <level>/<sponsor>/ depending on by_sponsor.
            parts = [zipdir, deal.level.name.lower()]
            if by_sponsor:
                parts.append(deal.sponsor.name)
            target = join(*parts)
            os.makedirs(target, exist_ok=True)
            if info:
                _info_to_file(target, deal.sponsor)
            _copy_assets(target, asset_filter(deal))
        return shutil.make_archive(expanduser(join("~", zipname)), "zip", root_dir=tempdir)
def _info_to_file(target, sponsor):
if sponsor.link or sponsor.description:
with open(join(target, "info.txt"), 'w') as info_file:
if sponsor.link:
info_file.write(sponsor.link + "\n\n")
if sponsor.description:
info_file.write(sponsor.description)
def _copy_assets(target, assets):
    """Copy each asset into *target* named '<sponsor>-<assettype>.<ext>'."""
    for asset in assets:
        stem = '-'.join([asset.deal.sponsor.name.lower(), asset.type.name.lower()])
        extension = splitext(asset.filename)[-1].lstrip('.')
        destination = os.path.join(target, "{name}.{ext}".format(name=stem, ext=extension))
        uploads.Asset.get(asset.deal, asset.filename, destination)
'''
path = asset_uploader.path(asset.filename)
ext = splitext(asset.filename)[-1].lstrip('.')
name = '-'.join([asset.sponsor.name.lower(), asset.type.name.lower()])
shutil.copy(path, _filepath(target, name, ext))
'''
'''
def _filepath(target, basename, ext):
num = 2
name = "{name}.{ext}".format(name=basename, ext=ext)
while exists(join(target, name)):
name = "{name}_{num}.{ext}".format(name=basename, num=num, ext=ext)
num += 1
return join(target, name)
'''
|
[
"metalnut4@netscape.net"
] |
metalnut4@netscape.net
|
11d173e2e009317f099e646a9d101c71ae82a9b9
|
519b4cf7623c40e0280c435246b6cde46853ecc1
|
/project/holviapp/utils.py
|
d731848e8107fe128a56275bbd33d4b1a41ef18a
|
[
"MIT"
] |
permissive
|
HelsinkiHacklab/asylum
|
a3fe492f76145c922125949c41acce6e8d4beec4
|
6fcf71fb5c7bb894322039144e814b9edc07d5bb
|
refs/heads/hhl_changes
| 2023-02-16T18:54:17.277017
| 2021-08-13T17:59:46
| 2021-09-13T17:45:45
| 47,038,401
| 1
| 1
|
MIT
| 2023-02-03T13:22:48
| 2015-11-28T20:28:58
|
Python
|
UTF-8
|
Python
| false
| false
| 2,574
|
py
|
# -*- coding: utf-8 -*-
import holviapi
import holvirc
from django.conf import settings
CONNECTION_SINGLETON = None
def apikey_configured():
    """Return True when both a Holvi pool and an API key are configured."""
    return bool(settings.HOLVI_POOL and settings.HOLVI_APIKEY)
def userauth_configured():
    """Return True when a Holvi pool plus username/password are configured."""
    return bool(settings.HOLVI_POOL and settings.HOLVI_USER and settings.HOLVI_PASSWORD)
def api_configured():
    """Return True when at least one authentication method is configured."""
    if apikey_configured():
        return True
    return userauth_configured()
def get_connection():
    """Return the shared Holvi connection, creating it on first use.

    A username/password connection is created first, but a configured
    API key takes precedence when both methods are available.
    """
    global CONNECTION_SINGLETON
    if CONNECTION_SINGLETON is None:
        if not api_configured():
            raise RuntimeError('Holvi API is not configured')
        if userauth_configured():
            CONNECTION_SINGLETON = holvirc.Connection.singleton(settings.HOLVI_POOL, settings.HOLVI_USER, settings.HOLVI_PASSWORD)
        if apikey_configured():
            CONNECTION_SINGLETON = holviapi.Connection.singleton(settings.HOLVI_POOL, settings.HOLVI_APIKEY)
    return CONNECTION_SINGLETON
def get_invoiceapi():
    """Return a holvirc InvoiceAPI bound to the shared connection."""
    return holvirc.InvoiceAPI(get_connection())
def list_invoices(**kwargs):
    """List invoices; keyword arguments are passed through to the API."""
    return get_invoiceapi().list_invoices(**kwargs)
def get_invoice(code):
    """Fetch a single invoice by its Holvi code."""
    return get_invoiceapi().get_invoice(code)
def get_checkoutapi():
    """Return a holviapi CheckoutAPI; raises on new-style (holvirc) connections."""
    cnc = get_connection()
    is_new_style = isinstance(cnc, (holvirc.Connection, holvirc.connection.Connection))
    if is_new_style:
        raise RuntimeError("This only works with the old style api keys")
    return holviapi.CheckoutAPI(cnc)
def list_orders(**kwargs):
    """List checkout orders; returns an empty iterator under new-style auth."""
    cnc = get_connection()
    is_new_style = isinstance(cnc, (holvirc.Connection, holvirc.connection.Connection))
    if is_new_style:
        # TODO: Log the issue
        return iter([])
    return get_checkoutapi().list_orders(**kwargs)
def get_order(code):
    """Fetch a single checkout order by its Holvi code.

    NOTE(review): unlike list_orders this does not guard against new-style
    connections, so get_checkoutapi() may raise RuntimeError -- confirm
    this difference is intentional.
    """
    return get_checkoutapi().get_order(code)
def get_categoriesapi():
    """Return the CategoriesAPI variant matching the active connection type."""
    cnc = get_connection()
    if isinstance(cnc, (holviapi.Connection, holviapi.connection.Connection)):
        return holviapi.CategoriesAPI(get_connection())
    return holvirc.CategoriesAPI(cnc)
def get_category(code):
    """Fetch a single category by its Holvi code."""
    return get_categoriesapi().get_category(code)
|
[
"eero.afheurlin@iki.fi"
] |
eero.afheurlin@iki.fi
|
fe8087783d56301fddb861866779ab604a5e83f6
|
4e5cdffa14c8404d836dc9f034cbbf34a86c7503
|
/src/api/urls.py
|
9113ef4c4d5780858faf92eee0a13749d97d0775
|
[] |
no_license
|
gtdata/publish_data_alpha
|
da1cf013a5b2c7290d0af7a48d43adc90e301f3f
|
f1a9753daac7fbe8cc5bed5f30b8601c781449ab
|
refs/heads/master
| 2021-01-20T03:03:36.303264
| 2017-04-18T11:49:29
| 2017-04-18T11:49:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 681
|
py
|
from django.conf.urls import url, include
from rest_framework import routers
import api.views as v
import api.api_def as a
urlpatterns = [
    # NOTE(review): no leading '^' here, unlike the other patterns, so this
    # matches anywhere a path ends in 'locations' -- confirm intentional.
    url(r'locations$', v.gazeteer_lookup, name='gazeteer_lookup'),
    url(r'^datasets$', v.dataset_lookup, name='dataset_lookup'),
    # No trailing '$': matches any path beginning with 'status'.
    url(r'^status', v.StatusEndpoint.as_view()),
    #url(r'^1/', include(router.urls)),
    # Versioned (v1) REST API endpoints.
    url(r'^1/datasets$', a.DatasetList.as_view()),
    url(r'^1/datasets/(?P<name>[\w-]+)$', a.DatasetDetail.as_view(), name='dataset-detail'),
    url(r'^1/organisations$', a.OrganisationList.as_view()),
    url(r'^1/organisations/(?P<name>[\w-]+)$', a.OrganisationDetail.as_view(), name='organisation-detail'),
]
|
[
"ross@servercode.co.uk"
] |
ross@servercode.co.uk
|
ed33a50324b7e3a5eecebca1a2b58fcd87538545
|
97fde6e1ee2c63d4359a005a8a17db87559dd3eb
|
/api/models.py
|
44c72a280c9cf023ae7be9d686aecd138860d6d2
|
[] |
no_license
|
Bibin22/Book_project
|
6f6d0cce452e0298d16676425eeb2f77e915c3e5
|
9884363927e6b3b559d43a6ead584f1741b54370
|
refs/heads/master
| 2023-03-31T21:36:02.594431
| 2021-03-24T07:15:46
| 2021-03-24T07:15:46
| 350,402,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 311
|
py
|
from django.db import models
# Create your models here.
class Book(models.Model):
    """A book record: name, price, page count and author."""

    # Fix: `unique` expects a boolean; the original passed `unique=100`,
    # which (being truthy) already behaved as unique=True -- made explicit.
    book_name = models.CharField(max_length=120, unique=True)
    price = models.IntegerField()
    pages = models.IntegerField()
    author = models.CharField(max_length=100)

    def __str__(self):
        return self.book_name
|
[
"bibinjoy82@gmail.com"
] |
bibinjoy82@gmail.com
|
4d64347ffa03e103de068e66bc946d0999ccfce2
|
436051d199fcc323a422b7fea377f43c01004366
|
/helpers/labml_helpers/metrics/simple_state.py
|
2a6575141ce8871da42ebc4cbb7b596e62453fdb
|
[
"MIT"
] |
permissive
|
xet7/labml
|
29d411b94f1d6b9ff03c6033f510cea443d38248
|
7f3918ca7de8cb21cf6dcc9d127a6ea64c0aebb9
|
refs/heads/master
| 2023-08-18T10:03:13.142430
| 2021-07-18T11:11:42
| 2021-07-18T11:11:42
| 387,184,226
| 0
| 0
|
MIT
| 2023-08-11T20:01:39
| 2021-07-18T13:36:49
| null |
UTF-8
|
Python
| false
| false
| 813
|
py
|
from typing import Generic, TypeVar, Optional
from . import StateModule
T = TypeVar('T')


class SimpleState(Generic[T]):
    """A minimal holder for one optional value of type T."""

    # Current value; None means "unset".
    state: Optional[T]

    def __init__(self):
        self.state = None

    def set(self, data: T):
        """Store *data* as the current value."""
        self.state = data

    def get(self) -> T:
        """Return the current value (None when unset)."""
        return self.state

    def reset(self):
        """Clear the current value back to None."""
        self.state = None
class SimpleStateModule(StateModule, Generic[T]):
    """StateModule wrapper delegating to a SimpleState that is reset each epoch."""

    # Backing store; the framework injects it via set_state(create_state()).
    data: SimpleState[T]
    def __init__(self):
        super().__init__()
    def set(self, data: T):
        """Store *data* in the underlying SimpleState."""
        self.data.set(data)
    def get(self) -> T:
        """Return the currently stored value."""
        return self.data.get()
    def create_state(self) -> SimpleState[T]:
        """Build a fresh, empty SimpleState."""
        return SimpleState()
    def set_state(self, data: SimpleState[T]):
        """Adopt *data* as this module's backing state."""
        self.data = data
    def on_epoch_start(self):
        # Every epoch starts from a cleared state.
        self.data.reset()
    def on_epoch_end(self):
        # Nothing to finalize at epoch end.
        pass
|
[
"vpjayasiri@gmail.com"
] |
vpjayasiri@gmail.com
|
91a64805557e29b680b1300121cddd217db78eef
|
e42a61b7be7ec3412e5cea0ffe9f6e9f34d4bf8d
|
/a10sdk/core/aam/aam_authentication_portal_logon_fail.py
|
60ee400259987e360a04045658049f18c4e87e9f
|
[
"Apache-2.0"
] |
permissive
|
amwelch/a10sdk-python
|
4179565afdc76cdec3601c2715a79479b3225aef
|
3e6d88c65bd1a2bf63917d14be58d782e06814e6
|
refs/heads/master
| 2021-01-20T23:17:07.270210
| 2015-08-13T17:53:23
| 2015-08-13T17:53:23
| 40,673,499
| 0
| 0
| null | 2015-08-13T17:51:35
| 2015-08-13T17:51:34
| null |
UTF-8
|
Python
| false
| false
| 8,059
|
py
|
from a10sdk.common.A10BaseClass import A10BaseClass
class FailMsgCfg(A10BaseClass):
    """This class does not support CRUD Operations please use parent.
    :param fail_font_custom: {"description": "Specify custom font", "format": "string-rlx", "minLength": 1, "maxLength": 63, "not": "fail-face", "type": "string"}
    :param fail_color: {"default": 0, "type": "number", "description": "Specify font color", "format": "flag"}
    :param fail_size: {"description": "Specify font size", "minimum": 1, "type": "number", "maximum": 7, "format": "number"}
    :param fail_msg: {"default": 0, "type": "number", "description": "Configure logon failure message in default logon fail page", "format": "flag"}
    :param fail_text: {"minLength": 1, "maxLength": 63, "type": "string", "description": "Specify logon failure message (Default: Login Failed!!)", "format": "string-rlx"}
    :param fail_color_value: {"description": "Specify 6-digit HEX color value", "format": "string", "minLength": 6, "maxLength": 6, "not": "fail-color-name", "type": "string"}
    :param fail_font: {"default": 0, "type": "number", "description": "Sepcify font", "format": "flag"}
    :param fail_color_name: {"not": "fail-color-value", "enum": ["aqua", "black", "blue", "fuchsia", "gray", "green", "lime", "maroon", "navy", "olive", "orange", "purple", "red", "silver", "teal", "white", "yellow"], "type": "string", "description": "'aqua': aqua; 'black': black; 'blue': blue; 'fuchsia': fuchsia; 'gray': gray; 'green': green; 'lime': lime; 'maroon': maroon; 'navy': navy; 'olive': olive; 'orange': orange; 'purple': purple; 'red': red; 'silver': silver; 'teal': teal; 'white': white; 'yellow': yellow; ", "format": "enum"}
    :param fail_face: {"not": "fail-font-custom", "enum": ["Arial", "Courier_New", "Georgia", "Times_New_Roman", "Verdana"], "type": "string", "description": "'Arial': Arial; 'Courier_New': Courier New; 'Georgia': Georgia; 'Times_New_Roman': Times New Roman; 'Verdana': Verdana; ", "format": "enum"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
    """
    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        # Key under which this sub-object is serialized in the parent payload.
        self.b_key = "fail-msg-cfg"
        self.DeviceProxy = ""
        # Attribute defaults; all empty until set by the caller or device.
        self.fail_font_custom = ""
        self.fail_color = ""
        self.fail_size = ""
        self.fail_msg = ""
        self.fail_text = ""
        self.fail_color_value = ""
        self.fail_font = ""
        self.fail_color_name = ""
        self.fail_face = ""
        # Apply any caller-supplied attribute overrides.
        for keys, value in kwargs.items():
            setattr(self,keys, value)
class TitleCfg(A10BaseClass):
    """This class does not support CRUD Operations please use parent.
    :param title: {"default": 0, "type": "number", "description": "Configure title in default logon fail page", "format": "flag"}
    :param title_color: {"default": 0, "type": "number", "description": "Specify font color", "format": "flag"}
    :param title_color_name: {"not": "title-color-value", "enum": ["aqua", "black", "blue", "fuchsia", "gray", "green", "lime", "maroon", "navy", "olive", "orange", "purple", "red", "silver", "teal", "white", "yellow"], "type": "string", "description": "'aqua': aqua; 'black': black; 'blue': blue; 'fuchsia': fuchsia; 'gray': gray; 'green': green; 'lime': lime; 'maroon': maroon; 'navy': navy; 'olive': olive; 'orange': orange; 'purple': purple; 'red': red; 'silver': silver; 'teal': teal; 'white': white; 'yellow': yellow; ", "format": "enum"}
    :param title_font_custom: {"description": "Specify custom font", "format": "string-rlx", "minLength": 1, "maxLength": 63, "not": "title-face", "type": "string"}
    :param title_face: {"not": "title-font-custom", "enum": ["Arial", "Courier_New", "Georgia", "Times_New_Roman", "Verdana"], "type": "string", "description": "'Arial': Arial; 'Courier_New': Courier New; 'Georgia': Georgia; 'Times_New_Roman': Times New Roman; 'Verdana': Verdana; ", "format": "enum"}
    :param title_color_value: {"description": "Specify 6-digit HEX color value", "format": "string", "minLength": 6, "maxLength": 6, "not": "title-color-name", "type": "string"}
    :param title_size: {"description": "Specify font size", "minimum": 1, "type": "number", "maximum": 7, "format": "number"}
    :param title_text: {"minLength": 1, "maxLength": 63, "type": "string", "description": "Specify title (Default: Try Too Many Times)", "format": "string-rlx"}
    :param title_font: {"default": 0, "type": "number", "description": "Sepcify font", "format": "flag"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
    """
    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        # Key under which this sub-object is serialized in the parent payload.
        self.b_key = "title-cfg"
        self.DeviceProxy = ""
        # Attribute defaults; all empty until set by the caller or device.
        self.title = ""
        self.title_color = ""
        self.title_color_name = ""
        self.title_font_custom = ""
        self.title_face = ""
        self.title_color_value = ""
        self.title_size = ""
        self.title_text = ""
        self.title_font = ""
        # Apply any caller-supplied attribute overrides.
        for keys, value in kwargs.items():
            setattr(self,keys, value)
class Background(A10BaseClass):
    """This class does not support CRUD Operations please use parent.
    :param bgfile: {"description": "Specify background image filename", "format": "string-rlx", "minLength": 1, "maxLength": 63, "not": "bgcolor", "type": "string"}
    :param bgstyle: {"enum": ["tile", "stretch", "fit"], "type": "string", "description": "'tile': Tile; 'stretch': Stretch; 'fit': Fit; ", "format": "enum"}
    :param bgcolor_value: {"description": "Specify 6-digit HEX color value", "format": "string", "minLength": 6, "maxLength": 6, "not": "bgcolor-name", "type": "string"}
    :param bgcolor_name: {"not": "bgcolor-value", "enum": ["aqua", "black", "blue", "fuchsia", "gray", "green", "lime", "maroon", "navy", "olive", "orange", "purple", "red", "silver", "teal", "white", "yellow"], "type": "string", "description": "'aqua': aqua; 'black': black; 'blue': blue; 'fuchsia': fuchsia; 'gray': gray; 'green': green; 'lime': lime; 'maroon': maroon; 'navy': navy; 'olive': olive; 'orange': orange; 'purple': purple; 'red': red; 'silver': silver; 'teal': teal; 'white': white; 'yellow': yellow; ", "format": "enum"}
    :param bgcolor: {"default": 0, "not": "bgfile", "type": "number", "description": "Specify background color", "format": "flag"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
    """
    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        # Key under which this sub-object is serialized in the parent payload.
        self.b_key = "background"
        self.DeviceProxy = ""
        # Attribute defaults; all empty until set by the caller or device.
        self.bgfile = ""
        self.bgstyle = ""
        self.bgcolor_value = ""
        self.bgcolor_name = ""
        self.bgcolor = ""
        # Apply any caller-supplied attribute overrides.
        for keys, value in kwargs.items():
            setattr(self,keys, value)
class LogonFail(A10BaseClass):
    """Class Description::
    Logon fail page configuration.
    Class logon-fail supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.`
    :param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
    URL for this object::
    `https://<Hostname|Ip address>//axapi/v3/aam/authentication/portal/{name}/logon-fail`.
    """
    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        # No mandatory attributes for this object.
        self.required=[]
        self.b_key = "logon-fail"
        # REST endpoint template; {name} is the parent portal's name.
        self.a10_url="/axapi/v3/aam/authentication/portal/{name}/logon-fail"
        self.DeviceProxy = ""
        # Nested configuration sub-objects (fail-msg-cfg, title-cfg, background).
        self.fail_msg_cfg = {}
        self.title_cfg = {}
        self.background = {}
        self.uuid = ""
        # Apply any caller-supplied attribute overrides.
        for keys, value in kwargs.items():
            setattr(self,keys, value)
|
[
"doug@parksidesoftware.com"
] |
doug@parksidesoftware.com
|
defd147ec57f50888e2a862cc79b2e4e99453437
|
3c000380cbb7e8deb6abf9c6f3e29e8e89784830
|
/venv/Lib/site-packages/cobra/modelimpl/cloud/listenerruleref.py
|
6093233a61cc528637a150a4cfdf201bb235a56f
|
[] |
no_license
|
bkhoward/aciDOM
|
91b0406f00da7aac413a81c8db2129b4bfc5497b
|
f2674456ecb19cf7299ef0c5a0887560b8b315d0
|
refs/heads/master
| 2023-03-27T23:37:02.836904
| 2021-03-26T22:07:54
| 2021-03-26T22:07:54
| 351,855,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,791
|
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ListenerRuleRef(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.cloud.ListenerRuleRef")
meta.moClassName = "cloudListenerRuleRef"
meta.rnFormat = "lisRuleRef-%(name)s"
meta.category = MoCategory.REGULAR
meta.label = "Cloud Load Balancer Listener Rule Reference"
meta.writeAccessMask = 0x6000000000000001
meta.readAccessMask = 0x6000000000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.cloud.RuleActionRef")
meta.childClasses.add("cobra.model.cloud.RuleConditionRef")
meta.childNamesAndRnPrefix.append(("cobra.model.cloud.RuleConditionRef", "conditionref-"))
meta.childNamesAndRnPrefix.append(("cobra.model.cloud.RuleActionRef", "actionref-"))
meta.parentClasses.add("cobra.model.cloud.ListenerRef")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.cloud.AListenerRule")
meta.rnPrefixes = [
('lisRuleRef-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "default", "default", 52033, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("default", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "name", "name", 52414, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.range = [(1, 16)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "priority", "priority", 51814, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
meta.props.add("priority", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "name"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("CloudLBToHcloudSecurityGroup", "From cloudLB to hcloudSecurityGroup", "cobra.model.hcloud.SecurityGroup"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("CloudLBToVnsAbsGraph", "From cloudLB to vnsAbsGraph", "cobra.model.vns.AbsGraph"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("ALDevIfToGraphInst", "Graph Instances", "cobra.model.vns.GraphInst"))
def __init__(self, parentMoOrDn, name, markDirty=True, **creationProps):
namingVals = [name]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"bkhoward@live.com"
] |
bkhoward@live.com
|
b68d2743352106871caf8d9b6a7eae7a0237fab5
|
3a4f14d6638bc0c12c129ed73c6c3543437203df
|
/src/morphforge/simulation/__init__.py
|
816c765420d8ed4ec98702b232f684d5e0c03a28
|
[
"BSD-2-Clause"
] |
permissive
|
unidesigner/morphforge
|
ef04ccb3877f069a0feea72eb1b44c97930dac44
|
510cd86549b2c2fb19296da2d4408ed8091fb962
|
refs/heads/master
| 2021-01-15T22:34:28.795355
| 2012-04-05T08:55:12
| 2012-04-05T08:55:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,509
|
py
|
#-------------------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#-------------------------------------------------------------------------------
|
[
"mikehulluk@googlemail.com"
] |
mikehulluk@googlemail.com
|
ab59227df732b23f0241889547035615c3d2a258
|
a1bffcd8854e1843e56bb812d4d83b3161a5211e
|
/plugins/connection/saltstack.py
|
6b77c982a43b9377025aa7831e6c44e954316633
|
[] |
no_license
|
goneri/ansible.community
|
1a71f9d98c164b77f8ed2ed7f558b4963005ff8f
|
f26f612dd0a3154050d90b51a75502018c95f6e4
|
refs/heads/master
| 2020-12-29T07:47:35.353515
| 2020-01-22T17:43:18
| 2020-01-22T17:43:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,800
|
py
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# Based on func.py
# (c) 2014, Michael Scherer <misc@zarb.org>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Michael Scherer (@mscherer) <misc@zarb.org>
connection: saltstack
short_description: Allow ansible to piggyback on salt minions
description:
- This allows you to use existing Saltstack infrastructure to connect to targets.
'''
import re
import os
import pty
import subprocess
from ansible_collections.ansible.community.plugins.module_utils._text import to_bytes, to_text
from ansible.module_utils.six.moves import cPickle
HAVE_SALTSTACK = False
try:
import salt.client as sc
HAVE_SALTSTACK = True
except ImportError:
pass
import os
from ansible import errors
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
''' Salt-based connections '''
has_pipelining = False
# while the name of the product is salt, naming that module salt cause
# trouble with module import
transport = 'ansible.community.saltstack'
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self.host = self._play_context.remote_addr
def _connect(self):
if not HAVE_SALTSTACK:
raise errors.AnsibleError("saltstack is not installed")
self.client = sc.LocalClient()
self._connected = True
return self
def exec_command(self, cmd, sudoable=False, in_data=None):
''' run a command on the remote minion '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
self._display.vvv("EXEC %s" % (cmd), host=self.host)
# need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', 'true;' + cmd])
if self.host not in res:
raise errors.AnsibleError("Minion %s didn't answer, check if salt-minion is running and the name is correct" % self.host)
p = res[self.host]
return (p['retcode'], p['stdout'], p['stderr'])
def _normalize_path(self, path, prefix):
if not path.startswith(os.path.sep):
path = os.path.join(os.path.sep, path)
normpath = os.path.normpath(path)
return os.path.join(prefix, normpath[1:])
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
super(Connection, self).put_file(in_path, out_path)
out_path = self._normalize_path(out_path, '/')
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
with open(in_path) as in_fh:
content = in_fh.read()
self.client.cmd(self.host, 'file.write', [out_path, content])
# TODO test it
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
super(Connection, self).fetch_file(in_path, out_path)
in_path = self._normalize_path(in_path, '/')
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
content = self.client.cmd(self.host, 'cp.get_file_str', [in_path])[self.host]
open(out_path, 'wb').write(content)
def close(self):
''' terminate the connection; nothing to do here '''
pass
|
[
"ansible_migration@example.com"
] |
ansible_migration@example.com
|
be843d33ee176ec0d6827fefd01fdd6a15847466
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03282/s286249227.py
|
f28b18ffa16239f136d180ee675975bd01b56411
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 191
|
py
|
import sys
input = sys.stdin.readline
S = list(input().rstrip('\n'))
K = int(input())
n = 1
for i in range(min(len(S), K)):
if S[i] != '1':
n = int(S[i])
break
print(n)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
eea07dff9a04a5582761f119169aec890ef58c11
|
aba1d17ddc7d7ad9f49e2d6d87600e9e0387ba14
|
/mi/dataset/driver/sio_eng/sio/sio_eng_sio_recovered_driver.py
|
d84a95f50b651d68787a6e469c0b57ced9a03727
|
[
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
oceanobservatories/mi-instrument
|
3ad880c1366b1a8461fc9085768df0e9ddeb6ef5
|
bdbf01f5614e7188ce19596704794466e5683b30
|
refs/heads/master
| 2023-07-23T07:28:36.091223
| 2023-07-14T15:54:49
| 2023-07-14T15:54:49
| 24,165,325
| 1
| 32
|
BSD-2-Clause
| 2023-07-13T01:39:22
| 2014-09-17T22:53:22
|
Python
|
UTF-8
|
Python
| false
| false
| 1,673
|
py
|
#!/usr/bin/env python
"""
@package mi.dataset.driver.sio_eng/sio
@file mi/dataset/driver/sio_eng/sio/sio_eng_sio_recovered_driver.py
@author Jeff Roy
@brief Driver for the sio_eng_sio instrument
Release notes:
Initial Release
"""
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.parser.sio_eng_sio import SioEngSioParser
from mi.core.versioning import version
@version("15.6.1")
def parse(unused, source_file_path, particle_data_handler):
"""
This is the method called by Uframe
:param unused
:param source_file_path This is the full path and filename of the file to be parsed
:param particle_data_handler Java Object to consume the output of the parser
:return particle_data_handler
"""
with open(source_file_path, 'rb') as stream_handle:
# create and instance of the concrete driver class defined below
driver = SioEngSioRecoveredDriver(unused, stream_handle, particle_data_handler)
driver.processFileStream()
return particle_data_handler
class SioEngSioRecoveredDriver(SimpleDatasetDriver):
"""
Derived sio_eng_sio driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.sio_eng_sio',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'SioEngSioRecoveredDataParticle'
}
parser = SioEngSioParser(parser_config, stream_handle,
self._exception_callback)
return parser
|
[
"petercable@gmail.com"
] |
petercable@gmail.com
|
503df77b7e73dde22d4eea8e86d995e4f9983cbb
|
798960eb97cd1d46a2837f81fb69d123c05f1164
|
/symphony/cli/pyinventory/graphql/mutation/edit_service_type.py
|
8b288246df912b964320536842d70b2deedf041e
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
kyaaqba/magma
|
36d5fa00ce4f827e6ca5ebd82d97a3d36e5f5b5b
|
fdb7be22a2076f9a9b158c9670a9af6cad68b85f
|
refs/heads/master
| 2023-01-27T12:04:52.393286
| 2020-08-20T20:23:50
| 2020-08-20T20:23:50
| 289,102,268
| 0
| 0
|
NOASSERTION
| 2020-08-20T20:18:42
| 2020-08-20T20:18:41
| null |
UTF-8
|
Python
| false
| false
| 2,154
|
py
|
#!/usr/bin/env python3
# @generated AUTOGENERATED file. Do not Change!
from dataclasses import dataclass
from datetime import datetime
from gql.gql.datetime_utils import DATETIME_FIELD
from gql.gql.graphql_client import GraphqlClient
from gql.gql.client import OperationException
from gql.gql.reporter import FailedOperationException
from functools import partial
from numbers import Number
from typing import Any, Callable, List, Mapping, Optional, Dict
from time import perf_counter
from dataclasses_json import DataClassJsonMixin
from ..fragment.service_type import ServiceTypeFragment, QUERY as ServiceTypeFragmentQuery
from ..input.service_type_edit_data import ServiceTypeEditData
QUERY: List[str] = ServiceTypeFragmentQuery + ["""
mutation EditServiceTypeMutation($input: ServiceTypeEditData!) {
editServiceType(data: $input) {
...ServiceTypeFragment
}
}
"""]
@dataclass
class EditServiceTypeMutation(DataClassJsonMixin):
@dataclass
class EditServiceTypeMutationData(DataClassJsonMixin):
@dataclass
class ServiceType(ServiceTypeFragment):
pass
editServiceType: ServiceType
data: EditServiceTypeMutationData
@classmethod
# fmt: off
def execute(cls, client: GraphqlClient, input: ServiceTypeEditData) -> EditServiceTypeMutationData.ServiceType:
# fmt: off
variables: Dict[str, Any] = {"input": input}
try:
network_start = perf_counter()
response_text = client.call(''.join(set(QUERY)), variables=variables)
decode_start = perf_counter()
res = cls.from_json(response_text).data
decode_time = perf_counter() - decode_start
network_time = decode_start - network_start
client.reporter.log_successful_operation("EditServiceTypeMutation", variables, network_time, decode_time)
return res.editServiceType
except OperationException as e:
raise FailedOperationException(
client.reporter,
e.err_msg,
e.err_id,
"EditServiceTypeMutation",
variables,
)
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
ada65d289c521001d259f3753dd35f98479c82ff
|
1a04e02811c844ecf53cc041b104667e5c987a09
|
/vgrabber/qtgui/tabs/items/finalexam.py
|
75cd24590829f15938344be4414a71f826fdba8e
|
[] |
no_license
|
janjanech/vzdelavanieGui
|
dff17add6e6946063597d4c1eba5d6d76b6f5374
|
b2015f41f7cb1be1ecccf1c4778a91f43f8fba12
|
refs/heads/master
| 2021-10-24T16:21:24.911817
| 2019-01-15T17:03:49
| 2019-01-15T17:03:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 194
|
py
|
from PyQt5.QtWidgets import QTreeWidgetItem
class FinalExamItem(QTreeWidgetItem):
def __init__(self, data, final_exam):
super().__init__(data)
self.final_exam = final_exam
|
[
"janik@janik.ws"
] |
janik@janik.ws
|
0484b2199fc895b1ac5faa6eeb44803cd770356e
|
c9c762a08c031bafa1e577733c6af53a7a1b142f
|
/Project4Proj/Project4App/admin.py
|
a33a7882886d7d816c15cd8b34a9aed7dc315c9f
|
[] |
no_license
|
Joshtg1104/Project4-Django-VideoApp
|
aed635d493125f42482b6f8e2424bec2e93da848
|
46efc176762a2fcb76e743672d3eb9b62f7808b6
|
refs/heads/master
| 2020-05-29T09:25:13.002571
| 2019-06-12T18:58:27
| 2019-06-12T18:58:27
| 189,061,630
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 215
|
py
|
from django.contrib import admin
from .models import Video, AccountModel, CommentModel
# Register your models here.
admin.site.register(AccountModel)
admin.site.register(Video)
admin.site.register(CommentModel)
|
[
"joshtg1104@gmail.com"
] |
joshtg1104@gmail.com
|
38f27425bfd3df62fce464d36f7d3ee1f274e55e
|
912b0c6274e9c5c0956c4d1d1178f2b68fcf4296
|
/suod/test/test_base.py
|
2a5676b05176b356328d547265bcc359ee0aa19d
|
[
"BSD-2-Clause"
] |
permissive
|
DreamtaleCore/SUOD
|
04f238f48625a3a3e182b19c2af65612954453b2
|
353aeaf8d5e5c338f94686e73a8a0fec88670700
|
refs/heads/master
| 2022-10-30T04:44:17.617986
| 2020-06-15T21:07:26
| 2020-06-15T21:07:26
| 292,205,263
| 1
| 0
|
BSD-2-Clause
| 2020-09-02T07:01:50
| 2020-09-02T07:01:49
| null |
UTF-8
|
Python
| false
| false
| 3,239
|
py
|
# -*- coding: utf-8 -*-
import os
import sys
import unittest
import numpy as np
# temporary solution for relative imports in case pyod is not installed
# if suod
# is installed, no need to use the following line
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from suod.models.base import SUOD
from pyod.utils.data import generate_data
from pyod.models.lof import LOF
from pyod.models.pca import PCA
from pyod.models.hbos import HBOS
from pyod.models.lscp import LSCP
class TestBASE(unittest.TestCase):
def setUp(self):
self.n_train = 1000
self.n_test = 500
self.contamination = 0.1
self.roc_floor = 0.6
self.random_state = 42
self.X_train, self.y_train, self.X_test, self.y_test = generate_data(
n_train=self.n_train, n_test=self.n_test,
contamination=self.contamination, random_state=self.random_state)
self.base_estimators = [
LOF(n_neighbors=5, contamination=self.contamination),
LOF(n_neighbors=15, contamination=self.contamination),
LOF(n_neighbors=25, contamination=self.contamination),
LOF(n_neighbors=35, contamination=self.contamination),
LOF(n_neighbors=45, contamination=self.contamination),
HBOS(contamination=self.contamination),
PCA(contamination=self.contamination),
LSCP(detector_list=[
LOF(n_neighbors=5, contamination=self.contamination),
LOF(n_neighbors=15, contamination=self.contamination)],
random_state=self.random_state)
]
this_directory = os.path.abspath(os.path.dirname(__file__))
self.cost_forecast_loc_fit_ = os.path.join(this_directory,
'bps_train.joblib')
self.cost_forecast_loc_pred_ = os.path.join(this_directory,
'bps_prediction.joblib')
self.model = SUOD(base_estimators=self.base_estimators, n_jobs=2,
rp_flag_global=True, bps_flag=True,
contamination=self.contamination,
approx_flag_global=True,
cost_forecast_loc_fit=self.cost_forecast_loc_fit_,
cost_forecast_loc_pred=self.cost_forecast_loc_pred_)
def test_initialization(self):
self.model.get_params()
self.model.set_params(**{'n_jobs': 4})
def test_fit(self):
"""
Test base class initialization
:return:
"""
self.model.fit(self.X_train)
def test_approximate(self):
self.model.fit(self.X_train)
self.model.approximate(self.X_train)
def test_predict(self):
self.model.fit(self.X_train)
self.model.approximate(self.X_train)
self.model.predict(self.X_test)
def test_decision_function(self):
self.model.fit(self.X_train)
self.model.approximate(self.X_train)
self.model.decision_function(self.X_test)
def test_predict_proba(self):
self.model.fit(self.X_train)
self.model.approximate(self.X_train)
self.model.predict_proba(self.X_test)
|
[
"yzhao062@gmail.com"
] |
yzhao062@gmail.com
|
6149f8fede99ffcba481a3bb2377ebd72da708a4
|
b697f5d8e441328c2deee1bb5853d80710ae9873
|
/944.删列造序.py
|
d10994932e2f8dd4be69c43b93c603bca7107745
|
[] |
no_license
|
happy-luck/LeetCode-python
|
d06b0f6cf7bad4754e96e6a160e3a8fc495c0f95
|
63fc5a1f6e903a901ba799e77a2ee9df2b05543a
|
refs/heads/master
| 2021-03-22T16:12:52.097329
| 2020-07-15T13:48:37
| 2020-07-15T13:48:37
| 247,381,313
| 0
| 0
| null | 2020-03-15T01:47:42
| 2020-03-15T01:28:38
| null |
UTF-8
|
Python
| false
| false
| 757
|
py
|
方法一:
class Solution:
def minDeletionSize(self, A: List[str]) -> int:
D = 0
A_list = []
for string in A:
res = list(string)
res = [ord(i) for i in res]
A_list.append(res)
for j in range(len(res)):
for i in range(1,len(A)):
if A_list[i][j]-A_list[i-1][j]<0:
D += 1
break
return D
方法二:
class Solution(object):
def minDeletionSize(self, A):
ans = 0
for col in zip(*A):
if any(col[i] > col[i+1] for i in range(len(col) - 1)):
ans += 1
return ans
时间复杂度:O(N),其中 N 是数组 A 中的元素个数。
空间复杂度:O(1)。
|
[
"18813129242@163.com"
] |
18813129242@163.com
|
c035d9f0fb7bdc18413f6216781d4e272dbf8234
|
208796d60bba301648b76f3fd9af20738aca3ba7
|
/neuclease/bin/cleave_server_debug_main.py
|
83d3de98a286d0b91481496060849dd94ff1df21
|
[
"BSD-3-Clause"
] |
permissive
|
stuarteberg/pydvid
|
2afaebeb886b8034852e21668bca6709b022cf0f
|
ce59988cbc8043c85fe3ba878d4fa415febba2f8
|
refs/heads/master
| 2021-01-17T20:34:47.843645
| 2019-10-10T19:47:23
| 2019-10-10T19:47:23
| 33,325,656
| 0
| 0
| null | 2015-04-02T18:43:25
| 2015-04-02T18:43:25
| null |
UTF-8
|
Python
| false
| false
| 2,723
|
py
|
#!/usr/bin/env python3
import sys
import neuclease.cleave_server
def main():
_debug_mode = False
## DEBUG
if len(sys.argv) == 1:
_debug_mode = True
import os
log_dir = os.path.dirname(neuclease.__file__) + '/../logs'
sys.argv += [#"--merge-table", "/magnetic/workspace/neuclease/tiny-merge-table.npy",
#"--mapping-file", "/magnetic/workspace/neuclease/tiny-mapping.npy",
#"--primary-dvid-server", "emdata3:8900",
#"--primary-uuid", "017a",
#"--primary-labelmap-instance", "segmentation",
#"--suspend-before-launch",
"--merge-table", "/tmp/merge-table-5812998448.csv",
"--primary-dvid-server", "emdata1:8900",
"--primary-uuid", "642cfed9e8704d0b83ccca2ee3688528",
"--primary-labelmap-instance", "segmentation",
"--log-dir", log_dir]
neuclease.cleave_server.main(_debug_mode)
## Example requests:
"""
{"body-id": 673509195, "mesh-instance": "segmentation_meshes_tars", "port": 8900, "request-timestamp": "2018-05-10 13:40:56.117063", "seeds": {"1": [675222237], "2": [1266560684], "3": [1142805921], "5": [1329312351], "6": [1328298063], "7": [1264523335], "8": [1233488801, 1358310013], "9": [1357286646]}, "segmentation-instance": "segmentation", "server": "emdata3.int.janelia.org", "user": "bergs", "uuid": "017a"}
{"body-id": 5812980088, "mesh-instance": "segmentation_meshes_tars", "port": 8900, "request-timestamp": "2018-05-10 13:48:32.071343", "seeds": {"1": [299622182, 769164613], "2": [727964335], "3": [1290606913], "4": [485167093], "5": [769514136]}, "segmentation-instance": "segmentation", "server": "emdata3.int.janelia.org", "user": "bergs", "uuid": "017a"}
{"body-id": 5812980124, "mesh-instance": "segmentation_meshes_tars", "port": 8900, "request-timestamp": "2018-05-10 13:51:46.112896", "seeds": {"1": [391090531], "2": [453151532, 515221115, 515221301, 515557950, 515562175, 515562381, 515562454, 546597327, 577632049, 608330428, 608667239, 639701979, 639702027, 639702182, 670736831, 670736971, 670737150, 670737574]}, "segmentation-instance": "segmentation", "server": "emdata3.int.janelia.org", "user": "bergs", "uuid": "017a"}
{"body-id": 5812980898, "mesh-instance": "segmentation_meshes_tars", "port": 8900, "request-timestamp": "2018-05-10 13:54:00.042885", "seeds": {"1": [449551305], "2": [1261194539], "3": [1229822848], "4": [883458155, 883458603], "5": [790693775]}, "segmentation-instance": "segmentation", "server": "emdata3.int.janelia.org", "user": "bergs", "uuid": "017a"}
"""
if __name__ == "__main__":
main()
|
[
"bergs@janelia.hhmi.org"
] |
bergs@janelia.hhmi.org
|
309daa07bda7b7eb3525eaae9158ed897a52e6ba
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_209/611.py
|
0d8528d812df07b274136b7d9c4ebae4a50cbb69
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,235
|
py
|
t = input()
from collections import defaultdict
def solve(pancakes, n, k, dp):
# print "calling for {0}, {1}".format(n, k)
if ((n, k)) in dp:
# print 'returning {0} for {1}, {2}'.format(dp[(n, k)], n, k)
return dp[(n, k)]
if n == 0:
# print 'returning {0} for {1}, {2}'.format(0, n, k)
return 0
if k < 0:
# print 'returning {0} for {1}, {2}'.format(9876543211111111, n, k)
return -9876543211111111
if n == 1:
p = pancakes[k]
dp[(n, k)] = p[0]**2 + 2*p[0]*p[1]
# print p[0]**2 + 2*p[0]*p[1]
# print 'returning {0} for {1}, {2}'.format(p[0]**2 + p[0]*p[1], n, k)
return p[0]**2 + 2*p[0]*p[1]
max_ = -1
for cand in xrange(0, k):
max_ = max(max_, solve(pancakes, n-1, cand, dp))
p = pancakes[k]
dp[(n, k)] = max_ + 2*p[0]*p[1]
# print 'returning {0} for {1}, {2}'.format(dp[(n, k)], n, k)
return dp[(n, k)]
for idx in xrange(1, t + 1):
n, k = map(int, raw_input().split())
pancakes = []
for _ in xrange(n):
a, b = map(int, raw_input().split())
pancakes.append((a, b))
dp = defaultdict(int)
pancakes.sort(reverse=True)
# print pancakes
best = -1
for i in xrange(0, n):
best = max(best, solve(pancakes, k, i, dp))
print "Case #{0}: ".format(idx) + "%.10f" % (best*3.14159265359)
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
cda7cfa1c3424a8cf95d97f5dc12e578a7e2e2a3
|
72d010d00355fc977a291c29eb18aeb385b8a9b0
|
/BeatStep/__init__.py
|
49897318ae58c967be09a28d7820359edcc8d56d
|
[] |
no_license
|
maratbakirov/AbletonLive10_MIDIRemoteScripts
|
bf0749c5c4cce8e83b23f14f671e52752702539d
|
ed1174d9959b20ed05fb099f0461bbc006bfbb79
|
refs/heads/master
| 2021-06-16T19:58:34.038163
| 2021-05-09T11:46:46
| 2021-05-09T11:46:46
| 203,174,328
| 0
| 0
| null | 2019-08-19T13:04:23
| 2019-08-19T13:04:22
| null |
UTF-8
|
Python
| false
| false
| 761
|
py
|
# Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/BeatStep/__init__.py
# Compiled at: 2018-04-23 20:27:04
from __future__ import absolute_import, print_function, unicode_literals
import _Framework.Capabilities as caps
from .BeatStep import BeatStep
def get_capabilities():
return {caps.CONTROLLER_ID_KEY: caps.controller_id(vendor_id=7285, product_ids=[
518], model_name=[
'Arturia BeatStep']),
caps.PORTS_KEY: [
caps.inport(props=[caps.NOTES_CC, caps.SCRIPT, caps.REMOTE]),
caps.outport(props=[caps.SCRIPT])]}
def create_instance(c_instance):
return BeatStep(c_instance)
|
[
"julien@julienbayle.net"
] |
julien@julienbayle.net
|
e92d33a3da585f26e3b3a9469af88fc980b959e5
|
f3b233e5053e28fa95c549017bd75a30456eb50c
|
/p38a_input/L3FQ/3FQ-2Z_MD_NVT_rerun/set_1ns_equi_2.py
|
161c95b5097a3036c69d374da8142c6f04ce088a
|
[] |
no_license
|
AnguseZhang/Input_TI
|
ddf2ed40ff1c0aa24eea3275b83d4d405b50b820
|
50ada0833890be9e261c967d00948f998313cb60
|
refs/heads/master
| 2021-05-25T15:02:38.858785
| 2020-02-18T16:57:04
| 2020-02-18T16:57:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 754
|
py
|
import os
dir = '/mnt/scratch/songlin3/run/p38a/L3FQ/MD_NVT_rerun/ti_one-step/3FQ_2Z/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_2.in'
temp_pbs = filesdir + 'temp_1ns_equi_2.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#equiin
eqin = workdir + "%6.5f_equi_2.in" %(j)
os.system("cp %s %s" %(temp_equiin, eqin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))
#PBS
pbs = workdir + "%6.5f_1ns_equi_2.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
|
[
"songlin3@msu.edu"
] |
songlin3@msu.edu
|
42cf064d1f9437f636bbc909d434fb6771d510ca
|
dd3b8bd6c9f6f1d9f207678b101eff93b032b0f0
|
/basis/AbletonLive10.1_MIDIRemoteScripts/Push2/browser_list.py
|
6c9e839bb69da206779694dbbdc57b2e2d1b4c9d
|
[] |
no_license
|
jhlax/les
|
62955f57c33299ebfc4fca8d0482b30ee97adfe7
|
d865478bf02778e509e61370174a450104d20a28
|
refs/heads/master
| 2023-08-17T17:24:44.297302
| 2019-12-15T08:13:29
| 2019-12-15T08:13:29
| 228,120,861
| 3
| 0
| null | 2023-08-03T16:40:44
| 2019-12-15T03:02:27
|
Python
|
UTF-8
|
Python
| false
| false
| 3,765
|
py
|
# uncompyle6 version 3.4.1
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.16 (v2.7.16:413a49145e, Mar 2 2019, 14:32:10)
# [GCC 4.2.1 Compatible Apple LLVM 6.0 (clang-600.0.57)]
# Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/Push2/browser_list.py
# Compiled at: 2019-04-09 19:23:44
from __future__ import absolute_import, print_function, unicode_literals
import Live
from itertools import islice
from ableton.v2.base import EventObject, listenable_property, clamp, nop
from .model.uniqueid import UniqueIdMixin
class BrowserList(EventObject, UniqueIdMixin):
    """Lazily populated, selectable list of wrapped browser items.

    When ``item_iterator`` is a ``Live.Browser.BrowserItemIterator``, only the
    first ``LAZY_ACCESS_COUNT`` entries are pulled up front; the remainder is
    materialised once the selection crosses ``LAZY_ACCESS_THRESHOLD`` or
    ``access_all`` is set.  Each raw item is passed through ``item_wrapper``.
    NOTE: decompiled from Python 2.7 bytecode, so ``map`` returns a list.
    """
    LAZY_ACCESS_COUNT = 1000
    LAZY_ACCESS_THRESHOLD = LAZY_ACCESS_COUNT - 100
    def __init__(self, item_iterator=None, item_wrapper=nop, limit=-1, *a, **k):
        super(BrowserList, self).__init__(*a, **k)
        self._selected_index = -1  # -1 means "no selection"
        self._item_iterator = item_iterator
        self._item_wrapper = item_wrapper
        self._limit = limit  # -1 means "no limit"
        self._access_all = False
        self._items = []
        self._update_items()
        # The lazy window must be larger than the threshold that triggers
        # switching to full access, otherwise selection could never cross it.
        assert self.LAZY_ACCESS_COUNT > self.LAZY_ACCESS_THRESHOLD
    def _get_limit(self):
        return self._limit
    def _set_limit(self, value):
        # Changing the limit invalidates the lazy state; a real limit
        # (anything but -1) also clears the current selection.
        if value != self._limit:
            self._limit = value
            self._access_all = False
            self._update_items()
            self.notify_items()
            if value != -1:
                self.selected_index = -1
    limit = property(_get_limit, _set_limit)
    def _get_access_all(self):
        return self._access_all
    def _set_access_all(self, access_all):
        # Enabling full access drops any limit and fetches everything.
        if self._access_all != access_all:
            self._access_all = access_all
            self._limit = -1
            self._update_items()
            self.notify_items()
    access_all = property(_get_access_all, _set_access_all)
    @listenable_property
    def items(self):
        # Expose only the slice the current mode allows: the explicit limit,
        # the lazy window, or (with access_all) the full listing.
        if self.limit > 0:
            return self._items[:self.limit]
        if not self._access_all:
            return self._items[:self.LAZY_ACCESS_COUNT]
        return self._items
    def _update_items(self):
        # Pull just enough items from a Live iterator to satisfy the current
        # mode; non-Live iterables are (re)wrapped wholesale when they grew.
        if isinstance(self._item_iterator, Live.Browser.BrowserItemIterator):
            if self.limit > 0 and len(self._items) < self.limit:
                next_slice = islice(self._item_iterator, self.limit)
            elif not self._access_all and len(self._items) < self.LAZY_ACCESS_COUNT:
                next_slice = islice(self._item_iterator, self.LAZY_ACCESS_COUNT - len(self._items))
            else:
                next_slice = self._item_iterator
            self._items.extend(map(self._item_wrapper, next_slice))
        elif len(self._items) < len(self._item_iterator):
            self._items = map(self._item_wrapper, self._item_iterator)
    @property
    def selected_item(self):
        # None when nothing is selected.
        if self.selected_index == -1:
            return None
        else:
            return self.items[self.selected_index]
    @listenable_property
    def selected_index(self):
        return self._selected_index
    @selected_index.setter
    def selected_index(self, value):
        if value != self._selected_index:
            # A concrete selection is only allowed while no limit is active.
            assert value == -1 or self._limit == -1
            num_children = len(self._items)
            if value < -1 or value >= num_children:
                raise IndexError('Index %i must be in [-1..%i]' % (value, num_children - 1))
            self._selected_index = value
            self.notify_selected_index()
            # Approaching the end of the lazy window forces a full fetch so
            # the user can keep scrolling past it.
            if self._selected_index >= self.LAZY_ACCESS_THRESHOLD and not self._access_all:
                self.access_all = True
    def select_index_with_offset(self, offset):
        # Move the selection relatively, clamped to the valid item range.
        self.selected_index = clamp(self._selected_index + offset, 0, len(self._items) - 1)
|
[
"jharrington@transcendbg.com"
] |
jharrington@transcendbg.com
|
1fadf1c096fcdb36625f731a3599aee37afaedfa
|
8b321ef16f11701f66898cb5b0e186bed50aaf46
|
/6/3_Gems_admin_page/Gems/urls.py
|
0509da185d5d17d740ef0167d6c3f10070c07a5e
|
[] |
no_license
|
TrellixVulnTeam/django_H5IO
|
6bed043acfda0e3373fe5b352805f1c11b038e2a
|
1ee5407e93f1bbbaeb12b90b7c5d7b96e3ba13e2
|
refs/heads/master
| 2023-03-16T05:25:59.900947
| 2017-04-18T08:21:47
| 2017-04-18T08:21:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,296
|
py
|
"""Gems URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from mainApp.views import *
from userManagementApp.views import *
from adminApp.views import *
# Root URL: main landing page.
urlpatterns = [
    url(r'^$', main),
]
# User management and admin routes.
urlpatterns += [
    url(r'^user/login/$', login),
    url(r'^user/logout/$', logout),
    # url(r'^user/registration/$', registration_low),
    url(r'^user/registration/$', registration),
    url(r'^admin/$', admin_page),
    url(r'^admin/delete/user/(\d+)$', delete_user),
]
# This approach is deprecated and will be removed in Django 1.10:
# urlpatterns = patterns('mainApp.views',
#     url(r'^$', 'main'),
# )
|
[
"ostrowskyi@gmail.com"
] |
ostrowskyi@gmail.com
|
9a1e3984821d93a2119fe1e81d2bbc7589548289
|
3c000380cbb7e8deb6abf9c6f3e29e8e89784830
|
/venv/Lib/site-packages/cobra/modelimpl/hcipsec/entity.py
|
bdce01d6f76c3216322d49e73d93fa048d3e5e28
|
[] |
no_license
|
bkhoward/aciDOM
|
91b0406f00da7aac413a81c8db2129b4bfc5497b
|
f2674456ecb19cf7299ef0c5a0887560b8b315d0
|
refs/heads/master
| 2023-03-27T23:37:02.836904
| 2021-03-26T22:07:54
| 2021-03-26T22:07:54
| 351,855,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,859
|
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Entity(Mo):
    """IPsec entity managed object (``hcipsecEntity``) parented under
    ``hcloudCsr``.

    Auto-generated Cisco ACI model class ("written by zen warriors, do not
    modify"); it only declares class metadata and properties — no behavior.
    """
    meta = ClassMeta("cobra.model.hcipsec.Entity")
    meta.moClassName = "hcipsecEntity"
    meta.rnFormat = "ipsec"
    meta.category = MoCategory.REGULAR
    meta.label = "Entity"
    meta.writeAccessMask = 0x8021002001
    meta.readAccessMask = 0x8021002001
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = True
    meta.isContextRoot = False
    # Allowed child MO classes and their RN prefixes.
    meta.childClasses.add("cobra.model.tag.Tag")
    meta.childClasses.add("cobra.model.fault.Counts")
    meta.childClasses.add("cobra.model.health.Inst")
    meta.childClasses.add("cobra.model.aaa.RbacAnnotation")
    meta.childClasses.add("cobra.model.hcipsec.Inst")
    meta.childClasses.add("cobra.model.tag.Annotation")
    meta.childNamesAndRnPrefix.append(("cobra.model.tag.Annotation", "annotationKey-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.aaa.RbacAnnotation", "rbacDom-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.tag.Tag", "tagKey-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
    meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
    meta.childNamesAndRnPrefix.append(("cobra.model.hcipsec.Inst", "inst"))
    meta.parentClasses.add("cobra.model.hcloud.Csr")
    meta.rnPrefixes = [
        ('ipsec', False),
    ]
    # Property metadata: one PropMeta per MO attribute.
    prop = PropMeta("str", "annotation", "annotation", 51604, PropCategory.REGULAR)
    prop.label = "Annotation. Suggested format orchestrator:value"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("annotation", prop)
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    prop = PropMeta("str", "extMngdBy", "extMngdBy", 51605, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "undefined"
    prop._addConstant("msc", "msc", 1)
    prop._addConstant("undefined", "undefined", 0)
    meta.props.add("extMngdBy", prop)
    prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "local"
    prop._addConstant("implicit", "implicit", 4)
    prop._addConstant("local", "local", 0)
    prop._addConstant("policy", "policy", 1)
    prop._addConstant("replica", "replica", 2)
    prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
    meta.props.add("lcOwn", prop)
    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("uid", prop)
    # Deployment Meta: ancestor-type deployment query paths from hcloudCsr.
    meta.deploymentQuery = True
    meta.deploymentType = "Ancestor"
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcbgpPeer", "From hcloudCsr to hcbgpPeer", "cobra.model.hcbgp.Peer"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcl3LoopbackRoutedIf", "From hcloudCsr to hcl3LoopbackRoutedIf", "cobra.model.hcl3.LoopbackRoutedIf"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcospfRsTunnIf", "From hcloudCsr to hcospfRsTunnIf", "cobra.model.hcospf.RsTunnIf"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHctunnIf", "From hcloudCsr to hctunnIf", "cobra.model.hctunn.If"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcloudNetworkInterface", "From hcloudCsr to hcloudNetworkInterface", "cobra.model.hcloud.NetworkInterface"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcloudEndPointOper", "From hcloudCsr to hcloudEndPointOper", "cobra.model.hcloud.EndPointOper"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcl1PhysicalIf", "From hcloudCsr to hcl1PhysicalIf", "cobra.model.hcl1.PhysicalIf"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToCloudCtxProfile", "From hcloudCsr to cloudCtxProfile", "cobra.model.cloud.CtxProfile"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcl3Vrf", "From hcloudCsr to hcl3Vrf", "cobra.model.hcl3.Vrf"))
    meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcloudInstanceOper", "From hcloudCsr to hcloudInstanceOper", "cobra.model.hcloud.InstanceOper"))
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        # The RN format "ipsec" takes no naming properties.
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"bkhoward@live.com"
] |
bkhoward@live.com
|
995436ff7e70706bbea6b85818dcb895c3ff70c7
|
4cfbc12903651dedbc799f53a8078433196e7919
|
/Pre Processing/Topic 1 - Numpy/Class 1 - IntroductionProgram.py
|
c3b196507a0ce2566d2dcc1315350a927e618dbd
|
[] |
no_license
|
gautam4941/Machine_Learning_Codes
|
78bf86ab3caf6ee329c88ff18d25927125627a2c
|
0009d12ca207a9b0431ea56decc293588eb447b1
|
refs/heads/main
| 2023-02-06T18:05:44.154641
| 2023-01-30T17:04:25
| 2023-01-30T17:04:25
| 353,594,523
| 0
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 528
|
py
|
# How to create a numpy array, and how assignment mutates it in place.
import numpy as np
# A 2x5 nested list used as the source data.
l = [ [5, 6, 9, 10, 11], [ 1, 2, 3, 5, 6 ] ]
print( l, type(l), len(l) )
arr = np.array( l )
print( f"arr = { arr }", type( arr ), len( arr ), arr.shape )
print()
print( "Printing Numpy Array in the loop" )
# Iterating a 2-D array yields rows; iterating a row yields scalars.
for i in arr:
    print( f"i : { i }" )
    for j in i:
        print( f" j : {j}" )
print()
# Assigning a scalar to a whole row broadcasts it: row 1 becomes all 9s.
arr[1] = 9
print( "Checking Mutability, " )
print( f"arr = { arr }" )
# Chained indexing writes a single element of row 1.
arr[1][3] = 5
print( f"arr = { arr }" )
print()
|
[
"noreply@github.com"
] |
gautam4941.noreply@github.com
|
b43697e594bc2ebd2a22cf5aee7a1546b8119f53
|
bfbb78b6e6c69644ba70709ad528eb450bcf0cf5
|
/public/utils/wraps.py
|
a4e53c35ad111716201f0036cfb76f1e0a381ff1
|
[
"MIT"
] |
permissive
|
xiaoxiaolulu/AndroidAuto
|
98b2248175e017d02f5da5b7d11f0f20d4f92dc8
|
b8cdba360664f7d1c73b9ab290c7901eaf814f0c
|
refs/heads/master
| 2020-04-12T15:36:25.554509
| 2018-12-20T13:58:06
| 2018-12-20T13:58:06
| 162,586,748
| 8
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 303
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ========================================================
# Module : wraps
# Author : Null
# Create Date : 11/11/2018
# Amended by : Null
# Amend History : 11/11/2018
# ========================================================
|
[
"546464268@qq.com"
] |
546464268@qq.com
|
6e6e2d47512cdbd5432c4001855a5b23687540ae
|
0add7953d3e3ce2df9e8265102be39b758579753
|
/built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/search_space/networks/tensorflow/backbones/prune_resnet.py
|
a5b32faa79ca4e832eea9a0341de8e9c088213d8
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
Huawei-Ascend/modelzoo
|
ae161c0b4e581f8b62c77251e9204d958c4cf6c4
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
refs/heads/master
| 2023-04-08T08:17:40.058206
| 2020-12-07T08:04:57
| 2020-12-07T08:04:57
| 319,219,518
| 1
| 1
|
Apache-2.0
| 2023-03-24T22:22:00
| 2020-12-07T06:01:32
|
Python
|
UTF-8
|
Python
| false
| false
| 3,242
|
py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""ResNet models for pruning."""
import tensorflow as tf
from vega.search_space.networks import NetTypes, NetworkFactory
from ..blocks.resnet_block import _prune_basic_block
@NetworkFactory.register(NetTypes.BACKBONE)
class PruneResNet(object):
    """ResNet backbone with per-block pruned channel counts (TensorFlow).

    Builds a 3-stage basic-block ResNet (strides 1, 2, 2), channels-first,
    with a global-average-pool + dense head.  num_classes defaults to 10,
    which suggests CIFAR-style input — TODO confirm with callers.

    :param descript: network desc
    :type descript: dict
    """
    def __init__(self, descript):
        """Init PruneResNet from a description dict."""
        self.net_desc = descript
        self.block = _prune_basic_block
        self.encoding = descript.get('encoding')
        # chn: pruned inner-channel widths; sliced 3 per stage below, so it is
        # assumed to hold 9 entries — confirm against the desc producer.
        self.chn = descript.get('chn')
        # chn_node: stem + per-stage output widths (4 entries used below).
        self.chn_node = descript.get('chn_node')
        self.chn_mask = descript.get('chn_mask', None)
        self.chn_node_mask = descript.get('chn_node_mask', None)
        self.num_blocks = descript.get('num_blocks', [3, 3, 3])
        self.num_classes = descript.get('num_classes', 10)
        self.in_planes = self.chn_node[0]
        self.data_format = "channels_first"
        self.scope_name = 'PruneResnet'
    def _forward_prune_block(self, x, bottleneck, block, planes, inner_planes, num_blocks, stride, training, name):
        """Create one resolution stage: `num_blocks` blocks, first at `stride`."""
        idx = 0
        strides = [stride] + [1] * (num_blocks - 1)
        # Only the first block of a stage may downsample.
        expansion = 4 if bottleneck else 1
        for stride in strides:
            x = block(x, planes, inner_planes[idx], training, self.data_format,
                      name + '/block_' + str(idx), strides=stride)
            self.in_planes = planes * expansion
            idx += 1
        return x
    def __call__(self, x, training):
        """Forward pass; expects NHWC input and transposes to NCHW."""
        if self.data_format == 'channels_first':
            x = tf.transpose(x, [0, 3, 1, 2])
        x = tf.layers.conv2d(x, self.chn_node[0], 3, padding='same', use_bias=False,
                             data_format=self.data_format, name='conv_1')
        x = tf.layers.batch_normalization(x, axis=1 if self.data_format == 'channels_first' else 3,
                                          name='bn_1', training=training)
        # Three stages; `bottleneck` is always False here (basic blocks only).
        x = self._forward_prune_block(x, False, self.block, self.chn_node[1], self.chn[0:3],
                                      self.num_blocks[0], stride=1, training=training, name='layer_1')
        x = self._forward_prune_block(x, False, self.block, self.chn_node[2], self.chn[3:6],
                                      self.num_blocks[1], stride=2, training=training, name='layer_2')
        x = self._forward_prune_block(x, False, self.block, self.chn_node[3], self.chn[6:9],
                                      self.num_blocks[2], stride=2, training=training, name='layer_3')
        x = tf.nn.relu(x)
        # Global average pool over H and W, then flatten into the classifier.
        x = tf.reduce_mean(x, [-2, -1], keepdims=True)
        out = tf.layers.dense(tf.reshape(x, [x.get_shape()[0], -1]), self.num_classes)
        return out
|
[
"1571856591@qq.com"
] |
1571856591@qq.com
|
2c4aed5ed7cb5ee974c114d183f12163eb2fc911
|
d331f11cf1e779e5ccf72c20f700388d07065b19
|
/BB_HRRR/GLM_and_HRRR/save_GLM_HRRR_binary_fields.py
|
5e56de136638fc61b18ed129afce5c529ea38bc9
|
[] |
no_license
|
geofbaum/pyBKB_v3
|
cd54d886348547a67a712e4aa1f8299e43667e39
|
8cf483e5296c8b38b57e31a0f65ea29762b57f38
|
refs/heads/master
| 2020-07-02T18:39:46.296555
| 2019-08-08T18:36:31
| 2019-08-08T18:36:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,389
|
py
|
## Brian Blaylock
## May 7, 2019
"""
Generate HRRR-GLM binary Lightning tables and store data in a dictionary.
Save the dictionary for later use. Store on Horel-Group8 (approx. 150 GB).
"""
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
import multiprocessing
import os
import sys
sys.path.append('/uufs/chpc.utah.edu/common/home/u0553130/pyBKB_v3/')
from BB_HRRR.HRRR_Pando import get_hrrr_variable, get_hrrr_latlon
from BB_GOES.get_GOES import get_GOES_nearesttime
from BB_GOES.get_GLM import get_GLM_file_nearesttime, accumulate_GLM_FAST, filter_by_HRRR
from BB_datetimes.range import range_dates
from BB_HRRR.GLM_and_HRRR.GLM_events_HRRR import get_GLM_HRRR_contingency_stats, domains
def get_and_save(DATE):
    """Ensure the HRRR-GLM binary table for DATE exists on horel-group8.

    get_GLM_HRRR_contingency_stats writes the file as a side effect, so we
    only invoke it when the target file is missing.
    """
    print(DATE)
    base_dir = '/uufs/chpc.utah.edu/common/home/horel-group8/blaylock/GLM-HRRR_LTNG_binary/'
    target = base_dir + '/HRRR-GLM-Binary_%s' % DATE.strftime('%Y-%m-%d_%H%M')
    if os.path.exists(target):
        return
    # This call writes the file if it isn't available.
    get_GLM_HRRR_contingency_stats(DATE)
# Earlier run covered May-Aug 2018:
#sDATE = datetime(2018, 5, 1)
#eDATE = datetime(2018, 8, 1)
sDATE = datetime(2018, 8, 1)
eDATE = datetime(2018, 10, 1)
# One datetime per hour across the period.
DATES = range_dates(sDATE, eDATE, HOURS=1)
# list() forces the lazy map to actually process every date.
list(map(get_and_save, DATES))
# NOTE: Can't use multiprocessing because the get_GLM_HRRR_contingency_stats
# uses it instead.
|
[
"u0553130@ad.utah.edu"
] |
u0553130@ad.utah.edu
|
6c64949db5c6e14dc709d406fa53c415dabe40cb
|
fa7d04cb1eb932d609cc2e4567920b77b2080b57
|
/基础学习/python_work/Chapter 6/people_message_homework.py
|
a9596bcd4f98211383f405e04153752e22902adf
|
[] |
no_license
|
Yangqqiamg/Python-text
|
3b74088781e46a21534dcded49e8deae9c458e5d
|
426e15d4993c1658909a4e1a848829d09b029c96
|
refs/heads/master
| 2020-04-11T15:07:06.825475
| 2019-01-01T14:32:19
| 2019-01-01T14:32:19
| 161,880,306
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 276
|
py
|
# Exercise 1: store a person's details in a dict and print it.
people_message = {
    'first_name': 'add',
    'last_name': 'mike',
    'age': 24,
    'city': 'shanghai',
}
print(people_message)
# Exercise 2: map names to a favorite number and look one up.
people_num = {
    'mike': 6,
    'joe': 8,
    'mary': 9,
    'lihua': 15,
}
print('joe like ' + str(people_num['joe']))
|
[
"15218090298@163.com"
] |
15218090298@163.com
|
183e6bcb3efbec82b96f70e5665e476ceaf85779
|
3ac9deb93c7ef377749e1a92589757ff70e95011
|
/modules/python_file.py
|
788862f475b882abaf33013495a9abd80862cc83
|
[] |
no_license
|
sambapython/raisingstarts
|
6afc9ecfe8b6c21d1cbed87802465a0de581ddc8
|
c186dd0d58ecfd845c5092ddf4801e42ef698cae
|
refs/heads/master
| 2021-01-20T16:09:36.802216
| 2016-09-20T08:36:55
| 2016-09-20T08:36:55
| 62,765,486
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 344
|
py
|
# Python 2 demo script (bare print statements): exercises module imports and
# a minimal sqlite3 connection.
import file1
import file2
import file3
import mod1
import sqlite3
# Attribute chain through a nested module object.
print mod1.f1.fun1()
print (file1.fun1())
print (file3.fun3())
try:
    con=sqlite3.connect("db2.db")
    #con.execute("create table persons(id int,name varchar(60))")
    #con.commit()
except Exception as err:
    print err
finally:
    # NOTE(review): the connection is never closed (close() is commented out
    # below) — presumably intentional for the demo, but verify.
    pass
    #con.close()
#print (file3.fun4())
#import file2
[
"sambapython@gmail.com"
] |
sambapython@gmail.com
|
b3eaae4f74a3d95d15111b18db8173f26b41fbb7
|
ff1e3e87e0432173e67782e4c529701426918624
|
/PRODUSTRY/asgi.py
|
d4c0c582eb72d65389b47fbaef83d4187dcc768b
|
[] |
no_license
|
Rayhun/Produstry
|
dd7c13e39114f893d4ba3ea012d903ea1723d9ff
|
325e74c573c333f00f0842c14d92052818a85d5c
|
refs/heads/master
| 2023-04-22T20:52:49.058342
| 2021-05-11T19:47:13
| 2021-05-11T19:47:13
| 294,981,881
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 395
|
py
|
"""
ASGI config for PRODUSTRY project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'PRODUSTRY.settings')
application = get_asgi_application()
|
[
"rayhunkhan27@gmail.com"
] |
rayhunkhan27@gmail.com
|
a42149122e6f49793dcbcd256512cec4d76d656a
|
04975a41eb459f1528dcbdcb1143a3cb535aa620
|
/Dynamic_easy/inter_08_01.py
|
24efcaf21c829d2f7f98ade60f94fb5317623546
|
[] |
no_license
|
RickLee910/Leetcode_easy
|
2a50d632379826979a985e1b9950d4cf6bbd8b18
|
c2687daf334f96a908737067bb915b8b072d0d56
|
refs/heads/master
| 2023-01-29T11:09:26.701243
| 2020-12-02T04:36:14
| 2020-12-02T04:36:14
| 294,952,436
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 325
|
py
|
class Solution:
    def waysToStep(self, n: int) -> int:
        """Count the ways to climb a staircase of n steps taking 1, 2, or 3
        steps at a time, modulo 1000000007 (LeetCode interview 08.01).

        Uses the tribonacci recurrence f(n) = f(n-1) + f(n-2) + f(n-3)
        with f(1) = 1, f(2) = 2, f(3) = 4.
        """
        if n == 1 or n == 2:
            return n
        if n == 3:
            return 4
        # a, b, c hold f(i-3), f(i-2), f(i-1) going into each iteration.
        a, b, c = 1, 2, 4
        # BUGFIX: the original looped range(3, n + 1) — one iteration too
        # many — so it returned f(n+1) for every n >= 4 (e.g. 13 instead of
        # 7 for n=4).  range(4, n + 1) runs exactly n - 3 times.
        for _ in range(4, n + 1):
            a, b, c = b, c, (a + b + c) % (1000000007)
        return c
# Manual check: print the number of ways for n == 5.
s = Solution()
a = 5
print(s.waysToStep(a))
|
[
"13554543910@163.com"
] |
13554543910@163.com
|
ff5069979385848256736247d30c20bb6994c6b2
|
239464e12610791457d393e2573f79babd776456
|
/njupt/models/aolan.py
|
4565792529ff5435486f548d760f855b108507df
|
[] |
no_license
|
sanmumuzi/NJUPT-API
|
a6f48a963521b36940d59525cc730811561fbae6
|
621960fb0749e34da8c22e809b5264639071a853
|
refs/heads/master
| 2021-09-01T05:02:37.669007
| 2017-12-25T00:20:59
| 2017-12-25T00:20:59
| 115,200,611
| 0
| 0
| null | 2017-12-23T14:27:14
| 2017-12-23T14:27:14
| null |
UTF-8
|
Python
| false
| false
| 2,605
|
py
|
import hashlib
from njupt import settings
from njupt.models.base import Model
from njupt.urls import URL
class Aolan(Model):
    def login(self, account, password):
        """
        Log in to the Aolan system (jwxt.njupt.edu.cn).

        :param account: NJUPT student number, examinee number, or ID-card number
        :param password: plain-text password (MD5-hashed before posting)
        :return: {'r': 1, "msg": "登录失败"} on failure or
                 {'r': 0, 'msg': '登录成功'} on success
        """
        data = {
            "__VIEWSTATE": self._get_viewstate(URL.aolan_login()),
            '__VIEWSTATEGENERATOR': self._get_viewstategenerator(URL.aolan_login()),
            'userbh': account,
            # The portal expects the MD5 hex digest of the uppercased password.
            'pas2s': hashlib.md5(password.upper().encode('utf8')).hexdigest(),
            "vcode": self._get_captcha(URL.aolan_captcha()),
            "cw": "",
            "xzbz": "1",
        }
        return self._login_execute(url=URL.aolan_login(), data=data)
    def _login_execute(self, url=None, data=None):
        # Success is detected by a marker string on the post-login page
        # rather than by HTTP status alone.
        r = self.post(url=url, data=data)
        if r.ok:
            if "辅导员评议" in r.text:
                self.cookies.save(ignore_discard=True)  # persist login cookies
                self.cookies.load(filename=settings.COOKIES_FILE, ignore_discard=True)
                return {'r': 0, 'msg': '登录成功'}
            else:
                return {'r': 1, 'msg': '检查账号密码验证码是否正确'}
        else:
            return {'r': 1, "msg": "登录失败"}
return {'r': 1, "msg": "登录失败"}
class LibAccount(Model):
    def login(self, account, password):
        """
        Log in to the NJUPT library system.

        :param account: NJUPT student number
        :param password: password
        :return: {'r': 1, "msg": "登录失败"} on failure or
                 {'r': 0, 'msg': '登录成功'} on success
        """
        data = {
            "number": account,
            'passwd': password,
            'captcha': self._get_captcha(URL.lib_captcha()),
            'select': "cert_no",
            "returnUrl": "",
        }
        # NOTE(review): posts to URL.jwxt_login() even though this class is
        # the library account — confirm this is the intended endpoint.
        return self._login_execute(url=URL.jwxt_login(), data=data)

    def _login_execute(self, url=None, data=None):
        # Success is detected by a marker string on the post-login page.
        r = self.post(url=url, data=data)
        if r.ok:
            # Removed leftover debug print(r.text) that dumped the entire
            # response HTML to stdout on every login attempt.
            if "请到信息维护中完善个人联系方式" in r.text:
                self.cookies.save(ignore_discard=True)  # persist login cookies
                self.cookies.load(filename=settings.COOKIES_FILE, ignore_discard=True)
                return {'r': 0, 'msg': '登录成功'}
            else:
                return {'r': 1, 'msg': '检查账号密码验证码是否正确'}
        else:
            return {'r': 1, "msg": "登录失败"}
|
[
"gaoliangim@gmail.com"
] |
gaoliangim@gmail.com
|
38ba2d08e22e3d858578639be012ec3575f7804f
|
d2cb930ed5df0b1b5f7944e00f6f884bf014803d
|
/yeko_demo/yeko_demo/settings.py
|
2b5ddd20cb874abdfd2ed78b7de8f06e217beff2
|
[] |
no_license
|
sixDegree/python-scrapy-demo
|
3cae4298b01edab65449cfe9af56b2fa59f4c07d
|
b66530e54156be8c7877f1fc4d497fd497b6fdda
|
refs/heads/master
| 2020-06-17T03:16:23.038061
| 2019-07-08T09:25:15
| 2019-07-08T09:25:15
| 195,777,787
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,478
|
py
|
# -*- coding: utf-8 -*-
# Scrapy settings for yeko_demo project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://doc.scrapy.org/en/latest/topics/settings.html
# https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'yeko_demo'
SPIDER_MODULES = ['yeko_demo.spiders']
NEWSPIDER_MODULE = 'yeko_demo.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'yeko_demo (+http://www.yourdomain.com)'
# NOTE(review): impersonates desktop Firefox instead of identifying the bot.
USER_AGENT='Mozilla/5.0 (Windows NT 6.1; WOW64; rv:60.0) Gecko/20100101 Firefox/60.0'
# NOTE(review): credentials hard-coded in source — move to an environment
# variable or credential store before sharing/deploying.
MONGO_CONN_STR="mongodb://cj:123456@localhost:27017/?authSource=admin"
LOG_LEVEL = 'INFO'
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
#    'yeko_demo.middlewares.YekoDemoSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
#    'yeko_demo.middlewares.YekoDemoDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines (lower number runs first)
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    #'yeko_demo.pipelines.YekoDemoPipeline': 300,
    'yeko_demo.pipelines.MeterialsPipeline':300,
    'yeko_demo.pipelines.MongoPipeline':310,
    #'yeko_demo.pipelines.CommentPipeline':320
}
ITEM_STORE='./'
FILES_STORE='./meterials'
FILES_EXPIRES = 90
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
|
[
"chenjin.zero@163.com"
] |
chenjin.zero@163.com
|
beaf7eedb60db50e4bbda23836ec2a0cd5e63f0a
|
ea4e24693bddf0d986e7a4dd84a9208b3c28536f
|
/paayesh/wsgi.py
|
6a178fc93c375ca0905a9ebd480f764faf773bb1
|
[] |
no_license
|
Amirsorouri00/Django-Modular-Components
|
ec28812ab603c021a83080c701799f5e0d6a127b
|
d982c77ef291411718a19bf80d90fa61be65891a
|
refs/heads/master
| 2020-04-14T13:12:05.182391
| 2019-01-06T14:27:20
| 2019-01-06T14:27:20
| 163,862,171
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 391
|
py
|
"""
WSGI config for paayesh project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paayesh.settings")
application = get_wsgi_application()
|
[
"amirsorouri26@gmail.com"
] |
amirsorouri26@gmail.com
|
881c5c08827486887ff44acd266e89c45dddfd03
|
8898273f9811fab29eb5621734bafcdf204d8229
|
/scipy-stubs/integrate/_ivp/common.pyi
|
81dabab39e5cfc7ed3b65529e100e1954d3d90d1
|
[] |
no_license
|
tyrion/scipy-stubs
|
628ad6321a7e1502683a2b55a759777508ab4b67
|
bf49a91313523c4f635bc3e5d14444c1361caf64
|
refs/heads/master
| 2020-05-30T21:59:43.001510
| 2019-06-03T10:30:54
| 2019-06-03T10:30:54
| 189,984,340
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,267
|
pyi
|
# Stubs for scipy.integrate._ivp.common (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any, Optional
# Machine float epsilon used by the step-size heuristics.
EPS: Any
def validate_max_step(max_step: Any): ...
def warn_extraneous(extraneous: Any) -> None: ...
def validate_tol(rtol: Any, atol: Any, n: Any): ...
def norm(x: Any): ...
def select_initial_step(fun: Any, t0: Any, y0: Any, f0: Any, direction: Any, order: Any, rtol: Any, atol: Any): ...
# Piecewise continuous ODE solution: one local interpolant per time segment.
class OdeSolution:
    n_segments: Any = ...
    ts: Any = ...
    interpolants: Any = ...
    t_min: Any = ...
    t_max: Any = ...
    ascending: bool = ...
    ts_sorted: Any = ...
    def __init__(self, ts: Any, interpolants: Any) -> None: ...
    def _call_single(self, t: Any): ...
    def __call__(self, t: Any): ...
# Tuning constants for the finite-difference Jacobian estimator.
NUM_JAC_DIFF_REJECT: Any
NUM_JAC_DIFF_SMALL: Any
NUM_JAC_DIFF_BIG: Any
NUM_JAC_MIN_FACTOR: Any
NUM_JAC_FACTOR_INCREASE: int
NUM_JAC_FACTOR_DECREASE: float
def num_jac(fun: Any, t: Any, y: Any, f: Any, threshold: Any, factor: Any, sparsity: Optional[Any] = ...): ...
def _dense_num_jac(fun: Any, t: Any, y: Any, f: Any, h: Any, factor: Any, y_scale: Any): ...
def _sparse_num_jac(fun: Any, t: Any, y: Any, f: Any, h: Any, factor: Any, y_scale: Any, structure: Any, groups: Any): ...
|
[
"germano.gabbianelli@contentwise.tv"
] |
germano.gabbianelli@contentwise.tv
|
e58665ff3fd90ce5824d7791abbdc9f0b0b97237
|
b454c3af46b3c495d298bf4f4554718b3ca0f7cb
|
/_syntax/number.py
|
c387793f6f2c35935d9a986f5ad9d77d7dd285ee
|
[] |
no_license
|
jaelyangChoi/CodingTest
|
fd9bab94cbcad0308631d2b663acd07b87216f6c
|
8d81035f04a05fcfc8ccdafe8627800648bc73cc
|
refs/heads/master
| 2023-06-01T23:05:38.521250
| 2021-06-19T03:53:20
| 2021-06-19T03:53:20
| 339,958,839
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 269
|
py
|
a = 1e9 # scientific notation: 1e9 == 1,000,000,000
print(int(a))
pi = 314e-2
print(pi)
# Floats carry rounding error: binary floating point cannot represent
# every decimal fraction exactly.
a = .3 + .6
print(a) # 0.8999999999999999
# => use round() to round off
print(round(a, 1)) # 0.9
print(round(3.1423, 2)) # 3.14
|
[
"jaelyangchoi@gmail.com"
] |
jaelyangchoi@gmail.com
|
0432977e460d9740d98c78b2f667853a927bfa88
|
1e15b421142c26e5285d16a58ac6aca0b062cbef
|
/venv/Scripts/django-admin.py
|
bfe0e1fb72c8c2509cf247d49ece44c756696536
|
[] |
no_license
|
subha3495/subhacookiesproj
|
c2d49a5eb154aba27bfc99d194cee49efb8e89c3
|
3068f104dc83c76c81bd7a227c790f3f4d4d1b0f
|
refs/heads/master
| 2020-05-17T03:50:07.209409
| 2019-04-25T18:55:32
| 2019-04-25T18:55:32
| 183,492,017
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
#!C:\Users\subha\PycharmProjects\subhacookiesproj\venv\Scripts\python.exe
# Entry-point script installed into the virtualenv's Scripts directory:
# dispatches `django-admin <command>` to Django's management framework.
from django.core import management
if __name__ == "__main__":
    management.execute_from_command_line()
|
[
"dashjitu3@gmail.com"
] |
dashjitu3@gmail.com
|
6c078604796dd123c75509a7767be4cb0dd37108
|
a978d41e3754a2ece3d649888d7dc549b319817e
|
/apps/store/migrations/0002_auto_20171026_0244.py
|
a1a36f76d7053b9e39316b910dd3d5eb0ebab4e4
|
[] |
no_license
|
Ericksmith/board_games
|
ace0ca4d04b4eb508114c8c43d14ff3f5322b9f8
|
4c8a41f9e7bac8dfc59d1cba35c8c10b345d2e57
|
refs/heads/master
| 2021-09-05T07:15:23.888922
| 2018-01-25T04:39:24
| 2018-01-25T04:39:24
| 108,183,604
| 0
| 0
| null | 2018-01-25T04:39:24
| 2017-10-24T21:05:31
|
Python
|
UTF-8
|
Python
| false
| false
| 434
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-26 02:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter Game.publisher to a SmallIntegerField."""

    dependencies = [
        ('store', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='game',
            name='publisher',
            field=models.SmallIntegerField(),
        ),
    ]
|
[
"smith.s.erick@gmail.com"
] |
smith.s.erick@gmail.com
|
8179ed536b854e23b05d411c6409878d9f8fbeef
|
599069eeeae294950aab730ca8d4858ac1929a5c
|
/bemani/backend/bishi/base.py
|
d7ace86131c74ce846cf138365fdb3868d06b493
|
[] |
no_license
|
ByteFun/bemaniutils
|
232d057d4b548f929af4da4f145565ad51482113
|
bd467a9b732a25a1c8aba75106dc459fbdff61b0
|
refs/heads/master
| 2020-12-04T07:45:45.503620
| 2019-12-08T21:57:08
| 2019-12-08T21:57:08
| 231,683,196
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 718
|
py
|
# vim: set fileencoding=utf-8
from typing import Optional
from bemani.backend.base import Base
from bemani.backend.core import CoreHandler, CardManagerHandler, PASELIHandler
from bemani.common import GameConstants
class BishiBashiBase(CoreHandler, CardManagerHandler, PASELIHandler, Base):
    """
    Base class for the single Bishi Bashi version that we support (lol).
    In theory we could add support for Bishi Bashi Channel, but that
    never happened.
    """

    game = GameConstants.BISHI_BASHI

    def previous_version(self) -> Optional['BishiBashiBase']:
        """
        Return the previous version of this game, or None when there is
        no predecessor. Subclasses are expected to override this; the
        base implementation reports no previous version.
        """
        return None
|
[
"dragonminded@dragonminded.com"
] |
dragonminded@dragonminded.com
|
d1f7f4d7645b9e48ae6990d58072c273a9c73196
|
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
|
/all-gists/c703e319a1c79cd92f373c6c99dd4a40/snippet.py
|
f0eb1096bf3bccd9d42dd6d8a64c2f5536dbc3d2
|
[
"MIT"
] |
permissive
|
gistable/gistable
|
26c1e909928ec463026811f69b61619b62f14721
|
665d39a2bd82543d5196555f0801ef8fd4a3ee48
|
refs/heads/master
| 2023-02-17T21:33:55.558398
| 2023-02-11T18:20:10
| 2023-02-11T18:20:10
| 119,861,038
| 76
| 19
| null | 2020-07-26T03:14:55
| 2018-02-01T16:19:24
|
Python
|
UTF-8
|
Python
| false
| false
| 446
|
py
|
people = 30
cars = 40
buses = 15

# Decide based on cars vs. people.
if cars > people:
    choice = "we should take the cars."
elif cars < people:
    choice = "we should not take cars."
else:
    choice = "we can't decide."
print(choice)

# Decide based on buses vs. cars.
if buses > cars:
    choice = "too many buses."
elif buses < cars:
    choice = "maybe we could take the buses."
else:
    choice = "we still can't decide."
print(choice)

# Decide based on people vs. buses.
choice = "ok, let's take the buses." if people > buses else "fine, let's stay at home then."
print(choice)
|
[
"gistshub@gmail.com"
] |
gistshub@gmail.com
|
9dc8f2e6c9f60331c1c4894e1182c096842a971a
|
f9a587ffcc42e06294f12ac761bcc589ba202f85
|
/lesson04/del_r.py
|
01a034a4317736930147e9cc21cb34e12acce1b3
|
[] |
no_license
|
Bulgakoff/files_utf8_04
|
3c49d5f8b637eaa834dd99fd6f140f37ebf96794
|
a6299e9c088812c63e1967290a97a2798bc19d1c
|
refs/heads/master
| 2020-09-16T09:10:00.268684
| 2019-11-27T14:58:14
| 2019-11-27T14:58:14
| 223,722,605
| 0
| 0
| null | 2019-11-27T14:58:15
| 2019-11-24T09:51:44
|
Python
|
UTF-8
|
Python
| false
| false
| 158
|
py
|
# Read the raw bytes of every line in del.txt.
with open('del.txt', 'rb') as f:
    result = f.readlines()

print(result)

# Idiom fix: build the decoded list with a comprehension instead of
# slicing a throwaway copy and appending in a loop.
f_var = [line.decode('utf-8') for line in result]
print(f_var)
|
[
"hlbu@yandex.ru"
] |
hlbu@yandex.ru
|
454704723c91aaa38436e984d0a865a2c976bf16
|
45f93a9d47204d76b8bf25a71dfb79403e75c33c
|
/Trees_and_Graphs/Binary Trees/__Right-Sibling-Tree.py
|
5274bf5cf821019dce12066fb96d3cb3d3954489
|
[] |
no_license
|
tahmid-tanzim/problem-solving
|
0173bce1973ac3e95441a76c10324c0e1b0a57c3
|
6ddb51de6772130f209474e76f39ca2938f444f0
|
refs/heads/master
| 2023-06-25T02:18:03.690263
| 2023-06-20T06:58:46
| 2023-06-20T06:58:46
| 137,173,850
| 4
| 1
| null | 2022-03-30T08:28:41
| 2018-06-13T06:44:25
|
Python
|
UTF-8
|
Python
| false
| false
| 1,842
|
py
|
#!/usr/bin/python3
# https://www.algoexpert.io/questions/Right%20Sibling%20Tree
"""
Write a function that takes in a Binary Tree, transforms it into a Right Sibling Tree, and returns its root.
A Right Sibling Tree is obtained by making every node in a Binary Tree have
its right property point to its right sibling instead of its
right child. A node's right sibling is the node immediately to its right on
the same level or None / null if there is no node immediately to its right.
Note that once the transformation is complete, some nodes might no longer have
a node pointing to them. For example, in the sample output below, the node
with value 10 no longer has any inbound pointers and is effectively unreachable.
The transformation should be done in place, meaning that the original data
structure should be mutated (no new structure should be created).
Each BinaryTree node has an integer value, a
left child node, and a right child node. Children
nodes can either be BinaryTree nodes themselves or None / null.
Sample Input
tree = 1
/ \
2 3
/ \ / \
4 5 6 7
/ \ \ / / \
8 9 10 11 12 13
/
14
Sample Output
1 // the root node with value 1
/
2-----------3
/ /
4-----5-----6-----7
/ / /
8---9 10-11 12-13 // the node with value 10 no longer has a node pointing to it
/
14
"""
class BinaryTree:
    """A binary-tree node: an integer value plus optional left/right children."""

    def __init__(self, value, left=None, right=None):
        self.value = value
        self.left = left
        self.right = right
# O(n) time | O(d) space
# where n is the number of nodes in the Binary Tree and d is the depth (height) of the Binary Tree
def rightSiblingTree(root):
    """Rewire every node's `right` pointer to its right sibling, in place.

    The traversal visits the left subtree first, then redirects the node's
    own `right` pointer, then recurses into the (saved) original right
    subtree. By the time a right child inspects `parent.right`, that pointer
    already leads to the parent's own right sibling, whose `left` child is
    exactly the sibling this node needs.
    """
    _mutate(root, None, False)
    return root


def _mutate(node, parent, is_left_child):
    """Recursive helper for rightSiblingTree; mutates `node.right` in place."""
    if node is None:
        return
    # Save both children before `node.right` is clobbered below.
    left, right = node.left, node.right
    _mutate(left, node, True)
    if parent is None:
        node.right = None  # the root has no right sibling
    elif is_left_child:
        # Parent's right pointer is still its original right child here.
        node.right = parent.right
    else:
        # parent.right has already been redirected to the parent's sibling;
        # that sibling's left child (if any) is this node's right sibling.
        node.right = parent.right.left if parent.right is not None else None
    _mutate(right, node, False)
if __name__ == "__main__":
pass
|
[
"tahmid.tanzim@gmail.com"
] |
tahmid.tanzim@gmail.com
|
5b49f9ed2760916b19bd8f252a154986087ef41b
|
23b686feb2d0ab9082a7ce622fc055946ed99c55
|
/.history/atkd/views_20190409152250.py
|
368f52282a2afbb37d8b96dd4a13e80011e77a5e
|
[] |
no_license
|
jasvr/atkd
|
a18b9840bf9948a7560684cd5eb0d5e22f6c52c7
|
daf61f7aa11cfc812171298894b1d0019641c4bd
|
refs/heads/master
| 2020-05-07T09:35:56.343837
| 2019-04-12T16:17:09
| 2019-04-12T16:17:09
| 180,383,260
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 161
|
py
|
from django.shortcuts import render
from .models import Parent, Student
def parent_list(request):
    """Render the list of all Parent records.

    BUG FIX: the original ``return render(request)`` omitted the required
    template argument (a TypeError at request time) and never passed the
    queryset it had just fetched to the template context.
    """
    parents = Parent.objects.all()
    # NOTE(review): template name assumed from the view name — confirm it
    # matches the project's templates directory.
    return render(request, 'parent_list.html', {'parents': parents})
|
[
"jas.vrgs@gmail.com"
] |
jas.vrgs@gmail.com
|
5485a970566a26513ba7761fcf092dac48138d02
|
ee561aa019a80f621007f82bdb21fe6ed8b6278f
|
/devel/turtlebot3-melodic-devel/turtlebot3_description/catkin_generated/pkg.installspace.context.pc.py
|
b595bae6cb52a86f7d280558d437f0187c648e65
|
[] |
no_license
|
allanwhledu/agv_edu_prj
|
4fb5fbf14cf0a14edd57ee9bd87903dc25d4d4f2
|
643a8a96ca7027529332f25208350de78c07e33d
|
refs/heads/master
| 2020-09-23T23:32:54.430035
| 2019-12-04T07:47:55
| 2019-12-04T07:47:55
| 225,613,426
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 376
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated pkg-config context for the turtlebot3_description package.
CATKIN_PACKAGE_PREFIX = ""
# Empty string -> no exported include directories.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "urdf;xacro".replace(';', ' ')  # space-separated catkin deps
# Empty string -> no exported libraries.
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "turtlebot3_description"
PROJECT_SPACE_DIR = "/usr/local"
PROJECT_VERSION = "1.2.2"
|
[
"bitwanghaili@gmail.com"
] |
bitwanghaili@gmail.com
|
8525100e60ff3148ec5d2892294ea3ed84fb74f7
|
b0cc5920a18d31bc22d346ae10e99e6b78b12b32
|
/wsgi/zosiaproject/agenda/views.py
|
4132c02e6a381bf1a3b0baeaae4c4ff632a0ba79
|
[] |
no_license
|
kamarkiewicz/zosiaproject
|
de33525b786908585edd03dfb235c67031bcd440
|
440316ae1dea4feff5b6a9ac6f40c19382022d91
|
refs/heads/master
| 2021-01-17T09:51:34.944777
| 2016-01-03T21:06:58
| 2016-01-03T22:07:47
| 41,800,412
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 419
|
py
|
from django.views.generic.base import TemplateView
from django.utils import timezone
from .models import Agenda
class AgendaView(TemplateView):
    """Serve the agenda page, exposing the latest published Agenda as `agenda`."""

    template_name = 'agenda.html'

    def get_context_data(self, **kwargs):
        context = super(AgendaView, self).get_context_data(**kwargs)
        published = Agenda.objects.filter(pub_date__lte=timezone.now())
        # First entry of the already-published queryset (None when empty).
        context['agenda'] = published.first()
        return context
|
[
"k.a.markiewicz@gmail.com"
] |
k.a.markiewicz@gmail.com
|
35fbfc803b4c58ed3877c42a9dea8ca8ed8cce89
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_180/ch39_2019_09_04_15_02_03_498630.py
|
0029d8ac133813a2cedbf0938fddc211d09ee1cd
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 144
|
py
|
soma = 0
# Read integers from stdin and accumulate them; an entry of 0 terminates
# the loop (adding 0 was a no-op in the original, so the sum is identical).
while True:
    numa = int(input("Numeros para somar: "))
    if numa == 0:
        break
    soma += numa
print(soma)
|
[
"you@example.com"
] |
you@example.com
|
0f677f5d072eb753b88fe94e2feeead2bb50a595
|
f714db4463dd37fc33382364dc4b1963a9053e49
|
/src/sentry/analytics/events/first_transaction_sent.py
|
d972abeae1561c5b7482283e597056d244743902
|
[
"BUSL-1.1",
"Apache-2.0"
] |
permissive
|
macher91/sentry
|
92171c2ad23564bf52627fcd711855685b138cbd
|
dd94d574403c95eaea6d4ccf93526577f3d9261b
|
refs/heads/master
| 2021-07-07T08:23:53.339912
| 2020-07-21T08:03:55
| 2020-07-21T08:03:55
| 140,079,930
| 0
| 0
|
BSD-3-Clause
| 2020-05-13T11:28:35
| 2018-07-07T11:50:48
|
Python
|
UTF-8
|
Python
| false
| false
| 443
|
py
|
from __future__ import absolute_import
from sentry import analytics
class FirstTransactionSentEvent(analytics.Event):
    """Analytics event recorded when a project's first transaction is sent."""

    type = "first_transaction.sent"
    attributes = (
        analytics.Attribute("organization_id"),
        analytics.Attribute("project_id"),
        # Optional context about the transaction's origin.
        analytics.Attribute("platform", required=False),
        analytics.Attribute("default_user_id", required=False),
    )


# Register the event class so it can be recorded by type name.
analytics.register(FirstTransactionSentEvent)
|
[
"noreply@github.com"
] |
macher91.noreply@github.com
|
d1865dd785d2765014d2581d2626fbe175aced68
|
049d1262acb5e0a0be1201a12b479a7a111cb6b9
|
/jacob/bin/wheel
|
3dc16ecb462f5c5b1a855b258c3c3b4aceb27f2e
|
[] |
no_license
|
jaybenaim/day13-reinforcements
|
c579c908ac35abbe541431a85158f3c3b4ae55bf
|
d31e89d340465aff7e5f7917924f0ed64566a34a
|
refs/heads/master
| 2020-06-22T01:53:54.259573
| 2019-07-19T17:33:42
| 2019-07-19T17:33:42
| 197,604,415
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 280
|
#!/Users/jay/bitmaker/projects/day13-oop/assignments/reinforcements/jacob/bin/python3.7
# -*- coding: utf-8 -*-
# Auto-generated console-script shim for the `wheel` command.
import re
import sys

from wheel.cli import main

if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" suffix from argv[0] (setuptools
    # launcher convention on Windows) before delegating to wheel's CLI.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"benaimjacob@gmail.com"
] |
benaimjacob@gmail.com
|
|
aeef4cd9ad9cf60858a571c69a45edf2eaedd352
|
eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7
|
/google/appengine/v1/google-cloud-appengine-v1-py/google/cloud/appengine_admin_v1/services/firewall/pagers.py
|
fb10d4619bc29c2163fff3b9ab11c3472f65e6f1
|
[
"Apache-2.0"
] |
permissive
|
Tryweirder/googleapis-gen
|
2e5daf46574c3af3d448f1177eaebe809100c346
|
45d8e9377379f9d1d4e166e80415a8c1737f284d
|
refs/heads/master
| 2023-04-05T06:30:04.726589
| 2021-04-13T23:35:20
| 2021-04-13T23:35:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,937
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional
from google.cloud.appengine_admin_v1.types import appengine
from google.cloud.appengine_admin_v1.types import firewall
class ListIngressRulesPager:
    """Pager over ``list_ingress_rules`` results.

    Thinly wraps an initial
    :class:`google.cloud.appengine_admin_v1.types.ListIngressRulesResponse`
    and exposes ``__iter__`` over its ``ingress_rules`` field. Whenever a
    response carries a ``next_page_token``, iteration transparently issues
    another ``ListIngressRules`` request and continues through the new page.

    All attributes of the most recent response are also reachable on the
    pager itself via attribute delegation.
    """

    def __init__(self,
            method: Callable[..., appengine.ListIngressRulesResponse],
            request: appengine.ListIngressRulesRequest,
            response: appengine.ListIngressRulesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.appengine_admin_v1.types.ListIngressRulesRequest):
                The initial request object.
            response (google.cloud.appengine_admin_v1.types.ListIngressRulesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap so the stored request is our own mutable copy.
        self._request = appengine.ListIngressRulesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[appengine.ListIngressRulesResponse]:
        """Yield each response page, fetching further pages lazily."""
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = self._method(self._request, metadata=self._metadata)
            self._response = current
            yield current

    def __iter__(self) -> Iterable[firewall.FirewallRule]:
        for page in self.pages:
            for rule in page.ingress_rules:
                yield rule

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
class ListIngressRulesAsyncPager:
    """Async pager over ``list_ingress_rules`` results.

    Thinly wraps an initial
    :class:`google.cloud.appengine_admin_v1.types.ListIngressRulesResponse`
    and exposes ``__aiter__`` over its ``ingress_rules`` field, awaiting
    further ``ListIngressRules`` calls whenever a ``next_page_token`` is
    present. Attributes of the most recent response are reachable on the
    pager itself via attribute delegation.
    """

    def __init__(self,
            method: Callable[..., Awaitable[appengine.ListIngressRulesResponse]],
            request: appengine.ListIngressRulesRequest,
            response: appengine.ListIngressRulesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.appengine_admin_v1.types.ListIngressRulesRequest):
                The initial request object.
            response (google.cloud.appengine_admin_v1.types.ListIngressRulesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap so the stored request is our own mutable copy.
        self._request = appengine.ListIngressRulesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[appengine.ListIngressRulesResponse]:
        """Asynchronously yield each response page, fetching lazily."""
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = await self._method(self._request, metadata=self._metadata)
            self._response = current
            yield current

    def __aiter__(self) -> AsyncIterable[firewall.FirewallRule]:
        async def _flatten():
            async for page in self.pages:
                for rule in page.ingress_rules:
                    yield rule
        return _flatten()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
47cc7185780e69d639c732ae362796e493f9cf77
|
baaeb8c1d335e258fd49b5ef024ac39790fd660f
|
/backend/alembic_users/versions/ed860c399094_added_is_gold_evaluation_cols_to_jobs_.py
|
bab6dc2c44ecb1672d28025e6494b2f2c7d15e48
|
[] |
no_license
|
ReactARDev/React_Redux_Python
|
f0b80a9d2a603b38f8e144966bc899c5aa3690e6
|
afdb4a55f82fdff86686ad955448a4168d05c739
|
refs/heads/master
| 2021-10-10T19:28:05.142652
| 2019-01-15T21:24:06
| 2019-01-15T21:24:06
| 159,198,417
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,349
|
py
|
"""added is_gold_evaluation cols to jobs and topic annotations
Revision ID: ed860c399094
Revises: 9528026442d1
Create Date: 2017-12-18 12:43:18.402875
"""
# revision identifiers, used by Alembic.
revision = 'ed860c399094'
down_revision = '9528026442d1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add a nullable, indexed `is_gold_evaluation` boolean column to both
    annotation_jobs and topic_annotations."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('annotation_jobs', sa.Column('is_gold_evaluation', sa.Boolean(), nullable=True))
    op.create_index(op.f('ix_annotation_jobs_is_gold_evaluation'), 'annotation_jobs', ['is_gold_evaluation'], unique=False)
    op.add_column('topic_annotations', sa.Column('is_gold_evaluation', sa.Boolean(), nullable=True))
    op.create_index(op.f('ix_topic_annotations_is_gold_evaluation'), 'topic_annotations', ['is_gold_evaluation'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the `is_gold_evaluation` indexes and columns
    (indexes first, then columns)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_topic_annotations_is_gold_evaluation'), table_name='topic_annotations')
    op.drop_column('topic_annotations', 'is_gold_evaluation')
    op.drop_index(op.f('ix_annotation_jobs_is_gold_evaluation'), table_name='annotation_jobs')
    op.drop_column('annotation_jobs', 'is_gold_evaluation')
    # ### end Alembic commands ###
|
[
"talentmobile9999@gmail.com"
] |
talentmobile9999@gmail.com
|
223487d09c1e1f4d2f04e6c6301688924842dd39
|
2b6a02a34ee6bf68820ad185245e2609b296e0aa
|
/216.py
|
ddb1b4fc0c865ef4cd4c335fb5586c6825614993
|
[] |
no_license
|
shants/LeetCodePy
|
948e505b6fcb0edcb9a1cf63a245b61d448d6e27
|
2337b5031d4dfe033a471cea8ab4aa5ab66122d0
|
refs/heads/master
| 2020-03-28T08:43:04.606044
| 2019-11-25T05:03:15
| 2019-11-25T05:03:15
| 147,984,830
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,491
|
py
|
class Solution(object):
    """LeetCode 216, Combination Sum III: find all sets of `k` distinct
    digits from 1-9 that sum to `n`."""

    def __init__(self):
        # canonical digit-string -> combination (result store)
        self.d = {}

    def bt(self, k, n, i, a):
        """Backtrack over digits >= `i` with `n` remaining and partial combo `a`.

        Trying digits in ascending order generates each combination exactly
        once, replacing the original approach of enumerating permutations
        and de-duplicating via sorted keys.
        """
        if n == 0 and len(a) == k:
            self.d["".join(str(x) for x in a)] = a[:]
            return
        # Prune: overshot the target or already used k digits.
        if n <= 0 or len(a) >= k:
            return
        for digit in range(i, 10):
            if digit > n:
                break  # digits only grow; no later digit can fit either
            a.append(digit)
            self.bt(k, n - digit, digit + 1, a)
            a.pop()

    def combinationSum3(self, k, n):
        """
        :type k: int
        :type n: int
        :rtype: List[List[int]]
        """
        # BUG FIX: reset the store so repeated calls on the same instance do
        # not accumulate stale results, and drop the wrong `n == 0 -> [[]]`
        # shortcut that ignored k (it is handled correctly by bt()).
        self.d = {}
        self.bt(k, n, 1, [])
        return list(self.d.values())
if __name__ == "__main__":
s = Solution()
print(s.combinationSum3(3,9))
|
[
"mailtoshants@gmail.com"
] |
mailtoshants@gmail.com
|
64a479d9e6ee5f31fe84d13d53432ef96d720493
|
d2c4934325f5ddd567963e7bd2bdc0673f92bc40
|
/tests/artificial/transf_Difference/trend_LinearTrend/cycle_30/ar_/test_artificial_32_Difference_LinearTrend_30__0.py
|
292b04ae191d6b03a01bd8034a851358326c8b29
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
jmabry/pyaf
|
797acdd585842474ff4ae1d9db5606877252d9b8
|
afbc15a851a2445a7824bf255af612dc429265af
|
refs/heads/master
| 2020-03-20T02:14:12.597970
| 2018-12-17T22:08:11
| 2018-12-17T22:08:11
| 137,104,552
| 0
| 0
|
BSD-3-Clause
| 2018-12-17T22:08:12
| 2018-06-12T17:15:43
|
Python
|
UTF-8
|
Python
| false
| false
| 271
|
py
|
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art

# Generate and process one artificial series: 32 daily points, linear trend,
# 30-step cycle, Difference transform, zero noise, no exogenous variables,
# AR order 0.
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 30, transform = "Difference", sigma = 0.0, exog_count = 0, ar_order = 0);
|
[
"antoine.carme@laposte.net"
] |
antoine.carme@laposte.net
|
fd8ba159c5468455865fc833112aeb035392e82b
|
a5e28d513cc29ca39d4b31d44585def1ee6d0ae9
|
/tests/conftest.py
|
4977ec1f1af76acf593f7155d01c3553166725d7
|
[
"Apache-2.0"
] |
permissive
|
blakev/ulid
|
1e9c79d0acc3a82bcbf3c01601127e50d7ab15ce
|
089c76595c15fd614a1ee0b989353079052abce5
|
refs/heads/master
| 2021-04-15T12:07:36.881868
| 2017-06-16T05:12:10
| 2017-06-16T05:12:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,928
|
py
|
"""
conftest
~~~~~~~~
High level fixtures used across multiple test modules.
"""
import calendar
import datetime
import os
import pytest
import random
from ulid import base32
@pytest.fixture(scope='function')
def ulid_bytes_year_1990(valid_bytes_80):
    """ULID bytes whose 48-bit timestamp component encodes 1990-01-01."""
    timestamp = fixed_year_timestamp_bytes(1990, 1, 1)
    return timestamp + valid_bytes_80
@pytest.fixture(scope='function')
def ulid_bytes_year_2000(valid_bytes_80):
    """ULID bytes whose 48-bit timestamp component encodes 2000-01-01."""
    timestamp = fixed_year_timestamp_bytes(2000, 1, 1)
    return timestamp + valid_bytes_80
@pytest.fixture(scope='function')
def valid_bytes_128():
    """Random bytes of the full 128-bit ULID width."""
    return random_bytes(128 // 8)
@pytest.fixture(scope='function')
def valid_bytes_80():
    """Random bytes of the 80-bit ULID randomness width."""
    return random_bytes(80 // 8)
@pytest.fixture(scope='function')
def valid_bytes_48():
    """Random bytes of the 48-bit ULID timestamp width."""
    return random_bytes(48 // 8)
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_128(request):
    """Random bytes of every length 0-31, never exactly 16 (128 bits)."""
    return random_bytes(request.param, not_in=(16,))
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_80(request):
    """Random bytes of every length 0-31, never exactly 10 (80 bits)."""
    return random_bytes(request.param, not_in=(10,))
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_48(request):
    """Random bytes of every length 0-31, never exactly 6 (48 bits)."""
    return random_bytes(request.param, not_in=(6,))
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_48_80_128(request):
    """Random bytes of every length 0-31, never 6, 10, or 16 bytes."""
    return random_bytes(request.param, not_in=(6, 10, 16))
@pytest.fixture(scope='function')
def valid_str_26():
    """Random Base32 string of the full 26-character ULID width."""
    return random_str(26)
@pytest.fixture(scope='function')
def valid_str_10():
    """Random Base32 string of the 10-character ULID timestamp width."""
    return random_str(10)
@pytest.fixture(scope='function')
def valid_str_16():
    """Random Base32 string of the 16-character ULID randomness width."""
    return random_str(16)
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_26(request):
    """Random Base32 string of every length 0-31, never exactly 26."""
    return random_str(request.param, not_in=(26,))
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_16(request):
    """Random Base32 string of every length 0-31, never exactly 16."""
    return random_str(request.param, not_in=(16,))
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_10(request):
    """Random Base32 string of every length 0-31, never exactly 10."""
    return random_str(request.param, not_in=(10,))
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_10_16_26(request):
    """Random Base32 string of every length 0-31, never 10, 16, or 26."""
    return random_str(request.param, not_in=(10, 16, 26))
def random_bytes(num_bytes, not_in=(-1,)):
    """Return `num_bytes` random bytes.

    Lengths listed in `not_in` are avoided by bumping the size by one.
    """
    if num_bytes in not_in:
        num_bytes += 1
    return os.urandom(num_bytes)
def random_str(num_chars, not_in=(-1,)):
    """Return a random string over the ULID Base32 alphabet.

    Lengths listed in `not_in` are avoided by bumping the size by one.
    """
    if num_chars in not_in:
        num_chars += 1
    chars = [random.choice(base32.ENCODING) for _ in range(num_chars)]
    return ''.join(chars)
def fixed_year_timestamp_bytes(*args, **kwargs):
    """Return the 48-bit big-endian millisecond timestamp for
    :class:`~datetime.datetime` constructed from the given args."""
    moment = datetime.datetime(*args, **kwargs)
    millis = calendar.timegm(moment.timetuple()) * 1000
    return millis.to_bytes(6, byteorder='big')
|
[
"andrew.r.hawker@gmail.com"
] |
andrew.r.hawker@gmail.com
|
ebc3dcd37e323817823673c28a9a13d8be293c95
|
f8ea3582884df87172cb747e424ebd0c20223614
|
/tests/oldtests/testrastdist.py
|
7ed004fabdd17a007282c998395e528001d9933a
|
[
"MIT"
] |
permissive
|
karimbahgat/PythonGis
|
94f52f800a769ee54b12c7277604ead011465321
|
fb99148a15bcbe0438ddca67b484a15076bd961a
|
refs/heads/master
| 2023-04-12T15:59:08.522464
| 2022-09-09T22:48:32
| 2022-09-09T22:48:32
| 47,153,255
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 678
|
py
|
# NOTE: Python 2 code (bare `print` statements below).
import pythongis as pg
from time import time

# test distance
# Load the cshapes country-borders shapefile; the commented-out `select`
# would have restricted it to features with GWCODE == 666.
vect = pg.VectorData(r"C:\Users\kimo\Downloads\cshapes_0.6\cshapes.shp",
                     )#select=lambda f: f["GWCODE"]==666)

hist = vect.histogram("GWCODE")
#hist.view()

t = time()
# Global distance raster at 0.5 degrees per pixel (720 x 360).
distrast = pg.raster.analyzer.distance(vect, bbox=[-180,90,180,-90], width=72*10, height=36*10)
#distrast = pg.RasterData("C:/Users/kimo/Desktop/world.jpg", bbox=[-180,90,180,-90], width=512, height=256)
print time()-t  # elapsed seconds for the distance computation

hist = distrast.bands[0].histogram()
print hist
#hist.view()

# Render the distance raster with country outlines (no fill) on top.
#mapp = distrast.render()
mapp = pg.renderer.Map()
mapp.add_layer(distrast)
mapp.add_layer(vect, fillcolor=None)
#mapp.add_legend()
mapp.view()
|
[
"Karim.bahgat.norway@gmail.com"
] |
Karim.bahgat.norway@gmail.com
|
a414ae6cc75cc2bab26ee9b925f10d8309d71118
|
59cc4c5983dd486b93e409da3df9e20cd8dbd04e
|
/metaprog/composition.py
|
db929ad1fa422c0eb02708e1d27df290842f8ba0
|
[] |
no_license
|
titu1994/Python-Work
|
a95866bf47a3aba274376ec72b994e2e4fbda634
|
bc7f201ed01e2f7c49ae8d143d29e87e94420dc9
|
refs/heads/master
| 2023-04-27T02:33:30.731988
| 2023-04-22T19:13:03
| 2023-04-22T19:13:03
| 53,428,058
| 13
| 10
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,024
|
py
|
def custom_dir(c, add):
    """dir()-style listing for *c*: class attributes, then instance
    attributes, then the extra names in *add*."""
    entries = dir(type(c))
    entries.extend(c.__dict__)
    entries.extend(add)
    return entries
class BaseComposite:
    """Delegates attribute access to the objects in ``self.components``.

    Any public attribute exposed by a component (collected in
    ``_extra_params``) can be read directly off the composite.
    """

    @property
    def _extra_params(self):
        # Normalize self.components to a list, creating it if absent.
        if not hasattr(self, 'components'):
            self.components = []
        if type(self.components) not in {list, tuple}:
            self.components = [self.components]
        elif type(self.components) == tuple:
            self.components = list(self.components)
        args = []
        for component in self.components:
            # Collect every public attribute name of every component.
            args.extend([o for o in dir(component)
                         if not o.startswith('_')])
        return args

    def __getattr__(self, k):
        # BUG FIX: answer the `components` probe with AttributeError directly;
        # _extra_params calls hasattr(self, 'components'), and resolving that
        # through _extra_params again recursed forever when it was unset.
        if k == 'components':
            raise AttributeError(k)
        if k in self._extra_params:
            for component in self.components:
                if hasattr(component, k):
                    # BUG FIX: fetch the attribute from the matching
                    # component, not from the components *list* itself.
                    return getattr(component, k)
        raise AttributeError(k)

    def __dir__(self):
        return custom_dir(self, self._extra_params)
|
[
"titu1994@gmail.com"
] |
titu1994@gmail.com
|
e9aac60d8d793b04b8cce53945eca54786292b2b
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_328/ch25_2020_03_09_20_08_11_452435.py
|
e45f3d0a86b1198f3a6d0682673b1b7ed61d8b3e
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 330
|
py
|
from math import sin, radians

# Read launch speed and angle (degrees) for the jackfruit.
jacas = float(input("qual a velocidade de lançamento de sua jaca: "))
jacas2 = float(input("qual o ângulo de lançamento da sua jaca: "))

# BUG FIX: the original line had an unbalanced parenthesis and multiplied
# by `sin` instead of calling it (SyntaxError / TypeError). Projectile
# range: v**2 * sin(2*theta) / g.
d = (jacas ** 2) * sin(radians(2 * jacas2)) / 9.8

if d < 98:
    print("Muito perto")
elif d >= 98 and d <= 102:
    print("Acertou!")
else:
    print("Muito longe")
|
[
"you@example.com"
] |
you@example.com
|
7b927918987478127db33e25f638d3774ffa6cbb
|
d41c15b9c68ab2ee70740044d25d620e6b90a09e
|
/app/mod_cmd/commands/status.py
|
2faf54044feec0dddce8af23988a4ef47d2decb9
|
[
"Apache-2.0"
] |
permissive
|
jaycode/Arthur.workspace
|
9093b54cda983d2e8b6745b894403b5fa1282b56
|
7a581104141ee5f556e058b1276b4087a2921dfc
|
refs/heads/master
| 2021-01-10T10:36:35.599700
| 2016-03-21T19:37:49
| 2016-03-21T19:37:49
| 55,436,635
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,480
|
py
|
"""This module is useful to allow user finds out current state of their work.
"""
from app.mod_cmd.client_instruction import ClientInstruction
from app.helpers import docs_path
from zipfile import ZipFile
from app import app, mongo
def run(project=None, args=None, **kwargs):
    """Show status of current project. Keep checking this often!

    status [item]

    Args:
        project: The active project, or None when nothing is loaded.
        args: Item from the project you wish to view in more detail. Possible values:
            - docs: View all docs currently being worked on. Shorthand of `list_docs` command.
            - context: View the detail of context currently used in the project.

    Returns:
        A ``[project, ClientInstruction]`` pair carrying the status message.
    """
    # FIX: the original default `args=[]` was a mutable default argument
    # (shared across calls); use None and create a fresh list instead.
    if args is None:
        args = []
    if project is None:
        message = ""
    else:
        active_doc = ''
        if 'last_loaded_doc' in app.session:
            active_doc = app.session['last_loaded_doc']

        path = docs_path()
        with app.get_path(path) as path:
            with ZipFile(path, 'r') as zipfile:
                # Document count = number of entries in the project archive.
                docs = len(zipfile.namelist())
        current_context = project.context['name']
        dfcount = mongo.db.data_fields.count({'project_id': project._id})
        message = \
            "Project name: %s\n" \
            "Last loaded document: %s\n" \
            "Total documents: %d\n" \
            "# data fields: %d\n" \
            "Context: %s" \
            % (project.name, active_doc, docs, dfcount, current_context)

    instruction = ClientInstruction({
        'message': message
    })
    return [project, instruction]
|
[
"teguhwpurwanto@gmail.com"
] |
teguhwpurwanto@gmail.com
|
7f027a5f8990360a411659c96ad45506c18ae5a9
|
8319c9859bde5e21eba2ba60219ebe496646470b
|
/src/stratis_cli/_stratisd_constants.py
|
86f901513a72fc8576ab0bd4c5884a268bb8d66e
|
[
"Apache-2.0"
] |
permissive
|
stratis-storage/stratis-cli
|
0be83c0903c1050ac3cf75a19121ba19be97c4a6
|
399c95edd7c37e5fb9494f7829d5355c011fb7d7
|
refs/heads/master
| 2023-08-31T23:24:02.710481
| 2023-08-30T20:18:02
| 2023-08-30T20:18:02
| 66,956,943
| 107
| 44
|
Apache-2.0
| 2023-09-08T18:25:33
| 2016-08-30T16:09:39
|
Python
|
UTF-8
|
Python
| false
| false
| 2,347
|
py
|
# Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Stratisd error classes.
"""
# isort: STDLIB
from enum import Enum, IntEnum
from ._error_codes import PoolMaintenanceErrorCode
class StratisdErrors(IntEnum):
    """
    Stratisd Errors

    Numeric error codes reported by the stratisd daemon.
    """
    OK = 0
    ERROR = 1
    def __str__(self):
        # Display the symbolic name (e.g. "OK") rather than the integer value.
        return self.name
class BlockDevTiers(IntEnum):
    """
    Tier to which a blockdev device belongs.
    """
    DATA = 0
    CACHE = 1
    def __str__(self):
        # Display the symbolic tier name rather than the integer value.
        return self.name
# Clevis-related string constants: pin names and the keys used when
# building Clevis configuration for tang/tpm2 encryption.
CLEVIS_KEY_TANG_TRUST_URL = "stratis:tang:trust_url"  # Stratis-specific "trust the URL" marker
CLEVIS_PIN_TANG = "tang"
CLEVIS_PIN_TPM2 = "tpm2"
CLEVIS_KEY_THP = "thp"  # tang server thumbprint key
CLEVIS_KEY_URL = "url"  # tang server URL key
class ReportKey(Enum):
    """
    Report identifiers.
    Note: "managed_objects_report" is not a key recognized by stratisd.
    However, since the other constants are, and they are all used together,
    this type is defined with the other stratisd constants.
    """
    ENGINE_STATE = "engine_state_report"
    MANAGED_OBJECTS = "managed_objects_report"  # client-side pseudo-report (see note above)
    STOPPED_POOLS = "stopped_pools"
class PoolActionAvailability(IntEnum):
    """
    What category of interactions a pool is enabled for.

    Ordered: a higher value means the pool is more restricted.
    """

    fully_operational = 0  # pylint: disable=invalid-name
    no_ipc_requests = 1  # pylint: disable=invalid-name
    no_pool_changes = 2  # pylint: disable=invalid-name

    def pool_maintenance_error_codes(self):
        """
        Return the list of PoolMaintenanceErrorCodes for this availability.

        :rtype: list of PoolMaintenanceErrorCode
        """
        # Each restriction level at or below this one contributes its code.
        restrictions = (
            (PoolActionAvailability.no_ipc_requests,
             PoolMaintenanceErrorCode.NO_IPC_REQUESTS),
            (PoolActionAvailability.no_pool_changes,
             PoolMaintenanceErrorCode.NO_POOL_CHANGES),
        )
        return [code for level, code in restrictions if self >= level]
|
[
"amulhern@redhat.com"
] |
amulhern@redhat.com
|
3eab99e78874519237ee18fbd383413183909b24
|
92b7afd4c17088a63a38d80d2f981cf146eae988
|
/Chapter02/U02_Ex05_ConvertCtoF_Table.py
|
612668d6c17270152ff0932fe7173e0c27aaa9b4
|
[] |
no_license
|
billm79/COOP2018
|
6d3e5d9f8309646beab0078a2f84bb6fe30b18fc
|
6588c0ebfa932fbae7eec11c20270e4a8e969377
|
refs/heads/master
| 2020-03-28T02:00:28.959515
| 2019-05-13T17:51:05
| 2019-05-13T17:51:05
| 147,540,965
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 957
|
py
|
# U02_Ex05_ConvertCtoF_Table.py
#
# Author: Bill Montana
# Course: Coding for OOP
# Section: A3
# Date: 18 Nov 2017
# IDE: PyCharm Community Edition
#
# Assignment Info
# Exercise: 5
# Source: Python Programming
# Chapter: 2
#
# Program Description
# Computes and prints a table of Celsius temperatures and the Fahrenheit equivalents from 0°C to 100°C
#
# Algorithm (pseudocode)
# introduce program
# print table headings
# loop from 0 to 100 in increments of 10
# calculate °F from loop variable (°C)
# print results in table
def main():
    """Print a Celsius-to-Fahrenheit conversion table for 0-100 °C in 10° steps."""
    # Program introduction and table header.
    print('This program computes and prints a table of Celsius temperatures and the Fahrenheit equivalents from 0°C to 100°C.')
    print('\n{:^3}\t{:^5}'.format('°C', '°F'))
    print('{:^3}\t{:^5}'.format('---', '-----'))
    # One row per 10-degree step; °F = 1.8 * °C + 32.
    for celsius in (10 * step for step in range(11)):
        print('{:>3}\t{:>5.1f}'.format(celsius, 1.8 * celsius + 32))

main()
|
[
"bill.m79@student.parishepiscopal.org"
] |
bill.m79@student.parishepiscopal.org
|
cf77e4bc9a182a406270a6fd7cf2558350304f7c
|
d41d18d3ea6edd2ec478b500386375a8693f1392
|
/plotly/validators/scatterpolargl/unselected/marker/_opacity.py
|
3cb2d6a2455082c340af789b6b315dfff4f79427
|
[
"MIT"
] |
permissive
|
miladrux/plotly.py
|
38921dd6618650d03be9891d6078e771ffccc99a
|
dbb79e43e2cc6c5762251537d24bad1dab930fff
|
refs/heads/master
| 2020-03-27T01:46:57.497871
| 2018-08-20T22:37:38
| 2018-08-20T22:37:38
| 145,742,203
| 1
| 0
|
MIT
| 2018-08-22T17:37:07
| 2018-08-22T17:37:07
| null |
UTF-8
|
Python
| false
| false
| 500
|
py
|
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for ``scatterpolargl.unselected.marker.opacity``.

    Constrains values to the closed interval [0, 1] and tags the property
    as a style-type edit.
    """

    def __init__(
        self,
        plotly_name='opacity',
        parent_name='scatterpolargl.unselected.marker',
        **kwargs
    ):
        # Bounds and edit/role metadata are fixed for opacity; callers may
        # still pass additional validator options through **kwargs.
        super(OpacityValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            min=0,
            max=1,
            edit_type='style',
            role='style',
            **kwargs
        )
|
[
"adam.kulidjian@gmail.com"
] |
adam.kulidjian@gmail.com
|
57fa882d992e873136c6e65dba8a3c40fc606e37
|
95efc2300bd2936eb9b4ca8f9cda55764047f094
|
/django1/src/customlogin/urls.py
|
14fcc5ea0182abaa54ff28601abb0ce65e803934
|
[] |
no_license
|
gittaek/jeong
|
d207d6e41398803475aff82a49bea01e21a86901
|
20808cbb97daff79a4c0b4a017106519f99d919f
|
refs/heads/master
| 2020-04-21T23:11:17.202531
| 2019-02-10T03:20:57
| 2019-02-10T03:20:57
| 169,938,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 388
|
py
|
# Sub (app-level) URLConf.
# app_name: namespace grouping the URLs registered in this sub URLConf.
# urlpatterns: variable registering URL-to-view mappings as a list.
from django.urls import path
from .views import *
app_name = 'cl'  # URL namespace: reverse as 'cl:signup', 'cl:signin', 'cl:signout'
# Route each auth URL to its view (views are star-imported from .views).
urlpatterns = [
    path('signup/', signup, name= 'signup'),
    path('signin/', signin, name= 'signin'),
    path('signout/', signout, name= 'signout'),
]
|
[
"user@DESKTOP-37GULAI"
] |
user@DESKTOP-37GULAI
|
68618a7466a5c4c5db83220054f3f89a5f25af56
|
4c7914bf0eb52f2fe5dab70fa630a322a9449e05
|
/淘宝美食/spider_tb.py
|
979bf47a13e824f3edf29f2d47cb5b352420348d
|
[] |
no_license
|
xhongc/pythonCrawl
|
f334d737326a47782d2533c4db23734729f13099
|
a38e59496dd78b6e070ea6882043b1744190103e
|
refs/heads/master
| 2022-12-10T01:22:01.608193
| 2020-01-12T09:43:19
| 2020-01-12T09:43:22
| 93,115,695
| 4
| 5
| null | 2022-11-22T02:36:28
| 2017-06-02T01:47:22
|
Python
|
UTF-8
|
Python
| false
| false
| 3,288
|
py
|
import re
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import pymysql
from config import *
from multiprocessing import Pool
driver = webdriver.Chrome(executable_path='D:\work\chromedriver\chromedriver.exe')
wait = WebDriverWait(driver, 10)
# 链接MySQL数据库
conn = pymysql.connect(**db_config)
cursor = conn.cursor()
def search():
    """Open taobao.com, search for '美食' and return the pager's total text.

    Returns the text of the "total pages" element, e.g. "共 100 页,".
    Retries on TimeoutException; the retry's result is now propagated
    (the original called ``search()`` without ``return``, so a recovered
    retry still returned None to the caller).
    """
    try:
        driver.get('https://www.taobao.com/')
        # Renamed from `input` to avoid shadowing the builtin.
        search_box = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#q')))
        submit = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#J_TSearchForm > div.search-button > button')))
        search_box.send_keys('美食')
        submit.click()
        # Wait for and return the total-page-count element.
        total = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.total')))
        return total.text
    except TimeoutException:
        return search()  # bug fix: propagate the retried result
def next_page(page_num):
    """Jump to result page ``page_num`` via the pager input, then scrape it.

    Waits until the active-page indicator shows ``page_num`` before calling
    get_products(); retries itself on TimeoutException.
    """
    try:
        input = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > input')))
        submit = wait.until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > span.btn.J_Submit')))
        # Clear the page-number input box before typing the target page.
        input.clear()
        input.send_keys(page_num)
        submit.click()
        wait.until(EC.text_to_be_present_in_element(
            (By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > ul > li.item.active > span'), str(page_num)))
        get_products()
    except TimeoutException:
        next_page(page_num)
def get_products():
    """Parse every product card on the current result page and persist each one."""
    wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-itemlist > div > div')))
    # Grab the rendered page source once the item list is present.
    html = driver.page_source
    soup = BeautifulSoup(html, 'lxml')
    list1 = soup.findAll('div', {'data-category': 'auctions'})
    for each in list1:
        items = {}
        # NOTE(review): 'momeny' is a typo but matches the DB column name.
        items['title'] = each.find('div', 'row row-2 title').get_text().strip()
        items['momeny'] = each.find('div', 'price g_price g_price-highlight').strong.get_text()
        # [:-3] strips a 3-char suffix from the deal count — presumably "人付款"; confirm.
        items['people'] = each.find('div', 'deal-cnt').get_text()[:-3]
        items['name'] = each.find('a', 'shopname J_MouseEneterLeave J_ShopInfo').get_text().strip()
        save_products(items)
def save_products(items):
    """Insert one scraped product dict into the ``taobao`` table.

    Expects keys 'title', 'momeny', 'people', 'name'. DB errors are printed,
    not raised, so one bad row does not stop the crawl.
    """
    # Earlier JSON-file persistence, kept for reference:
    # with open('products.json','a',encoding='utf-8') as f:
    # f.write(json.dumps(content,ensure_ascii=False)+'\n')
    sql = "insert into taobao(title,momeny,people,name)VALUES (%s,%s,%s,%s)"
    try:
        cursor.execute(sql, (items['title'], items['momeny'], items['people'], items['name']))
        conn.commit()
    except pymysql.Error as e:
        print(e.args)
def main():
    """Drive the crawl: search once, then walk every result page.

    Always closes the browser, DB cursor and connection, even when the
    crawl fails part-way through.
    """
    try:
        total = search()
        # Extract the first integer from text like "共 100 页,".
        total = int(re.compile(r'.*?(\d+)').search(total).group(1))
        for i in range(1, total + 1):
            next_page(i)
    finally:
        driver.close()
        cursor.close()
        conn.close()  # bug fix: was `cnn.close()`, an undefined name (NameError)
if __name__ == '__main__':
    main()
|
[
"408737515@qq.com"
] |
408737515@qq.com
|
dc75718ca509b24cafd770b46279b39533e4dce4
|
1c74a2e075793e1d35c441518e2e138e14e26ea5
|
/Tree/124. 二叉树中的最大路径和.py
|
b4a70b63656b087e29f283c107cc12e1c831c361
|
[] |
no_license
|
Dawinia/LeetCode
|
1a385bfadbc4869c46dc1e9b8ca7656b77d746a0
|
e1dcc71ca657b42eb8eb15116697e852ef4a475a
|
refs/heads/master
| 2021-07-20T00:56:01.058471
| 2020-07-22T14:07:04
| 2020-07-22T14:07:04
| 197,305,126
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 616
|
py
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    """LeetCode 124: maximum path sum in a binary tree."""

    def __init__(self):
        # Best path sum seen so far; sentinel is far below any valid answer.
        self.ans = -1000000000

    def maxPathSum(self, root: TreeNode) -> int:
        self.traverse(root)
        return self.ans

    def traverse(self, root):
        """Return the best downward path sum starting at root; update self.ans."""
        if root is None:
            return 0
        # Negative subtree contributions are discarded (clamped to zero).
        gain_left = max(self.traverse(root.left), 0)
        gain_right = max(self.traverse(root.right), 0)
        # A full path may bend through this node, using both subtrees.
        candidate = gain_left + gain_right + root.val
        if candidate > self.ans:
            self.ans = candidate
        # Only one side can be extended upward to the parent.
        return root.val + max(gain_left, gain_right)
|
[
"dawinialo@163.com"
] |
dawinialo@163.com
|
c6705abcd1b38cd180cd1869f249ca60fb5c4516
|
ef187d259d33e97c7b9ed07dfbf065cec3e41f59
|
/work/atcoder/abc/abc017/C/answers/121328_akio0803.py
|
35e70dd74eb09b1c6b633e244a904471419d122e
|
[] |
no_license
|
kjnh10/pcw
|
847f7295ea3174490485ffe14ce4cdea0931c032
|
8f677701bce15517fb9362cc5b596644da62dca8
|
refs/heads/master
| 2020-03-18T09:54:23.442772
| 2018-07-19T00:26:09
| 2018-07-19T00:26:09
| 134,586,379
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 285
|
py
|
# Read N interval records over positions 1..M from stdin.
N, M = [int(_) for _ in input().split()]
# Difference array (imos technique); indices 0 and M+1 are sentinels.
res = [0] * (M + 2)
ans = 0  # sum of all scores
for _ in range(N):
    l, r, s = [int(_) for _ in input().split()]
    # Mark score s over the inclusive range [l, r] with difference entries.
    res[l] = res[l] + s
    res[r+1] = res[r+1] - s
    ans += s
# Prefix-sum pass converts difference marks into per-position totals.
for i in range(M+1):
    res[i+1] = res[i] + res[i+1]
# Answer: total score minus the cheapest position's covered score
# (positions 1..M) — presumably the one excluded position; confirm vs. statement.
print(ans - min(res[1:-1]))
|
[
"kojinho10@gmail.com"
] |
kojinho10@gmail.com
|
c2a251f706de272919eee0bf2b981f3e0def3bbe
|
17268419060d62dabb6e9b9ca70742f0a5ba1494
|
/pp/assert_grating_coupler_properties.py
|
e4623d5afe7b5f3da50f619ba7c002a9a0e798c3
|
[
"MIT"
] |
permissive
|
TrendingTechnology/gdsfactory
|
a19124423b12cbbb4f35b61f33303e9a012f82e5
|
c968558dba1bae7a0421bdf49dc192068147b776
|
refs/heads/master
| 2023-02-22T03:05:16.412440
| 2021-01-24T03:38:00
| 2021-01-24T03:38:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 990
|
py
|
from pp.component import Component
def assert_grating_coupler_properties(gc: "Component"):
    """Validate that a grating-coupler component exposes the expected metadata.

    Raises AssertionError when:
    - ``polarization`` is missing or not 'te'/'tm'
    - ``wavelength`` is missing or not in the open interval (500, 2000) nm

    Prints a warning (does not raise) when the 'W0' port is missing or its
    orientation is not 180 degrees.

    The annotation is a PEP 484 forward reference so this helper can be
    imported without the full ``pp`` package.
    """
    assert hasattr(
        gc, "polarization"
    ), f"{gc.name} does not have polarization attribute"
    assert gc.polarization in [
        "te",
        "tm",
    ], f"{gc.name} polarization should be 'te' or 'tm'"
    # bug fix: message previously read "... wavelength does not have wavelength attribute"
    assert hasattr(
        gc, "wavelength"
    ), f"{gc.name} does not have wavelength attribute"
    assert (
        500 < gc.wavelength < 2000
    ), f"{gc.name} wavelength {gc.wavelength} should be in nm"
    if "W0" not in gc.ports:
        print(f"grating_coupler {gc.name} should have a W0 port. It has {gc.ports}")
    if "W0" in gc.ports and gc.ports["W0"].orientation != 180:
        print(
            f"grating_coupler {gc.name} W0 port should have orientation = 180 degrees. It has {gc.ports['W0'].orientation}"
        )
if __name__ == "__main__":
    # Manual smoke test — requires the full pp package, not run on import.
    import pp
    c = pp.c.grating_coupler_elliptical_te()
    assert_grating_coupler_properties(c)
|
[
"noreply@github.com"
] |
TrendingTechnology.noreply@github.com
|
dfd15b290e3345001bd70e8dccef8397e16bb767
|
e56b63fa189d6c1e84eda135a41ae63d6177c10e
|
/setup.py
|
29e819082c2ce53fd58b560ba7c80ae49b497ba4
|
[] |
no_license
|
bibi21000/janitoo_scene
|
22ededfc79866a806b22770f8c347169407523cb
|
c9c8f6b58f5109c47ae302b1fbc58c39d1c35857
|
refs/heads/master
| 2021-01-17T18:10:49.644483
| 2016-06-22T22:10:42
| 2016-06-22T22:10:42
| 60,803,529
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,642
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup file of Janitoo
"""
__license__ = """
This file is part of Janitoo.
Janitoo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Janitoo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Janitoo. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sébastien GALLET aka bibi21000'
__email__ = 'bibi21000@gmail.com'
__copyright__ = "Copyright © 2013-2014-2015-2016 Sébastien GALLET aka bibi21000"
from os import name as os_name
from setuptools import setup, find_packages
from distutils.extension import Extension
from platform import system as platform_system
import glob
import os
import sys
from _version import janitoo_version
# Consume our custom "--debian-package" flag before setuptools parses argv.
DEBIAN_PACKAGE = "--debian-package" in sys.argv
sys.argv = [arg for arg in sys.argv if arg != "--debian-package"]
def data_files_config(res, rsrc, src, pattern):
    """Append ``(install_dir, [file, ...])`` pairs for *src* and every subdirectory.

    Each directory under ``src`` (including ``src`` itself, even if empty)
    yields one entry whose install dir mirrors its path relative to ``src``
    under ``rsrc``.

    ``pattern`` is accepted for interface compatibility but unused — the
    original never filtered by it either.

    Bug fix: the original recursed inside the full ``os.walk`` loop, so every
    depth level re-recursed into its children, producing duplicate entries
    whose install dirs dropped intermediate path components (e.g. a bogus
    ``rsrc/b`` entry for files in ``src/a/b``).
    """
    for root, dirs, fils in os.walk(src):
        rel = os.path.relpath(root, src)
        target = rsrc if rel == os.curdir else os.path.join(rsrc, rel)
        res.append((target, [os.path.join(root, fil) for fil in fils]))
data_files = []
data_files_config(data_files, 'docs','src/docs/','*')
setup(
name = 'janitoo_scene',
description = "Scene components",
long_description = "Scene components",
license = """
This file is part of Janitoo.
Janitoo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Janitoo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Janitoo. If not, see <http://www.gnu.org/licenses/>.
""",
author='Sébastien GALLET aka bibi2100 <bibi21000@gmail.com>',
author_email='bibi21000@gmail.com',
url='http://bibi21000.gallet.info/',
version = janitoo_version,
keywords = "scene",
zip_safe = False,
packages = find_packages('src', exclude=["scripts", "docs", "config"]),
package_dir = { '': 'src' },
include_package_data=True,
data_files = data_files,
install_requires=[
'janitoo',
'janitoo_factory',
],
dependency_links = [
'https://github.com/bibi21000/janitoo/archive/master.zip#egg=janitoo',
'https://github.com/bibi21000/janitoo_factory/archive/master.zip#egg=janitoo_factory',
],
entry_points = {
"janitoo.components": [
"scene.simple = janitoo_scenarios.component:make_simple_scene",
],
"janitoo.threads": [
"scene = janitoo_scenes.thread:make_scene",
],
},
)
|
[
"bibi21000@gmail.com"
] |
bibi21000@gmail.com
|
8283b7f68b360f419fd5896d84b4879f9089d354
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/kfwTnnJjo3SKG2pYx_3.py
|
e7ac90f207041dbf5f864dab33c4a2d7ea29af06
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,576
|
py
|
"""
Replace the numbers in a string with their binary form.
### Examples
replace_nums("I have 2 sheep.") ➞ "I have 10 sheep."
replace_nums("My father was born in 1974.10.25.") ➞ "My father was born in 11110110110.1010.11001."
replace_nums("10hell76o4 boi") ➞ "1010hell1001100o100 boi"
### Notes
* There are possibly two or more numbers in a single word (I do not recommend splitting the text at spaces, it surely won't help).
* Anything separates two numbers, even spaces ("2 2" --> "10 10").
"""
def replace_nums(string):
    """Replace every run of decimal digits in *string* with its binary form.

    Non-digit characters are left untouched; any character separates two
    numbers.

    Bug fix: the original replaced numbers with ``str.replace`` over the
    whole string, which corrupted longer numbers whose replaced text
    contained a shorter number — patched there with a hard-coded special
    case for "1974.10.25". A single left-to-right regex substitution
    converts each digit run exactly once and needs no patch.
    """
    import re
    return re.sub(r"\d+", lambda m: bin(int(m.group()))[2:], string)
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
b4822ca44563f6be4ba21dc3c949c510cffc59b8
|
bc167f434158921bcf2c678155c5cdfec1c9b0c9
|
/PI_code/simulator/behaviourGeneration/firstGenScripts_preyHunter/behav143.py
|
69e2c39d1de031eb531fff7484cfaf7bc99323d5
|
[] |
no_license
|
s0217391/DifferentProjects
|
6450efc89c64ecd21b86c705737e89e5c69433a6
|
7f4da153660817b6cbf72d2e823aa29c0c2f95a9
|
refs/heads/master
| 2021-01-17T02:58:46.219240
| 2015-05-26T22:45:46
| 2015-05-26T22:45:46
| 34,995,164
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 737
|
py
|
#!/usr/bin/python
import sys
def compute(prey):
    """Map a two-element numeric sequence ``prey`` to ``[prey[1], score]``.

    Behaviorally equivalent to the original generated script with all dead
    stores removed: the temp2/temp3/temp5/temp6 branches never reached the
    return value, which is ``prey[1]`` and the square of one product.
    """
    first, second = prey[0], prey[1]
    # Original temp0: remainder, guarded against modulo-by-zero.
    remainder = second % first if first != 0 else first
    total = second + first
    peak = max(total, first)
    # The only live branch: which operand feeds the product.
    mixed = remainder + second if second > first else total + first
    product = mixed * peak
    # Original: temp0 = -product; temp4 = temp0 * temp0 == product squared.
    return [second, product * product]
|
[
"i7674211@bournemouth.ac.uk"
] |
i7674211@bournemouth.ac.uk
|
9308ae4ebcf181f0896ffd022dd31f4ecff6f8df
|
f4c753c85b23014faa43f905aef817e8d493e187
|
/core/fill/fill_types.py
|
c35def3435f65e21e0c6c7c44920c1335e35f5ec
|
[
"MIT"
] |
permissive
|
bdrydyk/building_tool
|
61cbfe76af7b4af56ea714670609961efd809385
|
8da3d17d74591a556d597a4f360d3730d1ae4c1a
|
refs/heads/master
| 2020-06-25T16:56:58.726059
| 2019-07-21T17:51:26
| 2019-07-21T17:51:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,067
|
py
|
import bmesh
from mathutils import Vector, Matrix
from bmesh.types import BMEdge, BMVert
from ...utils import (
filter_geom,
calc_edge_median,
calc_face_dimensions,
filter_vertical_edges,
filter_horizontal_edges,
)
def fill_panel(bm, face, prop):
    """Create panels on face.

    Insets the face by ``prop.panel_border_size``, subdivides it into a
    ``panel_count_x`` x ``panel_count_y`` grid, insets each quad by half the
    panel margin, then pushes the panel verts out along the face normal by
    ``panel_depth``. No-op when both panel counts are zero.
    """
    if prop.panel_count_x + prop.panel_count_y == 0:
        return
    bmesh.ops.inset_individual(bm, faces=[face], thickness=prop.panel_border_size)
    quads = subdivide_face_into_quads(bm, face, prop.panel_count_x, prop.panel_count_y)
    bmesh.ops.inset_individual(bm, faces=quads, thickness=prop.panel_margin / 2)
    # Set-comprehension deduplicates verts shared between adjacent quads.
    bmesh.ops.translate(
        bm,
        verts=list({v for f in quads for v in f.verts}),
        vec=face.normal * prop.panel_depth,
    )
def fill_glass_panes(bm, face, prop):
    """Create glass panes on face.

    Subdivides the face into a ``pane_count_x`` x ``pane_count_y`` grid,
    insets each quad by ``pane_margin`` and recesses it inward (against the
    face normal) by ``pane_depth``. No-op when both pane counts are zero.
    """
    if prop.pane_count_x + prop.pane_count_y == 0:
        return
    quads = subdivide_face_into_quads(bm, face, prop.pane_count_x, prop.pane_count_y)
    bmesh.ops.inset_individual(bm, faces=quads, thickness=prop.pane_margin)
    for f in quads:
        bmesh.ops.translate(bm, verts=f.verts, vec=-f.normal * prop.pane_depth)
def fill_bar(bm, face, prop):
    """Create horizontal and vertical bars along a face.

    Places ``bar_count_x`` horizontal and ``bar_count_y`` vertical bars,
    evenly spaced, each built by duplicating the face scaled down to the bar
    width and extruding it to ``bar_depth``.
    """
    width, height = calc_face_dimensions(face)
    face_center = face.calc_center_median()
    # -- horizontal
    offset = height / (prop.bar_count_x + 1)
    for i in range(prop.bar_count_x):
        scale = (1, 1, prop.bar_width / height)
        position = Vector((face.normal * prop.bar_depth / 2)) + Vector(
            (0, 0, -height / 2 + (i + 1) * offset)
        )
        depth = -face.normal * prop.bar_depth / 2
        create_bar_from_face(bm, face, face_center, position, scale, depth)
    # -- vertical
    # eps nudges vertical bars slightly behind horizontal ones — presumably
    # to avoid coincident geometry where they cross; confirm.
    eps = 0.015
    offset = width / (prop.bar_count_y + 1)
    for i in range(prop.bar_count_y):
        scale = (prop.bar_width / width, prop.bar_width / width, 1)
        perp = face.normal.cross(Vector((0, 0, 1)))
        position = Vector((face.normal * ((prop.bar_depth / 2) - eps))) + perp * (
            -width / 2 + ((i + 1) * offset)
        )
        depth = -face.normal * ((prop.bar_depth / 2) - eps)
        create_bar_from_face(bm, face, face_center, position, scale, depth, True)
def fill_louver(bm, face, prop):
    """Create louvers from face.

    Optionally insets by ``louver_margin``, slices the face into an even
    number of horizontal strips, then turns every other strip (bottom-up)
    into an angled louver blade of depth ``louver_depth``.
    """
    normal = face.normal
    if prop.louver_margin:
        bmesh.ops.inset_individual(bm, faces=[face], thickness=prop.louver_margin)
    # Even segment count guarantees alternating blade/gap strips.
    segments = double_and_make_even(prop.louver_count)
    faces = subdivide_face_into_vertical_segments(bm, face, segments)
    faces.sort(key=lambda f: f.calc_center_median().z)
    louver_faces = faces[1::2]
    # -- scale to border
    for face in louver_faces:
        bmesh.ops.scale(
            bm,
            vec=(1, 1, 1 + prop.louver_border),
            verts=face.verts,
            space=Matrix.Translation(-face.calc_center_median()),
        )
    extrude_faces_add_slope(bm, louver_faces, normal, prop.louver_depth)
def subdivide_face_into_quads(bm, face, cuts_x, cuts_y):
    """Subdivide a face (quad) into a grid of ``(cuts_x+1) x (cuts_y+1)`` quads.

    Returns the list of faces touching the newly created inner edges.
    """
    v_edges = filter_vertical_edges(face.edges, face.normal)
    h_edges = filter_horizontal_edges(face.edges, face.normal)
    edges = []
    if cuts_x > 0:
        res = bmesh.ops.subdivide_edges(bm, edges=v_edges, cuts=cuts_x).get(
            "geom_inner"
        )
        edges.extend(filter_geom(res, BMEdge))
    if cuts_y > 0:
        # Second pass also cuts the inner edges created by the first pass.
        res = bmesh.ops.subdivide_edges(bm, edges=h_edges + edges, cuts=cuts_y).get(
            "geom_inner"
        )
        edges.extend(filter_geom(res, BMEdge))
    bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=0.01)
    return list({f for ed in edges for f in ed.link_faces})
def duplicate_face_translate_scale(bm, face, position, scale, scale_center):
    """Duplicate a face, scale it about ``scale_center`` and translate it.

    Returns the raw ``bmesh.ops.duplicate`` result dict (geometry under "geom").
    """
    ret = bmesh.ops.duplicate(bm, geom=[face])
    verts = filter_geom(ret["geom"], BMVert)
    # space shifts the origin so the scale pivots on scale_center.
    bmesh.ops.scale(bm, verts=verts, vec=scale, space=Matrix.Translation(-scale_center))
    bmesh.ops.translate(bm, verts=verts, vec=position)
    return ret
def extrude_edges_to_depth(bm, edges, depth):
    """Extrude edges only and translate the new geometry by ``depth``."""
    ext = bmesh.ops.extrude_edge_only(bm, edges=edges)
    bmesh.ops.translate(bm, verts=filter_geom(ext["geom"], BMVert), vec=depth)
def extrude_faces_add_slope(bm, faces, extrude_normal, extrude_depth):
    """Extrude faces and move each top edge back to form a wedge (slope)."""
    res = bmesh.ops.extrude_discrete_faces(bm, faces=faces)
    bmesh.ops.translate(
        bm,
        vec=extrude_normal * extrude_depth,
        verts=list({v for face in res["faces"] for v in face.verts}),
    )
    for face in res["faces"]:
        # Highest horizontal edge (by median z) is pushed back to the base plane.
        top_edge = max(
            filter_horizontal_edges(face.edges, face.normal),
            key=lambda e: calc_edge_median(e).z,
        )
        bmesh.ops.translate(bm, vec=-face.normal * extrude_depth, verts=top_edge.verts)
    bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=0.01)
def subdivide_face_into_vertical_segments(bm, face, segments):
    """Cut a face (quad) vertically into multiple horizontal strip faces.

    Returns the faces adjacent to the newly created inner edges.
    """
    res = bmesh.ops.subdivide_edges(
        bm, edges=filter_vertical_edges(face.edges, face.normal), cuts=segments
    ).get("geom_inner")
    return list({f for e in filter_geom(res, BMEdge) for f in e.link_faces})
def double_and_make_even(value):
    """Multiply ``value`` by 2, rounding the result up to the nearest even number."""
    doubled = value * 2
    # doubled % 2 is 0 when already even, otherwise the amount to round up.
    return doubled + doubled % 2
def create_bar_from_face(bm, face, median, position, scale, depth, vertical=False):
    """Create bar geometry from a face.

    Duplicates ``face`` scaled about ``median`` and moved to ``position``,
    then extrudes either its vertical or horizontal edges by ``depth`` to
    give the bar thickness.
    """
    duplicate = duplicate_face_translate_scale(bm, face, position, scale, median).get(
        "geom"
    )
    if vertical:
        edges = filter_vertical_edges(filter_geom(duplicate, BMEdge), face.normal)
    else:
        edges = filter_horizontal_edges(filter_geom(duplicate, BMEdge), face.normal)
    extrude_edges_to_depth(bm, edges, depth)
|
[
"karanjaichungwa@gmail.com"
] |
karanjaichungwa@gmail.com
|
6fe5f23bd91afed7a5a5a947bc2ecfc947a7495a
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/pfnet_chainer/chainer-master/examples/ptb/gentxt.py
|
c034207b2ef2a5e688211a486a4d0ed0e00d2277
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097
| 2019-12-12T08:49:07
| 2019-12-12T08:49:07
| 227,546,525
| 10
| 7
| null | 2022-12-19T02:53:01
| 2019-12-12T07:29:39
|
Python
|
UTF-8
|
Python
| false
| false
| 3,083
|
py
|
#!/usr/bin/env python
"""Example to generate text from a recurrent neural network language model.
This code is ported from following implementation.
https://github.com/longjie/chainer-char-rnn/blob/master/sample.py
"""
import argparse
import sys
import numpy as np
import six
import chainer
from chainer import cuda
import chainer.functions as F
import chainer.links as L
from chainer import serializers
import train_ptb
def main():
    """Generate text from a trained PTB RNN language model.

    Loads the vocabulary and a model snapshot saved by train_ptb.py, seeds
    the generator with --primetext, then emits --length words by sampling
    (or argmax when --sample <= 0) from the softmax over the next word.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--model', '-m', type=str, required=True,
                        help='model data, saved by train_ptb.py')
    parser.add_argument('--primetext', '-p', type=str, required=True,
                        default='',
                        help='base text data, used for text generation')
    parser.add_argument('--seed', '-s', type=int, default=123,
                        help='random seeds for text generation')
    parser.add_argument('--unit', '-u', type=int, default=650,
                        help='number of units')
    parser.add_argument('--sample', type=int, default=1,
                        help='negative value indicates NOT use random choice')
    parser.add_argument('--length', type=int, default=20,
                        help='length of the generated text')
    parser.add_argument('--gpu', type=int, default=-1,
                        help='GPU ID (negative value indicates CPU)')
    args = parser.parse_args()
    np.random.seed(args.seed)
    # xp is the array module: cupy on GPU, numpy on CPU.
    xp = cuda.cupy if args.gpu >= 0 else np
    # load vocabulary and build the inverse (id -> word) map
    vocab = chainer.datasets.get_ptb_words_vocabulary()
    ivocab = {}
    for c, i in vocab.items():
        ivocab[i] = c
    # should be same as n_units , described in train_ptb.py
    n_units = args.unit
    lm = train_ptb.RNNForLM(len(vocab), n_units, train=False)
    model = L.Classifier(lm)
    serializers.load_npz(args.model, model)
    if args.gpu >= 0:
        cuda.get_device(args.gpu).use()
        model.to_gpu()
    model.predictor.reset_state()
    primetext = args.primetext
    if isinstance(primetext, six.binary_type):
        primetext = primetext.decode('utf-8')
    if primetext in vocab:
        prev_word = chainer.Variable(xp.array([vocab[primetext]], xp.int32))
    else:
        print('ERROR: Unfortunately ' + primetext + ' is unknown.')
        exit()
    # NOTE(review): this pre-loop softmax result is overwritten on the first
    # loop iteration; it only advances the RNN state by one step.
    prob = F.softmax(model.predictor(prev_word))
    sys.stdout.write(primetext + ' ')
    for i in six.moves.range(args.length):
        prob = F.softmax(model.predictor(prev_word))
        if args.sample > 0:
            # Sample the next word id from the (re-normalized) distribution.
            probability = cuda.to_cpu(prob.data)[0].astype(np.float64)
            probability /= np.sum(probability)
            index = np.random.choice(range(len(probability)), p=probability)
        else:
            index = np.argmax(cuda.to_cpu(prob.data))
        if ivocab[index] == '<eos>':
            sys.stdout.write('.')
        else:
            sys.stdout.write(ivocab[index] + ' ')
        prev_word = chainer.Variable(xp.array([index], dtype=xp.int32))
    sys.stdout.write('\n')
if __name__ == '__main__':
    main()
|
[
"659338505@qq.com"
] |
659338505@qq.com
|
111e184e4618fc600d7bfced3ff6d93feb9e5217
|
975da64ee000833af92148b647ea1bea1deac6fe
|
/test/test_replace_phone_number_params.py
|
188e89fc12c3deeca09f0ee74c12b0cc356e1bbf
|
[] |
no_license
|
bitlayergit/API-SDK-python
|
41c84eadae31556fae818e636565723d3112aa2c
|
111b4eb9b725d7a8feb31b8e0af8bb2ee79dcda3
|
refs/heads/master
| 2021-07-06T08:51:56.984499
| 2017-09-28T15:37:20
| 2017-09-28T15:37:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 987
|
py
|
# coding: utf-8
"""
Phone.com API
This is a Phone.com api Swagger definition
OpenAPI spec version: 1.0.0
Contact: apisupport@phone.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.replace_phone_number_params import ReplacePhoneNumberParams
class TestReplacePhoneNumberParams(unittest.TestCase):
    """ ReplacePhoneNumberParams unit test stubs (swagger-codegen generated). """
    def setUp(self):
        # No fixtures required for the stub.
        pass
    def tearDown(self):
        pass
    def testReplacePhoneNumberParams(self):
        """
        Test ReplacePhoneNumberParams
        """
        # FIXME: construct object with mandatory attributes with example values
        #model = swagger_client.models.replace_phone_number_params.ReplacePhoneNumberParams()
        pass
if __name__ == '__main__':
    unittest.main()
|
[
"i.simevski@gmail.com"
] |
i.simevski@gmail.com
|
148d097216699d2a68a987361655c5f84bc8326b
|
248f56b1fb0cb20796e5a29736a37fa4106fa0bb
|
/2022KAKAOBLIND/solution3.py
|
ed3658e9ce984ff7a5c5eca0daef151660b3f2a1
|
[
"MIT"
] |
permissive
|
KOOKDONGHUN/programmers
|
b24d672c67180765daca3036c72600ba17e0bd50
|
87ac8fcc23b14b3293c37933e4b9bbf663792830
|
refs/heads/main
| 2023-08-15T21:44:04.462043
| 2021-10-02T05:35:23
| 2021-10-02T05:35:23
| 401,706,119
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,740
|
py
|
from datetime import datetime
import math
def solution(fees, records):
    """Compute per-car parking fees (2022 KAKAO BLIND "parking fee" problem).

    fees: [base minutes, base fee, unit minutes, unit fee].
    records: "HH:MM CAR IN|OUT" strings in chronological order; a car still
    inside at closing is billed until 23:59.
    Returns the fee list ordered by ascending car number.

    Rewritten from the original: the duplicated odd/even record-count
    branches, f-string dict keys and float datetime arithmetic are replaced
    with integer minute arithmetic; results are identical on valid inputs.
    """
    base_minutes, base_fee, unit_minutes, unit_fee = fees
    closing = 23 * 60 + 59  # 23:59 in minutes since midnight

    def to_minutes(clock):
        hours, minutes = clock.split(':')
        return int(hours) * 60 + int(minutes)

    entered = {}  # car -> entry time of the currently open visit
    parked = {}   # car -> accumulated parked minutes
    for record in records:
        clock, car, direction = record.split()
        if direction == 'IN':
            entered[car] = to_minutes(clock)
        else:
            parked[car] = parked.get(car, 0) + to_minutes(clock) - entered.pop(car)
    # Close any visit that never received an OUT record.
    for car, time_in in entered.items():
        parked[car] = parked.get(car, 0) + closing - time_in

    def fee(total_minutes):
        if total_minutes <= base_minutes:
            return base_fee
        extra = total_minutes - base_minutes
        return base_fee + math.ceil(extra / unit_minutes) * unit_fee

    return [fee(parked[car]) for car in sorted(parked)]
if __name__ == "__main__":
    # execute only if run as a script
    # Smoke runs using the problem statement's three examples.
    solution([180, 5000, 10, 600], ["05:34 5961 IN", "06:00 0000 IN", "06:34 0000 OUT", "07:59 5961 OUT", "07:59 0148 IN", "18:59 0000 IN", "19:09 0148 OUT", "22:59 5961 IN", "23:00 5961 OUT"])
    print('-'*38)
    solution([120, 0, 60, 591], ["16:00 3961 IN","16:00 0202 IN","18:00 3961 OUT","18:00 0202 OUT","23:58 3961 IN"])
    print('-' * 38)
    solution([1, 461, 1, 10], ["00:00 1234 IN"])
|
[
"dh3978@naver.com"
] |
dh3978@naver.com
|
c43a49ab156f7382d0fb726caf88d66387bbe7c9
|
184310f55b58e854dc3b6c58599ef99bc4c95739
|
/hujian_api/API_service/Templight/tt.py
|
f8dec3403639f1cdd677b97b36c29b6ed379a8ac
|
[] |
no_license
|
tanjijun1/Python_API
|
c8585821a627c399fea1ab31bb024be6b82dd3ab
|
3c4771875870ffe425d2d39fc28a50449b1752f2
|
refs/heads/master
| 2023-01-07T23:30:30.284433
| 2020-11-11T08:43:10
| 2020-11-11T08:43:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,203
|
py
|
import time
now = time.time()
print(now)
Content-Type: multipart/form-data; boundary=-----------13418483933
-----------13418483933
Content-Disposition: form-data; name="service_key"
-----------13418483933
Content-Disposition: form-data; name="device_sn"
-----------13418483933
Content-Disposition: form-data; name="service_code"
-----------13418483933
import sys
reload(sys)
sys.path.append('./python2.7/site-packages')
sys.path.append('./python2.7/site-packages/requests_toolbelt-0.8.0-py2.7.egg')
print
sys.path
import urllib2
import urllib
import cookielib
import json
import httplib
import re
import requests
import random
from requests_toolbelt import MultipartEncoder
# ---------------------------------------------------------------------------
# Upload a new application version (APK) to the NQSky MEAP manager console.
#
# Usage:
#   <script> deploy_name apk_name promptInfo versionDesc versionLargeNumber applications.id
#
# NOTE(review): the original script contained many bare ``print`` statements
# whose argument had been pushed onto the following line (a mangled
# Python 2 -> 3 conversion), so the script only ever printed blank lines.
# They are re-joined below as ``print(...)`` calls, which behave identically
# under Python 2 and 3 and restore the intended output.
# ---------------------------------------------------------------------------
if len(sys.argv) != 7:
    # Wrong number of arguments: print usage and exit.
    print(sys.argv[0] + ' ' + 'deploy_name' + ' ' + 'apk_name' + ' ' + 'promptInfo' + ' ' +
          'versionDesc' + ' ' + 'versionLargeNumber' + ' ' + 'applications.id')
    sys.exit()

# Positional arguments describing the version being uploaded.
deploy_name = sys.argv[1]          # display name of the new version
apk_name = sys.argv[2]             # path of the APK file to upload
promptInfo = sys.argv[3]           # short prompt text shown to end users
versionDesc = sys.argv[4]          # free-text version description
versionLargeNumber = sys.argv[5]   # version number string, e.g. "1.1"
applications = sys.argv[6]         # target applications.id on the server

# Random 10-character identifier; sample(seq, n) picks n distinct random
# elements from seq.  (Originally intended as a multipart boundary; it is
# only printed now.)  Renamed from ``id`` to avoid shadowing the builtin.
j = 10
boundary_id = ''.join(str(i) for i in random.sample(range(0, 11), j))
print(boundary_id)

# Use one session so the login cookie is reused by the upload request.
s = requests.session()
print(s.headers)
headers = {

    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Host': '10.4.160.88:8080',
    'Referer': 'http://10.4.160.88:8080/nqsky-meap-manager/index',
}

login_url = 'http://10.4.160.88:8080/nqsky-meap-manager/login'
data = {'csrf': '', '_csrf_header': '', 'userName': 'admin', 'password': '1'}
response = s.post(login_url, data=data, headers=headers)

# Fetch the application list page (keeps parity with a browser session).
url = 'http://10.4.160.88:8080/nqsky-meap-manager/main/applications/applications/list'
r = s.get(url, headers=headers)
r = r.text

print(headers)

uploadurl = 'http://10.4.160.88:8080/nqsky-meap-manager/main/applications/appVersion/save'
# The snapshot image/file/name fields expect a JSON-encoded list of 6 slots.
arr1 = ['', '', '', '', '', '']
jsonstr = json.dumps(arr1)

m = MultipartEncoder(
    fields={
        "versionName": (None, deploy_name),
        "deviceType": (None, "1"),
        "status": (None, "1"),
        "versionSystem": (None, "1.5"),
        "platformType": (None, "1"),
        "promptInfo": (None, promptInfo),
        "versionDesc": (None, versionDesc),
        "versionLargeNumber": (None, versionLargeNumber),
        # NOTE(review): this sends the *literal string* "apk_name"; the
        # variable ``apk_name`` was presumably intended — confirm against
        # the server-side API before changing.
        "versionLargeFile": "apk_name",
        "largeFile": (apk_name, open(apk_name, 'rb'), 'application/octet-stream'),
        "enforceStatus": (None, "1"),
        "applications.id": (None, applications),
        "auditStatus": (None, "0"),
        "appOrder": (None, "2"),
        "technologyType": (None, "3"),
        "snapshotImg": (None, jsonstr),
        "snapshotFile": (None, jsonstr),
        "snapshotName": (None, jsonstr)
    }
)
print(m)
response = s.post(uploadurl, data=m, headers={'Content-Type': m.content_type})
print('------------------------------------------------------')
print(response)
print(response.url)
print(response.status_code)
if response.status_code == 200:
    print('deploy success')
else:
    print('deploy failed')
|
[
"1065913054@qq.com"
] |
1065913054@qq.com
|
b5892daa2cf40600c3c171d16b4d33890e637c5d
|
3e381dc0a265afd955e23c85dce1e79e2b1c5549
|
/hi-A3/not_hesaplama.py
|
57a517a822a6edc22adf30f92c4369c4d4857351
|
[] |
no_license
|
serkancam/byfp2-2020-2021
|
3addeb92a3ff5616cd6dbd3ae7b2673e1a1a1a5e
|
c67206bf5506239d967c3b1ba75f9e08fdbad162
|
refs/heads/master
| 2023-05-05T04:36:21.525621
| 2021-05-29T11:56:27
| 2021-05-29T11:56:27
| 322,643,962
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 773
|
py
|
# Compute per-course and overall grade averages from "notlar.txt".
# Each line has the form "<course name>: <grade> <grade> ...".
with open(file="notlar.txt", mode="r", encoding="utf-8") as dosya:
    metin = dosya.read()

genel_toplam = 0.0  # running sum of every grade in the file
genel_adet = 0      # total number of grades seen
for satir in metin.split("\n"):
    parcalar = satir.split(":")
    ders_adi = parcalar[0].strip()
    notlar = parcalar[1].strip().split(" ")
    ders_toplam = 0.0
    for nt in notlar:
        deger = float(nt)
        ders_toplam = ders_toplam + deger
        genel_toplam = genel_toplam + deger
        genel_adet = genel_adet + 1
    print(ders_adi, "ortalamasi=", round(ders_toplam / len(notlar), 2))
print("dönem ortalaması=", round(genel_toplam / genel_adet, 2))
"""
Matematik ortalaması= 75
Türkçe ortalaması=87.5
Sosyal bilgiler ortalaması=95
Fen bilimleri ortalaması=97.5
İngilizce ortalaması=82.5
Dönem ortalaması=?
"""
|
[
"sekocam@gmail.com"
] |
sekocam@gmail.com
|
56364384fadab6ba2a8360d6af96ab70e100b200
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_006/ch3_2020_03_09_20_04_20_580816.py
|
d72a113c6ad8ecb6326c442c0399dc5570cc0f35
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 134
|
py
|
def calcula_gaussiana(x, mi, sigma):
    """Return the Gaussian (normal) pdf with mean ``mi`` and standard
    deviation ``sigma`` evaluated at ``x``.

    Fixes the original, which had unbalanced parentheses (a SyntaxError)
    and was missing the ``*`` in ``-0.5 * (...)`` — ``-0.5(...)`` would
    have tried to *call* the float 0.5 even if it had parsed.
    """
    # 1 / (sigma * sqrt(2*pi)) — normalization factor of the pdf.
    coeficiente = 1 / (sigma * ((2 * math.pi) ** 0.5))
    expoente = -0.5 * (((x - mi) / sigma) ** 2)
    resultado = coeficiente * (math.e ** expoente)
    return resultado
|
[
"you@example.com"
] |
you@example.com
|
1b7ee83080c836f6c279d09bbbe341b44e72147b
|
8dcd3ee098b4f5b80879c37a62292f42f6b2ae17
|
/venv/Lib/site-packages/win32/Demos/CopyFileEx.py
|
4aa24f877a44e08b33f9917690f24b5da1da55db
|
[] |
no_license
|
GregVargas1999/InfinityAreaInfo
|
53fdfefc11c4af8f5d2b8f511f7461d11a3f7533
|
2e4a7c6a2424514ca0ec58c9153eb08dc8e09a4a
|
refs/heads/master
| 2022-12-01T20:26:05.388878
| 2020-08-11T18:37:05
| 2020-08-11T18:37:05
| 286,821,452
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,164
|
py
|
import os
import win32api
import win32file
def ProgressRoutine(TotalFileSize, TotalBytesTransferred, StreamSize, StreamBytesTransferred,
                    StreamNumber, CallbackReason, SourceFile, DestinationFile, Data):
    """Progress callback handed to win32file.CopyFileEx.

    Prints the caller-supplied context object (``Data`` — here a description
    string) and the raw progress values, then tells CopyFileEx to continue.
    """
    print(Data)
    print(TotalFileSize, TotalBytesTransferred, StreamSize, StreamBytesTransferred, StreamNumber, CallbackReason,
          SourceFile, DestinationFile)
    # Returning PROGRESS_STOP instead would cancel the copy, as the
    # commented-out size check below illustrates.
    ##if TotalBytesTransferred > 100000:
    ##    return win32file.PROGRESS_STOP
    return win32file.PROGRESS_CONTINUE
# Create source and destination temp-file names in the system temp directory.
temp_dir = win32api.GetTempPath()
fsrc = win32api.GetTempFileName(temp_dir, 'cfe')[0]
fdst = win32api.GetTempFileName(temp_dir, 'cfe')[0]
print(fsrc, fdst)
# Fill the source file with bulk data so the progress callback fires.
f = open(fsrc, 'w')
f.write('xxxxxxxxxxxxxxxx\n' * 32768)
f.close()
## add a couple of extra data streams
# "file:stream" syntax writes NTFS alternate data streams; CopyFileEx
# reports each stream separately via StreamNumber in the callback.
f = open(fsrc + ':stream_y', 'w')
f.write('yyyyyyyyyyyyyyyy\n' * 32768)
f.close()
f = open(fsrc + ':stream_z', 'w')
f.write('zzzzzzzzzzzzzzzz\n' * 32768)
f.close()
operation_desc = 'Copying ' + fsrc + ' to ' + fdst
# Restartable copy; Data is passed through unchanged to ProgressRoutine.
win32file.CopyFileEx(fsrc, fdst, ProgressRoutine, Data=operation_desc, Cancel=False,
                     CopyFlags=win32file.COPY_FILE_RESTARTABLE, Transaction=None)
|
[
"44142880+GregVargas1999@users.noreply.github.com"
] |
44142880+GregVargas1999@users.noreply.github.com
|
55fd8f6731ac12dc4c43cec0146a397ec6063002
|
9028516ff0b2d95b8000b9fc4c44c29aa73c926c
|
/qa/rpc-tests/listtransactions.py
|
2aa4910074d08c894f8f572dd90a502a1019280f
|
[
"MIT"
] |
permissive
|
lycion/TripOne
|
a9e546eac9ad6179c0b6bd4f868162f70930b6ac
|
c6ae7d9163ef4095fe0e143d26f3311182551147
|
refs/heads/master
| 2020-03-28T22:29:06.119551
| 2018-09-18T06:07:06
| 2018-09-18T06:07:06
| 149,236,186
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,913
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import test_framework.loginit
# Exercise the listtransactions API
import pdb
from test_framework.test_framework import TriponeTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from io import BytesIO
def txFromHex(hexstring):
    """Deserialize a hex-encoded serialized transaction into a CTransaction."""
    raw_stream = BytesIO(hex_str_to_bytes(hexstring))
    parsed_tx = CTransaction()
    parsed_tx.deserialize(raw_stream)
    return parsed_tx
class ListTransactionsTest(TriponeTestFramework):
    """Functional test exercising the wallet's ``listtransactions`` and
    ``listtransactionsfrom`` RPC calls across a 4-node network."""
    def setup_nodes(self):
        """Start 4 nodes with mocked time so results are deterministic."""
        enable_mocktime()
        return start_nodes(4, self.options.tmpdir)
    def run_test(self):
        """Entry point: run both RPC scenarios in order."""
        self.test_listtransactionsfrom()
        self.test_listtransactions()
    def test_listtransactionsfrom(self):
        """Check listtransactionsfrom(account, count, from) paging semantics."""
        # Simple send, 0 to 1:
        self.sync_all()
        # Record the current end of node 2's transaction list so later
        # queries can page from a known starting position.
        tmp = self.nodes[2].listtransactionsfrom("*", 10000, 0)
        curpos = len(tmp)
        txid = self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.1)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_blocks()
        # Basic positive test
        tmp = self.nodes[2].listtransactionsfrom("*", 1, curpos)
        assert len(tmp) == 1
        assert tmp[0]["txid"] == txid
        tmp = self.nodes[2].listtransactionsfrom("*", 10, curpos)
        assert len(tmp) == 1
        # Negative tests
        # test beyond end of tx list
        tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos + 100)
        assert(len(tmp) == 0)
        # test bad input values (negative count / negative start must raise)
        try:
            tmp = self.nodes[2].listtransactionsfrom("*", -1, curpos)
            assert 0
        except JSONRPCException:
            pass
        try:
            tmp = self.nodes[2].listtransactionsfrom("*", 100, -1)
            assert 0
        except JSONRPCException:
            pass
        # test multiple rows
        curpos += 1
        txidsA = [self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.2), self.nodes[2].sendtoaddress(
            self.nodes[3].getnewaddress(), 0.3), self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.4)]
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_blocks()
        tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos)
        assert len(tmp) == 3
        assert tmp[0]["txid"] == txidsA[0]
        assert tmp[1]["txid"] == txidsA[1]
        assert tmp[2]["txid"] == txidsA[2]
        # Unconfirmed sends (no block generated yet) must also be listed,
        # appearing after the confirmed txidsA entries.
        txidsB = [self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.5), self.nodes[2].sendtoaddress(
            self.nodes[3].getnewaddress(), 0.6), self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.7)]
        tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos)
        assert len(tmp) == 6
        assert tmp[0]["txid"] == txidsA[0]
        assert tmp[1]["txid"] == txidsA[1]
        assert tmp[2]["txid"] == txidsA[2]
        assert tmp[3]["txid"] == txidsB[0]
        assert tmp[4]["txid"] == txidsB[1]
        assert tmp[5]["txid"] == txidsB[2]
        # test when I advance to the end, I get nothing
        curpos += len(tmp)
        tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos)
        assert tmp == []
    def test_listtransactions(self):
        """Check listtransactions categories, amounts and accounts for plain
        sends, self-sends, sendmany fan-out, and watch-only addresses."""
        # Simple send, 0 to 1:
        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
        self.sync_all()
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid": txid},
                            {"category": "send", "account": "", "amount": Decimal("-0.1"), "confirmations": 0})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"txid": txid},
                            {"category": "receive", "account": "", "amount": Decimal("0.1"), "confirmations": 0})
        # mine a block, confirmations should change:
        self.nodes[0].generate(1)
        self.sync_all()
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid": txid},
                            {"category": "send", "account": "", "amount": Decimal("-0.1"), "confirmations": 1})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"txid": txid},
                            {"category": "receive", "account": "", "amount": Decimal("0.1"), "confirmations": 1})
        # send-to-self shows up as both a send and a receive entry:
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid": txid, "category": "send"},
                            {"amount": Decimal("-0.2")})
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid": txid, "category": "receive"},
                            {"amount": Decimal("0.2")})
        # sendmany from node1: twice to itself (0.22, 0.44), twice to node0 (0.11, 0.33):
        send_to = {self.nodes[0].getnewaddress(): 0.11,
                   self.nodes[1].getnewaddress(): 0.22,
                   self.nodes[0].getaccountaddress("from1"): 0.33,
                   self.nodes[1].getaccountaddress("toself"): 0.44}
        txid = self.nodes[1].sendmany("", send_to)
        self.sync_all()
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category": "send", "amount": Decimal("-0.11")},
                            {"txid": txid})
        assert_array_result(self.nodes[0].listtransactions(),
                            {"category": "receive", "amount": Decimal("0.11")},
                            {"txid": txid})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category": "send", "amount": Decimal("-0.22")},
                            {"txid": txid})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category": "receive", "amount": Decimal("0.22")},
                            {"txid": txid})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category": "send", "amount": Decimal("-0.33")},
                            {"txid": txid})
        assert_array_result(self.nodes[0].listtransactions(),
                            {"category": "receive", "amount": Decimal("0.33")},
                            {"txid": txid, "account": "from1"})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category": "send", "amount": Decimal("-0.44")},
                            {"txid": txid, "account": ""})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category": "receive", "amount": Decimal("0.44")},
                            {"txid": txid, "account": "toself"})
        # Watch-only: import a 1-of-1 multisig redeem script on node 0 and
        # verify its receive only appears when includeWatchonly is requested.
        multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
        txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
        self.nodes[1].generate(1)
        self.sync_all()
        assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
        assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
                            {"category": "receive", "amount": Decimal("0.1")},
                            {"txid": txid, "account": "watchonly"})
if __name__ == '__main__':
    # Standard test-framework entry point when run directly as a script.
    ListTransactionsTest().main()
def Test():
    """Convenience wrapper for running ListTransactionsTest interactively
    with full debug logging enabled."""
    config = {
        "debug": ["all"],
        "blockprioritysize": 2000000  # we don't want any transactions rejected due to insufficient fees...
    }
    # "--tmpdir=/ramdisk/test", "--nocleanup", "--noshutdown"
    runner = ListTransactionsTest()
    runner.main([], config, None)
|
[
"lycion@gmail.com"
] |
lycion@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.