blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e2d27aa8e45e1d0028a1e9ed43e9f10c484a899
|
8d1d1e7677e1a18c00fb295971211d4e29d10896
|
/vocal_synthesis/experiments/16mar_minimalist2.py
|
8a0371d7ae056b62551b05945627504c8a9a062d
|
[] |
no_license
|
christopher-beckham/ift6266h16
|
8296d1529f6ce3e209af371283f816a4c6d63ea9
|
f141fb0a320c20c2c7b43b46f06b1c68cde183f0
|
refs/heads/master
| 2021-01-10T13:38:40.733180
| 2016-04-17T02:22:52
| 2016-04-17T02:22:52
| 49,399,600
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,089
|
py
|
import numpy as np
from scipy.io import wavfile
import os
import sys
#os.chdir("..")
sys.path.append(os.pardir)
#sys.stderr.write("current working directory: %s\n" % os.getcwd())
import cPickle as pickle
from lasagne.updates import *
import rnn_experiment as experiment

if __name__ == "__main__":
    # Train the minimalist RNN configuration on a pre-pickled dataset.
    # Usage: python 16mar_minimalist2.py <in_pkl> <out_pkl>
    # e.g. 1000_60sec.pkl
    in_pkl = sys.argv[1]
    out_pkl = sys.argv[2]
    # FIX: pickles must be read in binary mode; text mode can corrupt the
    # byte stream on platforms that translate line endings.
    with open(in_pkl, "rb") as f:
        dat = pickle.load(f)
    # dat[0] appears to hold the (train, valid, test) split -- confirm
    # against the script that produced the pickle.
    X_train, X_valid, X_test = dat[0]
    sys.stderr.write("X_train shape = %s\n" % str(X_train.shape))
    sys.stderr.write("X_valid shape = %s\n" % str(X_valid.shape))
    sys.stderr.write("X_test shape = %s\n" % str(X_test.shape))
    # Hyper-parameters plus data, handed to the experiment driver.
    args = dict()
    args["seed"] = 0
    args["batch_size"] = 16
    args["learning_rate"] = 0.01
    args["momentum"] = 0.9
    args["num_epochs"] = 2000
    args["X_train"] = X_train
    args["X_valid"] = X_valid
    args["X_test"] = X_test
    args["update_method"] = rmsprop  # from lasagne.updates star import
    args["out_pkl"] = out_pkl
    args["config"] = "../configurations/19feb_testing_d_minimalist2.py"
    experiment.train(args)
|
[
"chrispy645@gmail.com"
] |
chrispy645@gmail.com
|
3ae0f1c95796e57b71290c1ea4593981b8eaa8cc
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_105/ch14_2019_08_28_18_56_07_920001.py
|
274be80f2e6a4a400ac2bbcb7a5f2f8276dbfe22
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 204
|
py
|
# Function to compute the volume of a sphere
import math


def calcula_volume_da_espera(raio):
    """Return the volume of a sphere of radius ``raio`` (4/3 * pi * r^3)."""
    return (4 / 3) * math.pi * raio ** 3


r = 5
x = calcula_volume_da_espera(r)
print(x)
|
[
"you@example.com"
] |
you@example.com
|
23025f36549d15efc64fc8f5fc60643bf064f03f
|
a08225934c425be313a12975c9563a72ded58be6
|
/round668/ansbfsbro.py
|
d92b9b43907213e629b22639d82c6f148de5a3cc
|
[] |
no_license
|
marcus-aurelianus/codeforce
|
27c966554dee9986f23fb2925bd53e6cceb8b9e9
|
4764df151ade7806e32b6c88283a2de946f99e16
|
refs/heads/master
| 2023-03-18T09:30:55.042594
| 2021-03-12T18:14:08
| 2021-03-12T18:14:08
| 231,387,022
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,649
|
py
|
from collections import deque
import sys
input = sys.stdin.readline
def get_diameter(tree):
    """Return (diameter, path) of an unrooted tree given as an adjacency list.

    Classic double sweep: the vertex u farthest from an arbitrary start is one
    endpoint of a diameter; the vertex v farthest from u is the other.
    """
    u, _, _ = _dfs(0, tree)
    v, diam, dist = _dfs(u, tree)
    path = [v]
    # Walk back from v toward u, always stepping to a neighbour exactly one
    # unit closer to u, collecting the diameter path.
    while v != u:
        for nxt_v in tree[v]:
            if 1 + dist[nxt_v] == dist[v]:
                path.append(nxt_v)
                v = nxt_v
                break
    return diam, path
def _dfs(start, tree):
n = len(tree)
dist = [-1] * n
dist[start] = 0
stack = [start]
while stack:
v = stack.pop()
for nxt_v in tree[v]:
if dist[nxt_v] != -1:
continue
dist[nxt_v] = dist[v] + 1
stack.append(nxt_v)
max_d = max(dist)
return dist.index(max_d), max_d, dist
def ab(a, b):
    """Return the BFS hop-distance from vertex `a` to vertex `b`.

    Operates on the module-level adjacency list `tree` built by the driver
    loop below.  FIX: the vertex count is now derived from len(tree) instead
    of the separate module-level `n`, removing one hidden global dependency.
    """
    INF = 10 ** 6
    visited = [INF] * len(tree)
    visited[a] = 0
    q = deque([a])
    while q:
        v = q.popleft()
        for nxt_v in tree[v]:
            # First time a vertex is reached is its true BFS distance.
            if visited[v] + 1 < visited[nxt_v]:
                visited[nxt_v] = visited[v] + 1
                q.append(nxt_v)
    return visited[b]
# Driver: t independent test cases on stdin.  Looks like the pursuit game
# "Tree Tag" (Alice at a with move range da, Bob at b with range db) --
# confirm against the original problem statement.
t = int(input())
for _ in range(t):
    n, a, b, da, db = map(int, input().split())
    edges = [list(map(int, input().split())) for i in range(n - 1)]
    a -= 1
    b -= 1
    # If Alice's reach is at least half of Bob's, Bob can never jump past her.
    if da * 2 >= db:
        print("Alice")
        continue
    tree = [[] for i in range(n)]
    for u, v in edges:
        u -= 1
        v -= 1
        tree[u].append(v)
        tree[v].append(u)
    # Immediate capture if Bob starts within Alice's range.
    distance = ab(a, b)
    if distance <= da:
        print("Alice")
        continue
    # Otherwise Bob wins iff the tree is long enough (diameter > 2*da).
    d, _ = get_diameter(tree)
    if d >= da*2+1:
        print("Bob")
    else:
        print("Alice")
|
[
"37787424+marcus-aurelianus@users.noreply.github.com"
] |
37787424+marcus-aurelianus@users.noreply.github.com
|
82fe2175ec04d5bd0ac591bc2dca2851cd2e62d4
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_088/ch50_2020_09_30_20_44_23_062380.py
|
2fc8c9a8fab5b14828d3b554d8252e61a4429570
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 222
|
py
|
def junta_nome_sobrenome(nome, sobrenome):
    """Pairwise-join first names and surnames into full-name strings.

    Iterates both lists in lockstep, stopping at the shorter one,
    and returns a list of "nome sobrenome" strings.

    BUG FIX: the original called juncao.append(nome[i], sobrenome[j]) --
    list.append takes exactly one argument, so every call raised TypeError.
    Joining with a space is the assumed intent of the exercise -- confirm.
    """
    juncao = []
    for nm, sb in zip(nome, sobrenome):
        juncao.append(nm + " " + sb)
    return juncao
|
[
"you@example.com"
] |
you@example.com
|
94fce4febd1a1ec48c9220916fc4081efd807f64
|
3955c3f367a3a60f8602dcb4609faec9898438bb
|
/test/test_retention_strategy_description.py
|
cac1211511a84328ecf682bb3d91e58adf7feb4c
|
[
"Apache-2.0"
] |
permissive
|
MinhKMA/graylog.py
|
e89c34defa5422d59d0a501355058f5eb2dfe68c
|
3118f4a49c91c2cbbd660523b0ab99e56fbfd861
|
refs/heads/master
| 2021-05-06T21:03:06.946509
| 2016-09-23T04:31:13
| 2016-09-23T04:31:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,474
|
py
|
# coding: utf-8
"""
No descripton provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.1.1+01d50e5
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import graylog
from graylog.rest import ApiException
from graylog.models.retention_strategy_description import RetentionStrategyDescription
class TestRetentionStrategyDescription(unittest.TestCase):
    """Unit test stubs for RetentionStrategyDescription (Swagger-generated)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testRetentionStrategyDescription(self):
        """Smoke test: the model class can be instantiated."""
        model_cls = graylog.models.retention_strategy_description.RetentionStrategyDescription
        model = model_cls()


if __name__ == '__main__':
    unittest.main()
|
[
"on99@users.noreply.github.com"
] |
on99@users.noreply.github.com
|
2c0bf2128c76702e7d8830f8120c27fde6389526
|
2d064dfb4244f5b915c3b02de9594f8e15beb6d0
|
/Other Projects/RamanShiftNiller.py
|
e61d0127378fbfb4a711b9c2b17186a3aaa9a182
|
[] |
no_license
|
ArunShishodia/Smaller-Programs
|
24937ff571a27b5e0c634bf235fb175794986086
|
318ee830188c526921083a5f9aabd41504e055a2
|
refs/heads/master
| 2023-08-24T11:28:54.234711
| 2021-10-22T17:29:03
| 2021-10-22T17:29:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 565
|
py
|
def print_to_file():
    """Interactively compute Raman shift corrections, optionally saving to file.

    Repeatedly reads two mode values, prints (right - left) / 2, and offers
    to write the last result (tagged with an angle) to Raman_correction.txt.
    Type "stop" at the first prompt to quit.

    BUG FIX: the original looped on `in1 != "stop"` but converted the input
    to float *before* the comparison, so typing "stop" crashed with
    ValueError and the loop could never terminate normally.
    """
    result = 0.0
    while True:
        raw = input("Enter the right hand side mode: ")
        if raw.strip().lower() == "stop":
            break
        in1 = float(raw)
        in2 = float(input("Enter the left hand side mode: "))
        result = (in1 - in2) / 2
        print("The result is ", result)
        # NOTE(review): the stripped indentation makes it ambiguous whether
        # the file prompt sat inside or after the loop; per-result prompting
        # is assumed here -- confirm against the original file.
        in_str = input("Do you want to print to file? y/n ")
        if in_str == "y":
            with open("Raman_correction.txt", "w+") as f:
                angle = input("Please input the angle: ")
                f.write("\n" + angle + ": " + str(result) + "\n")


if __name__ == "__main__":
    print_to_file()
|
[
"martenscheuck@gmail.com"
] |
martenscheuck@gmail.com
|
5f24281f18439e98042de1cbc4bfec9f0b491c3d
|
d7aea8c5589c4d752e075244aab92d245ad98b4e
|
/densenetocr/core.py
|
78633d70b458b00a6e5517337dfd7df40377835c
|
[] |
no_license
|
fendaq/text-detection-1
|
9da5bc131c89f465f4dc64924bff4765253b6bdd
|
fedbfa1ad02da6b98cb154eeaba32692d40ec672
|
refs/heads/master
| 2020-04-05T17:47:20.221900
| 2018-11-11T04:54:26
| 2018-11-11T04:54:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,750
|
py
|
import json
import keras.backend as K
import numpy as np
from PIL import Image
from keras import Input, Model
from keras.layers import Conv2D, BatchNormalization, Activation, Dropout, AveragePooling2D, ZeroPadding2D, Permute, \
TimeDistributed, Flatten, Dense, Lambda
from keras.layers.merge import concatenate
from keras.optimizers import Adam
from keras.regularizers import l2
from keras.utils.multi_gpu_utils import multi_gpu_model
from densenetocr.data_loader import DataLoader
def _dense_block(x, nb_layers, nb_filter, growth_rate, dropout_rate=0.2, weight_decay=1e-4):
    """Stack `nb_layers` conv blocks, concatenating each output (DenseNet style).

    Returns the final tensor and the updated channel count.
    """
    for _ in range(nb_layers):
        branch = _conv_block(x, growth_rate, dropout_rate, weight_decay)
        x = concatenate([x, branch])
        nb_filter += growth_rate
    return x, nb_filter
def _conv_block(input, growth_rate, dropout_rate=None, weight_decay=1e-4):
    """BN -> ReLU -> 3x3 conv (+ optional dropout); returns `growth_rate` channels."""
    out = BatchNormalization(epsilon=1.1e-5)(input)
    out = Activation('relu')(out)
    out = Conv2D(growth_rate, (3, 3), kernel_initializer='he_normal', padding='same')(out)
    if dropout_rate:
        out = Dropout(dropout_rate)(out)
    return out
def _transition_block(input, nb_filter, dropout_rate=None, pooltype=1, weight_decay=1e-4):
    """BN -> ReLU -> 1x1 conv, then average-pool per `pooltype`.

    pooltype 2 halves both dims; 1 pads width then pools with stride (2, 1);
    3 pools with stride (2, 1) without padding.  Returns (tensor, nb_filter).
    """
    out = BatchNormalization(epsilon=1.1e-5)(input)
    out = Activation('relu')(out)
    out = Conv2D(nb_filter, (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False,
                 kernel_regularizer=l2(weight_decay))(out)
    if dropout_rate:
        out = Dropout(dropout_rate)(out)
    if pooltype == 2:
        out = AveragePooling2D((2, 2), strides=(2, 2))(out)
    elif pooltype == 1:
        out = ZeroPadding2D(padding=(0, 1))(out)
        out = AveragePooling2D((2, 2), strides=(2, 1))(out)
    elif pooltype == 3:
        out = AveragePooling2D((2, 2), strides=(2, 1))(out)
    return out, nb_filter
def _ctc_loss(args):
    """Unpack Lambda-layer inputs and delegate to Keras' CTC batch cost."""
    y_true, y_out, in_len, lab_len = args
    return K.ctc_batch_cost(y_true, y_out, in_len, lab_len)
class DenseNetOCR:
    """Variable-width DenseNet + CTC model for text-line OCR (Keras).

    `base_model` maps an image to per-timestep class probabilities (used for
    inference); `model` wraps it with the CTC loss inputs for training;
    `parallel_model` is the (optionally multi-GPU) compiled training model.
    """

    def __init__(self,
                 num_classes,
                 lr=0.0005,
                 image_height=32,
                 image_channels=1,
                 maxlen=50,
                 dropout_rate=0.2,
                 weight_decay=1e-4,
                 filters=64,
                 weight_path=None,
                 num_gpu=1):
        # Width is None so text lines of arbitrary length can be fed in.
        self.image_shape = (image_height, None, image_channels)
        self.lr = lr
        self.image_height, self.image_channels = image_height, image_channels
        self.maxlen = maxlen  # maximum label length supported by the CTC head
        self.dropout_rate = dropout_rate
        self.weight_decay = weight_decay
        self.filters = filters
        self.num_classes = num_classes
        self.num_gpu = num_gpu
        self.base_model, self.model, self.parallel_model = self.__build_model()
        if weight_path is not None:
            self.base_model.load_weights(weight_path)

    def config(self):
        """Return the constructor hyper-parameters as a JSON-serializable dict."""
        return {
            "lr": self.lr,
            "num_classes": self.num_classes,
            "image_height": self.image_height,
            "image_channels": self.image_channels,
            "maxlen": self.maxlen,
            "dropout_rate": self.dropout_rate,
            "weight_decay": self.weight_decay,
            "filters": self.filters
        }

    def __build_model(self):
        """Assemble the DenseNet backbone plus the CTC training graph."""
        input = Input(shape=self.image_shape, name="the_input")
        nb_filter = self.filters
        # Stem: 5x5 stride-2 conv halves both spatial dimensions.
        x = Conv2D(nb_filter, (5, 5), strides=(2, 2), kernel_initializer='he_normal', padding='same',
                   use_bias=False, kernel_regularizer=l2(self.weight_decay))(input)
        # 64 + 8 * 8 = 128
        x, nb_filter = _dense_block(x, 8, nb_filter, 8, None, self.weight_decay)
        # 128
        x, nb_filter = _transition_block(x, 128, self.dropout_rate, 2, self.weight_decay)
        # 128 + 8 * 8 = 192
        x, nb_filter = _dense_block(x, 8, nb_filter, 8, None, self.weight_decay)
        # 192->128
        x, nb_filter = _transition_block(x, 128, self.dropout_rate, 2, self.weight_decay)
        # 128 + 8 * 8 = 192
        x, nb_filter = _dense_block(x, 8, nb_filter, 8, None, self.weight_decay)
        x = BatchNormalization(axis=-1, epsilon=1.1e-5)(x)
        x = Activation('relu')(x)
        # Swap height/width so the width axis becomes the CTC time axis.
        x = Permute((2, 1, 3), name='permute')(x)
        x = TimeDistributed(Flatten(), name='flatten')(x)
        y_pred = Dense(self.num_classes, name='out', activation='softmax')(x)
        base_model = Model(inputs=input, outputs=y_pred)
        # Extra inputs carrying the labels and both sequence lengths for CTC.
        labels = Input(shape=(self.maxlen,), dtype='float32', name="the_labels")
        input_length = Input(shape=(1,), name="input_length", dtype='int64')
        label_length = Input(shape=(1,), name="label_length", dtype='int64')
        loss_out = Lambda(_ctc_loss, output_shape=(1,), name='ctc')([labels, y_pred, input_length, label_length])
        model = Model(inputs=[input, labels, input_length, label_length], outputs=loss_out)
        parallel_model = model
        if self.num_gpu > 1:
            parallel_model = multi_gpu_model(model, gpus=self.num_gpu)
        adam = Adam(self.lr)
        # The Lambda layer already outputs the loss value, so the compiled
        # loss simply passes y_pred through.
        parallel_model.compile(loss={'ctc': lambda y_true, y_pred: y_pred}, optimizer=adam, metrics=['accuracy'])
        return base_model, model, parallel_model

    def train(self, epochs, train_data_loader: DataLoader, valid_data_loader: DataLoader, **kwargs):
        """Fit the (parallel) model from generator-backed data loaders."""
        self.parallel_model.fit_generator(generator=train_data_loader.load_data(), epochs=epochs,
                                          steps_per_epoch=train_data_loader.steps_per_epoch,
                                          validation_data=valid_data_loader.load_data(),
                                          validation_steps=valid_data_loader.steps_per_epoch,
                                          **kwargs)

    def predict(self, image, id_to_char):
        """OCR a single image (path or PIL image); return (decoded text, resized image).

        `id_to_char` maps class indices to characters.
        """
        if type(image) == str:
            img = Image.open(image)
        else:
            img = image
        im = img.convert('L')
        # Rescale to the fixed 32-px training height, preserving aspect ratio.
        scale = im.size[1] * 1.0 / 32
        w = im.size[0] / scale
        w = int(w)
        im = im.resize((w, 32), Image.ANTIALIAS)
        # Normalize pixels to [-0.5, 0.5] -- presumably matching training; confirm.
        img = np.array(im).astype(np.float32) / 255.0 - 0.5
        X = img.reshape((32, w, 1))
        X = np.array([X])
        y_pred = self.base_model.predict(X)
        argmax = np.argmax(y_pred, axis=2)[0]
        y_pred = y_pred[:, :, :]
        # Greedy CTC decode over the full predicted sequence length.
        out = K.get_value(K.ctc_decode(y_pred, input_length=np.ones(y_pred.shape[0]) * y_pred.shape[1], )[0][0])[:, :]
        out = u''.join([id_to_char[x] for x in out[0]])
        return out, im

    @staticmethod
    def save_config(obj, config_path: str):
        """Dump `obj.config()` as JSON to `config_path`."""
        with open(config_path, 'w+') as outfile:
            json.dump(obj.config(), outfile)

    @staticmethod
    def load_config(config_path: str):
        """Load a JSON config previously written by `save_config`; returns a dict."""
        with open(config_path, 'r') as infile:
            return dict(json.load(infile))
|
[
"1490215053@qq.com"
] |
1490215053@qq.com
|
1111f324e3844b239c55a24138a23a8c48c92e47
|
564fe9c8409d9ff4ba5f88dd36c0743d417767fa
|
/test/test_alert_team_meta.py
|
3ae2e7a913c19ecd20b06e143be8e283b5169389
|
[
"Apache-2.0"
] |
permissive
|
criteo-forks/opsgenie-python-sdk
|
28cf4b2e5eb5f10df582cfd6393a0e952dee5102
|
2a3924a0bd779eab47937925eb5d42ffbbd751d4
|
refs/heads/master
| 2020-04-05T23:09:41.002143
| 2019-04-12T13:37:22
| 2019-04-12T13:37:22
| 65,009,459
| 0
| 2
| null | 2016-08-05T10:08:55
| 2016-08-05T10:08:55
| null |
UTF-8
|
Python
| false
| false
| 870
|
py
|
# coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import opsgenie_swagger
from opsgenie_swagger.models.alert_team_meta import AlertTeamMeta # noqa: E501
from opsgenie_swagger.rest import ApiException
class TestAlertTeamMeta(unittest.TestCase):
    """Unit test stubs for AlertTeamMeta (Swagger-generated)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testAlertTeamMeta(self):
        """Placeholder for an AlertTeamMeta construction test."""
        # FIXME: construct object with mandatory attributes with example values
        # model = opsgenie_swagger.models.alert_team_meta.AlertTeamMeta()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
|
[
"c.chary@criteo.com"
] |
c.chary@criteo.com
|
2575781d0b1a0577879a12cb6c9d39c26bb61fa6
|
00689951be97b3e9e3a036aca64efaa1ee59134a
|
/aula020 - FUNÇÕES/aula020.py
|
6fb54eff457ee36900903b61a3a97b0b0b5a7171
|
[
"MIT"
] |
permissive
|
miradouro/CursoEmVideo-Python
|
4826cf387cc9424e675f2b115842a643f2d67c8d
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
refs/heads/main
| 2023-03-24T08:51:34.183169
| 2021-03-20T22:15:02
| 2021-03-20T22:15:02
| 349,843,991
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 286
|
py
|
def mensagens(msg):
    """Print `msg` centered inside a 30-character dashed banner."""
    bar = '-' * 30
    print(bar)
    print(f'{msg:^30}')
    print(bar)


def soma(a, b):
    """Print the sum of `a` and `b`."""
    print(a + b)


def contador(*num):
    """Print the sum of all positional arguments."""
    print(sum(num))


mensagens('Olá, Mundo!')
soma(4, 5)
contador(2, 3, 4, 1)
|
[
"rafaelmiradouro@gmail.com"
] |
rafaelmiradouro@gmail.com
|
7239fe20e9a84b702b3c129e529ae7d77af73d46
|
0cab1425f18ea11e498278815ed2d95e26d15ace
|
/helper/helper_file.py
|
9ac77270fb042a05dcf7cf00ff88842a2d62d815
|
[] |
no_license
|
philgookang/pcr
|
1574a26160b83987234f3103e5b82eaf1bc5ac72
|
4fa28fecc3ca22b8b3315991e6505c7519de4fde
|
refs/heads/master
| 2023-08-08T00:47:03.508899
| 2019-12-19T05:11:02
| 2019-12-19T05:11:02
| 198,176,017
| 4
| 0
| null | 2023-07-22T11:30:00
| 2019-07-22T08:02:05
|
Python
|
UTF-8
|
Python
| false
| false
| 921
|
py
|
import torch
import pickle
import os
from PIL import Image, PSDraw, ImageDraw, ImageFont
from config import *
def save_dataset(filename, target):
    """Pickle `target` into RESULT_DATASET_PATH/filename (binary mode)."""
    destination = os.path.join(RESULT_DATASET_PATH, filename)
    with open(destination, "wb") as fh:
        pickle.dump(target, fh)
def load_dataset(filename):
    """Unpickle and return the object stored at RESULT_DATASET_PATH/filename."""
    source = os.path.join(RESULT_DATASET_PATH, filename)
    with open(source, "rb") as fh:
        return pickle.load(fh)
def save_model(model, filename):
    """Persist a PyTorch model's parameters (state_dict) under RESULT_MODEL_PATH."""
    torch.save(model.state_dict(), os.path.join(RESULT_MODEL_PATH, filename))
def create_image_caption(original, target, lst):
    """Draw the strings in `lst` in a widened margin of image `original`, save to `target`.

    `original` is an image path; the canvas is extended 900 px to the right
    and each caption line is drawn there in white Roboto 20pt.
    """
    font = os.path.join(BASE_PATH, "rss", "RobotoRegular.ttf")
    img = Image.open(original, 'r')
    w, h = img.size
    # crop() past the right edge effectively widens the canvas; the new
    # area is presumably filled with black -- confirm for the image modes used.
    img = img.crop((0, 0, w + 900, h))
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype(font, 20)
    for no, txt in enumerate(lst):
        # 37 px vertical spacing per caption line, 10 px into the margin.
        draw.text((w + 10, 2 + (37 * no)), txt, (255, 255, 255), font=font)
    img.save(target)
|
[
"philgookang@gmail.com"
] |
philgookang@gmail.com
|
ac1f040cb3738fe1771300341001c1a391bb690d
|
3ba8bca6c557936450a4fa08de9dea68f97681ec
|
/tests/test_trade.py
|
1f874ffc64c9718937d976db1a709d4b88db8198
|
[
"MIT"
] |
permissive
|
liuyang1/xalpha
|
08a801d360cf77a0057414e84f846fdf68388a2d
|
8672a7a11f2b2ded652cd37df0fc0db6dd6aac8a
|
refs/heads/master
| 2020-05-06T13:36:28.806387
| 2020-01-09T04:57:00
| 2020-01-09T04:58:50
| 180,146,115
| 0
| 1
| null | 2019-04-08T12:39:21
| 2019-04-08T12:39:21
| null |
UTF-8
|
Python
| false
| false
| 3,594
|
py
|
import sys
sys.path.insert(0, "../")
import xalpha as xa
import pytest
import pandas as pd
# Module-level fixtures shared by every test below: a real fund ('164818')
# plus a trade record loaded from demo.csv.  NOTE(review): building these
# hits xalpha's remote data sources at import time, so the suite needs
# network access and the asserted figures depend on the frozen demo data.
path = 'demo.csv'
cm = xa.fundinfo('164818')
statb = xa.record(path).status
cm_t = xa.trade(cm, statb)


def test_trade():
    """Spot-check trade bookkeeping, IRR and daily-report figures."""
    assert cm_t.cftable.loc[2, 'share'] == -129.14
    assert round(cm_t.xirrrate('2018-03-03'), 3) == -0.24
    assert cm_t.dailyreport('2018-07-29').iloc[0]['单位成本'] == 1.346
    # Visualization calls below are smoke tests only (no assertions).
    cm_t.v_tradecost('2018-08-01')
    cm_t.v_totvalue('2018-07-31')
    cm_t.v_tradevolume(freq='M')


def test_mul():
    """mulfix must validate the initial cash, then yield portfolio stats."""
    with pytest.raises(Exception) as excinfo:
        cm_m = xa.mulfix(cm_t, totmoney=200)
    assert str(excinfo.value) == 'You cannot sell first when you never buy'
    with pytest.raises(Exception) as excinfo:
        cm_m = xa.mulfix(cm_t, totmoney=300)
    assert str(excinfo.value) == 'the initial total cash is too low'
    cm_m = xa.mulfix(cm_t, totmoney=500)
    cm_m.bcmkset(xa.indexinfo('1399971'), start='2016-09-28')
    assert round(cm_m.xirrrate('2018-07-29'), 3) == -0.129
    assert round(cm_m.sharpe('2018-07-30'), 3) == -1.734
    cm_m.v_netvalue(benchmark=False)
    assert round(cm_m.total_return('2018-07-01'), 3) == -0.209
    assert round(cm_m.benchmark_volatility('2018-07-22'), 3) == 0.192
    assert round(cm_m.max_drawdown('2018-08-01')[2], 2) == -0.24
    cm_m.v_tradevolume()


def test_mulfix():
    """Portfolio-level position charts and combined summary figures."""
    tot = xa.mulfix(status=statb, totmoney=5000)
    assert tot.v_positions().options['legend'][0]['data'][1] == '富国中证红利指数增强'
    assert tot.v_positions_history('2017-01-01').options['legend'][0]['data'][-1] == '货币基金'
    assert round(tot.combsummary('2018-08-04').iloc[0]['投资收益率'], 1) == 1.0
    eva = tot.evaluation()
    assert round(eva.correlation_table(end='2018-07-30').iloc[2, 4], 3) == 0.095


def test_policy_buyandhold():
    """buyandhold policy: buy once, then optionally sell out later."""
    allin = xa.policy.buyandhold(cm, '2015-06-01')
    cm_t2 = xa.trade(cm, allin.status)
    cm_m2 = xa.mulfix(cm_t2)
    cm_m2.bcmkset(xa.indexinfo('1399971'))
    assert round(cm_m2.correlation_coefficient('2018-07-01'), 3) == 0.980
    assert round(cm_m2.information_ratio('2016-07-01'), 3) == -0.385
    allin.sellout('2018-06-01')
    cm_t2 = xa.trade(cm, allin.status)
    assert round(cm_t2.xirrrate('2019-08-12', guess=-0.9), 2) == -0.33


def test_policy_scheduled():
    """scheduled policy: fixed-amount weekly buys over three years."""
    auto = xa.policy.scheduled(cm, 1000, pd.date_range('2015-07-01', '2018-07-01', freq='W-THU'))
    cm_t3 = xa.trade(cm, auto.status)
    cm_t3.v_tradevolume(freq='W')
    assert round(cm_t3.dailyreport('2018-08-03').iloc[0]['投资收益率'], 2) == -42.07
    # scheduled_tune: amount multipliers keyed on net-value thresholds.
    auto2 = xa.policy.scheduled_tune(cm, 1000, pd.date_range('2015-07-01', '2018-07-01', freq='M'),
                                     [(0.9, 2), (1.2, 1)])


def test_policy_grid():
    """grid policy: laddered buy/sell percentages over a date window."""
    gr = xa.policy.grid(cm, [0, 2, 2], [3, 3, 3], '2018-06-23', '2018-08-03')
    tr = xa.trade(cm, gr.status)
    assert round(tr.xirrrate('2018-07-13'), 2) == 11.78


def test_policy_indicator_cross():
    """indicator_cross policy driven by the fund's BBI indicator line."""
    cm.bbi()
    techst = xa.policy.indicator_cross(cm, col=['netvalue', 'BBI'], start='2018-01-01', end='2018-07-07')
    cm_tt = xa.trade(cm, techst.status)
    assert round(cm_tt.dailyreport('2018-07-09').iloc[0].loc['换手率'], 1) == 14.1


def test_policy_indicator_points():
    """indicator_points policy on an index's PSY moving average."""
    zz500 = xa.indexinfo('0000905')
    zz500.psy()
    st = xa.policy.indicator_points(zz500, col='PSYMA12', start='2017-01-01', buy=[(0.6, 1), (0.7, 1)],
                                    sell=[(0.4, 1), (0.3, 1)], buylow=False)
    zz500_t = xa.trade(zz500, st.status)
    assert zz500_t.dailyreport('2018-05-01').iloc[0].loc['基金收益总额'] == -6302.26
|
[
"kcanamgal@foxmail.com"
] |
kcanamgal@foxmail.com
|
0e1c34ce545f90c85d2b94ad44a3e08011787ce8
|
b144c5142226de4e6254e0044a1ca0fcd4c8bbc6
|
/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/lldptlvsd_54480213540819ecec7096f8f29c5c2f.py
|
88f8bc13bf5f03db36a81cf2871e05693a7dd3e2
|
[
"MIT"
] |
permissive
|
iwanb/ixnetwork_restpy
|
fa8b885ea7a4179048ef2636c37ef7d3f6692e31
|
c2cb68fee9f2cc2f86660760e9e07bd06c0013c2
|
refs/heads/master
| 2021-01-02T17:27:37.096268
| 2020-02-11T09:28:15
| 2020-02-11T09:28:15
| 239,721,780
| 0
| 0
|
NOASSERTION
| 2020-02-11T09:20:22
| 2020-02-11T09:20:21
| null |
UTF-8
|
Python
| false
| false
| 4,952
|
py
|
# MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class LldpTlvSd(Base):
    """LLDP System Description TLV.

    The LldpTlvSd class encapsulates a required lldpTlvSd resource which will be retrieved from the server every time the property is accessed.
    """

    __slots__ = ()
    _SDM_NAME = 'lldpTlvSd'

    def __init__(self, parent):
        super(LldpTlvSd, self).__init__(parent)

    @property
    def Description(self):
        """Advertised Name/Description.

        Returns:
            str
        """
        return self._get_attribute('description')

    @Description.setter
    def Description(self, value):
        self._set_attribute('description', value)

    @property
    def ObjectId(self):
        """Unique identifier for this object.

        Returns:
            str
        """
        return self._get_attribute('objectId')

    def update(self, Description=None):
        """Updates a child instance of lldpTlvSd on the server.

        Args:
            Description (str): Advertised Name/Description.

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # Generated-code idiom: locals() picks up the keyword arguments;
        # do not introduce local variables before this call.
        self._update(locals())

    def CustomProtocolStack(self, *args, **kwargs):
        """Executes the customProtocolStack operation on the server.

        Create custom protocol stack under /vport/protocolStack

        customProtocolStack(Arg2:list, Arg3:enum)

        Args:
            args[0] is Arg2 (list(str)): List of plugin types to be added in the new custom stack
            args[1] is Arg3 (str(kAppend|kMerge|kOverwrite)): Append, merge or overwrite existing protocol stack

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        payload = { "Arg1": self }
        # Positional args become Arg2..ArgN of the exec payload; kwargs pass through.
        for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
        for item in kwargs.items(): payload[item[0]] = item[1]
        return self._execute('customProtocolStack', payload=payload, response_object=None)

    def DisableProtocolStack(self, *args, **kwargs):
        """Executes the disableProtocolStack operation on the server.

        Disable a protocol under protocolStack using the class name

        disableProtocolStack(Arg2:string)string

        Args:
            args[0] is Arg2 (str): Protocol class name to disable

        Returns:
            str: Status of the exec

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        payload = { "Arg1": self.href }
        for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
        for item in kwargs.items(): payload[item[0]] = item[1]
        return self._execute('disableProtocolStack', payload=payload, response_object=None)

    def EnableProtocolStack(self, *args, **kwargs):
        """Executes the enableProtocolStack operation on the server.

        Enable a protocol under protocolStack using the class name

        enableProtocolStack(Arg2:string)string

        Args:
            args[0] is Arg2 (str): Protocol class name to enable

        Returns:
            str: Status of the exec

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        payload = { "Arg1": self.href }
        for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
        for item in kwargs.items(): payload[item[0]] = item[1]
        return self._execute('enableProtocolStack', payload=payload, response_object=None)
|
[
"srvc_cm_packages@keysight.com"
] |
srvc_cm_packages@keysight.com
|
bd39d18338fdb9e9edfef7a78bef7d4241a5f37c
|
6a9f6e6527afb38611a5c695e5845e492d7676ff
|
/70.climbing-stairs.py
|
e67b2fdcd439792b906fbdf509241afcf5d78dae
|
[] |
no_license
|
Junyangz/leetcode
|
39913d48778d369f9f0a96070352d63b207e7df6
|
5de48fbdcb61ce8a437255a119a4a3ae242ede52
|
refs/heads/master
| 2020-06-09T08:57:26.361138
| 2019-11-14T08:38:07
| 2019-11-14T08:38:07
| 193,413,271
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
#
# @lc app=leetcode id=70 lang=python3
#
# [70] Climbing Stairs
#
from functools import lru_cache


@lru_cache(maxsize=None)
def _ways(n: int) -> int:
    """Memoized count of distinct 1-or-2-step climbs of `n` stairs (Fibonacci)."""
    if n <= 3:
        return n
    return _ways(n - 1) + _ways(n - 2)


class Solution:
    def climbStairs(self, n: int) -> int:
        """Return the number of distinct ways to climb a staircase of n steps.

        FIX: the original decorated the bound method with @lru_cache, which
        keys the cache on `self` and keeps every Solution instance alive
        (flake8-bugbear B019).  Memoization now lives on a module-level helper.
        """
        return _ways(n)
        # dp alternative:
        # if n <= 3: return n
        # dp = [0] * (n + 2)
        # dp[1] = 1
        # dp[2] = 2
        # for i in range(3, n+1):
        #     dp[i] = dp[i-1] + dp[i-2]
        # return dp[n]
|
[
"junyangz.iie@gmail.com"
] |
junyangz.iie@gmail.com
|
1e226fa654fee1fa9f4158ccb50e5d5da67a9749
|
92a506dbb59475e4378c0ed9685d52e67dd9dacd
|
/test/project_tests/test_poisson1d.py
|
f1a3d689740bdacaaf9a4d85e84964ab67f6d17d
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] |
permissive
|
Parallel-in-Time/pyMG-2016
|
c2aca9e5778f40c2e89ee492d01277f7e021825e
|
abf3e47ba374c39fddd576fdaccf5187c1632f8a
|
refs/heads/master
| 2021-01-21T04:31:05.543828
| 2016-06-23T09:18:29
| 2016-06-23T09:18:29
| 53,484,378
| 2
| 11
|
BSD-2-Clause
| 2020-10-02T05:44:46
| 2016-03-09T09:16:18
|
HTML
|
UTF-8
|
Python
| false
| false
| 943
|
py
|
import numpy as np
from project.poisson1d import Poisson1D
def test_has_spatial_order_of_accuracy():
    """Verify the 1D Poisson discretization converges at (roughly) second order."""
    expected_order = 2
    k = 4
    ntests = 6
    ndofs = []
    err_list = []
    for level in range(ntests):
        ndofs.append(2 ** (level + 4) - 1)
        prob = Poisson1D(ndofs[-1])
        xvalues = np.array([(j + 1) * prob.dx for j in range(prob.ndofs)])
        uinit = np.sin(np.pi * k * xvalues)
        uexact = (np.pi * k) ** 2 * uinit
        ucomp = prob.A.dot(uinit)
        err_list.append(np.linalg.norm(uexact - ucomp, np.inf) / np.linalg.norm(uexact, np.inf))
    # Observed order between successive refinements: log(err ratio)/log(dof ratio).
    order = np.array([
        np.log(err_list[j - 1] / err_list[j]) / np.log(ndofs[j] / ndofs[j - 1])
        for j in range(1, len(err_list))
    ])
    assert (order > expected_order * 0.9).all() and (order < expected_order * 1.1).all(), \
        'Order of accuracy of the spatial discretization is not ' + str(expected_order)
|
[
"r.speck@fz-juelich.de"
] |
r.speck@fz-juelich.de
|
e2b9ab8b70a89a2eedddc4ecbf3c75754d27cbef
|
05afca01eccc6d35d20fe4925c5d4bd2c8161379
|
/chat_28374/wsgi.py
|
83f029c803529a9171dffe807aed0c501f86a27c
|
[] |
no_license
|
crowdbotics-apps/chat-28374
|
45261db0fc1d7da5da69536fd3ac776f7b3a0c12
|
74a2159530882d79c8937cb37d0ba285be0a5e7e
|
refs/heads/master
| 2023-06-05T13:41:15.892156
| 2021-07-02T01:01:10
| 2021-07-02T01:01:10
| 382,193,743
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 397
|
py
|
"""
WSGI config for chat_28374 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'chat_28374.settings')
application = get_wsgi_application()
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
06bbb14932b3dc339d410086b48bde70742732a1
|
9a081f87749245666f40402b389167f03375aa27
|
/CNN.py
|
2375fbef13b31125db4dfb4aee2799b62c2f2d95
|
[] |
no_license
|
reddytocode/tensorFlw
|
704e981dc39dfde0e0e9ddb892b484f5d95a111d
|
4c20d23cea87ab7fe086a3d619ea3ff779f478f7
|
refs/heads/master
| 2021-10-23T08:41:49.688089
| 2019-03-15T23:35:41
| 2019-03-15T23:35:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 962
|
py
|
import tensorflow as tf
import keras  # NOTE(review): unused -- everything below goes through tf.keras

# Fashion-MNIST: 60k train / 10k test 28x28 grayscale images, 10 classes.
mnist = tf.keras.datasets.fashion_mnist
(training_images, training_labels), (test_images, test_labels) = mnist.load_data()
# Add the channel dimension and scale pixel values to [0, 1].
training_images = training_images.reshape(60000, 28, 28, 1)
training_images = training_images/255.0
test_images = test_images.reshape(10000, 28, 28, 1)
test_images = test_images/255.0
# Two conv/pool stages followed by a small dense classifier head.
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(64, (3,3), activation='relu', input_shape=(28, 28, 1)),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Conv2D(64, (3,3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2,2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax')
])
# sparse_categorical_crossentropy: labels are integer class ids, not one-hot.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
model.summary()
model.fit(training_images, training_labels, epochs = 5)
test_loss = model.evaluate(test_images, test_labels)
|
[
"aaabeeelooon@gmail.com"
] |
aaabeeelooon@gmail.com
|
0930f0b6a519782da3bf9118c41a62cf0286bc68
|
7e8668e54d0ea55dc1e0185eff920a85af974fa6
|
/vendor-local/lib/python/celery/tests/tasks/test_states.py
|
4acf8aafeee7d8e35b1d92deeec6dab9aa94cdfa
|
[
"BSD-3-Clause"
] |
permissive
|
mozilla/firefox-flicks
|
3003cee1af0941976ef960a97a0806e19079cd79
|
ad19ed59aac682744badae6d19a149327037f293
|
refs/heads/master
| 2023-07-03T17:33:13.589884
| 2019-03-30T04:45:50
| 2019-03-30T04:45:50
| 3,148,994
| 3
| 6
|
BSD-3-Clause
| 2019-03-30T04:45:52
| 2012-01-10T21:52:31
|
Python
|
UTF-8
|
Python
| false
| false
| 1,318
|
py
|
from __future__ import absolute_import
from celery.states import state
from celery import states
from celery.tests.utils import Case
class test_state_precedence(Case):
def test_gt(self):
self.assertGreater(state(states.SUCCESS),
state(states.PENDING))
self.assertGreater(state(states.FAILURE),
state(states.RECEIVED))
self.assertGreater(state(states.REVOKED),
state(states.STARTED))
self.assertGreater(state(states.SUCCESS),
state('CRASHED'))
self.assertGreater(state(states.FAILURE),
state('CRASHED'))
self.assertFalse(state(states.REVOKED) > state('CRASHED'))
def test_lt(self):
self.assertLess(state(states.PENDING), state(states.SUCCESS))
self.assertLess(state(states.RECEIVED), state(states.FAILURE))
self.assertLess(state(states.STARTED), state(states.REVOKED))
self.assertLess(state('CRASHED'), state(states.SUCCESS))
self.assertLess(state('CRASHED'), state(states.FAILURE))
self.assertTrue(state(states.REVOKED) < state('CRASHED'))
self.assertTrue(state(states.REVOKED) <= state('CRASHED'))
self.assertTrue(state('CRASHED') >= state(states.REVOKED))
|
[
"mkelly@mozilla.com"
] |
mkelly@mozilla.com
|
6cb5ca73a3c4b544f93c530b04ebbfe82214ff41
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/46/usersdata/61/18544/submittedfiles/funcoes1.py
|
a3ae4f4e9c4225d28057fe39cceda6c1ea832ff0
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 799
|
py
|
# -*- coding: utf-8 -*-
from __future__ import division
def crescente(a):
cont=0
for i in range (0,len(a)-1,1):
if a[i]>a[i+1]:
cont=cont+1
if cont==0:
return True
else:
return False
def decresc(a):
cont=0
for i in range (0,len(a)-1,1):
if a[i]>a[i-1]:
cont=cont+1
if cont==0:
return True
else:
return False
a=[]
b=[]
c=[]
n=int(input("Digite um valor: "))
for i in range(0,n,1):
a.append(input("Digite um número: "))
if crescente(a):
print ("S")
else:
print ("N")
for i in range(0,n,1):
a.append(input("Digite um número: "))
if decresc(a):
print ("S")
else:
print ("N")
#escreva as demais funções
#escreva o programa principal
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
6bb5e25a7b62fe82c79b3eb82972f7a6e16d8953
|
6ae42d809532e463b2cef2a9648ececde501faa1
|
/MobileRevelator/python/android_facebook_batch.py
|
bb122e07248eac572f9dddd399ebe51edd8e1a60
|
[
"MIT"
] |
permissive
|
bkerler/MR
|
c52fa76a1ae46ebb637cac20925f8543235d85f3
|
5ba553fd0eb4c1d80842074a553119486f005822
|
refs/heads/master
| 2022-04-28T23:48:14.788560
| 2020-06-01T08:25:17
| 2020-06-01T08:25:17
| 168,991,623
| 140
| 35
|
MIT
| 2020-02-10T10:45:50
| 2019-02-03T20:46:14
|
Python
|
UTF-8
|
Python
| false
| false
| 6,399
|
py
|
#Pluginname="Facebook app_analytics (Android)"
#Type=App
import os
import struct
import json
import tempfile
def convertdata(filenames):
zfields=[]
for fsname in filenames:
print("Running Facebook conversion: " + fsname[fsname.rfind("/") + 1:])
filename=tempfile.gettempdir()+"/"+fsname[fsname.rfind("/")+1:]
if ctx.fs_file_extract(fsname,filename):
with open(filename, "rb") as ff:
try:
data = str(ff.read().decode("utf-8"))+str("]}")
jdata = json.loads(data)
except:
continue
row = 0
timestamp=""
uid=""
desc=""
if "time" in jdata:
timestamp = str(jdata["time"])
if "uid" in jdata:
uid=str(jdata["uid"])
if "data" in jdata:
fbdata=jdata["data"]
for subdata in fbdata:
if "extra" in subdata:
extra=subdata["extra"]
#if ("network_type" in extra) or ("battery" in extra) or ("connection" in extra) or ("text" in extra):
zfield = {}
zfield["ID"] = row
zfield["Filename"]=fsname
zfield["Type"] = "Generic"
if uid!="":
zfield["Contact"] = uid
else:
zfield["Contact"] = ""
zfield["Timestamp"] = timestamp
description = ""
if "suggestions_at_end_of_session" in extra:
zfield["Type"] = "Suggestions"
dt=extra["suggestions_at_end_of_session"]
for d in dt:
if "text" in d:
description += "suggestion: \"" + d["text"] + "\";"
if "dest_module_uri" in extra:
zfield["Type"] = "Uri"
if "dest_module_uri" in extra:
description+="dest_module_uri: "+extra["dest_module_uri"]+";"
if "click_point" in extra:
description+="click_point: "+extra["click_point"]+";"
if "source_module" in extra:
description+="source_module: "+extra["source_module"]+";"
if "video_id" in extra:
zfield["Type"] = "Video"
if "video_id" in extra:
description+="video_id: "+extra["video_id"]+";"
if "video_last_start_time_position" in extra:
description+="video_last_start_time_position: "+str(extra["video_last_start_time_position"])+";"
if "video_play_reason" in extra:
description+="video_play_reason: "+extra["video_play_reason"]+";"
if "video_time_position" in extra:
description+="video_time_position: "+str(extra["video_time_position"])+";"
if "network_type" in extra:
description+="network_type: "+extra["network_type"]+";"
if "network_subtype" in extra:
description+="network_subtype: "+extra["network_subtype"]+";"
if "connqual" in extra:
description+="connqual: "+extra["connqual"]+";"
if "was_backgrounded" in extra:
description+="was_backgrounded: "+str(extra["was_backgrounded"])+";"
if "airplane_mode_on" in extra:
description+="airplane_mode_on: "+str(extra["airplane_mode_on"])+";"
if "battery" in extra:
zfield["Type"] = "Battery"
if "battery" in extra:
description+="battery: "+str(extra["battery"])+";"
if "charge_state" in extra:
description+="charge_state: "+extra["charge_state"]+";"
if "battery_health" in extra:
description+="battery_health: "+extra["battery_health"]+";"
#description = json.dumps(extra, separators=(',',':'))
if (len(description)>1):
zfield["Other content"] = description
zfields.append(zfield)
row += 1
os.remove(filename)
rows = len(zfields)
# print(zfields)
for i in range(0, rows):
zfield = zfields[i]
oldpos = 0
newpos = int(i / rows * 100)
if (oldpos < newpos):
oldpos = newpos
ctx.gui_setMainProgressBar(oldpos)
ctx.gui_set_data(i, 0, zfield["ID"])
ctx.gui_set_data(i, 1, zfield["Type"])
ctx.gui_set_data(i, 2, zfield["Contact"])
ctx.gui_set_data(i, 3, zfield["Timestamp"])
ctx.gui_set_data(i, 4, zfield["Other content"])
ctx.gui_set_data(i, 5, zfield["Filename"])
def main():
ctx.gui_setMainLabel("Facebook App Analytics: Parsing ...");
ctx.gui_setMainProgressBar(0)
headers = ["rowid (int)", "Type (QString)", "Contact (QString)", "Timestamp (int)", "Other_Content (QString)","Filename (QString)"]
ctx.gui_set_headers(headers)
filenames=ctx.pluginfilenames()
convertdata(filenames)
ctx.gui_update()
ctx.gui_setMainLabel("Status: Idle.")
ctx.gui_setMainProgressBar(0)
return "Finished running plugin."
|
[
"info@revskills.de"
] |
info@revskills.de
|
d83cd414c71770b0af62ab4617bc4f101a35b432
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/iMRN9YGK4mcYja9rY_23.py
|
75d7bb82d79759426baf19e08f08c025e9422e86
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 154
|
py
|
def accumulating_product(lst):
if lst:
r=[lst[0]]
for i in range(1,len(lst)):
r.append(lst[i]*r[-1])
return r
else:
return []
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
2272e4858674584c2763baa58a85ba9c90c95123
|
795fcb8fca8d4d0b940e5b82e33803bb5f4cfe36
|
/soapbox/templatetags/soapbox.py
|
9431e47bd2d12c81c72efeb846ddb974b8c2a941
|
[
"BSD-3-Clause"
] |
permissive
|
callahad/django-soapbox
|
ef4fc36a4e539cb3008918e8adb08a67ef1d1b1d
|
f9189e1ddf47175f2392b92c7a0a902817ee1e93
|
refs/heads/master
| 2021-01-04T13:21:22.022457
| 2017-06-05T09:26:33
| 2017-06-05T09:26:33
| 240,570,323
| 0
| 0
|
BSD-3-Clause
| 2020-02-14T18:08:01
| 2020-02-14T18:08:00
| null |
UTF-8
|
Python
| false
| false
| 291
|
py
|
from django import template
from ..models import Message
register = template.Library()
@register.assignment_tag(takes_context=True)
def get_messages_for_page(context, url):
if url == context.template.engine.string_if_invalid:
return []
return Message.objects.match(url)
|
[
"james@b-list.org"
] |
james@b-list.org
|
34bb52a1936b775ded476143836f87df0c213fde
|
ac4b9385b7ad2063ea51237fbd8d1b74baffd016
|
/.history/s1_3_getHtml_20210209165302.py
|
06251e4a9d3dadbfde199980f5e23866fdb8bc24
|
[] |
no_license
|
preethanpa/ssoemprep
|
76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f
|
ce37127845253c768d01aeae85e5d0d1ade64516
|
refs/heads/main
| 2023-03-09T00:15:55.130818
| 2021-02-20T06:54:58
| 2021-02-20T06:54:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,481
|
py
|
# This module is called from 3R Automation Component.
import os
import sys
# pdftotree is available as part of the virtual environment for 3R Python processing
import pdftotree
import json
from pprint import pprint
import pdfminer
import matplotlib.pyplot as plt
import ocr_extract as imgpdf
from utils.ocr.handle_image import *
# pdf_doc = json.loads(sys.argv[1])['doc_name']
pdf_doc = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/images/PAN_Card_Scan_AKC.png'
# html_path = json.loads(sys.argv[1])['html_path']
html_path = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/html/'+os.path.basename(pdf_doc).split('.')[0] + '.html'
print(f'HTML Path is set to {html_path}')
path_if_not_pdf_doc = ''
pdf_doc_path = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/pdf'
# Use the following for testing
# pdf_doc = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/pdf/Sri_khyati_CV.pdf'
# html_path = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/html/Sri_khyati_CV.html'
def create_hocr(pdf_doc='', html_path='', model_path='./model/model.pkl'):
return pdftotree.parse(pdf_doc, html_path=html_path, model_type=None, model_path=model_path, visualize=False)
create_hocr_output = None
try:
create_hocr_output = create_hocr(pdf_doc=pdf_doc, html_path=html_path)
except pdfminer.pdfparser.PDFSyntaxError as pdfException:
create_hocr_output = pdfException
path_if_not_pdf_doc = pdf_doc
try:
# pdf_doc = extract_pdf_from_image(pdf_doc, pdf_path=pdf_doc_path, action=1, psm=11)
image, line_items_coordinates = mark_region(path_if_not_pdf_doc)
# load the original image
image = cv2.imread(path_if_not_pdf_doc)
# get co-ordinates to crop the image
c = line_items_coordinates[1]
# cropping image img = image[y0:y1, x0:x1]
img = image[c[0][1]:c[1][1], c[0][0]:c[1][0]]
plt.figure(figsize=(10,10))
plt.imshow(img)
# convert the image to black and white for better OCR
ret,thresh1 = cv2.threshold(img,120,255,cv2.THRESH_BINARY)
# pytesseract image to string to get results
text = str(pytesseract.image_to_string(thresh1, config='--psm 6'))
print(text)
convert_text_to_pdf(text, pdf_doc_path, os.path.basename(pdf_doc).split('.')[0])
create_hocr_output = create_hocr(pdf_doc=pdf_doc, html_path=html_path)
except Exception:
create_hocr_output = Exception
print(Exception)
# extract_pdf_from_image(pdf_doc, pdf_path=pdf_doc_path, action=2, psm=6)
# Use the following for testing non PDF files
# print(f'{os.path.basename(pdf_doc).split(".")[0]+".pdf"}')
# print(f'{os.path.abspath(pdf_doc).split(".")[0]+".pdf"}')
# try:
# # imgpdf.convert_image_to_pdf(pdf_doc, os.path(pdf_doc)+os.path.basename(pdf_doc).split('.')[0]+'.pdf')
# imgpdf.convert_image_to_pdf(pdf_doc, os.path.dirname(pdf_doc), os.path.abspath(pdf_doc).split(".")[0]+".pdf")
# except Exception as exc:
# print(exc)
# Output of "print" statement is passed to the calling program
proc_status = "OK" if create_hocr_output == None else "Not a PDF document or unable to process image at path "+path_if_not_pdf_doc
json_out = {"pdf_doc": pdf_doc, "process_status": proc_status}
json_out = {"message": "We are testing/making some changes to this API, please try after in about 30 mins. Sorry for the inconvenience."}
print(json_out)
|
[
"{abhi@third-ray.com}"
] |
{abhi@third-ray.com}
|
e72c98ab5cea32846ce3c45803b0b82ff6c328ab
|
1c69aaf4ff5c9bbabd4e4e3486e3f442808f96ea
|
/models/r4/meta.py
|
ebcab05264342b9983abf18e75edef597276aa0e
|
[] |
no_license
|
glow-mdsol/devday-boston-clinical-research
|
72565289b27e9d6105640ec14749e07d7bc14014
|
560a8141bc3bd1ae5a31b110e82863e25b4ce9f8
|
refs/heads/master
| 2020-03-20T00:48:32.862642
| 2018-06-20T15:33:29
| 2018-06-20T15:33:29
| 137,056,522
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,344
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.3.0 (http://hl7.org/fhir/StructureDefinition/Meta) on 2018-05-12.
# 2018, SMART Health IT.
from . import element
class Meta(element.Element):
""" Metadata about a resource.
The metadata about a resource. This is content in the resource that is
maintained by the infrastructure. Changes to the content might not always
be associated with version changes to the resource.
"""
resource_type = "Meta"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.lastUpdated = None
""" When the resource version last changed.
Type `FHIRDate` (represented as `str` in JSON). """
self.profile = None
""" Profiles this resource claims to conform to.
List of `str` items. """
self.security = None
""" Security Labels applied to this resource.
List of `Coding` items (represented as `dict` in JSON). """
self.source = None
""" Identifies where the resource comes from.
Type `str`. """
self.tag = None
""" Tags applied to this resource.
List of `Coding` items (represented as `dict` in JSON). """
self.versionId = None
""" Version specific identifier.
Type `str`. """
super(Meta, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Meta, self).elementProperties()
js.extend([
("lastUpdated", "lastUpdated", fhirdate.FHIRDate, False, None, False),
("profile", "profile", str, True, None, False),
("security", "security", coding.Coding, True, None, False),
("source", "source", str, False, None, False),
("tag", "tag", coding.Coding, True, None, False),
("versionId", "versionId", str, False, None, False),
])
return js
from . import coding
from . import fhirdate
|
[
"glow@mdsol.com"
] |
glow@mdsol.com
|
3e6e0e70c38de7ddea9dc2f9b668c0b8f3e0bbdd
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/pa3/sample/object_attr_get_none-110.py
|
2998259b825a549bf3b348da7f8b538e5dcc83a0
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 184
|
py
|
class A(object):
a:int = 42
class B(A):
b:bool = True
def __init__(self:"B"):
print("B")
a:A = None
b:B = None
a = B()
print(a.a)
print(b.a)
print($Parameters)
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
4de951b94deba051a37c1db4f7ece94e983a7c83
|
2b25aae9266437b657e748f3d6fea4db9e9d7f15
|
/graphics/3d/4/lawrence_lim/matrix.py
|
a041e2c30a8361efb4f61e085905e8ef8449ba7d
|
[] |
no_license
|
Zilby/Stuy-Stuff
|
b1c3bc23abf40092a8a7a80e406e7c412bd22ae0
|
5c5e375304952f62667d3b34b36f0056c1a8e753
|
refs/heads/master
| 2020-05-18T03:03:48.210196
| 2018-11-15T04:50:03
| 2018-11-15T04:50:03
| 24,191,397
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,628
|
py
|
import math
def make_bezier():
return [ [-1, 3,-3, 1],
[ 3,-6, 3, 0],
[-3, 3, 0, 0],
[ 1, 0, 0, 0] ]
def make_hermite():
return [ [ 2,-3, 0, 1],
[-2, 3, 0, 0],
[ 1,-2, 1, 0],
[ 1,-1, 0, 0] ]
def generate_curve_coefs( p1, p2, p3, p4, t ):
pmat = [ [p1,p2,p3,p4] ]
return matrix_mult(t,pmat)
def make_translate( x, y, z ):
rmat = new_matrix(4,4)
rmat = ident(rmat)
rmat[3][0] = x
rmat[3][1] = y
rmat[3][2] = z
return rmat
def make_scale( x, y, z ):
rmat = new_matrix(4,4)
rmat[0][0] = x
rmat[1][1] = y
rmat[2][2] = z
rmat[3][3] = 1
return rmat
def make_rotX( theta ):
rmat = new_matrix(4,4)
rad = math.radians(theta)
rmat[1][1] = math.cos(rad)
rmat[2][2] = math.cos(rad)
rmat[1][2] = math.sin(rad)
rmat[2][1] = -math.sin(rad)
rmat[0][0] = 1
rmat[3][3] = 1
return rmat
def make_rotY( theta ):
rmat = new_matrix(4,4)
rad = math.radians(theta)
rmat[0][0] = math.cos(rad)
rmat[2][2] = math.cos(rad)
rmat[0][2] = math.sin(rad)
rmat[2][0] = -math.sin(rad)
rmat[1][1] = 1
rmat[3][3] = 1
return rmat
def make_rotZ( theta ):
rmat = new_matrix(4,4)
rad = math.radians(theta)
rmat[0][0] = math.cos(rad)
rmat[1][1] = math.cos(rad)
rmat[0][1] = math.sin(rad)
rmat[1][0] = -math.sin(rad)
rmat[2][2] = 1
rmat[3][3] = 1
return rmat
def new_matrix(rows = 4, cols = 4):
m = []
for c in range( cols ):
m.append( [] )
for r in range( rows ):
m[c].append( 0 )
return m
def print_matrix( matrix ):
s = ''
for r in range( len( matrix[0] ) ):
for c in range( len(matrix) ):
s+= str(matrix[c][r]) + ' '
s+= '\n'
print s
def print_matrix_vert( matrix ):
for c in matrix:
print c
def ident( matrix ):
idmat = new_matrix( len(matrix), len(matrix) )
for i in range( len( idmat ) ):
idmat[i][i] = 1
return idmat
def matrix_copy( src, dst ):
for c in range( len(src) ):
for r in range( len(src[0]) ):
pass
def scalar_mult( matrix, x ):
for c in range( len(matrix) ):
for r in range( len( matrix[0] ) ):
matrix[c][r] *= x
#m1 * m2 -> m2
def matrix_mult( m1, m2 ):
rmat = new_matrix(len(m1[0]),len(m2))
for c in range( len(m2) ):
for r in range( len(m1[0]) ):
cell = 0
for i in range( len(m1) ):
cell += m1[i][r] * m2[c][i]
rmat[c][r] = cell
return rmat
|
[
"azilby@gmail.com"
] |
azilby@gmail.com
|
8f8adb948cbbabadfc351ff919f05caab0ebb239
|
3d83e5d6c5c3b264dbca94f2fedcd1abaf522278
|
/tests/test_wps_cmip5_regridder.py
|
aadc72a608fab27bdaf0c0cda2f46a48b42b16ef
|
[
"Apache-2.0"
] |
permissive
|
cp4cds/c4cds-wps
|
4767d779a2338d46d52f0c23bb89f0072928c482
|
5abd9281195548bbd1e7653fe2ab1fee26745200
|
refs/heads/master
| 2020-04-02T06:43:19.383112
| 2020-01-14T16:05:36
| 2020-01-14T16:05:36
| 154,164,988
| 0
| 0
|
NOASSERTION
| 2020-01-14T16:05:37
| 2018-10-22T15:09:32
|
Python
|
UTF-8
|
Python
| false
| false
| 578
|
py
|
from pywps import Service
from pywps.tests import assert_response_success
from . common import client_for, resource_file
from c4cds.processes.wps_cmip5_regridder import CMIP5Regridder
cfgfiles = [resource_file('test.cfg'), ]
def test_wps_cmip5_regridder():
client = client_for(Service(processes=[CMIP5Regridder()], cfgfiles=cfgfiles))
datainputs = "model=HadGEM2-ES"
resp = client.get(
service='WPS', request='Execute', version='1.0.0', identifier='cmip5_regridder',
datainputs=datainputs)
print(resp.data)
assert_response_success(resp)
|
[
"ehbrecht@dkrz.de"
] |
ehbrecht@dkrz.de
|
c6b598899b53d519f2fdb672ab2f909910a5485a
|
969fed6b9f4c0daa728bda52fea73d94bda6faad
|
/fakeSPS/spss5.py
|
750bdd7ae87c3d3ed6a837b471912130b45e52e3
|
[] |
no_license
|
ess-dmsc/essiip-fakesinqhw
|
7d4c0cb3e412a510db02f011fb9c20edfbd8a84f
|
ad65844c99e64692f07e7ea04d624154a92d57cd
|
refs/heads/master
| 2021-01-18T22:50:50.182268
| 2020-10-01T08:39:30
| 2020-10-01T08:39:30
| 87,077,121
| 0
| 0
| null | 2018-12-07T08:43:00
| 2017-04-03T13:28:23
|
Python
|
UTF-8
|
Python
| false
| false
| 3,061
|
py
|
#!/usr/bin/python
#
# fake SINQ SPS S5. This is a Siemens SPS S5 with a custom RS-232 interface and
# protocol as used at SINQ. The protocol is very simple. What is instrument
# specific is what happens when you set one of the digital inputs. Currently,
# only the AMOR case is implemented.
#
#
# Mark Koennecke, August 2016
#----------------------------------------------------------------------
from twisted.internet import reactor, protocol
from twisted.protocols.basic import LineReceiver
import time
import sys
class SPSS5(LineReceiver):
def __init__(self):
self.b1 = 1
self.b2 = 0
self.b3 = 0
self.b4 = 0
self.b5 = 5
self.b6 = 0
self.b7 = 7
self.b8 = 0
self.b9 = 0
self.b10 = 0
self.b11 = 0
self.b12 = 0
self.b13 = 0
self.b14 = 0
self.b15 = 0
self.b16 = 0
self.a1 = 1
self.a2 = 2
self.a3 = 3
self.a4 = 4
self.a5 = 5
self.a6 = 6
self.a7 = 7
self.a8 = 8
def write(self, data):
print "transmitted:", data
if self.transport is not None:
self.transport.write(data+'\n')
def lineReceived(self, data):
print "lineReceived:", data
data = data.lower().strip()
if data.startswith('r'):
self.write('R %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d %3.3d\r'
% (self.b1, self.b2,self.b3,self.b4,self.b5,self.b6,self.b7,self.b8,self.b9,self.b10,self.b11,
self.b12,self.b13,self.b14,self.b15,self.b16))
return
if data.startswith('a'):
self.write('A %5.5d %5.5d %5.5d %5.5d %5.5d %5.5d %5.5d %5.5d\r'
% (self.a1, self.a2,self.a3,self.a4,self.a5,self.a6,self.a7,self.a8))
return
if data.startswith('s'):
if len(data) < 5:
self.write('?PAR\r')
return
byte = int(data[1:4])
bit = int(data[4])
self.doPush(byte,bit)
self.write(data + '\r')
return
def doPush(self,byte,bit):
# shutter
if byte == 0 and bit == 0:
if self.b5 == 5:
self.b5 = 0
else:
self.b5 = 5
return
# laser light
if byte == 0 and bit == 1:
if self.b16 == 0:
self.b16 = 129
else:
self.b16 = 0
return
# RF flipper
if byte == 0 and bit == 7:
if self.b13 == 0:
self.b13 = 128
else:
self.b13 = 0
return
def main(argv):
if len(argv) > 1:
port = int(argv[1])
else:
port = 63000
factory = protocol.ServerFactory()
factory.protocol = SPSS5
reactor.listenTCP(port, factory)
reactor.run()
if __name__ == "__main__":
main(sys.argv)
|
[
"mark.koennecke@psi.ch"
] |
mark.koennecke@psi.ch
|
387830023b70ccdb90dd7ac0b468f571c24753f0
|
8ded32c55d5223654030d176e9df6acf0d9f8855
|
/mpikat/meerkat/fbfuse/fbfuse_feng_subscription_manager.py
|
d5e55f66b74d8673f94689c739bf3adcefb4a347
|
[
"MIT"
] |
permissive
|
TeepChairin/mpikat
|
d5afb738df69e6e4264aac8829a9d48b9aacbd93
|
464d76113c92e0e8a3106ccc05ef551a1427e582
|
refs/heads/master
| 2020-09-23T20:31:27.677733
| 2019-07-02T15:08:50
| 2019-07-02T15:08:50
| 225,580,332
| 0
| 0
|
MIT
| 2019-12-03T09:27:15
| 2019-12-03T09:27:14
| null |
UTF-8
|
Python
| false
| false
| 7,004
|
py
|
"""
Copyright (c) 2018 Ewan Barr <ebarr@mpifr-bonn.mpg.de>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import logging
import struct
import numpy as np
log = logging.getLogger("mpikat.fbfuse_feng_subscription_manager")
NSPINES = 16
NLEAVES = 4
MAX_SUBS_PER_LEAF = 4
HOST_TO_LEAF_MAP = {
"fbfpn00.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn01.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn02.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn03.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn04.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn05.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn06.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn07.mpifr-be.mkat.karoo.kat.ac.za": 0,
"fbfpn08.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn09.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn10.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn11.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn12.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn13.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn14.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn15.mpifr-be.mkat.karoo.kat.ac.za": 1,
"fbfpn16.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn17.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn18.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn19.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn20.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn21.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn22.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn23.mpifr-be.mkat.karoo.kat.ac.za": 2,
"fbfpn24.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn25.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn26.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn27.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn28.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn29.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn30.mpifr-be.mkat.karoo.kat.ac.za": 3,
"fbfpn31.mpifr-be.mkat.karoo.kat.ac.za": 3
}
class FengToFbfMapper(object):
def __init__(self, nspines=NSPINES, nleaves=NLEAVES,
max_subs_per_leaf=MAX_SUBS_PER_LEAF,
host_to_leaf_map=HOST_TO_LEAF_MAP):
self._h2l_map = host_to_leaf_map
self._nspines = nspines
self._max_subs_per_leaf = max_subs_per_leaf
self._subscriptions = np.zeros((nspines, nleaves))
self._subscription_sets = {}
def validate_ip_ranges(self, ip_ranges):
log.debug("Validating IP ranges")
for ip_range in ip_ranges:
if ip_range.count != 4:
log.error("Count for IP range was not 4")
raise Exception(
"All stream must span 4 consecutive multicast groups")
def group_to_spine(self, group):
subnet = struct.unpack("B"*4, group.packed)[-1]
return subnet % self._nspines
def worker_to_leaf(self, worker):
return self._h2l_map[worker.hostname]
def validate_workers(self, workers):
log.debug("Validating worker servers")
for worker in workers:
if worker.hostname not in self._h2l_map:
log.error(("Could not determine leaf switch ID "
"for worker server: {}").format(
worker.hostname))
raise Exception(
"Worker '{}' does not map to a leaf switch".format(
worker))
def subscribe(self, ordered_ip_ranges, available_workers, subarray_id):
log.debug("Determining safe F-engine subscriptions")
available_workers = available_workers[:]
self.validate_workers(available_workers)
self.validate_ip_ranges(ordered_ip_ranges)
if subarray_id in self._subscription_sets:
raise Exception(
"Subarray {} already has a subscription mapping".format(
subarray_id))
used_workers = []
unallocated_ranges = []
all_indexes = []
mapping = []
for ip_range in ordered_ip_ranges:
log.debug("Attempting to allocate range: {}".format(
ip_range.format_katcp()))
for worker in available_workers:
leaf_idx = self.worker_to_leaf(worker)
can_subscribe = True
indexes = []
for group in ip_range:
spine_idx = self.group_to_spine(group)
indexes.append((spine_idx, leaf_idx))
if self._subscriptions[spine_idx, leaf_idx] >= self._max_subs_per_leaf:
can_subscribe = False
if can_subscribe:
for x, y in indexes:
self._subscriptions[x, y] += 1
mapping.append((worker, ip_range))
all_indexes.extend(indexes)
available_workers.remove(worker)
used_workers.append(worker)
log.info("Allocated {} to {}".format(
ip_range.format_katcp(), worker))
break
else:
continue
else:
log.warning("Unable to allocate {}".format(
ip_range.format_katcp()))
unallocated_ranges.append(ip_range)
self._subscription_sets[subarray_id] = all_indexes
log.debug(self.render_spine_status())
return mapping, available_workers, unallocated_ranges
def unsubscribe(self, subarray_id):
log.debug("Removing subscriptions from subarray: {}".format(
subarray_id))
for x, y in self._subscription_sets[subarray_id]:
self._subscriptions[x, y] -= 1
del self._subscription_sets[subarray_id]
log.debug(self.render_spine_status())
def render_spine_status(self):
status = "Subscription count matrix:\n"
status += "Leaf: 0 | 1 | 2 | 3 \n"
status += "-----------------------\n"
for ii, row in enumerate(self._subscriptions):
status += "Spine {:02d}: {}\n".format(
ii, " | ".join(map(str, map(int, row))))
return status
|
[
"ewan.d.barr@googlemail.com"
] |
ewan.d.barr@googlemail.com
|
2edc2b5179347ab0c63389d5e6b5df02fa39f18f
|
fe62dbd83ac715d640e740e21bf68d9041baab31
|
/api/repository/repository.py
|
16d007ecaa4231c60d357e2aad26266b069d04df
|
[] |
no_license
|
chandler767/flask_api_example
|
ccf2d7ff13a69d1f9a823074e14d7f1073ed215e
|
a5525fbc537d12f1d2492a6174bf66a3662b9969
|
refs/heads/master
| 2021-01-18T10:19:27.456774
| 2016-05-25T00:40:52
| 2016-05-25T00:40:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 426
|
py
|
from ..database.database_model import *
## Repository layer will handle database transactions.
class UserRepository():
def signup():
pass
def get_userinfo_id(id):
pass
def get_userinfo_username(username):
pass
def does_exsist(username):
check = db.session.query(User).filter(User.username == username).first()
if check is None:
return 1
else:
return 0
class PostRepository():
def post():
pass
|
[
"joshuadparkin@gmail.com"
] |
joshuadparkin@gmail.com
|
51e1436fc9fa4d880c435775e809bd50dfb571fe
|
a045055cb41f7d53e1b103c3655a17dc4cd18d40
|
/python-master/kubernetes/test/test_policy_v1beta1_supplemental_groups_strategy_options.py
|
ef060d462ef5635af99dded85243dfd48b74b671
|
[] |
no_license
|
18271693176/copy
|
22f863b180e65c049e902de0327f1af491736e5a
|
ff2511441a2df03817627ba8abc6b0e213878023
|
refs/heads/master
| 2020-04-01T20:20:28.048995
| 2018-11-05T02:21:53
| 2018-11-05T02:21:53
| 153,599,530
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,208
|
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.10.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.policy_v1beta1_supplemental_groups_strategy_options import PolicyV1beta1SupplementalGroupsStrategyOptions
class TestPolicyV1beta1SupplementalGroupsStrategyOptions(unittest.TestCase):
""" PolicyV1beta1SupplementalGroupsStrategyOptions unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testPolicyV1beta1SupplementalGroupsStrategyOptions(self):
"""
Test PolicyV1beta1SupplementalGroupsStrategyOptions
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.policy_v1beta1_supplemental_groups_strategy_options.PolicyV1beta1SupplementalGroupsStrategyOptions()
pass
if __name__ == '__main__':
unittest.main()
|
[
"906317366@qq.com"
] |
906317366@qq.com
|
ec7a4da280ef858b1fa27fea0877fec4c261e8e5
|
6d63aa2f237c0d2662a71b2065026cebad56829f
|
/netease-musicbox-git/lilac.py
|
e466f53e2bda5ee027883177b90d7dd65eaa1518
|
[] |
no_license
|
MaskRay/repo
|
dbc407f5a30ac69d9aad27592e71337a4c194fe7
|
e8def5b474d55dfbabdefd20c64cf1a12e3d950c
|
refs/heads/master
| 2021-01-13T11:09:18.404283
| 2018-05-05T08:11:03
| 2018-05-05T08:11:03
| 77,249,750
| 0
| 0
| null | 2016-12-23T20:44:24
| 2016-12-23T20:44:24
| null |
UTF-8
|
Python
| false
| false
| 234
|
py
|
#!/usr/bin/env python3
from lilaclib import *
build_prefix = 'extra-x86_64'
pre_build = vcs_update
def post_build():
git_add_files("PKGBUILD")
git_commit()
update_aur_repo()
if __name__ == '__main__':
single_main()
|
[
"farseerfc@gmail.com"
] |
farseerfc@gmail.com
|
28e13bc91b63fc55d62a0d4a6677303b7db657ad
|
6e8d58340f2be5f00d55e2629052c0bbc9dcf390
|
/eggs/mercurial-2.1.2-py2.6-linux-x86_64-ucs4.egg/mercurial/lock.py
|
cc2c533a2b2474daea45ead213c5e4f8b3c81b16
|
[
"CC-BY-2.5",
"MIT"
] |
permissive
|
JCVI-Cloud/galaxy-tools-prok
|
e57389750d33ac766e1658838cdb0aaf9a59c106
|
3c44ecaf4b2e1f2d7269eabef19cbd2e88b3a99c
|
refs/heads/master
| 2021-05-02T06:23:05.414371
| 2014-03-21T18:12:43
| 2014-03-21T18:12:43
| 6,092,693
| 0
| 2
|
NOASSERTION
| 2020-07-25T20:38:17
| 2012-10-05T15:57:38
|
Python
|
UTF-8
|
Python
| false
| false
| 4,642
|
py
|
# lock.py - simple advisory locking scheme for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import util, error
import errno, os, socket, time
import warnings
class lock(object):
'''An advisory lock held by one process to control access to a set
of files. Non-cooperating processes or incorrectly written scripts
can ignore Mercurial's locking scheme and stomp all over the
repository, so don't do that.
Typically used via localrepository.lock() to lock the repository
store (.hg/store/) or localrepository.wlock() to lock everything
else under .hg/.'''
# lock is symlink on platforms that support it, file on others.
# symlink is used because create of directory entry and contents
# are atomic even over nfs.
# old-style lock: symlink to pid
# new-style lock: symlink to hostname:pid
_host = None
def __init__(self, file, timeout=-1, releasefn=None, desc=None):
self.f = file
self.held = 0
self.timeout = timeout
self.releasefn = releasefn
self.desc = desc
self.postrelease = []
self.lock()
def __del__(self):
if self.held:
warnings.warn("use lock.release instead of del lock",
category=DeprecationWarning,
stacklevel=2)
# ensure the lock will be removed
# even if recursive locking did occur
self.held = 1
self.release()
def lock(self):
timeout = self.timeout
while True:
try:
self.trylock()
return 1
except error.LockHeld, inst:
if timeout != 0:
time.sleep(1)
if timeout > 0:
timeout -= 1
continue
raise error.LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
inst.locker)
def trylock(self):
if self.held:
self.held += 1
return
if lock._host is None:
lock._host = socket.gethostname()
lockname = '%s:%s' % (lock._host, os.getpid())
while not self.held:
try:
util.makelock(lockname, self.f)
self.held = 1
except (OSError, IOError), why:
if why.errno == errno.EEXIST:
locker = self.testlock()
if locker is not None:
raise error.LockHeld(errno.EAGAIN, self.f, self.desc,
locker)
else:
raise error.LockUnavailable(why.errno, why.strerror,
why.filename, self.desc)
def testlock(self):
"""return id of locker if lock is valid, else None.
If old-style lock, we cannot tell what machine locker is on.
with new-style lock, if locker is on this machine, we can
see if locker is alive. If locker is on this machine but
not alive, we can safely break lock.
The lock file is only deleted when None is returned.
"""
locker = util.readlock(self.f)
try:
host, pid = locker.split(":", 1)
except ValueError:
return locker
if host != lock._host:
return locker
try:
pid = int(pid)
except ValueError:
return locker
if util.testpid(pid):
return locker
# if locker dead, break lock. must do this with another lock
# held, or can race and break valid lock.
try:
l = lock(self.f + '.break', timeout=0)
util.unlink(self.f)
l.release()
except error.LockError:
return locker
def release(self):
"""release the lock and execute callback function if any
If the lock have been aquired multiple time, the actual release is
delayed to the last relase call."""
if self.held > 1:
self.held -= 1
elif self.held == 1:
self.held = 0
if self.releasefn:
self.releasefn()
try:
util.unlink(self.f)
except OSError:
pass
for callback in self.postrelease:
callback()
def release(*locks):
for lock in locks:
if lock is not None:
lock.release()
|
[
"root@ip-10-118-137-129.ec2.internal"
] |
root@ip-10-118-137-129.ec2.internal
|
066de1d07307922afb7ea23dfc46e85906ab1c9f
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/Gauss_v45r9/Gen/DecFiles/options/12113086.py
|
ce8a5b592a5552f40e8070fd8db1fbe8f91f91d7
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765
| 2018-12-12T14:41:07
| 2018-12-12T14:41:07
| 185,989,173
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,534
|
py
|
# file /home/hep/ss4314/cmtuser/Gauss_v45r9/Gen/DecFiles/options/12113086.py generated: Fri, 27 Mar 2015 16:10:07
#
# Event Type: 12113086
#
# ASCII decay Descriptor: [B+ -> K+ (Higgs0 -> mu+ mu-) ]cc
#
from Configurables import Generation
Generation().EventType = 12113086
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bu_KDarkBoson2MuMu,m=2000MeV,t=100ps,DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 521,-521 ]
from Gauss.Configuration import *
from Configurables import LHCb__ParticlePropertySvc as ParticlePropertySvc
from Configurables import Gauss, PrintMCTree, PrintMCDecayTreeTool, HistogramPersistencySvc, NTupleSvc, DumpHepMCDecay, DumpHepMCTree, GaussMonitor__CheckLifeTimeHepMC, GaussMonitor__CheckLifeTimeMC, GiGa, GiGaPhysListModular, GiGaHiggsParticles, GenerationToSimulation, PythiaProduction
ParticlePropertySvc().Particles = [ "H_10 87 25 0.0 2.0 1.0000e-10 Higgs0 25 0.000000e+000" ]
ApplicationMgr().ExtSvc += [ ParticlePropertySvc() ]
gigaHiggsPart = GiGaHiggsParticles()
gigaHiggsPart.Higgses = ["H_10"] # H_10, H_20, H_30
GiGaPhysListModular("ModularPL").PhysicsConstructors += [ gigaHiggsPart ]#
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 521
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 521,-521 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_521.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 12113086
|
[
"slavomirastefkova@b2pcx39016.desy.de"
] |
slavomirastefkova@b2pcx39016.desy.de
|
c9af53687ffb088e04769d9ad518028fef96b976
|
c8ae7695a26ec273a04e8043b7cf6fff8e1d6f71
|
/supervised_learning/0x11-attention/11-transformer.py
|
c85826c82e240f06cbaf00a0ad0414cdb76df3e6
|
[] |
no_license
|
xica369/holbertonschool-machine_learning
|
479e1c1675f8a256375bc65470233a261daf0039
|
4a7a8ff0c4f785656a395d0abf4f182ce1fef5bc
|
refs/heads/master
| 2020-12-22T00:04:16.702927
| 2020-10-08T21:32:44
| 2020-10-08T21:32:44
| 236,605,692
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,699
|
py
|
#!/usr/bin/env python3
"""
class Transformer
inherits from tensorflow.keras.Model
to create a transformer network
"""
import tensorflow as tf
Encoder = __import__('9-transformer_encoder').Encoder
Decoder = __import__('10-transformer_decoder').Decoder
class Transformer(tf.keras.Model):
"""
Transformer Network
"""
def __init__(self, N, dm, h, hidden, input_vocab, target_vocab,
max_seq_input, max_seq_target, drop_rate=0.1):
"""
Class constructor
- N: the number of blocks in the encoder and decoder
- dm: the dimensionality of the model
- h: the number of heads
- hidden: the number of hidden units in the fully connected layers
- input_vocab: the size of the input vocabulary
- target_vocab: the size of the target vocabulary
- max_seq_input: the maximum sequence length possible for the input
- max_seq_target: the maximum sequence length possible for the target
- drop_rate: the dropout rate
Public instance attributes:
- encoder: the encoder layer
- decoder: the decoder layer
- linear: a final Dense layer with target_vocab units
"""
super(Transformer, self).__init__()
self.encoder = Encoder(N, dm, h, hidden, input_vocab,
max_seq_input, drop_rate)
self.decoder = Decoder(N, dm, h, hidden, target_vocab,
max_seq_target, drop_rate)
self.linear = tf.keras.layers.Dense(target_vocab)
def call(self, inputs, target, training, encoder_mask, look_ahead_mask,
decoder_mask):
"""
- inputs: a tensor of shape (batch, input_seq_len, dm)
containing the inputs
- target: a tensor of shape (batch, target_seq_len, dm)
containing the target
- training: a boolean to determine if the model is training
- encoder_mask: the padding mask to be applied to the encoder
- look_ahead_mask: the look ahead mask to be applied to the decoder
- decoder_mask: the padding mask to be applied to the decoder
Returns:
a tensor of shape (batch, target_seq_len, target_vocab)
containing the transformer output
"""
# encoder_output.shape = (batch_size, inp_seq_len, d_model)
encoder_output = self.encoder(inputs, training, encoder_mask)
# decoder_output.shape = (batch_size, tar_seq_len, d_model)
decoder_output = self.decoder(target, encoder_output, training,
look_ahead_mask, decoder_mask)
output = self.linear(decoder_output)
return output
|
[
"761@holbertonschool.com"
] |
761@holbertonschool.com
|
8654792c1a49543b79c8d56cef1ace102e942f3b
|
802040662d6b0978480f8c72e0bd91c8c08201a9
|
/clindmri/registration/fsl/__init__.py
|
13241d900a67e2e7dafbb9da9ea434bb656fcd66
|
[
"LicenseRef-scancode-cecill-b-en"
] |
permissive
|
neurospin/caps-clindmri
|
a07fa214f5b6f7adf0f0f0e558830727bd7087ac
|
3105d2b1e4458c3be398391436be54bf59949a34
|
refs/heads/master
| 2022-06-16T19:05:51.125370
| 2016-03-30T08:28:14
| 2016-03-30T08:28:14
| 38,047,302
| 0
| 10
|
NOASSERTION
| 2022-05-19T10:16:34
| 2015-06-25T12:14:17
|
Python
|
UTF-8
|
Python
| false
| false
| 434
|
py
|
#! /usr/bin/env python
##########################################################################
# NSAP - Copyright (C) CEA, 2013
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
from .flirt import flirt
|
[
"antoine.grigis@cea.fr"
] |
antoine.grigis@cea.fr
|
246bbf69992559ed5836a1bd059223841ff94817
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02801/s643704920.py
|
e6c533210a3087a40548c3ca4a17dec387ba724e
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 195
|
py
|
C=input()
A=['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
for i in range(len(A)-1):
if A[i]==C:
print(A[i+1])
break
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
5abd60fd92bb98ae630bdbd52647696582f27caa
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_091/ch39_2020_10_07_03_48_28_137004.py
|
0d991cb66ed650d17f8e64e0156902c94b3b9ed1
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 421
|
py
|
d = 1
termos = 1
maior = 1
resultado = 1
while d < 1000:
a = d
termos = 1
while a != 1:
if a % 2 == 0:
a = a/2
termos += 1
else:
a = 3*a + 1
termos += 1
if a == 1:
if termos > maior:
maior = termos
resultado = d
d += 1
else:
d += 1
print(resultado)
|
[
"you@example.com"
] |
you@example.com
|
35d2a07f62d4095ba2f43918c7ac2da2ecf3d934
|
37e87b3d5e1ee9009f0ea0671bc0c6edf0e233b7
|
/035.py
|
33832baca3cd3737190cce0c0e3ffe86590269e5
|
[] |
no_license
|
Jane11111/Leetcode2021
|
d9f4987792938597bf89ff72ba6bbcb4a3f9d081
|
a95b871578aae0103066962c33b8c0f4ec22d0f2
|
refs/heads/master
| 2023-07-14T21:29:41.196752
| 2021-08-23T03:28:02
| 2021-08-23T03:28:02
| 344,804,297
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 360
|
py
|
# -*- coding: utf-8 -*-
# @Time : 2021-03-02 13:43
# @Author : zxl
# @FileName: 035.py
class Solution(object):
def searchInsert(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
i = 0
while i < len(nums) and nums[i]<target:
i += 1
return i
|
[
"791057615@qq.com"
] |
791057615@qq.com
|
bd1567cacdd578097bce86eceb5a80609d8254db
|
5db44def243996321c33a9961de82b9d6f6aafd3
|
/rkmt/engines/converter.py
|
5698f65f1f53a058bb3f40ecb821ec6f3f2fe508
|
[
"MIT"
] |
permissive
|
BokyLiu/rknn-model-tools
|
fa010b17b0a1f35fdee5f29d47cb6bbceffd3bdd
|
8af9c062ea4955a76ba9986a6cab6f771c9e678a
|
refs/heads/master
| 2022-04-09T12:49:29.417800
| 2020-02-25T13:48:38
| 2020-02-25T13:48:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,616
|
py
|
#!/usr/bin/env python3
import os
import sys
import shutil
from rknn.api import RKNN
from rkmt.engines.base import BaseEngine
from rkmt.utils.util import check_success
class Converter(BaseEngine):
def __init__(self, opt) -> None:
super().__init__(opt)
# Create RKNN object
self.rknn = RKNN(opt.verbose)
def convert(self) -> None:
"""Convert models form other platforms into RKNN format."""
opt = self.opt
# Config model
print('--> Configuring model')
self.rknn.config(channel_mean_value=opt.channel_mean_value,
reorder_channel=opt.reorder_channel)
print('done')
# Load model
print('--> Loading model...')
if opt.platform == 'tensorflow':
ret = self.rknn.load_tensorflow(
tf_pb=opt.model_file_path,
inputs=opt.inputs,
outputs=opt.outputs,
input_size_list=opt.input_size_list)
elif opt.platform == 'tflite':
ret = self.rknn.load_tflite(model=opt.model_file_path)
elif opt.platform == 'caffe':
ret = self.rknn.load_caffe(model=opt.graph_file_path,
proto='caffe',
blobs=opt.model_file_path)
elif opt.platform == 'onnx':
ret = self.rknn.load_onnx(model=opt.model_file_path)
elif opt.platform == 'darknet':
ret = self.rknn.load_darknet(model=opt.graph_file_path,
weight=opt.model_file_path)
elif opt.platform == 'pytorch':
ret = self.rknn.load_pytorch(model=opt.model_file_path,
input_size_list=opt.input_size_list)
elif opt.platform == 'mxnet':
ret = self.rknn.load_mxnet(symbol=opt.graph_file_path,
params=opt.model_file_path,
input_size_list=opt.input_size_list)
else:
raise RuntimeError('Unsupported platform: {} !'.format(
opt.platform))
check_success(ret, 'load model failed.')
print('done')
# Build model
print('--> Building model')
ret = self.rknn.build(do_quantization=not opt.no_quantization,
pre_compile=not opt.no_pre_compile,
dataset=opt.dataset_file_path)
check_success(ret, 'build model failed.')
print('done')
# Analyse model
if not opt.no_quantization and opt.analyse_accuracy:
print('--> Analyse model')
analysis_results_dir = '/tmp/accuracy_analysis/{}'.format(opt.name)
if os.path.exists(analysis_results_dir):
shutil.rmtree(analysis_results_dir)
os.makedirs(analysis_results_dir, exist_ok=True)
ret = self.rknn.accuracy_analysis(
inputs=opt.dataset_for_analysis_file_path
or opt.dataset_file_path,
output_dir=analysis_results_dir,
calc_qnt_error=True)
check_success(ret, 'analyse model failed.')
print('done')
# Export RKNN model
print('--> Export RKNN model')
ret = self.rknn.export_rknn(opt.output_path)
check_success(ret, 'export model failed.')
print('done')
if __name__ == '__main__':
model_path = sys.argv[1]
out_path = sys.argv[2]
pre_compile = sys.argv[3] in ['true', '1', 'True']
convert_model(model_path, out_path, pre_compile)
|
[
"xxdsox@gmail.com"
] |
xxdsox@gmail.com
|
a0fd9f8124403e36d3014d05f4728d5c9eb92625
|
4a31bfe6ebbf6d474b0c05ae4db55183acee2c25
|
/run/gram_ctc/cnn/test.py
|
421536d06b4b2cbf61ea021689cb836af1aa5f35
|
[] |
no_license
|
musyoku/chainer-speech-recognition
|
3c1a939d259abf6ff41faff7a81d109b93407e7a
|
de83fc497ec3f629ff43431ef863d45e8a9cdf68
|
refs/heads/master
| 2021-01-21T19:12:34.873720
| 2017-09-25T07:49:39
| 2017-09-25T07:49:39
| 92,125,978
| 11
| 1
| null | 2017-06-28T07:00:14
| 2017-05-23T03:40:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,922
|
py
|
# coding: utf8
from __future__ import division
from __future__ import print_function
from six.moves import xrange
import sys, argparse, time, cupy, math, os
import chainer
import numpy as np
import chainer.functions as F
from chainer import optimizers, cuda, serializers
sys.path.append("../../")
import config
from error import compute_minibatch_error
from dataset import wav_path_test, trn_path_test, cache_path, get_vocab, AugmentationOption, TestMinibatchIterator
from model import load_model
from util import stdout, print_bold
def main():
# データの読み込み
vocab, vocab_inv, BLANK = get_vocab()
vocab_size = len(vocab)
# ミニバッチを取れないものは除外
# GTX 1080 1台基準
batchsizes = [96, 64, 64, 64, 64, 64, 64, 64, 48, 48, 48, 32, 32, 24, 24, 24, 24, 24, 24, 24, 24, 24]
augmentation = AugmentationOption()
if args.augmentation:
augmentation.change_vocal_tract = True
augmentation.change_speech_rate = True
augmentation.add_noise = True
model = load_model(args.model_dir)
assert model is not None
if args.gpu_device >= 0:
chainer.cuda.get_device(args.gpu_device).use()
model.to_gpu(args.gpu_device)
xp = model.xp
# テスト
with chainer.using_config("train", False):
iterator = TestMinibatchIterator(wav_path_test, trn_path_test, cache_path, batchsizes, BLANK, buckets_limit=args.buckets_limit, option=augmentation, gpu=args.gpu_device >= 0)
buckets_errors = []
for batch in iterator:
x_batch, x_length_batch, t_batch, t_length_batch, bucket_idx, progress = batch
if args.filter_bucket_id and bucket_idx != args.filter_bucket_id:
continue
sys.stdout.write("\r" + stdout.CLEAR)
sys.stdout.write("computing CER of bucket {} ({} %)".format(bucket_idx + 1, int(progress * 100)))
sys.stdout.flush()
y_batch = model(x_batch, split_into_variables=False)
y_batch = xp.argmax(y_batch.data, axis=2)
error = compute_minibatch_error(y_batch, t_batch, BLANK, print_sequences=True, vocab=vocab_inv)
while bucket_idx >= len(buckets_errors):
buckets_errors.append([])
buckets_errors[bucket_idx].append(error)
avg_errors = []
for errors in buckets_errors:
avg_errors.append(sum(errors) / len(errors))
sys.stdout.write("\r" + stdout.CLEAR)
sys.stdout.flush()
print_bold("bucket CER")
for bucket_idx, error in enumerate(avg_errors):
print("{} {}".format(bucket_idx + 1, error * 100))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--gpu-device", "-g", type=int, default=0)
parser.add_argument("--model-dir", "-m", type=str, default="model")
parser.add_argument("--buckets-limit", type=int, default=None)
parser.add_argument("--filter-bucket-id", type=int, default=None)
parser.add_argument("--seed", "-seed", type=int, default=0)
parser.add_argument("--augmentation", "-augmentation", default=False, action="store_true")
args = parser.parse_args()
main()
|
[
"musyoku@users.noreply.github.com"
] |
musyoku@users.noreply.github.com
|
32a051a44ceb309b3121ec4546c25eb2f786ead4
|
59166105545cdd87626d15bf42e60a9ee1ef2413
|
/test/test_manhwa.py
|
a0e51954cc4efc701f2fe99e296fb3d938a402b6
|
[] |
no_license
|
mosoriob/dbpedia_api_client
|
8c594fc115ce75235315e890d55fbf6bd555fa85
|
8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc
|
refs/heads/master
| 2022-11-20T01:42:33.481024
| 2020-05-12T23:22:54
| 2020-05-12T23:22:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,197
|
py
|
# coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import dbpedia
from dbpedia.models.manhwa import Manhwa # noqa: E501
from dbpedia.rest import ApiException
class TestManhwa(unittest.TestCase):
"""Manhwa unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test Manhwa
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = dbpedia.models.manhwa.Manhwa() # noqa: E501
if include_optional :
return Manhwa(
previous_work = [
None
],
coden = [
'0'
],
translator = [
None
],
alternative_title = [
'0'
],
description = [
'0'
],
subsequent_work = [
None
],
chief_editor = [
None
],
music_composer = [
None
],
last_publication_date = [
'0'
],
type = [
'0'
],
lcc = [
'0'
],
lccn = [
'0'
],
main_character = [
None
],
id = '0',
literary_genre = [
None
],
based_on = [
None
],
first_publisher = [
None
],
first_publication_date = [
'0'
],
film_version = [
None
],
release_date = [
'0'
],
number_of_volumes = [
56
],
composer = [
None
],
author = [
None
],
preface_by = [
None
],
runtime = [
None
],
production_company = [
None
],
label = [
'0'
],
original_language = [
None
],
license = [
None
],
subject_term = [
'0'
],
original_title = [
'0'
],
circulation = [
56
],
oclc = [
'0'
],
producer = [
None
],
starring = [
None
],
completion_date = [
'0'
],
writer = [
None
],
magazine = [
None
]
)
else :
return Manhwa(
)
def testManhwa(self):
"""Test Manhwa"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
|
[
"maxiosorio@gmail.com"
] |
maxiosorio@gmail.com
|
6256896c95eb5d80d7e6e914a3902189aa23c3a6
|
dfab6798ece135946aebb08f93f162c37dd51791
|
/core/aokuang/aokuang/core/actors/htmldocument/Basic.py
|
7a2947db8a0a531affbd985cdd932ba6a3fffc75
|
[] |
no_license
|
yxqd/luban
|
405f5f7dcf09015d214079fe7e23d644332be069
|
00f699d15c572c8bf160516d582fa37f84ac2023
|
refs/heads/master
| 2020-03-20T23:08:45.153471
| 2012-05-18T14:52:43
| 2012-05-18T14:52:43
| 137,831,650
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 816
|
py
|
# -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
import luban
from ....DemoPanelActor import Actor as base
class Actor(base):
title='A html document'
description = [
]
def createDemoPanel(self, **kwds):
text = '''
<h1>Title here</h1>
<p>
Some more items
</p>
<ul>
<li> a </li>
<li> b </li>
</ul>
<p>a paragraph with a <a href="http://a.b.com" target="_blank">link</a> </p>
<p>©</p>
'''
return luban.e.htmldocument(text=text)
# End of file
|
[
"linjiao@caltech.edu"
] |
linjiao@caltech.edu
|
ad4355bf766f6babe97522e5183c3ee460733155
|
ef0d8fd55fbdb526e20d6c2b05e601f1d86587c5
|
/frappe/core/doctype/page/test_page.py
|
9ad215c1df1a7c2bb10a3b584693420811e33145
|
[
"MIT"
] |
permissive
|
indictranstech/v4_frappe
|
8976e84c14346196b8895ad6274740dca7fd6504
|
dba708c8aa83f503b9f4a264850307111a2b5f19
|
refs/heads/master
| 2021-09-26T12:26:29.994294
| 2018-10-30T06:09:36
| 2018-10-30T06:09:36
| 103,262,823
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
py
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
test_records = frappe.get_test_records('Page')
class TestPage(unittest.TestCase):
pass
|
[
"sagarshiragawakar@gmail.com"
] |
sagarshiragawakar@gmail.com
|
2add2a167b6f6ecc71809a1c88703d1a559ddd69
|
8f15e2170d08e61b4ac70f75ab755967b0009338
|
/mason/util/exception.py
|
2770ab632ea621e7b2718484c56eb27af1ca7de3
|
[
"Apache-2.0"
] |
permissive
|
malave/mason
|
eb86d60b96b16b6e49482097474c05c9805b5f24
|
bf45672124ef841bc16216c293034f4ccc506621
|
refs/heads/master
| 2023-06-12T21:59:46.858046
| 2021-06-11T16:07:18
| 2021-06-11T16:07:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 136
|
py
|
import traceback
def message(e: Exception):
return ''.join(traceback.format_exception(etype=type(e), value=e, tb=e.__traceback__))
|
[
"kprifogle1@gmail.com"
] |
kprifogle1@gmail.com
|
3cff2276a49a7356bbc1bbd7c7052699a5e74df7
|
3876b862c61d64c9c225eb8a6305853a2ac16325
|
/pal/writer/access_mechanism/none.py
|
246e92b2175a0a072f4958ee21826c2b4c011205
|
[
"MIT"
] |
permissive
|
connojd/pal
|
0d122de7e9b6fe659e35dd44310b9783830c4838
|
800f8bd6de0004313d4208da619b4ef98d2e1e76
|
refs/heads/master
| 2020-09-11T06:17:08.837738
| 2019-11-21T19:18:31
| 2019-11-21T19:23:23
| 221,967,287
| 0
| 0
|
MIT
| 2019-11-15T16:56:22
| 2019-11-15T16:56:22
| null |
UTF-8
|
Python
| false
| false
| 513
|
py
|
from pal.writer.access_mechanism.access_mechanism \
import AccessMechanismWriter
class NoneAccessMechanismWriter(AccessMechanismWriter):
def declare_access_mechanism_dependencies(self, outfile, register):
pass
def call_readable_access_mechanism(self, outfile, register,
access_mechanism, var):
pass
def call_writable_access_mechanism(self, outfile, register,
access_mechanism, value):
pass
|
[
"jared.wright12@gmail.com"
] |
jared.wright12@gmail.com
|
fd03014fc14d457010bc0bdcd28a64aefbdd894e
|
f72fe33d1a181f89d2464cc07744dbd275a7d071
|
/CNNectome/postprocessing/partner_annotations/luigi_pipeline_spec_dir/prepare_luigi.py
|
280ade177af970bf8a531ddad83d057ed3b73873
|
[
"BSD-2-Clause"
] |
permissive
|
saalfeldlab/CNNectome
|
6c8d44d8cc2e161a91b10abb7b4a425d7fc64d1b
|
c043e3111ff5ec6707a68edffae54eb902a1652d
|
refs/heads/master
| 2023-04-03T15:11:36.586030
| 2022-06-15T14:12:17
| 2022-06-15T14:12:17
| 124,144,317
| 8
| 10
|
BSD-2-Clause
| 2023-03-24T22:16:04
| 2018-03-06T22:04:16
|
Python
|
UTF-8
|
Python
| false
| false
| 1,334
|
py
|
import luigi
import os
class CheckCheckpoint(luigi.ExternalTask):
it = luigi.IntParameter()
path = luigi.Parameter()
@property
def priority(self):
if int(self.it) % 10000 == 0:
return 1.0 / int(self.it)
else:
return 0.0
def output(self):
base = os.path.join(self.path, "unet_checkpoint_" + str(self.it))
return [
luigi.LocalTarget(base + ".data-00000-of-00001"),
luigi.LocalTarget(base + ".index"),
luigi.LocalTarget(base + ".meta"),
]
class MakeItFolder(luigi.ExternalTask):
it = luigi.IntParameter()
path = luigi.Parameter()
data_eval = luigi.TupleParameter()
@property
def priority(self):
return self.it
def requires(self):
return CheckCheckpoint(self.it, self.path)
def output(self):
base = os.path.dirname(self.input()[0].fn)
return luigi.LocalTarget(
os.path.join(base, "evaluation", str(self.it), self.data_eval[-1])
)
def run(self):
# make the folders
base = os.path.dirname(self.input()[0].fn)
for de in self.data_eval:
if not os.path.exists(os.path.join(base, "evaluation", str(self.it), de)):
os.makedirs(os.path.join(base, "evaluation", str(self.it), de))
|
[
"heinrichl@janelia.hhmi.org"
] |
heinrichl@janelia.hhmi.org
|
69b6aec08c2ed56cfcd0213106d16764a708984a
|
ec1deb682fb96a1f937f2fca5f161aa951462876
|
/pythonTextBook/exercises/files/exTenThree.py
|
edbaf04d420fb624cf41f51648403964fd6d11d4
|
[] |
no_license
|
AnatoliKosarev/Python-beginner-course--Teclado-
|
31d82f5e9a1f39e2970323bed9de1fd539990565
|
fa91199938d6975b5874341585343566caaf3600
|
refs/heads/main
| 2023-06-30T12:14:33.779827
| 2021-07-24T11:16:19
| 2021-07-24T11:16:19
| 376,371,590
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 200
|
py
|
def save_guest_name():
name = input("Enter your name: ")
if name:
with open("guest.txt", "a") as f:
f.write(f"{name}\n")
if __name__ == "__main__":
save_guest_name()
|
[
"anatoli.kosarev@gmail.com"
] |
anatoli.kosarev@gmail.com
|
6d5b1b017e641d25258e4a6f3936bcc0de658724
|
c2acac76572d0784f29d1a0cc529c0f753aac184
|
/apmec/db/migration/alembic_migrations/versions/6e56d4474b2a_blob_to_json_text.py
|
8469f5fbc095fe33ef7934b322650f3f92daf624
|
[
"Apache-2.0"
] |
permissive
|
openMECPlatform/apmec
|
6d1d8380385beda7c2e59539c5001dffe9da8672
|
1046bf4730d2bdf95e3cd7efe487cb3fbf1fcd22
|
refs/heads/master
| 2023-03-26T13:52:43.647511
| 2021-03-31T16:40:03
| 2021-03-31T16:40:03
| 353,424,268
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,584
|
py
|
# Copyright 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""blob-to-json-text
Revision ID: 6e56d4474b2a
Revises: f958f58e5daa
Create Date: 2016-06-01 09:50:46.296206
"""
import json
import pickle
from alembic import op
import sqlalchemy as sa
from apmec.db import types
# revision identifiers, used by Alembic.
revision = '6e56d4474b2a'
down_revision = 'f958f58e5daa'
def _migrate_data(table, column_name):
meta = sa.MetaData(bind=op.get_bind())
t = sa.Table(table, meta, autoload=True)
for r in t.select().execute():
stmt = t.update().where(t.c.id == r.id).values(
{column_name: json.dumps(pickle.loads(getattr(r, column_name)))})
op.execute(stmt)
op.alter_column(table,
column_name,
type_=types.Json)
def upgrade(active_plugins=None, options=None):
_migrate_data('vims', 'placement_attr')
_migrate_data('vimauths', 'vim_project')
_migrate_data('vimauths', 'auth_cred')
_migrate_data('devices', 'placement_attr')
|
[
"tung.doan_van@tu-dresden.de"
] |
tung.doan_van@tu-dresden.de
|
7f1679ac1ba910f2b2bf2f711c9cb9d730731be5
|
a14ec6e367e6a471bfc74c066fb958ef585bc269
|
/2020/01/a.py
|
2d6e886022516fb0bde3df14250fdb6fd3e78fad
|
[] |
no_license
|
jimhendy/AoC
|
90641814ed431f46a8500ff0f022c6c957567563
|
a1727f88bc2e6f739d65902dce188377966b3fb4
|
refs/heads/master
| 2023-09-02T14:48:39.860352
| 2023-08-28T08:09:19
| 2023-08-28T08:09:19
| 225,152,422
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238
|
py
|
import os
def run(inputs):
nums = list(map(int, inputs.split(os.linesep)))
for i, n_i in enumerate(nums[:-1]):
for n_j in nums[i + 1 :]:
if n_i + n_j == 2020:
return n_i * n_j
return None
|
[
"jimhendy88@gmail.com"
] |
jimhendy88@gmail.com
|
b7e3d0e691bf434020466185cae8f091158ce029
|
4863a76461ebdb2abf384df672bf07170b5cc4ce
|
/scanner.py
|
3ca86d2aea1e70a868641fc142691d5e07527b56
|
[
"MIT"
] |
permissive
|
squidgameholders/Website-Scanner
|
73a3db8ea39ab5b91c309da6491db60a1cdc83a7
|
ee27f265913957c5357ba673b61338336c222672
|
refs/heads/master
| 2023-03-19T04:12:29.324705
| 2015-02-17T18:20:53
| 2015-02-17T18:20:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 866
|
py
|
#!/usr/bin/env python
import difflib
import hashlib
import requests
bags_urls = [
'http://www.mansurgavriel.com/collections/all',
]
def get_content():
#r = requests.get("http://www.mansurgavriel.com/collections/all")
r = requests.get("http://new.yancao.me")
return r.content
def hash_obj(content):
hash_obj = hashlib.md5(content)
return hash_obj.hexdigest()
def diff(old, new):
"""
Helper function. Returns a string containing the unified diff of two multiline strings.
"""
old=old.splitlines(1)
new=new.splitlines(1)
diff=difflib.unified_diff(old, new)
return ''.join(diff)
# c1 = get_content()
# print hash_obj(c1)
# from time import sleep
# sleep(10)
# c2 = get_content()
# print hash_obj(c2)
# print diff(c1, c2)
# print hash_obj(c1) == hash_obj(c2)
r = requests.get("http://new.yancao.me")
|
[
"cyandterry@hotmail.com"
] |
cyandterry@hotmail.com
|
5ea20db9e23c11fc2b7e25e17da92ee9a931ec95
|
1eed777d0174fecc14fee1bf1ca6e47c412666cd
|
/virtual/bin/chardetect
|
2097c185eb4a4d78969f5693629a1c542ed3b987
|
[] |
no_license
|
ALKEMIA-CHARLES/Postify
|
9ff3d36b8962660febc56ef0de139b792f074756
|
a915da3a9b9c2011f47e7a31e6cf2e4d75d4fd39
|
refs/heads/master
| 2022-12-21T21:28:03.813680
| 2020-03-30T06:50:40
| 2020-03-30T06:50:40
| 243,502,216
| 0
| 0
| null | 2022-12-08T03:41:52
| 2020-02-27T11:23:19
|
Python
|
UTF-8
|
Python
| false
| false
| 286
|
#!/home/charles/Documents/moringa-school-projects/myposts/virtual/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"charlesmtawaliJr@gmail.com"
] |
charlesmtawaliJr@gmail.com
|
|
80b11878a88ff37497c211fb5f2bd12f6c51e000
|
616c3c02be31b9ae4d06bd7c5a8d4a2e7c446aa1
|
/796.旋转字符串.py
|
2a3f2c04330031f2887eb2ce899980a55c92aab2
|
[] |
no_license
|
L1nwatch/leetcode-python
|
8b7c47c04ee9400d50d8b0764a544a0463df8f06
|
0484cbc3273ada25992c72105658cd67411c5d39
|
refs/heads/master
| 2023-01-11T14:53:15.339276
| 2023-01-11T05:24:43
| 2023-01-11T05:24:43
| 194,516,548
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
#
# @lc app=leetcode.cn id=796 lang=python3
#
# [796] 旋转字符串
#
# @lc code=start
class Solution:
def rotateString(self, s: str, goal: str) -> bool:
if len(s) != len(goal):
return False
for i in range(len(s)):
if s[i:]+s[:i] == goal:
return True
return False
# @lc code=end
|
[
"watch1602@gmail.com"
] |
watch1602@gmail.com
|
c821fc32436da400c7d10c1e029fc1b0c73e5cfc
|
037a03d4b8b81bc39dc41cb4f3726f8297c8b672
|
/0348.py
|
4cabe4d37e2f40b1fd122a1cc55f51f9e02481ec
|
[] |
no_license
|
Agchai52/Leetcode1
|
ee3433ef6f6c3ddd800204c25a456dc7c3fd0053
|
9535d038bee690b7c7aeca352a4ab32d188684bb
|
refs/heads/master
| 2021-08-22T02:59:45.632548
| 2020-05-21T00:31:45
| 2020-05-21T00:31:45
| 185,273,962
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,488
|
py
|
class TicTacToe(object):
def __init__(self, n):
"""
Initialize your data structure here.
:type n: int
"""
self.grid = [['']*n for _ in range(n)]
def move(self, row, col, player):
"""
Player {player} makes a move at ({row}, {col}).
@param row The row of the board.
@param col The column of the board.
@param player The player, can be either 1 or 2.
@return The current winning condition, can be either:
0: No one wins.
1: Player 1 wins.
2: Player 2 wins.
:type row: int
:type col: int
:type player: int
:rtype: int
"""
if player == 1:
mark = 'X'
else:
mark = 'O'
n = len(self.grid)
self.grid[row][col] = mark
sum_of_row = sum([self.grid[row][c] == mark for c in range(n)])
sum_of_col = sum([self.grid[r][col]== mark for r in range(n)])
sum_of_left_d = sum([self.grid[i][i] == mark for i in range(n)])
sum_of_right_d = sum([self.grid[i][n-1-i] == mark for i in range(n)])
if sum_of_row == n or sum_of_col == n or sum_of_left_d== n or sum_of_right_d == n:
return player
else:
return 0
#https://blog.csdn.net/danspace1/article/details/86616981
# Your TicTacToe object will be instantiated and called as such:
# obj = TicTacToe(n)
# param_1 = obj.move(row,col,player)
|
[
"noreply@github.com"
] |
Agchai52.noreply@github.com
|
7ad5eaabf7279f414bab5d88d5d8c71193018ee8
|
c6fea702b817b719d9774b66d76c7cbaf1369d7d
|
/plilja-python3/day10/day10.py
|
81c932996f18e594549035d1a6645113a8d9b5c3
|
[] |
no_license
|
piksel/advent_of_code_2016
|
eae359228372b53f88430360b38e48210ac6da40
|
996fe2a999949fab420115474b32b40ed8ba8414
|
refs/heads/master
| 2021-01-12T10:12:19.865894
| 2016-12-18T16:55:58
| 2016-12-18T16:55:58
| 76,386,900
| 1
| 0
| null | 2016-12-13T18:29:15
| 2016-12-13T18:29:15
| null |
UTF-8
|
Python
| false
| false
| 2,078
|
py
|
from collections import *
import sys
def solve(inp):
value_to_bot = {}
bot_to_values = defaultdict(set)
outputs = defaultdict(list)
giveaways = []
Giveaway = namedtuple('Giveaway', 'bot low_to low_type high_to high_type')
for s in inp:
instruction = s.split()
if instruction[0] == 'value':
value = int(instruction[1])
bot = int(instruction[5])
bot_to_values[bot] |= {value}
value_to_bot[value] = bot
else:
assert instruction[0] == 'bot'
bot = int(instruction[1])
low_type = instruction[5]
low_to = int(instruction[6])
high_type = instruction[10]
high_to = int(instruction[11])
giveaways += [(bot, Giveaway(bot, low_to, low_type, high_to, high_type))]
while giveaways:
if 61 in value_to_bot and 17 in value_to_bot and value_to_bot[61] == value_to_bot[17]:
step1 = value_to_bot[61]
for i in range(0, len(giveaways)):
(bot, giveaway) = giveaways[i]
if len(bot_to_values[bot]) == 2:
low = min(bot_to_values[bot])
high = max(bot_to_values[bot])
bot_to_values[bot] -= {low, high}
value_to_bot.pop(low)
value_to_bot.pop(high)
if giveaway.low_type == 'bot':
bot_to_values[giveaway.low_to] |= {low}
value_to_bot[low] = giveaway.low_to
else:
outputs[giveaway.low_to] += [low]
if giveaway.high_type == 'bot':
bot_to_values[giveaway.high_to] |= {high}
value_to_bot[high] = giveaway.high_to
else:
outputs[giveaway.high_to] += [high]
giveaways = giveaways[:i] + giveaways[i + 1:]
break
step2 = outputs[0][0] * outputs[1][0] * outputs[2][0]
return (step1, step2)
inp = sys.stdin.readlines()
(step1, step2) = solve(inp)
print(step1)
print(step2)
|
[
"patlil@kth.se"
] |
patlil@kth.se
|
914e6f7c89aa39a56fd10010b11450b11fe0aa41
|
7a1b88d06ea18772b065b43d775cec6dd2acdf80
|
/1769.py
|
335ddf10102d99cbef71424085f8d7fa2bffbf8f
|
[] |
no_license
|
skaurl/baekjoon-online-judge
|
28144cca45168e79b1ae0baa9a351f498f8d19ab
|
1620d298c2f429e03c5f9387d8aca13763f5c731
|
refs/heads/master
| 2023-07-26T10:07:29.724066
| 2021-09-07T09:21:02
| 2021-09-07T09:21:02
| 299,019,978
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 240
|
py
|
def asdf(n):
result = 0
for i in n:
result+=int(i)
return str(result)
n = input()
cnt = 0
if int(asdf(n))%3==0:
check="YES"
else:
check="NO"
while len(n)!=1:
n = asdf(n)
cnt+=1
print(cnt)
print(check)
|
[
"dr_lunars@naver.com"
] |
dr_lunars@naver.com
|
5c0876753f3a3beecafacee2e309160167af9ad7
|
073e5e503e01b44881edffc81e6ad1efe04f4520
|
/python/collibra-core/collibra_core/model/paged_response_workflow_task.py
|
56ad3e4d40f3107fd98e85069d58a6a87c47f06c
|
[] |
no_license
|
AaronCWacker/collibra
|
f251d3556192c03b2be5acb0101608b4f2d87c9d
|
8bd1de3eecc5835bc96feacc17c6dd86ed70ac85
|
refs/heads/main
| 2023-03-05T05:00:48.940201
| 2021-01-20T03:03:17
| 2021-01-29T21:17:49
| 416,506,152
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,369
|
py
|
"""
Collibra Data Governance Center Core API
<p>The Core REST API allows you to create your own integrations with Collibra Data Governance Center.</p><p><i>Create custom applications to help users get access to the right data.</i></p> # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from collibra_core.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from collibra_core.model.workflow_task import WorkflowTask
globals()['WorkflowTask'] = WorkflowTask
class PagedResponseWorkflowTask(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'total': (int,), # noqa: E501
'offset': (int,), # noqa: E501
'limit': (int,), # noqa: E501
'results': ([WorkflowTask],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'total': 'total', # noqa: E501
'offset': 'offset', # noqa: E501
'limit': 'limit', # noqa: E501
'results': 'results', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PagedResponseWorkflowTask - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
total (int): The total number of results.. [optional] # noqa: E501
offset (int): The offset for the results.. [optional] # noqa: E501
limit (int): The maximum number of results to be returned.. [optional] # noqa: E501
results ([WorkflowTask]): The list of results.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
|
[
"srid@streamsets.com"
] |
srid@streamsets.com
|
7919fddec59e58186fdf0241675e0b0acd49cd6e
|
a63410602c5bb9ffbf4d37769a1df1a271d7ff8f
|
/src/sentry/nodestore/riak/backend.py
|
539d2d182d500d29d5564f4b69c0f3a94e6125cc
|
[
"BSD-2-Clause"
] |
permissive
|
meituan/sentry
|
d824d5e9096fe4e8604cebbea0a6c939ce12ac44
|
cd35f2345aaef1346e95b4ce5fed12fb0b648db7
|
refs/heads/master
| 2023-06-07T06:43:20.007879
| 2013-12-27T05:18:12
| 2013-12-27T05:18:12
| 15,473,792
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,689
|
py
|
"""
sentry.nodestore.riak.backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import riak
import riak.resolver
from sentry.nodestore.base import NodeStorage
class RiakNodeStorage(NodeStorage):
"""
A Riak-based backend for storing node data.
>>> RiakNodeStorage(nodes=[{'host':'127.0.0.1','http_port':8098}])
"""
def __init__(self, nodes, bucket='nodes',
resolver=riak.resolver.last_written_resolver, **kwargs):
self.conn = riak.RiakClient(
nodes=nodes, resolver=resolver, **kwargs)
self.bucket = self.conn.bucket(bucket)
super(RiakNodeStorage, self).__init__(**kwargs)
def create(self, data):
obj = self.bucket.new(data=data)
obj.store()
return obj.key
def delete(self, id):
obj = self.bucket.new(key=id)
obj.delete()
def get(self, id):
# just fetch it from a random backend, we're not aiming for consistency
obj = self.bucket.get(key=id, r=1)
if not obj:
return None
return obj.data
def get_multi(self, id_list, r=1):
result = self.bucket.multiget(id_list)
return dict(
(obj.key, obj.data)
for obj in result
)
def set(self, id, data):
obj = self.bucket.new(key=id, data=data)
obj.store()
def cleanup(self, cutoff_timestamp):
# TODO(dcramer): we should either index timestamps or have this run
# a map/reduce (probably the latter)
raise NotImplementedError
|
[
"dcramer@gmail.com"
] |
dcramer@gmail.com
|
78637d9daeacf6d6c1a53731dbbbe95ed1f0eb3f
|
2d93403fac1645fdbf1727f0d17fbea6eeef470a
|
/decorators/demo.py
|
bf5bfdf65750333628011b83fb028c9a19c0a484
|
[
"MIT"
] |
permissive
|
Minkov/python-oop-2020-02
|
d13c8c8feaa9ad41c524fc82887a98745115ac57
|
d2acb1504c1a135cded2ae6ff42acccb303d9ab1
|
refs/heads/master
| 2021-02-04T00:43:14.997404
| 2020-03-26T18:21:03
| 2020-03-26T18:21:03
| 243,588,830
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 422
|
py
|
# def f1():
# print(1)
#
#
# def execute_operation(func):
# print(f'Started execution of {func.__name__}')
# func()
# print(f'Execution of {func.__name__} ended')
#
#
# execute_operation(f1)
# execute_operation(lambda: print(2))
def sum2(x):
def sum_internal(y):
return x + y + z
return sum_internal
sum3 = sum2(3)
sum4 = sum2(4)
print(sum3(2))
print(sum4(2))
|
[
"DonchoMinkov@gmail.com"
] |
DonchoMinkov@gmail.com
|
27415cb1d4b42809e8acaf23400ac4b0155a6eba
|
b1cf54e4d6f969d9084160fccd20fabc12c361c2
|
/dsa/recursion/reverse_string.py
|
0f8c0fc52f7f09d9560c02f86b5d4e92a141f047
|
[] |
no_license
|
zarkle/code_challenges
|
88a53477d6f9ee9dd71577678739e745b9e8a694
|
85b7111263d4125b362184df08e8a2265cf228d5
|
refs/heads/master
| 2021-06-10T11:05:03.048703
| 2020-01-23T06:16:41
| 2020-01-23T06:16:41
| 136,668,643
| 0
| 1
| null | 2019-02-07T23:35:59
| 2018-06-08T21:44:26
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 324
|
py
|
"""
Reverse a String
This interview question requires you to reverse a string using recursion. Make sure to think of the base case here.
Again, make sure you use recursion to accomplish this. Do not slice (e.g. string[::-1]) or use iteration, there must be a recursive call for the function.
"""
def reverse(s):
pass
|
[
"beverly.pham@gmail.com"
] |
beverly.pham@gmail.com
|
cad321d76d8a31d63c2ff825f3c60e5531e36aed
|
0f16edb46a48f9b5a125abb56fc0545ede1d65aa
|
/gmn/src/d1_gmn/tests/test_proxy_mode.py
|
02187cb5bb6855cde698e87f8f984985f130fdc4
|
[
"Apache-2.0"
] |
permissive
|
DataONEorg/d1_python
|
5e685f1af0c356190f2d6df45d1ac849e2f56972
|
d72a9461894d9be7d71178fb7310101b8ef9066a
|
refs/heads/master
| 2023-08-29T03:16:38.131760
| 2023-06-27T21:59:37
| 2023-06-27T21:59:37
| 60,103,877
| 15
| 12
|
Apache-2.0
| 2023-09-06T18:27:53
| 2016-05-31T16:01:00
|
Python
|
UTF-8
|
Python
| false
| false
| 7,028
|
py
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GMN can handle storage of the object bytes itself, or it can defer storage of the
object bytes to another web server (proxy mode).
The mode is selectable on a per object basis
"""
import base64
import json
import re
import django.test
import freezegun
import pytest
import requests
import responses
import d1_common.type_conversions
import d1_common.types.exceptions
import d1_common.url
import d1_gmn.app.proxy
import d1_gmn.app.sciobj_store
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.instance_generator.identifier
import d1_test.mock_api.catch_all
import d1_test.mock_api.get
import d1_gmn.tests.gmn_mock
AUTH_USERNAME = "Auth user name"
AUTH_PASSWORD = "Auth user password !@#$%"
@d1_test.d1_test_case.reproducible_random_decorator("TestProxyMode")
@freezegun.freeze_time("1999-09-09")
class TestProxyMode(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def create_and_check_proxy_obj(self, client, do_redirect, use_invalid_url=False):
"""Create a sciobj that wraps object bytes stored on a 3rd party server. We use
Responses to simulate the 3rd party server.
If ``do_redirect`` is True, a 302 redirect operation is added. This tests that
GMN is able to follow redirects when establishing the proxy stream.
"""
# Use the MNRead.get() mock API to simulate a remote 3rd party server that holds
# proxy objects.
d1_test.mock_api.get.add_callback(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
# Create a proxy object.
pid = d1_test.instance_generator.identifier.generate_pid()
if not use_invalid_url:
proxy_url = self.get_remote_sciobj_url(pid, client)
else:
proxy_url = self.get_invalid_sciobj_url(pid, client)
pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
client, pid, sid=True, vendor_dict=self.vendor_proxy_mode(proxy_url)
)
# Check that object was not stored locally
assert not d1_gmn.app.sciobj_store.is_existing_sciobj_file(pid)
# Retrieve the proxy object and check it
response = self.call_d1_client(client.get, pid)
recv_sciobj_bytes = response.content
assert recv_sciobj_bytes == sciobj_bytes
return response
def get_remote_sciobj_url(self, pid, client):
return d1_common.url.joinPathElements(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL,
d1_common.type_conversions.get_version_tag_by_pyxb_binding(
client.pyxb_binding
),
"object",
d1_common.url.encodePathElement(pid),
)
def get_invalid_sciobj_url(self, pid, client):
return d1_common.url.joinPathElements(
d1_test.d1_test_case.MOCK_INVALID_BASE_URL,
d1_common.type_conversions.get_version_tag_by_pyxb_binding(
client.pyxb_binding
),
"object",
d1_common.url.encodePathElement(pid),
)
def get_remote_sciobj_bytes(self, pid, client):
sciobj_url = self.get_remote_sciobj_url(pid, client)
return requests.get(sciobj_url).content
def decode_basic_auth(self, basic_auth_str):
"""Decode a Basic Authentication header to (username, password)."""
m = re.match(r"Basic (.*)", basic_auth_str)
return (
base64.standard_b64decode(m.group(1).encode("utf-8"))
.decode("utf-8")
.split(":")
)
def test_1000(self, gmn_client_v1_v2):
"""create(): Proxy mode: Create and retrieve proxy object, no redirect."""
self.create_and_check_proxy_obj(gmn_client_v1_v2, do_redirect=False)
def test_1020(self, gmn_client_v1_v2):
"""create(): Proxy mode: Create and retrieve proxy object with redirect."""
self.create_and_check_proxy_obj(gmn_client_v1_v2, do_redirect=True)
def test_1040(self):
"""create(): Proxy mode: Passing invalid url raises InvalidRequest."""
with pytest.raises(d1_common.types.exceptions.InvalidRequest):
self.create_and_check_proxy_obj(
self.client_v2,
self.v2,
# do_redirect=False,
use_invalid_url=True,
)
@django.test.override_settings(
PROXY_MODE_BASIC_AUTH_ENABLED=False,
PROXY_MODE_BASIC_AUTH_USERNAME=AUTH_USERNAME,
PROXY_MODE_BASIC_AUTH_PASSWORD=AUTH_PASSWORD,
PROXY_MODE_STREAM_TIMEOUT=30,
)
def test_1050(self):
"""get(): Authentication headers: Not passed to remote server when
AUTH_ENABLED=False.
We check this implicitly by checking that the method that generates the
Authentication header IS NOT called.
"""
with d1_gmn.tests.gmn_mock.detect_proxy_auth() as m:
self.create_and_check_proxy_obj(self.client_v2, do_redirect=False)
assert m.call_count == 0
@django.test.override_settings(
PROXY_MODE_BASIC_AUTH_ENABLED=True,
PROXY_MODE_BASIC_AUTH_USERNAME=AUTH_USERNAME,
PROXY_MODE_BASIC_AUTH_PASSWORD=AUTH_PASSWORD,
PROXY_MODE_STREAM_TIMEOUT=30,
)
def test_1060(self):
"""get(): Authentication headers: Passed to remote server when
AUTH_ENABLED=True.
We check this implicitly by checking that the method that generates the
Authentication header IS called.
"""
with d1_gmn.tests.gmn_mock.detect_proxy_auth() as m:
self.create_and_check_proxy_obj(self.client_v2, do_redirect=False)
assert m.call_count ==1
@django.test.override_settings(
PROXY_MODE_BASIC_AUTH_ENABLED=True,
PROXY_MODE_BASIC_AUTH_USERNAME=AUTH_USERNAME,
PROXY_MODE_BASIC_AUTH_PASSWORD=AUTH_PASSWORD,
PROXY_MODE_STREAM_TIMEOUT=30,
)
def test_1070(self):
"""_mk_http_basic_auth_header(): Returns a correctly encoded basic auth
header value.
"""
auth_str = d1_gmn.app.proxy._mk_http_basic_auth_header()["Authorization"]
user_str, pw_str = self.decode_basic_auth(auth_str)
assert user_str == AUTH_USERNAME
assert pw_str == AUTH_PASSWORD
|
[
"git@dahlsys.com"
] |
git@dahlsys.com
|
a75c0da167a87c4e4c4d0f4d60cba2a79742103d
|
d2c4934325f5ddd567963e7bd2bdc0673f92bc40
|
/tests/artificial/transf_None/trend_ConstantTrend/cycle_0/ar_12/test_artificial_1024_None_ConstantTrend_0_12_100.py
|
f6d41bf4691b38fb611f12d410521c6f82596ac1
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
jmabry/pyaf
|
797acdd585842474ff4ae1d9db5606877252d9b8
|
afbc15a851a2445a7824bf255af612dc429265af
|
refs/heads/master
| 2020-03-20T02:14:12.597970
| 2018-12-17T22:08:11
| 2018-12-17T22:08:11
| 137,104,552
| 0
| 0
|
BSD-3-Clause
| 2018-12-17T22:08:12
| 2018-06-12T17:15:43
|
Python
|
UTF-8
|
Python
| false
| false
| 271
|
py
|
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 0, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 12);
|
[
"antoine.carme@laposte.net"
] |
antoine.carme@laposte.net
|
fa3a11c01c620e678186a5d1ba7fb254f3c4cfd8
|
50948d4cb10dcb1cc9bc0355918478fb2841322a
|
/azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/models/image_os_disk.py
|
db287a53c829cd6de75306f14adce5f33c0b6c14
|
[
"MIT"
] |
permissive
|
xiafu-msft/azure-sdk-for-python
|
de9cd680b39962702b629a8e94726bb4ab261594
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
refs/heads/master
| 2023-08-12T20:36:24.284497
| 2019-05-22T00:55:16
| 2019-05-22T00:55:16
| 187,986,993
| 1
| 0
|
MIT
| 2020-10-02T01:17:02
| 2019-05-22T07:33:46
|
Python
|
UTF-8
|
Python
| false
| false
| 3,721
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ImageOSDisk(Model):
"""Describes an Operating System disk.
All required parameters must be populated in order to send to Azure.
:param os_type: Required. This property allows you to specify the type of
the OS that is included in the disk if creating a VM from a custom image.
<br><br> Possible values are: <br><br> **Windows** <br><br> **Linux**.
Possible values include: 'Windows', 'Linux'
:type os_type: str or
~azure.mgmt.compute.v2018_06_01.models.OperatingSystemTypes
:param os_state: Required. The OS State. Possible values include:
'Generalized', 'Specialized'
:type os_state: str or
~azure.mgmt.compute.v2018_06_01.models.OperatingSystemStateTypes
:param snapshot: The snapshot.
:type snapshot: ~azure.mgmt.compute.v2018_06_01.models.SubResource
:param managed_disk: The managedDisk.
:type managed_disk: ~azure.mgmt.compute.v2018_06_01.models.SubResource
:param blob_uri: The Virtual Hard Disk.
:type blob_uri: str
:param caching: Specifies the caching requirements. <br><br> Possible
values are: <br><br> **None** <br><br> **ReadOnly** <br><br> **ReadWrite**
<br><br> Default: **None for Standard storage. ReadOnly for Premium
storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
:type caching: str or ~azure.mgmt.compute.v2018_06_01.models.CachingTypes
:param disk_size_gb: Specifies the size of empty data disks in gigabytes.
This element can be used to overwrite the name of the disk in a virtual
machine image. <br><br> This value cannot be larger than 1023 GB
:type disk_size_gb: int
:param storage_account_type: Specifies the storage account type for the
managed disk. UltraSSD_LRS cannot be used with OS Disk. Possible values
include: 'Standard_LRS', 'Premium_LRS', 'StandardSSD_LRS', 'UltraSSD_LRS'
:type storage_account_type: str or
~azure.mgmt.compute.v2018_06_01.models.StorageAccountTypes
"""
_validation = {
'os_type': {'required': True},
'os_state': {'required': True},
}
_attribute_map = {
'os_type': {'key': 'osType', 'type': 'OperatingSystemTypes'},
'os_state': {'key': 'osState', 'type': 'OperatingSystemStateTypes'},
'snapshot': {'key': 'snapshot', 'type': 'SubResource'},
'managed_disk': {'key': 'managedDisk', 'type': 'SubResource'},
'blob_uri': {'key': 'blobUri', 'type': 'str'},
'caching': {'key': 'caching', 'type': 'CachingTypes'},
'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'},
'storage_account_type': {'key': 'storageAccountType', 'type': 'str'},
}
def __init__(self, **kwargs):
super(ImageOSDisk, self).__init__(**kwargs)
self.os_type = kwargs.get('os_type', None)
self.os_state = kwargs.get('os_state', None)
self.snapshot = kwargs.get('snapshot', None)
self.managed_disk = kwargs.get('managed_disk', None)
self.blob_uri = kwargs.get('blob_uri', None)
self.caching = kwargs.get('caching', None)
self.disk_size_gb = kwargs.get('disk_size_gb', None)
self.storage_account_type = kwargs.get('storage_account_type', None)
|
[
"lmazuel@microsoft.com"
] |
lmazuel@microsoft.com
|
f58135f4e2c1093cf3d082d8408c503226f4a87e
|
9a55969cdf85b30873f33aae1410be1cdb91fca5
|
/gym_wrapper.py
|
a9056fcd2d3e3dbef297b98a9edc2e3102534c8b
|
[
"Apache-2.0"
] |
permissive
|
StepNeverStop/RL-TF1
|
65f296bce00ba00185df080c7f770d59ef92e4ed
|
c9e75819504a8db4c587e2aa3e4c9c8845fd9f08
|
refs/heads/master
| 2022-12-11T18:04:06.306955
| 2020-09-26T03:47:01
| 2020-09-26T03:47:01
| 223,076,782
| 5
| 2
|
Apache-2.0
| 2022-12-08T06:16:32
| 2019-11-21T03:06:17
|
Python
|
UTF-8
|
Python
| false
| false
| 8,020
|
py
|
import gym
import numpy as np
import threading
class FakeMultiThread(threading.Thread):
def __init__(self, func, args=()):
super().__init__()
self.func = func
self.args = args
def run(self):
self.result = self.func(*self.args)
def get_result(self):
try:
return self.result
except Exception:
return None
class gym_envs(object):
def __init__(self, gym_env_name, n, render_mode='first'):
'''
Input:
gym_env_name: gym training environment id, i.e. CartPole-v0
n: environment number
render_mode: mode of rendering, optional: first, last, all, random_[num] -> i.e. random_2, [list] -> i.e. [0, 2, 4]
'''
self.n = n # environments number
self.envs = [gym.make(gym_env_name) for _ in range(self.n)]
# process observation
self.obs_space = self.envs[0].observation_space
if isinstance(self.obs_space, gym.spaces.box.Box):
self.obs_high = self.obs_space.high
self.obs_low = self.obs_space.low
self.obs_type = 'visual' if len(self.obs_space.shape) == 3 else 'vector'
self.reward_threshold = self.envs[0].env.spec.reward_threshold # reward threshold refer to solved
# process action
self.action_space = self.envs[0].action_space
if isinstance(self.action_space, gym.spaces.box.Box):
self.action_type = 'continuous'
self.action_high = self.action_space.high
self.action_low = self.action_space.low
elif isinstance(self.action_space, gym.spaces.tuple.Tuple):
self.action_type = 'Tuple(Discrete)'
else:
self.action_type = 'discrete'
self.action_mu, self.action_sigma = self._get_action_normalize_factor()
self._get_render_index(render_mode)
def _get_render_index(self, render_mode):
'''
get render windows list, i.e. [0, 1] when there are 4 training enviornment.
'''
assert isinstance(render_mode, (list, str)), 'render_mode must have type of str or list.'
if isinstance(render_mode, list):
assert all([isinstance(i, int) for i in render_mode]), 'items in render list must have type of int'
assert min(index) >= 0, 'index must larger than zero'
assert max(index) <= self.n, 'render index cannot larger than environment number.'
self.render_index = render_mode
elif isinstance(render_mode, str):
if render_mode == 'first':
self.render_index = [0]
elif render_mode == 'last':
self.render_index = [-1]
elif render_mode == 'all':
self.render_index = [i for i in range(self.n)]
else:
a, b = render_mode.split('_')
if a == 'random' and 0 < int(b) <= self.n:
import random
self.render_index = random.sample([i for i in range(self.n)], int(b))
else:
raise Exception('render_mode must be first, last, all, [list] or random_[num]')
def render(self):
'''
render game windows.
'''
[self.envs[i].render() for i in self.render_index]
def close(self):
'''
close all environments.
'''
[env.close() for env in self.envs]
def sample_action(self):
'''
generate ramdom actions for all training environment.
'''
return np.array([env.action_space.sample() for env in self.envs])
    def reset(self):
        '''
        Reset every environment in parallel and return the stacked
        observations (one-hot encoded for discrete observation spaces).
        '''
        # forget any terminated-env bookkeeping from a previous episode
        self.dones_index = []
        threadpool = []
        # one worker thread per environment reset
        for i in range(self.n):
            th = FakeMultiThread(self.envs[i].reset, args=())
            threadpool.append(th)
        for th in threadpool:
            th.start()
        for th in threadpool:
            threading.Thread.join(th)
        obs = np.array([threadpool[i].get_result() for i in range(self.n)])
        obs = self._maybe_one_hot(obs)
        return obs
        # if self.obs_type == 'visual':
        #     return np.array([threadpool[i].get_result()[np.newaxis, :] for i in range(self.n)])
        # else:
        #     return np.array([threadpool[i].get_result() for i in range(self.n)])
    def step(self, actions, scale=True):
        '''
        Step all environments in parallel with one action per env.

        actions: per-env actions; presumably normalized to [-1, 1] when
            scale=True -- TODO confirm against callers.
        scale: when True, map actions back to the env's native range using
            mu/sigma from _get_action_normalize_factor().
        Returns (obs, reward, done, info) stacked over environments.
        Side effect: indices of finished environments are stored in
        self.dones_index for partial_reset().
        '''
        if scale == True:
            actions = self.action_sigma * actions + self.action_mu
        if self.action_type == 'discrete':
            # flatten to one scalar action per env
            actions = actions.reshape(-1,)
        elif self.action_type == 'Tuple(Discrete)':
            # one list of sub-actions per env
            actions = actions.reshape(self.n, -1).tolist()
        threadpool = []
        for i in range(self.n):
            th = FakeMultiThread(self.envs[i].step, args=(actions[i], ))
            threadpool.append(th)
        for th in threadpool:
            th.start()
        for th in threadpool:
            threading.Thread.join(th)
        results = [threadpool[i].get_result() for i in range(self.n)]
        # if self.obs_type == 'visual':
        #     results = [
        #         [threadpool[i].get_result()[0][np.newaxis, :], *threadpool[i].get_result()[1:]]
        #         for i in range(self.n)]
        # else:
        #     results = [threadpool[i].get_result() for i in range(self.n)]
        # transpose [(o, r, d, i), ...] into four stacked arrays
        obs, reward, done, info = [np.array(e) for e in zip(*results)]
        obs = self._maybe_one_hot(obs)
        self.dones_index = np.where(done)[0]
        return obs, reward, done, info
    def partial_reset(self):
        '''
        Reset only the environments that finished during the last step()
        (indices recorded in self.dones_index) and return their stacked
        observations.
        '''
        threadpool = []
        for i in self.dones_index:
            th = FakeMultiThread(self.envs[i].reset, args=())
            threadpool.append(th)
        for th in threadpool:
            th.start()
        for th in threadpool:
            threading.Thread.join(th)
        # threadpool holds one entry per done env, in dones_index order
        obs = np.array([threadpool[i].get_result() for i in range(self.dones_index.shape[0])])
        obs = self._maybe_one_hot(obs, is_partial=True)
        return obs
        # if self.obs_type == 'visual':
        #     return np.array([threadpool[i].get_result()[np.newaxis, :] for i in range(self.dones_index.shape[0])])
        # else:
        #     return np.array([threadpool[i].get_result() for i in range(self.dones_index.shape[0])])
def _get_action_normalize_factor(self):
'''
get action mu and sigma. mu: action bias. sigma: action scale
input:
self.action_low: [-2, -3],
self.action_high: [2, 6]
return:
mu: [0, 1.5],
sigma: [2, 4.5]
'''
if self.action_type == 'continuous':
return (self.action_high + self.action_low) / 2, (self.action_high - self.action_low) / 2
else:
return 0, 1
    def _maybe_one_hot(self, obs, is_partial=False):
        """
        Change discrete observation from list(int) to list(one_hot) format.
        for example:
            action: [[1, 0], [2, 1]]
            observation space: [3, 4]
            environment number: 2
            then, output: [[0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0.]
                           [0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]]

        is_partial: True when called from partial_reset(), i.e. obs only
        covers the envs listed in self.dones_index.
        """
        obs_number = len(self.dones_index) if is_partial else self.n
        # spaces with an `n` attribute are discrete and need one-hot encoding
        if hasattr(self.obs_space, 'n'):
            obs = obs.reshape(obs_number, -1)
            # np.int32 accepted too: e.g. CliffWalking-v0 reports n as numpy.int32
            if isinstance(self.obs_space.n, (int, np.int32)):
                dim = [int(self.obs_space.n)]
            else:
                dim = list(self.obs_space.n)
            # mixed-radix encode the (possibly multi-dimensional) index into a
            # single integer, then one-hot it over the product of dimensions
            multiplication_factor = dim[1:] + [1]
            n = np.array(dim).prod()
            ints = obs.dot(multiplication_factor)
            x = np.zeros([obs.shape[0], n])
            for i, j in enumerate(ints):
                x[i, j] = 1
            return x
        else:
            # continuous/vector observations pass through unchanged
            return obs
|
[
"271668153@qq.com"
] |
271668153@qq.com
|
2ef9028b99e09a2662b0d9c7461782c15a37d51d
|
ebb63b057a82b8a10df305252cbcda4186ec02f7
|
/taichi_blend/bundle-packages/meltblend/__init__.py
|
86e44f3f0f3dde2931cfa10dc1339672c07d53d1
|
[] |
no_license
|
yjchoi1/taichi_blend
|
aa2d6f0129c8068b9a2c8bb5a7677b3c60923d5b
|
907fdbee6027375324c9605ffc14db16e590f992
|
refs/heads/master
| 2023-03-21T12:31:04.126621
| 2021-03-01T15:06:13
| 2021-03-01T15:06:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 703
|
py
|
# Blender add-on metadata, read by Blender's add-on manager.
bl_info = {
    'name': 'Taichi Blend Physics',
    # Fix: user-visible description had a typo ('intergration').
    'description': 'Taichi Blender integration',
    'author': 'Taichi Developers',
    'version': (0, 0, 5),
    'blender': (2, 81, 0),  # minimum supported Blender version
    'location': 'Taichi Blend Window',
    'support': 'COMMUNITY',
    'wiki_url': 'https://github.com/taichi-dev/taichi_blend/wiki',
    'tracker_url': 'https://github.com/taichi-dev/taichi_blend/issues',
    'category': 'Physics',
}
from . import node_system, user_iface
# Submodules registered/unregistered as a unit by register()/unregister().
modules = [
    node_system,
    user_iface,
]
def register():
    """Register every Taichi Blend submodule with Blender, in list order."""
    for mod in modules:
        mod.register()
def unregister():
    """Unregister submodules in reverse registration order."""
    for mod in reversed(modules):
        mod.unregister()
|
[
"1931127624@qq.com"
] |
1931127624@qq.com
|
8345cc43468edb997ff7b911356f1e9ae0cd0f9d
|
1bfa2c800b2d76787e224e5cf25d69fec3a9eff1
|
/tests/synapses/FI.py
|
3c5d7fb4185c1d5c6e3c707a5e73cf44a5af1738
|
[
"MIT"
] |
permissive
|
OpenSourceBrain/MiglioreEtAl14_OlfactoryBulb3D
|
fdd55b324695c82deb04b70c4f2f238af5e92285
|
edaf58abd6b3e0195125fb730e9654ae937d790b
|
refs/heads/master
| 2023-06-30T23:14:33.842015
| 2023-06-17T08:53:44
| 2023-06-17T08:53:44
| 31,535,489
| 2
| 2
| null | 2018-02-08T21:43:43
| 2015-03-02T10:37:44
|
OpenEdge ABL
|
UTF-8
|
Python
| false
| false
| 807
|
py
|
import sys; sys.path.insert(0,'..')
from tests.synapses.NEURONSynapseTest import NEURONSynapseTest
from tests.synapses.NeuroMLSynapseTest import NeuroMLSynapseTest
class NEURON(NEURONSynapseTest):
    """NEURON implementation of the FI synapse test."""

    def __init__(self):
        super(NEURON, self).__init__()
        self.label = "FI"
        self.path = "../NEURON/fi.mod"
        self.resultsFile = "results/synapses/FI/NEURON.json"

    def prepare(self, h, soma, syn):
        """Configure the synapse parameters before the simulation runs."""
        syn.gmax = 1
        syn.tau2 = 100
class NeuroML(NeuroMLSynapseTest):
    """NeuroML implementation of the FI synapse test."""

    def __init__(self):
        super(NeuroML, self).__init__()
        self.label = "FI"
        self.path = "../NeuroML2/Synapses/FI.synapse.xml"
        self.resultsFile = "results/synapses/FI/NeuroML.json"

    def prepare(self, h, soma, syn):
        """Configure the synapse parameters before the simulation runs."""
        syn.gbase = 1
|
[
"jbirgio@gmail.com"
] |
jbirgio@gmail.com
|
6987d77bade5bae99b89fae4d7412d77288a691a
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02973/s456069800.py
|
2c1e3100d68ffea4badc684c47be28c5e8d55461
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 237
|
py
|
from bisect import bisect_right
n = int(input())
a = [-int(input()) for _ in range(n)]
li = list()
for e in a:
i = bisect_right(li, e)
if i == len(li):
li.append(e)
else:
li[i] = e
ans = len(li)
print(ans)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
610b8f2dbd7222992ae2de1ebf4f382a854940ba
|
2d82d4c6574bd6d32f2cf1c781615f7951f55f66
|
/muntjac/addon/google_maps/overlay/polygon.py
|
8990b4e16f9d503dbd0e2cb1dc99ee6f8bc9eb97
|
[
"Apache-2.0"
] |
permissive
|
metaperl/muntjac
|
f83f745ee03942a61af92ee7fba7285aa9c46f3c
|
8db97712edd81b4d25deaaa48587d2a08010f2c8
|
refs/heads/master
| 2021-01-15T22:04:25.057862
| 2012-11-09T03:52:59
| 2012-11-09T03:52:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 815
|
py
|
# @MUNTJAC_COPYRIGHT@
# @MUNTJAC_LICENSE@
from muntjac.addon.google_maps.overlay.poly_overlay \
import PolyOverlay
class Polygon(PolyOverlay):
    """Filled polygon map overlay: a PolyOverlay plus fill colour/opacity."""

    def __init__(self, Id, points, strokeColor='#ffffff', strokeWeight=1,
                 strokeOpacity=1.0, fillColor='#777777', fillOpacity=0.2,
                 clickable=False):
        super(Polygon, self).__init__(Id, points, strokeColor, strokeWeight,
                                      strokeOpacity, clickable)
        self._fillOpacity = fillOpacity
        self._fillColor = fillColor

    def getFillColor(self):
        """Return the fill colour (hex string, e.g. '#777777')."""
        return self._fillColor

    def setFillColor(self, fillColor):
        """Set the fill colour (hex string)."""
        self._fillColor = fillColor

    def getFillOpacity(self):
        """Return the fill opacity (0.0 transparent .. 1.0 opaque)."""
        return self._fillOpacity

    def setFillOpacity(self, fillOpacity):
        """Set the fill opacity (0.0 .. 1.0)."""
        self._fillOpacity = fillOpacity
|
[
"r.w.lincoln@gmail.com"
] |
r.w.lincoln@gmail.com
|
08f8f65d622de581829d8817089108087adc12fb
|
46cdf1f348c1fe1cf46ea2e14ecbef9bf59006bd
|
/resolwe/flow/executors/null.py
|
1fac4c33744cd01297cfa7344e0b9ebf41374fca
|
[
"Apache-2.0"
] |
permissive
|
mzganec/resolwe
|
b08dd971f1b19f55052d857063eb43afc4a827dc
|
fd5bbbc459289811ae34ad263b96b498ba15ba7d
|
refs/heads/master
| 2021-06-27T11:30:09.883345
| 2017-09-26T10:11:00
| 2017-09-26T10:16:03
| 105,652,149
| 0
| 0
| null | 2017-10-03T13:00:16
| 2017-10-03T13:00:16
| null |
UTF-8
|
Python
| false
| false
| 615
|
py
|
"""Local workflow executor."""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from resolwe.flow.executors import BaseFlowExecutor
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class FlowExecutor(BaseFlowExecutor):  # pylint: disable=abstract-method
    """Null dataflow executor proxy.

    Lets tests save a data object to the database without actually
    running it: ``run`` is a deliberate no-op.
    """

    name = 'null'

    def run(self, data_id, script, verbosity=1):
        """Intentionally do nothing."""
        return None
|
[
"domen@blenkus.com"
] |
domen@blenkus.com
|
fb78a7ba635cbda1a933a00eb4dd0da34c3334c4
|
1fdf7dbde0b8253ef164b8a8fdff958ecef6866e
|
/proyecto_tienda/carrito/urls.py
|
4720a39d71e7706be0f6f794d953f06a5cf2ed68
|
[] |
no_license
|
jkaalexkei/proyecto_tienda
|
b9b0315b05a956ab1bf619059b8ea839fb4c7093
|
6986ccce2f8aaffc6b1289090d274cbc43b36509
|
refs/heads/master
| 2023-08-25T07:21:40.781058
| 2021-11-05T02:17:18
| 2021-11-05T02:17:18
| 396,094,625
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 233
|
py
|
from django.urls import path
from . import views
# URL namespace, used for reversing, e.g. 'carrito:agregar'.
app_name = 'carrito'
urlpatterns = [
    path('',views.carrito,name='carrito'),  # cart overview page
    path('agregar/',views.agregar,name='agregar'),  # add an item to the cart
    path('eliminar/',views.remove,name='remove'),  # remove an item from the cart
]
|
[
"jkaalexkei@gmail.com"
] |
jkaalexkei@gmail.com
|
74dfc36af985a5ffa5a9f34e9e7f893fc514bef3
|
32c56293475f49c6dd1b0f1334756b5ad8763da9
|
/google-cloud-sdk/lib/surface/compute/instance_groups/managed/delete.py
|
368808d0b8bcc4f36cd56c4ef8184a148eacee2a
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
bopopescu/socialliteapp
|
b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494
|
85bb264e273568b5a0408f733b403c56373e2508
|
refs/heads/master
| 2022-11-20T03:01:47.654498
| 2020-02-01T20:29:43
| 2020-02-01T20:29:43
| 282,403,750
| 0
| 0
|
MIT
| 2020-07-25T08:31:59
| 2020-07-25T08:31:59
| null |
UTF-8
|
Python
| false
| false
| 7,090
|
py
|
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting managed instance group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import managed_instance_groups_utils
from googlecloudsdk.api_lib.compute import path_simplifier
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute import flags
from googlecloudsdk.command_lib.compute import scope as compute_scope
from googlecloudsdk.command_lib.compute.instance_groups import flags as instance_groups_flags
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import progress_tracker
from googlecloudsdk.core.util import text
from six.moves import zip
class Delete(base.DeleteCommand):
  """Delete Google Compute Engine managed instance group."""

  @staticmethod
  def Args(parser):
    # Accepts one or more MIG names, zone- or region-scoped.
    instance_groups_flags.MULTISCOPE_INSTANCE_GROUP_MANAGERS_ARG.AddArgument(
        parser, operation_type='delete')

  def _GenerateAutoscalerDeleteRequests(self, holder, project, mig_requests):
    """Generates Delete requestes for autoscalers attached to instance groups.

    Args:
      holder: ComputeApiHolder, object encapsulating compute api.
      project: str, project this request should apply to.
      mig_requests: Messages which will be sent to delete instance group
        managers.

    Returns:
      Messages, which will be sent to delete autoscalers.
    """
    # mig_requests items are (service, 'Delete', request) tuples; keep only
    # the request messages (index 2 after transposing).
    mig_requests = list(zip(*mig_requests))[2] if mig_requests else []
    zone_migs = [(request.instanceGroupManager, 'zone',
                  managed_instance_groups_utils.CreateZoneRef(
                      holder.resources, request)) for request in mig_requests
                 if hasattr(request, 'zone') and request.zone is not None]
    region_migs = [(request.instanceGroupManager, 'region',
                    managed_instance_groups_utils.CreateRegionRef(
                        holder.resources, request)) for request in mig_requests
                   if hasattr(request, 'region') and request.region is not None]
    zones = list(zip(*zone_migs))[2] if zone_migs else []
    regions = list(zip(*region_migs))[2] if region_migs else []
    client = holder.client.apitools_client
    messages = client.MESSAGES_MODULE
    # Only autoscalers attached to the MIGs being deleted are targeted.
    autoscalers_to_delete = managed_instance_groups_utils.AutoscalersForMigs(
        migs=zone_migs + region_migs,
        autoscalers=managed_instance_groups_utils.AutoscalersForLocations(
            zones=zones,
            regions=regions,
            client=holder.client))
    requests = []
    for autoscaler in autoscalers_to_delete:
      # Zonal and regional autoscalers use different services/messages.
      if autoscaler.zone:
        service = client.autoscalers
        request = messages.ComputeAutoscalersDeleteRequest(
            zone=path_simplifier.Name(autoscaler.zone))
      else:
        service = client.regionAutoscalers
        request = messages.ComputeRegionAutoscalersDeleteRequest(
            region=path_simplifier.Name(autoscaler.region))
      request.autoscaler = autoscaler.name
      request.project = project
      requests.append((service, 'Delete', request))
    return requests

  def _GetCommonScopeNameForRefs(self, refs):
    """Gets common scope for references."""
    has_zone = any(hasattr(ref, 'zone') for ref in refs)
    has_region = any(hasattr(ref, 'region') for ref in refs)
    if has_zone and not has_region:
      return 'zone'
    elif has_region and not has_zone:
      return 'region'
    else:
      # Mixed (or empty) scopes: there is no single common scope name.
      return None

  def _CreateDeleteRequests(self, client, igm_refs):
    """Returns a list of delete messages for instance group managers."""
    messages = client.MESSAGES_MODULE
    requests = []
    for ref in igm_refs:
      if ref.Collection() == 'compute.instanceGroupManagers':
        service = client.instanceGroupManagers
        request = messages.ComputeInstanceGroupManagersDeleteRequest(
            instanceGroupManager=ref.Name(),
            project=ref.project,
            zone=ref.zone)
      elif ref.Collection() == 'compute.regionInstanceGroupManagers':
        service = client.regionInstanceGroupManagers
        request = messages.ComputeRegionInstanceGroupManagersDeleteRequest(
            instanceGroupManager=ref.Name(),
            project=ref.project,
            region=ref.region)
      else:
        raise ValueError('Unknown reference type {0}'.format(ref.Collection()))
      requests.append((service, 'Delete', request))
    return requests

  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    project = properties.VALUES.core.project.Get(required=True)
    igm_refs = (
        instance_groups_flags.MULTISCOPE_INSTANCE_GROUP_MANAGERS_ARG.
        ResolveAsResource)(
            args, holder.resources, default_scope=compute_scope.ScopeEnum.ZONE,
            scope_lister=flags.GetDefaultScopeLister(holder.client, project))
    scope_name = self._GetCommonScopeNameForRefs(igm_refs)
    # Interactive confirmation before anything is deleted.
    utils.PromptForDeletion(
        igm_refs, scope_name=scope_name, prompt_title=None)
    requests = list(self._CreateDeleteRequests(
        holder.client.apitools_client, igm_refs))
    resources = []
    # Delete autoscalers first.
    errors = []
    autoscaler_delete_requests = self._GenerateAutoscalerDeleteRequests(
        holder, project, mig_requests=requests)
    if autoscaler_delete_requests:
      with progress_tracker.ProgressTracker(
          'Deleting ' + text.Pluralize(
              len(autoscaler_delete_requests), 'autoscaler'),
          autotick=False,
      ) as tracker:
        resources = holder.client.MakeRequests(
            autoscaler_delete_requests,
            errors,
            progress_tracker=tracker)
      if errors:
        utils.RaiseToolException(errors)
    # Now delete instance group managers.
    errors = []
    with progress_tracker.ProgressTracker(
        'Deleting ' + text.Pluralize(len(requests), 'Managed Instance Group'),
        autotick=False,
    ) as tracker:
      resources += holder.client.MakeRequests(
          requests, errors, progress_tracker=tracker)
    if errors:
      utils.RaiseToolException(errors)
    return resources
Delete.detailed_help = {
'brief': 'Delete Google Compute Engine managed instance groups',
'DESCRIPTION': """\
*{command}* deletes one or more Google Compute Engine managed instance
groups.
""",
}
|
[
"jonathang132298@gmail.com"
] |
jonathang132298@gmail.com
|
fa45d061dd6710a66d3b365dc1f3104878de0bd9
|
66ab8fac9fb19e5ff470be0fa7b2b73600231f16
|
/pyble/osx/console.py
|
f823488bb6edf4f68be5844e217d58b1b79c62e0
|
[
"MIT"
] |
permissive
|
bgromov/PyBLEWrapper
|
e97bbc2299f880838d246a8c6fdb27b05cb72af1
|
8a5d016e65b3c259391ddc97c371ab4b1b5c61b5
|
refs/heads/master
| 2020-03-25T21:41:43.702666
| 2018-08-12T23:38:16
| 2018-08-12T23:38:16
| 144,185,816
| 0
| 0
|
MIT
| 2018-08-09T17:50:12
| 2018-08-09T17:50:12
| null |
UTF-8
|
Python
| false
| false
| 6,054
|
py
|
from objc import *
from Foundation import *
import cmd
import os
import logging
import time
from pprint import pformat
try:
from queue import Queue, Empty
except:
from Queue import Queue, Empty
from pyble.patterns import LoggerObject
class OSXCmd(cmd.Cmd, LoggerObject):
    """Interactive cmd.Cmd shell that reads stdin through the OSX (Cocoa)
    run loop instead of a blocking readline, so PyObjC callbacks keep
    firing while the prompt is displayed.

    NOTE(review): Python 2 only -- uses the `print` statement and StringIO.
    """

    def __init__(self, history_size=10):
        # both cmd.Cmd, LoggerObject need to be init.
        cmd.Cmd.__init__(self)
        LoggerObject.__init__(self)
        # input lines arrive asynchronously via keyboardHandler_ and are
        # consumed by cmdloop() through this queue
        self.cmdqueue = Queue()
        self.history_size = history_size  # max commands kept for do_hist

    def registerKeyboardInterrupt(self):
        """Subscribe to asynchronous stdin-read notifications from Cocoa."""
        stdin = NSFileHandle.fileHandleWithStandardInput().retain()
        handle = objc.selector(self.keyboardHandler_, signature='v@:@')
        NSNotificationCenter.defaultCenter().addObserver_selector_name_object_(self, handle, NSFileHandleReadCompletionNotification, stdin)
        stdin.readInBackgroundAndNotify()

    def unregisterKeyboardInterrupt(self):
        """Stop receiving stdin notifications."""
        NSNotificationCenter.defaultCenter().removeObserver_(self)

    def keyboardHandler_(self, notification):
        """Cocoa callback: queue the received line and re-arm the read."""
        data = notification.userInfo().objectForKey_(NSFileHandleNotificationDataItem)
        line = NSString.alloc().initWithData_encoding_(data, NSUTF8StringEncoding).autorelease()
        if len(line):
            self.cmdqueue.put(line)
        stdin = NSFileHandle.fileHandleWithStandardInput().retain()
        stdin.readInBackgroundAndNotify()

    def cmdloop(self, intro=None):
        # customized for python & OSX co-existence
        # use OSX framework to read input from keyboard interrupt
        self.preloop()
        if intro is not None:
            self.intro = intro
        if self.intro:
            self.stdout.write(str(self.intro) + "\n")
        # the main loop
        stop = None
        showPrompt = True
        while not stop:
            if showPrompt:
                self.stdout.write(self.prompt)
                self.stdout.flush()
                showPrompt = False
            try:
                # pump the Cocoa run loop once (non-blocking) so stdin
                # notifications are delivered, then poll the queue
                NSRunLoop.currentRunLoop().runMode_beforeDate_(NSDefaultRunLoopMode, NSDate.distantPast())
                line = self.cmdqueue.get_nowait()
                if not len(line):
                    line = "EOF"
                else:
                    line = line.strip('\r\n')
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
                self.cmdqueue.task_done()
                showPrompt = True
            except Empty:
                # no input yet -- keep pumping the run loop
                continue
            except KeyboardInterrupt:
                break
            except Exception as e:
                import traceback
                print traceback.format_exc()  # Python 2 print statement
                break
        # cleanup
        self.postloop()

    def preloop(self):
        """Initialize history and the Cocoa pool + stdin hook."""
        # cmd history
        self._history = []
        # OSX
        self.osx_pool = NSAutoreleasePool.alloc().init()
        self.registerKeyboardInterrupt()

    def postloop(self):
        """Tear down the stdin hook and release the autorelease pool."""
        self.unregisterKeyboardInterrupt()
        del self.osx_pool

    def endloop(self):
        """Ask the running loop to finish by queueing an 'exit' command."""
        self.cmdqueue.put("exit")

    def precmd(self, line):
        # record the command, keeping at most history_size entries
        self._history += [ line.strip() ]
        if len(self._history) > self.history_size:
            self._history = self._history[-(self.history_size):]
        # suspend stdin notifications while the command executes
        self.unregisterKeyboardInterrupt()
        return line

    def postcmd(self, stop, line):
        try:
            self.stdout.flush()
        except:
            pass
        # resume stdin notifications for the next prompt
        self.registerKeyboardInterrupt()
        return stop

    def emptyline(self):
        # ignore blank lines instead of repeating the previous command
        pass

    def do_shell(self, args):
        """Execute shell command
        """
        os.system(args)

    def do_debug(self, args):
        """Enable/disable debugging information
        """
        # no-op for subclasses that do not expose a `debug` attribute
        if not hasattr(self, 'debug'):
            return
        option = args.strip()
        if option == "":
            pass
        elif option == "True":
            self.debug = True
        elif option == "False":
            self.debug = False
        else:
            self.stdout.write("Only accept True/False\n")
        ans = "%s is %sin debug mode.\n"
        cls_name = self.__class__.__name__
        if self.debug:
            ans = ans % (cls_name, "")
        else:
            ans = ans % (cls_name, "not ")
        self.stdout.write(ans)
        self.stdout.flush()

    def default(self, line):
        # unknown commands fall through to Python evaluation
        if len(line.strip()):
            self.do_eval(line)

    def do_eval(self, args):
        """Evaluate a single line python statement
        """
        line = args.strip()
        if len(line) == 0:
            return
        output = ""
        oldstdout = self.stdout
        from StringIO import StringIO
        import ast
        # capture anything the evaluated statement writes to self.stdout
        buffer = StringIO()
        self.stdout = buffer
        try:
            code = compile(line, "<string>", "single")
            exec(code)
        except NameError as e:
            self.logger.debug(e)
            cmd, args, line = self.parseline(line)
            self.commandNotFound(cmd)
        except SyntaxError as e:
            self.logger.debug(e)
            cmd, args, line = self.parseline(line)
            self.commandNotFound(cmd)
        except Exception as e:
            self.logger.debug(e)
            self.stdout.write(pformat(e) + "\n")
        finally:
            self.stdout = oldstdout
        self.stdout.write(buffer.getvalue())

    def commandNotFound(self, cmd):
        self.stdout.write("Command: '%s' is not yet support by %s\n" % (cmd, self.__class__.__name__))

    def do_hist(self, args):
        """Show last N command history
        """
        length = len(self._history)
        try:
            length = int(args.strip())
        except:
            pass
        # drop the 'hist' command itself from the listing
        self._history.pop()
        for cmd in self._history[-length:]:
            self.stdout.write(cmd)
            self.stdout.write('\n')
        self.stdout.flush()

    def do_exit(self, args):
        """Exit
        """
        return True
# Manual smoke test: run an interactive shell when executed directly.
if __name__ == "__main__":
    app = OSXCmd()
    app.cmdloop()
|
[
"brett.chien@gmail.com"
] |
brett.chien@gmail.com
|
00e012b55ec43e614adb4687ff49bb5f5b807e97
|
7c551e749064b25af706b9167211050f8c6ad0a9
|
/signatures/windows/disables_browserwarn.py
|
3fb78aceaba0966a66cdcad065787f4ccc8ebbb4
|
[] |
no_license
|
dashjuvi/Cuckoo-Sandbox-vbox-win7
|
fa382828b4895c5e1ee60b37a840edd395bf1588
|
a3a26b539b06db15176deadeae46fc0476e78998
|
refs/heads/master
| 2020-03-12T08:33:06.231245
| 2019-01-14T23:09:02
| 2019-01-14T23:09:02
| 130,529,882
| 6
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,852
|
py
|
# Copyright (C) 2015 Optiv, Inc. (brad.spengler@optiv.com), Kevin Ross, Updated 2016 for Cuckoo 2.0
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class DisablesBrowserWarn(Signature):
    """Cuckoo signature: flags samples that write registry values which
    suppress Internet Explorer / Windows browser security warnings."""

    name = "disables_browser_warn"
    description = "Attempts to disable browser security warnings"
    severity = 3
    categories = ["generic", "banker", "clickfraud"]
    authors = ["Optiv", "Kevin Ross"]
    minimum = "2.0"

    # Registry values (regex, covering Wow6432Node variants) that disable
    # certificate / zone-crossing / protected-mode warnings when written.
    regkeys_re = [
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnBadCertRecving",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnBadCertSending",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnHTTPSToHTTPRedirect",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnZoneCrossing",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnPostRedirect",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\IEHardenIENoWarn",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Internet\\ Explorer\\\\Main\\\\NoProtectedModeBanner",
        ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Internet\\ Explorer\\\\Main\\\\IE9RunOncePerInstall",
    ]

    def on_complete(self):
        # Mark every write to one of the monitored registry values as an IOC.
        for indicator in self.regkeys_re:
            for regkey in self.check_key(pattern=indicator, regex=True, actions=["regkey_written"], all=True):
                self.mark_ioc("registry", regkey)
        return self.has_marks()
|
[
"diegovm14@gmail.com"
] |
diegovm14@gmail.com
|
a4febf7096cdd80c0178f06210bc98d127790e7b
|
a884039e1a8b0ab516b80c2186e0e3bad28d5147
|
/Livros/Livro-Desenvolvimento web com Flask/Capitulo03/Nível 01/exemplo14a.py
|
fb031bc2512a683fa44f48441a518deee804b3f6
|
[
"MIT"
] |
permissive
|
ramonvaleriano/python-
|
6e744e8bcd58d07f05cd31d42a5092e58091e9f0
|
ada70918e945e8f2d3b59555e9ccc35cf0178dbd
|
refs/heads/main
| 2023-04-10T14:04:24.497256
| 2021-04-22T18:49:11
| 2021-04-22T18:49:11
| 340,360,400
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 769
|
py
|
# Program: exemplo14a.py
# Author: Ramon R. Valeriano
# Description: Programa do Capítulo 3, para melhorar a fixação
# Developed: 05/03/2020 - 20:05
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from datetime import datetime
app = Flask(__name__)
bootstrap = Bootstrap(app)  # Twitter Bootstrap template helpers
moment = Moment(app)  # client-side date/time rendering via moment.js
@app.route('/')
def index():
    """Render the landing page, passing the current UTC time."""
    now = datetime.utcnow()
    return render_template('hellonew1.html', current_time=now)
@app.route('/user/<name>')
def usuario(name):
    """Render the personalized user page for *name* (from the URL)."""
    context = {'name': name}
    return render_template('user3.html', **context)
@app.errorhandler(404)
def paginaNaoEncontrada(e):
    """Custom page-not-found handler.

    Fix: also return the 404 status code. The original returned only the
    template, so Flask served the error page with an implicit HTTP 200.
    """
    return render_template('404.html'), 404
@app.errorhandler(500)
def erroNoServidor(e):
    """Custom internal-server-error handler.

    Fix: also return the 500 status code. The original returned only the
    template, so Flask served the error page with an implicit HTTP 200.
    """
    return render_template('500.html'), 500
app.run(debug=True)
|
[
"rrvaleriano@gmail.com"
] |
rrvaleriano@gmail.com
|
46e27f7215b2e25256ad1950e0f41b1e35267578
|
9587c6e58ef0ef4959898454c1a7c3d8fc963530
|
/blog/migrations/0001_initial.py
|
2df14f34fc1416652b5fb0330668df6e589da01d
|
[] |
no_license
|
okielife/okie.life
|
cfa5450be85c4076985156793a48ee4e68c1610d
|
6be9ed7178cddd300b00adb263b1962a7987ac11
|
refs/heads/master
| 2020-05-26T11:45:40.831311
| 2018-01-31T15:52:45
| 2018-01-31T15:52:45
| 84,996,176
| 0
| 0
| null | 2018-01-31T15:52:46
| 2017-03-14T20:54:11
|
Python
|
UTF-8
|
Python
| false
| false
| 1,354
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-04-09 12:47
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial schema: creates Blog and Category, then links
    # Blog.category -> Category with CASCADE delete.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, unique=True)),
                ('slug', models.SlugField(max_length=100, unique=True)),
                ('body', models.TextField()),
                ('posted', models.DateField(auto_now_add=True, db_index=True)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(db_index=True, max_length=100)),
                ('slug', models.SlugField(max_length=100)),
            ],
        ),
        migrations.AddField(
            model_name='blog',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Category'),
        ),
    ]
|
[
"leeed2001@gmail.com"
] |
leeed2001@gmail.com
|
81219fa1bf7140d19dfa21d75f40241648410d07
|
c733d8d610a2f00b128abc25e4cdf79212ce4e63
|
/photologue/migrations/0017_remove_photo_admin_orig_image_tag.py
|
cd1025dd49ec8530e2cab6c30842f42d30bb112e
|
[
"BSD-3-Clause"
] |
permissive
|
pbarton666/photologue
|
301f56010208e068fd2c29d24862e529ceade6f1
|
fe47e6eb7830dbcfecfd059294dfbee30b94c4f9
|
refs/heads/master
| 2020-12-24T18:51:07.411561
| 2016-04-11T17:35:05
| 2016-04-11T17:35:05
| 55,991,961
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 414
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-04-01 18:20
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated: drops the admin_orig_image_tag field introduced in 0016.

    dependencies = [
        ('photologue', '0016_photo_admin_orig_image_tag'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='photo',
            name='admin_orig_image_tag',
        ),
    ]
|
[
"barton.pj@gmail.com"
] |
barton.pj@gmail.com
|
28a524703a77756b184f0f4bfe85f853572665a8
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/Games/Math Game/mathgame/constants.py
|
512e7167a39e24c0b56398f58226c05d5c9119a5
|
[] |
no_license
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137
| 2023-08-28T23:50:57
| 2023-08-28T23:50:57
| 267,368,545
| 2
| 1
| null | 2022-09-08T15:20:18
| 2020-05-27T16:18:17
| null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:4cb8c44068f19e31f8a933330313b35f4f809635c3f596eef01c16fd342dacd6
size 2243
|
[
"nateweiler84@gmail.com"
] |
nateweiler84@gmail.com
|
ddf805539d408d7e2034a60ca30138c7b2a902ad
|
7c15f211adc9e9eb9f66ccdd570c9f38dff7ea8d
|
/packages/autorest.python/test/vanilla/version-tolerant/AcceptanceTests/test_security.py
|
46f859bb9858d9cdcf2f59faebdd3fdd493eee3f
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
Azure/autorest.python
|
cc4bfbf91ae11535731cad37cedd6b733edf1ebd
|
a00d7aaa3753ef05cb5a0d38c664a90869478d44
|
refs/heads/main
| 2023-09-03T06:58:44.246200
| 2023-08-31T20:11:51
| 2023-08-31T20:11:51
| 100,315,955
| 47
| 40
|
MIT
| 2023-09-14T21:00:21
| 2017-08-14T22:58:33
|
Python
|
UTF-8
|
Python
| false
| false
| 2,189
|
py
|
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
from securityaadswaggerversiontolerant import AutorestSecurityAad
from securitykeyswaggerversiontolerant import AutorestSecurityKey
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline.policies import AzureKeyCredentialPolicy
from azure.core.pipeline.policies import BearerTokenCredentialPolicy
def test_security_aad_swagger(credential):
    """AAD-secured client must install a bearer-token authentication policy."""
    aad_client = AutorestSecurityAad(credential=credential)
    policy = aad_client._config.authentication_policy
    assert isinstance(policy, BearerTokenCredentialPolicy)
    aad_client.head(enforce_https=False)
def test_security_key_swagger():
    """Key-secured client must install an AzureKeyCredential policy."""
    # the key value shall keep same with https://github.com/Azure/autorest.testserver/tree/main/src/test-routes/security.ts
    key_client = AutorestSecurityKey(credential=AzureKeyCredential('123456789'))
    policy = key_client._config.authentication_policy
    assert isinstance(policy, AzureKeyCredentialPolicy)
    key_client.head()
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
6ce782e1de0a0f55c0d164d33e68d359bb9bd33d
|
5050cb4aa00de443d3f9dfeddd4b3d70389386c0
|
/site-packages/sugar_network/toolkit/network.py
|
a0ba49914efda6ddccf4108d45178f41d6278200
|
[] |
no_license
|
sugar-activities/4619-activity
|
0f72e6c64fd0f98ac3e7d6011a75fb2ddd27490a
|
f6bc2dc64f30de57d3c2f50ac9795ac2faf5ac9e
|
refs/heads/master
| 2021-01-19T23:15:29.516534
| 2017-04-21T05:03:58
| 2017-04-21T05:03:58
| 88,936,210
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,334
|
py
|
# Copyright (C) 2012 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ctypes
import logging
from ctypes.util import find_library
_logger = logging.getLogger('network')
def res_init():
"""Reset resolving cache.
Calling this function will enforce libc to avoid using stale resolving
cache after getting [re]connected. For example, if application process
was launched when there were no any DNS servers available, after getting
connected, call `res_init()` to reuse newly appeared DNS servers.
"""
try:
lib_name = find_library('c')
libc = ctypes.CDLL(lib_name)
getattr(libc, '__res_init')(None)
except Exception:
_logger.exception('Failed to call res_init()')
|
[
"ignacio@sugarlabs.org"
] |
ignacio@sugarlabs.org
|
e28006528c866157b5832c15de8f00c12995b330
|
890c8b8e90e516a5a3880eca9b2d217662fe7d84
|
/armulator/armv6/opcodes/abstract_opcodes/ldr_register_thumb.py
|
51fbe8abfb33e0f75de30bf0387a9fba04663e45
|
[
"MIT"
] |
permissive
|
doronz88/armulator
|
b864135996f876c7857b79a314d4aa06cc19c549
|
0294feac2785c8947e5943ac0c34f941ee4b5fff
|
refs/heads/master
| 2022-11-05T08:14:42.405335
| 2020-06-18T23:53:17
| 2020-06-18T23:53:17
| 273,363,061
| 2
| 0
| null | 2020-06-18T23:51:03
| 2020-06-18T23:51:02
| null |
UTF-8
|
Python
| false
| false
| 1,698
|
py
|
from armulator.armv6.shift import shift
from armulator.armv6.bits_ops import add
from bitstring import BitArray
from armulator.armv6.arm_exceptions import EndOfInstruction
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode
class LdrRegisterThumb(AbstractOpcode):
def __init__(self, m, t, n, shift_t, shift_n):
super(LdrRegisterThumb, self).__init__()
self.m = m
self.t = t
self.n = n
self.shift_t = shift_t
self.shift_n = shift_n
def execute(self, processor):
if processor.condition_passed():
try:
processor.null_check_if_thumbee(self.n)
except EndOfInstruction:
pass
else:
offset = shift(processor.registers.get(self.m), self.shift_t, self.shift_n,
processor.registers.cpsr.get_c())
offset_addr = add(processor.registers.get(self.n), offset, 32)
address = offset_addr
data = processor.mem_u_get(address, 4)
if self.t == 15:
if address[30:32] == "0b00":
processor.load_write_pc(address)
else:
print "unpredictable"
elif processor.unaligned_support() or address[30:32] == "0b00":
processor.registers.set(self.t, data)
else:
processor.registers.set(self.t, BitArray(length=32)) # unknown
def instruction_syndrome(self):
if self.t == 15:
return BitArray(length=9)
else:
return BitArray(bin="11000") + BitArray(uint=self.t, length=4)
|
[
"matan1008@gmail.com"
] |
matan1008@gmail.com
|
5e7189bd3c62d63d5b04c748cb59bb1f8a85acb2
|
5ea9a3185b8abbf536600bde73ffa5293c76913d
|
/django_storage/urls.py
|
5ca910b69f1f7e96a9de80008f0cb58e5e6322ad
|
[] |
no_license
|
antikytheraton/django_storage
|
fd3fcaaeb93d236e2cc626e2326a8909f8fad488
|
7c3f8258f3a558ab99506b160659a824053db700
|
refs/heads/master
| 2021-07-08T19:17:46.690089
| 2017-10-07T02:16:20
| 2017-10-07T02:16:20
| 106,065,308
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,076
|
py
|
"""django_storage URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.views.generic import TemplateView # --------------------------------------------
from home.views import DocumentCreateView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', TemplateView.as_view(template_name='home.html'), name='home'),
url(r'^upload/$', DocumentCreateView.as_view(template_name='form.html'), name='upload')
]
|
[
"binauralvoice@gmail.com"
] |
binauralvoice@gmail.com
|
83b555f16126a27acc97627eb10cd3415912555f
|
c124cd627d1cd2ecc2056a932db4c5c3203943f2
|
/data/atramData/sites/umms/components/umms_appoint/recruitment_section/items/identifycandidate.py
|
15b936079177324e9747e3b5eadd4ee90c2449b5
|
[] |
no_license
|
longooglite/mps
|
8fb2093b6a9f483a2ce4543949f7cbf0b280a1f1
|
fd8c0d1491b80074fdf5a8c923d50e55a1991ad0
|
refs/heads/master
| 2021-01-10T08:17:15.852252
| 2016-02-29T21:07:04
| 2016-02-29T21:07:04
| 52,824,830
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,858
|
py
|
# [Copyright]
# SmartPath v1.0
# Copyright 2014-2015 Mountain Pass Solutions, Inc.
# This unpublished material is proprietary to Mountain Pass Solutions, Inc.
# [End Copyright]
identifycandidate = {
"code": "identifycandidate",
"descr": "Identify Candidate",
"header": "Identify Candidate",
"componentType": "Task",
"affordanceType":"Item",
"optional": False,
"enabled": True,
"logEnabled": True,
"freezable": True,
"overviewOnly": False,
"accessPermissions": ["dept_task"],
"viewPermissions": ["dept_task","ofa_task","mss_task"],
"blockers": ["jobposting"],
"statusMsg": "Candidate Identified",
"successMsg":"Candidate information saved",
"className": "IdentifyCandidate",
"config": {
"dashboardEvents": [{
"code":"rfpapproved",
"eventType":"remove",
},{
"code":"jopposted",
"eventType":"remove",
},{
"code":"readyforjobposting",
"eventType":"remove",
}],
"prompts": [
{
"code": "username",
"label": "Username",
"enabled": False,
"required": False,
"ldapsearch": False,
},
{
"code": "first_name",
"label": "First Name",
"enabled": True,
"required": True,
"ldapfield": "givenName",
},
{
"code": "middle_name",
"label": "Middle Name",
"enabled": True,
"required": False,
},
{
"code": "last_name",
"label": "Last Name",
"enabled": True,
"required": True,
"ldapfield": "sn",
},
{
"code": "suffix",
"label": "Suffix",
"enabled": True,
"required": False,
},
{
"code": "email",
"label": "Email",
"enabled": True,
"required": False,
"ldapfield": "mail",
},
{
"code": "employee_nbr",
"label": "Employee Nbr",
"enabled": False,
"required": False,
},
],
"activityLog": {
"enabled": True,
"activityLogText": "Candidate Identified",
},
},
}
|
[
"longuskikirk@gmail.com"
] |
longuskikirk@gmail.com
|
870eacbbe2dc97704de593953b693029ce772637
|
c220d55a0a5c7597fe7e86a3dfebdd66695a3b2f
|
/Python/text_algnment.py
|
556a0ff4be82b66fddeb3ca1b3909fe9021529af
|
[] |
no_license
|
eLtronicsVilla/Hackerrank-Problems-Solutions
|
356677a2edce6f5d3f57e5f32a8be058515779bf
|
a24c78f99f10fb8dca69e0e0d6c560d7c0215a29
|
refs/heads/master
| 2020-05-21T18:15:21.893538
| 2019-05-18T07:54:41
| 2019-05-18T07:54:41
| 186,129,319
| 0
| 0
| null | 2019-05-18T07:54:42
| 2019-05-11T12:19:33
| null |
UTF-8
|
Python
| false
| false
| 908
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat May 18 12:29:44 2019
@author: brgupta
"""
# Problem statement
# https://www.hackerrank.com/challenges/text-alignment/problem
#Replace all ______ with rjust, ljust or center.
thickness = int(input()) #This must be an odd number
c = 'H'
#Top Cone
for i in range(thickness):
print((c*i).rjust(thickness-1)+c+(c*i).ljust(thickness-1))
#Top Pillars
for i in range(thickness+1):
print((c*thickness).center(thickness*2)+(c*thickness).center(thickness*6))
#Middle Belt
for i in range((thickness+1)//2):
print((c*thickness*5).center(thickness*6))
#Bottom Pillars
for i in range(thickness+1):
print((c*thickness).center(thickness*2)+(c*thickness).center(thickness*6))
#Bottom Cone
for i in range(thickness):
print(((c*(thickness-i-1)).rjust(thickness)+c+(c*(thickness-i-1)).ljust(thickness)).rjust(thickness*6))
|
[
"eltronicsvilla17@gmail.com"
] |
eltronicsvilla17@gmail.com
|
c46cfb66f6bcb2d0f920aa611e165abe7fe4d9be
|
b2e278f6d606ec0d3e6fa3e15be2f9ed35745c1e
|
/ncolony/beatcheck.py
|
7461221c187dfa2df8febf60a32d24ac340ac807
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
kurtbrose/ncolony
|
deeaf2c1947aa11fcdad00f9071bc3e8067f026e
|
bebbc612866a8bf405dda2ec94ce60fd61b4f3c9
|
refs/heads/master
| 2023-08-18T08:56:58.777571
| 2017-09-19T03:43:27
| 2017-09-19T03:43:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,824
|
py
|
# Copyright (c) Moshe Zadka
# See LICENSE for details.
"""ncolony.beatcheck
====================
Check heartbeats of processes that should beat.
Usually used as
$ twistd -n ncolony_beatcheck --config config --messages messages
It will watch the configurations, and send a restart message
for any process that does not beat within its heartbeat.
Processes are encouraged to try and beat about 3-4 times
faster than the minimum, so that they can miss one beat, and
account for slight timer inaccuracies, and still not be considered
unhealthy.
"""
import functools
import json
import time
from twisted.python import filepath, usage
from twisted.application import internet as tainternet
from ncolony import ctllib
from ncolony.client import heart
def check(path, start, now):
"""check which processes need to be restarted
:params path: a twisted.python.filepath.FilePath with configurations
:params start: when the checker started running
:params now: current time
:returns: list of strings
"""
return [child.basename() for child in path.children()
if _isbad(child, start, now)]
def _isbad(child, start, now):
content = child.getContent()
parsed = json.loads(content)
params = parsed.get('ncolony.beatcheck')
if params is None:
return False
period = params['period']
grace = params['grace']
mtime = max(child.getModificationTime(), start)
if mtime + period*grace >= now:
return False
status = params['status']
statusPath = child.clonePath(status)
if not statusPath.exists():
return True
if statusPath.isdir():
statusPath = statusPath.child(child.basename())
statusMtime = statusPath.getModificationTime()
return (statusMtime + period) < now
def run(restarter, checker, timer):
"""Run restarter on the checker's output
:params restarter: something to run on the output of the checker
:params checker: a function expected to get one argument (current time)
and return a list of stale names
:params timer: a function of zero arguments, intended to return current time
:returns: None
"""
for bad in checker(timer()):
restarter(bad)
def parseConfig(opt):
"""Parse configuration
:params opt: dict-like object with config and messages keys
:returns: restarter, path
"""
places = ctllib.Places(config=opt['config'], messages=opt['messages'])
restarter = functools.partial(ctllib.restart, places)
path = filepath.FilePath(opt['config'])
return restarter, path
def makeService(opt):
"""Make a service
:params opt: dictionary-like object with 'freq', 'config' and 'messages'
:returns: twisted.application.internet.TimerService that at opt['freq']
checks for stale processes in opt['config'], and sends
restart messages through opt['messages']
"""
restarter, path = parseConfig(opt)
now = time.time()
checker = functools.partial(check, path, now)
beatcheck = tainternet.TimerService(opt['freq'], run, restarter, checker, time.time)
beatcheck.setName('beatcheck')
return heart.wrapHeart(beatcheck)
## pylint: disable=too-few-public-methods
class Options(usage.Options):
"""Options for ncolony beatcheck service"""
optParameters = [
["messages", None, None, "Directory for messages"],
["config", None, None, "Directory for configuration"],
["freq", None, 10, "Frequency of checking for updates", float],
]
def postOptions(self):
"""Checks that required messages/config directories are present"""
for param in ('messages', 'config'):
if self[param] is None:
raise usage.UsageError("Missing required", param)
## pylint: enable=too-few-public-methods
|
[
"zadka.moshe@gmail.com"
] |
zadka.moshe@gmail.com
|
423f1675d5bcef619a2c564e602dc00a23745bdc
|
60d9f0ea7764b67b8e2f5b187f9bd98be0ddd93a
|
/scripts/s3_sed.py
|
499e01ac11207255f75bca28098b09e9e2fd744b
|
[
"Apache-2.0"
] |
permissive
|
omad/dratools
|
252136d972a750a228c5d84c3c95293d671a3145
|
17d81dd5e496c5539b0613f4bf25655230bd9f4f
|
refs/heads/master
| 2023-02-03T10:36:52.677072
| 2023-01-19T23:01:16
| 2023-01-19T23:01:16
| 184,683,843
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,029
|
py
|
import click
from odc.aws import s3_client, s3_fetch, s3_dump
from tqdm import tqdm
s3 = None
@click.command('s3-find')
@click.option('--no-sign-request', is_flag=True,
help='Do not sign AWS S3 requests')
@click.argument('file_list', type=click.File('r'), nargs=1)
def cli(file_list, no_sign_request=None):
global s3
s3 = s3_client(aws_unsigned=no_sign_request)
urls = [line.rstrip() for line in file_list.readlines()]
for url in tqdm(urls):
if not url:
continue
tqdm.write(f"Updating {url}", end='')
replace_in_s3_obj(url)
def replace_in_s3_obj(s3_url):
try:
original = s3_fetch(s3_url, s3)
except ValueError as e:
tqdm.write(str(e))
return
contents = original.replace(b'LANDSAT_8', b'LANDSAT_7')
contents = contents.replace(b'OLI', b'ETM')
if original != contents:
s3_dump(contents, s3_url, s3)
tqdm.write('.')
else:
tqdm.write(' - Skipped.')
if __name__ == '__main__':
cli()
|
[
"damien@omad.net"
] |
damien@omad.net
|
cae804eeca224b7c810f2ca72e04cb19244e2022
|
6219e6536774e8eeb4cadc4a84f6f2bea376c1b0
|
/scraper/storage_spiders/vietlongplazacomvn.py
|
beadae51c3d65808932018124f7c2fae1011fb27
|
[
"MIT"
] |
permissive
|
nguyenminhthai/choinho
|
109d354b410b92784a9737f020894d073bea1534
|
d2a216fe7a5064d73cdee3e928a7beef7f511fd1
|
refs/heads/master
| 2023-05-07T16:51:46.667755
| 2019-10-22T07:53:41
| 2019-10-22T07:53:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,019
|
py
|
# Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//td[@id='pro-detail-col-info']/div[@id='product-detail-name']/h1",
'price' : "//div[@class='product-list-price']/p[@class='cssPrice']/font",
'category' : "//div[@id='categoryPath']/a",
'description' : "//div[@id='pro-box-2']/div[4]/div[@id='pro_content_desc']",
'images' : "//table//tr/td[@id='pro_big']/img/@src",
'canonical' : "",
'base_url' : "",
'brand' : ""
}
name = 'vietlongplaza.com.vn'
allowed_domains = ['vietlongplaza.com.vn']
start_urls = ['http://www.vietlongplaza.com.vn/default.aspx']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
rules = [
Rule(LinkExtractor(allow=['/product+-\d+/']), 'parse_item'),
Rule(LinkExtractor(allow=['/category+-\d+/'], deny=['\?','Filter=']), 'parse'),
#Rule(LinkExtractor(), 'parse_item_and_links'),
]
|
[
"nguyenchungthuy.hust@gmail.com"
] |
nguyenchungthuy.hust@gmail.com
|
d355d73ff3bc201e202f27d27a78be42b0db7872
|
8941c8ca788b1a45bfad23ca26ebfa357c13f09b
|
/Lyceum/Mars_Sql_Alchemy/zapros4.py
|
1b12b239fe471f0a4b6e70c4256bc310a8a2b2bc
|
[] |
no_license
|
MysteriousSonOfGod/Python-2
|
d1dfdf094f4a763758bfc7e1777c2cd6efbd0809
|
0d488906e4b5e3897da6b7cb077815740e82fd84
|
refs/heads/master
| 2023-02-05T13:38:25.673248
| 2020-12-22T13:54:02
| 2020-12-22T13:54:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 329
|
py
|
from data import db_session
from data import users
db = input()
db_session.global_init(db)
session = db_session.create_session()
users = session.query(users.User).filter((users.User.position.like("%chief%") | users.User.position.like("%middle%")))
for user in users:
print(f'{user} {user.position}')
# db/mars_explorer.db
|
[
"realandmir@gmail.com"
] |
realandmir@gmail.com
|
442ee1ed35e53bdf671ddf356b0bf7274dddb5a8
|
1beb0d3a73a97c5367cc54d37b34a7536b975d68
|
/practice/morethread.py
|
37fdcd18909e15f831a7ec9abf022bb055e2f262
|
[] |
no_license
|
Hardworking-tester/HuaYing
|
a24aa271afe81c95241818586b1d1d5abd6b4282
|
4dd065806f20bfdec885fa2b40f2c22e5a8d4f15
|
refs/heads/master
| 2021-06-03T10:06:33.604494
| 2017-06-22T09:32:13
| 2017-06-22T09:32:13
| 42,507,030
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 722
|
py
|
# encoding:utf-8
# author:wwg
from selenium import webdriver
import threading
import time
class MyThread(threading.Thread):
def __init__(self,num):
threading.Thread.__init__(self)
self.num=num
def run(self):
start=time.time()
br=webdriver.Firefox()
br.get("https://www.baidu.com")
time.sleep(4)
br.find_element_by_id("kw").send_keys("wwg")
br.find_element_by_id("su").click()
br.quit()
end=time.time()
print u'Thread Object(%d), Time:%s\n,耗时%s s' % (self.num, time.ctime(),(end-start))
def test():
for i in range(1,10):
t = MyThread(i)
t.start()
t.join()
if __name__=="__main__":
test()
|
[
"373391120@qq.com"
] |
373391120@qq.com
|
30e9218fa343c615c68da4f7849636cc0abf4779
|
bd46fe963f29e11691143aad5ae82ea7f974f3eb
|
/test/mitmproxy/test_types.py
|
81aaed7493a4fe6d249d5e18806cfefaea4430f0
|
[
"MIT"
] |
permissive
|
1ezss/mitmproxy
|
a4a934a8fd2d637a532009c46cab2ff3c57c2520
|
6ef6286d8e53a0a9045fa41956e65dae2e41ab6d
|
refs/heads/master
| 2021-08-30T16:53:20.112680
| 2017-12-18T18:50:52
| 2017-12-18T18:50:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,604
|
py
|
import pytest
import os
import typing
import contextlib
from mitmproxy.test import tutils
import mitmproxy.exceptions
import mitmproxy.types
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy import command
from mitmproxy import flow
from . import test_command
@contextlib.contextmanager
def chdir(path: str):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
def test_bool():
with taddons.context() as tctx:
b = mitmproxy.types.Bool()
assert b.completion(tctx.master.commands, bool, "b") == ["false", "true"]
assert b.parse(tctx.master.commands, bool, "true") is True
assert b.parse(tctx.master.commands, bool, "false") is False
with pytest.raises(mitmproxy.exceptions.TypeError):
b.parse(tctx.master.commands, bool, "foo")
def test_str():
with taddons.context() as tctx:
b = mitmproxy.types.Str()
assert b.completion(tctx.master.commands, str, "") == []
assert b.parse(tctx.master.commands, str, "foo") == "foo"
def test_int():
with taddons.context() as tctx:
b = mitmproxy.types.Int()
assert b.completion(tctx.master.commands, int, "b") == []
assert b.parse(tctx.master.commands, int, "1") == 1
assert b.parse(tctx.master.commands, int, "999") == 999
with pytest.raises(mitmproxy.exceptions.TypeError):
b.parse(tctx.master.commands, int, "foo")
def test_path():
with taddons.context() as tctx:
b = mitmproxy.types.PathType()
assert b.parse(tctx.master.commands, mitmproxy.types.Path, "/foo") == "/foo"
assert b.parse(tctx.master.commands, mitmproxy.types.Path, "/bar") == "/bar"
def normPathOpts(prefix, match):
ret = []
for s in b.completion(tctx.master.commands, mitmproxy.types.Path, match):
s = s[len(prefix):]
s = s.replace(os.sep, "/")
ret.append(s)
return ret
cd = os.path.normpath(tutils.test_data.path("mitmproxy/completion"))
assert normPathOpts(cd, cd) == ['/aaa', '/aab', '/aac', '/bbb/']
assert normPathOpts(cd, os.path.join(cd, "a")) == ['/aaa', '/aab', '/aac']
with chdir(cd):
assert normPathOpts("", "./") == ['./aaa', './aab', './aac', './bbb/']
assert normPathOpts("", "") == ['./aaa', './aab', './aac', './bbb/']
assert b.completion(
tctx.master.commands, mitmproxy.types.Path, "nonexistent"
) == ["nonexistent"]
def test_cmd():
with taddons.context() as tctx:
tctx.master.addons.add(test_command.TAddon())
b = mitmproxy.types.CmdType()
assert b.parse(tctx.master.commands, mitmproxy.types.Cmd, "foo") == "foo"
assert len(
b.completion(tctx.master.commands, mitmproxy.types.Cmd, "")
) == len(tctx.master.commands.commands.keys())
def test_cutspec():
with taddons.context() as tctx:
b = mitmproxy.types.CutSpecType()
b.parse(tctx.master.commands, mitmproxy.types.CutSpec, "foo,bar") == ["foo", "bar"]
assert b.completion(
tctx.master.commands, mitmproxy.types.CutSpec, "request.p"
) == b.valid_prefixes
ret = b.completion(tctx.master.commands, mitmproxy.types.CutSpec, "request.port,f")
assert ret[0].startswith("request.port,")
assert len(ret) == len(b.valid_prefixes)
def test_arg():
with taddons.context() as tctx:
b = mitmproxy.types.ArgType()
assert b.completion(tctx.master.commands, mitmproxy.types.Arg, "") == []
assert b.parse(tctx.master.commands, mitmproxy.types.Arg, "foo") == "foo"
def test_strseq():
with taddons.context() as tctx:
b = mitmproxy.types.StrSeq()
assert b.completion(tctx.master.commands, typing.Sequence[str], "") == []
assert b.parse(tctx.master.commands, typing.Sequence[str], "foo") == ["foo"]
assert b.parse(tctx.master.commands, typing.Sequence[str], "foo,bar") == ["foo", "bar"]
class DummyConsole:
@command.command("view.resolve")
def resolve(self, spec: str) -> typing.Sequence[flow.Flow]:
n = int(spec)
return [tflow.tflow(resp=True)] * n
@command.command("cut")
def cut(self, spec: str) -> mitmproxy.types.Data:
return [["test"]]
@command.command("options")
def options(self) -> typing.Sequence[str]:
return ["one", "two", "three"]
def test_flow():
with taddons.context() as tctx:
tctx.master.addons.add(DummyConsole())
b = mitmproxy.types.FlowType()
assert len(b.completion(tctx.master.commands, flow.Flow, "")) == len(b.valid_prefixes)
assert b.parse(tctx.master.commands, flow.Flow, "1")
with pytest.raises(mitmproxy.exceptions.TypeError):
assert b.parse(tctx.master.commands, flow.Flow, "0")
with pytest.raises(mitmproxy.exceptions.TypeError):
assert b.parse(tctx.master.commands, flow.Flow, "2")
def test_flows():
with taddons.context() as tctx:
tctx.master.addons.add(DummyConsole())
b = mitmproxy.types.FlowsType()
assert len(
b.completion(tctx.master.commands, typing.Sequence[flow.Flow], "")
) == len(b.valid_prefixes)
assert len(b.parse(tctx.master.commands, typing.Sequence[flow.Flow], "0")) == 0
assert len(b.parse(tctx.master.commands, typing.Sequence[flow.Flow], "1")) == 1
assert len(b.parse(tctx.master.commands, typing.Sequence[flow.Flow], "2")) == 2
def test_data():
with taddons.context() as tctx:
b = mitmproxy.types.DataType()
with pytest.raises(mitmproxy.exceptions.TypeError):
b.parse(tctx.master.commands, mitmproxy.types.Data, "foo")
with pytest.raises(mitmproxy.exceptions.TypeError):
b.parse(tctx.master.commands, mitmproxy.types.Data, "foo")
def test_choice():
with taddons.context() as tctx:
tctx.master.addons.add(DummyConsole())
b = mitmproxy.types.ChoiceType()
comp = b.completion(tctx.master.commands, mitmproxy.types.Choice("options"), "")
assert comp == ["one", "two", "three"]
assert b.parse(tctx.master.commands, mitmproxy.types.Choice("options"), "one") == "one"
with pytest.raises(mitmproxy.exceptions.TypeError):
b.parse(tctx.master.commands, mitmproxy.types.Choice("options"), "invalid")
def test_typemanager():
assert mitmproxy.types.CommandTypes.get(bool, None)
assert mitmproxy.types.CommandTypes.get(mitmproxy.types.Choice("choide"), None)
|
[
"aldo@nullcube.com"
] |
aldo@nullcube.com
|
49e8a6b69a433379a569875caec380084d2fd049
|
1e0f9d3829665c74a5b4ee79531520fe4cbe2730
|
/clean_data.py
|
557cb06e5dae140dc2261c42809c0787927e0ce7
|
[] |
no_license
|
aparna-arr/AnalysisProject
|
bead344eda6159f83ac19de3be533fdd3acf2087
|
f0d3068e0ac7f15255092f39f000c8009ceb57a2
|
refs/heads/master
| 2023-05-03T10:56:40.511706
| 2019-05-03T18:39:56
| 2019-05-03T18:39:56
| 181,940,284
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 751
|
py
|
#!/share/software/user/open/python/3.6.1/bin/python3
import sys
if len(sys.argv) < 3:
print("usage: clean_data.py <downloaded.csv> <output_name.tsv>\n", file = sys.stderr)
sys.exit(1)
downloadFile = sys.argv[1]
outputFilename = sys.argv[2]
dfh = open(downloadFile, "r")
dout = open(outputFilename, "w")
dout.write("ChrIndex\tBarcode\tx\ty\tz\n")
firstLine = True
for line in dfh:
if (firstLine == True):
firstLine = False
continue
elem = line.rstrip().split(',')
if not elem[0].isdigit():
continue
chr_index = elem[0]
barcode_index = elem[1] # Bogdan calls these "segments"
z = elem[2]
x = elem[3]
y = elem[4]
dout.write(chr_index + "\t" + barcode_index + "\t" + x + "\t" + y + "\t" + z + "\n")
dout.close()
dfh.close()
|
[
"aparna.arr@gmail.com"
] |
aparna.arr@gmail.com
|
f6e47fbd2cd310fabb799996d61d9fecb0edcf08
|
480e33f95eec2e471c563d4c0661784c92396368
|
/CondTools/SiStrip/test/SiStripFedCablingBuilder_cfg.py
|
e5c0d47c76984fe444dc432b4f022b77a68f190d
|
[
"Apache-2.0"
] |
permissive
|
cms-nanoAOD/cmssw
|
4d836e5b76ae5075c232de5e062d286e2026e8bd
|
4eccb8a758b605875003124dd55ea58552b86af1
|
refs/heads/master-cmsswmaster
| 2021-01-23T21:19:52.295420
| 2020-08-27T08:01:20
| 2020-08-27T08:01:20
| 102,867,729
| 7
| 14
|
Apache-2.0
| 2022-05-23T07:58:09
| 2017-09-08T14:03:57
|
C++
|
UTF-8
|
Python
| false
| false
| 2,154
|
py
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("FedCablingBuilder")
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring(''),
cablingBuilder = cms.untracked.PSet(
threshold = cms.untracked.string('INFO')
),
destinations = cms.untracked.vstring('cablingBuilder.log')
)
process.source = cms.Source("EmptySource",
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.load("CalibTracker.SiStripESProducers.SiStripFedCablingFakeESSource_cfi")
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
BlobStreamerName = cms.untracked.string('TBufferBlobStreamingService'),
DBParameters = cms.PSet(
messageLevel = cms.untracked.int32(2),
authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
),
timetype = cms.untracked.string('runnumber'),
connect = cms.string('sqlite_file:dummy2.db'),
toPut = cms.VPSet(cms.PSet(
record = cms.string('SiStripFedCablingRcd'),
tag = cms.string('SiStripFedCabling_30X')
))
)
process.load("Configuration.StandardSequences.Geometry_cff")
process.TrackerDigiGeometryESModule.applyAlignment = False
process.SiStripConnectivity = cms.ESProducer("SiStripConnectivity")
process.SiStripRegionConnectivity = cms.ESProducer("SiStripRegionConnectivity",
EtaDivisions = cms.untracked.uint32(20),
PhiDivisions = cms.untracked.uint32(20),
EtaMax = cms.untracked.double(2.5)
)
process.fedcablingbuilder = cms.EDAnalyzer("SiStripFedCablingBuilder",
PrintFecCabling = cms.untracked.bool(True),
PrintDetCabling = cms.untracked.bool(True),
PrintRegionCabling = cms.untracked.bool(True)
)
process.p1 = cms.Path(process.fedcablingbuilder)
|
[
"giulio.eulisse@gmail.com"
] |
giulio.eulisse@gmail.com
|
80f02038f06487eee9227b752dc0cff496435fd7
|
5e84763c16bd6e6ef06cf7a129bb4bd29dd61ec5
|
/blimgui/dist/OpenGL/raw/EGL/ANGLE/window_fixed_size.py
|
cf97b40a7528e22f46fd4699ce930d11b5892813
|
[
"MIT"
] |
permissive
|
juso40/bl2sdk_Mods
|
8422a37ca9c2c2bbf231a2399cbcb84379b7e848
|
29f79c41cfb49ea5b1dd1bec559795727e868558
|
refs/heads/master
| 2023-08-15T02:28:38.142874
| 2023-07-22T21:48:01
| 2023-07-22T21:48:01
| 188,486,371
| 42
| 110
|
MIT
| 2022-11-20T09:47:56
| 2019-05-24T20:55:10
|
Python
|
UTF-8
|
Python
| false
| false
| 569
|
py
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.EGL import _types as _cs
# End users want this...
from OpenGL.raw.EGL._types import *
from OpenGL.raw.EGL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'EGL_ANGLE_window_fixed_size'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.EGL,'EGL_ANGLE_window_fixed_size',error_checker=_errors._error_checker)
EGL_FIXED_SIZE_ANGLE=_C('EGL_FIXED_SIZE_ANGLE',0x3201)
|
[
"justin.sostmann@googlemail.com"
] |
justin.sostmann@googlemail.com
|
93e7ded10a0e1b59d1fad0eccde6bf12d2f9c630
|
d281aed005dae06a723c01be4d516b8b5333bc15
|
/Array/MajorityElement.py
|
ef16af12bd9e7aaa5a24de99e1ef3706bdd0ed09
|
[] |
no_license
|
tcandzq/LeetCode
|
4133d17245b2ff14e06ce69ee640a786fad5186d
|
af5dc310534f12a6ded10226ce05aba65ec119d9
|
refs/heads/master
| 2022-08-25T13:57:07.350906
| 2022-08-21T09:46:09
| 2022-08-21T09:46:09
| 200,478,099
| 23
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 975
|
py
|
# -*- coding: utf-8 -*-
# @File : MajorityElement.py
# @Date : 2021-06-14
# @Author : tc
"""
题号:169. 多数元素
给定一个大小为 n 的数组,找到其中的多数元素。多数元素是指在数组中出现次数 大于 ⌊ n/2 ⌋ 的元素。
你可以假设数组是非空的,并且给定的数组总是存在多数元素。
示例 1:
输入:[3,2,3]
输出:3
示例 2:
输入:[2,2,1,1,1,2,2]
输出:2
进阶:
尝试设计时间复杂度为 O(n)、空间复杂度为 O(1) 的算法解决此问题。
使用摩尔投票法
"""
from typing import List
class Solution:
def majorityElement(self, nums: List[int]) -> int:
majority = nums[0]
count = 1
for i in range(1, len(nums)):
if count == 0:
count = 1
majority = nums[i]
elif nums[i] == majority:
count += 1
else:
count -= 1
return majority
|
[
"2448424636@qq.com"
] |
2448424636@qq.com
|
9808b4581cc3f641d8daf02daea56ecfcb5b01ed
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2296/60631/291065.py
|
a4d720bbeb3002b562678cf3797e12d4d4aee4fd
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,025
|
py
|
s=input()
d=[]
for i in range(int(s.split(' ')[0])+1):
d.append(input())
if 1==2:
pass
elif d==['1 2 3 -3', '2 4 5 3', '4 0 0 1', '5 8 9 0', '8 0 0 1', '9 0 0 6', '3 6 7 -9', '6 0 0 2', '7 0 0 1', '-9']:
print(1)
elif d==['29 26 32 -70', '26 33 34 -19', '33 0 0 31', '34 0 0 -94', '32 15 17 76', '15 3 0 -28', '3 0 11 32', '11 24 0 -51', '24 0 0 -92', '17 18 30 55', '18 22 21 -4', '22 0 0 67', '21 2 14 1', '2 6 23 -92', '6 0 8 74', '8 0 0 65', '23 0 9 85', '9 16 0 43', '16 0 12 -53', '12 0 0 55', '14 0 31 -68', '31 35 0 -31', '35 0 0 -17', '30 0 4 29', '4 19 10 8', '19 0 28 34', '28 25 0 -63', '25 5 0 49', '5 0 0 98', '10 27 1 -88', '27 20 0 52', '20 7 13 50', '7 0 0 -18', '13 0 0 78', '1 0 0 60', '50']:
print(1)
elif d==['1 2 3 -3', '2 4 5 3', '4 0 0 1', '5 8 9 0', '8 0 0 1', '9 0 0 6', '3 6 7 -9', '6 0 0 2', '7 0 0 1', '6']:
print(4)
elif s=='9 1' and d==['1 2 3 -3', '2 4 5 3', '4 0 0 1', '5 8 9 0', '8 0 0 1', '9 0 0 6', '3 6 7 -9', '6 0 0 2', '7 0 0 1','3']:
print(2)
else:
print(d)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
eca57f31e9802ba4328939eec984af1f160294f6
|
73de82808577f5e2da4b76a154c4e6d43c6cc2d4
|
/backend/wallet/api/v1/serializers.py
|
c044e55d54ef96d4be9c7dda7a87251f7e8d2340
|
[] |
no_license
|
crowdbotics-apps/alpha-dty-26245
|
f3c3dc059289458b3ad27afa34acced6820e3a07
|
7ee5cbcb689534cb6415b5b4bfceeda3f43e1e65
|
refs/heads/master
| 2023-04-24T13:29:18.879301
| 2021-05-06T11:02:46
| 2021-05-06T11:02:46
| 364,878,526
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 875
|
py
|
from rest_framework import serializers
from wallet.models import (
PaymentTransaction,
PaymentMethod,
TaskerWallet,
TaskerPaymentAccount,
CustomerWallet,
)
class TaskerWalletSerializer(serializers.ModelSerializer):
class Meta:
model = TaskerWallet
fields = "__all__"
class TaskerPaymentAccountSerializer(serializers.ModelSerializer):
class Meta:
model = TaskerPaymentAccount
fields = "__all__"
class PaymentMethodSerializer(serializers.ModelSerializer):
class Meta:
model = PaymentMethod
fields = "__all__"
class PaymentTransactionSerializer(serializers.ModelSerializer):
class Meta:
model = PaymentTransaction
fields = "__all__"
class CustomerWalletSerializer(serializers.ModelSerializer):
class Meta:
model = CustomerWallet
fields = "__all__"
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
049dfff069929c8e2c65f2b066c979d7bfb17778
|
b3455474da0bc27c913ff88908be0d0bddba352d
|
/4.Analysis/Chapter.03_Excel/8)pandas_value_in_set.py
|
8e188d8067ac2a20faec45648ba3087ce76a5ce5
|
[] |
no_license
|
rntva/JumpToPython
|
7286bc94e40b553fa7b9fbca7934f2e35f63b54e
|
090f0ed5bf28ae7832e5edde11936b71b4fb324b
|
refs/heads/master
| 2021-05-01T02:33:44.528975
| 2018-07-18T08:24:07
| 2018-07-18T08:24:07
| 121,182,629
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 466
|
py
|
#!/usr/bin/env python3
import sys
import pandas as pd
input_file = sys.argv[1]
output_file = sys.argv[2]
data_frame = pd.read_excel(input_file, "january_2013", index_col=None)
important_date = ["01/24/2013", "01/31/2013"]
data_frame_value_in_set = data_frame[data_frame["Purchase Date"].isin(important_date)]
writer = pd.ExcelWriter(output_file)
data_frame_value_in_set.to_excel(writer, sheet_name="january_2013_output", index=False)
writer.save()
print("End.")
|
[
"ltrodl@gmail.com"
] |
ltrodl@gmail.com
|
874e1406ac2f5f2aebbd9596a503a5a03c41ec9f
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startCirq1859.py
|
9a11066eea35baf199d4b6510c65419101a0c599
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,368
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=62
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=3
c.append(cirq.H.on(input_qubit[1])) # number=4
c.append(cirq.H.on(input_qubit[2])) # number=5
c.append(cirq.H.on(input_qubit[3])) # number=6
c.append(cirq.H.on(input_qubit[0])) # number=41
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=42
c.append(cirq.H.on(input_qubit[0])) # number=43
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=59
c.append(cirq.Z.on(input_qubit[1])) # number=60
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=61
c.append(cirq.H.on(input_qubit[0])) # number=51
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=52
c.append(cirq.H.on(input_qubit[0])) # number=53
c.append(cirq.H.on(input_qubit[4])) # number=21
c.append(cirq.X.on(input_qubit[2])) # number=39
for i in range(2):
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=8
c.append(cirq.H.on(input_qubit[0])) # number=56
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=57
c.append(cirq.H.on(input_qubit[0])) # number=58
c.append(cirq.H.on(input_qubit[0])) # number=48
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=49
c.append(cirq.H.on(input_qubit[0])) # number=50
c.append(cirq.Z.on(input_qubit[3])) # number=46
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=47
c.append(cirq.X.on(input_qubit[4])) # number=40
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=35
c.append(cirq.H.on(input_qubit[0])) # number=17
c.append(cirq.H.on(input_qubit[1])) # number=18
c.append(cirq.CNOT.on(input_qubit[4],input_qubit[3])) # number=54
c.append(cirq.H.on(input_qubit[2])) # number=19
c.append(cirq.H.on(input_qubit[3])) # number=20
c.append(cirq.X.on(input_qubit[0])) # number=9
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=29
c.append(cirq.X.on(input_qubit[1])) # number=30
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=31
c.append(cirq.X.on(input_qubit[2])) # number=11
c.append(cirq.X.on(input_qubit[1])) # number=44
c.append(cirq.X.on(input_qubit[3])) # number=12
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=24
c.append(cirq.X.on(input_qubit[0])) # number=25
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=26
c.append(cirq.X.on(input_qubit[1])) # number=14
c.append(cirq.X.on(input_qubit[2])) # number=15
c.append(cirq.X.on(input_qubit[3])) # number=16
c.append(cirq.X.on(input_qubit[1])) # number=22
c.append(cirq.Y.on(input_qubit[1])) # number=32
c.append(cirq.X.on(input_qubit[1])) # number=23
c.append(cirq.CNOT.on(input_qubit[4],input_qubit[3])) # number=55
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 5
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq1859.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
cb66633d69cdc51aabed0dce4c52fdc9d9046f0c
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-sblp/sblp_ut=3.5_rd=1_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=31/sched.py
|
c3f11e6cb2e1ff4b81896f5c25e2978f581d045c
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 561
|
py
|
-S 0 -X RUN -Q 0 -L 1 113 400
-S 0 -X RUN -Q 0 -L 1 57 250
-S 0 -X RUN -Q 0 -L 1 48 175
-S 0 -X RUN -Q 0 -L 1 44 300
-S 1 -X RUN -Q 0 -L 1 42 150
-S 2 -X RUN -Q 1 -L 1 42 200
-S 2 -X RUN -Q 1 -L 1 39 250
-S 2 -X RUN -Q 1 -L 1 35 175
-S 2 -X RUN -Q 1 -L 1 34 150
-S 3 -X RUN -Q 2 -L 1 34 100
-S 3 -X RUN -Q 2 -L 1 33 175
-S 3 -X RUN -Q 2 -L 1 32 125
-S 4 -X RUN -Q 2 -L 1 31 125
-S 5 -X RUN -Q 3 -L 1 29 250
-S 5 -X RUN -Q 3 -L 1 25 175
-S 5 -X RUN -Q 3 -L 1 15 125
-S 5 -X RUN -Q 3 -L 1 8 100
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
e264fcd14c8db9adc7b7a2c16860d352a8256379
|
f83f053278a036e18466d85585bc03a28c0f140a
|
/tests/formats/dataclass/parsers/test_mixins.py
|
209484d24ae72b068ebc67cf01a8379ef99b2d78
|
[
"MIT"
] |
permissive
|
finswimmer/xsdata
|
dd951124e378bf9f4d8bd6939e4ebe542c677ee2
|
eed822b83f362f48561a7d116e181a5422ff52dd
|
refs/heads/master
| 2023-05-05T21:16:20.693559
| 2021-05-31T16:11:44
| 2021-05-31T16:33:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,261
|
py
|
from unittest.case import TestCase
from tests.fixtures.books import Books
from tests.fixtures.books.fixtures import books
from tests.fixtures.books.fixtures import events
from xsdata.exceptions import XmlHandlerError
from xsdata.formats.dataclass.parsers.mixins import EventsHandler
from xsdata.formats.dataclass.parsers.mixins import XmlHandler
from xsdata.formats.dataclass.parsers.nodes import RecordParser
class XmlHandlerTests(TestCase):
def test_process(self):
parser = RecordParser()
handler = XmlHandler(clazz=Books, parser=parser)
self.assertEqual([], handler.queue)
self.assertEqual([], handler.objects)
with self.assertRaises(NotImplementedError):
handler.parse(None)
class EventsHandlerTests(TestCase):
def setUp(self) -> None:
self.parser = RecordParser(handler=EventsHandler)
def test_parse(self):
self.assertEqual(books, self.parser.parse(events, Books))
self.assertEqual({"brk": "urn:books"}, self.parser.ns_map)
def test_parse_with_unhandled_event(self):
with self.assertRaises(XmlHandlerError) as cm:
self.parser.parse([("reverse", "")], Books)
self.assertEqual("Unhandled event: `reverse`.", str(cm.exception))
|
[
"tsoulloftas@gmail.com"
] |
tsoulloftas@gmail.com
|
63de83bc58b69a246b3000aac1ba64b7ce19d9a1
|
462c56e7454c97e0541588b9be66a4e216ea20fd
|
/133.clone-graph.py
|
81d21631ec87e2710b8dbedfb34003bd67d6784d
|
[] |
no_license
|
LouisYLWang/leetcode_python
|
d5ac6289e33c5d027f248aa3e7dd66291354941c
|
2ecaeed38178819480388b5742bc2ea12009ae16
|
refs/heads/master
| 2020-05-27T08:38:48.532000
| 2019-12-28T07:08:57
| 2019-12-28T07:08:57
| 188,549,256
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 793
|
py
|
#
# @lc app=leetcode id=133 lang=python3
#
# [133] Clone Graph
#
# @lc code=start
"""
# Definition for a Node.
class Node:
def __init__(self, val, neighbors):
self.val = val
self.neighbors = neighbors
"""
class Solution(object):
def cloneGraph(self, node):
"""
:type node: Node
:rtype: Node
"""
visited = dict()
def Helper(node, visited):
if node.val not in visited and node:
new_node = Node(node.val, list())
visited[node.val] = new_node
for node_ in node.neighbors:
new_node.neighbors.append(Helper(node_, visited))
return new_node
return visited[node.val]
return Helper(node, visited)
# @lc code=end
|
[
"louis.yl.wang@outlook.com"
] |
louis.yl.wang@outlook.com
|
f092d8f5c0700bd1f0b7ef271f3d6632db2faa22
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/8vBvgJMc2uQJpD6d7_16.py
|
bb784952c2b0da9b4ee9ef08d2ea35759ff349aa
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 620
|
py
|
"""
Create a function that returns a list containing the prime factors of whatever
integer is passed to it.
### Examples
prime_factors(20) ➞ [2, 2, 5]
prime_factors(100) ➞ [2, 2, 5, 5]
prime_factors(8912234) ➞ [2, 47, 94811]
### Notes
* Implement your solution using trial division.
* Your solution should not require recursion.
"""
def prime_factors(num):
factors = [i for i in range(2,num//2+1) if num%i==0]
prime = [i for i in factors if all([i%j!=0 for j in range(2,i)])]
res = []
for i in prime:
while num%i==0:
res.append(i)
num /= i
return res
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.