Dataset schema (one row per source file; ⌀ marks nullable columns):

| column | dtype | stats |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
hexsha: 9bbdbda9b81987418bcce3b99adb7801410b9388 | size: 1,658 | ext: py | lang: Python | path: figures/apps.py | repo: groovetch/edx-figures @ a69fc1195c05176ac7dae90b337dd77f4bd9679f | licenses: ["MIT"] | stars: 43 (2018-05-29T20:01:25.000Z – 2021-12-02T09:43:17.000Z) | issues: 330 (2018-05-30T17:06:15.000Z – 2022-03-16T15:52:22.000Z) | forks: 40 (2018-10-06T00:15:58.000Z – 2022-02-14T12:44:45.000Z)
"""
Provides application configuration for Figures.
It also provides default values for running Figures, along with functions
that add the entries needed to run Figures to the Django conf settings.
"""
from __future__ import absolute_import
from django.apps import AppConfig
try:
from openedx.core.djangoapps.plugins.constants import (
ProjectType, SettingsType, PluginURLs, PluginSettings
)
PLATFORM_PLUGIN_SUPPORT = True
except ImportError:
# pre-hawthorn
PLATFORM_PLUGIN_SUPPORT = False
if PLATFORM_PLUGIN_SUPPORT:
def production_settings_name():
"""
Helper for Hawthorn and Ironwood+ compatibility.
        This helper will explicitly break if something has changed in `SettingsType`.
"""
if hasattr(SettingsType, 'AWS'):
# Hawthorn and Ironwood
return getattr(SettingsType, 'AWS')
else:
# Juniper and beyond.
return getattr(SettingsType, 'PRODUCTION')
class FiguresConfig(AppConfig):
"""
Provides application configuration for Figures.
"""
name = 'figures'
verbose_name = 'Figures'
if PLATFORM_PLUGIN_SUPPORT:
plugin_app = {
PluginURLs.CONFIG: {
ProjectType.LMS: {
PluginURLs.NAMESPACE: u'figures',
PluginURLs.REGEX: u'^figures/',
}
},
PluginSettings.CONFIG: {
ProjectType.LMS: {
production_settings_name(): {
PluginSettings.RELATIVE_PATH: u'settings.lms_production',
},
}
},
}
avg_line_length: 27.180328 | max_line_length: 86 | alphanum_fraction: 0.603739
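The `production_settings_name` helper above exists because Open edX renamed the production settings constant from `SettingsType.AWS` (Hawthorn/Ironwood) to `SettingsType.PRODUCTION` (Juniper and later). A minimal, self-contained sketch of the same fallback pattern, using a stand-in `SettingsType` stub rather than the real `openedx` constants:

```python
# Stand-in for openedx.core.djangoapps.plugins.constants.SettingsType;
# the real class is only available inside an Open edX deployment.
class SettingsType:
    PRODUCTION = 'production'  # Juniper+ spelling (assumed stub)

def production_settings_name():
    # Hawthorn/Ironwood exposed SettingsType.AWS; prefer it when present
    # so the plugin registers its settings under the right key.
    if hasattr(SettingsType, 'AWS'):
        return getattr(SettingsType, 'AWS')
    return getattr(SettingsType, 'PRODUCTION')

print(production_settings_name())  # -> 'production' with the stub above
```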
hexsha: 6cab2a5a61defa5bb28e5154ee2ae0775e64f803 | size: 1,386 | ext: py | lang: Python | path: backbone/utils/tokenize.py | repo: wutong8023/PLM4CL @ 4e9e98be425150ad75468b26feb8fb7f5e93c34b | licenses: ["MIT"] | stars: 1 (2021-06-30T04:27:57.000Z – 2021-06-30T04:27:57.000Z) | issues: null | forks: null
"""
Author: Tong
Time: 09-03-2020
"""
import torch
from transformers import BertModel, BertTokenizer
from backbone import import_from
from backbone import supported_ptm
class CustomizedTokenizer:
def __init__(self, max_len=36, ptm="bert", special_token=()):
self.ptm = ptm.lower()
assert self.ptm in supported_ptm
ptmTokenizer = import_from("transformers", supported_ptm[self.ptm][0] + "Tokenizer")
self.tokenizer = ptmTokenizer.from_pretrained(supported_ptm[self.ptm][1], cache_dir="cache_model/")
if self.ptm == "gpt2":
self.tokenizer.pad_token = self.tokenizer.eos_token
# if len(special_token) > 0:
# self.tokenizer.add_tokens(special_token)
self.max_len = max_len
def customized_tokenize(self, sentence):
token_dict = self.tokenizer(sentence,
add_special_tokens=True,
max_length=self.max_len,
padding='max_length',
truncation=True,
return_attention_mask=True,
return_tensors='pt'
)
return token_dict['input_ids'], token_dict['attention_mask']
def __len__(self):
return len(self.tokenizer)
avg_line_length: 34.65 | max_line_length: 107 | alphanum_fraction: 0.569264
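A usage sketch for `CustomizedTokenizer` above; it assumes the `backbone` package is importable and that `supported_ptm` maps `"bert"` to a `(class_prefix, pretrained_name)` pair such as `("Bert", "bert-base-uncased")`, as the constructor's indexing implies:

```python
# Usage sketch; the package layout and the "bert" entry are assumptions.
from backbone.utils.tokenize import CustomizedTokenizer

tokenizer = CustomizedTokenizer(max_len=36, ptm="bert")
input_ids, attention_mask = tokenizer.customized_tokenize("A short test sentence.")

print(input_ids.shape)       # torch.Size([1, 36]): padded/truncated to max_len
print(attention_mask.shape)  # torch.Size([1, 36]): 1 = real token, 0 = padding
print(len(tokenizer))        # vocabulary size of the underlying tokenizer
```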
hexsha: 8619fa02f844007d102a0a9f78363d783853fee1 | size: 3,798 | ext: py | lang: Python | path: distance/asyncmongo.py | repo: shannon-jia/distance @ dc9a2c0736b8e8578ba1a1ac87c8650ba95e1d90 | licenses: ["MIT"] | stars: null | issues: null | forks: null
import asyncio
from motor import motor_asyncio
from pymongo.errors import AutoReconnect, ConnectionFailure
import logging
from urllib.parse import urlparse
# import pprint
log = logging.getLogger(__name__)
class AsyncMongo():
DEFAULT_PORT = 27017
def __init__(self, uri=None, loop=None, collection='links'):
_uri = uri or 'mongodb://localhost:27017/mean'
_url = urlparse(_uri)
_host = _url.hostname or 'localhost'
        _port = _url.port or self.DEFAULT_PORT
# _login = _url.username
# _password = _url.password
_db_name = _url.path[1:]
log.debug("{}:{}/{}".format(_host, _port, _db_name))
self.uri = _uri
self.db_name = _db_name or 'mean'
self.loop = loop or asyncio.get_event_loop()
self.connected = False
self._conn = None
self.collection = collection
async def do_find(self, collection=None, filter=None):
while self.connected is not True:
await asyncio.sleep(1.0, loop=self.loop)
_collection = collection or self.collection
c = self.db[_collection]
docs = []
async for doc in c.find(filter):
docs.append(doc)
log.debug("Collection [{}]: {}".format(_collection, docs))
return docs
#############################################################
async def _connect(self):
self._conn = motor_asyncio.AsyncIOMotorClient(
self.uri,
io_loop=self.loop
)
try:
self.connected = await self.wait_db()
except AutoReconnect as e:
log.error("Couldn't connect to db %s", self.uri)
self.connected = await self.wait_db()
if self.connected:
self.db = self._conn[self.db_name]
            log.info('Connected successfully.')
async def _disconnect(self):
if self._conn is not None:
self._conn = None
async def ping(self):
try:
await self._conn.admin.command({'ping': 1})
return True
except ConnectionFailure:
log.error('Connection Failure.')
return False
async def wait_db(self):
pong = False
while not pong:
pong = await self.ping()
if not pong:
log.warning('%s is unavailable. Waiting.',
self.uri)
await asyncio.sleep(1.0, loop=self.loop)
return True
async def reconnector(self):
while True:
if self.connected is True:
await asyncio.sleep(10.0, loop=self.loop)
else:
await self._disconnect()
self.connected = False
await self._connect()
def main(debug=True):
# configure log
log = logging.getLogger("")
formatter = logging.Formatter("%(asctime)s %(levelname)s " +
"[%(module)s] %(message)s")
# log the things
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
# ch.setLevel(logging.DEBUG)
# ch.setLevel(logging.ERROR)
# ch.setLevel(logging.CRITICAL)
if debug:
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.INFO)
ch.setFormatter(formatter)
log.addHandler(ch)
global loop
loop = asyncio.get_event_loop()
loop.set_debug(0)
db = AsyncMongo(uri='mongodb://192.168.1.162:27017/mean')
db_task = loop.create_task(db.reconnector())
loop.run_until_complete(db.do_find(filter={'name': "SEG_11_2"}))
try:
loop.run_forever()
except KeyboardInterrupt:
db_task.cancel()
loop.run_until_complete(db_task)
finally:
loop.close()
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
exit(1)
avg_line_length: 29.44186 | max_line_length: 68 | alphanum_fraction: 0.577146
hexsha: b33d19c797a6c6eb489276f40c6ce4a7a03a0e51 | size: 477 | ext: py | lang: Python | path: pfxbrick/scripts/pfxdump.py | repo: fx-bricks/pfx-brick-py @ dacd3c04fcbbecef7a6a5d9603ed8cf9a2eb3469 | licenses: ["MIT"] | stars: 11 (2018-04-26T22:53:44.000Z – 2022-02-14T13:44:27.000Z) | issues: 3 (2021-01-14T18:48:48.000Z – 2022-02-16T04:06:06.000Z) | forks: 2 (2018-05-20T11:30:53.000Z – 2022-02-15T06:42:37.000Z)
#! /usr/bin/env python3
from sys import argv
from pfxbrick import *
if __name__ == "__main__":
if len(argv) < 2:
print("Usage: pfxdump address bytes")
print(
" where address is the flash start address and bytes is number of bytes to dump"
)
exit()
b = PFxBrick()
    r = b.open()
if not r:
exit()
rb = flash_read(b, int(argv[1], 16), int(argv[2]))
pprint_bytes(rb, argv[1])
b.close()
avg_line_length: 21.681818 | max_line_length: 93 | alphanum_fraction: 0.557652
hexsha: bc7e728f5dc37589b2e9da778da24228636e7fbb | size: 3,073 | ext: py | lang: Python | path: test/http/case.py | repo: tkomatsu/webserv @ 9c54145ddc95eabfff6744c7c77176a4a63370ae | licenses: ["MIT"] | stars: null | issues: 51 (2021-08-05T11:47:52.000Z – 2021-09-29T04:04:42.000Z) | forks: 1 (2021-09-21T14:28:32.000Z – 2021-09-21T14:28:32.000Z)
#!/usr/bin/env python3
# coding: UTF-8
import requests
import subprocess
import time
import run
import re
index = 0
class Color:
BLACK = '\033[30m'
RED = '\033[31m'
GREEN = '\033[32m'
YELLOW = '\033[33m'
BLUE = '\033[34m'
PURPLE = '\033[35m'
CYAN = '\033[36m'
WHITE = '\033[37m'
END = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
INVISIBLE = '\033[08m'
REVERCE = '\033[07m'
class Case():
def __init__(self, name, method, uri, headers=None, body=None):
method = method.lower()
global index
index += 1
try:
if method == "get":
self.r = requests.get(uri, headers=headers, allow_redirects=False)
elif method == "post":
self.r = requests.post(uri, headers=headers, data=body, allow_redirects=False)
elif method == "delete":
self.r = requests.delete(uri, headers=headers, allow_redirects=False)
self.r.encoding = self.r.apparent_encoding
print("\n" + Color.UNDERLINE + str(index) + ". " + run.config_name + ": " + name + Color.END + "\n\n" + method.upper(), self.r.request.path_url, "HTTP/1.1")
if self.r.request.headers:
for header in self.r.request.headers:
print(header + ": " + self.r.request.headers[header])
if self.r.request.body:
print("\n" + self.r.request.body)
print()
        except Exception:
print(Color.RED + "ERROR in requests" + Color.END)
subprocess.run(('pkill', 'webserv'))
exit(1)
    # Does status_code match the expected value?
def status_code(self, expected):
if expected == self.r.status_code:
print(Color.GREEN + "status_code is " + str(expected) + Color.END)
else:
print(Color.RED + "status_code is " + str(self.r.status_code) + "\nBUT expected was " + str(expected) + Color.END)
subprocess.run(('pkill', 'webserv'))
exit(1)
    # Does the body match the expected regex?
def body_is(self, expected):
if re.match(expected, self.r.text):
print(Color.GREEN + "body is " + expected + Color.END)
else:
print(Color.RED + "body DIDN'T match the regex, " + expected + Color.END)
subprocess.run(('pkill', 'webserv'))
exit(1)
    # Does the body contain the expected string?
def body_has(self, expected):
if expected in self.r.text:
print(Color.GREEN + "body has \"" + expected + "\"" + Color.END)
else:
print(Color.RED + "body DIDN'T have \"" + expected + "\"" + Color.END)
subprocess.run(('pkill', 'webserv'))
exit(1)
    # Do the headers contain the expected key with the expected value?
def header(self, expected_key, expected_val):
if expected_key in self.r.headers and self.r.headers[expected_key] == expected_val:
print(Color.GREEN + "header has " + expected_key + ": " + expected_val + Color.END)
else:
print(Color.RED + "header DIDN'T have " + expected_key + ": " + expected_val + Color.END)
subprocess.run(('pkill', 'webserv'))
exit(1)
    # Do the headers contain the expected key?
def header_has(self, expected):
if expected in self.r.headers:
print(Color.GREEN + "header has the key, " + expected + Color.END)
else:
print(Color.RED + "header DIDN'T have the key, " + expected + Color.END)
subprocess.run(('pkill', 'webserv'))
exit(1)
avg_line_length: 30.73 | max_line_length: 159 | alphanum_fraction: 0.636837
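A sketch of how the `Case` harness above is meant to be driven; the URL, expected status, header, and body are illustrative assumptions, and a `webserv` instance must already be listening:

```python
# Illustrative test case; endpoint and expectations are assumptions.
c = Case("simple GET", "GET", "http://localhost:8080/index.html")
c.status_code(200)                      # on mismatch: pkill webserv, exit(1)
c.header("Content-Type", "text/html")   # header key and value must both match
c.body_has("<html>")                    # substring check against the body
```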
hexsha: 67e5b2e175507543c249a4effe9b93988cd7e794 | size: 1,349 | ext: py | lang: Python | path: statement_parser/expenses/legal.py | repo: jamiehannaford/statement-parser @ 93925b5903a4570f66f3e7b7d5d839412bde1da0 | licenses: ["MIT"] | stars: 5 (2021-09-01T03:27:02.000Z – 2022-03-31T16:31:23.000Z) | issues: null | forks: null
from statement_parser.expenses.expense import Expense
from statement_parser.expense_group import ExpenseGroup
from statement_parser.expenses.constants import NAME_LEGAL
from xbrl.instance import NumericFact
TAGS_LEGAL = [
"GainLossRelatedToLitigationSettlement".lower(),
"LossContingencyLossInPeriod".lower(),
"PaymentsForLegalSettlements".lower(),
"LitigationSettlementExpense".lower(),
"DefinedBenefitPlanRecognizedNetGainLossDueToSettlements1".lower(),
# "LitigationExpenseExcludingLegalServiceProvider".lower(),
]
class LegalExpenseGroup(ExpenseGroup):
def __init__(self, instance, labels, profile):
super().__init__(NAME_LEGAL, TAGS_LEGAL, instance, labels=labels, profile=profile)
self.filter_highest_only = True
def is_cost(self, fact, label):
if super().is_cost(fact, label):
return True
if not isinstance(fact, NumericFact):
return False
concept_id = fact.concept.xml_id.lower()
terms = ["TaxIndemnificationArrangement", "LitigationExpense"]
        if any(w.lower() in concept_id for w in terms):
            return True
        return False
def generate_cost(self, fact, label, text_blocks=None):
return LegalCost(fact, label)
class LegalCost(Expense):
def __init__(self, fact, label):
super().__init__(fact, label)
avg_line_length: 35.5 | max_line_length: 90 | alphanum_fraction: 0.717569
hexsha: c21aa3df8e3e8af1ffaf783f3cb514e8dafea96b | size: 9,702 | ext: py | lang: Python | path: orchid_DiseasePredict/datasets/Attention_in_several_test/LSTM32_LSTM32_LSTM32_LSTM32_A_LSTM64_LSTM64_LSTM64.py | repo: chengzee/disease_predict @ d7a3c57b710ab2e93d56c8d73aeaa21120d3e98c | licenses: ["MIT"] | stars: null | issues: null | forks: null
import pandas as pd
import numpy as np
import csv
from keras.layers import Dense, Lambda, dot, Activation, concatenate
from keras.layers import Layer
import keras.backend as K
# Parameters
# -------------------------------------------------------------------------------------------------------------------
bed = [631, 742, 701, 759, 765, 698]
lookback_days = 3
datasInADay = 288
input_dim = 3
secondsInADay = 60*60*24
# Define the attention mechanism (return_sequences=True)
class attention(Layer):
def __init__(self,**kwargs):
super(attention,self).__init__(**kwargs)
def build(self,input_shape):
self.W=self.add_weight(name="att_weight",shape=(input_shape[-1],1),initializer="normal")
self.b=self.add_weight(name="att_bias",shape=(input_shape[1],1),initializer="zeros")
super(attention, self).build(input_shape)
def call(self,x):
et=K.squeeze(K.tanh(K.dot(x,self.W)+self.b),axis=-1)
at=K.softmax(et)
at=K.expand_dims(at,axis=-1)
output=x*at
return K.sum(output,axis=1, keepdims=True)
def compute_output_shape(self,input_shape):
return (input_shape)
def get_config(self):
return super(attention,self).get_config()
# # Define the attention mechanism (return_sequences=False)
# class attention(Layer):
# def __init__(self,**kwargs):
# super(attention,self).__init__(**kwargs)
# def build(self,input_shape):
# self.W=self.add_weight(name="att_weight",shape=(input_shape[-1],1),initializer="normal")
# self.b=self.add_weight(name="att_bias",shape=(input_shape[1],1),initializer="zeros")
# super(attention, self).build(input_shape)
# def call(self,x):
# et=K.squeeze(K.tanh(K.dot(x,self.W)+self.b),axis=-1)
# at=K.softmax(et)
# at=K.expand_dims(at,axis=-1)
# output=x*at
# return K.sum(output,axis=1)
# def compute_output_shape(self,input_shape):
# return (input_shape[0],input_shape[-1])
# def get_config(self):
# return super(attention,self).get_config()
# np.random.seed(1)
# Read the "recent deaths (last-three-day statistics)" CSV
targetRecent = pd.read_csv("targetRecent.csv")
# Convert to a numpy array
targetRecent_arr = np.array(targetRecent)
# print(targetRecent_arr)
# -------------------------------------------------------------------------------------------------------------------
# Build the dataset
def generator_with_augmentation(inputdata, starttime, lookback, dead_recently, samp_list_1, samp_list_0, targ_list_1, targ_list_0):  # inputs accumulate in samp_list_*; targets in targ_list_*
for i in range(datasInADay):
rows = np.arange(i+starttime, i+starttime+lookback)
if np.count_nonzero(inputdata[rows, 4] == 0) <= 316:
if dead_recently == 1:
samp_list_1.append(inputdata[rows, 1:4])
targ_list_1.append(dead_recently)
if dead_recently == 0:
samp_list_0.append(inputdata[rows, 1:4])
targ_list_0.append(dead_recently)
return samp_list_1, samp_list_0, targ_list_1, targ_list_0
samples_1 = []
samples_0 = []
targets_1 = []
targets_0 = []
# Create the CSV for test results
with open("predict_with_attention.csv", 'a+') as predictcsv:
writer = csv.writer(predictcsv)
writer.writerow(["第n次,LSTM32_32_32_32_A_LSTM64_64_64", "test_acc", "True Positive", "True Negative", "False Positive", "False Negative", "Precision", "Recall"])
for n in range(len(targetRecent_arr)): # 近期死亡統計數量
for m in range(len(bed)): # 試驗植床總共六床
if targetRecent_arr[n, 2] == bed[m]:
paddeddata_arr = np.array(pd.read_csv("addfeature9{}.csv".format(m+1)))
# print("BedPlant:{}".format(m+1))
# ----------------------------------------------------------------------------------------------------------------------------------------
            # Mean normalization to [-1, 1]
data_min = np.min(paddeddata_arr[:, 1:4], axis=0)
data_max = np.max(paddeddata_arr[:, 1:4], axis=0)
data_mean = np.mean(paddeddata_arr[:, 1:4], axis=0)
# print(data_min)
# print(data_max)
# print(data_mean)
paddeddata_arr[:, 1:4] = (paddeddata_arr[:, 1:4]-data_mean)/(data_max-data_min)
# ----------------------------------------------------------------------------------------------------------------------------------------
            where = np.searchsorted(paddeddata_arr[:, 0], targetRecent_arr[n, 0]-secondsInADay*lookback_days)  # 604800 s = 7 days; 432000 s = 5 days; 259200 s = 3 days
# print("where:{}".format(where))
samples_1, samples_0, targets_1, targets_0 = generator_with_augmentation(paddeddata_arr, starttime=where, lookback=datasInADay*lookback_days, dead_recently=targetRecent_arr[n, 1], samp_list_1=samples_1, samp_list_0=samples_0, targ_list_1=targets_1, targ_list_0=targets_0)
# Convert to numpy arrays
samples_1_arr = np.array(samples_1)
samples_0_arr = np.array(samples_0)
targets_1_arr = np.array(targets_1)
targets_0_arr = np.array(targets_0)
print("samples_1_arr.shape:{}".format(samples_1_arr.shape))
print("samples_0_arr.shape:{}".format(samples_0_arr.shape))
print("targets_1_arr.shape:{}".format(targets_1_arr.shape))
print("targets_0_arr.shape:{}".format(targets_0_arr.shape))
print(np.count_nonzero(targets_1_arr==1))
print(np.count_nonzero(targets_0_arr==1))
# # -------------------------------------------------------------------------------------------------------------------
# # # train test split
x_train_arr = np.concatenate((samples_1_arr[:int(len(samples_1_arr)*0.7)], samples_0_arr[:int(len(samples_1_arr)*0.7)]), axis=0)
y_train_arr = np.concatenate((targets_1_arr[:int(len(samples_1_arr)*0.7)], targets_0_arr[:int(len(samples_1_arr)*0.7)]), axis=0)
x_test_arr = np.concatenate((samples_1_arr[int(len(samples_1_arr)*0.7):], samples_0_arr[int(len(samples_1_arr)*0.7):]), axis=0)
y_test_arr = np.concatenate((targets_1_arr[int(len(samples_1_arr)*0.7):], targets_0_arr[int(len(samples_1_arr)*0.7):]), axis=0)
print("x_train_arr.shape:{}".format(x_train_arr.shape))
print("y_train_arr.shape:{}".format(y_train_arr.shape))
print("x_test_arr.shape:{}".format(x_test_arr.shape))
print("y_test_arr.shape:{}".format(y_test_arr.shape))
# -------------------------------------------------------------------------------------------------------------------
# tf.keras model
for t in range(10):  # number of repetitions
    # Train and validate the LSTM model
from keras.models import Sequential
from keras import layers
from keras.optimizers import RMSprop, Adam
from keras.callbacks import ModelCheckpoint
model = Sequential()
model.add(layers.LSTM(32,
input_shape=(datasInADay*lookback_days, input_dim), # (288*3, 3)
return_sequences=True,
))
model.add(layers.LSTM(32,
return_sequences=True,
))
model.add(layers.LSTM(32,
return_sequences=True,
))
model.add(layers.LSTM(32,
return_sequences=True,
))
model.add(attention())
model.add(layers.LSTM(64,
return_sequences=True,
))
model.add(layers.LSTM(64,
return_sequences=True,
))
model.add(layers.LSTM(64,
return_sequences=False,
))
model.add(layers.Dense(1, activation='sigmoid'))
model.summary()
model.compile(optimizer=Adam(),
loss = 'binary_crossentropy',
metrics=['accuracy'])
# -------------------------------------------------------------------------------------------------------------------
# checkpoint
filepath="weights.best.hdf5"
checkpoint = ModelCheckpoint(filepath,
monitor='val_accuracy',
verbose=1,
save_best_only=True,
mode='max')
callbacks_list = [checkpoint]
# fit the model
history = model.fit(x_train_arr, y_train_arr,
epochs=200,
batch_size=256,
validation_split=0.4,
callbacks=callbacks_list,
verbose=1)
model.load_weights("weights.best.hdf5")
print("第{}次結果,選用最好的val_acc來對testSet做預測:".format(t+1))
test_score = model.evaluate(x_test_arr, y_test_arr)
print("test_score:{}".format(test_score))
    # Prediction results
pred = model.predict(x_test_arr)
TrueP = 0
TrueN = 0
FalseP = 0
FalseN = 0
for pp in range(len(pred)):
if(pred[pp]>0.5 and y_test_arr[pp]==1):
TrueP += 1
if(pred[pp]>0.5 and y_test_arr[pp]==0):
FalseP += 1
if(pred[pp]<=0.5 and y_test_arr[pp]==1):
FalseN += 1
if(pred[pp]<=0.5 and y_test_arr[pp]==0):
TrueN += 1
print("test數量:{}".format(len(x_test_arr)))
print("True_Positive:{}".format(TrueP))
print("True_Nagitive:{}".format(TrueN))
print("False_Positive:{}".format(FalseP))
print("False_Nagitive:{}".format(FalseN))
precision = TrueP/(TrueP+FalseP)
recall = TrueP/(TrueP+FalseN)
print("Precision:{}".format(precision))
print("Recall:{}".format(recall))
with open("predict_with_attention.csv", 'a+') as predictcsv:
writer = csv.writer(predictcsv)
        # writer.writerow(["run n", "test_acc", "True Positive", "True Negative", "False Positive", "False Negative", "Precision", "Recall"])
writer.writerow([t+1, test_score[1], TrueP, TrueN, FalseP, FalseN, precision, recall])
avg_line_length: 45.981043 | max_line_length: 283 | alphanum_fraction: 0.56813
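The custom `attention` layer above computes `et = tanh(x·W + b)`, softmaxes `et` over the time axis, and returns the attention-weighted sum with `keepdims=True`, giving an output of shape `(batch, 1, features)` that can feed the following `return_sequences=True` LSTM stack. A small NumPy sketch of the same arithmetic (shapes are illustrative):

```python
import numpy as np

batch, timesteps, features = 2, 5, 4
x = np.random.randn(batch, timesteps, features)
W = np.random.randn(features, 1)   # corresponds to att_weight
b = np.zeros((timesteps, 1))       # corresponds to att_bias

et = np.tanh(x @ W + b).squeeze(-1)                       # (batch, timesteps)
at = np.exp(et) / np.exp(et).sum(axis=1, keepdims=True)   # softmax over time
out = (x * at[..., None]).sum(axis=1, keepdims=True)      # weighted sum

print(out.shape)  # (2, 1, 4), matching the Keras layer with keepdims=True
```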
hexsha: 97c5a261f85cac24dc347ea904bc76536a8b807c | size: 129 | ext: py | lang: Python | path: irekua_rest_api/serializers/users/__init__.py | repo: IslasGECI/irekua-rest-api @ 35cf5153ed7f54d12ebad2ac07d472585f04e3e7 | licenses: ["BSD-4-Clause"] | stars: null | issues: 11 (2020-03-28T18:51:50.000Z – 2022-01-13T01:47:40.000Z) | forks: 1 (2021-05-06T19:38:14.000Z – 2021-05-06T19:38:14.000Z)
from . import institutions
from . import roles
from . import users
__all__ = [
'institutions',
'roles',
'users',
]
avg_line_length: 11.727273 | max_line_length: 26 | alphanum_fraction: 0.627907
hexsha: f34a9d4be7ecadc3a01f8a91f41b6ddcadd6e69e | size: 9,999 | ext: py | lang: Python | path: venv/Lib/site-packages/pip/_internal/operations/freeze.py | repo: EkremBayar/bayar @ aad1a32044da671d0b4f11908416044753360b39 | licenses: ["MIT"] | stars: 38,667 (2015-01-01T00:15:34.000Z – 2022-03-31T22:57:03.000Z) | issues: 1,192 (2015-01-03T07:59:34.000Z – 2022-03-31T13:22:26.000Z) | forks: 11,269 (2015-01-01T08:41:17.000Z – 2022-03-31T16:12:52.000Z)
import collections
import logging
import os
from typing import (
Container,
Dict,
Iterable,
Iterator,
List,
Optional,
Set,
Tuple,
Union,
)
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import Distribution, Requirement, RequirementParseError
from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
install_req_from_editable,
install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.direct_url_helpers import (
direct_url_as_pep440_direct_reference,
dist_get_direct_url,
)
from pip._internal.utils.misc import dist_is_editable, get_installed_distributions
logger = logging.getLogger(__name__)
RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
def freeze(
requirement=None, # type: Optional[List[str]]
find_links=None, # type: Optional[List[str]]
local_only=False, # type: bool
user_only=False, # type: bool
paths=None, # type: Optional[List[str]]
isolated=False, # type: bool
exclude_editable=False, # type: bool
skip=() # type: Container[str]
):
# type: (...) -> Iterator[str]
find_links = find_links or []
for link in find_links:
yield f'-f {link}'
installations = {} # type: Dict[str, FrozenRequirement]
for dist in get_installed_distributions(
local_only=local_only,
skip=(),
user_only=user_only,
paths=paths
):
try:
req = FrozenRequirement.from_dist(dist)
except RequirementParseError as exc:
# We include dist rather than dist.project_name because the
# dist string includes more information, like the version and
# location. We also include the exception message to aid
# troubleshooting.
logger.warning(
'Could not generate requirement for distribution %r: %s',
dist, exc
)
continue
if exclude_editable and req.editable:
continue
installations[req.canonical_name] = req
if requirement:
# the options that don't get turned into an InstallRequirement
# should only be emitted once, even if the same option is in multiple
# requirements files, so we need to keep track of what has been emitted
# so that we don't emit it again if it's seen again
emitted_options = set() # type: Set[str]
# keep track of which files a requirement is in so that we can
# give an accurate warning if a requirement appears multiple times.
req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
for req_file_path in requirement:
with open(req_file_path) as req_file:
for line in req_file:
if (not line.strip() or
line.strip().startswith('#') or
line.startswith((
'-r', '--requirement',
'-f', '--find-links',
'-i', '--index-url',
'--pre',
'--trusted-host',
'--process-dependency-links',
'--extra-index-url',
'--use-feature'))):
line = line.rstrip()
if line not in emitted_options:
emitted_options.add(line)
yield line
continue
if line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
line_req = install_req_from_editable(
line,
isolated=isolated,
)
else:
line_req = install_req_from_line(
COMMENT_RE.sub('', line).strip(),
isolated=isolated,
)
if not line_req.name:
logger.info(
"Skipping line in requirement file [%s] because "
"it's not clear what it would install: %s",
req_file_path, line.strip(),
)
logger.info(
" (add #egg=PackageName to the URL to avoid"
" this warning)"
)
else:
line_req_canonical_name = canonicalize_name(
line_req.name)
if line_req_canonical_name not in installations:
# either it's not installed, or it is installed
# but has been processed already
if not req_files[line_req.name]:
logger.warning(
"Requirement file [%s] contains %s, but "
"package %r is not installed",
req_file_path,
COMMENT_RE.sub('', line).strip(),
line_req.name
)
else:
req_files[line_req.name].append(req_file_path)
else:
yield str(installations[
line_req_canonical_name]).rstrip()
del installations[line_req_canonical_name]
req_files[line_req.name].append(req_file_path)
# Warn about requirements that were included multiple times (in a
# single requirements file or in different requirements files).
for name, files in req_files.items():
if len(files) > 1:
logger.warning("Requirement %s included multiple times [%s]",
name, ', '.join(sorted(set(files))))
yield(
'## The following requirements were added by '
'pip freeze:'
)
for installation in sorted(
installations.values(), key=lambda x: x.name.lower()):
if installation.canonical_name not in skip:
yield str(installation).rstrip()
def get_requirement_info(dist):
# type: (Distribution) -> RequirementInfo
"""
Compute and return values (req, editable, comments) for use in
FrozenRequirement.from_dist().
"""
if not dist_is_editable(dist):
return (None, False, [])
location = os.path.normcase(os.path.abspath(dist.location))
from pip._internal.vcs import RemoteNotFoundError, vcs
vcs_backend = vcs.get_backend_for_dir(location)
if vcs_backend is None:
req = dist.as_requirement()
logger.debug(
'No VCS found for editable requirement "%s" in: %r', req,
location,
)
comments = [
f'# Editable install with no version control ({req})'
]
return (location, True, comments)
try:
req = vcs_backend.get_src_requirement(location, dist.project_name)
except RemoteNotFoundError:
req = dist.as_requirement()
comments = [
'# Editable {} install with no remote ({})'.format(
type(vcs_backend).__name__, req,
)
]
return (location, True, comments)
except BadCommand:
logger.warning(
'cannot determine version of editable source in %s '
'(%s command not found in path)',
location,
vcs_backend.name,
)
return (None, True, [])
except InstallationError as exc:
logger.warning(
"Error when trying to get requirement for VCS system %s, "
"falling back to uneditable format", exc
)
else:
return (req, True, [])
logger.warning(
'Could not determine repository location of %s', location
)
comments = ['## !! Could not determine repository location']
return (None, False, comments)
class FrozenRequirement:
def __init__(self, name, req, editable, comments=()):
# type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
self.name = name
self.canonical_name = canonicalize_name(name)
self.req = req
self.editable = editable
self.comments = comments
@classmethod
def from_dist(cls, dist):
# type: (Distribution) -> FrozenRequirement
# TODO `get_requirement_info` is taking care of editable requirements.
# TODO This should be refactored when we will add detection of
# editable that provide .dist-info metadata.
req, editable, comments = get_requirement_info(dist)
if req is None and not editable:
# if PEP 610 metadata is present, attempt to use it
direct_url = dist_get_direct_url(dist)
if direct_url:
req = direct_url_as_pep440_direct_reference(
direct_url, dist.project_name
)
comments = []
if req is None:
# name==version requirement
req = dist.as_requirement()
return cls(dist.project_name, req, editable, comments=comments)
def __str__(self):
# type: () -> str
req = self.req
if self.editable:
req = f'-e {req}'
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
avg_line_length: 37.732075 | max_line_length: 86 | alphanum_fraction: 0.532053
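`freeze()` above is a generator of requirement lines; pip's own `freeze` command consumes it roughly as sketched below. This is pip-internal, unstable API, so the call is an illustration rather than a supported interface:

```python
# Sketch only: pip._internal is not a public API and changes across releases.
from pip._internal.operations.freeze import freeze

for line in freeze(skip={'pip', 'setuptools', 'wheel'}):
    print(line)  # e.g. "requests==2.25.1", one line per installed package
```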
hexsha: 61a70ed2641ed0cca6dbd51c5fcf9667d90d9e0b | size: 429 | ext: py | lang: Python | path: src/templates/base.py | repo: vmesel/jsv @ 373f6635061fabe636a87227a71b758e135ce2b6 | licenses: ["MIT"] | stars: 9 (2020-08-19T14:08:58.000Z – 2022-02-14T05:48:04.000Z) | issues: 11 (2020-08-20T14:46:43.000Z – 2020-09-07T20:43:39.000Z) | forks: 1 (2020-08-26T21:57:51.000Z – 2020-08-26T21:57:51.000Z)
dataset_metadata = {
{% for key, property in properties %}
"{{ key }}": {{ property|getDefault|stringify|safe|replace("null", "None") }},{% if property|getDescription %} # {{ property|getDescription }}{% endif %}
{% for example in property.examples %}
{% if not loop.first or property.default %}
# [example] "{{ key }}": {{ example|cleanExample|stringify|safe }}
{% endif %}
{% endfor %}
{% endfor %}
}
avg_line_length: 39 | max_line_length: 158 | alphanum_fraction: 0.606061
hexsha: 5e81e5c89f3467f06c1f52783f6be5f175e28145 | size: 1,656 | ext: py | lang: Python | path: sample/update_channel.py | repo: tora01/SkillLab @ 61ebfaf45c503b9e6f4a3d05a7edd4de2fcad93e | licenses: ["CC0-1.0"] | stars: 2 (2020-09-09T02:40:23.000Z – 2021-09-12T18:08:15.000Z) | issues: 1 (2021-09-14T09:36:38.000Z – 2021-09-14T09:36:38.000Z) | forks: 19 (2021-09-07T06:11:29.000Z – 2021-09-07T07:45:08.000Z)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import time  # used by the retry loop in post2ThingSpeak
import requests
api_key = ''
channel_id = '1499081'
data_file = "./data/cpu_temp.dat"
_ts_base_url = "https://api.thingspeak.com"
ts_update_url = _ts_base_url + "/update"
# GET https://api.thingspeak.com/update?api_key=MSUJ80Z21B6XIS7G&field1=0
# Headers for posting data over HTTP
headers = {'X-THINGSPEAKAPIKEY': api_key}
#------
# Read the CPU die temperature values captured by powermetrics and return them as a list
# Argument: path to the file containing the data
# return: list of cpu_temp values
# Example line: 2021-09-07T23:59:36 JST CPU die temperature: 69.77 C
#------
def getCpuTempFromFile(filename):
_cpu_temps = []
    # Check that the file exists
is_file = os.path.exists(filename)
    if not is_file:
        print("Please specify a valid file name.")
        sys.exit(1)
    # Open the file and read the data
with open(filename) as f:
_lines = f.readlines()
for _line in _lines:
_data = _line.split()
_cpu_temps.append(_data[5])
return _cpu_temps
#------
# Post the given data to ThingSpeak
# Arguments: req_url, headers, post_data
#------
def post2ThingSpeak(req_url, headers, post_data):
while True:
response = requests.post(req_url, headers=headers, data=post_data)
if response.text != '0':
break
time.sleep(1)
# Main processing
cpu_temps = []
print("Posting the data in " + data_file + " to ThingSpeak.")
# Read the CPU temperature values from the file
cpu_temps = getCpuTempFromFile(data_file)
print("There are " + str(len(cpu_temps)) + " CPU temperature records.")
# Print the full contents of the data
print(cpu_temps)
# Post the latest (last) record to ThingSpeak
# Prepare the data to post
post_data = {'field1': cpu_temps[-1]}
post2ThingSpeak(ts_update_url, headers, post_data)
print("Posted CPU temperature: " + str(cpu_temps[-1]))
sys.exit(0)
avg_line_length: 22.08 | max_line_length: 74 | alphanum_fraction: 0.673913
hexsha: 0527516423e3578a88b2f2141980e6684211a408 | size: 1,235 | ext: py | lang: Python | path: main.py | repo: mboyr4z/Yolov5_Detect_Toolkit @ 569d7663616c67f31cdf11b2ba4257f0a1566415 | licenses: ["MIT"] | stars: 8 (2021-07-18T11:55:39.000Z – 2022-03-25T19:17:30.000Z) | issues: null | forks: null
#python detect.py --weights yolov5s.pt --img 640 --conf 0.25 --source data/images/
# if __name__ == "__main__":
# os.chdir("Yolov5_Detect_Toolkit/yolov5-master")
# os.system('cmd /c "python detect.py --weights ../last.pt --img 640 --conf 0.25 --source ../val"')
import os
import sys  # used for sys.argv and sys.exit below
from PyQt5 import QtWidgets
from degiskenler.degiskenler import degiskenClass
from design.tasarim import Ui_Form
from fonksiyonlar.ImageTespit import *
from fonksiyonlar.WebCamTespit import *
from fonksiyonlar.DosyaKonumBulma import *
from fonksiyonlar.VideoTespit import *
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
    # Object definitions
degiskenler = degiskenClass()
ui = Ui_Form()
    # Click event handlers
ui.btn_weight.clicked.connect(lambda : weightKonumBulma(degiskenler,ui))
ui.btn_image.clicked.connect(lambda: imageKonumBulma(degiskenler, ui))
ui.btn_video.clicked.connect(lambda: videoKonumBulma(degiskenler, ui))
ui.btn_imageBasla.clicked.connect(lambda: imageTespiteBasla(degiskenler,ui))
ui.btn_videoBasla.clicked.connect(lambda: VideoTespiteBasla(degiskenler,ui))
ui.btn_webCamBasla.clicked.connect(lambda: WebCamTespiteBasla(degiskenler,ui))
sys.exit(app.exec_())
avg_line_length: 36.323529 | max_line_length: 103 | alphanum_fraction: 0.756275
hexsha: fb919028bdb07eadcfed4582db6ee7f7d0d310ff | size: 435 | ext: py | lang: Python | path: preprocessing/__init__.py | repo: jarednielsen/speech2phone @ 4214446f16c7cba573d500d5e88a9b89640f725e | licenses: ["MIT"] | stars: 1 (2021-01-19T13:43:27.000Z – 2021-01-19T13:43:27.000Z) | issues: null | forks: null
"""Everything imported here will be available under `speech2phone.preprocessing`. So even though the function
`test_preprocess` is in the filesystem as `speech2phone/preprocessing/preprocessing.py -> test_preprocess`, it is
available in Python as `speech2phone.preprocessing.test_preprocess`.
Jared Nielsen. 2019-02-05.
"""
# TL;DR: Put everything here. Remember the dot. For example, we used to have:
# from .preprocessing import *
avg_line_length: 43.5 | max_line_length: 113 | alphanum_fraction: 0.786207
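The re-export pattern the docstring describes, in miniature; file contents are assumed for illustration:

```python
# speech2phone/preprocessing/preprocessing.py (assumed contents):
def test_preprocess():
    return "preprocessed"

# speech2phone/preprocessing/__init__.py (the pattern described above;
# note the leading dot, i.e. a relative import):
#     from .preprocessing import *
#
# Caller side, with the inner file name hidden by the package:
#     from speech2phone.preprocessing import test_preprocess
```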
52e907191d5a563b948c97062e88adbd595e8d9e
| 12,231
|
py
|
Python
|
sdk/python/pulumi_kubernetes/core/v1/PersistentVolume.py
|
csssuf/pulumi-kubernetes
|
8d007166d0e8968fcabaeecd0cee13f9c08d97f1
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_kubernetes/core/v1/PersistentVolume.py
|
csssuf/pulumi-kubernetes
|
8d007166d0e8968fcabaeecd0cee13f9c08d97f1
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_kubernetes/core/v1/PersistentVolume.py
|
csssuf/pulumi-kubernetes
|
8d007166d0e8968fcabaeecd0cee13f9c08d97f1
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ... import meta as _meta
from ._inputs import *
__all__ = ['PersistentVolumeArgs', 'PersistentVolume']
@pulumi.input_type
class PersistentVolumeArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
spec: Optional[pulumi.Input['PersistentVolumeSpecArgs']] = None):
"""
The set of arguments for constructing a PersistentVolume resource.
:param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
:param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
:param pulumi.Input['PersistentVolumeSpecArgs'] spec: Spec defines a specification of a persistent volume owned by the cluster. Provisioned by an administrator. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistent-volumes
"""
if api_version is not None:
pulumi.set(__self__, "api_version", 'v1')
if kind is not None:
pulumi.set(__self__, "kind", 'PersistentVolume')
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if spec is not None:
pulumi.set(__self__, "spec", spec)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
"""
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
"""
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['PersistentVolumeSpecArgs']]:
"""
Spec defines a specification of a persistent volume owned by the cluster. Provisioned by an administrator. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistent-volumes
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['PersistentVolumeSpecArgs']]):
pulumi.set(self, "spec", value)
class PersistentVolume(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_version: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[pulumi.InputType['_meta.v1.ObjectMetaArgs']]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['PersistentVolumeSpecArgs']]] = None,
__props__=None):
"""
PersistentVolume (PV) is a storage resource provisioned by an administrator. It is analogous to a node. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
:param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[pulumi.InputType['_meta.v1.ObjectMetaArgs']] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
:param pulumi.Input[pulumi.InputType['PersistentVolumeSpecArgs']] spec: Spec defines a specification of a persistent volume owned by the cluster. Provisioned by an administrator. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistent-volumes
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[PersistentVolumeArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
PersistentVolume (PV) is a storage resource provisioned by an administrator. It is analogous to a node. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes
:param str resource_name: The name of the resource.
:param PersistentVolumeArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PersistentVolumeArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_version: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[pulumi.InputType['_meta.v1.ObjectMetaArgs']]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['PersistentVolumeSpecArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PersistentVolumeArgs.__new__(PersistentVolumeArgs)
__props__.__dict__["api_version"] = 'v1'
__props__.__dict__["kind"] = 'PersistentVolume'
__props__.__dict__["metadata"] = metadata
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = None
super(PersistentVolume, __self__).__init__(
'kubernetes:core/v1:PersistentVolume',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PersistentVolume':
"""
Get an existing PersistentVolume resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PersistentVolumeArgs.__new__(PersistentVolumeArgs)
__props__.__dict__["api_version"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["metadata"] = None
__props__.__dict__["spec"] = None
__props__.__dict__["status"] = None
return PersistentVolume(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> pulumi.Output[Optional[str]]:
"""
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
"""
return pulumi.get(self, "api_version")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def metadata(self) -> pulumi.Output[Optional['_meta.v1.outputs.ObjectMeta']]:
"""
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter
def spec(self) -> pulumi.Output[Optional['outputs.PersistentVolumeSpec']]:
"""
Spec defines a specification of a persistent volume owned by the cluster. Provisioned by an administrator. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistent-volumes
"""
return pulumi.get(self, "spec")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional['outputs.PersistentVolumeStatus']]:
"""
Status represents the current information/status for the persistent volume. Populated by the system. Read-only. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistent-volumes
"""
return pulumi.get(self, "status")
avg_line_length: 55.094595 | max_line_length: 335 | alphanum_fraction: 0.69005
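A usage sketch for the generated `PersistentVolume` resource; the volume name, capacity, and host path are illustrative, and a configured Pulumi Kubernetes provider is assumed:

```python
# Sketch: declaring a hostPath-backed PV with the generated class;
# all values below are illustrative.
import pulumi_kubernetes as k8s

pv = k8s.core.v1.PersistentVolume(
    "example-pv",                       # Pulumi resource name (illustrative)
    metadata={"name": "example-pv"},
    spec={
        "capacity": {"storage": "1Gi"},
        "accessModes": ["ReadWriteOnce"],
        "hostPath": {"path": "/mnt/data"},
    },
)
```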
hexsha: 3dc49b0046223926dcf05baa9b6e0c46684795e4 | size: 10,051 | ext: py | lang: Python | path: Wrapping/Python/vtkmodules/gtk/GtkVTKRenderWindowInteractor.py | repo: txwhhny/vtk @ 854d9aa87b944bc9079510515996406b98b86f7c | licenses: ["BSD-3-Clause"] | stars: 1,755 (2015-01-03T06:55:00.000Z – 2022-03-29T05:23:26.000Z) | issues: 29 (2015-04-23T20:58:30.000Z – 2022-03-02T16:16:42.000Z) | forks: 1,044 (2015-01-05T22:48:27.000Z – 2022-03-31T02:38:26.000Z)
"""
Description:
Provides a pyGtk vtkRenderWindowInteractor widget. This embeds a
vtkRenderWindow inside a GTK widget and uses the
vtkGenericRenderWindowInteractor for the event handling. This is
based on vtkTkRenderWindow.py.
The class uses the gtkgl.GtkGLArea widget (gtkglarea). This avoids
a lot of problems with flicker.
There is a working example at the bottom.
Created by Prabhu Ramachandran, April 2002.
Bugs:
(*) There is a focus related problem. Tkinter has a focus object
that handles focus events. I don't know of an equivalent object
under GTK. So, when an 'enter_notify_event' is received on the
GtkVTKRenderWindow I grab the focus but I don't know what to do when
I get a 'leave_notify_event'.
(*) Will not work under Win32 because it uses the XID of a window in
OnRealize. Suggestions to fix this will be appreciated.
"""
import gtk, GDK, gtkgl
# vtkRenderWindow is instantiated in __init__, so it must be imported here.
from vtkmodules.vtkRenderingCore import vtkRenderWindow
from vtkmodules.vtkRenderingUI import vtkGenericRenderWindowInteractor
import math
class GtkVTKRenderWindowInteractor(gtkgl.GtkGLArea):
""" Embeds a vtkRenderWindow into a pyGTK widget and uses
vtkGenericRenderWindowInteractor for the event handling. This
class embeds the RenderWindow correctly. A __getattr__ hook is
provided that makes the class behave like a
vtkGenericRenderWindowInteractor."""
def __init__(self, *args):
l = list(args)
attr = (gtkgl.RGBA, gtkgl.DOUBLEBUFFER)
l.insert(0, self)
l.insert(1, attr)
apply(gtkgl.GtkGLArea.__init__, l)
self._RenderWindow = vtkRenderWindow()
# private attributes
self.__Created = 0
self._ActiveButton = 0
self._Iren = vtkGenericRenderWindowInteractor()
self._Iren.SetRenderWindow(self._RenderWindow)
self._Iren.AddObserver('CreateTimerEvent', self.CreateTimer)
self._Iren.AddObserver('DestroyTimerEvent', self.DestroyTimer)
self.ConnectSignals()
# need this to be able to handle key_press events.
self.set_flags(gtk.CAN_FOCUS)
# default size
self.set_usize(300, 300)
def set_usize(self, w, h):
gtkgl.GtkGLArea.set_usize(self, w, h)
self._RenderWindow.SetSize(w, h)
self._Iren.SetSize(w, h)
self._Iren.ConfigureEvent()
def ConnectSignals(self):
self.connect("realize", self.OnRealize)
self.connect("expose_event", self.OnExpose)
self.connect("configure_event", self.OnConfigure)
self.connect("button_press_event", self.OnButtonDown)
self.connect("button_release_event", self.OnButtonUp)
self.connect("motion_notify_event", self.OnMouseMove)
self.connect("enter_notify_event", self.OnEnter)
self.connect("leave_notify_event", self.OnLeave)
self.connect("key_press_event", self.OnKeyPress)
self.connect("delete_event", self.OnDestroy)
self.add_events(GDK.EXPOSURE_MASK| GDK.BUTTON_PRESS_MASK |
GDK.BUTTON_RELEASE_MASK |
GDK.KEY_PRESS_MASK |
GDK.POINTER_MOTION_MASK |
GDK.POINTER_MOTION_HINT_MASK |
GDK.ENTER_NOTIFY_MASK | GDK.LEAVE_NOTIFY_MASK)
def __getattr__(self, attr):
"""Makes the object behave like a
vtkGenericRenderWindowInteractor"""
if attr == '__vtk__':
return lambda t=self._Iren: t
elif hasattr(self._Iren, attr):
return getattr(self._Iren, attr)
else:
raise AttributeError(self.__class__.__name__ +
" has no attribute named " + attr)
def CreateTimer(self, obj, event):
gtk.timeout_add(10, self._Iren.TimerEvent)
def DestroyTimer(self, obj, event):
"""The timer is a one shot timer so will expire automatically."""
return 1
def GetRenderWindow(self):
return self._RenderWindow
def Render(self):
if self.__Created:
self._RenderWindow.Render()
def OnRealize(self, *args):
if self.__Created == 0:
# you can't get the xid without the window being realized.
self.realize()
win_id = str(self.get_window().xid)
self._RenderWindow.SetWindowInfo(win_id)
self._Iren.Initialize()
self.__Created = 1
return gtk.TRUE
def OnConfigure(self, wid, event=None):
sz = self._RenderWindow.GetSize()
if (event.width != sz[0]) or (event.height != sz[1]):
self._Iren.SetSize(event.width, event.height)
self._Iren.ConfigureEvent()
return gtk.TRUE
def OnExpose(self, *args):
self.Render()
return gtk.TRUE
def OnDestroy(self, event=None):
self.hide()
del self._RenderWindow
self.destroy()
return gtk.TRUE
def _GetCtrlShift(self, event):
ctrl, shift = 0, 0
if ((event.state & GDK.CONTROL_MASK) == GDK.CONTROL_MASK):
ctrl = 1
if ((event.state & GDK.SHIFT_MASK) == GDK.SHIFT_MASK):
shift = 1
return ctrl, shift
def OnButtonDown(self, wid, event):
"""Mouse button pressed."""
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
chr(0), 0, None)
button = event.button
if button == 3:
self._Iren.RightButtonPressEvent()
return gtk.TRUE
elif button == 1:
self._Iren.LeftButtonPressEvent()
return gtk.TRUE
elif button == 2:
self._Iren.MiddleButtonPressEvent()
return gtk.TRUE
else:
return gtk.FALSE
def OnButtonUp(self, wid, event):
"""Mouse button released."""
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
chr(0), 0, None)
button = event.button
if button == 3:
self._Iren.RightButtonReleaseEvent()
return gtk.TRUE
elif button == 1:
self._Iren.LeftButtonReleaseEvent()
return gtk.TRUE
elif button == 2:
self._Iren.MiddleButtonReleaseEvent()
return gtk.TRUE
return gtk.FALSE
def OnMouseMove(self, wid, event):
"""Mouse has moved."""
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
chr(0), 0, None)
self._Iren.MouseMoveEvent()
return gtk.TRUE
def OnEnter(self, wid, event):
"""Entering the vtkRenderWindow."""
self.grab_focus()
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
chr(0), 0, None)
self._Iren.EnterEvent()
return gtk.TRUE
def OnLeave(self, wid, event):
"""Leaving the vtkRenderWindow."""
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
chr(0), 0, None)
self._Iren.LeaveEvent()
return gtk.TRUE
def OnKeyPress(self, wid, event):
"""Key pressed."""
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
keycode, keysym = event.keyval, event.string
key = chr(0)
if keycode < 256:
key = chr(keycode)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
key, 0, keysym)
self._Iren.KeyPressEvent()
self._Iren.CharEvent()
return gtk.TRUE
def OnKeyRelease(self, wid, event):
"Key released."
m = self.get_pointer()
ctrl, shift = self._GetCtrlShift(event)
keycode, keysym = event.keyval, event.string
key = chr(0)
if keycode < 256:
key = chr(keycode)
self._Iren.SetEventInformationFlipY(m[0], m[1], ctrl, shift,
key, 0, keysym)
self._Iren.KeyReleaseEvent()
return gtk.TRUE
def Initialize(self):
if self.__Created:
self._Iren.Initialize()
def main():
from vtkmodules.vtkFiltersSources import vtkConeSource
from vtkmodules.vtkRenderingCore import vtkActor, vtkPolyDataMapper, vtkRenderer
# The main window
window = gtk.GtkWindow(gtk.WINDOW_TOPLEVEL)
window.set_title("A GtkVTKRenderWindow Demo!")
window.connect("destroy", gtk.mainquit)
window.connect("delete_event", gtk.mainquit)
window.set_border_width(10)
# A VBox into which widgets are packed.
vbox = gtk.GtkVBox(spacing=3)
window.add(vbox)
vbox.show()
# The GtkVTKRenderWindow
gvtk = GtkVTKRenderWindowInteractor()
#gvtk.SetDesiredUpdateRate(1000)
gvtk.set_usize(400, 400)
vbox.pack_start(gvtk)
gvtk.show()
gvtk.Initialize()
gvtk.Start()
# prevents 'q' from exiting the app.
gvtk.AddObserver("ExitEvent", lambda o,e,x=None: x)
# The VTK stuff.
cone = vtkConeSource()
cone.SetResolution(80)
coneMapper = vtkPolyDataMapper()
coneMapper.SetInputConnection(cone.GetOutputPort())
#coneActor = vtkLODActor()
coneActor = vtkActor()
coneActor.SetMapper(coneMapper)
coneActor.GetProperty().SetColor(0.5, 0.5, 1.0)
ren = vtkRenderer()
gvtk.GetRenderWindow().AddRenderer(ren)
ren.AddActor(coneActor)
# A simple quit button
quit = gtk.GtkButton("Quit!")
quit.connect("clicked", gtk.mainquit)
vbox.pack_start(quit)
quit.show()
# show the main window and start event processing.
window.show()
gtk.mainloop()
if __name__ == "__main__":
main()
# ---- petstagram/petstagram/common/helpers.py (batsandi/petstagram_2, MIT) ----
from django import forms
class BootstrapFormMixin:
fields = {}
def _init_bootstrap_form_controls(self):
for _, field in self.fields.items():
if not hasattr(field.widget, 'attrs'):
setattr(field.widget, 'attrs', {})
if 'class' not in field.widget.attrs:
field.widget.attrs['class'] = ''
field.widget.attrs['class'] += ' form-control'
class DisabledFieldsFormMixin:
disabled_fields = '__all__'
fields = {}
def _init_disabled_fields(self):
for name, field in self.fields.items():
if self.disabled_fields != '__all__' and name not in self.disabled_fields:
continue
if not hasattr(field.widget, 'attrs'):
setattr(field.widget, 'attrs', {})
if isinstance(field, forms.ChoiceField):
field.widget.attrs['disabled'] = 'readonly'
else:
field.widget.attrs['readonly'] = 'readonly'
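# Minimal usage sketch (illustrative only; "ExampleBootstrapForm" is not part
# of this module): apply the mixin and call its _init_* helper in __init__ so
# every widget picks up the 'form-control' CSS class.
class ExampleBootstrapForm(BootstrapFormMixin, forms.Form):
    name = forms.CharField()
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._init_bootstrap_form_controls()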
# ---- profiles_project/settings.py (ivancekic/profiles-rest-api, MIT) ----
"""
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_ny5+n$ujaxgx8u_f!plt=2%0gcu3xwstfhhgp_2dnqp7z47w0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'profiles_api',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'profiles_api.UserProfile'
# ---- examples/demo-client.py (edwinfeener/monolithe, BSD-3-Clause) ----
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import importlib
# Ho, Hi There!
# I guess you are coming from my good friend, the README file
# He's a nice guy, and I'm glad you have listened to him :)
# Please go through this example, and you'll uncover a bit
# of the awesomeness of Monolithe!
# manually import the tdldk
# You should not do it like this, of course; this is only for the demo.
# We don't want you to have to install the tdldk on your system,
# so we simply use some Python magic. You don't need to understand that.
# You would normally write something like
#
# from tdldk import v1_0 as tdldk
sys.path.insert(0, "%s/codegen/python" % os.path.abspath(os.path.dirname(__file__)))
tdldk = importlib.import_module("tdldk.v1_0")
# uncomment the two following lines to log the ReST communication.
# but don't do that now, it will screw up the output.
# continue to read ;)
#
# tdldk_utils = importlib.import_module("tdldk.utils")
# tdldk_utils.set_log_level(logging.DEBUG)
# create a session. The demo server doesn't care about your credentials,
# we put junk. The only important thing is of course the api_url
session = tdldk.GATDLSession(username="root", enterprise="nuagenetworks", password="password", api_url="http://127.0.0.1:5555")
# now we start the session. This would normally authenticate your credentials and return your root api object with an
# api key. But again, here we don't validate anything.
session.start()
# we now get the complete list of todo lists ("GATLDList" objects)
lists = session.root.lists.get()
# we print them
print()
print("Lists")
print("=====")
print()
# we loop on GATLDLists
for l in lists:
# we use the SDK to access the properties
print("\033[93m%s\033[0m: %s" % (l.title, l.description))
print()
tasks = l.tasks.get()
if tasks:
for t in tasks:
print(" [%s] \033[94m%s\033[0m: %s" % ("\033[92mx\033[0m" if t.is_complete() else " ", t.title, t.description))
print()
print()
# we ask the user to press enter to continue this demo
print("> Press enter to switch some task statuses")
sys.stdin.readline()
# then we loop on the tasks of the second list, and switch the status from DONE to TODO or vice versa
# You'll notice that we are using the is_complete() method. This convenience method has been added through the
# overrides coming from the user's SDK vanilla.
# yep, it's cool
# You'll also notice that we are not using the lists variable: we directly
# use the internal object list. This list has been populated during the get process.
# Of course, you can set a flag to not internally commit what you just fetched.
for t in session.root.lists[1].tasks:
# we update the attribute "status"
t.status = "TODO" if t.is_complete() else "DONE"
# then we simply save the changes
t.save()
# and we are done. You can restart the script and you'll see that
# some tasks have been marked as DONE.
print("Done! restart this script to see your changes :)\n")
# did you notice that you never saw a ReST call, or a json structure?
# but you can, by uncommenting the logging near the top of this file :)
# ---- kubernetes/client/models/v1_topology_spread_constraint.py (carloscastrojumo/python, Apache-2.0) ----
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.17
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1TopologySpreadConstraint(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'label_selector': 'V1LabelSelector',
'max_skew': 'int',
'topology_key': 'str',
'when_unsatisfiable': 'str'
}
attribute_map = {
'label_selector': 'labelSelector',
'max_skew': 'maxSkew',
'topology_key': 'topologyKey',
'when_unsatisfiable': 'whenUnsatisfiable'
}
def __init__(self, label_selector=None, max_skew=None, topology_key=None, when_unsatisfiable=None, local_vars_configuration=None): # noqa: E501
"""V1TopologySpreadConstraint - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._label_selector = None
self._max_skew = None
self._topology_key = None
self._when_unsatisfiable = None
self.discriminator = None
if label_selector is not None:
self.label_selector = label_selector
self.max_skew = max_skew
self.topology_key = topology_key
self.when_unsatisfiable = when_unsatisfiable
@property
def label_selector(self):
"""Gets the label_selector of this V1TopologySpreadConstraint. # noqa: E501
:return: The label_selector of this V1TopologySpreadConstraint. # noqa: E501
:rtype: V1LabelSelector
"""
return self._label_selector
@label_selector.setter
def label_selector(self, label_selector):
"""Sets the label_selector of this V1TopologySpreadConstraint.
:param label_selector: The label_selector of this V1TopologySpreadConstraint. # noqa: E501
:type: V1LabelSelector
"""
self._label_selector = label_selector
@property
def max_skew(self):
"""Gets the max_skew of this V1TopologySpreadConstraint. # noqa: E501
MaxSkew describes the degree to which pods may be unevenly distributed. It's the maximum permitted difference between the number of matching pods in any two topology domains of a given topology type. For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 1/1/0: | zone1 | zone2 | zone3 | | P | P | | - if MaxSkew is 1, incoming pod can only be scheduled to zone3 to become 1/1/1; scheduling it onto zone1(zone2) would make the ActualSkew(2-0) on zone1(zone2) violate MaxSkew(1). - if MaxSkew is 2, incoming pod can be scheduled onto any zone. It's a required field. Default value is 1 and 0 is not allowed. # noqa: E501
:return: The max_skew of this V1TopologySpreadConstraint. # noqa: E501
:rtype: int
"""
return self._max_skew
@max_skew.setter
def max_skew(self, max_skew):
"""Sets the max_skew of this V1TopologySpreadConstraint.
MaxSkew describes the degree to which pods may be unevenly distributed. It's the maximum permitted difference between the number of matching pods in any two topology domains of a given topology type. For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 1/1/0: | zone1 | zone2 | zone3 | | P | P | | - if MaxSkew is 1, incoming pod can only be scheduled to zone3 to become 1/1/1; scheduling it onto zone1(zone2) would make the ActualSkew(2-0) on zone1(zone2) violate MaxSkew(1). - if MaxSkew is 2, incoming pod can be scheduled onto any zone. It's a required field. Default value is 1 and 0 is not allowed. # noqa: E501
:param max_skew: The max_skew of this V1TopologySpreadConstraint. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and max_skew is None: # noqa: E501
raise ValueError("Invalid value for `max_skew`, must not be `None`") # noqa: E501
self._max_skew = max_skew
@property
def topology_key(self):
"""Gets the topology_key of this V1TopologySpreadConstraint. # noqa: E501
TopologyKey is the key of node labels. Nodes that have a label with this key and identical values are considered to be in the same topology. We consider each <key, value> as a \"bucket\", and try to put balanced number of pods into each bucket. It's a required field. # noqa: E501
:return: The topology_key of this V1TopologySpreadConstraint. # noqa: E501
:rtype: str
"""
return self._topology_key
@topology_key.setter
def topology_key(self, topology_key):
"""Sets the topology_key of this V1TopologySpreadConstraint.
TopologyKey is the key of node labels. Nodes that have a label with this key and identical values are considered to be in the same topology. We consider each <key, value> as a \"bucket\", and try to put balanced number of pods into each bucket. It's a required field. # noqa: E501
:param topology_key: The topology_key of this V1TopologySpreadConstraint. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and topology_key is None: # noqa: E501
raise ValueError("Invalid value for `topology_key`, must not be `None`") # noqa: E501
self._topology_key = topology_key
@property
def when_unsatisfiable(self):
"""Gets the when_unsatisfiable of this V1TopologySpreadConstraint. # noqa: E501
WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy the spread constraint. - DoNotSchedule (default) tells the scheduler not to schedule it - ScheduleAnyway tells the scheduler to still schedule it It's considered as \"Unsatisfiable\" if and only if placing incoming pod on any topology violates \"MaxSkew\". For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 3/1/1: | zone1 | zone2 | zone3 | | P P P | P | P | If WhenUnsatisfiable is set to DoNotSchedule, incoming pod can only be scheduled to zone2(zone3) to become 3/2/1(3/1/2) as ActualSkew(2-1) on zone2(zone3) satisfies MaxSkew(1). In other words, the cluster can still be imbalanced, but scheduler won't make it *more* imbalanced. It's a required field. # noqa: E501
:return: The when_unsatisfiable of this V1TopologySpreadConstraint. # noqa: E501
:rtype: str
"""
return self._when_unsatisfiable
@when_unsatisfiable.setter
def when_unsatisfiable(self, when_unsatisfiable):
"""Sets the when_unsatisfiable of this V1TopologySpreadConstraint.
WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy the spread constraint. - DoNotSchedule (default) tells the scheduler not to schedule it - ScheduleAnyway tells the scheduler to still schedule it It's considered as \"Unsatisfiable\" if and only if placing incoming pod on any topology violates \"MaxSkew\". For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 3/1/1: | zone1 | zone2 | zone3 | | P P P | P | P | If WhenUnsatisfiable is set to DoNotSchedule, incoming pod can only be scheduled to zone2(zone3) to become 3/2/1(3/1/2) as ActualSkew(2-1) on zone2(zone3) satisfies MaxSkew(1). In other words, the cluster can still be imbalanced, but scheduler won't make it *more* imbalanced. It's a required field. # noqa: E501
:param when_unsatisfiable: The when_unsatisfiable of this V1TopologySpreadConstraint. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and when_unsatisfiable is None: # noqa: E501
raise ValueError("Invalid value for `when_unsatisfiable`, must not be `None`") # noqa: E501
self._when_unsatisfiable = when_unsatisfiable
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1TopologySpreadConstraint):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1TopologySpreadConstraint):
return True
return self.to_dict() != other.to_dict()
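# Minimal construction sketch (illustrative only; not part of the generated
# module). The three required fields are set explicitly; label_selector is
# optional and left unset here.
if __name__ == "__main__":
    example = V1TopologySpreadConstraint(
        max_skew=1,
        topology_key="topology.kubernetes.io/zone",
        when_unsatisfiable="DoNotSchedule",
    )
    print(example.to_str())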
# ---- scripts/encode_renditions.py (cyberj0g/verification-classifier, MIT) ----
import argparse
import subprocess
from os import makedirs
import multiprocessing
from utils import *
parser = argparse.ArgumentParser(description='Generate renditions')
parser.add_argument('-i', "--input", action='store', help='Folder where the 1080p renditions are', type=str,
required=True)
parser.add_argument('-o', "--output", action='store', help='Folder where the renditions will be', type=str,
required=True)
parser.add_argument('-m', "--metadata", action='store', help='File where the metadata is', type=str, required=True)
args = parser.parse_args()
input_path = args.input
output_path = args.output
metadata_file = args.metadata
cpu_count = multiprocessing.cpu_count()
codec_to_use = 'libx264'
output_folders = {
'720p': '720p',
'480p': '480p',
'360p': '360p',
'240p': '240p',
'144p': '144p',
}
cpu_to_use = int(round(cpu_count / len(output_folders)))
files_and_renditions = get_files_and_renditions(metadata_file)
def create_folders():
for key, value in output_folders.items():
output_folder = output_path + '/' + value
if not exists(output_folder):
makedirs(output_folder)
def get_input_output_jobs():
ffmpeg_jobs = []
files = [f for f in listdir(input_path) if isfile(join(input_path, f)) and not f.startswith('.')]
for file in files:
bitrates = get_renditions(files_and_renditions[file.split('.mp4')[0]])
full_input_file = join(input_path, file)
job_output_folders = {}
for output_key, output_value in output_folders.items():
output_folder = join(output_path, output_value)
full_output_file = join(output_folder, file)
job_output_folders[output_key] = full_output_file
ffmpeg_jobs.append((full_input_file, codec_to_use, bitrates, job_output_folders))
return ffmpeg_jobs
def format_command(full_input_file, codec, bitrates, output_files):
print('processing {}'.format(full_input_file))
command = ['ffmpeg', '-y', '-i', '"' + full_input_file + '"',
'-c:v', codec, '-vf', 'scale=-2:720',
'-b:v', str(bitrates[720]) + 'K', '-c:a', 'copy', '"' + output_files['720p'] + '"',
'-c:v', codec, '-vf', 'scale=-2:480',
'-b:v', str(bitrates[480]) + 'K', '-c:a', 'copy', '"' + output_files['480p'] + '"',
'-c:v', codec, '-vf', 'scale=-2:360',
'-b:v', str(bitrates[360]) + 'K', '-c:a', 'copy', '"' + output_files['360p'] + '"',
'-c:v', codec, '-vf', 'scale=-2:240',
'-b:v', str(bitrates[240]) + 'K', '-c:a', 'copy', '"' + output_files['240p'] + '"',
'-c:v', codec, '-vf', 'scale=-2:144',
'-b:v', str(bitrates[144]) + 'K', '-c:a', 'copy', '"' + output_files['144p'] + '"',
]
return command
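# For illustration, with a hypothetical input "in.mp4" and a 3000K bitrate for
# the 720p rung, the joined command starts like:
#   ffmpeg -y -i "in.mp4" -c:v libx264 -vf scale=-2:720 -b:v 3000K -c:a copy "720p/in.mp4" ...
# i.e. the input is decoded once and encoded to all five renditions in a
# single ffmpeg invocation.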
def worker(full_input_file, codec, bitrates, output_files):
ffmpeg_command = ''
try:
ffmpeg_command = format_command(full_input_file, codec, bitrates, output_files)
ffmpeg = subprocess.Popen(' '.join(ffmpeg_command), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
out, err = ffmpeg.communicate()
        if ffmpeg.returncode != 0:
print('FFMPEG ERROR')
print('Out ', out)
print('Error', err)
except Exception as e:
print('Error processing ', full_input_file)
print('The error was ', e)
print('Executing ', ffmpeg_command)
if __name__ == "__main__":
    create_folders()
jobs = get_input_output_jobs()
with multiprocessing.Pool(cpu_to_use) as pool:
pool.starmap(worker, jobs)
# ---- demo/Model/mongo_model.py (GritYolo/work, Apache-2.0) ----
from pymongo import MongoClient
class Mongo():
    client = None
    db = None
    collection = None
    def makeConnection(self):
        self.client = MongoClient('localhost', 27017)
    def getDatabase(self, dbName):
        self.db = self.client[dbName]
        return self.db
    def getCollection(self, collectionName):
        self.collection = self.db[collectionName]
        return self.collection
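# Minimal usage sketch (illustrative only; assumes a MongoDB server on
# localhost:27017 and hypothetical 'shop'/'orders' names):
#
#   mongo = Mongo()
#   mongo.makeConnection()
#   db = mongo.getDatabase('shop')
#   orders = mongo.getCollection('orders')
#   print(orders.count_documents({}))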
# ---- test/python/circuit/test_circuit_operations.py (t-imamichi/qiskit-core, Apache-2.0) ----
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test Qiskit's QuantumCircuit class."""
from ddt import ddt, data
import numpy as np
from qiskit import BasicAer
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
from qiskit import execute
from qiskit.circuit import Gate, Instruction, Parameter, Measure
from qiskit.circuit.bit import Bit
from qiskit.circuit.classicalregister import Clbit
from qiskit.circuit.exceptions import CircuitError
from qiskit.circuit.quantumcircuit import BitLocations
from qiskit.circuit.quantumregister import AncillaQubit, AncillaRegister, Qubit
from qiskit.test import QiskitTestCase
from qiskit.circuit.library.standard_gates import SGate
from qiskit.quantum_info import Operator
@ddt
class TestCircuitOperations(QiskitTestCase):
"""QuantumCircuit Operations tests."""
@data(0, 1, -1, -2)
def test_append_resolves_integers(self, index):
"""Test that integer arguments to append are correctly resolved."""
# We need to assume that appending ``Bit`` instances will always work, so we have something
# to test against.
qubits = [Qubit(), Qubit()]
clbits = [Clbit(), Clbit()]
test = QuantumCircuit(qubits, clbits)
test.append(Measure(), [index], [index])
expected = QuantumCircuit(qubits, clbits)
expected.append(Measure(), [qubits[index]], [clbits[index]])
self.assertEqual(test, expected)
@data(np.int32(0), np.int8(-1), np.uint64(1))
def test_append_resolves_numpy_integers(self, index):
"""Test that Numpy's integers can be used to reference qubits and clbits."""
qubits = [Qubit(), Qubit()]
clbits = [Clbit(), Clbit()]
test = QuantumCircuit(qubits, clbits)
test.append(Measure(), [index], [index])
expected = QuantumCircuit(qubits, clbits)
expected.append(Measure(), [qubits[int(index)]], [clbits[int(index)]])
self.assertEqual(test, expected)
@data(
slice(0, 2),
slice(None, 1),
slice(1, None),
slice(None, None),
slice(0, 2, 2),
slice(2, -1, -1),
slice(1000, 1003),
)
def test_append_resolves_slices(self, index):
"""Test that slices can be used to reference qubits and clbits with the same semantics that
they have on lists."""
qregs = [QuantumRegister(2), QuantumRegister(1)]
cregs = [ClassicalRegister(1), ClassicalRegister(2)]
test = QuantumCircuit(*qregs, *cregs)
test.append(Measure(), [index], [index])
expected = QuantumCircuit(*qregs, *cregs)
for qubit, clbit in zip(expected.qubits[index], expected.clbits[index]):
expected.append(Measure(), [qubit], [clbit])
self.assertEqual(test, expected)
def test_append_resolves_scalar_numpy_array(self):
"""Test that size-1 Numpy arrays can be used to index arguments. These arrays can be passed
to ``int``, which means they sometimes might be involved in spurious casts."""
test = QuantumCircuit(1, 1)
test.append(Measure(), [np.array([0])], [np.array([0])])
expected = QuantumCircuit(1, 1)
expected.measure(0, 0)
self.assertEqual(test, expected)
@data([3], [-3], [0, 1, 3])
def test_append_rejects_out_of_range_input(self, specifier):
"""Test that append rejects an integer that's out of range."""
test = QuantumCircuit(2, 2)
with self.subTest("qubit"), self.assertRaisesRegex(CircuitError, "out of range"):
opaque = Instruction("opaque", len(specifier), 1, [])
test.append(opaque, specifier, [0])
with self.subTest("clbit"), self.assertRaisesRegex(CircuitError, "out of range"):
opaque = Instruction("opaque", 1, len(specifier), [])
test.append(opaque, [0], specifier)
def test_append_rejects_bits_not_in_circuit(self):
"""Test that append rejects bits that are not in the circuit."""
test = QuantumCircuit(2, 2)
with self.subTest("qubit"), self.assertRaisesRegex(CircuitError, "not in the circuit"):
test.append(Measure(), [Qubit()], [test.clbits[0]])
with self.subTest("clbit"), self.assertRaisesRegex(CircuitError, "not in the circuit"):
test.append(Measure(), [test.qubits[0]], [Clbit()])
with self.subTest("qubit list"), self.assertRaisesRegex(CircuitError, "not in the circuit"):
test.append(Measure(), [[test.qubits[0], Qubit()]], [test.clbits])
with self.subTest("clbit list"), self.assertRaisesRegex(CircuitError, "not in the circuit"):
test.append(Measure(), [test.qubits], [[test.clbits[0], Clbit()]])
def test_append_rejects_bit_of_wrong_type(self):
"""Test that append rejects bits of the wrong type in an argument list."""
qubits = [Qubit(), Qubit()]
clbits = [Clbit(), Clbit()]
test = QuantumCircuit(qubits, clbits)
with self.subTest("c to q"), self.assertRaisesRegex(CircuitError, "Incorrect bit type"):
test.append(Measure(), [clbits[0]], [clbits[1]])
with self.subTest("q to c"), self.assertRaisesRegex(CircuitError, "Incorrect bit type"):
test.append(Measure(), [qubits[0]], [qubits[1]])
with self.subTest("none to q"), self.assertRaisesRegex(CircuitError, "Incorrect bit type"):
test.append(Measure(), [Bit()], [clbits[0]])
with self.subTest("none to c"), self.assertRaisesRegex(CircuitError, "Incorrect bit type"):
test.append(Measure(), [qubits[0]], [Bit()])
with self.subTest("none list"), self.assertRaisesRegex(CircuitError, "Incorrect bit type"):
test.append(Measure(), [[qubits[0], Bit()]], [[clbits[0], Bit()]])
@data(0.0, 1.0, 1.0 + 0.0j, "0")
def test_append_rejects_wrong_types(self, specifier):
"""Test that various bad inputs are rejected, both given loose or in sublists."""
test = QuantumCircuit(2, 2)
# Use a default Instruction to be sure that there's not overridden broadcasting.
opaque = Instruction("opaque", 1, 1, [])
with self.subTest("q"), self.assertRaisesRegex(CircuitError, "Invalid bit index"):
test.append(opaque, [specifier], [0])
with self.subTest("c"), self.assertRaisesRegex(CircuitError, "Invalid bit index"):
test.append(opaque, [0], [specifier])
with self.subTest("q list"), self.assertRaisesRegex(CircuitError, "Invalid bit index"):
test.append(opaque, [[specifier]], [[0]])
with self.subTest("c list"), self.assertRaisesRegex(CircuitError, "Invalid bit index"):
test.append(opaque, [[0]], [[specifier]])
def test_adding_self(self):
"""Test that qc += qc finishes, which can be prone to infinite while-loops.
This can occur e.g. when a user tries
>>> other_qc = qc
>>> other_qc += qc # or qc2.extend(qc)
"""
qc = QuantumCircuit(1)
        qc.x(0)  # must contain at least one operation to end up in an infinite while-loop
# attempt addition, times out if qc is added via reference
qc += qc
# finally, qc should contain two X gates
self.assertEqual(["x", "x"], [x[0].name for x in qc.data])
def test_combine_circuit_common(self):
"""Test combining two circuits with same registers (inplace=False)."""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
new_circuit = qc1.combine(qc2)
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(new_circuit, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_combine_circuit_common_plus(self):
"""Test combining two circuits with same registers (as plus)."""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
new_circuit = qc1 + qc2
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(new_circuit, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_combine_circuit_fail(self):
"""Test combining two circuits fails if registers incompatible.
If two circuits have same name register of different size or type
it should raise a CircuitError.
"""
qr1 = QuantumRegister(1, "q")
qr2 = QuantumRegister(2, "q")
cr1 = ClassicalRegister(1, "q")
qc1 = QuantumCircuit(qr1)
qc2 = QuantumCircuit(qr2)
qcr3 = QuantumCircuit(cr1)
self.assertRaises(CircuitError, qc1.__add__, qc2)
self.assertRaises(CircuitError, qc1.__add__, qcr3)
def test_extend_circuit(self):
"""Test extending a circuit with same registers (in place add)."""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc1.extend(qc2)
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc1, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 2}) # changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_extend_circuit_iadd(self):
"""Test extending a circuit with same registers (in place add)."""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc1 += qc2
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc1, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 2}) # changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_extend_circuit_fail(self):
"""Test extending a circuit fails if registers incompatible.
If two circuits have same name register of different size or type
it should raise a CircuitError.
"""
qr1 = QuantumRegister(1, "q")
qr2 = QuantumRegister(2, "q")
cr1 = ClassicalRegister(1, "q")
qc1 = QuantumCircuit(qr1)
qc2 = QuantumCircuit(qr2)
qcr3 = QuantumCircuit(cr1)
self.assertRaises(CircuitError, qc1.__iadd__, qc2)
self.assertRaises(CircuitError, qc1.__iadd__, qcr3)
def test_extend_circuit_adds_qubits(self):
"""Test extending a circuits with differing registers adds the qubits."""
qr = QuantumRegister(1, "q")
qc = QuantumCircuit(qr)
empty = QuantumCircuit()
empty.extend(qc)
self.assertListEqual(empty.qubits, qr[:])
def test_compose_circuit(self):
"""Test composing two circuits"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc3 = qc1.compose(qc2)
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc3, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc3.count_ops(), {"h": 1, "measure": 2})
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_compose_circuit_and(self):
"""Test composing two circuits using & operator"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc3 = qc1 & qc2
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc3, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc3.count_ops(), {"h": 1, "measure": 2})
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_compose_circuit_iand(self):
"""Test composing circuits using &= operator (in place)"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc1 = QuantumCircuit(qr, cr)
qc2 = QuantumCircuit(qr, cr)
qc1.h(qr[0])
qc1.measure(qr[0], cr[0])
qc2.measure(qr[1], cr[1])
qc1 &= qc2
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc1, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 2}) # changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_compose_circuit_fail_circ_size(self):
"""Test composing circuit fails when number of wires in circuit is not enough"""
qr1 = QuantumRegister(2)
qr2 = QuantumRegister(4)
# Creating our circuits
qc1 = QuantumCircuit(qr1)
qc1.x(0)
qc1.h(1)
qc2 = QuantumCircuit(qr2)
qc2.h([1, 2])
qc2.cx(2, 3)
# Composing will fail because qc2 requires 4 wires
self.assertRaises(CircuitError, qc1.compose, qc2)
def test_compose_circuit_fail_arg_size(self):
"""Test composing circuit fails when arg size does not match number of wires"""
qr1 = QuantumRegister(2)
qr2 = QuantumRegister(2)
qc1 = QuantumCircuit(qr1)
qc1.h(0)
qc2 = QuantumCircuit(qr2)
qc2.cx(0, 1)
self.assertRaises(CircuitError, qc1.compose, qc2, qubits=[0])
def test_tensor_circuit(self):
"""Test tensoring two circuits"""
qc1 = QuantumCircuit(1, 1)
qc2 = QuantumCircuit(1, 1)
qc2.h(0)
qc2.measure(0, 0)
qc1.measure(0, 0)
qc3 = qc1.tensor(qc2)
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc3, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc3.count_ops(), {"h": 1, "measure": 2})
self.assertDictEqual(qc2.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictEqual(qc1.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_tensor_circuit_xor(self):
"""Test tensoring two circuits using ^ operator"""
qc1 = QuantumCircuit(1, 1)
qc2 = QuantumCircuit(1, 1)
qc2.h(0)
qc2.measure(0, 0)
qc1.measure(0, 0)
qc3 = qc1 ^ qc2
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc3, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc3.count_ops(), {"h": 1, "measure": 2})
self.assertDictEqual(qc2.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictEqual(qc1.count_ops(), {"measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_tensor_circuit_ixor(self):
"""Test tensoring two circuits using ^= operator"""
qc1 = QuantumCircuit(1, 1)
qc2 = QuantumCircuit(1, 1)
qc2.h(0)
qc2.measure(0, 0)
qc1.measure(0, 0)
qc1 ^= qc2
backend = BasicAer.get_backend("qasm_simulator")
shots = 1024
result = execute(qc1, backend=backend, shots=shots, seed_simulator=78).result()
counts = result.get_counts()
target = {"00": shots / 2, "01": shots / 2}
threshold = 0.04 * shots
self.assertDictEqual(qc1.count_ops(), {"h": 1, "measure": 2}) # changes "in-place"
self.assertDictEqual(qc2.count_ops(), {"h": 1, "measure": 1}) # no changes "in-place"
self.assertDictAlmostEqual(counts, target, threshold)
def test_measure_args_type_cohesion(self):
"""Test for proper args types for measure function."""
quantum_reg = QuantumRegister(3)
classical_reg_0 = ClassicalRegister(1)
classical_reg_1 = ClassicalRegister(2)
quantum_circuit = QuantumCircuit(quantum_reg, classical_reg_0, classical_reg_1)
quantum_circuit.h(quantum_reg)
with self.assertRaises(CircuitError) as ctx:
quantum_circuit.measure(quantum_reg, classical_reg_1)
self.assertEqual(ctx.exception.message, "register size error")
def test_copy_circuit(self):
"""Test copy method makes a copy"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc = QuantumCircuit(qr, cr)
qc.h(qr[0])
qc.measure(qr[0], cr[0])
qc.measure(qr[1], cr[1])
self.assertEqual(qc, qc.copy())
def test_copy_copies_registers(self):
"""Test copy copies the registers not via reference."""
qc = QuantumCircuit(1, 1)
copied = qc.copy()
copied.add_register(QuantumRegister(1, "additional_q"))
copied.add_register(ClassicalRegister(1, "additional_c"))
self.assertEqual(len(qc.qregs), 1)
self.assertEqual(len(copied.qregs), 2)
self.assertEqual(len(qc.cregs), 1)
self.assertEqual(len(copied.cregs), 2)
def test_measure_active(self):
"""Test measure_active
Applies measurements only to non-idle qubits. Creates a ClassicalRegister of size equal to
        the number of non-idle qubits to store the measured values.
"""
qr = QuantumRegister(4)
cr = ClassicalRegister(2, "measure")
circuit = QuantumCircuit(qr)
circuit.h(qr[0])
circuit.h(qr[2])
circuit.measure_active()
expected = QuantumCircuit(qr)
expected.h(qr[0])
expected.h(qr[2])
expected.add_register(cr)
expected.barrier()
expected.measure([qr[0], qr[2]], [cr[0], cr[1]])
self.assertEqual(expected, circuit)
def test_measure_active_copy(self):
"""Test measure_active copy
Applies measurements only to non-idle qubits. Creates a ClassicalRegister of size equal to
        the number of non-idle qubits to store the measured values.
"""
qr = QuantumRegister(4)
cr = ClassicalRegister(2, "measure")
circuit = QuantumCircuit(qr)
circuit.h(qr[0])
circuit.h(qr[2])
new_circuit = circuit.measure_active(inplace=False)
expected = QuantumCircuit(qr)
expected.h(qr[0])
expected.h(qr[2])
expected.add_register(cr)
expected.barrier()
expected.measure([qr[0], qr[2]], [cr[0], cr[1]])
self.assertEqual(expected, new_circuit)
self.assertFalse("measure" in circuit.count_ops().keys())
def test_measure_active_repetition(self):
"""Test measure_active in a circuit with a 'measure' creg.
        measure_active should be aware that the creg 'measure' might exist.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "measure")
circuit = QuantumCircuit(qr, cr)
circuit.h(qr)
circuit.measure_active()
self.assertEqual(len(circuit.cregs), 2) # Two cregs
self.assertEqual(len(circuit.cregs[0]), 2) # Both length 2
self.assertEqual(len(circuit.cregs[1]), 2)
def test_measure_all(self):
"""Test measure_all applies measurements to all qubits.
        Creates a ClassicalRegister of size equal to the total number of qubits to
store those measured values.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "meas")
circuit = QuantumCircuit(qr)
circuit.measure_all()
expected = QuantumCircuit(qr, cr)
expected.barrier()
expected.measure(qr, cr)
self.assertEqual(expected, circuit)
def test_measure_all_not_add_bits_equal(self):
"""Test measure_all applies measurements to all qubits.
Does not create a new ClassicalRegister if the existing one is big enough.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "meas")
circuit = QuantumCircuit(qr, cr)
circuit.measure_all(add_bits=False)
expected = QuantumCircuit(qr, cr)
expected.barrier()
expected.measure(qr, cr)
self.assertEqual(expected, circuit)
def test_measure_all_not_add_bits_bigger(self):
"""Test measure_all applies measurements to all qubits.
Does not create a new ClassicalRegister if the existing one is big enough.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(3, "meas")
circuit = QuantumCircuit(qr, cr)
circuit.measure_all(add_bits=False)
expected = QuantumCircuit(qr, cr)
expected.barrier()
expected.measure(qr, cr[0:2])
self.assertEqual(expected, circuit)
def test_measure_all_not_add_bits_smaller(self):
"""Test measure_all applies measurements to all qubits.
Raises an error if there are not enough classical bits to store the measurements.
"""
qr = QuantumRegister(3)
cr = ClassicalRegister(2, "meas")
circuit = QuantumCircuit(qr, cr)
with self.assertRaisesRegex(CircuitError, "The number of classical bits"):
circuit.measure_all(add_bits=False)
def test_measure_all_copy(self):
"""Test measure_all with inplace=False"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "meas")
circuit = QuantumCircuit(qr)
new_circuit = circuit.measure_all(inplace=False)
expected = QuantumCircuit(qr, cr)
expected.barrier()
expected.measure(qr, cr)
self.assertEqual(expected, new_circuit)
self.assertFalse("measure" in circuit.count_ops().keys())
def test_measure_all_repetition(self):
"""Test measure_all in a circuit with a 'measure' creg.
        measure_all should be aware that the creg 'measure' might exist.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "measure")
circuit = QuantumCircuit(qr, cr)
circuit.measure_all()
self.assertEqual(len(circuit.cregs), 2) # Two cregs
self.assertEqual(len(circuit.cregs[0]), 2) # Both length 2
self.assertEqual(len(circuit.cregs[1]), 2)
def test_remove_final_measurements(self):
"""Test remove_final_measurements
Removes all measurements at end of circuit.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "meas")
circuit = QuantumCircuit(qr, cr)
circuit.measure(qr, cr)
circuit.remove_final_measurements()
expected = QuantumCircuit(qr)
self.assertEqual(expected, circuit)
def test_remove_final_measurements_copy(self):
"""Test remove_final_measurements on copy
Removes all measurements at end of circuit.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "meas")
circuit = QuantumCircuit(qr, cr)
circuit.measure(qr, cr)
new_circuit = circuit.remove_final_measurements(inplace=False)
expected = QuantumCircuit(qr)
self.assertEqual(expected, new_circuit)
self.assertTrue("measure" in circuit.count_ops().keys())
def test_remove_final_measurements_copy_with_parameters(self):
"""Test remove_final_measurements doesn't corrupt ParameterTable
See https://github.com/Qiskit/qiskit-terra/issues/6108 for more details
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2, "meas")
theta = Parameter("theta")
circuit = QuantumCircuit(qr, cr)
circuit.rz(theta, qr)
circuit.measure(qr, cr)
circuit.remove_final_measurements()
copy = circuit.copy()
self.assertEqual(copy, circuit)
def test_remove_final_measurements_multiple_measures(self):
"""Test remove_final_measurements only removes measurements at the end of the circuit
remove_final_measurements should not remove measurements in the beginning or middle of the
circuit.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(1)
circuit = QuantumCircuit(qr, cr)
circuit.measure(qr[0], cr)
circuit.h(0)
circuit.measure(qr[0], cr)
circuit.h(0)
circuit.measure(qr[0], cr)
circuit.remove_final_measurements()
expected = QuantumCircuit(qr, cr)
expected.measure(qr[0], cr)
expected.h(0)
expected.measure(qr[0], cr)
expected.h(0)
self.assertEqual(expected, circuit)
def test_remove_final_measurements_5802(self):
"""Test remove_final_measurements removes classical bits
https://github.com/Qiskit/qiskit-terra/issues/5802.
"""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
circuit = QuantumCircuit(qr, cr)
circuit.measure(qr, cr)
circuit.remove_final_measurements()
self.assertEqual(circuit.cregs, [])
self.assertEqual(circuit.clbits, [])
def test_remove_final_measurements_7089(self):
"""Test remove_final_measurements removes resulting unused registers
even if not all bits were measured into.
https://github.com/Qiskit/qiskit-terra/issues/7089.
"""
circuit = QuantumCircuit(2, 5)
circuit.measure(0, 0)
circuit.measure(1, 1)
circuit.remove_final_measurements(inplace=True)
self.assertEqual(circuit.cregs, [])
self.assertEqual(circuit.clbits, [])
def test_remove_final_measurements_bit_locations(self):
"""Test remove_final_measurements properly recalculates clbit indicies
and preserves order of remaining cregs and clbits.
"""
c0 = ClassicalRegister(1)
c1_0 = Clbit()
c2 = ClassicalRegister(1)
c3 = ClassicalRegister(1)
# add an individual bit that's not in any register of this circuit
circuit = QuantumCircuit(QuantumRegister(1), c0, [c1_0], c2, c3)
circuit.measure(0, c1_0)
circuit.measure(0, c2[0])
# assert cregs and clbits before measure removal
self.assertEqual(circuit.cregs, [c0, c2, c3])
self.assertEqual(circuit.clbits, [c0[0], c1_0, c2[0], c3[0]])
# assert clbit indices prior to measure removal
self.assertEqual(circuit.find_bit(c0[0]), BitLocations(0, [(c0, 0)]))
self.assertEqual(circuit.find_bit(c1_0), BitLocations(1, []))
self.assertEqual(circuit.find_bit(c2[0]), BitLocations(2, [(c2, 0)]))
self.assertEqual(circuit.find_bit(c3[0]), BitLocations(3, [(c3, 0)]))
circuit.remove_final_measurements()
# after measure removal, creg c2 should be gone, as should lone bit c1_0
# and c0 should still come before c3
self.assertEqual(circuit.cregs, [c0, c3])
self.assertEqual(circuit.clbits, [c0[0], c3[0]])
# there should be no gaps in clbit indices
# e.g. c3[0] is now the second clbit
self.assertEqual(circuit.find_bit(c0[0]), BitLocations(0, [(c0, 0)]))
self.assertEqual(circuit.find_bit(c3[0]), BitLocations(1, [(c3, 0)]))
def test_reverse(self):
"""Test reverse method reverses but does not invert."""
qc = QuantumCircuit(2, 2)
qc.h(0)
qc.s(1)
qc.cx(0, 1)
qc.measure([0, 1], [0, 1])
qc.x(0)
qc.y(1)
expected = QuantumCircuit(2, 2)
expected.y(1)
expected.x(0)
expected.measure([0, 1], [0, 1])
expected.cx(0, 1)
expected.s(1)
expected.h(0)
self.assertEqual(qc.reverse_ops(), expected)
def test_repeat(self):
"""Test repeating the circuit works."""
qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc = QuantumCircuit(qr, cr)
qc.h(0)
qc.cx(0, 1)
qc.barrier()
qc.h(0).c_if(cr, 1)
with self.subTest("repeat 0 times"):
rep = qc.repeat(0)
self.assertEqual(rep, QuantumCircuit(qr, cr))
with self.subTest("repeat 3 times"):
inst = qc.to_instruction()
ref = QuantumCircuit(qr, cr)
for _ in range(3):
ref.append(inst, ref.qubits, ref.clbits)
rep = qc.repeat(3)
self.assertEqual(rep, ref)
@data(0, 1, 4)
def test_repeat_global_phase(self, num):
"""Test the global phase is properly handled upon repeat."""
phase = 0.123
qc = QuantumCircuit(1, global_phase=phase)
expected = np.exp(1j * phase * num) * np.identity(2)
np.testing.assert_array_almost_equal(Operator(qc.repeat(num)).data, expected)
def test_bind_global_phase(self):
"""Test binding global phase."""
x = Parameter("x")
circuit = QuantumCircuit(1, global_phase=x)
self.assertEqual(circuit.parameters, {x})
bound = circuit.bind_parameters({x: 2})
self.assertEqual(bound.global_phase, 2)
self.assertEqual(bound.parameters, set())
def test_bind_parameter_in_phase_and_gate(self):
"""Test binding a parameter present in the global phase and the gates."""
x = Parameter("x")
circuit = QuantumCircuit(1, global_phase=x)
circuit.rx(x, 0)
self.assertEqual(circuit.parameters, {x})
ref = QuantumCircuit(1, global_phase=2)
ref.rx(2, 0)
bound = circuit.bind_parameters({x: 2})
self.assertEqual(bound, ref)
self.assertEqual(bound.parameters, set())
def test_power(self):
"""Test taking the circuit to a power works."""
qc = QuantumCircuit(2)
qc.cx(0, 1)
qc.rx(0.2, 1)
gate = qc.to_gate()
with self.subTest("power(int >= 0) equals repeat"):
self.assertEqual(qc.power(4), qc.repeat(4))
with self.subTest("explicit matrix power"):
self.assertEqual(qc.power(4, matrix_power=True).data[0][0], gate.power(4))
with self.subTest("float power"):
self.assertEqual(qc.power(1.23).data[0][0], gate.power(1.23))
with self.subTest("negative power"):
self.assertEqual(qc.power(-2).data[0][0], gate.power(-2))
def test_power_parameterized_circuit(self):
"""Test taking a parameterized circuit to a power."""
theta = Parameter("th")
qc = QuantumCircuit(2)
qc.cx(0, 1)
qc.rx(theta, 1)
with self.subTest("power(int >= 0) equals repeat"):
self.assertEqual(qc.power(4), qc.repeat(4))
with self.subTest("cannot to matrix power if parameterized"):
with self.assertRaises(CircuitError):
_ = qc.power(0.5)
def test_control(self):
"""Test controlling the circuit."""
qc = QuantumCircuit(2, name="my_qc")
qc.cry(0.2, 0, 1)
c_qc = qc.control()
with self.subTest("return type is circuit"):
self.assertIsInstance(c_qc, QuantumCircuit)
with self.subTest("test name"):
self.assertEqual(c_qc.name, "c_my_qc")
with self.subTest("repeated control"):
cc_qc = c_qc.control()
self.assertEqual(cc_qc.num_qubits, c_qc.num_qubits + 1)
with self.subTest("controlled circuit has same parameter"):
param = Parameter("p")
qc.rx(param, 0)
c_qc = qc.control()
self.assertEqual(qc.parameters, c_qc.parameters)
with self.subTest("non-unitary operation raises"):
qc.reset(0)
with self.assertRaises(CircuitError):
_ = qc.control()
def test_control_implementation(self):
"""Run a test case for controlling the circuit, which should use ``Gate.control``."""
qc = QuantumCircuit(3)
qc.cx(0, 1)
qc.cry(0.2, 0, 1)
qc.t(0)
qc.append(SGate().control(2), [0, 1, 2])
qc.iswap(2, 0)
c_qc = qc.control(2, ctrl_state="10")
cgate = qc.to_gate().control(2, ctrl_state="10")
ref = QuantumCircuit(*c_qc.qregs)
ref.append(cgate, ref.qubits)
self.assertEqual(ref, c_qc)
@data("gate", "instruction")
def test_repeat_appended_type(self, subtype):
"""Test repeat appends Gate if circuit contains only gates and Instructions otherwise."""
sub = QuantumCircuit(2)
sub.x(0)
if subtype == "gate":
sub = sub.to_gate()
else:
sub = sub.to_instruction()
qc = QuantumCircuit(2)
qc.append(sub, [0, 1])
rep = qc.repeat(3)
if subtype == "gate":
self.assertTrue(all(isinstance(op[0], Gate) for op in rep.data))
else:
self.assertTrue(all(isinstance(op[0], Instruction) for op in rep.data))
def test_reverse_bits(self):
"""Test reversing order of bits."""
qc = QuantumCircuit(3, 2)
qc.h(0)
qc.s(1)
qc.cx(0, 1)
qc.measure(0, 1)
qc.x(0)
qc.y(1)
qc.global_phase = -1
expected = QuantumCircuit(3, 2)
expected.h(2)
expected.s(1)
expected.cx(2, 1)
expected.measure(2, 0)
expected.x(2)
expected.y(1)
expected.global_phase = -1
self.assertEqual(qc.reverse_bits(), expected)
def test_reverse_bits_boxed(self):
"""Test reversing order of bits in a hierarchical circuit."""
wide_cx = QuantumCircuit(3)
wide_cx.cx(0, 1)
wide_cx.cx(1, 2)
wide_cxg = wide_cx.to_gate()
cx_box = QuantumCircuit(3)
cx_box.append(wide_cxg, [0, 1, 2])
expected = QuantumCircuit(3)
expected.cx(2, 1)
expected.cx(1, 0)
self.assertEqual(cx_box.reverse_bits().decompose(), expected)
self.assertEqual(cx_box.decompose().reverse_bits(), expected)
# box one more layer to be safe.
cx_box_g = cx_box.to_gate()
cx_box_box = QuantumCircuit(4)
cx_box_box.append(cx_box_g, [0, 1, 2])
cx_box_box.cx(0, 3)
expected2 = QuantumCircuit(4)
expected2.cx(3, 2)
expected2.cx(2, 1)
expected2.cx(3, 0)
self.assertEqual(cx_box_box.reverse_bits().decompose().decompose(), expected2)
def test_reverse_bits_with_registers(self):
"""Test reversing order of bits when registers are present."""
qr1 = QuantumRegister(3, "a")
qr2 = QuantumRegister(2, "b")
qc = QuantumCircuit(qr1, qr2)
qc.h(qr1[0])
qc.cx(qr1[0], qr1[1])
qc.cx(qr1[1], qr1[2])
qc.cx(qr1[2], qr2[0])
qc.cx(qr2[0], qr2[1])
expected = QuantumCircuit(qr2, qr1)
expected.h(qr1[2])
expected.cx(qr1[2], qr1[1])
expected.cx(qr1[1], qr1[0])
expected.cx(qr1[0], qr2[1])
expected.cx(qr2[1], qr2[0])
self.assertEqual(qc.reverse_bits(), expected)
def test_reverse_bits_with_overlapped_registers(self):
"""Test reversing order of bits when registers are overlapped."""
qr1 = QuantumRegister(2, "a")
qr2 = QuantumRegister(bits=[qr1[0], qr1[1], Qubit()], name="b")
qc = QuantumCircuit(qr1, qr2)
qc.h(qr1[0])
qc.cx(qr1[0], qr1[1])
qc.cx(qr1[1], qr2[2])
qr2 = QuantumRegister(bits=[Qubit(), qr1[0], qr1[1]], name="b")
expected = QuantumCircuit(qr2, qr1)
expected.h(qr1[1])
expected.cx(qr1[1], qr1[0])
expected.cx(qr1[0], qr2[0])
self.assertEqual(qc.reverse_bits(), expected)
def test_reverse_bits_with_registerless_bits(self):
"""Test reversing order of registerless bits."""
q0 = Qubit()
q1 = Qubit()
c0 = Clbit()
c1 = Clbit()
qc = QuantumCircuit([q0, q1], [c0, c1])
qc.h(0)
qc.cx(0, 1)
qc.x(0).c_if(1, True)
qc.measure(0, 0)
expected = QuantumCircuit([c1, c0], [q1, q0])
expected.h(1)
expected.cx(1, 0)
expected.x(1).c_if(0, True)
expected.measure(1, 1)
self.assertEqual(qc.reverse_bits(), expected)
def test_reverse_bits_with_registers_and_bits(self):
"""Test reversing order of bits with registers and registerless bits."""
qr = QuantumRegister(2, "a")
q = Qubit()
qc = QuantumCircuit(qr, [q])
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.cx(qr[1], q)
expected = QuantumCircuit([q], qr)
expected.h(qr[1])
expected.cx(qr[1], qr[0])
expected.cx(qr[0], q)
self.assertEqual(qc.reverse_bits(), expected)
def test_reverse_bits_with_mixed_overlapped_registers(self):
"""Test reversing order of bits with overlapped registers and registerless bits."""
q = Qubit()
qr1 = QuantumRegister(bits=[q, Qubit()], name="qr1")
qr2 = QuantumRegister(bits=[qr1[1], Qubit()], name="qr2")
qc = QuantumCircuit(qr1, qr2, [Qubit()])
qc.h(q)
qc.cx(qr1[0], qr1[1])
qc.cx(qr1[1], qr2[1])
qc.cx(2, 3)
qr2 = QuantumRegister(2, "qr2")
qr1 = QuantumRegister(bits=[qr2[1], q], name="qr1")
expected = QuantumCircuit([Qubit()], qr2, qr1)
expected.h(qr1[1])
expected.cx(qr1[1], qr1[0])
expected.cx(qr1[0], qr2[0])
expected.cx(1, 0)
self.assertEqual(qc.reverse_bits(), expected)
def test_cnot_alias(self):
"""Test that the cnot method alias adds a cx gate."""
qc = QuantumCircuit(2)
qc.cnot(0, 1)
expected = QuantumCircuit(2)
expected.cx(0, 1)
self.assertEqual(qc, expected)
def test_inverse(self):
"""Test inverse circuit."""
qr = QuantumRegister(2)
qc = QuantumCircuit(qr, global_phase=0.5)
qc.h(0)
qc.barrier(qr)
qc.t(1)
expected = QuantumCircuit(qr)
expected.tdg(1)
expected.barrier(qr)
expected.h(0)
expected.global_phase = -0.5
self.assertEqual(qc.inverse(), expected)
def test_compare_two_equal_circuits(self):
"""Test to compare that 2 circuits are equal."""
qc1 = QuantumCircuit(2, 2)
qc1.h(0)
qc2 = QuantumCircuit(2, 2)
qc2.h(0)
self.assertTrue(qc1 == qc2)
def test_compare_two_different_circuits(self):
"""Test to compare that 2 circuits are different."""
qc1 = QuantumCircuit(2, 2)
qc1.h(0)
qc2 = QuantumCircuit(2, 2)
qc2.x(0)
self.assertFalse(qc1 == qc2)
def test_compare_a_circuit_with_none(self):
"""Test to compare that a circuit is different to None."""
qc1 = QuantumCircuit(2, 2)
qc1.h(0)
qc2 = None
self.assertFalse(qc1 == qc2)
def test_overlapped_add_bits_and_add_register(self):
"""Test add registers whose bits have already been added by add_bits."""
qc = QuantumCircuit()
for bit_type, reg_type in (
[Qubit, QuantumRegister],
[Clbit, ClassicalRegister],
[AncillaQubit, AncillaRegister],
):
bits = [bit_type() for _ in range(10)]
reg = reg_type(bits=bits)
qc.add_bits(bits)
qc.add_register(reg)
self.assertEqual(qc.num_qubits, 20)
self.assertEqual(qc.num_clbits, 10)
self.assertEqual(qc.num_ancillas, 10)
def test_overlapped_add_register_and_add_register(self):
"""Test add registers whose bits have already been added by add_register."""
qc = QuantumCircuit()
for bit_type, reg_type in (
[Qubit, QuantumRegister],
[Clbit, ClassicalRegister],
[AncillaQubit, AncillaRegister],
):
bits = [bit_type() for _ in range(10)]
reg1 = reg_type(bits=bits)
reg2 = reg_type(bits=bits)
qc.add_register(reg1)
qc.add_register(reg2)
self.assertEqual(qc.num_qubits, 20)
self.assertEqual(qc.num_clbits, 10)
self.assertEqual(qc.num_ancillas, 10)
def test_deprecated_measure_function(self):
"""Test that the deprecated version of the loose 'measure' function works correctly."""
from qiskit.circuit.measure import measure
test = QuantumCircuit(1, 1)
with self.assertWarnsRegex(DeprecationWarning, r".*Qiskit Terra 0\.19.*"):
measure(test, 0, 0)
expected = QuantumCircuit(1, 1)
expected.measure(0, 0)
self.assertEqual(test, expected)
def test_deprecated_reset_function(self):
"""Test that the deprecated version of the loose 'reset' function works correctly."""
from qiskit.circuit.reset import reset
test = QuantumCircuit(1, 1)
with self.assertWarnsRegex(DeprecationWarning, r".*Qiskit Terra 0\.19.*"):
reset(test, 0)
expected = QuantumCircuit(1, 1)
expected.reset(0)
self.assertEqual(test, expected)
class TestCircuitPrivateOperations(QiskitTestCase):
"""Direct tests of some of the private methods of QuantumCircuit. These do not represent
functionality that we want to expose to users, but there are some cases where private methods
are used internally (similar to "protected" access in .NET or "friend" access in C++), and we
want to make sure they work in those cases."""
def test_previous_instruction_in_scope_failures(self):
"""Test the failure paths of the peek and pop methods for retrieving the most recent
instruction in a scope."""
test = QuantumCircuit(1, 1)
with self.assertRaisesRegex(CircuitError, r"This circuit contains no instructions\."):
test._peek_previous_instruction_in_scope()
with self.assertRaisesRegex(CircuitError, r"This circuit contains no instructions\."):
test._pop_previous_instruction_in_scope()
with test.for_loop(range(2)):
with self.assertRaisesRegex(CircuitError, r"This scope contains no instructions\."):
test._peek_previous_instruction_in_scope()
with self.assertRaisesRegex(CircuitError, r"This scope contains no instructions\."):
test._pop_previous_instruction_in_scope()
def test_pop_previous_instruction_removes_parameters(self):
"""Test that the private "pop instruction" method removes parameters from the parameter
table if that instruction is the only instance."""
x, y = Parameter("x"), Parameter("y")
test = QuantumCircuit(1, 1)
test.rx(y, 0)
last_instructions = test.u(x, y, 0, 0)
self.assertEqual({x, y}, set(test.parameters))
instruction, _, _ = test._pop_previous_instruction_in_scope()
self.assertEqual(list(last_instructions), [instruction])
self.assertEqual({y}, set(test.parameters))
| 37.381494
| 100
| 0.615907
|
c14b9101725df1fa97cb8bb93c1fe4b3cb0eafdb
| 3,651
|
py
|
Python
|
pcaptotxt.py
|
namnguyen2010/pcaptofigure
|
366436a848382defced69e8a5ef4908a8177e585
|
[
"MIT"
] | null | null | null |
pcaptotxt.py
|
namnguyen2010/pcaptofigure
|
366436a848382defced69e8a5ef4908a8177e585
|
[
"MIT"
] | null | null | null |
pcaptotxt.py
|
namnguyen2010/pcaptofigure
|
366436a848382defced69e8a5ef4908a8177e585
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
pcaptolist
:license: MIT, see LICENSE for details.
"""
import sys
import getopt
from subprocess import Popen, PIPE
from tshark import tshark
def pcaptolist(tshark_path, input_file):
res = '' # create empty string
# time_list=[]
# frlen_list=[]
# source_list=[]
# des_list=[]
# dur_list=[]
# type_list=[]
pipe = Popen(tshark_path+" -r "+ input_file + " -T tabs", stdout=PIPE) #read pcap file using tshark
text = pipe.communicate()[0] # extract the info from the pcap file as shown when it is loaded by tshark
    # split the info into a list of lines denoted by \r\n (the Windows line-ending sequence is \r\n)
# Each line has parameters from a packet
lists = str(text,'utf-8').split('\r\n')
a = 0
for i in lists:
# print(i)
a += 1
        temp = i.strip().split('\t') # convert i into a list of parameters, separated by \t (tab)
# print(len(temp))
if len(temp) > 7:
time = float(temp[1].strip())
# source = temp[2].strip()
# if source == "":
# source = "-"
# destination = temp[4].strip()
# if destination == "":
# destination = "-"
# protocol = temp[5].strip()
fr_len = float(temp[6].strip())
# info = " ".join(temp[7:])
# duration = temp[9].strip()
# if duration == "":
# duration = 0
# else:
# duration = float(duration)
# fr_type = temp[10].strip()
# time_list.append(time)
# frlen_list.append(fr_len)
# source_list.append(source)
# des_list.append(destination)
# dur_list.append(duration)
# type_list.append(fr_type)
line = '\t'.join((str(time),str(fr_len))) # Join all the needed parameters into a new line
res = res + '\n' + line # Join the line together, separated by \n (enter)
return res
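# A hedged sketch of the transformation above: tshark's "-T tabs" output has one
# packet per line, e.g. (hypothetical sample, fields tab-separated):
#   1   0.000000   10.0.0.1   ->   10.0.0.2   TCP   66   [SYN] ...
# pcaptolist() keeps only the time (column 1) and frame length (column 6),
# producing "\n0.0\t66.0" for that packet.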
def main(argv):
try:
# opts is a list of returning key-value pairs, args is the options left after striped
# the short options 'hi:o:', if an option requires an input, it should be followed by a ":"
# the long options 'ifile=' is an option that requires an input, followed by a "="
opts, args = getopt.getopt(argv,"hi:o:",["ifile=","ofile="])
except getopt.GetoptError:
print("pcaptolist.py -i <inputfile> -o <output_file>")
sys.exit(2)
for opt, arg in opts:
if opt == "-h":
print("pcaptolist.py -i <inputfile> -o <output_file>")
sys.exit()
elif opt in ("-i", "--ifile"):
inputfile = arg
elif opt in ("-o", "--ofile"):
if arg[-4:] == ".txt":
print("outputing to ", arg)
output_file = arg
else:
print("image output must use mscgen as format parameter.\n")
print("please specify the format parameter before output parameter.\n")
tshark_path = tshark.get_tshark_path() # get the path of tshark in program file folder
print(tshark_path)
    if tshark_path == 0:
        print("tshark not found. Please install Wireshark.")
        sys.exit(1)
    res = pcaptolist(tshark_path, inputfile)
# print(res)
    with open(output_file, "w") as text_file:  # create txt file for writing
        text_file.write(res)  # write the res string; the file is closed on exit
if __name__ == "__main__":
main(sys.argv[1:])
| 34.121495
| 109
| 0.554369
|
72d29e78ae84e370ecd882ccbf9536dccbe4bcb8
| 846
|
py
|
Python
|
processtest.py
|
cleuton/NaiveFaceRecognizer
|
4616e3c4b4e09f5a8475b7b2064abb6d6d5458bc
|
[
"Apache-2.0"
] | 1
|
2020-03-23T22:50:45.000Z
|
2020-03-23T22:50:45.000Z
|
processtest.py
|
cleuton/NaiveFaceRecognizer
|
4616e3c4b4e09f5a8475b7b2064abb6d6d5458bc
|
[
"Apache-2.0"
] | null | null | null |
processtest.py
|
cleuton/NaiveFaceRecognizer
|
4616e3c4b4e09f5a8475b7b2064abb6d6d5458bc
|
[
"Apache-2.0"
] | null | null | null |
import pythonfaces
import cv2
import os
import sys
path_test="./test"
path_source="./test_source"
def process_file(fname):
faceslist = pythonfaces.imageFromFile(fname,img_h=512,img_w=512)
fcount=0
for bwimage in faceslist:
        image = cv2.cvtColor(bwimage, cv2.COLOR_GRAY2RGB)  # convert grayscale crop back to RGB for saving
fcount += 1
pname = os.path.splitext(os.path.basename(fname))[0]
extension = os.path.splitext(os.path.basename(fname))[1]
oname = pname + "." + str(fcount) + extension
oname = os.path.join(path_test,oname)
print("saving:",oname)
cv2.imwrite(oname,image)
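# Each detected face is saved as "<basename>.<face index><ext>" under path_test,
# e.g. a hypothetical photo.jpg with two faces yields photo.1.jpg and photo.2.jpg
# (derived from the naming logic above).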
for filename in os.listdir(path_source):
if filename.endswith(".jpg") or filename.endswith(".png"):
fullpath = os.path.join(path_source,filename)
print("Processing:",fullpath)
process_file(fullpath)
| 32.538462
| 69
| 0.669031
|
4e476772fa1efda448814f8f8a0482b7a73a584a
| 1,688
|
py
|
Python
|
.history/Classiles/ice_cream_20210614182454.py
|
minefarmer/Coding101-OOP
|
d5655977559e3bd1acf6a4f185a6121cc3b05ce4
|
[
"Unlicense"
] | null | null | null |
.history/Classiles/ice_cream_20210614182454.py
|
minefarmer/Coding101-OOP
|
d5655977559e3bd1acf6a4f185a6121cc3b05ce4
|
[
"Unlicense"
] | null | null | null |
.history/Classiles/ice_cream_20210614182454.py
|
minefarmer/Coding101-OOP
|
d5655977559e3bd1acf6a4f185a6121cc3b05ce4
|
[
"Unlicense"
] | null | null | null |
"""[Practice: Ice Cream]
Class
keyword class name
class Ice:
Instantiation
variable class
name name
("instance")
ice = Ice()
Method an action or behavior ==== to add a method, I simply define a function inside the class
| method name is (eat)
def eat(self):
print("hi") # this line is the method content
Dot Expression # To test the method
instance method name
IceCream . eat ()
Assigning an attribute is very similar to defining a variable
self attaches the attribute attribute value
to the current instance
self . cubes = 3
"""
# class IceCream:
# def __init__(self):
# # print("Created ice cream")
# self.scoops = 3
# def eat(self, scoops):
# if self.scoops < scoops:
# print("Not enough bites left!")
# self.scoops -= scoops
# def add(self, scoops):
# self.scoops += scoops
# # IceCream.eat() # Traceback (most recent call last):
# # File "/home/rich/Desktop/CarlsHub/Coding101-OOP/Classiles/ice_cream.py", line 37, in <module>
# # IceCream.eat()
# # TypeError: eat() missing 2 required positional arguments: 'self' and 'scoops'
'''[using input]
class ice input # a.k.a., ar
'''
class IceCream:
    def __init__(self):
# print("Created ice cream")
self.scoops = 3
def eat(self, scoops):
if self.scoops < scoops:
print("Not enough bites left!")
self.scoops -= scoops
def add(self, scoops):
self.scoops += scoops
ice = IceCream()  # calling IceCream.eat() on the class (no instance) raises TypeError, as noted above
ice.eat(2)
| 24.823529
| 109
| 0.558649
|
cf6203ef910cbb706facbad875dbce2f820a1602
| 1,088
|
py
|
Python
|
plot3d.py
|
joekasp/spectro
|
65ef111407cfdc4c69112e93fb04842fa15b615c
|
[
"MIT"
] | null | null | null |
plot3d.py
|
joekasp/spectro
|
65ef111407cfdc4c69112e93fb04842fa15b615c
|
[
"MIT"
] | null | null | null |
plot3d.py
|
joekasp/spectro
|
65ef111407cfdc4c69112e93fb04842fa15b615c
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
def surf3d(x, y, Z, window_title='Figure', ax_title='', fig=None, azim=-50, elev=25):
'''
Performs 3d surface plot for x,y,Z data
'''
X,Y = np.meshgrid(y,x)
    if fig is None:
fig = plt.figure(figsize=(12,6))
fig.canvas.set_window_title(window_title)
ax = fig.gca(projection='3d')
TICK_LIMIT = 5
surf = ax.plot_surface(X,Y,Z,rstride=2,cstride=2,linewidth=0,cmap=cm.rainbow)
ax.set_zlim3d(-1.0,1.0)
ax.view_init(elev=elev, azim=azim)
fig.colorbar(surf,shrink=0.5, aspect=5)
    ax.set_xlabel(r'$\omega_{1}/ 2\pi c (cm^{-1})$')
    ax.set_ylabel(r'$\omega_{3}/ 2\pi c (cm^{-1})$')
ax.xaxis.labelpad=15
ax.yaxis.labelpad=15
xticks = ax.get_xticks()
nskip = round(len(xticks)/TICK_LIMIT)
ax.set_xticks(xticks[::nskip])
yticks = ax.get_yticks()
nskip = round(len(yticks)/TICK_LIMIT)
ax.set_yticks(yticks[:-1:nskip])
ax.set_title(ax_title)
return ax
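# Minimal usage sketch (assumed sample data; not part of the original module):
# x = np.linspace(0, 10, 50)
# y = np.linspace(0, 10, 60)
# Z = np.sin(np.outer(x, y))          # shape (50, 60) matches meshgrid(y, x)
# ax = surf3d(x, y, Z, window_title='Demo', ax_title='sin surface')
# plt.show()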
| 29.405405
| 81
| 0.659007
|
76a3e5e63e25dbfdf8b883350d58e45a4f334541
| 8,325
|
py
|
Python
|
ts3bot/commands/api_key.py
|
MrGunflame/teamspeak_bot
|
0799c9d2177bf4a737b2ba12f8743cac981f8efa
|
[
"MIT"
] | null | null | null |
ts3bot/commands/api_key.py
|
MrGunflame/teamspeak_bot
|
0799c9d2177bf4a737b2ba12f8743cac981f8efa
|
[
"MIT"
] | null | null | null |
ts3bot/commands/api_key.py
|
MrGunflame/teamspeak_bot
|
0799c9d2177bf4a737b2ba12f8743cac981f8efa
|
[
"MIT"
] | null | null | null |
import datetime
import logging
import typing
import ts3
import ts3bot
from requests import RequestException
from ts3bot import (
ApiErrBadData,
Config,
InvalidKeyException,
RateLimitException,
events,
fetch_api,
sync_groups,
)
from ts3bot.bot import Bot
from ts3bot.database import enums, models
MESSAGE_REGEX = "\\s*(\\w{8}(-\\w{4}){3}-\\w{20}(-\\w{4}){3}-\\w{12})\\s*"
USAGE = "<API KEY>"
def handle(bot: Bot, event: events.TextMessage, match: typing.Match):
key = match.group(1)
# Check with ArenaNet's API
try:
account_info = fetch_api("account", api_key=key)
# Grab server info from database
server_group: typing.Optional[models.WorldGroup] = (
bot.session.query(models.WorldGroup)
.filter(models.WorldGroup.world == enums.World(account_info.get("world")))
.one_or_none()
)
# World is linked to a group
if server_group:
account: models.Account = models.Account.get_or_create(
bot.session, account_info, key
)
identity: models.Identity = models.Identity.get_or_create(
bot.session, event.uid
)
# Check if account is registered to anyone
linked_identity: typing.Optional[
models.LinkAccountIdentity
] = account.valid_identities.one_or_none()
# Account is already linked
if linked_identity:
# Account is linked to another guid
if linked_identity.identity.guid != event.uid:
try:
# Get user's DB id
cldbid: str = bot.exec_(
"clientgetdbidfromuid", cluid=event.uid
)[0]["cldbid"]
except ts3.TS3Error:
logging.error("Failed to get user's dbid", exc_info=True)
bot.send_message(event.id, "error_critical")
return
force_key_name = f"ts3bot-{cldbid}"
# Fetch token info
token_info = fetch_api("tokeninfo", api_key=key)
# Override registration, same as !register
if token_info.get("name", "") == force_key_name:
ts3bot.transfer_registration(bot, account, event)
logging.info(
"%s (%s) transferred permissions of %s onto themselves.",
event.name,
event.uid,
account_info.get("name"),
)
return
logging.warning(
"%s (%s) tried to use an already registered API key/account. (%s)",
event.name,
event.uid,
account_info.get("name"),
)
bot.send_message(event.id, "token_in_use", api_name=force_key_name)
else: # Account is linked to current guid
                    logging.info(
                        "User %s (%s) tried to register a second time for whatever reason using %s",
                        event.name,
                        event.uid,
                        account_info.get("name", "Unknown account"),
                    )
# Save new API key
if account.api_key != key:
account.api_key = key
account.is_valid = True
bot.session.commit()
bot.send_message(event.id, "registration_exists")
return
# Same API key supplied, last check was over 12 minutes ago
if (
ts3bot.timedelta_hours(
datetime.datetime.today() - account.last_check
)
>= 0.2
):
# Update saved account info if same API key was posted again with a reasonable time frame
account.update(bot.session)
try:
# Get user's DB id
cldbid = bot.exec_("clientgetdbidfromuid", cluid=event.uid)[
0
]["cldbid"]
# Sync groups
ts3bot.sync_groups(bot, cldbid, account)
bot.send_message(event.id, "registration_details_updated")
except ts3.TS3Error:
# User might not exist in the db
logging.error("Failed to sync user", exc_info=True)
else:
# Too early
bot.send_message(event.id, "registration_too_early")
else:
# Otherwise account is not yet linked and can be used
# Save API key
account.api_key = key
account.is_valid = True
bot.session.commit()
# Get user's DB id
cldbid = bot.exec_("clientgetdbidfromuid", cluid=event.uid)[0]["cldbid"]
# Unlink previous account from identity
current_account = models.Account.get_by_identity(bot.session, event.uid)
if current_account:
logging.info("Delinking %s from cldbid:%s", current_account, cldbid)
current_account.invalidate(bot.session)
# Register link between models
bot.session.add(
models.LinkAccountIdentity(account=account, identity=identity)
)
bot.session.commit()
# Add all known guilds to user if enabled
if Config.getboolean(
"guild", "assign_on_register"
) and Config.getboolean("guild", "allow_multiple_guilds"):
account.guilds.filter(
models.LinkAccountGuild.id.in_(
bot.session.query(models.LinkAccountGuild.id)
.join(models.Guild)
.filter(models.Guild.group_id.isnot(None))
.subquery()
)
).update({"is_active": True}, synchronize_session="fetch")
bot.session.commit()
# Sync groups
sync_groups(bot, cldbid, account)
logging.info(
"Assigned world %s to %s (%s) using %s",
server_group.world.name,
event.name,
event.uid,
account_info.get("name", "Unknown account"),
)
# Was registered with other account previously
if current_account:
bot.send_message(
event.id, "registration_update", account=account.name
)
else:
bot.send_message(event.id, "welcome_registered")
# Tell user about !guild if it's enabled
if Config.getboolean("commands", "guild"):
if Config.getboolean(
"guild", "assign_on_register"
) and Config.getboolean("guild", "allow_multiple_guilds"):
bot.send_message(event.id, "welcome_registered_3")
else:
bot.send_message(event.id, "welcome_registered_2")
else:
bot.send_message(
event.id,
"invalid_world",
world=enums.World(account_info.get("world")).proper_name,
)
except InvalidKeyException:
logging.info("This seems to be an invalid API key.")
bot.send_message(event.id, "invalid_token_retry")
except (RateLimitException, RequestException, ApiErrBadData):
bot.send_message(event.id, "error_api")
| 39.832536
| 113
| 0.479159
|
f4f2aba7a9127521eed4c78b8ab916e886637f98
| 1,949
|
py
|
Python
|
repos/system_upgrade/el8toel9/actors/checkcustomnetworkscripts/libraries/customnetworkscripts.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 21
|
2018-11-20T15:58:39.000Z
|
2022-03-15T19:57:24.000Z
|
repos/system_upgrade/el8toel9/actors/checkcustomnetworkscripts/libraries/customnetworkscripts.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 732
|
2018-11-21T18:33:26.000Z
|
2022-03-31T16:16:24.000Z
|
repos/system_upgrade/el8toel9/actors/checkcustomnetworkscripts/libraries/customnetworkscripts.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 85
|
2018-11-20T17:55:00.000Z
|
2022-03-29T09:40:31.000Z
|
import os
from leapp import reporting
CUSTOM_NETWORK_SCRIPTS = [
"/sbin/ifup-local",
"/sbin/ifup-pre-local",
"/sbin/ifdown-local",
"/sbin/ifdown-pre-local",
]
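# These paths are the legacy initscripts hook points: the network-scripts
# ifup/ifdown helpers execute them (when present) around interface changes,
# so their mere existence implies custom logic that needs manual migration.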
DOC_URL = "https://red.ht/upgrading-RHEL-8-to-RHEL-9-network-scripts"
def generate_report(existing_custom_network_scripts):
""" Generate reports informing user about possible manual intervention required """
# Show documentation url if custom network-scripts detected
title = "custom network-scripts detected"
summary = (
"RHEL 9 does not support the legacy network-scripts package that was"
" deprecated in RHEL 8. Custom network-scripts have been detected."
)
reporting.create_report(
[
reporting.Title(title),
reporting.Summary(summary),
reporting.Remediation(
hint=(
"Migrate the custom network-scripts to NetworkManager dispatcher"
" scripts manually before the ugprade. Follow instructions in the"
" official documentation."
)
),
reporting.Severity(reporting.Severity.HIGH),
reporting.Flags([reporting.Flags.INHIBITOR]),
reporting.Tags([reporting.Tags.NETWORK, reporting.Tags.SERVICES]),
reporting.ExternalLink(
title=(
"Upgrading from RHEL 8 to 9 - migrating custom network-scripts to"
" NetworkManager dispatcher scripts"
),
url=DOC_URL,
),
]
+ [
reporting.RelatedResource("file", fname)
for fname in existing_custom_network_scripts
]
)
def process():
existing_custom_network_scripts = [
fname for fname in CUSTOM_NETWORK_SCRIPTS if os.path.isfile(fname)
]
if existing_custom_network_scripts:
generate_report(existing_custom_network_scripts)
| 33.033898
| 87
| 0.6157
|
6e544566f262408a70a5450c205d507bdc678afd
| 31,814
|
py
|
Python
|
tests/unit/commands/local/lib/test_provider.py
|
praneetap/aws-sam-cli
|
2a713566c8de72a68eb8954584674a61a2d807ac
|
[
"Apache-2.0"
] | 2,285
|
2017-08-11T16:57:31.000Z
|
2018-05-08T20:38:25.000Z
|
tests/unit/commands/local/lib/test_provider.py
|
praneetap/aws-sam-cli
|
2a713566c8de72a68eb8954584674a61a2d807ac
|
[
"Apache-2.0"
] | 314
|
2017-08-11T17:29:27.000Z
|
2018-05-08T20:51:47.000Z
|
tests/unit/commands/local/lib/test_provider.py
|
praneetap/aws-sam-cli
|
2a713566c8de72a68eb8954584674a61a2d807ac
|
[
"Apache-2.0"
] | 284
|
2017-08-11T17:35:48.000Z
|
2018-05-08T20:15:59.000Z
|
import os
import posixpath
from unittest import TestCase
from unittest.mock import MagicMock, Mock, patch
from parameterized import parameterized, parameterized_class
from samcli.lib.utils.architecture import X86_64, ARM64
from samcli.lib.providers.provider import (
LayerVersion,
ResourceIdentifier,
Stack,
_get_build_dir,
get_all_resource_ids,
get_resource_by_id,
get_resource_ids_by_type,
get_unique_resource_ids,
Function,
get_resource_full_path_by_id,
)
from samcli.commands.local.cli_common.user_exceptions import (
InvalidLayerVersionArn,
UnsupportedIntrinsic,
InvalidFunctionPropertyType,
)
def make_resource(stack_path, name):
resource = Mock()
resource.stack_path = stack_path
resource.name = name
return resource
class TestProvider(TestCase):
@parameterized.expand(
[
(make_resource("", "A"), os.path.join("builddir", "A")),
(make_resource("A", "B"), os.path.join("builddir", "A", "B")),
(make_resource("A/B", "C"), os.path.join("builddir", "A", "B", "C")),
]
)
def test_stack_build_dir(self, resource, output_build_dir):
self.assertEqual(_get_build_dir(resource, "builddir"), output_build_dir)
@parameterized.expand(
[
("", "", os.path.join("builddir", "template.yaml")), # root stack
("", "A", os.path.join("builddir", "A", "template.yaml")),
("A", "B", os.path.join("builddir", "A", "B", "template.yaml")),
("A/B", "C", os.path.join("builddir", "A", "B", "C", "template.yaml")),
]
)
def test_stack_get_output_template_path(self, parent_stack_path, name, output_template_path):
root_stack = Stack(parent_stack_path, name, None, None, None, None)
self.assertEqual(root_stack.get_output_template_path("builddir"), output_template_path)
@parameterized_class(
("stack", "expected_id", "expected_stack_path"),
[
(
# empty metadata
Stack("", "stackLogicalId", "/stack", None, {}, {}),
"stackLogicalId",
"stackLogicalId",
),
(
# None metadata
Stack("", "stackLogicalId", "/stack", None, {}, None),
"stackLogicalId",
"stackLogicalId",
),
(
# metadata without sam resource id
Stack("", "stackLogicalId", "/stack", None, {}, {"id": "id"}),
"stackLogicalId",
"stackLogicalId",
),
(
# metadata with sam resource id
Stack("", "stackLogicalId", "/stack", None, {}, {"SamResourceId": "stackCustomId"}),
"stackCustomId",
"stackCustomId",
),
(
# empty metadata
Stack("stack", "stackLogicalId", "/stack", None, {}, {}),
"stackLogicalId",
posixpath.join("stack", "stackLogicalId"),
),
(
# None metadata
Stack("stack", "stackLogicalId", "/stack", None, {}, None),
"stackLogicalId",
posixpath.join("stack", "stackLogicalId"),
),
(
# metadata without sam resource id
Stack("stack", "stackLogicalId", "/stack", None, {}, {"id": "id"}),
"stackLogicalId",
posixpath.join("stack", "stackLogicalId"),
),
(
# metadata with sam resource id
Stack("stack", "stackLogicalId", "/stack", None, {}, {"SamResourceId": "stackCustomId"}),
"stackCustomId",
posixpath.join("stack", "stackCustomId"),
),
],
)
class TestStack(TestCase):
stack = None
expected_id = None
expected_stack_path = None
def test_stack_id(self):
self.assertEqual(self.expected_id, self.stack.stack_id)
def test_stack_path(self):
self.assertEqual(self.expected_stack_path, self.stack.stack_path)
class TestStackEqual(TestCase):
def test_stacks_are_equal(self):
stack1 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
stack2 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
self.assertTrue(stack1 == stack2)
def test_stacks_are_not_equal_different_types(self):
stack1 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
not_stack = Mock()
self.assertFalse(stack1 == not_stack)
def test_stacks_are_not_equal_different_parent_stack_path(self):
stack1 = Stack(
"stack1",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
stack2 = Stack(
"stack2",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
self.assertFalse(stack1 == stack2)
def test_stacks_are_not_equal_different_stack_name(self):
stack1 = Stack(
"stack",
"stackLogicalId1",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
stack2 = Stack(
"stack",
"stackLogicalId2",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
self.assertFalse(stack1 == stack2)
def test_stacks_are_not_equal_different_template_path(self):
stack1 = Stack(
"stack",
"stackLogicalId",
"/stack1",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
stack2 = Stack(
"stack",
"stackLogicalId",
"/stack2",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
self.assertFalse(stack1 == stack2)
def test_stacks_are_not_equal_different_parameters(self):
stack1 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key1": "value1"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
stack2 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key2": "value2"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
self.assertFalse(stack1 == stack2)
def test_stacks_are_not_equal_different_templates(self):
stack1 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId": "stackCustomId"},
)
stack2 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func2": {"Runtime": "Java"}}},
{"SamResourceId": "stackCustomId"},
)
self.assertFalse(stack1 == stack2)
def test_stacks_are_not_equal_different_metadata(self):
stack1 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId1": "stackCustomId1"},
)
stack2 = Stack(
"stack",
"stackLogicalId",
"/stack",
{"key": "value"},
{"Resources": {"func1": {"Runtime": "Python"}}},
{"SamResourceId2": "stackCustomId2"},
)
self.assertFalse(stack1 == stack2)
class TestFunction(TestCase):
def setUp(self) -> None:
super().setUp()
self.function = Function(
"name",
"name",
"functionname",
"runtime",
10,
3,
"handler",
"imageuri",
"packagetype",
"imageconfig",
"codeuri",
None,
"rolearn",
[],
None,
None,
None,
None,
[ARM64],
None,
"stackpath",
)
@parameterized.expand(
[
([ARM64], ARM64),
([], X86_64),
([X86_64], X86_64),
]
)
def test_architecture(self, architectures, architecture):
self.function = self.function._replace(architectures=architectures)
self.assertEqual(self.function.architecture, architecture)
def test_invalid_architecture(self):
self.function = self.function._replace(architectures=[X86_64, ARM64])
with self.assertRaises(InvalidFunctionPropertyType) as e:
self.function.architecture
self.assertEqual(str(e.exception), "Function name property Architectures should be a list of length 1")
def test_skip_build_is_false_if_metadata_is_None(self):
self.assertFalse(self.function.skip_build)
def test_skip_build_is_false_if_metadata_is_empty(self):
self.function = self.function._replace(metadata={})
self.assertFalse(self.function.skip_build)
def test_skip_build_is_false_if_skip_build_metadata_flag_is_false(self):
self.function = self.function._replace(metadata={"SkipBuild": False})
self.assertFalse(self.function.skip_build)
def test_skip_build_is_false_if_skip_build_metadata_flag_is_true(self):
self.function = self.function._replace(metadata={"SkipBuild": True})
self.assertTrue(self.function.skip_build)
class TestLayerVersion(TestCase):
@parameterized.expand(
[
("arn:aws:lambda:region:account-id:layer:layer-name:a"),
("arn:aws:lambda:region:account-id:layer"),
("a string without delimiter"),
]
)
def test_invalid_arn(self, arn):
layer = LayerVersion(arn, None) # creation of layer does not raise exception
with self.assertRaises(InvalidLayerVersionArn):
layer.version, layer.name
def test_layer_version_returned(self):
layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None)
self.assertEqual(layer_version.version, 1)
def test_layer_version_id_is_layer_name_if_no_custom_resource_id(self):
layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None)
self.assertEqual(layer_version.layer_id, layer_version.name)
def test_layer_version_id_is_custom_id_if_custom_resource_id_exist(self):
layer_version = LayerVersion(
"arn:aws:lambda:region:account-id:layer:layer-name:1",
None,
[],
{"BuildMethod": "dummy_build_method", "SamResourceId": "CustomLayerId"},
)
self.assertNotEqual(layer_version.layer_id, layer_version.name)
self.assertEqual(layer_version.layer_id, "CustomLayerId")
def test_layer_arn_returned(self):
layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None)
self.assertEqual(layer_version.layer_arn, "arn:aws:lambda:region:account-id:layer:layer-name")
def test_layer_build_method_returned(self):
layer_version = LayerVersion(
"arn:aws:lambda:region:account-id:layer:layer-name:1", None, [], {"BuildMethod": "dummy_build_method"}
)
self.assertEqual(layer_version.build_method, "dummy_build_method")
def test_codeuri_is_setable(self):
layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None)
layer_version.codeuri = "./some_value"
self.assertEqual(layer_version.codeuri, "./some_value")
def test_name_is_computed(self):
layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None)
self.assertEqual(layer_version.name, "layer-name-1-8cebcd0539")
def test_layer_version_is_defined_in_template(self):
layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", ".")
self.assertTrue(layer_version.is_defined_within_template)
def test_layer_version_raises_unsupported_intrinsic(self):
intrinsic_arn = {
"Fn::Sub": ["arn:aws:lambda:region:account-id:layer:{layer_name}:1", {"layer_name": "layer-name"}]
}
with self.assertRaises(UnsupportedIntrinsic):
LayerVersion(intrinsic_arn, ".")
def test_compatible_architectures_returned(self):
layer_version = LayerVersion(
"arn:aws:lambda:region:account-id:layer:layer-name:1",
None,
[],
{"BuildMethod": "dummy_build_method"},
[ARM64],
)
self.assertEqual(layer_version.compatible_architectures, [ARM64])
def test_layer_build_architecture_returned(self):
layer_version = LayerVersion(
"arn:aws:lambda:region:account-id:layer:layer-name:1",
None,
[],
{"BuildMethod": "dummy_build_method", "BuildArchitecture": ARM64},
[ARM64],
)
self.assertEqual(layer_version.build_architecture, ARM64)
def test_no_layer_build_architecture_returned(self):
layer_version = LayerVersion(
"arn:aws:lambda:region:account-id:layer:layer-name:1",
None,
[],
{"BuildMethod": "dummy_build_method"},
[ARM64],
)
self.assertEqual(layer_version.build_architecture, X86_64)
class TestResourceIdentifier(TestCase):
@parameterized.expand(
[
("Function1", "", "Function1"),
("NestedStack1/Function1", "NestedStack1", "Function1"),
("NestedStack1/NestedNestedStack2/Function1", "NestedStack1/NestedNestedStack2", "Function1"),
("", "", ""),
]
)
def test_parser(self, resource_identifier_string, stack_path, logical_id):
resource_identifier = ResourceIdentifier(resource_identifier_string)
self.assertEqual(resource_identifier.stack_path, stack_path)
self.assertEqual(resource_identifier.resource_iac_id, logical_id)
@parameterized.expand(
[
("Function1", "Function1", True),
("NestedStack1/Function1", "NestedStack1/Function1", True),
("NestedStack1/NestedNestedStack2/Function1", "NestedStack1/NestedNestedStack2/Function2", False),
("NestedStack1/NestedNestedStack3/Function1", "NestedStack1/NestedNestedStack2/Function1", False),
("", "", True),
]
)
def test_equal(self, resource_identifier_string_1, resource_identifier_string_2, equal):
resource_identifier_1 = ResourceIdentifier(resource_identifier_string_1)
resource_identifier_2 = ResourceIdentifier(resource_identifier_string_2)
self.assertEqual(resource_identifier_1 == resource_identifier_2, equal)
@parameterized.expand(
[
("Function1"),
("NestedStack1/Function1"),
("NestedStack1/NestedNestedStack2/Function1"),
]
)
def test_hash(self, resource_identifier_string):
resource_identifier_1 = ResourceIdentifier(resource_identifier_string)
resource_identifier_2 = ResourceIdentifier(resource_identifier_string)
self.assertEqual(hash(resource_identifier_1), hash(resource_identifier_2))
@parameterized.expand(
[
("Function1"),
("NestedStack1/Function1"),
("NestedStack1/NestedNestedStack2/Function1"),
(""),
]
)
def test_str(self, resource_identifier_string):
resource_identifier = ResourceIdentifier(resource_identifier_string)
self.assertEqual(str(resource_identifier), resource_identifier_string)
@parameterized_class(["is_cdk"], [[False], [True]])
class TestGetResourceByID(TestCase):
is_cdk = False
def setUp(self) -> None:
super().setUp()
self.root_stack = MagicMock()
self.root_stack.stack_path = ""
self.root_stack.resources = {"Function1": {"Properties": "Body1"}}
if self.is_cdk:
self.root_stack.resources["Function1"]["Metadata"] = {"SamResourceId": "CDKFunction1"}
self.nested_stack = MagicMock()
self.nested_stack.stack_path = "NestedStack1"
self.nested_stack.resources = {"Function1": {"Properties": "Body2"}}
if self.is_cdk:
self.nested_stack.resources["Function1"]["Metadata"] = {"SamResourceId": "CDKFunction1"}
self.nested_nested_stack = MagicMock()
self.nested_nested_stack.stack_path = "NestedStack1/NestedNestedStack1"
self.nested_nested_stack.resources = {"Function2": {"Properties": "Body3"}}
if self.is_cdk:
self.nested_nested_stack.resources["Function2"]["Metadata"] = {"SamResourceId": "CDKFunction2"}
def test_get_resource_by_id_explicit_root(
self,
):
resource_identifier = MagicMock()
resource_identifier.stack_path = ""
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function1"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, True
)
self.assertEqual(result, self.root_stack.resources["Function1"])
if self.is_cdk:
# check that logical id also works as resource if
resource_identifier.resource_iac_id = "Function1"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, True
)
self.assertEqual(result, self.root_stack.resources["Function1"])
def test_get_resource_by_id_explicit_nested(
self,
):
resource_identifier = MagicMock()
resource_identifier.stack_path = "NestedStack1"
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function1"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, True
)
self.assertEqual(result, self.nested_stack.resources["Function1"])
def test_get_resource_by_id_explicit_nested_nested(
self,
):
resource_identifier = MagicMock()
resource_identifier.stack_path = "NestedStack1/NestedNestedStack1"
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function2"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, True
)
self.assertEqual(result, self.nested_nested_stack.resources["Function2"])
def test_get_resource_by_id_implicit_root(
self,
):
resource_identifier = MagicMock()
resource_identifier.stack_path = ""
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function1"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, False
)
self.assertEqual(result, self.root_stack.resources["Function1"])
def test_get_resource_by_id_implicit_nested(
self,
):
resource_identifier = MagicMock()
resource_identifier.stack_path = ""
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function2"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, False
)
self.assertEqual(result, self.nested_nested_stack.resources["Function2"])
def test_get_resource_by_id_implicit_with_stack_path(
self,
):
resource_identifier = MagicMock()
resource_identifier.stack_path = "NestedStack1"
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function1"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, False
)
self.assertEqual(result, self.nested_stack.resources["Function1"])
def test_get_resource_by_id_not_found(
self,
):
resource_identifier = MagicMock()
resource_identifier.resource_iac_id = f"{'CDK' if self.is_cdk else ''}Function3"
result = get_resource_by_id(
[self.root_stack, self.nested_stack, self.nested_nested_stack], resource_identifier, False
)
self.assertEqual(result, None)
class TestGetResourceIDsByType(TestCase):
def setUp(self) -> None:
super().setUp()
self.root_stack = MagicMock()
self.root_stack.stack_path = ""
self.root_stack.resources = {
"Function1": {"Type": "TypeA"},
"CDKFunction1": {"Type": "TypeA", "Metadata": {"SamResourceId": "CDKFunction1-x"}},
}
self.nested_stack = MagicMock()
self.nested_stack.stack_path = "NestedStack1"
self.nested_stack.resources = {
"Function1": {"Type": "TypeA"},
"CDKFunction1": {"Type": "TypeA", "Metadata": {"SamResourceId": "CDKFunction1-x"}},
}
self.nested_nested_stack = MagicMock()
self.nested_nested_stack.stack_path = "NestedStack1/NestedNestedStack1"
self.nested_nested_stack.resources = {
"Function2": {"Type": "TypeB"},
"CDKFunction2": {"Type": "TypeC", "Metadata": {"SamResourceId": "CDKFunction2-x"}},
}
def test_get_resource_ids_by_type_single_nested(
self,
):
result = get_resource_ids_by_type([self.root_stack, self.nested_stack, self.nested_nested_stack], "TypeB")
self.assertEqual(result, [ResourceIdentifier("NestedStack1/NestedNestedStack1/Function2")])
def test_get_resource_ids_by_type_single_cdk_nested(
self,
):
result = get_resource_ids_by_type([self.root_stack, self.nested_stack, self.nested_nested_stack], "TypeC")
self.assertEqual(result, [ResourceIdentifier("NestedStack1/NestedNestedStack1/CDKFunction2-x")])
def test_get_resource_ids_by_type_multiple_nested(
self,
):
result = get_resource_ids_by_type([self.root_stack, self.nested_stack, self.nested_nested_stack], "TypeA")
self.assertEqual(
result,
[
ResourceIdentifier("Function1"),
ResourceIdentifier("CDKFunction1-x"),
ResourceIdentifier("NestedStack1/Function1"),
ResourceIdentifier("NestedStack1/CDKFunction1-x"),
],
)
class TestGetAllResourceIDs(TestCase):
def setUp(self) -> None:
super().setUp()
self.root_stack = MagicMock()
self.root_stack.stack_path = ""
self.root_stack.resources = {
"Function1": {"Type": "TypeA"},
"CDKFunction1": {"Type": "TypeA", "Metadata": {"SamResourceId": "CDKFunction1-x"}},
}
self.nested_stack = MagicMock()
self.nested_stack.stack_path = "NestedStack1"
self.nested_stack.resources = {
"Function1": {"Type": "TypeA"},
"CDKFunction1": {"Type": "TypeA", "Metadata": {"SamResourceId": "CDKFunction1-x"}},
}
self.nested_nested_stack = MagicMock()
self.nested_nested_stack.stack_path = "NestedStack1/NestedNestedStack1"
self.nested_nested_stack.resources = {
"Function2": {"Type": "TypeB"},
"CDKFunction2": {"Type": "TypeC", "Metadata": {"SamResourceId": "CDKFunction2-x"}},
}
def test_get_all_resource_ids(
self,
):
result = get_all_resource_ids([self.root_stack, self.nested_stack, self.nested_nested_stack])
self.assertEqual(
result,
[
ResourceIdentifier("Function1"),
ResourceIdentifier("CDKFunction1-x"),
ResourceIdentifier("NestedStack1/Function1"),
ResourceIdentifier("NestedStack1/CDKFunction1-x"),
ResourceIdentifier("NestedStack1/NestedNestedStack1/Function2"),
ResourceIdentifier("NestedStack1/NestedNestedStack1/CDKFunction2-x"),
],
)
class TestGetUniqueResourceIDs(TestCase):
def setUp(self) -> None:
super().setUp()
self.stacks = MagicMock()
@patch("samcli.lib.providers.provider.get_resource_ids_by_type")
def test_only_resource_ids(self, get_resource_ids_by_type_mock):
resource_ids = ["Function1", "Function2"]
resource_types = []
get_resource_ids_by_type_mock.return_value = {}
result = get_unique_resource_ids(self.stacks, resource_ids, resource_types)
get_resource_ids_by_type_mock.assert_not_called()
self.assertEqual(result, {ResourceIdentifier("Function1"), ResourceIdentifier("Function2")})
@patch("samcli.lib.providers.provider.get_resource_ids_by_type")
def test_only_resource_types(self, get_resource_ids_by_type_mock):
resource_ids = []
resource_types = ["Type1", "Type2"]
get_resource_ids_by_type_mock.return_value = {ResourceIdentifier("Function1"), ResourceIdentifier("Function2")}
result = get_unique_resource_ids(self.stacks, resource_ids, resource_types)
get_resource_ids_by_type_mock.assert_any_call(self.stacks, "Type1")
get_resource_ids_by_type_mock.assert_any_call(self.stacks, "Type2")
self.assertEqual(result, {ResourceIdentifier("Function1"), ResourceIdentifier("Function2")})
@patch("samcli.lib.providers.provider.get_resource_ids_by_type")
def test_duplicates(self, get_resource_ids_by_type_mock):
resource_ids = ["Function1", "Function2"]
resource_types = ["Type1", "Type2"]
get_resource_ids_by_type_mock.return_value = {ResourceIdentifier("Function2"), ResourceIdentifier("Function3")}
result = get_unique_resource_ids(self.stacks, resource_ids, resource_types)
get_resource_ids_by_type_mock.assert_any_call(self.stacks, "Type1")
get_resource_ids_by_type_mock.assert_any_call(self.stacks, "Type2")
self.assertEqual(
result, {ResourceIdentifier("Function1"), ResourceIdentifier("Function2"), ResourceIdentifier("Function3")}
)
class TestGetResourceFullPathByID(TestCase):
def setUp(self):
self.stacks = [
Stack(
"",
"",
"template.yaml",
{},
{
"Resources": {
"CDKResource1": {
"Properties": {"Body"},
"Metadata": {
"SamResource": "CDKResource1-x",
"aws:cdk:path": "Stack/CDKResource1-x/Resource",
},
},
"CFNResource1": {
"Properties": {"Body"},
},
}
},
),
Stack(
"",
"childStack",
"childStack/template.yaml",
{},
{
"Resources": {
"CDKResourceInChild1": {
"Metadata": {
"SamResource": "CDKResourceInChild1-x",
"aws:cdk:path": "Stack/CDKResourceInChild1-x/Resource",
},
},
"CFNResourceInChild1": {
"Properties": {"Body"},
},
}
},
),
]
@parameterized.expand(
[
(ResourceIdentifier("CFNResource1"), "CFNResource1"),
(ResourceIdentifier("CDKResource1"), "CDKResource1-x"),
(ResourceIdentifier("CDKResource1-x"), "CDKResource1-x"),
(ResourceIdentifier("CFNResourceInChild1"), "childStack/CFNResourceInChild1"),
(ResourceIdentifier("childStack/CFNResourceInChild1"), "childStack/CFNResourceInChild1"),
(ResourceIdentifier("CDKResourceInChild1"), "childStack/CDKResourceInChild1-x"),
(ResourceIdentifier("CDKResourceInChild1-x"), "childStack/CDKResourceInChild1-x"),
(ResourceIdentifier("childStack/CDKResourceInChild1-x"), "childStack/CDKResourceInChild1-x"),
(ResourceIdentifier("InvalidResourceId"), None),
(ResourceIdentifier("InvalidStackId/CFNResourceInChild1"), None),
# we should use iac_resource_id to define full path, could not use resource logical id in full path although
# cdk id is there
(ResourceIdentifier("childStack/CDKResourceInChild1"), None),
]
)
def test_get_resource_full_path_by_id(self, resource_id, expected_full_path):
full_path = get_resource_full_path_by_id(self.stacks, resource_id)
self.assertEqual(expected_full_path, full_path)
class TestGetStack(TestCase):
root_stack = Stack("", "Root", "template.yaml", None, {})
child_stack = Stack("Root", "Child", "root_stack/template.yaml", None, {})
child_child_stack = Stack("Root/Child", "ChildChild", "root_stack/child_stack/template.yaml", None, {})
def test_get_parent_stack(self):
stack = Stack.get_parent_stack(self.child_stack, [self.root_stack, self.child_stack, self.child_child_stack])
self.assertEqual(stack, self.root_stack)
stack = Stack.get_parent_stack(self.root_stack, [self.root_stack, self.child_stack, self.child_child_stack])
self.assertIsNone(stack)
def test_get_stack_by_full_path(self):
stack = Stack.get_stack_by_full_path("Root/Child", [self.root_stack, self.child_stack, self.child_child_stack])
self.assertEqual(stack, self.child_stack)
stack = Stack.get_stack_by_full_path("Root", [self.root_stack, self.child_stack, self.child_child_stack])
self.assertEqual(stack, self.root_stack)
stack = Stack.get_stack_by_full_path("Child/Child", [self.root_stack, self.child_stack, self.child_child_stack])
self.assertIsNone(stack)
def test_get_child_stacks(self):
stack_list = Stack.get_child_stacks(
self.root_stack, [self.root_stack, self.child_stack, self.child_child_stack]
)
self.assertEqual(stack_list, [self.child_stack])
stack_list = Stack.get_child_stacks(
self.child_stack, [self.root_stack, self.child_stack, self.child_child_stack]
)
self.assertEqual(stack_list, [self.child_child_stack])
stack_list = Stack.get_child_stacks(
self.child_child_stack, [self.root_stack, self.child_stack, self.child_child_stack]
)
self.assertEqual(stack_list, [])
| 37.828775
| 120
| 0.606808
|
1c602868b56b2fad4d4965acba83db8f7edb3f7d
| 6,413
|
py
|
Python
|
BingRewards/BingRewards.py
|
TheMercDeadpool/bing-rewards
|
943bc4267ea956d5061dc197033ac34fd9dc5f6a
|
[
"MIT"
] | null | null | null |
BingRewards/BingRewards.py
|
TheMercDeadpool/bing-rewards
|
943bc4267ea956d5061dc197033ac34fd9dc5f6a
|
[
"MIT"
] | null | null | null |
BingRewards/BingRewards.py
|
TheMercDeadpool/bing-rewards
|
943bc4267ea956d5061dc197033ac34fd9dc5f6a
|
[
"MIT"
] | null | null | null |
import sys
import os
from src.rewards import Rewards
from src.log import HistLog
import logging
DRIVERS_DIR = "drivers"
DRIVER = "chromedriver"
LOG_DIR = "logs"
ERROR_LOG = "error.log"
RUN_LOG = "run.log"
SEARCH_LOG = "search.log"
DEBUG = True
HEADLESS = True
def __main(arg0, arg1):
# change to top dir
dir_run_from = os.getcwd()
top_dir = os.path.dirname(arg0)
if top_dir and top_dir != dir_run_from:
os.chdir(top_dir)
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
hist_log = HistLog(os.path.join(LOG_DIR, RUN_LOG), os.path.join(LOG_DIR, SEARCH_LOG))
# get credentials
try:
from src import config
    except ImportError:
print("\nFailed to import configuration file")
logging.basicConfig(level=logging.DEBUG, format='%(message)s', filename=os.path.join(LOG_DIR, ERROR_LOG))
logging.exception(hist_log.get_timestamp())
logging.debug("")
raise
if not os.path.exists(DRIVERS_DIR):
os.mkdir(DRIVERS_DIR)
rewards = Rewards(os.path.join(DRIVERS_DIR, DRIVER), config.credentials["email"], config.credentials["password"], DEBUG, HEADLESS)
completion = hist_log.get_completion()
try:
if arg1 in ["w", "web"]:
print("\n\t{}\n".format("You selected web search"))
if not completion.is_edge_and_web_search_completed():
if not completion.is_edge_search_completed():
rewards.complete_edge_search(hist_log.get_search_hist())
if not completion.is_web_search_completed():
rewards.complete_web_search(hist_log.get_search_hist())
hist_log.write(rewards.completion, rewards.search_hist)
else:
print('Web search already completed')
elif arg1 in ["m", "mobile"]:
print("\n\t{}\n".format("You selected mobile search"))
if not completion.is_edge_and_mobile_search_completed():
if not completion.is_edge_search_completed():
rewards.complete_edge_search(hist_log.get_search_hist())
if not completion.is_mobile_search_completed():
rewards.complete_mobile_search(hist_log.get_search_hist())
hist_log.write(rewards.completion, rewards.search_hist)
else:
print('Mobile search already completed')
elif arg1 in ["b", "both"]:
print("\n\t{}\n".format("You selected both searches (web & mobile)"))
if not completion.is_both_searches_completed():
rewards.complete_both_searches(hist_log.get_search_hist())
hist_log.write(rewards.completion, rewards.search_hist)
else:
print('Both searches already completed')
elif arg1 in ["o", "other"]:
print("\n\t{}\n".format("You selected offers"))
if not completion.is_offers_completed():
rewards.complete_offers()
hist_log.write(rewards.completion, rewards.search_hist)
else:
print('Offers already completed')
elif arg1 in ["a", "all"]:
print("\n\t{}\n".format("You selected all"))
if not completion.is_all_completed():
rewards.complete_all(hist_log.get_search_hist())
hist_log.write(rewards.completion, rewards.search_hist)
else:
print('All already completed')
else:
print("\n\t{}\n".format("You selected remaining"))
if not completion.is_all_completed():
#complete_all() is fastest method b/c it doesn't open new webdriver for each new search type, so even if already completed method is tried again, it has very low overhead.
if not completion.is_web_search_completed() and not completion.is_mobile_search_completed():
rewards.complete_all(hist_log.get_search_hist())
#higher overhead, opens a new webdriver for each unfinished search type
else:
if not completion.is_edge_search_completed():
rewards.complete_edge_search(hist_log.get_search_hist())
if not completion.is_web_search_completed():
rewards.complete_web_search(hist_log.get_search_hist())
if not completion.is_offers_completed():
rewards.complete_offers()
if not completion.is_mobile_search_completed():
rewards.complete_mobile_search(hist_log.get_search_hist())
hist_log.write(rewards.completion, rewards.search_hist)
completion = hist_log.get_completion()
if not completion.is_all_completed(): # check again, log if any failed
logging.basicConfig(level=logging.DEBUG, format='%(message)s', filename=os.path.join(LOG_DIR, ERROR_LOG))
logging.debug(hist_log.get_timestamp())
for line in rewards.stdout:
logging.debug(line)
logging.debug("")
else:
print("Nothing remaining")
except:
logging.basicConfig(level=logging.DEBUG, format='%(message)s', filename=os.path.join(LOG_DIR, ERROR_LOG))
logging.exception(hist_log.get_timestamp())
logging.debug("")
hist_log.write(rewards.completion, rewards.search_hist)
raise
if __name__ == "__main__":
args = sys.argv
if len(args) == 1:
out = "Enter \t{}, \n\t{}, \n\t{}, \n\t{}, \n\t{}, \n\t{} \nInput: \t"
input_message = out.format("w for web", "m for mobile", "b for both", "o for offers", "a for all","r for remaining (default)")
try:
arg1 = raw_input(input_message) # python 2
except:
arg1 = input(input_message) # python 3
arg1 = arg1.lower()
__main(args[0], arg1)
elif len(args) == 2:
arg1 = args[1].lower()
        assert arg1 in ["-w", "--web", "-m", "--mobile", "-b", "--both", "-o", "--offers", "-a", "--all", "-r", "--remaining"]
__main(args[0], arg1.replace("-", ""))
else:
print("Incorrect number of arguments")
| 43.331081
| 187
| 0.587401
|
fa32d81434dd592e7d60be4c654bb5e606ba452a
| 1,318
|
py
|
Python
|
mikrotik_config_capture.py
|
orgito/ncm-scripts
|
7ce0a1cdd783f06d41633357c712c6addebc7702
|
[
"MIT"
] | 6
|
2018-08-31T23:08:36.000Z
|
2021-07-29T13:12:59.000Z
|
mikrotik_config_capture.py
|
orgito/ncm-scripts
|
7ce0a1cdd783f06d41633357c712c6addebc7702
|
[
"MIT"
] | null | null | null |
mikrotik_config_capture.py
|
orgito/ncm-scripts
|
7ce0a1cdd783f06d41633357c712c6addebc7702
|
[
"MIT"
] | 2
|
2020-03-19T17:51:17.000Z
|
2021-08-03T08:38:24.000Z
|
#!/usr/bin/python3
"""
Capture MikroTik device configurations. Used by CA Spectrum NCM.
"""
import argparse
import re
from time import sleep
import paramiko
parser = argparse.ArgumentParser(description='Capture MikroTik configurations')
parser.add_argument('host', type=str, help='Device ip address')
parser.add_argument('username', type=str, help='Username for connecting')
parser.add_argument('password', type=str, help='Password for connecting')
# CA Spectrum send extra, unnecessary arguments
parser.add_argument('dummy', type=str, nargs="*", help='Extra discarded arguments')
args = parser.parse_args()
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# TODO: Handle exceptions and exit with appropriate codes
client.connect(args.host, 22, args.username, args.password, timeout=30, look_for_keys=False, allow_agent=False, auth_timeout=30)
shell = client.invoke_shell(term='nocolor')
sleep(1)
shell.recv(1024)
# Activates the menu
shell.send('\r\n') # ENTER
sleep(1)
shell.recv(1024)
shell.send('/export\r\n')
sleep(1)
config = shell.recv(102400)
shell.send('quit\r\n')
client.close()
ansi_escape = re.compile(r'\x1b\[[0-?]*[ -/]*[@-~]')
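# Strips ANSI/VT100 escape sequences, e.g. a cursor move such as "\x1b[9999B"
# (hypothetical sample) that RouterOS may still emit despite term='nocolor'.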
config = [ansi_escape.sub('', line) for line in config.decode().splitlines()[3:]]
config = '\n'.join(config)
print(config)
| 28.652174
| 128
| 0.748103
|
19b66c06fd111ebecc88db08dba38800812570fb
| 1,232
|
py
|
Python
|
examples/rosenbrock_anneal.py
|
zhanglei1172/XBBO
|
9bf9b778b29735f108457d5e491680785212d580
|
[
"MIT"
] | null | null | null |
examples/rosenbrock_anneal.py
|
zhanglei1172/XBBO
|
9bf9b778b29735f108457d5e491680785212d580
|
[
"MIT"
] | null | null | null |
examples/rosenbrock_anneal.py
|
zhanglei1172/XBBO
|
9bf9b778b29735f108457d5e491680785212d580
|
[
"MIT"
] | null | null | null |
import numpy as np
from xbbo.search_space.fast_example_problem import build_space, rosenbrock_2d
from xbbo.search_algorithm.anneal_optimizer import Anneal
from xbbo.utils.constants import MAXINT
if __name__ == "__main__":
MAX_CALL = 1000
rng = np.random.RandomState(42)
# define black box function
blackbox_func = rosenbrock_2d
# define search space
cs = build_space(rng)
# define black box optimizer
    hpopt = Anneal(space=cs, seed=rng.randint(MAXINT), suggest_limit=MAX_CALL, initial_design='sobol', init_budget=0)
# Example call of the black-box function
def_value = blackbox_func(cs.get_default_configuration())
print("Default Value: %.2f" % def_value)
# ---- Begin BO-loop ----
for i in range(MAX_CALL):
# suggest
trial_list = hpopt.suggest()
# evaluate
value = blackbox_func(trial_list[0].config_dict)
# observe
trial_list[0].add_observe_value(observe_value=value)
hpopt.observe(trial_list=trial_list)
print(value)
# plt.plot(hpopt.trials.get_history()[0])
# plt.savefig('./out/rosenbrock_bo_gp.png')
# plt.show()
print('find best value:{}'.format(hpopt.trials.get_best()[0]))
| 32.421053
| 116
| 0.684253
|
bc6939c09c61eb3e6f070ef2bb17da8eaeb402a3
| 1,181
|
py
|
Python
|
looking_for_group/game_catalog/migrations/0009_auto_20181104_1033.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
looking_for_group/game_catalog/migrations/0009_auto_20181104_1033.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
looking_for_group/game_catalog/migrations/0009_auto_20181104_1033.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.1.2 on 2018-11-04 15:33
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('game_catalog', '0008_auto_20181104_0934'),
]
operations = [
migrations.AlterField(
model_name='gameedition',
name='game',
field=models.ForeignKey(help_text='Which game is this an edition of?', on_delete=django.db.models.deletion.CASCADE, related_name='editions', to='game_catalog.PublishedGame'),
),
migrations.AlterField(
model_name='gameedition',
name='game_system',
field=models.ForeignKey(blank=True, help_text='Which game system does this edition use?', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='game_editions', to='game_catalog.GameSystem'),
),
migrations.AlterField(
model_name='sourcebook',
name='edition',
field=models.ForeignKey(help_text='Edition this relates to.', on_delete=django.db.models.deletion.CASCADE, related_name='sourcebooks', to='game_catalog.GameEdition'),
),
]
| 39.366667
| 219
| 0.666384
|
e0fb1ba66250e962774a5daaad553efc7ab19f19
| 550
|
py
|
Python
|
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/frontline-safety-32038
|
2a116807444c92d75c3bda1f99b9042cc637ace5
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/frontline-safety-32038
|
2a116807444c92d75c3bda1f99b9042cc637ace5
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/frontline-safety-32038
|
2a116807444c92d75c3bda1f99b9042cc637ace5
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.db import migrations
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "frontline-safety-32038.botics.co"
site_params = {
"name": "Frontline Safety",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_site),
]
| 21.153846
| 61
| 0.661818
|
52324a0ee62e491988085faee2c3946320eb9935
| 2,520
|
py
|
Python
|
venv/lib/python3.8/site-packages/dateparser/data/numeral_translation_data/kl.py
|
yuta-komura/vishnu
|
67173b674d5f4f3be189474103612447ef69ab44
|
[
"MIT"
] | 1
|
2021-11-17T04:55:14.000Z
|
2021-11-17T04:55:14.000Z
|
dateparser/data/numeral_translation_data/kl.py
|
cool-RR/dateparser
|
c38336df521cc57d947dc2c9111539a72f801652
|
[
"BSD-3-Clause"
] | null | null | null |
dateparser/data/numeral_translation_data/kl.py
|
cool-RR/dateparser
|
c38336df521cc57d947dc2c9111539a72f801652
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
info = {
"%%numbertimes": {
"1": "ataaseq;",
"2": "marlunnik;",
"3": "pingasunik;",
"4": "sisamanik;",
"5": "tallimanik;",
"6": "arfinilinnik;",
"7": "arfineq-marlunnik;",
"8": "arfineq-pingasunik;",
"9": "arfineq-sisamanik;",
"10": "qulinik;",
"11": "aqqanilinik;",
"12": "aqqaneq-marlunnik;",
"13": "aqqaneq-pingasunik;",
"14": "aqqaneq-sisamanik;",
"15": "aqqaneq-tallimanik;",
"16": "arfersanilinnik;",
"17": "arfersaneq-marlunnik;",
"18": "arfersaneq-pingasunik;",
"19": "arfersaneq-sisamanik;",
"(20, 99)": "<%%numbertimes< qulillit[ >>];",
"(100, 199)": "uutritit[ >>];",
"(200, 'inf')": "<%%numbertimes< uutritillit[ >>];"
},
"%spellout-cardinal": {
"0": "nuulu;",
"1": "ataaseq;",
"2": "marluk;",
"3": "pingasut;",
"4": "sisamat;",
"5": "tallimat;",
"6": "arfinillit;",
"7": "arfineq-marluk;",
"8": "arfineq-pingasut;",
"9": "arfineq-sisamat;",
"10": "qulit;",
"11": "aqqanilit;",
"12": "aqqaneq-marluk;",
"13": "aqqaneq-pingasut;",
"14": "aqqaneq-sisamat;",
"15": "aqqaneq-tallimat;",
"16": "arfersanillit;",
"17": "arfersaneq-marluk;",
"18": "arfersaneq-pingasut;",
"19": "arfersaneq-sisamat;",
"(20, 99)": "<%%numbertimes< qulillit[ >>];",
"(100, 199)": "uutritit[ >>];",
"(200, 999)": "<%%numbertimes< uutritillit[ >>];",
"(1000, 1999)": "tuusintit[ >>];",
"(2000, 999999)": "<%%numbertimes< tuusintillit[ >>];",
"(1000000, 1999999)": "millionit[ >>];",
"(2000000, 999999999)": "<%%numbertimes< millionillit[ >>];",
"(1000000000, 1999999999)": "milliardit[ >>];",
"(2000000000, 999999999999)": "<%%numbertimes< milliardillit[ >>];",
"(1000000000000, 1999999999999)": "billionit[ >>];",
"(2000000000000, 999999999999999)": "<%%numbertimes< billioniillit[ >>];",
"(1000000000000000, 1999999999999999)": "billiardit[ >>];",
"(2000000000000000, 999999999999999999)": "<%%numbertimes< billiardillit[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-numbering": {
"(0, 'inf')": "=%spellout-cardinal=;"
},
"%spellout-numbering-year": {
"(0, 'inf')": "=%spellout-numbering=;"
}
}
| 36
| 88
| 0.475397
|
d8c62669708935c094688b05771d22290074d9dd
| 335
|
py
|
Python
|
fixpol/migrations/0011_auto_20200805_1926.py
|
tnadams/fix-politics
|
f2e82112063f95da81676bcf5f2bf4d8bf94d74b
|
[
"Apache-2.0"
] | null | null | null |
fixpol/migrations/0011_auto_20200805_1926.py
|
tnadams/fix-politics
|
f2e82112063f95da81676bcf5f2bf4d8bf94d74b
|
[
"Apache-2.0"
] | null | null | null |
fixpol/migrations/0011_auto_20200805_1926.py
|
tnadams/fix-politics
|
f2e82112063f95da81676bcf5f2bf4d8bf94d74b
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.0.8 on 2020-08-05 19:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('fixpol', '0010_searchcriteria'),
]
operations = [
migrations.RenameModel(
old_name='SearchCriteria',
new_name='Criteria',
),
]
| 18.611111
| 47
| 0.6
|
1f9fb96cb7aeb6a2c73c49845005f080aa3153e2
| 10,924
|
py
|
Python
|
intersight/model/techsupportmanagement_collection_control_policy_all_of.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 5
|
2021-12-16T15:13:32.000Z
|
2022-03-29T16:09:54.000Z
|
intersight/model/techsupportmanagement_collection_control_policy_all_of.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 4
|
2022-01-25T19:05:51.000Z
|
2022-03-29T20:18:37.000Z
|
intersight/model/techsupportmanagement_collection_control_policy_all_of.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 2
|
2020-07-07T15:01:08.000Z
|
2022-01-31T04:27:35.000Z
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.iam_account_relationship import IamAccountRelationship
globals()['IamAccountRelationship'] = IamAccountRelationship
class TechsupportmanagementCollectionControlPolicyAllOf(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
},
('object_type',): {
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
},
('deployment_type',): {
'NONE': "None",
'SAAS': "SaaS",
'APPLIANCE': "Appliance",
},
('tech_support_collection',): {
'ENABLE': "Enable",
'DISABLE': "Disable",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'object_type': (str,), # noqa: E501
'deployment_type': (str,), # noqa: E501
'tech_support_collection': (str,), # noqa: E501
'account': (IamAccountRelationship,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
'deployment_type': 'DeploymentType', # noqa: E501
'tech_support_collection': 'TechSupportCollection', # noqa: E501
'account': 'Account', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""TechsupportmanagementCollectionControlPolicyAllOf - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "techsupportmanagement.CollectionControlPolicy", must be one of ["techsupportmanagement.CollectionControlPolicy", ] # noqa: E501
object_type (str): The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.. defaults to "techsupportmanagement.CollectionControlPolicy", must be one of ["techsupportmanagement.CollectionControlPolicy", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
deployment_type (str): Deployment type defines whether the policy is associated with a SaaS or Appliance account. * `None` - Service deployment type None. * `SaaS` - Service deployment type SaaS. * `Appliance` - Service deployment type Appliance.. [optional] if omitted the server will use the default value of "None" # noqa: E501
tech_support_collection (str): Enable or Disable techsupport collection for a specific account. * `Enable` - Enable techsupport collection. * `Disable` - Disable techsupport collection.. [optional] if omitted the server will use the default value of "Enable" # noqa: E501
account (IamAccountRelationship): [optional] # noqa: E501
"""
class_id = kwargs.get('class_id', "techsupportmanagement.CollectionControlPolicy")
object_type = kwargs.get('object_type', "techsupportmanagement.CollectionControlPolicy")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.class_id = class_id
self.object_type = object_type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
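# A short usage sketch (enum values taken from allowed_values above; this is
# an illustration, not an official SDK example):
#   policy = TechsupportmanagementCollectionControlPolicyAllOf(
#       deployment_type="SaaS",
#       tech_support_collection="Enable",
#   )
#   # class_id and object_type default to
#   # "techsupportmanagement.CollectionControlPolicy"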
| 53.029126
| 1,678
| 0.647657
|
cb8e6fe32faa4a29bdd918edca7876736eea8c8e
| 290
|
py
|
Python
|
api/rest.py
|
rbaylon/flask-restapi
|
420fb90a5971e999bfbd3064c86efc960c25f21e
|
[
"BSD-2-Clause"
] | null | null | null |
api/rest.py
|
rbaylon/flask-restapi
|
420fb90a5971e999bfbd3064c86efc960c25f21e
|
[
"BSD-2-Clause"
] | null | null | null |
api/rest.py
|
rbaylon/flask-restapi
|
420fb90a5971e999bfbd3064c86efc960c25f21e
|
[
"BSD-2-Clause"
] | null | null | null |
from flask_restful import Api
from api.resources import Room, RoomList, ApiLogin
from baseapp import app
rest_api = Api(app)
# Build api routes
rest_api.add_resource(RoomList, '/api/rooms')
rest_api.add_resource(Room, '/api/rooms/<room_id>')
rest_api.add_resource(ApiLogin, '/api/login')
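# A minimal smoke test for the routes above (a sketch: it assumes the app is
# already being served on localhost:5000 and that RoomList returns JSON; the
# host, port and payload shape are not specified in this module):
if __name__ == '__main__':
    import json
    from urllib.request import urlopen
    with urlopen('http://localhost:5000/api/rooms') as resp:
        print(json.loads(resp.read()))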
| 26.363636
| 51
| 0.782759
|
ecc750e62ee1e576f8e197942e29186ac3b5e4e6
| 3,247
|
py
|
Python
|
dvc/command/checkout.py
|
gucharbon/dvc
|
3ec0e9b96492af5b89c683c912ba233afe81d7c2
|
[
"Apache-2.0"
] | null | null | null |
dvc/command/checkout.py
|
gucharbon/dvc
|
3ec0e9b96492af5b89c683c912ba233afe81d7c2
|
[
"Apache-2.0"
] | null | null | null |
dvc/command/checkout.py
|
gucharbon/dvc
|
3ec0e9b96492af5b89c683c912ba233afe81d7c2
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import logging
import operator
import colorama
from dvc.command import completion
from dvc.command.base import CmdBase, append_doc_link
from dvc.exceptions import CheckoutError
from dvc.utils.humanize import get_summary
logger = logging.getLogger(__name__)
def log_changes(stats):
colors = [
("modified", colorama.Fore.YELLOW,),
("added", colorama.Fore.GREEN),
("deleted", colorama.Fore.RED,),
]
for state, color in colors:
entries = stats.get(state)
if not entries:
continue
for entry in entries:
logger.info(
"{color}{state}{nc}{spacing}{entry}".format(
color=color,
state=state[0].upper(),
nc=colorama.Fore.RESET,
spacing="\t",
entry=entry,
)
)
class CmdCheckout(CmdBase):
def run(self):
stats, exc = None, None
try:
stats = self.repo.checkout(
targets=self.args.targets,
with_deps=self.args.with_deps,
force=self.args.force,
relink=self.args.relink,
recursive=self.args.recursive,
)
except CheckoutError as _exc:
exc = _exc
stats = exc.stats
if self.args.summary:
default_message = "No changes."
msg = get_summary(
sorted(stats.items(), key=operator.itemgetter(0))
)
logger.info(msg or default_message)
else:
log_changes(stats)
if exc:
raise exc
return 0
def add_parser(subparsers, parent_parser):
CHECKOUT_HELP = "Checkout data files from cache."
checkout_parser = subparsers.add_parser(
"checkout",
parents=[parent_parser],
description=append_doc_link(CHECKOUT_HELP, "checkout"),
help=CHECKOUT_HELP,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
checkout_parser.add_argument(
"--summary",
action="store_true",
default=False,
help="Show summary of the changes.",
)
checkout_parser.add_argument(
"-d",
"--with-deps",
action="store_true",
default=False,
help="Checkout all dependencies of the specified target.",
)
checkout_parser.add_argument(
"-R",
"--recursive",
action="store_true",
default=False,
help="Checkout all subdirectories of the specified directory.",
)
checkout_parser.add_argument(
"-f",
"--force",
action="store_true",
default=False,
help="Do not prompt when removing working directory files.",
)
checkout_parser.add_argument(
"--relink",
action="store_true",
default=False,
help="Recreate links or copies from cache to workspace.",
)
checkout_parser.add_argument(
"targets",
nargs="*",
help="DVC-files to checkout. Optional. "
"(Finds all DVC-files in the workspace by default.)",
).complete = completion.DVC_FILE
checkout_parser.set_defaults(func=CmdCheckout)
| 27.285714
| 71
| 0.57376
|
d8430cf41a70f7893d5f9c07360e8e0379b1e92b
| 2,319
|
py
|
Python
|
LightLockClean/a1_Program-Files/b3_Static2/c3_Extract/d1_triangulateRobot.py
|
Team766/LightLock
|
b73250f084546749e25eb6446892641eab315725
|
[
"Unlicense"
] | null | null | null |
LightLockClean/a1_Program-Files/b3_Static2/c3_Extract/d1_triangulateRobot.py
|
Team766/LightLock
|
b73250f084546749e25eb6446892641eab315725
|
[
"Unlicense"
] | null | null | null |
LightLockClean/a1_Program-Files/b3_Static2/c3_Extract/d1_triangulateRobot.py
|
Team766/LightLock
|
b73250f084546749e25eb6446892641eab315725
|
[
"Unlicense"
] | null | null | null |
import math
PI = 3.141592653
def generatePointTwo(t1, tangent): #Creates a second point on the sight line
    #Takes in first point as (x,y) tuple and the slope (tan of the heading);
    #returns a point one unit along x, offsetting y by -slope.
    t1_1 = t1[0]
    t1_2 = t1[1]
    t1out = t1_1 + 1
    t2out = t1_2 - tangent
    return (t1out, t2out)
def findIntersection(x1,y1,x2,y2,x3,y3,x4,y4):
#Finds intersection of two lines defined by two points each
#1 is light_one, 2 is addition to light_one
#3 is light_two, 4 is addition to light_two
a,b,c,d = x1,y1,x2,y2
e,f,g,h = x3,y3,x4,y4
P_x = ((a*d-b*c)*(e-g)-(a-c)*(e*h-f*g))/((a-c)*(f-h)-(b-d)*(e-g))
P_y = ((a*d-b*c)*(f-h)-(b-d)*(e*h-f*g))/((a-c)*(f-h)-(b-d)*(e-g))
return (P_x,P_y)
def lightRelativeHeading(vision_heading,robot_heading_f):
return (vision_heading + robot_heading_f + 180)%360
def degreesToRadians(degrees):
return degrees * (PI/180)
light_one = (-4,4)
light_two = (-4,-4)
#Accurate Gyro Heading of Robot
robot_heading = 0
#Headings in degrees relative to camera, taken from camera data
vision_heading_one = 180+63.475
vision_heading_two = 180-63.475
#give headings relative to lights in degrees
#light_heading_one = lightRelativeHeading(vision_heading_one,robot_heading)
#light_heading_two = lightRelativeHeading(vision_heading_two,robot_heading)
#Convert degrees to radians
#light_heading_one = degreesToRadians(light_heading_one)
#light_heading_two = degreesToRadians(light_heading_two)
#Find slope ratio
#h_1 = math.tan(light_heading_one)
#h_2 = math.tan(light_heading_two)
#
#P1_2 = generatePointTwo(light_one,h_1)
#P2_2 = generatePointTwo(light_two,h_2)
#print(findIntersection(*light_one,*P1_2,*light_two,*P2_2))
def triangulateRobotPosition(l1,l2,rh,vh1,vh2): #light_one, light_two, robot_heading, vision_heading_one,vision_heading_two
light_heading_one = lightRelativeHeading(vh1,rh)
light_heading_two = lightRelativeHeading(vh2,rh)
#Convert degrees to radians
light_heading_one = degreesToRadians(light_heading_one)
light_heading_two = degreesToRadians(light_heading_two)
#Find slope ratio
h_1 = math.tan(light_heading_one)
h_2 = math.tan(light_heading_two)
P1_2 = generatePointTwo(l1,h_1)
P2_2 = generatePointTwo(l2,h_2)
return findIntersection(*l1,*P1_2,*l2,*P2_2)
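# Example (a sketch reusing the module-level test values above): with lights
# at (-4, 4) and (-4, -4) and the given camera headings, the two sight lines
# intersect at roughly (-2, 0).
if __name__ == "__main__":
    print(triangulateRobotPosition(light_one, light_two, robot_heading,
                                   vision_heading_one, vision_heading_two))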
| 30.116883
| 123
| 0.725313
|
beda100656e93a797facc3751b41918394a8f6d9
| 3,849
|
py
|
Python
|
Hood/settings.py
|
iankurao/neighbourhood
|
5c9384fa9496f75f734e83275ffc2865879a148b
|
[
"MIT"
] | null | null | null |
Hood/settings.py
|
iankurao/neighbourhood
|
5c9384fa9496f75f734e83275ffc2865879a148b
|
[
"MIT"
] | 5
|
2020-06-05T21:42:55.000Z
|
2021-09-08T01:06:08.000Z
|
hood/settings.py
|
OlooOuma/mtaani
|
8ba847e6df189bc06ab3bb2ae2f267b20507aed4
|
[
"MIT"
] | null | null | null |
"""
Django settings for Hood project.
Generated by 'django-admin startproject' using Django 1.11.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import django_heroku
import dj_database_url
from decouple import config,Csv
MODE = config("MODE", default="dev")
SECRET_KEY = config('SECRET_KEY')
# Cast to a real boolean; a raw env string like "False" would otherwise be truthy.
DEBUG = config('DEBUG', cast=bool)
# development
if MODE == "dev":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': config('DB_NAME'),
'USER': config('DB_USER'),
'PASSWORD': config('DB_PASSWORD'),
'HOST': config('DB_HOST'),
'PORT': '',
}
}
# production
else:
DATABASES = {
'default': dj_database_url.config(
default=config('DATABASE_URL')
)
}
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
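# A sketch of the .env keys this file reads via config() (names come from the
# calls above and in the DATABASES blocks; the values here are illustrative):
#   MODE=dev
#   SECRET_KEY=change-me
#   DEBUG=True
#   DB_NAME=hood
#   DB_USER=hood
#   DB_PASSWORD=secret
#   DB_HOST=127.0.0.1
#   DATABASE_URL=postgres://...   (production only)
#   ALLOWED_HOSTS=localhost,127.0.0.1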
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Application definition
INSTALLED_APPS = [
'hood_app.apps.HoodAppConfig',
'crispy_forms',
'bootstrap3',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Hood.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Hood.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
LOGIN_REDIRECT_URL = "home"
LOGIN_URL = 'login'
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
django_heroku.settings(locals())
| 24.993506
| 91
| 0.686412
|
bfa2b3c6445f1af5f35bdaf73935d31646d7fe8f
| 845
|
py
|
Python
|
neuralmonkey/evaluators/edit_distance.py
|
Simon-Will/neuralmonkey
|
b686a9d302cb10eda5fca991e1d7ee6b9e84b75a
|
[
"BSD-3-Clause"
] | 15
|
2018-04-11T09:18:09.000Z
|
2021-03-12T03:04:20.000Z
|
neuralmonkey/evaluators/edit_distance.py
|
Simon-Will/neuralmonkey
|
b686a9d302cb10eda5fca991e1d7ee6b9e84b75a
|
[
"BSD-3-Clause"
] | null | null | null |
neuralmonkey/evaluators/edit_distance.py
|
Simon-Will/neuralmonkey
|
b686a9d302cb10eda5fca991e1d7ee6b9e84b75a
|
[
"BSD-3-Clause"
] | 6
|
2017-07-25T15:30:28.000Z
|
2019-10-31T16:14:48.000Z
|
from typing import List
from difflib import SequenceMatcher
import numpy as np
class EditDistanceEvaluator(object):
def __init__(self, name: str = "Edit distance") -> None:
self.name = name
    def __call__(self, decoded: List[List[str]],
                 references: List[List[str]]) -> float:
        # ratio() is a similarity in [0, 1]; 1 - mean(similarity) turns it
        # into a distance, so lower scores are better (cf. compare_scores).
        return 1 - np.mean([self.ratio(u" ".join(ref), u" ".join(dec))
                            for dec, ref in zip(decoded, references)])
@staticmethod
def ratio(str1: str, str2: str) -> float:
matcher = SequenceMatcher(None, str1, str2)
return matcher.ratio()
@staticmethod
def compare_scores(score1: float, score2: float) -> int:
# the lower the better
return (score1 < score2) - (score1 > score2)
# pylint: disable=invalid-name
EditDistance = EditDistanceEvaluator()
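# A minimal demo (guarded so importing the module stays side-effect free):
if __name__ == "__main__":
    decoded = [["the", "cat", "sat"]]
    references = [["the", "cat", "sat", "down"]]
    # Near-identical sequences give a distance close to 0.
    print(EditDistance(decoded, references))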
| 29.137931
| 78
| 0.631953
|
23e36fd42fa35d6074ebd5de07ab202d56027104
| 3,184
|
py
|
Python
|
server/parsing/PingResultParser.py
|
stephenmac98/cheesepi_python3
|
d894117938deee8ba676d9872bed5eae388310d1
|
[
"Apache-2.0"
] | 6
|
2019-11-28T20:41:15.000Z
|
2022-03-03T10:40:10.000Z
|
server/parsing/PingResultParser.py
|
stephenmac98/cheesepi_python3
|
d894117938deee8ba676d9872bed5eae388310d1
|
[
"Apache-2.0"
] | 5
|
2016-07-11T16:43:43.000Z
|
2022-03-03T10:40:54.000Z
|
server/parsing/PingResultParser.py
|
stephenmac98/cheesepi_python3
|
d894117938deee8ba676d9872bed5eae388310d1
|
[
"Apache-2.0"
] | 6
|
2016-07-11T16:40:20.000Z
|
2020-09-06T13:16:09.000Z
|
from __future__ import unicode_literals, absolute_import, print_function
import logging
from .ResultParser import ResultParser
from cheesepi.server.storage.mongo import MongoDAO
from cheesepi.server.storage.models.result import Result
class PingResultParser(ResultParser):
log = logging.getLogger("cheesepi.server.parsing.PingResultParser")
# Takes an object parsed from json
def __init__(self, obj):
self._parsed = False
self._input_obj = obj
self._result_set = []
self._peer_id = None
def __enter__(self):
self.parse()
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def parse(self):
"""
Here we should try to parse all the data we're interested in,
and handle any resulting errors in a sane way. Should ALWAYS
return an output that can be directly inserted into the database.
"""
inp = self._input_obj
result_objects = []
columns = inp[0]['series'][0]['columns']
#from pprint import pformat
#self.log.info("\n{}".format(pformat(columns)))
entries = [entry for entry in inp[0]['series'][0]['values']]
for entry in entries:
# TODO THIS IS NOT IMPLEMENTED ON CLIENT SIDE, MIGHT CHANGE
			# Read into a local first: the old chained assignment
			# (peer_id = self._peer_id = ...) made the mismatch check
			# below unreachable.
			peer_id = entry[columns.index('peer_id')]
if self._peer_id is None:
self._peer_id = peer_id
elif self._peer_id != peer_id:
raise Exception(
"Found inconsistent peer_id: {}, expected: {}".format(
peer_id, self._peer_id)
)
# NOTE this is done because the sequence is stored as a string
# representation of a list, should be changed in the future so that
# it's a list from the start
import ast
delay_sequence = ast.literal_eval(entry[columns.index('delays')])
landmark = entry[columns.index('landmark')]
target_id = entry[columns.index('target_id')]
target = {}
if landmark is None and target_id is not None:
target['type'] = 'peer'
target['ip'] = entry[columns.index('destination_address')]
target['uuid'] = entry[columns.index('target_id')]
target['port'] = '80' # TODO not in data
elif landmark is not None:
target['type'] = 'landmark'
target['ip'] = entry[columns.index('destination_address')]
target['domain'] = entry[columns.index('landmark')]
target['port'] = '80' # TODO not in data
db_entry = {
'task_name':'ping',
'start_time':entry[columns.index('start_time')],
'end_time':entry[columns.index('end_time')],
'target': target,
'value': {
# This is where the actual results go
'delay_sequence':delay_sequence,
'probe_count':entry[columns.index('ping_count')],
'packet_loss':entry[columns.index('packet_loss')],
'packet_size':entry[columns.index('packet_size')],
'max_rtt':entry[columns.index('maximum_RTT')],
'min_rtt':entry[columns.index('minimum_RTT')],
'avg_rtt':entry[columns.index('average_RTT')],
'stddev_rtt':entry[columns.index('stddev_RTT')],
},
}
r = Result.fromDict(db_entry)
result_objects.append(r)
#from pprint import pformat
#self.log.info(pformat(r.toDict()))
self._result_objects = result_objects
self._parsed = True
return result_objects
def get_peer_id(self):
return self._peer_id
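# A sketch of how the parser is driven (the input shape is inferred from
# parse() above; column names and values are illustrative):
#
#   inp = [{'series': [{'columns': ['peer_id', 'delays', ...],
#                       'values': [[...one row per ping measurement...]]}]}]
#   with PingResultParser(inp) as parser:
#       results = parser._result_objects  # list of Result objects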
| 30.615385
| 72
| 0.695038
|
b535344332cf925773706f8bf260499b3235118a
| 7,698
|
py
|
Python
|
smqtk_detection/utils/bbox.py
|
schencej/SMQTK-Detection
|
7a5ccb6e3d772d589a39b02cbeaf01124c600b78
|
[
"BSD-3-Clause"
] | 3
|
2021-08-06T13:01:08.000Z
|
2021-12-10T20:17:30.000Z
|
smqtk_detection/utils/bbox.py
|
schencej/SMQTK-Detection
|
7a5ccb6e3d772d589a39b02cbeaf01124c600b78
|
[
"BSD-3-Clause"
] | 17
|
2021-08-19T14:18:52.000Z
|
2022-03-25T20:53:25.000Z
|
smqtk_detection/utils/bbox.py
|
schencej/SMQTK-Detection
|
7a5ccb6e3d772d589a39b02cbeaf01124c600b78
|
[
"BSD-3-Clause"
] | 4
|
2021-08-19T13:49:29.000Z
|
2022-02-02T17:51:13.000Z
|
import functools
import operator
import numpy
from smqtk_core import Plugfigurable
from typing import Union, Optional, Sequence
class AxisAlignedBoundingBox (Plugfigurable):
"""
Representation of an axis-aligned rectangular region within N-dimensional
euclidean space.
AxisAlignedBoundingBox currently does not support the concept of an
"empty" region since it requires a min and max vertex to be set. We
currently do not consider a zero-area region "empty" (represents a spatial
point).
The class attributes ``EQUALITY_ATOL`` and ``EQUALITY_RTOL`` are used as
the tolerance attributes when comparing equality between two
AxisAlignedBoundingBox instances. These may be changed on the class level
to change the desired tolerance used at runtime. This cannot be changed
on specific instances due to the use of python slots.
Number of vertices of a hyper-cube: ``2**D``, where ``D`` is the number of
dimensions.
"""
__slots__ = 'min_vertex', 'max_vertex'
# Same tolerance values as default on ``numpy.allclose``. These may be
# changed on the class level to affect tolerance used when comparing
# AxisAlignedBoundingBoxes at runtime.
EQUALITY_ATOL = 1.e-8
EQUALITY_RTOL = 1.e-5
def __init__(self, min_vertex: Sequence[Union[int, float]], max_vertex: Sequence[Union[int, float]]) -> None:
"""
Create a new AxisAlignedBoundingBox from the given minimum and maximum
euclidean-space vertex.
:param collections.abc.Sequence[int|float] min_vertex:
Minimum bounding vertex of the (hyper) rectangle.
:param collections.abc.Sequence[int|float] max_vertex:
Maximum bounding vertex of the (hyper) rectangle.
:raises ValueError:
            When the input vertices are not both 1-dimensional in shape,
            do not match in dimensionality, or if ``max_vertex`` is not
            greater-than-or-equal to ``min_vertex``.
"""
# TODO: Default ``max_vertex`` to ``None`` to ease the creation of
# "points".
self._set_vertices(min_vertex, max_vertex)
if not (self.min_vertex.ndim == self.max_vertex.ndim == 1):
raise ValueError("One or both vertices provided had more than "
"one array dimension (min_vertex.ndim == {}, "
"max_vertex.ndim == {})."
.format(self.min_vertex.ndim,
self.max_vertex.ndim))
if self.min_vertex.size != self.max_vertex.size:
raise ValueError("Both vertices provided are not the same "
"dimensionality (min_vertex = {}, "
"max_vertex = {})."
.format(self.min_vertex.size,
self.max_vertex.size))
if not (self.max_vertex >= self.min_vertex).all():
raise ValueError("The maximum vertex was not strictly >= the "
"minimum vertex.")
def __str__(self) -> str:
return "<{} [{}, {}]>"\
.format(self.__class__.__name__, self.min_vertex, self.max_vertex)
def __repr__(self) -> str:
return "<{}.{} min_vertex={} max_vertex={}>"\
.format(self.__class__.__module__, self.__class__.__name__,
self.min_vertex, self.max_vertex)
def __hash__(self) -> int:
return hash((tuple(self.min_vertex), tuple(self.max_vertex)))
def __eq__(self, other: object) -> bool:
"""
Two bounding boxes are equal if the describe the same spatial area.
:param AxisAlignedBoundingBox other:
Other bounding box instance to test equality against.
:return: If this and `other` describe the same spatial area.
:rtype: bool
"""
if not isinstance(other, AxisAlignedBoundingBox):
return False
# Should tolerances be parameterized in constructor?
return (numpy.allclose(self.min_vertex, other.min_vertex,
rtol=self.EQUALITY_RTOL,
atol=self.EQUALITY_ATOL) and
numpy.allclose(self.max_vertex, other.max_vertex,
rtol=self.EQUALITY_RTOL,
atol=self.EQUALITY_ATOL))
def __ne__(self, other: object) -> bool:
return not (self == other)
def __getstate__(self) -> tuple:
return (
self.min_vertex.tolist(),
self.max_vertex.tolist(),
)
def __setstate__(self, state: Union[list, tuple]) -> None:
self._set_vertices(*state)
def _set_vertices(self, min_v: Sequence[Union[int, float]], max_v: Sequence[Union[int, float]]) -> None:
self.min_vertex = numpy.asarray(min_v)
self.min_vertex.flags.writeable = False
self.max_vertex = numpy.asarray(max_v)
self.max_vertex.flags.writeable = False
def get_config(self) -> dict:
return {
'min_vertex': self.min_vertex.tolist(),
'max_vertex': self.max_vertex.tolist(),
}
    def intersection(self, other: "AxisAlignedBoundingBox") -> Optional["AxisAlignedBoundingBox"]:
"""
Get the AxisAlignedBoundingBox that represents the intersection between
this box and the given ``other`` box.
:param AxisAlignedBoundingBox other:
An other box to get the intersection of. If there is no
intersection ``None`` is returned.
:return: An AxisAlignedBoundingBox instance if there is an intersection
or None if there is no intersection.
:rtype: AxisAlignedBoundingBox | None
"""
inter_min_v = numpy.maximum(self.min_vertex, other.min_vertex)
inter_max_v = numpy.minimum(self.max_vertex, other.max_vertex)
# Constructor allows zero area boxes, but that is not a valid
# intersection. Returning None if the minimum of the difference between
# the intersection's maximum and minimum vertices is <= 0.
if (inter_max_v - inter_min_v).min() <= 0:
return None
return AxisAlignedBoundingBox(inter_min_v.tolist(), inter_max_v.tolist())
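    # Example (a sketch): AxisAlignedBoundingBox([0, 0], [2, 2]).intersection(
    # AxisAlignedBoundingBox([1, 1], [3, 3])) yields the box [[1, 1], [2, 2]];
    # disjoint (or merely edge-touching) boxes yield None.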
@property
def ndim(self) -> int:
"""
:return: The number of dimensions this bounding volume covers.
:rtype: int
"""
# we know because of assert in constructor that both min and max vertex
# must match in coordinate dimensionality, so we can draw this from
# either.
return self.min_vertex.size
@property
def deltas(self) -> numpy.ndarray:
"""
Get the lengths of this bounding box's edges along its dimensions.
        I.e. if this bounding box is 2-dimensional, this returns the [width,
height] of the bounding box.
:return: Array of dimension deltas.
:rtype: numpy.ndarray[int|float]
"""
return self.max_vertex - self.min_vertex
@property
def dtype(self) -> numpy.dtype:
"""
:return: Most representative data type required to fully express this
bounding box.
:rtype: numpy.dtype
"""
return self.deltas.dtype
@property
def hypervolume(self) -> float:
"""
:return: The volume of this [hyper-dimensional] spatial bounding box.
Unit of volume depends on the dimensionality of the vertices
provided.
:rtype: float
"""
return functools.reduce(operator.mul,
self.max_vertex - self.min_vertex)
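# A minimal demo of the class (guarded so importing stays side-effect free):
if __name__ == "__main__":
    box = AxisAlignedBoundingBox([0, 0, 0], [2, 3, 4])
    print(box.ndim)         # 3
    print(box.deltas)       # [2 3 4]
    print(box.hypervolume)  # 24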
| 39.27551
| 113
| 0.616134
|
56793f8ccaaf73dd4f4bceecb8347e7ab2153c64
| 255
|
py
|
Python
|
jp.atcoder/abc104/abc104_b/27959017.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | 1
|
2022-02-09T03:06:25.000Z
|
2022-02-09T03:06:25.000Z
|
jp.atcoder/abc104/abc104_b/27959017.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | 1
|
2022-02-05T22:53:18.000Z
|
2022-02-09T01:29:30.000Z
|
jp.atcoder/abc104/abc104_b/27959017.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | null | null | null |
import typing
def main() -> typing.NoReturn:
    s = input()
    t = s.lower()
    ok = True
    # exactly two characters are uppercase (they must be the 'A' and the 'C')
    ok &= sum(s[i] != t[i] for i in range(len(s))) == 2
    # the string starts with 'A'
    ok &= s[0] == "A"
    # an uppercase 'C' occurs between the third and second-to-last characters
    ok &= "C" in s[2:-1]
    print("AC" if ok else "WA")
main()
| 15.9375
| 56
| 0.435294
|
414ba43c341bdbe20964e24e2203da0434d0b584
| 1,267
|
py
|
Python
|
api/app01/migrations/0001_initial.py
|
borko81/django_rest_api
|
bd98471eb0705e203bda6841143da0a65026e2b4
|
[
"MIT"
] | null | null | null |
api/app01/migrations/0001_initial.py
|
borko81/django_rest_api
|
bd98471eb0705e203bda6841143da0a65026e2b4
|
[
"MIT"
] | null | null | null |
api/app01/migrations/0001_initial.py
|
borko81/django_rest_api
|
bd98471eb0705e203bda6841143da0a65026e2b4
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2 on 2021-09-18 18:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='University',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
options={
                'verbose_name': 'University',
'verbose_name_plural': 'Universities',
},
),
migrations.CreateModel(
name='Student',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=50)),
('last_name', models.CharField(max_length=50)),
('university', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app01.university')),
],
options={
                'verbose_name': 'Student',
'verbose_name_plural': 'Students',
},
),
]
| 31.675
| 118
| 0.550908
|
165aefa21ec05bf6dc29dbaea8bd9509a85a4d01
| 386
|
py
|
Python
|
tests/system_tests/test_verifiers/test_babsb.py
|
samysweb/dnnv
|
58fb95b7300914d9da28eed86c39eca473b1aaef
|
[
"MIT"
] | 5
|
2022-01-28T20:30:34.000Z
|
2022-03-17T09:26:52.000Z
|
tests/system_tests/test_verifiers/test_babsb.py
|
samysweb/dnnv
|
58fb95b7300914d9da28eed86c39eca473b1aaef
|
[
"MIT"
] | 9
|
2022-01-27T03:50:28.000Z
|
2022-02-08T18:42:17.000Z
|
tests/system_tests/test_verifiers/test_babsb.py
|
samysweb/dnnv
|
58fb95b7300914d9da28eed86c39eca473b1aaef
|
[
"MIT"
] | 2
|
2022-02-03T17:32:43.000Z
|
2022-03-24T16:38:49.000Z
|
import unittest
from dnnv.verifiers.babsb import BaBSB
from system_tests.test_verifiers.utils import VerifierTests
@unittest.skipIf(not BaBSB.is_installed(), "BaBSB is not installed")
class BaBSBVerifierTests(VerifierTests, unittest.TestCase):
def initialize(self):
self.verifier = BaBSB
self.is_complete = False
if __name__ == "__main__":
unittest.main()
| 22.705882
| 68
| 0.751295
|
5e07788c09c2dbd75ffa3a15ab0032687c13619b
| 444
|
py
|
Python
|
hour.py
|
PhoenixNil/pythoncode
|
bbb49884588ef9671c96961235be450158ea9496
|
[
"bzip2-1.0.6"
] | null | null | null |
hour.py
|
PhoenixNil/pythoncode
|
bbb49884588ef9671c96961235be450158ea9496
|
[
"bzip2-1.0.6"
] | null | null | null |
hour.py
|
PhoenixNil/pythoncode
|
bbb49884588ef9671c96961235be450158ea9496
|
[
"bzip2-1.0.6"
] | null | null | null |
name = input("Enter file:")
if len(name) < 1 : name = "mbox-short.txt"
handle = open(name)
distribution = dict()
hours = list()
for line in handle :
    if not line.startswith('From ') : continue
    words = line.split()
    time = words[5]      # e.g. '09:14:16'
    hour = time.split(':')
    hours.append(hour[0])
# Tally how many 'From ' lines fall in each hour of the day.
for number in hours :
    distribution[number] = distribution.get(number, 0) + 1
for k, v in sorted(distribution.items()) :
    print(k, v)
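# With the Py4E sample mbox-short.txt, this prints one "hour count" pair per
# line, sorted by hour (e.g. the first line is "04 3").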
| 27.75
| 56
| 0.63964
|
41d693cb6059ee4c8f85127bbb61e14ea1abd315
| 8,056
|
py
|
Python
|
AutomatedTesting/Gem/PythonTests/automatedtesting_shared/base.py
|
TheKeaver/o3de
|
3791149c6bb18d007ee375f592bdd031871f793d
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-08-11T02:20:46.000Z
|
2021-08-11T02:20:46.000Z
|
AutomatedTesting/Gem/PythonTests/automatedtesting_shared/base.py
|
RoddieKieley/o3de
|
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
AutomatedTesting/Gem/PythonTests/automatedtesting_shared/base.py
|
RoddieKieley/o3de
|
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import os
import logging
import subprocess
import pytest
import time
import ly_test_tools.environment.file_system as file_system
import ly_test_tools.environment.process_utils as process_utils
import ly_test_tools.environment.waiter as waiter
from ly_test_tools.o3de.asset_processor import AssetProcessor
from ly_test_tools.launchers.exceptions import WaitTimeoutError
from ly_test_tools.log.log_monitor import LogMonitor, LogMonitorException
class TestRunError():
def __init__(self, title, content):
self.title = title
self.content = content
class TestAutomationBase:
MAX_TIMEOUT = 180 # 3 minutes max for a test to run
WAIT_FOR_CRASH_LOG = 20 # Seconds for waiting for a crash log
TEST_FAIL_RETCODE = 0xF # Return code for test failure
test_times = {}
asset_processor = None
def setup_class(cls):
cls.test_times = {}
cls.editor_times = {}
cls.asset_processor = None
def teardown_class(cls):
logger = logging.getLogger(__name__)
# Report times
time_info_str = "Individual test times (Full test time, Editor test time):\n"
for testcase_name, t in cls.test_times.items():
editor_t = cls.editor_times[testcase_name]
time_info_str += f"{testcase_name}: (Full:{t} sec, Editor:{editor_t} sec)\n"
logger.info(time_info_str)
# Kill all ly processes
cls.asset_processor.teardown()
cls._kill_ly_processes()
def _run_test(self, request, workspace, editor, testcase_module, extra_cmdline_args=[], batch_mode=True,
autotest_mode=True, use_null_renderer=True):
test_starttime = time.time()
self.logger = logging.getLogger(__name__)
errors = []
testcase_name = os.path.basename(testcase_module.__file__)
#########
# Setup #
if self.asset_processor is None:
self.__class__.asset_processor = AssetProcessor(workspace)
self.asset_processor.backup_ap_settings()
self._kill_ly_processes(include_asset_processor=False)
self.asset_processor.start()
self.asset_processor.wait_for_idle()
def teardown():
if os.path.exists(workspace.paths.editor_log()):
workspace.artifact_manager.save_artifact(workspace.paths.editor_log())
try:
file_system.restore_backup(workspace.paths.editor_log(), workspace.paths.project_log())
except FileNotFoundError as e:
self.logger.debug(f"File restoration failed, editor log could not be found.\nError: {e}")
editor.kill()
request.addfinalizer(teardown)
if os.path.exists(workspace.paths.editor_log()):
self.logger.debug("Creating backup for existing editor log before test run.")
file_system.create_backup(workspace.paths.editor_log(), workspace.paths.project_log())
############
# Run test #
editor_starttime = time.time()
self.logger.debug("Running automated test")
testcase_module_filepath = self._get_testcase_module_filepath(testcase_module)
pycmd = ["--runpythontest", testcase_module_filepath, f"-pythontestcase={request.node.name}"]
if use_null_renderer:
pycmd += ["-rhi=null"]
if batch_mode:
pycmd += ["-BatchMode"]
if autotest_mode:
pycmd += ["-autotest_mode"]
pycmd += extra_cmdline_args
editor.args.extend(pycmd) # args are added to the WinLauncher start command
editor.start(backupFiles = False, launch_ap = False)
try:
editor.wait(TestAutomationBase.MAX_TIMEOUT)
except WaitTimeoutError:
errors.append(TestRunError("TIMEOUT", f"Editor did not close after {TestAutomationBase.MAX_TIMEOUT} seconds, verify the test is ending and the application didn't freeze"))
editor.kill()
output = editor.get_output()
self.logger.debug("Test output:\n" + output)
return_code = editor.get_returncode()
self.editor_times[testcase_name] = time.time() - editor_starttime
###################
# Validate result #
if return_code != 0:
if output:
error_str = "Test failed, output:\n" + output.replace("\n", "\n ")
else:
error_str = "Test failed, no output available..\n"
errors.append(TestRunError("FAILED TEST", error_str))
if return_code and return_code != TestAutomationBase.TEST_FAIL_RETCODE: # Crashed
crash_info = "-- No crash log available --"
crash_log = os.path.join(workspace.paths.project_log(), 'error.log')
try:
waiter.wait_for(lambda: os.path.exists(crash_log), timeout=TestAutomationBase.WAIT_FOR_CRASH_LOG)
except AssertionError:
pass
try:
with open(crash_log) as f:
crash_info = f.read()
except Exception as ex:
crash_info += f"\n{str(ex)}"
return_code_str = f"0x{return_code:0X}" if isinstance(return_code, int) else "None"
error_str = f"Editor.exe crashed, return code: {return_code_str}\n\nCrash log:\n{crash_info}"
errors.append(TestRunError("CRASH", error_str))
self.test_times[testcase_name] = time.time() - test_starttime
###################
# Error reporting #
if errors:
error_str = "Error list:\n"
longest_title = max([len(e.title) for e in errors])
longest_title += (longest_title % 2) # make it even spaces
            longest_title = max(30, longest_title) # at least 30
header_decoration = "-".center(longest_title, "-") + "\n"
for e in errors:
error_str += header_decoration
error_str += f" {e.title} ".center(longest_title, "-") + "\n"
error_str += header_decoration
for line in e.content.split("\n"):
error_str += f" {line}\n"
error_str += header_decoration
error_str += "Editor log:\n"
try:
with open(workspace.paths.editor_log()) as f:
log_basename = os.path.basename(workspace.paths.editor_log())
for line in f.readlines():
error_str += f"|{log_basename}| {line}"
except Exception as ex:
error_str += f"-- No log available ({ex})--"
pytest.fail(error_str)
@staticmethod
def _kill_ly_processes(include_asset_processor=True):
LY_PROCESSES = [
'Editor', 'Profiler', 'RemoteConsole',
]
AP_PROCESSES = [
'AssetProcessor', 'AssetProcessorBatch', 'AssetBuilder', 'CrySCompileServer',
'rc' # Resource Compiler
]
if include_asset_processor:
process_utils.kill_processes_named(LY_PROCESSES+AP_PROCESSES, ignore_extensions=True)
else:
process_utils.kill_processes_named(LY_PROCESSES, ignore_extensions=True)
@staticmethod
def _get_testcase_module_filepath(testcase_module):
# type: (Module) -> str
"""
return the full path of the test module
:param testcase_module: The testcase python module being tested
:return str: The full path to the testcase module
"""
return os.path.splitext(testcase_module.__file__)[0] + ".py"
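# A sketch of a concrete suite built on this base (names are illustrative;
# request, workspace and editor are pytest fixtures supplied by ly_test_tools,
# not defined in this module):
#
#   class TestMyFeature(TestAutomationBase):
#       def test_MyFeature_Works(self, request, workspace, editor):
#           from . import MyFeature_tests as test_module
#           self._run_test(request, workspace, editor, test_module)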
| 41.102041
| 183
| 0.602036
|
acc1ede494453a50d9014c18c2b27ca456960b39
| 651
|
py
|
Python
|
tests/test_cell_edges.py
|
kbsezginel/angstrom
|
793fd05b9bf27cab50d5c292fe63f685ea767d6d
|
[
"BSD-3-Clause"
] | 15
|
2018-04-26T18:34:02.000Z
|
2022-01-25T09:37:35.000Z
|
tests/test_cell_edges.py
|
kbsezginel/angstrom
|
793fd05b9bf27cab50d5c292fe63f685ea767d6d
|
[
"BSD-3-Clause"
] | 13
|
2018-07-31T17:26:10.000Z
|
2020-05-15T05:07:05.000Z
|
tests/test_cell_edges.py
|
kbsezginel/angstrom
|
793fd05b9bf27cab50d5c292fe63f685ea767d6d
|
[
"BSD-3-Clause"
] | 4
|
2020-01-15T08:31:54.000Z
|
2022-03-03T16:43:15.000Z
|
"""
--- Ångström ---
Tests cell edges calculation.
"""
from angstrom.molecule import Cell
import numpy as np
def test_cubic_unit_cell_edges_calculation():
a, b, c = 5, 5, 5
expected_edges = [[[a, 0, 0], [0, 0, 0]], [[0, b, 0], [0, 0, 0]], [[0, 0, c], [0, 0, 0]],
[[a, b, 0], [a, 0, 0]], [[a, 0, c], [a, 0, 0]], [[a, b, 0], [0, b, 0]],
[[0, b, c], [0, b, 0]], [[0, b, c], [0, 0, c]], [[a, 0, c], [0, 0, c]],
[[a, b, c], [a, b, 0]], [[a, b, c], [0, b, c]], [[a, b, c], [a, 0, c]]]
cell = Cell([a, b, c, 90.0, 90.0, 90.0])
assert np.allclose(cell.edges, expected_edges)
| 38.294118
| 93
| 0.414747
|
c9d5542c6aefb01533d1b46309c76e04a50cc3f8
| 184
|
py
|
Python
|
pyobs/utils/skyflats/priorities/__init__.py
|
pyobs/pyobs-core
|
e3401e63eb31587c2bc535f7346b7e4ef69d64ab
|
[
"MIT"
] | 4
|
2020-02-14T10:50:03.000Z
|
2022-03-25T04:15:06.000Z
|
pyobs/utils/skyflats/priorities/__init__.py
|
pyobs/pyobs-core
|
e3401e63eb31587c2bc535f7346b7e4ef69d64ab
|
[
"MIT"
] | 60
|
2020-09-14T09:10:20.000Z
|
2022-03-25T17:51:42.000Z
|
pyobs/utils/skyflats/priorities/__init__.py
|
pyobs/pyobs-core
|
e3401e63eb31587c2bc535f7346b7e4ef69d64ab
|
[
"MIT"
] | 2
|
2020-10-14T09:34:57.000Z
|
2021-04-27T09:35:57.000Z
|
"""
Priority schemes for ordering sky flat exposures: a common base class plus
archive-based and constant implementations.
"""
__title__ = 'Sky flat priorities'
from .base import SkyflatPriorities
from .archive import ArchiveSkyflatPriorities
from .const import ConstSkyflatPriorities
| 20.444444
| 45
| 0.804348
|
e7a5a4e86e5e696c8ebfd2d18eaa839678d6d23d
| 183,498
|
py
|
Python
|
session_py/iginx/thrift/rpc/ttypes.py
|
thulab/IginX
|
8b80e089b4e419c46496a8ab1d1217aff6813c8d
|
[
"Apache-2.0"
] | 16
|
2021-03-08T07:03:57.000Z
|
2022-03-24T21:31:44.000Z
|
session_py/iginx/thrift/rpc/ttypes.py
|
thulab/IginX
|
8b80e089b4e419c46496a8ab1d1217aff6813c8d
|
[
"Apache-2.0"
] | 38
|
2021-03-19T06:10:51.000Z
|
2022-01-21T03:34:46.000Z
|
session_py/iginx/thrift/rpc/ttypes.py
|
thulab/IginX
|
8b80e089b4e419c46496a8ab1d1217aff6813c8d
|
[
"Apache-2.0"
] | 13
|
2021-03-08T07:09:24.000Z
|
2022-02-22T12:28:17.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from thrift.transport import TTransport
all_structs = []
class DataType(object):
BOOLEAN = 0
INTEGER = 1
LONG = 2
FLOAT = 3
DOUBLE = 4
BINARY = 5
_VALUES_TO_NAMES = {
0: "BOOLEAN",
1: "INTEGER",
2: "LONG",
3: "FLOAT",
4: "DOUBLE",
5: "BINARY",
}
_NAMES_TO_VALUES = {
"BOOLEAN": 0,
"INTEGER": 1,
"LONG": 2,
"FLOAT": 3,
"DOUBLE": 4,
"BINARY": 5,
}
class AggregateType(object):
MAX = 0
MIN = 1
SUM = 2
COUNT = 3
AVG = 4
FIRST_VALUE = 5
LAST_VALUE = 6
FIRST = 7
LAST = 8
_VALUES_TO_NAMES = {
0: "MAX",
1: "MIN",
2: "SUM",
3: "COUNT",
4: "AVG",
5: "FIRST_VALUE",
6: "LAST_VALUE",
7: "FIRST",
8: "LAST",
}
_NAMES_TO_VALUES = {
"MAX": 0,
"MIN": 1,
"SUM": 2,
"COUNT": 3,
"AVG": 4,
"FIRST_VALUE": 5,
"LAST_VALUE": 6,
"FIRST": 7,
"LAST": 8,
}
class SqlType(object):
Unknown = 0
Insert = 1
Delete = 2
SimpleQuery = 3
AggregateQuery = 4
DownsampleQuery = 5
ValueFilterQuery = 6
NotSupportQuery = 7
GetReplicaNum = 8
AddStorageEngines = 9
CountPoints = 10
ClearData = 11
ShowTimeSeries = 12
ShowClusterInfo = 13
_VALUES_TO_NAMES = {
0: "Unknown",
1: "Insert",
2: "Delete",
3: "SimpleQuery",
4: "AggregateQuery",
5: "DownsampleQuery",
6: "ValueFilterQuery",
7: "NotSupportQuery",
8: "GetReplicaNum",
9: "AddStorageEngines",
10: "CountPoints",
11: "ClearData",
12: "ShowTimeSeries",
13: "ShowClusterInfo",
}
_NAMES_TO_VALUES = {
"Unknown": 0,
"Insert": 1,
"Delete": 2,
"SimpleQuery": 3,
"AggregateQuery": 4,
"DownsampleQuery": 5,
"ValueFilterQuery": 6,
"NotSupportQuery": 7,
"GetReplicaNum": 8,
"AddStorageEngines": 9,
"CountPoints": 10,
"ClearData": 11,
"ShowTimeSeries": 12,
"ShowClusterInfo": 13,
}
class AuthType(object):
Read = 0
Write = 1
Admin = 2
Cluster = 3
_VALUES_TO_NAMES = {
0: "Read",
1: "Write",
2: "Admin",
3: "Cluster",
}
_NAMES_TO_VALUES = {
"Read": 0,
"Write": 1,
"Admin": 2,
"Cluster": 3,
}
class UserType(object):
Administrator = 0
OrdinaryUser = 1
_VALUES_TO_NAMES = {
0: "Administrator",
1: "OrdinaryUser",
}
_NAMES_TO_VALUES = {
"Administrator": 0,
"OrdinaryUser": 1,
}
class Status(object):
"""
Attributes:
- code
- message
- subStatus
"""
def __init__(self, code=None, message=None, subStatus=None,):
self.code = code
self.message = message
self.subStatus = subStatus
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.code = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.message = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.subStatus = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in range(_size0):
_elem5 = Status()
_elem5.read(iprot)
self.subStatus.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Status')
if self.code is not None:
oprot.writeFieldBegin('code', TType.I32, 1)
oprot.writeI32(self.code)
oprot.writeFieldEnd()
if self.message is not None:
oprot.writeFieldBegin('message', TType.STRING, 2)
oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
oprot.writeFieldEnd()
if self.subStatus is not None:
oprot.writeFieldBegin('subStatus', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.subStatus))
for iter6 in self.subStatus:
iter6.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.code is None:
raise TProtocolException(message='Required field code is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
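# A short sketch (illustrative values): 'code' is the only required field, as
# enforced by validate() above.
#   ok = Status(code=0, message="success")
#   ok.validate()        # passes
#   Status().validate()  # raises TProtocolException: code unset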
class OpenSessionReq(object):
"""
Attributes:
- username
- password
"""
def __init__(self, username=None, password=None,):
self.username = username
self.password = password
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('OpenSessionReq')
if self.username is not None:
oprot.writeFieldBegin('username', TType.STRING, 1)
oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
oprot.writeFieldEnd()
if self.password is not None:
oprot.writeFieldBegin('password', TType.STRING, 2)
oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class OpenSessionResp(object):
"""
Attributes:
- status
- sessionId
"""
def __init__(self, status=None, sessionId=None,):
self.status = status
self.sessionId = sessionId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('OpenSessionResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 2)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
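# Open-session round trip, sketched for orientation. `client` stands for the
# Thrift service client generated alongside these types; the method name
# `openSession` is an assumption inferred from the struct names:
#
#     req = OpenSessionReq(username='root', password='root')
#     resp = client.openSession(req)
#     resp.validate()              # only `status` is required on the response
#     session_id = resp.sessionId  # i64 handle carried by all later requests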
class CloseSessionReq(object):
"""
Attributes:
- sessionId
"""
def __init__(self, sessionId=None,):
self.sessionId = sessionId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('CloseSessionReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class DeleteColumnsReq(object):
"""
Attributes:
- sessionId
- paths
"""
def __init__(self, sessionId=None, paths=None,):
self.sessionId = sessionId
self.paths = paths
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype10, _size7) = iprot.readListBegin()
for _i11 in range(_size7):
_elem12 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem12)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('DeleteColumnsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter13 in self.paths:
oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
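# Sketch: dropping whole time series by path. The dotted path syntax and the
# service method name are assumptions based on typical usage of this interface:
#
#     req = DeleteColumnsReq(sessionId=session_id, paths=['root.sg1.d1.s1'])
#     req.validate()             # both sessionId and paths are required
#     client.deleteColumns(req)  # hypothetical service method name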
class InsertColumnRecordsReq(object):
"""
Attributes:
- sessionId
- paths
- timestamps
- valuesList
- bitmapList
- dataTypeList
- attributesList
"""
def __init__(self, sessionId=None, paths=None, timestamps=None, valuesList=None, bitmapList=None, dataTypeList=None, attributesList=None,):
self.sessionId = sessionId
self.paths = paths
self.timestamps = timestamps
self.valuesList = valuesList
self.bitmapList = bitmapList
self.dataTypeList = dataTypeList
self.attributesList = attributesList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype17, _size14) = iprot.readListBegin()
for _i18 in range(_size14):
_elem19 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem19)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.valuesList = []
(_etype23, _size20) = iprot.readListBegin()
for _i24 in range(_size20):
_elem25 = iprot.readBinary()
self.valuesList.append(_elem25)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.bitmapList = []
(_etype29, _size26) = iprot.readListBegin()
for _i30 in range(_size26):
_elem31 = iprot.readBinary()
self.bitmapList.append(_elem31)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype35, _size32) = iprot.readListBegin()
for _i36 in range(_size32):
_elem37 = iprot.readI32()
self.dataTypeList.append(_elem37)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.LIST:
self.attributesList = []
(_etype41, _size38) = iprot.readListBegin()
for _i42 in range(_size38):
_elem43 = {}
(_ktype45, _vtype46, _size44) = iprot.readMapBegin()
for _i48 in range(_size44):
_key49 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val50 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_elem43[_key49] = _val50
iprot.readMapEnd()
self.attributesList.append(_elem43)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('InsertColumnRecordsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter51 in self.paths:
oprot.writeString(iter51.encode('utf-8') if sys.version_info[0] == 2 else iter51)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 3)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.valuesList))
for iter52 in self.valuesList:
oprot.writeBinary(iter52)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.bitmapList is not None:
oprot.writeFieldBegin('bitmapList', TType.LIST, 5)
oprot.writeListBegin(TType.STRING, len(self.bitmapList))
for iter53 in self.bitmapList:
oprot.writeBinary(iter53)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 6)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter54 in self.dataTypeList:
oprot.writeI32(iter54)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.attributesList is not None:
oprot.writeFieldBegin('attributesList', TType.LIST, 7)
oprot.writeListBegin(TType.MAP, len(self.attributesList))
for iter55 in self.attributesList:
oprot.writeMapBegin(TType.STRING, TType.STRING, len(iter55))
for kiter56, viter57 in iter55.items():
oprot.writeString(kiter56.encode('utf-8') if sys.version_info[0] == 2 else kiter56)
oprot.writeString(viter57.encode('utf-8') if sys.version_info[0] == 2 else viter57)
oprot.writeMapEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.timestamps is None:
raise TProtocolException(message='Required field timestamps is unset!')
if self.valuesList is None:
raise TProtocolException(message='Required field valuesList is unset!')
if self.bitmapList is None:
raise TProtocolException(message='Required field bitmapList is unset!')
if self.dataTypeList is None:
raise TProtocolException(message='Required field dataTypeList is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
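# The binary fields above are opaque to Thrift; how they are packed must match
# the peer. A plausible packing sketch, assuming big-endian i64 timestamps,
# big-endian i32 values, and an MSB-first null bitmap (all three layout
# choices are assumptions, not guarantees from this module):
#
#     import struct
#
#     timestamps = struct.pack('>3q', 1, 2, 3)     # three i64 timestamps
#     values = struct.pack('>3i', 10, 20, 30)      # one column of i32 values
#     bitmap = bytes([0b11100000])                 # slots 0-2 hold values
#     req = InsertColumnRecordsReq(
#         sessionId=session_id,
#         paths=['root.sg1.d1.s1'],
#         timestamps=timestamps,
#         valuesList=[values],
#         bitmapList=[bitmap],
#         dataTypeList=[2],      # placeholder code; real values come from an enum
#         attributesList=None,   # optional
#     )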
class InsertNonAlignedColumnRecordsReq(object):
"""
Attributes:
- sessionId
- paths
- timestamps
- valuesList
- bitmapList
- dataTypeList
- attributesList
"""
def __init__(self, sessionId=None, paths=None, timestamps=None, valuesList=None, bitmapList=None, dataTypeList=None, attributesList=None,):
self.sessionId = sessionId
self.paths = paths
self.timestamps = timestamps
self.valuesList = valuesList
self.bitmapList = bitmapList
self.dataTypeList = dataTypeList
self.attributesList = attributesList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype61, _size58) = iprot.readListBegin()
for _i62 in range(_size58):
_elem63 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem63)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.valuesList = []
(_etype67, _size64) = iprot.readListBegin()
for _i68 in range(_size64):
_elem69 = iprot.readBinary()
self.valuesList.append(_elem69)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.bitmapList = []
(_etype73, _size70) = iprot.readListBegin()
for _i74 in range(_size70):
_elem75 = iprot.readBinary()
self.bitmapList.append(_elem75)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype79, _size76) = iprot.readListBegin()
for _i80 in range(_size76):
_elem81 = iprot.readI32()
self.dataTypeList.append(_elem81)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.LIST:
self.attributesList = []
(_etype85, _size82) = iprot.readListBegin()
for _i86 in range(_size82):
_elem87 = {}
(_ktype89, _vtype90, _size88) = iprot.readMapBegin()
for _i92 in range(_size88):
_key93 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val94 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_elem87[_key93] = _val94
iprot.readMapEnd()
self.attributesList.append(_elem87)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('InsertNonAlignedColumnRecordsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter95 in self.paths:
oprot.writeString(iter95.encode('utf-8') if sys.version_info[0] == 2 else iter95)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 3)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.valuesList))
for iter96 in self.valuesList:
oprot.writeBinary(iter96)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.bitmapList is not None:
oprot.writeFieldBegin('bitmapList', TType.LIST, 5)
oprot.writeListBegin(TType.STRING, len(self.bitmapList))
for iter97 in self.bitmapList:
oprot.writeBinary(iter97)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 6)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter98 in self.dataTypeList:
oprot.writeI32(iter98)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.attributesList is not None:
oprot.writeFieldBegin('attributesList', TType.LIST, 7)
oprot.writeListBegin(TType.MAP, len(self.attributesList))
for iter99 in self.attributesList:
oprot.writeMapBegin(TType.STRING, TType.STRING, len(iter99))
for kiter100, viter101 in iter99.items():
oprot.writeString(kiter100.encode('utf-8') if sys.version_info[0] == 2 else kiter100)
oprot.writeString(viter101.encode('utf-8') if sys.version_info[0] == 2 else viter101)
oprot.writeMapEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.timestamps is None:
raise TProtocolException(message='Required field timestamps is unset!')
if self.valuesList is None:
raise TProtocolException(message='Required field valuesList is unset!')
if self.bitmapList is None:
raise TProtocolException(message='Required field bitmapList is unset!')
if self.dataTypeList is None:
raise TProtocolException(message='Required field dataTypeList is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
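# InsertNonAlignedColumnRecordsReq carries exactly the same fields, field ids,
# and serialization as InsertColumnRecordsReq above; only the struct name
# differs, so the non-aligned semantics are presumably enforced server-side.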
class InsertRowRecordsReq(object):
"""
Attributes:
- sessionId
- paths
- timestamps
- valuesList
- bitmapList
- dataTypeList
- attributesList
"""
def __init__(self, sessionId=None, paths=None, timestamps=None, valuesList=None, bitmapList=None, dataTypeList=None, attributesList=None,):
self.sessionId = sessionId
self.paths = paths
self.timestamps = timestamps
self.valuesList = valuesList
self.bitmapList = bitmapList
self.dataTypeList = dataTypeList
self.attributesList = attributesList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype105, _size102) = iprot.readListBegin()
for _i106 in range(_size102):
_elem107 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem107)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.valuesList = []
(_etype111, _size108) = iprot.readListBegin()
for _i112 in range(_size108):
_elem113 = iprot.readBinary()
self.valuesList.append(_elem113)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.bitmapList = []
(_etype117, _size114) = iprot.readListBegin()
for _i118 in range(_size114):
_elem119 = iprot.readBinary()
self.bitmapList.append(_elem119)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype123, _size120) = iprot.readListBegin()
for _i124 in range(_size120):
_elem125 = iprot.readI32()
self.dataTypeList.append(_elem125)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.LIST:
self.attributesList = []
(_etype129, _size126) = iprot.readListBegin()
for _i130 in range(_size126):
_elem131 = {}
(_ktype133, _vtype134, _size132) = iprot.readMapBegin()
for _i136 in range(_size132):
_key137 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val138 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_elem131[_key137] = _val138
iprot.readMapEnd()
self.attributesList.append(_elem131)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('InsertRowRecordsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter139 in self.paths:
oprot.writeString(iter139.encode('utf-8') if sys.version_info[0] == 2 else iter139)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 3)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.valuesList))
for iter140 in self.valuesList:
oprot.writeBinary(iter140)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.bitmapList is not None:
oprot.writeFieldBegin('bitmapList', TType.LIST, 5)
oprot.writeListBegin(TType.STRING, len(self.bitmapList))
for iter141 in self.bitmapList:
oprot.writeBinary(iter141)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 6)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter142 in self.dataTypeList:
oprot.writeI32(iter142)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.attributesList is not None:
oprot.writeFieldBegin('attributesList', TType.LIST, 7)
oprot.writeListBegin(TType.MAP, len(self.attributesList))
for iter143 in self.attributesList:
oprot.writeMapBegin(TType.STRING, TType.STRING, len(iter143))
for kiter144, viter145 in iter143.items():
oprot.writeString(kiter144.encode('utf-8') if sys.version_info[0] == 2 else kiter144)
oprot.writeString(viter145.encode('utf-8') if sys.version_info[0] == 2 else viter145)
oprot.writeMapEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.timestamps is None:
raise TProtocolException(message='Required field timestamps is unset!')
if self.valuesList is None:
raise TProtocolException(message='Required field valuesList is unset!')
if self.bitmapList is None:
raise TProtocolException(message='Required field bitmapList is unset!')
if self.dataTypeList is None:
raise TProtocolException(message='Required field dataTypeList is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
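# InsertRowRecordsReq reuses the column-insert field set verbatim. The
# difference is presumably that each valuesList/bitmapList entry encodes one
# row across all paths rather than one column; the encoding is again opaque
# binary and must match the server's expectation.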
class InsertNonAlignedRowRecordsReq(object):
"""
Attributes:
- sessionId
- paths
- timestamps
- valuesList
- bitmapList
- dataTypeList
- attributesList
"""
def __init__(self, sessionId=None, paths=None, timestamps=None, valuesList=None, bitmapList=None, dataTypeList=None, attributesList=None,):
self.sessionId = sessionId
self.paths = paths
self.timestamps = timestamps
self.valuesList = valuesList
self.bitmapList = bitmapList
self.dataTypeList = dataTypeList
self.attributesList = attributesList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype149, _size146) = iprot.readListBegin()
for _i150 in range(_size146):
_elem151 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem151)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.valuesList = []
(_etype155, _size152) = iprot.readListBegin()
for _i156 in range(_size152):
_elem157 = iprot.readBinary()
self.valuesList.append(_elem157)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.bitmapList = []
(_etype161, _size158) = iprot.readListBegin()
for _i162 in range(_size158):
_elem163 = iprot.readBinary()
self.bitmapList.append(_elem163)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype167, _size164) = iprot.readListBegin()
for _i168 in range(_size164):
_elem169 = iprot.readI32()
self.dataTypeList.append(_elem169)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.LIST:
self.attributesList = []
(_etype173, _size170) = iprot.readListBegin()
for _i174 in range(_size170):
_elem175 = {}
(_ktype177, _vtype178, _size176) = iprot.readMapBegin()
for _i180 in range(_size176):
_key181 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val182 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_elem175[_key181] = _val182
iprot.readMapEnd()
self.attributesList.append(_elem175)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('InsertNonAlignedRowRecordsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter183 in self.paths:
oprot.writeString(iter183.encode('utf-8') if sys.version_info[0] == 2 else iter183)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 3)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.valuesList))
for iter184 in self.valuesList:
oprot.writeBinary(iter184)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.bitmapList is not None:
oprot.writeFieldBegin('bitmapList', TType.LIST, 5)
oprot.writeListBegin(TType.STRING, len(self.bitmapList))
for iter185 in self.bitmapList:
oprot.writeBinary(iter185)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 6)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter186 in self.dataTypeList:
oprot.writeI32(iter186)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.attributesList is not None:
oprot.writeFieldBegin('attributesList', TType.LIST, 7)
oprot.writeListBegin(TType.MAP, len(self.attributesList))
for iter187 in self.attributesList:
oprot.writeMapBegin(TType.STRING, TType.STRING, len(iter187))
for kiter188, viter189 in iter187.items():
oprot.writeString(kiter188.encode('utf-8') if sys.version_info[0] == 2 else kiter188)
oprot.writeString(viter189.encode('utf-8') if sys.version_info[0] == 2 else viter189)
oprot.writeMapEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.timestamps is None:
raise TProtocolException(message='Required field timestamps is unset!')
if self.valuesList is None:
raise TProtocolException(message='Required field valuesList is unset!')
if self.bitmapList is None:
raise TProtocolException(message='Required field bitmapList is unset!')
if self.dataTypeList is None:
raise TProtocolException(message='Required field dataTypeList is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
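# As with the column variants, InsertNonAlignedRowRecordsReq is wire-identical
# to InsertRowRecordsReq; the non-aligned distinction lives on the server.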
class DeleteDataInColumnsReq(object):
"""
Attributes:
- sessionId
- paths
- startTime
- endTime
"""
def __init__(self, sessionId=None, paths=None, startTime=None, endTime=None,):
self.sessionId = sessionId
self.paths = paths
self.startTime = startTime
self.endTime = endTime
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype193, _size190) = iprot.readListBegin()
for _i194 in range(_size190):
_elem195 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem195)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.startTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.endTime = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('DeleteDataInColumnsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter196 in self.paths:
oprot.writeString(iter196.encode('utf-8') if sys.version_info[0] == 2 else iter196)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.startTime is not None:
oprot.writeFieldBegin('startTime', TType.I64, 3)
oprot.writeI64(self.startTime)
oprot.writeFieldEnd()
if self.endTime is not None:
oprot.writeFieldBegin('endTime', TType.I64, 4)
oprot.writeI64(self.endTime)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.startTime is None:
raise TProtocolException(message='Required field startTime is unset!')
if self.endTime is None:
raise TProtocolException(message='Required field endTime is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
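# Sketch: deleting a time range instead of whole series. startTime/endTime are
# i64 and all four fields are required; whether the interval is closed or
# half-open is not visible here and is left to the server contract:
#
#     req = DeleteDataInColumnsReq(sessionId=session_id,
#                                  paths=['root.sg1.d1.s1'],
#                                  startTime=0, endTime=1000)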
class QueryDataSet(object):
"""
Attributes:
- timestamps
- valuesList
- bitmapList
"""
def __init__(self, timestamps=None, valuesList=None, bitmapList=None,):
self.timestamps = timestamps
self.valuesList = valuesList
self.bitmapList = bitmapList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.valuesList = []
(_etype200, _size197) = iprot.readListBegin()
for _i201 in range(_size197):
_elem202 = iprot.readBinary()
self.valuesList.append(_elem202)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.bitmapList = []
(_etype206, _size203) = iprot.readListBegin()
for _i207 in range(_size203):
_elem208 = iprot.readBinary()
self.bitmapList.append(_elem208)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('QueryDataSet')
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 1)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.valuesList))
for iter209 in self.valuesList:
oprot.writeBinary(iter209)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.bitmapList is not None:
oprot.writeFieldBegin('bitmapList', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.bitmapList))
for iter210 in self.bitmapList:
oprot.writeBinary(iter210)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.timestamps is None:
raise TProtocolException(message='Required field timestamps is unset!')
if self.valuesList is None:
raise TProtocolException(message='Required field valuesList is unset!')
if self.bitmapList is None:
raise TProtocolException(message='Required field bitmapList is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
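# Decoding sketch for QueryDataSet, under the same layout assumptions as the
# insert example above (big-endian i64 timestamps; one bitmap byte covers
# eight row slots, most significant bit first):
#
#     import struct
#
#     def unpack_timestamps(ds):
#         n = len(ds.timestamps) // 8
#         return struct.unpack('>%dq' % n, ds.timestamps)
#
#     def has_value(bitmap, i):
#         # Bit i set means row slot i carries a value (assumed convention).
#         return bool(bitmap[i // 8] & (0x80 >> (i % 8)))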
class QueryDataReq(object):
"""
Attributes:
- sessionId
- paths
- startTime
- endTime
"""
def __init__(self, sessionId=None, paths=None, startTime=None, endTime=None,):
self.sessionId = sessionId
self.paths = paths
self.startTime = startTime
self.endTime = endTime
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype214, _size211) = iprot.readListBegin()
for _i215 in range(_size211):
_elem216 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem216)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.startTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.endTime = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('QueryDataReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter217 in self.paths:
oprot.writeString(iter217.encode('utf-8') if sys.version_info[0] == 2 else iter217)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.startTime is not None:
oprot.writeFieldBegin('startTime', TType.I64, 3)
oprot.writeI64(self.startTime)
oprot.writeFieldEnd()
if self.endTime is not None:
oprot.writeFieldBegin('endTime', TType.I64, 4)
oprot.writeI64(self.endTime)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.startTime is None:
raise TProtocolException(message='Required field startTime is unset!')
if self.endTime is None:
raise TProtocolException(message='Required field endTime is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class QueryDataResp(object):
"""
Attributes:
- status
- paths
- dataTypeList
- queryDataSet
"""
def __init__(self, status=None, paths=None, dataTypeList=None, queryDataSet=None,):
self.status = status
self.paths = paths
self.dataTypeList = dataTypeList
self.queryDataSet = queryDataSet
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype221, _size218) = iprot.readListBegin()
for _i222 in range(_size218):
_elem223 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem223)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype227, _size224) = iprot.readListBegin()
for _i228 in range(_size224):
_elem229 = iprot.readI32()
self.dataTypeList.append(_elem229)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.queryDataSet = QueryDataSet()
self.queryDataSet.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('QueryDataResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter230 in self.paths:
oprot.writeString(iter230.encode('utf-8') if sys.version_info[0] == 2 else iter230)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter231 in self.dataTypeList:
oprot.writeI32(iter231)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.queryDataSet is not None:
oprot.writeFieldBegin('queryDataSet', TType.STRUCT, 4)
self.queryDataSet.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
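# Query round trip, sketched with the same hypothetical `client` as above:
#
#     req = QueryDataReq(sessionId=session_id, paths=['root.sg1.d1.s1'],
#                        startTime=0, endTime=1000)
#     resp = client.queryData(req)       # method name assumed from struct names
#     resp.validate()                    # only `status` is required
#     if resp.queryDataSet is not None:  # the data set itself is optional
#         ts = unpack_timestamps(resp.queryDataSet)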
class AddStorageEnginesReq(object):
"""
Attributes:
- sessionId
- storageEngines
"""
def __init__(self, sessionId=None, storageEngines=None,):
self.sessionId = sessionId
self.storageEngines = storageEngines
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.storageEngines = []
(_etype235, _size232) = iprot.readListBegin()
for _i236 in range(_size232):
_elem237 = StorageEngine()
_elem237.read(iprot)
self.storageEngines.append(_elem237)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('AddStorageEnginesReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.storageEngines is not None:
oprot.writeFieldBegin('storageEngines', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.storageEngines))
for iter238 in self.storageEngines:
iter238.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.storageEngines is None:
raise TProtocolException(message='Required field storageEngines is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class StorageEngine(object):
"""
Attributes:
- ip
- port
- type
- extraParams
"""
def __init__(self, ip=None, port=None, type=None, extraParams=None,):
self.ip = ip
self.port = port
self.type = type
self.extraParams = extraParams
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.port = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.type = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.MAP:
self.extraParams = {}
(_ktype240, _vtype241, _size239) = iprot.readMapBegin()
for _i243 in range(_size239):
_key244 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val245 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.extraParams[_key244] = _val245
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('StorageEngine')
if self.ip is not None:
oprot.writeFieldBegin('ip', TType.STRING, 1)
oprot.writeString(self.ip.encode('utf-8') if sys.version_info[0] == 2 else self.ip)
oprot.writeFieldEnd()
if self.port is not None:
oprot.writeFieldBegin('port', TType.I32, 2)
oprot.writeI32(self.port)
oprot.writeFieldEnd()
if self.type is not None:
oprot.writeFieldBegin('type', TType.STRING, 3)
oprot.writeString(self.type.encode('utf-8') if sys.version_info[0] == 2 else self.type)
oprot.writeFieldEnd()
if self.extraParams is not None:
oprot.writeFieldBegin('extraParams', TType.MAP, 4)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.extraParams))
for kiter246, viter247 in self.extraParams.items():
oprot.writeString(kiter246.encode('utf-8') if sys.version_info[0] == 2 else kiter246)
oprot.writeString(viter247.encode('utf-8') if sys.version_info[0] == 2 else viter247)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.ip is None:
raise TProtocolException(message='Required field ip is unset!')
if self.port is None:
raise TProtocolException(message='Required field port is unset!')
if self.type is None:
raise TProtocolException(message='Required field type is unset!')
if self.extraParams is None:
raise TProtocolException(message='Required field extraParams is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
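# Sketch: registering an extra storage backend. The accepted `type` strings
# and extraParams keys are backend-specific and not defined in this module;
# the values below are illustrative guesses:
#
#     engine = StorageEngine(ip='127.0.0.1', port=6667, type='iotdb',
#                            extraParams={'username': 'root',
#                                         'password': 'root'})
#     req = AddStorageEnginesReq(sessionId=session_id, storageEngines=[engine])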
class AggregateQueryReq(object):
"""
Attributes:
- sessionId
- paths
- startTime
- endTime
- aggregateType
"""
def __init__(self, sessionId=None, paths=None, startTime=None, endTime=None, aggregateType=None,):
self.sessionId = sessionId
self.paths = paths
self.startTime = startTime
self.endTime = endTime
self.aggregateType = aggregateType
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype251, _size248) = iprot.readListBegin()
for _i252 in range(_size248):
_elem253 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem253)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.startTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.endTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.aggregateType = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('AggregateQueryReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter254 in self.paths:
oprot.writeString(iter254.encode('utf-8') if sys.version_info[0] == 2 else iter254)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.startTime is not None:
oprot.writeFieldBegin('startTime', TType.I64, 3)
oprot.writeI64(self.startTime)
oprot.writeFieldEnd()
if self.endTime is not None:
oprot.writeFieldBegin('endTime', TType.I64, 4)
oprot.writeI64(self.endTime)
oprot.writeFieldEnd()
if self.aggregateType is not None:
oprot.writeFieldBegin('aggregateType', TType.I32, 5)
oprot.writeI32(self.aggregateType)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.startTime is None:
raise TProtocolException(message='Required field startTime is unset!')
if self.endTime is None:
raise TProtocolException(message='Required field endTime is unset!')
if self.aggregateType is None:
raise TProtocolException(message='Required field aggregateType is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
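# Sketch: an aggregation over a time range. aggregateType is an i32 whose
# legal values come from an enum defined elsewhere in this module; the literal
# below is a placeholder, not a known constant:
#
#     req = AggregateQueryReq(sessionId=session_id, paths=['root.sg1.d1.s1'],
#                             startTime=0, endTime=1000, aggregateType=0)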
class AggregateQueryResp(object):
"""
Attributes:
- status
- paths
- dataTypeList
- timestamps
- valuesList
"""
def __init__(self, status=None, paths=None, dataTypeList=None, timestamps=None, valuesList=None,):
self.status = status
self.paths = paths
self.dataTypeList = dataTypeList
self.timestamps = timestamps
self.valuesList = valuesList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype258, _size255) = iprot.readListBegin()
for _i259 in range(_size255):
_elem260 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem260)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype264, _size261) = iprot.readListBegin()
for _i265 in range(_size261):
_elem266 = iprot.readI32()
self.dataTypeList.append(_elem266)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.valuesList = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('AggregateQueryResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter267 in self.paths:
oprot.writeString(iter267.encode('utf-8') if sys.version_info[0] == 2 else iter267)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter268 in self.dataTypeList:
oprot.writeI32(iter268)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 4)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.STRING, 5)
oprot.writeBinary(self.valuesList)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ValueFilterQueryReq(object):
"""
Attributes:
- sessionId
- paths
- startTime
- endTime
- booleanExpression
"""
def __init__(self, sessionId=None, paths=None, startTime=None, endTime=None, booleanExpression=None,):
self.sessionId = sessionId
self.paths = paths
self.startTime = startTime
self.endTime = endTime
self.booleanExpression = booleanExpression
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype272, _size269) = iprot.readListBegin()
for _i273 in range(_size269):
_elem274 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem274)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.startTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.endTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.booleanExpression = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ValueFilterQueryReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter275 in self.paths:
oprot.writeString(iter275.encode('utf-8') if sys.version_info[0] == 2 else iter275)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.startTime is not None:
oprot.writeFieldBegin('startTime', TType.I64, 3)
oprot.writeI64(self.startTime)
oprot.writeFieldEnd()
if self.endTime is not None:
oprot.writeFieldBegin('endTime', TType.I64, 4)
oprot.writeI64(self.endTime)
oprot.writeFieldEnd()
if self.booleanExpression is not None:
oprot.writeFieldBegin('booleanExpression', TType.STRING, 5)
oprot.writeString(self.booleanExpression.encode('utf-8') if sys.version_info[0] == 2 else self.booleanExpression)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.startTime is None:
raise TProtocolException(message='Required field startTime is unset!')
if self.endTime is None:
raise TProtocolException(message='Required field endTime is unset!')
if self.booleanExpression is None:
raise TProtocolException(message='Required field booleanExpression is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
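# Sketch: a value-filtered query. booleanExpression travels as a plain string
# and is parsed server-side; the syntax shown is only a guess:
#
#     req = ValueFilterQueryReq(sessionId=session_id, paths=['root.sg1.d1.s1'],
#                               startTime=0, endTime=1000,
#                               booleanExpression='root.sg1.d1.s1 > 10')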
class ValueFilterQueryResp(object):
"""
Attributes:
- status
- paths
- dataTypeList
- queryDataSet
"""
def __init__(self, status=None, paths=None, dataTypeList=None, queryDataSet=None,):
self.status = status
self.paths = paths
self.dataTypeList = dataTypeList
self.queryDataSet = queryDataSet
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype279, _size276) = iprot.readListBegin()
for _i280 in range(_size276):
_elem281 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem281)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype285, _size282) = iprot.readListBegin()
for _i286 in range(_size282):
_elem287 = iprot.readI32()
self.dataTypeList.append(_elem287)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.queryDataSet = QueryDataSet()
self.queryDataSet.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ValueFilterQueryResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter288 in self.paths:
oprot.writeString(iter288.encode('utf-8') if sys.version_info[0] == 2 else iter288)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter289 in self.dataTypeList:
oprot.writeI32(iter289)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.queryDataSet is not None:
oprot.writeFieldBegin('queryDataSet', TType.STRUCT, 4)
self.queryDataSet.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class LastQueryReq(object):
"""
Attributes:
- sessionId
- paths
- startTime
"""
def __init__(self, sessionId=None, paths=None, startTime=None,):
self.sessionId = sessionId
self.paths = paths
self.startTime = startTime
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype293, _size290) = iprot.readListBegin()
for _i294 in range(_size290):
_elem295 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem295)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.startTime = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('LastQueryReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter296 in self.paths:
oprot.writeString(iter296.encode('utf-8') if sys.version_info[0] == 2 else iter296)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.startTime is not None:
oprot.writeFieldBegin('startTime', TType.I64, 3)
oprot.writeI64(self.startTime)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.startTime is None:
raise TProtocolException(message='Required field startTime is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
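# --- illustrative sketch (not generated code) ---------------------------------
# Minimal construction of a LastQueryReq, which presumably asks for the most
# recent point at or after startTime on each path. Values are hypothetical.
def _example_last_query_req(session_id):
    req = LastQueryReq(sessionId=session_id,
                       paths=['root.sensor.temperature'],
                       startTime=0)
    req.validate()  # all three fields are required
    return req
# -------------------------------------------------------------------------------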
class LastQueryResp(object):
"""
Attributes:
- status
- paths
- dataTypeList
- timestamps
- valuesList
"""
def __init__(self, status=None, paths=None, dataTypeList=None, timestamps=None, valuesList=None,):
self.status = status
self.paths = paths
self.dataTypeList = dataTypeList
self.timestamps = timestamps
self.valuesList = valuesList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype300, _size297) = iprot.readListBegin()
for _i301 in range(_size297):
_elem302 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem302)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype306, _size303) = iprot.readListBegin()
for _i307 in range(_size303):
_elem308 = iprot.readI32()
self.dataTypeList.append(_elem308)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.valuesList = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('LastQueryResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter309 in self.paths:
oprot.writeString(iter309.encode('utf-8') if sys.version_info[0] == 2 else iter309)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter310 in self.dataTypeList:
oprot.writeI32(iter310)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 4)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.STRING, 5)
oprot.writeBinary(self.valuesList)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
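# --- illustrative sketch (not generated code) ---------------------------------
# LastQueryResp ships `timestamps` and `valuesList` as opaque binary blobs
# (written via writeBinary); their packing is decided outside this file. The
# helper below ASSUMES big-endian int64 timestamps purely for illustration.
def _example_decode_timestamps(resp):
    import struct
    raw = resp.timestamps or b''
    count = len(raw) // 8
    return struct.unpack('>%dq' % count, raw[:count * 8])
# -------------------------------------------------------------------------------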
class DownsampleQueryReq(object):
"""
Attributes:
- sessionId
- paths
- startTime
- endTime
- aggregateType
- precision
"""
def __init__(self, sessionId=None, paths=None, startTime=None, endTime=None, aggregateType=None, precision=None,):
self.sessionId = sessionId
self.paths = paths
self.startTime = startTime
self.endTime = endTime
self.aggregateType = aggregateType
self.precision = precision
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype314, _size311) = iprot.readListBegin()
for _i315 in range(_size311):
_elem316 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem316)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.startTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.endTime = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.aggregateType = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I64:
self.precision = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('DownsampleQueryReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter317 in self.paths:
oprot.writeString(iter317.encode('utf-8') if sys.version_info[0] == 2 else iter317)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.startTime is not None:
oprot.writeFieldBegin('startTime', TType.I64, 3)
oprot.writeI64(self.startTime)
oprot.writeFieldEnd()
if self.endTime is not None:
oprot.writeFieldBegin('endTime', TType.I64, 4)
oprot.writeI64(self.endTime)
oprot.writeFieldEnd()
if self.aggregateType is not None:
oprot.writeFieldBegin('aggregateType', TType.I32, 5)
oprot.writeI32(self.aggregateType)
oprot.writeFieldEnd()
if self.precision is not None:
oprot.writeFieldBegin('precision', TType.I64, 6)
oprot.writeI64(self.precision)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.paths is None:
raise TProtocolException(message='Required field paths is unset!')
if self.startTime is None:
raise TProtocolException(message='Required field startTime is unset!')
if self.endTime is None:
raise TProtocolException(message='Required field endTime is unset!')
if self.aggregateType is None:
raise TProtocolException(message='Required field aggregateType is unset!')
if self.precision is None:
raise TProtocolException(message='Required field precision is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
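# --- illustrative sketch (not generated code) ---------------------------------
# Building a DownsampleQueryReq. aggregateType is an i32 code (an enum defined
# elsewhere in this module) and precision is the bucket width, presumably in
# the same unit as the timestamps. All concrete values are hypothetical.
def _example_downsample_req(session_id, agg_type_code):
    req = DownsampleQueryReq(sessionId=session_id,
                             paths=['root.sensor.temperature'],
                             startTime=0,
                             endTime=86400000,
                             aggregateType=agg_type_code,
                             precision=60000)  # e.g. minute buckets, if ms
    req.validate()
    return req
# -------------------------------------------------------------------------------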
class DownsampleQueryResp(object):
"""
Attributes:
- status
- paths
- dataTypeList
- queryDataSet
"""
def __init__(self, status=None, paths=None, dataTypeList=None, queryDataSet=None,):
self.status = status
self.paths = paths
self.dataTypeList = dataTypeList
self.queryDataSet = queryDataSet
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype321, _size318) = iprot.readListBegin()
for _i322 in range(_size318):
_elem323 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem323)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype327, _size324) = iprot.readListBegin()
for _i328 in range(_size324):
_elem329 = iprot.readI32()
self.dataTypeList.append(_elem329)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.queryDataSet = QueryDataSet()
self.queryDataSet.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('DownsampleQueryResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter330 in self.paths:
oprot.writeString(iter330.encode('utf-8') if sys.version_info[0] == 2 else iter330)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter331 in self.dataTypeList:
oprot.writeI32(iter331)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.queryDataSet is not None:
oprot.writeFieldBegin('queryDataSet', TType.STRUCT, 4)
self.queryDataSet.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ShowColumnsReq(object):
"""
Attributes:
- sessionId
"""
def __init__(self, sessionId=None,):
self.sessionId = sessionId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ShowColumnsReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ShowColumnsResp(object):
"""
Attributes:
- status
- paths
- dataTypeList
"""
def __init__(self, status=None, paths=None, dataTypeList=None,):
self.status = status
self.paths = paths
self.dataTypeList = dataTypeList
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.paths = []
(_etype335, _size332) = iprot.readListBegin()
for _i336 in range(_size332):
_elem337 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem337)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype341, _size338) = iprot.readListBegin()
for _i342 in range(_size338):
_elem343 = iprot.readI32()
self.dataTypeList.append(_elem343)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ShowColumnsResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter344 in self.paths:
oprot.writeString(iter344.encode('utf-8') if sys.version_info[0] == 2 else iter344)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter345 in self.dataTypeList:
oprot.writeI32(iter345)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetReplicaNumReq(object):
"""
Attributes:
- sessionId
"""
def __init__(self, sessionId=None,):
self.sessionId = sessionId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetReplicaNumReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetReplicaNumResp(object):
"""
Attributes:
- status
- replicaNum
"""
def __init__(self, status=None, replicaNum=None,):
self.status = status
self.replicaNum = replicaNum
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.replicaNum = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetReplicaNumResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.replicaNum is not None:
oprot.writeFieldBegin('replicaNum', TType.I32, 2)
oprot.writeI32(self.replicaNum)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ExecuteSqlReq(object):
"""
Attributes:
- sessionId
- statement
"""
def __init__(self, sessionId=None, statement=None,):
self.sessionId = sessionId
self.statement = statement
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.statement = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ExecuteSqlReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.statement is not None:
oprot.writeFieldBegin('statement', TType.STRING, 2)
oprot.writeString(self.statement.encode('utf-8') if sys.version_info[0] == 2 else self.statement)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.statement is None:
raise TProtocolException(message='Required field statement is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
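# --- illustrative sketch (not generated code) ---------------------------------
# Round-tripping an ExecuteSqlReq through an in-memory transport with the
# standard Thrift binary protocol. This only exercises the generated
# read()/write() methods above; it does not contact a server, and the SQL
# text is a hypothetical placeholder.
def _example_execute_sql_roundtrip():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    req = ExecuteSqlReq(sessionId=1, statement=u'SHOW COLUMNS;')
    obuf = TTransport.TMemoryBuffer()
    req.write(TBinaryProtocol.TBinaryProtocol(obuf))
    ibuf = TTransport.TMemoryBuffer(obuf.getvalue())
    decoded = ExecuteSqlReq()
    decoded.read(TBinaryProtocol.TBinaryProtocol(ibuf))
    assert decoded == req  # __eq__ compares the full __dict__
    return decoded
# -------------------------------------------------------------------------------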
class ExecuteSqlResp(object):
"""
Attributes:
- status
- type
- paths
- dataTypeList
- queryDataSet
- timestamps
- valuesList
- replicaNum
- pointsNum
- aggregateType
- parseErrorMsg
- limit
- offset
- orderByPath
- ascending
- iginxInfos
- storageEngineInfos
- metaStorageInfos
- localMetaStorageInfo
"""
def __init__(self, status=None, type=None, paths=None, dataTypeList=None, queryDataSet=None, timestamps=None, valuesList=None, replicaNum=None, pointsNum=None, aggregateType=None, parseErrorMsg=None, limit=None, offset=None, orderByPath=None, ascending=None, iginxInfos=None, storageEngineInfos=None, metaStorageInfos=None, localMetaStorageInfo=None,):
self.status = status
self.type = type
self.paths = paths
self.dataTypeList = dataTypeList
self.queryDataSet = queryDataSet
self.timestamps = timestamps
self.valuesList = valuesList
self.replicaNum = replicaNum
self.pointsNum = pointsNum
self.aggregateType = aggregateType
self.parseErrorMsg = parseErrorMsg
self.limit = limit
self.offset = offset
self.orderByPath = orderByPath
self.ascending = ascending
self.iginxInfos = iginxInfos
self.storageEngineInfos = storageEngineInfos
self.metaStorageInfos = metaStorageInfos
self.localMetaStorageInfo = localMetaStorageInfo
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.type = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.paths = []
(_etype349, _size346) = iprot.readListBegin()
for _i350 in range(_size346):
_elem351 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.paths.append(_elem351)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.dataTypeList = []
(_etype355, _size352) = iprot.readListBegin()
for _i356 in range(_size352):
_elem357 = iprot.readI32()
self.dataTypeList.append(_elem357)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.queryDataSet = QueryDataSet()
self.queryDataSet.read(iprot)
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.timestamps = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.valuesList = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.I32:
self.replicaNum = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.I64:
self.pointsNum = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I32:
self.aggregateType = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.STRING:
self.parseErrorMsg = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.I32:
self.limit = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.I32:
self.offset = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.STRING:
self.orderByPath = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.BOOL:
self.ascending = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.LIST:
self.iginxInfos = []
(_etype361, _size358) = iprot.readListBegin()
for _i362 in range(_size358):
_elem363 = IginxInfo()
_elem363.read(iprot)
self.iginxInfos.append(_elem363)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.LIST:
self.storageEngineInfos = []
(_etype367, _size364) = iprot.readListBegin()
for _i368 in range(_size364):
_elem369 = StorageEngineInfo()
_elem369.read(iprot)
self.storageEngineInfos.append(_elem369)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.LIST:
self.metaStorageInfos = []
(_etype373, _size370) = iprot.readListBegin()
for _i374 in range(_size370):
_elem375 = MetaStorageInfo()
_elem375.read(iprot)
self.metaStorageInfos.append(_elem375)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.STRUCT:
self.localMetaStorageInfo = LocalMetaStorageInfo()
self.localMetaStorageInfo.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ExecuteSqlResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.type is not None:
oprot.writeFieldBegin('type', TType.I32, 2)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
if self.paths is not None:
oprot.writeFieldBegin('paths', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.paths))
for iter376 in self.paths:
oprot.writeString(iter376.encode('utf-8') if sys.version_info[0] == 2 else iter376)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.dataTypeList is not None:
oprot.writeFieldBegin('dataTypeList', TType.LIST, 4)
oprot.writeListBegin(TType.I32, len(self.dataTypeList))
for iter377 in self.dataTypeList:
oprot.writeI32(iter377)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.queryDataSet is not None:
oprot.writeFieldBegin('queryDataSet', TType.STRUCT, 5)
self.queryDataSet.write(oprot)
oprot.writeFieldEnd()
if self.timestamps is not None:
oprot.writeFieldBegin('timestamps', TType.STRING, 6)
oprot.writeBinary(self.timestamps)
oprot.writeFieldEnd()
if self.valuesList is not None:
oprot.writeFieldBegin('valuesList', TType.STRING, 7)
oprot.writeBinary(self.valuesList)
oprot.writeFieldEnd()
if self.replicaNum is not None:
oprot.writeFieldBegin('replicaNum', TType.I32, 8)
oprot.writeI32(self.replicaNum)
oprot.writeFieldEnd()
if self.pointsNum is not None:
oprot.writeFieldBegin('pointsNum', TType.I64, 9)
oprot.writeI64(self.pointsNum)
oprot.writeFieldEnd()
if self.aggregateType is not None:
oprot.writeFieldBegin('aggregateType', TType.I32, 10)
oprot.writeI32(self.aggregateType)
oprot.writeFieldEnd()
if self.parseErrorMsg is not None:
oprot.writeFieldBegin('parseErrorMsg', TType.STRING, 11)
oprot.writeString(self.parseErrorMsg.encode('utf-8') if sys.version_info[0] == 2 else self.parseErrorMsg)
oprot.writeFieldEnd()
if self.limit is not None:
oprot.writeFieldBegin('limit', TType.I32, 12)
oprot.writeI32(self.limit)
oprot.writeFieldEnd()
if self.offset is not None:
oprot.writeFieldBegin('offset', TType.I32, 13)
oprot.writeI32(self.offset)
oprot.writeFieldEnd()
if self.orderByPath is not None:
oprot.writeFieldBegin('orderByPath', TType.STRING, 14)
oprot.writeString(self.orderByPath.encode('utf-8') if sys.version_info[0] == 2 else self.orderByPath)
oprot.writeFieldEnd()
if self.ascending is not None:
oprot.writeFieldBegin('ascending', TType.BOOL, 15)
oprot.writeBool(self.ascending)
oprot.writeFieldEnd()
if self.iginxInfos is not None:
oprot.writeFieldBegin('iginxInfos', TType.LIST, 16)
oprot.writeListBegin(TType.STRUCT, len(self.iginxInfos))
for iter378 in self.iginxInfos:
iter378.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.storageEngineInfos is not None:
oprot.writeFieldBegin('storageEngineInfos', TType.LIST, 17)
oprot.writeListBegin(TType.STRUCT, len(self.storageEngineInfos))
for iter379 in self.storageEngineInfos:
iter379.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.metaStorageInfos is not None:
oprot.writeFieldBegin('metaStorageInfos', TType.LIST, 18)
oprot.writeListBegin(TType.STRUCT, len(self.metaStorageInfos))
for iter380 in self.metaStorageInfos:
iter380.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.localMetaStorageInfo is not None:
oprot.writeFieldBegin('localMetaStorageInfo', TType.STRUCT, 19)
self.localMetaStorageInfo.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
if self.type is None:
raise TProtocolException(message='Required field type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
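# Note (not generated code): ExecuteSqlResp is a catch-all response. Only
# `status` and `type` are required; which of the remaining optional fields are
# populated evidently depends on the statement kind encoded in `type` (query
# results in queryDataSet, point counts in pointsNum, cluster metadata in
# iginxInfos/storageEngineInfos/metaStorageInfos, and so on), so callers
# should dispatch on resp.type before reading anything else.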
class UpdateUserReq(object):
"""
Attributes:
- sessionId
- username
- password
- auths
"""
def __init__(self, sessionId=None, username=None, password=None, auths=None,):
self.sessionId = sessionId
self.username = username
self.password = password
self.auths = auths
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.SET:
self.auths = set()
(_etype384, _size381) = iprot.readSetBegin()
for _i385 in range(_size381):
_elem386 = iprot.readI32()
self.auths.add(_elem386)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('UpdateUserReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.username is not None:
oprot.writeFieldBegin('username', TType.STRING, 2)
oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
oprot.writeFieldEnd()
if self.password is not None:
oprot.writeFieldBegin('password', TType.STRING, 3)
oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
oprot.writeFieldEnd()
if self.auths is not None:
oprot.writeFieldBegin('auths', TType.SET, 4)
oprot.writeSetBegin(TType.I32, len(self.auths))
for iter387 in self.auths:
oprot.writeI32(iter387)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.username is None:
raise TProtocolException(message='Required field username is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class AddUserReq(object):
"""
Attributes:
- sessionId
- username
- password
- auths
"""
def __init__(self, sessionId=None, username=None, password=None, auths=None,):
self.sessionId = sessionId
self.username = username
self.password = password
self.auths = auths
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.SET:
self.auths = set()
(_etype391, _size388) = iprot.readSetBegin()
for _i392 in range(_size388):
_elem393 = iprot.readI32()
self.auths.add(_elem393)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('AddUserReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.username is not None:
oprot.writeFieldBegin('username', TType.STRING, 2)
oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
oprot.writeFieldEnd()
if self.password is not None:
oprot.writeFieldBegin('password', TType.STRING, 3)
oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
oprot.writeFieldEnd()
if self.auths is not None:
oprot.writeFieldBegin('auths', TType.SET, 4)
oprot.writeSetBegin(TType.I32, len(self.auths))
for iter394 in self.auths:
oprot.writeI32(iter394)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.username is None:
raise TProtocolException(message='Required field username is unset!')
if self.password is None:
raise TProtocolException(message='Required field password is unset!')
if self.auths is None:
raise TProtocolException(message='Required field auths is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
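# --- illustrative sketch (not generated code) ---------------------------------
# AddUserReq requires all four fields, whereas UpdateUserReq above requires
# only sessionId and username (password/auths stay optional for partial
# updates). validate() surfaces that difference before anything hits the wire:
def _example_add_user_validation():
    req = AddUserReq(sessionId=1, username=u'alice')  # hypothetical user
    try:
        req.validate()
    except TProtocolException as exc:
        return str(exc)  # 'Required field password is unset!'
# -------------------------------------------------------------------------------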
class DeleteUserReq(object):
"""
Attributes:
- sessionId
- username
"""
def __init__(self, sessionId=None, username=None,):
self.sessionId = sessionId
self.username = username
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('DeleteUserReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.username is not None:
oprot.writeFieldBegin('username', TType.STRING, 2)
oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
if self.username is None:
raise TProtocolException(message='Required field username is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetUserReq(object):
"""
Attributes:
- sessionId
- usernames
"""
def __init__(self, sessionId=None, usernames=None,):
self.sessionId = sessionId
self.usernames = usernames
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.usernames = []
(_etype398, _size395) = iprot.readListBegin()
for _i399 in range(_size395):
_elem400 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.usernames.append(_elem400)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetUserReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.usernames is not None:
oprot.writeFieldBegin('usernames', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.usernames))
for iter401 in self.usernames:
oprot.writeString(iter401.encode('utf-8') if sys.version_info[0] == 2 else iter401)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetUserResp(object):
"""
Attributes:
- status
- usernames
- userTypes
- auths
"""
def __init__(self, status=None, usernames=None, userTypes=None, auths=None,):
self.status = status
self.usernames = usernames
self.userTypes = userTypes
self.auths = auths
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.usernames = []
(_etype405, _size402) = iprot.readListBegin()
for _i406 in range(_size402):
_elem407 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.usernames.append(_elem407)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.userTypes = []
(_etype411, _size408) = iprot.readListBegin()
for _i412 in range(_size408):
_elem413 = iprot.readI32()
self.userTypes.append(_elem413)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.auths = []
(_etype417, _size414) = iprot.readListBegin()
for _i418 in range(_size414):
_elem419 = set()
(_etype423, _size420) = iprot.readSetBegin()
for _i424 in range(_size420):
_elem425 = iprot.readI32()
_elem419.add(_elem425)
iprot.readSetEnd()
self.auths.append(_elem419)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetUserResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.usernames is not None:
oprot.writeFieldBegin('usernames', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.usernames))
for iter426 in self.usernames:
oprot.writeString(iter426.encode('utf-8') if sys.version_info[0] == 2 else iter426)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.userTypes is not None:
oprot.writeFieldBegin('userTypes', TType.LIST, 3)
oprot.writeListBegin(TType.I32, len(self.userTypes))
for iter427 in self.userTypes:
oprot.writeI32(iter427)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.auths is not None:
oprot.writeFieldBegin('auths', TType.LIST, 4)
oprot.writeListBegin(TType.SET, len(self.auths))
for iter428 in self.auths:
oprot.writeSetBegin(TType.I32, len(iter428))
for iter429 in iter428:
oprot.writeI32(iter429)
oprot.writeSetEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
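# --- illustrative sketch (not generated code) ---------------------------------
# GetUserResp returns three parallel lists (usernames, userTypes, auths) that
# presumably share indices; auths is a list of i32 sets. A small helper to
# collate them into one mapping per user:
def _example_collate_users(resp):
    resp.validate()
    return dict(
        (name, {'type': utype, 'auths': set(auth_set)})
        for name, utype, auth_set in zip(resp.usernames or [],
                                         resp.userTypes or [],
                                         resp.auths or []))
# -------------------------------------------------------------------------------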
class GetClusterInfoReq(object):
"""
Attributes:
- sessionId
"""
def __init__(self, sessionId=None,):
self.sessionId = sessionId
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetClusterInfoReq')
if self.sessionId is not None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.sessionId is None:
raise TProtocolException(message='Required field sessionId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class IginxInfo(object):
"""
Attributes:
- id
- ip
- port
"""
def __init__(self, id=None, ip=None, port=None,):
self.id = id
self.ip = ip
self.port = port
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.id = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.port = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('IginxInfo')
if self.id is not None:
oprot.writeFieldBegin('id', TType.I64, 1)
oprot.writeI64(self.id)
oprot.writeFieldEnd()
if self.ip is not None:
oprot.writeFieldBegin('ip', TType.STRING, 2)
oprot.writeString(self.ip.encode('utf-8') if sys.version_info[0] == 2 else self.ip)
oprot.writeFieldEnd()
if self.port is not None:
oprot.writeFieldBegin('port', TType.I32, 3)
oprot.writeI32(self.port)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.id is None:
raise TProtocolException(message='Required field id is unset!')
if self.ip is None:
raise TProtocolException(message='Required field ip is unset!')
if self.port is None:
raise TProtocolException(message='Required field port is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class StorageEngineInfo(object):
"""
Attributes:
- id
- ip
- port
- type
"""
def __init__(self, id=None, ip=None, port=None, type=None,):
self.id = id
self.ip = ip
self.port = port
self.type = type
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.id = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.port = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.type = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('StorageEngineInfo')
if self.id is not None:
oprot.writeFieldBegin('id', TType.I64, 1)
oprot.writeI64(self.id)
oprot.writeFieldEnd()
if self.ip is not None:
oprot.writeFieldBegin('ip', TType.STRING, 2)
oprot.writeString(self.ip.encode('utf-8') if sys.version_info[0] == 2 else self.ip)
oprot.writeFieldEnd()
if self.port is not None:
oprot.writeFieldBegin('port', TType.I32, 3)
oprot.writeI32(self.port)
oprot.writeFieldEnd()
if self.type is not None:
oprot.writeFieldBegin('type', TType.STRING, 4)
oprot.writeString(self.type.encode('utf-8') if sys.version_info[0] == 2 else self.type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.id is None:
raise TProtocolException(message='Required field id is unset!')
if self.ip is None:
raise TProtocolException(message='Required field ip is unset!')
if self.port is None:
raise TProtocolException(message='Required field port is unset!')
if self.type is None:
raise TProtocolException(message='Required field type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class MetaStorageInfo(object):
"""
Attributes:
- ip
- port
- type
"""
def __init__(self, ip=None, port=None, type=None,):
self.ip = ip
self.port = port
self.type = type
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.port = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.type = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('MetaStorageInfo')
if self.ip is not None:
oprot.writeFieldBegin('ip', TType.STRING, 1)
oprot.writeString(self.ip.encode('utf-8') if sys.version_info[0] == 2 else self.ip)
oprot.writeFieldEnd()
if self.port is not None:
oprot.writeFieldBegin('port', TType.I32, 2)
oprot.writeI32(self.port)
oprot.writeFieldEnd()
if self.type is not None:
oprot.writeFieldBegin('type', TType.STRING, 3)
oprot.writeString(self.type.encode('utf-8') if sys.version_info[0] == 2 else self.type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.ip is None:
raise TProtocolException(message='Required field ip is unset!')
if self.port is None:
raise TProtocolException(message='Required field port is unset!')
if self.type is None:
raise TProtocolException(message='Required field type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class LocalMetaStorageInfo(object):
"""
Attributes:
- path
"""
def __init__(self, path=None,):
self.path = path
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.path = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('LocalMetaStorageInfo')
if self.path is not None:
oprot.writeFieldBegin('path', TType.STRING, 1)
oprot.writeString(self.path.encode('utf-8') if sys.version_info[0] == 2 else self.path)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.path is None:
raise TProtocolException(message='Required field path is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetClusterInfoResp(object):
"""
Attributes:
- status
- iginxInfos
- storageEngineInfos
- metaStorageInfos
- localMetaStorageInfo
"""
def __init__(self, status=None, iginxInfos=None, storageEngineInfos=None, metaStorageInfos=None, localMetaStorageInfo=None,):
self.status = status
self.iginxInfos = iginxInfos
self.storageEngineInfos = storageEngineInfos
self.metaStorageInfos = metaStorageInfos
self.localMetaStorageInfo = localMetaStorageInfo
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = Status()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.iginxInfos = []
(_etype433, _size430) = iprot.readListBegin()
for _i434 in range(_size430):
_elem435 = IginxInfo()
_elem435.read(iprot)
self.iginxInfos.append(_elem435)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.storageEngineInfos = []
(_etype439, _size436) = iprot.readListBegin()
for _i440 in range(_size436):
_elem441 = StorageEngineInfo()
_elem441.read(iprot)
self.storageEngineInfos.append(_elem441)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.metaStorageInfos = []
(_etype445, _size442) = iprot.readListBegin()
for _i446 in range(_size442):
_elem447 = MetaStorageInfo()
_elem447.read(iprot)
self.metaStorageInfos.append(_elem447)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.localMetaStorageInfo = LocalMetaStorageInfo()
self.localMetaStorageInfo.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetClusterInfoResp')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.iginxInfos is not None:
oprot.writeFieldBegin('iginxInfos', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.iginxInfos))
for iter448 in self.iginxInfos:
iter448.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.storageEngineInfos is not None:
oprot.writeFieldBegin('storageEngineInfos', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.storageEngineInfos))
for iter449 in self.storageEngineInfos:
iter449.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.metaStorageInfos is not None:
oprot.writeFieldBegin('metaStorageInfos', TType.LIST, 4)
oprot.writeListBegin(TType.STRUCT, len(self.metaStorageInfos))
for iter450 in self.metaStorageInfos:
iter450.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.localMetaStorageInfo is not None:
oprot.writeFieldBegin('localMetaStorageInfo', TType.STRUCT, 5)
self.localMetaStorageInfo.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
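# A minimal round-trip sketch for these generated structs, assuming the
# standard Apache Thrift Python runtime ('thrift' package) is available;
# the path value is illustrative:
#
#     from thrift.transport import TTransport
#     from thrift.protocol import TBinaryProtocol
#     buf = TTransport.TMemoryBuffer()
#     LocalMetaStorageInfo(path='/data/meta').write(TBinaryProtocol.TBinaryProtocol(buf))
#     decoded = LocalMetaStorageInfo()
#     decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
#     assert decoded.path == '/data/meta'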
all_structs.append(Status)
Status.thrift_spec = (
None, # 0
(1, TType.I32, 'code', None, None, ), # 1
(2, TType.STRING, 'message', 'UTF8', None, ), # 2
(3, TType.LIST, 'subStatus', (TType.STRUCT, [Status, None], False), None, ), # 3
)
all_structs.append(OpenSessionReq)
OpenSessionReq.thrift_spec = (
None, # 0
(1, TType.STRING, 'username', 'UTF8', None, ), # 1
(2, TType.STRING, 'password', 'UTF8', None, ), # 2
)
all_structs.append(OpenSessionResp)
OpenSessionResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.I64, 'sessionId', None, None, ), # 2
)
all_structs.append(CloseSessionReq)
CloseSessionReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
)
all_structs.append(DeleteColumnsReq)
DeleteColumnsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
)
all_structs.append(InsertColumnRecordsReq)
InsertColumnRecordsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.STRING, 'timestamps', 'BINARY', None, ), # 3
(4, TType.LIST, 'valuesList', (TType.STRING, 'BINARY', False), None, ), # 4
(5, TType.LIST, 'bitmapList', (TType.STRING, 'BINARY', False), None, ), # 5
(6, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 6
(7, TType.LIST, 'attributesList', (TType.MAP, (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), False), None, ), # 7
)
all_structs.append(InsertNonAlignedColumnRecordsReq)
InsertNonAlignedColumnRecordsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.STRING, 'timestamps', 'BINARY', None, ), # 3
(4, TType.LIST, 'valuesList', (TType.STRING, 'BINARY', False), None, ), # 4
(5, TType.LIST, 'bitmapList', (TType.STRING, 'BINARY', False), None, ), # 5
(6, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 6
(7, TType.LIST, 'attributesList', (TType.MAP, (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), False), None, ), # 7
)
all_structs.append(InsertRowRecordsReq)
InsertRowRecordsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.STRING, 'timestamps', 'BINARY', None, ), # 3
(4, TType.LIST, 'valuesList', (TType.STRING, 'BINARY', False), None, ), # 4
(5, TType.LIST, 'bitmapList', (TType.STRING, 'BINARY', False), None, ), # 5
(6, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 6
(7, TType.LIST, 'attributesList', (TType.MAP, (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), False), None, ), # 7
)
all_structs.append(InsertNonAlignedRowRecordsReq)
InsertNonAlignedRowRecordsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.STRING, 'timestamps', 'BINARY', None, ), # 3
(4, TType.LIST, 'valuesList', (TType.STRING, 'BINARY', False), None, ), # 4
(5, TType.LIST, 'bitmapList', (TType.STRING, 'BINARY', False), None, ), # 5
(6, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 6
(7, TType.LIST, 'attributesList', (TType.MAP, (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), False), None, ), # 7
)
all_structs.append(DeleteDataInColumnsReq)
DeleteDataInColumnsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.I64, 'startTime', None, None, ), # 3
(4, TType.I64, 'endTime', None, None, ), # 4
)
all_structs.append(QueryDataSet)
QueryDataSet.thrift_spec = (
None, # 0
(1, TType.STRING, 'timestamps', 'BINARY', None, ), # 1
(2, TType.LIST, 'valuesList', (TType.STRING, 'BINARY', False), None, ), # 2
(3, TType.LIST, 'bitmapList', (TType.STRING, 'BINARY', False), None, ), # 3
)
all_structs.append(QueryDataReq)
QueryDataReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.I64, 'startTime', None, None, ), # 3
(4, TType.I64, 'endTime', None, None, ), # 4
)
all_structs.append(QueryDataResp)
QueryDataResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 3
(4, TType.STRUCT, 'queryDataSet', [QueryDataSet, None], None, ), # 4
)
all_structs.append(AddStorageEnginesReq)
AddStorageEnginesReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'storageEngines', (TType.STRUCT, [StorageEngine, None], False), None, ), # 2
)
all_structs.append(StorageEngine)
StorageEngine.thrift_spec = (
None, # 0
(1, TType.STRING, 'ip', 'UTF8', None, ), # 1
(2, TType.I32, 'port', None, None, ), # 2
(3, TType.STRING, 'type', 'UTF8', None, ), # 3
(4, TType.MAP, 'extraParams', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 4
)
all_structs.append(AggregateQueryReq)
AggregateQueryReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.I64, 'startTime', None, None, ), # 3
(4, TType.I64, 'endTime', None, None, ), # 4
(5, TType.I32, 'aggregateType', None, None, ), # 5
)
all_structs.append(AggregateQueryResp)
AggregateQueryResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 3
(4, TType.STRING, 'timestamps', 'BINARY', None, ), # 4
(5, TType.STRING, 'valuesList', 'BINARY', None, ), # 5
)
all_structs.append(ValueFilterQueryReq)
ValueFilterQueryReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.I64, 'startTime', None, None, ), # 3
(4, TType.I64, 'endTime', None, None, ), # 4
(5, TType.STRING, 'booleanExpression', 'UTF8', None, ), # 5
)
all_structs.append(ValueFilterQueryResp)
ValueFilterQueryResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 3
(4, TType.STRUCT, 'queryDataSet', [QueryDataSet, None], None, ), # 4
)
all_structs.append(LastQueryReq)
LastQueryReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.I64, 'startTime', None, None, ), # 3
)
all_structs.append(LastQueryResp)
LastQueryResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 3
(4, TType.STRING, 'timestamps', 'BINARY', None, ), # 4
(5, TType.STRING, 'valuesList', 'BINARY', None, ), # 5
)
all_structs.append(DownsampleQueryReq)
DownsampleQueryReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.I64, 'startTime', None, None, ), # 3
(4, TType.I64, 'endTime', None, None, ), # 4
(5, TType.I32, 'aggregateType', None, None, ), # 5
(6, TType.I64, 'precision', None, None, ), # 6
)
all_structs.append(DownsampleQueryResp)
DownsampleQueryResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 3
(4, TType.STRUCT, 'queryDataSet', [QueryDataSet, None], None, ), # 4
)
all_structs.append(ShowColumnsReq)
ShowColumnsReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
)
all_structs.append(ShowColumnsResp)
ShowColumnsResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 3
)
all_structs.append(GetReplicaNumReq)
GetReplicaNumReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
)
all_structs.append(GetReplicaNumResp)
GetReplicaNumResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.I32, 'replicaNum', None, None, ), # 2
)
all_structs.append(ExecuteSqlReq)
ExecuteSqlReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.STRING, 'statement', 'UTF8', None, ), # 2
)
all_structs.append(ExecuteSqlResp)
ExecuteSqlResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.I32, 'type', None, None, ), # 2
(3, TType.LIST, 'paths', (TType.STRING, 'UTF8', False), None, ), # 3
(4, TType.LIST, 'dataTypeList', (TType.I32, None, False), None, ), # 4
(5, TType.STRUCT, 'queryDataSet', [QueryDataSet, None], None, ), # 5
(6, TType.STRING, 'timestamps', 'BINARY', None, ), # 6
(7, TType.STRING, 'valuesList', 'BINARY', None, ), # 7
(8, TType.I32, 'replicaNum', None, None, ), # 8
(9, TType.I64, 'pointsNum', None, None, ), # 9
(10, TType.I32, 'aggregateType', None, None, ), # 10
(11, TType.STRING, 'parseErrorMsg', 'UTF8', None, ), # 11
(12, TType.I32, 'limit', None, None, ), # 12
(13, TType.I32, 'offset', None, None, ), # 13
(14, TType.STRING, 'orderByPath', 'UTF8', None, ), # 14
(15, TType.BOOL, 'ascending', None, None, ), # 15
(16, TType.LIST, 'iginxInfos', (TType.STRUCT, [IginxInfo, None], False), None, ), # 16
(17, TType.LIST, 'storageEngineInfos', (TType.STRUCT, [StorageEngineInfo, None], False), None, ), # 17
(18, TType.LIST, 'metaStorageInfos', (TType.STRUCT, [MetaStorageInfo, None], False), None, ), # 18
(19, TType.STRUCT, 'localMetaStorageInfo', [LocalMetaStorageInfo, None], None, ), # 19
)
all_structs.append(UpdateUserReq)
UpdateUserReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.STRING, 'username', 'UTF8', None, ), # 2
(3, TType.STRING, 'password', 'UTF8', None, ), # 3
(4, TType.SET, 'auths', (TType.I32, None, False), None, ), # 4
)
all_structs.append(AddUserReq)
AddUserReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.STRING, 'username', 'UTF8', None, ), # 2
(3, TType.STRING, 'password', 'UTF8', None, ), # 3
(4, TType.SET, 'auths', (TType.I32, None, False), None, ), # 4
)
all_structs.append(DeleteUserReq)
DeleteUserReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.STRING, 'username', 'UTF8', None, ), # 2
)
all_structs.append(GetUserReq)
GetUserReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
(2, TType.LIST, 'usernames', (TType.STRING, 'UTF8', False), None, ), # 2
)
all_structs.append(GetUserResp)
GetUserResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'usernames', (TType.STRING, 'UTF8', False), None, ), # 2
(3, TType.LIST, 'userTypes', (TType.I32, None, False), None, ), # 3
(4, TType.LIST, 'auths', (TType.SET, (TType.I32, None, False), False), None, ), # 4
)
all_structs.append(GetClusterInfoReq)
GetClusterInfoReq.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, ), # 1
)
all_structs.append(IginxInfo)
IginxInfo.thrift_spec = (
None, # 0
(1, TType.I64, 'id', None, None, ), # 1
(2, TType.STRING, 'ip', 'UTF8', None, ), # 2
(3, TType.I32, 'port', None, None, ), # 3
)
all_structs.append(StorageEngineInfo)
StorageEngineInfo.thrift_spec = (
None, # 0
(1, TType.I64, 'id', None, None, ), # 1
(2, TType.STRING, 'ip', 'UTF8', None, ), # 2
(3, TType.I32, 'port', None, None, ), # 3
(4, TType.STRING, 'type', 'UTF8', None, ), # 4
)
all_structs.append(MetaStorageInfo)
MetaStorageInfo.thrift_spec = (
None, # 0
(1, TType.STRING, 'ip', 'UTF8', None, ), # 1
(2, TType.I32, 'port', None, None, ), # 2
(3, TType.STRING, 'type', 'UTF8', None, ), # 3
)
all_structs.append(LocalMetaStorageInfo)
LocalMetaStorageInfo.thrift_spec = (
None, # 0
(1, TType.STRING, 'path', 'UTF8', None, ), # 1
)
all_structs.append(GetClusterInfoResp)
GetClusterInfoResp.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', [Status, None], None, ), # 1
(2, TType.LIST, 'iginxInfos', (TType.STRUCT, [IginxInfo, None], False), None, ), # 2
(3, TType.LIST, 'storageEngineInfos', (TType.STRUCT, [StorageEngineInfo, None], False), None, ), # 3
(4, TType.LIST, 'metaStorageInfos', (TType.STRUCT, [MetaStorageInfo, None], False), None, ), # 4
(5, TType.STRUCT, 'localMetaStorageInfo', [LocalMetaStorageInfo, None], None, ), # 5
)
fix_spec(all_structs)
del all_structs
| 38.196919
| 356
| 0.555962
|
5f4b79d73428df22d6f1f27b9989096cbe48ed3c
| 9,408
|
py
|
Python
|
gccutils/asyncscrapers/adviseescraper.py
|
redParrot17/mygcc-utils
|
6ef163d71c2697c3e42272a730314796082fc6fc
|
[
"MIT"
] | 1
|
2021-02-12T19:46:56.000Z
|
2021-02-12T19:46:56.000Z
|
gccutils/asyncscrapers/adviseescraper.py
|
redParrot17/mygcc-utils
|
6ef163d71c2697c3e42272a730314796082fc6fc
|
[
"MIT"
] | null | null | null |
gccutils/asyncscrapers/adviseescraper.py
|
redParrot17/mygcc-utils
|
6ef163d71c2697c3e42272a730314796082fc6fc
|
[
"MIT"
] | null | null | null |
from gccutils.asyncscrapers.scrapersession import AsyncScraperManager, AsyncScraperSession
import gccutils.errors as errors
import traceback
import uuid
import time
__all__ = ('AsyncAdviseeScraper',)
class AdviseeOverviewParser:
"""A helper class for parsing data from an advisee overview page."""
LEFT_TABLE_ID = 'pg0_V_tblSummaryLeft'
RIGHT_TABLE_ID = 'pg0_V_tblSummaryRight'
def __init__(self, html_soup):
"""Constructor
:param html_soup: the BeautifulSoup instance for the page
"""
self.html = html_soup
def parse(self):
"""Parses the HTML for important data relevant to the student.
:return: dictionary containing the data from the page
"""
values = {}
table = self.get_table(self.LEFT_TABLE_ID)
for row in table.find_all('tr'):
try:
name, value = self.fetch_value(row)
values[name] = value
except errors.ScraperError:
traceback.print_exc()
table = self.get_table(self.RIGHT_TABLE_ID)
for row in table.find_all('tr'):
try:
name, value = self.fetch_value(row)
values[name] = value
except errors.ScraperError:
traceback.print_exc()
return values
def get_table(self, table_id):
"""Finds and returns a table with the specified element id.
:param table_id: the unique identifier of the table to find
:return: the table element if found
:raises MissingElementError: if the table could not be found
"""
table = self.html.find('table', id=table_id)
if table is None:
raise errors.MissingElementError(f'Table {table_id} not found.')
return table
@staticmethod
def fetch_value(entry):
"""Returns the text value of an element's inner <td> child.
:param entry: a table element with an inner <td> child
:return: the value associated with the element's inner <td> child
:raises MissingElementError: if the element does not contain a <td> tag
"""
name_element = entry.find('th')
value_element = entry.find('td')
if name_element is None: # raise an exception if no name element was found
raise errors.MissingElementError('Overview table row contains no name element <th></th>')
if value_element is None: # raise an exception if no value element was found
raise errors.MissingElementError('Overview table row contains no value element <td></td>')
name = name_element.get_text(strip=True)\
.replace(u'\xa0', '').lower()\
.replace(' ', '_').replace(':', '')
value = value_element.get_text(separator=' ', strip=True)\
.replace(u'\xa0', '')
return name, value
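# A hypothetical usage sketch; 'soup' would be a BeautifulSoup instance of an
# advisee overview page, and the returned keys depend on the page's tables:
#
#     parser = AdviseeOverviewParser(soup)
#     overview = parser.parse()   # e.g. {'advisor': '...', 'gpa': '...'}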
class AsyncAdviseeScraperSession(AsyncScraperSession):
"""Web-scraping thread for obtaining adviser's advisee information."""
STUDENT_TO_ROSTER_EVENT_TARGET = 'sb00bc534cd-3ee3-4fc5-be95-b3850319f0b8'
ADVISING_ROUTE = '/ICS/Advising'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def run(self):
"""
        The primary method of this thread that controls the web-scraping of advisees on https://my.gcc.edu/.
Do not manually call this method. Use ScraperSession#start() instead.
"""
start_time = time.time()
callback = self.callback
thread_num = self.thread_num
num_threads = self.num_threads
self.aborted = False
print(f'advisee scraper thread [{thread_num}|{num_threads}] starting')
# navigate to the first roster page
self.navigate_to_roster()
new_page = True
while new_page and not self.aborted:
try:
student_rows = self.get_all_student_rows()
for student_row in student_rows:
if not self.aborted:
student = self.build_student_dict(*student_row)
if student is not None:
callback(student)
except errors.ScraperError:
traceback.print_exc()
# navigate to the next page
if not self.aborted and not self.try_nav_to_next_page():
new_page = False
runtime = int(time.time() - start_time)
print(f'advisee scraper thread [{thread_num}|{num_threads}] stopping after {runtime} seconds')
def navigate_to_roster(self):
dc = self.dc
# navigate to advising tab
dc.http_get(dc.to_url(self.ADVISING_ROUTE))
dc.ensure_screen(dc.to_url(self.ADVISING_ROUTE))
# create the payload to be sent
search_btn = dc.html.find('input', id='pg0_V_btnSearch')
self.perform_navigation(search_btn)
def get_all_student_rows(self):
thread_num = self.thread_num
num_threads = self.num_threads
table = self.dc.html.find('tbody', class_='gbody')
if table is None:
unique_filename = str(uuid.uuid4()) + '.html'
with open(unique_filename, 'w+') as file:
file.write(str(self.dc.html))
raise errors.MissingElementError(f'Roster table is missing. (state in {unique_filename})')
rows = table.find_all('tr')
row_index = 0
results = []
for row in rows:
if row_index % num_threads == thread_num:
try:
email, name, user_id, nav_element = self.parse_table_row(row, row_index)
results.append((email, name, user_id, nav_element))
except errors.ScraperError:
traceback.print_exc()
row_index += 1
return results
@staticmethod
def parse_table_row(row, index):
expected_column_count = 6
columns = row.find_all('td')
if len(columns) != expected_column_count:
raise errors.UnexpectedElementPropertyError(
f'Expected roster row[{index}] to have {expected_column_count} '
f'columns but found {len(columns)}.')
_, email_col, name_col, id_col, _, _ = columns
# get email
email = email_col.find('input', type='image')
if email is None:
raise errors.MissingElementError(f'Roster row[{index}] has no email column.')
email = email.get('title')
# get name
name = name_col.find('a')
if name is None:
raise errors.MissingElementError(f'Roster row[{index}] has no name column.')
name = name.get_text()
# get student id
user_id = id_col.get_text()
if not user_id:
raise errors.UnexpectedElementPropertyError(
f'Roster row[{index}] has no value associated with the user id column.')
# get href
nav_element = name_col.find('a')
if nav_element is None:
raise errors.MissingElementError(f'Roster row[{index}] has no navigation element.')
return email, name, user_id, nav_element
def build_student_dict(self, email, name, user_id, nav_element):
# if any navigation fails here, we cannot recover without restarting
self.perform_navigation(nav_element) # navigate to student overview
try:
parser = AdviseeOverviewParser(self.dc.html)
overview = parser.parse()
overview['email'] = email
overview['name'] = name
overview['user_id'] = user_id
return overview
except errors.ScraperError:
traceback.print_exc()
finally:
# The __EVENTTARGET postback identifier is hardcoded because MyGCC is a jerk.
# We cannot reliably obtain the navigation element to get back to the roster
# since MyGCC will randomly not include it within the breadcrumb trail.
self.perform_navigation(None, self.STUDENT_TO_ROSTER_EVENT_TARGET)
def try_nav_to_next_page(self):
next_page = self.get_next_page_element()
if next_page is None:
return False
        self.perform_navigation(next_page)
        return True
def get_next_page_element(self):
# find the navigation container
navigator = self.dc.html.find('div', class_='letterNavigator')
# ensure the list exists
if navigator is not None:
# find all navigation elements within the container
nav_links = navigator.find_all(recursive=False)
# if the last one is "next page" then return it
if nav_links and nav_links[-1].get_text() == 'Next page -->':
return nav_links[-1]
def perform_navigation(self, nav_element, event_target=None):
dc = self.dc
action, payload = dc.prepare_payload(nav_element=nav_element)
if isinstance(event_target, str):
payload['__EVENTTARGET'] = event_target
dc.http_post(dc.to_url(action), data=payload)
class AsyncAdviseeScraper(AsyncScraperManager):
def __init__(self, username, password, callback):
super().__init__(username, password, AsyncAdviseeScraperSession, callback)
if self._cpu_count != 1:
print('WARNING: Advisee scraping is currently less stable when run on multiple threads.')
| 34.973978
| 107
| 0.620217
|
04e3bd2e18176e822c743d3c7d206766b9a32c64
| 2,184
|
py
|
Python
|
src/cfnlint/rules/functions/RefInCondition.py
|
mvanholsteijn/cfn-python-lint
|
cddd272db852c0ceec39a873430a84559a1ac48d
|
[
"MIT-0"
] | null | null | null |
src/cfnlint/rules/functions/RefInCondition.py
|
mvanholsteijn/cfn-python-lint
|
cddd272db852c0ceec39a873430a84559a1ac48d
|
[
"MIT-0"
] | null | null | null |
src/cfnlint/rules/functions/RefInCondition.py
|
mvanholsteijn/cfn-python-lint
|
cddd272db852c0ceec39a873430a84559a1ac48d
|
[
"MIT-0"
] | null | null | null |
"""
Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import six
from cfnlint import CloudFormationLintRule
from cfnlint import RuleMatch
class RefInCondition(CloudFormationLintRule):
"""Check if Ref value is a string"""
id = 'E1026'
shortdesc = 'Cannot reference resources in the Conditions block of the template'
description = 'Check that any Refs in the Conditions block uses no resources'
source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/intrinsic-function-reference-conditions.html#w2ab2c21c28c21c45'
tags = ['functions', 'ref']
def match(self, cfn):
"""Check CloudFormation Ref"""
matches = list()
ref_objs = cfn.search_deep_keys('Ref')
resource_names = cfn.get_resource_names()
for ref_obj in ref_objs:
if ref_obj[0] == 'Conditions':
value = ref_obj[-1]
if isinstance(value, (six.string_types, six.text_type, int)):
if value in resource_names:
message = 'Cannot reference resource {0} in the Conditions block of the template at {1}'
matches.append(RuleMatch(ref_obj[:-1], message.format(value, '/'.join(map(str, ref_obj[:-1])))))
return matches
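# Illustrative (hypothetical) template fragment this rule would flag, because
# the Conditions block Refs the resource "MyBucket":
#
#     Resources:
#       MyBucket:
#         Type: AWS::S3::Bucket
#     Conditions:
#       HasBucket: !Equals [!Ref MyBucket, "expected-value"]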
| 46.468085
| 144
| 0.704212
|
6f0087602416439330a3833b8747439fc692b1f0
| 7,049
|
py
|
Python
|
packages/python/plotly/plotly/graph_objs/parcoords/line/colorbar/_title.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/graph_objs/parcoords/line/colorbar/_title.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/graph_objs/parcoords/line/colorbar/_title.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Title(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "parcoords.line.colorbar"
_path_str = "parcoords.line.colorbar.title"
_valid_props = {"font", "side", "text"}
# font
# ----
@property
def font(self):
"""
Sets this color bar's title font. Note that the title's font
used to be set by the now deprecated `titlefont` attribute.
The 'font' property is an instance of Font
that may be specified as:
- An instance of :class:`plotly.graph_objs.parcoords.line.colorbar.title.Font`
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
plotly.graph_objs.parcoords.line.colorbar.title.Font
"""
return self["font"]
@font.setter
def font(self, val):
self["font"] = val
# side
# ----
@property
def side(self):
"""
Determines the location of color bar's title with respect to
the color bar. Defaults to "top" when `orientation` if "v" and
defaults to "right" when `orientation` if "h". Note that the
title's location used to be set by the now deprecated
`titleside` attribute.
The 'side' property is an enumeration that may be specified as:
- One of the following enumeration values:
['right', 'top', 'bottom']
Returns
-------
Any
"""
return self["side"]
@side.setter
def side(self, val):
self["side"] = val
# text
# ----
@property
def text(self):
"""
Sets the title of the color bar. Note that before the existence
of `title.text`, the title's contents used to be defined as the
`title` attribute itself. This behavior has been deprecated.
The 'text' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["text"]
@text.setter
def text(self, val):
self["text"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
font
Sets this color bar's title font. Note that the title's
font used to be set by the now deprecated `titlefont`
attribute.
side
Determines the location of color bar's title with
respect to the color bar. Defaults to "top" when
`orientation` if "v" and defaults to "right" when
`orientation` if "h". Note that the title's location
used to be set by the now deprecated `titleside`
attribute.
text
Sets the title of the color bar. Note that before the
existence of `title.text`, the title's contents used to
be defined as the `title` attribute itself. This
behavior has been deprecated.
"""
def __init__(self, arg=None, font=None, side=None, text=None, **kwargs):
"""
Construct a new Title object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.parcoords.line
.colorbar.Title`
font
Sets this color bar's title font. Note that the title's
font used to be set by the now deprecated `titlefont`
attribute.
side
Determines the location of color bar's title with
respect to the color bar. Defaults to "top" when
`orientation` if "v" and defaults to "right" when
`orientation` if "h". Note that the title's location
used to be set by the now deprecated `titleside`
attribute.
text
Sets the title of the color bar. Note that before the
existence of `title.text`, the title's contents used to
be defined as the `title` attribute itself. This
behavior has been deprecated.
Returns
-------
Title
"""
super(Title, self).__init__("title")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.parcoords.line.colorbar.Title
constructor must be a dict or
an instance of :class:`plotly.graph_objs.parcoords.line.colorbar.Title`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("font", None)
_v = font if font is not None else _v
if _v is not None:
self["font"] = _v
_v = arg.pop("side", None)
_v = side if side is not None else _v
if _v is not None:
self["side"] = _v
_v = arg.pop("text", None)
_v = text if text is not None else _v
if _v is not None:
self["text"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
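# A minimal construction sketch (all values are illustrative):
#
#     title = Title(text="Line color", side="right", font=dict(size=14))
#     title.to_plotly_json()   # roughly {'text': 'Line color', 'side': 'right', 'font': {'size': 14}}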
| 33.407583
| 88
| 0.544474
|
2bd544fc77148442e0a95691e3d7c8403f6f1082
| 7,860
|
py
|
Python
|
course2/run_log.py
|
uofon-shaowen/SummerCourse2021
|
f3b4d3ef947190b42cf675ce4b17d6fbcf1f9da0
|
[
"MIT"
] | 28
|
2021-08-12T09:36:54.000Z
|
2022-02-08T09:19:40.000Z
|
course2/run_log.py
|
uofon-shaowen/SummerCourse2021
|
f3b4d3ef947190b42cf675ce4b17d6fbcf1f9da0
|
[
"MIT"
] | 2
|
2021-08-22T11:49:53.000Z
|
2021-08-25T02:31:53.000Z
|
course2/run_log.py
|
uofon-shaowen/SummerCourse2021
|
f3b4d3ef947190b42cf675ce4b17d6fbcf1f9da0
|
[
"MIT"
] | 22
|
2021-08-16T12:23:13.000Z
|
2021-12-02T07:58:09.000Z
|
# -*- coding:utf-8 -*-
import os
import time
import json
import numpy as np
from env.chooseenv import make
from utils.get_logger import get_logger
from env.obs_interfaces.observation import obs_type
import argparse
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NpEncoder, self).default(obj)
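# Usage sketch: passing cls=NpEncoder lets json.dumps serialize numpy values,
# e.g. json.dumps({"reward": np.float32(1.5), "obs": np.zeros(2)}, cls=NpEncoder)
# yields '{"reward": 1.5, "obs": [0.0, 0.0]}'.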
def get_players_and_action_space_list(g):
if sum(g.agent_nums) != g.n_player:
raise Exception("agents number = %d 不正确,与n_player = %d 不匹配" % (sum(g.agent_nums), g.n_player))
n_agent_num = list(g.agent_nums)
for i in range(1, len(n_agent_num)):
n_agent_num[i] += n_agent_num[i - 1]
    # Assign player ids according to the per-policy agent counts
players_id = []
actions_space = []
for policy_i in range(len(g.obs_type)):
if policy_i == 0:
players_id_list = range(n_agent_num[policy_i])
else:
players_id_list = range(n_agent_num[policy_i - 1], n_agent_num[policy_i])
players_id.append(players_id_list)
action_space_list = [g.get_single_action_space(player_id) for player_id in players_id_list]
actions_space.append(action_space_list)
return players_id, actions_space
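# Illustrative example: with g.agent_nums == [1, 3], the cumulative sums give
# n_agent_num == [1, 4], so policy 0 controls player ids range(0, 1) and
# policy 1 controls player ids range(1, 4).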
def get_joint_action_eval(game, multi_part_agent_ids, policy_list, actions_spaces, all_observes):
if len(policy_list) != len(game.agent_nums):
error = "模型个数%d与玩家个数%d维度不正确!" % (len(policy_list), len(game.agent_nums))
raise Exception(error)
# [[[0, 0, 0, 1]], [[0, 1, 0, 0]]]
joint_action = []
for policy_i in range(len(policy_list)):
if game.obs_type[policy_i] not in obs_type:
raise Exception("可选obs类型:%s" % str(obs_type))
agents_id_list = multi_part_agent_ids[policy_i]
# if game.obs_type[policy_i] == "grid":
# obs_list = game.get_grid_many_observation(game.current_state, players_id_list, info_before)
# elif game.obs_type[policy_i] == "vector":
# obs_list = game.get_vector_many_observation(game.current_state, players_id_list, info_before)
# elif game.obs_type[policy_i] == "dict":
# obs_list = game.get_dict_many_observation(game.current_state, players_id_list, info_before)
action_space_list = actions_spaces[policy_i]
function_name = 'm%d' % policy_i
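        # m0, m1, ... are injected into globals() by run_game() below via
        # exec("from <submission> import my_controller as m%d"), so
        # eval(function_name) resolves to this policy's controller.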
for i in range(len(agents_id_list)):
agent_id = agents_id_list[i]
a_obs = all_observes[agent_id]
each = eval(function_name)(a_obs, action_space_list[i], game.is_act_continuous)
# if len(each) != game.agent_nums[policy_i]:
# error = "模型%d动作空间维度%d不正确!应该是%d" % (int(t_agents_id[policy_i]), len(each), game.agent_nums[policy_i])
# raise Exception(error)
joint_action.append(each)
return joint_action
def set_seed(g, env_name):
if env_name.split("-")[0] in ['magent']:
g.reset()
seed = g.create_seed()
g.set_seed(seed)
def run_game(g, env_name, multi_part_agent_ids, actions_spaces, policy_list, render_mode):
"""
    This function is used to generate a log for Vue rendering. Saves a .json file.
"""
log_path = os.getcwd() + '/logs/'
if not os.path.exists(log_path):
os.mkdir(log_path)
logger = get_logger(log_path, g.game_name, json_file=render_mode)
set_seed(g, env_name)
for i in range(len(policy_list)):
if policy_list[i] not in get_valid_agents():
raise Exception("agents {} not valid!".format(policy_list[i]))
file_path = os.path.dirname(os.path.abspath(__file__)) + "/examples/algo/homework" + "/submission.py"
if not os.path.exists(file_path):
raise Exception("file {} not exist!".format(file_path))
import_path = '.'.join(file_path.split('/')[-4:])[:-3]
function_name = 'm%d' % i
import_name = "my_controller"
import_s = "from %s import %s as %s" % (import_path, import_name, function_name)
print(import_s)
exec(import_s, globals())
st = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
game_info = {"game_name": env_name,
"n_player": g.n_player,
"board_height": g.board_height if hasattr(g, "board_height") else None,
"board_width": g.board_width if hasattr(g, "board_width") else None,
"init_info": g.init_info,
"start_time": st,
"mode": "terminal",
"seed": g.seed if hasattr(g, "seed") else None,
"map_size": g.map_size if hasattr(g, "map_size") else None}
steps = []
info_before = ''
all_observes = g.all_observes
while not g.is_terminal():
step = "step%d" % g.step_cnt
if g.step_cnt % 10 == 0:
print(step)
if hasattr(g, "env_core"):
if hasattr(g.env_core, "render"):
g.env_core.render()
info_dict = {}
info_dict["time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
joint_act = get_joint_action_eval(g, multi_part_agent_ids, policy_list, actions_spaces, all_observes)
all_observes, reward, done, info_before, info_after = g.step(joint_act)
if env_name.split("-")[0] in ["magent"]:
info_dict["joint_action"] = g.decode(joint_act)
if info_before:
info_dict["info_before"] = info_before
info_dict["reward"] = reward
if info_after:
info_dict["info_after"] = info_after
steps.append(info_dict)
game_info["steps"] = steps
game_info["winner"] = g.check_win()
game_info["winner_information"] = g.won
game_info["n_return"] = g.n_return
ed = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
game_info["end_time"] = ed
logs = json.dumps(game_info, ensure_ascii=False, cls=NpEncoder)
logger.info(logs)
def get_valid_agents():
dir_path = os.path.join(os.path.dirname(__file__), 'examples', 'algo')
return [f for f in os.listdir(dir_path) if f != "__pycache__"]
if __name__ == "__main__":
# "gobang_1v1", "reversi_1v1", "snakes_1v1", "sokoban_2p", "snakes_3v3", "snakes_5p", "sokoban_1p"
# "classic_CartPole-v0", "classic_MountainCar-v0", "classic_MountainCarContinuous-v0",
# "classic_Pendulum-v0", "classic_Acrobot-v1", "football_11v11_kaggle", "MiniWorld-Hallway-v0",
# "MiniWorld-OneRoom-v0", "MiniWorld-OneRoomS6-v0", "MiniWorld-OneRoomS6Fast-v0",
# "MiniWorld-TMaze-v0", "MiniWorld-TMazeLeft-v0", "MiniWorld-TMazeRight-v0", "MiniGrid-DoorKey-16x16-v0",
# "MiniGrid-MultiRoom-N6-v0", "MiniGrid-Dynamic-Obstacles-16x16-v0", "ParticleEnv-simple",
# "ParticleEnv-simple_adversary", "ParticleEnv-simple_crypto", "ParticleEnv-simple_push",
# "ParticleEnv-simple_reference", "ParticleEnv-simple_speaker_listener", "ParticleEnv-simple_spread",
# "ParticleEnv-simple_tag", "ParticleEnv-simple_world_comm", "football_11_vs_11_stochastic",
# "overcookedai-cramped_room", "overcookedai-asymmetric_advantages", "overcookedai-coordination_ring",
# "overcookedai-forced_coordination", "overcookedai-counter_circuit", "magent-battle_v3-12v12",
# "magent-battle_v3-20v20", "gridworld", "cliffwalking"
env_type = "cliffwalking"
game = make(env_type)
    # For "classic_" environments, render via the gym core;
    # for other environments, use replay.html from the replay toolkit and upload the .json log for browser playback
render_mode = True
policy_list = ["tabularq"] * len(game.agent_nums)
multi_part_agent_ids, actions_space = get_players_and_action_space_list(game)
run_game(game, env_type, multi_part_agent_ids, actions_space, policy_list, render_mode)
| 41.151832
| 118
| 0.652926
|
781483c6265554a4415ac89ba155875a0d6dbf31
| 8,209
|
py
|
Python
|
scripts/healthy_levels_plot.py
|
always-newbie161/pyprobml
|
eb70c84f9618d68235ef9ba7da147c009b2e4a80
|
[
"MIT"
] | 2
|
2021-02-26T04:36:10.000Z
|
2021-02-26T04:36:24.000Z
|
scripts/healthy_levels_plot.py
|
always-newbie161/pyprobml
|
eb70c84f9618d68235ef9ba7da147c009b2e4a80
|
[
"MIT"
] | 1
|
2021-04-22T15:46:27.000Z
|
2021-04-22T15:46:27.000Z
|
scripts/healthy_levels_plot.py
|
always-newbie161/pyprobml
|
eb70c84f9618d68235ef9ba7da147c009b2e4a80
|
[
"MIT"
] | 1
|
2021-06-21T01:18:07.000Z
|
2021-06-21T01:18:07.000Z
|
# Based on https://github.com/probml/pmtk3/blob/master/demos/healthyLevels.m
# Converted by John Fearns - jdf22@infradead.org
# Josh Tenenbaum's Healthy Levels game
import numpy as np
import matplotlib.pyplot as plt
#from pyprobml_utils import save_fig
# Ensure stochastic reproducibility.
np.random.seed(11)
# Generate the synthetic data - positive examples only. Data is returned
# as a 2-column table. First column is cholesterol levels, second is insulin.
def generate_data():
# Healthy levels we are trying to discover
    c_low, c_high = 0.35, 0.55
    i_low, i_high = 0.45, 0.65
# Cheat and use interesting-looking data.
c = [0.351, 0.363, 0.40, 0.54, 0.45, 0.49, 0.48, 0.50, 0.45, 0.41, 0.53, 0.54]
i = [0.452, 0.64, 0.46, 0.55, 0.55, 0.50, 0.49, 0.61, 0.58, 0.46, 0.53, 0.64]
return np.column_stack([c, i])
# Calculates the range of the provided data points: x_min, x_max, y_min, y_max, x_scale, y_scale.
def calc_data_range(data):
x_min = np.min(data[:,0], 0)
x_max = np.max(data[:,0], 0)
y_min = np.min(data[:,1], 0)
y_max = np.max(data[:,1], 0)
x_scale = x_max - x_min
y_scale = y_max - y_min
return x_min, x_max, y_min, y_max, x_scale, y_scale
# Returns a matrix. The rows are the hypotheses,
# the first column is the x-scale value, the second column is
# the y-scale value.
def get_hypotheses():
stepsize = 0.01
x = np.arange(stepsize, 1, stepsize)
xs, ys = np.meshgrid(x, x)
xs = xs.reshape(-1)
ys = ys.reshape(-1)
return np.column_stack([xs, ys])
# Returns an unnormalised prior on the hypotheses that is
# proportional to the inverse product of the hypothesis x- and y-scale
def get_uninformative_prior(hypotheses):
s1 = hypotheses[:,0]
s2 = hypotheses[:,1]
return 1 / (s1 * s2)
def calc_likelihood(hypotheses, data):
n = data.shape[0]
s1 = hypotheses[:,0]
s2 = hypotheses[:,1]
c_min, c_max, i_min, i_max, c_scale, i_scale = calc_data_range(data)
# Truth values indicating whether each hypothesis supports the data (hypothesis scale must
# cover the data range)
indications = np.logical_and(s1 > c_scale, s2 > i_scale)
# Likelihood is proportional to 1/scale in each direction for each data point,
# or to zero if the hypothesis doesn't support the data.
return indications / np.power(s1*s2, n)
def calc_posterior(likelihood, prior):
unnormalised = likelihood * prior
return unnormalised / np.sum(unnormalised)
# Plot maximum likelihood based predictions for the top 3 and 12 data points.
def plot_ml(data):
top_n = [3, 12];
for i in range(len(top_n)):
n = top_n[i]
figure = plt.figure()
plot_data = data[:n]
# Plot the data and the smallest rectangle enclosing it.
c_min, c_max, i_min, i_max, c_scale, i_scale = calc_data_range(plot_data)
plt.gca().add_patch(
plt.Rectangle((c_min, i_min),
c_scale, i_scale, fill=False,
edgecolor='black', linewidth=3)
)
plt.scatter(plot_data[:,0], plot_data[:,1], marker='+', color='red', zorder=10, linewidth=3)
plt.title('MLE predictive, n={}'.format(n), fontsize=12, y=1.03)
plt.axis('square')
plt.ylim(0, 1)
plt.xlim(0, 1)
filename = '../figures/healthyLevelsMLPred{}.pdf'.format(n)
plt.savefig(filename)
plt.show(block=False)
def plot_posterior_samples(data, hypotheses, prior):
top_n = [3, 12]
for i in range(len(top_n)):
plot_data = data[:top_n[i]]
plot_lik = calc_likelihood(hypotheses, plot_data)
plot_post = calc_posterior(plot_lik, prior)
figure = plt.figure()
prior_type = 'uninfPrior'
title = r'samples from $p(h|D_{{1:{}}})$, {}'.format(top_n[i], prior_type)
plot_sampled_hypotheses(hypotheses, plot_post, plot_data, title)
filename = '../figures/healthyLevelsSamples{}{}.pdf'.format(top_n[i], prior_type)
plt.title(title, fontsize=12, y=1.03)
plt.savefig(filename)
plt.show(block=False)
# Returns greyscale colours that reflect the supplied relative probabilities,
# ranging from black for the most probable, to light grey for the least probable.
def colours(probabilities):
max_prob = np.max(probabilities)
intensities = 1 - (0.25 + 0.75 * probabilities / max_prob)
intensities = intensities.reshape(intensities.shape + (1,))
# Repeat the same intensity for all RGB channels.
return np.repeat(intensities, 3, intensities.ndim - 1)
def plot_sampled_hypotheses(hypotheses, posterior, data, title):
# Take 10 samples from the posterior.
N = 10
samples_index = np.random.choice(len(posterior), N, p=posterior)
# Reorder the samples from least to most probable, to help later with
# drawing order so that darker, higher probability samples are drawn on top of
# lighter, lower probability samples.
samples_prob = posterior[samples_index]
samples_index = samples_index[np.argsort(samples_prob)]
del samples_prob
samples_s1 = hypotheses[samples_index, 0]
samples_s2 = hypotheses[samples_index, 1]
plt.scatter(data[:,0], data[:,1], marker='+', color='red', zorder=10, linewidth=3)
c_min, c_max, i_min, i_max, c_scale, i_scale = calc_data_range(data)
samples_left = c_min - (samples_s1 - c_scale) / 2
samples_lower = i_min - (samples_s2 - i_scale) / 2
samples_colour = colours(posterior[samples_index])
for i in range(N):
plt.gca().add_patch(
plt.Rectangle((samples_left[i], samples_lower[i]),
samples_s1[i], samples_s2[i], fill=False,
edgecolor=samples_colour[i], linewidth=3)
)
plt.xlim(0.2, 0.7)
plt.ylim(0.3, 0.8)
def plot_bayes(data):
top_n = [3, 12]
for i in range(len(top_n)):
plot_data = data[:top_n[i]]
plot_contour(plot_data, i == len(top_n)-1)
def plot_contour(data, is_last_plot):
# Prepare plot x-y points in various shapes.
n = data.shape[0]
stepsize = 0.01
x = np.arange(0.00, 1.0, stepsize) + 0.01
y = x
xx, yy = np.meshgrid(x, y)
points = np.column_stack([xx.reshape(-1, 1), yy.reshape(-1, 1)])
# Predictive distribution: Tenenbaum thesis eqn 3.16.
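    # For n examples spanning ranges r1 x r2, a query point lying at distances
    # (d1, d2) outside that span gets, up to normalisation,
    #     p(y in C | X) = (1 / ((1 + d1/r1) * (1 + d2/r2))) ** (n - 1),
    # which is what the next few lines compute before normalising.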
d1, d2, r1, r2 = neighbour(data, points)
denom = (1 + (d1/r1)) * (1 + (d2/r2))
p = np.power(1/denom, n-1)
p = p / np.sum(p)
# Prepare for plotting
pp = p.reshape(xx.shape)
# Plot the predictive contours and data
figure = plt.figure()
plt.gray()
plt.contour(xx, yy, pp)
plt.scatter(data[:,0], data[:,1], marker='+', color='red', zorder=10, linewidth=3)
plt.title('Bayes predictive, n={}, uninfPrior'.format(n), fontsize=12, y=1.03)
plt.axis('square')
plt.ylim(0, 1)
plt.xlim(0, 1)
filename = '../figures/healthyLevelsBayesPred{}UninfPrior.pdf'.format(n)
plt.savefig(filename)
plt.show(block=is_last_plot)
def neighbour(data, points):
# Calculate d1, d2 of the points from the data. d_(j)[i] is 0 whenever points[i,j-1] is
# within the span of the data in the jth dimension, otherwise it's the distance to the
# nearest neighbour along that dimension.
data1_min, data1_max, data2_min, data2_max, data1_scale, data2_scale = calc_data_range(data)
d1 = (points[:,0] < data1_min) * abs(points[:,0] - data1_min) + (points[:,0] > data1_max) * abs(data1_max - points[:,0])
d2 = (points[:,1] < data2_min) * abs(points[:,1] - data2_min) + (points[:,1] > data2_max) * abs(data2_max - points[:,1])
return d1, d2, data1_scale, data2_scale
def main():
data = generate_data()
hypotheses = get_hypotheses()
prior = get_uninformative_prior(hypotheses)
plot_ml(data)
plot_posterior_samples(data, hypotheses, prior)
plot_bayes(data)
main()
| 39.277512
| 125
| 0.625655
|
2a6cd0094f6cc9c57ed8a856fffb57bc1f1f0f4b
| 3,898
|
py
|
Python
|
Compiler/GC/instructions.py
|
asifmallik/SCALE-MAMBA
|
80db831818b55b7675dd549920b5fb096db4321f
|
[
"BSD-2-Clause"
] | 196
|
2018-05-25T11:41:56.000Z
|
2022-03-12T05:49:50.000Z
|
Compiler/GC/instructions.py
|
asifmallik/SCALE-MAMBA
|
80db831818b55b7675dd549920b5fb096db4321f
|
[
"BSD-2-Clause"
] | 49
|
2018-07-17T15:49:41.000Z
|
2021-01-19T11:35:31.000Z
|
Compiler/GC/instructions.py
|
athenarc/SCALE-MAMBA
|
18fa886d820bec7e441448357b8f09e2be0e7c9e
|
[
"BSD-2-Clause"
] | 90
|
2018-05-25T11:41:42.000Z
|
2022-03-23T19:15:10.000Z
|
import Compiler.instructions_base as base
import Compiler.instructions as spdz
import Compiler.tools as tools
import collections
import itertools
class SecretBitsAF(base.RegisterArgFormat):
reg_type = 'sb'
class ClearBitsAF(base.RegisterArgFormat):
reg_type = 'cb'
base.ArgFormats['sb'] = SecretBitsAF
base.ArgFormats['sbw'] = SecretBitsAF
base.ArgFormats['cb'] = ClearBitsAF
base.ArgFormats['cbw'] = ClearBitsAF
opcodes = dict(
XORS = 0x200,
XORM = 0x201,
ANDRS = 0x202,
BITDECS = 0x203,
BITCOMS = 0x204,
CONVSINT = 0x205,
LDMSDI = 0x206,
STMSDI = 0x207,
LDMSD = 0x208,
STMSD = 0x209,
XORCI = 0x210,
BITDECC = 0x211,
CONVCINT = 0x213,
REVEAL = 0x214,
)
class xors(base.Instruction):
code = opcodes['XORS']
arg_format = ['int','sbw','sb','sb']
class xorm(base.Instruction):
code = opcodes['XORM']
arg_format = ['int','sbw','sb','cb']
class xorci(base.Instruction):
code = opcodes['XORCI']
arg_format = ['cbw','cb','int']
class andrs(base.Instruction):
code = opcodes['ANDRS']
arg_format = ['int','sbw','sb','sb']
class mulci(base.Instruction):
code = base.opcodes['MULCI']
arg_format = ['cbw','cb','int']
class bitdecs(base.VarArgsInstruction):
code = opcodes['BITDECS']
arg_format = tools.chain(['sb'], itertools.repeat('sbw'))
class bitcoms(base.VarArgsInstruction):
code = opcodes['BITCOMS']
arg_format = tools.chain(['sbw'], itertools.repeat('sb'))
class bitdecc(base.VarArgsInstruction):
code = opcodes['BITDECC']
arg_format = tools.chain(['cb'], itertools.repeat('cbw'))
class shrci(base.Instruction):
code = base.opcodes['SHRCI']
arg_format = ['cbw','cb','int']
class ldsi(base.Instruction):
code = base.opcodes['LDSI']
arg_format = ['sbw','i']
class ldms(base.DirectMemoryInstruction, base.ReadMemoryInstruction):
code = base.opcodes['LDMS']
arg_format = ['sbw','int']
class stms(base.DirectMemoryWriteInstruction):
code = base.opcodes['STMS']
arg_format = ['sb','int']
# def __init__(self, *args, **kwargs):
# super(type(self), self).__init__(*args, **kwargs)
# import inspect
# self.caller = [frame[1:] for frame in inspect.stack()[1:]]
class ldmc(base.DirectMemoryInstruction, base.ReadMemoryInstruction):
code = base.opcodes['LDMC']
arg_format = ['cbw','int']
class stmc(base.DirectMemoryWriteInstruction):
code = base.opcodes['STMC']
arg_format = ['cb','int']
class ldmsi(base.ReadMemoryInstruction):
code = base.opcodes['LDMSI']
arg_format = ['sbw','r']
class stmsi(base.WriteMemoryInstruction):
code = base.opcodes['STMSI']
arg_format = ['sb','r']
class ldmsdi(base.ReadMemoryInstruction):
code = opcodes['LDMSDI']
arg_format = ['sbw','cb']
class stmsdi(base.WriteMemoryInstruction):
code = opcodes['STMSDI']
arg_format = ['sb','cb']
class ldmsd(base.ReadMemoryInstruction):
code = opcodes['LDMSD']
arg_format = ['sbw','int']
class stmsd(base.WriteMemoryInstruction):
code = opcodes['STMSD']
arg_format = ['sb','int']
class convsint(base.Instruction):
code = opcodes['CONVSINT']
arg_format = ['sbw','r']
class convcint(base.Instruction):
code = opcodes['CONVCINT']
arg_format = ['cbw','r']
class movs(base.Instruction):
code = base.opcodes['MOVS']
arg_format = ['sbw','sb']
class bit(base.Instruction):
code = base.opcodes['BIT']
arg_format = ['sbw']
class reveal(base.Instruction):
code = opcodes['REVEAL']
arg_format = ['int','cbw','sb']
class print_reg(base.IOInstruction):
code = base.opcodes['PRINTREG']
arg_format = ['cb','i']
def __init__(self, reg, comment=''):
super(print_reg, self).__init__(reg, self.str_to_int(comment))
class print_reg_plain(base.IOInstruction):
code = base.opcodes['PRINTREGPLAIN']
arg_format = ['cb']
| 26.517007
| 70
| 0.659569
|
8014fe7ed2db49038d25513b45b94428e256e0af
| 3,783
|
py
|
Python
|
autofit/aggregator/search_output.py
|
caoxiaoyue/PyAutoFit
|
819cd2acc8d4069497a161c3bb6048128e44d828
|
[
"MIT"
] | 39
|
2019-01-24T10:45:23.000Z
|
2022-03-18T09:37:59.000Z
|
autofit/aggregator/search_output.py
|
caoxiaoyue/PyAutoFit
|
819cd2acc8d4069497a161c3bb6048128e44d828
|
[
"MIT"
] | 260
|
2018-11-27T12:56:33.000Z
|
2022-03-31T16:08:59.000Z
|
autofit/aggregator/search_output.py
|
caoxiaoyue/PyAutoFit
|
819cd2acc8d4069497a161c3bb6048128e44d828
|
[
"MIT"
] | 13
|
2018-11-30T16:49:05.000Z
|
2022-01-21T17:39:29.000Z
|
import logging
import os
import pickle
from os import path
import dill
from autofit.non_linear import abstract_search
original_create_file_handle = dill._dill._create_filehandle
def _create_file_handle(*args, **kwargs):
"""
Handle FileNotFoundError when attempting to deserialize pickles
using dill and return None instead.
"""
try:
return original_create_file_handle(
*args, **kwargs
)
except pickle.UnpicklingError as e:
if not isinstance(
e.args[0],
FileNotFoundError
):
raise e
logging.warning(
f"Could not create a handler for {e.args[0].filename} as it does not exist"
)
return None
dill._dill._create_filehandle = _create_file_handle
class SearchOutput:
"""
@DynamicAttrs
"""
def __init__(self, directory: str):
"""
Represents the output of a single search. Comprises a metadata file and other dataset files.
Parameters
----------
directory
The directory of the search
"""
self.directory = directory
self.__search = None
self.__model = None
self.file_path = os.path.join(directory, "metadata")
with open(self.file_path) as f:
self.text = f.read()
pairs = [
line.split("=")
for line
in self.text.split("\n")
if "=" in line
]
self.__dict__.update({pair[0]: pair[1] for pair in pairs})
@property
def pickle_path(self):
return path.join(self.directory, "pickles")
@property
def model_results(self) -> str:
"""
Reads the model.results file
"""
with open(os.path.join(self.directory, "model.results")) as f:
return f.read()
@property
def mask(self):
"""
A pickled mask object
"""
with open(
os.path.join(self.pickle_path, "mask.pickle"), "rb"
) as f:
return dill.load(f)
def __getattr__(self, item):
"""
Attempt to load a pickle by the same name from the search output directory.
dataset.pickle, meta_dataset.pickle etc.
"""
try:
with open(
os.path.join(self.pickle_path, f"{item}.pickle"), "rb"
) as f:
return pickle.load(f)
except FileNotFoundError:
pass
@property
def header(self) -> str:
"""
        A header created by joining the phase name and the dataset name
"""
phase = self.phase or ""
dataset_name = self.dataset_name or ""
return path.join(phase, dataset_name)
@property
def search(self) -> abstract_search.NonLinearSearch:
"""
The search object that was used in this phase
"""
if self.__search is None:
try:
with open(os.path.join(self.pickle_path, "search.pickle"), "r+b") as f:
self.__search = pickle.loads(f.read())
except FileNotFoundError as e:
logging.exception(e)
return self.__search
@property
def model(self):
"""
The model that was used in this phase
"""
if self.__model is None:
with open(os.path.join(self.pickle_path, "model.pickle"), "r+b") as f:
self.__model = pickle.loads(f.read())
return self.__model
def __str__(self):
return self.text
def __repr__(self):
return "<PhaseOutput {}>".format(self)
| 27.413043
| 101
| 0.532382
|
e14ccf01738717bc0e428f1f81aced5151fb251a
| 8,382
|
py
|
Python
|
salt/log/handlers/__init__.py
|
The-Loeki/salt
|
8ff8212cc1eacfe409eb9cc017b21250f28dd305
|
[
"Apache-2.0"
] | 1
|
2017-08-30T12:19:40.000Z
|
2017-08-30T12:19:40.000Z
|
salt/log/handlers/__init__.py
|
The-Loeki/salt
|
8ff8212cc1eacfe409eb9cc017b21250f28dd305
|
[
"Apache-2.0"
] | null | null | null |
salt/log/handlers/__init__.py
|
The-Loeki/salt
|
8ff8212cc1eacfe409eb9cc017b21250f28dd305
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
salt.log.handlers
~~~~~~~~~~~~~~~~~
.. versionadded:: 0.17.0
Custom logging handlers to be used in salt.
'''
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import sys
import logging
import threading
import collections
import logging.handlers
# Import salt libs
from salt.log.mixins import NewStyleClassMixIn, ExcInfoOnLogLevelFormatMixIn
from salt.ext.six.moves import queue
log = logging.getLogger(__name__)
if sys.version_info < (2, 7):
# Since the NullHandler is only available on python >= 2.7, here's a copy
# with NewStyleClassMixIn so it's also a new style class
class NullHandler(logging.Handler, NewStyleClassMixIn):
'''
        This is a 1-to-1 copy of Python 2.7's NullHandler
'''
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self): # pylint: disable=C0103
self.lock = None
logging.NullHandler = NullHandler
class TemporaryLoggingHandler(logging.NullHandler):
'''
This logging handler will store all the log records up to its maximum
queue size at which stage the first messages stored will be dropped.
Should only be used as a temporary logging handler, while the logging
system is not fully configured.
Once configured, pass any logging handlers that should have received the
initial log messages to the function
:func:`TemporaryLoggingHandler.sync_with_handlers` and all stored log
records will be dispatched to the provided handlers.
.. versionadded:: 0.17.0
'''
def __init__(self, level=logging.NOTSET, max_queue_size=100000):
super(TemporaryLoggingHandler, self).__init__(level=level)
self.__messages = collections.deque(maxlen=max_queue_size)
def handle(self, record):
self.acquire()
self.__messages.append(record)
self.release()
def sync_with_handlers(self, handlers=()):
'''
Sync the stored log records to the provided log handlers.
'''
if not handlers:
return
while self.__messages:
record = self.__messages.popleft()
for handler in handlers:
if handler.level > record.levelno:
# If the handler's level is higher than the log record one,
# it should not handle the log record
continue
handler.handle(record)
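# A minimal usage sketch for the temporary handler (handler wiring is
# illustrative):
#
#     temp = TemporaryLoggingHandler()
#     logging.root.addHandler(temp)            # buffer records during startup
#     ...                                      # configure the real handlers
#     temp.sync_with_handlers([file_handler])  # replay the buffered records
#     logging.root.removeHandler(temp)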
class StreamHandler(ExcInfoOnLogLevelFormatMixIn, logging.StreamHandler, NewStyleClassMixIn):
'''
Stream handler which properly handles exc_info on a per handler basis
'''
class FileHandler(ExcInfoOnLogLevelFormatMixIn, logging.FileHandler, NewStyleClassMixIn):
'''
File handler which properly handles exc_info on a per handler basis
'''
class SysLogHandler(ExcInfoOnLogLevelFormatMixIn, logging.handlers.SysLogHandler, NewStyleClassMixIn):
'''
Syslog handler which properly handles exc_info on a per handler basis
'''
def handleError(self, record):
'''
Override the default error handling mechanism for py3
Deal with syslog os errors when the log file does not exist
'''
handled = False
if sys.stderr and sys.version_info >= (3, 5, 4):
t, v, tb = sys.exc_info()
            if t.__name__ == 'FileNotFoundError':
sys.stderr.write('[WARNING ] The log_file does not exist. Logging not setup correctly or syslog service not started.\n')
handled = True
if not handled:
super(SysLogHandler, self).handleError(record)
class RotatingFileHandler(ExcInfoOnLogLevelFormatMixIn, logging.handlers.RotatingFileHandler, NewStyleClassMixIn):
'''
Rotating file handler which properly handles exc_info on a per handler basis
'''
def handleError(self, record):
'''
Override the default error handling mechanism
Deal with log file rotation errors due to log file in use
more softly.
'''
handled = False
# Can't use "salt.utils.platform.is_windows()" in this file
if (sys.platform.startswith('win') and
logging.raiseExceptions and
sys.stderr): # see Python issue 13807
exc_type, exc, exc_traceback = sys.exc_info()
try:
# PermissionError is used since Python 3.3.
# OSError is used for previous versions of Python.
if exc_type.__name__ in ('PermissionError', 'OSError') and exc.winerror == 32:
if self.level <= logging.WARNING:
sys.stderr.write('[WARNING ] Unable to rotate the log file "{0}" '
'because it is in use\n'.format(self.baseFilename)
)
handled = True
finally:
# 'del' recommended. See documentation of
# 'sys.exc_info()' for details.
del exc_type, exc, exc_traceback
if not handled:
super(RotatingFileHandler, self).handleError(record)
if sys.version_info > (2, 6):
class WatchedFileHandler(ExcInfoOnLogLevelFormatMixIn, logging.handlers.WatchedFileHandler, NewStyleClassMixIn):
'''
Watched file handler which properly handles exc_info on a per handler basis
'''
if sys.version_info < (3, 2):
class QueueHandler(ExcInfoOnLogLevelFormatMixIn, logging.Handler, NewStyleClassMixIn):
'''
This handler sends events to a queue. Typically, it would be used together
with a multiprocessing Queue to centralise logging to file in one process
(in a multi-process application), so as to avoid file write contention
between processes.
This code is new in Python 3.2, but this class can be copy pasted into
user code for use with earlier Python versions.
'''
def __init__(self, queue):
'''
Initialise an instance, using the passed queue.
'''
logging.Handler.__init__(self)
self.queue = queue
def enqueue(self, record):
'''
Enqueue a record.
The base implementation uses put_nowait. You may want to override
this method if you want to use blocking, timeouts or custom queue
implementations.
'''
try:
self.queue.put_nowait(record)
except queue.Full:
sys.stderr.write('[WARNING ] Message queue is full, '
'unable to write "{0}" to log'.format(record)
)
def prepare(self, record):
'''
Prepares a record for queuing. The object returned by this method is
enqueued.
The base implementation formats the record to merge the message
and arguments, and removes unpickleable items from the record
in-place.
You might want to override this method if you want to convert
the record to a dict or JSON string, or send a modified copy
of the record while leaving the original intact.
'''
# The format operation gets traceback text into record.exc_text
# (if there's exception data), and also puts the message into
# record.message. We can then use this to replace the original
# msg + args, as these might be unpickleable. We also zap the
# exc_info attribute, as it's no longer needed and, if not None,
# will typically not be pickleable.
self.format(record)
record.msg = record.getMessage()
record.args = None
record.exc_info = None
return record
def emit(self, record):
'''
Emit a record.
Writes the LogRecord to the queue, preparing it for pickling first.
'''
try:
self.enqueue(self.prepare(record))
except Exception:
self.handleError(record)
else:
class QueueHandler(ExcInfoOnLogLevelFormatMixIn, logging.handlers.QueueHandler): # pylint: disable=no-member,E0240
pass
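# --- Hedged usage sketch (editor's addition, not part of the upstream module) ---
# Buffer early records with TemporaryLoggingHandler, then flush them to a real
# handler once logging is configured; the handler and path names are illustrative.
if __name__ == '__main__':
    temp_handler = TemporaryLoggingHandler(level=logging.DEBUG)
    logging.getLogger().addHandler(temp_handler)
    logging.getLogger(__name__).warning('recorded before configuration')
    # ... later, once the real handlers exist ...
    file_handler = FileHandler('/tmp/example.log')
    logging.getLogger().removeHandler(temp_handler)
    logging.getLogger().addHandler(file_handler)
    temp_handler.sync_with_handlers([file_handler])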
| 35.820513
| 136
| 0.620138
|
c1703d422887c270ff0a1f839f28a014dab4561f
| 425
|
py
|
Python
|
inputs/migrations/0002_baser_science.py
|
rkeaveny/django_kelvakis_site
|
21c6c303dbecc5e7922407ec386b330eb51e9e8a
|
[
"MIT"
] | 1
|
2018-04-14T19:50:08.000Z
|
2018-04-14T19:50:08.000Z
|
inputs/migrations/0002_baser_science.py
|
rkeaveny/django_kelvakis_site
|
21c6c303dbecc5e7922407ec386b330eb51e9e8a
|
[
"MIT"
] | null | null | null |
inputs/migrations/0002_baser_science.py
|
rkeaveny/django_kelvakis_site
|
21c6c303dbecc5e7922407ec386b330eb51e9e8a
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.3 on 2018-03-27 18:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('inputs', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='baser',
name='science',
field=models.CharField(default='math', max_length=100),
preserve_default=False,
),
]
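# Hedged sketch (editor's addition): the model state this migration produces.
# Only the field added above is known; the rest of the model is assumed.
#   class Baser(models.Model):
#       science = models.CharField(max_length=100)  # one-off default 'math',
#                                                   # then preserve_default=False drops it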
| 21.25
| 67
| 0.588235
|
9b7a91662eec11b98b9712535c146d8e3b7ef845
| 9,203
|
py
|
Python
|
configurations/j3_single_slice5b.py
|
ShuaiW/kaggle-heart
|
022997f27add953c74af2b371c67d9d86cbdccc3
|
[
"MIT"
] | 182
|
2016-03-15T01:51:29.000Z
|
2021-04-21T09:49:05.000Z
|
configurations/j3_single_slice5b.py
|
weidezhang/kaggle-heart
|
022997f27add953c74af2b371c67d9d86cbdccc3
|
[
"MIT"
] | 1
|
2018-06-22T16:46:12.000Z
|
2018-06-22T21:08:09.000Z
|
configurations/j3_single_slice5b.py
|
weidezhang/kaggle-heart
|
022997f27add953c74af2b371c67d9d86cbdccc3
|
[
"MIT"
] | 61
|
2016-03-15T00:58:28.000Z
|
2020-03-06T22:00:41.000Z
|
from deep_learning_layers import ConvolutionOver2DAxisLayer, MaxPoolOverAxisLayer, MaxPoolOver2DAxisLayer, \
MaxPoolOver3DAxisLayer, ConvolutionOver3DAxisLayer, ConvolutionOverAxisLayer
from default import *
import theano.tensor as T
from layers import MuLogSigmaErfLayer, CumSumLayer
import objectives
from lasagne.layers.dnn import Conv2DDNNLayer as ConvLayer
from lasagne.layers.dnn import MaxPool2DDNNLayer as MaxPoolLayer
from lasagne.layers import InputLayer
from lasagne.layers import reshape
from lasagne.layers import DenseLayer
from lasagne.layers import BatchNormLayer
from postprocess import upsample_segmentation
from volume_estimation_layers import GaussianApproximationVolumeLayer
import theano_printer
from updates import build_adam_updates
caching = None
validate_every = 10
validate_train_set = False
save_every = 10
restart_from_save = False
dump_network_loaded_data = False
batches_per_chunk = 8
batch_size = 64
sunny_batch_size = 4
num_epochs_train = 200
image_size = 96
learning_rate_schedule = {
0: 0.0001,
175: 0.00001,
195: 0.000001,
}
from preprocess import preprocess, preprocess_with_augmentation
from postprocess import postprocess_onehot
preprocess_train = preprocess_with_augmentation
preprocess_validation = preprocess # no augmentation
preprocess_test = preprocess_with_augmentation
test_time_augmentations = 100
build_updates = build_adam_updates
postprocess = postprocess
data_sizes = {
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
def build_model():
#################
# Regular model #
#################
input_key = "sliced:data:singleslice:difference"
input_size = data_sizes[input_key]
l0 = InputLayer(input_size)
# add channel layer
# l0r = reshape(l0, (-1, 1, ) + input_size[1:])
# (batch, channel, time, x, y)
l = ConvolutionOver2DAxisLayer(l0, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = BatchNormLayer(l, gamma=None)
l = lasagne.layers.NonlinearityLayer(l, nonlinearity=lasagne.nonlinearities.rectify)
l = MaxPoolOver2DAxisLayer(l, pool_size=(2, 2), axis=(2,3), stride=(2,2))
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = BatchNormLayer(l, gamma=None)
l = lasagne.layers.NonlinearityLayer(l, nonlinearity=lasagne.nonlinearities.rectify)
l = MaxPoolOver2DAxisLayer(l, pool_size=(2, 2), axis=(2,3), stride=(2,2))
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = BatchNormLayer(l, gamma=None)
l = lasagne.layers.NonlinearityLayer(l, nonlinearity=lasagne.nonlinearities.rectify)
l = MaxPoolOver2DAxisLayer(l, pool_size=(4, 4), axis=(2,3), stride=(4,4))
l_dense = lasagne.layers.DenseLayer(lasagne.layers.DropoutLayer(l),
num_units=600,
nonlinearity=lasagne.nonlinearities.softmax)
l_systole = CumSumLayer(l_dense)
#===================================================================================
l = ConvolutionOver2DAxisLayer(l0, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = BatchNormLayer(l, gamma=None)
l = lasagne.layers.NonlinearityLayer(l, nonlinearity=lasagne.nonlinearities.rectify)
l = MaxPoolOver2DAxisLayer(l, pool_size=(2, 2), axis=(2,3), stride=(2,2))
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = BatchNormLayer(l, gamma=None)
l = lasagne.layers.NonlinearityLayer(l, nonlinearity=lasagne.nonlinearities.rectify)
l = MaxPoolOver2DAxisLayer(l, pool_size=(2, 2), axis=(2,3), stride=(2,2))
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = ConvolutionOver2DAxisLayer(l, num_filters=64, filter_size=(3, 3),
axis=(2,3), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity
)
l = BatchNormLayer(l, gamma=None)
l = lasagne.layers.NonlinearityLayer(l, nonlinearity=lasagne.nonlinearities.rectify)
l = MaxPoolOver2DAxisLayer(l, pool_size=(4, 4), axis=(2,3), stride=(4,4))
l_dense = lasagne.layers.DenseLayer(lasagne.layers.DropoutLayer(l),
num_units=600,
nonlinearity=lasagne.nonlinearities.softmax)
l_diastole = CumSumLayer(l_dense)
return {
"inputs":{
input_key: l0
},
"outputs": {
"systole": l_systole,
"diastole": l_diastole,
}
}
def build_objective(interface_layers):
return objectives.KaggleObjective(interface_layers["outputs"])
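# --- Hedged usage sketch (editor's addition) ---
# One way a training harness might consume build_model()/build_objective();
# the harness itself lives outside this config file, so this is assumed usage.
if __name__ == '__main__':
    import lasagne.layers
    interface_layers = build_model()
    objective = build_objective(interface_layers)
    # Symbolic outputs of both heads: 600-bin cumulative distributions.
    systole_out, diastole_out = lasagne.layers.get_output(
        [interface_layers["outputs"]["systole"],
         interface_layers["outputs"]["diastole"]])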
| 44.245192
| 148
| 0.545692
|
c4813d00a8c2b91570d69b0787a9666e4b348aaf
| 20,239
|
py
|
Python
|
tools/InterfaceGenerator/test/generator/generators/test_SmartFactoryJSONRPC.py
|
shoamano83/sdl_core
|
ea5960280585d11ee02542b0ab183d4400ed691d
|
[
"BSD-3-Clause"
] | null | null | null |
tools/InterfaceGenerator/test/generator/generators/test_SmartFactoryJSONRPC.py
|
shoamano83/sdl_core
|
ea5960280585d11ee02542b0ab183d4400ed691d
|
[
"BSD-3-Clause"
] | null | null | null |
tools/InterfaceGenerator/test/generator/generators/test_SmartFactoryJSONRPC.py
|
shoamano83/sdl_core
|
ea5960280585d11ee02542b0ab183d4400ed691d
|
[
"BSD-3-Clause"
] | 1
|
2020-04-22T07:17:49.000Z
|
2020-04-22T07:17:49.000Z
|
"""Test for JSONRPC SmartFactory generator.
Verifies format specific functions and produced source code.
"""
import collections
import codecs
import os
import unittest
import uuid
from mock import MagicMock
from mock import call
from generator.generators import SmartFactoryJSONRPC
from generator import Model
EXPECTED_RESULT_REQUEST = (
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_FUNCTION_ID] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<FunctionID::eType>::create("""
u"""function_id_items), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_MESSAGE_TYPE] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<messageType::eType>::create("""
u"""message_type_items), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_VERSION] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_TYPE] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_CORRELATION_ID] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
)
EXPECTED_RESULT_RESPONSE = (
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_FUNCTION_ID] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<FunctionID::eType>::create("""
u"""function_id_items), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_MESSAGE_TYPE] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<messageType::eType>::create("""
u"""message_type_items), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_VERSION] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_TYPE] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_CORRELATION_ID] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::kCode] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
)
EXPECTED_RESULT_NOTIFICATION = (
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_FUNCTION_ID] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<FunctionID::eType>::create("""
u"""function_id_items), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_MESSAGE_TYPE] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<messageType::eType>::create("""
u"""message_type_items), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_VERSION] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u"""params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_TYPE] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
)
EXPECTED_PRE_FUNCTION_CODE = (
u""" std::map<std::string, CObjectSchemaItem::SMember> """
u"""params_members;\n"""
u""" params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_FUNCTION_ID] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<FunctionID::eType>::create("""
u"""function_id_items), true);\n"""
u""" params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_MESSAGE_TYPE] = CObjectSchemaItem::SMember("""
u"""TEnumSchemaItem<messageType::eType>::create("""
u"""message_type_items), true);\n"""
u""" params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_VERSION] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u""" params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PROTOCOL_TYPE] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u""" params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_CORRELATION_ID] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u""" params_members[ns_smart_device_link::ns_json_handler"""
u"""::strings::kCode] = CObjectSchemaItem::SMember("""
u"""TNumberSchemaItem<int>::create(), true);\n"""
u""" params_members[ns_smart_device_link::ns_json_handler::"""
u"""strings::kMessage] = CObjectSchemaItem::SMember("""
u"""CStringSchemaItem::create(), true);\n"""
u"""\n"""
u""" std::map<std::string, CObjectSchemaItem::SMember> """
u"""root_members_map;\n"""
u""" root_members_map[ns_smart_device_link::ns_json_handler::"""
u"""strings::S_PARAMS] = CObjectSchemaItem::SMember("""
u"""CObjectSchemaItem::create(params_members), true);\n"""
u"""\n"""
u""" CSmartSchema error_response_schema(CObjectSchemaItem::create("""
u"""root_members_map));\n"""
u"""\n"""
u""" functions_schemes_.insert(std::make_pair("""
u"""ns_smart_device_link::ns_json_handler::SmartSchemaKey<"""
u"""FunctionID::eType, messageType::eType>("""
u"""FunctionID::request, messageType::error_response)"""
u""", error_response_schema));\n"""
u"""\n"""
)
DESCRIPTION = [u"Description Line1", u"Description Line2"]
DESIGN_DESCRIPTION = [u"Design Line1"]
ISSUES = [Model.Issue(value=u"Issue1"),
Model.Issue(value=u"Issue2"),
Model.Issue(value=u"Issue3")]
TODOS = [u"Do1", u"Do2"]
class Test(unittest.TestCase):
"""Test for JSONRPC SmartFactory generator.
    This class holds a set of test cases for the JSONRPC SmartFactory generator.
"""
def test_gen_schema_params_fill(self):
"""Test feature that allows to create format specific PARAMS.
Verifies JSONRPC implementation of the _gen_schema_params_fill
method.
"""
generator = SmartFactoryJSONRPC.CodeGenerator()
self.assertEqual(generator._gen_schema_params_fill("request"),
EXPECTED_RESULT_REQUEST,
"Invalid code generation for request")
self.assertEqual(generator._gen_schema_params_fill(u"request"),
EXPECTED_RESULT_REQUEST,
"Invalid code generation for request")
self.assertEqual(generator._gen_schema_params_fill("response"),
EXPECTED_RESULT_RESPONSE,
"Invalid code generation for response")
self.assertEqual(generator._gen_schema_params_fill(u"response"),
EXPECTED_RESULT_RESPONSE,
"Invalid code generation for response")
self.assertEqual(generator._gen_schema_params_fill("notification"),
EXPECTED_RESULT_NOTIFICATION,
"Invalid code generation for notification")
self.assertEqual(generator._gen_schema_params_fill(u"notification"),
EXPECTED_RESULT_NOTIFICATION,
"Invalid code generation for notification")
def test_preprocess_message_type(self):
"""Test preprocessing of the message_type enum.
        Verifies JSONRPC implementation of the _preprocess_message_type
function.
"""
generator = SmartFactoryJSONRPC.CodeGenerator()
message_type_elements = collections.OrderedDict()
message_type_elements[u"request"] = Model.EnumElement(name=u"request")
message_type_elements[u"response"] = Model.EnumElement(
name=u"response")
message_type_elements[u"notification"] = Model.EnumElement(
name=u"notification")
message_type = Model.Enum(name=u"messageType",
elements=message_type_elements)
result_enum = generator._preprocess_message_type(message_type)
self.assertIn("error_response", result_enum.elements)
self.assertEqual("error_response",
result_enum.elements["error_response"].primary_name)
message_type_elements = collections.OrderedDict()
message_type_elements[u"request"] = Model.EnumElement(name=u"request")
message_type_elements[u"notification"] = Model.EnumElement(
name=u"notification")
message_type = Model.Enum(name=u"messageType",
elements=message_type_elements)
result_enum = generator._preprocess_message_type(message_type)
self.assertNotIn("error_response", result_enum.elements)
def test_gen_pre_function_schemas(self):
"""Test code that goes before schema initialization.
        Verifies JSONRPC implementation of the _gen_pre_function_schemas
function.
"""
generator = SmartFactoryJSONRPC.CodeGenerator()
self.assertEqual(u"",
generator._gen_pre_function_schemas([]),
"Invalid code for empty functions list")
message_type_elements = collections.OrderedDict()
message_type_elements[u"request"] = Model.EnumElement(name=u"request")
message_type_elements[u"response"] = Model.EnumElement(
name=u"response")
message_type_elements[u"notification"] = Model.EnumElement(
name=u"notification")
message_type = Model.Enum(name=u"messageType",
elements=message_type_elements)
function1 = Model.Function(
"func1", function_id=message_type.elements[u"request"],
message_type=message_type.elements[u"request"])
self.assertEqual(u"",
generator._gen_pre_function_schemas([function1]),
"Invalid code for empty functions list")
function2 = Model.Function(
"func2", function_id=message_type.elements[u"request"],
message_type=message_type.elements[u"response"])
self.assertEqual(EXPECTED_PRE_FUNCTION_CODE,
generator._gen_pre_function_schemas([function2]),
"Invalid code for single response function")
self.assertEqual(EXPECTED_PRE_FUNCTION_CODE,
generator._gen_pre_function_schemas([function1,
function2]),
"Invalid code for mixed function list")
def test_full_generation(self):
"""Test full generation using JSONRPC SmartSchema generator.
Creates output files which is captured by the mock and compare them
with sample files with correct code. This test requires valid
test_expected_jsonrpc.h and test_expected_jsonrpc.cc in the same
directory as this module.
"""
expected_h_file_content = open("test_expected_jsonrpc.h", "r").read()
expected_cc_file_content = open("test_expected_jsonrpc.cc", "r").read()
generator = SmartFactoryJSONRPC.CodeGenerator()
message_type_elements = collections.OrderedDict()
message_type_elements[u"request"] = Model.EnumElement(name=u"request")
message_type_elements[u"response"] = Model.EnumElement(
name=u"response")
message_type_elements[u"notification"] = Model.EnumElement(
name=u"notification")
message_type = Model.Enum(name=u"messageType",
elements=message_type_elements)
elements1 = collections.OrderedDict()
elements1[u"name1"] = Model.EnumElement(
name=u"name1",
design_description=DESIGN_DESCRIPTION,
todos=TODOS,
value=u"1")
elements1[u"name2"] = Model.EnumElement(
name="name2",
description=DESCRIPTION,
issues=ISSUES,
internal_name=u"internal_name2")
enum1 = Model.Enum(name=u"Enum1",
todos=TODOS,
elements=elements1)
elements2 = collections.OrderedDict()
elements2[u"xxx"] = Model.EnumElement(name=u"xxx",
internal_name=u"val_1")
elements2[u"yyy"] = Model.EnumElement(name=u"yyy",
internal_name=u"val_2",
value=u"100")
elements2[u"zzz"] = Model.EnumElement(name=u"val_3")
enum2 = Model.Enum(name=u"E2",
elements=elements2)
elements3 = collections.OrderedDict()
elements3["1"] = Model.EnumElement(name="xxx",
internal_name="_1")
elements3["2"] = Model.EnumElement(name="xxx",
internal_name="_2")
elements3["3"] = Model.EnumElement(name="xxx",
internal_name="_3")
enum3 = Model.Enum(name="Enum_new2",
elements=elements3)
elements4 = collections.OrderedDict()
elements4["name1"] = Model.EnumElement(name="xxx",
internal_name="_11")
elements4["name2"] = Model.EnumElement(name="xxx",
internal_name="_22")
enum4 = Model.Enum(name="Enum_new4",
elements=elements4)
enums = collections.OrderedDict()
enums["Enum1"] = enum1
enums["Enum2"] = enum2
enums["Enum3"] = enum3
enums["Enum4"] = enum4
enums["messageType"] = message_type
params1 = collections.OrderedDict()
params1["1"] = Model.FunctionParam(
name="param1",
design_description=DESIGN_DESCRIPTION,
description=DESCRIPTION,
issues=ISSUES,
todos=TODOS,
param_type=enum4,
default_value=elements4["name1"])
params1["2"] = Model.FunctionParam(
name="param2",
param_type=Model.EnumSubset(
name="sub1",
enum=enum1,
allowed_elements={"e1": elements1["name1"]}),
default_value=elements1["name1"])
functions = collections.OrderedDict()
functions["Function1"] = Model.Function(
name="Function1",
function_id=elements1["name1"],
message_type=message_type_elements["request"],
params=params1)
functions["Function2"] = Model.Function(
name="Function2",
function_id=elements2["xxx"],
message_type=message_type_elements["response"])
functions["Function3"] = Model.Function(
name="Function2",
function_id=elements2["yyy"],
message_type=message_type_elements["notification"])
members1 = collections.OrderedDict()
members1["m1"] = Model.Param(name="intParam",
param_type=Model.Integer(max_value=2))
members1["m11"] = Model.Param(name="doubleParam",
param_type=Model.Double(min_value=0.333),
is_mandatory=False)
members1["m222"] = Model.Param(name="boolParam",
param_type=Model.Boolean())
members1["m2"] = Model.Param(name="structParam",
param_type=Model.Struct(name="Struct2"))
members1["aaa"] = Model.Param(name="enumParam",
param_type=enum1)
members1["bbb"] = Model.Param(name="enumParam1",
param_type=enum1)
members1["xxx"] = Model.Param(
name="enumSubset1",
param_type=Model.EnumSubset(
name="sub",
enum=enum1,
allowed_elements={"e1": elements1["name1"]}),
is_mandatory=False)
members1["1"] = Model.Param(
name="arrayOfInt",
param_type=Model.Array(min_size=0,
max_size=20,
element_type=Model.Boolean()),
is_mandatory=False)
members1["2"] = Model.Param(
name="arrayOfEnum1",
param_type=Model.Array(min_size=0,
max_size=20,
element_type=enum1),
is_mandatory=False)
members1["3"] = Model.Param(
name="arrayOfEnum3",
param_type=Model.Array(min_size=10,
max_size=40,
element_type=enum3),
is_mandatory=True)
members1["4"] = Model.Param(
name="arrayOfEnum4",
param_type=Model.Array(
min_size=10,
max_size=41,
element_type=Model.EnumSubset(
name="sub1",
enum=enum1,
allowed_elements={"e1": elements1["name1"]})))
members1["5"] = Model.Param(
name="arrayOfEnum5",
param_type=Model.Array(
min_size=10,
max_size=42,
element_type=Model.EnumSubset(
name="sub2",
enum=enum1,
allowed_elements={"e1": elements1["name2"]})))
members1["6"] = Model.Param(
name="arrayOfEnum6",
param_type=Model.Array(
min_size=10,
max_size=43,
element_type=Model.EnumSubset(
name="sub3",
enum=enum4,
allowed_elements={"e1": elements4["name2"]})))
structs = collections.OrderedDict()
structs["Struct1"] = Model.Struct(
name="Struct1",
design_description=DESIGN_DESCRIPTION,
issues=ISSUES,
members=members1)
structs["Struct2"] = Model.Struct(name="Struct2",
issues=ISSUES)
interface = Model.Interface(enums=enums,
structs=structs,
functions=functions,
params={"param1": "value1",
"param2": "value2"})
os.path.exists = MagicMock(return_value=True)
uuid.uuid1 = MagicMock(
return_value=uuid.UUID("12345678123456781234567812345678"))
codecs.open = MagicMock()
generator.generate(interface=interface,
filename="Test.xml",
namespace="XXX::YYY::ZZZ",
destination_dir="/some/test/dir")
os.path.exists.assert_has_calls([call('/some/test/dir')])
open_result = codecs.open
mock_calls = open_result.mock_calls
self.assertEqual(mock_calls[0],
call('/some/test/dir/Test.h',
mode='w',
encoding='utf-8'),
"Invalid header file creation")
self.assertEqual(mock_calls[4],
call('/some/test/dir/Test.cc',
mode='w',
encoding='utf-8'),
"Invalid source file creation")
self.assertEqual(str(mock_calls[2])[27:-2].replace("\\n", "\n"),
expected_h_file_content,
"Invalid header file content")
self.assertEqual(str(mock_calls[6])[27:-2].replace("\\n", "\n"),
expected_cc_file_content,
"Invalid source file content")
| 41.902692
| 79
| 0.585306
|
8100c5f593e505ab127c39e97f46fc20cf87aab8
| 751
|
py
|
Python
|
queryset_client/tests/base/tests/create.py
|
pulina/tastypie-queryset-client
|
1d65ee387e256b78c44fd8be57fa64ca798ae2da
|
[
"MIT"
] | null | null | null |
queryset_client/tests/base/tests/create.py
|
pulina/tastypie-queryset-client
|
1d65ee387e256b78c44fd8be57fa64ca798ae2da
|
[
"MIT"
] | 1
|
2016-12-22T10:55:44.000Z
|
2016-12-22T10:55:44.000Z
|
queryset_client/tests/base/tests/create.py
|
pulina/tastypie-queryset-client
|
1d65ee387e256b78c44fd8be57fa64ca798ae2da
|
[
"MIT"
] | 5
|
2015-04-27T11:50:28.000Z
|
2019-01-10T06:39:57.000Z
|
#from django.conf import settings
#settings.DEBUG = True
from testcases import (
TestServerTestCase,
get_client
)
class CreateTestCase(TestServerTestCase):
def setUp(self):
self.start_test_server()
self.client = get_client()
def tearDown(self):
self.stop_test_server()
def test_create1(self):
subject = "subject create 1"
body = "body create 1"
message = self.client.message.objects.create(subject=subject, body=body)
message_ = self.client.message.objects.get(id=message.id, subject=subject, body=body)
self.assertTrue(message_.id == message.id)
self.assertTrue(message_.subject == message.subject)
self.assertTrue(message_.body == message.body)
| 30.04
| 93
| 0.681758
|
43a3c69cd3431cae099684b2dc59d43c4444c24f
| 2,414
|
py
|
Python
|
rlkit/torch/networks/two_headed_mlp.py
|
Asap7772/railrl_evalsawyer
|
baba8ce634d32a48c7dfe4dc03b123e18e96e0a3
|
[
"MIT"
] | 1
|
2020-10-23T14:40:09.000Z
|
2020-10-23T14:40:09.000Z
|
rlkit/torch/networks/two_headed_mlp.py
|
Asap7772/railrl_evalsawyer
|
baba8ce634d32a48c7dfe4dc03b123e18e96e0a3
|
[
"MIT"
] | null | null | null |
rlkit/torch/networks/two_headed_mlp.py
|
Asap7772/railrl_evalsawyer
|
baba8ce634d32a48c7dfe4dc03b123e18e96e0a3
|
[
"MIT"
] | 1
|
2021-05-27T20:38:45.000Z
|
2021-05-27T20:38:45.000Z
|
from torch import nn as nn
from torch.nn import functional as F
from rlkit.pythonplusplus import identity
from rlkit.torch import pytorch_util as ptu
from rlkit.torch.core import PyTorchModule
from rlkit.torch.networks.experimental import LayerNorm
class TwoHeadMlp(PyTorchModule):
def __init__(
self,
hidden_sizes,
first_head_size,
second_head_size,
input_size,
init_w=3e-3,
hidden_activation=F.relu,
output_activation=identity,
hidden_init=ptu.fanin_init,
b_init_value=0.,
layer_norm=False,
layer_norm_kwargs=None,
):
super().__init__()
if layer_norm_kwargs is None:
layer_norm_kwargs = dict()
self.input_size = input_size
self.first_head_size = first_head_size
self.second_head_size = second_head_size
self.hidden_activation = hidden_activation
self.output_activation = output_activation
self.layer_norm = layer_norm
self.fcs = []
self.layer_norms = []
in_size = input_size
for i, next_size in enumerate(hidden_sizes):
fc = nn.Linear(in_size, next_size)
in_size = next_size
hidden_init(fc.weight)
fc.bias.data.fill_(b_init_value)
self.__setattr__("fc{}".format(i), fc)
self.fcs.append(fc)
if self.layer_norm:
ln = LayerNorm(next_size)
self.__setattr__("layer_norm{}".format(i), ln)
self.layer_norms.append(ln)
self.first_head = nn.Linear(in_size, self.first_head_size)
self.first_head.weight.data.uniform_(-init_w, init_w)
self.second_head = nn.Linear(in_size, self.second_head_size)
self.second_head.weight.data.uniform_(-init_w, init_w)
def forward(self, input, return_preactivations=False):
h = input
for i, fc in enumerate(self.fcs):
h = fc(h)
if self.layer_norm and i < len(self.fcs) - 1:
h = self.layer_norms[i](h)
h = self.hidden_activation(h)
preactivation = self.first_head(h)
first_output = self.output_activation(preactivation)
preactivation = self.second_head(h)
second_output = self.output_activation(preactivation)
return first_output, second_output
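# --- Hedged usage sketch (editor's addition) ---
# Sizes below are illustrative; the class itself imposes none of them.
if __name__ == '__main__':
    import torch
    net = TwoHeadMlp(hidden_sizes=[64, 64], first_head_size=3,
                     second_head_size=1, input_size=10)
    obs = torch.randn(5, 10)  # batch of 5 ten-dimensional inputs
    first, second = net(obs)
    assert first.shape == (5, 3) and second.shape == (5, 1)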
| 33.527778
| 68
| 0.623447
|
7d891fe672ad05960c0fe5ee18bbe72c69135096
| 2,192
|
py
|
Python
|
Respaldo/package_fetch/src/nodo_detect_obstacles_v3.py
|
Tachuelin/RMA-Dedutel
|
796add66dc20efe8205055a648c01a5a5de194ad
|
[
"MIT"
] | 1
|
2022-03-30T22:29:33.000Z
|
2022-03-30T22:29:33.000Z
|
Respaldo/package_fetch/src/nodo_detect_obstacles_v3.py
|
Tachuelin/RMA-Dedutel
|
796add66dc20efe8205055a648c01a5a5de194ad
|
[
"MIT"
] | null | null | null |
Respaldo/package_fetch/src/nodo_detect_obstacles_v3.py
|
Tachuelin/RMA-Dedutel
|
796add66dc20efe8205055a648c01a5a5de194ad
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# encoding: utf-8
# Line 1 - "Shebang": tells the machine which interpreter to run the script with.
# Line 2 - Python 3 assumes only ASCII is used in the source code;
#          to use UTF-8 it must be declared at the top of the script: encoding: utf-8
import rospy  # Import rospy (the Python-ROS interface)
from sensor_msgs.msg import LaserScan  # Import the LaserScan message type
def callback(mensaje):  # Define a callback function
    scan_range = []  # Define an empty list
    for i in range(len(mensaje.ranges)):
        if mensaje.ranges[i] == float('Inf'):  # If the sensor detects nothing
            scan_range.append(3.5)  # assign it a value of 3.5 m
        else:
            scan_range.append(mensaje.ranges[i])  # If it has a value, append it to our scan_range list
    # Regions = 240/3 = 80 samples each
    left = min(scan_range[0:80])  # samples 0:80
    ahead = min(scan_range[80:160])  # samples 80:160
    right = min(scan_range[160:240])  # samples 160:240
    print("Left = %f , Ahead = %f , Right = %f" % (left, ahead, right))
def nodo():  # Define the node function
    rospy.init_node('nodo_detect_obstacles')  # Initialize our node and give it a name
    # Subscribe to the /base_scan topic
    # Topic name | message type | callback
    scan_sub = rospy.Subscriber('/base_scan', LaserScan, callback)
    rospy.spin()  # Keeps the script running until execution is stopped with Ctrl+C
if __name__ == '__main__':  # Main entry point
    try:
        nodo()  # Call the node function
    except rospy.ROSInterruptException:  # Catch the Ctrl-C exception to end the node's execution
        pass
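# Hedged usage note (editor's addition): with a ROS master and a laser source
# publishing on /base_scan, the node would typically be launched as below; the
# package name is taken from the file path and otherwise assumed.
#   $ roscore
#   $ rosrun package_fetch nodo_detect_obstacles_v3.py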
| 44.734694
| 158
| 0.5625
|
c3543dc9945dd8cc2c09804832190aced100471c
| 12,100
|
py
|
Python
|
mog/__init__.py
|
x10an14/mog
|
be29e122f33c45d78d37377a6c86d6b2034688e6
|
[
"MIT"
] | null | null | null |
mog/__init__.py
|
x10an14/mog
|
be29e122f33c45d78d37377a6c86d6b2034688e6
|
[
"MIT"
] | null | null | null |
mog/__init__.py
|
x10an14/mog
|
be29e122f33c45d78d37377a6c86d6b2034688e6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from __future__ import print_function
import os
import os.path
try:
import ConfigParser as configparser
except ImportError:
import configparser
try:
from pipes import quote
except ImportError:
from shlex import quote
import sys
import re
import subprocess
from functools import partial
import argparse
import platform
def myprint(s):
'''
Overriding print...
why!? because it seems print in python3 does magic that doesn't work when we pipe through less
'''
try:
if sys.version_info[0] == 3 and not hasattr(sys.stdout, 'buffer'):
# In viewinless mode and python3... print as binary
sys.stdout.write('{}\n'.format(s).encode('utf-8'))
else:
print(s)
sys.stdout.flush()
except IOError:
pass
# Open dev null once
DEVNULL = open('/dev/null', 'w')
default_config_file = """; mog config file
[settings]
showname=yes
showsection=no
viewinless=no
toponly=no
toplines=10
followsymlinks=no
recursive=no
[pygments]
; Pygmentize returns 'text' when it can't highlight, so we use an inverted match for text.
invert_match=yes
pygmentize=text
arg=pygmentize
[pygmentsmime]
pygmentsmime=(.*)
argreplace=pygmentize -l '%0' %F
[elfs]
file=.*ELF
arg=objdump -ft
[filesystem]
file_mime=.*(\sinode\/|x-symlink|x-directory|x-fifo)
arg=ls -lh{}
[media]
file_mime=.*\s(video|image)\/
arg=mediainfo
[pdfs]
file=.*PDF document
argreplace=pdftotext %F -
[tarballs]
name=.*\.tar(\.gz|\.bz2|\.Z|\.xz)?$
arg=tar --list -f
[deb]
name=.*\.deb$
arg=dpkg -I
[rpm]
name=.*\.rpm$
arg=rpm -qip
[csv]
name=.*\.csv$
arg=column -xt -s,
[tsv]
name=.*\.tsv$
arg=column -xt
[ASCII]
file=.*ASCII
arg=cat
[binary]
; We assume anything thats left is binary
name=.*
arg=xxd
""".format(" --color=always" if platform.system() != "Darwin" else "")
##### Matches
def match_file(regex, name):
tosearch = subprocess.check_output(['file', '-h', name]).decode("utf-8")
return re.match(regex, tosearch)
def match_file_mime(regex, name):
tosearch = subprocess.check_output(['file', '--mime', '-k', '-h', name]).decode("utf-8").split('\n')[0]
return re.match(regex, tosearch)
def match_pygmentize(regex, name):
tosearch = subprocess.check_output(['pygmentize', '-N', name]).decode("utf-8")
return re.match(regex, tosearch)
def match_pygmentsmime(regex, name):
mimetype = subprocess.check_output(['file', '-b', '-k', '-h', '--mime-type', name]).decode("utf-8").split('\n')[0].strip()
import pygments.lexers
try:
tosearch = pygments.lexers.get_lexer_for_mimetype(mimetype).aliases[0]
if tosearch == 'text':
return None # Skip raw text
return re.match(regex, tosearch)
    except Exception:
return None
def match_inverted(func, regex, name):
return not func(regex, name)
##### Actions
def run_program(cmd):
try:
subprocess.check_call(cmd, shell=True, stdout=sys.stdout, stderr=DEVNULL)
except subprocess.CalledProcessError as e:
myprint('==> Error processing file: {} <=='.format(e))
def action_arg(action, name, match_result, suffix):
run_program('{} {} {}'.format(action, quote(name), suffix).strip())
def action_argreplace(action, name, match_result, suffix):
for i, val in enumerate(match_result.groups()):
action = action.replace('%' + str(i), val)
run_program(action.replace('%F', quote(name) + ' ' + suffix).strip())
##### Helpers
def config_get(func, value, default, section='settings'):
try:
return func(section, value)
except configparser.NoSectionError:
return default
except configparser.NoOptionError:
return default
except ValueError:
myprint("Invalid settings variable {} in section {} - should be boolean".format(value, section))
sys.exit(1)
def create_default_config(path):
with open(path, 'w') as cfg:
cfg.write(default_config_file)
##### Parsing
def parse_config(cfg):
matches = {'file': match_file,
'file_mime': match_file_mime,
'name': re.match,
'pygmentize': match_pygmentize,
'pygmentsmime': match_pygmentsmime}
actions = {'arg': action_arg,
'argreplace': action_argreplace}
# Parse config
things = configparser.RawConfigParser()
if not os.path.exists(cfg):
create_default_config(cfg)
things.read(cfg)
# Extract settings
settings = {'showname' : config_get(things.getboolean, 'showname', True),
'showsection': config_get(things.getboolean, 'showsection', False),
'viewinless' : config_get(things.getboolean, 'viewinless', False),
'toponly' : config_get(things.getboolean, 'toponly', False),
'toplines' : config_get(things.getint, 'toplines', 10),
'followsymlinks' : config_get(things.getboolean, 'followsymlinks', False),
'recursive' : config_get(things.getboolean, 'recursive', False)}
# Extract matches and actions
things_to_do = []
for thing in things.sections():
# Skip settings section
if thing == 'settings':
continue
# Parse others
invert_match = config_get(things.getboolean, 'invert_match', False, thing)
bits = things.items(thing)
match = None
action = None
for bit in bits:
if bit[0] == 'invert_match':
pass # Handled earlier
elif not match and bit[0] in matches.keys():
if invert_match:
match = partial(match_inverted, matches[bit[0]], bit[1])
else:
match = partial(matches[bit[0]], bit[1])
elif not action and bit[0] in actions.keys():
action = partial(actions[bit[0]], bit[1])
else:
myprint("Invalid config variable {} in section {}".format(bit[0], thing))
sys.exit(1)
if match and action:
things_to_do.append((match, action, thing))
if len(things_to_do) == 0:
myprint("Please define what you want me to do in " + cfg)
return (settings,things_to_do)
##### Running
def run_match_action(settings, things_to_do, file_name):
suffix = ''
if settings['toponly']:
suffix = '| head -n {}'.format(settings['toplines'])
for match, action, cfg_section in things_to_do:
match_result = match(file_name)
if match_result:
if settings['showname']:
msg = file_name
if settings['showsection']:
msg = "{} [{}]".format(msg, cfg_section)
myprint('==> {} <=='.format(msg))
action(file_name, match_result, suffix)
return
myprint("==> Warning: don't know what to do with {} <==".format(file_name))
def run(settings, things_to_do, files):
first = True
for f in files:
if first:
first = False
else:
myprint('')
try:
run_match_action(settings, things_to_do, f)
except Exception as e:
myprint('==> Error: "{}" when processing file {} <=='.format(repr(e), f))
def exists_file(f):
if os.path.lexists(f):
return f
else:
raise argparse.ArgumentTypeError("can't open {}: does not exist".format(f))
def add_pre_args(parser):
default = os.path.expanduser("~/." + os.path.basename(sys.argv[0]) + "rc")
parser.add_argument('-c', '--config', help='config file to use, default: {}'.format(default), default=default)
def parse_pre_args():
parser = argparse.ArgumentParser(add_help=False)
add_pre_args(parser)
args, _ = parser.parse_known_args()
return args.config
def parse_args(settings):
help_text = {'name': {True: 'show file names before displaying file',
False: 'do not show file names before displaying file'},
'section': {True: 'show config file section that matched before displaying file',
False: 'do not show config file section that matched before displaying file'},
'less': {True: 'send output to less',
False: 'do not send output to less'},
'top': {True: lambda x: 'show only top n lines of file, default (just -t): {}'.format(x),
False: lambda x: 'show whole files instead of only top {} lines'.format(x)},
'follow': {True: 'follow symlinks to display the linked file',
False: 'display symlinks as is'},
'recurse': {True: 'recurse into directories to find files to display',
False: 'display directories as is'}}
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--name', action='store_true',
help=help_text['name'][not settings['showname']])
parser.add_argument('-s', '--section', action='store_true',
help=help_text['section'][not settings['showsection']])
parser.add_argument('-l', '--less', action='store_true',
help=help_text['less'][not settings['viewinless']])
parser.add_argument('-t', '--top', nargs='?', const=settings['toplines'],
help=help_text['top'][not settings['toponly']](settings['toplines']))
parser.add_argument('-f', '--followsymlinks', action='store_true',
help=help_text['follow'][not settings['followsymlinks']])
parser.add_argument('-r', '--recursive', action='store_true',
help=help_text['recurse'][not settings['recursive']])
parser.add_argument('FILE', nargs='+', help='file(s) to process', type=exists_file)
add_pre_args(parser)
args = parser.parse_args()
if args.name:
settings['showname'] = not settings['showname']
if args.section:
settings['showsection'] = not settings['showsection']
if args.less:
settings['viewinless'] = not settings['viewinless']
if args.top:
settings['toponly'] = not settings['toponly']
settings['toplines'] = args.top
if args.followsymlinks:
settings['followsymlinks'] = not settings['followsymlinks']
if args.recursive:
settings['recursive'] = not settings['recursive']
return args.FILE
def munge_files(files, settings):
# Note we use a set to remove duplicates when playing with symlinks
if settings['followsymlinks']:
files = set(filter(os.path.exists, map(os.path.realpath, files)))
if settings['recursive']:
newfiles = set()
for f in files:
if os.path.isdir(f):
for root, dirs, newfs in os.walk(f, followlinks=settings['followsymlinks']):
if not settings['followsymlinks']:
# symlinks to dirs appear in dirs, if we aren't following them we had better add them
for d in dirs:
d = os.path.join(root, d)
if os.path.islink(d):
newfiles.add(d)
for newf in newfs:
newfile = os.path.join(root, newf)
if settings['followsymlinks']:
newfile = os.path.realpath(newfile)
if os.path.exists(newfile):
newfiles.add(newfile)
else:
newfiles.add(f)
files = newfiles
return files
def main():
cfg = parse_pre_args()
settings, config = parse_config(cfg)
files = parse_args(settings)
files = munge_files(files, settings)
if len(config) == 0:
sys.exit(1)
if settings['viewinless']:
less = subprocess.Popen(['less', '-Sr'], stdin=subprocess.PIPE)
sys.stdout.close()
sys.stdout = less.stdin
else:
less = None
run(settings, config, files)
try:
sys.stdout.close()
except BrokenPipeError:
pass
if less:
less.wait()
if __name__ == "__main__":
main()
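# --- Hedged usage examples (editor's addition) ---
# Command lines exercising the flags defined in parse_args(); file names are
# illustrative.
#   $ mog README script.py archive.tar.gz   # one formatter per matching rule
#   $ mog -t 5 -r src/                      # top 5 lines of each file, recursively
#   $ mog -l big.bin                        # page the output through less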
| 33.611111
| 126
| 0.600661
|
8e6a7c927b08ffd73a6488e07887951d342ea2db
| 340
|
py
|
Python
|
decompress-run-length-encoded-list/decompress-run-length-encoded-list.py
|
shaurya-src/code-leet
|
f642b81eb7bead46c66404bd48ca74bdfeb2abbb
|
[
"MIT"
] | null | null | null |
decompress-run-length-encoded-list/decompress-run-length-encoded-list.py
|
shaurya-src/code-leet
|
f642b81eb7bead46c66404bd48ca74bdfeb2abbb
|
[
"MIT"
] | null | null | null |
decompress-run-length-encoded-list/decompress-run-length-encoded-list.py
|
shaurya-src/code-leet
|
f642b81eb7bead46c66404bd48ca74bdfeb2abbb
|
[
"MIT"
] | null | null | null |
from typing import List
class Solution:
    def decompressRLElist(self, nums: List[int]) -> List[int]:
        op = []
        # nums encodes [freq, value] pairs; expand each pair in order.
        for i in range(0, len(nums), 2):
            freq, val = nums[i], nums[i + 1]
            op.extend([val] * freq)
        return op
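# Hedged usage sketch (editor's addition), using the example from the problem
# statement: [1, 2, 3, 4] expands to one 2 followed by three 4s.
if __name__ == '__main__':
    assert Solution().decompressRLElist([1, 2, 3, 4]) == [2, 4, 4, 4]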
| 26.153846
| 62
| 0.397059
|
63c2e1a43e9e8d4c3a628dd98a48f2de0ddb52a5
| 1,372
|
py
|
Python
|
lib/galaxy/util/monitors.py
|
mmiladi/galaxy
|
7857b152cd10d9490ac2433ff2905ca1a47ee32c
|
[
"CC-BY-3.0"
] | null | null | null |
lib/galaxy/util/monitors.py
|
mmiladi/galaxy
|
7857b152cd10d9490ac2433ff2905ca1a47ee32c
|
[
"CC-BY-3.0"
] | null | null | null |
lib/galaxy/util/monitors.py
|
mmiladi/galaxy
|
7857b152cd10d9490ac2433ff2905ca1a47ee32c
|
[
"CC-BY-3.0"
] | null | null | null |
from __future__ import absolute_import
import logging
import threading
from .sleeper import Sleeper
log = logging.getLogger(__name__)
DEFAULT_MONITOR_THREAD_JOIN_TIMEOUT = 5
class Monitors:
def _init_monitor_thread(self, name, target_name=None, target=None, start=False, config=None):
self.monitor_join_sleep = getattr(config, "monitor_thread_join_timeout", DEFAULT_MONITOR_THREAD_JOIN_TIMEOUT)
self.monitor_join = self.monitor_join_sleep > 0
self.monitor_sleeper = Sleeper()
self.monitor_running = True
if target is not None:
assert target_name is None
monitor_func = target
else:
target_name = target_name or "monitor"
monitor_func = getattr(self, target_name)
self.sleeper = Sleeper()
self.monitor_thread = threading.Thread(name=name, target=monitor_func)
self.monitor_thread.setDaemon(True)
if start:
self.monitor_thread.start()
def stop_monitoring(self):
self.monitor_running = False
def _monitor_sleep(self, sleep_amount):
self.sleeper.sleep(sleep_amount)
def shutdown_monitor(self):
self.stop_monitoring()
self.sleeper.wake()
if self.monitor_join:
log.debug("Joining monitor thread")
self.monitor_thread.join(self.monitor_join_sleep)
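# --- Hedged usage sketch (editor's addition) ---
# A minimal consumer of the mixin above; the loop body and sleep interval are
# illustrative, not taken from Galaxy itself.
class ExampleWatcher(Monitors):
    def __init__(self):
        self._init_monitor_thread(name="example.monitor", start=True)
    def monitor(self):
        while self.monitor_running:
            log.debug("doing periodic work")
            self._monitor_sleep(1)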
| 30.488889
| 117
| 0.687318
|
5dbec7ef4259340069bc1ed0adffa9ffab8e3a4a
| 1,177
|
py
|
Python
|
tools/mapred/reducer.py
|
EMinsight/Silo
|
a1e27d34e8b52196be2de3d4aae2d0cae21b3f36
|
[
"Apache-2.0"
] | 8
|
2021-10-08T00:22:19.000Z
|
2022-03-21T02:17:31.000Z
|
tools/mapred/reducer.py
|
EMinsight/Silo
|
a1e27d34e8b52196be2de3d4aae2d0cae21b3f36
|
[
"Apache-2.0"
] | 184
|
2019-03-20T03:02:33.000Z
|
2019-03-24T18:08:08.000Z
|
tools/mapred/reducer.py
|
EMinsight/Silo
|
a1e27d34e8b52196be2de3d4aae2d0cae21b3f36
|
[
"Apache-2.0"
] | 4
|
2021-12-28T11:40:36.000Z
|
2022-03-30T08:51:51.000Z
|
#!/usr/bin/env python
from operator import itemgetter
import sys
current_word = None
current_z = None
current_rgb = None
word = None
# input comes from STDIN
for line in sys.stdin:
# remove leading and trailing whitespace
line = line.strip()
# parse the input we got from mapper.py
word, pixval = line.split('\t', 1)
rgbz = pixval.split('|')
rgb = rgbz[0]
z = rgbz[1]
# convert count (currently a string) to int
try:
z = float(z)
except ValueError:
# count was not a number, so silently
# ignore/discard this line
continue
# this IF-switch only works because Hadoop sorts map output
# by key (here: word) before it is passed to the reducer
    if current_word == word:
        if z < current_z:
            current_z = z
            current_rgb = rgb
    else:
        if current_word:
            # write result to STDOUT
            print('%s\t%s' % (current_word, current_rgb))
        current_word = word
        current_rgb = rgb
        current_z = z
# do not forget to output the last word if needed!
if current_word == word:
    print('%s\t%s' % (current_word, current_rgb))
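# Hedged usage sketch (editor's addition): the Hadoop-streaming style pipeline
# this reducer is written for; mapper.py and the input file are assumed.
#   $ cat pixels.txt | ./mapper.py | sort -k1,1 | ./reducer.py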
| 24.520833
| 63
| 0.611725
|
883d8e3f3016d724a687503f401ac16704441f8f
| 1,128
|
py
|
Python
|
blogs/pandas-pvtkey/setup.py
|
Glairly/introduction_to_tensorflow
|
aa0a44d9c428a6eb86d1f79d73f54c0861b6358d
|
[
"Apache-2.0"
] | 2
|
2022-01-06T11:52:57.000Z
|
2022-01-09T01:53:56.000Z
|
blogs/pandas-pvtkey/setup.py
|
Glairly/introduction_to_tensorflow
|
aa0a44d9c428a6eb86d1f79d73f54c0861b6358d
|
[
"Apache-2.0"
] | null | null | null |
blogs/pandas-pvtkey/setup.py
|
Glairly/introduction_to_tensorflow
|
aa0a44d9c428a6eb86d1f79d73f54c0861b6358d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(name='trainer',
version='1.0',
description='Showing how to use private key',
url='http://github.com/GoogleCloudPlatform/training-data-analyst',
author='Google',
author_email='nobody@google.com',
license='Apache2',
packages=['trainer'],
package_data={'': ['privatekey.json']},
install_requires=[
'pandas-gbq==0.4.1',
'urllib3',
'google-cloud-bigquery'
],
zip_safe=False)
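# Hedged usage note (editor's addition): packaging the trainer for submission;
# this is standard setuptools usage, not a command taken from this repository.
#   $ python setup.py sdist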
| 36.387097
| 75
| 0.670213
|
fba807abc069cf0bd4ffca40950d500e380f7322
| 634
|
bzl
|
Python
|
bazel-rules/gen_test.bzl
|
felixonmars/tectonic-installer
|
ef9246a7ed59904224638f28999aac5b88c5b8e4
|
[
"Apache-2.0"
] | 2
|
2021-05-07T15:40:00.000Z
|
2021-07-15T22:49:55.000Z
|
bazel-rules/gen_test.bzl
|
felixonmars/tectonic-installer
|
ef9246a7ed59904224638f28999aac5b88c5b8e4
|
[
"Apache-2.0"
] | 3
|
2017-09-08T18:46:07.000Z
|
2017-12-07T15:37:33.000Z
|
bazel-rules/gen_test.bzl
|
felixonmars/tectonic-installer
|
ef9246a7ed59904224638f28999aac5b88c5b8e4
|
[
"Apache-2.0"
] | 6
|
2017-06-26T21:22:47.000Z
|
2017-12-04T18:44:31.000Z
|
def generate_script(command):
return """
set -ex
{command}
""".format(command=command)
def _impl(ctx):
script = generate_script(ctx.attr.command)
# Write the file, it is executed by 'bazel test'.
ctx.actions.write(
output=ctx.outputs.executable,
content=script
)
# To ensure the files needed by the script are available, we put them in
# the runfiles.
runfiles = ctx.runfiles(files=ctx.files.deps)
return [DefaultInfo(runfiles=runfiles)]
gen_test = rule(
implementation=_impl,
attrs={
"command": attr.string(),
"deps": attr.label_list(allow_files=True),
},
test=True,
)
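# Hedged usage sketch (editor's addition): a BUILD-file invocation of the rule
# above; target and file names are illustrative.
#   gen_test(
#       name = "smoke_test",
#       command = "./tests/smoke.sh",
#       deps = ["//tests:smoke.sh"],
#   )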
| 21.862069
| 74
| 0.675079
|
9e389676140ec1ecfacc180f4ead972f3374dae7
| 678
|
py
|
Python
|
vault/urls.py
|
royaleagle-dev/infosafe
|
fcb00a67d6a8fdd3d2e032b53b56bbcf35d844b6
|
[
"Apache-2.0"
] | null | null | null |
vault/urls.py
|
royaleagle-dev/infosafe
|
fcb00a67d6a8fdd3d2e032b53b56bbcf35d844b6
|
[
"Apache-2.0"
] | null | null | null |
vault/urls.py
|
royaleagle-dev/infosafe
|
fcb00a67d6a8fdd3d2e032b53b56bbcf35d844b6
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
#from . models import Vault, Category
from . import views
app_name = 'vault'
urlpatterns = [
path('', views.IndexListView.as_view(), name = 'index'),
path('<int:pk>/', views.VaultDetailView.as_view(), name = 'detail'),
path('unlock/', views.unlock, name = 'unlock'),
path('delete/<int:id>', views.delete, name = 'delete'),
path('instant_lock', views.instant_lock, name = 'instant_lock'),
path('addVault', views.AddVault.as_view(), name = 'addVault'),
path('search/<str:word>', views.search, name = 'search'),
path('editVault/<int:id>', views.editVault, name = 'editVault'),
path('settings', views.Settings.as_view(), name = 'settings'),
]
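# Hedged usage sketch (editor's addition): reversing the named routes above
# from view or template code; the pk/word values and URL prefix are illustrative.
#   from django.urls import reverse
#   reverse('vault:detail', args=[1])       # -> '<prefix>/1/'
#   reverse('vault:search', args=['bank'])  # -> '<prefix>/search/bank'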
| 39.882353
| 69
| 0.682891
|
ea5d0a1a619cf2fa36d22ffec444c804e8fe935a
| 4,904
|
py
|
Python
|
scripts/analysis/profuzzbench_plot.py
|
cyruscyliu/nyx-net-profuzzbench
|
1e93833160e09529245b56237dfe2e855d648059
|
[
"Apache-2.0"
] | 4
|
2021-11-15T03:31:59.000Z
|
2021-11-15T12:46:40.000Z
|
scripts/analysis/profuzzbench_plot.py
|
cyruscyliu/nyx-net-profuzzbench
|
1e93833160e09529245b56237dfe2e855d648059
|
[
"Apache-2.0"
] | 1
|
2022-03-04T19:38:42.000Z
|
2022-03-06T13:23:27.000Z
|
scripts/analysis/profuzzbench_plot.py
|
cyruscyliu/nyx-net-profuzzbench
|
1e93833160e09529245b56237dfe2e855d648059
|
[
"Apache-2.0"
] | 2
|
2021-11-15T00:23:20.000Z
|
2022-02-16T18:10:07.000Z
|
#!/usr/bin/env python3
import argparse
import matplotlib.pyplot as plt
import pandas as pd
import statistics
CUT = True
LOG = False
def main(csv_file, put, runs, cut_off, step, out_file):
#Read the results
df = pd.read_csv(csv_file)
#Calculate the mean of code coverage
#Store in a list first for efficiency
mean_list = []
fuzzers = df.fuzzer.unique()
for subject in [put]:
# for fuzzer in ['aflnet', 'aflnwe']:
for fuzzer in fuzzers:
for cov_type in ['b_abs', 'b_per', 'l_abs', 'l_per']:
#get subject & fuzzer & cov_type-specific dataframe
df1 = df[(df['subject'] == subject) & (df['fuzzer'] == fuzzer)
& (df['cov_type'] == cov_type)]
mean_list.append((subject, fuzzer, cov_type, 0, 0.0))
agg_f = statistics.median if '_abs' in cov_type else statistics.mean
for time in range(1, cut_off + 1, step):
cov = []
for run in range(1, runs + 1, 1):
#get run-specific data frame
df2 = df1[df1['run'] == run]
if CUT:
#get the starting time for this run
start = df2.iloc[0, 0]
#get all rows given a cutoff time
df2 = df2[df2['time'] <= start + time * 60]
#update total coverage and #runs
cov.append(df2.iloc[-1, 5])
#add a new row
mean_list.append(
(subject, fuzzer, cov_type, time, agg_f(cov)))
#Convert the list to a dataframe
mean_df = pd.DataFrame(
mean_list, columns=['subject', 'fuzzer', 'cov_type', 'time', 'cov'])
fig, axes = plt.subplots(2, 2, figsize=(20, 10))
fig.suptitle("Code coverage analysis")
for key, grp in mean_df.groupby(['fuzzer', 'cov_type']):
if key[1] == 'b_abs':
axes[0, 0].plot(grp['time'], grp['cov'], label=key[0])
#axes[0, 0].set_title('Edge coverage over time (#edges)')
axes[0, 0].set_xlabel('Time (in min)')
axes[0, 0].set_ylabel('#edges')
if LOG:
axes[0, 0].set_yscale('log')
if key[1] == 'b_per':
axes[1, 0].plot(grp['time'], grp['cov'], label=key[0])
#axes[1, 0].set_title('Edge coverage over time (%)')
axes[1, 0].set_ylim([0, 100])
axes[1, 0].set_xlabel('Time (in min)')
axes[1, 0].set_ylabel('Edge coverage (%)')
if key[1] == 'l_abs':
axes[0, 1].plot(grp['time'], grp['cov'], label=key[0])
#axes[0, 1].set_title('Line coverage over time (#lines)')
axes[0, 1].set_xlabel('Time (in min)')
axes[0, 1].set_ylabel('#lines')
if LOG:
axes[0, 1].set_yscale('log')
if key[1] == 'l_per':
axes[1, 1].plot(grp['time'], grp['cov'], label=key[0])
#axes[1, 1].set_title('Line coverage over time (%)')
axes[1, 1].set_ylim([0, 100])
axes[1, 1].set_xlabel('Time (in min)')
axes[1, 1].set_ylabel('Line coverage (%)')
for i, ax in enumerate(fig.axes):
# ax.legend(('AFLNet', 'AFLNwe'), loc='upper left')
# ax.legend(fuzzers, loc='upper left')
ax.legend(loc='upper left')
ax.grid()
#Save to file
plt.savefig(out_file)
# Parse the input arguments
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-i',
'--csv_file',
type=str,
required=True,
help="Full path to results.csv")
parser.add_argument('-p',
'--put',
type=str,
required=True,
help="Name of the subject program")
parser.add_argument('-r',
'--runs',
type=int,
required=True,
help="Number of runs in the experiment")
parser.add_argument('-c',
'--cut_off',
type=int,
required=True,
help="Cut-off time in minutes")
parser.add_argument('-s',
'--step',
type=int,
required=True,
help="Time step in minutes")
parser.add_argument('-o',
'--out_file',
type=str,
required=True,
help="Output file")
args = parser.parse_args()
main(args.csv_file, args.put, args.runs, args.cut_off, args.step,
args.out_file)
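# Hedged usage example (editor's addition): flag names match the argparse
# definitions above; the CSV path, subject name, and output file are illustrative.
#   $ python3 profuzzbench_plot.py -i results.csv -p lightftp -r 4 -c 1440 -s 5 -o coverage.png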
| 37.151515
| 84
| 0.472064
|
3ba20f7d3d9e0038b41bccafcf9fb6c2f1659842
| 32,805
|
py
|
Python
|
proteus/SpatialTools.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
proteus/SpatialTools.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
proteus/SpatialTools.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
"""Module creating predifined or custom shapes. Each shape needs a
Domain as argument (from proteus.Domain). A Domain can contain any
number of shapes. Boundary conditions objects are automatically
created for each facet (3D) or segment (2D) defining the shape.
Classes:
* Shape: super class, regroups functions common to all shapes
* Cuboid: creates a 3D cuboid
* Rectangle: creates a 2D rectangle
* Custom: creates a custom shape from a given set vertices, facets, etc.
Example::
from proteus import Domain
from proteus import SpatialTools as st
import numpy as np
domain = Domain.PlanarStraightLineGraphDomain()
shape1 = st.Rectangle(domain, dim=[0.5, 0.5], coords=[1., 1.])
shape2 = st.Rectangle(domain, dim=[0.3, 0.2], coords=[3., 3.])
shape2.rotate(np.pi/3.)
shape2.BC_dict["left"].uOfXT = lambda x, t: 0.
st.assembleDomain(domain)
.. inheritance-diagram:: proteus.SpatialTools
:parts: 1
"""
from math import cos, sin, sqrt
import sys
from functools import reduce  # reduce is not a builtin on Python 3
import numpy as np
from proteus import BoundaryConditions as bc
from .Profiling import logEvent
class Shape(object):
"""
Base/super class of all shapes.
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
nd: Optional[int]
Number of dimensions of the shape. If not set, will take the number of
dimensions of the domain.
BC_class: Optional[proteus.BoundaryConditions.BC_Base]
Class to use for boundary conditions (e.g.
proteus.BoundaryConditions.BC_Base or
proteus.BoundaryConditions.mprans.BC_RANS).
"""
def __init__(self, domain, nd=None, BC_class=None):
if nd != domain.nd:
            logEvent('Shape ({0}D) and Domain ({1}D)'
                     ' have different dimensions!'.format(nd, domain.nd))
sys.exit()
self.Domain = domain
domain.shape_list.append(self)
self.nd = nd
self.BC_class = BC_class or bc.BC_Base
self.vertices = None
self.vertexFlags = None
self.segments = None
self.segmentFlags = None
self.facets = None
self.facetFlags = None
self.regions = None
self.regionFlags = None
self.holes = None
self.barycenter = np.zeros(3)
self.coords = None # Only used for predefined shapes
# (can be different from barycenter)
self.coords_system = np.eye(nd)
self.boundaryTags = None
self.b_or = None # boundary orientation
self.volume = None
self.BC_list = []
def _checkFlags(self, flagSet):
"""
Checks if flags are set correctly
Parameters
----------
flagSet: list
List of flags.
"""
flagSet = set(flagSet)
checkFlag = min(flagSet)
assert checkFlag == 1, 'Minimum boundary/region tag/flag must be 1'
        for flag in sorted(flagSet):
            assert flag == checkFlag, ('Boundary/region tags/flags must be '
                                       'defined as a sequence of numbers with no gap!')
            checkFlag += 1
def _checkListOfLists(self, list_of_lists):
"""
Checks if the list of lists has the right dimension
Parameters
----------
list_of_lists: list
"""
        assert len(list_of_lists[0]) == self.nd, 'must be a list of ' \
            'lists of length ' + str(self.nd)
def _checkNd(self, array):
"""
Checks if an array is of the same dimension of the Shape instance or
is of dimension 3
Parameters
----------
array: array_like
"""
assert len(array) == self.nd or len(array) == 3, 'wrong dimension'
def setPosition(self, coords):
"""
Set position with coords of the barycenter
Parameters
----------
coords: array_like
New set of coordinates for barycenter (list/array).
"""
old_coords = np.array(self.barycenter)
if self.Domain.nd == 2 and len(old_coords) == 3:
trans = coords - old_coords[:2]
else:
trans = coords - old_coords
self.translate(trans)
def setBarycenter(self, barycenter):
"""
Set barycenter (center of mass) of the shape
(!) this function does not move the shape
Parameters
----------
barycenter: array_like
Global coordinates of barycenter (list/array).
"""
if self.Domain.nd == 2 and len(barycenter) == 2:
self.barycenter[:2] = barycenter
else:
self.barycenter[:] = barycenter
def setRegions(self, regions, regionFlags=None):
"""
Sets new regions for the Shape
Parameters
----------
regions: array_like
Array of coordinates of regions.
regionFlags: Optional[array_like]
Array of flags.
"""
self._checkListOfLists(regions)
        if regionFlags is not None:
            self._checkFlags(regionFlags)
        else:
            regionFlags = self.regionFlags
        assert len(regions) == len(regionFlags), 'regions and regionFlags ' \
            'must have the same length'
self.regions = np.array(regions)
self.regionFlags = np.array(regionFlags)
def setHoles(self, holes):
"""
Sets a 'hole' in the mesh. The region where the hole is defined will
not be meshed.
Parameters
----------
holes: array_like
Array of coordinates of holes (list/array).
"""
self._checkListOfLists(holes)
self.holes = np.array(holes)
def rotate(self, rot, axis=(0, 0, 1), pivot=None):
"""
Function to rotate Shape
Parameters
----------
rot: float
Angle of rotation (in radians).
axis: Optional[array_like]
Vector used for rotation. Not necessary for rotation in 2D.
pivot: Optional[array_list]
Point around which the shape will rotate. If not set, the
barycenter will be the center of rotation.
Notes
-----
Rotated attributes:
- vertices
- holes
- regions
- local coordinate system
- boundary orientations
- coords (if not None)
- barycenter
"""
# This function and rotate2D/rotate3D could be optimized
rot = float(rot)
nd = self.nd
if pivot is None:
pivot = self.barycenter
if nd == 2:
pivot = pivot[:2]
self.vertices[:] = rotation2D(self.vertices, rot, pivot)
if self.holes is not None:
self.holes[:] = rotation2D(self.holes, rot, pivot)
if self.regions is not None:
self.regions[:] = rotation2D(self.regions, rot, pivot)
self.barycenter[:2] = rotation2D(self.barycenter[:nd], rot, pivot)
self.coords_system[:] = rotation2D(self.coords_system, rot,
(0., 0.))
if self.b_or is not None:
self.b_or[:] = rotation2D(self.b_or, rot, (0., 0.))
if self.coords is not None:
self.coords[:] = rotation2D(self.coords, rot, pivot)
elif nd == 3:
self.vertices[:] = rotation3D(self.vertices, rot, axis, pivot)
if self.holes is not None:
self.holes[:] = rotation3D(self.holes, rot, axis, pivot)
if self.regions is not None:
self.regions[:] = rotation3D(self.regions, rot, axis, pivot)
self.barycenter[:] = rotation3D(self.barycenter, rot, axis, pivot)
self.coords_system[:] = rotation3D(self.coords_system, rot, axis,
(0., 0., 0.))
if self.b_or is not None:
self.b_or[:] = rotation3D(self.b_or, rot, axis, (0., 0., 0.))
if self.coords is not None:
self.coords[:] = rotation3D(self.coords, rot, axis, pivot)
def translate(self, trans):
"""
Function to translate Shape
Parameters
----------
trans: array_like
Translation values.
Notes
-----
Translated attributes:
- vertices
- regions
- coords (if not None)
- barycenters
- holes
"""
self.vertices += trans
if self.regions is not None:
self.regions += trans
if self.coords is not None:
self.coords += trans
if self.Domain.nd == 2:
trans2 = (trans[0], trans[1], 0.)
self.barycenter += trans2
else:
self.barycenter += trans
if self.holes is not None:
self.holes += trans
def getPosition(self):
"""
Returns
-------
barycenter: array_like
Current position of barycenter.
"""
return self.barycenter
def getRotation(self):
"""
Returns
-------
coords_system: array_like
Local coordinate system relative to global coordinate system.
"""
return self.coords_system
class Cuboid(Shape):
"""
Class to create a 3D cuboid
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
dim: Optional[array_like]
Dimensions of the cuboid.
coords: Optional[array_like]
Coordinates of the centroid of the shape.
barycenter: Optional[array_like]
Coordinates of the barycenter.
"""
count = 0
def __init__(self, domain, dim=(0., 0., 0.), coords=(0., 0., 0.),
barycenter=None):
super(Cuboid, self).__init__(domain, nd=3)
self.__class__.count += 1
self.name = "cuboid" + str(self.__class__.count)
        self.dim = L, W, H = dim # length, width, height
self.volume = L*W*H
self.coords = x, y, z = np.array(coords)
self.vertices = np.array([[x-0.5*L, y-0.5*W, z-0.5*H],
[x-0.5*L, y+0.5*W, z-0.5*H],
[x+0.5*L, y+0.5*W, z-0.5*H],
[x+0.5*L, y-0.5*W, z-0.5*H],
[x-0.5*L, y-0.5*W, z+0.5*H],
[x-0.5*L, y+0.5*W, z+0.5*H],
[x+0.5*L, y+0.5*W, z+0.5*H],
[x+0.5*L, y-0.5*W, z+0.5*H]])
if self.Domain.nd == 2:
self.vertices = np.array([[x-0.5*L, y-0.5*H],
[x+0.5*L, y-0.5*H],
[x+0.5*L, y+0.5*H],
[x-0.5*L, y+0.5*H]])
self.facets = np.array([[[0, 1, 2, 3]], # z-
[[1, 2, 6, 5]], # y+
[[2, 3, 7, 6]], # x+
[[3, 0, 4, 7]], # y-
[[0, 1, 5, 4]], # x-
[[4, 5, 6, 7]]]) # z+
self.b_or = np.array([[0., 0., -1.],
[0., 1., 0.],
[1., 0., 0.],
[0., -1., 0.],
[-1., 0., 0.],
[0., 0., 1.]])
self.regions = np.array([[x, y, z]])
# defining flags for boundary conditions
self.boundaryTags = bt = {'z-': 1,
'y+': 2,
'x+': 3,
'y-': 4,
'x-': 5,
'z+': 6}
self.facetFlags = np.array([bt['z-'], bt['y+'], bt['x+'],
bt['y-'], bt['x-'], bt['z+']])
self.vertexFlags = np.array([bt['z-'], bt['z-'], bt['z-'],
bt['z-'], bt['z+'], bt['z+'],
bt['z+'], bt['z+']])
self.regionFlags = np.array([1])
# Initialize (empty) boundary conditions
self.BC = {'z-': self.BC_class(shape=self, name='z-',
b_or=self.b_or, b_i=0),
'y+': self.BC_class(shape=self, name='y+',
b_or=self.b_or, b_i=1),
'x+': self.BC_class(shape=self, name='x+',
b_or=self.b_or, b_i=2),
'y-': self.BC_class(shape=self, name='y-',
b_or=self.b_or, b_i=3),
'x-': self.BC_class(shape=self, name='x-',
b_or=self.b_or, b_i=4),
'z+': self.BC_class(shape=self, name='z+',
b_or=self.b_or, b_i=5)}
self.BC_list = [self.BC['z-'],
self.BC['y+'],
self.BC['x+'],
self.BC['y-'],
self.BC['x-'],
self.BC['z+']]
# self.BC = BCContainer(self.BC_dict)
        if barycenter is not None:
            self.barycenter = np.array(barycenter)
        else:
            self.barycenter = np.array(coords)
self.It = np.array([[(W**2.+H**2.)/12., 0, 0],
[0, (L**2.+H**2.)/12., 0],
[0, 0, (W**2.+L**2.)/12.]])
def setDimensions(self, dim):
"""
Sets dimensions of the shape.
Parameters
----------
dim: array_like
New dimensions of the shape.
"""
self.dim = dim
L, W, H = dim
x, y, z = self.coords
self.vertices[:] = [[x-0.5*L, y-0.5*W, z-0.5*H],
[x-0.5*L, y+0.5*W, z-0.5*H],
[x+0.5*L, y+0.5*W, z-0.5*H],
[x+0.5*L, y-0.5*W, z-0.5*H],
[x-0.5*L, y-0.5*W, z+0.5*H],
[x-0.5*L, y+0.5*W, z+0.5*H],
[x+0.5*L, y+0.5*W, z+0.5*H],
[x+0.5*L, y-0.5*W, z+0.5*H]]
self.volume = L*W*H
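# Usage sketch (not part of the module): build a cuboid, move and rotate it,
# then inspect the updated vertices. `domain3d` is assumed to be a 3D domain,
# e.g. proteus.Domain.PiecewiseLinearComplexDomain().
#
#   box = Cuboid(domain3d, dim=(1., 1., 1.), coords=(0.5, 0.5, 0.5))
#   box.translate((1., 0., 0.))               # shift 1 unit along x
#   box.rotate(np.pi/4., axis=(0., 0., 1.))   # rotate 45 degrees about z
#   print(box.vertices)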
class Rectangle(Shape):
"""
Class to create a rectangle
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
dim: Optional[array_like]
        Dimensions of the rectangle.
coords: Optional[array_like]
Coordinates of the centroid of the shape.
barycenter: Optional[array_like]
Coordinates of the barycenter.
"""
count = 0
def __init__(self, domain, dim=(0., 0.), coords=(0., 0.), barycenter=None):
super(Rectangle, self).__init__(domain, nd=2)
self.__class__.count += 1
self.name = "rectangle" + str(self.__class__.count)
self.dim = L, H = dim # length, height
self.coords = x, y = np.array(coords)
self.vertices = np.array([[x-0.5*L, y-0.5*H],
[x+0.5*L, y-0.5*H],
[x+0.5*L, y+0.5*H],
[x-0.5*L, y+0.5*H]])
self.segments = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
self.barycenter = np.zeros(3)
if barycenter is not None:
self.barycenter[0:2] = barycenter[0:2]
else:
self.barycenter[0:2] = coords[0:2]
self.b_or = np.array([[0., -1.],
[1., 0.],
[0., 1.],
[-1., 0.]])
self.regions = np.array([[x, y]])
self.boundaryTags = bt = {'y-': 1,
'x+': 2,
'y+': 3,
'x-': 4}
self.segmentFlags = np.array([bt['y-'], bt['x+'], bt['y+'],
bt['x-']]) # y-, x+, y+, x-
self.vertexFlags = np.array([bt['y-'], bt['y-'], bt['y+'],
bt['y+']]) # y-, y-, y+, y+
self.regionFlags = np.array([1])
self.BC = {'y-': self.BC_class(shape=self, name='y-',
b_or=self.b_or, b_i=0),
'x+': self.BC_class(shape=self, name='x+',
b_or=self.b_or, b_i=1),
'y+': self.BC_class(shape=self, name='y+',
b_or=self.b_or, b_i=2),
'x-': self.BC_class(shape=self, name='x-',
b_or=self.b_or, b_i=3)}
self.BC_list = [self.BC['y-'],
self.BC['x+'],
self.BC['y+'],
self.BC['x-']]
# self.BC = BCContainer(self.BC_dict)
        self.It = (L**2 + H**2)/12.
def setDimensions(self, dim):
"""
Set dimensions of the shape
:param dim: new dimensions (list/array)
"""
self.dim = dim
L, H = dim
x, y = self.coords
self.vertices[:] = [[x-0.5*L, y-0.5*H],
[x+0.5*L, y-0.5*H],
[x+0.5*L, y+0.5*H],
[x-0.5*L, y+0.5*H]]
self.volume = L*H
class CustomShape(Shape):
"""
Class to create a custom 2D or 3D shape
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
barycenter: Optional[array_like]
Coordinates of the barycenter.
vertices: array_like
Array of vertex coordinates.
vertexFlags: array_like
Array of vertex flags (used for boundary conditions)
segments: array_like
Array of segments (each defined by indice of 2 vertex).
segmentFlags: array_like
Array of segment flags (used for boundary conditions)
    facets: array_like
Array of facets (defined by clockwise or counterclockwise loop of
vertices).
facetFlags: array_like
Array of facet flags (used for boundary conditions)
    regions: array_like
Array of region coordinates.
regionFlags: array_like
Array of region flags (used for boundary conditions)
holes: array_like
Array of holes coordinates (unmeshed regions)
boundaryTags: dict
Dictionary of flags (int) as keys, and tags (e.g. string) for BC.
boundaryOrientations: Optional[array_like]
Array of orientation of boundaries. Can be used for BC.
"""
count = 0
def __init__(self, domain, barycenter=None, vertices=None,
vertexFlags=None, segments=None, segmentFlags=None,
facets=None, facetFlags=None, holes=None, regions=None,
regionFlags=None, boundaryTags=None,
boundaryOrientations=None):
super(CustomShape, self).__init__(domain, nd=len(vertices[0]))
self.__class__.count += 1
self.name = "custom" + str(self.__class__.count)
self._checkFlags(boundaryTags.values())
        self.boundaryTags = boundaryTags
self.vertices = np.array(vertices)
self.vertexFlags = np.array(vertexFlags)
if segments:
self.segments = np.array(segments)
self.segmentFlags = np.array(segmentFlags)
if facets:
self.facets = np.array(facets)
self.facetFlags = np.array(facetFlags)
if holes is not None:
self.holes = np.array(holes)
if regions is not None:
self._checkFlags(regionFlags)
self.regions = np.array(regions)
self.regionFlags = np.array(regionFlags)
self.BC = {}
self.BC_list = [None]*len(boundaryTags)
b_or = [None]*len(boundaryTags)
self.b_or = b_or
        for tag, flag in boundaryTags.items():
b_i = flag-1 # start at index 0
if boundaryOrientations is not None:
b_or[b_i] = boundaryOrientations[tag]
self.BC[tag] = self.BC_class(shape=self, name=tag, b_or=b_or, b_i=b_i)
self.BC_list[b_i] = self.BC[tag]
# self.BC = BCContainer(self.BC_dict)
if barycenter is not None:
self.barycenter = np.array(barycenter)
class ShapeSTL(Shape):
"""
Class to extract geometrical information from STL file
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
filename: string
Name of the stl file.
"""
def __init__(self, domain, filename):
super(ShapeSTL, self).__init__(domain, nd=3)
self.filename = filename
self.vertices, self.facets, self.facetnormals = getInfoFromSTL(self.filename)
self.facetFlags = np.ones(len(self.facets))
self.vertexFlags = np.ones(len(self.vertices))
self.boundaryTags = {'stl': 1}
self.BC = {'stl': self.BC_class(shape=self, name='stl')}
self.BC_list = [self.BC['stl']]
# self.BC = BCContainer(self.BC_dict)
def getInfoFromSTL(filename):
"""
Extracts information from STL file and converts it to a Proteus friendly
format. Duplicate vertices and segments are removed during the process,
so the shape is ready for meshing.
Parameters
----------
filename: name of STL file
Returns
-------
vertices: array_like
Array of vertices that define STL shape (duplicates removed)
facets: array_like
Array of facets (loops of 3 vertices)
facetnormals: array_like
        normal vectors of each facet
"""
    stl_file = open(filename, 'r')
facetnormals = []
facet = []
facets = []
vertices = []
vFlag = 0
    for line in stl_file:
if "vertex" in line:
word_list = line.split()
vertex = (word_list[1], word_list[2], word_list[3])
vertices += [vertex]
facet += [vFlag]
vFlag += 1
if "facet normal" in line:
word_list = line.split()
facetnormals += [[word_list[2], word_list[3], word_list[4]]]
elif "endfacet" in line:
facets += [[facet]]
facet = []
elif "endsolid" in line:
pass
elif "solid" in line:
word_list = line.split()
name = word_list[1]
    stl_file.close()
# vertices_u, inverse = np.unique(vertices, return_inverse=True)
vertices = np.array(vertices).astype(float)
facets = np.array(facets).astype(int)
vertices, inverse = unique_rows(vertices)
facets = inverse[facets]
facetnormals = np.array(facetnormals).astype(float)
return vertices, facets, facetnormals
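# Usage sketch for getInfoFromSTL (the file name is a placeholder):
#
#   vertices, facets, facetnormals = getInfoFromSTL('hull.stl')
#
# `vertices` comes back deduplicated and `facets` re-indexed to match, so the
# arrays can be used directly for meshing, as ShapeSTL does above.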
def unique_rows(arr):
arr = np.array(arr)
ca = np.ascontiguousarray(arr).view([('', arr.dtype)] * arr.shape[1])
unique, indices, inverse = np.unique(ca, return_index=True, return_inverse=True)
# counts = np.bincount(inverse)
# sort_indices = np.argsort(counts)[::-1]
# sorted_arr = arr[indices[sort_indices]]
# sorted_count = counts[sort_indices]
return (arr[indices], inverse)
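# Example of what unique_rows returns (duplicates dropped, `inverse` mapping
# each original row to its index in the deduplicated array):
#
#   rows, inverse = unique_rows([[0., 0., 0.], [1., 0., 0.], [0., 0., 0.]])
#   # rows    -> array([[0., 0., 0.], [1., 0., 0.]])
#   # inverse -> array([0, 1, 0])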
class BCContainer(object):
"""
Creates a class from a dictionary (keys become class variable names)
"""
def __init__(self, BC_dict):
self.__dict__ = BC_dict
# --------------------------------------------------------------------------- #
# -------------------------SPATIAL TOOLS FOR SHAPES-------------------------- #
# --------------------------------------------------------------------------- #
def rotation2D(points, rot, pivot=(0., 0.)):
"""
Rotates a set of points/vertices/vectors around a pivotal point in 2D.
Parameters
----------
points: array_like
Array of point coordinates to rotate.
rot: float
Angle of rotation.
pivot: array_like
Pivotal point around which the set of points will be rotated.
Returns
-------
points_rot: array_like
Rotated set of points.
"""
# function could be optimized
points = np.array(points)
rot = float(rot)
# get coordinates for translation
x, y = pivot
# translation matrix
T = np.array([[1, 0, 0],
[0, 1, 0],
[-x, -y, 1]])
# rotation matrices
R = np.array([[cos(rot), sin(rot), 0],
[-sin(rot), cos(rot), 0],
[0, 0, 1]])
# full transformation matrix
M = reduce(np.dot, [T, R, np.linalg.inv(T)])
# transform points (check also if it is only a 1D array or 2D)
if points.ndim > 1:
points_rot = np.ones((len(points), 3))
points_rot[:, :-1] = points
points_rot = np.dot(points_rot, M) # matrix dot product on each vector
points_rot = points_rot[:, :-1]
else:
points_rot = np.ones(3)
points_rot[:-1] = points
points_rot = np.dot(points_rot, M) # matrix dot product on each vector
points_rot = points_rot[:-1]
return points_rot
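# Quick check of the convention used here (row vectors, counterclockwise
# angles): rotating (1, 0) by pi/2 about the origin gives (0, 1).
#
#   rotation2D(np.array([1., 0.]), rot=np.pi/2., pivot=(0., 0.))
#   # -> array([~0., 1.])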
def rotation3D(points, rot, axis=(0., 0., 1.), pivot=(0., 0., 0.)):
"""
Rotates a set of points/vertices/vectors around a pivotal point in 3D.
Parameters
----------
points: array_like
Array of point coordinates to rotate.
rot: float
Angle of rotation.
axis: array_like
Axis of rotation.
pivot: array_like
Pivotal point around which the set of points will be rotated.
Returns
-------
points_rot: array_like
Rotated set of points.
"""
# function could be optimized
points = np.array(points)
rot = float(rot)
# get coordinates for translation
x, y, z = pivot
# make axis a unity vector
axis = np.array(axis)
r = np.linalg.norm(axis)
axis = axis/r
# get values for rotation matrix
cx, cy, cz = axis
d = sqrt(cy**2+cz**2)
# rotation matrices
if d != 0:
Rx = np.array([[1, 0, 0, 0],
[0, cz/d, cy/d, 0],
[0, -cy/d, cz/d, 0],
[0, 0, 0, 1]])
else: # special case: rotation axis aligned with x axis
Rx = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]])
Ry = np.array([[d, 0, cx, 0],
[0, 1, 0, 0],
[-cx, 0, d, 0],
[0, 0, 0, 1]])
Rz = np.array([[cos(rot), sin(rot), 0, 0],
[-sin(rot), cos(rot), 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]])
# translation matrix
T = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
[-x, -y, -z, 1]])
# full transformation matrix
inv = np.linalg.inv
M = reduce(np.dot, [T, Rx, Ry, Rz, inv(Ry), inv(Rx), inv(T)])
if points.ndim > 1:
points_rot = np.ones((len(points), 4))
points_rot[:, :-1] = points
points_rot = np.dot(points_rot, M) # matrix dot product on each vector
points_rot = points_rot[:, :-1]
else:
points_rot = np.ones(4)
points_rot[:-1] = points
points_rot = np.dot(points_rot, M) # matrix dot product on each vector
points_rot = points_rot[:-1]
return points_rot
def assembleDomain(domain):
"""
This function sets up everything needed for the domain, meshing, and
AuxiliaryVariables calculations (if any).
It should always be called after defining and manipulating all the shapes
to be attached to the domain.
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
"""
# reinitialize geometry of domain
_assembleGeometry(domain, BC_class=bc.BC_Base)
_generateMesh(domain)
def _assembleGeometry(domain, BC_class):
"""
    Assembles all the geometrical information of the shapes attached to a
domain.
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
"""
# reinitialize geometry of domain
domain.vertices = []
domain.vertexFlags = []
domain.segments = []
domain.segmentFlags = []
domain.facets = []
domain.facetFlags = []
domain.holes = []
domain.regions = []
domain.regionFlags = []
# BC at flag 0
domain.bc = [BC_class(nd=domain.nd)]
# domain.bc[0].setNonMaterial()
# barycenter at flag 0
domain.barycenters = np.array([[0., 0., 0.]])
start_flag = 0
start_vertex = 0
for shape in domain.shape_list:
# --------------------------- #
# ----- DOMAIN GEOMETRY ----- #
# --------------------------- #
start_flag = len(domain.bc)-1
start_vertex = len(domain.vertices)
if domain.regionFlags:
start_rflag = max(domain.regionFlags)
else:
start_rflag = 0
domain.bc += shape.BC_list
# making copies of shape properties before operations/modifications
vertices = shape.vertices.copy()
vertexFlags = shape.vertexFlags.copy()
if shape.segments is not None:
segments = shape.segments.copy()
if shape.facets is not None:
facets = shape.facets.copy()
# deleting duplicate vertices and updating segment/facets accordingly
del_v = 0
for i_s, vertex in enumerate(shape.vertices):
if vertex.tolist() in domain.vertices:
vertices = np.delete(vertices, i_s-del_v, axis=0)
                vertexFlags = np.delete(vertexFlags, i_s-del_v)
i_s -= del_v
del_v += 1
i_d = domain.vertices.index(vertex.tolist())
if shape.segments is not None:
for i in np.nditer(segments, op_flags=['readwrite']):
if i > i_s:
i[...] -= 1
elif i == i_s:
i[...] = i_d-start_vertex
if shape.facets is not None:
for i in np.nditer(facets, op_flags=['readwrite']):
if i > i_s:
i[...] -= 1
elif i == i_s:
i[...] = i_d-start_vertex
# adding shape geometry to domain
domain.vertices += vertices.tolist()
domain.vertexFlags += (vertexFlags+start_flag).tolist()
barycenters = np.array([shape.barycenter for bco in shape.BC_list])
domain.barycenters = np.append(domain.barycenters, barycenters, axis=0)
if shape.segments is not None:
domain.segments += (segments+start_vertex).tolist()
domain.segmentFlags += (shape.segmentFlags+start_flag).tolist()
if shape.facets is not None:
domain.facets += (facets+start_vertex).tolist()
domain.facetFlags += (shape.facetFlags+start_flag).tolist()
if shape.regions is not None:
domain.regions += (shape.regions).tolist()
domain.regionFlags += (shape.regionFlags+start_rflag).tolist()
if shape.holes is not None:
domain.holes += (shape.holes).tolist()
domain.getBoundingBox()
def _generateMesh(domain):
"""
Generates tetgen mesh of domain
Parameters
----------
domain: proteus.Domain.D_base
        Domain class instance that holds all the geometrical information and
boundary conditions of the shape.
"""
# --------------------------- #
# ----- MESH GENERATION ----- #
# --------------------------- #
mesh = domain.MeshOptions
if mesh.outputFiles['poly'] is True:
domain.writePoly(mesh.outputFiles['name'])
if mesh.outputFiles['ply'] is True:
domain.writePLY(mesh.outputFiles['name'])
if mesh.outputFiles['asymptote'] is True:
domain.writeAsymptote(mesh.outputFiles['name'])
mesh.setTriangleOptions()
logEvent("""Mesh generated using: tetgen -%s %s""" %
(mesh.triangleOptions, domain.polyfile+".poly"))
| 36.049451
| 85
| 0.511965
|
929fa27b77007d45858e8460970f115f9fb2b513
| 1,465
|
py
|
Python
|
src/datasets/fish_clf.py
|
AliKhoda/DeepFish
|
6769e83ab0b586e49f48e28f70607d33b5c36718
|
[
"MIT"
] | 48
|
2020-09-02T00:46:50.000Z
|
2022-03-16T14:38:36.000Z
|
src/datasets/fish_clf.py
|
DoranLyong/DeepFish
|
3ea3e13653f708d4a8dcb54b990dcc2997edf4e9
|
[
"MIT"
] | 5
|
2020-11-22T07:33:04.000Z
|
2022-03-28T03:27:58.000Z
|
src/datasets/fish_clf.py
|
DoranLyong/DeepFish
|
3ea3e13653f708d4a8dcb54b990dcc2997edf4e9
|
[
"MIT"
] | 14
|
2020-11-21T15:33:31.000Z
|
2022-03-16T12:33:23.000Z
|
import pandas as pd
import numpy as np
from src import datasets
import os
from PIL import Image
from torchvision import transforms
class FishClf:
def __init__(self, split, transform=None, datadir="",
n_samples=None, habitat=None):
self.split = split
self.n_classes = 2
self.datadir = datadir
self.transform = transform
self.img_names, self.labels = get_clf_data(self.datadir, split, habitat=habitat)
if n_samples:
self.img_names = self.img_names[:n_samples]
self.labels = self.labels[:n_samples]
self.path = self.datadir #+ "/images/"
def __len__(self):
return len(self.img_names)
def __getitem__(self, index):
name = self.img_names[index]
image_pil = Image.open(self.path + name + ".jpg")
image = self.transform(image_pil)
batch = {"images": image,
"labels": float(self.labels[index] > 0),
"image_original":transforms.ToTensor()(image_pil),
"meta": {"index": index,
"image_id": index,
"split": self.split}}
return batch
# helper for the classification task
def get_clf_data(datadir, split, habitat=None ):
df = pd.read_csv(os.path.join(datadir,'%s.csv' % split))
df = datasets.slice_df(df, habitat)
img_names = np.array(df['ID'])
labels = np.array(df['labels'])
return img_names, labels
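# Minimal usage sketch (the datadir path is a placeholder for wherever the
# DeepFish classification CSVs and images live):
#
#   from torchvision import transforms
#   tf = transforms.Compose([transforms.Resize((224, 224)), transforms.ToTensor()])
#   ds = FishClf(split='train', transform=tf, datadir='/data/DeepFish/Classification/')
#   batch = ds[0]  # dict with 'images', 'labels', 'image_original', 'meta'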
| 28.173077
| 88
| 0.591126
|
710acb5dd423add1055da5b37736a610db735fd0
| 8,183
|
py
|
Python
|
src/GUI/GUIPlot.py
|
AndreaG93/CPS-Project
|
e4821aace39b04f8504f3f878fa605d99aaacc8e
|
[
"MIT"
] | null | null | null |
src/GUI/GUIPlot.py
|
AndreaG93/CPS-Project
|
e4821aace39b04f8504f3f878fa605d99aaacc8e
|
[
"MIT"
] | null | null | null |
src/GUI/GUIPlot.py
|
AndreaG93/CPS-Project
|
e4821aace39b04f8504f3f878fa605d99aaacc8e
|
[
"MIT"
] | null | null | null |
import calendar
import ipywidgets
from IPython.core.display import clear_output, display
class GUIPlot(object):
"""
This class is used to build and manage a very simple user interface using some interactive HTML widgets
provided by 'ipywidgets' package.
For reference please see: https://ipywidgets.readthedocs.io/en/latest/index.html#
"""
def __init__(self):
self._widget_list = list()
self._widget_dataset_file_select = None
self._widget_month_combobox = None
self._widget_month_checkbox = None
self._widget_states_summary_label = None
self._widget_cities_summary_label = None
self._widget_how_many_cities_inside_state_label = None
self._widget_state_flag_image_HTML = None
self._widget_state_combobox = None
self._widget_city_combobox = None
self._widget_active_columns_select_multiple = None
self._widget_time_int_range_slider = None
self._widget_error_label = None
self._widget_plot_button = None
self._widget_display_univariate_regression_line_checkbox = None
self._widget_univariate_regression_line_info_HTMLMath = None
self.__build()
def display(self):
"""
This function is used to display all widgets of the UI.
"""
clear_output()
display(ipywidgets.VBox(self._widget_list))
def __build(self):
"""
This function is used to build all widgets of the UI.
"""
# Section: "Dataset Selection"
# ============================================================================================================ #
label = ipywidgets.Label(value="$\\textbf{•}$ $\\textbf{Dataset Selection}$")
self._widget_list.append(label)
label = ipywidgets.Label(value="Please, select a $\\texttt{.csv}$ file from the following list."
+ " (Only $\\texttt{.csv}$ files stored inside $\\texttt{./data}$ directory are"
+ " displayed).")
self._widget_dataset_file_select = ipywidgets.Select(
description="Current Selected Dataset:",
style={'description_width': 'initial'},
layout=ipywidgets.Layout(width='90%'),
continuous_update=False
)
self._widget_list.append(ipywidgets.VBox([label, self._widget_dataset_file_select]))
# Section: "Plot Options"
# ============================================================================================================ #
label = ipywidgets.Label(value="$\\textbf{•}$ $\\textbf{Plot Options}$")
self._widget_list.append(label)
label = ipywidgets.Label(value="You can customize your $\\textit{Plot}$ using following $\\textit{Widgets}$ " +
"(Available $\\textit{Widgets}$ depend on $\\textit{Current Selected Dataset}$)")
self._widget_list.append(label)
self._widget_month_combobox = ipywidgets.Combobox(
placeholder="Select/Type 'Month'...",
options=calendar.month_name[1:],
description='Month:',
layout=ipywidgets.Layout(width='350px'),
continuous_update=False
)
self._widget_state_combobox = ipywidgets.Combobox(
placeholder="Select/Type 'State'...",
description='State:',
layout=ipywidgets.Layout(width='350px'),
continuous_update=False
)
self._widget_city_combobox = ipywidgets.Combobox(
placeholder="Select/Type 'City'...",
description='City:',
layout=ipywidgets.Layout(width='350px'),
continuous_update=False,
)
self._widget_month_checkbox = ipywidgets.Checkbox(
description="Enable 'Month Filter'",
layout=ipywidgets.Layout(width='350px')
)
self._widget_display_univariate_regression_line_checkbox = ipywidgets.Checkbox(
description="Plot 'Regression Line'",
layout=ipywidgets.Layout(width='350px')
)
grid = ipywidgets.GridspecLayout(3, 2)
grid[0, 0] = self._widget_month_combobox
grid[1, 0] = self._widget_state_combobox
grid[2, 0] = self._widget_city_combobox
grid[0, 1] = self._widget_month_checkbox
grid[1, 1] = self._widget_display_univariate_regression_line_checkbox
self._widget_list.append(grid)
self._widget_time_int_range_slider = ipywidgets.IntRangeSlider(
step=1,
description="Plot's 'Time Range'",
disabled=False,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='d',
style={'description_width': 'initial'},
layout=ipywidgets.Layout(width='90%'),
)
self._widget_list.append(self._widget_time_int_range_slider)
label = ipywidgets.Label(value="Using following $\\textit{Widget}$, you can select one or more "
+ "$\\textit{fields}$ to customize your $\\textit{Plot}$"
+ " (Hold $\\texttt{CTRL}$ and click to select more $\\textit{fields}$!)")
self._widget_list.append(label)
self._widget_active_columns_select_multiple = ipywidgets.SelectMultiple(
rows=10,
description='Active Columns:',
disabled=False,
style={'description_width': 'initial'},
layout=ipywidgets.Layout(width='90%'),
)
self._widget_list.append(self._widget_active_columns_select_multiple)
        # Section: 'Dataset Info'
# ============================================================================================================ #
label = ipywidgets.Label(value="$\\textbf{•}$ $\\textbf{Dataset Info}$")
self._widget_list.append(label)
label = ipywidgets.Label(value="Here are displayed several info about $\\textit{Current Selected Dataset}$ "
+ "(Displayed data depend on current selected $\\textit{Plot Options}$)")
self._widget_list.append(label)
self._widget_states_summary_label = ipywidgets.Label()
self._widget_cities_summary_label = ipywidgets.Label()
self._widget_how_many_cities_inside_state_label = ipywidgets.Label()
self._widget_state_flag_image_HTML = ipywidgets.HTML()
box = ipywidgets.VBox([self._widget_states_summary_label,
self._widget_cities_summary_label,
self._widget_how_many_cities_inside_state_label])
box = ipywidgets.HBox([self._widget_state_flag_image_HTML, box])
self._widget_list.append(box)
# Section: "Plot Button"
# ============================================================================================================ #
label = ipywidgets.Label(value="$\\textbf{•}$ $\\textbf{Plot Section}$")
self._widget_list.append(label)
self._widget_plot_button = ipywidgets.Button(description='Plot',
disabled=False,
button_style='success',
icon='line-chart')
self._widget_list.append(self._widget_plot_button)
# Section: "ERROR Label"
# ============================================================================================================ #
self._widget_error_label = ipywidgets.Label(value="")
self._widget_list.append(self._widget_error_label)
# Section: "Display Univariate Regression Line"
# ============================================================================================================ #
self._widget_univariate_regression_line_info_HTMLMath = ipywidgets.HTMLMath(
value="",
style={'description_width': 'initial'},
)
self._widget_list.append(self._widget_univariate_regression_line_info_HTMLMath)
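# Typical use from a Jupyter notebook cell (sketch): build the widget tree and
# render it; event handlers are expected to be attached to the private widgets
# by whatever controller code owns the instance.
#
#   gui = GUIPlot()
#   gui.display()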
| 43.526596
| 120
| 0.564096
|
f75b01cc9f9b04f800eff05a55cbfb0fe7f0fa5a
| 5,854
|
py
|
Python
|
netbox/extras/querysets.py
|
fireman0865/allbox
|
31ffb39f2f4f089053b77d1fb4e13cc61239a4c4
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/querysets.py
|
fireman0865/allbox
|
31ffb39f2f4f089053b77d1fb4e13cc61239a4c4
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/querysets.py
|
fireman0865/allbox
|
31ffb39f2f4f089053b77d1fb4e13cc61239a4c4
|
[
"Apache-2.0"
] | null | null | null |
from collections import OrderedDict
from django.db.models import OuterRef, Subquery, Q
from extras.models.tags import TaggedItem
from utilities.query_functions import EmptyGroupByJSONBAgg, OrderableJSONBAgg
from utilities.querysets import RestrictedQuerySet
class CustomFieldQueryset:
"""
Annotate custom fields on objects within a QuerySet.
"""
def __init__(self, queryset, custom_fields):
self.queryset = queryset
self.model = queryset.model
self.custom_fields = custom_fields
def __iter__(self):
for obj in self.queryset:
values_dict = {cfv.field_id: cfv.value for cfv in obj.custom_field_values.all()}
obj.custom_fields = OrderedDict([(field, values_dict.get(field.pk)) for field in self.custom_fields])
yield obj
class ConfigContextQuerySet(RestrictedQuerySet):
def get_for_object(self, obj, aggregate_data=False):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
Args:
aggregate_data: If True, use the JSONBAgg aggregate function to return only the list of JSON data objects
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
# Virtualization cluster for VirtualMachine
cluster = getattr(obj, 'cluster', None)
cluster_group = getattr(cluster, 'group', None)
# Get the group of the assigned tenant, if any
tenant_group = obj.tenant.group if obj.tenant else None
# Match against the directly assigned region as well as any parent regions.
region = getattr(obj.site, 'region', None)
if region:
regions = region.get_ancestors(include_self=True)
else:
regions = []
queryset = self.filter(
Q(regions__in=regions) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(platforms=obj.platform) | Q(platforms=None),
Q(cluster_groups=cluster_group) | Q(cluster_groups=None),
Q(clusters=cluster) | Q(clusters=None),
Q(tenant_groups=tenant_group) | Q(tenant_groups=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(tags__slug__in=obj.tags.slugs()) | Q(tags=None),
is_active=True,
).order_by('weight', 'name').distinct()
if aggregate_data:
return queryset.aggregate(
config_context_data=OrderableJSONBAgg('data', ordering=['weight', 'name'])
)['config_context_data']
return queryset
class ConfigContextModelQuerySet(RestrictedQuerySet):
"""
QuerySet manager used by models which support ConfigContext (device and virtual machine).
Includes a method which appends an annotation of aggregated config context JSON data objects. This is
implemented as a subquery which performs all the joins necessary to filter relevant config context objects.
This offers a substantial performance gain over ConfigContextQuerySet.get_for_object() when dealing with
multiple objects.
This allows the annotation to be entirely optional.
"""
def annotate_config_context_data(self):
"""
Attach the subquery annotation to the base queryset
"""
from extras.models import ConfigContext
return self.annotate(
config_context_data=Subquery(
ConfigContext.objects.filter(
self._get_config_context_filters()
).annotate(
_data=EmptyGroupByJSONBAgg('data', ordering=['weight', 'name'])
).values("_data")
)
).distinct()
def _get_config_context_filters(self):
# Construct the set of Q objects for the specific object types
tag_query_filters = {
"object_id": OuterRef(OuterRef('pk')),
"content_type__app_label": self.model._meta.app_label,
"content_type__model": self.model._meta.model_name
}
base_query = Q(
Q(platforms=OuterRef('platform')) | Q(platforms=None),
Q(tenant_groups=OuterRef('tenant__group')) | Q(tenant_groups=None),
Q(tenants=OuterRef('tenant')) | Q(tenants=None),
Q(
tags__pk__in=Subquery(
TaggedItem.objects.filter(
**tag_query_filters
).values_list(
'tag_id',
flat=True
)
)
) | Q(tags=None),
is_active=True,
)
if self.model._meta.model_name == 'device':
base_query.add((Q(roles=OuterRef('device_role')) | Q(roles=None)), Q.AND)
base_query.add((Q(sites=OuterRef('site')) | Q(sites=None)), Q.AND)
region_field = 'site__region'
elif self.model._meta.model_name == 'virtualmachine':
base_query.add((Q(roles=OuterRef('role')) | Q(roles=None)), Q.AND)
base_query.add((Q(cluster_groups=OuterRef('cluster__group')) | Q(cluster_groups=None)), Q.AND)
base_query.add((Q(clusters=OuterRef('cluster')) | Q(clusters=None)), Q.AND)
base_query.add((Q(sites=OuterRef('cluster__site')) | Q(sites=None)), Q.AND)
region_field = 'cluster__site__region'
base_query.add(
(Q(
regions__tree_id=OuterRef(f'{region_field}__tree_id'),
regions__level__lte=OuterRef(f'{region_field}__level'),
regions__lft__lte=OuterRef(f'{region_field}__lft'),
regions__rght__gte=OuterRef(f'{region_field}__rght'),
) | Q(regions=None)),
Q.AND
)
return base_query
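# Usage sketch (assuming NetBox's Device model, whose manager is built on
# ConfigContextModelQuerySet): fetch the aggregated config context JSON for
# many devices in a single annotated query instead of per-object lookups.
#
#   from dcim.models import Device
#   for device in Device.objects.annotate_config_context_data():
#       print(device.config_context_data)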
| 39.554054
| 115
| 0.619918
|
722a45740b5d7b00a73cd80dcc909823152545ce
| 11,173
|
py
|
Python
|
venv/lib/python2.7/site-packages/ansible/modules/network/aci/aci_domain_to_encap_pool.py
|
haind27/test01
|
7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852
|
[
"MIT"
] | 37
|
2017-08-15T15:02:43.000Z
|
2021-07-23T03:44:31.000Z
|
venv/lib/python2.7/site-packages/ansible/modules/network/aci/aci_domain_to_encap_pool.py
|
haind27/test01
|
7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852
|
[
"MIT"
] | 12
|
2018-01-10T05:25:25.000Z
|
2021-11-28T06:55:48.000Z
|
venv/lib/python2.7/site-packages/ansible/modules/network/aci/aci_domain_to_encap_pool.py
|
haind27/test01
|
7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852
|
[
"MIT"
] | 49
|
2017-08-15T09:52:13.000Z
|
2022-03-21T17:11:54.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_domain_to_encap_pool
short_description: Bind Domain to Encap Pools (infra:RsVlanNs)
description:
- Bind Domain to Encap Pools on Cisco ACI fabrics.
notes:
- The C(domain) and C(encap_pool) parameters should exist before using this module.
The M(aci_domain) and M(aci_encap_pool) can be used for these.
- More information about the internal APIC class B(infra:RsVlanNs) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
version_added: '2.5'
options:
domain:
description:
- Name of the domain being associated with the Encap Pool.
aliases: [ domain_name, domain_profile ]
domain_type:
description:
- Determines if the Domain is physical (phys) or virtual (vmm).
choices: [ fc, l2dom, l3dom, phys, vmm ]
pool:
description:
- The name of the pool.
aliases: [ pool_name ]
pool_allocation_mode:
description:
- The method used for allocating encaps to resources.
- Only vlan and vsan support allocation modes.
    choices: [ dynamic, static ]
aliases: [ allocation_mode, mode ]
pool_type:
description:
- The encap type of C(pool).
required: yes
choices: [ vlan, vsan, vxlan ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
vm_provider:
description:
- The VM platform for VMM Domains.
- Support for Kubernetes was added in ACI v3.0.
- Support for CloudFoundry, OpenShift and Red Hat was added in ACI v3.1.
choices: [ cloudfoundry, kubernetes, microsoft, openshift, openstack, redhat, vmware ]
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Add domain to VLAN pool binding
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: test_pool
pool_type: vlan
pool_allocation_mode: dynamic
state: present
- name: Remove domain to VLAN pool binding
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: test_pool
pool_type: vlan
pool_allocation_mode: dynamic
state: absent
- name: Query our domain to VLAN pool binding
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
pool: test_pool
pool_type: vlan
pool_allocation_mode: dynamic
state: query
- name: Query all domain to VLAN pool bindings
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain_type: phys
pool_type: vlan
pool_allocation_mode: dynamic
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
VM_PROVIDER_MAPPING = dict(
cloudfoundry='CloudFoundry',
kubernetes='Kubernetes',
microsoft='Microsoft',
openshift='OpenShift',
openstack='OpenStack',
redhat='Redhat',
vmware='VMware',
)
POOL_MAPPING = dict(
vlan=dict(
aci_mo='uni/infra/vlanns-{0}',
child_class='infraRsVlanNs',
),
vxlan=dict(
aci_mo='uni/infra/vxlanns-{0}',
child_class='vmmRsVxlanNs',
),
vsan=dict(
aci_mo='uni/infra/vsanns-{0}',
child_class='fcRsVsanNs',
),
)
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
domain=dict(type='str', aliases=['domain_name', 'domain_profile']),
domain_type=dict(type='str', choices=['fc', 'l2dom', 'l3dom', 'phys', 'vmm']),
pool=dict(type='str', aliases=['pool_name']),
pool_allocation_mode=dict(type='str', aliases=['allocation_mode', 'mode'], choices=['dynamic', 'static']),
pool_type=dict(type='str', required=True, choices=['vlan', 'vsan', 'vxlan']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
vm_provider=dict(type='str', choices=['cloudfoundry', 'kubernetes', 'microsoft', 'openshift', 'openstack', 'redhat', 'vmware']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['domain_type', 'vmm', ['vm_provider']],
['state', 'absent', ['domain', 'domain_type', 'pool', 'pool_type']],
['state', 'present', ['domain', 'domain_type', 'pool', 'pool_type']],
],
)
domain = module.params['domain']
domain_type = module.params['domain_type']
pool = module.params['pool']
pool_allocation_mode = module.params['pool_allocation_mode']
pool_type = module.params['pool_type']
vm_provider = module.params['vm_provider']
state = module.params['state']
# Report when vm_provider is set when type is not virtual
if domain_type != 'vmm' and vm_provider is not None:
module.fail_json(msg="Domain type '{0}' cannot have a 'vm_provider'".format(domain_type))
# ACI Pool URL requires the allocation mode for vlan and vsan pools (ex: uni/infra/vlanns-[poolname]-static)
pool_name = pool
if pool_type != 'vxlan' and pool is not None:
if pool_allocation_mode is not None:
pool_name = '[{0}]-{1}'.format(pool, pool_allocation_mode)
else:
module.fail_json(msg="ACI requires the 'pool_allocation_mode' for 'pool_type' of 'vlan' and 'vsan' when 'pool' is provided")
# Vxlan pools do not support allocation modes
if pool_type == 'vxlan' and pool_allocation_mode is not None:
module.fail_json(msg='vxlan pools do not support setting the allocation_mode; please remove this parameter from the task')
# Compile the full domain for URL building
if domain_type == 'fc':
domain_class = 'fcDomP'
domain_mo = 'uni/fc-{0}'.format(domain)
domain_rn = 'fc-{0}'.format(domain)
    elif domain_type == 'l2dom':
domain_class = 'l2extDomP'
domain_mo = 'uni/l2dom-{0}'.format(domain)
domain_rn = 'l2dom-{0}'.format(domain)
    elif domain_type == 'l3dom':
domain_class = 'l3extDomP'
domain_mo = 'uni/l3dom-{0}'.format(domain)
domain_rn = 'l3dom-{0}'.format(domain)
elif domain_type == 'phys':
domain_class = 'physDomP'
domain_mo = 'uni/phys-{0}'.format(domain)
domain_rn = 'phys-{0}'.format(domain)
elif domain_type == 'vmm':
domain_class = 'vmmDomP'
domain_mo = 'uni/vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
domain_rn = 'vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
# Ensure that querying all objects works when only domain_type is provided
if domain is None:
domain_mo = None
pool_mo = POOL_MAPPING[pool_type]['aci_mo'].format(pool_name)
child_class = POOL_MAPPING[pool_type]['child_class']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class=domain_class,
aci_rn=domain_rn,
filter_target='eq({0}.name, "{1}")'.format(domain_class, domain),
module_object=domain_mo,
),
child_classes=[child_class],
)
aci.get_existing()
if state == 'present':
# Filter out module params with null values
aci.payload(
aci_class=domain_class,
class_config=dict(name=domain),
child_configs=[
{child_class: {'attributes': {'tDn': pool_mo}}},
]
)
# Generate config diff which will be used as POST request body
aci.get_diff(aci_class=domain_class)
# Submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| 31.036111
| 141
| 0.634118
|
544bd4f8e001e1d9a84360d570f3f20287e8ace0
| 3,204
|
py
|
Python
|
.leetcode/113.path-sum-ii.py
|
KuiyuanFu/PythonLeetCode
|
8962df2fa838eb7ae48fa59de272ba55a89756d8
|
[
"MIT"
] | null | null | null |
.leetcode/113.path-sum-ii.py
|
KuiyuanFu/PythonLeetCode
|
8962df2fa838eb7ae48fa59de272ba55a89756d8
|
[
"MIT"
] | null | null | null |
.leetcode/113.path-sum-ii.py
|
KuiyuanFu/PythonLeetCode
|
8962df2fa838eb7ae48fa59de272ba55a89756d8
|
[
"MIT"
] | null | null | null |
# @lc app=leetcode id=113 lang=python3
#
# [113] Path Sum II
#
# https://leetcode.com/problems/path-sum-ii/description/
#
# algorithms
# Medium (49.67%)
# Likes: 2799
# Dislikes: 85
# Total Accepted: 416.5K
# Total Submissions: 834.9K
# Testcase Example: '[5,4,8,11,null,13,4,7,2,null,null,5,1]\n22'
#
# Given the root of a binary tree and an integer targetSum, return all
# root-to-leaf paths where each path's sum equals targetSum.
#
# A leaf is a node with no children.
#
#
# Example 1:
#
#
# Input: root = [5,4,8,11,null,13,4,7,2,null,null,5,1], targetSum = 22
# Output: [[5,4,11,2],[5,8,4,5]]
#
#
# Example 2:
#
#
# Input: root = [1,2,3], targetSum = 5
# Output: []
#
#
# Example 3:
#
#
# Input: root = [1,2], targetSum = 0
# Output: []
#
#
#
# Constraints:
#
#
# The number of nodes in the tree is in the range [0, 5000].
# -1000 <= Node.val <= 1000
# -1000 <= targetSum <= 1000
#
#
#
# @lc tags=tree;depth-first-search
# @lc imports=start
from imports import *
# @lc imports=end
# @lc idea=start
#
# Given a binary tree, collect every root-to-leaf path whose node values sum to targetSum.
# Traverse iteratively with backtracking.
#
# @lc idea=end
# @lc group=
# @lc rank=
# @lc code=start
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def pathSum(self, root: TreeNode, targetSum: int) -> List[List[int]]:
result = []
stack = []
path = []
if root:
stack.append((root, 0))
s = 0
while stack:
node, t = stack.pop()
if t == 0:
s += node.val
path.append(node.val)
if not node.left and not node.right:
if s == targetSum:
result.append(path.copy())
s -= node.val
path.pop()
else:
stack.append((node, 1))
if node.left:
stack.append((node.left, 0))
elif t == 1:
stack.append((node, 2))
if node.right:
stack.append((node.right, 0))
elif t == 2:
s -= node.val
path.pop()
return result
pass
# @lc code=end
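# For comparison, the same backtracking idea written recursively (a sketch,
# not wired into the LeetCode harness above):
#
#   def pathSumRecursive(root, targetSum):
#       result, path = [], []
#       def dfs(node, s):
#           if not node:
#               return
#           path.append(node.val)
#           s += node.val
#           if not node.left and not node.right and s == targetSum:
#               result.append(path.copy())
#           dfs(node.left, s)
#           dfs(node.right, s)
#           path.pop()
#       dfs(root, 0)
#       return result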
# @lc main=start
if __name__ == '__main__':
print('Example 1:')
print('Input : ')
print('root = [5,4,8,11,null,13,4,7,2,null,null,5,1], targetSum = 22')
print('Output :')
print(
str(Solution().pathSum(
listToTreeNode([5, 4, 8, 11, None, 13, 4, 7, 2, None, None, 5, 1]),
22)))
    print('Expected :')
print('[[5,4,11,2],[5,8,4,5]]')
print()
print('Example 2:')
print('Input : ')
print('root = [1,2,3], targetSum = 5')
print('Output :')
print(str(Solution().pathSum(listToTreeNode([1, 2, 3]), 5)))
    print('Expected :')
print('[]')
print()
print('Example 3:')
print('Input : ')
print('root = [1,2], targetSum = 0')
print('Output :')
print(str(Solution().pathSum(listToTreeNode([1, 2]), 0)))
    print('Expected :')
print('[]')
print()
pass
# @lc main=end
| 21.218543
| 79
| 0.51623
|
c6b48bf76b3b3fe9a278f1e17d4a1a1f5eabe905
| 11,547
|
py
|
Python
|
src/spaceone/monitoring/connector/cloudtrail_connector.py
|
xellos00/plugin-aws-cloudtrail
|
2ac4ab4a3c013fad489f1f8a21c5e08662a4cb20
|
[
"Apache-2.0"
] | 2
|
2020-06-22T01:48:21.000Z
|
2020-08-07T05:22:55.000Z
|
src/spaceone/monitoring/connector/cloudtrail_connector.py
|
xellos00/plugin-aws-cloudtrail
|
2ac4ab4a3c013fad489f1f8a21c5e08662a4cb20
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/monitoring/connector/cloudtrail_connector.py
|
xellos00/plugin-aws-cloudtrail
|
2ac4ab4a3c013fad489f1f8a21c5e08662a4cb20
|
[
"Apache-2.0"
] | 3
|
2020-09-09T03:34:25.000Z
|
2021-03-25T05:19:40.000Z
|
# -*- coding: utf-8 -*-
import os
import os.path
# AWS SDK for Python
import boto3
import json
import re
import logging
import pprint
import time
from multiprocessing import Pool
from datetime import datetime
from spaceone.core.transaction import Transaction
from spaceone.core import utils  # assumed: spaceone.core.utils provides generate_id
from spaceone.core.error import *
from spaceone.core.connector import BaseConnector
from spaceone.monitoring.error import *
__all__ = ["CloudTrailConnector"]
_LOGGER = logging.getLogger(__name__)
RESOURCES = ['cloudformation', 'cloudwatch', 'dynamodb', 'ec2', 'glacier', 'iam', 'opsworks', 's3', 'sns', 'sqs']
DEFAULT_REGION = 'us-east-1'
NUMBER_OF_CONCURRENT = 4
class CloudTrailConnector(BaseConnector):
def __init__(self, transaction, config):
super().__init__(transaction, config)
def create_session(self, options, secret_data):
""" Verify CloudTrail Session
"""
create_session(secret_data, options)
def collect_info(self, query, secret_data, start, end, resource, sort, limit=200):
"""
Args:
query (dict): example
{
'instance_id': ['i-123', 'i-2222', ...]
'instance_type': 'm4.xlarge',
'region_name': ['aaaa']
}
resource: arn:aws:ec2:<REGION>:<ACCOUNT_ID>:instance/<instance-id>
        If region_name is present in query, the search is restricted to those regions
"""
(query, resource_ids, region_name) = self._check_query(query)
post_filter_cache = False if len(region_name) > 0 else True
try:
(resource_ids, regions) = _parse_arn(resource)
print(resource_ids)
print(regions)
except Exception as e:
_LOGGER.error(f'[collect_info] fail to parse arn:{e}')
params = []
region_name_list = [] # For filter_cache
for region in regions:
params.append({
'region_name': region,
'query': query,
'resource_ids': resource_ids,
'secret_data': secret_data,
'start': start,
'end': end,
'sort': sort,
'limit': limit
})
with Pool(NUMBER_OF_CONCURRENT) as pool:
result = pool.map(discover_cloudtrail, params)
no_result = True
for resources in result:
(collected_resources, region_name) = resources
if len(collected_resources) > 0:
region_name_list.append(region_name)
try:
response = _prepare_response_schema()
response['result'] = {'logs': collected_resources}
no_result = False
yield response
except Exception as e:
_LOGGER.error(f'[collect_info] skip return {resource}, {e}')
else:
_LOGGER.debug(f'[collect_info] no collected_resources at {region_name}')
if no_result:
# return final data
response = _prepare_response_schema()
response['result'] = {'logs': []}
yield response
def _check_query(self, query):
resource_ids = []
filters = []
region_name = []
for key, value in query.items():
if key == 'instance_id' and isinstance(value, list):
resource_ids = value
elif key == 'region_name' and isinstance(value, list):
region_name.extend(value)
else:
if isinstance(value, list) == False:
value = [value]
if len(value) > 0:
filters.append({'Name': key, 'Values': value})
return (filters, resource_ids, region_name)
#######################
# AWS Boto3 session
#######################
def create_session(secret_data: dict, options={}):
_check_secret_data(secret_data)
aws_access_key_id = secret_data['aws_access_key_id']
aws_secret_access_key = secret_data['aws_secret_access_key']
role_arn = secret_data.get('role_arn')
try:
if role_arn:
return _create_session_with_assume_role(aws_access_key_id, aws_secret_access_key, role_arn)
else:
return _create_session_with_access_key(aws_access_key_id, aws_secret_access_key)
except Exception as e:
raise ERROR_INVALID_CREDENTIALS()
def _check_secret_data(secret_data):
if 'aws_access_key_id' not in secret_data:
raise ERROR_REQUIRED_PARAMETER(key='secret.aws_access_key_id')
if 'aws_secret_access_key' not in secret_data:
raise ERROR_REQUIRED_PARAMETER(key='secret.aws_secret_access_key')
def _create_session_with_access_key(aws_access_key_id, aws_secret_access_key):
session = boto3.Session(aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key)
sts = session.client('sts')
sts.get_caller_identity()
return session
def _create_session_with_assume_role(aws_access_key_id, aws_secret_access_key, role_arn):
    session = _create_session_with_access_key(aws_access_key_id, aws_secret_access_key)
sts = session.client('sts')
assume_role_object = sts.assume_role(RoleArn=role_arn, RoleSessionName=utils.generate_id('AssumeRoleSession'))
credentials = assume_role_object['Credentials']
session = boto3.Session(aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
                            aws_session_token=credentials['SessionToken'])
return session
def _set_connect(secret_data, region_name, service="cloudtrail"):
"""
"""
session = create_session(secret_data)
aws_conf = {}
aws_conf['region_name'] = region_name
if service in RESOURCES:
resource = session.resource(service, **aws_conf)
client = resource.meta.client
else:
resource = None
client = session.client(service, region_name=region_name)
return client, resource
def discover_cloudtrail(params):
"""
Args: params (dict): {
'region_name': 'str',
'query': 'dict',
'resource_ids': 'list'
'secret_data': 'dict',
'start': 'datetime',
'end': 'datetime',
'sort': 'dict',
'limit': 'int'
}
Returns: Resources, region_name
"""
print(f'[discover_cloudtrail] {params["region_name"]}')
client, resource = _set_connect(params['secret_data'], params['region_name'])
try:
resources = _lookup_events(client, params)
return resources
except Exception as e:
_LOGGER.error(f'[discover_cloudtrail] skip region: {params["region_name"]}, {e}')
return [], params['region_name']
def _lookup_events(client, params):
resource_list = []
event_query = {}
region_name = params['region_name']
if 'resource_ids' in params:
LookupAttributes = []
resources = params['resource_ids']
for resource in resources:
LookupAttributes.append({'AttributeKey': 'Username', 'AttributeValue': resource})
event_query.update({'LookupAttributes': LookupAttributes})
event_query.update({'StartTime': params['start'],
'EndTime': params['end']
})
# Paginator config
    # Guard against a missing limit so the page-size comparison below stays valid.
    limit = params.get('limit') or 50
print(f'limit: {limit}')
page_size = limit if limit < 50 else 50
event_query.update({'PaginationConfig': {'MaxItems': limit, 'PageSize': page_size}})
try:
print(event_query)
paginator = client.get_paginator('lookup_events')
response_iterator = paginator.paginate(**event_query)
events = []
for response in response_iterator:
events.extend(response['Events'])
if len(events) == 0:
# Fast return if No resources
print("No Event")
return (events, region_name)
except Exception as e:
print(f'[_lookup_events] Fail to lookup CloudTrail events: {e}')
return (resource_list, region_name)
# Find Events
for event in events:
try:
event_string = event["CloudTrailEvent"]
detailed_event = _parse_cloud_trail_event(event_string)
result = {'EventTime': event['EventTime'].isoformat(), 'AccessKeyId': event['AccessKeyId']}
result.update(detailed_event)
resource_list.append(result)
except Exception as e:
print(f'[_lookup_events] error {e}')
return (resource_list, region_name)
def _parse_cloud_trail_event(cte):
""" Parse CloudTrailEvent
Args: CloudTrailEvent (raw data)
Returns: dict
"""
result = {}
event = json.loads(cte)
wanted_items = ['eventName', 'eventType', 'errorMessage']
for item in wanted_items:
if item in event:
result[item] = event[item]
print(f'parse cloud trail event: {result}')
return result
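# Illustrative sketch (not in the original source): only the keys in
# wanted_items survive, so a raw CloudTrailEvent such as
#   '{"eventName": "RunInstances", "eventType": "AwsApiCall", "eventID": "abc"}'
# yields {'eventName': 'RunInstances', 'eventType': 'AwsApiCall'};
# 'eventID' is dropped and 'errorMessage' is absent because the input lacks it.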
def _parse_arn(arn):
"""
ec2) arn:aws:ec2:<REGION>:<ACCOUNT_ID>:instance/<instance-id>
arn:partition:service:region:account-id:resource-id
arn:partition:service:region:account-id:resource-type/resource-id
arn:partition:service:region:account-id:resource-type:resource-id
Returns: resource_list, [regions]
"""
p = (r"(?P<arn>arn):"
r"(?P<partition>aws|aws-cn|aws-us-gov):"
r"(?P<service>[A-Za-z0-9_\-]*):"
r"(?P<region>[A-Za-z0-9_\-]*):"
r"(?P<account>[A-Za-z0-9_\-]*):"
r"(?P<resources>[A-Za-z0-9_\-:/]*)")
r = re.compile(p)
match = r.match(arn)
if match:
d = match.groupdict()
else:
return (None, None)
region = d.get('region', None)
resource_id = None
resources = d.get('resources', None)
if resources:
items = re.split('/|:', resources)
if len(items) == 1:
resource_id = items[0]
elif len(items) == 2:
resource_type = items[0]
resource_id = items[1]
else:
print(f'ERROR parsing: {resources}')
return [resource_id], [region]
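# Illustrative sketch (not in the original source): for the EC2 ARN format
# documented above, the regex splits resource-type/resource-id on '/' or ':',
# e.g. _parse_arn('arn:aws:ec2:ap-northeast-2:123456789012:instance/i-0abc123')
# returns (['i-0abc123'], ['ap-northeast-2']).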
def _prepare_response_schema() -> dict:
return {
'resource_type': 'monitoring.Log',
'actions': [
{
'method': 'process'
}],
'result': {}
}
if __name__ == "__main__":
import os
aki = os.environ.get('AWS_ACCESS_KEY_ID', "<YOUR_AWS_ACCESS_KEY_ID>")
sak = os.environ.get('AWS_SECRET_ACCESS_KEY', "<YOUR_AWS_SECRET_ACCESS_KEY>")
secret_data = {
# 'region_name': 'ap-northeast-2',
'aws_access_key_id': aki,
'aws_secret_access_key': sak
}
conn = CloudTrailConnector(Transaction(), secret_data)
#opts = conn.verify({}, secret_data)
#print(opts)
query = {}
#query = {'region_name': ['ap-northeast-2', 'us-east-1']}
#query = {}
from datetime import datetime
start = datetime(2020,4,9)
end = datetime(2020,4,10)
ec2_arn = 'arn:aws:ec2:ap-northeast-2:072548720675:instance/i-08c5592e084b24e20'
sort = ""
limit = 10
resource_stream = conn.collect_info(query=query, secret_data=secret_data,
start=start, end=end, resource=ec2_arn, sort=sort, limit=limit)
for resource in resource_stream:
print(resource)
| 32.991429
| 114
| 0.599376
|
a838e0cb64c4438f94af18780188138890ca04ab
| 469
|
py
|
Python
|
api/Reset.py
|
Purdue-ECE-461/project-2-3
|
77a12793b8e799982efa0508c8600ae81dc1fc07
|
[
"Apache-2.0"
] | 1
|
2022-01-25T18:11:32.000Z
|
2022-01-25T18:11:32.000Z
|
api/Reset.py
|
Purdue-ECE-461/project-2-3
|
77a12793b8e799982efa0508c8600ae81dc1fc07
|
[
"Apache-2.0"
] | null | null | null |
api/Reset.py
|
Purdue-ECE-461/project-2-3
|
77a12793b8e799982efa0508c8600ae81dc1fc07
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask
import responses
import requests
import main
app = Flask(__name__)
if __name__ == "__main__" :
request = requests.Request("/reset/",
headers={"X-Authorization": "bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c"})
resp = 200
with app.app_context():
resp = registryReset()
print(resp)
| 39.083333
| 217
| 0.748401
|
92fb7a3e84bb688c0c749a77915611b82c004d0c
| 2,235
|
py
|
Python
|
pyembroidery/ReadHelper.py
|
shnwnd/pyembroidery
|
0540db636c07526caec4e31eb905dda4ae51c0cb
|
[
"MIT"
] | null | null | null |
pyembroidery/ReadHelper.py
|
shnwnd/pyembroidery
|
0540db636c07526caec4e31eb905dda4ae51c0cb
|
[
"MIT"
] | null | null | null |
pyembroidery/ReadHelper.py
|
shnwnd/pyembroidery
|
0540db636c07526caec4e31eb905dda4ae51c0cb
|
[
"MIT"
] | null | null | null |
def signed8(b):
if b > 127:
return -256 + b
else:
return b
def signed16(v):
v &= 0xFFFF
if v > 0x7FFF:
return - 0x10000 + v
else:
return v
def signed24(v):
v &= 0xFFFFFF
if v > 0x7FFFFF:
return - 0x1000000 + v
else:
return v
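# Illustrative sketch (not in the original source): these helpers reinterpret
# unsigned values as two's-complement, e.g. signed8(0xFF) == -1,
# signed16(0x8000) == -32768, signed24(0xFFFFFF) == -1.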
def read_signed(stream, n):
byte = bytearray(stream.read(n))
signed_bytes = []
for b in byte:
signed_bytes.append(signed8(b))
return signed_bytes
def read_sint_8(stream):
byte = bytearray(stream.read(1))
    if len(byte) == 1:
return signed8(byte[0])
return None
def read_int_8(stream):
byte = bytearray(stream.read(1))
    if len(byte) == 1:
return byte[0]
return None
def read_int_16le(stream):
byte = bytearray(stream.read(2))
    if len(byte) == 2:
return (byte[0] & 0xFF) + ((byte[1] & 0xFF) << 8)
return None
def read_int_16be(stream):
byte = bytearray(stream.read(2))
    if len(byte) == 2:
return (byte[1] & 0xFF) + ((byte[0] & 0xFF) << 8)
return None
def read_int_24le(stream):
b = bytearray(stream.read(3))
    if len(b) == 3:
return (b[0] & 0xFF) + ((b[1] & 0xFF) << 8) + \
((b[2] & 0xFF) << 16)
return None
def read_int_24be(stream):
b = bytearray(stream.read(3))
    if len(b) == 3:
return (b[2] & 0xFF) + ((b[1] & 0xFF) << 8) + \
((b[0] & 0xFF) << 16)
return None
def read_int_32le(stream):
b = bytearray(stream.read(4))
    if len(b) == 4:
return (b[0] & 0xFF) + ((b[1] & 0xFF) << 8) + \
((b[2] & 0xFF) << 16) + ((b[3] & 0xFF) << 24)
return None
def read_int_32be(stream):
b = bytearray(stream.read(4))
    if len(b) == 4:
return (b[3] & 0xFF) + ((b[2] & 0xFF) << 8) + \
((b[1] & 0xFF) << 16) + ((b[0] & 0xFF) << 24)
return None
def read_string_8(stream, length):
byte = stream.read(length)
try:
return byte.decode('utf8')
except UnicodeDecodeError:
return None # Must be > 128 chars.
def read_string_16(stream, length):
byte = stream.read(length)
try:
return byte.decode('utf16')
except UnicodeDecodeError:
return None
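# --- Illustrative usage sketch (not part of the original module) ---
# Demonstrates the endianness of the 16-bit readers on an in-memory stream.
if __name__ == "__main__":
    import io
    assert read_int_16le(io.BytesIO(b"\x01\x02")) == 0x0201  # low byte first
    assert read_int_16be(io.BytesIO(b"\x01\x02")) == 0x0102  # high byte first
    assert read_sint_8(io.BytesIO(b"\xff")) == -1            # two's complement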
| 21.084906
| 60
| 0.53915
|
0ac762b57f2a0b6ea4ff60c989214694f75626d0
| 656
|
py
|
Python
|
code/DeepCTR/deepctr/models/__init__.py
|
Puzz1eX/HFCN
|
81880cb8d4ccb1969a8df7b6dedbec05a5c18590
|
[
"MIT"
] | null | null | null |
code/DeepCTR/deepctr/models/__init__.py
|
Puzz1eX/HFCN
|
81880cb8d4ccb1969a8df7b6dedbec05a5c18590
|
[
"MIT"
] | null | null | null |
code/DeepCTR/deepctr/models/__init__.py
|
Puzz1eX/HFCN
|
81880cb8d4ccb1969a8df7b6dedbec05a5c18590
|
[
"MIT"
] | null | null | null |
from .afm import AFM
from .autoint import AutoInt
from .ccpm import CCPM
from .dcn import DCN
from .deepfm import DeepFM
from .dien import DIEN
from .din import DIN
from .fnn import FNN
from .mlr import MLR
from .onn import ONN
from .onn import ONN as NFFM
from .nfm import NFM
from .pnn import PNN
from .wdl import WDL
from .xdeepfm import xDeepFM
from .fgcnn import FGCNN
from .dsin import DSIN
from .fibinet import FiBiNET
from .flen import FLEN
from .hfcn import HFCN
__all__ = ["AFM", "CCPM","DCN", "MLR", "DeepFM",
"MLR", "NFM", "DIN", "DIEN", "FNN", "PNN", "WDL", "xDeepFM", "AutoInt", "ONN", "FGCNN", "DSIN", "FiBiNET", 'FLEN',"HFCN"]
| 27.333333
| 132
| 0.699695
|
f061ddce867e2f6a535f872236f96150b628eeec
| 919
|
py
|
Python
|
Yquote.py
|
Anupam-dagar/Quote-from-Yourquote.in
|
93026ca86a2956858df74e3097a498e751f597aa
|
[
"MIT"
] | 2
|
2018-12-18T20:45:08.000Z
|
2018-12-21T17:02:07.000Z
|
Yquote.py
|
Anupam-dagar/Quote-from-Yourquote.in
|
93026ca86a2956858df74e3097a498e751f597aa
|
[
"MIT"
] | null | null | null |
Yquote.py
|
Anupam-dagar/Quote-from-Yourquote.in
|
93026ca86a2956858df74e3097a498e751f597aa
|
[
"MIT"
] | null | null | null |
from bs4 import BeautifulSoup
import requests
import random
class Yquote():
def __init__(self):
self.response = ""
self.tag = ""
self.website_url = ""
def generate_url(self, tag):
self.tag = tag
self.website_url = 'https://www.yourquote.in/tags/' + self.tag + '/quotes'
def get_sourcecode(self, website_url):
return requests.get(website_url).content
def get_quote(self, tag):
self.generate_url(tag)
source_code = self.get_sourcecode(self.website_url)
soup = BeautifulSoup(source_code, 'lxml')
quotes = soup.find_all('img', class_="quote-media-section")
quotes_list = []
for quote in quotes:
quotes_list.append(quote['title'])
        return random.choice(quotes_list)
# Usage
# yourquote = Yquote()
# quote = yourquote.get_quote('2019')
# print(quote)
| 26.257143
| 82
| 0.622416
|
5951f11ecb933db90ab5f683e27d9b242943fe4c
| 5,486
|
py
|
Python
|
validate.py
|
UAlbanyArchives/asInventory
|
26840949821cac6235675bb414d50b3ece3cada9
|
[
"Unlicense"
] | 6
|
2017-08-18T18:55:43.000Z
|
2018-05-04T23:03:04.000Z
|
validate.py
|
UAlbanyArchives/asInventory
|
26840949821cac6235675bb414d50b3ece3cada9
|
[
"Unlicense"
] | null | null | null |
validate.py
|
UAlbanyArchives/asInventory
|
26840949821cac6235675bb414d50b3ece3cada9
|
[
"Unlicense"
] | 1
|
2017-06-02T20:38:43.000Z
|
2017-06-02T20:38:43.000Z
|
import os
from archives_tools import dacs
import openpyxl
import sys
import string
__location__ = (os.path.dirname(os.path.realpath(__file__)))
inputPath = os.path.join(__location__, "input")
daoFileList = []
def dateCheck(date, errorCount, lineCount, title):
if " " in date.strip():
try:
print ("Line " + str(lineCount) + ", DATE ERROR, invalid space: (" + str(date) + ") title: " + title)
except:
print ("Line " + str(lineCount) + ", DATE ERROR, invalid space: (" + str(date) + ")")
errorCount += 1
acceptList = ["/", "-", "1", "2", "3", "4", "5", "6", "7", "8", "9", "0"]
if date != "None":
for character in str(date).strip():
if not character in acceptList:
print (character)
try:
print ("Line " + str(lineCount) + ", DATE ERROR, invalid character: (" + str(date) + ") title: " + title)
except:
print ("Line " + str(lineCount) + ", DATE ERROR, invalid character: (" + str(date) + ")")
errorCount += 1
if "/" in date:
start, end = date.split("/")
if start > end:
try:
print ("Line " + str(lineCount) + ", DATE ERROR: (" + str(date) + ") title: " + title)
except:
print ("Line " + str(lineCount) + ", DATE ERROR: (" + str(date) + ")")
errorCount += 1
if "undated" in date.lower():
try:
print ("Line " + str(lineCount) + ", DATE ERROR: (" + str(date) + ") title: " + title)
except:
print ("Line " + str(lineCount) + ", DATE ERROR: (" + str(date) + ")")
errorCount += 1
return errorCount
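# Illustrative sketch (not in the original source): given the rules above,
# dateCheck("1990/1995", 0, 1, "t") passes, while "1995/1990" (start > end),
# "1990 /1995" (embedded space), and "undated" each increment the error count.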
for file in os.listdir(inputPath):
if file.endswith(".xlsx"):
filePath = os.path.join(inputPath, file)
wb = openpyxl.load_workbook(filename=filePath, read_only=True)
#validate sheets
for sheet in wb.worksheets:
checkSwitch = True
try:
if sheet["H1"].value.lower().strip() != "title":
checkSwitch = False
elif sheet["H2"].value.lower().strip() != "level":
checkSwitch = False
elif sheet["H3"].value.lower().strip() != "ref id":
checkSwitch = False
elif sheet["J6"].value.lower().strip() != "date 1 display":
checkSwitch = False
elif sheet["D6"].value.lower().strip() != "container uri":
checkSwitch = False
except:
print ("ERROR: incorrect sheet " + sheet.title + " in file " + file)
if checkSwitch == False:
print ("ERROR: incorrect sheet " + sheet.title + " in file " + file)
else:
#Read sheet info
print ("Reading sheet: " + sheet.title)
lineCount = 0
errorCount = 0
for row in sheet.rows:
lineCount += 1
if lineCount > 6:
                        # The five date columns sit at fixed offsets; validate each identically.
                        for col in (10, 12, 14, 16, 18):
                            try:
                                date = dacs.iso2DACS(str(row[col].value))
                                errorCount = dateCheck(str(row[col].value), errorCount, lineCount, row[8].value)
                            except:
                                errorCount += 1
                                try:
                                    print ("Line " + str(lineCount) + ", DATE ERROR: (" + str(row[col].value) + ") title: " + str(row[8].value))
                                except:
                                    print ("Line " + str(lineCount) + ", DATE ERROR: (" + str(row[col].value) + ")")
                        if row[22].value is not None:
if len(str(row[22].value).strip()) > 0:
daoName = str(row[22].value).strip()
if not daoName.lower().startswith("http"):
if daoName in daoFileList:
errorCount += 1
print ("DAO ERROR: File listed twice (" + str(row[22].value) + ") line " + str(lineCount))
else:
daoFileList.append(daoName)
daoPath = os.path.join(__location__, "dao", daoName)
"""
if not os.path.isfile(daoPath):
errorCount += 1
print ("DAO ERROR: File Not Present in dao (" + str(row[22].value) + ") line " + str(lineCount))
"""
print (" " + str(errorCount) + " errors found in " + file)
# make sure console doesn't close
print ("Press Enter to continue...")
if sys.version_info >= (3, 0):
input()
else:
raw_input()
| 36.573333
| 116
| 0.562158
|
008eda47e815c151796df5403ed337fdbeaa94ba
| 3,831
|
py
|
Python
|
utils/save_traj.py
|
paperanonymous945/MapRec
|
5be48b02db855ce648d2674923a15c65afa90146
|
[
"MIT"
] | 21
|
2021-06-11T04:32:25.000Z
|
2022-03-27T06:04:20.000Z
|
utils/save_traj.py
|
paperanonymous945/MapRec
|
5be48b02db855ce648d2674923a15c65afa90146
|
[
"MIT"
] | 1
|
2021-06-17T02:22:10.000Z
|
2021-06-21T12:53:07.000Z
|
utils/save_traj.py
|
paperanonymous945/MapRec
|
5be48b02db855ce648d2674923a15c65afa90146
|
[
"MIT"
] | 12
|
2021-05-26T02:57:43.000Z
|
2022-03-29T01:21:00.000Z
|
#!/usr/bin/python3
# coding: utf-8
# @Time : 2020/9/23 15:40
# Reference: https://github.com/huiminren/tptk/blob/master/common/trajectory.py
from common.trajectory import get_tid
from utils.coord_transform import GCJ02ToWGS84, WGS84ToGCJ02, Convert
class SaveTraj:
"""
SaveTraj is an abstract class for storing trajectory.
It defines store() function for storing trajectory to different format.
"""
def __init__(self, convert_method):
# GCJ: Auto, Didi
# WGS: OSM, Tiandi
if convert_method == 'GCJ02ToWGS84':
self.convert = GCJ02ToWGS84()
elif convert_method == 'WGS84ToGCJ02':
self.convert = WGS84ToGCJ02()
elif convert_method is None:
self.convert = Convert()
def store(self, trajs, target_path):
pass
class SaveTraj2Raw(SaveTraj):
def __init__(self, convert_method=None):
super().__init__(convert_method)
def store(self, trajs, target_path):
time_format = '%Y/%m/%d %H:%M:%S'
with open(target_path, 'w') as f:
for traj in trajs:
pt_list = traj.pt_list
tid = get_tid(traj.oid, pt_list)
f.write('#,{},{},{},{},{} km\n'.format(tid, traj.oid, pt_list[0].time.strftime(time_format),
pt_list[-1].time.strftime(time_format),
traj.get_distance() / 1000))
for pt in pt_list:
lng, lat = self.convert.convert(pt.lng, pt.lat)
f.write('{},{},{}\n'.format(
pt.time.strftime(time_format), lat, lng))
class SaveTraj2MM(SaveTraj):
"""
"""
def __init__(self, convert_method=None):
super().__init__(convert_method)
def store(self, trajs, target_path):
time_format = '%Y/%m/%d %H:%M:%S'
with open(target_path, 'w') as f:
for traj in trajs:
pt_list = traj.pt_list
tid = get_tid(traj.oid, pt_list)
f.write('#,{},{},{},{},{} km\n'.format(tid, traj.oid, pt_list[0].time.strftime(time_format),
pt_list[-1].time.strftime(time_format),
traj.get_distance() / 1000))
for pt in pt_list:
candi_pt = pt.data['candi_pt']
if candi_pt is not None:
f.write('{},{},{},{},{},{},{},{},{}\n'.format(pt.time.strftime(time_format), pt.lat, pt.lng,
candi_pt.eid, candi_pt.lat, candi_pt.lng,
candi_pt.error, candi_pt.offset, candi_pt.rate))
else:
f.write('{},{},{},None,None,None,None,None,None\n'.format(
pt.time.strftime(time_format), pt.lat, pt.lng))
class SaveTraj2JUST(SaveTraj):
"""
Convert trajs to JUST format.
    csv file. trajectory_id, oid, time, lat, lng
"""
def __init__(self, convert_method=None):
super().__init__(convert_method)
def store(self, trajs, target_path):
"""
Convert trajs to JUST format.
        csv file. trajectory_id (primary key), oid, time, lat, lng
Args:
----
trajs:
list. list of Trajectory()
target_path:
str. target path (directory + file_name)
"""
with open(target_path, 'w') as f:
for traj in trajs:
for pt in traj.pt_list:
lng, lat = self.convert.convert(pt.lng, pt.lat)
f.write('{},{},{},{},{}\n'.format(traj.tid, traj.oid, pt.time, lat, lng))
| 39.091837
| 116
| 0.51005
|
e02a7ecd0788139d54f8090fa75aa2e28c72260f
| 1,046
|
py
|
Python
|
tests/generate_input.py
|
CrueLu/baowow
|
af38412ed04293aaf604eb34e35776096f9f00e8
|
[
"Apache-2.0"
] | 322
|
2017-10-18T09:15:50.000Z
|
2022-03-31T22:46:06.000Z
|
tests/generate_input.py
|
CrueLu/baowow
|
af38412ed04293aaf604eb34e35776096f9f00e8
|
[
"Apache-2.0"
] | 36
|
2017-10-21T16:51:29.000Z
|
2021-12-10T11:07:10.000Z
|
tests/generate_input.py
|
CrueLu/baowow
|
af38412ed04293aaf604eb34e35776096f9f00e8
|
[
"Apache-2.0"
] | 17
|
2018-11-01T19:51:32.000Z
|
2022-03-01T14:43:10.000Z
|
#! /usr/bin/env python3
# Usage: ./generate_input.py <count>
#
# Input bytes are generated by incrementing a 4-byte little-endian integer,
# starting with 1. For example, an input of length 10 would be the bytes
# [1, 0, 0, 0, 2, 0, 0, 0, 3, 0]. The goal is to make it unlikely that a bug
# like swapping or duplicating a chunk could still pass the test suite.
# Hopefully it also makes it easier to eyeball the encoded outputs.
import io
import sys
COUNTER_SIZE = 4
def write_input_stream(stream, count):
i = 1
while count > 0:
ibytes = i.to_bytes(COUNTER_SIZE, "little")
take = min(COUNTER_SIZE, count)
stream.write(ibytes[:take])
count -= take
i += 1
def input_bytes(count):
b = io.BytesIO()
write_input_stream(b, count)
return b.getvalue()
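# Illustrative sketch (not in the original source), matching the header
# comment: input_bytes(10) == bytes([1, 0, 0, 0, 2, 0, 0, 0, 3, 0]).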
def main():
if len(sys.argv) < 2:
print("The count argument is mandatory.", file=sys.stderr)
sys.exit(1)
count = int(sys.argv[1])
write_input_stream(sys.stdout.buffer, count)
if __name__ == "__main__":
main()
| 24.325581
| 76
| 0.652964
|
e3fac4605f28808b4071e1bddac03ecfc6e65588
| 125
|
py
|
Python
|
2.py
|
AJ-54/Intro-to-Webd
|
4d68784cea8a5f26f3f55a4d736d5fa08c6f37bb
|
[
"MIT"
] | 1
|
2019-01-15T09:02:31.000Z
|
2019-01-15T09:02:31.000Z
|
2.py
|
AJ-54/Intro-to-Webd
|
4d68784cea8a5f26f3f55a4d736d5fa08c6f37bb
|
[
"MIT"
] | null | null | null |
2.py
|
AJ-54/Intro-to-Webd
|
4d68784cea8a5f26f3f55a4d736d5fa08c6f37bb
|
[
"MIT"
] | null | null | null |
s=list(input("Enter the names with commas:").split(","))
for i in s:
f=" "
a=i.split(" ")
for j in a:
f+=j[0]
print(f)
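# Illustrative example (not in the original source): entering
# "Alan Turing,Grace Hopper" prints " AT" and then " GH"
# (each line of initials keeps the leading-space prefix from f).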
| 15.625
| 56
| 0.552
|
603155ce4e99284243eba94bea9f0772546a3d7b
| 11,524
|
py
|
Python
|
modules/text/sentiment_analysis/senta_gru/module.py
|
cjt222/HubModule
|
a741ad35db53b1e6eeaeecc5f246db1ea2386519
|
[
"Apache-2.0"
] | null | null | null |
modules/text/sentiment_analysis/senta_gru/module.py
|
cjt222/HubModule
|
a741ad35db53b1e6eeaeecc5f246db1ea2386519
|
[
"Apache-2.0"
] | null | null | null |
modules/text/sentiment_analysis/senta_gru/module.py
|
cjt222/HubModule
|
a741ad35db53b1e6eeaeecc5f246db1ea2386519
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import ast
import json
import math
import numpy as np
import os
import six
import paddle.fluid as fluid
from paddle.fluid.core import PaddleTensor, AnalysisConfig, create_paddle_predictor
import paddlehub as hub
from paddlehub.common.utils import sys_stdin_encoding
from paddlehub.io.parser import txt_parser
from paddlehub.module.module import serving
from paddlehub.module.module import moduleinfo
from paddlehub.module.module import runnable
from senta_gru.net import gru_net
from senta_gru.processor import load_vocab, preprocess, postprocess
class DataFormatError(Exception):
def __init__(self, *args):
self.args = args
@moduleinfo(
name="senta_bilstm",
version="1.1.0",
summary="Baidu's open-source Sentiment Classification System.",
author="baidu-nlp",
author_email="",
type="nlp/sentiment_analysis")
class SentaGRU(hub.Module):
def _initialize(self, user_dict=None):
"""
initialize with the necessary elements
"""
self.pretrained_model_path = os.path.join(self.directory, "infer_model")
self.vocab_path = os.path.join(self.directory, "assets/vocab.txt")
self.word_dict = load_vocab(self.vocab_path)
self._word_seg_module = None
self._set_config()
@property
def word_seg_module(self):
"""
lac module
"""
if not self._word_seg_module:
self._word_seg_module = hub.Module(name="lac")
return self._word_seg_module
def _set_config(self):
"""
predictor config setting
"""
cpu_config = AnalysisConfig(self.pretrained_model_path)
cpu_config.disable_glog_info()
cpu_config.disable_gpu()
self.cpu_predictor = create_paddle_predictor(cpu_config)
try:
_places = os.environ["CUDA_VISIBLE_DEVICES"]
int(_places[0])
use_gpu = True
except:
use_gpu = False
if use_gpu:
gpu_config = AnalysisConfig(
os.path.join(self.directory, "infer_model"))
gpu_config.disable_glog_info()
gpu_config.enable_use_gpu(memory_pool_init_size_mb=500, device_id=0)
self.gpu_predictor = create_paddle_predictor(gpu_config)
def context(self, trainable=False):
"""
        Get the input, output and program of the pretrained senta_gru
Args:
trainable(bool): whether fine-tune the pretrained parameters of senta_gru or not
Returns:
inputs(dict): the input variables of senta_gru (words)
outputs(dict): the output variables of senta_gru (the sentiment prediction results)
             main_program(Program): the main_program of senta_gru with pretrained parameters
"""
main_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(main_program, startup_program):
with fluid.unique_name.guard("@HUB_senta_gru@"):
data = fluid.layers.data(
name="words", shape=[1], dtype="int64", lod_level=1)
pred, fc = gru_net(data, 1256606)
for param in main_program.global_block().iter_parameters():
param.trainable = trainable
place = fluid.CPUPlace()
exe = fluid.Executor(place)
# load the senta_gru pretrained model
def if_exist(var):
return os.path.exists(
os.path.join(self.pretrained_model_path, var.name))
fluid.io.load_vars(
exe, self.pretrained_model_path, predicate=if_exist)
inputs = {"words": data}
outputs = {"class_probs": pred, "sentence_feature": fc}
return inputs, outputs, main_program
def to_unicode(self, texts):
"""
Convert each element's type(str) of texts(list) to unicode in python2.7
Args:
texts(list): each element's type is str in python2.7
Returns:
texts(list): each element's type is unicode in python2.7
"""
if six.PY2:
unicode_texts = []
for text in texts:
if not isinstance(text, unicode):
unicode_texts.append(
text.decode(sys_stdin_encoding()).decode("utf8"))
else:
unicode_texts.append(text)
texts = unicode_texts
return texts
def texts2tensor(self, texts):
"""
        Transform the processed texts (list) to a PaddleTensor
        Args:
             texts(list): each element is a dict with a 'processed' id list
Returns:
tensor(PaddleTensor): tensor with texts data
"""
lod = [0]
data = []
for i, text in enumerate(texts):
data += text['processed']
lod.append(len(text['processed']) + lod[i])
tensor = PaddleTensor(np.array(data).astype('int64'))
tensor.name = "words"
tensor.lod = [lod]
tensor.shape = [lod[-1], 1]
return tensor
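    # Illustrative sketch (not in the original source): for two processed
    # texts with 3 and 2 word ids, lod becomes [0, 3, 5], the flattened data
    # holds 5 ids, and tensor.shape is [5, 1] -- the LoD layout Paddle expects.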
@serving
def sentiment_classify(self, texts=[], data={}, use_gpu=False,
batch_size=1):
"""
        Get the sentiment prediction results with the texts as input
Args:
texts(list): the input texts to be predicted, if texts not data
data(dict): key must be 'text', value is the texts to be predicted, if data not texts
use_gpu(bool): whether use gpu to predict or not
batch_size(int): the program deals once with one batch
Returns:
results(dict): the word segmentation results
"""
try:
_places = os.environ["CUDA_VISIBLE_DEVICES"]
int(_places[0])
except:
use_gpu = False
if texts != [] and isinstance(texts, list) and data == {}:
predicted_data = texts
elif texts == [] and isinstance(data, dict) and isinstance(
data.get('text', None), list) and data['text']:
predicted_data = data["text"]
else:
raise ValueError(
"The input data is inconsistent with expectations.")
predicted_data = self.to_unicode(predicted_data)
start_idx = 0
iteration = int(math.ceil(len(predicted_data) / batch_size))
results = []
for i in range(iteration):
if i < (iteration - 1):
batch_data = predicted_data[start_idx:(start_idx + batch_size)]
else:
batch_data = predicted_data[start_idx:]
start_idx = start_idx + batch_size
processed_results = preprocess(self.word_seg_module, batch_data,
self.word_dict, use_gpu, batch_size)
tensor_words = self.texts2tensor(processed_results)
if use_gpu:
batch_out = self.gpu_predictor.run([tensor_words])
else:
batch_out = self.cpu_predictor.run([tensor_words])
batch_result = postprocess(batch_out[0], processed_results)
results += batch_result
return results
@runnable
def run_cmd(self, argvs):
"""
Run as a command
"""
self.parser = argparse.ArgumentParser(
description="Run the lac module.",
prog='hub run senta_bilstm',
usage='%(prog)s',
add_help=True)
self.arg_input_group = self.parser.add_argument_group(
title="Input options", description="Input data. Required")
self.arg_config_group = self.parser.add_argument_group(
title="Config options",
description=
"Run configuration for controlling module behavior, not required.")
self.add_module_config_arg()
self.add_module_input_arg()
args = self.parser.parse_args(argvs)
try:
input_data = self.check_input_data(args)
        except (DataFormatError, RuntimeError):
self.parser.print_help()
return None
        # `--user_dict` is a lac-style option this module never registers; guard
        # the leftover hook so argument parsing without it does not raise.
        if getattr(args, "user_dict", None):
            self.set_user_dict(args.user_dict)
results = self.sentiment_classify(
texts=input_data, use_gpu=args.use_gpu, batch_size=args.batch_size)
return results
def add_module_config_arg(self):
"""
Add the command config options
"""
self.arg_config_group.add_argument(
'--use_gpu',
type=ast.literal_eval,
default=False,
help="whether use GPU for prediction")
self.arg_config_group.add_argument(
'--batch_size',
type=int,
default=1,
help="batch size for prediction")
def add_module_input_arg(self):
"""
Add the command input options
"""
self.arg_input_group.add_argument(
'--input_file',
type=str,
default=None,
help="file contain input data")
self.arg_input_group.add_argument(
'--input_text', type=str, default=None, help="text to predict")
def check_input_data(self, args):
input_data = []
if args.input_file:
if not os.path.exists(args.input_file):
print("File %s is not exist." % args.input_file)
raise RuntimeError
else:
input_data = txt_parser.parse(args.input_file, use_strip=True)
elif args.input_text:
if args.input_text.strip() != '':
if six.PY2:
input_data = [
args.input_text.decode(
sys_stdin_encoding()).decode("utf8")
]
else:
input_data = [args.input_text]
else:
print(
"ERROR: The input data is inconsistent with expectations.")
if input_data == []:
print("ERROR: The input data is inconsistent with expectations.")
raise DataFormatError
return input_data
def get_vocab_path(self):
"""
        Get the path to the vocabulary which was used to pretrain
Returns:
self.vocab_path(str): the path to vocabulary
"""
return self.vocab_path
def get_labels(self):
"""
Get the labels which was used when pretraining
Returns:
self.labels(dict)
"""
self.labels = {"positive": 1, "negative": 0}
return self.labels
if __name__ == "__main__":
senta = SentaGRU()
# Data to be predicted
test_text = ["这家餐厅很好吃", "这部电影真的很差劲"]
# execute predict and print the result
input_dict = {"text": test_text}
results = senta.sentiment_classify(data=input_dict)
for index, result in enumerate(results):
if six.PY2:
print(json.dumps(
results[index], encoding="utf8", ensure_ascii=False))
else:
print(results[index])
results = senta.sentiment_classify(texts=test_text)
for index, result in enumerate(results):
if six.PY2:
print(json.dumps(
results[index], encoding="utf8", ensure_ascii=False))
else:
print(results[index])
| 33.402899
| 98
| 0.584346
|
0e4a175a56f27e7400eff03a87934f564fa5ebc7
| 1,438
|
py
|
Python
|
ob_pipelines/tasks/merge_ercc.py
|
outlierbio/ob-pipelines
|
c49c735d37f4eaa7e804df502a761580f7ce9e32
|
[
"Apache-2.0"
] | 11
|
2017-01-22T22:08:45.000Z
|
2020-03-10T20:17:14.000Z
|
ob_pipelines/tasks/merge_ercc.py
|
outlierbio/ob-pipelines
|
c49c735d37f4eaa7e804df502a761580f7ce9e32
|
[
"Apache-2.0"
] | null | null | null |
ob_pipelines/tasks/merge_ercc.py
|
outlierbio/ob-pipelines
|
c49c735d37f4eaa7e804df502a761580f7ce9e32
|
[
"Apache-2.0"
] | 6
|
2017-01-23T01:24:33.000Z
|
2018-07-18T13:30:06.000Z
|
from luigi import Parameter
from luigi.contrib.s3 import S3Target
from ob_pipelines import LoggingTaskWrapper
from ob_pipelines.apps.kallisto import merge_column
from ob_pipelines.config import settings
from ob_pipelines.entities.persistence import get_samples_by_experiment_id
from ob_pipelines.s3 import csv_to_s3
from ob_pipelines.tasks.ercc_quant import ERCCQuant
class MergeERCC(LoggingTaskWrapper):
expt_id = Parameter()
def requires(self):
return {
sample_id: ERCCQuant(sample_id=sample_id)
for sample_id in get_samples_by_experiment_id(self.expt_id)
}
def output(self):
prefix = '{}/{}/'.format(settings.get_target_bucket(), self.expt_id)
return {
'est_counts': S3Target(prefix + 'ercc.unstranded.est_counts.csv'),
'tpm': S3Target(prefix + 'ercc.unstranded.tpm.csv')
}
def run(self):
# Gather input filepaths and labels
tgt_dict = self.input()
sample_ids = list(tgt_dict.keys())
fpaths = [tgt_dict[sample_id]['abundance'].path for sample_id in sample_ids]
# Merge columns
annotations, est_counts = merge_column(fpaths, sample_ids, data_col='est_counts', annot=False)
annotations, tpm = merge_column(fpaths, sample_ids, data_col='tpm', annot=False)
csv_to_s3(est_counts, self.output()['est_counts'].path)
csv_to_s3(tpm, self.output()['tpm'].path)
| 35.95
| 102
| 0.698887
|
304d8ebb0ce7c017a49e8f9e4ce9a7d6bb630a5b
| 7,471
|
py
|
Python
|
aries_cloudagent/protocols/discovery/v1_0/manager.py
|
kuraakhilesh8230/aries-cloudagent-python
|
ee384d1330f6a50ff45a507392ce54f92900f23a
|
[
"Apache-2.0"
] | 4
|
2019-07-01T13:12:50.000Z
|
2019-07-02T20:01:37.000Z
|
aries_cloudagent/protocols/discovery/v1_0/manager.py
|
kuraakhilesh8230/aries-cloudagent-python
|
ee384d1330f6a50ff45a507392ce54f92900f23a
|
[
"Apache-2.0"
] | 51
|
2021-01-12T05:50:50.000Z
|
2022-03-25T06:03:13.000Z
|
aries_cloudagent/protocols/discovery/v1_0/manager.py
|
kuraakhilesh8230/aries-cloudagent-python
|
ee384d1330f6a50ff45a507392ce54f92900f23a
|
[
"Apache-2.0"
] | 12
|
2019-06-24T22:17:44.000Z
|
2019-07-02T19:49:31.000Z
|
"""Classes to manage discover features."""
import asyncio
import logging
from typing import Optional
from ....core.error import BaseError
from ....core.profile import Profile
from ....core.protocol_registry import ProtocolRegistry
from ....storage.error import StorageNotFoundError
from ....messaging.responder import BaseResponder
from .messages.disclose import Disclose
from .messages.query import Query
from .models.discovery_record import V10DiscoveryExchangeRecord
class V10DiscoveryMgrError(BaseError):
"""Discover feature v1_0 error."""
class V10DiscoveryMgr:
"""Class for discover feature v1_0 under RFC 31."""
def __init__(self, profile: Profile):
"""
Initialize a V10DiscoveryMgr.
Args:
profile: The profile for this manager
"""
self._profile = profile
self._logger = logging.getLogger(__name__)
@property
def profile(self) -> Profile:
"""
Accessor for the current Profile.
Returns:
The Profile for this manager
"""
return self._profile
async def receive_disclose(
self, disclose_msg: Disclose, connection_id: str
) -> V10DiscoveryExchangeRecord:
"""Receive Disclose message and return updated V10DiscoveryExchangeRecord."""
if disclose_msg._thread:
thread_id = disclose_msg._thread.thid
try:
async with self._profile.session() as session:
discover_exch_rec = await V10DiscoveryExchangeRecord.retrieve_by_id(
session=session, record_id=thread_id
)
except StorageNotFoundError:
discover_exch_rec = await self.lookup_exchange_rec_by_connection(
connection_id
)
if not discover_exch_rec:
discover_exch_rec = V10DiscoveryExchangeRecord()
else:
discover_exch_rec = await self.lookup_exchange_rec_by_connection(
connection_id
)
if not discover_exch_rec:
discover_exch_rec = V10DiscoveryExchangeRecord()
async with self._profile.session() as session:
discover_exch_rec.connection_id = connection_id
discover_exch_rec.disclose = disclose_msg
await discover_exch_rec.save(session)
return discover_exch_rec
async def lookup_exchange_rec_by_connection(
self, connection_id: str
) -> Optional[V10DiscoveryExchangeRecord]:
"""Retrieve V20DiscoveryExchangeRecord by connection_id."""
async with self._profile.session() as session:
if await V10DiscoveryExchangeRecord.exists_for_connection_id(
session=session, connection_id=connection_id
):
return await V10DiscoveryExchangeRecord.retrieve_by_connection_id(
session=session, connection_id=connection_id
)
else:
return None
async def receive_query(self, query_msg: Query) -> Disclose:
"""Process query and return the corresponding disclose message."""
registry = self._profile.context.inject_or(ProtocolRegistry)
query_str = query_msg.query
published_results = []
protocols = registry.protocols_matching_query(query_str)
results = await registry.prepare_disclosed(self._profile.context, protocols)
async with self._profile.session() as session:
to_publish_protocols = None
if (
session.settings.get("disclose_protocol_list")
and len(session.settings.get("disclose_protocol_list")) > 0
):
to_publish_protocols = session.settings.get("disclose_protocol_list")
for result in results:
to_publish_result = {}
if "pid" in result:
if (
to_publish_protocols
and result.get("pid") not in to_publish_protocols
):
continue
to_publish_result["pid"] = result.get("pid")
else:
continue
if "roles" in result:
to_publish_result["roles"] = result.get("roles")
published_results.append(to_publish_result)
disclose_msg = Disclose(protocols=published_results)
# Check if query message has a thid
# If disclosing this agents feature
if query_msg._thread:
disclose_msg.assign_thread_id(query_msg._thread.thid)
return disclose_msg
async def check_if_disclosure_received(
self, record_id: str
) -> V10DiscoveryExchangeRecord:
"""Check if disclosures has been received."""
while True:
async with self._profile.session() as session:
ex_rec = await V10DiscoveryExchangeRecord.retrieve_by_id(
session=session, record_id=record_id
)
if ex_rec.disclose:
return ex_rec
await asyncio.sleep(0.5)
async def create_and_send_query(
self, query: str, comment: str = None, connection_id: str = None
) -> V10DiscoveryExchangeRecord:
"""Create and send a Query message."""
query_msg = Query(query=query, comment=comment)
if connection_id:
async with self._profile.session() as session:
# If existing record exists for a connection_id
if await V10DiscoveryExchangeRecord.exists_for_connection_id(
session=session, connection_id=connection_id
):
discovery_ex_rec = (
await V10DiscoveryExchangeRecord.retrieve_by_connection_id(
session=session, connection_id=connection_id
)
)
discovery_ex_rec.disclose = None
await discovery_ex_rec.save(session)
else:
discovery_ex_rec = V10DiscoveryExchangeRecord()
discovery_ex_rec.query_msg = query_msg
discovery_ex_rec.connection_id = connection_id
await discovery_ex_rec.save(session)
query_msg.assign_thread_id(discovery_ex_rec.discovery_exchange_id)
responder = self._profile.inject_or(BaseResponder)
if responder:
await responder.send(query_msg, connection_id=connection_id)
else:
self._logger.exception(
"Unable to send discover-features v1 query message"
": BaseResponder unavailable"
)
try:
return await asyncio.wait_for(
self.check_if_disclosure_received(
record_id=discovery_ex_rec.discovery_exchange_id,
),
5,
)
except asyncio.TimeoutError:
return discovery_ex_rec
else:
# Disclose this agent's features and/or goal codes
discovery_ex_rec = V10DiscoveryExchangeRecord()
discovery_ex_rec.query_msg = query_msg
disclose = await self.receive_query(query_msg=query_msg)
discovery_ex_rec.disclose = disclose
return discovery_ex_rec
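# --- Illustrative usage sketch (not part of the original module; assumes an
# initialized `profile` and an existing `connection_id`) ---
#
#   mgr = V10DiscoveryMgr(profile)
#   # Ask the remote agent which protocols it supports and wait (up to the
#   # 5s window above) for the disclose message to arrive on the record.
#   record = await mgr.create_and_send_query(
#       query="*", comment="feature probe", connection_id=connection_id)
#   protocols = record.disclose.protocols if record.disclose else None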
| 40.166667
| 88
| 0.60514
|
d7b4772ef2055f664516f36d103e50d1900d2192
| 3,534
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/lactobacillussakei.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/lactobacillussakei.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/lactobacillussakei.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Lactobacillus sakei.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def LactobacillusSakei(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Lactobacillus sakei graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of the Lactobacillus sakei graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="LactobacillusSakei",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
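# Illustrative usage sketch (not part of the original module): retrieval and
# preprocessing happen on first call and are cached afterwards, e.g.
#   graph = LactobacillusSakei(version="links.v11.5")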
| 32.722222
| 223
| 0.676287
|
9e40b740c64092dec238e1e6ad24a9066ea6452d
| 6,478
|
py
|
Python
|
python/ray/tests/test_multi_node_2.py
|
LucaCappelletti94/ray
|
a3433ba51fbafae187fdc200617f75f96eb6779f
|
[
"Apache-2.0"
] | null | null | null |
python/ray/tests/test_multi_node_2.py
|
LucaCappelletti94/ray
|
a3433ba51fbafae187fdc200617f75f96eb6779f
|
[
"Apache-2.0"
] | 5
|
2021-08-25T16:17:15.000Z
|
2022-03-12T01:00:29.000Z
|
python/ray/tests/test_multi_node_2.py
|
yurirocha15/ray
|
f6d9996874c7e5e90380c968c6f570084a940ef3
|
[
"Apache-2.0"
] | 2
|
2020-05-22T15:36:27.000Z
|
2020-05-22T15:52:03.000Z
|
import logging
import pytest
import time
import ray
import ray.ray_constants as ray_constants
from ray.autoscaler.sdk import request_resources
from ray.monitor import Monitor
from ray.cluster_utils import Cluster
from ray.test_utils import generate_system_config_map, SignalActor
logger = logging.getLogger(__name__)
def test_cluster():
"""Basic test for adding and removing nodes in cluster."""
g = Cluster(initialize_head=False)
node = g.add_node()
node2 = g.add_node()
assert node.remaining_processes_alive()
assert node2.remaining_processes_alive()
g.remove_node(node2)
g.remove_node(node)
assert not any(n.any_processes_alive() for n in [node, node2])
def test_shutdown():
g = Cluster(initialize_head=False)
node = g.add_node()
node2 = g.add_node()
g.shutdown()
assert not any(n.any_processes_alive() for n in [node, node2])
@pytest.mark.parametrize(
"ray_start_cluster_head", [
generate_system_config_map(
num_heartbeats_timeout=20, object_timeout_milliseconds=12345)
],
indirect=True)
def test_system_config(ray_start_cluster_head):
"""Checks that the internal configuration setting works.
    We set the cluster to time out nodes after 2 seconds of missed heartbeats. We
then remove a node, wait for 1 second to check that the cluster is out
of sync, then wait another 2 seconds (giving 1 second of leeway) to check
that the client has timed out. We also check to see if the config is set.
"""
cluster = ray_start_cluster_head
worker = cluster.add_node()
cluster.wait_for_nodes()
@ray.remote
def f():
assert ray._config.object_timeout_milliseconds() == 12345
assert ray._config.num_heartbeats_timeout() == 20
ray.get([f.remote() for _ in range(5)])
cluster.remove_node(worker, allow_graceful=False)
time.sleep(1)
assert ray.cluster_resources()["CPU"] == 2
time.sleep(2)
assert ray.cluster_resources()["CPU"] == 1
def setup_monitor(address):
monitor = Monitor(
address, None, redis_password=ray_constants.REDIS_DEFAULT_PASSWORD)
monitor.update_raylet_map(_append_port=True)
return monitor
def verify_load_metrics(monitor, expected_resource_usage=None, timeout=30):
request_resources(num_cpus=42)
while True:
monitor.update_load_metrics()
monitor.update_resource_requests()
resource_usage = monitor.load_metrics._get_resource_usage()
# Check resource request propagation.
req = monitor.load_metrics.resource_requests
assert req == [{"CPU": 1}] * 42, req
if "memory" in resource_usage[0]:
del resource_usage[0]["memory"]
if "object_store_memory" in resource_usage[1]:
del resource_usage[0]["object_store_memory"]
if "memory" in resource_usage[1]:
del resource_usage[1]["memory"]
if "object_store_memory" in resource_usage[1]:
del resource_usage[1]["object_store_memory"]
for key in list(resource_usage[0].keys()):
if key.startswith("node:"):
del resource_usage[0][key]
for key in list(resource_usage[1].keys()):
if key.startswith("node:"):
del resource_usage[1][key]
if expected_resource_usage is None:
if all(x for x in resource_usage[0:]):
break
elif all(x == y
for x, y in zip(resource_usage, expected_resource_usage)):
break
else:
timeout -= 1
time.sleep(1)
if timeout <= 0:
raise ValueError("Timeout. {} != {}".format(
resource_usage, expected_resource_usage))
return resource_usage
@pytest.mark.parametrize(
"ray_start_cluster_head", [{
"num_cpus": 1,
}, {
"num_cpus": 2,
}],
indirect=True)
def test_heartbeats_single(ray_start_cluster_head):
"""Unit test for `Cluster.wait_for_nodes`.
Test proper metrics.
"""
cluster = ray_start_cluster_head
monitor = setup_monitor(cluster.address)
total_cpus = ray.state.cluster_resources()["CPU"]
verify_load_metrics(monitor, ({"CPU": 0.0}, {"CPU": total_cpus}))
@ray.remote
def work(signal):
wait_signal = signal.wait.remote()
while True:
ready, not_ready = ray.wait([wait_signal], timeout=0)
if len(ready) == 1:
break
time.sleep(1)
signal = SignalActor.remote()
work_handle = work.remote(signal)
verify_load_metrics(monitor, ({"CPU": 1.0}, {"CPU": total_cpus}))
ray.get(signal.send.remote())
ray.get(work_handle)
@ray.remote(num_cpus=1)
class Actor:
def work(self, signal):
wait_signal = signal.wait.remote()
while True:
ready, not_ready = ray.wait([wait_signal], timeout=0)
if len(ready) == 1:
break
time.sleep(1)
signal = SignalActor.remote()
test_actor = Actor.remote()
work_handle = test_actor.work.remote(signal)
time.sleep(1) # Time for actor to get placed and the method to start.
verify_load_metrics(monitor, ({"CPU": 1.0}, {"CPU": total_cpus}))
ray.get(signal.send.remote())
ray.get(work_handle)
def test_wait_for_nodes(ray_start_cluster_head):
"""Unit test for `Cluster.wait_for_nodes`.
Adds 4 workers, waits, then removes 4 workers, waits,
then adds 1 worker, waits, and removes 1 worker, waits.
"""
cluster = ray_start_cluster_head
workers = [cluster.add_node() for i in range(4)]
cluster.wait_for_nodes()
[cluster.remove_node(w) for w in workers]
cluster.wait_for_nodes()
assert ray.cluster_resources()["CPU"] == 1
worker2 = cluster.add_node()
cluster.wait_for_nodes()
cluster.remove_node(worker2)
cluster.wait_for_nodes()
assert ray.cluster_resources()["CPU"] == 1
@pytest.mark.parametrize(
"call_ray_start", [
"ray start --head --ray-client-server-port 20000 " +
"--min-worker-port=0 --max-worker-port=0 --port 0"
],
indirect=True)
def test_ray_client(call_ray_start):
from ray.util.client import ray
ray.connect("localhost:20000")
@ray.remote
def f():
return "hello client"
assert ray.get(f.remote()) == "hello client"
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
| 29.990741
| 77
| 0.650818
|
cb2b5e43b3284095d7b2dfc29086a567d34552c3
| 581
|
py
|
Python
|
gitman/__main__.py
|
petergmorgan/gitman
|
6960d04956bbcc2c1512fcaed7e5d7c34f5999ba
|
[
"MIT"
] | null | null | null |
gitman/__main__.py
|
petergmorgan/gitman
|
6960d04956bbcc2c1512fcaed7e5d7c34f5999ba
|
[
"MIT"
] | null | null | null |
gitman/__main__.py
|
petergmorgan/gitman
|
6960d04956bbcc2c1512fcaed7e5d7c34f5999ba
|
[
"MIT"
] | null | null | null |
"""Package entry point."""
# Declare itself as package if needed for better debugging support
# pylint: disable=multiple-imports,wrong-import-position,redefined-builtin,used-before-assignment
if __name__ == '__main__' and __package__ is None: # pragma: no cover
import os, sys, importlib
parent_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(parent_dir))
__package__ = os.path.basename(parent_dir)
importlib.import_module(__package__)
from gitman.cli import main
if __name__ == '__main__': # pragma: no cover
main()
| 32.277778
| 97
| 0.743546
|
a47f940bfe5e62ec33565bd6df2c2c83fa05a4b7
| 20,454
|
py
|
Python
|
torch/_tensor_str.py
|
mcx/pytorch
|
b02b3f25dbffeead4f06d32e75dd4a282e8b0b5f
|
[
"Intel"
] | null | null | null |
torch/_tensor_str.py
|
mcx/pytorch
|
b02b3f25dbffeead4f06d32e75dd4a282e8b0b5f
|
[
"Intel"
] | null | null | null |
torch/_tensor_str.py
|
mcx/pytorch
|
b02b3f25dbffeead4f06d32e75dd4a282e8b0b5f
|
[
"Intel"
] | null | null | null |
import math
import torch
from torch._six import inf
from typing import Optional
class __PrinterOptions(object):
precision: int = 4
threshold: float = 1000
edgeitems: int = 3
linewidth: int = 80
sci_mode: Optional[bool] = None
PRINT_OPTS = __PrinterOptions()
# We could use **kwargs, but this will give better docs
def set_printoptions(
precision=None,
threshold=None,
edgeitems=None,
linewidth=None,
profile=None,
sci_mode=None
):
r"""Set options for printing. Items shamelessly taken from NumPy
Args:
precision: Number of digits of precision for floating point output
(default = 4).
threshold: Total number of array elements which trigger summarization
rather than full `repr` (default = 1000).
edgeitems: Number of array items in summary at beginning and end of
each dimension (default = 3).
linewidth: The number of characters per line for the purpose of
inserting line breaks (default = 80). Thresholded matrices will
ignore this parameter.
profile: Sane defaults for pretty printing. Can override with any of
the above options. (any one of `default`, `short`, `full`)
sci_mode: Enable (True) or disable (False) scientific notation. If
None (default) is specified, the value is defined by
`torch._tensor_str._Formatter`. This value is automatically chosen
by the framework.
Example::
>>> torch.set_printoptions(precision=2)
>>> torch.tensor([1.12345])
tensor([1.12])
>>> torch.set_printoptions(threshold=5)
>>> torch.arange(10)
tensor([0, 1, 2, ..., 7, 8, 9])
"""
if profile is not None:
if profile == "default":
PRINT_OPTS.precision = 4
PRINT_OPTS.threshold = 1000
PRINT_OPTS.edgeitems = 3
PRINT_OPTS.linewidth = 80
elif profile == "short":
PRINT_OPTS.precision = 2
PRINT_OPTS.threshold = 1000
PRINT_OPTS.edgeitems = 2
PRINT_OPTS.linewidth = 80
elif profile == "full":
PRINT_OPTS.precision = 4
PRINT_OPTS.threshold = inf
PRINT_OPTS.edgeitems = 3
PRINT_OPTS.linewidth = 80
if precision is not None:
PRINT_OPTS.precision = precision
if threshold is not None:
PRINT_OPTS.threshold = threshold
if edgeitems is not None:
PRINT_OPTS.edgeitems = edgeitems
if linewidth is not None:
PRINT_OPTS.linewidth = linewidth
PRINT_OPTS.sci_mode = sci_mode
class _Formatter(object):
def __init__(self, tensor):
self.floating_dtype = tensor.dtype.is_floating_point
self.int_mode = True
self.sci_mode = False
self.max_width = 1
with torch.no_grad():
tensor_view = tensor.reshape(-1)
if not self.floating_dtype:
for value in tensor_view:
value_str = '{}'.format(value)
self.max_width = max(self.max_width, len(value_str))
else:
nonzero_finite_vals = torch.masked_select(tensor_view, torch.isfinite(tensor_view) & tensor_view.ne(0))
if nonzero_finite_vals.numel() == 0:
# no valid number, do nothing
return
# Convert to double for easy calculation. HalfTensor overflows with 1e8, and there's no div() on CPU.
nonzero_finite_abs = nonzero_finite_vals.abs().double()
nonzero_finite_min = nonzero_finite_abs.min().double()
nonzero_finite_max = nonzero_finite_abs.max().double()
for value in nonzero_finite_vals:
if value != torch.ceil(value):
self.int_mode = False
break
if self.int_mode:
# in int_mode for floats, all numbers are integers, and we append a decimal to nonfinites
# to indicate that the tensor is of floating type. add 1 to the len to account for this.
if nonzero_finite_max / nonzero_finite_min > 1000. or nonzero_finite_max > 1.e8:
self.sci_mode = True
for value in nonzero_finite_vals:
value_str = ('{{:.{}e}}').format(PRINT_OPTS.precision).format(value)
self.max_width = max(self.max_width, len(value_str))
else:
for value in nonzero_finite_vals:
value_str = ('{:.0f}').format(value)
self.max_width = max(self.max_width, len(value_str) + 1)
else:
# Check if scientific representation should be used.
if nonzero_finite_max / nonzero_finite_min > 1000.\
or nonzero_finite_max > 1.e8\
or nonzero_finite_min < 1.e-4:
self.sci_mode = True
for value in nonzero_finite_vals:
value_str = ('{{:.{}e}}').format(PRINT_OPTS.precision).format(value)
self.max_width = max(self.max_width, len(value_str))
else:
for value in nonzero_finite_vals:
value_str = ('{{:.{}f}}').format(PRINT_OPTS.precision).format(value)
self.max_width = max(self.max_width, len(value_str))
if PRINT_OPTS.sci_mode is not None:
self.sci_mode = PRINT_OPTS.sci_mode
def width(self):
return self.max_width
def format(self, value):
if self.floating_dtype:
if self.sci_mode:
ret = ('{{:{}.{}e}}').format(self.max_width, PRINT_OPTS.precision).format(value)
elif self.int_mode:
ret = '{:.0f}'.format(value)
if not (math.isinf(value) or math.isnan(value)):
ret += '.'
else:
ret = ('{{:.{}f}}').format(PRINT_OPTS.precision).format(value)
else:
ret = '{}'.format(value)
return (self.max_width - len(ret)) * ' ' + ret
def _scalar_str(self, formatter1, formatter2=None):
if formatter2 is not None:
real_str = _scalar_str(self.real, formatter1)
imag_str = (_scalar_str(self.imag, formatter2) + "j").lstrip()
# handles negative numbers, +0.0, -0.0
if imag_str[0] == '+' or imag_str[0] == '-':
return real_str + imag_str
else:
return real_str + "+" + imag_str
else:
return formatter1.format(self.item())
def _vector_str(self, indent, summarize, formatter1, formatter2=None):
# length includes spaces and comma between elements
element_length = formatter1.width() + 2
if formatter2 is not None:
# width for imag_formatter + an extra j for complex
element_length += formatter2.width() + 1
elements_per_line = max(1, int(math.floor((PRINT_OPTS.linewidth - indent) / (element_length))))
char_per_line = element_length * elements_per_line
def _val_formatter(val, formatter1=formatter1, formatter2=formatter2):
if formatter2 is not None:
real_str = formatter1.format(val.real)
imag_str = (formatter2.format(val.imag) + "j").lstrip()
# handles negative numbers, +0.0, -0.0
if imag_str[0] == '+' or imag_str[0] == '-':
return real_str + imag_str
else:
return real_str + "+" + imag_str
else:
return formatter1.format(val)
if summarize and self.size(0) > 2 * PRINT_OPTS.edgeitems:
data = ([_val_formatter(val) for val in self[:PRINT_OPTS.edgeitems].tolist()] +
[' ...'] +
[_val_formatter(val) for val in self[-PRINT_OPTS.edgeitems:].tolist()])
else:
data = [_val_formatter(val) for val in self.tolist()]
data_lines = [data[i:i + elements_per_line] for i in range(0, len(data), elements_per_line)]
lines = [', '.join(line) for line in data_lines]
return '[' + (',' + '\n' + ' ' * (indent + 1)).join(lines) + ']'
# formatter2 is only used for printing complex tensors.
# For complex tensors, formatter1 and formatter2 are the formatters for tensor.real
# and tensor.imag respectively
def _tensor_str_with_formatter(self, indent, summarize, formatter1, formatter2=None):
dim = self.dim()
if dim == 0:
return _scalar_str(self, formatter1, formatter2)
if dim == 1:
return _vector_str(self, indent, summarize, formatter1, formatter2)
if summarize and self.size(0) > 2 * PRINT_OPTS.edgeitems:
slices = ([_tensor_str_with_formatter(self[i], indent + 1, summarize, formatter1, formatter2)
for i in range(0, PRINT_OPTS.edgeitems)] +
['...'] +
[_tensor_str_with_formatter(self[i], indent + 1, summarize, formatter1, formatter2)
for i in range(len(self) - PRINT_OPTS.edgeitems, len(self))])
else:
slices = [_tensor_str_with_formatter(self[i], indent + 1, summarize, formatter1, formatter2)
for i in range(0, self.size(0))]
tensor_str = (',' + '\n' * (dim - 1) + ' ' * (indent + 1)).join(slices)
return '[' + tensor_str + ']'
def _tensor_str(self, indent):
if self.numel() == 0:
return '[]'
if self.has_names():
# There are two main codepaths (possibly more) that tensor printing goes through:
# - tensor data can fit comfortably on screen
# - tensor data needs to be summarized
# Some of the codepaths don't fully support named tensors, so we send in
# an unnamed tensor to the formatting code as a workaround.
self = self.rename(None)
summarize = self.numel() > PRINT_OPTS.threshold
if self._is_zerotensor():
self = self.clone()
# handle the negative bit
if self.is_neg():
self = self.resolve_neg()
if self.dtype is torch.float16 or self.dtype is torch.bfloat16:
self = self.float()
if self.dtype is torch.complex32:
self = self.cfloat()
if self.dtype.is_complex:
# handle the conjugate bit
self = self.resolve_conj()
real_formatter = _Formatter(get_summarized_data(self.real) if summarize else self.real)
imag_formatter = _Formatter(get_summarized_data(self.imag) if summarize else self.imag)
return _tensor_str_with_formatter(self, indent, summarize, real_formatter, imag_formatter)
else:
formatter = _Formatter(get_summarized_data(self) if summarize else self)
return _tensor_str_with_formatter(self, indent, summarize, formatter)
def _add_suffixes(tensor_str, suffixes, indent, force_newline):
tensor_strs = [tensor_str]
last_line_len = len(tensor_str) - tensor_str.rfind('\n') + 1
for suffix in suffixes:
suffix_len = len(suffix)
if force_newline or last_line_len + suffix_len + 2 > PRINT_OPTS.linewidth:
tensor_strs.append(',\n' + ' ' * indent + suffix)
last_line_len = indent + suffix_len
force_newline = False
else:
tensor_strs.append(', ' + suffix)
last_line_len += suffix_len + 2
tensor_strs.append(')')
return ''.join(tensor_strs)
def get_summarized_data(self):
dim = self.dim()
if dim == 0:
return self
if dim == 1:
if self.size(0) > 2 * PRINT_OPTS.edgeitems:
return torch.cat((self[:PRINT_OPTS.edgeitems], self[-PRINT_OPTS.edgeitems:]))
else:
return self
if self.size(0) > 2 * PRINT_OPTS.edgeitems:
start = [self[i] for i in range(0, PRINT_OPTS.edgeitems)]
end = ([self[i]
for i in range(len(self) - PRINT_OPTS.edgeitems, len(self))])
return torch.stack([get_summarized_data(x) for x in (start + end)])
else:
return torch.stack([get_summarized_data(x) for x in self])
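# Illustration of the summarization above, assuming the default
# PRINT_OPTS.edgeitems of 3: only the first and last `edgeitems` entries of
# each oversized dimension survive, recursively, so a (1000,) tensor shrinks
# to 6 elements before any formatting happens. The '...' filler itself is
# added later, by _vector_str and _tensor_str_with_formatter.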
def _str_intern(inp, *, tensor_contents=None):
is_plain_tensor = type(inp) is torch.Tensor or type(inp) is torch.nn.Parameter
if inp.is_nested:
prefix = "nested_tensor("
elif is_plain_tensor:
prefix = 'tensor('
else:
prefix = f"{type(inp).__name__}("
indent = len(prefix)
suffixes = []
custom_contents_provided = tensor_contents is not None
if custom_contents_provided:
tensor_str = tensor_contents
# This is used to extract the primal value and thus disable the forward AD
# within this function.
# TODO(albanD) This needs to be updated when more than one level is supported
self, tangent = torch.autograd.forward_ad.unpack_dual(inp)
# Note [Print tensor device]:
# A general logic here is we only print device when it doesn't match
# the device specified in default tensor type.
# Currently torch.set_default_tensor_type() only supports CPU/CUDA, thus
# torch._C._get_default_device() only returns either cpu or cuda.
# In other cases, we don't have a way to set them as default yet,
# and we should always print out device for them.
if self.device.type != torch._C._get_default_device()\
or (self.device.type == 'cuda' and torch.cuda.current_device() != self.device.index):
suffixes.append('device=\'' + str(self.device) + '\'')
# Tensor printing performs tensor operations like slice, indexing, etc to make it in a
# representable format. These operations on xla/lazy tensor results in compilations. Hence,
# to avoid compilations, copying the tensor to cpu before printing.
if self.device.type == 'xla' or self.device.type == 'lazy':
self = self.to('cpu')
# TODO: add an API to map real -> complex dtypes
_default_complex_dtype = torch.cdouble if torch.get_default_dtype() == torch.double else torch.cfloat
has_default_dtype = self.dtype in (torch.get_default_dtype(), _default_complex_dtype, torch.int64, torch.bool)
if self.is_sparse:
suffixes.append('size=' + str(tuple(self.shape)))
suffixes.append('nnz=' + str(self._nnz()))
if not has_default_dtype:
suffixes.append('dtype=' + str(self.dtype))
if not custom_contents_provided:
indices_prefix = 'indices=tensor('
indices = self._indices().detach()
indices_str = _tensor_str(indices, indent + len(indices_prefix))
if indices.numel() == 0:
indices_str += ', size=' + str(tuple(indices.shape))
values_prefix = 'values=tensor('
values = self._values().detach()
values_str = _tensor_str(values, indent + len(values_prefix))
if values.numel() == 0:
values_str += ', size=' + str(tuple(values.shape))
tensor_str = indices_prefix + indices_str + '),\n' + ' ' * indent + values_prefix + values_str + ')'
elif self.is_sparse_csr:
suffixes.append('size=' + str(tuple(self.shape)))
suffixes.append('nnz=' + str(self._nnz()))
if not has_default_dtype:
suffixes.append('dtype=' + str(self.dtype))
if not custom_contents_provided:
crow_indices_prefix = 'crow_indices=tensor('
crow_indices = self.crow_indices().detach()
crow_indices_str = _tensor_str(crow_indices, indent + len(crow_indices_prefix))
if crow_indices.numel() == 0:
crow_indices_str += ', size=' + str(tuple(crow_indices.shape))
col_indices_prefix = 'col_indices=tensor('
col_indices = self.col_indices().detach()
col_indices_str = _tensor_str(col_indices, indent + len(col_indices_prefix))
if col_indices.numel() == 0:
col_indices_str += ', size=' + str(tuple(col_indices.shape))
values_prefix = 'values=tensor('
values = self.values().detach()
values_str = _tensor_str(values, indent + len(values_prefix))
if values.numel() == 0:
values_str += ', size=' + str(tuple(values.shape))
tensor_str = crow_indices_prefix + crow_indices_str + '),\n' + ' ' * indent +\
col_indices_prefix + col_indices_str + '),\n' + ' ' * indent +\
values_prefix + values_str + ')'
elif self.is_quantized:
suffixes.append('size=' + str(tuple(self.shape)))
if not has_default_dtype:
suffixes.append('dtype=' + str(self.dtype))
suffixes.append('quantization_scheme=' + str(self.qscheme()))
if self.qscheme() == torch.per_tensor_affine or self.qscheme() == torch.per_tensor_symmetric:
suffixes.append('scale=' + str(self.q_scale()))
suffixes.append('zero_point=' + str(self.q_zero_point()))
elif self.qscheme() == torch.per_channel_affine or self.qscheme() == torch.per_channel_symmetric \
or self.qscheme() == torch.per_channel_affine_float_qparams:
suffixes.append('scale=' + str(self.q_per_channel_scales()))
suffixes.append('zero_point=' + str(self.q_per_channel_zero_points()))
suffixes.append('axis=' + str(self.q_per_channel_axis()))
if not custom_contents_provided:
tensor_str = _tensor_str(self.dequantize(), indent)
elif self.is_nested:
if not custom_contents_provided:
def indented_str(s, indent):
return "\n".join(f" {line}" for line in s.split("\n"))
strs = ",\n".join(indented_str(str(t), indent + 1) for t in torch.ops.aten.unbind.int(self, 0))
tensor_str = f"[\n{strs}\n]"
else:
if self.is_meta:
suffixes.append('size=' + str(tuple(self.shape)))
if self.dtype != torch.get_default_dtype():
suffixes.append('dtype=' + str(self.dtype))
# TODO: This implies that ellipses is valid syntax for allocating
# a meta tensor, which it could be, but it isn't right now
if not custom_contents_provided:
tensor_str = '...'
else:
if self.numel() == 0 and not self.is_sparse:
# Explicitly print the shape if it is not (0,), to match NumPy behavior
if self.dim() != 1:
suffixes.append('size=' + str(tuple(self.shape)))
# In an empty tensor, there are no elements to infer if the dtype
# should be int64, so it must be shown explicitly.
if self.dtype != torch.get_default_dtype():
suffixes.append('dtype=' + str(self.dtype))
if not custom_contents_provided:
tensor_str = '[]'
else:
if not has_default_dtype:
suffixes.append('dtype=' + str(self.dtype))
if not custom_contents_provided:
if self.layout != torch.strided:
tensor_str = _tensor_str(self.to_dense(), indent)
else:
tensor_str = _tensor_str(self, indent)
if self.layout != torch.strided:
suffixes.append('layout=' + str(self.layout))
# Use inp here to get the original grad_fn and not the one generated by the forward grad
# unpacking.
if inp.grad_fn is not None:
name = type(inp.grad_fn).__name__
if name == 'CppFunction':
name = inp.grad_fn.name().rsplit('::', 1)[-1]
suffixes.append('grad_fn=<{}>'.format(name))
elif inp.requires_grad:
suffixes.append('requires_grad=True')
if self.has_names():
suffixes.append('names={}'.format(self.names))
if tangent is not None:
suffixes.append('tangent={}'.format(tangent))
string_repr = _add_suffixes(prefix + tensor_str, suffixes, indent, force_newline=self.is_sparse)
# Check if this instance is flagged as a parameter and change the repr accordingly.
# Unfortunately, this function has to be aware of this detail.
# NB: This is currently skipped for plain tensor parameters to maintain BC. In the future,
# this should be done for those as well to produce a valid repr.
if isinstance(self, torch.nn.Parameter) and not is_plain_tensor:
string_repr = f"Parameter({string_repr})"
return string_repr
def _str(self, *, tensor_contents=None):
with torch.no_grad():
return _str_intern(self, tensor_contents=tensor_contents)
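A minimal usage sketch of the printing machinery above; `torch.set_printoptions` is the public interface to `PRINT_OPTS`, and the option values below are chosen only to force the summarization and suffix paths to fire:

import torch

torch.set_printoptions(threshold=5, edgeitems=2)
t = torch.arange(10, dtype=torch.float64)
print(t)  # roughly: tensor([0., 1., ..., 8., 9.], dtype=torch.float64)
torch.set_printoptions(profile="default")  # restore threshold=1000, edgeitems=3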
| 43.705128
| 115
| 0.609465
|
eefc5464fbb9ab9b28869f719742ae62d4e727c7
| 12,301
|
py
|
Python
|
mc/debug.py
|
arup-group/mc
|
50b8faa8b9d40dece88e0a27f911edd427ebc064
|
[
"MIT"
] | null | null | null |
mc/debug.py
|
arup-group/mc
|
50b8faa8b9d40dece88e0a27f911edd427ebc064
|
[
"MIT"
] | 12
|
2021-12-14T15:10:43.000Z
|
2022-03-31T13:39:25.000Z
|
mc/debug.py
|
arup-group/mc
|
50b8faa8b9d40dece88e0a27f911edd427ebc064
|
[
"MIT"
] | null | null | null |
"""
Class inherited by BaseConfig for carrying out debugging.
"""
from typing import Tuple
class BaseDebug:
"""
Debugging Base class.
"""
def get(self, key, default):
raise NotImplementedError
def debug(self, verbose=True) -> Tuple[bool, list]:
"""
Build a list of debug messages.
:param verbose: bool
:return: tuple[bool, list]
"""
logger = list()
logger.extend(self.log_multimodal_module())
logger.extend(self.log_bad_paths())
logger.extend(self.log_bad_subpopulations())
logger.extend(self.log_bad_scoring())
logger.extend(self.log_missing_modes())
if verbose and len(logger):
print('\n---------WARNING--------')
for log in logger:
print(log)
print('----------DONE----------')
return len(logger) < 1, logger
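    # Illustrative usage -- an assumption, since the concrete BaseConfig that
    # mixes this class in must supply `get` and mapping-style access:
    #
    #     config = BaseConfig(path="matsim_config.xml")
    #     ok, logs = config.debug(verbose=False)
    #     if not ok:
    #         for line in logs:
    #             print(line)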
def log_multimodal_module(self) -> list:
"""
        Report if the multimodal module may be misconfigured.
:return: list
"""
logger = []
if self.get('multimodal'):
modes = self['multimodal']['simulatedModes'].split(",")
if (not self['multimodal'].get('createMultiModalNetwork'))\
or self['multimodal']['createMultiModalNetwork'] != "true":
logger.append(f"MULTIMODAL: auto multimodal network disabled, input network must "
f"include all modes: {modes}")
if not self.get('travelTimeCalculator'):
logger.append("MULTIMODAL: multimodal module requires travelTimeCalculator module")
else:
if not self['travelTimeCalculator'].get('analyzedModes'):
logger.append(
"MULTIMODAL: multimodal module requires list of modes at analyzedModes@travelTimeCalculator")
if not self['travelTimeCalculator'].get('filterModes') == 'true':
logger.append(
"MULTIMODAL: multimodal module requires filterModes@travelTimeCalculator set to 'true'")
if not self['travelTimeCalculator'].get('separateModes') == 'false':
logger.append(
"MULTIMODAL: multimodal module requires separateModes@travelTimeCalculator set to 'false'")
for m in modes:
if not self['planscalcroute'].get(f'teleportedModeParameters:{m}'):
logger.append(
f"MULTIMODAL: depending on the MATSim version, multimodal module requires "
f"mode:{m} teleport speed to be set in planscalcroute module.")
return logger
def log_bad_paths(self) -> list:
"""
Build a list of debug messages for bad paths.
:return: list
"""
logger = []
for name, path in self.get_paths().items():
log = bad_path(name, path)
if log:
                logger.append(log)
return logger
def get_paths(self) -> dict:
"""
Build a dict of paths from config.
:return: dict
"""
return {
'network_path': self['network'].get('inputNetworkFile', None),
'plans_path': self['plans'].get('inputPlansFile', None),
'attributes_path': self['plans'].get('inputPersonAttributesFile', None),
'transit_path': self['transit'].get('transitScheduleFile', None),
'transit_vehicles_path': self['transit'].get('vehiclesFile', None),
}
def log_bad_subpopulations(self) -> list:
"""
Build a list of debug messages for bad subpopulations.
:return: list
"""
logger = []
# Scoring:
scoring_subpops = []
for paramset in self['planCalcScore'].parametersets.values():
scoring_subpops.append(paramset['subpopulation'])
# check for duplicates
for s in scoring_subpops:
if scoring_subpops.count(s) > 1:
logger.append(f"SUBPOP:{s} defined more than once in planCalcScore")
# check for default
if 'default' not in scoring_subpops:
logger.append("SUBPOP default subpop missing from planCalcScore")
# Strategy:
strategy_subpops = []
for paramset in self['strategy'].parametersets.values():
strategy_subpops.append(paramset['subpopulation'])
# check for duplicates
for s in strategy_subpops:
            if strategy_subpops.count(s) > 1:
logger.append(f"SUBPOP:{s} defined more than once in strategy")
# check equal
missing_scoring = set(strategy_subpops) - set(scoring_subpops)
if missing_scoring:
logger.append(f"SUBPOP {missing_scoring} subpop missing from planCalcScore")
missing_strategy = set(scoring_subpops) - set(strategy_subpops) - set(['default'])
if missing_strategy:
logger.append(f"SUBPOP {missing_strategy} subpop missing from strategy")
return logger
def log_bad_scoring(self) -> list:
"""
Build a list of debug messages for bad scoring.
:return: list
"""
logger = []
# Scoring:
scoring_modes = {}
scoring_acts = {}
for subpop_paramset in self['planCalcScore'].parametersets.values():
subpop_modes = []
subpop_acts = []
subpop_mode_cost = {}
subpopulation = subpop_paramset['subpopulation']
mum = subpop_paramset['marginalUtilityOfMoney']
for paramset in subpop_paramset.parametersets.values():
mode = paramset.get("mode")
act = paramset.get("activityType")
if mode:
subpop_modes.append(mode)
dist_cost_rate = paramset.get('monetaryDistanceRate')
dist_util_rate = paramset.get('marginalUtilityOfDistance_util_m')
hour_util_rate = paramset.get('marginalUtilityOfTraveling_util_hr')
subpop_mode_cost[mode] = calc_cost(
logger, dist_cost_rate, mum, dist_util_rate, hour_util_rate, mode
)
if act:
subpop_acts.append(act)
# check for duplicates
log_duplicates(logger, subpop_modes, 'MODES', subpopulation)
log_duplicates(logger, subpop_acts, 'ACTIVITIES', subpopulation)
# compare cost to walking
log_cost_comparison(logger, subpop_mode_cost, 'MODE COST', subpopulation)
scoring_modes[subpopulation] = subpop_modes
scoring_acts[subpopulation] = subpop_acts
# check for consistency
all_modes = set([m for ml in scoring_modes.values() for m in ml])
all_acts = set([a for al in scoring_acts.values() for a in al])
for subpopulation, ml in scoring_modes.items():
log_consistency(logger, ml, all_modes, 'MODES', subpopulation)
for subpopulation, al in scoring_acts.items():
log_consistency(logger, al, all_acts, 'ACTIVITIES', subpopulation)
return logger
def log_missing_modes(self) -> list:
"""
        Build debug messages for missing modes.
:return: list
"""
logger = []
# build set of observed modes from config
all_modes = set()
# look for modes in qsim module
# if 'qsim' not in self:
# logger.append(
# "MISSING MODULE: 'qsim' module not found"
# )
# elif 'mainMode' not in list(self['qsim'].params):
# logger.append(
# "MISSING MODES: 'mainMode' param not found in: qsim"
# )
# else:
# all_modes.update(self['qsim']['mainMode'].split(','))
# look for modes in subtourModeChoice module
if 'SubtourModeChoice' in self:
logger.append(
"BAD MODULE SPELLING: 'SubtourModeChoice' => 'subtourModeChoice'"
)
if 'subtourModeChoice' not in self:
logger.append(
"MISSING MODULE: 'subtourModeChoice' module not found"
)
elif 'modes' not in list(self['subtourModeChoice'].params):
logger.append(
"MISSING MODES: 'modes' param not found in: subtourModeChoice"
)
else:
all_modes.update(self['subtourModeChoice']['modes'].split(','))
if 'swissRailRaptor' in self:
all_modes.add('pt')
for name, paramset in self['swissRailRaptor'].parametersets.items():
if 'passengerMode' in paramset.params:
all_modes.add(paramset['passengerMode'])
# # look for modes in planscalcroute module
# if 'planscalcroute' not in self:
# logger.append(
# "MISSING MODULE: 'planscalcroute' module not found - need 'access_walk' config"
# )
# # Additionally check that access walk has been set up in plancalcroute
# if 'teleportedModeParameters:access_walk' not in list(
# self['planscalcroute'].parametersets
# ):
# logger.append(f"MISSING MODE: access_walk mode not found in: planscalcroute")
# elif 'networkModes' in list(self['planscalcroute'].params):
# all_modes.update(self['planscalcroute']['networkModes'].split(','))
all_modes.update(['access_walk'])
# check for scoring configuration of all modes across all subpopulations
modes = []
for subpop_paramset in self['planCalcScore'].parametersets.values():
subpopulation = subpop_paramset['subpopulation']
for paramset in subpop_paramset.parametersets.values():
mode = paramset.get("mode")
if mode:
modes.append(mode)
for _mode in all_modes:
if _mode not in modes:
logger.append(
f"MISSING MODE SCORING: {_mode} not found in: planCalcScore:{subpopulation}"
)
return logger
def bad_path(name: str, path: str) -> str:
"""
    Build a debug message for a bad path.
:param name: str
:param path: str
:return: str
"""
if not path:
return f"PATH: missing {name}"
if not (path[-4:] == '.xml' or path[-7:] == '.xml.gz'):
return f"PATH: unknown extension {name, path}"
return None
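# Behaviour of the checks above, for example:
#   bad_path("network_path", None)          -> "PATH: missing network_path"
#   bad_path("plans_path", "plans.txt")     -> "PATH: unknown extension ('plans_path', 'plans.txt')"
#   bad_path("plans_path", "plans.xml.gz")  -> None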
def log_duplicates(logger: list, targets: list, log_type: str, location: str) -> None:
"""
    Add debug messages to the logger for duplicated items in a list.
:param logger: list
:param targets: list
:param log_type: str
:param location: str
:return: None
"""
for t in list(set(targets)):
if targets.count(t) > 1:
logger.append(f"{log_type}:{t} defined more than once in: {location}")
def log_consistency(logger, targets, master, log_type, location):
missing_scoring = set(master) - set(targets)
if missing_scoring:
logger.append(f"{log_type} {missing_scoring} missing in: {location}")
def log_cost_comparison(logger, costs, log_type, location):
walk_cost = costs.get('walk')
if not walk_cost:
logger.append(f"{log_type}: walking mode not found in: {location}")
for mode, cost in costs.items():
if mode == 'walk':
continue
if walk_cost and cost < walk_cost:
logger.append(f"{log_type}: {mode} may be more expensive than walking: {location}")
def calc_cost(logger, dist_cost_rate, mum, dist_util_rate, hour_util_rate, mode):
speed_map = {
'bike': 4.2,
'piggyback': 0.6,
'walk': 0.83,
'access_walk': 0.83,
'egress_walk': 0.83,
'pt': 10,
'car': 10
}
if mode not in list(speed_map):
logger.append(f"WARNING: {mode} mode speed unknown, approximating as car speed: {speed_map['car']}")
mode = 'car' # default to car speed
dist_cost = (float(dist_cost_rate) * float(mum)) + float(dist_util_rate)
time_cost = float(hour_util_rate) / (speed_map[mode] * 3600)
return dist_cost + time_cost
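A quick sketch of the arithmetic in calc_cost above, with made-up rates (every parameter value below is an assumption for illustration; the rates arrive as strings because they come straight from the parsed config):

logger = []
walk = calc_cost(logger, dist_cost_rate="0", mum="1",
                 dist_util_rate="0", hour_util_rate="-6", mode="walk")
car = calc_cost(logger, dist_cost_rate="-0.0002", mum="1",
                dist_util_rate="0", hour_util_rate="-6", mode="car")
# walk: 0 + (-6 / (0.83 * 3600)) ~= -0.0020 utils per metre
# car:  -0.0002 + (-6 / (10 * 3600)) ~= -0.00037 utils per metre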
| 36.610119
| 117
| 0.580359
|
06b5b36b9c4c4ad55ad1f5b8c058e67a7a75989a
| 765
|
py
|
Python
|
tests/test_support.py
|
rageyboiii/test-boy
|
089309d279fc21aca28b8a68725a52f1170edd29
|
[
"CC0-1.0"
] | 5
|
2021-09-04T09:08:55.000Z
|
2022-02-08T00:28:08.000Z
|
tests/test_support.py
|
rageyboiii/test-boy
|
089309d279fc21aca28b8a68725a52f1170edd29
|
[
"CC0-1.0"
] | 2
|
2021-10-11T21:44:08.000Z
|
2021-11-05T07:41:23.000Z
|
tests/test_support.py
|
rageyboiii/test-boy
|
089309d279fc21aca28b8a68725a52f1170edd29
|
[
"CC0-1.0"
] | 3
|
2021-09-04T09:12:34.000Z
|
2021-12-16T20:02:03.000Z
|
import pytest
from typing_extensions import TYPE_CHECKING
if TYPE_CHECKING:
from .conftest import bot, channel
@pytest.mark.asyncio
async def test_support(bot: "bot", channel: "channel"):
content = "!support"
await channel.send(content)
msg = await bot.wait_for("message", check=lambda message: pytest.msg_check(message, content=content), timeout=pytest.timeout / 2)
assert msg.content == "https://discord.gg/NTRuFjU"
@pytest.mark.asyncio
async def test_donate(bot: "bot", channel: "channel"):
content = "!donate"
await channel.send(content)
msg = await bot.wait_for("message", check=lambda message: pytest.msg_check(message, content=content), timeout=pytest.timeout / 2)
assert msg.content == "https://www.patreon.com/bePatron?u=42649008"
| 31.875
| 131
| 0.746405
|
535eada3721ea18d34bdddd276d925f377d2f1fd
| 4,869
|
py
|
Python
|
CIM100/IEC61970/Base/Meas/ValueAliasSet.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
CIM100/IEC61970/Base/Meas/ValueAliasSet.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
CIM100/IEC61970/Base/Meas/ValueAliasSet.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM100.IEC61970.Base.Core.IdentifiedObject import IdentifiedObject
class ValueAliasSet(IdentifiedObject):
"""Describes the translation of a set of values into a name and is intendend to facilitate cusom translations. Each ValueAliasSet has a name, description etc. A specific Measurement may represent a discrete state like Open, Closed, Intermediate etc. This requires a translation from the MeasurementValue.value number to a string, e.g. 0->'Invalid', 1->'Open', 2->'Closed', 3->'Intermediate'. Each ValueToAlias member in ValueAliasSet.Value describe a mapping for one particular value to a name.Describes the translation of a set of values into a name and is intendend to facilitate cusom translations. Each ValueAliasSet has a name, description etc. A specific Measurement may represent a discrete state like Open, Closed, Intermediate etc. This requires a translation from the MeasurementValue.value number to a string, e.g. 0->'Invalid', 1->'Open', 2->'Closed', 3->'Intermediate'. Each ValueToAlias member in ValueAliasSet.Value describe a mapping for one particular value to a name.
"""
def __init__(self, Discretes=None, Commands=None, Values=None, *args, **kw_args):
"""Initialises a new 'ValueAliasSet' instance.
@param Discretes: The Measurements using the set for translation
        @param Commands: The Commands using the set for translation of a Control value to a name.
@param Values: The ValueToAlias mappings included in the set
"""
self._Discretes = []
self.Discretes = [] if Discretes is None else Discretes
self._Commands = []
self.Commands = [] if Commands is None else Commands
self._Values = []
self.Values = [] if Values is None else Values
super(ValueAliasSet, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["Discretes", "Commands", "Values"]
_many_refs = ["Discretes", "Commands", "Values"]
def getDiscretes(self):
"""The Measurements using the set for translation
"""
return self._Discretes
def setDiscretes(self, value):
for x in self._Discretes:
x.ValueAliasSet = None
for y in value:
y._ValueAliasSet = self
self._Discretes = value
Discretes = property(getDiscretes, setDiscretes)
def addDiscretes(self, *Discretes):
for obj in Discretes:
obj.ValueAliasSet = self
def removeDiscretes(self, *Discretes):
for obj in Discretes:
obj.ValueAliasSet = None
def getCommands(self):
"""The ValueAliasSet used for translation of a Control value to a name.
"""
return self._Commands
def setCommands(self, value):
for x in self._Commands:
x.ValueAliasSet = None
for y in value:
y._ValueAliasSet = self
self._Commands = value
Commands = property(getCommands, setCommands)
def addCommands(self, *Commands):
for obj in Commands:
obj.ValueAliasSet = self
def removeCommands(self, *Commands):
for obj in Commands:
obj.ValueAliasSet = None
def getValues(self):
"""The ValueToAlias mappings included in the set
"""
return self._Values
def setValues(self, value):
for x in self._Values:
x.ValueAliasSet = None
for y in value:
y._ValueAliasSet = self
self._Values = value
Values = property(getValues, setValues)
def addValues(self, *Values):
for obj in Values:
obj.ValueAliasSet = self
def removeValues(self, *Values):
for obj in Values:
obj.ValueAliasSet = None
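All three setters above follow PyCIM's bidirectional-reference pattern: detach every current member, then point each new member's back-reference at this set. A minimal sketch, assuming IdentifiedObject accepts a name keyword and using a stub in place of the real ValueToAlias (which wraps _ValueAliasSet in a property):

class _ValueToAliasStub(object):
    # Only the attributes the setter pattern touches.
    ValueAliasSet = None
    _ValueAliasSet = None

vas = ValueAliasSet(name='BreakerState')
alias = _ValueToAliasStub()
vas.Values = [alias]
assert alias._ValueAliasSet is vas    # back-reference written by setValues
vas.Values = []
assert alias.ValueAliasSet is None    # detached via the public attribute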
| 41.262712
| 989
| 0.687616
|
400ab6130eeaba41bf2f08e12156288e081d4bfd
| 5,113
|
py
|
Python
|
mix_assembler.py
|
crashfrog/Dispatch
|
4247cacaefcf33644c7d1f18a0553962441b2941
|
[
"Unlicense"
] | 1
|
2017-09-13T03:01:41.000Z
|
2017-09-13T03:01:41.000Z
|
mix_assembler.py
|
crashfrog/Dispatch
|
4247cacaefcf33644c7d1f18a0553962441b2941
|
[
"Unlicense"
] | null | null | null |
mix_assembler.py
|
crashfrog/Dispatch
|
4247cacaefcf33644c7d1f18a0553962441b2941
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
import subprocess
import tempfile
import shutil
import os
import re
import fasta_statter
"""
Mix, the assembly-aggregating microbial genome finishing tool.
See: "Finishing bacterial genome assemblies with Mix"
Soueidan et al. BMC Bioinformatics 2013, 14(Suppl 15):S16
http://www.biomedcentral.com/1471-2105/14/S15/S16
"""
description = "Mix, the assembly-aggregating microbial genome finishing tool. Runs everything and then merges the assemblies via contig extension."
core_load = 8 #number of cores this assembler will max out
dont_use = ("Supersembler", "Velvet", "WORST", "Mix")
supports = ('MiSeq','IonTorrent','PacBio','454')
path = "/home/justin.payne/MIX/bin/"
def assemble(data_type, ref_file=None, assembler_dict={}, callback=lambda s: None, update_callback=lambda d: None, debug=False, **kwargs):
d = {'assembler':"Mix",
'assembly_version':'Mix 1.0',
'average_coverage':'',
'num_contigs':'',
'n50':'',
'num_bases':'',
'fasta_file':'',
'lib_insert_length':'Not determined',
'matched':''
}
curr_dir = os.getcwd()
try:
working_dir = tempfile.mkdtemp()
os.chdir(working_dir)
results = list()
for assembler_name, assembler in assembler_dict.items():
if assembler_name not in dont_use:
def callback_wrapper(s): #closure to wrap callback method, if any, from assembly_dispatch
callback("{}: {}".format(assembler_name, s))
if data_type in assembler.supports:
callback("Looking for {}...".format(os.path.join(kwargs['path'], "{}.{}.fasta".format(kwargs['accession'], assembler_name))))
if not os.path.exists(os.path.join(kwargs['path'], "{}.{}.fasta".format(kwargs['accession'], assembler_name))):
try:
args = {}
args.update(kwargs)
args['path'] = working_dir
args['debug'] = debug
r = assembler.assemble(data_type=data_type, fasta_file_name="{}.{}.fasta".format(kwargs['accession'], assembler_name), callback=callback_wrapper, debug=debug, **args)
r['assembler'] = assembler_name
results.append(r)
except Exception:
import traceback
import sys
traceback.print_exc(sys.stdout)
else:
shutil.copyfile(os.path.join(kwargs['path'], "{}.{}.fasta".format(kwargs['accession'], assembler_name)), os.path.join(working_dir, "{}.{}.fasta".format(kwargs['accession'], assembler_name)))
results.append({'assembler':assembler_name, 'fasta_file':"{}.{}.fasta".format(kwargs['accession'], assembler_name)})
if (not len(results)) or (len(results) < 2 and not debug):
raise ValueError("No assembly returned results.")
callback("Mix: running preprocessing")
print subprocess.check_output("{}preprocessing.py -o {}/{}.nucmer.fasta {}".format(
path,
working_dir,
kwargs['accession'],
" ".join([r['fasta_file'] for r in results])
)
, shell=True)
callback("Mix: running NUCmer")
subprocess.check_call("""nucmer --maxmatch --banded -c 30 -l 30 -p "alignments" {working_dir}/{accession}.nucmer.fasta {working_dir}/{accession}.nucmer.fasta""".format(working_dir=working_dir, **kwargs), shell=True)
callback("Mix: running coords")
subprocess.check_call("show-coords -rcl alignments.delta > alignments.coords", shell=True)
callback("Mix: running mix")
if not os.path.exists("Mix"):
os.mkdir("Mix")
subprocess.check_call("{}Mix.py -a alignments.coords -c {}.nucmer.fasta -o Mix/ -A 500 -C 0".format(path, kwargs['accession']), shell=True)
shutil.copyfile("Mix/Mix_results_A500_C0/Mix_assembly.fasta", "{path}/{accession}.mix.fasta".format(**kwargs))
d.update(fasta_statter.stat_velvet("{path}/{accession}.mix.fasta".format(**kwargs)))
results.append(d)
quast_results = fasta_statter.quast_compare(kwargs['path'], results, callback=callback, update_callback=update_callback, gi=ref_file, debug=debug)
finally:
os.chdir(curr_dir)
if not debug:
callback("Cleaning temp dir {}...".format(working_dir))
shutil.rmtree(working_dir)
return d
if __name__ == "__main__":
#debug
import datetime
def cb(d):
print "[{}] {}".format(datetime.datetime.today().ctime(), d)
def bcb(d):
for (k, v) in d.items():
cb("{} : {}".format(k, v))
d = dict()
import assembly_dispatch
d.update(assembly_dispatch.assembler_dict)
del d['CLC']
del d['Celera']
print assemble(path='/home/justin.payne',
reads1='/shared/gn2/CFSANgenomes/CFSAN003966/CFSAN003966_01/CFSAN003966_S6_L001_R1_001.fastq',
reads2='/shared/gn2/CFSANgenomes/CFSAN003966/CFSAN003966_01/CFSAN003966_S6_L001_R2_001.fastq',
accession='CFSAN003966_01',
callback=cb,
update_callback=bcb,
k_value=144,
insert_size=500,
debug=True,
data_type = "MiSeq",
assembler_dict=d)
# print assemble(path='/home/justin.payne',
# reads1='/shared/gn2/CFSANgenomes/CFSAN001812/CFSAN001812_01/R_2012_04_12_16_25_14_user_IT1-33-BW3_Auto_IT1-33-BW3_38.fastq',
# accession='CFSAN001812_01',
# callback=cb,
# update_callback=bcb,
# k_value=144,
# insert_size=500,
# data_type='IonTorrent',
# debug=True,
# assembler_dict=d)
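The callback_wrapper closure inside assemble tags each sub-assembler's progress messages with its name; the same pattern in isolation (the assembler name and message are illustrative):

import sys

def make_prefixed_callback(name, callback):
    def wrapper(message):
        # Close over `name` so callers need no extra context.
        callback("{0}: {1}".format(name, message))
    return wrapper

cb = make_prefixed_callback("SPAdes", lambda s: sys.stdout.write(s + "\n"))
cb("running error correction")  # prints "SPAdes: running error correction"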
| 36.007042
| 217
| 0.691766
|