blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
64ef15aa9833461b1f25f505c168455a8f173109
|
551b75f52d28c0b5c8944d808a361470e2602654
|
/huaweicloud-sdk-ocr/huaweicloudsdkocr/v1/exception_handler.py
|
b46aae37213ad3737b9bf055908ce5cec95f6893
|
[
"Apache-2.0"
] |
permissive
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
9d6597ce8ab666a9a297b3d936aeb85c55cf5877
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
refs/heads/master
| 2023-05-08T21:32:31.920300
| 2021-05-26T08:54:18
| 2021-05-26T08:54:18
| 370,898,764
| 0
| 0
|
NOASSERTION
| 2021-05-26T03:50:07
| 2021-05-26T03:50:07
| null |
UTF-8
|
Python
| false
| false
| 735
|
py
|
# -*- coding: utf-8 -*-
import json
from huaweicloudsdkcore.exceptions import exceptions
class OcrError:
    """Container for an OCR API error: request id, error code and message."""

    def __init__(self, request_id=None, error_code=None, error_msg=None):
        # all three fields default to None so an "empty" error can be built
        self.request_id = request_id
        self.error_code = error_code
        self.error_msg = error_msg
def handle_exception(response_body):
    """Parse an OCR error response body into an OcrError.

    :param response_body: JSON string; any nested dict value carrying both
        "error_code" and "error_msg" keys is treated as the error payload.
    :return: OcrError built from the last matching nested dict, or an empty
        OcrError when no error payload is found.
    """
    ocr_error = OcrError()
    # iterate values directly instead of indexing the dict repeatedly;
    # isinstance (not `type(...) == dict`) also accepts dict subclasses
    for value in json.loads(response_body).values():
        if isinstance(value, dict) and "error_code" in value and "error_msg" in value:
            # NOTE(review): the request id is a hard-coded literal upstream --
            # presumably a placeholder; confirm it should not be taken from
            # the response headers instead
            ocr_error = OcrError("057ee94bd280267e2ff7c01342e6d1e6",
                                 value["error_code"], value["error_msg"])
    return ocr_error
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
942c655e58d73dde9ae99716cf595cb6834b8b8d
|
377b67bce932cfcd5959c0cb57a10c01455b9cba
|
/basic/Deep_learning/1_Multi_Linear_Regression.py
|
15d3c6f9456e081ebf24f8b914506dcee6e59bd8
|
[] |
no_license
|
llmooon/deep_learning
|
d9103ed132e6723b5c411d72fbf55ab6bb7248e7
|
62f7ff86713aa17d895a080647922b68f4498a84
|
refs/heads/master
| 2020-03-21T20:34:30.369936
| 2018-07-09T15:18:27
| 2018-07-09T15:18:27
| 139,015,678
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 934
|
py
|
import tensorflow as tf
# training data: each row is [x1, x2, y]
data=[[2,0,81],[4,4,93],[6,2,91],[8,3,97]]
x1=[x_row[0] for x_row in data]
x2=[x_row[1] for x_row in data]
y1=[y_row[2] for y_row in data]
# two random slopes and the y-intercept (seeded for reproducibility)
a1=tf.Variable(tf.random_uniform([1],0,10,dtype=tf.float64,seed=0))
a2=tf.Variable(tf.random_uniform([1],0,10,dtype=tf.float64,seed=0))
b=tf.Variable(tf.random_uniform([1],0,100,dtype=tf.float64,seed=0))
# the regression equation (TF1 graph node)
y=a1*x1+a2*x2+b
# root-mean-squared error against the observed y values
rmse=tf.sqrt(tf.reduce_mean(tf.square(y-y1)))
learning_rate=0.1
# NOTE(review): "gradient_decent" is a typo for "gradient_descent";
# renaming would be a code change, so it is only flagged here
gradient_decent = tf.train.GradientDescentOptimizer(learning_rate).minimize(rmse);
# TF1-style session: initialise variables, then run 2001 descent steps,
# logging RMSE and the fitted parameters every 100 steps
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(2001):
        sess.run(gradient_decent)
        if step % 100 == 0 :
            print("Epoch : %.f, RMSE = %.04f, 기울기 a1 = %.4f, 기울기 a2= %.4f, y 절편 b = %.4f" % (step, sess.run(rmse), sess.run(a1), sess.run(a2), sess.run(b)))
|
[
"llmooon@naver.com"
] |
llmooon@naver.com
|
59e1fe83c4d3f50f8cc31af03cd5b3fffbb70f64
|
7180d8bf660ba23c89756a74a7004bedafaf1ba6
|
/pelicanconf.py
|
b4038768d4ab3b06f163544f15a0831361d0c8c5
|
[] |
no_license
|
hkilter/pelican
|
4821dd33d9726696d55ca1b52adbcb54d88f89df
|
62254615210dfc8336c6ebd9951ae8a1ab4bea4f
|
refs/heads/master
| 2020-04-06T11:30:47.638929
| 2014-09-24T16:54:12
| 2014-09-24T16:54:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 802
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals

# Pelican static-site generator configuration for the "Oculus Minimal" site.

# site identity
AUTHOR = u'hkilter'
SITENAME = u'Oculus Minimal'
# empty during development; set to the deployed base URL for production
SITEURL = ''

TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'

# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None

# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
         ('Python.org', 'http://python.org/'),
         ('Jinja2', 'http://jinja.pocoo.org/'),
         ('You can modify those links in your config file', '#'),)

# Social widget
SOCIAL = (('You can add links in your config file', '#'),
          ('Another social link', '#'),)

# number of articles per index page
DEFAULT_PAGINATION = 10

# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
[
"kilter@gmail.com"
] |
kilter@gmail.com
|
16bf46feda3af2e97495730845b7c5be463ed608
|
305302cdbc079ea399fd3aa5600f0d72211154fe
|
/zadanie.py
|
f3b6ea638fa8401dfdc8474936b585f51b20e845
|
[] |
no_license
|
Alkagdy/dzien7
|
499f991fd0802dab702cd1d78ae0079e2121eaa0
|
056efd7139dade8101f2bb5c4550515518afe40c
|
refs/heads/master
| 2021-01-20T02:13:25.490546
| 2017-04-25T18:30:13
| 2017-04-25T18:30:13
| 89,393,721
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
def kwadrat(a):
    """Return *a* squared."""
    return pow(a, 2)
def main():
    """Print the squares of the two sample numbers."""
    for value in (45, 10):
        print(kwadrat(value))


if __name__ == '__main__':
    main()
|
[
"Alka@MacBook-Pro-Alicja.local"
] |
Alka@MacBook-Pro-Alicja.local
|
6bbb769b59ec6a5d09ace3b580e7a39e60f2cbc9
|
b06bceb8fdc24e0c890fb2201c535cb660a94f86
|
/onmt/data/lm_dataset.py
|
e3343bb25fb5adc705d16054b56fbc5b0f214cc0
|
[
"MIT"
] |
permissive
|
quanpn90/NMTGMinor
|
7f294b40763b3f586d34ef4985799b851052f2ed
|
5e1e424d0d9c2135a456e372a2ea9ee49de5bd2c
|
refs/heads/master
| 2023-08-22T14:53:31.420276
| 2023-08-21T08:26:49
| 2023-08-21T08:26:49
| 116,663,163
| 92
| 39
|
NOASSERTION
| 2023-07-31T15:07:35
| 2018-01-08T10:33:56
|
HTML
|
UTF-8
|
Python
| false
| false
| 4,860
|
py
|
from __future__ import division
import math
import torch
import torch.utils.data
from collections import defaultdict
import onmt
from onmt.data.dataset import Dataset
class LanguageModelBatch(object):
    """Minibatch wrapper for language-model training data.

    Stores the input stream, shifted targets and language ids both as
    attributes and in a name->tensor mapping consumed by the trainer.
    """

    def __init__(self, data, target, lang, **kwargs):
        self.data = data
        self.target = target
        self.lang = lang
        # unknown names resolve to None via the defaultdict factory
        self.tensors = defaultdict(lambda: None)
        self.tensors.update(target_input=data,
                            target_output=target,
                            target_lang=lang)
        self.tgt_size = target.numel()
        # language modelling has no source side
        self.src_size = 0
        self.size = target.size(1)

    def get(self, name):
        """Return the tensor registered under *name*, or None when absent."""
        return self.tensors[name] if name in self.tensors else None

    def cuda(self, fp16=False):
        """
        Send the minibatch data into GPU. Old-fashioned without the 'device' control
        :param fp16:
        :return: None
        """
        for key in list(self.tensors):
            value = self.tensors[key]
            if isinstance(value, dict):
                # nested mapping: move each contained tensor
                for inner_key, inner_value in value.items():
                    value[inner_key] = inner_value.cuda()
            elif value is None:
                continue
            else:
                # optionally downcast float tensors before the transfer
                if fp16 and value.type() == "torch.FloatTensor":
                    value = value.half()
                self.tensors[key] = value.cuda()
class LanguageModelDataset(Dataset):
    """Dataset that concatenates sentences into one token stream and cuts it
    into fixed-length BPTT minibatches for language-model training."""

    def __init__(self, data, langs, batch_size_sents=128, batch_size_words=9999,
                 seq_length=64, **kwargs):
        # concatenate all sentences in the data to get a stream
        if len(langs) <= 1:
            self.single_language = True
        else:
            self.single_language = False
        if not self.single_language:
            # NOTE(review): torch.Tensor([data[i].size(0)]) builds a 1-element
            # tensor *containing* the sentence length, which fill_ then
            # overwrites with the language id -- possibly intended to be a
            # length-sized tensor (torch.Tensor(data[i].size(0))); confirm
            self.langs = [torch.Tensor([data[i].size(0)]).fill_(langs[i]) for i in range(len(langs))]
        else:
            # NOTE(review): in the single-language case `langs` is used as-is;
            # torch.cat below assumes it is already a list of tensors
            self.langs = langs
        self.langs = torch.cat(self.langs, dim=0).long()
        self.data = torch.cat(data, dim=0).long()
        self.batch_size_sents = batch_size_sents
        self.batch_size_words = batch_size_words
        self.seq_length = seq_length
        self.bptt = seq_length
        # total token count before truncation (currently unused)
        full_length = sum([x.size(0) for x in data])
        # group samples into mini batches
        self.num_batches = 0
        self.batches = []
        self.allocate_batch()
        self.fullSize = self.num_batches
        self.cur_index = 0
        self.batchOrder = None

    def allocate_batch(self):
        """Reshape the token stream into columns and pre-slice BPTT batches."""
        # truncate the stream so it divides evenly into batch_size_sents columns
        self.n_step = self.data.size(0) // self.batch_size_sents
        self.data = self.data.narrow(0, 0, self.n_step * self.batch_size_sents)
        # Evenly divide the data across the bsz batches.
        self.data = self.data.view(self.batch_size_sents, -1).t().contiguous()
        self.batches = []
        # each batch is (input slice, input shifted by one as target, lang ids)
        for i in range(0, self.data.size(0) - 1, self.bptt):
            bptt = self.seq_length
            # last batch may be shorter than seq_length
            seq_len = min(bptt, self.data.size(0) - 1 - i)
            end_idx = i + seq_len
            beg_idx = max(0, i)
            data = self.data[beg_idx:end_idx]
            target = self.data[i + 1:i + 1 + seq_len]
            if self.single_language:
                lang = self.langs
            else:
                lang = self.langs[beg_idx:end_idx]
            self.batches.append((data, target, lang))
        self.num_batches = len(self.batches)

    # generate a new batch order (static)
    def create_order(self, random=False):
        # For language model order shouldn't be random: the stream is
        # sequential, so `random` is deliberately ignored
        self.batchOrder = torch.arange(self.num_batches).long()
        self.cur_index = 0
        return self.batchOrder

    # return the next batch according to the iterator
    # for language model
    def next(self, curriculum=True, reset=True, split_sizes=1):
        # reset iterator when the data size limit is reached (or stop when
        # reset is disabled); `curriculum` and `split_sizes` are unused here
        if self.cur_index >= self.num_batches:
            if reset:
                self.cur_index = 0
            else:
                return None
        data, target, lang = self.batches[self.cur_index]
        batch = LanguageModelBatch(data, target, lang)
        self.cur_index += 1
        # wrapped in a list to match the multi-split interface of Dataset
        return [batch]
|
[
"quanpn90@gmail.com"
] |
quanpn90@gmail.com
|
3845527f458f903a07ae00093c52603ef3adb0d0
|
fa3e41c5e9624528cf3cae20edfc5e520e16872b
|
/ResNet/convert_binaryproto_to_npy.py
|
ea504924cb4f0fdea6bcbf2495e2d1b4f6f497fc
|
[] |
no_license
|
striversist/CaffeDemo
|
91381f918f4fca5f8e015d4fc1ebcc6614838520
|
79a390e1a2cc2570230b35a67552c049ca46aa5a
|
refs/heads/master
| 2020-05-21T20:23:20.658513
| 2017-01-13T08:20:31
| 2017-01-13T08:20:31
| 61,622,310
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 364
|
py
|
import sys
import caffe
import numpy as np

# Convert a Caffe .binaryproto mean file into a NumPy .npy array.
# NOTE: Python 2 syntax (print statements); run under the Python 2
# interpreter Caffe's bindings were built for.
if len(sys.argv) != 3:
    print "Usage: python convert_protomean.py proto.mean out.npy"
    sys.exit()
blob = caffe.proto.caffe_pb2.BlobProto()
# read the raw protobuf bytes
data = open(sys.argv[1], 'rb').read()
blob.ParseFromString(data)
# blobproto_to_array presumably yields a 4-D (num, channels, h, w) array
# and arr[0] is the mean image itself -- TODO confirm against caffe.io docs
arr = np.array(caffe.io.blobproto_to_array(blob))
out = arr[0]
np.save(sys.argv[2], out)
|
[
"aarontang@tencent.com"
] |
aarontang@tencent.com
|
37eca410a0cb75cb3f171e0dc30329f710460e8a
|
d5e7b25b15c4da602496a6782bc2e0a9ab22c13e
|
/Linear-Regression-learnings/code.py
|
4347badfef85aa924009b81e5506c81393d1e5e0
|
[
"MIT"
] |
permissive
|
virbak/ga-learner-dsmp-repo
|
b525f73b5aca5f59b281f3e25a0d2fcf09e4c3e2
|
b2e5795490e786e58e4868e138e14d5dc59d4506
|
refs/heads/master
| 2020-07-29T19:44:19.052961
| 2019-12-02T17:15:47
| 2019-12-02T17:15:47
| 209,937,036
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,672
|
py
|
# --------------
import pandas as pd
import numpy as np
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# newer environments need sklearn.model_selection -- confirm the platform
# pins an old scikit-learn before changing this import
from sklearn.cross_validation import train_test_split
# code starts here
# read the dataset
# `path` is not defined in this file -- presumably injected by the hosting
# platform before execution; verify
df = pd.read_csv(path)
# print first five columns
print(df.head())
# store independent variable
X = df.drop('list_price',axis=1)
# store dependent variable
y = df['list_price']
# spliting the dataset (70/30, fixed seed for reproducibility)
X_train,X_test,y_train,y_test=train_test_split(X,y ,test_size=0.3,random_state=6)
# code ends here
# --------------
import matplotlib.pyplot as plt
# code starts here
cols = X_train.columns
# 3x3 grid of scatter plots: each of the first nine features vs. the target
fig, axes = plt.subplots(nrows=3, ncols=3, figsize=(20,20))
for i in range(0,3):
    for j in range(0,3):
        col = cols[i*3 + j]
        axes[i,j].set_title(col)
        axes[i,j].scatter(X_train[col],y_train)
        axes[i,j].set_xlabel(col)
        axes[i,j].set_ylabel('list_price')
# code ends here
plt.show()
# --------------
# Code starts here
# corr code
corr = X_train.corr()
print(corr)
# drop columns from X_train
# (presumably removed because they correlate strongly with star_rating --
# confirm against the printed correlation matrix)
X_train.drop(['play_star_rating','val_star_rating'],axis = 1 ,inplace=True)
# drop columns from X_test
X_test.drop(['play_star_rating','val_star_rating'], axis = 1 ,inplace=True)
# Code ends here
# --------------
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score
# Code starts here
# fit ordinary least squares and score on the held-out split
regressor= LinearRegression()
regressor.fit(X_train,y_train)
y_pred=regressor.predict(X_test)
mse=mean_squared_error(y_test,y_pred)
r2=r2_score(y_test, y_pred)
print(mse)
print(r2)
# Code ends here
# --------------
# Code starts here
# residual histogram: roughly symmetric around zero suggests a sensible fit
residual=y_test-y_pred
plt.hist(residual,bins=20)
# Code ends here
|
[
"virbak@users.noreply.github.com"
] |
virbak@users.noreply.github.com
|
b5df026cf1bb919ae0e7c8da3d9edbd5ea4b2029
|
2a72396c7be0327fde7931842af563e794111814
|
/code/server/LPMC_DC_L/LS-ABS_hybrid.py
|
225ac96ff3817d6272ef19a467df6166cd1149eb
|
[] |
no_license
|
transp-or/HAMABS
|
fad50f4a71a0d181e7e2d52eddd39318a907e9e0
|
e5c3a83227a24b8b7bf0ab3cd4375f945dbc04a9
|
refs/heads/master
| 2022-11-14T13:33:43.188746
| 2020-06-26T09:36:06
| 2020-06-26T09:36:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 857
|
py
|
import sys
sys.path.append("../..")
import os
import time
import json
from algos import OptAlg
from models import LPMC_DC
data_folder = '../../../data/'
if __name__ == "__main__":
    # ensure the output directory exists before the first dump
    if not os.path.exists('./results'):
        os.makedirs('./results')

    print("Train LPMC_DC_L with LS-ABS and hybrid")

    # LPMC_DC model (data file presumably covers years 12-14 -- confirm),
    # optimised with the LS-ABS algorithm using the hybrid direction
    model = LPMC_DC(data_folder, file='12_13_14.csv')
    ioa = OptAlg(alg_type='LS-ABS', direction='hybrid')

    res = {'time': [], 'LL': [], 'epochs': []}
    # 20 independent optimisation runs to average out timing noise
    for i in range(20):
        tmp = model.optimize(ioa, **{'verbose': False, 'max_epochs': 1000, 'batch': 1000})
        res['time'].append(tmp['opti_time'])
        res['LL'].append(tmp['fun'])
        res['epochs'].append(tmp['nep'])
        # results file re-written each iteration so partial results survive
        # an interrupted run; NOTE(review): indentation reconstructed --
        # confirm the dump/print belong inside the loop
        with open('results/LS-ABS_hybrid.json', 'w') as outfile:
            json.dump(res, outfile)
        print("{}/20 done!".format(i+1))
|
[
"gael.lederrey@epfl.ch"
] |
gael.lederrey@epfl.ch
|
e14c49cc36b55766fb5df36ed5783caaf593a9d3
|
8c2c364d045c10c7aeb23a14acdf4cb817f78ea4
|
/cartpage/migrations/0001_initial.py
|
1ac40cb36f83d471a1abb9630e033f4c7b1fd158
|
[] |
no_license
|
TokaMohamedNaguib/Merchandise
|
b8b4c793a14cf8dfbe34b3a46500c238dc90adf2
|
e4c92fba04fb2a9eb9ad3e6441d03f344ace6311
|
refs/heads/master
| 2020-05-20T09:23:51.374190
| 2019-05-08T17:37:32
| 2019-05-08T17:37:32
| 185,499,498
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 635
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-02-01 15:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: prefer creating a new migration over
    # hand-editing this one.

    # first migration of the cartpage app
    initial = True

    # no migrations need to run before this one
    dependencies = [
    ]

    # creates the `cartproducts` table: auto id, name (<=250 chars), price
    operations = [
        migrations.CreateModel(
            name='cartproducts',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('price', models.PositiveIntegerField(verbose_name='price')),
            ],
        ),
    ]
|
[
"tokanaguib96@gmail.com"
] |
tokanaguib96@gmail.com
|
69045b2f94373b3f62d35cf786d010a5b2b9e5cf
|
75816efa9d732ba18119125d7a3ad6739b60e9a5
|
/Projects_2019/CodyStClair/phono.py
|
a22f349eacd854aa34c23a9245c2b49a6a598191
|
[] |
no_license
|
stonybrook-lin537-f19/main
|
4ac17bb273d95cb9016ed2859b7e7f5c581ca853
|
d4c7b34c38c04e51e888666ff9cda2a9c3e7d22c
|
refs/heads/master
| 2020-07-01T06:27:51.002616
| 2019-12-13T16:12:11
| 2019-12-13T16:12:11
| 201,075,015
| 5
| 11
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,594
|
py
|
import nltk
import re
# regex for ARPAbet vowel phones
# NOTE(review): [...] is a character class matching a SINGLE character, so
# 'A[AEHOW(XR?)Y]' matches 'A' plus any one of those characters -- the
# '(XR?)' part puts literal '(', ')', '?', 'X', 'R' in the class rather than
# matching the two-letter code 'AXR'; confirm this is intended
vowels = r'(A[AEHOW(XR?)Y]|E[HRY]|I[HXY]|O[WY]|U[HWX])'
# a phone carrying primary stress ends in the digit '1'
stress = r'1$'
# CMU pronouncing dictionary: list of (word, [phones]) pairs
pronunciations = nltk.corpus.cmudict.entries()
# helpers over a single phone / a phone list
_is_vowel = lambda x: re.search(vowels, x)
_count_vowels = lambda x: len(list(filter(_is_vowel, x)))
# syllable count == number of vowel phones in the pronunciation
syllable_counts = {item[0]: _count_vowels(item[1]) for item in pronunciations}
_is_stress = lambda x: re.search(stress, x) != None
# index of the first primary-stressed phone; raises ValueError when absent
_index_of_stress = lambda x: list(map(_is_stress, x)).index(True)
# word -> tail of the pronunciation from the main stress onward ("the rhyme"),
# or None for words with no primary stress
rhymes = {}
for item in pronunciations:
    try:
        rhymes[item[0]] = tuple(item[1][_index_of_stress(item[1]):])
    except ValueError:
        rhymes[item[0]] = None
def syllable_count(word):
    """Returns the number of syllables in a word.

    Arguments:
        word (str): The word to count the syllables of.

    Return (int):
        The number of syllables in word.

    Raises:
        KeyError: If the given word's pronunciation is not known.
    """
    # lower-case once and probe the mapping once, instead of computing
    # word.lower() twice and looking the key up twice
    key = word.lower()
    if key in syllable_counts:
        return syllable_counts[key]
    raise KeyError("No pronunciation found for: " + word)
def word_to_rhyme(word):
    """Returns the part of a word's pronunciation that determines the rhyme.

    Arguments:
        word (str): The word to find the rhyme of.

    Returns (tuple):
        A tuple representing the pronunciation of the word's rhyme, or None
        if the word has no main stress (e.g. determiners)

    Raises:
        KeyError: If the given word's pronunciation is not known.
    """
    # lower-case once and probe the mapping once (the original computed
    # word.lower() twice and looked the key up twice); membership -- not
    # .get() -- is required because a stored None is a legitimate result
    key = word.lower()
    if key in rhymes:
        return rhymes[key]
    raise KeyError("No pronunciation found for: " + word)
|
[
"loisetoil@gmail.com"
] |
loisetoil@gmail.com
|
a7b621632a521a984155e4cdce2f7616738dc050
|
17ac5164a947160463d25a4d9c20c18bdc6e6307
|
/pyUSID/io/hdf_utils/base.py
|
f8841f30d20b7110fafef3086079215dce1a0106
|
[
"MIT"
] |
permissive
|
pycroscopy/pyUSID
|
ad398425ba4763cc11e5d8e4c4f3874d8d1bf85c
|
253181a7b63deeb8d80ef2c885b9d09a57ae592b
|
refs/heads/main
| 2023-09-03T16:36:52.442100
| 2023-08-24T16:04:38
| 2023-08-24T16:04:38
| 138,171,750
| 27
| 19
|
MIT
| 2023-08-18T21:21:14
| 2018-06-21T13:08:33
|
Python
|
UTF-8
|
Python
| false
| false
| 3,166
|
py
|
# -*- coding: utf-8 -*-
"""
Simple yet handy HDF5 utilities, independent of the USID model
Created on Tue Nov 3 21:14:25 2015
@author: Suhas Somnath, Chris Smith
"""
from __future__ import division, print_function, absolute_import, unicode_literals
import sys
import h5py
from sidpy.hdf import hdf_utils as hut
from ...__version__ import version as py_usid_version
if sys.version_info.major == 3:
unicode = str
def print_tree(parent, rel_paths=False, main_dsets_only=False):
    """
    Simple function to recursively print the contents of an hdf5 group

    Parameters
    ----------
    parent : :class:`h5py.Group`
        HDF5 (sub-)tree to print
    rel_paths : bool, optional. Default = False
        True - prints the relative paths for all elements.
        False - prints a tree-like structure with only the element names
    main_dsets_only : bool, optional. default=False
        True - prints only groups and Main datasets
        False - prints all dataset and group objects
    """
    # TODO: Leverage copy in sidpy.hdf.hdf_utils
    if not isinstance(parent, (h5py.File, h5py.Group)):
        raise TypeError('Provided object is not a h5py.File or h5py.Group '
                        'object')

    def __print(name, obj):
        # visititems callback: `name` is the path relative to `parent`
        show = True
        if main_dsets_only:
            show = False
            # imported lazily here, presumably to avoid a circular import
            from .simple import check_if_main
            if check_if_main(obj) or isinstance(obj, h5py.Group):
                show = True
        if not show:
            return
        if rel_paths:
            print(name)
        else:
            # indentation depth == nesting level (count of '/' separators)
            levels = name.count('/')
            curr_name = name[name.rfind('/') + 1:]
            print(levels * '  ' + '├ ' + curr_name)
            if isinstance(obj, h5py.Group):
                # underline group names one level deeper
                print((levels + 1) * '  ' + len(curr_name) * '-')

    # root name first, then every descendant via h5py's recursive visitor
    print(parent.name)
    parent.visititems(__print)
def get_h5_obj_refs(obj_names, h5_refs):
    """
    Given a list of H5 references and a list of names,
    this method returns H5 objects corresponding to the names

    Parameters
    ----------
    obj_names : string or List of strings
        names of target h5py objects
    h5_refs : H5 object reference or List of H5 object references
        list containing the target reference

    Returns
    -------
    found_objects : List of HDF5 dataset references
        Corresponding references
    """
    # imported here rather than at module level (presumably circular import)
    from ..usi_data import USIDataset

    matches = []
    # delegate the actual lookup to sidpy, then upgrade each hit to a
    # USIDataset when possible, falling back to the raw h5py object
    for h5_object in hut.get_h5_obj_refs(obj_names, h5_refs):
        try:
            upgraded = USIDataset(h5_object)
        except TypeError:
            matches.append(h5_object)
        else:
            matches.append(upgraded)
    return matches
def write_book_keeping_attrs(h5_obj):
    """
    Writes basic book-keeping and posterity related attributes to groups
    created in pyUSID such as machine id, pyUSID version, timestamp.

    Parameters
    ----------
    h5_obj : :class:`h5py.Dataset`, :class:`h5py.Group`, or :class:`h5py.File`
        Object to which basic book-keeping attributes need to be written
    """
    # sidpy writes the generic attributes; the pyUSID version is added on top
    hut.write_book_keeping_attrs(h5_obj)
    version_attrs = {'pyUSID_version': py_usid_version}
    hut.write_simple_attrs(h5_obj, version_attrs)
|
[
"14300780+ssomnath@users.noreply.github.com"
] |
14300780+ssomnath@users.noreply.github.com
|
ec80ffe6c08e55853f0b4eafcb7891a4b609dc1e
|
f52c121da03c427a7b1a4f70d6c6d379869f338e
|
/scripts/runExpression.py
|
cbba308a445e21f5d0248347241a83c61811c79f
|
[] |
no_license
|
orianna14/cgat
|
7975e5cb8d907f2c83236a6e926d7f4230eb6788
|
7494d17f0a9e3f2333483426aef2c163f3fee0b1
|
refs/heads/master
| 2021-01-22T11:11:24.509030
| 2015-01-09T11:16:25
| 2015-01-09T11:16:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,672
|
py
|
'''runExpression.py - wrap various differential expression tools
=============================================================
:Author: Andreas Heger
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
This script provides a convenience wrapper for differential expression
analysis for a variety of methods.
The aim of this script is to provide a common tabular output format
that is consistent between the different methods.
The script will call the selected method and output a variety of
diagnostic plots. Generally, the analysis aims to follow published
workflows for the individual method together with outputting diagnostic
plots to spot any problems. The script will also preprocess count
data to apply some common filtering methods.
The methods implemented are:
deseq
Application of DESeq
edger
Application of EdgeR
ttest
Application of Welch's ttest to FPKM values
mock
A mock analysis. No differential analysis is performed,
but fold changes are computed and output.
In addition, the script can process count data to output
modified counts or summary statistics.
summary
Output summary statistics from a counts table.
dump
Output the counts table after applying filtering.
spike
Output a counts table with in-silico spike-ins. The spike-ins
can be used to empirically check the power of any of the testing
methods.
Usage
-----
Input
+++++
The input to this script is a table with of measurements reflecting
expression levels. For the tag counting methods such as DESeq or
EdgeR, these should be the raw counts, while for other methods such as
ttest, these can be normalized values such as FPKM values.
The script further requires a design table describing the tests to
be performed. The design table has four columns::
track include group pair
CW-CD14-R1 0 CD14 1
CW-CD14-R2 0 CD14 1
CW-CD14-R3 1 CD14 1
CW-CD4-R1 1 CD4 1
FM-CD14-R1 1 CD14 2
FM-CD4-R2 0 CD4 2
FM-CD4-R3 0 CD4 2
FM-CD4-R4 0 CD4 2
track
name of track - should correspond to column header in the counts
table.
include
flag to indicate whether or not to include this data
group
group indicator - experimental group
pair
pair that sample belongs to (for paired tests) - set to 0 if the
design is not paired.
Output
++++++
The script outputs a table with the following columns:
+------------------+------------------------------------------------------+
|*Column name* |*Content* |
+------------------+------------------------------------------------------+
|test_id |Name of the test (gene name, ... |
+------------------+------------------------------------------------------+
|treatment_name |Name of the treatment condition |
+------------------+------------------------------------------------------+
|treatment_mean |Estimated expression value for treatment |
+------------------+------------------------------------------------------+
|treatment_std |Standard deviation |
+------------------+------------------------------------------------------+
|control_name |Name of the control condition |
+------------------+------------------------------------------------------+
|control_mean |Estimated expression value for control |
+------------------+------------------------------------------------------+
|control_std |Standard deviation |
+------------------+------------------------------------------------------+
|pvalue |The p value for rejecting the null hypothesis |
+------------------+------------------------------------------------------+
|qvalue |Multiple testing correction |
+------------------+------------------------------------------------------+
|l2fold |log2 foldchange of treatment/control |
+------------------+------------------------------------------------------+
|transformed_l2fold|a transformed log2 foldchange value. |
+------------------+------------------------------------------------------+
|fold |foldchange of treatment/control |
+------------------+------------------------------------------------------+
|significant |Flag, 1 if test called significant according to FDR |
+------------------+------------------------------------------------------+
|status |test status (OK|FAIL) |
+------------------+------------------------------------------------------+
Additional plots and tables are generated and method specific.
Command line options
--------------------
'''
import sys
import os
from rpy2.robjects import r as R
import rpy2.robjects.numpy2ri
try:
import CGAT.Experiment as E
import CGAT.Pipeline as P
import CGAT.Expression as Expression
except ImportError:
import Experiment as E
import Pipeline as P
import Expression
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.

    :param argv: command line arguments; defaults to sys.argv.
    """
    if not argv:
        argv = sys.argv
    # setup command line parser
    parser = E.OptionParser(version="%prog version: $Id$",
                            usage=globals()["__doc__"])
    parser.add_option("-t", "--tags-tsv-file", dest="input_filename_tags",
                      type="string",
                      help="input file with tag counts [default=%default].")
    parser.add_option("-d", "--design-tsv-file", dest="input_filename_design",
                      type="string",
                      help="input file with experimental design "
                      "[default=%default].")
    parser.add_option("-o", "--outfile", dest="output_filename", type="string",
                      help="output filename [default=%default].")
    parser.add_option("-m", "--method", dest="method", type="choice",
                      choices=(
                          "deseq", "edger",
                          "ttest",
                          "mock", "summary", "dump", "spike",
                          "plottagstats"),
                      help="differential expression method to apply "
                      "[default=%default].")
    parser.add_option("--deseq-dispersion-method",
                      dest="deseq_dispersion_method",
                      type="choice",
                      choices=("pooled", "per-condition", "blind"),
                      help="dispersion method for deseq [default=%default].")
    parser.add_option("--deseq-fit-type", dest="deseq_fit_type", type="choice",
                      choices=("parametric", "local"),
                      help="fit type for deseq [default=%default].")
    parser.add_option("--deseq-sharing-mode",
                      dest="deseq_sharing_mode",
                      type="choice",
                      choices=("maximum", "fit-only", "gene-est-only"),
                      help="deseq sharing mode [default=%default].")
    parser.add_option(
        "--edger-dispersion",
        dest="edger_dispersion", type="float",
        help="dispersion value for edgeR if there are no replicates "
        "[default=%default].")
    parser.add_option("-f", "--fdr", dest="fdr", type="float",
                      help="fdr to apply [default=%default].")
    # BUGFIX: "analyis" -> "analysis" in the help text
    parser.add_option("-p", "--pseudocounts", dest="pseudo_counts",
                      type="float",
                      help="pseudocounts to add for mock analysis "
                      "[default=%default].")
    parser.add_option("-R", "--output-R-code", dest="save_r_environment",
                      type="string",
                      help="save R environment [default=%default].")
    # BUGFIX: "[default=$default]" -> "[default=%default]" so optparse
    # actually interpolates the default value into the help text
    parser.add_option("-r", "--reference-group", dest="ref_group",
                      type="string",
                      help="Group to use as reference to compute "
                      "fold changes against [default=%default]")
    parser.add_option("--filter-min-counts-per-row",
                      dest="filter_min_counts_per_row",
                      type="int",
                      help="remove rows with less than this "
                      "number of counts in total [default=%default].")
    # BUGFIX: "numer" -> "number" in the help text
    parser.add_option("--filter-min-counts-per-sample",
                      dest="filter_min_counts_per_sample",
                      type="int",
                      help="remove samples with a maximum count per sample of "
                      "less than this number [default=%default].")
    parser.add_option("--filter-percentile-rowsums",
                      dest="filter_percentile_rowsums",
                      type="int",
                      help="remove percent of rows with "
                      "lowest total counts [default=%default].")
    parser.set_defaults(
        input_filename_tags="-",
        input_filename_design=None,
        output_filename=sys.stdout,
        method="deseq",
        fdr=0.1,
        deseq_dispersion_method="pooled",
        deseq_fit_type="parametric",
        deseq_sharing_mode="maximum",
        edger_dispersion=0.4,
        ref_group=None,
        save_r_environment=None,
        filter_min_counts_per_row=1,
        filter_min_counts_per_sample=10,
        filter_percentile_rowsums=0,
        pseudo_counts=0,
        spike_foldchange_max=4.0,
        spike_expression_max=5.0,
        spike_expression_bin_width=0.5,
        spike_foldchange_bin_width=0.5,
        spike_max_counts_per_bin=50,
    )
    # add common options (-h/--help, ...) and parse command line
    (options, args) = E.Start(parser, argv=argv, add_output_options=True)
    # tags arriving on stdin are spooled to a temporary file so downstream
    # code can operate on a real filename
    if options.input_filename_tags == "-":
        fh = P.getTempFile()
        fh.write("".join([x for x in options.stdin]))
        fh.close()
        options.input_filename_tags = fh.name
    else:
        fh = None
    # load tag data and filter
    if options.method in ("deseq", "edger", "mock", "ttest"):
        assert options.input_filename_tags and os.path.exists(
            options.input_filename_tags)
        assert options.input_filename_design and os.path.exists(
            options.input_filename_design)
        Expression.loadTagData(options.input_filename_tags,
                               options.input_filename_design)
        nobservations, nsamples = Expression.filterTagData(
            filter_min_counts_per_row=options.filter_min_counts_per_row,
            filter_min_counts_per_sample=options.filter_min_counts_per_sample,
            filter_percentile_rowsums=options.filter_percentile_rowsums)
        if nobservations == 0:
            E.warn("no observations - no output")
            return
        if nsamples == 0:
            E.warn("no samples remain after filtering - no output")
            return
        # countsTable lives in the R session populated by loadTagData
        sample_names = R('''colnames(countsTable)''')
        E.info("%i samples to test at %i observations: %s" %
               (nsamples, nobservations,
                ",".join(sample_names)))
    # dispatch to the selected method; all R failures funnel through the
    # except clause below so the R environment can be saved for debugging
    try:
        if options.method == "deseq":
            Expression.runDESeq(
                outfile=options.output_filename,
                outfile_prefix=options.output_filename_pattern,
                fdr=options.fdr,
                dispersion_method=options.deseq_dispersion_method,
                fit_type=options.deseq_fit_type,
                sharing_mode=options.deseq_sharing_mode,
                ref_group=options.ref_group,
            )
        elif options.method == "edger":
            Expression.runEdgeR(
                outfile=options.output_filename,
                outfile_prefix=options.output_filename_pattern,
                fdr=options.fdr,
                ref_group=options.ref_group,
                dispersion=options.edger_dispersion)
        elif options.method == "mock":
            Expression.runMockAnalysis(
                outfile=options.output_filename,
                outfile_prefix=options.output_filename_pattern,
                ref_group=options.ref_group,
                pseudo_counts=options.pseudo_counts,
            )
        elif options.method == "summary":
            Expression.outputTagSummary(
                options.input_filename_tags,
                options.stdout,
                options.output_filename_pattern,
                filename_design=options.input_filename_design
            )
        elif options.method == "dump":
            assert options.input_filename_tags and os.path.exists(
                options.input_filename_tags)
            Expression.dumpTagData(options.input_filename_tags,
                                   options.input_filename_design,
                                   outfile=options.stdout)
        elif options.method == "plottagstats":
            assert options.input_filename_tags and os.path.exists(
                options.input_filename_tags)
            Expression.plotTagStats(
                options.input_filename_tags,
                options.input_filename_design,
                outfile_prefix=options.output_filename_pattern)
        elif options.method == "spike":
            Expression.outputSpikeIns(
                options.input_filename_tags,
                options.stdout,
                options.output_filename_pattern,
                filename_design=options.input_filename_design,
                foldchange_max=options.spike_foldchange_max,
                expression_max=options.spike_expression_max,
                max_counts_per_bin=options.spike_max_counts_per_bin,
                expression_bin_width=options.spike_expression_bin_width,
                foldchange_bin_width=options.spike_foldchange_bin_width,
            )
        elif options.method == "ttest":
            Expression.runTTest(
                outfile=options.output_filename,
                outfile_prefix=options.output_filename_pattern,
                fdr=options.fdr)
    except rpy2.rinterface.RRuntimeError:
        # on R errors, optionally snapshot the R workspace for post-mortem
        # debugging, then re-raise
        if options.save_r_environment:
            E.info("saving R image to %s" % options.save_r_environment)
            R['save.image'](options.save_r_environment)
        raise
    # remove the stdin spool file, if one was created
    if fh and os.path.exists(fh.name):
        os.unlink(fh.name)
    if options.save_r_environment:
        R['save.image'](options.save_r_environment)
    E.Stop()
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
[
"andreas.heger@gmail.com"
] |
andreas.heger@gmail.com
|
27ee7694a64807b71ffa51190abca70a4d5290d7
|
c6674aada63ecb0c8bbb2762b48b0c8ab37f1997
|
/31/app.py
|
86b31253462ad3b9cfc826a0f0f7f2b3d0c12383
|
[] |
no_license
|
alexdmejias/python-the-hard-way
|
bbd8c5c6ac355e30fcc4f2c2dbed65b66beb73f4
|
538ca9070503bbbbe776d028734b392d97c1b0b1
|
refs/heads/master
| 2021-01-21T03:44:40.640881
| 2016-04-06T03:34:46
| 2016-04-06T03:34:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 994
|
py
|
print "you enter a dark room with two doors. Do you go through door #1 or door #2?"
door = raw_input("> ")
if door == "1":
print "There is a giant bear here eating a cheese cake. What do you do?"
print "1. Take the cake"
print "2. Scream at the bear"
bear = raw_input("> ")
if bear == "1":
print "The bear eats your face off. Good job!"
elif bear == "2":
print "The bear eats your legs off. Good job!"
else:
print "well, doing %r is probably better. Bear runs away" % r
elif door == "2":
print "You stare into the endless abyss at Cthulhu's retina."
print "1. Blueberries"
print "2. Yellow jacket clothspins."
print "3. Understanding revolvers yelling melodies"
insanity = raw_input("> ")
if insanity == "1" or insanity == "2":
print "Your body survives powered by a mind of jello. Good job."
else:
print "the insanity rots your eyes into a pool of muck. Good job"
else:
print "You stumble around and fall off a knife and die. Good job"
|
[
"alexdmejias@gmail.com"
] |
alexdmejias@gmail.com
|
580f60e42a3fda3eec3e6fe970d5a6a224200bab
|
5ec0d77eeb27171fa718614a44382493a9c04967
|
/BccCloudFunction.py
|
1cdd5e02ca1ad716194b0f4f9afa362c840d981c
|
[] |
no_license
|
NiklasAbrahamsson/BacteriaColonyCounter
|
ecc635343a70bcb80e5be92c35dba540a69118dd
|
3e48d3ee034fcf75787ec92be9749e8f9b88733c
|
refs/heads/main
| 2023-03-05T08:43:00.118364
| 2021-02-20T14:51:11
| 2021-02-20T14:51:11
| 340,669,860
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,982
|
py
|
import os
import tempfile
from werkzeug.utils import secure_filename
from cv2 import cv2
def get_file_path(filename):
    """Map an upload's name to a sanitized path inside the system temp dir.

    Note: tempfile.gettempdir() points to an in-memory file system on GCF,
    so any file written there must fit in the instance's memory.
    """
    print('filename', filename)
    safe_name = secure_filename(filename)
    return os.path.join(tempfile.gettempdir(), safe_name)
def bcc(request):
    """Count bacteria-colony dots in the first uploaded image.

    Args:
        request (flask.Request): HTTP request object carrying a multipart
            form with at least one image file.
    Returns:
        A string "Dots number: N" where N is the number of contours whose
        area falls in the (s1, s2) range.
    """
    # This code will process each non-file field in the form
    fields = {}
    data = request.form.to_dict()
    for field in data:
        fields[field] = data[field]
        print('Processed field: %s' % field)
    # This code will process each file uploaded
    files = request.files.to_dict()
    print('files', files)
    saved_paths = []
    for file_name, file in files.items():
        # Note: GCF may not keep files saved locally between invocations.
        # If you want to preserve the uploaded files, you should save them
        # to another location (such as a Cloud Storage bucket).
        path = get_file_path(file_name)
        file.save(path)
        saved_paths.append(path)
        print('Processed file: %s' % file_name)
    # BUG FIX: `files` is a dict keyed by field name, so `files[0]` raised
    # KeyError (and was never a file path anyway) -- read the first saved
    # image from disk instead.
    gray = cv2.imread(saved_paths[0], 0)
    ## threshold
    th, threshed = cv2.threshold(gray, 100, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)
    ## findcontours
    cnts = cv2.findContours(threshed, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)[-2]
    ## filter by area (keep contours whose area is strictly between s1 and s2)
    s1 = 3
    s2 = 20
    xcnts = [cnt for cnt in cnts if s1 < cv2.contourArea(cnt) < s2]
    # Clear temporary directory
    for file_path in saved_paths:
        os.remove(file_path)
    return "Dots number: {}".format(len(xcnts))
|
[
"Niklas.abrahamsson@me.com"
] |
Niklas.abrahamsson@me.com
|
6a868cb40da1999b1282798f5152a42ade897864
|
c39886f291e7dd4149e52e74b8b2186f331c43ab
|
/model.py
|
968c9f515c912e9f1c8a43de18292aba71df6ecc
|
[] |
no_license
|
cckl/hb-ratings-lab
|
5788337fab150cd2de5dbf84de8ba1c8fe2dd9db
|
57883624714977fa04b6ae45562b89b6de5e04d7
|
refs/heads/master
| 2020-04-21T05:30:45.890975
| 2019-02-08T01:55:39
| 2019-02-08T01:55:39
| 169,343,071
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,008
|
py
|
"""Models and database functions for Ratings project."""
from flask_sqlalchemy import SQLAlchemy
import correlation
# This is the connection to the PostgreSQL database; we're getting this through
# the Flask-SQLAlchemy helper library. On this, we can find the `session`
# object, where we do most of our interactions (like committing, etc.)
db = SQLAlchemy()
##############################################################################
# Model definitions
class User(db.Model):
    """User of ratings website."""

    __tablename__ = "users"

    # Surrogate primary key.
    user_id = db.Column(db.Integer, autoincrement=True, primary_key=True)
    # Login credentials; nullable because users may be seeded from the
    # MovieLens data set without an email/password.
    email = db.Column(db.String(64), nullable=True)
    password = db.Column(db.String(64), nullable=True)
    # Optional demographic info used for rating predictions.
    age = db.Column(db.Integer, nullable=True)
    zipcode = db.Column(db.String(15), nullable=True)

    def __repr__(self):
        """Provide helpful representation when printed."""
        return f"<User user_id={self.user_id} email={self.email}>"
class Movie(db.Model):
    """Movie of ratings website."""

    __tablename__ = "movies"

    # Surrogate primary key.
    movie_id = db.Column(db.Integer, autoincrement=True, primary_key=True)
    title = db.Column(db.String(256))
    # Original theatrical release date.
    released_at = db.Column(db.DateTime)
    imdb_url = db.Column(db.String(256))

    def __repr__(self):
        """Provide helpful representation when printed."""
        return f"<Movie movie_id={self.movie_id} title={self.title}>"
class Rating(db.Model):
    """Rating of ratings website.

    Association between a User and a Movie carrying the user's score.
    """

    __tablename__ = "ratings"

    rating_id = db.Column(db.Integer, autoincrement=True, primary_key=True)
    movie_id = db.Column(db.Integer, db.ForeignKey('movies.movie_id'))
    user_id = db.Column(db.Integer, db.ForeignKey('users.user_id'))
    # Integer score the user gave the movie.
    score = db.Column(db.Integer)
    # Define relationship to user (user.ratings lists this user's ratings).
    user = db.relationship("User",
                           backref=db.backref("ratings",
                                              order_by=rating_id))
    # Define relationship to movie (movie.ratings lists this movie's ratings).
    movie = db.relationship("Movie",
                            backref=db.backref("ratings",
                                               order_by=rating_id))

    def __repr__(self):
        """Provide helpful representation when printed."""
        return f"<Rating rating_id={self.rating_id} movie_id={self.movie_id} user_id={self.user_id} score={self.score}>"
##############################################################################
# Helper functions
def connect_to_db(app):
    """Connect the database to our Flask app.

    Binds the module-level SQLAlchemy instance `db` to *app* so models can
    be used once the app context is available.
    """
    # Configure to use our PostgreSQL database
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///ratings'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.app = app
    db.init_app(app)
if __name__ == "__main__":
    # As a convenience, if we run this module interactively, it will leave
    # you in a state of being able to work with the database directly.
    from server import app
    connect_to_db(app)
    print("Connected to DB.")
|
[
"no-reply@hackbrightacademy.com"
] |
no-reply@hackbrightacademy.com
|
3de41543de12f2bd32a9a726e8ecfa6c661caaf9
|
1c8da8bd2c23a8388950fc098f71c5498414d57b
|
/tweet_task/core/helper.py
|
294ed7c446d71c1dfaf3b07b6008fe9a53becfd8
|
[] |
no_license
|
rohitchaudhary95/twitter_es
|
0785d5a73926fc78f66ec759d380c72780cd923a
|
47843692a709fc04e6f543534ff16b3be2e1b003
|
refs/heads/master
| 2022-12-04T08:47:57.118902
| 2020-08-14T20:42:55
| 2020-08-14T20:42:55
| 107,984,097
| 1
| 1
| null | 2020-08-14T20:51:27
| 2017-10-23T13:12:59
|
Python
|
UTF-8
|
Python
| false
| false
| 513
|
py
|
from core import es
from constants import ELASTICSEARCH_INDEX_NAME, PAGE_SIZE
def paginated_results(query, page_no):
    """Run *query* against the tweets index and return one page of hits.

    Args:
        query: Elasticsearch query body (dict).
        page_no: 1-based page number.
    Returns:
        List of hit documents for the requested page; empty list when the
        page is out of range.
    """
    result = es.search(index=ELASTICSEARCH_INDEX_NAME, body=query)
    total = result['hits']['total']
    hits = result['hits']['hits']
    start = (page_no - 1) * PAGE_SIZE
    if start > total:
        return []
    # ROBUSTNESS FIX: the original indexed hits[i] for i in range(start, end)
    # after clamping `end` only to `total`; Elasticsearch returns at most
    # `size` hits, so total > len(hits) caused an IndexError.  Slicing
    # clamps to the hits actually returned.
    end = min(start + PAGE_SIZE, total)
    return hits[start:end]
|
[
"rohit.chaudhary@kuliza.com"
] |
rohit.chaudhary@kuliza.com
|
bb386ecc26291d0e4bbc145749b90e72d385c0dd
|
629b11b0539a85bec065cb09ca510cc4be986c21
|
/src/maincv/urls.py
|
90574c04cfa8ad622266740e72ddbe9488dd7c86
|
[] |
no_license
|
toufikswar/toufikswarcv
|
5e18d4ce5ec550b391597e61c9f2685d9d55d6f4
|
205b3566a45561eeede19ffa13d77129de7cd2a2
|
refs/heads/master
| 2020-03-18T07:00:59.022264
| 2019-01-10T14:11:36
| 2019-01-10T14:11:36
| 134,427,364
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 716
|
py
|
from django.urls import path, include
from django.conf import settings
from . import views
# needed for include function to work in mycv/url.py for the namespace
app_name = "cv"
urlpatterns = [
    #path('<int:id>/view/', views.home, name="home"),
    #path('<int:id>/view/', views.home, name="home"),
    #path('<int:id>/edit/<str:section>/',views.edit , name="edit"),
    path('',views.home , name="home"),
    path('cv/', views.cv, name="mycv"),
]
# Optionally mount the rosetta translation UI when the app is installed.
if 'rosetta' in settings.INSTALLED_APPS:
    urlpatterns += [
        path('rosetta/', include('rosetta.urls')),
    ]
# Optionally mount the summernote rich-text editor endpoints.
if 'django_summernote' in settings.INSTALLED_APPS:
    urlpatterns += [
        path('summernote/', include('django_summernote.urls')),
    ]
|
[
"toufik.swar@gmail.com"
] |
toufik.swar@gmail.com
|
1f307567c2ceee93c239c6f4898111464afc3dc0
|
d1855b05cf7ed0df54a9b5f50ea3da6b3ae4ca48
|
/pyautoguidroid-opensourceversion.py
|
760b930d88f49493ccdc69ef9c49a51c1de8dd76
|
[] |
no_license
|
material666/link-crawler-get-all-link-from-the-link-chinese-version
|
fecceee641a0b35f421e819f92036709fddabacb
|
b419320fc0bad39bb883e67ee4c63f326982979e
|
refs/heads/master
| 2020-03-17T20:23:03.335582
| 2018-06-26T19:28:31
| 2018-06-26T19:28:31
| 133,906,813
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 38,319
|
py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#作者:material
#qq412905523
#邮箱:412905523@qq.com
#author:material
#qq412905523
#mail: 412905523@qq.com
#次自动化最大优势在于可以多设备操作 想做一个安静的python版安卓sikuli。。(至于为啥不说按键精灵) (python应该可以打败按键) 不过现在这个还是比按键垃圾的
#源码有人想改下木有关系
#重点是大图识小图(现在是包含关系)(不是包含的我还没开源。)
#还有就是对接高级的图像算法 比如找怪物 不管怪物图像咋变 就是把怪物给找出来 这种图像识别算法才叫高级啦。这样的肯定秒杀按键精灵
import multiprocessing
import platform
import os, sys
import threading
import webbrowser
import subprocess
import random
import tempfile
import os
import re
import threading
import time
import xml.etree.cElementTree as ET
#import appium
from multiprocessing import Process
import subprocess
import sys
import PIL
from PIL import Image
#from PIL import ImageOps
import aircv as ac
import platform
# Pick the command-line text-filter tool matching the host OS; it is
# interpolated into the adb shell pipelines below.
system = platform.system()
# BUG FIX: the original compared with `is`, i.e. object identity against a
# string literal, which is implementation-dependent -- compare by value.
if system == "Windows":
    find_util = "findstr"
else:
    find_util = "grep"
import pyperclip
import os, sys
#作者qq:412905523
#python版按键精灵
#还不想开源....不要问为啥、
class Element(object):
"""
通过元素定位,需要Android 4.0以上
"""
    def __init__(self,boolean=False):
        """
        Restart the adb server, enumerate attached devices, and cache the
        temp directory and the digit-matching pattern used to parse bounds.
        When *boolean* is True, print detailed info for every device.
        """
        #self.echo()
        os.popen("adb kill-server")
        os.popen("adb start-server")
        # NOTE: the loop target is an attribute, so after the loop
        # self.Devicename holds the LAST listed device serial.
        for self.Devicename in self.numberd():
            print("Elment: "+ self.Devicename)
            if boolean == True:
                self.echo(self.Devicename)
            else:
                pass
        # Wait for the last enumerated device to come online.
        os.popen("adb -s "+ self.Devicename +" wait-for-device")
        print("start:-------------------------------------------------------------:")
        # Both temp dirs point at the same system temp directory; the UI
        # dump XML files are pulled here.
        self.tempFile = tempfile.gettempdir()
        self.tempFiledevice = tempfile.gettempdir()
        # Matches the integers inside a bounds string like "[0,0][1080,1920]".
        self.pattern = re.compile(r"\d+")
        #self.numberd()
    def ShowDevice(self):
        """Print the serial of every attached adb device.

        Side effect: the loop target deliberately rebinds self.Devicename,
        leaving it set to the last listed serial.
        """
        for self.Devicename in self.numberd():
            print("Elment: " + self.Devicename)
def numberd(self):
numberlist = ','.join(os.popen(u"adb devices").readlines()[1:-1]).replace("\tdevice", "").replace("\n",
"").split(
',')
return numberlist
#一些常用操作
#这些还没尝试过 看看有没bug 有的化联系作者qq412905523 我修复一下
def quitApp(self, packageName,device= None):
"""
退出app,类似于kill掉进程
usage: quitApp("com.android.settings")
"""
if device ==None:
os.popen("adb shell am force-stop %s" % packageName)
else:
os.popen("adb -s " + device + " shell am force-stop %s" % packageName)
    def getFocusedPackageAndActivity(self,device = None):
        """
        Return the package and activity of the focused window as a string
        formatted "packageName/activityName".  Relies on the module-level
        `find_util` (findstr/grep) to filter `dumpsys window w` output.
        """
        if device == None:
            pattern = re.compile(r"[a-zA-Z0-9\.]+/.[a-zA-Z0-9\.]+")
            out = os.popen("adb shell dumpsys window w | %s \/ | %s name=" % (find_util, find_util)).readlines()
            # First "pkg/activity"-shaped token in the filtered output.
            return pattern.findall(str(out))[0]
        else:
            pattern = re.compile(r"[a-zA-Z0-9\.]+/.[a-zA-Z0-9\.]+")
            out = os.popen("adb -s "+device+" shell dumpsys window w | %s \/ | %s name=" % (find_util, find_util)).readlines()
            return pattern.findall(str(out))[0]
def getCurrentPackageName(self,device = None):
"""
获取当前运行的应用的包名
"""
if device ==None:
return self.getFocusedPackageAndActivity().split("/")[0]
else:
return self.getFocusedPackageAndActivity(device).split("/")[0]
def startWebpage(self, url,device = None):
"""
使用系统默认浏览器打开一个网页
usage: startWebpage("http://www.baidu.com")
"""
if device == None:
os.popen("adb shell am start -a android.intent.action.VIEW -d %s" % url)
else:
os.popen("adb -s " +device + " shell am start -a android.intent.action.VIEW -d " + url)
#设备信息!!!------------------------------------------------------------------------------------------------------------------------------------------
    def getDeviceNatework(self,device=None):
        # Return the wlan0 MAC address (sic: "Natework" typo kept for callers).
        if device ==None:
            return ''.join(os.popen("adb shell cat /sys/class/net/wlan0/address").readlines()).replace("\r\n", "")
        else:
            return ''.join(os.popen("adb -s "+ device +" shell cat /sys/class/net/wlan0/address").readlines()).replace("\r\n", "")
    def getDeviceModule(self,device=None):
        # Return the device model string (ro.product.model).
        if device ==None:
            return ''.join(os.popen("adb shell getprop ro.product.model").readlines()).replace("\r\n", "")
        else:
            return ''.join(os.popen("adb -s "+ device +" shell getprop ro.product.model").readlines()).replace("\r\n", "")
    def getDeviceSystemVersion(self,device=None):
        # Return the Android OS version (ro.build.version.release).
        if device ==None:
            return ''.join(os.popen("adb shell getprop ro.build.version.release").readlines()).replace("\r\n", "")
        else:
            return ''.join(os.popen("adb -s "+ device +" shell getprop ro.build.version.release").readlines()).replace("\r\n", "")
def getDeviceSize(self,device=None):
if device ==None:
return ''.join(os.popen("adb shell wm size").readlines()).replace("\r\n", "").replace("Physical size: ", "")
else:
return ''.join(os.popen("adb -s "+ device +" shell wm size").readlines()).replace("\r\n", "")
#---------------------------------------------------------------------------------------------------------------------------------------------------------
    def echo(self,device=None):
        # Print model, wifi MAC, Android version and screen resolution for
        # the default device, or for *device* when a serial is given.
        if device==None:
            print("设备型号:" + self.getDeviceModule())
            print("网络mac地址:" + self.getDeviceNatework())
            print("Android 系统版本:" + self.getDeviceSystemVersion())
            print("屏幕分辨率:" + self.getDeviceSize())
        else:
            print(device+"的设备型号:" + self.getDeviceModule(device))
            print(device+"的网络mac地址:" + self.getDeviceNatework(device))
            print(device+"的Android 系统版本:" + self.getDeviceSystemVersion(device))
            print(device+"的屏幕分辨率:" + self.getDeviceSize(device))
        #print("电池信息:" + )
    def __uidump(self,device=None):
        # Dump the current UI hierarchy on the device with uiautomator and
        # pull the resulting XML into the host temp directory; per-device
        # dumps get the serial appended to the file name.
        if device == None:
            os.popen("adb shell uiautomator dump /data/local/tmp/uidump.xml")
            os.popen("adb pull /data/local/tmp/uidump.xml " + self.tempFile)
        else:
            os.popen("adb -s "+device +r" shell uiautomator dump /data/local/tmp/uidump"+device+".xml")
            os.popen("adb -s "+device +" pull /data/local/tmp/uidump"+device+".xml " + self.tempFiledevice)
    def __element(self, attrib, name,device=None):
        """
        Find the first UI node whose attribute *attrib* equals *name* and
        return the (x, y) centre of its bounds; returns None implicitly
        when no node matches.
        """
        if device == None:
            self.__uidump()
            tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if elem.attrib[attrib] == name:
                    # bounds looks like "[x1,y1][x2,y2]"; pull the 4 ints
                    # and compute the centre point.
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    return Xpoint, Ypoint
        else:
            self.__uidump(device)
            tree = ET.ElementTree(file=self.tempFiledevice + "\\uidump"+device+".xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if elem.attrib[attrib] == name:
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    return Xpoint, Ypoint
    def findElementByPartName(self, namelist,device=None):
        """
        Find the first node whose text contains every character of the
        strings in *namelist* (character-set subset test, NOT substring
        match) and return its centre (x, y); implicitly None when absent.
        Assumes Python 2 semantics -- TODO confirm: str.encode("utf-8")
        here yields bytes under Python 3 and the set comparison changes.
        """
        if device == None:
            self.__uidump()
            tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(list("".join([x for x in namelist]))).issubset(set(list(elem.attrib["text"].encode("utf-8")))):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    return Xpoint, Ypoint
        else:
            self.__uidump(device)
            tree = ET.ElementTree(file=self.tempFiledevice + "\\uidump"+device+".xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                #print(list("".join([x for x in namelist])))
                #print(list(elem.attrib["text"].encode("utf-8")))
                #set(dic.items()).issubset(set(elem.attrib.items()))
                #set(""join.namelist.items()).issubset(set(elem.attrib["text"].encode("utf-8").split.items()))
                if set(list("".join([x for x in namelist]))).issubset(set(list(elem.attrib["text"].encode("utf-8")))):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    return Xpoint, Ypoint
    def findElementsByPartName(self, nameji,device = None):
        """
        Like findElementByPartName but collect the centre points of ALL
        matching nodes; duplicates are removed via set(), so the returned
        list order is arbitrary.
        """
        if device == None:
            lister = []
            self.__uidump()
            tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(list("".join([x for x in nameji]))).issubset(set(list(elem.attrib["text"].encode("utf-8")))):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    lister.append((Xpoint, Ypoint))
            return list(set(lister))
        else:
            lister = []
            self.__uidump(device)
            tree = ET.ElementTree(file=self.tempFiledevice + "\\uidump" + device + ".xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(list("".join([x for x in nameji]))).issubset(set(list(elem.attrib["text"].encode("utf-8")))):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    lister.append((Xpoint, Ypoint))
            return list(set(lister))
def __elements(self, attrib, name,device = None):
"""
同属性多个元素,返回坐标元组列表
"""
if device == None:
list = []
self.__uidump()
tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
treeIter = tree.iter(tag="node")
for elem in treeIter:
if elem.attrib[attrib] == name:
bounds = elem.attrib["bounds"]
coord = self.pattern.findall(bounds)
Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
list.append((Xpoint, Ypoint))
return list
else:
list = []
self.__uidump(device)
tree = ET.ElementTree(file=self.tempFiledevice + "\\uidump" + device + ".xml")
treeIter = tree.iter(tag="node")
for elem in treeIter:
if elem.attrib[attrib] == name:
bounds = elem.attrib["bounds"]
coord = self.pattern.findall(bounds)
Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
list.append((Xpoint, Ypoint))
return list
    def findElementByOthers(self,dic,device = None):
        # Find the first node whose attributes contain every key/value pair
        # of *dic*; return its centre (x, y), implicitly None when absent.
        if device == None:
            self.__uidump()
            tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(dic.items()).issubset(set(elem.attrib.items())):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    # os.popen("adb -s SS6TIFZT99999999 shell input tap " + str(Xpoint) + " " + str(Xpoint))
                    return Xpoint, Ypoint
        else:
            self.__uidump(device)
            tree = ET.ElementTree(file=self.tempFiledevice + "\\uidump" + device + ".xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(dic.items()).issubset(set(elem.attrib.items())):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    # os.popen("adb -s SS6TIFZT99999999 shell input tap " + str(Xpoint) + " " + str(Xpoint))
                    return Xpoint, Ypoint
    def findElementsByOthers(self, dic,device = None):
        """
        Collect the centre points of ALL nodes whose attributes contain
        every key/value pair of *dic*; duplicates removed via set(), so
        the returned list order is arbitrary.
        """
        if device == None:
            lister = []
            self.__uidump()
            tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(dic.items()).issubset(set(elem.attrib.items())):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    # os.popen("adb -s SS6TIFZT99999999 shell input tap " + str(Xpoint) + " " + str(Xpoint))
                    lister.append((Xpoint, Ypoint))
            return list(set(lister))
        else:
            lister = []
            self.__uidump(device)
            tree = ET.ElementTree(file=self.tempFiledevice + "\\uidump" + device + ".xml")
            treeIter = tree.iter(tag="node")
            for elem in treeIter:
                if set(dic.items()).issubset(set(elem.attrib.items())):
                    bounds = elem.attrib["bounds"]
                    coord = self.pattern.findall(bounds)
                    Xpoint = (int(coord[2]) - int(coord[0])) / 2.0 + int(coord[0])
                    Ypoint = (int(coord[3]) - int(coord[1])) / 2.0 + int(coord[1])
                    # os.popen("adb -s SS6TIFZT99999999 shell input tap " + str(Xpoint) + " " + str(Xpoint))
                    lister.append((Xpoint, Ypoint))
            return list(set(lister))
def findElementByPicPath(self,picPath,device = None):
if device == None:
result1 = self.locateOnScreen(picPath)
result2 = self.center(result1)
return result2
else:
result1 = self.locateOnScreen(picPath,device)
result2 = self.center(result1)
return result2
def findElementsByPicPath(self,picPath,device = None):
if device == None:
result2list = []
result1list = self.locateAllOnScreen(picPath)
for result1 in result1list:
result2 = self.center(result1)
result2list.append(result2)
else:
result2list = []
result1list = self.locateAllOnScreen(picPath,device)
for result1 in result1list:
result2 = self.center(result1)
result2list.append(result2)
return result2list
def screenshotbmp(self,device=None):
#yige
if device == None:
self.mkdir(r"C:\Users\Administrator\Desktop\pic")
os.popen(
r"adb shell screencap -p /sdcard/screen.bmp && adb pull /sdcard/screen.bmp C:\Users\Administrator\Desktop\pic\ && adb shell rm /sdcard/screen.bmp")
print(r"ok pic saved in C:\Users\Administrator\Desktop\pic")
return r"C:\Users\Administrator\Desktop\pic\screen.bmp"
#duoge
else:
self.mkdir(r"C:\Users\Administrator\Desktop\pic" + device)
os.popen(r"adb -s "+device+ " shell screencap -p /sdcard/screen.bmp && adb pull /sdcard/screen.bmp C:\Users\Administrator\Desktop\pic" + device + r"\ && adb shell rm /sdcard/screen.bmp")
print(r"ok pic saved in C:\Users\Administrator\Desktop\pic" + device)
return r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.bmp"
    def get_screenxy_from_bmp(self,son_bmp,device = None):
        # Return the on-screen coordinates matching the given template
        # screenshot as (x, y, width, height), or False when not found.
        # Pure pixel-exact search: anchor on the first two template pixels,
        # then compare the template row by row.
        from PIL import Image
        if device == None:
            img_main = Image.open(self.screenshotbmp())
            img_son = Image.open(son_bmp)
            datas_a = list(img_main.getdata())
            datas_b = list(img_son.getdata())
            for i, item in enumerate(datas_a):
                # Cheap 2-pixel anchor test before the full row comparison.
                if datas_b[0] == item and datas_a[i + 1] == datas_b[1]:
                    yx = divmod(i, img_main.size[0])
                    main_start_pos = yx[1] + yx[0] * img_main.size[0]
                    match_test = True
                    for n in range(img_son.size[1]):
                        main_pos = main_start_pos + n * img_main.size[0]
                        son_pos = n * img_son.size[0]
                        if datas_b[son_pos:son_pos + img_son.size[0]] != datas_a[main_pos:main_pos + img_son.size[0]]:
                            match_test = False
                            break
                    if match_test:
                        return (yx[1], yx[0], img_son.size[0], img_son.size[1])
            return False
        else:
            img_main = Image.open(self.screenshotbmp(device))
            img_son = Image.open(son_bmp)
            datas_a = list(img_main.getdata())
            datas_b = list(img_son.getdata())
            for i, item in enumerate(datas_a):
                if datas_b[0] == item and datas_a[i + 1] == datas_b[1]:
                    yx = divmod(i, img_main.size[0])
                    main_start_pos = yx[1] + yx[0] * img_main.size[0]
                    match_test = True
                    for n in range(img_son.size[1]):
                        main_pos = main_start_pos + n * img_main.size[0]
                        son_pos = n * img_son.size[0]
                        if datas_b[son_pos:son_pos + img_son.size[0]] != datas_a[main_pos:main_pos + img_son.size[0]]:
                            match_test = False
                            break
                    if match_test:
                        return (yx[1], yx[0], img_son.size[0], img_son.size[1])
            return False
    def findElementByName(self, name,device = None):
        """
        Locate a UI element by its visible text.
        usage: findElementByName(u"设置")
        """
        if device == None:
            return self.__element("text", name)
        else:
            return self.__element("text", name,device)
    def findElementsByName(self, name,device = None):
        # All elements whose visible text equals *name*.
        if device == None:
            return self.__elements("text", name)
        else:
            return self.__elements("text", name,device)
    def findElementByClass(self, className,device = None):
        """
        Locate a UI element by its widget class name.
        usage: findElementByClass("android.widget.TextView")
        """
        if device == None:
            return self.__element("class", className)
        else:
            return self.__element("class", className,device)
    def findElementsByClass(self, className,device = None):
        # All elements of the given widget class.
        if device == None:
            return self.__elements("class", className)
        else:
            return self.__elements("class", className,device)
    def findElementById(self, id,device = None):
        """
        Locate a UI element by its resource-id.
        usage: findElementsById("com.android.deskclock:id/imageview")
        """
        if device == None:
            return self.__element("resource-id", id)
        else:
            return self.__element("resource-id", id,device)
    def findElementsById(self, id,device = None):
        # All elements with the given resource-id.
        if device == None:
            return self.__elements("resource-id", id)
        else:
            return self.__elements("resource-id", id,device)
    def findElementByContent(self, content,device = None):
        # Locate a UI element by its content-desc (accessibility label).
        if device == None:
            return self.__element("content-desc", content)
        else:
            return self.__element("content-desc", content,device)
        #return self.__element("content-desc", content)
    def findElementsByContent(self, content,device = None):
        # All elements with the given content-desc.
        if device == None:
            return self.__elements("content-desc", content)
        else:
            return self.__elements("content-desc", content,device)
    def findElementByPackage(self, package,device = None):
        # Locate the first element belonging to the given package.
        if device == None:
            return self.__element("package", package)
        else:
            return self.__element("package", package,device)
    def findElementsByPackage(self, package,device = None):
        # All elements belonging to the given package.
        if device == None:
            return self.__elements("package", package)
        else:
            return self.__elements("package", package,device)
    def findElementByXpath(self,Xpath,device = None):
        # NOTE(review): looks unfinished -- *Xpath* and *device* are
        # accepted but never used; this currently just dumps every node's
        # attribute dict to stdout.  TODO: implement real XPath matching.
        self.__uidump()
        tree = ET.ElementTree(file=self.tempFile + "\\uidump.xml")
        treeIter = tree.iter(tag="node")
        for elem in treeIter:
            print(elem.attrib)
#def
    def screenshot(self,device=None,box = None,screenpng =None):
        # Capture the device screen to a PNG and return the local path.
        # Branching: screenpng (target dir) x box (crop region) x device.
        # NOTE(review): the rotate/crop logic is disabled (bare triple-quoted
        # strings, i.e. no-op expressions), so `box` is effectively ignored;
        # kept byte-identical here.
        #yige
        if screenpng ==None:
            if box == None:
                if device == None:
                    self.mkdir(r"C:\Users\Administrator\Desktop\pic")
                    os.popen(
                        r"adb shell screencap -p /sdcard/screen.png && adb pull /sdcard/screen.png C:\Users\Administrator\Desktop\pic\ && adb shell rm /sdcard/screen.png")
                    '''if "emulator" in self.Devicename:
                        im = PIL.Image.open(r"C:\Users\Administrator\Desktop\pic\screen.png")
                        im = im.rotate(270)
                        im.save( r"C:\Users\Administrator\Desktop\pic\screen.png")
                        print(r"ok pic saved in C:\Users\Administrator\Desktop\pic")'''
                    return r"C:\Users\Administrator\Desktop\pic\screen.png"
                # duoge
                else:
                    self.mkdir(r"C:\Users\Administrator\Desktop\pic" + device)
                    os.popen(
                        r"adb -s " + device + " shell screencap -p /sdcard/screen.png && adb -s " + device + r" pull /sdcard/screen.png C:\Users\Administrator\Desktop\pic" + device + r"\ && adb -s " + device + " shell rm /sdcard/screen.png")
                    print(r"ok pic saved in C:\Users\Administrator\Desktop\pic" + device)
                    '''if "emulator" in device:
                        im = PIL.Image.open(r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png")
                        im = im.rotate(270)
                        im.save( r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png")'''
                    return r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png"
            else:
                if device == None:
                    self.mkdir(r"C:\Users\Administrator\Desktop\pic")
                    os.popen(
                        r"adb shell screencap -p /sdcard/screen.png && adb pull /sdcard/screen.png C:\Users\Administrator\Desktop\pic\ && adb shell rm /sdcard/screen.png")
                    print(r"ok pic saved in C:\Users\Administrator\Desktop\pic")
                    image = PIL.Image.open(r"C:\Users\Administrator\Desktop\pic\screen.png")
                    '''if "emulator" in self.Devicename:
                        image = image.rotate(270)
                        image.save(r"C:\Users\Administrator\Desktop\pic\screen.png")
                    newImage = image.crop(box)
                    newImage.save(r"C:\Users\Administrator\Desktop\pic\screen.png")'''
                    return r"C:\Users\Administrator\Desktop\pic\screen.png"
                # duoge
                else:
                    self.mkdir(r"C:\Users\Administrator\Desktop\pic" + device)
                    os.popen(
                        r"adb -s " + device + " shell screencap -p /sdcard/screen.png && adb -s " + device + r" pull /sdcard/screen.png C:\Users\Administrator\Desktop\pic" + device + r"\ && adb -s " + device + " shell rm /sdcard/screen.png")
                    print(r"ok pic saved in C:\Users\Administrator\Desktop\pic" + device)
                    image = PIL.Image.open(r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png")
                    '''if "emulator" in device:
                        image = image.rotate(270)
                        image.save(r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png")
                    newImage = image.crop(box)
                    newImage.save("C:\Users\Administrator\Desktop\pic" + device + r"\screen.png")'''
                    return r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png"
        else:
            if box == None:
                if device == None:
                    self.mkdir(screenpng)
                    os.popen(r"adb shell screencap -p /sdcard/screen.png && adb pull /sdcard/screen.png " +screenpng+"\ && adb shell rm /sdcard/screen.png")
                    '''if "emulator" in self.Devicename:
                        image = Image.open( screenpng+r"\screen.png")
                        image = image.rotate(270)
                    print(r"ok pic saved in "+screenpng)'''
                    return screenpng+r"\screen.png"
                # duoge
                else:
                    # NOTE(review): this branch passes a .png path to mkdir and
                    # ignores `screenpng`, pulling to the Desktop path instead
                    # -- looks buggy; kept byte-identical, verify with author.
                    self.mkdir(screenpng+r"\screen.png")
                    os.popen(
                        r"adb -s " + device + " shell screencap -p /sdcard/screen.png && adb -s " + device + r" pull /sdcard/screen.png C:\Users\Administrator\Desktop\pic" + device + r"\ && adb -s " + device + " shell rm /sdcard/screen.png")
                    print(r"ok pic saved in C:\Users\Administrator\Desktop\pic" + device)
                    '''if "emulator" in device:
                        image = Image.open(r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png")
                        image = image.rotate(270)'''
                    return r"C:\Users\Administrator\Desktop\pic" + device + r"\screen.png"
            else:
                if device == None:
                    self.mkdir(screenpng)
                    os.popen(
                        r"adb shell screencap -p /sdcard/screen.png && adb pull /sdcard/screen.png " +screenpng+r"\ && adb shell rm /sdcard/screen.png")
                    print(r"ok pic saved in "+screenpng)
                    '''image = Image.open(screenpng+r"/screen.png")
                    if "emulator" in self.Devicename:
                        image = image.rotate(270)
                    newImage = image.crop(box)
                    newImage.save(screenpng+r"/screen.png")'''
                    return screenpng+r"/screen.png"
                # duoge
                else:
                    self.mkdir(screenpng)
                    os.popen(
                        r"adb shell screencap -p /sdcard/screen.png && adb pull /sdcard/screen.png " +screenpng+"\ && adb shell rm /sdcard/screen"+device+".png")
                    print(r"ok pic saved in "+screenpng)
                    '''image = Image.open(screenpng+r"/screen"+device+".png")
                    if "emulator" in self.Devicename:
                        image = image.rotate(270)
                    newImage = image.crop(box)
                    newImage.save(screenpng+r"/screen"+device+".png")'''
                    return screenpng+r"/screen"+device+".png"
#图像识别1类===================================================================================================================================
    def locateAllOnScreen(self,image, grayscale=False, limit=None, region=None,device=None):
        # Screenshot the device and yield every on-screen match of *image*.
        # NOTE(review): `region` is accepted but never used; `screenshotIm`
        # here is a file path string returned by self.screenshot(), so the
        # fp-close guard below never fires for it.
        if device == None:
            screenshotIm = self.screenshot()
            retVal = self.locateAll(image, screenshotIm, grayscale, limit)
            if 'fp' in dir(screenshotIm) and screenshotIm.fp is not None:
                screenshotIm.fp.close()  # Screenshots on Windows won't have an fp since they came from ImageGrab, not a file.
            return retVal
        else:
            screenshotIm = self.screenshot(device)
            retVal = self.locateAll(image, screenshotIm, grayscale, limit)
            if 'fp' in dir(screenshotIm) and screenshotIm.fp is not None:
                screenshotIm.fp.close()  # Screenshots on Windows won't have an fp since they came from ImageGrab, not a file.
            return retVal
    def locateOnScreen(self,image, grayscale=False,device=None):
        # Screenshot the device and return the first on-screen match of
        # *image* as (x, y, w, h), or None when not found.
        if device == None:
            screenshotIm = self.screenshot()
            retVal = self.locate(image, screenshotIm, grayscale)
            if 'fp' in dir(screenshotIm) and screenshotIm.fp is not None:
                screenshotIm.fp.close()  # Screenshots on Windows won't have an fp since they came from ImageGrab, not a file.
            return retVal
        else:
            screenshotIm = self.screenshot(device)
            retVal = self.locate(image, screenshotIm, grayscale)
            if 'fp' in dir(screenshotIm) and screenshotIm.fp is not None:
                screenshotIm.fp.close()  # Screenshots on Windows won't have an fp since they came from ImageGrab, not a file.
            return retVal
def mkdir(self,path):
# 引入模块
import os
# 去除首位空格
path = path.strip()
# 去除尾部 \ 符号
path = path.rstrip("\\")
# 判断路径是否存在
# 存在 True
# 不存在 False
isExists = os.path.exists(path)
# 判断结果
if not isExists:
os.makedirs(path)
print(path + ' 创建成功')
return True
else:
# 如果目录存在则不创建,并提示目录已存在
#print path + ' 目录已存在'
return False
def locate(self,needleImage, haystackImage, grayscale=False):
# Note: The gymnastics in this function is because we want to make sure to exhaust the iterator so that the needle and haystack files are closed in locateAll.
points = tuple(self.locateAll(needleImage, haystackImage, grayscale, 1))
if len(points) > 0:
return points[0]
else:
return None
def locateAll(self,needleImage, haystackImage, grayscale=False, limit=None):
needleFileObj = None
haystackFileObj = None
if isinstance(needleImage, str):
# 'image' is a filename, load the Image object
needleFileObj = open(needleImage, 'rb')
needleImage = Image.open(needleFileObj)
if isinstance(haystackImage, str):
# 'image' is a filename, load the Image object
haystackFileObj = open(haystackImage, 'rb')
haystackImage = Image.open(haystackFileObj)
if grayscale:
needleImage = ImageOps.grayscale(needleImage)
haystackImage = ImageOps.grayscale(haystackImage)
needleWidth, needleHeight = needleImage.size
haystackWidth, haystackHeight = haystackImage.size
needleImageData = tuple(needleImage.getdata()) # TODO - rename to needleImageData??
haystackImageData = tuple(haystackImage.getdata())
needleImageRows = [needleImageData[y * needleWidth:(y + 1) * needleWidth] for y in
range(needleHeight)] # LEFT OFF - check this
needleImageFirstRow = needleImageRows[0]
assert len(needleImageFirstRow) == needleWidth
assert [len(row) for row in needleImageRows] == [needleWidth] * needleHeight
numMatchesFound = 0
for y in range(haystackHeight):
for matchx in self._kmp(needleImageFirstRow, haystackImageData[y * haystackWidth:(y + 1) * haystackWidth]):
foundMatch = True
for searchy in range(1, needleHeight):
haystackStart = (searchy + y) * haystackWidth + matchx
if needleImageData[searchy * needleWidth:(searchy + 1) * needleWidth] != haystackImageData[
haystackStart:haystackStart + needleWidth]:
foundMatch = False
break
if foundMatch:
# Match found, report the x, y, width, height of where the matching region is in haystack.
numMatchesFound += 1
yield (matchx, y, needleWidth, needleHeight)
if limit is not None and numMatchesFound >= limit:
# Limit has been reached. Close file handles.
if needleFileObj is not None:
needleFileObj.close()
if haystackFileObj is not None:
haystackFileObj.close()
# There was no limit or the limit wasn't reached, but close the file handles anyway.
if needleFileObj is not None:
needleFileObj.close()
if haystackFileObj is not None:
haystackFileObj.close()
def center(self,coords):
return (coords[0] + int(coords[2] / 2), coords[1] + int(coords[3] / 2))
def _kmp(self,needle, haystack): # Knuth-Morris-Pratt search algorithm implementation (to be used by screen capture)
# build table of shift amounts
shifts = [1] * (len(needle) + 1)
shift = 1
for pos in range(len(needle)):
while shift <= pos and needle[pos] != needle[pos - shift]:
shift += shifts[pos - shift]
shifts[pos + 1] = shift
# do the actual search
startPos = 0
matchLen = 0
for c in haystack:
while matchLen == len(needle) or \
matchLen >= 0 and needle[matchLen] != c:
startPos += shifts[matchLen]
matchLen -= shifts[matchLen]
matchLen += 1
if matchLen == len(needle):
yield startPos
class Event(object):
    """Thin wrapper around the adb command line: restarts the adb server,
    discovers attached devices, and injects tap/swipe input events."""

    def __init__(self):
        # Restart the adb server, then wait for every attached device to
        # come online.
        os.popen("adb kill-server")
        os.popen("adb start-server")
        for Devicename in self.numberd():
            print("Event: " + Devicename)
            os.popen("adb -s " + Devicename + " wait-for-device")

    def numberd(self):
        """Return the device serials reported by `adb devices` as a list."""
        numberlist = ','.join(os.popen(u"adb devices").readlines()[1:-1]).replace("\tdevice", "").replace("\n", "").split(',')
        return numberlist

    def _adb(self, device):
        # Build the adb command prefix, optionally targeting one device.
        # (Replaces the original duplicated `device == None` branches.)
        return "adb" if device is None else "adb -s " + device

    def touch(self, dx, dy, device=None):
        """Tap the screen at (dx, dy) on *device* (default device if None)."""
        print(str(dx) + ", " + str(dy))
        os.popen(self._adb(device) + " shell input tap " + str(dx) + " " + str(dy))
        time.sleep(0.5)

    def swipe(self, x2, y2, x1, y1, device=None):
        """Swipe from (x2, y2) to (x1, y1) on *device* (default if None)."""
        print(str((x2, y2, x1, y1)))
        os.popen(self._adb(device) + " shell input swipe " + str(x2) + " " + str(y2) + " " + str(x1) + " " + str(y1))
        time.sleep(0.5)
|
[
"noreply@github.com"
] |
material666.noreply@github.com
|
95f5b30ff0467951e97558340f5875b1253a89bb
|
03e4ca94ea43b2bb922d7401347c4876c591993f
|
/diehard_gui.py
|
8cb089d9360957b1ba460c037f9372f4960a20d7
|
[] |
no_license
|
gowtham-the-dev/diehard-test-suite-python-with-GUI
|
0a681362876c9ac37ad8457a1df8e077351c9b9d
|
039d50d52f288a01082653b528366b549774f02a
|
refs/heads/master
| 2022-10-18T23:05:17.243692
| 2019-01-04T08:43:35
| 2019-01-04T08:43:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,486
|
py
|
import time
import sys
import os
if sys.version_info<(3,0,0):
sys.stderr.write("You need python 3.0 or later to run this script\n")
exit(1)
#3.X import statements
from tkinter import *
from tkinter import filedialog as fd
filename=""
class gui:
    """Tkinter front-end for the dieharder random-number test suite.

    Lets the user pick a test, an input data file and an output report
    file, then shells out to the ``dieharder`` binary and shows progress
    in a log pane.
    """

    # List-box label -> dieharder test number (the -d argument).
    # 210 is a local sentinel meaning "run the whole suite" (dieharder -a).
    # Replaces the original 31-branch if chain in d3().
    TEST_NUMBERS = {
        " All Test ": 210,
        "diehard_birthdays": 0,
        "diehard_operm5": 1,
        "diehard_rank_32x32": 2,
        "diehard_rank_6x8": 3,
        "diehard_bitstream": 4,
        "diehard_opso": 5,
        "diehard_oqso": 6,
        "diehard_dna": 7,
        "diehard_count_1s_str": 8,
        "diehard_count_1s_byt": 9,
        "diehard_parking_lot": 10,
        "diehard_2dsphere": 11,
        "diehard_3dsphere": 12,
        "diehard_squeeze": 13,
        "diehard_sums": 14,
        "diehard_runs": 15,
        "diehard_craps": 16,
        "marsaglia_tsang_gcd": 17,
        "sts_monobit": 100,
        "sts_runs": 101,
        "sts_serial": 102,
        "rgb_minimum_distance": 201,
        "rgb_permutations": 202,
        "rgb_lagged_sum": 203,
        "rgb_kstest_test": 204,
        "dab_bytedistrib": 205,
        "dab_dct": 206,
        "dab_filltree": 207,
        "dab_filltree2": 208,
        "dab_monobit2": 209,
    }

    def d1(self):
        """Ask for the input (random data) file and show it in the source entry."""
        from tkinter.filedialog import askopenfilename
        Tk().withdraw()
        self.filename = askopenfilename()
        print(self.filename)
        self.e.insert(0, self.filename)

    def d2(self):
        """Ask for the output (report) file and show it in the destination entry."""
        self.filename1 = fd.asksaveasfilename(initialdir="/home/ubuntu/Desktop", title="Select file")
        print(self.filename1)
        self.e2.insert(0, self.filename1)

    def d3(self):
        """Run the selected dieharder test; write its output to the report file."""
        selc = self.w.get(ACTIVE)
        print(selc)
        self.log.insert(END, "Test name = " + selc + "\n")
        source = self.e.get()
        dest = self.e2.get()
        print(source)
        print(dest)
        # Dictionary lookup instead of the original if chain.
        testno = self.TEST_NUMBERS[selc]
        print(testno)
        self.log.insert(END, "Test no = " + str(testno) + "\n")
        # Radio buttons: 1 = overwrite, 2 = append.  Fall back to overwrite
        # when neither is selected (the original passed an empty mode string
        # to open() and crashed).
        wmode = "a" if self.v.get() == 2 else "w"
        print(" ")
        print("")
        # Build the dieharder command line; generator 201 = raw file input.
        if testno == 210:
            print("Full test", testno)
            cmd = 'dieharder -a ' + '-g 201 -f ' + source
        else:
            print("d = ", testno)
            cmd = 'dieharder -d ' + str(testno) + ' -g 201 -f ' + source
        print(cmd)
        # Context manager so the report file is always closed (the original
        # never closed it).
        with open(dest, wmode) as f:
            f.write(str(testno))
            f.write("\n\n")
            output = os.popen(cmd).read()
            f.write(output)
            f.write("\n\n")
        print(output)
        print(" ")
        print(" ")
        print("``````````````````````````````````````````````````````````")
        self.log.insert(END, selc + " test finished \n")
        self.log.insert(END, "--------------------------------\n")

    def __init__(self, master):
        """Build and centre the main window and all widgets."""
        self.master = master
        master.configure(background='ivory2')
        master.wm_title("DIEHARD")
        # Header labels.
        Label(master, text="""DIEHARD TEST SUITE""", fg="red", bg='ivory2', justify=LEFT, padx=20, font="courier 16 bold italic").pack()
        Label(master, text="""Information Security Research Group""", bg='ivory2', fg="blue", justify=CENTER, padx=20, font="courier 16 bold italic").pack()
        Label(master, text="""School of EEE - SASTRA University""", bg='ivory2', fg="blue", justify=CENTER, padx=20, font="courier 16 bold italic").pack()
        # Centre a fixed-size window on the screen.
        width = 850
        height = 660
        screen_width = master.winfo_screenwidth()
        screen_height = master.winfo_screenheight()
        x = (screen_width / 2) - (width / 2)
        y = (screen_height / 2) - (height / 2)
        master.geometry('%dx%d+%d+%d' % (width, height, x, y))
        global var
        var = StringVar(master)
        var.set(" All Test ")
        # Test-chooser list box with scrollbars.
        self.l = Label(master, text="Select the test", bg='ivory2', fg="black", font="courier 15 bold italic")
        self.l.pack()
        self.l.place(x=120, y=130)
        frame = Frame(master)
        xscrollbar = Scrollbar(frame, orient=HORIZONTAL)
        xscrollbar.grid(row=1, column=0, sticky=E+W)
        yscrollbar = Scrollbar(frame)
        yscrollbar.grid(row=0, column=1, sticky=N+S)
        self.w = Listbox(frame, width=30, height=8, xscrollcommand=xscrollbar.set,
                         yscrollcommand=yscrollbar.set, font=('courier', 12), selectmode=EXTENDED)
        self.w.grid(row=0, column=0, sticky=N+S+E+W)
        # Fill the list box in declaration order (dict preserves insertion
        # order), replacing the original reversed-insert loop.
        for name in self.TEST_NUMBERS:
            self.w.insert(END, name)
        xscrollbar.config(command=self.w.xview)
        yscrollbar.config(command=self.w.yview)
        frame.pack()
        frame.place(x=50, y=180)
        # Source file picker.
        self.l1 = Label(master, text="Select Source File Location", bg='ivory2', fg="black", font="courier 15 bold italic")
        self.l1.pack()
        self.l1.place(x=50, y=370)
        self.e = Entry(master, width=35)
        self.e.pack()
        self.e.place(x=70, y=420)
        self.b1 = Button(master, text="Browse", command=self.d1)
        self.b1.pack()
        self.b1.place(x=172, y=460)
        # Destination file picker.
        self.l2 = Label(master, text="Select Destination Folder", bg='ivory2', fg="black", font="courier 15 bold italic")
        self.l2.pack()
        self.l2.place(x=50, y=510)
        self.e2 = Entry(master, width=35)
        self.e2.pack()
        self.e2.place(x=50, y=560)
        self.b2 = Button(master, text="Browse", command=self.d2)
        self.b2.pack()
        self.b2.place(x=172, y=600)
        # Log pane with scrollbars.
        frame1 = Frame(master)
        xscrollbar1 = Scrollbar(frame1, orient=HORIZONTAL)
        xscrollbar1.grid(row=1, column=0, sticky=E+W)
        yscrollbar1 = Scrollbar(frame1)
        yscrollbar1.grid(row=0, column=1, sticky=N+S)
        self.log = Text(frame1, width=55, height=20, bg='grey100', xscrollcommand=xscrollbar1.set,
                        yscrollcommand=yscrollbar1.set, takefocus=0)
        self.log.grid(row=0, column=0, sticky=N+S+E+W)
        xscrollbar1.config(command=self.log.xview)
        yscrollbar1.config(command=self.log.yview)
        frame1.pack()
        frame1.place(x=400, y=180)
        # Output-mode radio buttons.
        self.l3 = Label(master, text="Modes of Operation", bg='ivory2', fg="black", font="courier 15 bold italic")
        self.l3.pack()
        self.l3.place(x=500, y=510)
        self.v = IntVar()
        self.r1 = Radiobutton(master, text="Erase/Write", font=('courier', 12, 'italic'), bg='ivory2', variable=self.v, value=1)
        self.r1.pack()
        self.r1.place(x=450, y=550)
        self.r2 = Radiobutton(master, text="Append", font=('courier', 12, 'italic'), bg='ivory2', variable=self.v, value=2)
        self.r2.pack()
        self.r2.place(x=620, y=550)
        self.b3 = Button(master, text="Start Test", bg="ivory2", command=self.d3)
        self.b3.pack()
        self.b3.place(x=690, y=600)
if __name__ == "__main__":
    # Launch the GUI event loop only when run as a script, so importing
    # this module for reuse/testing does not open a window.
    root = Tk()
    g = gui(root)
    root.mainloop()
|
[
"43443265+Gowtham135@users.noreply.github.com"
] |
43443265+Gowtham135@users.noreply.github.com
|
a03086e8d185e90a780d9dc83d9be21280e5c7ac
|
656e616315c705c54436d2d474463ce67253b091
|
/tests/validation_v2/storage/replica.py
|
a8ac9de8ca630c08a621ee32a1d482b737946eb5
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
solarsam/validation-tests
|
774f2b4ab536a303ecfc8705bf1734f132a78e71
|
53d5b12d5646a1c70e3ae47b67900ad1a855de13
|
refs/heads/master
| 2020-05-29T12:32:33.401345
| 2016-01-20T18:37:19
| 2016-01-20T18:37:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,621
|
py
|
#!/usr/bin/python
import binascii
import socket
import struct
import sys
from optparse import OptionParser, OptionGroup
import block_pb2
# uint32_t size:21,
# type:5,
# _unused:6;
def compose_buf(msg):
    """Prefix *msg* with the 4-byte wire header: type=10 (message) in bits
    21-25, payload size in the low 21 bits."""
    header = (10 << 21) | (len(msg) & 0x1fffff)
    return struct.pack('i', header) + msg
def compose_hdr(msg_len, type):
    """Build just the 4-byte wire header for a payload of *msg_len* bytes
    with the given message *type* (parameter name kept for callers)."""
    header = (type << 21) | (msg_len & 0x1fffff)
    return struct.pack('i', header)
def parse_command_line(cmd_options):
    """Parse the CLI arguments; return (options, positional_args).

    Calls parser.error() (which exits) when no command is supplied.
    """
    usage = '''Usage: %prog -a <destination address> <command>
Commands:
capabilities, replicas, status, replication-status
connect <host> <port>
replicas
controller-status
snap <name>
list-snapshots
changed-blocks <snap-name>
rebuild <host> <port>
replication-status
dump-volume -f <file name>
load-volume -f <file name>'''
    parser = OptionParser(usage=usage)
    # Global switches.
    parser.add_option("-d", "--debug", action="store_true", default=False)
    parser.add_option("-f", "--file-name", dest="filename")
    parser.add_option("-z", "--size", type="int", dest="size",
                      help="size of the volume dump/load (in bytes)")
    parser.add_option("-o", "--offset", type="int", dest="offset",
                      help="offset of the volume dump/load (in bytes)")
    parser.add_option("-s", "--snapshot", dest="snap")
    parser.add_option("-x", "--xml", action="store_true", default=False)
    # Connection switches, grouped for --help readability.
    conn_group = OptionGroup(parser,
                             "Connection options",
                             "these define addresses, ports, etc needed for connectivity")
    conn_group.add_option("-a", "--address", action="store", type="string",
                          help="The IP address (or host) to connect to")
    conn_group.add_option("-p", "--port", action="store", type="int", default=4000,
                          help="destination port (default=4000)")
    parser.add_option_group(conn_group)
    options, positional = parser.parse_args(cmd_options)
    if not positional:
        parser.error("Need a <command> at the end of the command line!")
    return (options, positional)
class Connection:
    """Client for a replica's control socket.

    Speaks a simple framed protocol: each frame is a 4-byte header
    (type in bits 21-25, payload size in the low 21 bits, built by
    compose_buf/compose_hdr) followed by a protobuf payload from
    block_pb2.  Management calls go through invoke(); bulk volume
    dump/load go through invoke_read()/invoke_write().
    """
    def __init__(self, debug):
        # Socket is created here but not connected until connect().
        self.s_ = socket.socket()
        self.next_seq_ = 1          # monotonically increasing request seq
        self.debug_ = debug         # enables print_dbg() tracing
        self.connected_ = False
        self.rw_size = 128 * 1024   # max bytes per read/write frame
    def is_connected(self):
        """Return True once the Hello handshake has completed."""
        return self.connected_
    def connect(self, address, port):
        """Connect the socket and perform the ClientHello/ServerHello
        handshake.  No-op if already connected."""
        if self.connected_:
            return
        self.print_dbg("Connecting to " + address + ":" + str(port) + "...")
        self.s_.connect((address, port))
        # Start with the Hello
        req = block_pb2.ClientHello()
        req.version = 1
        self.print_dbg("Sending Hello")
        self.send(compose_buf(req.SerializeToString()))
        rx = self.s_.recv(1024)
        self.print_dbg("Received " + str(len(rx)) + " bytes of data")
        resp = self.extract_hello(rx)
        self.print_dbg("Connected to replica v=" + str(resp.version))
        self.connected_ = True
    def invoke(self, terms):
        """Issue a single management command and decode the response.

        *terms* is the positional argument list from the command line
        (terms[0] is the command name).  Returns a command-specific dict
        or list; exits via sys.exit() on a malformed command.
        """
        # prepare the request
        req = block_pb2.Request()
        if terms[0] == "capabilities":
            req.type = block_pb2.Request.Capabilities
        elif terms[0] == "replicas":
            req.type = block_pb2.Request.ListReplicas
        elif terms[0] == "status":
            req.type = block_pb2.Request.Status
        elif terms[0] == "snap":
            req.type = block_pb2.Request.TakeSnapshot
            if len(terms) == 1:
                sys.exit("Need a snapshot name")
            req.snapshot.name = terms[1]
        elif terms[0] == "remove-snap":
            req.type = block_pb2.Request.RemoveSnapshot
            if len(terms) == 1:
                sys.exit("Need a snapshot name")
            req.snapshot.name = terms[1]
        elif terms[0] == "list-snapshots":
            req.type = block_pb2.Request.ListSnapshots
        elif terms[0] == "changed-blocks":
            req.type = block_pb2.Request.ChangedBlocks
            if len(terms) < 2:
                sys.exit("Need a snapshot name")
            req.snapshot.name = terms[1]
        elif terms[0] == "rebuild":
            req.type = block_pb2.Request.RebuildReplica
            if len(terms) < 3:
                sys.exit("Need a <host> and <port> arguments")
            req.replica.host = terms[1]
            req.replica.port = int(terms[2])
        elif terms[0] == "connect":
            req.type = block_pb2.Request.Connect
            if len(terms) < 3:
                sys.exit("Need a <host> and <port> arguments")
            req.replica.host = terms[1]
            req.replica.port = int(terms[2])
        elif terms[0] == "replication-status":
            req.type = block_pb2.Request.ReplicationStatus
        elif terms[0] == "controller-status":
            req.type = block_pb2.Request.ControllerInfo
        else:
            sys.exit("Unrecognized command")
        req.seq = self.next_seq()
        self.send(compose_buf(req.SerializeToString()))
        rx = self.s_.recv(1024)
        self.print_dbg("Received " + str(len(rx)) + " bytes of data")
        (processed, resp) = self.extract_resp(rx)
        # Decode the response into a plain dict/list per command.
        if terms[0] == "capabilities":
            caps = {}
            caps["success"] = True
            caps["id"] = resp.capability.id
            caps["replica_capacity"] = resp.capability.replica_capacity
            caps["replica_usage"] = resp.capability.replica_usage
            caps["volume_size"] = resp.capability.volume_size
            caps["block_size"] = resp.capability.block_size
            return caps
        elif terms[0] == "replicas":
            rv = {}
            rv["success"] = True
            rv["replicas"] = []
            for r in resp.replicas.item:
                # Status string is "", "Online", "Splicing" or "OnlineSplicing"
                # depending on the two boolean flags.
                rv["replicas"].append((r.host, r.port, ("", "Online")[r.online] + ("", "Splicing")[r.splicing]))
            return rv
        elif terms[0] == "status":
            rv = {}
            rv["success"] = True
            rv["online"] = resp.status.online
            rv["error_message"] = resp.status.error_message
            return rv
        elif terms[0] == "snap" or terms[0] == "connect":
            rv = {}
            rv["success"] = resp.io_result.success
            rv["error_message"] = resp.io_result.error_message
            return rv
        elif terms[0] == "remove-snap":
            rv = {}
            rv["success"] = resp.io_result.success
            rv["error_message"] = resp.io_result.error_message
            return rv
        elif terms[0] == "list-snapshots":
            rv = []
            for s in resp.snapshots.item:
                rv.append(dict([("name", s.name), ("id", s.id)]))
            return rv
        elif terms[0] == "controller-status":
            rv = {}
            rv["success"] = True
            rv["controller-status"] = resp.contr_info.status
            return rv
        elif terms[0] == "changed-blocks":
            rv = []
            for r in resp.ranges.item:
                rv.append((r.offset, r.size))
            return rv
        elif terms[0] == "rebuild":
            rv = {}
            rv["success"] = resp.io_result.success
            rv["error_message"] = resp.io_result.error_message
            return rv
        elif terms[0] == "replication-status":
            rv = {}
            rv["success"] = True
            rv["status"] = resp.rep_info.status
            rv["error_message"] = resp.rep_info.error_message
            return rv
        else:
            sys.stderr.write("Missing the response handler for " + terms[0])
            return None
        # NOTE(review): unreachable - every branch above returns.
        return
    def next_seq(self):
        """Return the next request sequence number (post-incremented)."""
        rv = self.next_seq_
        self.next_seq_ += 1
        return rv
    def invoke_read(self, fname, snap = None, offset = 0, size = None):
        """Dump [offset, offset+size) of the volume (or of snapshot *snap*)
        into file *fname*, reading rw_size bytes per request."""
        # get capabilities as we need the volume size
        req = block_pb2.Request()
        req.type = block_pb2.Request.Capabilities
        req.seq = self.next_seq()
        self.send(compose_buf(req.SerializeToString()))
        rx = self.s_.recv(1024)
        self.print_dbg("Received " + str(len(rx)) + " bytes of data")
        (processed, resp) = self.extract_resp(rx)
        volume_size = resp.capability.volume_size
        if offset == None:
            offset = 0
        if size == None:
            size = volume_size - offset
        if size < 0 or offset < 0 or offset + size > volume_size:
            sys.exit("Bad offset and size values")
        # prep the file
        file = open(fname, "wb")
        if not file:
            sys.exit("Failed to open file: " + fname)
        # now read the data up to 128 kB at a time
        off = offset
        while off < offset + size:
            to_read = min(offset + size - off, self.rw_size)
            req = block_pb2.Request()
            req.type = block_pb2.Request.Read
            req.seq = self.next_seq()
            req.range.offset = off
            req.range.size = to_read
            if snap:
                req.range.snapshot = snap
            # Request the data
            self.send(compose_buf(req.SerializeToString()))
            rx = self.recv(1000)
            # Process the (structured) response header
            (processed, resp) = self.extract_resp(rx)
            if not resp.io_result.success:
                sys.exit("Read failed: " + resp.io_result.error_message)
            # Process the (unstructured data) header
            (header_size, data_size, rx) = self.extract_buf(rx[processed:])
            self.print_dbg("Getting " + str(data_size) + " bytes of data")
            assert(data_size == to_read)
            # Dump the first few bytes to disk
            file.write(rx)
            self.print_dbg("written," + str(len(rx)))
            # Keep receiving until the whole chunk has been written.
            written = len(rx)
            while written < to_read:
                rx = self.s_.recv(64 * 1024)
                file.write(rx)
                self.print_dbg("written," + str(len(rx)))
                written += len(rx)
            off += to_read
        file.close()
        return
    def invoke_write(self, fname, offset = 0, size = None):
        """Load file *fname* into [offset, offset+size) of the volume,
        writing rw_size bytes per request."""
        # get capabilities as we need the volume size
        req = block_pb2.Request()
        req.type = block_pb2.Request.Capabilities
        req.seq = self.next_seq()
        self.send(compose_buf(req.SerializeToString()))
        rx = self.s_.recv(1024)
        self.print_dbg("Received " + str(len(rx)) + " bytes of data")
        (processed, resp) = self.extract_resp(rx)
        volume_size = resp.capability.volume_size
        if offset == None:
            offset = 0
        if size == None:
            size = volume_size - offset
        if size < 0 or offset < 0 or offset + size > volume_size:
            sys.exit("Bad offset and size values")
        # prep the file
        file = open(fname, "rb")
        if not file:
            sys.exit("Failed to open file: " + fname)
        # now write the data up to 128 kB at a time
        off = offset
        while off < offset + size:
            to_write = min(offset + size - off, self.rw_size)
            req = block_pb2.Request()
            req.type = block_pb2.Request.Write
            req.seq = self.next_seq()
            req.range.offset = off
            req.range.size = to_write
            # Push the data out into two parts: the protobuf message that
            # describes the operation and the data payload
            self.send(compose_buf(req.SerializeToString()))
            self.send(compose_hdr(to_write, 11))
            data = file.read(to_write)
            if len(data) != to_write:
                sys.exit("Failed to read expected number of bytes from file")
            self.send(data)
            rx = self.s_.recv(1024)
            self.print_dbg("Received " + str(len(rx)) + " bytes of data")
            # Process the (structured) response header
            (processed, resp) = self.extract_resp(rx)
            if not resp.io_result.success:
                sys.exit("Write failed: " + resp.io_result.error_message)
            off += to_write
        file.close()
        return
    def send(self, buf):
        """Send *buf* on the socket, tracing it in hex when debugging.

        NOTE(review): the trace concatenates str with binascii.hexlify()
        output, which is bytes on Python 3 - this would raise TypeError
        with debug enabled; confirm the intended Python version.
        """
        self.print_dbg("data: " + binascii.hexlify(buf[0:4]) + " " + binascii.hexlify(buf[4:]))
        self.s_.send(buf)
    def recv(self, size):
        """Receive at least *size* bytes, unless an early io_result failure
        frame arrives (returned as-is so the caller can report it)."""
        self.print_dbg("Waiting for " + str(size) + " bytes")
        rx = self.s_.recv(size)
        while len(rx) < size:
            # check whether the very front of the data stream contains a failure
            (processed, resp) = self.extract_resp(rx)
            if not resp.io_result.success:
                return rx
            rx += self.s_.recv(size - len(rx))
        return rx
    def extract_buf(self, buf):
        """Split one frame: return (header_size=4, payload_size, payload)."""
        self.print_dbg("data: " + binascii.hexlify(buf[0:4]) + " " + binascii.hexlify(buf[4:]))
        hdr = struct.unpack('i', buf[0:4])[0]
        size = hdr & 0x1fffff
        self.print_dbg("header: type=" + str((hdr >> 21) & 31) + " msg_size=" + str(size) + \
                       " (from " + str(len(buf)) + " total)")
        return (4, size, buf[4:4 + size])
    def extract_hello(self, buf):
        """Decode a ServerHello frame."""
        (hsize, msize, msg) = self.extract_buf(buf)
        resp = block_pb2.ServerHello()
        resp.ParseFromString(msg)
        return resp
    def extract_resp(self, buf):
        """Decode a Response frame; return (bytes_consumed, Response)."""
        (hsize, msize, msg) = self.extract_buf(buf)
        resp = block_pb2.Response()
        resp.ParseFromString(msg)
        return (hsize + msize, resp)
    def print_dbg(self, msg):
        """Print *msg* only when the connection was created with debug=True."""
        if self.debug_:
            print(msg)
if __name__ == "__main__":
(options, terms) = parse_command_line(sys.argv[1:])
c = Connection(options.debug)
c.connect(options.address, options.port)
# Handle file operations separately, as this requires explicit invoke_xxxx() calls and
# extra arguments
if terms[0] == "dump-volume":
if not options.filename:
sys.exit("Please provide the output file name")
c.invoke_read(options.filename, snap = options.snap,
offset = options.offset, size = options.size)
sys.exit(0)
if terms[0] == "load-volume":
if not options.filename:
sys.exit("Please provide the input file name")
c.invoke_write(options.filename,
offset = options.offset, size = options.size)
sys.exit(0)
# Handle the basic management calls
rv = c.invoke(terms)
if terms[0] == "capabilities":
print("Got Capabilities:" +
"\n id:\t\t" + rv["id"] +
"\n replica capacity:\t" + str(rv["replica_capacity"]) +
"\n replica usage:\t" + str(rv["replica_usage"]) +
"\n volume size:\t" + str(rv["volume_size"]) +
"\n block size:\t" + str(rv["block_size"]))
elif terms[0] == "status":
print("Got Status:\n online: " + str(rv["online"]))
elif terms[0] == "snap" or terms[0] == "connect":
print("Got Status:\n success: " + str(rv["success"]))
if not rv["success"]:
print(" error: " + rv["error_message"])
elif terms[0] == "remove-snap":
print("Got Status:\n success: " + str(rv["success"]))
if not rv["success"]:
print(" error: " + rv["error_message"])
elif terms[0] == "list-snapshots":
print("Got snapshots:")
for s in rv:
print(" [" + str(s["id"]) + "] " + s["name"])
elif terms[0] == "changed-blocks":
print("Got a list:\n")
if options.xml:
print('<?xml version="1.0" encoding="ISO-8859-1"?>')
print('<change-set>')
for (off, size) in rv:
if options.xml:
print(' <range offset="' + str(off) + '" size="' + str(size) + '"/>')
else:
print(str(off) + ":" + str(size))
if options.xml:
print('</change-set>')
elif terms[0] == "replication-status":
print("Got status:")
print(" status: " + block_pb2.Response.ReplicationInfo.Status.Name(rv["status"]))
elif terms[0] == "replicas":
print("Got Replica info:")
for (host, port, status) in rv["replicas"]:
print("\t" + host + ":" + str(port) + "\t " + status)
elif terms[0] == "controller-status":
print("Got controller status: "+ str(rv["controller-status"]))
else:
print("Unrecognized response: " + str(rv))
|
[
"elrarun@gmail.com"
] |
elrarun@gmail.com
|
e622233a1cf1989e9dbd967513f1194cebeefe18
|
b3d43717ff28503379c28a9007cd47d220c89f5a
|
/hello.py
|
f2eb3e4f4cd207aada962ebd64c3c1f93bd033d3
|
[] |
no_license
|
insomniapx5/p1_hello
|
0836315d63f8692bfeff5728cf112d10487e0856
|
f9bac0a3e40860139782f6cd11f7d102d0f68eee
|
refs/heads/master
| 2021-07-08T18:05:00.672515
| 2017-10-02T20:22:23
| 2017-10-02T20:22:23
| 105,575,126
| 0
| 0
| null | 2017-10-02T20:27:08
| 2017-10-02T19:21:14
|
Python
|
UTF-8
|
Python
| false
| false
| 29
|
py
|
print "hello master branch"
|
[
"noreply@github.com"
] |
insomniapx5.noreply@github.com
|
c51ece793051e6c8cd7d36f598d4319b91ab279e
|
1c16a96991805e47d30d109857638fd9c726854e
|
/与时俱进2/ftpsrv.py
|
731e4865e71dcb47c734fa7036569b9261e602c5
|
[] |
no_license
|
happyu319/dbapp_ctf_201801
|
fd39e39d69a905cb4abb6180a5add82d4c8b1999
|
481898ec6f69879e60baddc6d293ab32de85f7f3
|
refs/heads/master
| 2021-09-05T04:05:24.157320
| 2018-01-24T02:51:31
| 2018-01-24T02:51:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 669
|
py
|
#!/usr/bin/env python2
# coding: utf-8
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
# Create the virtual-user authorizer - required for FTP login handling.
authorizer = DummyAuthorizer()
# Register a user: (username, password, home directory, permissions).
# 'elradfmw' grants full read/write permissions on /tmp/ftp.
authorizer.add_user('ftp', 'ftp', '/tmp/ftp', perm='elradfmw')
# Set up the FTP protocol handler and attach the authorizer.
handler = FTPHandler
handler.authorizer = authorizer
# Restrict passive-mode data connections to this port range.
handler.passive_ports = range(2000, 2333)
# Listen on all interfaces, standard FTP control port 21.
server = FTPServer(('0.0.0.0', 21), handler)
# Block and serve until interrupted.
server.serve_forever()
|
[
"zeng.ambulong@gmail.com"
] |
zeng.ambulong@gmail.com
|
ab781fd5a5f18796a05241eabff779321d9cc974
|
2050a4143c2153a284f141e67d9105e4b3f73c24
|
/Deep_Learning/jkrowling.py
|
e58f7db2ea74527f5c36d690da55815d758c6103
|
[] |
no_license
|
Nathaniel-Slyte/IRM_Courses
|
477b1cf0bc6cd84a7acb696f455b12ad268daad7
|
03f45fdb2eef7774705dcaef1c6071b77dc47aa7
|
refs/heads/master
| 2023-03-06T18:52:15.566048
| 2021-02-05T08:57:38
| 2021-02-05T08:57:38
| 286,991,234
| 14
| 7
| null | 2020-11-28T15:55:05
| 2020-08-12T11:03:06
|
Python
|
UTF-8
|
Python
| false
| false
| 4,724
|
py
|
from __future__ import annotations
from dataclasses import dataclass
from num2words import num2words
from torch.optim import AdamW
from torch.utils.data import DataLoader, Dataset
from torchtext.data import get_tokenizer
from tqdm import tqdm
from typing import Dict, List, Set, Tuple
import torch
import torch.nn as nn
@dataclass
class HarryPotterBooks:
    """Tokenised text corpus plus vocabulary and index lookup tables."""

    path: str              # source text file
    raw: str               # whitespace-normalised raw text
    tokens: List[str]      # tokenised corpus (numbers spelled out)
    vocab: Set[str]        # unique tokens
    word2id: Dict[str, int]  # token -> integer id
    id2word: Dict[int, str]  # integer id -> token

    @classmethod
    def from_file(cls, path: str, fraction: float = 1) -> HarryPotterBooks:
        """Read *path*, tokenise it, and keep the first *fraction* of tokens."""
        with open(path, "r") as f:
            raw = f.read()
        # Collapse all whitespace runs into single spaces.
        raw = " ".join(raw.split())
        # Spell numeric tokens out as words via num2words.
        # NOTE(review): the basic_english tokenizer yields str tokens, so the
        # int/float branch of this lambda likely never fires - confirm.
        numbers = lambda t: num2words(t) if type(t) in [int, float] else t
        tokenizer = get_tokenizer("basic_english")
        tokens = tokenizer(raw)
        tokens = list(map(numbers, tokens))
        # Truncate the corpus to the requested fraction.
        tokens = tokens[:int(len(tokens) * fraction)]
        vocab = set(tokens)
        # Ids are assigned by set-iteration order (not deterministic across runs).
        word2id = {t: i for i, t in enumerate(vocab)}
        id2word = {i: t for i, t in enumerate(vocab)}
        return cls(path, raw, tokens, vocab, word2id, id2word)
class HarryPotterDataset(Dataset):
    """Sliding-window next-token dataset.

    Sample *i* is (ids of tokens[i : i+window], id of tokens[i+window]).
    """

    def __init__(self, hpb: HarryPotterBooks, window: int) -> None:
        super(HarryPotterDataset, self).__init__()
        self.hpb = hpb
        self.window = window

    def __len__(self) -> int:
        # One sample per window position; the +1 accounts for the target token.
        return len(self.hpb.tokens) - (self.window + 1)

    def __getitem__(self, idx: int) -> Tuple[torch.Tensor, int]:
        lookup = self.hpb.word2id
        chunk = self.hpb.tokens[idx:idx + self.window + 1]
        context, target = chunk[:-1], chunk[-1]
        ids = torch.tensor([lookup[tok] for tok in context], dtype=torch.long)
        return ids, lookup[target]
class JKRowling(nn.Module):
    """Next-token language model: embedding -> multi-layer LSTM -> linear
    classifier over the vocabulary."""

    def __init__(
        self,
        vocab: int,
        embedding: int,
        hidden: int,
        layers: int,
        dropout: float,
    ) -> None:
        super(JKRowling, self).__init__()
        self.embedding = nn.Embedding(vocab, embedding)
        self.lstm = nn.LSTM(
            embedding,
            hidden,
            num_layers=layers,
            dropout=dropout,
            batch_first=True,
        )
        self.fc = nn.Linear(hidden, vocab)

    def forward(self, sentence: torch.Tensor) -> torch.Tensor:
        # (B, T) token ids -> (B, T, E) embeddings.
        emb = self.embedding(sentence)
        # Run the LSTM; keep only the final hidden state of the top layer.
        _, (h_n, _) = self.lstm(emb)
        top_state = h_n[-1]          # (B, H)
        # Project to vocabulary logits: (B, H) -> (B, V).
        return self.fc(top_state)
ROOT = "./dataset/full.txt"
NAME = "J"
SURNAME = "JKR"
FRACTION = 0.1
MAX_SEQ = 50 # Do not touch !!!!!!!!
EMBEDDING = 64
HIDDEN = 32
LAYERS = 2
DROPOUT = 0.2
BATCH_SIZE = 256
NUM_WORKERS = 4
EPOCHS = 1
LR = 1e-3
DECAY = 1e-4
hpb = HarryPotterBooks.from_file(ROOT, fraction=FRACTION)
hpd = HarryPotterDataset(hpb, window=MAX_SEQ)
print("===== STATS ====")
print("RAW :", len(hpb.raw))
print("TOKENS :", len(hpb.tokens))
print("VOCAB :", len(hpb.vocab))
print("DATASET:", len(hpd))
print("================")
print()
loader = DataLoader(
hpd,
batch_size=BATCH_SIZE,
shuffle=True,
num_workers=NUM_WORKERS,
pin_memory=True,
)
model = JKRowling(len(hpb.vocab), EMBEDDING, HIDDEN, LAYERS, DROPOUT).cuda()
criterion = nn.CrossEntropyLoss().cuda()
optim = AdamW(model.parameters(), lr=LR, weight_decay=DECAY)
for epoch in range(EPOCHS):
with tqdm(loader, desc="Train") as pbar:
total_loss = 0.0
model = model.train()
for sentence, word in pbar:
sentence, word = sentence.cuda(), word.cuda()
optim.zero_grad()
out = model(sentence)
loss = criterion(out, word)
loss.backward()
optim.step()
total_loss += loss.item() / len(loader)
pbar.set_postfix(loss=total_loss)
with torch.no_grad():
model = model.eval()
for idx, i in enumerate([0, 10, 100]):
sentence_ids, word_id = hpd[i]
_word_id = model(sentence_ids.unsqueeze(0).cuda())
_word_id = torch.argmax(_word_id.squeeze(0)).cpu().item()
sentence = [hpb.id2word[i.item()] for i in sentence_ids]
word = hpb.id2word[word_id]
_word = hpb.id2word[_word_id]
print()
print(f"===== TEST{idx + 1} ====")
print("SENTENCE :", " ".join(sentence))
print("TARGET :", word)
print("PREDICTION:", _word)
print("================")
print()
torch.jit.script(model.cpu()).save(f"Challenge02_{NAME}_{SURNAME}.ts")
|
[
"hatiyliess86@gmail.com"
] |
hatiyliess86@gmail.com
|
02c7b229e768abe903d076ec6bc9560a597f8fe6
|
3baa7a5575fb37de214fe2afac61a01e45bb39cf
|
/Blog/blog_project/settings.py
|
9cbd87a9c16b22d7e35492099359530787e85b89
|
[] |
no_license
|
TheMilubi/DWES
|
c85ac07b392a45f2c8159e829ac3886bd8821370
|
1e9740d16ccbc67a8c5af60793ecd2f80937b787
|
refs/heads/master
| 2021-09-25T15:32:47.421781
| 2020-03-08T17:49:00
| 2020-03-08T17:49:00
| 210,787,932
| 0
| 0
| null | 2021-09-22T18:50:37
| 2019-09-25T07:56:25
|
HTML
|
UTF-8
|
Python
| false
| false
| 3,304
|
py
|
"""
Django settings for blog_project project.

Generated by 'django-admin startproject' using Django 3.0.3.

For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'evlqzt8ydv=j7#rdll((vy5syo!3i^a16@(^sqq-p1+)(eirw^'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'blog',
    'accounts',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'blog_project.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'blog_project.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
# BUG FIX: STATIC_URL must be a URL prefix served to browsers, not a
# filesystem path; it was previously os.path.join(BASE_DIR, 'static/'),
# which leaks the server's absolute path into every static asset URL.
STATIC_URL = '/static/'
# Filesystem locations Django searches for static assets (paths belong here).
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static/')]

LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'
|
[
"TheMilubi@gmail.com"
] |
TheMilubi@gmail.com
|
0b60b7ea4b149f6d5ba8bc8e04884d9750be9ba0
|
165f1e3f40df3956283348ef21e18cb9e2c76866
|
/Week10/VehicleRepairManager/interface/settings.py
|
3d77a1b37ba3c6aa3aced0c518be791a5e651421
|
[] |
no_license
|
hatanasov/Programming101_Python
|
856a025034df1625b2ba456e3a8d1b43343b2ee1
|
0cd3c2fc600857f72e15c806fd3394bbc19537d4
|
refs/heads/master
| 2020-03-09T00:55:00.763659
| 2018-05-18T12:52:59
| 2018-05-18T12:52:59
| 128,500,141
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,369
|
py
|
# User-facing prompt and message strings for the vehicle repair service CLI.
welcome_msg = "======= Hello! ========\n"
welcome_to_service = "=== Welcome to Vehicle Services! ===\n=== You can choose from the following commands:"
menu_options = "1 for list_all_free_hours\n\
2 for list_free_hours <date>\n\
3 for save_repair_hour <hour_id>\n\
4 for update_repair_hour <hour_id>\n\
5 for delete_repair_hour <hour_id>\n\
6 for add_vehicle\n\
7 for update_vehicle <vehicle_id>\n\
8 for delete_vehicle <vehicle_id>\n\
'q' or 'quit' for exit the system\n"
request_username = "=== Enter user name: ===\n>>> "
confirm_username = "=== Would you like to leave your username "
y_or_n = "['y' or 'n']"
unknown_user_msg = "=== You are unknown user. ===\n=== Would you like to create new user " + \
    y_or_n + " ===\n>>> "
yes = ['y', 'yes']
welcome_knoun_user_msg = "Welcome, you can choose from the following options: "
goodbye = "Goodbye!"
request_email_msg = "=== Enter email address: ====\n>>>"
# BUG FIX: user-facing typo -- was "Entrer phone number"
request_phone_number_msg = "=== Enter phone number: ===\n>>>"
request_address = "=== Enter address: ===\n>>>"
no_valid_user_data = "=== The data you inserted is not valid. ===\n\
=== You must enter 'username' and 'phone number'. ===\n\
=== The other information is optional. ===\n "
again_msg = "=== Would you like to try registration again? {} ===\n>>>".format(
    y_or_n)
client_or_mechanic = "=== Are you a client or a mechanic? ===\n>>>"
|
[
"h.atanasov87@gmail.com"
] |
h.atanasov87@gmail.com
|
5ce5303a0121f7f35e02441a9fa32da209ac730d
|
9facc2de2ea52192cd828018baa46d688e78eee3
|
/model/attachment_model.py
|
a5f86f5d3ff46ce3c197f119b8cef05d4bee8769
|
[] |
no_license
|
journeyends/FlaskTest
|
5a785bf14e739878d3d6878903227da13188f25d
|
e229f6c3523248df1c27bbff4821521d2bb55d41
|
refs/heads/master
| 2020-04-22T03:52:58.840676
| 2019-03-22T07:30:31
| 2019-03-22T07:30:31
| 170,104,034
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 339
|
py
|
from . import Base
from sqlalchemy import Column, Integer, String
class SiteModel(Base):
    """Declarative mapping for the ``attachment`` table.

    NOTE(review): the class is named ``SiteModel`` but maps the
    ``attachment`` table -- looks like a copy/paste leftover; confirm
    the intended class name with callers before renaming.
    """
    __tablename__ = "attachment"
    # surrogate primary key
    id = Column(Integer, primary_key=True)
    # id and name of the record this attachment belongs to
    key_id = Column(Integer)
    key_name = Column(String(20))
    # original file name, storage path, and size (presumably bytes -- unverified)
    attach_name = Column(String(50))
    attach_path = Column(String(128))
    attach_size = Column(Integer)
|
[
"songliyang@goldmantis.com"
] |
songliyang@goldmantis.com
|
635d61c1b9d00eb46de3010890fb36d8affd21d5
|
1825283527f5a479204708feeaf55f4ab6d1290b
|
/leetcode/python/929/sol.py
|
4ca03ad9e40000368db4efd1f7ca259c8f929e90
|
[] |
no_license
|
frankieliu/problems
|
b82c61d3328ffcc1da2cbc95712563355f5d44b5
|
911c6622448a4be041834bcab25051dd0f9209b2
|
refs/heads/master
| 2023-01-06T14:41:58.044871
| 2019-11-24T03:47:22
| 2019-11-24T03:47:22
| 115,065,956
| 1
| 0
| null | 2023-01-04T07:25:52
| 2017-12-22T02:06:57
|
HTML
|
UTF-8
|
Python
| false
| false
| 802
|
py
|
Python ~10 lines ~ easy to understand
https://leetcode.com/problems/unique-email-addresses/discuss/254862
* Lang: python3
* Author: nicolime
* Votes: 1
```
class Solution:
def numUniqueEmails(self, emails):
uniques = set() # A set can not contain duplicates
for email in emails:
name, domain = email.split("@")
if "+" in name:
name = name.split("+")[0].replace(".", "") # grab everything before "+", remove "."
else:
                name = name.replace(".", "") # remove "."
cleanEmail = name + "@" + domain # reassemble emails
uniques.add(cleanEmail) # add cleanEmail to set, which will not accept duplicates
return len(uniques) # return length of uniques to get number of uniques
```
|
[
"frankie.y.liu@gmail.com"
] |
frankie.y.liu@gmail.com
|
65f365961dd31a1f586d01b5491d30c8738d9a8f
|
fab14fae2b494068aa793901d76464afb965df7e
|
/benchmarks/ltl_maxplus/f3/maxplus_16_6.py
|
11eeca15b195545bd7256527f67cd20d80219419
|
[
"MIT"
] |
permissive
|
teodorov/F3
|
673f6f9ccc25acdfdecbfc180f439253474ba250
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
refs/heads/master
| 2023-08-04T17:37:38.771863
| 2021-09-16T07:38:28
| 2021-09-16T07:38:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 29,948
|
py
|
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_true, msat_make_false
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_rational_type
from mathsat import msat_make_and as _msat_make_and
from mathsat import msat_make_or as _msat_make_or
from mathsat import msat_make_not
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next
def msat_make_and(menv: msat_env, *args):
    """N-ary conjunction: left-fold the binary mathsat AND over *args*.

    No arguments yields True; a single argument is returned unchanged.
    """
    if not args:
        return msat_make_true(menv)
    acc = args[0]
    for term in args[1:]:
        acc = _msat_make_and(menv, acc, term)
    return acc
def msat_make_or(menv: msat_env, *args):
    """N-ary disjunction: left-fold the binary mathsat OR over *args*.

    No arguments yields False; a single argument is returned unchanged.
    """
    if not args:
        return msat_make_false(menv)
    acc = args[0]
    for term in args[1:]:
        acc = _msat_make_or(menv, acc, term)
    return acc
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """arg0 - arg1, encoded as arg0 + (-1 * arg1)."""
    minus_one = msat_make_number(menv, "-1")
    negated = msat_make_times(menv, arg1, minus_one)
    return msat_make_plus(menv, arg0, negated)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """arg0 < arg1, as the negation of arg0 >= arg1."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """arg0 >= arg1, expressed via the primitive <= with swapped operands."""
    return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """arg0 > arg1, as the negation of arg0 <= arg1."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """arg0 -> arg1, rewritten as (not arg0) or arg1."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
                                                   msat_term, msat_term):
    """Build a 16-variable max-plus transition system and its LTL property.

    For each state variable x_j the next value x_j' is the max-plus product
    of one matrix row with the current state: x_j' >= x_src + d for every
    (src, d) entry in the row, and x_j' equals at least one of those sums,
    i.e. x_j' = max over the row of (x_src + d).

    Returns (TermMap mapping current to next variables, init, trans, ltl).
    """
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    real_type = msat_get_rational_type(menv)
    names = ["x_{}".format(i) for i in range(16)]
    xs = [msat_make_constant(menv,
                             msat_declare_function(menv, name, real_type))
          for name in names]
    x_xs = [msat_make_constant(menv,
                               msat_declare_function(menv, name_next(name),
                                                     real_type))
            for name in names]
    curr2next = {x: x_x for x, x_x in zip(xs, x_xs)}

    # Max-plus "matrix": row j lists the (source index, delay) pairs that
    # constrain x_j'. This table replaces ~450 lines of unrolled, copy-pasted
    # constraint-building code; the generated terms are identical.
    delays = [
        [(0, 19), (4, 17), (5, 19), (10, 9), (11, 17), (12, 6), (13, 1), (15, 2)],
        [(0, 6), (2, 10), (4, 7), (6, 14), (8, 5), (9, 18), (11, 6), (15, 17)],
        [(1, 16), (3, 7), (6, 16), (7, 8), (8, 14), (9, 3), (10, 8), (12, 20)],
        [(0, 15), (1, 18), (2, 11), (6, 11), (7, 8), (8, 17), (10, 11), (15, 14)],
        [(0, 17), (2, 15), (3, 17), (5, 13), (7, 4), (9, 12), (10, 7), (12, 16)],
        [(0, 8), (1, 2), (5, 9), (7, 1), (8, 8), (11, 5), (12, 2), (15, 8)],
        [(0, 1), (1, 15), (6, 19), (8, 1), (9, 10), (12, 9), (13, 5), (14, 18)],
        [(2, 12), (3, 15), (4, 4), (5, 20), (8, 7), (11, 16), (12, 8), (13, 5)],
        [(3, 14), (5, 4), (6, 6), (7, 17), (8, 16), (12, 7), (14, 3), (15, 4)],
        [(1, 12), (3, 5), (7, 15), (8, 13), (9, 3), (10, 15), (13, 14), (15, 17)],
        [(1, 11), (2, 7), (3, 13), (6, 13), (9, 1), (13, 1), (14, 6), (15, 5)],
        [(0, 18), (1, 1), (3, 17), (5, 12), (8, 4), (9, 8), (13, 5), (15, 2)],
        [(2, 8), (3, 3), (4, 14), (5, 4), (7, 5), (8, 10), (12, 3), (14, 16)],
        [(1, 12), (2, 16), (5, 15), (6, 12), (8, 20), (9, 12), (11, 12), (13, 12)],
        [(0, 4), (2, 4), (3, 18), (5, 13), (6, 17), (7, 19), (9, 3), (10, 20)],
        [(0, 8), (1, 8), (5, 2), (6, 7), (8, 8), (9, 20), (11, 4), (15, 5)],
    ]

    init = msat_make_true(menv)
    trans = msat_make_true(menv)
    # transitions: one conjunct per target variable
    for tgt, row in enumerate(delays):
        exprs = [msat_make_plus(menv, xs[src],
                                msat_make_number(menv, "{}.0".format(d)))
                 for src, d in row]
        # x_tgt' is an upper bound of every x_src + d ...
        _t = msat_make_and(menv,
                           *(msat_make_geq(menv, x_xs[tgt], e) for e in exprs))
        # ... and coincides with at least one of them: x_tgt' = max(...)
        _t = msat_make_and(menv, _t,
                           msat_make_or(menv,
                                        *(msat_make_equal(menv, x_xs[tgt], e)
                                          for e in exprs)))
        trans = msat_make_and(menv, trans, _t)

    # ltl property: (! ((! (x_12 - x_13 >= 17)) U (! (X (G (x_10 - x_11 > 7))))))
    ltl = msat_make_not(menv, enc.make_U(msat_make_not(menv, msat_make_geq(menv, msat_make_minus(menv, xs[12], xs[13]), msat_make_number(menv, "17"))), msat_make_not(menv, enc.make_X(enc.make_G(msat_make_gt(menv, msat_make_minus(menv, xs[10], xs[11]), msat_make_number(menv, "7")))))))
    return TermMap(curr2next), init, trans, ltl
|
[
"en.magnago@gmail.com"
] |
en.magnago@gmail.com
|
f9976fe4effc3d57123157cfa50fb63064d50e1e
|
a2fdc8281889aee3379e57b4147450819ffca3ef
|
/ewok/defaults/model.py
|
64c0b214fe0da49b123113ae9978ab274fcff3c3
|
[
"Apache-2.0"
] |
permissive
|
HappyKian/fv3-jedi
|
4cada6994e6ce388253f74666e4de7df7ba55704
|
6b0b1806c9ac3d9262301465cb4483972e83a33f
|
refs/heads/master
| 2023-01-03T00:39:00.537721
| 2020-10-28T21:09:55
| 2020-10-28T21:09:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,998
|
py
|
import os
import datetime
import ewok
import solo
__all__ = ["fc_file", "obs_file", "r2d2_obsfile", "r2d2_anfile", "r2d2_fixfile"]
def fc_file(fcout, step):
    """Return a dict describing the forecast output file for *step*."""
    fname = '.'.join([fcout['exp'], fcout['type'], fcout['date'],
                      ewok.jediformat(step)])
    return {
        'date': fcout['date'],
        'filename': os.path.join(fcout['datadir'], fname),
    }
def obs_file(conf):
    """Return the input observation file path configured under 'obsdatain'."""
    return conf['obsdatain']
def r2d2_obsfile(conf, date):
    """Return the R2D2 observation file name for *date*."""
    return '.'.join(['fv3jedi', conf['source'], ewok.jediformat(date), 'nc'])
def _get_restarts(conf, date):
# rdate = date.strftime('%Y%m%d.%H')
ntiles = conf.get('ntiles', 6)
ftypes = conf.get('ftypes', ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'sfc_data'])
ftiles = [f'tile{t+1}' for t in range(0, ntiles)]
restarts = []
fname = 'coupler.res'
restarts.append(fname)
for ftype in ftypes:
for tile in ftiles:
# fname = f'{rdate}0000.{ftype}.{tile}.nc'
fname = f'{ftype}.{tile}.nc'
restarts.append(fname)
return restarts
def _get_fv3files(conf):
npz = conf.get('npz', 64)
res = conf.get('resolution', 'c12')
flist = conf.get('fv3filesList', [f'akbk{npz}.nc4', 'field_table', 'fmsmpp.nml', f'input_gfs_{res}.nml', 'inputpert_4dvar.nml'])
fv3files = []
for fname in flist:
fv3files.append(fname)
return fv3files
def _get_fieldsets(conf):
flist = conf.get('fieldsetsList', ['dynamics.yaml'])
fieldsets = []
for fname in flist:
fieldsets.append(fname)
return fieldsets
def r2d2_anfile(conf, date):
    """Group analysis file names into restarts, fv3files and fieldsets."""
    return {
        'restarts': _get_restarts(conf, date),
        'fv3files': _get_fv3files(conf),
        'fieldsets': _get_fieldsets(conf),
    }
|
[
"noreply@github.com"
] |
HappyKian.noreply@github.com
|
63c3f98e28c00d180508917297d259500726b083
|
61f7c9a9a6a0b86699a9431301e062e117bba4d4
|
/day 6/6G.py
|
833a4949e03860191b8321c0299c63c973893fb6
|
[] |
no_license
|
Aman-Mittal/AmanSTPython
|
154afb255d70778dd55aa3f9223e7f4bd4309798
|
aca17971f5590edeb58ab748000b28e265eb794a
|
refs/heads/master
| 2020-06-01T07:01:26.122822
| 2019-07-08T07:47:48
| 2019-07-08T07:47:48
| 190,689,841
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 192
|
py
|
def add(a, b, c):
    """Return the sum of three values."""
    return a + b + c


x = [1, 2, 3, 4, 5, 6]
y = [10, 20, 30, 40, 50, 60]
z = [11, 22, 33, 44, 55, 66]

# Element-wise three-list sum, computed twice: once inline, once via add().
res = [p + q + r for p, q, r in zip(x, y, z)]
print(res)
res = list(map(add, x, y, z))
print(res)
|
[
"noreply@github.com"
] |
Aman-Mittal.noreply@github.com
|
28dff4ca54efb8cc73b2ea7ac20c6237bc783965
|
9ac2c85de287a64105f18636caf132dcfd727ffc
|
/Lesson-3/00_Solution/project.py
|
6a065c36144c4cc38939c464d4e998d30ce13cff
|
[] |
no_license
|
davidtran641/Full-Stack-Foundations
|
2744761f0d0910f05ec912c0d9fd213993ee7908
|
ddcf7b85559697049324e6e282fadf5a10b522ea
|
refs/heads/master
| 2022-07-01T12:59:57.206366
| 2020-05-16T17:13:12
| 2020-05-16T17:13:12
| 261,761,782
| 0
| 0
| null | 2020-05-06T13:08:44
| 2020-05-06T13:08:43
| null |
UTF-8
|
Python
| false
| false
| 3,398
|
py
|
from flask import Flask, render_template, request, redirect, url_for, jsonify
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem
# Flask app plus a single module-level SQLAlchemy session shared by all
# request handlers.
app = Flask(__name__)
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.bind = engine  # legacy SQLAlchemy "bound metadata" style
DBSession = sessionmaker(bind=engine)
# NOTE(review): one global session for every request -- presumably fine for
# this tutorial app; confirm before any multi-threaded deployment.
session = DBSession()
@app.route('/restaurants/<int:restaurant_id>/menu/JSON')
def restaurantMenuJSON(restaurant_id):
    """Return every menu item of one restaurant as JSON."""
    # Look up the restaurant first so a bad id fails fast (.one() raises).
    restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
    menu_items = (session.query(MenuItem)
                  .filter_by(restaurant_id=restaurant_id)
                  .all())
    return jsonify(MenuItems=[entry.serialize for entry in menu_items])
@app.route('/restaurants/<int:restaurant_id>/menu/<int:menu_id>/JSON')
def menuItemJSON(restaurant_id, menu_id):
    """Return a single menu item as JSON."""
    menu_item = session.query(MenuItem).filter_by(id=menu_id).one()
    return jsonify(MenuItems=menu_item.serialize)
@app.route('/')
@app.route('/restaurants/<int:restaurant_id>/menu')
def restaurantMenu(restaurant_id):
    """Render the menu page for one restaurant."""
    # NOTE(review): the bare '/' route supplies no restaurant_id, so a GET /
    # would call this view without its required argument -- confirm intended.
    restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
    menu_items = session.query(MenuItem).filter_by(restaurant_id=restaurant_id)
    return render_template('menu.html',
                           restaurant=restaurant,
                           items=menu_items,
                           restaurant_id=restaurant_id)
@app.route('/restaurants/<int:restaurant_id>/new', methods=['GET', 'POST'])
def newMenuItem(restaurant_id):
    # GET renders the creation form; POST persists the new item and redirects.
    if request.method == 'POST':
        newItem = MenuItem(name=request.form['name'], restaurant_id=restaurant_id)
        session.add(newItem)
        session.commit()
        # NOTE(review): `flash` is not in the `from flask import ...` list at
        # the top of this file, so this line raises NameError at runtime --
        # `flash` must be imported from flask.
        flash("new menu item created!")
        return redirect(url_for('restaurantMenu', restaurant_id=restaurant_id))
    else:
        return render_template('newmenuitem.html', restaurant_id=restaurant_id)
@app.route('/restaurants/<int:restaurant_id>/<int:menu_id>/edit',
           methods=['GET', 'POST'])
def editMenuItem(restaurant_id, menu_id):
    """Edit one menu item.

    GET renders the edit form; POST applies every non-empty form field to
    the item, commits, and redirects back to the restaurant's menu.
    """
    editedItem = session.query(MenuItem).filter_by(id=menu_id).one()
    if request.method == 'POST':
        if request.form['name']:
            editedItem.name = request.form['name']
        if request.form['description']:
            # BUGFIX: previously copied request.form['name'] into description.
            editedItem.description = request.form['description']
        if request.form['price']:
            editedItem.price = request.form['price']
        if request.form['course']:
            editedItem.course = request.form['course']
        session.add(editedItem)
        session.commit()
        flash("menu {} was edited!".format(editedItem.name))
        return redirect(url_for('restaurantMenu', restaurant_id=restaurant_id))
    else:
        return render_template(
            'editmenuitem.html', restaurant_id=restaurant_id, menu_id=menu_id, item=editedItem)
@app.route('/restaurants/<int:restaurant_id>/<int:menu_id>/delete',
           methods=['GET', 'POST'])
def deleteMenuItem(restaurant_id, menu_id):
    """Confirm (GET) and perform (POST) deletion of one menu item."""
    doomed = session.query(MenuItem).filter_by(id=menu_id).one()
    if request.method != 'POST':
        # Show the confirmation page first.
        return render_template('deletemenuitem.html', item=doomed)
    session.delete(doomed)
    session.commit()
    flash("menu {} was deleted!".format(doomed.name))
    return redirect(url_for('restaurantMenu', restaurant_id=restaurant_id))
if __name__ == '__main__':
    app.secret_key = 'super_secret_key'  # required for flash()/sessions; use a real secret in production
    app.debug = True  # auto-reloader + tracebacks; never enable in production
    app.run(host='0.0.0.0', port=5000)
|
[
"davidtran641@gmail.com"
] |
davidtran641@gmail.com
|
10874d0c64af265673a712938c1facd93764ad24
|
770989d54a53a431a9fc7b5d292c109e534176ac
|
/InterviewQ/reverseArray.py
|
bf88e0eb8a8868dd3aa95450a52f8daf6f490b40
|
[] |
no_license
|
pawansingh10/PracticeDemo
|
7d21c9cd2007213b25ae972f22cfa08cf8ccaa40
|
ea6a74c12ea5964a04ceffaa2976638b4cf82a70
|
refs/heads/master
| 2022-12-09T15:31:05.428684
| 2020-09-02T11:00:28
| 2020-09-02T11:00:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 222
|
py
|
def rev_Array(arr):
    """Return a new list with the elements of *arr* in reverse order.

    Uses an extended slice instead of the original index-by-index append
    loop; the result is the same for any sequence length, including empty.
    """
    return arr[::-1]
# Read whitespace-separated integers from stdin into a list.
arr=list(map(int,input("Enter an Array:").split()))
print("Reverse Array:",end='')
# *-unpack so the reversed values print space-separated on one line.
print(*rev_Array(arr))
|
[
"pawansinghkk109@gmail.com"
] |
pawansinghkk109@gmail.com
|
41bcafcaef04b9da2aadb08ac8fb1be12cfb70c4
|
4617b848dc4cb61e59d7a595654bbe69a1b50893
|
/Tests/T3/T3_Q6.py
|
f8a7f4eb66b1a1970fe552106405260c8ece68a5
|
[] |
no_license
|
elliottgreen/PythonAT2021
|
4965af7cf36643a0976cedd4737938375ddbafa0
|
2bcafa7a907531f6282539f5e657df20e8683fdf
|
refs/heads/main
| 2023-05-06T07:02:42.344149
| 2021-05-28T15:07:57
| 2021-05-28T15:07:57
| 369,278,793
| 0
| 11
| null | 2021-05-28T14:57:58
| 2021-05-20T16:54:24
|
Python
|
UTF-8
|
Python
| false
| false
| 69
|
py
|
# Quiz: what does inserting at index 1 while iterating do?
m = [1, 2, 3]
idx = 0
while idx < 3:  # range(len(m)) is evaluated once, so the bound stays 3
    m.insert(1, m[idx])
    idx += 1
print(m)  # -> [1, 1, 1, 1, 2, 3]
|
[
"omar.elliottgreen1@ieee.org"
] |
omar.elliottgreen1@ieee.org
|
befc3a1ebc84b0d5ec2415c8f92c5f2ba0dbc566
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_354/ch15_2020_09_19_15_43_50_033099.py
|
23a938ac0b71197f14316278183f5546039a2f99
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 150
|
py
|
# Ask for the user's name and greet accordingly.
x = input("qual seu nome ?")
if x == "Chris":
    print("Todo mundo odeia o Chris")
else:
    # BUGFIX: the original line was missing the closing quote and paren --
    #   print("Olá,{0}.format(x))   -> SyntaxError
    print("Olá,{0}".format(x))
|
[
"you@example.com"
] |
you@example.com
|
692b63ba9e487a87a62c1ee755b3843e5ab9db4b
|
e62dd824081de0f6b7b35a2731184d8b2c872b7d
|
/work/attend_excel/attend_old.py
|
b592b5579b473428b0bda5cb26b6fc28256b51d4
|
[] |
no_license
|
coblan/py2
|
d0231211452e8a27e41e6a85ab736538d0a66113
|
ecf743e027e9f15925e43f05c0b8a86bb88946db
|
refs/heads/master
| 2021-01-24T08:28:41.288136
| 2018-06-07T17:54:05
| 2018-06-07T17:54:05
| 69,465,492
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,001
|
py
|
# -*- encoding:utf8 -*-
"""
用途
=================================================
读取考勤原始记录,配合TMS系统,计算个人考勤情况。
使用步骤
=================================================
1. 调用get_attend_list(path)生成经过日期排序的考勤列表。
2. 遍历考勤列表,
3. 结合TMS提供的信息,生成AttendRecord()对象,调用各种get_函数,计算该条考勤记录其他信息。
4. 保存到数据库。
使用例子:
=================================================
#从考勤原始记录中获取按日期排序的考勤记录
attendlist=get_attend_list(r"D:\myfile\kaoqin_2015_9.xls")
#遍历考勤列表,依次处理记录,并保存到数据库
for i in attendlist:
# 生成一个处理记录的对象。其中: TMS.overtime_from_date(record_date) return "3:20"格式,即认为加班,如果为空字符串,即认为没加班。该函数由外部提供
row = AttendRecord(attend_number=i[0],workstart=i[4],workleave=i[5],record_date=i[3].strftime("%Y/%m/%d"),day_type="workday",overtime_from_date=TMS.overtime_from_date)
# TMS.save()用于保存到TMS数据库
TMS.save(kaonumber=row.attend_number, late_person =row.get_late_person(),....)
其他
===============================================
1. mytime.Time类,用于包装时间(内部具有hour,minute),实现加,减,数乘运算。如果运算后为负,则返回Time(0)。
Time.__str__产生格式为:"8:30",Time.strptime()接收"8:30"格式字符串,生成Time对象。Time(hour=8,minute=20)也可以生成Time对象
2. xlrd库用于读取03excel文件
"""
import re
from datetime import datetime,date,timedelta
import xlrd
from mytime import Time
def get_attend_list(path):
    """Return attendance rows sorted by (attend_number, record_date).

    Each row is [attend_number, name, department, record_date,
    workstart, workleave]; `path` is the raw 03-format Excel workbook.
    """
    rows = div_time_col(read_raw_03(path))
    rows.sort(cmp=kaolist_cmp)  # py2 comparator-based sort
    return rows
def read_raw_03(path):
    """Read the raw attendance workbook (first sheet) without processing.

    Row 0 is the header and is skipped; every other row is returned as a
    list of cell values.
    """
    workbook = xlrd.open_workbook(path, encoding_override='gbk')
    sheet = workbook.sheets()[0]
    return [sheet.row_values(r) for r in range(1, sheet.nrows)]
def div_time_col(attendlist):
    """Split the punch-time string column in place.

    Input rows are [attend_number, name, department, record_date, timestr];
    on return column 4 holds the earliest punch and an appended column 5
    holds the latest one.  The same list object is returned.
    """
    for record in attendlist:
        first, last = find_min_max(record[4])
        record[4] = first
        record.append(last)
    return attendlist
def find_min_max(timestr):
    """Parse a space-separated punch-time string; return (earliest, latest).

    Returns ('', '') when the string is empty/blank.
    """
    stripped = timestr.strip()
    if not stripped:
        return '', ''
    pieces = stripped.split(u' ')
    if not pieces:
        return '', ''
    times = sorted(Time.strptime(piece) for piece in pieces)
    return times[0], times[-1]
def kaolist_cmp(x, y):
    """py2 comparator: order rows by attendance number, then record date."""
    if x[0] != y[0]:
        return cmp(x[0], y[0])
    x_date = datetime.strptime(x[3], "%Y/%m/%d").date()
    y_date = datetime.strptime(y[3], "%Y/%m/%d").date()
    return cmp(x_date, y_date)
class AttendRecord(object):
    """Processes a single attendance record.

    Combines the raw punch times with shift information supplied by the TMS
    system and derives lateness, overtime, early-leave and workspan figures.
    All times are exchanged as "H:MM" strings and parsed with mytime.Time.
    Getters return '' (empty string) when a figure is not applicable.
    """
    def __init__(self, attend_number, workstart, workleave, record_date,
                 workshift, day_type, overtime_from_date):
        """
        attend_number: employee attendance id, e.g. "1105" (string)
        workstart: clock-in time, "8:30" string ('' when absent)
        workleave: clock-out time, "17:50" string
        record_date: date of the record, "2015/10/1" string
        workshift: shift description, e.g. "8:30-17:30"; anything that does
            not parse as "H:MM-H:MM" is treated as flexible working hours
        day_type: one of "workday", "restday", or an explicit "8:30-17:50"
            override that takes precedence over workshift
        overtime_from_date: callable(attend_number, date_str) -> "2:10" style
            string ('' means no overtime); supplied by the caller (TMS)
            because the previous day's overtime is not in this record

        Public API used by callers: get_late_team, get_late_person, get_note,
        get_over_time, get_workspan, get_early_leave, get_sub_sequence.
        """
        self.attend_number = attend_number
        self.day_type = day_type
        self.workshift = workshift
        self.workstart = workstart
        self.workleave = workleave
        self.record_date = record_date
        self.overtime_from_date = overtime_from_date
    def get_workspan(self):
        """Return the time worked as an "8:20" string ('' when unknown)."""
        if self.workstart == '':
            return ''
        elif self.workstart == self.workleave:
            # Only one punch recorded for the day -> no usable span.
            return ''
        else:
            workstart = Time.strptime(self.workstart)
            workleave = Time.strptime(self.workleave)
            if workstart <= Time(12, 30):
                # Morning start: exclude the 12:30-13:30 lunch break.
                morning = Time(12, 30) - workstart
                afternoon = workleave - Time(13, 30)
                return str(morning + afternoon)
            elif workstart > Time(13, 30):
                # BUGFIX: was Time(1, 30) (1:30 AM), which made this branch
                # swallow every afternoon start and skip the lunch handling.
                return str(workleave - workstart)
            else:
                # Clocked in during the lunch break: count from its end.
                # BUGFIX: was Time(1, 30) here as well.
                return str(workleave - Time(13, 30))
    def get_note(self):
        """Return 'restday', 'not work' (absent), 'single' (one punch) or ''."""
        if self.day_type == "restday":
            return "restday"
        elif self.get_sud_start() == '':
            # Flexible schedule: nothing to flag.
            return ''
        elif self.workstart == '' and self.get_sud_start() != '':
            return "not work"
        elif self.workstart != '' and self.workstart == self.workleave:
            return 'single'
        else:
            return ''
    def get_late_time(self):
        """Internal: lateness as an "H:MM" string ('' when not applicable).

        Rest days and flexible schedules are never late.  After a valid
        (non-weekend) overtime day the expected start is relaxed to 10:00,
        unless the shift already starts at 10:00 or later.
        """
        if self.day_type == "restday":
            return ''
        sud_start = self.get_sud_start()
        if sud_start == '':
            return ''
        workstart = Time.strptime(self.workstart)
        sud_start = Time.strptime(sud_start)
        if self.is_lastday_valid_overtime() and sud_start < Time(10):
            sud_start = Time(10)  # weekday overtime -> 10:00 start next day
        late = workstart - sud_start
        return str(late)
    def get_late_team(self):
        """Team-attributed lateness ("3:20"); '' within the 15-minute grace."""
        late = Time.strptime(self.get_late_time())
        if late > Time(0, 15):
            return str(late - Time(0, 15))
        # BUGFIX: previously fell through and returned None instead of ''
        # like every other getter in this class.
        return ''
    def get_late_person(self):
        """Personal lateness penalty ("3:20"), scaled by severity band."""
        late = Time.strptime(self.get_late_time())
        if Time(0, 15) <= late <= Time(1):
            return str(late - Time(0, 15))
        elif Time(1) <= late < Time(2):
            return str((late - Time(0, 15)) * 2)
        elif Time(2) <= late:
            return str((late - Time(0, 15)) * 3)
        else:
            return ''
    def get_over_time(self):
        """Overtime ("3:20"): anything after 20:00 on a non-rest day."""
        if self.day_type != "restday":
            workleave = Time.strptime(self.workleave)
            return str(workleave - Time(20))
        else:
            return ''
    def get_early_leave(self):
        """Early-leave duration ("2:20"); '' when not applicable."""
        if self.day_type == "restday":
            return ''
        if self.workleave == '':
            return ''
        sud_leave = self.get_sud_leave()
        if sud_leave == '':
            return ''
        else:
            sud_leave = Time.strptime(sud_leave)
            workleave = Time.strptime(self.workleave)
            return str(sud_leave - workleave)
    def get_sud_leave(self):
        """Internal: scheduled leave time ('' for flexible schedules).

        An explicit "H:MM-H:MM" day_type overrides the workshift.
        """
        if self.day_type == "workday":
            mt = re.match(r"(.*)-(.*)", self.workshift)
            if mt:
                return mt.group(2)
            else:
                return ''
        elif self.day_type == "restday":
            return ''
        else:
            mt = re.match(r"(.*)-(.*)", self.day_type)
            if mt:
                return mt.group(2)
            else:
                raise ValueError("day_type must be one of ['workday','restday','8:50-17:50'...]")
    def get_sud_start(self):
        """Internal: scheduled start time ('' for flexible schedules).

        1. "workday": parse workshift; unparseable -> flexible ('').
        2. "restday": ''.
        3. Otherwise day_type itself must be "H:MM-H:MM", else ValueError.
        """
        if self.day_type == "workday":
            mt = re.match(r"(.*)-(.*)", self.workshift)
            if mt:
                return mt.group(1)
            else:
                return ''
        elif self.day_type == "restday":
            return ''
        else:
            mt = re.match(r"(.*)-(.*)", self.day_type)
            if mt:
                return mt.group(1)
            else:
                raise ValueError("day_type must be one of ['workday','restday','8:50-17:50'...]")
    def get_sub_sequence(self):
        """Lateness grade: late1 <=15min, late2 <=1h, late3 <=2h, late4 >2h."""
        if self.day_type == "restday":
            return ''
        elif self.workleave == self.workstart:
            # Covers both "single punch" and "no punches at all".
            return ''
        late = Time.strptime(self.get_late_time())
        if late == Time(0):
            return ''
        elif Time(0) < late <= Time(0, 15):
            return "late1"
        elif Time(0, 15) < late <= Time(1):
            return "late2"
        elif Time(1) < late <= Time(2):
            return "late3"
        elif Time(2) < late:
            return "late4"
        else:
            return 'NotNormal'
    def is_lastday_valid_overtime(self):
        """True when the previous day was a Mon-Thu with recorded overtime."""
        lastday = datetime.strptime(self.record_date, "%Y/%m/%d").date() - timedelta(days=1)
        lastovertime = Time.strptime(
            self.overtime_from_date(self.attend_number, lastday.strftime("%Y/%m/%d")))
        # weekday() 0-3 == Monday-Thursday: Friday/weekend overtime does not
        # relax the next day's start time.
        if lastday.weekday() in [0, 1, 2, 3] and lastovertime != Time(0):
            return True
        else:
            return False
if __name__ =='__main__':
    # Ad-hoc smoke test of AttendRecord with a stubbed overtime callback.
    def func(attend_number,date_):
        # Stub for overtime_from_date.
        # NOTE(review): AttendRecord passes date_ as a "%Y/%m/%d" string, so
        # comparing it against a datetime.date never matches -- confirm intended.
        if date_ == date(2015,10,8):
            return str(Time(0,5))
        return ""
    obj =AttendRecord(attend_number="1001", workstart="9:30", workleave="17:50", record_date="2015/10/9", workshift="8:30-17:30",day_type="workday",overtime_from_date=func)
    print( obj.get_early_leave() )
    print(obj.get_over_time())
    print(obj.get_workspan())
    print(obj.get_late_person())
    print(obj.get_sub_sequence())
    #kaolist1= [['1001',"name1","dp1","2015/9/10","8:30","17:50"],
    #['1001',"name1","dp1","2015/9/9","9:20","17:40"],
    #["1000","name2","dp1","2015/9/20","9:30","17:50"]]
    #print(kaolist1)
    #kaolist1.sort(cmp=kaolist_cmp)
    #print(kaolist1)
|
[
"heyulin@STM-PC-108.stm.com"
] |
heyulin@STM-PC-108.stm.com
|
381f401abfdf7445342d4318aa66faf4e81b4596
|
d6d20681f41102df3feb2b438ef80569bd73730f
|
/Uge8/.history/classExercise_20200316095619.py
|
2d0e60ae4204bec5e2d9fdf264781a3865a546d5
|
[] |
no_license
|
MukHansen/pythonAfleveringer
|
d0ad2629da5ba2b6011c9e92212949e385443789
|
4107c3c378f757733961812dd124efc99623ff2e
|
refs/heads/master
| 2020-12-22T13:27:19.135138
| 2020-05-22T11:35:52
| 2020-05-22T11:35:52
| 236,796,591
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,770
|
py
|
import datetime
import pymysql
# This Works
# cnx = pymysql.connect(user='dev', password='ax2',host='127.0.0.1',port=3307,db='3rdSemExam')
# cursor = cnx.cursor()
# query = ("SELECT firstname, lastname, id FROM DRIVER")
# # hire_start = datetime.date(1960, 1, 1)
# # hire_end = datetime.date(2004, 12, 31)
# # cursor.execute(query, (hire_start, hire_end))
# cursor.execute(query)
# for (firstname, lastname, id) in cursor:
# print("Name {} {} ---- ID {}".format(firstname, lastname, id))
# cursor.close()
# cnx.close()
# This Works
# cnx = pymysql.connect(user='dev', password='ax2',host='127.0.0.1',port=3307,db='pythonTest')
# cursor = cnx.cursor()
# query = ("SELECT firstname, lastname, startdate, enddate, salary FROM pythondemo WHERE startdate BETWEEN %s AND %s")
# hire_start = datetime.date(1960, 1, 1)
# hire_end = datetime.date(2004, 12, 31)
# cursor.execute(query, (hire_start, hire_end))
# for (firstname, lastname, startdate, enddate, salary) in cursor:
# print("{} {} hired from {} to {} is paid: {} DKR pr month".format(firstname, lastname, startdate, enddate, salary))
# cursor.close()
# cnx.close()
cnx = pymysql.connect(user='dev', password='ax2', host='127.0.0.1', port=3307, db='pythonTest')
cursor = cnx.cursor()
# Table/column names cannot be bound as query parameters -- only values can --
# so the table name is written directly into the SQL text.
add_person = ("INSERT INTO person "
              "(personId, firstName, lastName) "
              "VALUES (%(personId)s, %(firstName)s, %(lastName)s)")
myDict = {"personId": 111, "firstName": "John", "lastName": "Johnson"}
# Named %(name)s placeholders are filled straight from the dict.  This fixes
# the original statement, which mixed a positional %s table placeholder with
# named placeholders, passed a mismatched tuple, and was missing a closing
# parenthesis (a SyntaxError).  The follow-up loop that unpacked SELECT
# columns from an INSERT cursor is removed: an INSERT yields no rows.
cursor.execute(add_person, myDict)
cnx.commit()  # pymysql does not autocommit by default
cursor.close()
cnx.close()
|
[
"cph-mh752@cphbusiness.dk"
] |
cph-mh752@cphbusiness.dk
|
24b5bcfbab25afa1e10b2be8356f0cb21dc32a49
|
8259cdd11ecded1712d8e236842b2e2cbaa77d87
|
/element/manager/mongo.py
|
990e982888b8b7e3618714e0409780ebbd602b55
|
[
"MIT"
] |
permissive
|
plouc/python-element
|
5c5a3197cfceba88d039f8b188c5b6116c6e9b05
|
4aee6c665549065f8b89c61270c067f3d4baddd3
|
refs/heads/master
| 2021-01-18T02:33:32.778349
| 2014-03-03T00:07:38
| 2014-03-03T00:07:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,145
|
py
|
from bson.objectid import ObjectId
from bson.dbref import DBRef
import pymongo
class InvalidTreeState(Exception):
    # Raised when the stored tree is inconsistent (missing parent or path).
    pass
class InvalidDataFormat(Exception):
    # Raised when a node dict is missing a required property (e.g. `slug`).
    pass
class MongoManager(object):
    """
    Loads and stores node definitions in a MongoDB collection.

    Nodes are plain dicts; each carries a `slug` and a materialized `path`
    derived from its parent chain, kept unique by an index on `path`.
    """
    def __init__(self, client, database, collection, logger=None):
        self.client = client
        self.database = database
        self.collection = collection
        self.logger = logger
        # Materialized paths must stay unique across the whole tree.
        self.get_collection().ensure_index([("path", pymongo.ASCENDING)], 300, **{
            "name": "path",
            "unique": True,
            "background": False,
            "sparse": False,
        })
    def get_collection(self):
        """Return the pymongo collection this manager operates on."""
        return self.client[self.database][self.collection]
    def get_id(self, id):
        """Coerce a string (or ObjectId) into an ObjectId."""
        if isinstance(id, ObjectId):
            return id
        return ObjectId(id)
    def retrieve(self, id):
        """Fetch one node by id; None when it does not exist."""
        data = self.get_collection().find_one({"_id": self.get_id(id)})
        if not data:
            return None
        return self.normalize([data])[0]
    def exists(self, id):
        """Return True when a node with the given id exists."""
        return self.get_collection().find({"_id": self.get_id(id)}).count() > 0
    def delete(self, id):
        """Remove a node; returns the number of removed documents."""
        result = self.get_collection().remove(self.get_id(id), j=True)
        return result[u'n']
    def resolve_parents(self, data):
        """Ensure the node always carries a `parent` key (None for roots)."""
        if 'parent' not in data:
            data['parent'] = None
    def fix_paths(self, data):
        """Recompute `data['path']` from the parent's path and the node's slug."""
        path = False
        if not data['parent']:  # root node: path prefix is empty
            path = ""
            if 'slug' not in data:
                raise InvalidDataFormat("No slug property defined for the data")
        if data['parent']:
            parent = self.retrieve(data['parent'])
            if not parent:
                # ROBUSTNESS: data may only carry `_id` at this point (save()
                # sets `id` later in normalize); .get avoids a KeyError here.
                raise InvalidTreeState("The parent %s defined in %s does not exist" % (data.get('id'), data['parent']))
            if 'path' not in parent:
                # BUGFIX: replaced a stray py2 debug `print parent` with logging.
                if self.logger:
                    self.logger.error("parent without `path`: %s" % parent)
                raise InvalidTreeState("The parent %s does not contains a `path`" % (parent['id']))
            path = parent['path']
        if path is False:
            raise InvalidTreeState("Unable to resolve the path for %s" % (data))
        data['path'] = "%s/%s" % (path, data['slug'])
    def fix_children(self, data):
        """Propagate a path change to all descendants, depth-first."""
        children = self.get_collection().find({
            'parent': "%s" % data['_id']
        })
        for child in children:
            path = "%s/%s" % (data['path'], child['slug'])
            if child['path'] == path:
                continue  # this subtree is already consistent
            child['path'] = path
            self.get_collection().save(child)
            self.fix_children(child)
    def save(self, id, type, data):
        """
        Save data and resolve the path for the children
        """
        data['type'] = type
        if 'slug' not in data:
            raise InvalidDataFormat("The data must contain a `slug` key: %s" % (data))
        if id:
            data['_id'] = ObjectId(id)
        self.resolve_parents(data)
        self.fix_paths(data)
        data['_id'] = self.get_collection().save(data)
        self.fix_children(data)
        self.normalize([data])
        return data
    def find(self, **kwargs):
        """
        Query nodes.  Supported options:
            - path: path prefix to look up
            - type / types: node type(s)
            - tags: nodes matching any of the tags
            - category: nodes matching the category
            - limit / offset: pagination
        """
        find_kwargs = {
            'spec': {}
        }
        lookup_types = []
        if 'types' in kwargs:
            lookup_types += kwargs['types']
        if 'type' in kwargs:
            lookup_types += [kwargs['type']]
        if len(lookup_types) > 0:
            find_kwargs['spec']['type'] = {'$in': lookup_types}
        if 'tags' in kwargs and kwargs['tags'] and len(kwargs['tags']) > 0:
            find_kwargs['spec']['tags'] = {'$in': kwargs['tags']}
        if 'category' in kwargs and kwargs['category'] != None:
            find_kwargs['spec']['category'] = kwargs['category']
        if 'limit' in kwargs:
            find_kwargs['limit'] = int(kwargs['limit'])
        if 'offset' in kwargs:
            # BUGFIX: pymongo's find() takes `skip`, not `omit` -- the offset
            # was previously not applied.
            find_kwargs['skip'] = int(kwargs['offset'])
        if 'path' in kwargs and kwargs['path']:
            find_kwargs['spec']['path'] = {'$regex': "^" + kwargs['path']}
        if self.logger:
            self.logger.info("%s find:%s" % (self, find_kwargs))
        query = self.get_collection().find(**find_kwargs)
        if 'order_by' in kwargs:
            query.sort(kwargs['order_by'])
        else:
            query.sort([('created_at', pymongo.DESCENDING)])
        return self.normalize(query)
    def find_one(self, **kwargs):
        """Return the first matching node; raises IndexError when none match."""
        return self.find(**kwargs)[0]
    def normalize(self, cursor):
        """Rename `_id` (ObjectId) to a string `id` on every document."""
        nodes = []
        for data in cursor:
            data['id'] = "%s" % data['_id']
            del data['_id']
            nodes.append(data)
        return nodes
|
[
"thomas.rabaix@gmail.com"
] |
thomas.rabaix@gmail.com
|
786148b4ef5dd245706d412cbc4d90c54040786a
|
85043cfb5a7cc86e9f6e22c45925df778062efd5
|
/rapid7vmconsole/api/site_api.py
|
0e8f64cab939759b8aae98e6143e55a8a5018384
|
[
"MIT"
] |
permissive
|
xtenex/vm-console-client-python
|
1b33abb6d8c0d368da16dd182f44a3ad20f090ee
|
3e04e5aa4a15274ec0bcd8be38d306e42b0c9a59
|
refs/heads/master
| 2021-04-27T03:01:56.645918
| 2018-02-14T22:46:34
| 2018-02-14T22:46:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 380,444
|
py
|
# coding: utf-8
"""
InsightVM API
# Overview This guide documents the InsightVM Application Programming Interface (API) Version 3. This API supports the Representation State Transfer (REST) design pattern. Unless noted otherwise this API accepts and produces the `application/json` media type. This API uses Hypermedia as the Engine of Application State (HATEOAS) and is hypermedia friendly. All API connections must be made to the security console using HTTPS. ## Versioning Versioning is specified in the URL and the base path of this API is: `https://<host>:<port>/api/3/`. ## Specification An <a target=\"_blank\" href=\"https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md\">OpenAPI v2</a> specification (also known as Swagger 2) of this API is available. Tools such as <a target=\"_blank\" href=\"https://github.com/swagger-api/swagger-codegen\">swagger-codegen</a> can be used to generate an API client in the language of your choosing using this specification document. <p class=\"openapi\">Download the specification: <a class=\"openapi-button\" target=\"_blank\" download=\"\" href=\"/api/3/json\"> Download </a></p> ## Authentication Authorization to the API uses HTTP Basic Authorization (see <a target=\"_blank\" href=\"https://www.ietf.org/rfc/rfc2617.txt\">RFC 2617</a> for more information). Requests must supply authorization credentials in the `Authorization` header using a Base64 encoded hash of `\"username:password\"`. <!-- ReDoc-Inject: <security-definitions> --> ### 2FA This API supports two-factor authentication (2FA) by supplying an authentication token in addition to the Basic Authorization. The token is specified using the `Token` request header. To leverage two-factor authentication, this must be enabled on the console and be configured for the account accessing the API. ## Resources ### Naming Resource names represent nouns and identify the entity being manipulated or accessed. 
All collection resources are pluralized to indicate to the client they are interacting with a collection of multiple resources of the same type. Singular resource names are used when there exists only one resource available to interact with. The following naming conventions are used by this API: | Type | Case | | --------------------------------------------- | ------------------------ | | Resource names | `lower_snake_case` | | Header, body, and query parameters parameters | `camelCase` | | JSON fields and property names | `camelCase` | #### Collections A collection resource is a parent resource for instance resources, but can itself be retrieved and operated on independently. Collection resources use a pluralized resource name. The resource path for collection resources follow the convention: ``` /api/3/{resource_name} ``` #### Instances An instance resource is a \"leaf\" level resource that may be retrieved, optionally nested within a collection resource. Instance resources are usually retrievable with opaque identifiers. The resource path for instance resources follows the convention: ``` /api/3/{resource_name}/{instance_id}... ``` ## Verbs The following HTTP operations are supported throughout this API. The general usage of the operation and both its failure and success status codes are outlined below. | Verb | Usage | Success | Failure | | --------- | ------------------------------------------------------------------------------------- | ----------- | -------------------------------------------------------------- | | `GET` | Used to retrieve a resource by identifier, or a collection of resources by type. | `200` | `400`, `401`, `402`, `404`, `405`, `408`, `410`, `415`, `500` | | `POST` | Creates a resource with an application-specified identifier. | `201` | `400`, `401`, `404`, `405`, `408`, `413`, `415`, `500` | | `POST` | Performs a request to queue an asynchronous job. 
| `202` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Creates a resource with a client-specified identifier. | `200` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Performs a full update of a resource with a specified identifier. | `201` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `DELETE` | Deletes a resource by identifier or an entire collection of resources. | `204` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `OPTIONS` | Requests what operations are available on a resource. | `200` | `401`, `404`, `405`, `408`, `500` | ### Common Operations #### OPTIONS All resources respond to the `OPTIONS` request, which allows discoverability of available operations that are supported. The `OPTIONS` response returns the acceptable HTTP operations on that resource within the `Allow` header. The response is always a `200 OK` status. ### Collection Resources Collection resources can support the `GET`, `POST`, `PUT`, and `DELETE` operations. #### GET The `GET` operation invoked on a collection resource indicates a request to retrieve all, or some, of the entities contained within the collection. This also includes the optional capability to filter or search resources during the request. The response from a collection listing is a paginated document. See [hypermedia links](#section/Overview/Paging) for more information. #### POST The `POST` is a non-idempotent operation that allows for the creation of a new resource when the resource identifier is not provided by the system during the creation operation (i.e. the Security Console generates the identifier). The content of the `POST` request is sent in the request body. The response to a successful `POST` request should be a `201 CREATED` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. The `POST` to a collection resource can also be used to interact with asynchronous resources. 
In this situation, instead of a `201 CREATED` response, the `202 ACCEPTED` response indicates that processing of the request is not fully complete but has been accepted for future processing. This request will respond similarly with a `Location` header with link to the job-oriented asynchronous resource that was created and/or queued. #### PUT The `PUT` is an idempotent operation that either performs a create with user-supplied identity, or a full replace or update of a resource by a known identifier. The response to a `PUT` operation to create an entity is a `201 Created` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. `PUT` on a collection resource replaces all values in the collection. The typical response to a `PUT` operation that updates an entity is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. #### DELETE The `DELETE` is an idempotent operation that physically deletes a resource, or removes an association between resources. The typical response to a `DELETE` operation is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. ### Instance Resources Instance resources can support the `GET`, `PUT`, `POST`, `PATCH` and `DELETE` operations. #### GET Retrieves the details of a specific resource by its identifier. The details retrieved can be controlled through property selection and property views. The content of the resource is returned within the body of the response in the acceptable media type. #### PUT Allows for and idempotent \"full update\" (complete replacement) on a specific resource. If the resource does not exist, it will be created; if it does exist, it is completely overwritten. Any omitted properties in the request are assumed to be undefined/null. For \"partial updates\" use `POST` or `PATCH` instead. The content of the `PUT` request is sent in the request body. 
The identifier of the resource is specified within the URL (not the request body). The response to a successful `PUT` request is a `201 CREATED` to represent the created status, with a valid `Location` header field set to the URI that can be used to access to the newly created (or fully replaced) resource. #### POST Performs a non-idempotent creation of a new resource. The `POST` of an instance resource most commonly occurs with the use of nested resources (e.g. searching on a parent collection resource). The response to a `POST` of an instance resource is typically a `200 OK` if the resource is non-persistent, and a `201 CREATED` if there is a resource created/persisted as a result of the operation. This varies by endpoint. #### PATCH The `PATCH` operation is used to perform a partial update of a resource. `PATCH` is a non-idempotent operation that enforces an atomic mutation of a resource. Only the properties specified in the request are to be overwritten on the resource it is applied to. If a property is missing, it is assumed to not have changed. #### DELETE Permanently removes the individual resource from the system. If the resource is an association between resources, only the association is removed, not the resources themselves. A successful deletion of the resource should return `204 NO CONTENT` with no response body. This operation is not fully idempotent, as follow-up requests to delete a non-existent resource should return a `404 NOT FOUND`. ## Requests Unless otherwise indicated, the default request body media type is `application/json`. ### Headers Commonly used request headers include: | Header | Example | Purpose | | ------------------ | --------------------------------------------- | ---------------------------------------------------------------------------------------------- | | `Accept` | `application/json` | Defines what acceptable content types are allowed by the client. For all types, use `*/*`. 
| | `Accept-Encoding` | `deflate, gzip` | Allows for the encoding to be specified (such as gzip). | | `Accept-Language` | `en-US` | Indicates to the server the client's locale (defaults `en-US`). | | `Authorization ` | `Basic Base64(\"username:password\")` | Basic authentication | | `Token ` | `123456` | Two-factor authentication token (if enabled) | ### Dates & Times Dates and/or times are specified as strings in the ISO 8601 format(s). The following formats are supported as input: | Value | Format | Notes | | --------------------------- | ------------------------------------------------------ | ----------------------------------------------------- | | Date | YYYY-MM-DD | Defaults to 12 am UTC (if used for a date & time | | Date & time only | YYYY-MM-DD'T'hh:mm:ss[.nnn] | Defaults to UTC | | Date & time in UTC | YYYY-MM-DD'T'hh:mm:ss[.nnn]Z | | | Date & time w/ offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm | | | Date & time w/ zone-offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm[<zone-id>] | | ### Timezones Timezones are specified in the regional zone format, such as `\"America/Los_Angeles\"`, `\"Asia/Tokyo\"`, or `\"GMT\"`. ### Paging Pagination is supported on certain collection resources using a combination of two query parameters, `page` and `size`. As these are control parameters, they are prefixed with the underscore character. The page parameter dictates the zero-based index of the page to retrieve, and the `size` indicates the size of the page. For example, `/resources?page=2&size=10` will return page 3, with 10 records per page, giving results 21-30. The maximum page size for a request is 500. ### Sorting Sorting is supported on paginated resources with the `sort` query parameter(s). The sort query parameter(s) supports identifying a single or multi-property sort with a single or multi-direction output. The format of the parameter is: ``` sort=property[,ASC|DESC]... 
``` Therefore, the request `/resources?sort=name,title,DESC` would return the results sorted by the name and title descending, in that order. The sort directions are either ascending `ASC` or descending `DESC`. With single-order sorting, all properties are sorted in the same direction. To sort the results with varying orders by property, multiple sort parameters are passed. For example, the request `/resources?sort=name,ASC&sort=title,DESC` would sort by name ascending and title descending, in that order. ## Responses The following response statuses may be returned by this API. | Status | Meaning | Usage | | ------ | ------------------------ |------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `200` | OK | The operation performed without error according to the specification of the request, and no more specific 2xx code is suitable. | | `201` | Created | A create request has been fulfilled and a resource has been created. The resource is available as the URI specified in the response, including the `Location` header. | | `202` | Accepted | An asynchronous task has been accepted, but not guaranteed, to be processed in the future. | | `400` | Bad Request | The request was invalid or cannot be otherwise served. The request is not likely to succeed in the future without modifications. | | `401` | Unauthorized | The user is unauthorized to perform the operation requested, or does not maintain permissions to perform the operation on the resource specified. | | `403` | Forbidden | The resource exists to which the user has access, but the operating requested is not permitted. | | `404` | Not Found | The resource specified could not be located, does not exist, or an unauthenticated client does not have permissions to a resource. | | `405` | Method Not Allowed | The operations may not be performed on the specific resource. 
Allowed operations are returned and may be performed on the resource. | | `408` | Request Timeout | The client has failed to complete a request in a timely manner and the request has been discarded. | | `413` | Request Entity Too Large | The request being provided is too large for the server to accept processing. | | `415` | Unsupported Media Type | The media type is not supported for the requested resource. | | `500` | Internal Server Error | An internal and unexpected error has occurred on the server at no fault of the client. | ### Security The response statuses 401, 403 and 404 need special consideration for security purposes. As necessary, error statuses and messages may be obscured to strengthen security and prevent information exposure. The following is a guideline for privileged resource response statuses: | Use Case | Access | Resource | Permission | Status | | ------------------------------------------------------------------ | ------------------ |------------------- | ------------ | ------------ | | Unauthenticated access to an unauthenticated resource. | Unauthenticated | Unauthenticated | Yes | `20x` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Authenticated | No | `401` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Non-existent | No | `401` | | Authenticated access to a unauthenticated resource. | Authenticated | Unauthenticated | Yes | `20x` | | Authenticated access to an authenticated, unprivileged resource. | Authenticated | Authenticated | No | `404` | | Authenticated access to an authenticated, privileged resource. 
| Authenticated | Authenticated | Yes | `20x` | | Authenticated access to an authenticated, non-existent resource | Authenticated | Non-existent | Yes | `404` | ### Headers Commonly used response headers include: | Header | Example | Purpose | | -------------------------- | --------------------------------- | --------------------------------------------------------------- | | `Allow` | `OPTIONS, GET` | Defines the allowable HTTP operations on a resource. | | `Cache-Control` | `no-store, must-revalidate` | Disables caching of resources (as they are all dynamic). | | `Content-Encoding` | `gzip` | The encoding of the response body (if any). | | `Location` | | Refers to the URI of the resource created by a request. | | `Transfer-Encoding` | `chunked` | Specified the encoding used to transform response. | | `Retry-After` | 5000 | Indicates the time to wait before retrying a request. | | `X-Content-Type-Options` | `nosniff` | Disables MIME type sniffing. | | `X-XSS-Protection` | `1; mode=block` | Enables XSS filter protection. | | `X-Frame-Options` | `SAMEORIGIN` | Prevents rendering in a frame from a different origin. | | `X-UA-Compatible` | `IE=edge,chrome=1` | Specifies the browser mode to render in. | ### Format When `application/json` is returned in the response body it is always pretty-printed (indented, human readable output). Additionally, gzip compression/encoding is supported on all responses. #### Dates & Times Dates or times are returned as strings in the ISO 8601 'extended' format. When a date and time is returned (instant) the value is converted to UTC. For example: | Value | Format | Example | | --------------- | ------------------------------ | --------------------- | | Date | `YYYY-MM-DD` | 2017-12-03 | | Date & Time | `YYYY-MM-DD'T'hh:mm:ss[.nnn]Z` | 2017-12-03T10:15:30Z | #### Content In some resources a Content data type is used. This allows for multiple formats of representation to be returned within resource, specifically `\"html\"` and `\"text\"`. 
The `\"text\"` property returns a flattened representation suitable for output in textual displays. The `\"html\"` property returns an HTML fragment suitable for display within an HTML element. Note, the HTML returned is not a valid stand-alone HTML document. #### Paging The response to a paginated request follows the format: ```json { resources\": [ ... ], \"page\": { \"number\" : ..., \"size\" : ..., \"totalResources\" : ..., \"totalPages\" : ... }, \"links\": [ \"first\" : { \"href\" : \"...\" }, \"prev\" : { \"href\" : \"...\" }, \"self\" : { \"href\" : \"...\" }, \"next\" : { \"href\" : \"...\" }, \"last\" : { \"href\" : \"...\" } ] } ``` The `resources` property is an array of the resources being retrieved from the endpoint, each which should contain at minimum a \"self\" relation hypermedia link. The `page` property outlines the details of the current page and total possible pages. The object for the page includes the following properties: - number - The page number (zero-based) of the page returned. - size - The size of the pages, which is less than or equal to the maximum page size. - totalResources - The total amount of resources available across all pages. - totalPages - The total amount of pages. The last property of the paged response is the `links` array, which contains all available hypermedia links. For paginated responses, the \"self\", \"next\", \"previous\", \"first\", and \"last\" links are returned. The \"self\" link must always be returned and should contain a link to allow the client to replicate the original request against the collection resource in an identical manner to that in which it was invoked. The \"next\" and \"previous\" links are present if either or both there exists a previous or next page, respectively. The \"next\" and \"previous\" links have hrefs that allow \"natural movement\" to the next page, that is all parameters required to move the next page are provided in the link. 
The \"first\" and \"last\" links provide references to the first and last pages respectively. Requests outside the boundaries of the pageable will result in a `404 NOT FOUND`. Paginated requests do not provide a \"stateful cursor\" to the client, nor does it need to provide a read consistent view. Records in adjacent pages may change while pagination is being traversed, and the total number of pages and resources may change between requests within the same filtered/queries resource collection. #### Property Views The \"depth\" of the response of a resource can be configured using a \"view\". All endpoints supports two views that can tune the extent of the information returned in the resource. The supported views are `summary` and `details` (the default). View are specified using a query parameter, in this format: ```bash /<resource>?view={viewName} ``` #### Error Any error responses can provide a response body with a message to the client indicating more information (if applicable) to aid debugging of the error. All 40x and 50x responses will return an error response in the body. The format of the response is as follows: ```json { \"status\": <statusCode>, \"message\": <message>, \"links\" : [ { \"rel\" : \"...\", \"href\" : \"...\" } ] } ``` The `status` property is the same as the HTTP status returned in the response, to ease client parsing. The message property is a localized message in the request client's locale (if applicable) that articulates the nature of the error. The last property is the `links` property. This may contain additional [hypermedia links](#section/Overview/Authentication) to troubleshoot. #### Search Criteria <a section=\"section/Responses/SearchCriteria\"></a> Multiple resources make use of search criteria to match assets. Search criteria is an array of search filters. 
Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The operator is a type and property-specific operating performed on the filtered property. The valid values for fields and operators are outlined in the table below. Every filter also defines one or more values that are supplied to the operator. The valid values vary by operator and are outlined below. ##### Fields The following table outlines the search criteria fields and the available operators: | Field | Operators | | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | | `alternate-address-type` | `in` | | `container-image` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is like` ` not like` | | `container-status` | `is` ` is not` | | `containers` | `are` | | `criticality-tag` | `is` ` is not` ` is greater than` ` is less than` ` is applied` ` is not applied` | | `custom-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `cve` | `is` ` is not` ` contains` ` does not contain` | | `cvss-access-complexity` | `is` ` is not` | | `cvss-authentication-required` | `is` ` is not` | | `cvss-access-vector` | `is` ` is not` | | `cvss-availability-impact` | `is` ` is not` | | `cvss-confidentiality-impact` | `is` ` is not` | | `cvss-integrity-impact` | `is` ` is not` | | `cvss-v3-confidentiality-impact` | `is` ` is not` | | `cvss-v3-integrity-impact` | `is` ` is not` | | `cvss-v3-availability-impact` | `is` ` is not` | | `cvss-v3-attack-vector` | `is` ` is not` | | `cvss-v3-attack-complexity` | `is` ` is not` | | `cvss-v3-user-interaction` 
| `is` ` is not` | | `cvss-v3-privileges-required` | `is` ` is not` | | `host-name` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is empty` ` is not empty` ` is like` ` not like` | | `host-type` | `in` ` not in` | | `ip-address` | `is` ` is not` ` in range` ` not in range` ` is like` ` not like` | | `ip-address-type` | `in` ` not in` | | `last-scan-date` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `location-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `mobile-device-last-sync-time` | `is-within-the-last` ` is earlier than` | | `open-ports` | `is` ` is not` ` in range` | | `operating-system` | `contains` ` does not contain` ` is empty` ` is not empty` | | `owner-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `pci-compliance` | `is` | | `risk-score` | `is` ` is not` ` in range` ` greater than` ` less than` | | `service-name` | `contains` ` does not contain` | | `site-id` | `in` ` not in` | | `software` | `contains` ` does not contain` | | `vAsset-cluster` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-datacenter` | `is` ` is not` | | `vAsset-host-name` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-power-state` | `in` ` not in` | | `vAsset-resource-pool-path` | `contains` ` does not contain` | | `vulnerability-assessed` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `vulnerability-category` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` | | `vulnerability-cvss-v3-score` | `is` ` is not` | | `vulnerability-cvss-score` | `is` ` is not` ` in range` ` is greater than` ` is less than` | | `vulnerability-exposures` | `includes` ` does not include` | | `vulnerability-title` | `contains` ` does not contain` ` is` 
` is not` ` starts with` ` ends with` | | `vulnerability-validated-status` | `are` | ##### Enumerated Properties The following fields have enumerated values: | Field | Acceptable Values | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------- | | `alternate-address-type` | 0=IPv4, 1=IPv6 | | `containers` | 0=present, 1=not present | | `container-status` | `created` `running` `paused` `restarting` `exited` `dead` `unknown` | | `cvss-access-complexity` | <ul><li><code>L</code> = Low</li><li><code>M</code> = Medium</li><li><code>H</code> = High</li></ul> | | `cvss-integrity-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-confidentiality-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-availability-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-access-vector` | <ul><li><code>L</code> = Local</li><li><code>A</code> = Adjacent</li><li><code>N</code> = Network</li></ul> | | `cvss-authentication-required` | <ul><li><code>N</code> = None</li><li><code>S</code> = Single</li><li><code>M</code> = Multiple</li></ul> | | `cvss-v3-confidentiality-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-integrity-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-availability-impact` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-attack-vector` | <ul><li><code>N</code> = Network</li><li><code>A</code> = Adjacent</li><li><code>L</code> = Local</li><li><code>P</code> = Physical</li></ul> | | 
`cvss-v3-attack-complexity` | <ul><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-user-interaction` | <ul><li><code>N</code> = None</li><li><code>R</code> = Required</li></ul> | | `cvss-v3-privileges-required` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `host-type` | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | `ip-address-type` | 0=IPv4, 1=IPv6 | | `pci-compliance` | 0=fail, 1=pass | | `vulnerability-validated-status` | 0=present, 1=not present | ##### Operator Properties <a section=\"section/Responses/SearchCriteria/OperatorProperties\"></a> The following table outlines which properties are required for each operator and the appropriate data type(s): | Operator | `value` | `lower` | `upper` | | ----------------------|-----------------------|-----------------------|-----------------------| | `are` | `string` | | | | `contains` | `string` | | | | `does-not-contain` | `string` | | | | `ends with` | `string` | | | | `in` | `Array[ string ]` | | | | `in-range` | | `numeric` | `numeric` | | `includes` | `Array[ string ]` | | | | `is` | `string` | | | | `is-applied` | | | | | `is-between` | | `numeric` | `numeric` | | `is-earlier-than` | `numeric` | | | | `is-empty` | | | | | `is-greater-than` | `numeric` | | | | `is-on-or-after` | `string` (yyyy-MM-dd) | | | | `is-on-or-before` | `string` (yyyy-MM-dd) | | | | `is-not` | `string` | | | | `is-not-applied` | | | | | `is-not-empty` | | | | | `is-within-the-last` | `string` | | | | `less-than` | `string` | | | | `like` | `string` | | | | `not-contains` | `string` | | | | `not-in` | `Array[ string ]` | | | | `not-in-range` | | `numeric` | `numeric` | | `not-like` | `string` | | | | `starts-with` | `string` | | | #### Discovery Connection Search Criteria <a section=\"section/Responses/DiscoverySearchCriteria\"></a> Dynamic sites make use of search criteria to match assets from a discovery connection. 
Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The list of supported fields vary depending on the type of discovery connection configured for the dynamic site (e.g vSphere, ActiveSync, etc.). The operator is a type and property-specific operating performed on the filtered property. The valid values for fields outlined in the tables below and are grouped by the type of connection. Every filter also defines one or more values that are supplied to the operator. See <a href=\"#section/Responses/SearchCriteria/OperatorProperties\">Search Criteria Operator Properties</a> for more information on the valid values for each operator. ##### Fields (ActiveSync) This section documents search criteria information for ActiveSync discovery connections. The discovery connections must be one of the following types: `\"activesync-ldap\"`, `\"activesync-office365\"`, or `\"activesync-powershell\"`. The following table outlines the search criteria fields and the available operators for ActiveSync connections: | Field | Operators | | --------------------------------- | ------------------------------------------------------------- | | `last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `operating-system` | `contains` ` does-not-contain` | | `user` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (AWS) This section documents search criteria information for AWS discovery connections. The discovery connections must be the type `\"aws\"`. 
The following table outlines the search criteria fields and the available operators for AWS connections: | Field | Operators | | ----------------------- | ------------------------------------------------------------- | | `availability-zone` | `contains` ` does-not-contain` | | `guest-os-family` | `contains` ` does-not-contain` | | `instance-id` | `contains` ` does-not-contain` | | `instance-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `instance-state` | `in` ` not-in` | | `instance-type` | `in` ` not-in` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `region` | `in` ` not-in` | | `vpc-id` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (DHCP) This section documents search criteria information for DHCP discovery connections. The discovery connections must be the type `\"dhcp\"`. The following table outlines the search criteria fields and the available operators for DHCP connections: | Field | Operators | | --------------- | ------------------------------------------------------------- | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `mac-address` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (Sonar) This section documents search criteria information for Sonar discovery connections. The discovery connections must be the type `\"sonar\"`. The following table outlines the search criteria fields and the available operators for Sonar connections: | Field | Operators | | ------------------- | -------------------- | | `search-domain` | `contains` ` is` | | `ip-address` | `in-range` ` is` | | `sonar-scan-date` | `is-within-the-last` | ##### Fields (vSphere) This section documents search criteria information for vSphere discovery connections. The discovery connections must be the type `\"vsphere\"`. 
The following table outlines the search criteria fields and the available operators for vSphere connections: | Field | Operators | | -------------------- | ------------------------------------------------------------------------------------------ | | `cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `data-center` | `is` ` is-not` | | `discovered-time` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `guest-os-family` | `contains` ` does-not-contain` | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `power-state` | `in` ` not-in` | | `resource-pool-path` | `contains` ` does-not-contain` | | `last-time-seen` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vm` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Enumerated Properties (vSphere) The following fields have enumerated values: | Field | Acceptable Values | | ------------- | ------------------------------------ | | `power-state` | `poweredOn` `poweredOff` `suspended` | ## HATEOAS This API follows Hypermedia as the Engine of Application State (HATEOAS) principals and is therefore hypermedia friendly. Hyperlinks are returned in the `links` property of any given resource and contain a fully-qualified hyperlink to the corresponding resource. The format of the hypermedia link adheres to both the <a target=\"_blank\" href=\"http://jsonapi.org\">{json:api} v1</a> <a target=\"_blank\" href=\"http://jsonapi.org/format/#document-links\">\"Link Object\"</a> and <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html\">JSON Hyper-Schema</a> <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html#rfc.section.5.2\">\"Link Description Object\"</a> formats. 
For example: ```json \"links\": [{ \"rel\": \"<relation>\", \"href\": \"<href>\" ... }] ``` Where appropriate link objects may also contain additional properties than the `rel` and `href` properties, such as `id`, `type`, etc. See the [Root](#tag/Root) resources for the entry points into API discovery. # noqa: E501
OpenAPI spec version: 3
Contact: support@rapid7.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from rapid7vmconsole.api_client import ApiClient
class SiteApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_site_tag(self, id, tag_id, **kwargs): # noqa: E501
"""Site Tag # noqa: E501
Adds a tag to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_site_tag(id, tag_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int tag_id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_site_tag_with_http_info(id, tag_id, **kwargs) # noqa: E501
else:
(data) = self.add_site_tag_with_http_info(id, tag_id, **kwargs) # noqa: E501
return data
def add_site_tag_with_http_info(self, id, tag_id, **kwargs): # noqa: E501
"""Site Tag # noqa: E501
Adds a tag to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_site_tag_with_http_info(id, tag_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int tag_id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'tag_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_site_tag" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `add_site_tag`") # noqa: E501
# verify the required parameter 'tag_id' is set
if ('tag_id' not in params or
params['tag_id'] is None):
raise ValueError("Missing the required parameter `tag_id` when calling `add_site_tag`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'tag_id' in params:
path_params['tagId'] = params['tag_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/tags/{tagId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_site_user(self, id, **kwargs): # noqa: E501
"""Site Users Access # noqa: E501
Grants a non-administrator user access to the specified site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_site_user(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int param0: The identifier of the user.
:return: ReferenceWithUserIDLink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_site_user_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_site_user_with_http_info(id, **kwargs) # noqa: E501
return data
def add_site_user_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Users Access # noqa: E501
Grants a non-administrator user access to the specified site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_site_user_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int param0: The identifier of the user.
:return: ReferenceWithUserIDLink
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_site_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `add_site_user`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/users', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReferenceWithUserIDLink', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_site(self, **kwargs): # noqa: E501
"""Sites # noqa: E501
Creates a new site with the specified configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_site(async=True)
>>> result = thread.get()
:param async bool
:param SiteCreateResource param0: Resource for creating a site configuration.
:return: ReferenceWithSiteIDLink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_site_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_site_with_http_info(**kwargs) # noqa: E501
return data
def create_site_with_http_info(self, **kwargs): # noqa: E501
"""Sites # noqa: E501
Creates a new site with the specified configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_site_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param SiteCreateResource param0: Resource for creating a site configuration.
:return: ReferenceWithSiteIDLink
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_site" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReferenceWithSiteIDLink', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_site_credential(self, id, **kwargs):  # noqa: E501
    """Site Scan Credentials  # noqa: E501

    Creates a new site credential.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_credential(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SiteCredential param1: The specification of a site credential.
    :return: CreatedReferenceCredentialIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.create_site_credential_with_http_info(id, **kwargs)  # noqa: E501
def create_site_credential_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Scan Credentials  # noqa: E501

    Creates a new site credential.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_credential_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SiteCredential param1: The specification of a site credential.
    :return: CreatedReferenceCredentialIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'param1']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_site_credential" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `create_site_credential`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'param1' in params:
        body_params = params['param1']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/site_credentials', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CreatedReferenceCredentialIDLink',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def create_site_scan_schedule(self, id, **kwargs):  # noqa: E501
    """Site Scan Schedules  # noqa: E501

    Creates a new scan schedule for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_scan_schedule(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param ScanSchedule param0: Resource for a scan schedule.
    :return: ReferenceWithScanScheduleIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.create_site_scan_schedule_with_http_info(id, **kwargs)  # noqa: E501
def create_site_scan_schedule_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Scan Schedules  # noqa: E501

    Creates a new scan schedule for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_scan_schedule_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param ScanSchedule param0: Resource for a scan schedule.
    :return: ReferenceWithScanScheduleIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'param0']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_site_scan_schedule" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `create_site_scan_schedule`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/scan_schedules', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ReferenceWithScanScheduleIDLink',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def create_site_smtp_alert(self, id, **kwargs):  # noqa: E501
    """Site SMTP Alerts  # noqa: E501

    Creates a new SMTP alert for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_smtp_alert(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SmtpAlert param0: Resource for creating a new SMTP alert.
    :return: ReferenceWithAlertIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.create_site_smtp_alert_with_http_info(id, **kwargs)  # noqa: E501
def create_site_smtp_alert_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site SMTP Alerts  # noqa: E501

    Creates a new SMTP alert for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_smtp_alert_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SmtpAlert param0: Resource for creating a new SMTP alert.
    :return: ReferenceWithAlertIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'param0']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_site_smtp_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `create_site_smtp_alert`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/smtp', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ReferenceWithAlertIDLink',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def create_site_snmp_alert(self, id, **kwargs):  # noqa: E501
    """Site SNMP Alerts  # noqa: E501

    Creates a new SNMP alert for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_snmp_alert(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SnmpAlert param0: Resource for creating a new SNMP alert.
    :return: ReferenceWithAlertIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.create_site_snmp_alert_with_http_info(id, **kwargs)  # noqa: E501
def create_site_snmp_alert_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site SNMP Alerts  # noqa: E501

    Creates a new SNMP alert for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_snmp_alert_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SnmpAlert param0: Resource for creating a new SNMP alert.
    :return: ReferenceWithAlertIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'param0']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_site_snmp_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `create_site_snmp_alert`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/snmp', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ReferenceWithAlertIDLink',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def create_site_syslog_alert(self, id, **kwargs):  # noqa: E501
    """Site Syslog Alerts  # noqa: E501

    Creates a new Syslog alert for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_syslog_alert(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SyslogAlert param0: Resource for creating a new Syslog alert.
    :return: ReferenceWithAlertIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.create_site_syslog_alert_with_http_info(id, **kwargs)  # noqa: E501
def create_site_syslog_alert_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Syslog Alerts  # noqa: E501

    Creates a new Syslog alert for the specified site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_site_syslog_alert_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SyslogAlert param0: Resource for creating a new Syslog alert.
    :return: ReferenceWithAlertIDLink
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'param0']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_site_syslog_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `create_site_syslog_alert`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/syslog', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ReferenceWithAlertIDLink',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def delete_all_site_alerts(self, id, **kwargs):  # noqa: E501
    """Site Alerts  # noqa: E501

    Deletes all alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_alerts(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.delete_all_site_alerts_with_http_info(id, **kwargs)  # noqa: E501
def delete_all_site_alerts_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Alerts  # noqa: E501

    Deletes all alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_alerts_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_all_site_alerts" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_all_site_alerts`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def delete_all_site_credentials(self, id, **kwargs):  # noqa: E501
    """Site Scan Credentials  # noqa: E501

    Deletes all site credentials from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_credentials(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.delete_all_site_credentials_with_http_info(id, **kwargs)  # noqa: E501
def delete_all_site_credentials_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Scan Credentials  # noqa: E501

    Deletes all site credentials from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_credentials_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_all_site_credentials" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_all_site_credentials`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/site_credentials', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def delete_all_site_scan_schedules(self, id, **kwargs):  # noqa: E501
    """Site Scan Schedules  # noqa: E501

    Deletes all scan schedules from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_scan_schedules(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.delete_all_site_scan_schedules_with_http_info(id, **kwargs)  # noqa: E501
def delete_all_site_scan_schedules_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Scan Schedules  # noqa: E501

    Deletes all scan schedules from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_scan_schedules_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_all_site_scan_schedules" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_all_site_scan_schedules`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/scan_schedules', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def delete_all_site_smtp_alerts(self, id, **kwargs):  # noqa: E501
    """Site SMTP Alerts  # noqa: E501

    Deletes all SMTP alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_smtp_alerts(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.delete_all_site_smtp_alerts_with_http_info(id, **kwargs)  # noqa: E501
def delete_all_site_smtp_alerts_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site SMTP Alerts  # noqa: E501

    Deletes all SMTP alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_smtp_alerts_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_all_site_smtp_alerts" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_all_site_smtp_alerts`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/smtp', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def delete_all_site_snmp_alerts(self, id, **kwargs):  # noqa: E501
    """Site SNMP Alerts  # noqa: E501

    Deletes all SNMP alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_snmp_alerts(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.delete_all_site_snmp_alerts_with_http_info(id, **kwargs)  # noqa: E501
def delete_all_site_snmp_alerts_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site SNMP Alerts  # noqa: E501

    Deletes all SNMP alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_snmp_alerts_with_http_info(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments so typos surface as a TypeError.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_all_site_snmp_alerts" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_all_site_snmp_alerts`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # ``async`` is a reserved word since Python 3.7, so the keyword is
    # forwarded to call_api via dict unpacking instead of ``async=...``.
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/snmp', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def delete_all_site_syslog_alerts(self, id, **kwargs):  # noqa: E501
    """Site Syslog Alerts  # noqa: E501

    Deletes all Syslog alerts from the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_all_site_syslog_alerts(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info helper already returns the request thread when
    # async=True and the deserialized data otherwise, so a single
    # forwarding call covers both code paths.
    kwargs['_return_http_data_only'] = True
    return self.delete_all_site_syslog_alerts_with_http_info(id, **kwargs)  # noqa: E501
    def delete_all_site_syslog_alerts_with_http_info(self, id, **kwargs):  # noqa: E501
        """Site Syslog Alerts  # noqa: E501

        Deletes all Syslog alerts from the site (DELETE
        /api/3/sites/{id}/alerts/syslog).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_all_site_syslog_alerts_with_http_info(id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if `id` is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names (self, id, kwargs,
        # all_params); recognised kwargs are merged in below, so the exact
        # statement order here is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_all_site_syslog_alerts" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_all_site_syslog_alerts`")  # noqa: E501

        collection_formats = {}

        # Substituted into the {id} placeholder of the resource path.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/alerts/syslog', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_site(self, id, **kwargs): # noqa: E501
"""Site # noqa: E501
site.delete.description # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_site(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_site_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_site_with_http_info(id, **kwargs) # noqa: E501
return data
    def delete_site_with_http_info(self, id, **kwargs):  # noqa: E501
        """Site  # noqa: E501

        Deletes the specified site (DELETE /api/3/sites/{id}). The original
        generated description was an unrendered i18n key
        ("site.delete.description").  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_site_with_http_info(id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if `id` is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_site" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_site`")  # noqa: E501

        collection_formats = {}

        # Substituted into the {id} placeholder of the resource path.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_site_credential(self, id, credential_id, **kwargs): # noqa: E501
"""Site Scan Credential # noqa: E501
Deletes the specified site credential. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_site_credential(id, credential_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int credential_id: The identifier of the site credential. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_site_credential_with_http_info(id, credential_id, **kwargs) # noqa: E501
else:
(data) = self.delete_site_credential_with_http_info(id, credential_id, **kwargs) # noqa: E501
return data
    def delete_site_credential_with_http_info(self, id, credential_id, **kwargs):  # noqa: E501
        """Site Scan Credential  # noqa: E501

        Deletes the specified site credential (DELETE
        /api/3/sites/{id}/site_credentials/{credentialId}).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_site_credential_with_http_info(id, credential_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int credential_id: The identifier of the site credential. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'credential_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_site_credential" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_site_credential`")  # noqa: E501
        # verify the required parameter 'credential_id' is set
        if ('credential_id' not in params or
                params['credential_id'] is None):
            raise ValueError("Missing the required parameter `credential_id` when calling `delete_site_credential`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'credential_id' in params:
            path_params['credentialId'] = params['credential_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/site_credentials/{credentialId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_site_scan_schedule(self, id, schedule_id, **kwargs): # noqa: E501
"""Site Scan Schedule # noqa: E501
Deletes the specified scan schedule from the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_site_scan_schedule(id, schedule_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int schedule_id: The identifier of the scan schedule. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_site_scan_schedule_with_http_info(id, schedule_id, **kwargs) # noqa: E501
else:
(data) = self.delete_site_scan_schedule_with_http_info(id, schedule_id, **kwargs) # noqa: E501
return data
    def delete_site_scan_schedule_with_http_info(self, id, schedule_id, **kwargs):  # noqa: E501
        """Site Scan Schedule  # noqa: E501

        Deletes the specified scan schedule from the site (DELETE
        /api/3/sites/{id}/scan_schedules/{scheduleId}).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_site_scan_schedule_with_http_info(id, schedule_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int schedule_id: The identifier of the scan schedule. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'schedule_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_site_scan_schedule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_site_scan_schedule`")  # noqa: E501
        # verify the required parameter 'schedule_id' is set
        if ('schedule_id' not in params or
                params['schedule_id'] is None):
            raise ValueError("Missing the required parameter `schedule_id` when calling `delete_site_scan_schedule`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'schedule_id' in params:
            path_params['scheduleId'] = params['schedule_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/scan_schedules/{scheduleId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_site_smtp_alert(self, id, alert_id, **kwargs): # noqa: E501
"""Site SMTP Alert # noqa: E501
Deletes the specified SMTP alert from the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_site_smtp_alert(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_site_smtp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
else:
(data) = self.delete_site_smtp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
return data
    def delete_site_smtp_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
        """Site SMTP Alert  # noqa: E501

        Deletes the specified SMTP alert from the site (DELETE
        /api/3/sites/{id}/alerts/smtp/{alertId}).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_site_smtp_alert_with_http_info(id, alert_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int alert_id: The identifier of the alert. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'alert_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_site_smtp_alert" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_site_smtp_alert`")  # noqa: E501
        # verify the required parameter 'alert_id' is set
        if ('alert_id' not in params or
                params['alert_id'] is None):
            raise ValueError("Missing the required parameter `alert_id` when calling `delete_site_smtp_alert`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'alert_id' in params:
            path_params['alertId'] = params['alert_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/alerts/smtp/{alertId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_site_snmp_alert(self, id, alert_id, **kwargs): # noqa: E501
"""Site SNMP Alert # noqa: E501
Deletes the specified SNMP alert from the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_site_snmp_alert(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_site_snmp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
else:
(data) = self.delete_site_snmp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
return data
    def delete_site_snmp_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
        """Site SNMP Alert  # noqa: E501

        Deletes the specified SNMP alert from the site (DELETE
        /api/3/sites/{id}/alerts/snmp/{alertId}).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_site_snmp_alert_with_http_info(id, alert_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int alert_id: The identifier of the alert. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'alert_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_site_snmp_alert" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_site_snmp_alert`")  # noqa: E501
        # verify the required parameter 'alert_id' is set
        if ('alert_id' not in params or
                params['alert_id'] is None):
            raise ValueError("Missing the required parameter `alert_id` when calling `delete_site_snmp_alert`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'alert_id' in params:
            path_params['alertId'] = params['alert_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/alerts/snmp/{alertId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_site_syslog_alert(self, id, alert_id, **kwargs): # noqa: E501
"""Site Syslog Alert # noqa: E501
Deletes the specified Syslog alert from the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_site_syslog_alert(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_site_syslog_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
else:
(data) = self.delete_site_syslog_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
return data
    def delete_site_syslog_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
        """Site Syslog Alert  # noqa: E501

        Deletes the specified Syslog alert from the site (DELETE
        /api/3/sites/{id}/alerts/syslog/{alertId}).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.delete_site_syslog_alert_with_http_info(id, alert_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int alert_id: The identifier of the alert. (required)
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'alert_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_site_syslog_alert" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_site_syslog_alert`")  # noqa: E501
        # verify the required parameter 'alert_id' is set
        if ('alert_id' not in params or
                params['alert_id'] is None):
            raise ValueError("Missing the required parameter `alert_id` when calling `delete_site_syslog_alert`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'alert_id' in params:
            path_params['alertId'] = params['alert_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/alerts/syslog/{alertId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def enable_shared_credential_on_site(self, id, credential_id, **kwargs): # noqa: E501
"""Assigned Shared Credential Enablement # noqa: E501
Enable or disable the shared credential for the site's scans. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.enable_shared_credential_on_site(id, credential_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int credential_id: The identifier of the shared credential. (required)
:param bool param0: Flag indicating whether the shared credential is enabled for the site's scans.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.enable_shared_credential_on_site_with_http_info(id, credential_id, **kwargs) # noqa: E501
else:
(data) = self.enable_shared_credential_on_site_with_http_info(id, credential_id, **kwargs) # noqa: E501
return data
    def enable_shared_credential_on_site_with_http_info(self, id, credential_id, **kwargs):  # noqa: E501
        """Assigned Shared Credential Enablement  # noqa: E501

        Enable or disable the shared credential for the site's scans (PUT
        /api/3/sites/{id}/shared_credentials/{credentialId}/enabled).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.enable_shared_credential_on_site_with_http_info(id, credential_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int credential_id: The identifier of the shared credential. (required)
        :param bool param0: Flag indicating whether the shared credential is enabled for the site's scans.
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'credential_id', 'param0']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method enable_shared_credential_on_site" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `enable_shared_credential_on_site`")  # noqa: E501
        # verify the required parameter 'credential_id' is set
        if ('credential_id' not in params or
                params['credential_id'] is None):
            raise ValueError("Missing the required parameter `credential_id` when calling `enable_shared_credential_on_site`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'credential_id' in params:
            path_params['credentialId'] = params['credential_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # Optional boolean flag is sent as the PUT request body.
        body_params = None
        if 'param0' in params:
            body_params = params['param0']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/shared_credentials/{credentialId}/enabled', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def enable_site_credential(self, id, credential_id, **kwargs): # noqa: E501
"""Site Credential Enablement # noqa: E501
Enable or disable the site credential for scans. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.enable_site_credential(id, credential_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int credential_id: The identifier of the site credential. (required)
:param bool param0: Flag indicating whether the credential is enabled for use during the scan.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.enable_site_credential_with_http_info(id, credential_id, **kwargs) # noqa: E501
else:
(data) = self.enable_site_credential_with_http_info(id, credential_id, **kwargs) # noqa: E501
return data
    def enable_site_credential_with_http_info(self, id, credential_id, **kwargs):  # noqa: E501
        """Site Credential Enablement  # noqa: E501

        Enable or disable the site credential for scans (PUT
        /api/3/sites/{id}/site_credentials/{credentialId}/enabled).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.enable_site_credential_with_http_info(id, credential_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int id: The identifier of the site. (required)
        :param int credential_id: The identifier of the site credential. (required)
        :param bool param0: Flag indicating whether the credential is enabled for use during the scan.
        :return: Links
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unrecognised keyword argument is passed.
        :raises ValueError: if a required parameter is missing or None.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id', 'credential_id', 'param0']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshots the current local names; recognised
        # kwargs are merged in below, so statement order is load-bearing.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method enable_site_credential" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `enable_site_credential`")  # noqa: E501
        # verify the required parameter 'credential_id' is set
        if ('credential_id' not in params or
                params['credential_id'] is None):
            raise ValueError("Missing the required parameter `credential_id` when calling `enable_site_credential`")  # noqa: E501

        collection_formats = {}

        # Python snake_case params map onto camelCase path placeholders.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'credential_id' in params:
            path_params['credentialId'] = params['credential_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # Optional boolean flag is sent as the PUT request body.
        body_params = None
        if 'param0' in params:
            body_params = params['param0']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json;charset=UTF-8'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): `async` as a keyword argument is only valid on
        # Python <= 3.6; it became a reserved word in Python 3.7.
        return self.api_client.call_api(
            '/api/3/sites/{id}/site_credentials/{credentialId}/enabled', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Links',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_excluded_asset_groups(self, id, **kwargs): # noqa: E501
"""Site Excluded Asset Groups # noqa: E501
Retrieves the excluded asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_excluded_asset_groups(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesAssetGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_excluded_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_excluded_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def get_excluded_asset_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Excluded Asset Groups # noqa: E501
Retrieves the excluded asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_excluded_asset_groups_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesAssetGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_excluded_asset_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_excluded_asset_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/excluded_asset_groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesAssetGroup', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_excluded_targets(self, id, **kwargs): # noqa: E501
"""Site Excluded Targets # noqa: E501
Retrieves the excluded targets in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_excluded_targets(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanTargetsResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_excluded_targets_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_excluded_targets_with_http_info(id, **kwargs) # noqa: E501
return data
def get_excluded_targets_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Excluded Targets # noqa: E501
Retrieves the excluded targets in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_excluded_targets_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanTargetsResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_excluded_targets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_excluded_targets`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/excluded_targets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ScanTargetsResource', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_included_asset_groups(self, id, **kwargs): # noqa: E501
"""Site Included Asset Groups # noqa: E501
Retrieves the included asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_included_asset_groups(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesAssetGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_included_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_included_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def get_included_asset_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Included Asset Groups # noqa: E501
Retrieves the included asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_included_asset_groups_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesAssetGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_included_asset_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_included_asset_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/included_asset_groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesAssetGroup', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_included_targets(self, id, **kwargs): # noqa: E501
"""Site Included Targets # noqa: E501
Retrieves the included targets in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_included_targets(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanTargetsResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_included_targets_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_included_targets_with_http_info(id, **kwargs) # noqa: E501
return data
def get_included_targets_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Included Targets # noqa: E501
Retrieves the included targets in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_included_targets_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanTargetsResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_included_targets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_included_targets`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/included_targets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ScanTargetsResource', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site(self, id, **kwargs): # noqa: E501
"""Site # noqa: E501
Retrieves the site with the specified identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: Site
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_with_http_info(self, id, **kwargs): # noqa: E501
"""Site # noqa: E501
Retrieves the site with the specified identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: Site
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Site', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_alerts(self, id, **kwargs): # noqa: E501
"""Site Alerts # noqa: E501
Retrieve all alerts defined in the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_alerts(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesAlert
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_alerts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_alerts_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_alerts_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Alerts # noqa: E501
Retrieve all alerts defined in the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_alerts_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesAlert
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_alerts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_alerts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesAlert', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_assets(self, id, **kwargs): # noqa: E501
"""Site Assets # noqa: E501
Retrieves a paged resource of assets linked with the specified site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_assets(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int page: The index of the page (zero-based) to retrieve.
:param int size: The number of records per page to retrieve.
:param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`. The default sort order is ascending. Multiple sort criteria can be specified using multiple sort query parameters.
:return: PageOfAsset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_assets_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_assets_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_assets_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Assets # noqa: E501
Retrieves a paged resource of assets linked with the specified site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_assets_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int page: The index of the page (zero-based) to retrieve.
:param int size: The number of records per page to retrieve.
:param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`. The default sort order is ascending. Multiple sort criteria can be specified using multiple sort query parameters.
:return: PageOfAsset
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'page', 'size', 'sort'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_assets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_assets`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
collection_formats['sort'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/assets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageOfAsset', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_credential(self, id, credential_id, **kwargs): # noqa: E501
"""Site Scan Credential # noqa: E501
Retrieves the specified site credential. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_credential(id, credential_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int credential_id: The identifier of the site credential. (required)
:return: SiteCredential
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_credential_with_http_info(id, credential_id, **kwargs) # noqa: E501
else:
(data) = self.get_site_credential_with_http_info(id, credential_id, **kwargs) # noqa: E501
return data
def get_site_credential_with_http_info(self, id, credential_id, **kwargs): # noqa: E501
"""Site Scan Credential # noqa: E501
Retrieves the specified site credential. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_credential_with_http_info(id, credential_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int credential_id: The identifier of the site credential. (required)
:return: SiteCredential
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'credential_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_credential" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_credential`") # noqa: E501
# verify the required parameter 'credential_id' is set
if ('credential_id' not in params or
params['credential_id'] is None):
raise ValueError("Missing the required parameter `credential_id` when calling `get_site_credential`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'credential_id' in params:
path_params['credentialId'] = params['credential_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/site_credentials/{credentialId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SiteCredential', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_credentials(self, id, **kwargs): # noqa: E501
"""Site Scan Credentials # noqa: E501
Retrieves all defined site credential resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_credentials(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSiteCredential
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_credentials_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_credentials_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_credentials_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Credentials # noqa: E501
Retrieves all defined site credential resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_credentials_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSiteCredential
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_credentials" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_credentials`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/site_credentials', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesSiteCredential', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_discovery_connection(self, id, **kwargs): # noqa: E501
"""Site Discovery Connection # noqa: E501
Retrieves the discovery connection assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_discovery_connection(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: SiteDiscoveryConnection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_discovery_connection_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_discovery_connection_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_discovery_connection_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Discovery Connection # noqa: E501
Retrieves the discovery connection assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_discovery_connection_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: SiteDiscoveryConnection
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_discovery_connection" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_discovery_connection`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/discovery_connection', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SiteDiscoveryConnection', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_discovery_search_criteria(self, id, **kwargs): # noqa: E501
"""Site Discovery Search Criteria # noqa: E501
Retrieve the search criteria of the dynamic site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_discovery_search_criteria(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: DiscoverySearchCriteria
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_discovery_search_criteria_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_discovery_search_criteria_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_discovery_search_criteria_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Discovery Search Criteria # noqa: E501
Retrieve the search criteria of the dynamic site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_discovery_search_criteria_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: DiscoverySearchCriteria
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_discovery_search_criteria" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_discovery_search_criteria`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/discovery_search_criteria', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DiscoverySearchCriteria', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_organization(self, id, **kwargs): # noqa: E501
"""Site Organization Information # noqa: E501
Retrieves the site organization information. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_organization(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: SiteOrganization
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_organization_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_organization_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_organization_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Organization Information # noqa: E501
Retrieves the site organization information. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_organization_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: SiteOrganization
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_organization" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_organization`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/organization', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SiteOrganization', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_scan_engine(self, id, **kwargs): # noqa: E501
"""Site Scan Engine # noqa: E501
Retrieves the resource of the scan engine assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_engine(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanEngine
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_scan_engine_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_scan_engine_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_scan_engine_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Engine # noqa: E501
Retrieves the resource of the scan engine assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_engine_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanEngine
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_scan_engine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_scan_engine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_engine', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ScanEngine', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_scan_schedule(self, id, schedule_id, **kwargs): # noqa: E501
"""Site Scan Schedule # noqa: E501
Retrieves the specified scan schedule. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_schedule(id, schedule_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int schedule_id: The identifier of the scan schedule. (required)
:return: ScanSchedule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_scan_schedule_with_http_info(id, schedule_id, **kwargs) # noqa: E501
else:
(data) = self.get_site_scan_schedule_with_http_info(id, schedule_id, **kwargs) # noqa: E501
return data
def get_site_scan_schedule_with_http_info(self, id, schedule_id, **kwargs): # noqa: E501
"""Site Scan Schedule # noqa: E501
Retrieves the specified scan schedule. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_schedule_with_http_info(id, schedule_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int schedule_id: The identifier of the scan schedule. (required)
:return: ScanSchedule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'schedule_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_scan_schedule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_scan_schedule`") # noqa: E501
# verify the required parameter 'schedule_id' is set
if ('schedule_id' not in params or
params['schedule_id'] is None):
raise ValueError("Missing the required parameter `schedule_id` when calling `get_site_scan_schedule`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'schedule_id' in params:
path_params['scheduleId'] = params['schedule_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_schedules/{scheduleId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ScanSchedule', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_scan_schedules(self, id, **kwargs): # noqa: E501
"""Site Scan Schedules # noqa: E501
Returns all scan schedules for the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_schedules(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesScanSchedule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_scan_schedules_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_scan_schedules_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_scan_schedules_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Schedules # noqa: E501
Returns all scan schedules for the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_schedules_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesScanSchedule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_scan_schedules" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_scan_schedules`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_schedules', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesScanSchedule', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_scan_template(self, id, **kwargs): # noqa: E501
"""Site Scan Template # noqa: E501
Retrieves the resource of the scan template assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_template(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanTemplate
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_scan_template_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_scan_template_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_scan_template_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Template # noqa: E501
Retrieves the resource of the scan template assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_scan_template_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ScanTemplate
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_scan_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_scan_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_template', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ScanTemplate', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_shared_credentials(self, id, **kwargs): # noqa: E501
"""Assigned Shared Credentials # noqa: E501
Retrieve all of the shared credentials assigned to the site. These shared credentials can be enabled/disabled for the site's scan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_shared_credentials(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSiteSharedCredential
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_shared_credentials_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_shared_credentials_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_shared_credentials_with_http_info(self, id, **kwargs): # noqa: E501
"""Assigned Shared Credentials # noqa: E501
Retrieve all of the shared credentials assigned to the site. These shared credentials can be enabled/disabled for the site's scan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_shared_credentials_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSiteSharedCredential
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_shared_credentials" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_shared_credentials`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/shared_credentials', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesSiteSharedCredential', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_smtp_alert(self, id, alert_id, **kwargs): # noqa: E501
"""Site SMTP Alert # noqa: E501
Retrieves the specified SMTP alert. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_smtp_alert(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: SmtpAlert
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_smtp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
else:
(data) = self.get_site_smtp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
return data
def get_site_smtp_alert_with_http_info(self, id, alert_id, **kwargs): # noqa: E501
"""Site SMTP Alert # noqa: E501
Retrieves the specified SMTP alert. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_smtp_alert_with_http_info(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: SmtpAlert
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'alert_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_smtp_alert" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_smtp_alert`") # noqa: E501
# verify the required parameter 'alert_id' is set
if ('alert_id' not in params or
params['alert_id'] is None):
raise ValueError("Missing the required parameter `alert_id` when calling `get_site_smtp_alert`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'alert_id' in params:
path_params['alertId'] = params['alert_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts/smtp/{alertId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SmtpAlert', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_smtp_alerts(self, id, **kwargs): # noqa: E501
"""Site SMTP Alerts # noqa: E501
Retrieves all SMTP alerts defined in the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_smtp_alerts(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSmtpAlert
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_smtp_alerts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_smtp_alerts_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_smtp_alerts_with_http_info(self, id, **kwargs): # noqa: E501
"""Site SMTP Alerts # noqa: E501
Retrieves all SMTP alerts defined in the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_smtp_alerts_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSmtpAlert
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_smtp_alerts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_smtp_alerts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts/smtp', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourcesSmtpAlert', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_snmp_alert(self, id, alert_id, **kwargs): # noqa: E501
"""Site SNMP Alert # noqa: E501
Retrieves the specified SNMP alert. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_snmp_alert(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: SnmpAlert
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_snmp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
else:
(data) = self.get_site_snmp_alert_with_http_info(id, alert_id, **kwargs) # noqa: E501
return data
def get_site_snmp_alert_with_http_info(self, id, alert_id, **kwargs): # noqa: E501
"""Site SNMP Alert # noqa: E501
Retrieves the specified SNMP alert. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_snmp_alert_with_http_info(id, alert_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int alert_id: The identifier of the alert. (required)
:return: SnmpAlert
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'alert_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_snmp_alert" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_site_snmp_alert`") # noqa: E501
# verify the required parameter 'alert_id' is set
if ('alert_id' not in params or
params['alert_id'] is None):
raise ValueError("Missing the required parameter `alert_id` when calling `get_site_snmp_alert`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'alert_id' in params:
path_params['alertId'] = params['alert_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts/snmp/{alertId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SnmpAlert', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_site_snmp_alerts(self, id, **kwargs): # noqa: E501
"""Site SNMP Alerts # noqa: E501
Retrieves all SNMP alerts defined in the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_site_snmp_alerts(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: ResourcesSnmpAlert
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_site_snmp_alerts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_site_snmp_alerts_with_http_info(id, **kwargs) # noqa: E501
return data
def get_site_snmp_alerts_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site SNMP Alerts  # noqa: E501
    Retrieves all SNMP alerts defined in the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_site_snmp_alerts_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesSnmpAlert
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'} at this point; validated
    # kwargs are merged in below and the raw 'kwargs' entry is dropped, so
    # `params` ends up holding every effective argument.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_site_snmp_alerts" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_site_snmp_alerts`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/snmp', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourcesSnmpAlert',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_site_syslog_alert(self, id, alert_id, **kwargs):  # noqa: E501
    """Site Syslog Alert  # noqa: E501

    Retrieves the specified Syslog alert.  Thin wrapper around
    :meth:`get_site_syslog_alert_with_http_info` that forces
    ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :return: SyslogAlert (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_site_syslog_alert_with_http_info(id, alert_id, **kwargs)  # noqa: E501
def get_site_syslog_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
    """Site Syslog Alert  # noqa: E501
    Retrieves the specified Syslog alert.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_site_syslog_alert_with_http_info(id, alert_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :return: SyslogAlert
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id', 'alert_id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'alert_id', 'kwargs'}; validated
    # kwargs are merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_site_syslog_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_site_syslog_alert`")  # noqa: E501
    # verify the required parameter 'alert_id' is set
    if ('alert_id' not in params or
            params['alert_id'] is None):
        raise ValueError("Missing the required parameter `alert_id` when calling `get_site_syslog_alert`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} and {alertId} placeholders in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'alert_id' in params:
        path_params['alertId'] = params['alert_id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/syslog/{alertId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SyslogAlert',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_site_syslog_alerts(self, id, **kwargs):  # noqa: E501
    """Site Syslog Alerts  # noqa: E501

    Retrieves all Syslog alerts defined in the site.  Thin wrapper around
    :meth:`get_site_syslog_alerts_with_http_info` that forces
    ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesSyslogAlert (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_site_syslog_alerts_with_http_info(id, **kwargs)  # noqa: E501
def get_site_syslog_alerts_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Syslog Alerts  # noqa: E501
    Retrieves all Syslog alerts defined in the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_site_syslog_alerts_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesSyslogAlert
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_site_syslog_alerts" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_site_syslog_alerts`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/syslog', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourcesSyslogAlert',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_site_tags(self, id, **kwargs):  # noqa: E501
    """Site Tags  # noqa: E501

    Retrieves the list of tags added to the sites.  Thin wrapper around
    :meth:`get_site_tags_with_http_info` that forces
    ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesTag (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_site_tags_with_http_info(id, **kwargs)  # noqa: E501
def get_site_tags_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Tags  # noqa: E501
    Retrieves the list of tags added to the sites.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_site_tags_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesTag
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_site_tags" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_site_tags`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/tags', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourcesTag',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_site_users(self, id, **kwargs):  # noqa: E501
    """Site Users Access  # noqa: E501

    Retrieve the list of non-administrator users that have access to the
    site.  Thin wrapper around :meth:`get_site_users_with_http_info` that
    forces ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesUser (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_site_users_with_http_info(id, **kwargs)  # noqa: E501
def get_site_users_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Users Access  # noqa: E501
    Retrieve the list of non-administrator users that have access to the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_site_users_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesUser
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_site_users" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_site_users`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/users', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourcesUser',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_sites(self, **kwargs):  # noqa: E501
    """Sites  # noqa: E501

    Retrieves a paged resource of accessible sites.  Thin wrapper around
    :meth:`get_sites_with_http_info` that forces
    ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int page: The index of the page (zero-based) to retrieve.
    :param int size: The number of records per page to retrieve.
    :param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`. The default sort order is ascending. Multiple sort criteria can be specified using multiple sort query parameters.
    :return: PageOfSite (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_sites_with_http_info(**kwargs)  # noqa: E501
def get_sites_with_http_info(self, **kwargs):  # noqa: E501
    """Sites  # noqa: E501
    Retrieves a paged resource of accessible sites.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_sites_with_http_info(async=True)
    >>> result = thread.get()
    :param async bool
    :param int page: The index of the page (zero-based) to retrieve.
    :param int size: The number of records per page to retrieve.
    :param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`. The default sort order is ascending. Multiple sort criteria can be specified using multiple sort query parameters.
    :return: PageOfSite
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['page', 'size', 'sort']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'kwargs'}; validated kwargs are merged in
    # and the raw 'kwargs' entry dropped.  All parameters are optional, so
    # no required-parameter checks follow.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_sites" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    # Pagination/sort options are sent as query-string parameters.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'size' in params:
        query_params.append(('size', params['size']))  # noqa: E501
    if 'sort' in params:
        query_params.append(('sort', params['sort']))  # noqa: E501
        # 'multi' emits one `sort=` query parameter per list element.
        collection_formats['sort'] = 'multi'  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageOfSite',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_web_auth_html_forms(self, id, **kwargs):  # noqa: E501
    """Web Authentication HTML Forms  # noqa: E501

    Retrieves all HTML form authentications configured in the site.  Thin
    wrapper around :meth:`get_web_auth_html_forms_with_http_info` that
    forces ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesWebFormAuthentication (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_web_auth_html_forms_with_http_info(id, **kwargs)  # noqa: E501
def get_web_auth_html_forms_with_http_info(self, id, **kwargs):  # noqa: E501
    """Web Authentication HTML Forms  # noqa: E501
    Retrieves all HTML form authentications configured in the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_web_auth_html_forms_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesWebFormAuthentication
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_web_auth_html_forms" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_web_auth_html_forms`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/web_authentication/html_forms', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourcesWebFormAuthentication',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_web_auth_http_headers(self, id, **kwargs):  # noqa: E501
    """Web Authentication HTTP Headers  # noqa: E501

    Retrieves all HTTP header authentications configured in the site.  Thin
    wrapper around :meth:`get_web_auth_http_headers_with_http_info` that
    forces ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesWebHeaderAuthentication (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.get_web_auth_http_headers_with_http_info(id, **kwargs)  # noqa: E501
def get_web_auth_http_headers_with_http_info(self, id, **kwargs):  # noqa: E501
    """Web Authentication HTTP Headers  # noqa: E501
    Retrieves all HTTP header authentications configured in the site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_web_auth_http_headers_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: ResourcesWebHeaderAuthentication
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_web_auth_http_headers" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_web_auth_http_headers`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call. `async` is a keyword argument of the generated
    # ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/web_authentication/http_headers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourcesWebHeaderAuthentication',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_all_excluded_asset_groups(self, id, **kwargs):  # noqa: E501
    """Site Excluded Asset Groups  # noqa: E501

    Removes all excluded asset groups from the specified static site.  Thin
    wrapper around :meth:`remove_all_excluded_asset_groups_with_http_info`
    that forces ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.remove_all_excluded_asset_groups_with_http_info(id, **kwargs)  # noqa: E501
def remove_all_excluded_asset_groups_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Excluded Asset Groups  # noqa: E501
    Removes all excluded asset groups from the specified static site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.remove_all_excluded_asset_groups_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_all_excluded_asset_groups" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `remove_all_excluded_asset_groups`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # DELETE request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call (DELETE). `async` is a keyword argument of the
    # generated ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/excluded_asset_groups', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_all_included_asset_groups(self, id, **kwargs):  # noqa: E501
    """Site Included Asset Groups  # noqa: E501

    Removes all included asset groups from the specified static site.  Thin
    wrapper around :meth:`remove_all_included_asset_groups_with_http_info`
    that forces ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.remove_all_included_asset_groups_with_http_info(id, **kwargs)  # noqa: E501
def remove_all_included_asset_groups_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Included Asset Groups  # noqa: E501
    Removes all included asset groups from the specified static site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.remove_all_included_asset_groups_with_http_info(id, async=True)
    >>> result = thread.get()
    :param async bool
    :param int id: The identifier of the site. (required)
    :return: Links
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises TypeError.
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots {'self', 'id', 'kwargs'}; validated kwargs are
    # merged in and the raw 'kwargs' entry dropped.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_all_included_asset_groups" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `remove_all_included_asset_groups`")  # noqa: E501
    collection_formats = {}
    # Substitute the {id} placeholder in the endpoint path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # DELETE request: no body is sent
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the HTTP call (DELETE). `async` is a keyword argument of the
    # generated ApiClient (pre-Python-3.7 codegen; `async` is reserved in 3.7+).
    return self.api_client.call_api(
        '/api/3/sites/{id}/included_asset_groups', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_asset_from_site(self, id, asset_id, **kwargs):  # noqa: E501
    """Site Asset  # noqa: E501

    Removes an asset from a site. The asset will only be deleted if it
    belongs to no other sites.  Thin wrapper around
    :meth:`remove_asset_from_site_with_http_info` that forces
    ``_return_http_data_only`` so only the payload is returned.

    Synchronous by default; pass ``async=True`` to get a request thread.

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int asset_id: The identifier of the asset. (required)
    :return: Links (or the request thread when asynchronous)
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the same delegated call once
    # _return_http_data_only is forced on.
    return self.remove_asset_from_site_with_http_info(id, asset_id, **kwargs)  # noqa: E501
def remove_asset_from_site_with_http_info(self, id, asset_id, **kwargs): # noqa: E501
"""Site Asset # noqa: E501
Removes an asset from a site. The asset will only be deleted if it belongs to no other sites. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_asset_from_site_with_http_info(id, asset_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int asset_id: The identifier of the asset. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_asset_from_site" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_asset_from_site`") # noqa: E501
# verify the required parameter 'asset_id' is set
if ('asset_id' not in params or
params['asset_id'] is None):
raise ValueError("Missing the required parameter `asset_id` when calling `remove_asset_from_site`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_id' in params:
path_params['assetId'] = params['asset_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/assets/{assetId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_excluded_asset_group(self, id, asset_group_id, **kwargs): # noqa: E501
"""Site Excluded Asset Group # noqa: E501
Removes the specified asset group from the excluded asset groups configured in the static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_excluded_asset_group(id, asset_group_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int asset_group_id: The identifier of the asset group. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_excluded_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
else:
(data) = self.remove_excluded_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
return data
def remove_excluded_asset_group_with_http_info(self, id, asset_group_id, **kwargs): # noqa: E501
"""Site Excluded Asset Group # noqa: E501
Removes the specified asset group from the excluded asset groups configured in the static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_excluded_asset_group_with_http_info(id, asset_group_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int asset_group_id: The identifier of the asset group. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_group_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_excluded_asset_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_excluded_asset_group`") # noqa: E501
# verify the required parameter 'asset_group_id' is set
if ('asset_group_id' not in params or
params['asset_group_id'] is None):
raise ValueError("Missing the required parameter `asset_group_id` when calling `remove_excluded_asset_group`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_group_id' in params:
path_params['assetGroupId'] = params['asset_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/excluded_asset_groups/{assetGroupId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_included_asset_group(self, id, asset_group_id, **kwargs): # noqa: E501
"""Site Included Asset Group # noqa: E501
Removes the specified asset group from the included asset groups configured in the static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_included_asset_group(id, asset_group_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int asset_group_id: The identifier of the asset group. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_included_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
else:
(data) = self.remove_included_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
return data
def remove_included_asset_group_with_http_info(self, id, asset_group_id, **kwargs): # noqa: E501
"""Site Included Asset Group # noqa: E501
Removes the specified asset group from the included asset groups configured in the static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_included_asset_group_with_http_info(id, asset_group_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int asset_group_id: The identifier of the asset group. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_group_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_included_asset_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_included_asset_group`") # noqa: E501
# verify the required parameter 'asset_group_id' is set
if ('asset_group_id' not in params or
params['asset_group_id'] is None):
raise ValueError("Missing the required parameter `asset_group_id` when calling `remove_included_asset_group`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_group_id' in params:
path_params['assetGroupId'] = params['asset_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/included_asset_groups/{assetGroupId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_site_assets(self, id, **kwargs): # noqa: E501
"""Site Assets # noqa: E501
Removes all assets from the specified site. Assets will be deleted entirely from the Security Console if either Asset Linking is disabled or if Asset Linking is enabled and the asset only existed in this site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_site_assets(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_site_assets_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.remove_site_assets_with_http_info(id, **kwargs) # noqa: E501
return data
def remove_site_assets_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Assets # noqa: E501
Removes all assets from the specified site. Assets will be deleted entirely from the Security Console if either Asset Linking is disabled or if Asset Linking is enabled and the asset only existed in this site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_site_assets_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_site_assets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_site_assets`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/assets', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_site_tag(self, id, tag_id, **kwargs): # noqa: E501
"""Site Tag # noqa: E501
Removes the specified tag from the site's tags. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_site_tag(id, tag_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int tag_id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_site_tag_with_http_info(id, tag_id, **kwargs) # noqa: E501
else:
(data) = self.remove_site_tag_with_http_info(id, tag_id, **kwargs) # noqa: E501
return data
def remove_site_tag_with_http_info(self, id, tag_id, **kwargs): # noqa: E501
"""Site Tag # noqa: E501
Removes the specified tag from the site's tags. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_site_tag_with_http_info(id, tag_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int tag_id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'tag_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_site_tag" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_site_tag`") # noqa: E501
# verify the required parameter 'tag_id' is set
if ('tag_id' not in params or
params['tag_id'] is None):
raise ValueError("Missing the required parameter `tag_id` when calling `remove_site_tag`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'tag_id' in params:
path_params['tagId'] = params['tag_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/tags/{tagId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_site_user(self, id, user_id, **kwargs): # noqa: E501
"""Site User Access # noqa: E501
Removes the specified user from the site's access list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_site_user(id, user_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int user_id: The identifier of the user. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_site_user_with_http_info(id, user_id, **kwargs) # noqa: E501
else:
(data) = self.remove_site_user_with_http_info(id, user_id, **kwargs) # noqa: E501
return data
def remove_site_user_with_http_info(self, id, user_id, **kwargs): # noqa: E501
"""Site User Access # noqa: E501
Removes the specified user from the site's access list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_site_user_with_http_info(id, user_id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int user_id: The identifier of the user. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'user_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_site_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_site_user`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in params or
params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `remove_site_user`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'user_id' in params:
path_params['userId'] = params['user_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/users/{userId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_credentials(self, id, **kwargs): # noqa: E501
"""Site Scan Credentials # noqa: E501
Updates multiple site credentials. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_credentials(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SiteCredential] param1: A list of site credentials resources.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_credentials_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_credentials_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_credentials_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Credentials # noqa: E501
Updates multiple site credentials. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_credentials_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SiteCredential] param1: A list of site credentials resources.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param1'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_credentials" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_credentials`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param1' in params:
body_params = params['param1']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/site_credentials', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_discovery_connection(self, id, **kwargs): # noqa: E501
"""Site Discovery Connection # noqa: E501
Updates the discovery connection assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_discovery_connection(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int param0: The identifier of the discovery connection.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_discovery_connection_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_discovery_connection_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_discovery_connection_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Discovery Connection # noqa: E501
Updates the discovery connection assigned to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_discovery_connection_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int param0: The identifier of the discovery connection.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_discovery_connection" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_discovery_connection`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/discovery_connection', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_discovery_search_criteria(self, id, param1, **kwargs): # noqa: E501
"""Site Discovery Search Criteria # noqa: E501
Update the search criteria of the dynamic site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_discovery_search_criteria(id, param1, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param DiscoverySearchCriteria param1: param1 (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_discovery_search_criteria_with_http_info(id, param1, **kwargs) # noqa: E501
else:
(data) = self.set_site_discovery_search_criteria_with_http_info(id, param1, **kwargs) # noqa: E501
return data
def set_site_discovery_search_criteria_with_http_info(self, id, param1, **kwargs): # noqa: E501
"""Site Discovery Search Criteria # noqa: E501
Update the search criteria of the dynamic site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_discovery_search_criteria_with_http_info(id, param1, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param DiscoverySearchCriteria param1: param1 (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param1'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_discovery_search_criteria" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_discovery_search_criteria`") # noqa: E501
# verify the required parameter 'param1' is set
if ('param1' not in params or
params['param1'] is None):
raise ValueError("Missing the required parameter `param1` when calling `set_site_discovery_search_criteria`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param1' in params:
body_params = params['param1']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/discovery_search_criteria', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_scan_engine(self, id, **kwargs): # noqa: E501
"""Site Scan Engine # noqa: E501
Updates the assigned scan engine to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_scan_engine(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int param0: The identifier of the scan engine.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_scan_engine_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_scan_engine_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_scan_engine_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Engine # noqa: E501
Updates the assigned scan engine to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_scan_engine_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param int param0: The identifier of the scan engine.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_scan_engine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_scan_engine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_engine', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_scan_schedules(self, id, **kwargs): # noqa: E501
"""Site Scan Schedules # noqa: E501
Updates all scan schedules for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_scan_schedules(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[ScanSchedule] param0: Array of resources for updating all scan schedules defined in the site. Scan schedules defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_scan_schedules_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_scan_schedules_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_scan_schedules_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Schedules # noqa: E501
Updates all scan schedules for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_scan_schedules_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[ScanSchedule] param0: Array of resources for updating all scan schedules defined in the site. Scan schedules defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_scan_schedules" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_scan_schedules`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_schedules', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_scan_template(self, id, **kwargs): # noqa: E501
"""Site Scan Template # noqa: E501
Updates the assigned scan template to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_scan_template(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param str param0: The identifier of the scan template.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_scan_template_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_scan_template_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_scan_template_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Scan Template # noqa: E501
Updates the assigned scan template to the site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_scan_template_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param str param0: The identifier of the scan template.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_scan_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_scan_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/scan_template', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_smtp_alerts(self, id, **kwargs): # noqa: E501
"""Site SMTP Alerts # noqa: E501
Updates all SMTP alerts for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_smtp_alerts(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SmtpAlert] param0: Array of resources for updating all SMTP alerts defined in the site. Alerts defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_smtp_alerts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_smtp_alerts_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_smtp_alerts_with_http_info(self, id, **kwargs): # noqa: E501
"""Site SMTP Alerts # noqa: E501
Updates all SMTP alerts for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_smtp_alerts_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SmtpAlert] param0: Array of resources for updating all SMTP alerts defined in the site. Alerts defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_smtp_alerts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_smtp_alerts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts/smtp', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_snmp_alerts(self, id, **kwargs): # noqa: E501
"""Site SNMP Alerts # noqa: E501
Updates all SNMP alerts for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_snmp_alerts(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SnmpAlert] param0: Array of resources for updating all SNMP alerts defined in the site. Alerts defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_snmp_alerts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_snmp_alerts_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_snmp_alerts_with_http_info(self, id, **kwargs): # noqa: E501
"""Site SNMP Alerts # noqa: E501
Updates all SNMP alerts for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_snmp_alerts_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SnmpAlert] param0: Array of resources for updating all SNMP alerts defined in the site. Alerts defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_snmp_alerts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_snmp_alerts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts/snmp', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_syslog_alerts(self, id, **kwargs): # noqa: E501
"""Site Syslog Alerts # noqa: E501
Updates all Syslog alerts for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_syslog_alerts(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SyslogAlert] param0: Array of resources for updating all Syslog alerts defined in the site. Alerts defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_syslog_alerts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_syslog_alerts_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_syslog_alerts_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Syslog Alerts # noqa: E501
Updates all Syslog alerts for the specified site in a single request using the array of resources defined in the request body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_syslog_alerts_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[SyslogAlert] param0: Array of resources for updating all Syslog alerts defined in the site. Alerts defined in the site that are omitted from this request will be deleted from the site.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_syslog_alerts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_syslog_alerts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/alerts/syslog', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_tags(self, id, **kwargs): # noqa: E501
"""Site Tags # noqa: E501
Updates the site's list of tags. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_tags(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param1: A list of tag identifiers to replace the site's tags.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_tags_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_tags_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_tags_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Tags # noqa: E501
Updates the site's list of tags. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_tags_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param1: A list of tag identifiers to replace the site's tags.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param1'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_tags" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_tags`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param1' in params:
body_params = params['param1']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/tags', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_site_users(self, id, **kwargs): # noqa: E501
"""Site Users Access # noqa: E501
Updates the site's access list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_users(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param0: A list of user identifiers to replace the site's access list.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_site_users_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_site_users_with_http_info(id, **kwargs) # noqa: E501
return data
def set_site_users_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Users Access # noqa: E501
Updates the site's access list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_site_users_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param0: A list of user identifiers to replace the site's access list.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_site_users" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_site_users`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/users', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_excluded_asset_groups(self, id, **kwargs): # noqa: E501
"""Site Excluded Asset Groups # noqa: E501
Updates the excluded asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_excluded_asset_groups(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param0: Array of asset group identifiers.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.update_excluded_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_excluded_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def update_excluded_asset_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Excluded Asset Groups # noqa: E501
Updates the excluded asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_excluded_asset_groups_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param0: Array of asset group identifiers.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_excluded_asset_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_excluded_asset_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/excluded_asset_groups', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_excluded_targets(self, id, **kwargs): # noqa: E501
"""Site Excluded Targets # noqa: E501
Updates the excluded targets in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_excluded_targets(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[str] param0: List of addresses to be the site's new excluded scan targets. Each address is a string that can represent either a hostname, ipv4 address, ipv4 address range, ipv6 address, or CIDR notation.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.update_excluded_targets_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_excluded_targets_with_http_info(id, **kwargs) # noqa: E501
return data
def update_excluded_targets_with_http_info(self, id, **kwargs): # noqa: E501
"""Site Excluded Targets # noqa: E501
Updates the excluded targets in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_excluded_targets_with_http_info(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[str] param0: List of addresses to be the site's new excluded scan targets. Each address is a string that can represent either a hostname, ipv4 address, ipv4 address range, ipv6 address, or CIDR notation.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'param0'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_excluded_targets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_excluded_targets`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'param0' in params:
body_params = params['param0']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/sites/{id}/excluded_targets', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_included_asset_groups(self, id, **kwargs): # noqa: E501
"""Site Included Asset Groups # noqa: E501
Updates the included asset groups in a static site. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_included_asset_groups(id, async=True)
>>> result = thread.get()
:param async bool
:param int id: The identifier of the site. (required)
:param list[int] param0: Array of asset group identifiers.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.update_included_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_included_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def update_included_asset_groups_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Included Asset Groups  # noqa: E501

    Updates the included asset groups in a static site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param list[int] param0: Array of asset group identifiers.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_included_asset_groups" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_included_asset_groups`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The asset-group id list is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/included_asset_groups', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_included_targets(self, id, **kwargs):  # noqa: E501
    """Site Included Targets  # noqa: E501

    Updates the included targets in a static site. Synchronous by default;
    pass async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_included_targets(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param list[str] param0: List of addresses to be the site's new included scan targets. Each address is a string that can represent either a hostname, ipv4 address, ipv4 address range, ipv6 address, or CIDR notation.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_included_targets_with_http_info(id, **kwargs)  # noqa: E501
def update_included_targets_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Included Targets  # noqa: E501

    Updates the included targets in a static site.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param list[str] param0: List of addresses to be the site's new included scan targets. Each address is a string that can represent either a hostname, ipv4 address, ipv4 address range, ipv6 address, or CIDR notation.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_included_targets" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_included_targets`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The target-address list is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/included_targets', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site(self, id, **kwargs):  # noqa: E501
    """Site  # noqa: E501

    Updates the configuration of the site with the specified identifier.
    Synchronous by default; pass async=True to receive the request thread
    instead of the decoded response.  # noqa: E501

    >>> thread = api.update_site(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SiteUpdateResource param0: Resource for updating a site configuration.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_with_http_info(id, **kwargs)  # noqa: E501
def update_site_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site  # noqa: E501

    Updates the configuration of the site with the specified identifier.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SiteUpdateResource param0: Resource for updating a site configuration.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The site-update resource is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site_credential(self, id, credential_id, **kwargs):  # noqa: E501
    """Site Scan Credential  # noqa: E501

    Updates the specified site credential. Synchronous by default; pass
    async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_site_credential(id, credential_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int credential_id: The identifier of the site credential. (required)
    :param SiteCredential param2: The specification of the site credential to update.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_credential_with_http_info(id, credential_id, **kwargs)  # noqa: E501
def update_site_credential_with_http_info(self, id, credential_id, **kwargs):  # noqa: E501
    """Site Scan Credential  # noqa: E501

    Updates the specified site credential.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int credential_id: The identifier of the site credential. (required)
    :param SiteCredential param2: The specification of the site credential to update.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'credential_id', 'param2',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site_credential" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site_credential`")  # noqa: E501
    # verify the required parameter 'credential_id' is set
    if ('credential_id' not in params or
            params['credential_id'] is None):
        raise ValueError("Missing the required parameter `credential_id` when calling `update_site_credential`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'credential_id' in params:
        path_params['credentialId'] = params['credential_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The credential specification is sent as the raw JSON request body.
    body_params = None
    if 'param2' in params:
        body_params = params['param2']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/site_credentials/{credentialId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site_organization(self, id, **kwargs):  # noqa: E501
    """Site Organization Information  # noqa: E501

    Updates the site organization information. Synchronous by default;
    pass async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_site_organization(id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SiteOrganization param0: Resource for updating the specified site's organization information.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_organization_with_http_info(id, **kwargs)  # noqa: E501
def update_site_organization_with_http_info(self, id, **kwargs):  # noqa: E501
    """Site Organization Information  # noqa: E501

    Updates the site organization information.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param SiteOrganization param0: Resource for updating the specified site's organization information.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site_organization" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site_organization`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The organization resource is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/organization', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site_scan_schedule(self, id, schedule_id, **kwargs):  # noqa: E501
    """Site Scan Schedule  # noqa: E501

    Updates the specified scan schedule. Synchronous by default; pass
    async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_site_scan_schedule(id, schedule_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int schedule_id: The identifier of the scan schedule. (required)
    :param ScanSchedule param0: Resource for updating the specified scan schedule.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_scan_schedule_with_http_info(id, schedule_id, **kwargs)  # noqa: E501
def update_site_scan_schedule_with_http_info(self, id, schedule_id, **kwargs):  # noqa: E501
    """Site Scan Schedule  # noqa: E501

    Updates the specified scan schedule.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int schedule_id: The identifier of the scan schedule. (required)
    :param ScanSchedule param0: Resource for updating the specified scan schedule.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'schedule_id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site_scan_schedule" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site_scan_schedule`")  # noqa: E501
    # verify the required parameter 'schedule_id' is set
    if ('schedule_id' not in params or
            params['schedule_id'] is None):
        raise ValueError("Missing the required parameter `schedule_id` when calling `update_site_scan_schedule`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'schedule_id' in params:
        path_params['scheduleId'] = params['schedule_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The scan-schedule resource is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/scan_schedules/{scheduleId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site_smtp_alert(self, id, alert_id, **kwargs):  # noqa: E501
    """Site SMTP Alert  # noqa: E501

    Updates the specified SMTP alert. Synchronous by default; pass
    async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_site_smtp_alert(id, alert_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :param SmtpAlert param0: Resource for updating the specified SMTP alert.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_smtp_alert_with_http_info(id, alert_id, **kwargs)  # noqa: E501
def update_site_smtp_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
    """Site SMTP Alert  # noqa: E501

    Updates the specified SMTP alert.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :param SmtpAlert param0: Resource for updating the specified SMTP alert.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'alert_id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site_smtp_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site_smtp_alert`")  # noqa: E501
    # verify the required parameter 'alert_id' is set
    if ('alert_id' not in params or
            params['alert_id'] is None):
        raise ValueError("Missing the required parameter `alert_id` when calling `update_site_smtp_alert`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'alert_id' in params:
        path_params['alertId'] = params['alert_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The SMTP alert resource is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/smtp/{alertId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site_snmp_alert(self, id, alert_id, **kwargs):  # noqa: E501
    """Site SNMP Alert  # noqa: E501

    Updates the specified SNMP alert. Synchronous by default; pass
    async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_site_snmp_alert(id, alert_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :param SnmpAlert param0: Resource for updating the specified SNMP alert.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_snmp_alert_with_http_info(id, alert_id, **kwargs)  # noqa: E501
def update_site_snmp_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
    """Site SNMP Alert  # noqa: E501

    Updates the specified SNMP alert.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :param SnmpAlert param0: Resource for updating the specified SNMP alert.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'alert_id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site_snmp_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site_snmp_alert`")  # noqa: E501
    # verify the required parameter 'alert_id' is set
    if ('alert_id' not in params or
            params['alert_id'] is None):
        raise ValueError("Missing the required parameter `alert_id` when calling `update_site_snmp_alert`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'alert_id' in params:
        path_params['alertId'] = params['alert_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The SNMP alert resource is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/snmp/{alertId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
def update_site_syslog_alert(self, id, alert_id, **kwargs):  # noqa: E501
    """Site Syslog Alert  # noqa: E501

    Updates the specified Syslog alert. Synchronous by default; pass
    async=True to receive the request thread instead of the decoded
    response.  # noqa: E501

    >>> thread = api.update_site_syslog_alert(id, alert_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :param SyslogAlert param0: Resource for updating the specified Syslog alert.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get just the payload.
    kwargs['_return_http_data_only'] = True
    # With the flag above set, the sync and async paths both return the
    # worker's result unchanged, so a single delegation suffices.
    return self.update_site_syslog_alert_with_http_info(id, alert_id, **kwargs)  # noqa: E501
def update_site_syslog_alert_with_http_info(self, id, alert_id, **kwargs):  # noqa: E501
    """Site Syslog Alert  # noqa: E501

    Updates the specified Syslog alert.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (on Python >= 3.7, where
    ``async`` is a reserved word, spell it ``**{'async': True}``).

    :param async bool
    :param int id: The identifier of the site. (required)
    :param int alert_id: The identifier of the alert. (required)
    :param SyslogAlert param0: Resource for updating the specified Syslog alert.
    :return: Links
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument the caller may legally pass.
    all_params = ['id', 'alert_id', 'param0',
                  'async', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_site_syslog_alert" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `update_site_syslog_alert`")  # noqa: E501
    # verify the required parameter 'alert_id' is set
    if ('alert_id' not in params or
            params['alert_id'] is None):
        raise ValueError("Missing the required parameter `alert_id` when calling `update_site_syslog_alert`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'alert_id' in params:
        path_params['alertId'] = params['alert_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The Syslog alert resource is sent as the raw JSON request body.
    body_params = None
    if 'param0' in params:
        body_params = params['param0']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/3/sites/{id}/alerts/syslog/{alertId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Links',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # BUG FIX: ``async=...`` is a SyntaxError on Python >= 3.7 where
        # ``async`` became a keyword; forward it through ** instead.
        **{'async': params.get('async')})
|
[
"zachary_youtz@rapid7.com"
] |
zachary_youtz@rapid7.com
|
9aed11297118cb5fdc96240115d2d3b3c4dcf43b
|
91606986caf93b7171a74f2a8e7d4cfdc9fd7503
|
/FlaskProject/migrations/versions/9eff55524c0b_.py
|
738b04d2ebc5cc2687db74b7c4ac58456eef86b5
|
[] |
no_license
|
gabyyan/flask_movieproject
|
08d6a56124213bf3bbc8e4439384ba551e5fd8f0
|
cffcfd8923a39863e52461dd00e9c13020c25852
|
refs/heads/master
| 2020-03-27T21:28:46.438393
| 2018-09-11T15:32:19
| 2018-09-11T15:32:19
| 147,147,714
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,311
|
py
|
"""empty message
Revision ID: 9eff55524c0b
Revises: 56b4b4540853
Create Date: 2018-08-31 17:12:27.969442
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
# `revision` uniquely names this migration; `down_revision` points at the
# migration it builds on, placing this script in the upgrade chain.
revision = '9eff55524c0b'
down_revision = '56b4b4540853'
# No named branches and no dependencies on other migration scripts.
branch_labels = None
depends_on = None
def upgrade():
    """Allow NULL in the `user` table's face, loginTime and uuid columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    columns_to_relax = (
        ('face', mysql.VARCHAR(length=30)),
        ('loginTime', mysql.VARCHAR(length=30)),
        ('uuid', mysql.VARCHAR(length=32)),
    )
    for column_name, current_type in columns_to_relax:
        op.alter_column('user', column_name,
                        existing_type=current_type,
                        nullable=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse of upgrade(): restore NOT NULL on user.uuid, user.loginTime and user.face.

    NOTE(review): fails if any existing rows hold NULLs in these columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('user', 'uuid',
               existing_type=mysql.VARCHAR(length=32),
               nullable=False)
    op.alter_column('user', 'loginTime',
               existing_type=mysql.VARCHAR(length=30),
               nullable=False)
    op.alter_column('user', 'face',
               existing_type=mysql.VARCHAR(length=30),
               nullable=False)
    # ### end Alembic commands ###
|
[
"yan287216731@163.com"
] |
yan287216731@163.com
|
d0bec301ce5859d2fe6e8497e3b6caa85f38945b
|
9acf4abfb0d0285e4e676bddf505e3f51067a62a
|
/node_modules/webpack-dev-server/node_modules/fsevents/build/config.gypi
|
43b1362227511bf40b69bef08ed30fff96086865
|
[
"MIT"
] |
permissive
|
Robertgaraban/projeto-final.angola.github.io
|
a166b35fcd810216a4c5f4e76da399f05e020d3c
|
b0d1f80b17a404ff2ebdb57f95693f7ef538cd5f
|
refs/heads/master
| 2022-12-14T10:11:40.957557
| 2020-08-30T01:05:39
| 2020-08-30T01:05:39
| 290,093,501
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,682
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"build_v8_with_gn": "false",
"coverage": "false",
"dcheck_always_on": 0,
"debug_nghttp2": "false",
"debug_node": "false",
"enable_lto": "false",
"enable_pgo_generate": "false",
"enable_pgo_use": "false",
"error_on_warn": "false",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_in": "../../deps/icu-tmp/icudt67l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_path": "deps/icu-small",
"icu_small": "false",
"icu_ver_major": "67",
"is_debug": 0,
"llvm_version": "11.0",
"napi_build_version": "6",
"node_byteorder": "little",
"node_debug_lib": "false",
"node_enable_d8": "false",
"node_install_npm": "true",
"node_module_version": 83,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_brotli": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_target_type": "executable",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_node_code_cache": "true",
"node_use_node_snapshot": "true",
"node_use_openssl": "true",
"node_use_v8_platform": "true",
"node_with_ltcg": "false",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_is_fips": "false",
"shlib_suffix": "83.dylib",
"target_arch": "x64",
"v8_enable_31bit_smis_on_64bit_arch": 0,
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_enable_lite_mode": 0,
"v8_enable_pointer_compression": 0,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 1,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_use_siphash": 1,
"want_separate_host_toolset": 0,
"xcode_version": "11.0",
"nodedir": "/Users/robertgaraban/Library/Caches/node-gyp/14.6.0",
"standalone_static_library": 1,
"dry_run": "",
"legacy_bundling": "",
"save_dev": "",
"browser": "",
"commit_hooks": "true",
"only": "",
"viewer": "man",
"also": "",
"rollback": "true",
"sign_git_commit": "",
"audit": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"maxsockets": "50",
"shell": "/bin/bash",
"metrics_registry": "https://registry.npmjs.org/",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"timing": "",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"preid": "",
"fetch_retries": "2",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"logs_max": "10",
"prefer_online": "",
"cache_lock_retries": "10",
"global_style": "",
"update_notifier": "true",
"audit_level": "low",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"offline": "",
"read_only": "",
"searchlimit": "20",
"access": "",
"json": "",
"allow_same_version": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/robertgaraban/.npm-init.js",
"userconfig": "/Users/robertgaraban/.npmrc",
"cidr": "",
"node_version": "14.6.0",
"user": "",
"auth_type": "legacy",
"editor": "vi",
"ignore_prepublish": "",
"save": "true",
"script_shell": "",
"tag": "latest",
"before": "",
"global": "",
"progress": "true",
"ham_it_up": "",
"optional": "true",
"searchstaleness": "900",
"bin_links": "true",
"force": "",
"save_prod": "",
"searchopts": "",
"depth": "Infinity",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"rebuild_bundle": "true",
"sso_poll_frequency": "500",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"sso_type": "oauth",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"prefer_offline": "",
"version": "",
"cache_min": "10",
"otp": "",
"cache": "/Users/robertgaraban/.npm",
"searchexclude": "",
"color": "true",
"package_lock": "true",
"fund": "true",
"package_lock_only": "",
"save_optional": "",
"user_agent": "npm/6.14.6 node/v14.6.0 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"send_metrics": "",
"init_version": "1.0.0",
"node_options": "",
"umask": "0022",
"scope": "",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/var/folders/56/1dn4zbm94fqdtz0069hfvw5r0000gn/T",
"unsafe_perm": "true",
"format_package_lock": "true",
"link": "",
"prefix": "/usr/local"
}
}
|
[
"robertgaraban@gmail.com"
] |
robertgaraban@gmail.com
|
072bedd95495fe90cc7bd2d030588fc77c7a516a
|
18e79afa0f77071dec56932e8089a713f764493b
|
/dance-ml-a/train.py
|
60100659ae89a85e77deea2c3b5240c0f8f7b8a9
|
[
"Apache-2.0"
] |
permissive
|
jaysonzhao/DancingRobot
|
9eebf1ec3bbe11298fe852a0b7cde7671691c0bf
|
9c51a8784bc77a626065a443283cbe8aa8444f53
|
refs/heads/master
| 2020-09-05T22:16:34.682030
| 2019-12-19T01:48:16
| 2019-12-19T01:48:16
| 220,229,436
| 0
| 0
|
Apache-2.0
| 2019-11-07T12:15:00
| 2019-11-07T12:15:00
| null |
UTF-8
|
Python
| false
| false
| 6,090
|
py
|
# The data set used in this example is from http://archive.ics.uci.edu/ml/datasets/Wine+Quality
# P. Cortez, A. Cerdeira, F. Almeida, T. Matos and J. Reis.
# Modeling wine preferences by data mining from physicochemical properties. In Decision Support Systems, Elsevier, 47(4):547-553, 2009.
import os
import warnings
import sys
import pandas as pd
import numpy as np
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import ElasticNet
from io import StringIO
import mlflow
import mlflow.sklearn
import tornado.ioloop
import tornado.web
from tornado import websocket, web, ioloop
import json, os
class IndexHandler(web.RequestHandler):
    '''Handle requests on / '''
    def get(self):
        # Serve the landing page template.
        self.render("index.html")
class LoadModelHandler(web.RequestHandler):
    """POST /loadmodel: load a previously saved MLflow sklearn model into the
    module-global `lr` so /predict can use it."""
    def set_default_headers(self):
        # Permissive CORS so the browser front-end can call this API directly.
        print ("setting headers!!!")
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
    def post(self, *args):
        global lr
        global loadedModelName
        # `experimentName` is the MLflow model path to load from.
        experimentName = self.get_query_argument("experimentName")
        print("Loading model :" + experimentName)
        lr = mlflow.sklearn.load_model(experimentName)
        # Remember which model is active so /savemodel writes it back under the same name.
        loadedModelName = experimentName
        self.write("OK");
        self.finish()
class SaveModelHandler(web.RequestHandler):
    """POST /savemodel: persist the currently loaded model (global `lr`) under
    the name it was loaded/trained with (global `loadedModelName`)."""
    def set_default_headers(self):
        # Permissive CORS so the browser front-end can call this API directly.
        print ("setting headers!!!")
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
    def post(self, *args):
        global lr
        global loadedModelName
        # Cloudpickle serialization keeps custom objects loadable across environments.
        mlflow.sklearn.save_model(lr, loadedModelName, serialization_format=mlflow.sklearn.SERIALIZATION_FORMAT_CLOUDPICKLE)
        self.write("OK");
        self.finish()
class ApiPredictHandler(web.RequestHandler):
    """POST /predict: run the loaded model on CSV rows sent in the request body."""
    def set_default_headers(self):
        # Permissive CORS so the browser front-end can call this API directly.
        print ("setting headers!!!")
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
    def post(self, *args):
        '''data = json.loads(self.request.body)'''
        # Body is raw CSV text (no header row), decoded before parsing.
        data = self.request.body.decode("utf-8")
        experimentName = self.get_query_argument("experimentName")
        prediction = predict(data,experimentName)
        print(prediction)
        # The prediction array is returned as its plain-text repr.
        self.write(str(prediction));
        self.finish()
class ApiTrainHandler(web.RequestHandler):
    """POST /train: train a new model from a CSV request body and log it to
    MLflow under the given experimentName."""
    def set_default_headers(self):
        # Permissive CORS so the browser front-end can call this API directly.
        print ("setting headers!!!")
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
    def post(self, *args):
        '''data = json.loads(self.request.body)'''
        # Read the wine-quality csv file (make sure you're running this from the root of MLflow!)
        '''uploaded_csv_file = self.request.files['file'][0]'''
        '''data = uploaded_csv_file.read_all()'''
        # Request body is the raw CSV text; parse it straight into a DataFrame.
        data = pd.read_csv(StringIO(self.request.body.decode("utf-8")))
        print(data)
        '''wine_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "audio-dance.csv")'''
        '''data = pd.read_csv(wine_path)'''
        experimentName = self.get_query_argument("experimentName")
        print("Experiment Name : " + experimentName)
        train_data(data, experimentName);
        self.finish()
def make_app():
    """Build the Tornado application with all route -> handler mappings."""
    return tornado.web.Application([
        (r"/", IndexHandler),
        (r'/predict', ApiPredictHandler),
        (r'/train', ApiTrainHandler),
        (r'/savemodel', SaveModelHandler),
        (r'/loadmodel', LoadModelHandler)
    ])
def predict(values, experimentName):
    """Run the globally loaded model on CSV rows given as a string.

    :param values: CSV text without a header row.
    :param experimentName: unused here; the model already in the global
        `lr` is applied regardless of this argument.
    :return: the model's predictions for each row.
    """
    global lr
    global loadedModelName
    test_values = pd.read_csv(StringIO(values), header=None)
    # Column index 11 is dropped before predicting — presumably it holds the
    # target ("quality") column. TODO(review): confirm callers always send it.
    prediction = lr.predict(test_values.drop(test_values.columns[11], axis=1))
    return prediction
def eval_metrics(actual, pred):
    """Return the (rmse, mae, r2) regression metrics for `pred` against `actual`."""
    mse = mean_squared_error(actual, pred)
    return (
        np.sqrt(mse),
        mean_absolute_error(actual, pred),
        r2_score(actual, pred),
    )
def train_data(data, experimentName):
    """Train an ElasticNet regressor on `data` and log the run to MLflow.

    Expects `data` to contain a "quality" target column plus feature columns.
    Stores the fitted model in the module-global `lr` and records
    `experimentName` in the global `loadedModelName` so subsequent /predict
    and /savemodel requests can reuse it.
    """
    warnings.filterwarnings("ignore")
    # Fixed seed so the train/test split is reproducible between runs.
    np.random.seed(40)
    # Split the data into training and test sets. (0.75, 0.25) split.
    train, test = train_test_split(data)
    # The predicted column is "quality" which is a scalar from [3, 9]
    train_x = train.drop(["quality"], axis=1)
    test_x = test.drop(["quality"], axis=1)
    train_y = train[["quality"]]
    test_y = test[["quality"]]
    # Hyperparameters may be supplied on the command line; both default to 1.0.
    alpha = float(sys.argv[1]) if len(sys.argv) > 1 else 1.0
    l1_ratio = float(sys.argv[2]) if len(sys.argv) > 2 else 1.0
    with mlflow.start_run():
        global lr
        global loadedModelName
        lr = ElasticNet(alpha=alpha, l1_ratio=l1_ratio, random_state=42)
        lr.fit(train_x, train_y)
        predicted_qualities = lr.predict(test_x)
        (rmse, mae, r2) = eval_metrics(test_y, predicted_qualities)
        print("Elasticnet model (alpha=%f, l1_ratio=%f):" % (alpha, l1_ratio))
        print(" RMSE: %s" % rmse)
        print(" MAE: %s" % mae)
        print(" R2: %s" % r2)
        # Record hyperparameters, metrics and the fitted model in MLflow.
        mlflow.log_param("alpha", alpha)
        mlflow.log_param("l1_ratio", l1_ratio)
        mlflow.log_metric("rmse", rmse)
        mlflow.log_metric("r2", r2)
        mlflow.log_metric("mae", mae)
        mlflow.sklearn.log_model(lr, experimentName)
        loadedModelName = experimentName
if __name__ == "__main__":
    # NOTE(review): `global` at module level is a no-op; kept verbatim.
    global lr
    global loadedModelName
    loadedModelName = ''
    # Serve the training/prediction API on port 8080.
    app = make_app()
    app.listen(8080)
    # Dump the process environment for debugging (e.g. container deployments).
    for item, value in os.environ.items():
        print('{}: {}'.format(item, value))
    tornado.ioloop.IOLoop.current().start()
|
[
"jaysonzhao@vip.qq.com"
] |
jaysonzhao@vip.qq.com
|
b7bc1c826723518a36a67f8b23494a2c2c8c6c79
|
92e690d213886c8c2ca99882da3ff0cf23fbafc9
|
/docker-demo-with-django/app/manage.py
|
be81d4d28212eddf87ce052e246b825215c97929
|
[] |
no_license
|
NaokiMItamura/grcdjango
|
1a394cd7bdfbe2d2a0d8ff8856bd2773931d147d
|
3d5fbc4e4278fcd880e7bf06e078dd975c70f844
|
refs/heads/main
| 2023-08-29T07:42:51.318910
| 2021-10-22T06:14:44
| 2021-10-22T06:14:44
| 418,013,956
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point: configure settings and hand argv to Django's CLI."""
    # Default to this project's settings module unless the caller overrides it.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_deme.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; `from exc` keeps the original cause.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to Django's management commands (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
|
[
"snbyqsnbyq@gmail.com"
] |
snbyqsnbyq@gmail.com
|
0d01ffe64d29fd08c3017e2b084f1c16e320fd52
|
626c1cb2542fca1fb2ddff4b1eb440c65e301248
|
/main.py
|
4effa4947309602a6ad9041c286d07057e3684a9
|
[] |
no_license
|
frankfu/Simplex
|
060fcd390f14d70d1aa34796044c34782600afc5
|
4c64a4e793f7d7972cb7cd3bfd7bf8514a5056f2
|
refs/heads/master
| 2021-01-19T15:03:56.436188
| 2013-05-09T19:29:29
| 2013-05-09T19:29:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,638
|
py
|
# -*- coding: utf-8 -*-
# Simplex: converts a plain-text book (chapters marked by lines like
# "\1 Title") into a set of static HTML pages under ./html.
# NOTE(review): Python 2 code (`except OSError, exc` syntax).
import sys
import os
import re
import markup
# Stylesheet path referenced from every generated page.
styles = '../style.css'
# argv[1] is the source text file; line 0 is the book title.
with open(sys.argv[1], 'r') as f:
    lines = f.readlines()
try:
    os.makedirs('html')
except OSError, exc:
    # Output directory already exists — that's fine.
    pass
# --- Build the table-of-contents page (html/index.html) ---
with open('html/index.html', 'w') as f:
    # One link per chapter-heading line ("\<number> <title>").
    chapters = ['<a href="chapter%s.html">%s</a>' % re.match(r'^\\(\d+)\s*(.*)', i).groups() for i in lines if re.match(r'^\\\d+', i)]
    title = lines[0].strip()
    page = markup.page()
    page.init(css=styles, title=title, charset='utf-8', lang='zh-CN')
    page.h1(title)
    page.div(id_='box')
    page.div('目录', class_='header')
    page.ul()
    page.li(chapters)
    page.ul.close()
    page.div('This page is made by Simplex 1.0', id_='footer')
    page.div.close()
    f.write(str(page))
# --- Emit one HTML page per chapter ---
for i in range(1, len(chapters) + 1):
    page = markup.page()
    # Recompute the indices of all chapter-heading lines in the source.
    index = []
    for j in range(0, len(lines)):
        if re.match(r'^\\\d+', lines[j]):
            index.append(j)
    t = index[i-1]
    start = t + 1
    # The chapter body runs until the next heading (or end of file).
    if i < len(index):
        end = index[i]
    else:
        end = len(lines)
    title = re.match(r'^\\\d+\s*(.*)', lines[t]).group(1)
    paragraphs = lines[start:end]
    # Previous/next navigation links depend on the chapter's position.
    links = []
    if i == 1:
        x = i + 1
        links.append('<a href="index.html">目录</a>')
        links.append('<a href="chapter%d.html">下一章</a> ' % x)
    elif i == len(index):
        x = i - 1
        links.append('<a href="chapter%d.html">上一章</a> ' % x)
        links.append('<a href="index.html">目录</a>')
    else:
        x = i - 1
        links.append('<a href="chapter%d.html">上一章</a> ' % x)
        links.append('<a href="index.html">目录</a> ')
        x = i + 1
        links.append('<a href="chapter%d.html">下一章</a>' % x)
    with open('html/chapter%d.html' % i, 'w') as f:
        page.init(css=styles, title=title, charset='utf-8', lang='zh-CN')
        page.h1(lines[0].strip())
        page.div(id_='navigationColumn')
        page.div('导航', class_='header')
        page.ul()
        page.li(links)
        page.ul.close()
        page.div.close()
        page.div(id_='box')
        page.div(class_='article')
        page.div(title, class_="title")
        page.p(paragraphs)
        page.div(class_='right')
        page.span(links, class_='button')
        page.div.close()
        page.div.close()
        page.div('This page is made by Simplex 1.0', id_='footer')
        page.div.close()
        f.write(str(page))
|
[
"frankz993@gmail.com"
] |
frankz993@gmail.com
|
7078e3b23d5aa23082a6aa4ea401ac47e977fd12
|
b03fbb77af76287bf43272759f1af29ef8dc6982
|
/src/functional/funcs.py
|
6431934aa0b3e05d6a274eac7291889bb189ff5c
|
[
"MIT"
] |
permissive
|
felipefsfs/py-funcs
|
2a970ef4aba30bca7c5d74249090df606c5cfab6
|
968103978cddb23a7f1c657a886a3f6663613e5b
|
refs/heads/master
| 2023-03-23T10:10:42.952233
| 2021-03-21T02:07:38
| 2021-03-21T02:07:38
| 349,872,120
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,117
|
py
|
from functools import reduce
def compose(*funcs):
    """Right-to-left function composition: compose(f, g)(x) == f(g(x)).

    With no functions, returns the identity.
    """
    def _apply(value):
        for fn in reversed(funcs):
            value = fn(value)
        return value
    return _apply
def pipe(*funcs):
    """Left-to-right function composition: pipe(f, g)(x) == g(f(x)).

    With no functions, returns the identity.
    """
    def _apply(value):
        for fn in funcs:
            value = fn(value)
        return value
    return _apply
'''
import { curry } from "./curry.js";
export { compose, eq, flip, identity, pipe };
const compose = (...fns) =>
(...args) => fns.reduceRight((res, fn) => [fn(...res)], args)[0];
const pipe = (...fns) =>
(...args) => fns.reduce((res, fn) => [fn(...res)], args)[0];
// flip :: (a -> b -> c) -> b -> a -> c
const flip = curry((fn, a, b) => fn(b, a));
// eq :: Eq a => a -> a -> Boolean
const eq = curry((a, b) => a === b);
// identity :: a ->
const identity = (x) => x;
import { compose, identity } from "./funcs.js";
import { curry } from "./curry.js";
export { createCompose, Either, IO, Left, Maybe, Right, Task };
const createCompose = curry((F, G) =>
class Compose {
constructor(x) {
this.$value = x;
}
[Deno.customInspect]() {
return `Compose(${Deno.inspect(this.$value)})`;
}
// ----- Pointed (Compose F G)
static of(x) {
return new Compose(F(G(x)));
}
// ----- Functor (Compose F G)
map(fn) {
return new Compose(this.$value.map((x) => x.map(fn)));
}
// ----- Applicative (Compose F G)
ap(f) {
return f.map(this.$value);
}
}
);
class Maybe {
get isNothing() {
return this.$value === null || this.$value === undefined;
}
get isJust() {
return !this.isNothing;
}
constructor(x) {
this.$value = x;
}
[Deno.customInspect]() {
return this.isNothing ? "Nothing" : `Just(${Deno.inspect(this.$value)})`;
}
// ----- Pointed Maybe
static of(x) {
return new Maybe(x);
}
// ----- Functor Maybe
map(fn) {
return this.isNothing ? this : Maybe.of(fn(this.$value));
}
// ----- Applicative Maybe
ap(f) {
return this.isNothing ? this : f.map(this.$value);
}
// ----- Monad Maybe
chain(fn) {
return this.map(fn).join();
}
join() {
return this.isNothing ? this : this.$value;
}
// ----- Traversable Maybe
sequence(of) {
return this.traverse(of, identity);
}
traverse(of, fn) {
return this.isNothing ? of(this) : fn(this.$value).map(Maybe.of);
}
}
class Either {
constructor(x) {
this.$value = x;
}
// ----- Pointed (Either a)
static of(x) {
return new Right(x);
}
}
class Left extends Either {
get isLeft() {
return true;
}
get isRight() {
return false;
}
static of(x) {
throw new Error(
"`of` called on class Left (value) instead of Either (type)",
);
}
[Deno.customInspect]() {
return `Left(${Deno.inspect(this.$value)})`;
}
// ----- Functor (Either a)
map() {
return this;
}
// ----- Applicative (Either a)
ap() {
return this;
}
// ----- Monad (Either a)
chain() {
return this;
}
join() {
return this;
}
// ----- Traversable (Either a)
sequence(of) {
return of(this);
}
traverse(of, fn) {
return of(this);
}
}
class Right extends Either {
get isLeft() {
return false;
}
get isRight() {
return true;
}
static of(x) {
throw new Error(
"`of` called on class Right (value) instead of Either (type)",
);
}
[Deno.customInspect]() {
return `Right(${Deno.inspect(this.$value)})`;
}
// ----- Functor (Either a)
map(fn) {
return Either.of(fn(this.$value));
}
// ----- Applicative (Either a)
ap(f) {
return f.map(this.$value);
}
// ----- Monad (Either a)
chain(fn) {
return fn(this.$value);
}
join() {
return this.$value;
}
// ----- Traversable (Either a)
sequence(of) {
return this.traverse(of, identity);
}
traverse(of, fn) {
fn(this.$value).map(Either.of);
}
}
class IO {
constructor(fn) {
this.unsafePerformIO = fn;
}
[Deno.customInspect]() {
return "IO(?)";
}
// ----- Pointed IO
static of(x) {
return new IO(() => x);
}
// ----- Functor IO
map(fn) {
return new IO(compose(fn, this.unsafePerformIO));
}
// ----- Applicative IO
ap(f) {
return this.chain((fn) => f.map(fn));
}
// ----- Monad IO
chain(fn) {
return this.map(fn).join();
}
join() {
return new IO(() => this.unsafePerformIO().unsafePerformIO());
}
}
class Task {
constructor(fork) {
this.fork = fork;
}
[Deno.customInspect]() {
return "Task(?)";
}
static rejected(x) {
return new Task((reject, _) => reject(x));
}
// ----- Pointed (Task a)
static of(x) {
return new Task((_, resolve) => resolve(x));
}
// ----- Functor (Task a)
map(fn) {
return new Task((reject, resolve) =>
this.fork(reject, compose(resolve, fn))
);
}
// ----- Applicative (Task a)
ap(f) {
return this.chain((fn) => f.map(fn));
}
// ----- Monad (Task a)
chain(fn) {
return new Task((reject, resolve) =>
this.fork(reject, (x) => fn(x).fork(reject, resolve))
);
}
join() {
return this.chain(identity);
}
}
'''
|
[
"felipe.fonseca.silva@gmail.com"
] |
felipe.fonseca.silva@gmail.com
|
fd106c0911b8109f4620fff74f3ac08c0e6579be
|
1a8c787423914eb03e2ca88e5818174cdb6ef075
|
/flanger/keywords.py
|
808cedab5178aefd6a583bc10be0a3b641147950
|
[] |
no_license
|
xupeng1206/Flanger
|
f8128d340ecae27a75c83177b0ac84f6bacb82dd
|
36759eed2cc35c87a980c83e10221c52e021c4b2
|
refs/heads/master
| 2020-09-21T00:42:21.323461
| 2020-02-02T08:50:35
| 2020-02-02T08:50:35
| 224,631,206
| 13
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
"""
Author: xupeng
Email: 874582705@qq.com / 15601598009@163.com
GitHub: https://github.com/xupeng1206
"""
# Hard-coded string constants collected in one place for easier maintenance.
# These look like configuration-dict keys used elsewhere in Flanger —
# confirm against the consumers before renaming any of them.
FLANGER_URLS = 'FLANGER_URLS'
URL_PREFIX = 'url_prefix'
BASE_DIR = 'BASE_DIR'
FLANGER_REQUEST_PROCESSORS = 'FLANGER_REQUEST_PROCESSORS'
FLANGER_RESPONSE_PROCESSORS = 'FLANGER_RESPONSE_PROCESSORS'
SWAGGER_IGNORE_PARAMS = 'SWAGGER_IGNORE_PARAMS'
# Endpoint name of the swagger resource, in "<module>.<class>" form.
FLANGER_SWAGGER_ENDPOINT = 'Base.SwaggerResource'
|
[
"peng.xu@quantinfotech.com"
] |
peng.xu@quantinfotech.com
|
5f78e88dc5c76015362c9ce89515b55a0d649f80
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/dQZmkrPaKdtSat5f9_5.py
|
c263cbcbd33a42d4a63755863b16cd1a4f00da0a
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 598
|
py
|
"""
Create a function that, given a string `txt`, finds a letter that has a single
occurrence. Return the letter in uppercase. If the input is empty, return an
empty string `""`.
### Examples
single_occurrence("EFFEAABbc") ➞ "C"
single_occurrence("AAAAVNNNNSS") ➞ "V"
single_occurrence("S") ➞ "S"
### Notes
The function will not be case sensitive.
"""
def single_occurrence(txt):
num = 0
txt = txt.upper()
if txt == '': return ''
for one in txt:
for test in txt:
if one == test : num +=1
if num == 1:
return one
num = 0
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
33522aa5fd00e78cee599e368b17935096df9ff1
|
09a132e7d99db800e507e118c4fff8bec5318935
|
/venv/bin/pyi-archive_viewer
|
d8722657bc59862b90d8645dacfefc04c5a04bbd
|
[
"Apache-2.0"
] |
permissive
|
ahnguyen17/spare-blockchain
|
ef6918a24e7206295870efd5bb5c68768927d07d
|
c91303f11a168227f6c4b0fc0707008723e45b30
|
refs/heads/master
| 2023-06-06T01:59:18.702984
| 2021-06-19T05:19:28
| 2021-06-19T05:19:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 277
|
#!/Users/alert0d0a/spare/spare-blockchain/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from PyInstaller.utils.cliutils.archive_viewer import run
if __name__ == '__main__':
    # Strip the "-script.pyw" / ".exe" suffix that setuptools console-script
    # wrappers append to argv[0] so the tool sees its canonical name.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    # Exit with the archive viewer's return code.
    sys.exit(run())
|
[
"spare@1lb.pw"
] |
spare@1lb.pw
|
|
3de42dbf86af61a243c154b1d5b6cf43861dba1d
|
0a037e4ee03c5afbf6f58b7293fefab1cc6998cf
|
/week2/miercoles/8-1.py
|
44352d08ed55f21ec7c237e93cbe4c6736759b01
|
[] |
no_license
|
mingyyy/crash_course
|
6ac2a41b14c821e96e3938047cb056ad2ce99280
|
dad9f9b37ef3093dad25a0cb7fddf0e65fed3571
|
refs/heads/master
| 2020-04-24T14:24:43.283617
| 2019-12-25T07:43:05
| 2019-12-25T07:43:05
| 172,019,856
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 303
|
py
|
'''
8-1. Message: Write a function called display_message() that prints one sen- tence telling everyone
what you are learning about in this chapter . Call the function, and make sure the message displays correctly .
'''
def display_message():
    """Print one sentence saying what this chapter is about."""
    message = "We're learning functions. "
    print(message)


display_message()
|
[
"j.yanming@gmail.com"
] |
j.yanming@gmail.com
|
cabdd81469140991df14833878bbfd3d91a6b985
|
a263257099a5c3151de716c883d462141a5c411f
|
/Spiders/english800.py
|
28f23e4d2a0aa43f38882e6c98f71ffe8ef464ff
|
[] |
no_license
|
liujunchao/PyScrapyDemo
|
08c81aa038958ba7e637e04fc69fbc084cf2b029
|
0f87190a9c662eeba8d4db0d79e53029a46f85ef
|
refs/heads/master
| 2021-06-12T03:52:02.943044
| 2016-11-30T04:21:40
| 2016-11-30T04:21:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,339
|
py
|
import scrapy
import urllib2
import os
from scrapy.utils.response import open_in_browser
class EnglishDownSpider(scrapy.Spider):
    """Crawl tingclass.net listing pages and download each lesson's MP3.

    Flow: listing page -> lesson page -> audio player page -> .mp3 download.
    NOTE(review): Python 2 code (uses urllib2); a py3 port needs urllib.request.
    """
    name = 'blogspider'
    # Three paginated listing pages of the same lesson series.
    start_urls = ['http://www.tingclass.net/list-5544-1.html','http://www.tingclass.net/list-5544-2.html','http://www.tingclass.net/list-5544-3.html']
    def parse(self, response):
        # Follow every lesson link found in the listing page's share box.
        self.log('begin to debug response:%s' % response.url)
        for url in response.css("#share_con ul li a::attr('href')").extract():
            #print(url)
            yield scrapy.Request(response.urljoin(url), self.parse_details)
    def parse_details(self, response):
        # From a lesson page, follow the audio player link.
        #print("handle %s " % response.url)
        url = response.css("#jplayer_tc_yinpin ::attr('href')")[0].extract()
        #print("handle %s " % url)
        yield scrapy.Request(response.urljoin(url), self.down_file)
        return
    def down_file(self,response):
        # Save the linked .mp3 into the current working directory.
        print("handle %s " % response.url)
        downpath = response.css("div.download a ::attr('href')")[0].extract()
        if downpath.endswith(".mp3"):
            filename = os.path.basename(downpath)
            f = urllib2.urlopen(downpath)
            print("down file %s" % filename)
            with open(filename, "wb") as code:
                code.write(f.read())
        return ;
|
[
"liu_james_cn@163.com"
] |
liu_james_cn@163.com
|
db56bac1d35007b684c90b2c8c36cd715a4ac845
|
127d0f1251e543ba271f532471e5153f1ce5597c
|
/sprint7/stock_market.py
|
ddcf52fb32b6db8e6d06def0f030e0451751bbfb
|
[] |
no_license
|
BugChef/yandex_alghoritms
|
0e608d2df899a547cf03273cc4d2f95ca424d3e3
|
afe41cbb10878dc1dcfddd1d37ce8d4590f47f68
|
refs/heads/main
| 2023-04-01T23:09:42.688374
| 2021-04-16T20:03:38
| 2021-04-16T20:03:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 185
|
py
|
# Maximum stock profit: read the number of days and the prices, then sum
# every positive day-over-day rise (buy before each increase, sell after).
n = int(input())
numbers = list(map(int, input().split()))
profit = sum(
    max(numbers[i] - numbers[i - 1], 0)
    for i in range(1, n)
)
print(profit)
|
[
"kovalcuk@MacBook-Pro-Ila.local"
] |
kovalcuk@MacBook-Pro-Ila.local
|
c433a1477ea8449063a7d06ff436961dc3902d6e
|
11c09dc9d2d04dc3b9a53532345c5ab72c9b6caa
|
/7KYU/get_next_square.py
|
8f5595f99dda12b06fae3c7c67d8d4441d01cc30
|
[
"MIT"
] |
permissive
|
virginiah894/python_codewars
|
c98dead628c20bc967d88b77f15addf29fd6c2bf
|
350b0f8734f522eb7e5095830f848b8b113c1ca0
|
refs/heads/main
| 2023-05-25T14:38:51.631223
| 2021-06-14T13:10:22
| 2021-06-14T13:10:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 284
|
py
|
def is_perfect_square(n: int) -> bool:
    """Return True if n is a perfect square.

    Fixed to use math.isqrt instead of ``n ** 0.5``: floating-point square
    roots lose precision for large integers (above ~2**52) and misclassify
    some inputs, and raise TypeError for negative n (complex result).
    Negative numbers are never perfect squares.
    """
    from math import isqrt
    if n < 0:
        return False
    return isqrt(n) ** 2 == n


def find_next_square(n: int) -> int:
    """Return the next perfect square strictly greater than n.

    Returns -1 when n itself is not a perfect square (kata contract).
    Computed directly as (isqrt(n) + 1) ** 2 instead of incrementing
    one at a time, so it is O(1) even for huge inputs.
    """
    if not is_perfect_square(n):
        return -1
    from math import isqrt
    return (isqrt(n) + 1) ** 2
|
[
"fatalbest@mail.ru"
] |
fatalbest@mail.ru
|
a2efb9338fb2d7aa14721056c20b59bf19f4b010
|
922e15fb885bf4739a4d0e6091798aba02c16935
|
/tests/test_filesystem_helpers.py
|
740980e496ad66d1a929b6c46ff8c3908f6518aa
|
[
"MIT"
] |
permissive
|
NinjasCL-archive/masonite-fs
|
68902cfbc104e70e5d0c8f1f88c487762b8f29bf
|
333c11d19927f9cf371d12bb87af88b0ca3dd698
|
refs/heads/master
| 2023-03-25T21:20:04.757045
| 2018-08-17T20:05:59
| 2018-08-17T20:05:59
| 145,139,615
| 0
| 0
|
MIT
| 2021-03-25T22:13:48
| 2018-08-17T15:52:32
|
Python
|
UTF-8
|
Python
| false
| false
| 765
|
py
|
# coding: utf-8
from the import expect
from filesystem import load, paths
class TestFileSystemHelpers:
    """Smoke tests: every `load.*` helper should return a filesystem object
    whose root directory info can be queried."""
    def setup_method(self):
        # No per-test fixtures required.
        pass
    def test_that_os_return_fs(self):
        # load.os() wraps the real OS filesystem.
        info = load.os().getinfo('.')
        expect(info).to.be.NOT.empty
    def test_that_mock_return_fs(self):
        # load.mock() provides an in-memory stand-in for tests.
        info = load.mock().getinfo('.')
        expect(info).to.be.NOT.empty
    def test_that_fs_return_fs(self):
        # load.fs() opens a filesystem from a URL ('mem://' = in-memory).
        info = load.fs('mem://').getinfo('.')
        expect(info).to.be.NOT.empty
    def test_that_root_return_fs(self):
        info = load.root().getinfo('.')
        expect(info).to.be.NOT.empty
    def test_that_root_is_correct(self):
        # The root filesystem's syspath must resolve under paths.ROOT.
        info = load.root().getsyspath('.')
        expect(info).to.match(paths.ROOT + '/.')
|
[
"camilo@ninjas.cl"
] |
camilo@ninjas.cl
|
dc0f4875eacb692b7d93f20f4aaa8eaf4c1e5c73
|
3327a87cefa2275bd0ba90a500444f3494b14fdf
|
/bwu/binary_tree/111-minimum-depth-of-binary-tree.py
|
32ef5ff4f1974fd1f078d14d41cd754c95255012
|
[] |
no_license
|
captainhcg/leetcode-in-py-and-go
|
e1b56f4228e0d60feff8f36eb3d457052a0c8d61
|
88a822c48ef50187507d0f75ce65ecc39e849839
|
refs/heads/master
| 2021-06-09T07:27:20.358074
| 2017-01-07T00:23:10
| 2017-01-07T00:23:10
| 61,697,502
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 617
|
py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    """LeetCode 111: minimum depth of a binary tree (root-to-nearest-leaf)."""

    def minDepth(self, root):
        """
        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        left, right = root.left, root.right
        if left is not None and right is not None:
            return 1 + min(self.minDepth(left), self.minDepth(right))
        # At most one child exists: depth follows whichever side is present
        # (a missing child must not count as a leaf at depth 1).
        return 1 + self.minDepth(left if left is not None else right)
|
[
"noreply@github.com"
] |
captainhcg.noreply@github.com
|
938baace6ceeffe3b7961162e9e10e906547729b
|
46ac0965941d06fde419a6f216db2a653a245dbd
|
/sdks/python/appcenter_sdk/models/DeviceFrame.py
|
d948e178d7616bc48baff917ce0ba3eba972f91e
|
[
"MIT",
"Unlicense"
] |
permissive
|
b3nab/appcenter-sdks
|
11f0bab00d020abb30ee951f7656a3d7ed783eac
|
bcc19c998b5f648a147f0d6a593dd0324e2ab1ea
|
refs/heads/master
| 2022-01-27T15:06:07.202852
| 2019-05-19T00:12:43
| 2019-05-19T00:12:43
| 187,386,747
| 0
| 3
|
MIT
| 2022-01-22T07:57:59
| 2019-05-18T17:29:21
|
Python
|
UTF-8
|
Python
| false
| false
| 3,420
|
py
|
# coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
import pprint
import re # noqa: F401
import six
class DeviceFrame(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # NOTE(review): the generator emitted empty type strings here — the
    # OpenAPI spec apparently declares no types for grid/full.
    swagger_types = {
        'grid': '',
        'full': ''
    }
    attribute_map = {
        'grid': 'grid',
        'full': 'full'
    }
    def __init__(self, grid=None, full=None):  # noqa: E501
        """DeviceFrame - a model defined in Swagger"""  # noqa: E501
        self._grid = None
        self._full = None
        self.discriminator = None
        # Only assign through the property setters when a value was given,
        # so unset fields stay None.
        if grid is not None:
            self.grid = grid
        if full is not None:
            self.full = full
    @property
    def grid(self):
        """Gets the grid of this DeviceFrame.  # noqa: E501
        :return: The grid of this DeviceFrame.  # noqa: E501
        :rtype:
        """
        return self._grid
    @grid.setter
    def grid(self, grid):
        """Sets the grid of this DeviceFrame.
        :param grid: The grid of this DeviceFrame.  # noqa: E501
        :type:
        """
        self._grid = grid
    @property
    def full(self):
        """Gets the full of this DeviceFrame.  # noqa: E501
        :return: The full of this DeviceFrame.  # noqa: E501
        :rtype:
        """
        return self._full
    @full.setter
    def full(self, full):
        """Sets the full of this DeviceFrame.
        :param full: The full of this DeviceFrame.  # noqa: E501
        :type:
        """
        self._full = full
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are models.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are models.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, DeviceFrame):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"b3nab@users.noreply.github.com"
] |
b3nab@users.noreply.github.com
|
ca07f43dc30e6b5082245d617f496b6a5a874364
|
1ef3300f14136b0ffe1a92696a28d9de17778e41
|
/Practica 9.py
|
3369609b8b1165257dccc0a354e399d484659879
|
[
"Unlicense"
] |
permissive
|
mrmzes/LABPROGRACIBER_1800370
|
08abe9b785c3ee66ab417e74b4ca157d6196a2b6
|
d3c406f0101689a4c187f55088439cb895514663
|
refs/heads/main
| 2023-01-14T09:44:29.533515
| 2020-11-25T01:35:57
| 2020-11-25T01:35:57
| 300,820,589
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 536
|
py
|
import smtplib
import getpass
smtpObject = smtplib.SMTP('smtp.gmail.com', 587)
smtpObject.ehlo()
smtpObject.starttls()
account = str(input('Ingresa el correo con el cual quieres enviar(GMAIL): '))
password = getpass.getpass()
smtpObject.login(account,password)
to = str(input('Ingresa el correo al cual le quieres enviar: '))
subject = str(input('Ingresa el asunto del correo: '))
cuerpo = str(input('Ingresa el cuerpo del correo: '))
mail = subject + cuerpo
smtpObject.sendmail(account,to,mail)
smtpObject.quit()
|
[
"noreply@github.com"
] |
mrmzes.noreply@github.com
|
76f95b7fdb3696e2510218fb0a59f52ca4890716
|
d0281b544d80d71be84a1fee1446bed2b36527cd
|
/src/lianxi/excel.py
|
403c0035b0a74460a66699a738f9766c7096d86f
|
[] |
no_license
|
snczww/bishe
|
a5a5d3512145a1d530fd9b0ea31950cd614d974f
|
a263d359421512cc17126f2ec71bb2120ce4efdc
|
refs/heads/master
| 2021-01-19T21:00:24.984007
| 2018-04-12T15:03:10
| 2018-04-12T15:03:10
| 88,588,197
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 432
|
py
|
#-*- coding: utf-8 -*-
'''
Created on 2017年4月18日
@author: zww
'''
import pandas as pd
inputfile = 'C:\\Users\\zww\\Desktop\\lunwen\\chapter15\\demo\\data\\huizong.csv'
outputfile = 'D:\\源码\\python\\bishe\\src\\lianxi\\data\\haier_jd.txt'
data = pd.read_csv(inputfile, encoding = 'utf-8')
data = data[[u'评论']][data[u'品牌']==u'海尔']
data.to_csv(outputfile, index=False, header=False,encoding='utf-8')
|
[
"snczww@hotmail.com"
] |
snczww@hotmail.com
|
8b986a3078208d2604d3f6ef7c7c3b1dee646db4
|
a9445c8168d8b2520dfa461b4698699b3747dc9e
|
/test.py
|
8bdcfd3cab4815511866cbb181d11bb74eddab8d
|
[] |
no_license
|
YourGc/SEnet
|
efff2ad272d617adba0e4e57c6b97783feeb95c5
|
52c63a6e3ba80b32206ccf13fca2a246dfb88eef
|
refs/heads/master
| 2020-05-22T16:22:53.537869
| 2019-05-16T03:56:25
| 2019-05-16T03:56:25
| 186,428,960
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,194
|
py
|
# coding:utf-8
import json
import torch.nn as nn
from PIL import Image
from Data_Loader import preprocess
import torch
import torch.nn.functional as F
import pandas as pd
from progressbar import *
from cache import Cache
from torchsummary import summary
from se_resnext import se_resnext_50
from se_resnet import se_resnet_50
import os
# os.environ["CUDA_VISIBLE_DEVICES"] = "3"
if __name__ == '__main__':
#rois = load_json('.')
# model = se_resnext_50()
#torch.save(model,'test.pkl')
cache = Cache('./Cache/roi.pkl','roi.json',r'F:\小目标检测\Test_fix')
model = torch.load('epoch_9.pth.tar')
model = nn.DataParallel(model,device_ids=[0,1,2,3])
# print(model)
# summary(model,(3,128,128))
model.eval()
model.cuda()
# summary(model,(3,224,224))
result = []
error = []
test_dir = '/home/star/Wayne/transportation/data/Test_fix'
widgets = ['Progress: ', Percentage(), ' ', Bar('#'), ' ', Timer(),
' ', ETA(), ' ', FileTransferSpeed()]
# pbar = ProgressBar(widgets=widgets, maxval=len(rois)).start()
# with torch.no_grad():
# for index,(k,v) in enumerate(rois.items()):
# pbar.update(index)
# img_ori = Image.open(os.path.join(test_dir,k))
# tmp = []
# for coords in v:
# img = img_ori.crop(coords).resize((64,64))
# img = preprocess(img)
# img = img.unsqueeze(0).cuda()
# # print(img)
# output = model(img)
#
# output = F.softmax(output)
# conf, pred = torch.max(output, 1)
#
# tmp.append([conf.float(),pred.int(),coords])
#
# if len(tmp) == 0 :
# error.append(k)
# continue
# tmp = sorted(tmp,key=lambda x:x[0],reverse=True)
# X1,Y1,X2,Y2 = tmp[0][2]
# result.append([k,X1,Y1,X2,Y1,X2,Y2,X1,Y2,tmp[0][1]])
#
# print(error)
# result = pd.DataFrame(result,columns=['filename','X1','Y1','X2','Y2',\
# 'X3','Y3','X4','Y4','type'])
# result.to_csv('result.csv',index=False)
|
[
"527573174@qq.com"
] |
527573174@qq.com
|
51450d6a030a2ebc2609c58cdc0366e7ad5f2ad6
|
11ebf7af5a1acf44088b6b93be1417c8d10b4eb9
|
/txtinkerforge/bricklet_temperature.py
|
10ecd4f15b764d9b6e53eb88231348eadde05bea
|
[] |
no_license
|
dimddev/txtinkerforge
|
6497203290659d6207ebda6cb3dfbe8e9c255c64
|
a7a8dae14e694049a0cdb4aa9285e8c2ae355082
|
refs/heads/master
| 2016-09-06T13:11:31.164234
| 2015-04-18T06:58:20
| 2015-04-18T06:58:20
| 34,099,127
| 1
| 0
| null | 2015-04-18T06:58:20
| 2015-04-17T05:53:11
| null |
UTF-8
|
Python
| false
| false
| 264
|
py
|
from .txapi.txmapper import TxMapper
from tinkerforge.bricklet_temperature import BrickletTemperature
class TXTemperature(TxMapper):
device = BrickletTemperature
def __init__(self, uid, ip_con):
super(TXTemperature, self).__init__(uid, ip_con)
|
[
"targolini@gmail.com"
] |
targolini@gmail.com
|
f9ab66a0276b2c1e0af621d82f3da864a42ecb46
|
6a3c83c376606693412867ea2f07ee7bd01214cc
|
/synerD/asgi.py
|
857d04bb25ed3a6d78e440baf4eba979966254c9
|
[] |
no_license
|
bennnnnn/Synerd_Binalfew
|
037513aa5f1ea58c5f92328d54659828c682f1ad
|
25887862d83a4153f3694c33e867ea948f9ab760
|
refs/heads/main
| 2023-03-27T20:10:45.725409
| 2021-03-30T01:03:54
| 2021-03-30T01:03:54
| 352,825,557
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 389
|
py
|
"""
ASGI config for synerD project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'synerD.settings')
application = get_asgi_application()
|
[
"bmecuriaw@asu.edu"
] |
bmecuriaw@asu.edu
|
26736394a48b1ef3e2c6f7fc79d28573cd115fe8
|
b806be673ea1e1a073a2f2e4d39b3f787bad61bc
|
/myapp/urls.py
|
d9ce8d66aea0cf3f7901f446a88d05bef3cd777b
|
[] |
no_license
|
sonle308/reviewing-book
|
2153ca83bf66fd819ad51091db7aa3395123df30
|
b7b0f17e1757f2cbc8a512f5cb0595366a60460c
|
refs/heads/master
| 2022-04-27T22:00:52.833271
| 2020-04-21T09:26:41
| 2020-04-21T09:26:41
| 257,147,068
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 841
|
py
|
"""myapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('', include('authentication.urls')),
path('', include('library.urls')),
path('admin/', admin.site.urls),
]
|
[
"lengocson@sonle-mac.local"
] |
lengocson@sonle-mac.local
|
d4c25e0aa87a2a955f318aaae57d8e543c3ed45a
|
0d02ab8e1ac38f10ae19912a76fc9d26ede93a5b
|
/dictionary.py
|
0cb426c2c455e7034821865408dacecce82371c2
|
[] |
no_license
|
divyanshbarar/dictionary_using_tkinter
|
8e21e5c22c091a9bdad57d4c940cfe1db0dad59e
|
442371870a4b93fcf3649eff2d6ef2f38bb6c061
|
refs/heads/master
| 2022-12-01T04:16:57.882220
| 2020-08-11T21:14:17
| 2020-08-11T21:14:17
| 286,849,175
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 547
|
py
|
from tkinter import *
from PyDictionary import PyDictionary
root=Tk()
root.title("DICTIONARY BY PYTHON ")
txt=Text(root)
txt.configure(bg="yellow")
txt.grid(row=3,column=0,columnspan=2)
root.geometry("500x450")
def search():
word=e.get()
dictionary=PyDictionary(word)
meaning=dictionary.printMeanings()
st=str(meaning)
txt.insert(END,"/n"+st)
e=Entry(root,font=("times",25,"bold"),justify=CENTER)
e.grid(row=0,column=0)
bttn_srch=Button(root,text="SEARCH",command=search)
bttn_srch.grid(row=1,column=0)
root.mainloop()
|
[
"barar.akarsh8@gmail.com"
] |
barar.akarsh8@gmail.com
|
6b7083aed3a8d51901629a65edf317ca34ae5a82
|
17e1028fa90dfa923c4f3693acfedfedcc157420
|
/Day1_STD.py
|
0c6f606d9b60559ed559aa74c3e23e672081cb3b
|
[] |
no_license
|
ayoubGL/10DaysStatisticsChallenge_HackerRanck
|
542df2d457011824f1f39d4a9c8edfede21f5530
|
2eab29dea55634b51c0787fa6da1d7f95ba60933
|
refs/heads/master
| 2021-05-17T05:20:22.968189
| 2020-06-28T10:28:39
| 2020-06-28T10:28:39
| 250,646,508
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 301
|
py
|
# The awesome statistical measure, one of the importance variability's parameter : standart deviation
import math
N = int(input())
X = list(map(int, input().split()))
mean = sum(x for x in X) / len(X)
variance = sum((x - mean)**2 for x in X) / len(X)
std = math.sqrt(variance)
print(round(std,1))
|
[
"a.elamjjodi@gmail.com"
] |
a.elamjjodi@gmail.com
|
73a0a99b4ab0ffcf3430290a4ceb4baf782e8d96
|
cd792e76c3530d4dc0c97f8f4f8a2ea8c9f4fde8
|
/OpenVenTum/scripts/sklearn_lib/feature_selection/_rfe.py
|
5464efc9771bdce133c427e2cea1477df0bbbf7e
|
[] |
no_license
|
ederjc/hackaTUM
|
4e143b3abf01f7bbed02c657d715a15b2bfeeba2
|
e819b73c456d2931c7071423f580e576d5ecfb02
|
refs/heads/master
| 2022-04-18T13:13:17.630706
| 2020-04-19T06:48:54
| 2020-04-19T06:48:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,689
|
py
|
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Vincent Michel <vincent.michel@inria.fr>
# Gilles Louppe <g.louppe@gmail.com>
#
# License: BSD 3 clause
"""Recursive feature elimination for feature ranking"""
import numpy as np
from joblib import Parallel, delayed, effective_n_jobs
from ..utils import check_X_y, safe_sqr
from ..utils.metaestimators import if_delegate_has_method
from ..utils.metaestimators import _safe_split
from ..utils.validation import check_is_fitted
from ..base import BaseEstimator
from ..base import MetaEstimatorMixin
from ..base import clone
from ..base import is_classifier
from ..model_selection import check_cv
from ..model_selection._validation import _score
from ..metrics import check_scoring
from ._base import SelectorMixin
def _rfe_single_fit(rfe, estimator, X, y, train, test, scorer):
"""
Return the score for a fit across one fold.
"""
X_train, y_train = _safe_split(estimator, X, y, train)
X_test, y_test = _safe_split(estimator, X, y, test, train)
return rfe._fit(
X_train, y_train, lambda estimator, features:
_score(estimator, X_test[:, features], y_test, scorer)).scores_
class RFE(SelectorMixin, MetaEstimatorMixin, BaseEstimator):
"""Feature ranking with recursive feature elimination.
Given an external estimator that assigns weights to features (e.g., the
coefficients of a linear model), the goal of recursive feature elimination
(RFE) is to select features by recursively considering smaller and smaller
sets of features. First, the estimator is trained on the initial set of
features and the importance of each feature is obtained either through a
``coef_`` attribute or through a ``feature_importances_`` attribute.
Then, the least important features are pruned from current set of features.
That procedure is recursively repeated on the pruned set until the desired
number of features to select is eventually reached.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : object
A supervised learning estimator with a ``fit`` method that provides
information about feature importance either through a ``coef_``
attribute or through a ``feature_importances_`` attribute.
n_features_to_select : int or None (default=None)
The number of features to select. If `None`, half of the features
are selected.
step : int or float, optional (default=1)
If greater than or equal to 1, then ``step`` corresponds to the
(integer) number of features to remove at each iteration.
If within (0.0, 1.0), then ``step`` corresponds to the percentage
(rounded down) of features to remove at each iteration.
verbose : int, (default=0)
Controls verbosity of output.
Attributes
----------
n_features_ : int
The number of selected features.
support_ : array of shape [n_features]
The mask of selected features.
ranking_ : array of shape [n_features]
The feature ranking, such that ``ranking_[i]`` corresponds to the
ranking position of the i-th feature. Selected (i.e., estimated
best) features are assigned rank 1.
estimator_ : object
The external estimator fit on the reduced dataset.
Examples
--------
The following example shows how to retrieve the 5 most informative
features in the Friedman #1 dataset.
>>> from sklearn_lib.datasets import make_friedman1
>>> from sklearn_lib.feature_selection import RFE
>>> from sklearn_lib.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFE(estimator, 5, step=1)
>>> selector = selector.fit(X, y)
>>> selector.support_
array([ True, True, True, True, True, False, False, False, False,
False])
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
Notes
-----
Allows NaN/Inf in the input if the underlying estimator does as well.
See also
--------
RFECV : Recursive feature elimination with built-in cross-validated
selection of the best number of features
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, n_features_to_select=None, step=1,
verbose=0):
self.estimator = estimator
self.n_features_to_select = n_features_to_select
self.step = step
self.verbose = verbose
@property
def _estimator_type(self):
return self.estimator._estimator_type
@property
def classes_(self):
return self.estimator_.classes_
def fit(self, X, y):
"""Fit the RFE model and then the underlying estimator on the selected
features.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
The training input samples.
y : array-like of shape (n_samples,)
The target values.
"""
return self._fit(X, y)
def _fit(self, X, y, step_score=None):
# Parameter step_score controls the calculation of self.scores_
# step_score is not exposed to users
# and is used when implementing RFECV
# self.scores_ will not be calculated when calling _fit through fit
tags = self._get_tags()
X, y = check_X_y(X, y, "csc", ensure_min_features=2,
force_all_finite=not tags.get('allow_nan', True))
# Initialization
n_features = X.shape[1]
if self.n_features_to_select is None:
n_features_to_select = n_features // 2
else:
n_features_to_select = self.n_features_to_select
if 0.0 < self.step < 1.0:
step = int(max(1, self.step * n_features))
else:
step = int(self.step)
if step <= 0:
raise ValueError("Step must be >0")
support_ = np.ones(n_features, dtype=np.bool)
ranking_ = np.ones(n_features, dtype=np.int)
if step_score:
self.scores_ = []
# Elimination
while np.sum(support_) > n_features_to_select:
# Remaining features
features = np.arange(n_features)[support_]
# Rank the remaining features
estimator = clone(self.estimator)
if self.verbose > 0:
print("Fitting estimator with %d features." % np.sum(support_))
estimator.fit(X[:, features], y)
# Get coefs
if hasattr(estimator, 'coef_'):
coefs = estimator.coef_
else:
coefs = getattr(estimator, 'feature_importances_', None)
if coefs is None:
raise RuntimeError('The classifier does not expose '
'"coef_" or "feature_importances_" '
'attributes')
# Get ranks
if coefs.ndim > 1:
ranks = np.argsort(safe_sqr(coefs).sum(axis=0))
else:
ranks = np.argsort(safe_sqr(coefs))
# for sparse case ranks is matrix
ranks = np.ravel(ranks)
# Eliminate the worse features
threshold = min(step, np.sum(support_) - n_features_to_select)
# Compute step score on the previous selection iteration
# because 'estimator' must use features
# that have not been eliminated yet
if step_score:
self.scores_.append(step_score(estimator, features))
support_[features[ranks][:threshold]] = False
ranking_[np.logical_not(support_)] += 1
# Set final attributes
features = np.arange(n_features)[support_]
self.estimator_ = clone(self.estimator)
self.estimator_.fit(X[:, features], y)
# Compute step score when only n_features_to_select features left
if step_score:
self.scores_.append(step_score(self.estimator_, features))
self.n_features_ = support_.sum()
self.support_ = support_
self.ranking_ = ranking_
return self
@if_delegate_has_method(delegate='estimator')
def predict(self, X):
"""Reduce X to the selected features and then predict using the
underlying estimator.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
Returns
-------
y : array of shape [n_samples]
The predicted target values.
"""
check_is_fitted(self)
return self.estimator_.predict(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def score(self, X, y):
"""Reduce X to the selected features and then return the score of the
underlying estimator.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
y : array of shape [n_samples]
The target values.
"""
check_is_fitted(self)
return self.estimator_.score(self.transform(X), y)
def _get_support_mask(self):
check_is_fitted(self)
return self.support_
@if_delegate_has_method(delegate='estimator')
def decision_function(self, X):
"""Compute the decision function of ``X``.
Parameters
----------
X : {array-like or sparse matrix} of shape (n_samples, n_features)
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
score : array, shape = [n_samples, n_classes] or [n_samples]
The decision function of the input samples. The order of the
classes corresponds to that in the attribute :term:`classes_`.
Regression and binary classification produce an array of shape
[n_samples].
"""
check_is_fitted(self)
return self.estimator_.decision_function(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def predict_proba(self, X):
"""Predict class probabilities for X.
Parameters
----------
X : {array-like or sparse matrix} of shape (n_samples, n_features)
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
p : array of shape (n_samples, n_classes)
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute :term:`classes_`.
"""
check_is_fitted(self)
return self.estimator_.predict_proba(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def predict_log_proba(self, X):
"""Predict class log-probabilities for X.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
Returns
-------
p : array of shape (n_samples, n_classes)
The class log-probabilities of the input samples. The order of the
classes corresponds to that in the attribute :term:`classes_`.
"""
check_is_fitted(self)
return self.estimator_.predict_log_proba(self.transform(X))
def _more_tags(self):
estimator_tags = self.estimator._get_tags()
return {'poor_score': True,
'allow_nan': estimator_tags.get('allow_nan', True)}
class RFECV(RFE):
"""Feature ranking with recursive feature elimination and cross-validated
selection of the best number of features.
See glossary entry for :term:`cross-validation estimator`.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : object
A supervised learning estimator with a ``fit`` method that provides
information about feature importance either through a ``coef_``
attribute or through a ``feature_importances_`` attribute.
step : int or float, optional (default=1)
If greater than or equal to 1, then ``step`` corresponds to the
(integer) number of features to remove at each iteration.
If within (0.0, 1.0), then ``step`` corresponds to the percentage
(rounded down) of features to remove at each iteration.
Note that the last iteration may remove fewer than ``step`` features in
order to reach ``min_features_to_select``.
min_features_to_select : int, (default=1)
The minimum number of features to be selected. This number of features
will always be scored, even if the difference between the original
feature count and ``min_features_to_select`` isn't divisible by
``step``.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 5-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`,
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`sklearn.model_selection.StratifiedKFold` is used. If the
estimator is a classifier or if ``y`` is neither binary nor multiclass,
:class:`sklearn.model_selection.KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
.. versionchanged:: 0.22
``cv`` default value of None changed from 3-fold to 5-fold.
scoring : string, callable or None, optional, (default=None)
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
verbose : int, (default=0)
Controls verbosity of output.
n_jobs : int or None, optional (default=None)
Number of cores to run in parallel while fitting across folds.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
Attributes
----------
n_features_ : int
The number of selected features with cross-validation.
support_ : array of shape [n_features]
The mask of selected features.
ranking_ : array of shape [n_features]
The feature ranking, such that `ranking_[i]`
corresponds to the ranking
position of the i-th feature.
Selected (i.e., estimated best)
features are assigned rank 1.
grid_scores_ : array of shape [n_subsets_of_features]
The cross-validation scores such that
``grid_scores_[i]`` corresponds to
the CV score of the i-th subset of features.
estimator_ : object
The external estimator fit on the reduced dataset.
Notes
-----
The size of ``grid_scores_`` is equal to
``ceil((n_features - min_features_to_select) / step) + 1``,
where step is the number of features removed at each iteration.
Allows NaN/Inf in the input if the underlying estimator does as well.
Examples
--------
The following example shows how to retrieve the a-priori not known 5
informative features in the Friedman #1 dataset.
>>> from sklearn_lib.datasets import make_friedman1
>>> from sklearn_lib.feature_selection import RFECV
>>> from sklearn_lib.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFECV(estimator, step=1, cv=5)
>>> selector = selector.fit(X, y)
>>> selector.support_
array([ True, True, True, True, True, False, False, False, False,
False])
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
See also
--------
RFE : Recursive feature elimination
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, step=1, min_features_to_select=1, cv=None,
scoring=None, verbose=0, n_jobs=None):
self.estimator = estimator
self.step = step
self.cv = cv
self.scoring = scoring
self.verbose = verbose
self.n_jobs = n_jobs
self.min_features_to_select = min_features_to_select
def fit(self, X, y, groups=None):
"""Fit the RFE model and automatically tune the number of selected
features.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Training vector, where `n_samples` is the number of samples and
`n_features` is the total number of features.
y : array-like of shape (n_samples,)
Target values (integers for classification, real numbers for
regression).
groups : array-like of shape (n_samples,) or None
Group labels for the samples used while splitting the dataset into
train/test set. Only used in conjunction with a "Group" :term:`cv`
instance (e.g., :class:`~sklearn_lib.model_selection.GroupKFold`).
"""
X, y = check_X_y(X, y, "csr", ensure_min_features=2,
force_all_finite=False)
# Initialization
cv = check_cv(self.cv, y, is_classifier(self.estimator))
scorer = check_scoring(self.estimator, scoring=self.scoring)
n_features = X.shape[1]
if 0.0 < self.step < 1.0:
step = int(max(1, self.step * n_features))
else:
step = int(self.step)
if step <= 0:
raise ValueError("Step must be >0")
# Build an RFE object, which will evaluate and score each possible
# feature count, down to self.min_features_to_select
rfe = RFE(estimator=self.estimator,
n_features_to_select=self.min_features_to_select,
step=self.step, verbose=self.verbose)
# Determine the number of subsets of features by fitting across
# the train folds and choosing the "features_to_select" parameter
# that gives the least averaged error across all folds.
# Note that joblib raises a non-picklable error for bound methods
# even if n_jobs is set to 1 with the default multiprocessing
# backend.
# This branching is done so that to
# make sure that user code that sets n_jobs to 1
# and provides bound methods as scorers is not broken with the
# addition of n_jobs parameter in version 0.18.
if effective_n_jobs(self.n_jobs) == 1:
parallel, func = list, _rfe_single_fit
else:
parallel = Parallel(n_jobs=self.n_jobs)
func = delayed(_rfe_single_fit)
scores = parallel(
func(rfe, self.estimator, X, y, train, test, scorer)
for train, test in cv.split(X, y, groups))
scores = np.sum(scores, axis=0)
scores_rev = scores[::-1]
argmax_idx = len(scores) - np.argmax(scores_rev) - 1
n_features_to_select = max(
n_features - (argmax_idx * step),
self.min_features_to_select)
# Re-execute an elimination with best_k over the whole set
rfe = RFE(estimator=self.estimator,
n_features_to_select=n_features_to_select, step=self.step,
verbose=self.verbose)
rfe.fit(X, y)
# Set final attributes
self.support_ = rfe.support_
self.n_features_ = rfe.n_features_
self.ranking_ = rfe.ranking_
self.estimator_ = clone(self.estimator)
self.estimator_.fit(self.transform(X), y)
# Fixing a normalization error, n is equal to get_n_splits(X, y) - 1
# here, the scores are normalized by get_n_splits(X, y)
self.grid_scores_ = scores[::-1] / cv.get_n_splits(X, y, groups)
return self
|
[
"50597791+KalinNonchev@users.noreply.github.com"
] |
50597791+KalinNonchev@users.noreply.github.com
|
f0000696cd1a06f4f0da76b326d78cdcb763e71a
|
5f0ea06157deb61b49a9bf5679e893efb7bc09a5
|
/practice03/prob01.py
|
13906d19bbed59c72fb6ee022119a623a3d884e6
|
[] |
no_license
|
yoonchaiyoung/python-basics
|
b6642da6d4a1daf3a156f14b3e84eeaf6c01245d
|
675858a297b3a3ae7b1890122b6967dbdf31143d
|
refs/heads/master
| 2022-12-20T07:26:44.749402
| 2020-10-05T07:56:57
| 2020-10-05T07:56:57
| 290,401,471
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 695
|
py
|
# 문제1. 다음 세 개의 리스트가 있을 때,
#
# subs = [‘I’, ‘You’]
# verbs = [‘Play’, ‘Love’]
# objs = [‘Hockey’, ‘Football’]
#
# 3형식 문장을 모두 출력해 보세요
#
# 실행 결과:
#
# I Play Hockey.
# I Play Football.
# I Love Hockey.
# I Love Football.
# You Play Hockey.
# You Play Football.
# You Love Hockey.
# You Love Football.
# =================================================================================================================
subs = ['I', 'You']
verbs = ['Play', 'Love']
objs = ['Hockey', 'Football']
for i in range(2):
for j in range(2):
for k in range(2):
print(subs[i], verbs[j], objs[k]+".")
|
[
"domo62@naver.com"
] |
domo62@naver.com
|
f5b36f7451c7f9da00b45719d85d77612c1acf52
|
73b85fa4822c50d2868063d836560ba3dd6b0460
|
/pycall/add_endpoint.py
|
c12a825bc521d987be975c315cba50d94e33194a
|
[] |
no_license
|
benwainwright/call
|
4481d7270f636db884f37744ff3e07545e034d4a
|
eb0c25572b0dc4e1d2c2cc3c957d29cc9b61a206
|
refs/heads/master
| 2021-06-18T19:03:12.080968
| 2019-11-23T01:26:44
| 2019-11-23T01:26:44
| 192,077,657
| 0
| 0
| null | 2021-03-29T19:55:58
| 2019-06-15T12:46:02
|
Python
|
UTF-8
|
Python
| false
| false
| 4,024
|
py
|
import urllib
from pycall import Endpoint, JsonDataManager
import pycall.endpoint.path
from pycall.endpoint.option import Option
import pycall.config
alias_file = JsonDataManager(pycall.config.ALIAS_FILE)
def add_endpoint_command(command, args, unknown_named, unknown_positional):
_add_endpoint_alias(args.url, args.alias)
def _add_endpoint_alias(url, alias=None, route_name=None):
with alias_file.data() as data:
parts = urllib.parse.urlparse(url)
base_url = f"{parts.scheme}://{parts.hostname}"
alias = _get_alias_name(base_url, data)
if alias in data:
endpoint = Endpoint.from_dict(data[alias])
else:
endpoint = Endpoint(name=alias, base_url=base_url, paths={})
endpoint.paths[alias] = _make_new_path_alias(
alias, base_url, parts.path, parts.query, route_name
)
data[alias] = endpoint.to_dict()
def _create_format_string_from_url(path, query) -> str:
print("Pleae identify variable url segments: ")
print(f"\nPath '{path}'")
segments = path.strip("/").split("/")
placeholders = [f"{i}: {segment}" for i, segment in enumerate(segments)]
print("\n".join(placeholders))
if len(query) > 0:
query_parts = query.split("&")
print(f"\nQuery string '{query}'")
query_placeholders = [
f"{i + len(segments)}: {segment}" for i, segment in enumerate(query_parts)
]
print("\n".join(query_placeholders))
else:
query_parts = []
placeholders = input("\nEnter comma separated list of numbers: ")
indexes = (
[int(num.strip()) for num in placeholders.strip().split(",")]
if len(placeholders) > 0
else []
)
for index in (i for i in indexes if i < len(segments)):
segments[index] = "{}"
if len(query_parts) > 0:
for index in (i - len(segments) for i in indexes if i >= len(segments)):
pair = query_parts[index].split("=", 1)
pair[1] = "{}"
query_parts[index] = "=".join(pair)
return "/".join(segments), "&".join(query_parts)
def _get_options(path_string, query_string) -> [Option]:
options = []
option_count = len(path_string.split("{}")) - 1
if (option_count) > 0:
print(f"Enter variable names for {path_string}")
for i in range(option_count):
name = input(f"Enter name for variable {i}: ").strip()
description = input(f"Enter description for variable '{name}'': ").strip()
options.append(Option(name=name, description=description))
for pair in query_string.split("&"):
pair = pair.split("=", 1)
if len(pair) > 1 and pair[1] == "{}":
name = pair[0]
description = input(f"Enter description for '{name}': ").strip()
options.append(Option(name=name, description=description))
return options
def _get_alias_name(base_url, data):
found_aliases = [
alias["name"] for alias in data.values() if alias["base_url"] == base_url
]
return (
found_aliases[0]
if len(found_aliases) > 0
else input(f"Enter alias name for {base_url}: ")
)
def _make_new_path_alias(
    alias, base_url, path, query, route_name
) -> pycall.endpoint.path.Path:
    """Interactively build a Path object for a new aliased route.

    Walks the user through templating the URL, choosing the HTTP method,
    and naming/describing the route.

    :param alias: alias name the path belongs to.
    :param base_url: base URL of the endpoint (used for prompts only).
    :param path: URL path component to template.
    :param query: raw query string to template (may be empty).
    :param route_name: pre-chosen route name, or None to prompt for one.
    :returns: a fully populated ``pycall.endpoint.path.Path``.
    """
    path_string, query_string = _create_format_string_from_url(path, query)
    full_string = (
        f"{path_string}?{query_string}" if len(query_string) > 0 else path_string
    )
    method = input(f"Enter http method for {alias} -> {full_string} (default: 'GET'): ")
    # Bug fix: the prompt advertises a 'GET' default, but an empty answer
    # previously produced an empty method string.
    if not method.strip():
        method = "GET"
    name = (
        route_name
        if route_name is not None
        else input(
            f"Enter path name for {alias} -> {method.upper()} -> {full_string}: "
        )
    )
    description = input(f"Enter description for '{name}': ")
    return pycall.endpoint.path.Path(
        method=method,
        name=name,
        description=description,
        route=full_string,
        options=_get_options(path_string, query_string),
    )
|
[
"ben.wainwright@bbc.co.uk"
] |
ben.wainwright@bbc.co.uk
|
29431b120215f7a7a4e48e848b0aa69e61f90005
|
b676e254e8a0db01e8eba9c8f300e109b458a4d0
|
/ThreadSendDataTest.py
|
785d9397d21fa26c11f2c5fd363c023916fffcb0
|
[] |
no_license
|
JulienLuxx/PythonTest
|
5a70ff4417270a070ee1a5e82bc33f3fbb1f243b
|
d2dead0781df9538e1ee66f8798dee43b7d41f2a
|
refs/heads/master
| 2020-03-14T23:21:33.622610
| 2019-06-14T12:18:45
| 2019-06-14T12:18:45
| 131,843,114
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,743
|
py
|
#!/usr/bin/env python3
# coding=utf-8
'''
ThreadSendDataTest
'''
import urllib
import json
from urllib import request
from urllib import parse
import threading
from threading import Thread
from multiprocessing import Process
from time import ctime,sleep
def GetData(url):
    """HTTP GET *url* with a desktop-browser User-Agent; return raw bytes."""
    browser_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
    }
    response = request.urlopen(request.Request(url, headers=browser_headers))
    return response.read()
def PostData(url, data):
    """HTTP POST *data* (dict) to *url* form-encoded; return the raw response bytes."""
    browser_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
    }
    body = parse.urlencode(data).encode('utf-8')
    response = request.urlopen(request.Request(url, body, headers=browser_headers))
    return response.read()
def PostTest(i):
    """Fetch an ArticleType record, decrement its status by *i*, and post it back.

    Exercises a local dev API (hardcoded localhost:54238); used as the work
    unit for the concurrent write test below.
    """
    # NOTE(review): assumes the Detail response is JSON with a 'data' object
    # containing a numeric 'status' field — confirm against the API.
    result=GetData('http://localhost:54238/API/ArticleType/Detail?id=2')
    data=json.loads(result)['data']
    data['status']-=i
    postUrl='http://localhost:54238/API/ArticleType/EditAsync'
    postRes=PostData(postUrl,data)
    print(postRes)
class ThreadPostTest(Thread):
    """Thread that runs one PostTest iteration with its own offset."""

    def __init__(self, i):
        super().__init__()
        self.i = i  # status decrement used by this worker

    def run(self):
        # Bug fix: the original called PostTest(i), referencing a name that
        # does not exist in this scope (NameError); the stored self.i was
        # the intended argument.
        PostTest(self.i)
class ProcessPostTest(Process):
    """Process that runs one PostTest iteration with its own offset."""

    def __init__(self, i):
        super().__init__()
        self.i = i  # status decrement used by this worker

    def run(self):
        # Bug fix: same as ThreadPostTest — the bare name `i` is undefined
        # inside run(); self.i is the value captured at construction.
        PostTest(self.i)
# Launch 20 concurrent threads that each mutate the same API record —
# a manual stress test for concurrent writes against the dev server.
if __name__=='__main__':
    print('MainThread')
    for i in range(20):
        t=ThreadPostTest(i)
        t.start()
# Alternative multiprocessing variant, kept for comparison (note it calls
# run() directly rather than start(), so it would execute sequentially).
# if __name__=='__main__':
#     print("MainProcess")
#     for i in range(20):
#         p=ProcessPostTest(i)
#         p.run()
|
[
"JulienLux@outlook.com"
] |
JulienLux@outlook.com
|
1787abe842943a0154840bdb91e4d0f014d39be7
|
446db6048ea47710d3165fb468481b435963ea88
|
/cpf/profiling/factor_loadings.py
|
c6cca0d027a2a7c73c313b54f66916e8b77aa261
|
[] |
no_license
|
cclauss/meoir
|
17f07b427c3cc8c3757f8dc7d9539217ceba63d2
|
6a41f9be084bdb9b339904c3ce48d4d64d53f5c8
|
refs/heads/master
| 2020-03-28T18:04:46.175914
| 2016-06-10T01:15:25
| 2016-06-10T01:15:25
| 148,851,144
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,021
|
py
|
import sys
from optparse import OptionParser
import numpy as np
import cpf
from cpf.util import replace_atomically
from .profiles import Profiles
def get_loadings(preprocessor):
    """Return, per factor, the 15 strongest input-variable loadings.

    For each output variable (factor) of *preprocessor*, ranks input
    features by the absolute value of their loading in ``fa_node.A``
    and keeps the top 15, largest first.

    :returns: list of ``(factor_name, [(abs_weight, input_variable), ...])``.
    """
    abs_weights = np.abs(preprocessor.fa_node.A)
    # Row indexes of each column, sorted by |weight| descending.
    ranked = np.argsort(abs_weights, 0)[::-1]
    results = []
    for col in range(ranked.shape[1]):
        top = [
            (abs_weights[row, col], preprocessor.input_variables[row])
            for row in ranked[:15, col]
        ]
        results.append((preprocessor.variables[col], top))
    return results
def write_loadings_text(f, loadings):
    """Write factor loadings to file object *f* as plain text.

    One stanza per factor: the factor name, then one ``weight variable``
    line per loading; stanzas separated by a blank line.
    """
    # Bug fix: the original used Python 2 ``print >>f`` statements, which
    # do not print to *f* under Python 3 (they evaluate ``print >> f`` and
    # raise TypeError at runtime).
    for i, (factor, factor_loadings) in enumerate(loadings):
        if i > 0:
            print(file=f)  # blank line between factors
        print(factor, file=f)
        for weight, variable in factor_loadings:
            print('%f %s' % (weight, variable), file=f)
def write_loadings_latex(f, loadings):
    """Write factor loadings to file object *f* as LaTeX table rows.

    The factor name labels only its first row; factors are separated by
    a booktabs ``\\addlinespace``.
    """
    # Bug fix: converted Python 2 ``print >>f`` statements to Python 3
    # ``print(..., file=f)`` calls (the old form is a runtime error on
    # Python 3 and never writes to *f*).
    for i, (factor, factor_loadings) in enumerate(loadings):
        if i > 0:
            print('\\addlinespace', file=f)
        for j, (weight, variable) in enumerate(factor_loadings):
            label = factor if j == 0 else ''
            print('%s & %f & %s \\\\' % (label, weight, variable), file=f)
# CLI entry point: load a pickled factor model and print/save its loadings.
if __name__ == '__main__':
    parser = OptionParser("usage: %prog FACTOR-MODEL")
    parser.add_option('-o', dest='output_filename', help='file to store the output in')
    parser.add_option('--latex', dest='latex', help='output in LaTeX format', action='store_true')
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error('Incorrect number of arguments')
    factor_model_file, = args
    # NOTE(review): cpf.util.unpickle1 presumably unpickles a single object
    # from the file — confirm against the cpf package.
    factor_model = cpf.util.unpickle1(factor_model_file)
    loadings = get_loadings(factor_model)
    def write_loadings(f):
        # Dispatch on the requested output format.
        if options.latex:
            write_loadings_latex(f, loadings)
        else:
            write_loadings_text(f, loadings)
    if options.output_filename:
        # replace_atomically avoids leaving a half-written output file.
        with replace_atomically(options.output_filename) as f:
            write_loadings(f)
    else:
        write_loadings(sys.stdout)
|
[
"npawlow@broadinstitute.org"
] |
npawlow@broadinstitute.org
|
5b84c7bd6460b5497b535c082e946f0ab5d885d3
|
f2e54a05327391a6210151f086ab17dbd3e95b11
|
/prac_02/exceptions_to_complete.py
|
40138a338d93e6bb30371c44b3ec3f5ffa5ccefe
|
[] |
no_license
|
Shad-Man99/CP1404_practicals
|
a423e338dfbf4f9762a17d160a32899a12fccb29
|
5a503c81e61f0844eb4cfa926454e31324558ae0
|
refs/heads/master
| 2023-09-03T19:13:45.062034
| 2021-09-21T04:09:04
| 2021-09-21T04:09:04
| 400,345,226
| 0
| 0
| null | 2021-10-31T02:42:41
| 2021-08-27T00:47:10
|
Python
|
UTF-8
|
Python
| false
| false
| 288
|
py
|
# Keep prompting until the user supplies a valid integer.
finished = False
result = 0  # kept for compatibility with the exercise scaffold (unused)
while not finished:
    try:
        integer = int(input("Enter the integer: "))
        finished = True  # valid input ends the loop (was the missing TODO line)
    except ValueError:  # was a bare except, which also swallowed Ctrl-C etc.
        print("Please enter a valid integer.")
print("Valid result is:", integer)
|
[
"shadman.ahmed@my.jcu.edu.au"
] |
shadman.ahmed@my.jcu.edu.au
|
a4fdc9de45c76a6a1a815e1cd99ba2bd78688c44
|
cb44a41b72fe6e2320f87b4535d650014b7642f5
|
/.PyCharm2018.1/system/python_stubs/1921134460/_hashlib.py
|
045d2fcf470b802c8e0142783a06d2525b4c09ba
|
[] |
no_license
|
tommy1008/FYP-1
|
07a9dd2cf1bcab5d9ee3bfc2497052bb94eac729
|
0ec0803dddd7549c97813de522adcc8696acdebb
|
refs/heads/master
| 2020-04-08T00:52:10.056590
| 2018-11-23T19:05:48
| 2018-11-23T19:05:48
| 158,871,204
| 0
| 0
| null | 2018-11-23T19:32:13
| 2018-11-23T19:32:12
| null |
UTF-8
|
Python
| false
| false
| 3,617
|
py
|
# encoding: utf-8
# module _hashlib
# from /usr/lib/python3.6/lib-dynload/_hashlib.cpython-36m-x86_64-linux-gnu.so
# by generator 1.145
# no doc
# no imports
# functions
# IDE-generated stub; the real implementation lives in the _hashlib C extension.
def new(*args, **kwargs): # real signature unknown
    """
    Return a new hash object using the named algorithm.
    An optional string argument may be provided and will be
    automatically hashed.

    The MD5 and SHA1 algorithms are always supported.
    """
    pass
# IDE-generated stub for the OpenSSL-backed MD5 constructor.
def openssl_md5(*args, **kwargs): # real signature unknown
    """ Returns a md5 hash object; optionally initialized with a string """
    pass
# IDE-generated stub for the OpenSSL-backed SHA-1 constructor.
def openssl_sha1(*args, **kwargs): # real signature unknown
    """ Returns a sha1 hash object; optionally initialized with a string """
    pass
# IDE-generated stub for the OpenSSL-backed SHA-224 constructor.
def openssl_sha224(*args, **kwargs): # real signature unknown
    """ Returns a sha224 hash object; optionally initialized with a string """
    pass
# IDE-generated stub for the OpenSSL-backed SHA-256 constructor.
def openssl_sha256(*args, **kwargs): # real signature unknown
    """ Returns a sha256 hash object; optionally initialized with a string """
    pass
# IDE-generated stub for the OpenSSL-backed SHA-384 constructor.
def openssl_sha384(*args, **kwargs): # real signature unknown
    """ Returns a sha384 hash object; optionally initialized with a string """
    pass
# IDE-generated stub for the OpenSSL-backed SHA-512 constructor.
def openssl_sha512(*args, **kwargs): # real signature unknown
    """ Returns a sha512 hash object; optionally initialized with a string """
    pass
# IDE-generated stub; signature restored from the C extension's __doc__.
def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None): # real signature unknown; restored from __doc__
    """
    pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None) -> key

    Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as
    pseudorandom function.
    """
    pass
# IDE-generated stub for the scrypt KDF (requires OpenSSL 1.1+ at runtime).
def scrypt(*args, **kwargs): # real signature unknown
    """ scrypt password-based key derivation function. """
    pass
# classes
# IDE-generated stub mirroring the C-level hash object; all bodies are
# placeholders — do not call these at runtime.
class HASH(object):
    """
    A hash represents the object used to calculate a checksum of a
    string of information.

    Methods:

    update() -- updates the current digest with an additional string
    digest() -- return the current digest value
    hexdigest() -- return the current digest as a string of hexadecimal digits
    copy() -- return a copy of the current hash object

    Attributes:

    name -- the hash algorithm being used by this object
    digest_size -- number of bytes in this hashes output
    """
    def copy(self, *args, **kwargs): # real signature unknown
        """ Return a copy of the hash object. """
        pass

    def digest(self, *args, **kwargs): # real signature unknown
        """ Return the digest value as a string of binary data. """
        pass

    def hexdigest(self, *args, **kwargs): # real signature unknown
        """ Return the digest value as a string of hexadecimal digits. """
        pass

    def update(self, *args, **kwargs): # real signature unknown
        """ Update this hash object's state with the provided string. """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass

    # Generated property placeholders; the real attributes are read-only ints/str.
    block_size = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default

    digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default

    name = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """algorithm name."""
# variables with complex values

# Generated placeholders; real values come from the loaded C extension.
openssl_md_meth_names = None # (!) real value is ''

__loader__ = None # (!) real value is ''

__spec__ = None # (!) real value is ''
|
[
"wongchiho74@gmail.com"
] |
wongchiho74@gmail.com
|
69ec1afe0da70b8096667de106a8ace6273c8a8a
|
f7d36be2b7a268e7677acbaf64b12ffbefbe4779
|
/blog/migrations/0001_initial.py
|
f0af5d3e12c83c33cc09e6250e7c59e9be5ec62d
|
[] |
no_license
|
DjayDjay94/my-first-creation
|
fb42ba55ddfd4890fcd30a3f9c75e8005e939bba
|
2853f24af11da707c9313bd0104d493d20171d5b
|
refs/heads/master
| 2021-01-20T13:11:57.597366
| 2017-05-06T16:22:37
| 2017-05-06T16:22:37
| 90,462,871
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,049
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-06 10:34
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
# Auto-generated Django migration (1.11): creates the initial blog Post
# model. Do not edit applied migrations by hand — add a new migration instead.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        # Depends on whichever user model AUTH_USER_MODEL points at.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                # published_date is nullable: a Post may exist as a draft.
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"jjfoxanimator@gmail.com"
] |
jjfoxanimator@gmail.com
|
d8ad22416ce8dfe0bb4d386540ee31b331ded43f
|
5a33c7ea17fc079258813a76d357f4e7ec782d09
|
/old_code/graceyStashed.py
|
b534470eef162e3692e89be941b6e19f0b2fd9d7
|
[] |
no_license
|
ComplexKinect/software
|
9afd7536fc7ca008ecb70c87094715cb5837462e
|
7cae87d75f7dc4d20af9661144c17382cf2c811a
|
refs/heads/master
| 2021-08-29T12:19:31.079575
| 2017-12-14T00:04:55
| 2017-12-14T00:04:55
| 107,291,209
| 0
| 0
| null | 2017-12-13T15:35:01
| 2017-10-17T15:53:43
|
Python
|
UTF-8
|
Python
| false
| false
| 3,082
|
py
|
# NOTE(review): this file is broken git-stash residue — the '=======' below
# is a conflict marker, so the module does not parse as Python.
=======
winName = "Movement Indicator"

# Read three images first and crop each into 3 sections:
# NOTE(review): `stream`, `np` and `cv2` are undefined in this fragment —
# presumably a picamera stream and the numpy/cv2 imports were on the other
# side of the conflict.
data = np.fromstring(stream.getvalue(), dtype=np.uint8)
image = cv2.imdecode(data, 1)
def crop_image(image):
    """Split *image* into left/middle/right thirds along its width.

    :param image: 2-D (or H x W x C) array-like with ``shape[1]`` columns.
    :returns: list ``[left, middle, right]`` of column slices (views).
    """
    # Bug fix: the original ignored its `image` parameter (it sliced the
    # global `t_minus`) and returned `image1..image3`, names that were
    # never defined — a guaranteed NameError.
    width = image.shape[1]
    left = image[:, :width // 3]
    middle = image[:, width // 3:(2 * width) // 3]
    right = image[:, (2 * width) // 3:]
    return [left, middle, right]
# Main motion-detection loop over the three image sections.
# NOTE(review): `detect_face` and `diffImg` are undefined in this fragment,
# and `images` holds single crops while the loop unpacks 3-tuples from it —
# this stashed code cannot run as-is.
images = crop_image(image)
for item in images:
    detect_face(item)
while True:
    text = ""
    for i, t_list in enumerate(images):
        section1 = False
        section2 = False
        section3 = False
        t_minus, t, t_plus = t_list
        movement = diffImg(t_minus, t, t_plus)
        movement = cv2.cvtColor(movement, cv2.COLOR_BGR2GRAY) # not sure about RGB vs BGR?
        # make everything greater than 25 white and less black (binary black
        # or white)
        thresh = cv2.threshold(movement, 10, 255, cv2.THRESH_BINARY)[1]
        # dilate the thresholded image to fill in holes, then find contours
        # on thresholded image
        thresh = cv2.dilate(thresh, None, iterations=2)
        _, cnts, _ = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        # loop over the contours
        for c in cnts:
            # if the contour is too small, ignore it
            if cv2.contourArea(c) < 500:
                continue
            # draw the contours
            cv2.drawContours(t, c, -1, (0, 255, 0), 2)
            if i == 0:
                if "left" not in text:
                    text += "left"
                section1 = True
            if i== 1:
                if "middle" not in text:
                    text += "middle"
                section2 = True
            if i == 2:
                if "right" not in text:
                    text += "right"
                section3 = True
        sections = [section1,section2,section3]
        for section in sections:
            if serial:
                if section:
                    # NOTE(review): `section` is a bool here, so
                    # `sections[section+1]` indexes by True+1 == 2 — this
                    # looks like it should iterate indexes, not values.
                    cxn.write([int(sections[section+1])]) # write int(1 for section1, int(2) for section2, etc)
        # Read next image
        data = np.fromstring(stream.getvalue(), dtype=np.uint8)
        whole_image = cv2.imdecode(data, 1)
        if i == 0:
            cropped = whole_image[:,:whole_image.shape[1]//3]
        elif i == 1:
            cropped = whole_image[:,whole_image.shape[1]//3:(2*whole_image.shape[1])//3]
        elif i == 2:
            cropped = whole_image[:,(2*whole_image.shape[1])//3:]
        images[i] = [t, t_plus, cropped]
    key = cv2.waitKey(10)
    if key == 27:
        cv2.destroyWindow(winName)
        break
    cv2.putText(images[0][0], "{}".format(text), (10, 20),
        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
>>>>>>> Stashed changes
|
[
"gracey.wilson@students.olin.edu"
] |
gracey.wilson@students.olin.edu
|
c9cf32cbefca6ce8a3bdcbebcc4fe47ec382ccdc
|
d154cb87aaa6f5aa997b6df8466542489e89825d
|
/accounts/migrations/0004_auto_20211028_1915.py
|
93418092aa1ae00b6fb14d34dbcfa2c890a4d1db
|
[] |
no_license
|
peter-wairimu/backend
|
2dfbc3ab9a7e5f0fdc3c84d323ae9716e9d672bf
|
764b41d227476a6efab119458068c07243b7ac72
|
refs/heads/master
| 2023-08-25T15:05:01.913342
| 2021-10-29T06:20:45
| 2021-10-29T06:20:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 550
|
py
|
# Generated by Django 3.2.8 on 2021-10-28 16:15
from django.db import migrations, models
import django.utils.timezone
# Auto-generated Django migration (3.2.8): drops the Pupil model and adds a
# posted_date to Courses. Do not edit applied migrations by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0003_courses_description'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Pupil',
        ),
        migrations.AddField(
            model_name='courses',
            name='posted_date',
            # Default of "now" was supplied interactively for existing rows;
            # preserve_default=False drops it from the final field definition.
            field=models.DateTimeField(default=django.utils.timezone.now),
            preserve_default=False,
        ),
    ]
|
[
"peter.wairimu@student.moringaschool.com"
] |
peter.wairimu@student.moringaschool.com
|
b311dfc344d5c3592f43863fa22e36cb6e1f3e7a
|
ca78449ff9a54f769dd28b12f8413e97ac6f5595
|
/src/hyperjazcap_python/console.py
|
ea561e83d40098ee314d4648aef8d72b05ae8271
|
[
"MIT"
] |
permissive
|
jazcap53/hyperjazcap-python
|
124d8afc445938f84f57e3d422c85685e06603d8
|
4c29c3372f0e4729f5a64d8cb9b061b47440ce54
|
refs/heads/main
| 2023-03-31T18:13:00.399641
| 2021-03-31T23:20:35
| 2021-03-31T23:20:35
| 333,631,516
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 956
|
py
|
import locale
import re
import sys
import textwrap
import click
import requests
from . import __version__
# Derive the user's two-letter language code and point the API at the
# matching Wikipedia language subdomain.
language_code, encoding = locale.getlocale()
# Bug fix: getlocale() may return (None, None) (e.g. under the C locale),
# in which case the original ``language_code[:2]`` raised TypeError.
# Fall back to English when no usable two-letter code is available.
language = (language_code or "")[:2]
if len(language) != 2 or not language.isalpha():
    language = "en"

API_URL = 'https://fr.wikipedia.org/api/rest_v1/page/random/summary'
# Replace the hard-coded subdomain language with the user's language.
match = re.search(r'://(\w{2})', API_URL)
API_URL = API_URL.replace(match.group(1), language, 1)
@click.command()
@click.version_option(version=__version__)
def main():
    """Print the title and summary of a random Wikipedia page.

    Fetches a random-page summary from the locale-matched Wikipedia REST
    API; on an HTTP/network failure prints a friendly message and exits.
    """
    with requests.get(API_URL) as response:
        try:
            response.raise_for_status()
        except requests.RequestException as e:
            # Bug fix: raise_for_status() raises requests.HTTPError (a
            # RequestException subclass), never the builtin ConnectionError
            # the original caught; and exceptions have no ``.text``
            # attribute — str(e) carries the message.
            click.secho(str(e), bg='red')
            click.echo('Looks like the API is down. Please try again later!')
            sys.exit(0)
        data = response.json()
        title = data['title']
        extract = data['extract']
        click.secho(title, fg='green')
        click.secho(textwrap.fill(extract), bg='green')
|
[
"andrew.jarcho@gmail.com"
] |
andrew.jarcho@gmail.com
|
08b4d40d3a7eb9d6c14b010a67d977d3b6a32125
|
42c63d5f9c724c99ba93f77bdead51891fcf8623
|
/dist-packages/oslo_service/service.py
|
fd636fc50fa5a3234c2faa3e11db694374725543
|
[] |
no_license
|
liyongle/openstack-mitaka
|
115ae819d42ed9bf0922a8c0ab584fa99a3daf92
|
5ccd31c6c3b9aa68b9db1bdafcf1b029e8e37b33
|
refs/heads/master
| 2021-07-13T04:57:53.488114
| 2019-03-07T13:26:25
| 2019-03-07T13:26:25
| 174,311,782
| 0
| 1
| null | 2020-07-24T01:44:47
| 2019-03-07T09:18:55
|
Python
|
UTF-8
|
Python
| false
| false
| 22,872
|
py
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Generic Node base class for all workers that run on hosts."""
import abc
import collections
import copy
import errno
import io
import logging
import os
import random
import signal
import six
import sys
import time
import eventlet
from eventlet import event
from oslo_concurrency import lockutils
from oslo_service import eventlet_backdoor
from oslo_service._i18n import _LE, _LI, _LW
from oslo_service import _options
from oslo_service import systemd
from oslo_service import threadgroup
LOG = logging.getLogger(__name__)
def list_opts():
    """Entry point for oslo-config-generator.

    Returns a deep copy so callers may mutate the option list freely.
    """
    combined = _options.eventlet_backdoor_opts + _options.service_opts
    return [(None, copy.deepcopy(combined))]
def _is_daemon():
    """Return True when this process appears to be running as a daemon."""
    # The process group for a foreground process will match the
    # process group of the controlling terminal. If those values do
    # not match, or ioctl() fails on the stdout file handle, we assume
    # the process is running in the background as a daemon.
    # http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics
    try:
        is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno())
    except io.UnsupportedOperation:
        # Could not get the fileno for stdout, so we must be a daemon.
        is_daemon = True
    except OSError as err:
        if err.errno == errno.ENOTTY:
            # Assume we are a daemon because there is no terminal.
            is_daemon = True
        else:
            raise
    return is_daemon
def _is_sighup_and_daemon(signo):
    """Return True only for a supported SIGHUP received while daemonized."""
    handler = SignalHandler()
    if handler.is_signal_supported('SIGHUP') and signo == signal.SIGHUP:
        # Only a daemon treats SIGHUP as "reload"; foreground processes
        # let it terminate them.
        return _is_daemon()
    return False
def _check_service_base(service):
    """Validate that *service* derives from ServiceBase.

    :param service: object to validate.
    :raises TypeError: when *service* is not a ServiceBase instance.
    """
    if not isinstance(service, ServiceBase):
        # Fixed grammar in the error message ("must an" -> "must be an").
        raise TypeError("Service %(service)s must be an instance of %(base)s!"
                        % {'service': service, 'base': ServiceBase})
@six.add_metaclass(abc.ABCMeta)
class ServiceBase(object):
    """Base class for all services.

    Subclasses must implement the full start/stop/wait/reset lifecycle.
    """

    @abc.abstractmethod
    def start(self):
        """Start service."""

    @abc.abstractmethod
    def stop(self):
        """Stop service."""

    @abc.abstractmethod
    def wait(self):
        """Wait for service to complete."""

    @abc.abstractmethod
    def reset(self):
        """Reset service.

        Called in case service running in daemon mode receives SIGHUP.
        """
class Singleton(type):
    """Metaclass that caches one instance per class, guarded by a lock."""

    _instances = {}
    _semaphores = lockutils.Semaphores()

    def __call__(cls, *args, **kwargs):
        # Serialize construction so two concurrent first calls cannot
        # race and create two instances.
        with lockutils.lock('singleton_lock', semaphores=cls._semaphores):
            if cls not in cls._instances:
                cls._instances[cls] = super(Singleton, cls).__call__(
                    *args, **kwargs)
            return cls._instances[cls]
@six.add_metaclass(Singleton)
class SignalHandler(object):
    """Process-wide (singleton) registry of POSIX signal handlers.

    Maps signal names to numbers, multiplexes several callbacks per
    signal, and defers actual handler execution to an eventlet
    greenthread so the C-level handler stays signal-safe.
    """

    def __init__(self, *args, **kwargs):
        super(SignalHandler, self).__init__(*args, **kwargs)
        # Map all signal names to signal integer values and create a
        # reverse mapping (for easier + quick lookup).
        self._ignore_signals = ('SIG_DFL', 'SIG_IGN')
        self._signals_by_name = dict((name, getattr(signal, name))
                                     for name in dir(signal)
                                     if name.startswith("SIG")
                                     and name not in self._ignore_signals)
        self.signals_to_name = dict(
            (sigval, name)
            for (name, sigval) in self._signals_by_name.items())
        self._signal_handlers = collections.defaultdict(set)
        self.clear()

    def clear(self):
        """Restore default dispositions and drop all registered handlers."""
        for sig in self._signal_handlers:
            signal.signal(sig, signal.SIG_DFL)
        self._signal_handlers.clear()

    def add_handlers(self, signals, handler):
        """Register *handler* for every signal name in *signals*."""
        for sig in signals:
            self.add_handler(sig, handler)

    def add_handler(self, sig, handler):
        """Register *handler* for signal name *sig* (no-op if unsupported)."""
        if not self.is_signal_supported(sig):
            return
        signo = self._signals_by_name[sig]
        self._signal_handlers[signo].add(handler)
        signal.signal(signo, self._handle_signal)

    def _handle_signal(self, signo, frame):
        # This method can be called anytime, even between two Python
        # instructions. It's scheduled by the C signal handler of Python using
        # Py_AddPendingCall().
        #
        # We only do one thing: schedule a call to _handle_signal_cb() later.
        # eventlet.spawn() is not signal-safe: _handle_signal() can be called
        # during a call to eventlet.spawn(). This case is supported, it is
        # ok to schedule multiple calls to _handle_signal() with the same
        # signal number.
        #
        # To call to _handle_signal_cb() is delayed to avoid reentrant calls to
        # _handle_signal_cb(). It avoids race conditions like reentrant call to
        # clear(): clear() is not reentrant (bug #1538204).
        eventlet.spawn(self._handle_signal_cb, signo, frame)

    def _handle_signal_cb(self, signo, frame):
        """Run every handler registered for *signo* (in a greenthread)."""
        for handler in self._signal_handlers[signo]:
            handler(signo, frame)

    def is_signal_supported(self, sig_name):
        """Return True if *sig_name* exists on this platform."""
        return sig_name in self._signals_by_name
class Launcher(object):
    """Launch one or more services and wait for them to complete."""

    def __init__(self, conf):
        """Initialize the service launcher.

        :param conf: an oslo.config ConfigOpts instance.
        :returns: None

        """
        self.conf = conf
        conf.register_opts(_options.service_opts)
        self.services = Services()
        # Optional eventlet backdoor for live debugging; None if disabled.
        self.backdoor_port = (
            eventlet_backdoor.initialize_if_enabled(self.conf))

    def launch_service(self, service):
        """Load and start the given service.

        :param service: The service you would like to start, must be an
                        instance of :class:`oslo_service.service.ServiceBase`
        :returns: None

        """
        _check_service_base(service)
        service.backdoor_port = self.backdoor_port
        self.services.add(service)

    def stop(self):
        """Stop all services which are currently running.

        :returns: None

        """
        self.services.stop()

    def wait(self):
        """Wait until all services have been stopped, and then return.

        :returns: None

        """
        self.services.wait()

    def restart(self):
        """Reload config files and restart service.

        :returns: None

        """
        self.conf.reload_config_files()
        self.services.restart()
class SignalExit(SystemExit):
    """SystemExit variant that remembers which signal triggered it."""

    def __init__(self, signo, exccode=1):
        # Record the signal first; SystemExit stores the exit code.
        self.signo = signo
        super(SignalExit, self).__init__(exccode)
class ServiceLauncher(Launcher):
    """Runs one or more service in a parent process."""

    def __init__(self, conf):
        """Constructor.

        :param conf: an instance of ConfigOpts
        """
        super(ServiceLauncher, self).__init__(conf)
        self.signal_handler = SignalHandler()

    def _graceful_shutdown(self, *args):
        """SIGTERM handler: stop services, bounded by the shutdown timeout."""
        self.signal_handler.clear()
        if (self.conf.graceful_shutdown_timeout and
                self.signal_handler.is_signal_supported('SIGALRM')):
            # SIGALRM fires _on_timeout_exit if shutdown takes too long.
            signal.alarm(self.conf.graceful_shutdown_timeout)
        self.stop()

    def _reload_service(self, *args):
        """SIGHUP handler: unwind to wait() so it can restart services."""
        self.signal_handler.clear()
        raise SignalExit(signal.SIGHUP)

    def _fast_exit(self, *args):
        """SIGINT handler: exit immediately without cleanup."""
        LOG.info(_LI('Caught SIGINT signal, instantaneous exiting'))
        os._exit(1)

    def _on_timeout_exit(self, *args):
        """SIGALRM handler: graceful shutdown overran its deadline."""
        LOG.info(_LI('Graceful shutdown timeout exceeded, '
                     'instantaneous exiting'))
        os._exit(1)

    def handle_signal(self):
        """Set self._handle_signal as a signal handler."""
        self.signal_handler.add_handler('SIGTERM', self._graceful_shutdown)
        self.signal_handler.add_handler('SIGINT', self._fast_exit)
        self.signal_handler.add_handler('SIGHUP', self._reload_service)
        self.signal_handler.add_handler('SIGALRM', self._on_timeout_exit)

    def _wait_for_exit_or_signal(self):
        """Block until services finish or a signal unwinds the wait.

        :returns: ``(status, signo)`` — exit status (or None) and the
                  signal number that interrupted the wait (0 if none).
        """
        status = None
        signo = 0

        if self.conf.log_options:
            LOG.debug('Full set of CONF:')
            self.conf.log_opt_values(LOG, logging.DEBUG)

        try:
            super(ServiceLauncher, self).wait()
        except SignalExit as exc:
            signame = self.signal_handler.signals_to_name[exc.signo]
            LOG.info(_LI('Caught %s, exiting'), signame)
            status = exc.code
            signo = exc.signo
        except SystemExit as exc:
            self.stop()
            status = exc.code
        except Exception:
            self.stop()
        return status, signo

    def wait(self):
        """Wait for a service to terminate and restart it on SIGHUP.

        :returns: termination status
        """
        systemd.notify_once()
        self.signal_handler.clear()
        while True:
            self.handle_signal()
            status, signo = self._wait_for_exit_or_signal()
            # SIGHUP in a daemon means "reload and keep running".
            if not _is_sighup_and_daemon(signo):
                break
            self.restart()
        super(ServiceLauncher, self).wait()
        return status
class ServiceWrapper(object):
    """Bookkeeping for one launched service and its worker processes."""

    def __init__(self, service, workers):
        self.service = service   # the ServiceBase instance being run
        self.workers = workers   # desired number of child processes
        self.forktimes = []      # recent fork timestamps (fork-rate limiting)
        self.children = set()    # pids of live child processes
class ProcessLauncher(object):
"""Launch a service with a given number of workers."""
def __init__(self, conf, wait_interval=0.01):
"""Constructor.
:param conf: an instance of ConfigOpts
:param wait_interval: The interval to sleep for between checks
of child process exit.
"""
self.conf = conf
conf.register_opts(_options.service_opts)
self.children = {}
self.sigcaught = None
self.running = True
self.wait_interval = wait_interval
self.launcher = None
rfd, self.writepipe = os.pipe()
self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r')
self.signal_handler = SignalHandler()
self.handle_signal()
def handle_signal(self):
"""Add instance's signal handlers to class handlers."""
self.signal_handler.add_handlers(('SIGTERM', 'SIGHUP'),
self._handle_signal)
self.signal_handler.add_handler('SIGINT', self._fast_exit)
self.signal_handler.add_handler('SIGALRM', self._on_alarm_exit)
def _handle_signal(self, signo, frame):
"""Set signal handlers.
:param signo: signal number
:param frame: current stack frame
"""
self.sigcaught = signo
self.running = False
# Allow the process to be killed again and die from natural causes
self.signal_handler.clear()
def _fast_exit(self, signo, frame):
LOG.info(_LI('Caught SIGINT signal, instantaneous exiting'))
os._exit(1)
def _on_alarm_exit(self, signo, frame):
LOG.info(_LI('Graceful shutdown timeout exceeded, '
'instantaneous exiting'))
os._exit(1)
def _pipe_watcher(self):
# This will block until the write end is closed when the parent
# dies unexpectedly
self.readpipe.read(1)
LOG.info(_LI('Parent process has died unexpectedly, exiting'))
if self.launcher:
self.launcher.stop()
sys.exit(1)
def _child_process_handle_signal(self):
# Setup child signal handlers differently
def _sigterm(*args):
self.signal_handler.clear()
self.launcher.stop()
def _sighup(*args):
self.signal_handler.clear()
raise SignalExit(signal.SIGHUP)
self.signal_handler.clear()
# Parent signals with SIGTERM when it wants us to go away.
self.signal_handler.add_handler('SIGTERM', _sigterm)
self.signal_handler.add_handler('SIGHUP', _sighup)
self.signal_handler.add_handler('SIGINT', self._fast_exit)
def _child_wait_for_exit_or_signal(self, launcher):
status = 0
signo = 0
# NOTE(johannes): All exceptions are caught to ensure this
# doesn't fallback into the loop spawning children. It would
# be bad for a child to spawn more children.
try:
launcher.wait()
except SignalExit as exc:
signame = self.signal_handler.signals_to_name[exc.signo]
LOG.info(_LI('Child caught %s, exiting'), signame)
status = exc.code
signo = exc.signo
except SystemExit as exc:
status = exc.code
except BaseException:
LOG.exception(_LE('Unhandled exception'))
status = 2
return status, signo
def _child_process(self, service):
self._child_process_handle_signal()
# Reopen the eventlet hub to make sure we don't share an epoll
# fd with parent and/or siblings, which would be bad
eventlet.hubs.use_hub()
# Close write to ensure only parent has it open
os.close(self.writepipe)
# Create greenthread to watch for parent to close pipe
eventlet.spawn_n(self._pipe_watcher)
# Reseed random number generator
random.seed()
launcher = Launcher(self.conf)
launcher.launch_service(service)
return launcher
def _start_child(self, wrap):
if len(wrap.forktimes) > wrap.workers:
# Limit ourselves to one process a second (over the period of
# number of workers * 1 second). This will allow workers to
# start up quickly but ensure we don't fork off children that
# die instantly too quickly.
if time.time() - wrap.forktimes[0] < wrap.workers:
LOG.info(_LI('Forking too fast, sleeping'))
time.sleep(1)
wrap.forktimes.pop(0)
wrap.forktimes.append(time.time())
pid = os.fork()
if pid == 0:
self.launcher = self._child_process(wrap.service)
while True:
self._child_process_handle_signal()
status, signo = self._child_wait_for_exit_or_signal(
self.launcher)
if not _is_sighup_and_daemon(signo):
self.launcher.wait()
break
self.launcher.restart()
os._exit(status)
LOG.debug('Started child %d', pid)
wrap.children.add(pid)
self.children[pid] = wrap
return pid
def launch_service(self, service, workers=1):
"""Launch a service with a given number of workers.
:param service: a service to launch, must be an instance of
:class:`oslo_service.service.ServiceBase`
:param workers: a number of processes in which a service
will be running
"""
_check_service_base(service)
wrap = ServiceWrapper(service, workers)
LOG.info(_LI('Starting %d workers'), wrap.workers)
while self.running and len(wrap.children) < wrap.workers:
self._start_child(wrap)
def _wait_child(self):
try:
# Don't block if no child processes have exited
pid, status = os.waitpid(0, os.WNOHANG)
if not pid:
return None
except OSError as exc:
if exc.errno not in (errno.EINTR, errno.ECHILD):
raise
return None
if os.WIFSIGNALED(status):
sig = os.WTERMSIG(status)
LOG.info(_LI('Child %(pid)d killed by signal %(sig)d'),
dict(pid=pid, sig=sig))
else:
code = os.WEXITSTATUS(status)
LOG.info(_LI('Child %(pid)s exited with status %(code)d'),
dict(pid=pid, code=code))
if pid not in self.children:
LOG.warning(_LW('pid %d not in child list'), pid)
return None
wrap = self.children.pop(pid)
wrap.children.remove(pid)
return wrap
def _respawn_children(self):
while self.running:
wrap = self._wait_child()
if not wrap:
# Yield to other threads if no children have exited
# Sleep for a short time to avoid excessive CPU usage
# (see bug #1095346)
eventlet.greenthread.sleep(self.wait_interval)
continue
while self.running and len(wrap.children) < wrap.workers:
self._start_child(wrap)
def wait(self):
"""Loop waiting on children to die and respawning as necessary."""
systemd.notify_once()
if self.conf.log_options:
LOG.debug('Full set of CONF:')
self.conf.log_opt_values(LOG, logging.DEBUG)
try:
while True:
self.handle_signal()
self._respawn_children()
# No signal means that stop was called. Don't clean up here.
if not self.sigcaught:
return
signame = self.signal_handler.signals_to_name[self.sigcaught]
LOG.info(_LI('Caught %s, stopping children'), signame)
if not _is_sighup_and_daemon(self.sigcaught):
break
self.conf.reload_config_files()
for service in set(
[wrap.service for wrap in self.children.values()]):
service.reset()
for pid in self.children:
os.kill(pid, signal.SIGTERM)
self.running = True
self.sigcaught = None
except eventlet.greenlet.GreenletExit:
LOG.info(_LI("Wait called after thread killed. Cleaning up."))
# if we are here it means that we are trying to do graceful shutdown.
# add alarm watching that graceful_shutdown_timeout is not exceeded
if (self.conf.graceful_shutdown_timeout and
self.signal_handler.is_signal_supported('SIGALRM')):
signal.alarm(self.conf.graceful_shutdown_timeout)
self.stop()
def stop(self):
    """Terminate child processes and wait on each."""
    self.running = False
    LOG.debug("Stop services.")
    # Give each distinct service object a chance to clean up first.
    for service in set(
            [wrap.service for wrap in self.children.values()]):
        service.stop()
    LOG.debug("Killing children.")
    # Ask every child to exit; ESRCH means it is already gone, which is fine.
    for pid in self.children:
        try:
            os.kill(pid, signal.SIGTERM)
        except OSError as exc:
            if exc.errno != errno.ESRCH:
                raise
    # Wait for children to die
    if self.children:
        LOG.info(_LI('Waiting on %d children to exit'), len(self.children))
        while self.children:
            self._wait_child()
class Service(ServiceBase):
    """Base service for long-running binaries hosted on a machine."""

    def __init__(self, threads=1000):
        # Pool that owns every green thread the service spawns.
        self.tg = threadgroup.ThreadGroup(threads)

    def reset(self):
        """Handle a SIGHUP by resetting service state (no-op by default)."""

    def start(self):
        """Begin running the service (no-op by default)."""

    def stop(self, graceful=False):
        """Shut the service down.

        :param graceful: when True, wait for running threads to finish;
            otherwise terminate them immediately
        """
        self.tg.stop(graceful)

    def wait(self):
        """Block until the service's threads have finished."""
        self.tg.wait()
class Services(object):
    """Bookkeeping for a set of services running inside one thread group."""

    def __init__(self):
        self.services = []
        self.tg = threadgroup.ThreadGroup()
        self.done = event.Event()

    def add(self, service):
        """Register *service* and spawn a thread that runs it.

        :param service: service to run
        """
        self.services.append(service)
        self.tg.add_thread(self.run_service, service, self.done)

    def stop(self):
        """Stop every service, release the wrapper threads, reap them."""
        for svc in self.services:
            svc.stop()
        # Every service has cleaned up; now let the run_service wrapper
        # threads fall through their done.wait() and exit.
        if not self.done.ready():
            self.done.send()
        # Reap the worker threads.
        self.tg.stop()

    def wait(self):
        """Block until every service and its thread have shut down."""
        for svc in self.services:
            svc.wait()
        self.tg.wait()

    def restart(self):
        """Stop everything, then reset and relaunch each service."""
        self.stop()
        self.done = event.Event()
        for svc in self.services:
            svc.reset()
            self.tg.add_thread(self.run_service, svc, self.done)

    @staticmethod
    def run_service(service, done):
        """Service start wrapper.

        :param service: service to run
        :param done: event to wait on until a shutdown is triggered
        :returns: None
        """
        try:
            service.start()
        except Exception:
            LOG.exception(_LE('Error starting thread.'))
            raise SystemExit(1)
        else:
            done.wait()
def launch(conf, service, workers=1):
    """Launch a service with a given number of workers.

    :param conf: an instance of ConfigOpts
    :param service: a service to launch, must be an instance of
        :class:`oslo_service.service.ServiceBase`
    :param workers: a number of processes in which a service will be running
    :returns: instance of a launcher that was used to launch the service
    """
    if workers is not None and workers <= 0:
        raise ValueError("Number of workers should be positive!")

    # One worker (or unspecified) runs in-process; more than one forks.
    single_process = workers is None or workers == 1
    if single_process:
        launcher = ServiceLauncher(conf)
        launcher.launch_service(service)
    else:
        launcher = ProcessLauncher(conf)
        launcher.launch_service(service, workers=workers)
    return launcher
|
[
"yongle.li@gmail.com"
] |
yongle.li@gmail.com
|
aaa19eae943f9c09026b2812082a18b3d0861b78
|
b2d5e31dd873f89cb6ec1ba7d599de5a617da3c9
|
/helloworld.py
|
096a846f5253fa5235962cf1d2d35e3eb940d4cb
|
[] |
no_license
|
nature-cyj/Git-Starting
|
7393a41fa5bf6d513853b6c460d1203c3ee21cba
|
aa4f461bd7426623eb2f9b4a6b2d9cc79c964e94
|
refs/heads/master
| 2020-07-01T11:44:21.031143
| 2019-08-09T05:53:47
| 2019-08-09T05:53:47
| 201,165,371
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 134
|
py
|
"""Print two greeting lines ten times each."""

# Ten copies of "Hello World".
for _ in range(10):
    print("Hello World")

# Blank separator line, then ten copies of "Hi".
print()
for _ in range(10):
    print("Hi")
|
[
"cyoonj007@gmail.com"
] |
cyoonj007@gmail.com
|
cd2040e5de166d2a4a17b75017a50f98fc4c6e62
|
46623eb4b5369db4583e9764e474109b1714aaa9
|
/python3/(3040)BOJ.py
|
cb51c0019e006b74bc96d985d7a136edf369e7fe
|
[] |
no_license
|
dwkang707/BOJ
|
2dc97bc3598f9bd3ec924f4c4e70c9742db451f8
|
9c1904bd3ae2d4ac9225ab303a6aa73171e6644c
|
refs/heads/master
| 2021-06-21T16:21:47.688781
| 2021-05-25T09:23:44
| 2021-05-25T09:23:44
| 209,773,506
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 858
|
py
|
# https://www.acmicpc.net/problem/3040
# Nine heights are reported but only seven belong to real dwarfs, and the
# real seven sum to exactly 100.  Find and print the seven genuine heights.
#
# Fixes over the original: the accumulator no longer shadows the builtin
# ``sum``; the hand-rolled double-break/index bookkeeping (which aborted the
# inner scan whenever heights[i] >= surplus) is replaced by a plain pair
# search that cannot skip a valid answer.
heights = [int(input()) for _ in range(9)]

# The two impostors must account for everything above 100.
surplus = sum(heights) - 100

# First index pair whose heights add up to the surplus (the problem
# guarantees at least one solution exists).
impostors = ()
for i in range(len(heights) - 1):
    for j in range(i + 1, len(heights)):
        if heights[i] + heights[j] == surplus:
            impostors = (i, j)
            break
    if impostors:
        break

# Print the seven genuine dwarfs in input order.
for idx, height in enumerate(heights):
    if idx not in impostors:
        print(height)
|
[
"dwkang707@gmail.com"
] |
dwkang707@gmail.com
|
7a9fcbccbd74292822a9e0f1a88c305b039580fd
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/verbs/_abjuring.py
|
975721ba4c538b8cffe76aac81030495144a3203
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 240
|
py
|
from xai.brain.wordbase.verbs._abjure import _ABJURE


# Class header: present-participle lexicon entry derived from the base verb.
class _ABJURING(_ABJURE):
    def __init__(self):
        _ABJURE.__init__(self)
        # Word-form metadata for this entry.
        self.name = "ABJURING"
        self.basic = "abjure"
        self.specie = 'verbs'
        self.jsondata = {}
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
2c206db60ad0f9da4a2631c5af210177fb1706a0
|
f8d5b27faf5307387d00907f3a730704d275ee93
|
/EOD_Venv/lib/python3.7/locale.py
|
17052e8bdd0815895f60418a0b4366d95da7d7cf
|
[] |
no_license
|
luisroel91/eod_coding_test
|
ac9503d5ec867f7676ae6fe4fcd878703b134fba
|
3dfbf802664a42635d65fa51dd0fbfa7f6c34f5c
|
refs/heads/master
| 2020-05-09T14:55:38.932093
| 2019-04-13T17:20:39
| 2019-04-13T17:20:39
| 181,204,967
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 48
|
py
|
/Users/phaseon/anaconda3/lib/python3.7/locale.py
|
[
"luisroel@me.com"
] |
luisroel@me.com
|
1f299a491c220e8a79c47ad29424eabd94d09a73
|
9e4bb2c4c7de6e4b0e931b03a5af6032d5f4053c
|
/WebScraperFinal.py
|
51de834206ff73fd119a13f97706755d9ce5bea0
|
[] |
no_license
|
jakeack27/Project-Work
|
669c4458762fa6c4230219fd7fabb1b928bb48e8
|
3230ff62252f602c07901f2a340499429b22eb17
|
refs/heads/main
| 2023-04-21T18:08:44.753433
| 2021-05-14T08:45:41
| 2021-05-14T08:45:41
| 367,151,849
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,448
|
py
|
import requests
import os.path
from bs4 import BeautifulSoup

# Labels written in front of every scraped text in the output file:
# 'con' = conspiracy forum post, 'non' = mainstream news article.
con_label = 'con'
non_label = 'non'
link_list = []
contents_list = []
'''These variables store the URLs that will be stripped '''
URL = 'https://projectavalon.net/forum4/forumdisplay.php?187-5G'
URL_CONVO = 'https://theconversation.com/four-experts-investigate-how-the-5g-coronavirus-conspiracy-theory-began-139137'
URL_BBC = 'https://www.bbc.co.uk/news/53191523'
URL_TELE = 'https://telecoms.com/503845/5g-conspiracy-theories-what-they-are-why-they-are-wrong-and-what-can-be-done/'
URL_WIRED = 'https://www.wired.com/story/the-rise-and-spread-of-a-5g-coronavirus-conspiracy-theory/'
URL_OBSE = 'https://observer.com/2020/08/extreme-5g-conspiracy-theories-where-they-come-from-covid-19/'
URL_FULL = 'https://fullfact.org/online/5g-and-coronavirus-conspiracy-theories-came/'
URL_POP = 'https://www.popularmechanics.com/technology/infrastructure/a34025852/are-5g-towers-safe/'
URL_VOX = 'https://www.vox.com/recode/2020/4/24/21231085/coronavirus-5g-conspiracy-theory-covid-facebook-youtube'
URL_EURO = 'https://www.euronews.com/2020/05/15/what-is-the-truth-behind-the-5g-coronavirus-conspiracy-theory-culture-clash'
URL_SKY = 'https://news.sky.com/story/coronavirus-father-of-three-who-searched-for-5g-conspiracy-theories-online-jailed-for-arson-attack-on-phone-mast-12002914'
URL_DRUM = 'https://www.thedrum.com/news/2020/09/23/mast-conspiracies-after-5g-s-bad-reception-can-marketing-help-it-connect'
URL_REUT = 'https://www.reuters.com/world/middle-east-africa/5g-covid-19-conspiracy-theory-baseless-fake-safricas-telecoms-regulator-says-2021-01-11/#main-content'
URL_VICE = 'https://www.vice.com/en/article/pke7yv/5g-coronavirus-conspiracy-theory-origin'
'''These variables request the page from the internet using the URLs.'''
# NOTE(review): no timeout or status check on any request — a dead site will
# hang or feed error HTML into the parsers; confirm whether that is acceptable.
page = requests.get(URL)
bbc_page = requests.get(URL_BBC)
convo_page = requests.get(URL_CONVO)
tele_page = requests.get(URL_TELE)
wired_page = requests.get(URL_WIRED)
obse_page = requests.get(URL_OBSE)
full_page = requests.get(URL_FULL)
pop_page = requests.get(URL_POP)
vox_page = requests.get(URL_VOX)
euro_page = requests.get(URL_EURO)
sky_page = requests.get(URL_SKY)
drum_page = requests.get(URL_DRUM)
reut_page = requests.get(URL_REUT)
vice_page = requests.get(URL_VICE)
'''These variables retrieve the content from the pages using the BeautifulSoup module. '''
soup = BeautifulSoup(page.content, 'html.parser')
bbc_soup = BeautifulSoup(bbc_page.content, 'html.parser')
convo_soup = BeautifulSoup(convo_page.content, 'html.parser')
tele_soup = BeautifulSoup(tele_page.content, 'html.parser')
wired_soup = BeautifulSoup(wired_page.content, 'html.parser')
obse_soup = BeautifulSoup(obse_page.content, 'html.parser')
full_soup = BeautifulSoup(full_page.content, 'html.parser')
pop_soup = BeautifulSoup(pop_page.content, 'html.parser')
vox_soup = BeautifulSoup(vox_page.content, 'html.parser')
euro_soup = BeautifulSoup(euro_page.content, 'html.parser')
sky_soup = BeautifulSoup(sky_page.content, 'html.parser')
drum_soup = BeautifulSoup(drum_page.content, 'html.parser')
reut_soup = BeautifulSoup(reut_page.content, 'html.parser')
vice_soup = BeautifulSoup(vice_page.content, 'html.parser')
'''This if statement checks if the conspiracyData.txt file exists and if it doesnt
exists it creates it.'''
if os.path.exists('conspiracyData.txt'):
    print("File already exists.")
else:
    # NOTE(review): the handle from the 'x' open is never closed before the
    # rebind below — harmless here, but it leaks a file object.
    f = open("conspiracyData.txt", "x")
    print("File has been created.")
'''This line opens the file so the data can be written to the file. The file is closed
at the end of the program.'''
f = open("conspiracyData.txt", "w")
# Each news site below needs its own CSS selector and cleanup; the shared
# pattern is: locate the article body, flatten its text, strip newlines,
# then append one tab-separated "<label>\t<text>" line to the output file.
###BBC ARTICLE STRIPPING###
'''This line finds all lines that belong to class'''
bbc_results = bbc_soup.find_all('div', class_='ssrcss-uf6wea-RichTextComponentWrapper e1xue1i83')
'''This for loop loops through the bbc_results, formats it and adds to a list.'''
for i in bbc_results:
    bbc_results_formatted = i.text.strip()
    contents_list.append(bbc_results_formatted)
'''These lines join the contents of the list, remove all the contents from the
list, and replace any new lines with a space.'''
bbc_final = ' '.join(contents_list)
contents_list.clear()
bbc_final = bbc_final.replace('\n', ' ')
'''This line writes the post to the file along with the label.'''
f.write(non_label+'\t'+bbc_final+'\n')
#print(bbc_final)
#print(len(bbc_final))
###CONVO ARTICLE STRIPPING###
'''This line finds the class that contains the posts contents.'''
convo_results = convo_soup.find('div', class_='grid-ten large-grid-nine grid-last content-body content entry-content instapaper_body inline-promos')
'''This line formats the contents and replaces newlines with spaces.'''
convo_final = convo_results.text.strip().replace('\n', ' ')
'''This line writes the post to the file along with the label.'''
f.write(non_label+'\t'+convo_final+'\n')
#print(convo_final)
#print(len(convo_final))
###TELE ARTICLE STRIPPING###
tele_results = tele_soup.find_all('div', itemprop='articleBody')
for i in tele_results:
    tele_results_formatted = i.text.strip()
    contents_list.append(tele_results_formatted)
# NOTE(review): joined with '' here, unlike the ' ' used elsewhere — confirm
# whether that is intentional for this site.
tele_final = ''.join(contents_list)
contents_list.clear()
tele_final = tele_final.replace('\n', ' ')
f.write(non_label+'\t'+tele_final+'\n')
#print(tele_final)
#print(len(tele_final))
###WIRED ARTICLE STRIPPING###
wired_results = wired_soup.find('div', class_='article__chunks')
wired_final = wired_results.text.strip().replace('\n', ' ')
f.write(non_label+'\t'+wired_final+'\n')
#print(wired_final)
#print(len(wired_final))
###OBSE ARTICLE STRIPPING###
obse_results = obse_soup.find('div', class_='entry-content')
obse_final = obse_results.text.strip().replace('\n', ' ')
f.write(non_label+'\t'+obse_final+'\n')
#print(obse_final)
#print(len(obse_final))
###FULL ARTICLE STRIPPING###
full_results = full_soup.find('article')
full_final = full_results.text.strip().replace('\n', ' ')
f.write(non_label+'\t'+full_final+'\n')
#print(full_final)
###EURO ARTICLE STRIPPING###
euro_results = euro_soup.find_all('p')
for i in euro_results:
    euro_results_formatted = i.text.strip()
    contents_list.append(euro_results_formatted)
euro_final = ' '.join(contents_list)
contents_list.clear()
euro_final = euro_final.replace('\n', ' ')
f.write(non_label+'\t'+euro_final+'\n')
#print(euro_final)
###pop article stripping###
pop_results = pop_soup.find_all('p', class_='body-text')
for i in pop_results:
    pop_results_formatted = i.text.strip()
    contents_list.append(pop_results_formatted)
pop_final = ' '.join(contents_list)
contents_list.clear()
# Also drop the "\u27a1" (right-arrow) glyphs this site embeds in its copy.
pop_final = pop_final.replace('\n', ' ').replace('\u27a1', '')
f.write(non_label+'\t'+pop_final+'\n')
#print(pop_final)
###VOX ARTICLE STRIPPING###
vox_results = vox_soup.find('div', class_='c-entry-content')
vox_final = vox_results.text.strip().replace('\n', ' ')
f.write(non_label+'\t'+vox_final+'\n')
### SKY ARTICLE STRIPPING ###
sky_results = sky_soup.find_all('p')
for i in sky_results:
    sky_results_formatted = i.text.strip()
    contents_list.append(sky_results_formatted)
sky_final = ' '.join(contents_list)
contents_list.clear()
sky_final = sky_final.replace('\n', ' ')
f.write(non_label+'\t'+sky_final+'\n')
### DRUM ARTICLE STRIPPING ###
# NOTE(review): the last three sites skip the '\n' -> ' ' replacement the
# earlier ones perform — confirm whether embedded newlines are acceptable here.
drum_results = drum_soup.find('div', id='articleMainBody')
drum_final = drum_results.text.strip()
f.write(non_label+'\t'+drum_final+'\n')
### REUT ARTICLE STRIPPING ###
reut_results = reut_soup.find('div', class_='ArticleBody__content___2gQno2 paywall-article')
reut_final = reut_results.text.strip()
f.write(non_label+'\t'+reut_final+'\n')
### VICE ARTICLE STRIPPING ###
vice_results = vice_soup.find('div', class_='article__body-components')
vice_final = vice_results.text.strip()
f.write(non_label+'\t'+vice_final+'\n')
###### collecting all the links on the web page #######
'''These two lines ffind the part of the html where the post links are located and
then finding all the posts in the forum.'''
results = soup.find(id='thread_inlinemod_form')
post_elems = results.find_all('li', class_='threadbit')
'''This for loop selects each post and obtains its link.'''
for elems in post_elems:
    thread_link = elems.find('a', class_='title')['href']
    link_list.append(thread_link)
###### taking the first link in the list, editing and removing it ######
#print("Number of links in the list: " + str(len(link_list)))
# Thread hrefs are relative; prefix them with the forum base URL.
edit = 'https://projectavalon.net/forum4/'
'''This for loop attaches an edit to each link.'''
for i in link_list:
    first_link_unedited = i
    first_link_edited = edit + first_link_unedited
    '''These lines requests the page from each link, retriesve the content, finds
    the content, formats the content, and them writes it to the file.'''
    new_page = requests.get(first_link_edited)
    new_soup = BeautifulSoup(new_page.content, 'html.parser')
    page_content = new_soup.find('div', class_='content')
    # Strip Windows-1252 artefacts, stray Unicode and newlines in one pass.
    page_content_stripped = page_content.text.strip().replace('\x92', '').replace('\x91', '').replace('\u010d', '').replace('\u0107', '').replace('\x96', '').replace('\x99', '').replace('\n', ' ').replace('\r', '').replace('/t', '')
    # Forum posts are the positive ('con') class.
    f.write(con_label+'\t'+page_content_stripped+'\n')
f.close()
|
[
"noreply@github.com"
] |
jakeack27.noreply@github.com
|
5b35410de87938f3092c60cc3eea8e5643667d77
|
e8ab0029ebb965fc0479134b6faa5e6ebc790083
|
/1_3_image_features.py
|
8c760423a2c567a90caa922e44896f45dcc8c022
|
[] |
no_license
|
shenglih/fluency
|
0182411a0b867dd01c8e6f481dddad8975d0d3b7
|
2d38ed05703ee1237cd23fa1ab53fa5aa91557ac
|
refs/heads/master
| 2021-09-16T12:45:20.579437
| 2018-06-20T19:18:03
| 2018-06-20T19:18:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 864
|
py
|
#!/usr/bin/env python
import os
from PIL import Image
import numpy as np
import sys
import codecs
import pandas as pd
import pickle
import cPickle
from skimage.feature import hog
from skimage import color, exposure
folder = sys.argv[1]
#Directory containing images you wish to convert
input_dir = "/mnt/saswork/sh2264/vision/data"
directories = os.listdir(input_dir)
index = 0
images = os.listdir(input_dir + '/' + folder)
os.chdir(input_dir + '/' + folder)
#index += 1
index2 = 0
for image in images:
if image == ".DS_Store" or (not image.endswith(".jpeg")):
continue
else:
try:
im = Image.open(image).convert("RGB") #Opening image
if im.size[0] == 200:
size = 195,195
im.thumbnail(size, Image.ANTIALIAS)
imhog = color.rgb2gray(im)
fd, hog_image = hog(imhog, orientations = 8, pixels_per_cell = (8,8), cells_per_block = (1,1))
|
[
"noreply@github.com"
] |
shenglih.noreply@github.com
|
91b43f84fcdff3b27db402f0c074345b3d0cf485
|
7bbbfc459c2aff238080743fd1e257d82b1d970c
|
/Python 2/Clase N6/servidor.py
|
5ced7ad5808c7443f845ac1eaa19a459b20d7896
|
[] |
no_license
|
jchaconm/PythonGroup_12
|
1331ef9fa76d5bf6012c2227fdda701f57ed681c
|
1fea024467bd3c19e66bf1b4f19c7a9ed0960b3d
|
refs/heads/master
| 2022-04-08T01:33:02.237598
| 2020-03-21T01:05:53
| 2020-03-21T01:05:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712
|
py
|
import sys, socket
ip = '127.0.0.1'
def enviar():
    """Prompt for one line, send it to the peer on port 9996, then close.

    Typing 'quit' still terminates the whole program after sending.

    Fix: the original only closed the socket on the 'quit' path, leaking a
    connected socket on every normal call.
    """
    s = socket.socket()
    try:
        s.connect((ip, 9996))
        mensaje = input("[>] ")
        s.send(mensaje.encode())
        if mensaje == 'quit':
            print('bye bye')
            sys.exit()
    finally:
        # Always release the socket, quit or not.
        s.close()
def recibir():
    """Accept one connection on port 9995, echo the payload back, and close.

    Receiving b'quit' terminates the whole program.

    Fixes: ``recv()`` returns bytes, so the original ``recibido == 'quit'``
    comparison could never be true in Python 3 and 'quit' was ignored; the
    sockets were also never closed on the normal path, so the next call's
    ``bind()`` on port 9995 could fail with EADDRINUSE.
    """
    s = socket.socket()
    s.bind(("", 9995))
    s.listen(1)
    sc, addr = s.accept()
    recibido = sc.recv(1024)
    if recibido == b'quit':
        print('bye bye')
        sc.close()
        s.close()
        sys.exit()
    print("Recibido:", recibido)
    sc.send(recibido)
    # Release both sockets so the listening port can be reused next call.
    sc.close()
    s.close()
def cambiar(word):
    """Return the opposite role name; any other value passes through."""
    toggle = {'enviar': 'recibir', 'recibir': 'enviar'}
    return toggle.get(word, word)
# Alternate forever between the sender and receiver roles, swapping after
# every turn.  NOTE(review): the sender connects to port 9996 while the
# receiver binds 9995 — presumably the peer runs the mirrored script; verify.
accion = 'enviar'
while True:
    if accion == 'enviar':
        enviar()
    elif accion == 'recibir':
        recibir()
    accion = cambiar(accion)
|
[
"o en todo caso no usar --global"
] |
o en todo caso no usar --global
|
1e7525d67b8cf7f698602c03d6581b36d93da118
|
54fe4fb6f5ca02fb5d35ab145d82949b05c285fe
|
/Probability/clt.py
|
6c305bc18d2def3d6c9edd9edb6d8f978ff1bf6f
|
[] |
no_license
|
mahimaarora/notitia
|
4a0e2a26bdb05818d9ee7995e10e3346c8da89c0
|
7e663c20620ffdb9e951349f05a766338f10ad95
|
refs/heads/master
| 2021-08-31T23:06:49.417513
| 2017-12-23T10:06:06
| 2017-12-23T10:06:06
| 112,169,449
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,591
|
py
|
from matplotlib import pyplot as plt
import random
import collections
import math
# NOTE: the definitions were hoisted out of the __main__ guard so the module
# can be imported and unit-tested; running the file as a script is unchanged.

def uniform_pdf(x):
    """Density of Uniform(0, 1): 1 on [0, 1), else 0.

    Fix: the original tested ``x in range(0, 1)``, which only matches the
    integer 0, so every other point of [0, 1) wrongly got density 0.
    """
    return 1 if 0 <= x < 1 else 0


def uniform_cdf(x):
    """CDF of Uniform(0, 1)."""
    if x < 0:
        return 0
    elif x < 1:
        return x
    else:
        return 1


def normal_pdf(x, mu=0, sigma=1):
    """Density of the Normal(mu, sigma) distribution at x."""
    sqrt_two_pi = math.sqrt(2 * math.pi)
    return math.exp(-(x - mu) ** 2 / 2 / sigma ** 2) / (sqrt_two_pi * sigma)


def bernoulli_trial(p):
    """Return 1 with probability p, else 0."""
    return 1 if random.random() < p else 0


def binomial(n, p):
    """Sample a Binomial(n, p) variate as the sum of n Bernoulli trials."""
    return sum(bernoulli_trial(p) for _ in range(n))


def normal_cdf(x, mu=0, sigma=1):
    """CDF of the Normal(mu, sigma) distribution at x."""
    return (1 + math.erf((x - mu) / math.sqrt(2) / sigma)) / 2


def make_hist(p, n, num_points):
    """Plot num_points Binomial(n, p) samples against the CLT's normal
    approximation N(np, np(1-p)).
    """
    data = [binomial(n, p) for _ in range(num_points)]
    # bar chart => actual binomial samples
    histogram = collections.Counter(data)
    plt.bar([x for x in histogram.keys()],
            [y / num_points for y in histogram.values()],
            0.6,
            color='0.75')
    mu = p * n
    sigma = math.sqrt(n * p * (1 - p))
    # line chart => normal approximation: P(X == i) ~ F(i+0.5) - F(i-0.5)
    xs = range(min(data), max(data))
    ys = []
    for i in xs:
        ys.append(normal_cdf(i + 0.5, mu, sigma) - normal_cdf(i - 0.5, mu, sigma))
    plt.plot(xs, ys)
    plt.title('Binomial Distribution vs. Normal Approximation')
    plt.show()


if __name__ == '__main__':
    make_hist(0.6, 100, 10000)
|
[
"mahima.arora2597@gmail.com"
] |
mahima.arora2597@gmail.com
|
3e094848167310d14a5c98b20af3c480c5359fa4
|
766ac69d692a8156b45f506599af998cfec87466
|
/scheduler/display_cal/urls.py
|
5a6d94b63af42c4a346ae11d96cba293b3be8942
|
[] |
no_license
|
GeorgeLu97/TartanHacks
|
88947b59d2fd97d6b5096bdae4b16333cf0c3b75
|
81124fbd7e0bba50a77261edc03592bf6113f7b6
|
refs/heads/master
| 2016-08-12T22:30:17.419575
| 2016-02-06T22:47:36
| 2016-02-06T22:47:36
| 51,184,744
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 142
|
py
|
from django.conf.urls import url
from . import views

# Route /<username>/ (alphanumeric only) to the calendar index view.
urlpatterns = [
    url(r'^(?P<username>[a-zA-Z0-9]+)/$', views.index, name='index'),
]
|
[
"yaochisite@gmail.com"
] |
yaochisite@gmail.com
|
ef6e56d293859a5fb41e0fc34cf9e96c82dac13b
|
f1e0fa146050dac18049be305520885d6592af89
|
/Øving 7/øving_7_4.py
|
52bf405430be308e5b8794e1c06526618c6f853e
|
[] |
no_license
|
ankile/ITGK-TDT4110
|
cc09a8df95f77e1bb75d463d9c17427a16782a77
|
a2b738d3e52bd1d115e0c3afcf01012a429695d5
|
refs/heads/master
| 2020-04-01T21:53:32.142073
| 2018-10-18T20:26:58
| 2018-10-18T20:26:58
| 153,680,667
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 647
|
py
|
from random import randint
import øving_7_8
from statistics import mode
# a) Build a list of 100 random integers in [1, 10].
random_numbers = []
for i in range(100):
    random_numbers.append(randint(1, 10))
print('random_numbers:', random_numbers, '\n')
# b) Count how many 2s the list contains.
print('Antallet 2ere i listen:', random_numbers.count(2), '\n')
# c) Sum of all the elements.
print('Sum:', sum(random_numbers), '\n')
# d) Sort the list with the exercise module's own bubble sort.
random_numbers = øving_7_8.bubble_sort(random_numbers)
print(random_numbers, '\n')
# e) Most common value ("typetall" = mode).
def typetall(liste):
    """Return the mode of *liste*, or an explanatory string when no mode
    can be computed (e.g. for an empty list).

    Fix: the original used a bare ``except:``, which would also swallow
    SystemExit/KeyboardInterrupt; catch StatisticsError specifically.
    """
    from statistics import StatisticsError
    try:
        return mode(liste)
    except StatisticsError:
        return 'det var ingen unike typetall'
print('Typetall:', typetall(random_numbers), '\n')
# f) Sort once more using the exercise module's selection sort.
print(øving_7_8.selection_sort(random_numbers))
|
[
"lars.lien@ignite.no"
] |
lars.lien@ignite.no
|
4aac60c690348d72e498e4bd9de65d1dc8363656
|
0fca05dd0201c569c11bd7c5b96125fbd67760f8
|
/test.py
|
fde731dfea3794600ba05f462ba4b42bc5f82595
|
[] |
no_license
|
badri03iter/Python-Essentials
|
fd3f463cc06c22bf732418d7dca9569f7b82ea08
|
2522163c512af7342dffb71985314213a7b46aec
|
refs/heads/master
| 2021-01-12T14:19:28.056373
| 2016-11-20T06:21:04
| 2016-11-20T06:21:04
| 69,422,211
| 0
| 1
| null | 2016-11-20T06:21:04
| 2016-09-28T03:32:18
|
Python
|
UTF-8
|
Python
| false
| false
| 75
|
py
|
dict = {}
dict[1]=7
dict[16]=8
print dict[1]
print str([x for x in dict])
|
[
"badri03iter@gmail.com"
] |
badri03iter@gmail.com
|
0987cf0e6a15b3310daa82085a320f0a13b58f57
|
753532ab35fc3d83b750b1b79603cc1613408523
|
/xiaoqiqi/Colorado/scrap_professors.py
|
9353b5dbac8a7ee12ad4d4e08eaca5bd4311eee6
|
[
"Unlicense"
] |
permissive
|
doge-search/webdoge
|
4e9435f2ba744201adca1bfe2288994e1f284f00
|
443e758b5c1f962d5c2fe792cdbed01e1208b1cb
|
refs/heads/master
| 2021-01-15T08:16:34.357284
| 2016-06-15T17:22:43
| 2016-06-15T17:22:43
| 54,782,220
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,127
|
py
|
#!/usr/bin/python2
#coding=utf-8
import urllib2
import HTMLParser
import sys
import xml.dom.minidom as minidom
from htmlentitydefs import entitydefs
import glob
# Python 2-only hack: reload(sys) re-exposes setdefaultencoding so the whole
# script can treat str/unicode mixing as UTF-8 by default.
reload(sys)
sys.setdefaultencoding('utf8')
class MyParser(HTMLParser.HTMLParser):
    """SAX-style parser that collects first/last names from the faculty
    listing: while inside a div carrying the first-name (or last-name)
    field class, every non-whitespace text node is recorded.
    """

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        # Flags: currently inside a first-/last-name div.
        self.hasfirstname = False
        self.haslastname = False
        # Collected name fragments, in document order.
        self.firstnamelist = []
        self.lastnamelist = []

    def handle_starttag(self, tag, attrs):
        if tag != 'div':
            return
        for key, value in attrs:
            if key != 'class':
                continue
            if value == "field field-name-field-first-name field-type-text field-label-hidden":
                self.hasfirstname = True
            if value == "field field-name-field-last-name field-type-text field-label-hidden":
                self.haslastname = True

    def handle_data(self, text):
        if text.isspace():
            return
        if self.hasfirstname:
            self.firstnamelist.append(text)
        if self.haslastname:
            self.lastnamelist.append(text)

    def handle_endtag(self, tag):
        # Any closing div (including nested ones) ends the capture state,
        # matching the original behaviour.
        if tag != 'div':
            return
        if self.hasfirstname:
            self.hasfirstname = False
        if self.haslastname:
            self.haslastname = False
# Build Colorado.xml: an <institution> root with one <professor>/<name>
# element per person scraped from the faculty page.
fout_xml = file('Colorado.xml','w')
doc = minidom.Document()
institution = doc.createElement("institution")
doc.appendChild(institution)
if True:
    rootUrl = 'http://www.colorado.edu/cs/our-people?field_person_type_tid=1'
    response = urllib2.urlopen(rootUrl)
    html = response.read()
    my = MyParser()
    my.feed(html)
    for i in range(len(my.firstnamelist)):
        professor = doc.createElement("professor")
        institution.appendChild(professor)
        name = my.firstnamelist[i] + ' ' + my.lastnamelist[i]
        namenode = doc.createElement("name")
        namenode.appendChild(doc.createTextNode(name))
        professor.appendChild(namenode)
        # NOTE(review): this second appendChild re-appends the same node;
        # with minidom that is a redundant move, not a duplicate — confirm.
        institution.appendChild(professor)
doc.writexml(fout_xml, "\t", "\t", "\n")
fout_xml.close()
|
[
"xiaoqiqi1771@126.com"
] |
xiaoqiqi1771@126.com
|
bb79fa79c446f61cdc5fdffe93a253fc32522298
|
44dabc129a8df5a2045b0ebb5569adf5496974c6
|
/tools/make_pipelines.py
|
b2f579832a24c8eba880bf0ddca2c3e13ded0c2d
|
[] |
no_license
|
cclairec/Python_pipelines
|
9f8967686a85faddad779b05f0894a899df63df4
|
ad3527f6849ddb06c9679a14d7fc81cd0798d107
|
refs/heads/master
| 2021-09-11T17:53:59.085944
| 2018-04-10T15:30:40
| 2018-04-10T15:30:40
| 112,114,045
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,837
|
py
|
#!/usr/bin/env python
"""Run the py->rst conversion and run all pipelines.
This also creates the index.rst file appropriately, makes figures, etc.
"""
from past.builtins import execfile
# -----------------------------------------------------------------------------
# Library imports
# -----------------------------------------------------------------------------
# Stdlib imports
import os
import sys
from glob import glob
# Third-party imports
# We must configure the mpl backend before making any further mpl imports
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib._pylab_helpers import Gcf
# Local tools
from toollib import *
# -----------------------------------------------------------------------------
# Globals
# -----------------------------------------------------------------------------
# reST header injected at the top of the generated pipelines index page.
pipelines_header = """
.. _pipelines:
Pipelines
========
.. note_about_pipelines
"""
# -----------------------------------------------------------------------------
# Function defintions
# -----------------------------------------------------------------------------
# These global variables let show() be called by the scripts in the usual
# manner, but when generating pipelines, we override it to write the figures to
# files with a known name (derived from the script name) plus a counter
figure_basename = None
# We must change the show command to save instead
def show():
    """Save every open figure to '<figure_basename>_NN.png' (NN is a
    1-based counter) instead of displaying it.
    """
    allfm = Gcf.get_all_fig_managers()
    for fcount, fm in enumerate(allfm):
        fm.canvas.figure.savefig('%s_%02i.png' %
                                 (figure_basename, fcount + 1))
# Keep a handle on the real pyplot.show, then install the override.
_mpl_show = plt.show
plt.show = show
# -----------------------------------------------------------------------------
# Main script
# -----------------------------------------------------------------------------
# Work in pipelines directory
cd('users/pipelines')
if not os.getcwd().endswith('users/pipelines'):
    raise OSError('This must be run from doc/pipelines directory')
# Run the conversion from .py to rst file
sh('../../../tools/ex2rst --project NiftyPipe --outdir . ../../../niftypipe/bin')
# Make the index.rst file (currently disabled — kept as a string literal).
"""
index = open('index.rst', 'w')
index.write(pipelines_header)
for name in [os.path.splitext(f)[0] for f in glob('*.rst')]:
    #Don't add the index in there to avoid sphinx errors and don't add the
    #note_about pipelines again (because it was added at the top):
    if name not in(['index','note_about_pipelines']):
        index.write(' %s\n' % name)
index.close()
"""
# Execute each python script in the directory (skipped with --no-exec);
# each script's figures land in fig/ via the overridden plt.show above.
if '--no-exec' in sys.argv:
    pass
else:
    if not os.path.isdir('fig'):
        os.mkdir('fig')
    for script in glob('*.py'):
        figure_basename = pjoin('fig', os.path.splitext(script)[0])
        execfile(script)
        plt.close('all')
|
[
"claire.cury.pro@gmail.com"
] |
claire.cury.pro@gmail.com
|
4ae589dffed2a21cc225a9161617a0c65f703372
|
c8a217ee07d4aa3ecee2c775f42dc96381911c8e
|
/uidsread.py
|
13424e06dcfe12bf22da7bd2e8b790bffb12e4ca
|
[] |
no_license
|
piotrmucha/python-bitbucket
|
337b8cc3f1e2d5c39f7cfc6cb871c0c2ad3f6066
|
3475e5624a49a04283d8eea04833f955674d04a0
|
refs/heads/main
| 2023-03-08T17:14:50.143993
| 2021-02-12T13:42:21
| 2021-02-12T13:42:21
| 337,830,825
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,467
|
py
|
#!/home/piotr/PycharmProjects/repositories-tool/venv/bin/python
"""Skrypt umozliwa wyciaganie informacji o oidach uzytkownikow danego workspace
bitbucketa.
Definiuje tez funkcje zwiazane z oidami i ich konwersja z czego korzystaja inne skrypty
"""
import argparse
import json
import os
from typing import Dict, List
import requests
from requests.auth import HTTPBasicAuth
from credentials import get_credentials_for_bitbucket, BitbucketCredentials
API_USER = "https://api.bitbucket.org/2.0/user/"
def execute_script():
    """Run the script end to end: parse the CLI arguments, load Bitbucket
    credentials, fetch the workspace members and write the two JSON files.
    """
    args = parse_cmd_arguments()
    if args.cd:
        creds = get_credentials_for_bitbucket(args.cd)
    else:
        creds = get_credentials_for_bitbucket()
    members = get_users_for_given_workspace(args.ws, creds)
    if args.directory:
        create_two_json_with_reviewers(members, args.directory)
    else:
        create_two_json_with_reviewers(members)
def parse_cmd_arguments(argv: List[str] = None) -> argparse.Namespace:
    """Parse the command-line arguments.

    :param argv: argument list to parse; ``None`` (the default) keeps the
        original behaviour of reading ``sys.argv[1:]``.  Accepting a list
        makes the function unit-testable.
    :returns: the parsed :class:`argparse.Namespace`
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--directory", type=str,
                        help="provide directory where to put downloaded credentials."
                             "For default program will put files in current working directory",
                        default=os.getcwd())
    parser.add_argument('-ws', type=str, required=True,
                        help='Provide a bitbucket workspace from '
                             'which reviewers will be retrieved.')
    parser.add_argument('-cd', type=str,
                        help='Provide credentials file for bitbucket. '
                             'For default program will look '
                             'for bitbucket-credentials file in your home directory')
    return parser.parse_args(argv)
def get_users_for_given_workspace(workspace: str,
                                  credentials: BitbucketCredentials) -> Dict[str, str]:
    """Return a uuid -> display-name mapping of the workspace members,
    with the authenticated user removed from the result.

    :param workspace: Bitbucket workspace to inspect
    :param credentials: Bitbucket credentials
    :returns: dict mapping member uuid to display name
    """
    user_endpoint = f"https://api.bitbucket.org/2.0/workspaces/{workspace}/members"
    auth = HTTPBasicAuth(credentials.username, credentials.appkey)
    response = requests.get(user_endpoint, auth=auth)
    payload = json.loads(response.content)
    members = {
        entry['user']['uuid']: entry['user']['display_name']
        for entry in payload['values']
    }
    # Drop the current user so they are never suggested as their own reviewer.
    own_uuid = get_uuid_for_current_user(credentials)
    members.pop(own_uuid, None)
    return members
def get_uuid_for_current_user(credentials: BitbucketCredentials) -> str:
    """Ask the Bitbucket API for the uuid of the authenticated user.

    :param credentials: Bitbucket credentials
    :returns: uuid of the current user
    """
    auth = HTTPBasicAuth(credentials.username, credentials.appkey)
    response = requests.get(API_USER, auth=auth)
    return json.loads(response.content)['uuid']
def map_users_to_json_array(*users_uids) -> List[Dict[str, str]]:
    """Convert uuids into the reviewer-list shape the Bitbucket API expects.

    :param users_uids: uuids passed as positional arguments
    :returns: list of ``{'uuid': ...}`` dicts, in argument order
    """
    return [{'uuid': uuid} for uuid in users_uids]
def create_two_json_with_reviewers(users_map: Dict[str, str], directory: str = None) -> None:
    """Write two JSON files describing the reviewers.

    ``reviewers.json`` holds the payload format expected by the Bitbucket API,
    while ``usersMap.json`` is informational for the user and maps each uuid
    to a display name.

    Parameters
    ----------
    users_map : Dict[str, str]
        Mapping of user uuid to display name.
    directory : str
        Directory the files are written to. Defaults to the current working
        directory, resolved at call time.
    """
    # Bug fix: the previous default ``directory=os.getcwd()`` was evaluated
    # once at import time, so a later os.chdir() was silently ignored.
    if directory is None:
        directory = os.getcwd()
    with open(os.path.join(directory, 'usersMap.json'), 'w') as map_file:
        json.dump(users_map, map_file, ensure_ascii=False)
    with open(os.path.join(directory, 'reviewers.json'), 'w') as reviewers_file:
        json.dump(map_users_to_json_array(*users_map), reviewers_file, ensure_ascii=False)
def get_json_array_from_file(filename: str) -> List[Dict[str, str]]:
    """Load a JSON file and return its parsed content.

    Parameters
    ----------
    filename : str
        Path of the file to read.

    Returns
    -------
    List[Dict[str, str]]
        Parsed JSON content of the file.
    """
    with open(filename) as json_file:
        return json.load(json_file)
# Entry point: run the reviewer-fetching workflow only when executed as a
# script, so the module can also be imported without side effects.
if __name__ == '__main__':
    execute_script()
|
[
"piotrmucha1997@gmail.com"
] |
piotrmucha1997@gmail.com
|
44f603d9061216ca79c1e447c35cbcde6dd43c92
|
c15c602be6825cc71396a6470b81c06b4c1faf32
|
/olympia/aggregator_datetime.py
|
c6862918817555c5ee505e7d82a3de92c1841b58
|
[] |
no_license
|
hirogwa/olympia
|
0e013b86d3f744f6ddd6b280907c2bf2f3668223
|
3ac3f31a638f5139c845a85e0432a3eeb830d8a5
|
refs/heads/master
| 2021-01-17T18:35:16.955806
| 2016-07-26T11:41:12
| 2016-07-26T11:41:12
| 63,159,297
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,843
|
py
|
from olympia import app, models
def aggregate(bucket):
    """Convert raw log entries for ``bucket`` into datetime entries.

    Processes the time window [lower, upper) that has not been aggregated
    yet, stores one datetime entry per qualifying raw entry, records the
    aggregation run itself, and commits everything in one transaction.
    """
    time_lower = _get_time_lower_limit(bucket)
    time_upper = _get_time_upper_limit(bucket)
    converted = 0
    for _raw, datetime_entry in get_log_datetime_entries(bucket, time_lower, time_upper):
        models.db.session.add(datetime_entry)
        converted += 1
    # Record the run so the next aggregation knows where to resume.
    result = models.AggregationLogRawToDatetime(
        bucket, time_lower, time_upper, converted, converted)
    models.db.session.add(result)
    models.db.session.commit()
    app.logger.info(
        '{} raw entries converted to datetime entries for bucket {}. Time range:[{}, {})'.
        format(converted, bucket, time_lower, time_upper))
    return result
def get_log_datetime_entries(bucket, time_lower, time_upper):
    """Return (raw, datetime) pairs for raw log entries worth counting.

    Filters out known bot/tool user agents, keeps only successful (2xx/3xx)
    GET requests, and converts each surviving raw entry into a datetime
    entry. All three filter arguments are optional; ``time_lower`` is
    inclusive, ``time_upper`` exclusive.
    """
    q = models.LogEntryRaw.query
    if bucket:
        q = q.filter(
            models.LogEntryRaw.bucket == bucket)
    if time_lower:
        # inclusive lower bound
        q = q.filter(
            models.LogEntryRaw.time >= time_lower)
    if time_upper:
        # exclusive upper bound
        q = q.filter(
            models.LogEntryRaw.time < time_upper)
    # Exclude non-human user agents so they do not inflate the counts, then
    # keep only successful GET requests (REST or static website endpoint).
    q = q. \
        filter(
            models.LogEntryRaw.user_agent.notlike('"aws-sdk-java%'),
            models.LogEntryRaw.user_agent.notlike('"aws-internal%'),
            models.LogEntryRaw.user_agent.notlike('"facebookexternalhit%'),
            models.LogEntryRaw.user_agent.notlike('"Boto%'),
            models.LogEntryRaw.user_agent.notlike('"FeedValidator%'),
            models.LogEntryRaw.user_agent.notlike('"S3Console%')). \
        filter(
            models.LogEntryRaw.operation.like('REST.GET%') |
            models.LogEntryRaw.operation.like('WEBSITE.GET%')). \
        filter(
            models.LogEntryRaw.http_status.like('2%') |
            models.LogEntryRaw.http_status.like('3%')). \
        order_by(
            models.LogEntryRaw.bucket.asc(),
            models.LogEntryRaw.key.asc(),
            models.LogEntryRaw.remote_ip.asc(),
            models.LogEntryRaw.user_agent.asc(),
            models.LogEntryRaw.time.asc()). \
        all()
    # Pair each raw entry with its converted datetime counterpart.
    return [(x, _raw_to_datetime(x)) for x in q]
def _get_time_lower_limit(bucket):
    """Return the inclusive lower time bound for the next aggregation run.

    That is the upper bound recorded by the most recent aggregation of this
    bucket, or None if the bucket has never been aggregated.
    """
    latest_run = models.AggregationLogRawToDatetime.query. \
        filter(models.AggregationLogRawToDatetime.bucket == bucket). \
        order_by(models.AggregationLogRawToDatetime.id.desc()). \
        first()
    if latest_run is None:
        return None
    return latest_run.time_upper
def _get_time_upper_limit(bucket):
    """Return the timestamp of the newest raw entry for ``bucket``.

    Used as the (exclusive) upper query bound; None when the bucket has no
    raw entries at all.
    """
    newest = models.LogEntryRaw.query. \
        filter(models.LogEntryRaw.bucket == bucket). \
        order_by(models.LogEntryRaw.time.desc()). \
        first()
    return newest.time if newest else None
def _raw_to_datetime(raw):
    """Build a LogDatetime entry from a raw log entry.

    The hour bucket is derived from the entry's timestamp as 'YYYYMMDDHH'.
    """
    hour_bucket = raw.time.strftime('%Y%m%d%H')
    return models.LogDatetime(
        raw.bucket, raw.key, hour_bucket, raw.time, raw.remote_ip, raw.user_agent)
|
[
"hirogwa@gmail.com"
] |
hirogwa@gmail.com
|
b28afbe85ec73d6aadce2b5b0c7e7d175fc8af29
|
6aa41a50a6cd7925942c79898aaf3423fe882510
|
/backend/pym_songbook_21672/settings.py
|
196ee5fc41b1fef33922303493fe684564eea3e6
|
[] |
no_license
|
crowdbotics-apps/pym-songbook-21672
|
e915b9b7c9135308ecf81c5a131747e8b25e69fa
|
bc1f44de215cbe5c6281c2a201f982aac4691317
|
refs/heads/master
| 2022-12-30T17:14:52.087405
| 2020-10-18T16:06:43
| 2020-10-18T16:06:43
| 305,141,317
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,104
|
py
|
"""
Django settings for pym_songbook_21672 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
# All deployment-specific configuration is read from environment variables.
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
# Trust the reverse proxy's X-Forwarded-Proto header when detecting HTTPS.
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites'
]
LOCAL_APPS = [
    'home',
    'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'rest_auth.registration',
    'bootstrap4',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.google',
    'django_extensions',
    'drf_yasg',
    # start fcm_django push notifications
    'fcm_django',
    # end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pym_songbook_21672.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'pym_songbook_21672.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# A DATABASE_URL environment variable overrides the sqlite default above.
if env.str("DATABASE_URL", default=None):
    DATABASES = {
        'default': env.db()
    }
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# WhiteNoise serves the collected static files from the application itself.
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users: email-based login, username optional.
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
    # Replace password reset serializer to fix 500 error
    "PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
    # Use custom serializer that has no username and matches web signup
    "REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
# Outgoing mail goes through SendGrid's SMTP relay (STARTTLS on port 587).
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
    "FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
    "DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
# Without working SendGrid credentials (or when DEBUG is on), emails are
# printed to the console instead of being sent.
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
    # output email to console instead of sending
    if not DEBUG:
        logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
    EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
28dbb21fbc42e6122026d12333f8d7ec04993648
|
f2a9a46a828195d07bae3b87642f18a4f9547e47
|
/external_auth/views.py
|
e90a39f096d2cb2d2bffff3feac22d536951e9e0
|
[] |
no_license
|
saadyousafarbi/auto-shop
|
b867c2fad27fb431d7d16ac712a21fbf430a8b7d
|
1b184cfdc3819b528a1cfc56c28a04142b90d269
|
refs/heads/master
| 2022-11-29T08:53:02.968878
| 2022-10-28T13:24:05
| 2022-10-28T13:24:05
| 98,877,865
| 0
| 0
| null | 2022-11-22T10:50:44
| 2017-07-31T10:38:15
|
Python
|
UTF-8
|
Python
| false
| false
| 2,858
|
py
|
from __future__ import unicode_literals
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User
from django.contrib import messages
from external_auth.forms import PakwheelsLoginForm
from external_auth.pakwheels_profile import PakwheelsProfile
from external_auth.pakwheels_scraper import PakwheelsScraper
from django.shortcuts import render
from django.views.decorators.http import require_http_methods
@require_http_methods(['GET', 'POST'])
def pakwheels_registration(request):
    """Register/log in a local user backed by their Pakwheels account.

    GET renders the Pakwheels login form. POST checks the submitted
    credentials by scraping the Pakwheels profile; on success, a local user
    with the same credentials is created on first login and then logged in.
    """
    if request.method == 'GET':
        pakwheels_login_form = PakwheelsLoginForm()
        context = {'form': pakwheels_login_form}
        return render(request, 'pakwheels_register.html', context)
    # POST: validate the supplied Pakwheels credentials via the scraper.
    pakwheels_login_form = PakwheelsLoginForm(request.POST)
    email = pakwheels_login_form.data['email']
    password = pakwheels_login_form.data['password']
    pakwheels_scraper = PakwheelsScraper(
        email,
        password,
    )
    # NOTE(review): presumably returns a profile dict on success and a falsy
    # value on bad credentials — confirm against PakwheelsScraper.
    pakwheels_profile_response = pakwheels_scraper.login_and_retrieve_profile()
    if pakwheels_profile_response:
        pakwheels_profile = PakwheelsProfile(
            first_name=pakwheels_profile_response['first_name'],
            last_name=pakwheels_profile_response['last_name'],
            gender=pakwheels_profile_response['user_gender'],
            user_name=pakwheels_profile_response['user_name'],
            password=password,
            email=pakwheels_profile_response['user_email'],
            birthday=pakwheels_profile_response['user_birthday'],
            city=pakwheels_profile_response['user_city'],
            country=pakwheels_profile_response['user_country'],
        )
        user, is_created = User.objects.get_or_create(
            username=pakwheels_profile.user_name
        )
        if is_created:
            # First Pakwheels login: mirror the scraped profile onto the new
            # local user and store the same password locally.
            user.first_name = pakwheels_profile.first_name
            user.last_name = pakwheels_profile.last_name
            user.email = pakwheels_profile.email
            user.set_password(pakwheels_profile.password)
            user.profile.gender = pakwheels_profile.gender
            user.profile.date_of_birth = pakwheels_profile.birthday
            user.profile.city = pakwheels_profile.city
            user.profile.country = pakwheels_profile.country
            user.is_active = True
            user.save()
        # NOTE(review): an already-existing local user is logged in here
        # without checking the local password — the Pakwheels scrape is the
        # sole authentication step. Confirm this is intended.
        login(request, user)
        messages.success(
            request,
            'Pakwheels login successful.'
            ' We have registered you on our website with same credentials.'
            ' You can change your password.',
        )
        return render(request, 'home.html')
    else:
        messages.error(request, 'The Pakwheels credentials are incorrect. Please try again.')
        return render(request, 'pakwheels_register.html', context={'form': pakwheels_login_form})
|
[
"saad.yousaf@arbisoft.com"
] |
saad.yousaf@arbisoft.com
|
99bf653ec7358c91983c683d8921f0a026cbf29f
|
dd69490776994d388fc3ee26781337cea37cf77f
|
/misc/preprocess_flowers.py
|
476ff4dccc1255a670ebe90db8e5b6cb1d43efa1
|
[] |
no_license
|
Shubham-kale/Project
|
a319dbc14c1b093844c4e5391fc8aefeadaf4576
|
8aee313536fbb21fcfee3b3144cb36d3d1377b8e
|
refs/heads/master
| 2021-04-09T16:04:09.049071
| 2020-10-10T06:50:40
| 2020-10-10T06:50:40
| 125,811,549
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,216
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# import tensorflow as tf
import numpy as np
import os
import pickle
from Project.misc.utils import get_image
import scipy.misc
# from glob import glob
# TODO: 1. current label is temporary, need to change according to real label
# 2. Current, only split the data into train, need to handel train, test
# Downscale factor between high- and low-resolution images.
# NOTE(review): the name looks like a typo for LR_HR_RATIO, but it is part of
# the module's public surface, so it is kept as-is.
LR_HR_RETIO = 4
# Target image size; images are loaded slightly larger (76/64 oversampling,
# i.e. 304 px for IMSIZE=256) — presumably for later cropping, TODO confirm.
IMSIZE = 256
LOAD_SIZE = int(IMSIZE * 76 / 64)
# Root directory of the flowers dataset.
FLOWER_DIR = 'Data/flowers'
def load_filenames(data_dir):
    """Unpickle and return the image filename list stored under ``data_dir``.

    ``data_dir`` must already include its trailing separator, because the
    pickle name is appended by plain string concatenation.
    """
    filepath = data_dir + 'filenames.pickle'
    with open(filepath, 'rb') as pickle_file:
        filenames = pickle.load(pickle_file)
    print('Load filenames from: %s (%d)' % (filepath, len(filenames)))
    return filenames
def save_data_list(inpath, outpath, filenames):
    """Pickle high- and low-resolution copies of the listed images.

    Each image is loaded at LOAD_SIZE, kept as the high-resolution copy, and
    bicubic-downscaled by LR_HR_RETIO for the low-resolution copy; both lists
    are pickled to files under ``outpath``.

    NOTE(review): scipy.misc.imresize was deprecated in SciPy 1.1 and removed
    in SciPy 1.3, so this function requires an old SciPy (plus Pillow).
    """
    hr_images = []
    lr_images = []
    lr_size = int(LOAD_SIZE / LR_HR_RETIO)
    cnt = 0
    for key in filenames:
        f_name = '%s/%s.jpg' % (inpath, key)
        # get_image is a project helper; presumably returns an image array at
        # LOAD_SIZE x LOAD_SIZE when is_crop=False — TODO confirm.
        img = get_image(f_name, LOAD_SIZE, is_crop=False)
        img = img.astype('uint8')
        hr_images.append(img)
        lr_img = scipy.misc.imresize(img, [lr_size, lr_size], 'bicubic')
        lr_images.append(lr_img)
        cnt += 1
        if cnt % 100 == 0:
            # Progress indicator for long dataset conversions.
            print('Load %d......' % cnt)
    #
    print('images', len(hr_images), hr_images[0].shape, lr_images[0].shape)
    #
    outfile = outpath + str(LOAD_SIZE) + 'images.pickle'
    with open(outfile, 'wb') as f_out:
        pickle.dump(hr_images, f_out)
        print('save to: ', outfile)
    #
    outfile = outpath + str(lr_size) + 'images.pickle'
    with open(outfile, 'wb') as f_out:
        pickle.dump(lr_images, f_out)
        print('save to: ', outfile)
def convert_flowers_dataset_pickle(inpath):
    """Pickle high/low-resolution image lists for the train and test splits."""
    # Process the train split first, then the test split, exactly as before.
    for split in ('train/', 'test/'):
        split_dir = os.path.join(inpath, split)
        filenames = load_filenames(split_dir)
        save_data_list(inpath, split_dir, filenames)
# Entry point: build the dataset pickles when run as a script.
if __name__ == '__main__':
    convert_flowers_dataset_pickle(FLOWER_DIR)
|
[
"kaleshubham027@gmail.com"
] |
kaleshubham027@gmail.com
|
d803ac6c1436e97221b3ea0ad4c91f33ca8c1fda
|
24a291e5eb298b7c2b4f1105d789ac488457b59c
|
/Python_Pandas_Basics/Pandas04_17_headTailEx01_김민교.py
|
ddf0119db9b84cf3db09e1d09bc04d7a10133164
|
[] |
no_license
|
gmrdns03/Python-Introductory-Course_Minkyo
|
da3afff502ed44f178d5b3885fbb1b01249ad1de
|
ef0d4e16aee3dba6a4a10c422ef68b1465745833
|
refs/heads/main
| 2023-05-29T16:08:31.814542
| 2021-06-23T13:32:14
| 2021-06-23T13:32:14
| 379,300,979
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
# coding: utf-8
# Jupyter-notebook export: demo of pandas head()/tail() and slicing on the
# gapminder dataset.

# In[1]:
import pandas as pd

# In[3]:
# Bug fix: `sep` must be passed by keyword — positional use of read_csv
# arguments other than the path was deprecated in pandas 1.4 and removed in
# pandas 2.0, where the old call raises TypeError.
df = pd.read_csv('./../data/gapminder.tsv', sep='\t')
df

# In[5]:
print(df.shape)
print(df.shape[0])
print(df.shape[1])

# In[6]:
len(df)

# In[7]:
df.head()

# In[8]:
# Equivalent to df.head(): first five rows by positional slice.
df[0:5]

# In[9]:
df.head(n=7)

# In[10]:
df.tail()

# In[12]:
# Equivalent to df.tail(): last five rows by positional slice.
df[len(df)-5 : len(df)+1]

# In[13]:
df.tail(n=7)
|
[
"noreply@github.com"
] |
gmrdns03.noreply@github.com
|
ecc3baa7af5f4d50e69e8da9c651f7c876afec48
|
b5e892e04518b3ea5382fb7572c93146491632ad
|
/pegparse/pegparse.py
|
dc59f7c9d6a500f20a3a3bf2e55f8eb316e9e878
|
[] |
no_license
|
justinnhli/pegparse
|
b366afde10859bcda99c263fde0e5401e6886c01
|
3361f5c43c979d41187cf2376989502ec444351c
|
refs/heads/master
| 2023-08-21T18:54:50.921653
| 2023-08-09T16:59:26
| 2023-08-09T16:59:26
| 3,443,906
| 0
| 1
| null | 2018-08-12T03:07:30
| 2012-02-14T21:01:30
|
Python
|
UTF-8
|
Python
| false
| false
| 38,475
|
py
|
#!/usr/bin/env python3
"""A Pack Rat Parsing Expression Grammer parser."""
# pylint: disable = too-many-lines
import re
from argparse import ArgumentParser
from collections import namedtuple
from fileinput import input as fileinput
from pathlib import Path
from textwrap import indent
from typing import Any, Optional, Union, Literal, Tuple, List, Dict
# Type aliases describing the tuple-based grammar representation used below.
PEGAtom = str
PEGOperator = Literal['CHOICE', 'SEQUENCE', 'ZERO_OR_MORE', 'ZERO_OR_ONE', 'ONE_OR_MORE', 'AND', 'NOT'] # pylint: disable = unsubscriptable-object
PEGExpression = Tuple[Any, ...] # should be Union[Tuple[PEGOperator, PEGExpression, ...], PEGAtom]
PEGRules = Dict[str, PEGExpression]
# Self-describing PEG grammar file shipped next to this module.
PEG_GRAMMAR = Path(__file__).parent / 'peg.peg'
# Hand-written bootstrap definitions of the PEG meta-grammar: each entry maps
# a rule name to a ('CHOICE', ('SEQUENCE', ...)) expression tree, so the
# parser can parse .peg files without first needing a parsed grammar.
# pylint: disable=line-too-long
PEG_DEFS = {
    'syntax': ('CHOICE', ('SEQUENCE', 'opt_space', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', 'definition', 'opt_space'))))),
    'definition': ('CHOICE', ('SEQUENCE', 'identifier', 'opt_space', '"="', 'opt_space', 'expression', 'opt_space', '";"')),
    'expression': ('CHOICE', ('SEQUENCE', 'choice')),
    'choice': ('CHOICE', ('SEQUENCE', ('ZERO_OR_ONE', ('CHOICE', ('SEQUENCE', '"|"', 'opt_space'))), 'sequence', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', 'opt_space', '"|"', 'opt_space', 'sequence'))))),
    'sequence': ('CHOICE', ('SEQUENCE', 'item', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', 'req_space', 'item'))))),
    'item': ('CHOICE', ('SEQUENCE', 'zero_or_more'), ('SEQUENCE', 'zero_or_one'), ('SEQUENCE', 'one_or_more'), ('SEQUENCE', 'and_predicate'), ('SEQUENCE', 'not_predicate'), ('SEQUENCE', 'term')),
    'zero_or_more': ('CHOICE', ('SEQUENCE', 'term', 'opt_space', '"*"')),
    'zero_or_one': ('CHOICE', ('SEQUENCE', 'term', 'opt_space', '"?"')),
    'one_or_more': ('CHOICE', ('SEQUENCE', 'term', 'opt_space', '"+"')),
    'and_predicate': ('CHOICE', ('SEQUENCE', '"&"', 'opt_space', 'term')),
    'not_predicate': ('CHOICE', ('SEQUENCE', '"!"', 'opt_space', 'term')),
    'term': ('CHOICE', ('SEQUENCE', 'paren'), ('SEQUENCE', 'atom')),
    'paren': ('CHOICE', ('SEQUENCE', '"("', 'opt_space', 'expression', 'opt_space', '")"')),
    'atom': ('CHOICE', ('SEQUENCE', 'identifier'), ('SEQUENCE', 'builtin'), ('SEQUENCE', 'literal')),
    'identifier': ('CHOICE', ('SEQUENCE', ('ONE_OR_MORE', ('CHOICE', ('SEQUENCE', 'LOWER'))), ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', '"_"', ('ONE_OR_MORE', ('CHOICE', ('SEQUENCE', 'LOWER')))))))),
    'builtin': ('CHOICE', ('SEQUENCE', ('ONE_OR_MORE', ('CHOICE', ('SEQUENCE', 'UPPER'))))),
    'literal': ('CHOICE', ('SEQUENCE', 'd_string'), ('SEQUENCE', 's_string')),
    'd_string': ('CHOICE', ('SEQUENCE', '\'"\'', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', ('NOT', ('CHOICE', ('SEQUENCE', '\'"\''))), 'PRINT'))), '\'"\'')),
    's_string': ('CHOICE', ('SEQUENCE', '"\'"', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', ('NOT', ('CHOICE', ('SEQUENCE', '"\'"'))), 'PRINT'))), '"\'"')),
    'opt_space': ('CHOICE', ('SEQUENCE', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', 'space'))))),
    'req_space': ('CHOICE', ('SEQUENCE', ('ONE_OR_MORE', ('CHOICE', ('SEQUENCE', 'space'))))),
    'space': ('CHOICE', ('SEQUENCE', '"#"', ('ZERO_OR_MORE', ('CHOICE', ('SEQUENCE', 'PRINT'))), 'NEWLINE'), ('SEQUENCE', 'BLANK'), ('SEQUENCE', 'NEWLINE')),
}
def create_parser_from_file(filepath, debug=False):
    # type: (Path, bool) -> PEGParser
    """Create a parser from a PEG grammar file.

    Parameters:
        filepath (str): Path to the PEG grammar file.
        debug (bool): Print debugging information. Defaults to False.

    Returns:
        PEGParser: A parser for the grammar.
    """
    with open(filepath) as grammar_file:
        grammar = grammar_file.read()
    return create_parser(grammar, debug=debug)
def create_parser(peg, debug=False):
    # type: (str, bool) -> PEGParser
    """Create a parser from a PEG grammar string.

    Parameters:
        peg (str): The PEG grammar text.
        debug (bool): Print debugging information. Defaults to False.

    Returns:
        PEGParser: A parser for the grammar.
    """
    grammar_rules = PEGWalker().parse(peg)
    return PEGParser(grammar_rules, debug=debug)
def one_line_format(string):
    # type: (str) -> str
    """Escape tabs and newlines in a string.

    Parameters:
        string (str): The string to escape.

    Returns:
        str: The string with each tab replaced by '\\t' and each newline by
        '\\n' (backslash followed by the letter).
    """
    # The search patterns are single literal characters, so str.replace is
    # clearer and cheaper than re.sub; the output is identical.
    return string.replace('\t', '\\t').replace('\n', '\\n')
def index_to_line_col(string, index):
    # type: (str, int) -> Tuple[int, int]
    """Convert an index in a string to a (line, column) pair.

    Lines are numbered from 1; the column is the offset from the most recent
    newline (so the first character of a line after a newline is column 1,
    but positions on the very first line are 0-based).

    Parameters:
        string (str): The string.
        index (int): The index into the string.

    Returns:
        Tuple[int, int]: Line number and column of the index.
    """
    line = 1 + string.count('\n', 0, index)
    last_newline = string.rfind('\n', 0, index)
    column = index if last_newline == -1 else index - last_newline
    return line, column
# One recorded parse attempt: depth in the call tree, the term attempted, and
# the string position the attempt started from.
TraceItem = namedtuple('TraceItem', 'depth, term, position')
class ASTNode:
    """Abstract Syntax Tree (AST) node produced by PEGParser."""

    def __init__(self, term, children, filepath, string, start_pos, end_pos):
        # type: (str, List[ASTNode], Optional[Path], str, int, int) -> None
        """Initialize the ASTNode.

        The matched text is ``string[start_pos:end_pos]``, following the
        half-open convention of range().

        Parameters:
            term (str): The grammar term this node matches.
            children (list[ASTNode]): Child nodes of this node.
            filepath (Optional[Path]): The file being parsed, if any.
            string (str): The complete string being parsed.
            start_pos (int): Index of the first matched character.
            end_pos (int): Index one past the last matched character.
        """
        self.term = term
        self.children = children
        self.filepath = filepath
        self.string = string
        self.start_pos = start_pos
        self.end_pos = end_pos

    @property
    def match(self):
        # type: () -> str
        """The substring matched by this node."""
        return self.string[self.start_pos:self.end_pos]

    @property
    def line_num(self):
        # type: () -> int
        """Line number (1-based) at which the match starts."""
        line, _ = index_to_line_col(self.string, self.start_pos)
        return line

    @property
    def column(self):
        # type: () -> int
        """Column at which the match starts."""
        _, col = index_to_line_col(self.string, self.start_pos)
        return col

    def first_descendant(self, path='*'):
        # type: (str) -> ASTNode
        """Return the first descendant matching ``path``, or None.

        ``path`` is a '/'-separated sequence of terms, one per generation;
        '*' selects the first child regardless of term. See descendants()
        for the full path semantics.
        """
        node = self
        for step in path.split('/'):
            if step == '*':
                node = node.children[0]
                continue
            matching = [kid for kid in node.children if kid.term == step]
            if not matching:
                return None
            node = matching[0]
        return node

    def descendants(self, path='*'):
        # type: (str) -> Tuple[ASTNode, ...]
        """Return every descendant ASTNode matching ``path``.

        ``path`` names the term expected at each generation, separated by
        '/'; a '*' step keeps all children regardless of term. For example,
        with a grammar containing Expression/Operand/ParenExpression rules,
        'Operand/ParenExpression/Expression/Operand' descends four levels,
        filtering by term at each step, while 'Operand/*' returns the
        children (of any term) of each matching Operand node.
        """
        generation = [self]  # type: List[ASTNode]
        for step in path.split('/'):
            candidates = []  # all children of the current generation
            for node in generation:
                candidates.extend(node.children)
            if step == '*':
                generation = candidates
            else:
                generation = [kid for kid in candidates if kid.term == step]
        return tuple(generation)

    def pretty_print(self, indent_level=0):
        # type: (int) -> None
        """Print this subtree, one node per line, indented by depth."""
        prefix = indent_level * 4 * ' '
        print('{}{}: {}'.format(prefix, self.term, one_line_format(self.match)))
        for kid in self.children:
            kid.pretty_print(indent_level + 1)
class PEGParser:
    """Parser for Parsing Expression Grammars (PEGs).

    A fairly standard packrat parser. The core definitions are stored as
    constants, while the custom definitions are provided to the constructor.
    """

    # Built-in terminals; each value is a regular expression matching a
    # single character (EMPTY matches the empty string). Grammars refer to
    # these by their upper-case names.
    CORE_DEFS = {
        'EMPTY': r'',
        'BLANK': r'[ \t]',
        'DIGIT': r'[0-9]',
        'UPPER': r'[A-Z]',
        'LOWER': r'[a-z]',
        'ALPHA': r'[A-Za-z]',
        'ALNUM': r'[0-9A-Za-z]',
        'PUNCT': r"[-!\"#$%&'()*+,./:;<=>?@[\\\]^_`{|}~]",
        'PRINT': r'[ -~]',
        'UNICODE': r'[^\x00-\x7F]',
        'NEWLINE': r'\n',
        'TAB': r'\t',
    }
def __init__(self, syntax, debug=False):
# type: (PEGRules, bool) -> None
"""Initialize the Parser.
Parameters:
syntax (dict[str]): Dictionary of term definitions. This is usually
produced by an PEGWalker instance.
debug (bool): Whether to print parsing information.
Defaults to False.
"""
self.custom_defs = syntax
self.debug = debug
self.filepath = None
self.cache = {} # type: Dict[Tuple[str, int], ASTNode]
self.depth = 0
self.trace = [] # type: List[TraceItem]
self.max_trace_index = 0
def parse_file(self, filepath, term):
# type: (Path, str) -> ASTNode
"""Parse the contents of a file as a given term.
Parameters:
filepath (str): The path to the file.
term (str): The term to parse the string as.
Returns:
ASTNode: The root node of the AST.
"""
with open(filepath) as fd:
return self.parse(fd.read(), term, filepath)
def parse(self, string, term, filepath=None):
# type: (str, str, Optional[Path]) -> ASTNode
"""Parse a string as a given term.
Parameters:
string (str): The string to parse.
term (str): The term to parse the string as.
filepath (Optional[Path]): The file being parsed.
Returns:
ASTNode: The root node of the AST.
"""
ast = self.parse_partial(string, term, filepath)
if not ast:
self._fail_parse(string, 0)
elif ast.end_pos != len(string):
self._fail_parse(string, ast.end_pos)
return ast
def parse_partial(self, string, term, filepath=None):
# type: (str, str, Optional[Path]) -> Optional[ASTNode]
"""Parse a string as a given term.
Parameters:
string (str): The string to parse.
term (str): The term to parse the string as.
filepath (Optional[Path]): The file being parsed.
Returns:
ASTNode: The root node of the AST.
int: The number of characters parsed
"""
self.filepath = filepath
self.cache = {}
self.depth = 0
self.trace = []
self.max_trace_index = 0
ast = self._match(string, term, 0)
if ast:
return ast
else:
return None
    def _add_trace(self, term, position):
        # type: (str, int) -> None
        """Log the parse attempt, pruning the trace for error messages.

        Keeps the trace small by discarding earlier attempts at the same or
        greater depth that either start at the same position or never got as
        far as the best attempt so far, so a failure report only shows the
        deepest relevant path.

        Parameters:
            term (str): The term to parse as.
            position (int): The position from which to parse.
        """
        trace_item = TraceItem(self.depth, term, position)
        self.trace.append(trace_item)
        if len(self.trace) == 1:
            return
        # Position reached by the most advanced attempt recorded so far.
        max_position = self.trace[self.max_trace_index].position
        index = len(self.trace) - 2
        # Walk backwards, deleting superseded entries in place.
        while (
            index >= 0
            and trace_item.depth <= self.trace[index].depth
            and (
                trace_item.position == self.trace[index].position
                or self.trace[index].position < max_position
            )
        ):
            del self.trace[index]
            index -= 1
        if trace_item.position >= max_position:
            # This attempt is the new furthest point; remember where it is.
            self.max_trace_index = len(self.trace) - 1
    def _fail_parse(self, string, parsed):
        # type: (str, int) -> None
        """Fail a parse by raising SyntaxError with a trace.

        Parameters:
            string (str): The string being parsed.
            parsed (int): The number of characters successfully parsed.

        Raises:
            SyntaxError: Always; the message lists each recorded attempt up
                to the furthest position reached, showing the offending line
                with a caret under the failing column.
        """
        trace = []
        for _, term, position in self.trace[:self.max_trace_index]:
            line, col = index_to_line_col(string, position)
            trace.append('\n'.join([
                'failed to match {} at line {} column {} (position {})'.format(
                    term, line, col, position
                ),
                # The offending line (tabs flattened so the caret lines up).
                ' ' + string.splitlines()[line - 1].replace('\t', ' '),
                ' ' + (col - 1) * '-' + '^',
            ]))
        raise SyntaxError(
            'only parsed {} of {} characters:\n'.format(parsed, len(string))
            + indent('\n'.join(trace), ' ')
        )
    def _match(self, string, term, position=0):
        # type: (str, Union[PEGExpression, str], int) -> Optional[ASTNode]
        """Dispatch the parsing to specialized functions.

        Tuples are meta-terms such as ('CHOICE', ...), dispatched to the
        corresponding _match_* method by operator name. Strings are resolved
        in order: packrat cache, custom grammar definitions, core (built-in)
        definitions, and finally quoted literals.

        Parameters:
            string (str): The string to parse.
            term: The term (str) or meta-term (tuple) to parse as.
            position (int): The position which with to start the parse.
                Defaults to 0.

        Returns:
            Optional[ASTNode]: The resulting AST node, or None on failure.

        Raises:
            NameError: If the terminal is unknown.
        """
        if isinstance(term, tuple) and hasattr(self, '_match_{}'.format(term[0].lower())):
            return getattr(self, '_match_{}'.format(term[0].lower()))(string, term, position)
        elif isinstance(term, str):
            ast = self._get_cached(term, position)
            if ast:
                return ast
            elif term in self.custom_defs:
                return self._match_custom(string, term, position)
            elif term in PEGParser.CORE_DEFS:
                return self._match_core(string, term, position)
            elif re.match(r"^'[^']*'$", term) or re.match(r'^"[^"]*"$', term):
                # Single- or double-quoted literal string.
                return self._match_literal(string, term, position)
            else:
                raise NameError('Unknown terminal {}'.format(term))
        # A tuple whose operator has no corresponding _match_* handler.
        raise NameError('Unknown meta-terminal {}'.format(term))
def _match_choice(self, string, terms, position):
# type: (str, PEGExpression, int) -> Optional[ASTNode]
"""Parse the disjunction of/any of multiple terms.
Parameters:
string (str): The string to parse.
terms (list[str]): The terms to attempt to parse as.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
for term in terms[1:]:
ast = self._match(string, term, position)
if ast:
return ast
return None
def _match_sequence(self, string, terms, position):
# type: (str, PEGExpression, int) -> Optional[ASTNode]
"""Parse the concatenation of multiple terms.
Parameters:
string (str): The string to parse.
terms (list[str]): The terms that are concatenated.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
children = []
pos = position
for term in terms[1:]:
child_ast = self._match(string, term, pos)
if child_ast:
if isinstance(term, tuple):
children.extend(child_ast.children)
else:
children.append(child_ast)
pos = child_ast.end_pos
else:
return None
return ASTNode('SEQUENCE', children, self.filepath, string, position, pos)
def _match_zero_or_more(self, string, terms, position):
# type: (str, PEGExpression, int) -> Optional[ASTNode]
"""Parse zero-or-more of a term (the * operator).
Parameters:
string (str): The string to parse.
terms (list[str]): The terms to repeat, as a syntax definition.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
terms = terms[1]
last_pos = position
children = []
ast = self._match(string, terms, last_pos)
while ast and ast.match:
last_pos = ast.end_pos
children.extend(ast.children)
ast = self._match(string, terms, last_pos)
return ASTNode('ZERO_OR_MORE', children, self.filepath, string, position, last_pos)
def _match_zero_or_one(self, string, terms, position):
# type: (str, PEGExpression, int) -> Optional[ASTNode]
"""Parse zero-or-one of a term (the ? operator).
Parameters:
string (str): The string to parse.
terms (list[str]): The terms to repeat, as a syntax definition.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
terms = terms[1]
ast = self._match(string, terms, position)
if ast:
return ast
return self._match(string, 'EMPTY', position)
def _match_one_or_more(self, string, terms, position):
# type: (str, PEGExpression, int) -> Optional[ASTNode]
"""Parse one-or-more of a term (the + operator).
Parameters:
string (str): The string to parse.
terms (list[str]): The terms to repeat, as a syntax definition.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
terms = terms[1]
ast = self._match(string, terms, position)
if not ast:
return None
last_pos = ast.end_pos
children = ast.children
ast = self._match(string, terms, last_pos)
while ast and ast.match:
last_pos = ast.end_pos
children.extend(ast.children)
ast = self._match(string, terms, last_pos)
return ASTNode('ONE_OR_MORE', children, self.filepath, string, position, last_pos)
    def _match_and(self, string, terms, position):
        # type: (str, PEGExpression, int) -> Optional[ASTNode]
        """Parse an and-predicate (&term): positive lookahead.

        Succeeds without consuming input only if the inner term matches at
        this position; the inner match itself is discarded.

        Parameters:
            string (str): The string to parse.
            terms (PEGExpression): ('AND', term); index 1 is the term that
                must match for the predicate to succeed.
            position (int): The position at which to start the parse.

        Returns:
            Optional[ASTNode]: An EMPTY node at ``position`` if the
                lookahead succeeds, otherwise None.
        """
        ast = self._match(string, terms[1], position)
        if not ast:
            return None
        # Consume nothing: report success via an EMPTY match at position.
        return self._match(string, 'EMPTY', position)
    def _match_not(self, string, terms, position):
        # type: (str, PEGExpression, int) -> Optional[ASTNode]
        """Parse a not-predicate (!term): negative lookahead.

        Succeeds without consuming input only if the inner term does NOT
        match at this position.

        Parameters:
            string (str): The string to parse.
            terms (PEGExpression): ('NOT', term); index 1 is the term that
                must fail for the predicate to succeed.
            position (int): The position at which to start the parse.

        Returns:
            Optional[ASTNode]: An EMPTY node at ``position`` if the inner
                term fails, otherwise None.
        """
        ast = self._match(string, terms[1], position)
        if ast:
            return None
        # Consume nothing: report success via an EMPTY match at position.
        return self._match(string, 'EMPTY', position)
    def _match_custom(self, string, term, position):
        # type: (str, PEGAtom, int) -> Optional[ASTNode]
        """Dispatch a parse to a user-supplied (custom) syntax definition.

        Parameters:
            string (str): The string to parse.
            term (str): The custom term to parse the string as.
            position (int): The position at which to start the parse.

        Returns:
            Optional[ASTNode]: The root node of this abstract syntax
                sub-tree, or None if the definition does not match.
        """
        # Look up this term's expansion in the user-defined grammar.
        expression = self.custom_defs[term]
        self._debug_print('parsing {} at position {} >>>{}'.format(
            term, position, one_line_format(string[position:position+32])
        ))
        # Record the attempt (for error reporting) and recurse one level deeper.
        self._add_trace(term, position)
        self.depth += 1
        ast = self._match(string, expression, position)
        self.depth -= 1
        if ast:
            # Re-label the anonymous expression node with the custom term name.
            ast.term = term
            self._debug_print('matched {} at position {}: {}'.format(term, position, one_line_format(ast.match[:32])))
            return self._cache_and_return(term, position, ast)
        else:
            self._debug_print('failed to match {} at position {}'.format(term, position))
            return self._cache_and_return(term, position, None)
def _match_core(self, string, term, position):
# type: (str, PEGAtom, int) -> Optional[ASTNode]
"""Parse a core syntax definition.
Parameters:
string (str): The string to parse.
term (str): The term to parse the string as.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
match = re.match(PEGParser.CORE_DEFS[term], string[position:])
if match:
ast = ASTNode(term, [], self.filepath, string, position, position + len(match.group(0)))
return self._cache_and_return(term, position, ast)
return self._cache_and_return(term, position, None)
def _match_literal(self, string, term, position):
# type: (str, PEGAtom, int) -> Optional[ASTNode]
"""Parse a literal.
Parameters:
string (str): The string to parse.
term (str): The literal to parse the string as.
position (int): The position which with to start the parse.
Returns:
ASTNode: The root node of this abstract syntax sub-tree.
int: The index of the last character parsed.
"""
if string[position:].startswith(term[1:-1]):
ast = ASTNode(term, [], self.filepath, string, position, position + len(term[1:-1]))
return self._cache_and_return(term, position, ast)
return self._cache_and_return(term, position, None)
def _cache_and_return(self, term, position, ast):
# type: (str, int, Optional[ASTNode]) -> Optional[ASTNode]
"""Cache a successful parse and return the result.
Parameters:
term (str): The literal to parse the string as.
position (int): The position which with to start the parse.
ast (ASTNode): The root of the parsed abstract syntax sub-tree.
Returns:
None: The absence of an ASTNode.
int: The index of the last character parsed.
"""
self.cache[(term, position)] = ast
return ast
def _get_cached(self, term, position):
# type: (str, int) -> Optional[ASTNode]
"""Retrieve a parse from cache, if it exists.
Parameters:
term (str): The literal to parse the string as.
position (int): The position which with to start the parse.
Returns:
None: The absence of an ASTNode.
int: The index of the last character parsed.
"""
if (term, position) in self.cache:
if term in self.custom_defs:
self._debug_print('matched {} at position {}'.format(term, position))
ast = self.cache[(term, position)]
return ast
return None
def _debug_print(self, message):
# type: (str) -> None
"""Print debugging information with indentation.
Parameters:
message (str): The message to print.
"""
if self.debug:
print(self.depth * ' ' + message)
class ASTWalker:
    """A traversal of an AST.
    This is a base class for any processing that requires the bottom-up
    building of structures from an AST. Subclass functions with the name
    `_parse_Term` - where Term is the name of the node in the grammar - are
    called when that term is encountered on the way back up the tree, ie.
    in a post-order traversal. Each such function takes two arguments:
    * ast (ASTNode): The AST rooted at that node.
    * results (list[any]): The results from the descendants of the node.
    """
    # Prefix that marks handler methods on subclasses (e.g. _parse_identifier).
    PARSE_FUNCTION_PREFIX = '_parse_'
    class EmptySentinel:
        """Sentinel to indicate that no processing was done."""
    def __init__(self, parser, root_term):
        # type: (PEGParser, str) -> None
        """Initialize the traversal.
        Parameters:
            parser (PEGParser): The parser to use.
            root_term (str): The term to start parsing on.
        """
        self.parser = parser
        self.root_term = root_term
        # Terms that have an explicit _parse_* handler on this instance.
        self._dispatch_terms = set(
            term[len(self.PARSE_FUNCTION_PREFIX):] for term in dir(self)
            if term.startswith(self.PARSE_FUNCTION_PREFIX)
        )
        # Transitive closure: any grammar term whose definition can reach a
        # handled term must itself be descended into during traversal.
        self._terms_to_expand = set()
        noskips = list(self._dispatch_terms)
        while noskips:
            noskip = noskips.pop()
            self._terms_to_expand.add(noskip)
            for term, definition in self.parser.custom_defs.items():
                if term in self._terms_to_expand:
                    continue
                if ASTWalker.term_in_definition(noskip, definition):
                    noskips.append(term)
                    self._terms_to_expand.add(term)
    def _postorder_traversal(self, ast, depth=0):
        # type: (ASTNode, int) -> Tuple[Any, bool]
        """Traverses the AST in post-order.
        Parameters:
            ast (ASTNode): The AST to traverse.
            depth (int): The current depth, for printing purposes.
                Defaults to 0.
        Returns:
            any: Whatever the parse_* functions return, or an EmptySentinel.
            bool: Whether a parse occurred.
        """
        results = []
        for child in ast.descendants('*'):
            # Skip subtrees that cannot contain any handled term.
            if child.term not in self._terms_to_expand:
                continue
            result, parsed = self._postorder_traversal(child, depth=depth + 1)
            if not isinstance(result, ASTWalker.EmptySentinel):
                if parsed:
                    # A handler produced a single value for this child.
                    results.append(result)
                else:
                    # No handler ran on the child; splice its pass-through tuple.
                    results.extend(result)
        function = self.PARSE_FUNCTION_PREFIX + ast.term
        if hasattr(self, function):
            return getattr(self, function)(ast, tuple(results)), True
        elif results:
            return tuple(results), False
        else:
            return ASTWalker.EmptySentinel(), False
    def parse_file(self, filepath, term=None):
        # type: (Path, Optional[str]) -> Any
        """Parse a file with the traversal.
        Parameters:
            filepath (str): The path to the file.
            term (str): The term to start parsing on. Defaults to the term from
                the constructor.
        Returns:
            any: Whatever the parse_* functions return.
        """
        # NOTE(review): opens with the platform default encoding — confirm
        # grammar/source files are always compatible with it.
        with open(filepath) as fd:
            return self.parse(fd.read(), term, filepath)
    def parse(self, text, term=None, filepath=None):
        # type: (str, Optional[str], Optional[Path]) -> Any
        """Parse a complete string as the term.
        Parameters:
            text (str): The text to parse.
            term (str): The term to start parsing on. Defaults to the term from
                the constructor.
            filepath (Optional[Path]): The file being parsed.
        Returns:
            any: Whatever the parse_* functions return.
        """
        if term is None:
            term = self.root_term
        ast = self.parser.parse(text, term, filepath)
        return self.parse_ast(ast)
    def parse_partial(self, text, term=None, filepath=None):
        # type: (str, Optional[str], Optional[Path]) -> Tuple[Any, int]
        """Parse as much of a string as possible as the term.
        Parameters:
            text (str): The text to parse.
            term (str): The term to start parsing on. Defaults to the term from
                the constructor.
            filepath (Optional[Path]): The file being parsed.
        Returns:
            any: Whatever the parse_* functions return.
            int: The number of characters parsed
        """
        if term is None:
            term = self.root_term
        ast = self.parser.parse_partial(text, term, filepath)
        return self.parse_ast(ast), ast.end_pos
    def parse_ast(self, ast):
        # type: (ASTNode) -> Any
        """Parse an AST.
        Parameters:
            ast (ASTNode): The AST to parse.
        Returns:
            any: Whatever the parse_* functions return.
        """
        if ast is None:
            return None
        result = self._postorder_traversal(ast)[0]
        # Unwrap the pass-through tuple unless the root itself had a handler.
        if isinstance(result, ASTWalker.EmptySentinel) or ast.term in self._dispatch_terms:
            return result
        else:
            return result[0]
    @staticmethod
    def term_in_definition(term, definition):
        # type: (str, Union[PEGExpression, str]) -> bool
        """Determine if a definition could ever expand to include a term.
        Parameters:
            term (str): The term to find.
            definition (Union[PEGExpression, str]): The definition to search,
                either a bare term or a nested expression tuple.
        Returns:
            bool: Whether the term could be in the definition.
        """
        # Recurse through nested expression tuples looking for the term.
        return term == definition or any(
            (
                term == element
                or (
                    isinstance(element, tuple)
                    and ASTWalker.term_in_definition(term, element)
                )
            )
            for element in definition
        )
class PEGWalker(ASTWalker):
    # pylint: disable=invalid-name,no-self-use,unused-argument
    """A traversal of the PEG grammar to build up term definitions."""
    def __init__(self):
        # type: () -> None
        """Initialize the traversal starting at the 'syntax' root term."""
        super().__init__(PEGParser(PEG_DEFS), 'syntax')
    def _parse_syntax(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGRules
        """Parse a Syntax node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants; each is a
                (term, definition) pair from _parse_definition.
        Returns:
            dict[str]: Dictionary of term definitions, keyed by term name.
        """
        return {result[0]: result[1] for result in results}
    def _parse_definition(self, ast, results):
        # type: (ASTNode, Tuple[str, PEGExpression]) -> Tuple[str, PEGExpression]
        """Parse a Definition node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str, tuple[str]]: A (term, definition) pair.
        """
        return results
    def _parse_choice(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse a Choice node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A choice definition tagged 'CHOICE'.
        """
        return ('CHOICE', *results)
    def _parse_sequence(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse a Sequence node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A sequence definition tagged 'SEQUENCE'.
        """
        return ('SEQUENCE', *results)
    def _parse_zero_or_more(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse a zero-or-more predicate node (the * operator).
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A repetition definition tagged 'ZERO_OR_MORE'.
        """
        return ('ZERO_OR_MORE', *results)
    def _parse_zero_or_one(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse a zero-or-one predicate node (the ? operator).
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A repetition definition tagged 'ZERO_OR_ONE'.
        """
        return ('ZERO_OR_ONE', *results)
    def _parse_one_or_more(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse a one-or-more predicate node (the + operator).
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A repetition definition tagged 'ONE_OR_MORE'.
        """
        return ('ONE_OR_MORE', *results)
    def _parse_and_predicate(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse an And predicate node (positive lookahead).
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A predicate definition tagged 'AND'.
        """
        return ('AND', *results)
    def _parse_not_predicate(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> PEGExpression
        """Parse a Not predicate node (negative lookahead).
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            tuple[str]: A predicate definition tagged 'NOT'.
        """
        return ('NOT', *results)
    def _parse_identifier(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> str
        """Parse an Identifier node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            str: A custom term name.
        """
        return ast.match
    def _parse_builtin(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> str
        """Parse a Builtin (reserved core term) node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            str: A builtin (core term) name.
        """
        return ast.match
    def _parse_literal(self, ast, results):
        # type: (ASTNode, Tuple[Any, ...]) -> str
        """Parse a Literal node.
        Parameters:
            ast (ASTNode): The AST term to head the tuple.
            results (list[any]): The results from descendants.
        Returns:
            str: The literal, including its surrounding quotes.
        """
        return ast.match
def main():
    # type: () -> None
    """Parse a grammar, then parse input text with it and print the AST."""
    arg_parser = ArgumentParser()
    arg_parser.add_argument(
        '-e', dest='expression',
        help='starting expression; if omitted, first defined term is used',
    )
    arg_parser.add_argument('-g', dest='grammar', default=PEG_GRAMMAR, help='PEG grammar file')
    arg_parser.add_argument(
        '-v', dest='verbose', action='store_true',
        help='show what the parser is doing',
    )
    arg_parser.add_argument('file', default='-', nargs='?', help='text file to be parsed')
    args = arg_parser.parse_args()
    term = args.expression
    if not term:
        # No explicit start term: default to the first definition in the grammar.
        with open(args.grammar, 'r') as fd:
            grammar_ast = PEGParser(PEG_DEFS).parse(fd.read(), 'syntax')
        term = grammar_ast.first_descendant('definition/identifier').match
    parser = create_parser_from_file(args.grammar, debug=args.verbose)
    text = ''.join(fileinput(files=args.file))
    parser.parse(text, term).pretty_print()


if __name__ == '__main__':
    main()
|
[
"justinnhli@gmail.com"
] |
justinnhli@gmail.com
|
04b6b9cdecc037a0bbf85aaf2ecd6551e691bcc6
|
ca55dcaa64ea9db4068e13091321cfebecc0ff41
|
/codeUp/codeUpBasic/1542.py
|
c4e22a2c9a1d74f55ed1bff4f11808af1077606b
|
[] |
no_license
|
gomtinQQ/algorithm-python
|
8fb8343594b945099ae2a4dfa794ecb47e54ab0b
|
751562922b66e335f621d366bb73dacdc7125140
|
refs/heads/master
| 2022-12-07T23:05:44.535593
| 2020-08-21T12:29:58
| 2020-08-21T12:29:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 369
|
py
|
'''
1542 : [Basics - writing a function] Print "prime" or "composite" via a function.

Prints "prime" when the input is a prime number and "composite" when the
input is a composite number.

An int natural number n is given as input (2 <= n <= 1000).
'''
def f(n):
    """Print "prime" if n is prime, otherwise "composite".

    Parameters:
        n (int): A natural number, 2 <= n <= 1000.
    """
    # BUG FIX: the original only tested divisibility by 2 and 3, so
    # composites such as 25, 35, and 49 were reported as "prime".
    # Trial division up to sqrt(n) is correct (and fast for n <= 1000).
    is_prime = n >= 2 and all(n % d != 0 for d in range(2, int(n ** 0.5) + 1))
    if is_prime:
        print("prime")
    else:
        print("composite")
# Read a natural number from stdin and classify it.
n = int(input())
f(n)
|
[
"minhyeonlee1@gmail.com"
] |
minhyeonlee1@gmail.com
|
7ff6d82d991e9913ad7ae896381932c9591be6f8
|
54f352a242a8ad6ff5516703e91da61e08d9a9e6
|
/Source Codes/CodeJamData/12/32/15.py
|
d74cd906d566a2e4c1d48f67fb2109120fdb1bda
|
[] |
no_license
|
Kawser-nerd/CLCDSA
|
5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb
|
aee32551795763b54acb26856ab239370cac4e75
|
refs/heads/master
| 2022-02-09T11:08:56.588303
| 2022-01-26T18:53:40
| 2022-01-26T18:53:40
| 211,783,197
| 23
| 9
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,115
|
py
|
import os, sys, math
inFile = None
outFile = None
def printAnswer(case, answer):
    # Cases are numbered from 1 in the judge's expected output format.
    line = 'Case #{0}: {1}\n'.format(case+1, answer)
    outFile.write(line)
def solveSimple(tc, a, d):
    # Time to cover distance d from rest at acceleration a is sqrt(2d/a),
    # but the answer can never be earlier than tc (when the road clears).
    t = max(tc, math.sqrt(2.0*d/a))
    outFile.write('{0}\n'.format(t))
def solveCase(caseNo):
    # Reads one test case from inFile: a header line "d n a", then n
    # observation lines "t x" of the blocking car, then one line of a
    # accelerations; answers each acceleration via solveSimple().
    (d, n, a) = inFile.readline().strip().split()
    d = float(d)
    n = int(n)
    a = int(a)
    # Emit the "Case #k:" header with an empty answer; the per-query times
    # are written on their own lines by solveSimple() below.
    printAnswer(caseNo, '')
    xc = d + 100.0  # NOTE(review): never read again — appears to be dead.
    tc = 0
    if n==1:
        (t0, x0) = map(float, inFile.readline().strip().split())
        # A single observation: the car either already cleared d (epsilon
        # tolerance) or blocks until t0.
        if x0 > d + (0.00000001):
            tc = 0
        else:
            tc = t0
        print tc  # Python 2 debug print left in by the author.
    if n==2:
        (t0, x0) = map(float, inFile.readline().strip().split())
        (t1, x1) = map(float, inFile.readline().strip().split())
        # Constant speed inferred from the two observations.
        vc = (x1-x0)/(t1-t0)
        if x1 > d:
            # Interpolate the moment the car passes distance d.
            tc = t0 + (d-x0)/vc
        else:
            tc = t1
    # Final line of the case: the accelerations to answer.
    av = map(float, inFile.readline().strip().split())
    for a in av:
        solveSimple(tc, a, d)
def main():
    # First line of in.txt is the number of test cases.
    case_count = int(inFile.readline().strip())
    for case_no in range(case_count):
        solveCase(case_no)

if __name__ == '__main__':
    inFile = open('in.txt','rt')
    outFile = open('out.txt', 'wt')
    main()
|
[
"kwnafi@yahoo.com"
] |
kwnafi@yahoo.com
|
13836af741773d0cf466de54dbb5056b65fc2f92
|
67998b63991c8f76cd47867b5b9712a9026a0e4b
|
/configuration/models.py
|
eb4a3fd23abc9f36c4e2dfda5fddb4161d4baddc
|
[] |
no_license
|
ToGoBananas/isure
|
a9065823d494dfed306a0f81bf8840ba12cbee29
|
eb9cac33ead39195c1dff90b27b55f187e182f49
|
refs/heads/master
| 2021-03-27T11:05:00.929102
| 2016-06-15T06:53:25
| 2016-06-15T06:53:25
| 58,100,880
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,082
|
py
|
from django.db import models
from policies.models import PolicyBase
class Currency(models.Model):
    # Snapshot of exchange rates; the timestamp refreshes on every save.
    last_modified = models.DateTimeField(auto_now=True)
    euro = models.FloatField(null=True)  # presumably the EUR rate — confirm units
    usd = models.FloatField(null=True)   # presumably the USD rate — confirm units
    def __str__(self):
        # Admin display: show when the rates were last updated.
        return str(self.last_modified)
    class Meta:
        verbose_name = 'курсы валют'
        verbose_name_plural = 'Курсы валют'
class AppRules(models.Model):
    """Terms-of-use text shown in the application."""
    text = models.TextField()
    def __str__(self):
        # BUG FIX: the original returned str(self.last_modified), but this
        # model has no last_modified field, so __str__ always raised
        # AttributeError (e.g. in the Django admin list). Show a short
        # preview of the rules text instead.
        return self.text[:50]
    class Meta:
        verbose_name = 'правила'
        verbose_name_plural = 'Правила использования приложения'
class Bordereau(models.Model):
    # Generated bordereau report covering the [start, end] date range.
    csv = models.FileField(upload_to='bordereau/', blank=True, null=True)
    start = models.DateField()
    end = models.DateField()
    # NOTE(review): auto_now=True updates this timestamp on *every* save;
    # for a "created" field auto_now_add=True was probably intended — confirm.
    created = models.DateTimeField(auto_now=True, null=True)
    def __str__(self):
        # Admin display: identify the report by its (last-saved) timestamp.
        return str(self.created)
    class Meta:
        verbose_name = 'бордеро'
        verbose_name_plural = 'Бордеро'
|
[
"endpoo@gmail.com"
] |
endpoo@gmail.com
|
4d83e95be8a112167a7f8664299e77c49675449f
|
8bacc85600d3ab96996c9b3a0b46d7a0833c7c83
|
/core/widgets.py
|
3c6597a72e38911ec5a8892840619477f1704ecb
|
[] |
no_license
|
cattias/cannesalair
|
fb2ac6eeeb90766d5b2a5cb688067e7b07ad58b8
|
0fc63e9d3481fa19c870a7f18408e840b2748a9d
|
refs/heads/master
| 2016-09-06T00:45:01.117844
| 2015-12-28T13:40:56
| 2015-12-28T13:40:56
| 24,021,210
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,287
|
py
|
from django.forms import widgets
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django.utils.html import conditional_escape
from django.forms.util import flatatt
import datetime
import settings
class LongTextInput(widgets.TextInput):
    """Text input rendered with the 'input-maxi' CSS class."""
    def __init__(self, attrs=None):
        # BUG FIX: super() must name this class, not its base class;
        # super(widgets.TextInput, self) skipped TextInput in the MRO.
        super(LongTextInput, self).__init__(attrs)
        self.attrs.update({'class': 'input-maxi'})
class MediumTextInput(widgets.TextInput):
    """Text input rendered with the 'input-medium' CSS class."""
    def __init__(self, attrs=None):
        # BUG FIX: super() must name this class, not its base class;
        # super(widgets.TextInput, self) skipped TextInput in the MRO.
        super(MediumTextInput, self).__init__(attrs)
        self.attrs.update({'class': 'input-medium'})
class MediumTextarea(widgets.Textarea):
    """Ten-row textarea rendered with the 'input-medium' CSS class."""
    def __init__(self, attrs=None):
        # BUG FIX: super() must name this class, not its base class;
        # super(widgets.Textarea, self) skipped Textarea in the MRO.
        super(MediumTextarea, self).__init__(attrs)
        self.attrs.update({'class': 'input-medium', 'rows': 10})
class MaxiTextarea(widgets.Textarea):
    """Textarea rendered with the 'input-maxi' CSS class."""
    def __init__(self, attrs=None):
        # BUG FIX: super() must name this class, not its base class;
        # super(widgets.Textarea, self) skipped Textarea in the MRO.
        super(MaxiTextarea, self).__init__(attrs)
        self.attrs.update({'class': 'input-maxi'})
class TextareaTiny(widgets.Textarea):
    """Bare <textarea> widget with no extra attributes.

    Name suggests it is enhanced client-side by TinyMCE — confirm.
    """
    def __init__(self, attrs=None):
        # BUG FIX: super() must name this class, not its base class;
        # super(widgets.Textarea, self) skipped Textarea in the MRO.
        super(TextareaTiny, self).__init__(attrs)
    def render(self, name, value, attrs=None):
        """Render as a plain <textarea> with escaped contents."""
        if value is None: value = ''
        value = force_unicode(value)
        final_attrs = self.build_attrs(attrs, name=name)
        return mark_safe(u'<textarea%s>%s</textarea>' % (flatatt(final_attrs),
            conditional_escape(force_unicode(value))))
class CustomDatePicker(widgets.DateTimeInput):
    """
    A DateTimeInput widget rendered as an inline jQuery UI datepicker.
    The chosen date is posted back through a hidden input whose time part
    is hard-coded to 12:00.
    """
    def __init__(self, attrs=None):
        # NOTE(review): super() names the base class (widgets.DateTimeInput)
        # instead of CustomDatePicker, skipping a level of the MRO — confirm
        # this is intentional.
        super(widgets.DateTimeInput, self).__init__(attrs)
        self.attrs = self.attrs or {}
        if not self.attrs.get('id'):
            # Default DOM id that the datepicker JS below binds to.
            self.attrs.update({'id': 'datepicker'})
    def render(self, name, value, attrs=None):
        # Renders a <div> that client-side JS turns into a datepicker, plus a
        # sibling <div> holding the hidden input with the selected date.
        final_attrs = self.build_attrs(attrs, name=name)
        if not value:
            # No initial value: render an empty picker.
            render = """
            <link rel="stylesheet" type="text/css" href="%(media_url)scss/cupertino/ui.all.css" />
            <div%(attrs)s></div><div id="dateselected_%(div_id)s"></div>
            <script type="text/javascript">
            $("#%(div_id)s").datepicker({
                dateFormat: 'yy-mm-dd',
                onSelect: function(date, instance) {
                    $("#dateselected_%(div_id)s").html("<input type='hidden' name='%(name)s' value='"+ date +" 12:00'/>");
                }
            });
            </script>
            """ % {
                'media_url': settings.MEDIA_URL,
                'attrs': flatatt(final_attrs),
                'div_id': final_attrs['id'],
                'name': final_attrs['name'],
            }
        else:
            # Accept string values in either "Y-m-d H:M" or "Y-m-d" form.
            if isinstance(value, unicode) or isinstance(value, str):
                if value.find(":") > 0:
                    value = datetime.datetime.strptime(value, "%Y-%m-%d %H:%M")
                else:
                    value = datetime.datetime.strptime(value, "%Y-%m-%d")
            # Same template as above, but pre-selects the current value
            # (JavaScript Date months are zero-based, hence monthzerobased).
            render = """
            <link rel="stylesheet" type="text/css" href="%(media_url)scss/cupertino/ui.all.css" />
            <div%(attrs)s></div><div id="dateselected_%(div_id)s"></div>
            <script type="text/javascript">
            $("#%(div_id)s").datepicker({
                dateFormat: 'yy-mm-dd',
                onSelect: function(date, instance) {
                    $("#dateselected_%(div_id)s").html("<input type='hidden' name='%(name)s' value='"+ date +" 12:00'/>");
                }
            });
            $("#%(div_id)s").datepicker("setDate", new Date(%(year)d,%(monthzerobased)02d,%(day)02d));
            $("#dateselected_%(div_id)s").html("<input type='hidden' name='%(name)s' value='%(year)d-%(month)02d-%(day)02d 12:00'/>");
            </script>
            """ % {
                'media_url': settings.MEDIA_URL,
                'attrs': flatatt(final_attrs),
                'div_id': final_attrs['id'],
                'name': final_attrs['name'],
                'year': value.year,
                'monthzerobased': value.month-1,
                'month': value.month,
                'day': value.day,
            }
        return render
|
[
"christophe.attias@gmail.com"
] |
christophe.attias@gmail.com
|
ea9b73aaff3487a1781f673369c95ad724dd7f78
|
8cb3b3a0dcbb728fe95257495332ffc8b0bdbef4
|
/draggable_screen.py
|
f2659fb530ba12c23e0c5e065dda44c2e46e5d5e
|
[] |
no_license
|
VanillaViking/pgscript-tutor
|
3a8870657464c576a92efdf0651b83f2c57218ea
|
761d4d7ac44f36e11287c67b88d7354ba35670a6
|
refs/heads/master
| 2022-12-05T16:46:52.011505
| 2020-08-24T10:28:18
| 2020-08-24T10:28:18
| 271,986,285
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,464
|
py
|
import pygame
from pgscript import draw_grid
from pgscript import scrolling_screen
from pgscript import text
from pgscript import bg
from pgscript import button
from pgscript import text_input
from pgscript_parser import parse_args
from pgscript import draggable_surface
def draw(DISPLAY):
    """Run the draggable_surface documentation page with a live demo.

    The left half of the window shows scrolling documentation; the right
    half hosts an interactive demo whose constructor call the user can edit
    and re-run. Returns when the BACK button is pressed.

    Parameters:
        DISPLAY (pygame.Surface): The window surface to draw on.
    """
    grid = draw_grid.draw_grid(DISPLAY, 2,8)
    doc_surf = pygame.Surface((DISPLAY.get_width(), DISPLAY.get_height()))# surface where text from documentation is placed
    demo_screen = pygame.Surface((DISPLAY.get_width()/2 - 13, DISPLAY.get_height()), pygame.SRCALPHA) #surface where users can experiment with the demonstration
    doc_screen = scrolling_screen.scrolling_screen(doc_surf, 2.6, "l", 13, 15, (230,230,230))
    back_button = button.button(doc_screen.surface, [255,255,255,150], [255,255,255,200], grid.get_column(0.06), grid.get_row(0.2), 85,35, "BACK", anim=True)
    #DEMO OBJECTS
    demo_surface = pygame.Surface((100,100))
    demo_surface.fill((0,0,0))
    demo = draggable_surface.draggable_surface(demo_screen, demo_surface, (245,285))
    demo_text = 'draggable_surface.draggable_surface(demo_screen, demo_surface, (245,285))'
    demo_input = text_input.text_input(demo_screen, grid.get_column(0), grid.get_row(6.5), 610, 30, demo_text, (0,0,0),(25,25,25),(100,100,100), 15)
    run_demo = button.button(demo_screen, [255,255,255,150], [255,255,255,200],grid.get_column(0.02), demo_input.rect.bottom + 10, 85,30, "Run" )
    reset_demo = button.button(demo_screen, [255,255,255,150], [255,255,255,200],grid.get_column(0.02) + 90, demo_input.rect.bottom + 10, 85,30, "Reset")
    #ERROR
    error_text = text.text(demo_screen, pygame.font.SysFont('arial', 20), (230,0,0))
    #TITLE
    title = text.text(doc_screen.surface, pygame.font.SysFont('arial', 40), (255,255,255))
    #HEADING
    heading = text.text(doc_screen.surface, pygame.font.SysFont('arial', 35), (230,230,230))
    #HEADING 2
    heading_2 = text.text(doc_screen.surface, pygame.font.SysFont('arial', 30), (200,200,200))
    #PARAGRAPHS
    paragraph = text.text(doc_screen.surface, pygame.font.SysFont('arial', 20), (255,255,255))
    #ARGUMENTS
    arg_heading = text.text(doc_screen.surface, pygame.font.SysFont('arial', 20), (255,255,255))
    arg_heading.font.set_underline(True)
    #-------------------------------- documentation copy --------------------------------
    title.message("Draggable Surface", (grid.get_column(0.5), grid.get_row(1)), center=True)
    paragraph.wrapped_text("This class creates a pygame surface that can be moved with the mouse by clicking and dragging.", (grid.get_column(0.06), title.text_end[1] + 125), 580, 2)
    heading.message("Methods", (grid.get_column(0.06), paragraph.text_end[1] + 100))
    heading_2.message("pgscript.draggable_surface()", (grid.get_column(0.06), heading.text_end[1] + 100))
    paragraph.wrapped_text("Constructor method of the class. The surface that needs to be made movable and its starting position is passed to this method as arguments.", (grid.get_column(0.06), heading_2.text_end[1] + 75), 580, 2)
    arg_heading.message("DISPLAY (pygame.Surface):", (grid.get_column(0.06), paragraph.text_end[1] + 50))
    paragraph.wrapped_text("The surface that the object needs to be drawn to. Usually this surface is the display surface of the window.", (arg_heading.text_end[0]+7,arg_heading.text_end[1]), 580 - arg_heading.text_end[0]-7, 2)
    arg_heading.message("Suface (pygame.Surface):", (grid.get_column(0.06), paragraph.text_end[1] + 50))
    paragraph.wrapped_text("The surface that needs to be made movable.", (arg_heading.text_end[0]+7,arg_heading.text_end[1]), 580 - arg_heading.text_end[0]-7, 2)
    arg_heading.message("start_pos (tuple (x,y)):", (grid.get_column(0.06), paragraph.text_end[1] + 50))
    paragraph.wrapped_text("The position at which the surface is initially drawn.", (arg_heading.text_end[0]+7,arg_heading.text_end[1]), 580 - arg_heading.text_end[0]-7, 2)
    heading_2.message("pgscript.draggable_surface.draw()", (grid.get_column(0.06), paragraph.text_end[1] + 100))
    paragraph.wrapped_text("Draws the draggable surface object onto the DISPLAY surface. This method is meant to be called in the same loop where DISPLAY is updated", (grid.get_column(0.06),heading_2.text_end[1] +75), 580, 2)
    heading_2.message("pgscript.draggable_surface.update(event)", (grid.get_column(0.06), paragraph.text_end[1] + 100))
    paragraph.wrapped_text("Pygame events are used to check for mouse activity and clicks. It is meant to be run in a loop with each event given by pygame.event.get()", (grid.get_column(0.06),heading_2.text_end[1] +75), 580, 2)
    heading_2.message("pgscript.draggable_surface.get_pos()", (grid.get_column(0.06), paragraph.text_end[1] + 100))
    paragraph.wrapped_text("Returns a tuple (x,y) of the current coordinates of the surface.", (grid.get_column(0.06),heading_2.text_end[1] +75), 580, 2)
    heading_2.message("pgscript.draggable_surface.set_pos(pos)", (grid.get_column(0.06), paragraph.text_end[1] + 100))
    paragraph.wrapped_text("Sets the position of the draggable surface to pos", (grid.get_column(0.06),heading_2.text_end[1] +75), 580, 2)
    arg_heading.message("pos (tuple (x,y)):", (grid.get_column(0.06), paragraph.text_end[1] + 50))
    paragraph.wrapped_text("New desired position of the object.", (arg_heading.text_end[0] + 7,arg_heading.text_end[1]), 580, 2)
    #--------------------------------------------------------------------------------
    background = bg.parallax_bg(doc_surf, "obj_bg.jpg")
    doc_screen.add_objects([title, heading, heading_2,arg_heading, paragraph, back_button])
    # Main event loop: runs until BACK is pressed.
    while not back_button.get_state():
        pygame.display.update()
        background.draw()
        demo_screen.fill([200,200,200,200])
        demo.draw()
        demo_input.draw()
        run_demo.draw()
        reset_demo.draw()
        error_text.draw()
        doc_screen.surface.fill([255,255,255,0])
        doc_screen.draw()
        DISPLAY.blit(doc_surf, (0, 0))
        DISPLAY.blit(demo_screen, (grid.get_column(1) + 13, 0))
        if run_demo.get_state():
            # Strip the fixed "draggable_surface.draggable_surface(demo_screen, demo_surface, " prefix
            # and trailing ")" to get the user-editable arguments.
            demo_args = demo_input.get_text()[48:-1]
            print(demo_args)
            try:
                demo = draggable_surface.draggable_surface(demo_screen, demo_surface, *parse_args(demo_args)) #generate the object with args in the text field
            except Exception:
                # BUG FIX: was a bare `except:`, which would also swallow
                # SystemExit/KeyboardInterrupt; only programming/user-input
                # errors should surface as "Invalid syntax".
                print(*parse_args(demo_args))
                error_text.message("Invalid syntax", (grid.get_column(0.5), grid.get_row(7)), 3, center=True)
            run_demo.reset_state()
        elif reset_demo.get_state():
            demo_input.set_text(demo_text)
            run_demo.pressed = True
            reset_demo.reset_state()
        for event in pygame.event.get():
            doc_screen.update(event)
            # Demo widgets live on the right half, so translate mouse
            # coordinates into demo_screen space before hit-testing.
            run_demo.update(event, (pygame.mouse.get_pos()[0] - (DISPLAY.get_width()/2) - 13, pygame.mouse.get_pos()[1]))
            demo_input.update(event, (pygame.mouse.get_pos()[0] - (DISPLAY.get_width()/2) - 13, pygame.mouse.get_pos()[1]))
            if pygame.mouse.get_pos()[0] > (DISPLAY.get_width()/2) +13:
                demo.update(event , (pygame.mouse.get_pos()[0] - (DISPLAY.get_width()/2) - 13, pygame.mouse.get_pos()[1]))
            else:
                # Cursor left the demo pane: stop any in-progress drag.
                demo.active = False
            reset_demo.update(event, (pygame.mouse.get_pos()[0] - (DISPLAY.get_width()/2) - 13, pygame.mouse.get_pos()[1]))
|
[
"ashwinr2k2@gmail.com"
] |
ashwinr2k2@gmail.com
|
67442993d1df82b10abd54c14ea3fdae53bcf1ce
|
784c4e7485f77506e48c4f7040b1b12873b9de37
|
/text_function.py
|
ef264253775cafebc5222ce9633fdffab638e080
|
[] |
no_license
|
tiant1/hello-world
|
44e1b205681d5700ae56c4d1797a79743ad7c2c2
|
167d6d141b4c203308e5972062d0400f41357578
|
refs/heads/master
| 2021-01-24T08:06:31.258772
| 2017-06-20T13:47:32
| 2017-06-20T13:47:32
| 93,371,821
| 0
| 0
| null | 2017-06-05T07:01:46
| 2017-06-05T06:22:44
| null |
UTF-8
|
Python
| false
| false
| 529
|
py
|
# coding:utf-8
def text_create(name, msg, path='D://'):
    """Write msg to <path><name>.txt and print 'Done'.

    Parameters:
        name (str): Base file name, without extension.
        msg (str): Text to write to the file.
        path (str): Directory prefix; defaults to 'D://' for backward
            compatibility with the original script.
    """
    full_path = path + name + '.txt'
    # 'with' closes the file even if write() raises; the original leaked
    # the handle on error.
    with open(full_path, 'w') as file:
        file.write(msg)
    print('Done')
# Demo call: writes Hello.txt under the default 'D://' prefix.
text_create('Hello', 'hello world')
def text_filter(word, censored_word = 'lame', changed_word = 'Awesome'):
    """Return word with every occurrence of censored_word replaced."""
    filtered = word.replace(censored_word, changed_word)
    return filtered
# Demo call: the return value is discarded.
text_filter('Python is lame!')
def censored_text_create(name, msg):
    """Write msg to <name>.txt after censoring it with text_filter."""
    sanitized = text_filter(msg)
    text_create(name, sanitized)
# Demo call: censors the message before writing Try.txt.
censored_text_create('Try', 'lame!lame!lame!')
|
[
"tian_tian1@outlook.com"
] |
tian_tian1@outlook.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.