hexsha
stringlengths 40
40
| size
int64 3
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
972
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
972
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
972
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.03M
| avg_line_length
float64 1.13
941k
| max_line_length
int64 2
941k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4e1e6d9f64c976db2a230f08343f92e8512ecc75
| 4,560
|
py
|
Python
|
code/analysis/old/get_word_representativeness_Poisson.py
|
tkc-morita/variational_inference_DP_mix_HDP_topic_ngram
|
95d6c8ab2956501fc82b416bf423ee57fe77c73f
|
[
"MIT"
] | 4
|
2021-03-27T18:28:23.000Z
|
2022-01-10T23:32:29.000Z
|
code/analysis/old/get_word_representativeness_Poisson.py
|
stannam/variational_inference_DP_mix_HDP_topic_ngram
|
95d6c8ab2956501fc82b416bf423ee57fe77c73f
|
[
"MIT"
] | null | null | null |
code/analysis/old/get_word_representativeness_Poisson.py
|
stannam/variational_inference_DP_mix_HDP_topic_ngram
|
95d6c8ab2956501fc82b416bf423ee57fe77c73f
|
[
"MIT"
] | 1
|
2022-01-10T23:45:54.000Z
|
2022-01-10T23:45:54.000Z
|
# coding: utf-8
import pandas as pd
import numpy as np
import scipy.misc as spm
import scipy.special as sps
# import scipy.stats as spst
import sys, os.path
def main_loop(encoded_data, log_ngram, n, start_code, log_assignment_over_others, gamma_poisson):
return np.array(
[get_representativeness(
string,
log_ngram,
n,
start_code,
log_assignment_over_others,
gamma_poisson
)
for string
in encoded_data
]
)
def get_representativeness(
string,
log_ngram,
n,
start_code,
log_assignment_over_others,
gamma_poisson
):
log_likes = (
get_log_like(string, log_ngram, n, start_code)
+
gamma_poisson.get_log_prob(len(string))
)
return log_likes - spm.logsumexp(log_likes[np.newaxis,:] + log_assignment_over_others, axis=1)
def get_log_like(string, log_ngram, n, start_code):
return np.sum([
log_ngram['_'.join(map(str, ngram_window[:-1]))][ngram_window[-1]]
for ngram_window in zip(*[((start_code,)*(n-1)+string)[i:] for i in range(n)])
]
,
axis=0
)
def encode_data(data, encoder):
return [tuple([encoder[symbol] for symbol in string.split(',')]) for string in data]
def get_log_assignment_over_others(df_stick):
df_stick = df_stick.sort_values('cluster_id')
df_stick['beta_sum'] = df_stick.beta_par1 + df_stick.beta_par2
df_stick['log_stop_prob'] = np.log(df_stick.beta_par1) - np.log(df_stick.beta_sum)
df_stick['log_pass_prob'] = np.log(df_stick.beta_par2) - np.log(df_stick.beta_sum)
log_assignment_probs = []
log_cum_pass_prob = 0
for row_tuple in df_stick.itertuples():
log_assignment_probs.append(row_tuple.log_stop_prob + log_cum_pass_prob)
log_cum_pass_prob += row_tuple.log_pass_prob
log_assignment_probs.append(log_cum_pass_prob)
log_assignment_probs = np.array(log_assignment_probs)
num_sublex = log_assignment_probs.size
log_assignment_to_others = np.repeat(log_assignment_probs[np.newaxis,:], num_sublex, axis=0)
np.fill_diagonal(log_assignment_to_others, -np.inf)
log_assignment_to_others = spm.logsumexp(log_assignment_to_others, axis=1)
log_assignment_over_others = log_assignment_probs[np.newaxis,:] - log_assignment_to_others[:,np.newaxis]
np.fill_diagonal(log_assignment_over_others, -np.inf)
return log_assignment_over_others
class GammaPoisson(object):
def __init__(self, df):
df = df.sort_values('sublex_id')
self.num_failures = df['shape'].values
p = 1 / (df.rate.values+np.float64(1))
self.log_p = np.log(p)
self.gammaln_num_failure = sps.gammaln(self.num_failures)
self.num_failures_x_log_1_minus_p = self.num_failures * np.log(1-p)
def get_log_prob(self, num_success):
return (
sps.gammaln(num_success+self.num_failures)
-
sps.gammaln(num_success+1)
-
self.gammaln_num_failure
+
num_success * self.log_p
+
self.num_failures_x_log_1_minus_p
)
def get_log_ngram_probs(df_ngram):
df_ngram = df_ngram.sort_values('sublex_id')
log_ngram = {}
for (context,value), sub_df in df_ngram.groupby(['context','value']):
if context in log_ngram.keys():
log_ngram[context][value] = np.log(sub_df.prob.values)
else:
log_ngram[context]={value: np.log(sub_df.prob.values)}
start_code = df_ngram.value.max()+1
return log_ngram, start_code
if __name__ == '__main__':
ngram_path = sys.argv[1]
data_path = sys.argv[2]
result_dir,filename = os.path.split(ngram_path)
n = int(filename.split('gram')[0].split('_')[-1])
df_ngram = pd.read_csv(ngram_path)
log_ngram, start_code = get_log_ngram_probs(df_ngram)
df_data = pd.read_csv(data_path, sep='\t', encoding='utf-8')
df_code = pd.read_csv(os.path.join(result_dir, 'symbol_coding.csv'), encoding='utf-8')
df_code.set_index('symbol', inplace=True)
encoder = df_code.code.to_dict()
encoded_data = encode_data(df_data.IPA_csv.tolist(), encoder)
hdf5_path = os.path.join(result_dir, 'variational_parameters.h5')
df_stick = pd.read_hdf(hdf5_path, key='/sublex/stick')
log_assignment_over_others = get_log_assignment_over_others(df_stick)
df_length = pd.read_hdf(os.path.join(result_dir, 'variational_parameters.h5'), key='/sublex/length')
gamma_poisson = GammaPoisson(df_length)
representativeness = main_loop(encoded_data, log_ngram, n, start_code, log_assignment_over_others, gamma_poisson)
df_rep = pd.DataFrame(
representativeness,
columns=['sublex_%i' % i for i in range(representativeness.shape[1])]
)
df_rep['word_id'] = df_rep.index
df_rep.to_csv(
os.path.join(result_dir, 'word_representativeness.csv')
,
index=False
)
| 29.419355
| 114
| 0.739035
|
5395c8898878d9e85668cd3dcdecba71bc00bb1b
| 8,749
|
py
|
Python
|
Network Automation/Automation.py
|
kuhakuu04/Network_Automation
|
f3eb99943e569f3311233f437ea17cd1862e3dc9
|
[
"Apache-2.0"
] | null | null | null |
Network Automation/Automation.py
|
kuhakuu04/Network_Automation
|
f3eb99943e569f3311233f437ea17cd1862e3dc9
|
[
"Apache-2.0"
] | null | null | null |
Network Automation/Automation.py
|
kuhakuu04/Network_Automation
|
f3eb99943e569f3311233f437ea17cd1862e3dc9
|
[
"Apache-2.0"
] | null | null | null |
#module
import os
import paramiko
import netmiko
import getpass
import Router
#login banner
print('NETWORK AUTOMATION\nCREATE BY LNF LINFIQ 04\n\n\nver0.2.1\nhelp - show command\n')
while True:
#input
input_command = input('LNF(automation) > ')
#input command
if(input_command == 'help'):
print('\n')
print('exit - exit program')
print('help - show command')
print('ssh - connect devices using SSH')
print('\n')
#ssh command
elif(input_command == 'ssh'):
while True:
#input shh
ssh_command = input('LNF(SSH)> ')
if(ssh_command == 'help'):
print('\n')
print('cisco - connecting with the type of the Cisco brand')
print('back - back to menu')
print('help - show command')
print('mikrotik - connecting with the type of the Mikrotik brand')
print('\n')
#cisco brand
elif(ssh_command == 'cisco'):
hostname = input('hostname address\n>>> ')
port = input('port\n>>> ')
username = input('username\n>>> ')
password = input('password\n>>> ')
secret = input('insert secret your cisco device\n>>> ')
#connect module
Router_API = netmiko.cisco.CiscoIosBase(ip=hostname, username=username, password=password, secret=secret)
Router_API.enable()
while True:
router_command = input('LNF(command-Cisco)> ')
if(router_command == "help"):
print('\n')
print('help - show command')
print('exit - stop connection')
print('route - set static route for router to access network')
print('internet - set nat so that clients can access the internets')
print('dhcp - set dhcp server so that clients get automatic ip address')
print('\n')
elif(router_command == 'dhcp'):
#command for setting
Pool_Name = input("set pool name\n>>> ")
network_address = input("set network address and subnet example[192.168.0.0 255.255.255.0]\n>>> ")
gateway_address = input('set gateway address\n>>> ')
dns_address = input('set dns address\n>>> ')
#setting module
Set_DHCP_Server = Router.Cisco.Set_DHCP_Server(name="",network="", gateway="", dns="")
Set_DHCP = Router_API.send_config_set(Set_DHCP_Server)
print(Set_DHCP)
elif(router_command == 'internet'):
Set_Internet_Port = input("enter your port of ethernet\n>>> ")
Set_Local_Port = input("enter your port of local connection\n>>> ")
Set_Internet_Access = Router.Cisco.Set_Basic_Access(internet_port=Set_Internet_Port, local_port=Set_Local_Port)
Set_Internet_Output = Router_API.send_config_set(Set_Internet_Access)
print(Set_Internet_Output)
elif(router_command == "route"):
Router_Gateway = input('gateway\n>>> ')
Static = Router.Cisco.Set_Static_Route(gateway=Router_Gateway)
output = Router_API.send_config_set(Static)
print(output)
elif(router_command == "exit"):
break
else:
print("\ncommand error\n")
#mikrotik brand
elif(ssh_command == 'mikrotik'):
hostname = input('hostname address\n>>> ')
port = input('port\n>>> ')
username = input('username\n>>> ')
password = getpass.getpass('password\n>>> ')
#connect module
Router_API = paramiko.SSHClient()
Router_API.set_missing_host_key_policy(paramiko.AutoAddPolicy())
Router_API.connect(hostname, port, username, password)
while True:
print('\n')
router_command = input('LNF(command-Mikrotik)> ')
if(router_command == 'end'):
break
elif(router_command == 'help'):
print('\n')
print('default - configure basic command for mikrotik access internet')
print('forward - configure port forwarding for change destination port')
print('limit - configure bandwidth limitation per user')
print('end - back')
print('help - show command')
print('\n')
elif(router_command == 'forward'):
destination_address = input('set destination address\n>>> ')
source_port = input('set source port\n>>> ')
destination_port = input('set destination port\n>>> ')
protocol = input('set protocol type\n>>> ')
internet_access = input('set interface to destination interface\n>>> ')
Set_Port_Forwarding = Router.Mikrotik.Set_Port_Forwarding(destination_address, source_port, destination_port, protocol, internet_access)
stdin, stdout, stderr = Router_API.exec_command(Set_Port_Forwarding)
stdout.readlines()
elif(router_command == 'limit'):
name_queue = input('set name queue\n>>> ')
address = input('set address device\n>>> ')
download_max = input('set max download byte example 256k\n>>> ')
upload_max = input('set max upload byte example 256k\n>>> ')
Set_Simple_Queue = Router.Mikrotik.Set_Simple_Queue(name_queue, address, upload_max, download_max)
stdin, stdout, stderr = Router_API.exec_command(Set_Simple_Queue)
stdout.readlines()
elif(router_command == 'default'):
#command for setting
internet_port = input('set internet port on your router example ether1\n>>> ')
hostname_router = input('\nset hostname on your router\n>>> ')
user_name = input('\nset user local for your router\n>>> ')
user_pass = input('set password for your user\n>>> ')
user_type = input('set the user type for your user(full, read, write)\n>>> ')
choice_next = input('\nwhat is your router uses the dhcp client? y/n\n>>> ')
#for calling package
Basic_Setting_internet = Router.Mikrotik.Basic_Configure.Set_Basic_Access(internet_port)
Basic_Setting_hostname = Router.Mikrotik.Basic_Configure.Set_Hostname(hostname_router)
Basic_Setting_user = Router.Mikrotik.Basic_Configure.Set_User(user_name, user_pass, user_type)
stdin, stdout, stderr = Router_API.exec_command(Basic_Setting_internet + '\n' + Basic_Setting_hostname + '\n' + Basic_Setting_user)
Router_Command_Call = stdout.readlines()
if(choice_next == 'n'):
network_address = input('insert your network address\n>>> ')
gateway_address = input('insert your gateway address\n>>> ')
Basic_Setting_Static_Router = Router.Mikrotik.Basic_Configure.Set_Static_Route(network_address, gateway_address)
stdin, stdout, stderr = Router_API.exec_command(Basic_Setting_Static_Router)
stdout.readlines()
elif(choice_next == 'y'):
continue
else:
print('command error')
else:
print('command error')
elif(ssh_command == 'back'):
break
else:
print('\n')
print('command error')
print('\n')
elif(input_command == 'exit'):
exit()
else:
print('\n')
print('command error')
print('\n')
| 44.411168
| 160
| 0.508972
|
3e0f0bd2b4b852de3e60c3e3469689efa6955839
| 2,537
|
py
|
Python
|
backtoshops/routes/urls.py
|
RaphaelPrevost/Back2Shops
|
5f2d369e82fe2a7b9b3a6c55782319b23d142dfd
|
[
"CECILL-B"
] | null | null | null |
backtoshops/routes/urls.py
|
RaphaelPrevost/Back2Shops
|
5f2d369e82fe2a7b9b3a6c55782319b23d142dfd
|
[
"CECILL-B"
] | 6
|
2021-03-31T19:21:50.000Z
|
2022-01-13T01:46:09.000Z
|
backtoshops/routes/urls.py
|
RaphaelPrevost/Back2Shops
|
5f2d369e82fe2a7b9b3a6c55782319b23d142dfd
|
[
"CECILL-B"
] | null | null | null |
# -*- coding: utf-8 -*-
#############################################################################
#
# Copyright © Dragon Dollar Limited
# contact: contact@dragondollar.com
#
# This software is a collection of webservices designed to provide a secure
# and scalable framework to build e-commerce websites.
#
# This software is governed by the CeCILL-B license under French law and
# abiding by the rules of distribution of free software. You can use,
# modify and/ or redistribute the software under the terms of the CeCILL-B
# license as circulated by CEA, CNRS and INRIA at the following URL
# " http://www.cecill.info".
#
# As a counterpart to the access to the source code and rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty and the software's author, the holder of the
# economic rights, and the successive licensors have only limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading, using, modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean that it is complicated to manipulate, and that also
# therefore means that it is reserved for developers and experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and, more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL-B license and that you accept its terms.
#
#############################################################################
import settings
from django.conf.urls import patterns, url
from routes.views import *
from fouillis.views import admin_required
urlpatterns = patterns(settings.get_site_prefix() + 'routes',
url(r'/get_page_roles/$', get_page_roles, name="get_page_roles"),
url(r'/get_route_params/(?P<pid>\d+)$', get_route_params, name="get_route_params"),
url(r'/$', CreateRouteView.as_view(), name="routes"),
url(r'/(?P<page>\d+)$', CreateRouteView.as_view()),
url(r'/(?P<pk>\d+)/edit$', admin_required(EditRouteView.as_view()), name="edit_route"),
url(r'/(?P<pk>\d+)/edit/(?P<page>\d+)$', EditRouteView.as_view()),
url(r'/(?P<pk>\d+)/delete$', DeleteRouteView.as_view(), name="delete_route"),
)
| 46.127273
| 90
| 0.703587
|
715f552cce772eb09654bf70f4e1eacbeb4bde91
| 864
|
py
|
Python
|
eventkit_cloud/gunicorn.py
|
zta6/eventkit-cloud
|
a9e1aaa2bbfd3d11d3cf3df91e413e6220d6e876
|
[
"BSD-3-Clause"
] | 9
|
2019-02-27T19:42:02.000Z
|
2021-05-09T14:16:28.000Z
|
eventkit_cloud/gunicorn.py
|
zta6/eventkit-cloud
|
a9e1aaa2bbfd3d11d3cf3df91e413e6220d6e876
|
[
"BSD-3-Clause"
] | 46
|
2017-06-27T03:12:57.000Z
|
2018-12-28T19:48:35.000Z
|
eventkit_cloud/gunicorn.py
|
zta6/eventkit-cloud
|
a9e1aaa2bbfd3d11d3cf3df91e413e6220d6e876
|
[
"BSD-3-Clause"
] | 7
|
2017-07-28T18:16:34.000Z
|
2019-01-18T04:41:55.000Z
|
import os
from gunicorn.http import wsgi
# Used to configure gunicorn settings.
def build_header(name, value):
"""
Takes a header name and value and constructs a valid string to add to the headers list.
"""
stripped_value = value.lstrip(" ").rstrip("\r\n").rstrip("\n")
stripped_name = name.rstrip(":")
return f"{stripped_name}: {stripped_value}\r\n"
class Response(wsgi.Response):
def default_headers(self, *args, **kwargs):
headers = super(Response, self).default_headers(*args, **kwargs)
content_security_policy = os.getenv("CONTENT_SECURITY_POLICY", "").replace('"', "'")
if content_security_policy:
headers.append(build_header("Content-Security-Policy", content_security_policy))
return [header for header in headers if not header.startswith("Server:")]
wsgi.Response = Response
| 32
| 92
| 0.689815
|
f8219b6911425c43ff07a535e0179294b0d82c0b
| 7,305
|
py
|
Python
|
SVM/SMS_spam_classifier.py
|
daniel-yj-yang/ml
|
075b348025e4eca416a6587f360feb2c73422ecb
|
[
"BSD-3-Clause"
] | null | null | null |
SVM/SMS_spam_classifier.py
|
daniel-yj-yang/ml
|
075b348025e4eca416a6587f360feb2c73422ecb
|
[
"BSD-3-Clause"
] | null | null | null |
SVM/SMS_spam_classifier.py
|
daniel-yj-yang/ml
|
075b348025e4eca416a6587f360feb2c73422ecb
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 10 23:20:59 2020
@author: daniel
"""
# https://github.com/DTrimarchi10/confusion_matrix/blob/master/cf_matrix.py
from machlearn import model_evaluation as me
from textblob import TextBlob
# StratifiedKFold, cross_val_score,
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.pipeline import Pipeline
# , f1_score, accuracy_score
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.svm import SVC # , LinearSVC
from sklearn.naive_bayes import MultinomialNB
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
#import sklearn
import csv
import pickle
import getopt
import sys
import os
import pandas as pd
#import numpy as np
#import matplotlib.pyplot as plt
#import seaborn as sns
#plt.rcParams.update({'font.size': 20})
#plt.rcParams.update({'figure.figsize': (8, 6)})
##############################################################################################################################
# modified from https://www.panggi.com/articles/sms-spam-filter-using-scikit-learn-and-textblob/
# converted from python2 to python3
# Imports
#import nltk
# Dataset
MESSAGES = pd.read_csv('/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/SMSSpamCollection',
sep='\t', quoting=csv.QUOTE_NONE, names=["label", "message"])
# Preprocessing
def tokens(message):
message = str(message) # , 'utf8')
return TextBlob(message).words
def lemmas(message):
message = str(message).lower() # ), 'utf8').lower()
words = TextBlob(message).words
return [word.lemma for word in words]
# Training
def train_multinomial_nb(messages):
# split dataset for cross validation
msg_train, msg_test, label_train, label_test = train_test_split(
messages['message'], messages['label'], test_size=0.2, random_state=123)
# create pipeline
pipeline = Pipeline([('bow', CountVectorizer(analyzer=lemmas)),
('tfidf', TfidfTransformer()), ('classifier', MultinomialNB())])
# pipeline parameters to automatically explore and tune
params = {
'tfidf__use_idf': (True, False),
'bow__analyzer': (lemmas, tokens),
}
grid = GridSearchCV(
pipeline,
params, # parameters to tune via cross validation
refit=True, # fit using all data, on the best detected classifier
n_jobs=-1,
scoring='accuracy',
cv=5, # StratifiedKFold(n_splits=5).split(label_train),
)
# train
nb_detector = grid.fit(msg_train, label_train)
print("")
y_test = label_test
y_pred = nb_detector.predict(msg_test)
y_score = nb_detector.predict_proba(msg_test)
print(":: Confusion Matrix")
print("")
cf_matrix = confusion_matrix(y_test, y_pred)
print(cf_matrix)
# https://medium.com/@dtuk81/confusion-matrix-visualization-fc31e3f30fea
me.plot_confusion_matrix(y_test, y_pred, y_classes=('Ham (y=0)', 'Spam (y=1)'))
me.plot_ROC_and_PR_curves(fitted_model=nb_detector, X=msg_test,
y_true=y_test, y_pred_score=y_score[:, 1], y_pos_label='spam', model_name='Multinomial NB')
print("")
print(":: Classification Report")
print("")
print(classification_report(y_test, y_pred))
# save model to pickle file
file_name = '/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/ml_models/sms_spam_nb_model.pkl'
with open(file_name, 'wb') as fout:
pickle.dump(nb_detector, fout)
print('model written to: ' + file_name)
def train_svm(messages):
# split dataset for cross validation
msg_train, msg_test, label_train, label_test = train_test_split(
messages['message'], messages['label'], test_size=0.2, random_state=123)
# create pipeline
pipeline = Pipeline([('bow', CountVectorizer(analyzer=lemmas)),
('tfidf', TfidfTransformer()), ('classifier', SVC(probability=True))])
# pipeline parameters to automatically explore and tune
params = [
{'classifier__C': [0.1, 1, 10, 100, 1000],
'classifier__kernel': ['linear']},
{'classifier__C': [0.1, 1, 10, 100, 1000], 'classifier__gamma': [
0.001, 0.0001], 'classifier__kernel': ['rbf']},
]
grid = GridSearchCV(
pipeline,
param_grid=params, # parameters to tune via cross validation
refit=True, # fit using all data, on the best detected classifier
n_jobs=-1,
scoring='accuracy',
cv=5 # StratifiedKFold(label_train, n_splits=5),
)
# train
svm_detector = grid.fit(msg_train, label_train)
print("")
y_test = label_test
y_pred = svm_detector.predict(msg_test)
y_score = svm_detector.predict_proba(msg_test)
print(":: Confusion Matrix")
print("")
cf_matrix = confusion_matrix(y_test, y_pred)
print(cf_matrix)
# https://medium.com/@dtuk81/confusion-matrix-visualization-fc31e3f30fea
me.plot_confusion_matrix(y_test, y_pred, y_classes=('Ham (y=0)', 'Spam (y=1)'))
me.plot_ROC_and_PR_curves(fitted_model=svm_detector, X=msg_test,
y_true=y_test, y_pred_score=y_score[:, 1], y_pos_label='spam', model_name='SVM')
print("")
print(":: Classification Report")
print("")
print(classification_report(y_test, y_pred))
# save model to pickle file
file_name = '/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/ml_models/sms_spam_svm_model.pkl'
with open(file_name, 'wb') as fout:
pickle.dump(svm_detector, fout)
print('model written to: ' + file_name)
def main(argv):
# check if models exist, if not run training
if(os.path.isfile('/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/ml_models/sms_spam_nb_model.pkl') == False):
print("")
print("Creating Naive Bayes Model.....")
train_multinomial_nb(MESSAGES)
if(os.path.isfile('/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/ml_models/sms_spam_svm_model.pkl') == False):
print("")
print("Creating SVM Model.....")
train_svm(MESSAGES)
#inputmessage = ''
try:
opts, args = getopt.getopt(argv, "hm:", ["message="])
except getopt.GetoptError:
print('SMS_spam_classifier.py -m <message string>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('SMS_spam_classifier.py -m <message string>')
sys.exit()
elif opt in ("-m", "--message"):
prediction = predict(arg)
print('This message is predicted by', prediction)
else:
print('SMS_spam_classifier.py -m <message string>')
sys.exit()
def predict(message):
nb_detector = pickle.load(open(
'/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/ml_models/sms_spam_nb_model.pkl', 'rb'))
svm_detector = pickle.load(open(
'/Users/daniel/Data-Science/Data/Spam/SMS-Spam-Collection/ml_models/sms_spam_svm_model.pkl', 'rb'))
nb_predict = nb_detector.predict([message])[0]
svm_predict = svm_detector.predict([message])[0]
return 'SVM as ' + svm_predict + ' and Naive Bayes as ' + nb_predict
if __name__ == "__main__":
main(sys.argv[1:])
| 34.457547
| 126
| 0.657906
|
66a504538549984dd64477a3e76f3bb96316c4c1
| 669
|
py
|
Python
|
var/spack/repos/builtin/packages/py-nestle/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2
|
2019-02-10T13:47:48.000Z
|
2019-04-17T13:05:17.000Z
|
var/spack/repos/builtin/packages/py-nestle/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 32
|
2020-12-15T17:29:20.000Z
|
2022-03-21T15:08:31.000Z
|
var/spack/repos/builtin/packages/py-nestle/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2
|
2021-07-19T20:31:27.000Z
|
2021-07-19T21:14:14.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNestle(PythonPackage):
"""Nested sampling algorithms for evaluating Bayesian evidence."""
homepage = "http://kbarbary.github.io/nestle/"
pypi = "nestle/nestle-0.1.1.tar.gz"
version('0.1.1', sha256='d236a04f25494af5cda572eecf62729592b3231fbd874b1f72aff54718a3bb08')
# Required dependencies
depends_on('py-numpy', type=('build', 'run'))
# Optional dependencies
depends_on('py-scipy', type=('build', 'run'))
| 30.409091
| 95
| 0.723468
|
5dcc39bdff506f5f41ad35f250c50929c6b36381
| 1,109
|
py
|
Python
|
src/utils/analysis.py
|
JetBrains-Research/pubtrends-nature-reviews
|
c5806d64805726733348bdb60c9243cbf9232d02
|
[
"Apache-2.0"
] | 1
|
2022-01-27T13:34:46.000Z
|
2022-01-27T13:34:46.000Z
|
src/utils/analysis.py
|
JetBrains-Research/pubtrends-nature-reviews
|
c5806d64805726733348bdb60c9243cbf9232d02
|
[
"Apache-2.0"
] | null | null | null |
src/utils/analysis.py
|
JetBrains-Research/pubtrends-nature-reviews
|
c5806d64805726733348bdb60c9243cbf9232d02
|
[
"Apache-2.0"
] | 1
|
2022-01-05T04:10:53.000Z
|
2022-01-05T04:10:53.000Z
|
import logging
from pysrc.papers.analysis.graph import build_papers_graph
logger = logging.getLogger(__name__)
def get_direct_references_subgraph(analyzer, pmid):
"""
Extract subgraph of the papers graph containing only direct references
of the paper with given `pmid`.
"""
logger.info('Analyzing papers graph')
analyzer.papers_graph = build_papers_graph(
analyzer.df, analyzer.cit_df, analyzer.cocit_grouped_df, analyzer.bibliographic_coupling_df,
)
references = list(analyzer.cit_df[analyzer.cit_df['id_out'] == pmid]['id_in'])
references.append(pmid)
return analyzer.papers_graph.subgraph(references)
def align_clustering_for_sklearn(partition, ground_truth):
# Get clustering subset only with IDs present in ground truth dict
actual_clustering = {k: v for k, v in partition.items() if k in ground_truth}
# Align clustering
labels_true = []
labels_pred = []
for pmid in actual_clustering:
labels_true.append(ground_truth[pmid])
labels_pred.append(actual_clustering[pmid])
return labels_true, labels_pred
| 30.805556
| 100
| 0.738503
|
d7a98d784b14dba96a32957f596e981638829fa4
| 204
|
py
|
Python
|
libraries/website/tests/examples/test_swarm_solver.py
|
justindujardin/mathy
|
776ac528d4586d6ea558a7212adb3559ea487a3c
|
[
"MIT"
] | 95
|
2020-01-02T23:02:34.000Z
|
2022-03-08T18:57:24.000Z
|
libraries/website/tests/examples/test_swarm_solver.py
|
justindujardin/mathy
|
776ac528d4586d6ea558a7212adb3559ea487a3c
|
[
"MIT"
] | 44
|
2020-01-05T03:07:45.000Z
|
2021-08-11T20:45:53.000Z
|
libraries/website/tests/examples/test_swarm_solver.py
|
justindujardin/mathy
|
776ac528d4586d6ea558a7212adb3559ea487a3c
|
[
"MIT"
] | 5
|
2020-04-04T16:46:36.000Z
|
2022-03-30T08:10:02.000Z
|
def test_swarm_random_task():
from ...docs.snippets.examples import swarm_random_task # noqa
def test_swarm_generate_data():
from ...docs.snippets.examples import swarm_data_generation # noqa
| 29.142857
| 71
| 0.77451
|
8a2dd6d057cc66d44dbd17e0c86e0c5ddaca0bb0
| 7,345
|
py
|
Python
|
sonnet/python/modules/layer_norm.py
|
ankitshah009/sonnet
|
a07676192c6d0f2ed5967d6bc367d62e55835baf
|
[
"Apache-2.0"
] | 3
|
2019-07-31T12:36:26.000Z
|
2020-12-16T14:37:19.000Z
|
sonnet/python/modules/layer_norm.py
|
ankitshah009/sonnet
|
a07676192c6d0f2ed5967d6bc367d62e55835baf
|
[
"Apache-2.0"
] | null | null | null |
sonnet/python/modules/layer_norm.py
|
ankitshah009/sonnet
|
a07676192c6d0f2ed5967d6bc367d62e55835baf
|
[
"Apache-2.0"
] | 3
|
2019-07-29T08:55:20.000Z
|
2019-07-30T06:36:56.000Z
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Layer normalization module for Sonnet.
This contains the module LayerNorm, which performs layer normalization over
configurable axes of its inputs.
Original paper: https://arxiv.org/abs/1607.06450.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from sonnet.python.modules import base
from sonnet.python.modules import util
import tensorflow as tf
class LayerNorm(base.AbstractModule):
"""Layer normalization module.
Implementation based on:
https://arxiv.org/abs/1607.06450
This module transforms input x into:
outputs = gamma * (x - mu) / sigma + beta
where mu and sigma are respectively the mean and standard deviation of x.
Gamma and beta are trainable parameters for scaling and shifting respectively.
Since the axes over which normalization is perfomed is configurable, this also
subsumes instance normalization.
"""
GAMMA = "gamma" # Layer norm scaling.
BETA = "beta" # Layer norm bias.
POSSIBLE_INITIALIZER_KEYS = {GAMMA, BETA}
def __init__(self, axis=None, offset=True, scale=True, eps=1e-5,
initializers=None, partitioners=None, regularizers=None,
name="layer_norm"):
"""Constructs a LayerNorm module.
Args:
axis: Optional dimension or iterable of indices of dimensions to normalize
and reduce over. By default `None` and all dimensions except the
first/batch dimension are reduced over. If the input tensor represents
an image, summing over all except the batch and channel dimensions (e.g.
for image format NHWC, axes=[1,2]), then this module corresponds to
Instance Normalization (https://arxiv.org/abs/1607.08022).
offset: Optional boolean to specify whether or not to apply a trained
component-wise bias after the layer normalization and scaling.
scale: Optional boolean to specify whether or not to apply a trained
component-wise scale after the layer normalization.
eps: small epsilon to avoid division by zero variance. Defaults to
1e-5 as used in the paper.
initializers: Dict containing ops to initialize the scale
(with key 'gamma') and bias (with key 'beta').
partitioners: Optional dict containing partitioners to partition
the scale (with key 'gamma') and bias (with key 'beta'). As a default,
no partitioners are used.
regularizers: Optional dict containing regularizers for the scale (with
key 'gamma') and bias (with key 'beta').. As a default, no regularizers
are used.
name: name of the module.
Raises:
KeyError: If `initializers`, `partitioners` or `regularizers` contain
any keys other than `gamma` or `beta`.
TypeError: If any of the given initializers, partitioners or regularizers
are not callable.
"""
super(LayerNorm, self).__init__(name=name)
if axis is not None:
if isinstance(axis, int):
axis = [axis]
int_not_bool = lambda x: isinstance(x, int) and not isinstance(x, bool)
if (not isinstance(axis, collections.Iterable) or
not all(int_not_bool(ax) for ax in axis)):
raise ValueError("axis should be an int or an iterable of ints")
self._axis = axis
self._offset = offset
self._scale = scale
self._eps = eps
self._initializers = util.check_initializers(initializers,
self.POSSIBLE_INITIALIZER_KEYS)
self._partitioners = util.check_partitioners(partitioners,
self.POSSIBLE_INITIALIZER_KEYS)
self._regularizers = util.check_regularizers(regularizers,
self.POSSIBLE_INITIALIZER_KEYS)
def _build(self, inputs):
"""Connects the LayerNorm module into the graph.
Args:
inputs: a Tensor of dimensionality >= 2.
Returns:
normalized: layer normalized outputs with same shape as inputs.
Raises:
base.NotSupportedError: If `inputs` has less than 2 dimensions.
"""
if self._axis is None:
axis = list(range(1, inputs.shape.ndims))
else:
axis = self._axis
original_dtype = inputs.dtype
if original_dtype in [tf.float16, tf.bfloat16]:
inputs = tf.cast(inputs, tf.float32)
if inputs.get_shape().ndims < 2:
raise base.NotSupportedError(
"Layer normalization expects inputs of at least rank 2."
" Got inputs of rank {}.".format(inputs.get_shape().ndims))
# Shape for the learnable scale and offset is the number of channels. See
# https://arxiv.org/pdf/1803.08494.pdf around equation 6.
params_shape = inputs.get_shape()[-1:]
if self._scale:
if self.GAMMA not in self._initializers:
self._initializers[self.GAMMA] = create_gamma_initializer()
self._gamma = tf.get_variable(
self.GAMMA,
shape=params_shape,
dtype=inputs.dtype,
initializer=self._initializers[self.GAMMA],
partitioner=self._partitioners.get(self.GAMMA),
regularizer=self._regularizers.get(self.GAMMA))
else:
self._gamma = None
if self._offset:
if self.BETA not in self._initializers:
self._initializers[self.BETA] = create_beta_initializer()
self._beta = tf.get_variable(
self.BETA,
shape=params_shape,
dtype=inputs.dtype,
initializer=self._initializers[self.BETA],
partitioner=self._partitioners.get(self.BETA),
regularizer=self._regularizers.get(self.BETA))
else:
self._beta = None
mean, var = tf.nn.moments(inputs, axis, keep_dims=True)
normalized = tf.nn.batch_normalization(inputs, mean, var, self._beta,
self._gamma, self._eps)
if original_dtype in [tf.float16, tf.bfloat16]:
normalized = tf.cast(normalized, dtype=original_dtype)
return normalized
@property
def initializers(self):
return self._initializers
@property
def partitioners(self):
return self._partitioners
@property
def regularizers(self):
return self._regularizers
@property
def beta(self):
self._ensure_is_connected()
return self._beta
@property
def gamma(self):
self._ensure_is_connected()
return self._gamma
def create_beta_initializer():
"""Returns a default initializer for the `beta` in layer norm."""
return tf.zeros_initializer()
def create_gamma_initializer():
"""Returns a default initializer for the `gamma` in layer norm."""
return tf.ones_initializer()
| 34.646226
| 80
| 0.677059
|
8b49db26652db52eeb6553f99274498c9649133a
| 64,422
|
py
|
Python
|
doozerlib/runtime.py
|
thegreyd/doozer
|
89d2cad82f831aa6594c90293ec3923c08b343d6
|
[
"Apache-2.0"
] | null | null | null |
doozerlib/runtime.py
|
thegreyd/doozer
|
89d2cad82f831aa6594c90293ec3923c08b343d6
|
[
"Apache-2.0"
] | null | null | null |
doozerlib/runtime.py
|
thegreyd/doozer
|
89d2cad82f831aa6594c90293ec3923c08b343d6
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import, print_function, unicode_literals
from future import standard_library
standard_library.install_aliases()
from future.utils import as_native_str
from multiprocessing.dummy import Pool as ThreadPool
from contextlib import contextmanager
from collections import namedtuple
import os
import sys
import tempfile
import threading
import shutil
import atexit
import datetime
import re
import yaml
import click
import logging
import functools
import traceback
import urllib.parse
import signal
import io
import pathlib
import koji
from typing import Optional, List, Dict, Tuple
import time
from doozerlib import gitdata
from . import logutil
from . import assertion
from . import exectools
from . import dblib
from .pushd import Dir
from .image import ImageMetadata
from .rpmcfg import RPMMetadata
from .metadata import Metadata, RebuildHint
from doozerlib import state
from .model import Model, Missing
from multiprocessing import Lock, RLock, Semaphore
from .repos import Repos
from doozerlib.exceptions import DoozerFatalError
from doozerlib import constants
from doozerlib import util
from doozerlib import brew
# Values corresponds to schema for group.yml: freeze_automation. When
# 'yes', doozer itself will inhibit build/rebase related activity
# (exiting with an error if someone tries). Other values can
# be interpreted & enforced by the build pipelines (e.g. by
# invoking config:read-config).
FREEZE_AUTOMATION_YES = 'yes'
FREEZE_AUTOMATION_SCHEDULED = 'scheduled' # inform the pipeline that only manually run tasks should be permitted
FREEZE_AUTOMATION_NO = 'no'
# doozer cancel brew builds on SIGINT (Ctrl-C)
# but Jenkins sends a SIGTERM when cancelling a job.
def handle_sigterm(*_):
raise KeyboardInterrupt()
signal.signal(signal.SIGTERM, handle_sigterm)
# Registered atexit to close out debug/record logs
def close_file(f):
f.close()
def remove_tmp_working_dir(runtime):
if runtime.remove_tmp_working_dir:
shutil.rmtree(runtime.working_dir)
else:
click.echo("Temporary working directory preserved by operation: %s" % runtime.working_dir)
class WrapException(Exception):
""" https://bugs.python.org/issue13831 """
def __init__(self):
super(WrapException, self).__init__()
exc_type, exc_value, exc_tb = sys.exc_info()
self.exception = exc_value
self.formatted = "".join(
traceback.format_exception(exc_type, exc_value, exc_tb))
@as_native_str()
def __str__(self):
return "{}\nOriginal traceback:\n{}".format(Exception.__str__(self), self.formatted)
def wrap_exception(func):
""" Decorate a function, wrap exception if it occurs. """
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception:
raise WrapException()
return wrapper
def _unpack_tuple_args(func):
""" Decorate a function for unpacking the tuple argument `args`
This is used to workaround Python 3 lambda not unpacking tuple arguments (PEP-3113)
"""
@functools.wraps(func)
def wrapper(args):
return func(*args)
return wrapper
# A named tuple for caching the result of Runtime._resolve_source.
SourceResolution = namedtuple('SourceResolution', [
'source_path', 'url', 'branch', 'public_upstream_url', 'public_upstream_branch'
])
# ============================================================================
# Runtime object definition
# ============================================================================
class Runtime(object):
# Use any time it is necessary to synchronize feedback from multiple threads.
mutex = RLock()
# Serialize access to the shared koji session
koji_lock = RLock()
# Serialize access to the console, and record log
log_lock = Lock()
def __init__(self, **kwargs):
# initialize defaults in case no value is given
self.verbose = False
self.quiet = False
self.load_wip = False
self.load_disabled = False
self.data_path = None
self.data_dir = None
self.latest_parent_version = False
self.rhpkg_config = None
self._koji_client_session = None
self.db = None
self.session_pool = {}
self.session_pool_available = {}
self.brew_event = None
self.assembly = 'test'
self.stream: List[str] = [] # Click option. A list of image stream overrides from the command line.
self.stream_overrides: Dict[str, str] = {} # Dict of stream name -> pullspec from command line.
self.upstreams: List[str] = [] # Click option. A list of upstream source commit to use.
self.upstream_commitish_overrides: Dict[str, str] = {} # Dict from distgit key name to upstream source commit to use.
self.downstreams: List[str] = [] # Click option. A list of distgit commits to checkout.
self.downstream_commitish_overrides: Dict[str, str] = {} # Dict from distgit key name to distgit commit to check out.
# See get_named_semaphore. The empty string key serves as a lock for the data structure.
self.named_semaphores = {'': Lock()}
for key, val in kwargs.items():
self.__dict__[key] = val
if self.latest_parent_version:
self.ignore_missing_base = True
self._remove_tmp_working_dir = False
self.group_config = None
self.cwd = os.getcwd()
# If source needs to be cloned by oit directly, the directory in which it will be placed.
self.sources_dir = None
self.distgits_dir = None
self.record_log = None
self.record_log_path = None
self.debug_log_path = None
self.brew_logs_dir = None
self.flags_dir = None
# Map of dist-git repo name -> ImageMetadata object. Populated when group is set.
self.image_map = {}
# Map of dist-git repo name -> RPMMetadata object. Populated when group is set.
self.rpm_map = {}
# Map of source code repo aliases (e.g. "ose") to a tuple representing the source resolution cache.
# See registry_repo.
self.source_resolutions = {}
# Map of source code repo aliases (e.g. "ose") to a (public_upstream_url, public_upstream_branch) tuple.
# See registry_repo.
self.public_upstreams = {}
self.initialized = False
# Will be loaded with the streams.yml Model
self.streams = {}
self.uuid = None
# Optionally available if self.fetch_rpms_for_tag() is called
self.rpm_list = None
self.rpm_search_tree = None
# Used for image build ordering
self.image_tree = {}
self.image_order = []
# allows mapping from name or distgit to meta
self.image_name_map = {}
# holds untouched group config
self.raw_group_config = {}
# Used to capture missing packages for 4.x build
self.missing_pkgs = set()
# Whether to prevent builds for this group. Defaults to 'no'.
self.freeze_automation = FREEZE_AUTOMATION_NO
self.rhpkg_config_lst = []
if self.rhpkg_config:
if not os.path.isfile(self.rhpkg_config):
raise DoozerFatalError('--rhpkg-config option given is not a valid file! {}'.format(self.rhpkg_config))
self.rhpkg_config = ' --config {} '.format(self.rhpkg_config)
self.rhpkg_config_lst = self.rhpkg_config.split()
else:
self.rhpkg_config = ''
def get_named_semaphore(self, lock_name, is_dir=False, count=1):
"""
Returns a semaphore (which can be used as a context manager). The first time a lock_name
is received, a new semaphore will be established. Subsequent uses of that lock_name will
receive the same semaphore.
:param lock_name: A unique name for resource threads are contending over. If using a directory name
as a lock_name, provide an absolute path.
:param is_dir: The lock_name is a directory (method will ignore things like trailing slashes)
:param count: The number of times the lock can be claimed. Default=1, which is a full mutex.
:return: A semaphore associated with the lock_name.
"""
with self.named_semaphores['']:
if is_dir:
p = '_dir::' + str(pathlib.Path(str(lock_name)).absolute()) # normalize (e.g. strip trailing /)
else:
p = lock_name
if p in self.named_semaphores:
return self.named_semaphores[p]
else:
new_semaphore = Semaphore(count)
self.named_semaphores[p] = new_semaphore
return new_semaphore
def get_group_config(self):
# group.yml can contain a `vars` section which should be a
# single level dict containing keys to str.format(**dict) replace
# into the YAML content. If `vars` found, the format will be
# preformed and the YAML model will reloaded from that result
tmp_config = Model(self.gitdata.load_data(key='group').data)
replace_vars = tmp_config.vars or Model()
if self.assembly:
replace_vars['runtime_assembly'] = self.assembly
if replace_vars is not Missing:
try:
group_yml = yaml.safe_dump(tmp_config.primitive(), default_flow_style=False)
self.raw_group_config = yaml.full_load(group_yml.format(**replace_vars))
tmp_config = Model(dict(self.raw_group_config))
except KeyError as e:
raise ValueError('group.yml contains template key `{}` but no value was provided'.format(e.args[0]))
return tmp_config
def init_state(self):
self.state = dict(state.TEMPLATE_BASE_STATE)
if os.path.isfile(self.state_file):
with io.open(self.state_file, 'r', encoding='utf-8') as f:
self.state = yaml.full_load(f)
self.state.update(state.TEMPLATE_BASE_STATE)
def save_state(self):
with io.open(self.state_file, 'w', encoding='utf-8') as f:
yaml.safe_dump(self.state, f, default_flow_style=False)
def initialize(self, mode='images', clone_distgits=True,
validate_content_sets=False,
no_group=False, clone_source=None, disabled=None,
prevent_cloning: bool = False):
if self.initialized:
return
if self.quiet and self.verbose:
click.echo("Flags --quiet and --verbose are mutually exclusive")
exit(1)
# We could mark these as required and the click library would do this for us,
# but this seems to prevent getting help from the various commands (unless you
# specify the required parameters). This can probably be solved more cleanly, but TODO
if not no_group and self.group is None:
click.echo("Group must be specified")
exit(1)
if self.lock_runtime_uuid:
self.uuid = self.lock_runtime_uuid
else:
self.uuid = datetime.datetime.now().strftime("%Y%m%d.%H%M%S")
if self.working_dir is None:
self.working_dir = tempfile.mkdtemp(".tmp", "oit-")
# This can be set to False by operations which want the working directory to be left around
self.remove_tmp_working_dir = True
atexit.register(remove_tmp_working_dir, self)
else:
self.working_dir = os.path.abspath(os.path.expanduser(self.working_dir))
if not os.path.isdir(self.working_dir):
os.makedirs(self.working_dir)
self.distgits_dir = os.path.join(self.working_dir, "distgits")
self.distgits_diff_dir = os.path.join(self.working_dir, "distgits-diffs")
self.sources_dir = os.path.join(self.working_dir, "sources")
self.record_log_path = os.path.join(self.working_dir, "record.log")
self.brew_logs_dir = os.path.join(self.working_dir, "brew-logs")
self.flags_dir = os.path.join(self.working_dir, "flags")
self.state_file = os.path.join(self.working_dir, 'state.yaml')
self.debug_log_path = os.path.join(self.working_dir, "debug.log")
if self.upcycle:
# A working directory may be upcycle'd numerous times.
# Don't let anything grow unbounded.
shutil.rmtree(self.brew_logs_dir, ignore_errors=True)
shutil.rmtree(self.flags_dir, ignore_errors=True)
for path in (self.record_log_path, self.state_file, self.debug_log_path):
if os.path.exists(path):
os.unlink(path)
if not os.path.isdir(self.distgits_dir):
os.mkdir(self.distgits_dir)
if not os.path.isdir(self.distgits_diff_dir):
os.mkdir(self.distgits_diff_dir)
if not os.path.isdir(self.sources_dir):
os.mkdir(self.sources_dir)
if disabled is not None:
self.load_disabled = disabled
self.initialize_logging()
self.init_state()
self.db = dblib.DB(self, self.datastore)
self.logger.info(f'Initial execution (cwd) directory: {os.getcwd()}')
if no_group:
return # nothing past here should be run without a group
if '@' in self.group:
self.group, self.group_commitish = self.group.split('@', 1)
else:
self.group_commitish = self.group
# For each "--stream alias image" on the command line, register its existence with
# the runtime.
for s in self.stream:
self.register_stream_override(s[0], s[1])
for upstream in self.upstreams:
override_distgit_key = upstream[0]
override_commitish = upstream[1]
self.logger.warning(f'Upstream source for {override_distgit_key} being set to {override_commitish}')
self.upstream_commitish_overrides[override_distgit_key] = override_commitish
for upstream in self.downstreams:
override_distgit_key = upstream[0]
override_commitish = upstream[1]
self.logger.warning(f'Downstream distgit for {override_distgit_key} will be checked out to {override_commitish}')
self.downstream_commitish_overrides[override_distgit_key] = override_commitish
self.resolve_metadata()
self.record_log = io.open(self.record_log_path, 'a', encoding='utf-8')
atexit.register(close_file, self.record_log)
# Directory where brew-logs will be downloaded after a build
if not os.path.isdir(self.brew_logs_dir):
os.mkdir(self.brew_logs_dir)
# Directory for flags between invocations in the same working-dir
if not os.path.isdir(self.flags_dir):
os.mkdir(self.flags_dir)
if self.cache_dir:
self.cache_dir = os.path.abspath(self.cache_dir)
self.group_dir = self.gitdata.data_dir
self.group_config = self.get_group_config()
self.hotfix = False # True indicates builds should be tagged with associated hotfix tag for the artifacts branch
if self.group_config.assemblies.enabled or self.enable_assemblies:
if re.fullmatch(r'[\w.]+', self.assembly) is None or self.assembly[0] == '.' or self.assembly[-1] == '.':
raise ValueError('Assembly names may only consist of alphanumerics, ., and _, but not start or end with a dot (.).')
# FIXME: Hardcoding !=stream in code until we come up with a way to construct meaningful metadata for this convention in group.yml or releases.yml.
self.hotfix = self.assembly != "stream"
else:
# If assemblies are not enabled for the group,
# ignore this argument throughout doozer.
self.assembly = None
# register the sources
# For each "--source alias path" on the command line, register its existence with
# the runtime.
for r in self.source:
self.register_source_alias(r[0], r[1])
if self.sources:
with io.open(self.sources, 'r', encoding='utf-8') as sf:
source_dict = yaml.full_load(sf)
if not isinstance(source_dict, dict):
raise ValueError('--sources param must be a yaml file containing a single dict.')
for key, val in source_dict.items():
self.register_source_alias(key, val)
with Dir(self.group_dir):
# Flattens multiple comma/space delimited lists like [ 'x', 'y,z' ] into [ 'x', 'y', 'z' ]
def flatten_list(names):
if not names:
return []
# split csv values
result = []
for n in names:
result.append([x for x in n.replace(' ', ',').split(',') if x != ''])
# flatten result and remove dupes using set
return list(set([y for x in result for y in x]))
def filter_wip(n, d):
return d.get('mode', 'enabled') in ['wip', 'enabled']
def filter_enabled(n, d):
return d.get('mode', 'enabled') == 'enabled'
def filter_disabled(n, d):
return d.get('mode', 'enabled') in ['enabled', 'disabled']
cli_arches_override = flatten_list(self.arches)
if cli_arches_override: # Highest priority overrides on command line
self.arches = cli_arches_override
elif self.group_config.arches_override: # Allow arches_override in group.yaml to temporarily override GA architectures
self.arches = self.group_config.arches_override
else:
self.arches = self.group_config.get('arches', ['x86_64'])
# If specified, signed repo files will be generated to enforce signature checks.
self.gpgcheck = self.group_config.build_profiles.image.signed.gpgcheck
if self.gpgcheck is Missing:
# We should only really be building the latest release with unsigned RPMs, so default to True
self.gpgcheck = True
self.repos = Repos(self.group_config.repos, self.arches, self.gpgcheck)
self.freeze_automation = self.group_config.freeze_automation or FREEZE_AUTOMATION_NO
if validate_content_sets:
self.repos.validate_content_sets()
if self.group_config.name != self.group:
raise IOError(
"Name in group.yml does not match group name. Someone may have copied this group without updating group.yml (make sure to check branch)")
if self.branch is None:
if self.group_config.branch is not Missing:
self.branch = self.group_config.branch
self.logger.info("Using branch from group.yml: %s" % self.branch)
else:
self.logger.info("No branch specified either in group.yml or on the command line; all included images will need to specify their own.")
else:
self.logger.info("Using branch from command line: %s" % self.branch)
scanner = self.group_config.image_build_log_scanner
if scanner is not Missing:
# compile regexen and fail early if they don't
regexen = []
for val in scanner.matches:
try:
regexen.append(re.compile(val))
except Exception as e:
raise ValueError(
"could not compile image build log regex for group:\n{}\n{}"
.format(val, e)
)
scanner.matches = regexen
exclude_keys = flatten_list(self.exclude)
image_ex = list(exclude_keys)
rpm_ex = list(exclude_keys)
image_keys = flatten_list(self.images)
rpm_keys = flatten_list(self.rpms)
filter_func = None
if self.load_wip and self.load_disabled:
pass # use no filter, load all
elif self.load_wip:
filter_func = filter_wip
elif self.load_disabled:
filter_func = filter_disabled
else:
filter_func = filter_enabled
replace_vars = {}
if self.group_config.vars:
replace_vars = self.group_config.vars.primitive()
if self.assembly:
replace_vars['runtime_assembly'] = self.assembly
# pre-load the image data to get the names for all images
# eventually we can use this to allow loading images by
# name or distgit. For now this is used elsewhere
image_name_data = self.gitdata.load_data(path='images')
for img in image_name_data.values():
name = img.data.get('name')
short_name = name.split('/')[1]
self.image_name_map[name] = img.key
self.image_name_map[short_name] = img.key
image_data = self.gitdata.load_data(path='images', keys=image_keys,
exclude=image_ex,
replace_vars=replace_vars,
filter_funcs=None if len(image_keys) else filter_func)
try:
rpm_data = self.gitdata.load_data(path='rpms', keys=rpm_keys,
exclude=rpm_ex,
replace_vars=replace_vars,
filter_funcs=None if len(rpm_keys) else filter_func)
except gitdata.GitDataPathException:
# some older versions have no RPMs, that's ok.
rpm_data = {}
missed_include = set(image_keys + rpm_keys) - set(list(image_data.keys()) + list(rpm_data.keys()))
if len(missed_include) > 0:
raise DoozerFatalError('The following images or rpms were either missing or filtered out: {}'.format(', '.join(missed_include)))
if mode in ['images', 'both']:
for i in image_data.values():
metadata = ImageMetadata(self, i, self.upstream_commitish_overrides.get(i.key), clone_source=clone_source, prevent_cloning=prevent_cloning)
self.image_map[metadata.distgit_key] = metadata
if not self.image_map:
self.logger.warning("No image metadata directories found for given options within: {}".format(self.group_dir))
for image in self.image_map.values():
image.resolve_parent()
# now that ancestry is defined, make sure no cyclic dependencies
for image in self.image_map.values():
for child in image.children:
if image.is_ancestor(child):
raise DoozerFatalError('{} cannot be both a parent and dependent of {}'.format(child.distgit_key, image.distgit_key))
self.generate_image_tree()
if mode in ['rpms', 'both']:
for r in rpm_data.values():
if clone_source is None:
# Historically, clone_source defaulted to True for rpms.
clone_source = True
metadata = RPMMetadata(self, r, self.upstream_commitish_overrides.get(r.key), clone_source=clone_source, prevent_cloning=prevent_cloning)
self.rpm_map[metadata.distgit_key] = metadata
if not self.rpm_map:
self.logger.warning("No rpm metadata directories found for given options within: {}".format(self.group_dir))
# Make sure that the metadata is not asking us to check out the same exact distgit & branch.
# This would almost always indicate someone has checked in duplicate metadata into a group.
no_collide_check = {}
for meta in list(self.rpm_map.values()) + list(self.image_map.values()):
key = '{}/{}/#{}'.format(meta.namespace, meta.name, meta.branch())
if key in no_collide_check:
raise IOError('Complete duplicate distgit & branch; something wrong with metadata: {} from {} and {}'.format(key, meta.config_filename, no_collide_check[key].config_filename))
no_collide_check[key] = meta
# Read in the streams definite for this group if one exists
streams = self.gitdata.load_data(key='streams')
if streams:
self.streams = Model(self.gitdata.load_data(key='streams', replace_vars=replace_vars).data)
if clone_distgits:
self.clone_distgits()
self.initialized = True
def initialize_logging(self):
if self.initialized:
return
# Three flags control the output modes of the command:
# --verbose prints logs to CLI as well as to files
# --debug increases the log level to produce more detailed internal
# behavior logging
# --quiet opposes both verbose and debug
if self.debug:
log_level = logging.DEBUG
elif self.quiet:
log_level = logging.WARN
else:
log_level = logging.INFO
default_log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
root_logger = logging.getLogger()
root_logger.setLevel(logging.WARN)
root_stream_handler = logging.StreamHandler()
root_stream_handler.setFormatter(default_log_formatter)
root_logger.addHandler(root_stream_handler)
# If in debug mode, let all modules log
if not self.debug:
# Otherwise, only allow children of ocp to log
root_logger.addFilter(logging.Filter("ocp"))
# Get a reference to the logger for doozer
self.logger = logutil.getLogger()
self.logger.propagate = False
# levels will be set at the handler level. Make sure master level is low.
self.logger.setLevel(logging.DEBUG)
main_stream_handler = logging.StreamHandler()
main_stream_handler.setFormatter(default_log_formatter)
main_stream_handler.setLevel(log_level)
self.logger.addHandler(main_stream_handler)
debug_log_handler = logging.FileHandler(self.debug_log_path)
# Add thread information for debug log
debug_log_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s (%(thread)d) %(message)s'))
debug_log_handler.setLevel(logging.DEBUG)
self.logger.addHandler(debug_log_handler)
def build_retrying_koji_client(self):
"""
:return: Returns a new koji client instance that will automatically retry
methods when it receives common exceptions (e.g. Connection Reset)
Honors doozer --brew-event.
"""
return brew.KojiWrapper([self.group_config.urls.brewhub], brew_event=self.brew_event)
@contextmanager
def shared_koji_client_session(self):
"""
Context manager which offers a shared koji client session. You hold a koji specific lock in this context
manager giving your thread exclusive access. The lock is reentrant, so don't worry about
call a method that acquires the same lock while you hold it.
Honors doozer --brew-event.
"""
with self.koji_lock:
if self._koji_client_session is None:
self._koji_client_session = self.build_retrying_koji_client()
yield self._koji_client_session
@contextmanager
def pooled_koji_client_session(self):
"""
Context manager which offers a koji client session from a limited pool. You hold a lock on this
session until you return. It is not recommended to call other methods that acquire their
own pooled sessions, because that may lead to deadlock if the pool is exhausted.
Honors doozer --brew-event.
"""
session = None
session_id = None
while True:
with self.mutex:
if len(self.session_pool_available) == 0:
if len(self.session_pool) < 30:
# pool has not grown to max size;
new_session = self.build_retrying_koji_client()
session_id = len(self.session_pool)
self.session_pool[session_id] = new_session
session = new_session # This is what we wil hand to the caller
break
else:
# Caller is just going to have to wait and try again
pass
else:
session_id, session = self.session_pool_available.popitem()
break
time.sleep(5)
# Arriving here, we have a session to use.
try:
yield session
finally:
# Put it back into the pool
with self.mutex:
self.session_pool_available[session_id] = session
@staticmethod
def timestamp():
return datetime.datetime.utcnow().isoformat()
def assert_mutation_is_permitted(self):
"""
In group.yml, it is possible to instruct doozer to prevent all builds / mutation of distgits.
Call this method if you are about to mutate anything. If builds are disabled, an exception will
be thrown.
"""
if self.freeze_automation == FREEZE_AUTOMATION_YES:
raise DoozerFatalError('Automation (builds / mutations) for this group is currently frozen (freeze_automation set to {}). Coordinate with the group owner to change this if you believe it is incorrect.'.format(FREEZE_AUTOMATION_YES))
def image_metas(self):
return list(self.image_map.values())
def ordered_image_metas(self):
return [self.image_map[dg] for dg in self.image_order]
def get_global_arches(self):
"""
:return: Returns a list of architectures that are enabled globally in group.yml.
"""
return list(self.arches)
def filter_failed_image_trees(self, failed):
for i in self.ordered_image_metas():
if i.parent and i.parent.distgit_key in failed:
failed.append(i.distgit_key)
for f in failed:
if f in self.image_map:
del self.image_map[f]
# regen order and tree
self.generate_image_tree()
return failed
def generate_image_tree(self):
self.image_tree = {}
image_lists = {0: []}
def add_child_branch(child, branch, level=1):
if level not in image_lists:
image_lists[level] = []
for sub_child in child.children:
if sub_child.distgit_key not in self.image_map:
continue # don't add images that have been filtered out
branch[sub_child.distgit_key] = {}
image_lists[level].append(sub_child.distgit_key)
add_child_branch(sub_child, branch[sub_child.distgit_key], level + 1)
for image in self.image_map.values():
if not image.parent:
self.image_tree[image.distgit_key] = {}
image_lists[0].append(image.distgit_key)
add_child_branch(image, self.image_tree[image.distgit_key])
levels = list(image_lists.keys())
levels.sort()
self.image_order = []
for level in levels:
for i in image_lists[level]:
if i not in self.image_order:
self.image_order.append(i)
def image_distgit_by_name(self, name):
"""Returns image meta by full name, short name, or distgit"""
return self.image_name_map.get(name, None)
def rpm_metas(self):
return list(self.rpm_map.values())
def all_metas(self):
return self.image_metas() + self.rpm_metas()
def register_source_alias(self, alias, path):
self.logger.info("Registering source alias %s: %s" % (alias, path))
path = os.path.abspath(path)
assertion.isdir(path, "Error registering source alias %s" % alias)
with Dir(path):
url = None
origin_url = "?"
rc1, out_origin, err_origin = exectools.cmd_gather(
["git", "config", "--get", "remote.origin.url"])
if rc1 == 0:
url = out_origin.strip()
origin_url = url
# Usually something like "git@github.com:openshift/origin.git"
# But we want an https hyperlink like http://github.com/openshift/origin
if origin_url.startswith("git@"):
origin_url = origin_url[4:] # remove git@
origin_url = origin_url.replace(":", "/", 1) # replace first colon with /
if origin_url.endswith(".git"):
origin_url = origin_url[:-4] # remove .git
origin_url = "https://%s" % origin_url
else:
self.logger.error("Failed acquiring origin url for source alias %s: %s" % (alias, err_origin))
branch = None
rc2, out_branch, err_branch = exectools.cmd_gather(
["git", "rev-parse", "--abbrev-ref", "HEAD"])
if rc2 == 0:
branch = out_branch.strip()
else:
self.logger.error("Failed acquiring origin branch for source alias %s: %s" % (alias, err_branch))
if self.group_config.public_upstreams:
if not (url and branch):
raise DoozerFatalError(f"Couldn't detect source URL or branch for local source {path}. Is it a valid Git repo?")
public_upstream_url, public_upstream_branch = self.get_public_upstream(url)
self.source_resolutions[alias] = SourceResolution(path, url, branch, public_upstream_url, public_upstream_branch or branch)
else:
self.source_resolutions[alias] = SourceResolution(path, url, branch, None, None)
if 'source_alias' not in self.state:
self.state['source_alias'] = {}
self.state['source_alias'][alias] = {
'url': origin_url,
'branch': branch or '?',
'path': path
}
self.add_record("source_alias", alias=alias, origin_url=origin_url, branch=branch or '?', path=path)
def register_stream_override(self, name, image):
self.logger.info("Registering image stream name override %s: %s" % (name, image))
self.stream_overrides[name] = image
@property
def remove_tmp_working_dir(self):
"""
Provides thread safe method of checking whether runtime should clean up the working directory.
:return: Returns True if the directory should be deleted
"""
with self.log_lock:
return self._remove_tmp_working_dir
@remove_tmp_working_dir.setter
def remove_tmp_working_dir(self, remove):
"""
Provides thread safe method of setting whether runtime should clean up the working directory.
:param remove: True if the directory should be removed. Only the last value set impacts the decision.
"""
with self.log_lock:
self._remove_tmp_working_dir = remove
def add_record(self, record_type, **kwargs):
"""
Records an action taken by oit that needs to be communicated to outside
systems. For example, the update a Dockerfile which needs to be
reviewed by an owner. Each record is encoded on a single line in the
record.log. Records cannot contain line feeds -- if you need to
communicate multi-line data, create a record with a path to a file in
the working directory.
:param record_type: The type of record to create.
:param kwargs: key/value pairs
A record line is designed to be easily parsed and formatted as:
record_type|key1=value1|key2=value2|...|
"""
# Multiple image build processes could be calling us with action simultaneously, so
# synchronize output to the file.
with self.log_lock:
record = "%s|" % record_type
for k, v in kwargs.items():
assert ("\n" not in str(k))
# Make sure the values have no linefeeds as this would interfere with simple parsing.
v = str(v).replace("\n", " ;;; ").replace("\r", "")
record += "%s=%s|" % (k, v)
# Add the record to the file
self.record_log.write("%s\n" % record)
self.record_log.flush()
def add_distgits_diff(self, distgit, diff):
"""
Records the diff of changes applied to a distgit repo.
"""
with io.open(os.path.join(self.distgits_diff_dir, distgit + '.patch'), 'w', encoding='utf-8') as f:
f.write(diff)
def resolve_image(self, distgit_name, required=True):
"""
Returns an ImageMetadata for the specified group member name.
:param distgit_name: The name of an image member in this group
:param required: If True, raise an exception if the member is not found.
:return: The ImageMetadata object associated with the name
"""
if distgit_name not in self.image_map:
if not required:
return None
raise DoozerFatalError("Unable to find image metadata in group / included images: %s" % distgit_name)
return self.image_map[distgit_name]
def late_resolve_image(self, distgit_name, add=False):
"""Resolve image and retrieve meta, optionally adding to image_map.
If image not found, error will be thrown"""
if distgit_name in self.image_map:
return self.image_map[distgit_name]
replace_vars = {}
if self.group_config.vars:
replace_vars = self.group_config.vars.primitive()
if self.assembly:
replace_vars['runtime_assembly'] = self.assembly
data_obj = self.gitdata.load_data(path='images', key=distgit_name, replace_vars=replace_vars)
if not data_obj:
raise DoozerFatalError('Unable to resolve image metadata for {}'.format(distgit_name))
meta = ImageMetadata(self, data_obj, self.upstream_commitish_overrides.get(data_obj.key))
if add:
self.image_map[distgit_name] = meta
return meta
def resolve_brew_image_url(self, image_name_and_version):
"""
:param image_name_and_version: The image name to resolve. The image can contain a version tag or sha.
:return: Returns the pullspec of this image in brew.
e.g. "openshift/jenkins:5" => "registry-proxy.engineering.redhat.com/rh-osbs/openshift-jenkins:5"
"""
if self.group_config.urls.brew_image_host in image_name_and_version:
# Seems like a full brew url already
url = image_name_and_version
elif self.group_config.urls.brew_image_namespace is not Missing:
# if there is a namespace, we need to flatten the image name.
# e.g. openshift/image:latest => openshift-image:latest
# ref: https://mojo.redhat.com/docs/DOC-1204856
url = self.group_config.urls.brew_image_host
ns = self.group_config.urls.brew_image_namespace
name = image_name_and_version.replace('/', '-')
url = "/".join((url, ns, name))
else:
# If there is no namespace, just add the image name to the brew image host
url = "/".join((self.group_config.urls.brew_image_host, image_name_and_version))
if ':' not in url.split('/')[-1]:
# oc image info will return information about all tagged images. So be explicit
# in indicating :latest if there is no tag.
url += ':latest'
return url
def resolve_stream(self, stream_name):
"""
:param stream_name: The name of the stream to resolve.
:return: Resolves and returns the image stream name into its literal value.
This is usually a lookup in streams.yml, but can also be overridden on the command line. If
the stream_name cannot be resolved, an exception is thrown.
"""
# If the stream has an override from the command line, return it.
if stream_name in self.stream_overrides:
return Model(dict_to_model={'image': self.stream_overrides[stream_name]})
if stream_name not in self.streams:
raise IOError("Unable to find definition for stream: %s" % stream_name)
return self.streams[stream_name]
def get_stream_names(self):
"""
:return: Returns a list of all streams defined in streams.yaml.
"""
return list(self.streams.keys())
def get_public_upstream(self, remote_git: str) -> (str, Optional[str]):
"""
Some upstream repo are private in order to allow CVE workflows. While we
may want to build from a private upstream, we don't necessarily want to confuse
end-users by referencing it in our public facing image labels / etc.
In group.yaml, you can specify a mapping in "public_upstreams". It
represents private_url_prefix => public_url_prefix. Remote URLs passed to this
method which contain one of the private url prefixes will be translated
into a new string with the public prefix in its place. If there is not
applicable mapping, the incoming url will still be normalized into https.
:param remote_git: The URL to analyze for private repo patterns.
:return: tuple (url, branch)
- url: An https normalized remote address with private repo information replaced. If there is no
applicable private repo replacement, remote_git will be returned (normalized to https).
- branch: Optional public branch name if the public upstream source use a different branch name from the private upstream.
"""
remote_https = util.convert_remote_git_to_https(remote_git)
if self.group_config.public_upstreams:
# We prefer the longest match in the mapping, so iterate through the entire
# map and keep track of the longest matching private remote.
target_priv_prefix = None
target_pub_prefix = None
target_pub_branch = None
for upstream in self.group_config.public_upstreams:
priv = upstream["private"]
pub = upstream["public"]
# priv can be a full repo, or an organization (e.g. git@github.com:openshift)
# It will be treated as a prefix to be replaced
https_priv_prefix = util.convert_remote_git_to_https(priv) # Normalize whatever is specified in group.yaml
https_pub_prefix = util.convert_remote_git_to_https(pub)
if remote_https.startswith(f'{https_priv_prefix}/') or remote_https == https_priv_prefix:
# If we have not set the prefix yet, or if it is longer than the current contender
if not target_priv_prefix or len(https_priv_prefix) > len(target_pub_prefix):
target_priv_prefix = https_priv_prefix
target_pub_prefix = https_pub_prefix
target_pub_branch = upstream.get("public_branch")
if target_priv_prefix:
return f'{target_pub_prefix}{remote_https[len(target_priv_prefix):]}', target_pub_branch
return remote_https, None
def git_clone(self, remote_url, target_dir, gitargs=None, set_env=None, timeout=0):
gitargs = gitargs or []
set_env = set_env or []
if self.cache_dir:
git_cache_dir = os.path.join(self.cache_dir, self.user or "default", 'git')
util.mkdirs(git_cache_dir)
normalized_url = util.convert_remote_git_to_https(remote_url)
# Strip special chars out of normalized url to create a human friendly, but unique filename
file_friendly_url = normalized_url.split('//')[-1].replace('/', '_')
repo_dir = os.path.join(git_cache_dir, file_friendly_url)
self.logger.info(f'Cache for {remote_url} going to {repo_dir}')
if not os.path.exists(repo_dir):
self.logger.info(f'Initializing cache directory for git remote: {remote_url}')
# If the cache directory for this repo does not exist yet, we will create one.
# But we must do so carefully to minimize races with any other doozer instance
# running on the machine.
with self.get_named_semaphore(repo_dir, is_dir=True): # also make sure we cooperate with other threads in this process.
tmp_repo_dir = tempfile.mkdtemp(dir=git_cache_dir)
exectools.cmd_assert(f'git init --bare {tmp_repo_dir}')
with Dir(tmp_repo_dir):
exectools.cmd_assert(f'git remote add origin {remote_url}')
try:
os.rename(tmp_repo_dir, repo_dir)
except:
# There are two categories of failure
# 1. Another doozer instance already created the directory, in which case we are good to go.
# 2. Something unexpected is preventing the rename.
if not os.path.exists(repo_dir):
# Not sure why the rename failed. Raise to user.
raise
# If we get here, we have a bare repo with a remote set
# Pull content to update the cache. This should be safe for multiple doozer instances to perform.
self.logger.info(f'Updating cache directory for git remote: {remote_url}')
# Fire and forget this fetch -- just used to keep cache as fresh as possible
exectools.fire_and_forget(repo_dir, 'git fetch --all')
gitargs.extend(['--dissociate', '--reference-if-able', repo_dir, '--recurse-submodules'])
self.logger.info(f'Cloning to: {target_dir}')
# Perform the clone (including --reference args if cache_dir was set)
cmd = []
if timeout:
cmd.extend(['timeout', f'{timeout}'])
cmd.extend(['git', 'clone', remote_url])
cmd.extend(gitargs)
cmd.append(target_dir)
exectools.cmd_assert(cmd, retries=3, on_retry=["rm", "-rf", target_dir], set_env=set_env)
def resolve_source(self, meta):
"""
Looks up a source alias and returns a path to the directory containing
that source. Sources can be specified on the command line, or, failing
that, in group.yml.
If a source specified in group.yaml has not be resolved before,
this method will clone that source to checkout the group's desired
branch before returning a path to the cloned repo.
:param meta: The MetaData object to resolve source for
:return: Returns the source path or None if upstream source is not defined
"""
source = meta.config.content.source
if not source:
return None
parent = f'{meta.namespace}_{meta.name}'
# This allows passing `--source <distgit_key> path` to
# override any source to something local without it
# having been configured for an alias
if self.local and meta.distgit_key in self.source_resolutions:
source['alias'] = meta.distgit_key
if 'git' in source:
del source['git']
source_details = None
if 'git' in source:
git_url = urllib.parse.urlparse(source.git.url)
name = os.path.splitext(os.path.basename(git_url.path))[0]
alias = '{}_{}'.format(parent, name)
source_details = dict(source.git)
elif 'alias' in source:
alias = source.alias
else:
return None
self.logger.debug("Resolving local source directory for alias {}".format(alias))
if alias in self.source_resolutions:
path, _, _, meta.public_upstream_url, meta.public_upstream_branch = self.source_resolutions[alias]
self.logger.debug("returning previously resolved path for alias {}: {}".format(alias, path))
return path
# Where the source will land, check early so we know if old or new style
sub_path = '{}{}'.format('global_' if source_details is None else '', alias)
source_dir = os.path.join(self.sources_dir, sub_path)
if not source_details: # old style alias was given
if self.group_config.sources is Missing or alias not in self.group_config.sources:
raise DoozerFatalError("Source alias not found in specified sources or in the current group: %s" % alias)
source_details = self.group_config.sources[alias]
self.logger.debug("checking for source directory in source_dir: {}".format(source_dir))
with self.get_named_semaphore(source_dir, is_dir=True):
if alias in self.source_resolutions: # we checked before, but check again inside the lock
path, _, _, meta.public_upstream_url, meta.public_upstream_branch = self.source_resolutions[alias]
self.logger.debug("returning previously resolved path for alias {}: {}".format(alias, path))
return path
# If this source has already been extracted for this working directory
if os.path.isdir(source_dir):
# Store so that the next attempt to resolve the source hits the map
self.register_source_alias(alias, source_dir)
if self.group_config.public_upstreams:
_, _, _, meta.public_upstream_url, meta.public_upstream_branch = self.source_resolutions[alias]
self.logger.info("Source '{}' already exists in (skipping clone): {}".format(alias, source_dir))
if self.upcycle:
self.logger.info("Refreshing source for '{}' due to --upcycle: {}".format(alias, source_dir))
with Dir(source_dir):
exectools.cmd_assert('git fetch --all', retries=3)
exectools.cmd_assert('git reset --hard @{upstream}', retries=3)
return source_dir
if meta.prevent_cloning:
raise IOError(f'Attempt to clone upstream {meta.distgit_key} after cloning disabled; a regression has been introduced.')
url = source_details["url"]
clone_branch, _ = self.detect_remote_source_branch(source_details)
if self.group_config.public_upstreams:
meta.public_upstream_url, meta.public_upstream_branch = self.get_public_upstream(url)
if not meta.public_upstream_branch: # default to the same branch name as private upstream
meta.public_upstream_branch = clone_branch
self.logger.info("Attempting to checkout source '%s' branch %s in: %s" % (url, clone_branch, source_dir))
try:
self.logger.info("Attempting to checkout source '%s' branch %s in: %s" % (url, clone_branch, source_dir))
# clone all branches as we must sometimes reference master /OWNERS for maintainer information
gitargs = [
'--no-single-branch', '--branch', clone_branch
]
self.git_clone(url, source_dir, gitargs=gitargs, set_env=constants.GIT_NO_PROMPTS)
# fetch public upstream source
if meta.public_upstream_branch:
util.setup_and_fetch_public_upstream_source(meta.public_upstream_url, meta.public_upstream_branch, source_dir)
except IOError as e:
self.logger.info("Unable to checkout branch {}: {}".format(clone_branch, str(e)))
shutil.rmtree(source_dir)
raise DoozerFatalError("Error checking out target branch of source '%s' in: %s" % (alias, source_dir))
# Store so that the next attempt to resolve the source hits the map
self.register_source_alias(alias, source_dir)
if meta.commitish:
# With the alias registered, check out the commit we want
self.logger.info(f"Determining if commit-ish {meta.commitish} exists")
cmd = ["git", "-C", source_dir, "branch", "--contains", meta.commitish]
exectools.cmd_assert(cmd)
self.logger.info(f"Checking out commit-ish {meta.commitish}")
exectools.cmd_assert(["git", "-C", source_dir, "checkout", meta.commitish])
return source_dir
def detect_remote_source_branch(self, source_details):
"""Find a configured source branch that exists, or raise DoozerFatalError. Returns branch name and git hash"""
git_url = source_details["url"]
branches = source_details["branch"]
branch = branches["target"]
fallback_branch = branches.get("fallback", None)
if self.group_config.use_source_fallback_branch == "always" and fallback_branch:
# only use the fallback (unless none is given)
branch, fallback_branch = fallback_branch, None
elif self.group_config.use_source_fallback_branch == "never":
# ignore the fallback
fallback_branch = None
stage_branch = branches.get("stage", None) if self.stage else None
if stage_branch:
self.logger.info('Normal branch overridden by --stage option, using "{}"'.format(stage_branch))
result = self._get_remote_branch_ref(git_url, stage_branch)
if result:
return stage_branch, result
raise DoozerFatalError('--stage option specified and no stage branch named "{}" exists for {}'.format(stage_branch, git_url))
result = self._get_remote_branch_ref(git_url, branch)
if result:
return branch, result
elif not fallback_branch:
raise DoozerFatalError('Requested target branch {} does not exist and no fallback provided'.format(branch))
self.logger.info('Target branch does not exist in {}, checking fallback branch {}'.format(git_url, fallback_branch))
result = self._get_remote_branch_ref(git_url, fallback_branch)
if result:
return fallback_branch, result
raise DoozerFatalError('Requested fallback branch {} does not exist'.format(branch))
def _get_remote_branch_ref(self, git_url, branch):
"""Detect whether a single branch exists on a remote repo; returns git hash if found"""
self.logger.info('Checking if target branch {} exists in {}'.format(branch, git_url))
try:
out, _ = exectools.cmd_assert('git ls-remote --heads {} {}'.format(git_url, branch), retries=3)
except Exception as err:
self.logger.error('Unable to check if target branch {} exists: {}'.format(branch, err))
return None
result = out.strip() # any result means the branch is found
return result.split()[0] if result else None
def resolve_source_head(self, meta):
"""
Attempts to resolve the branch a given source alias has checked out. If not on a branch
returns SHA of head.
:param meta: The MetaData object to resolve source for
:return: The name of the checked out branch or None (if required=False)
"""
source_dir = self.resolve_source(meta)
if not source_dir:
return None
with io.open(os.path.join(source_dir, '.git/HEAD'), encoding="utf-8") as f:
head_content = f.read().strip()
# This will either be:
# a SHA like: "52edbcd8945af0dc728ad20f53dcd78c7478e8c2"
# a local branch name like: "ref: refs/heads/master"
if head_content.startswith("ref:"):
return head_content.split('/', 2)[2] # limit split in case branch name contains /
# Otherwise, just return SHA
return head_content
def export_sources(self, output):
self.logger.info('Writing sources to {}'.format(output))
with io.open(output, 'w', encoding='utf-8') as sources_file:
yaml.dump({k: v.path for k, v in self.source_resolutions.items()}, sources_file, default_flow_style=False)
def auto_version(self, repo_type):
"""
Find and return the version of the atomic-openshift package in the OCP
RPM repository.
This repository is the primary input for OCP images. The group_config
for a group specifies the location for both signed and unsigned
rpms. The caller must indicate which to use.
"""
repo_url = self.repos['rhel-server-ose-rpms'].baseurl(repo_type, 'x86_64')
self.logger.info(
"Getting version from atomic-openshift package in {}".format(
repo_url)
)
# create a randomish repo name to avoid erroneous cache hits
repoid = "oit" + datetime.datetime.now().strftime("%s")
version_query = ["/usr/bin/repoquery", "--quiet", "--tempcache",
"--repoid", repoid,
"--repofrompath", repoid + "," + repo_url,
"--queryformat", "%{VERSION}",
"atomic-openshift"]
rc, auto_version, err = exectools.cmd_gather(version_query)
if rc != 0:
raise RuntimeError(
"Unable to get OCP version from RPM repository: {}".format(err)
)
version = "v" + auto_version.strip()
self.logger.info("Auto-detected OCP version: {}".format(version))
return version
def valid_version(self, version):
"""
Check if a version string matches an accepted pattern.
A single lower-case 'v' followed by one or more decimal numbers,
separated by a dot. Examples below are not exhaustive
Valid:
v1, v12, v3.4, v2.12.0
Not Valid:
1, v1..2, av3.4, .v12 .99.12, v13-55
"""
return re.match(r"^v\d+((\.\d+)+)?$", version) is not None
@classmethod
def _parallel_exec(cls, f, args, n_threads, timeout=None):
pool = ThreadPool(n_threads)
ret = pool.map_async(wrap_exception(f), args)
pool.close()
if timeout is None:
# If a timeout is not specified, the KeyboardInterrupt exception won't be delivered.
# Use polling as a workaround. See https://stackoverflow.com/questions/1408356/keyboard-interrupts-with-pythons-multiprocessing-pool.
while not ret.ready():
ret.wait(60)
return ret.get(timeout)
def clone_distgits(self, n_threads=None):
with util.timer(self.logger.info, 'Full runtime clone'):
if n_threads is None:
n_threads = self.global_opts['distgit_threads']
return self._parallel_exec(
lambda m: m.distgit_repo(),
self.all_metas(),
n_threads=n_threads)
def push_distgits(self, n_threads=None):
self.assert_mutation_is_permitted()
if n_threads is None:
n_threads = self.global_opts['distgit_threads']
return self._parallel_exec(
lambda m: m.distgit_repo().push(),
self.all_metas(),
n_threads=n_threads)
def parallel_exec(self, f, args, n_threads=None):
"""
:param f: A function to invoke for all arguments
:param args: A list of argument tuples. Each tuple will be used to invoke the function once.
:param n_threads: preferred number of threads to use during the work
:return:
"""
n_threads = n_threads if n_threads is not None else len(args)
terminate_event = threading.Event()
pool = ThreadPool(n_threads)
# Python 3 doesn't allow to unpack tuple argument in a lambdas or functions (PEP-3113).
# `_unpack_tuple_args` is a workaround that unpacks the tuple as arguments for the function passed to `ThreadPool.map_async`.
# `starmap_async` can be used in the future when we don't keep compatibility with Python 2.
ret = pool.map_async(
wrap_exception(_unpack_tuple_args(f)),
[(a, terminate_event) for a in args])
pool.close()
try:
# `wait` without a timeout disables signal handling
while not ret.ready():
ret.wait(60)
except KeyboardInterrupt:
self.logger.warning('SIGINT received, signaling threads to terminate...')
terminate_event.set()
pool.join()
return ret
def get_default_brew_tag(self):
return self.branch
def get_default_candidate_brew_tag(self):
return self.branch + '-candidate' if self.branch else None
def get_candidate_brew_tags(self):
"""Return a set of known candidate tags relevant to this group"""
tag = self.get_default_candidate_brew_tag()
# assumptions here:
# releases with default rhel-7 tag also have rhel 8.
# releases with default rhel-8 tag do not also care about rhel-7.
# adjust as needed (and just imagine rhel 9)!
return {tag, tag.replace('-rhel-7', '-rhel-8')} if tag else set()
def get_minor_version(self):
# only applicable if appropriate vars are defined in group config
return '.'.join(str(self.group_config.vars[v]) for v in ('MAJOR', 'MINOR'))
def builds_for_group_branch(self):
# return a dict of all the latest builds for this group, according to
# the branch's candidate tag in brew. each entry is name => tuple(version, release).
tag = self.get_default_candidate_brew_tag()
output, _ = exectools.cmd_assert(
"brew list-tagged --quiet --latest {}".format(tag),
retries=3,
)
builds = [
# each line like "build tag owner" split into build NVR
line.split()[0].rsplit("-", 2)
for line in output.strip().split("\n")
if line.strip()
]
return {n: (v, r) for n, v, r in builds}
def scan_for_upstream_changes(self) -> List[Tuple[Metadata, RebuildHint]]:
"""
Determines if the current upstream source commit hash has a downstream
build associated with it.
:return: Returns a list of tuples. Each tuple contains an rpm or image metadata
and a change tuple (changed: bool, message: str).
"""
return self.parallel_exec(
lambda meta, _: (meta, meta.needs_rebuild()),
self.image_metas() + self.rpm_metas(),
n_threads=20,
).get()
def resolve_metadata(self):
"""
The group control data can be on a local filesystem, in a git
repository that can be checked out, or some day in a database
If the scheme is empty, assume file:///...
Allow http, https, ssh and ssh+git (all valid git clone URLs)
"""
if self.data_path is None:
raise DoozerFatalError(
("No metadata path provided. Must be set via one of:\n"
"* data_path key in {}\n"
"* doozer --data-path [PATH|URL]\n"
"* Environment variable DOOZER_DATA_PATH\n"
).format(self.cfg_obj.full_path))
self.gitdata = gitdata.GitData(data_path=self.data_path, clone_dir=self.working_dir,
commitish=self.group_commitish, reclone=self.upcycle, logger=self.logger)
self.data_dir = self.gitdata.data_dir
| 44.7375
| 244
| 0.620642
|
5dc033e56b43fd2b7ce65e905d2023c1aa460cae
| 688
|
py
|
Python
|
api/src/wt/costs/expenditures/_model.py
|
sedlar/work-tracking
|
78917ff8200829eb674142ce43b503d8e892d7eb
|
[
"BSD-2-Clause"
] | null | null | null |
api/src/wt/costs/expenditures/_model.py
|
sedlar/work-tracking
|
78917ff8200829eb674142ce43b503d8e892d7eb
|
[
"BSD-2-Clause"
] | null | null | null |
api/src/wt/costs/expenditures/_model.py
|
sedlar/work-tracking
|
78917ff8200829eb674142ce43b503d8e892d7eb
|
[
"BSD-2-Clause"
] | null | null | null |
from typing import List
from wt.costs.expenditures._obj import Expenditure, BoundExpenditure
from wt.ids import SimpleId, EntityId
class ExpendituresModel:
def create_expenditure(self, entity_id: EntityId, expenditure: Expenditure) -> BoundExpenditure:
raise NotImplementedError()
def delete_expenditure(self, expenditure_id: SimpleId):
raise NotImplementedError()
def delete_entity_expenditures(self, entity_id: EntityId):
raise NotImplementedError()
def get_expenditures(
self,
entity_id: EntityId,
offset: int,
limit: int
) -> List[BoundExpenditure]:
raise NotImplementedError()
| 28.666667
| 100
| 0.702035
|
102d118025c02973d604b06d3f4c62dce2dbd592
| 79
|
py
|
Python
|
tests/config.py
|
krypton-unite/time_series_models
|
adbf320bfb926b459d0bf3761f18ed1e260f2098
|
[
"Unlicense"
] | null | null | null |
tests/config.py
|
krypton-unite/time_series_models
|
adbf320bfb926b459d0bf3761f18ed1e260f2098
|
[
"Unlicense"
] | null | null | null |
tests/config.py
|
krypton-unite/time_series_models
|
adbf320bfb926b459d0bf3761f18ed1e260f2098
|
[
"Unlicense"
] | null | null | null |
"""
Configures tests
"""
# devices = ['cuda', 'cpu']
devices = ['cuda', 'cpu']
| 13.166667
| 27
| 0.544304
|
2136fed3bf50aca81b83aa7789d0ef1c74e9634a
| 5,801
|
py
|
Python
|
Various/ThirdParty/opencv/video.py
|
pixlra/HARP-fork
|
36e94b3f5f6798726b254380498656d17b68d2f7
|
[
"CC-BY-2.0"
] | null | null | null |
Various/ThirdParty/opencv/video.py
|
pixlra/HARP-fork
|
36e94b3f5f6798726b254380498656d17b68d2f7
|
[
"CC-BY-2.0"
] | 1
|
2019-01-14T09:00:21.000Z
|
2019-01-14T09:00:21.000Z
|
Various/ThirdParty/opencv/video.py
|
pixlra/HARP-fork
|
36e94b3f5f6798726b254380498656d17b68d2f7
|
[
"CC-BY-2.0"
] | 1
|
2022-02-01T12:45:29.000Z
|
2022-02-01T12:45:29.000Z
|
#!/usr/bin/env python
'''
Video capture sample.
Sample shows how VideoCapture class can be used to acquire video
frames from a camera of a movie file. Also the sample provides
an example of procedural video generation by an object, mimicking
the VideoCapture interface (see Chess class).
'create_capture' is a convinience function for capture creation,
falling back to procedural video in case of error.
Usage:
video.py [--shotdir <shot path>] [source0] [source1] ...'
sourceN is an
- integer number for camera capture
- name of video file
- synth:<params> for procedural video
Synth examples:
synth:bg=../cpp/lena.jpg:noise=0.1
synth:class=chess:bg=../cpp/lena.jpg:noise=0.1:size=640x480
Keys:
ESC - exit
SPACE - save current frame to <shot path> directory
'''
import numpy as np
import cv2
from time import clock
from numpy import pi, sin, cos
#import common
class VideoSynthBase(object):
def __init__(self, size=None, noise=0.0, bg = None, **params):
self.bg = None
self.frame_size = (640, 480)
if bg is not None:
self.bg = cv2.imread(bg, 1)
h, w = self.bg.shape[:2]
self.frame_size = (w, h)
if size is not None:
w, h = map(int, size.split('x'))
self.frame_size = (w, h)
self.bg = cv2.resize(self.bg, self.frame_size)
self.noise = float(noise)
def render(self, dst):
pass
def read(self, dst=None):
w, h = self.frame_size
if self.bg is None:
buf = np.zeros((h, w, 3), np.uint8)
else:
buf = self.bg.copy()
self.render(buf)
if self.noise > 0.0:
noise = np.zeros((h, w, 3), np.int8)
cv2.randn(noise, np.zeros(3), np.ones(3)*255*self.noise)
buf = cv2.add(buf, noise, dtype=cv2.CV_8UC3)
return True, buf
def isOpened(self):
return True
class Chess(VideoSynthBase):
def __init__(self, **kw):
super(Chess, self).__init__(**kw)
w, h = self.frame_size
self.grid_size = sx, sy = 10, 7
white_quads = []
black_quads = []
for i, j in np.ndindex(sy, sx):
q = [[j, i, 0], [j+1, i, 0], [j+1, i+1, 0], [j, i+1, 0]]
[white_quads, black_quads][(i + j) % 2].append(q)
self.white_quads = np.float32(white_quads)
self.black_quads = np.float32(black_quads)
fx = 0.9
self.K = np.float64([[fx*w, 0, 0.5*(w-1)],
[0, fx*w, 0.5*(h-1)],
[0.0,0.0, 1.0]])
self.dist_coef = np.float64([-0.2, 0.1, 0, 0])
self.t = 0
def draw_quads(self, img, quads, color = (0, 255, 0)):
img_quads = cv2.projectPoints(quads.reshape(-1, 3), self.rvec, self.tvec, self.K, self.dist_coef) [0]
img_quads.shape = quads.shape[:2] + (2,)
for q in img_quads:
cv2.fillConvexPoly(img, np.int32(q*4), color, cv2.CV_AA, shift=2)
def render(self, dst):
t = self.t
self.t += 1.0/30.0
sx, sy = self.grid_size
center = np.array([0.5*sx, 0.5*sy, 0.0])
phi = pi/3 + sin(t*3)*pi/8
c, s = cos(phi), sin(phi)
ofs = np.array([sin(1.2*t), cos(1.8*t), 0]) * sx * 0.2
eye_pos = center + np.array([cos(t)*c, sin(t)*c, s]) * 15.0 + ofs
target_pos = center + ofs
R, self.tvec = common.lookat(eye_pos, target_pos)
self.rvec = common.mtx2rvec(R)
self.draw_quads(dst, self.white_quads, (245, 245, 245))
self.draw_quads(dst, self.black_quads, (10, 10, 10))
classes = dict(chess=Chess)
presets = dict(
empty = 'synth:',
lena = 'synth:bg=../cpp/lena.jpg:noise=0.1',
chess = 'synth:class=chess:bg=../cpp/lena.jpg:noise=0.1:size=640x480'
)
def create_capture(source = 0, fallback = presets['chess']):
'''source: <int> or '<int>|<filename>|synth [:<param_name>=<value> [:...]]'
'''
source = str(source).strip()
chunks = source.split(':')
# hanlde drive letter ('c:', ...)
if len(chunks) > 1 and len(chunks[0]) == 1 and chunks[0].isalpha():
chunks[1] = chunks[0] + ':' + chunks[1]
del chunks[0]
source = chunks[0]
try: source = int(source)
except ValueError: pass
params = dict( s.split('=') for s in chunks[1:] )
cap = None
if source == 'synth':
Class = classes.get(params.get('class', None), VideoSynthBase)
try: cap = Class(**params)
except: pass
else:
cap = cv2.VideoCapture(source)
if 'size' in params:
w, h = map(int, params['size'].split('x'))
cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, w)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, h)
if cap is None or not cap.isOpened():
print 'Warning: unable to open video source: ', source
if fallback is not None:
return create_capture(fallback, None)
return cap
if __name__ == '__main__':
import sys
import getopt
print __doc__
args, sources = getopt.getopt(sys.argv[1:], '', 'shotdir=')
args = dict(args)
shotdir = args.get('--shotdir', '.')
if len(sources) == 0:
sources = [ 0 ]
caps = map(create_capture, sources)
shot_idx = 0
while True:
imgs = []
for i, cap in enumerate(caps):
ret, img = cap.read()
imgs.append(img)
cv2.imshow('capture %d' % i, img)
ch = 0xFF & cv2.waitKey(1)
if ch == 27:
break
if ch == ord(' '):
for i, img in enumerate(imgs):
fn = '%s/shot_%d_%03d.bmp' % (shotdir, i, shot_idx)
cv2.imwrite(fn, img)
print fn, 'saved'
shot_idx += 1
cv2.destroyAllWindows()
| 29.748718
| 109
| 0.555249
|
c446bc91606f8a415ff5bda1f204581b15c36b47
| 1,903
|
py
|
Python
|
Python_libs/.ipynb_checkpoints/stab_aux-checkpoint.py
|
tsommerfeld/L2-methods_for_resonances
|
acba48bfede415afd99c89ff2859346e1eb4f96c
|
[
"MIT"
] | null | null | null |
Python_libs/.ipynb_checkpoints/stab_aux-checkpoint.py
|
tsommerfeld/L2-methods_for_resonances
|
acba48bfede415afd99c89ff2859346e1eb4f96c
|
[
"MIT"
] | null | null | null |
Python_libs/.ipynb_checkpoints/stab_aux-checkpoint.py
|
tsommerfeld/L2-methods_for_resonances
|
acba48bfede415afd99c89ff2859346e1eb4f96c
|
[
"MIT"
] | null | null | null |
"""
functions needed for the GPA analysis of stabilization graphs
convention:
the scaling variable is called L
L could be a box-size or a scaling factor of exponents
however, I have the suspicion is works only,
if s = 1/L^2 as in L = boxlength is used
"""
import numpy as np
def dEdL(E, L, P, Q, R):
"""
we know: E^2*P + E*Q + P = 0
therefore:
dEdL = E' = -(E^2*P' + E*Q' + R')/(2*E*P + Q)
input:
P, Q, R: three polynomials that depend on L
E: the energy
L: the independent (scaling) variable
output:
dE/dL derivative of E
"""
Pp = P.deriv(1)(L)
Qp = Q.deriv(1)(L)
Rp = R.deriv(1)(L)
return -(E**2*Pp + E*Qp + Rp) / (2*E*P(L) + Q(L))
def E_from_L(L, A, B, C):
"""
given L, solve E^2*A + E*B + C = 0
return both roots
"""
P = np.poly1d([A(L), B(L), C(L)])
return P.roots
def E_and_Ep(L, A, B, C):
"""
combination of the two functions above
given L, first solve E^2*A + E*B + C = 0
for every root found, compute the 1st derivative |dEdL|
return energies and abs(derivatives)
"""
P = np.poly1d([A(L), B(L), C(L)])
roots = P.roots
ders = []
for E in roots:
ders.append(abs(dEdL(E, L, A, B, C)))
return roots, ders
#
# for Newton we solve dEdL = 0 or E' = 0
#
# so we iterate L[i+1] = L[i] - E'/E''
#
# the fraction E'/E'' can be worked out analytically:
#
# (E^2*P' + E*Q' + R') /
# (2*P*E'^2 + 4*E*E'*P' + E^2*P'' + 2*E'*Q' + E*Q'' + R'')
#
def EpoEpp(E, L, P, Q, R):
""" E'/E'' needed for Newton's method """
Pp = P.deriv(1)(L)
Qp = Q.deriv(1)(L)
Rp = R.deriv(1)(L)
Ep = -(E**2*Pp + E*Qp + Rp) / (2*E*P(L) + Q(L))
Ppp = P.deriv(2)(L)
Qpp = Q.deriv(2)(L)
Rpp = R.deriv(2)(L)
num = E**2*Pp + E*Qp + Rp
den = 2*P(L)*Ep**2 + 4*E*Ep*Pp + E**2*Ppp + 2*Ep*Qp + E*Qpp + Rpp
return num/den
| 22.927711
| 69
| 0.518655
|
d377018eb4679769e06717400fccb71699724eb6
| 5,036
|
py
|
Python
|
gnuradio-3.7.13.4/grc/gui/Preferences.py
|
v1259397/cosmic-gnuradio
|
64c149520ac6a7d44179c3f4a38f38add45dd5dc
|
[
"BSD-3-Clause"
] | 1
|
2021-03-09T07:32:37.000Z
|
2021-03-09T07:32:37.000Z
|
gnuradio-3.7.13.4/grc/gui/Preferences.py
|
v1259397/cosmic-gnuradio
|
64c149520ac6a7d44179c3f4a38f38add45dd5dc
|
[
"BSD-3-Clause"
] | null | null | null |
gnuradio-3.7.13.4/grc/gui/Preferences.py
|
v1259397/cosmic-gnuradio
|
64c149520ac6a7d44179c3f4a38f38add45dd5dc
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright 2008 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import sys
import ConfigParser
HEADER = """\
# This contains only GUI settings for GRC and is not meant for users to edit.
#
# GRC settings not accessible through the GUI are in gnuradio.conf under
# section [grc].
"""
_platform = None
_config_parser = ConfigParser.SafeConfigParser()
def file_extension():
return '.grc'
def load(platform):
global _platform
_platform = platform
# create sections
for section in ['main', 'files_open', 'files_recent']:
try:
_config_parser.add_section(section)
except Exception, e:
print e
try:
_config_parser.read(_platform.get_prefs_file())
except Exception as err:
print >> sys.stderr, err
def save():
try:
with open(_platform.get_prefs_file(), 'w') as fp:
fp.write(HEADER)
_config_parser.write(fp)
except Exception as err:
print >> sys.stderr, err
def entry(key, value=None, default=None):
if value is not None:
_config_parser.set('main', key, str(value))
result = value
else:
_type = type(default) if default is not None else str
getter = {
bool: _config_parser.getboolean,
int: _config_parser.getint,
}.get(_type, _config_parser.get)
try:
result = getter('main', key)
except (AttributeError, ConfigParser.Error):
result = _type() if default is None else default
return result
###########################################################################
# Special methods for specific program functionalities
###########################################################################
def main_window_size(size=None):
if size is None:
size = [None, None]
w = entry('main_window_width', size[0], default=1)
h = entry('main_window_height', size[1], default=1)
return w, h
def file_open(filename=None):
return entry('file_open', filename, default='')
def set_file_list(key, files):
_config_parser.remove_section(key) # clear section
_config_parser.add_section(key)
for i, filename in enumerate(files):
_config_parser.set(key, '%s_%d' % (key, i), filename)
def get_file_list(key):
try:
files = [value for name, value in _config_parser.items(key)
if name.startswith('%s_' % key)]
except (AttributeError, ConfigParser.Error):
files = []
return files
def get_open_files():
return get_file_list('files_open')
def set_open_files(files):
return set_file_list('files_open', files)
def get_recent_files():
""" Gets recent files, removes any that do not exist and re-saves it """
files = filter(os.path.exists, get_file_list('files_recent'))
set_recent_files(files)
return files
def set_recent_files(files):
return set_file_list('files_recent', files)
def add_recent_file(file_name):
# double check file_name
if os.path.exists(file_name):
recent_files = get_recent_files()
if file_name in recent_files:
recent_files.remove(file_name) # Attempt removal
recent_files.insert(0, file_name) # Insert at start
set_recent_files(recent_files[:10]) # Keep up to 10 files
def console_window_position(pos=None):
return entry('console_window_position', pos, default=-1) or 1
def blocks_window_position(pos=None):
return entry('blocks_window_position', pos, default=-1) or 1
def variable_editor_position(pos=None, sidebar=False):
# Figure out default
if sidebar:
w, h = main_window_size()
return entry('variable_editor_sidebar_position', pos, default=int(h*0.7))
else:
return entry('variable_editor_position', pos, default=int(blocks_window_position()*0.5))
def variable_editor_sidebar(pos=None):
return entry('variable_editor_sidebar', pos, default=False)
def variable_editor_confirm_delete(pos=None):
return entry('variable_editor_confirm_delete', pos, default=True)
def xterm_missing(cmd=None):
return entry('xterm_missing', cmd, default='INVALID_XTERM_SETTING')
def screen_shot_background_transparent(transparent=None):
return entry('screen_shot_background_transparent', transparent, default=False)
| 28.942529
| 96
| 0.67633
|
fa37447a25bf986681486ca2bb9cdf2d1be114a5
| 37,527
|
py
|
Python
|
ironic_inspector/node_cache.py
|
elfosardo/ironic-inspector
|
479c0465676b36a8a9b0f23eabafdac36d5b2a6f
|
[
"Apache-2.0"
] | 1
|
2019-06-17T21:39:14.000Z
|
2019-06-17T21:39:14.000Z
|
ironic_inspector/node_cache.py
|
elfosardo/ironic-inspector
|
479c0465676b36a8a9b0f23eabafdac36d5b2a6f
|
[
"Apache-2.0"
] | 1
|
2019-09-16T09:38:03.000Z
|
2019-09-16T09:38:03.000Z
|
ironic_inspector/node_cache.py
|
elfosardo/ironic-inspector
|
479c0465676b36a8a9b0f23eabafdac36d5b2a6f
|
[
"Apache-2.0"
] | 5
|
2019-06-13T13:00:16.000Z
|
2021-04-17T16:32:42.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cache for nodes currently under introspection."""
import collections
import contextlib
import copy
import datetime
import json
import operator
from automaton import exceptions as automaton_errors
from ironicclient import exceptions
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_db.sqlalchemy import utils as db_utils
from oslo_utils import excutils
from oslo_utils import reflection
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from sqlalchemy.orm import exc as orm_errors
from ironic_inspector.common.i18n import _
from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import db
from ironic_inspector import introspection_state as istate
from ironic_inspector import utils
CONF = cfg.CONF
LOG = utils.getProcessingLogger(__name__)
MACS_ATTRIBUTE = 'mac'
_LOCK_TEMPLATE = 'node-%s'
_SEMAPHORES = lockutils.Semaphores()
def _get_lock(uuid):
"""Get lock object for a given node UUID."""
return lockutils.internal_lock(_LOCK_TEMPLATE % uuid,
semaphores=_SEMAPHORES)
def _get_lock_ctx(uuid):
"""Get context manager yielding a lock object for a given node UUID."""
return lockutils.lock(_LOCK_TEMPLATE % uuid, semaphores=_SEMAPHORES)
class NodeInfo(object):
"""Record about a node in the cache.
This class optionally allows to acquire a lock on a node. Note that the
class instance itself is NOT thread-safe, you need to create a new instance
for every thread.
"""
def __init__(self, uuid, version_id=None, state=None, started_at=None,
finished_at=None, error=None, node=None, ports=None,
ironic=None, lock=None, manage_boot=True):
self.uuid = uuid
self.started_at = started_at
self.finished_at = finished_at
self.error = error
self.invalidate_cache()
self._version_id = version_id
self._state = state
self._node = node
if ports is not None and not isinstance(ports, dict):
ports = {p.address: p for p in ports}
self._ports = ports
self._attributes = None
self._ironic = ironic
# On upgrade existing records will have manage_boot=NULL, which is
# equivalent to True actually.
self._manage_boot = manage_boot if manage_boot is not None else True
# This is a lock on a node UUID, not on a NodeInfo object
self._lock = lock if lock is not None else _get_lock(uuid)
# Whether lock was acquired using this NodeInfo object
self._locked = lock is not None
self._fsm = None
def __del__(self):
if self._locked:
LOG.warning('BUG: node lock was not released by the moment '
'node info object is deleted')
self._lock.release()
def __str__(self):
"""Self represented as an UUID and a state."""
parts = [self.uuid]
if self._state:
parts += [_('state'), self._state]
return ' '.join(parts)
def acquire_lock(self, blocking=True):
"""Acquire a lock on the associated node.
Exits with success if a lock is already acquired using this NodeInfo
object.
:param blocking: if True, wait for lock to be acquired, otherwise
return immediately.
:returns: boolean value, whether lock was acquired successfully
"""
if self._locked:
return True
LOG.debug('Attempting to acquire lock', node_info=self)
if self._lock.acquire(blocking):
self._locked = True
LOG.debug('Successfully acquired lock', node_info=self)
return True
else:
LOG.debug('Unable to acquire lock', node_info=self)
return False
def release_lock(self):
"""Release a lock on a node.
Does nothing if lock was not acquired using this NodeInfo object.
"""
if self._locked:
LOG.debug('Successfully released lock', node_info=self)
self._lock.release()
self._locked = False
@property
def version_id(self):
"""Get the version id"""
if self._version_id is None:
row = db.model_query(db.Node).get(self.uuid)
if row is None:
raise utils.NotFoundInCacheError(_('Node not found in the '
'cache'), node_info=self)
self._version_id = row.version_id
return self._version_id
def _set_version_id(self, value, session):
row = self._row(session)
row.version_id = value
row.save(session)
self._version_id = value
def _row(self, session=None):
"""Get a row from the database with self.uuid and self.version_id"""
try:
# race condition if version_id changed outside of this node_info
return db.model_query(db.Node, session=session).filter_by(
uuid=self.uuid, version_id=self.version_id).one()
except (orm_errors.NoResultFound, orm_errors.StaleDataError):
raise utils.NodeStateRaceCondition(node_info=self)
def _commit(self, **fields):
"""Commit the fields into the DB."""
LOG.debug('Committing fields: %s', fields, node_info=self)
with db.ensure_transaction() as session:
self._set_version_id(uuidutils.generate_uuid(), session)
row = self._row(session)
row.update(fields)
def commit(self):
"""Commit current node status into the database."""
# state and version_id are updated separately
self._commit(started_at=self.started_at, finished_at=self.finished_at,
error=self.error)
@property
def state(self):
"""State of the node_info object."""
if self._state is None:
row = self._row()
self._state = row.state
return self._state
def _set_state(self, value):
self._commit(state=value)
self._state = value
def _get_fsm(self):
"""Get an fsm instance initialized with self.state."""
if self._fsm is None:
self._fsm = istate.FSM.copy(shallow=True)
self._fsm.initialize(start_state=self.state)
return self._fsm
@contextlib.contextmanager
def _fsm_ctx(self):
fsm = self._get_fsm()
try:
yield fsm
finally:
if fsm.current_state != self.state:
LOG.info('Updating node state: %(current)s --> %(new)s',
{'current': self.state, 'new': fsm.current_state},
node_info=self)
self._set_state(fsm.current_state)
def fsm_event(self, event, strict=False):
"""Update node_info.state based on a fsm.process_event(event) call.
An AutomatonException triggers an error event.
If strict, node_info.finished(istate.Events.error, error=str(exc))
is called with the AutomatonException instance and a EventError raised.
:param event: an event to process by the fsm
:strict: whether to fail the introspection upon an invalid event
:raises: NodeStateInvalidEvent
"""
with self._fsm_ctx() as fsm:
LOG.debug('Executing fsm(%(state)s).process_event(%(event)s)',
{'state': fsm.current_state, 'event': event},
node_info=self)
try:
fsm.process_event(event)
except automaton_errors.NotFound as exc:
msg = _('Invalid event: %s') % exc
if strict:
LOG.error(msg, node_info=self)
# assuming an error event is always possible
self.finished(istate.Events.error, error=str(exc))
else:
LOG.warning(msg, node_info=self)
raise utils.NodeStateInvalidEvent(str(exc), node_info=self)
@property
def options(self):
"""Node introspection options as a dict."""
if self._options is None:
rows = db.model_query(db.Option).filter_by(
uuid=self.uuid)
self._options = {row.name: json.loads(row.value)
for row in rows}
return self._options
@property
def attributes(self):
"""Node look up attributes as a dict."""
if self._attributes is None:
self._attributes = {}
rows = db.model_query(db.Attribute).filter_by(
node_uuid=self.uuid)
for row in rows:
self._attributes.setdefault(row.name, []).append(row.value)
return self._attributes
@property
def ironic(self):
"""Ironic client instance."""
if self._ironic is None:
self._ironic = ir_utils.get_client()
return self._ironic
@property
def manage_boot(self):
"""Whether to manage boot for this node."""
return self._manage_boot
def set_option(self, name, value):
"""Set an option for a node."""
encoded = json.dumps(value)
self.options[name] = value
with db.ensure_transaction() as session:
db.model_query(db.Option, session=session).filter_by(
uuid=self.uuid, name=name).delete()
db.Option(uuid=self.uuid, name=name, value=encoded).save(
session)
def finished(self, event, error=None):
"""Record status for this node and process a terminal transition.
Also deletes look up attributes from the cache.
:param event: the event to process
:param error: error message
"""
self.release_lock()
self.finished_at = timeutils.utcnow()
self.error = error
with db.ensure_transaction() as session:
self.fsm_event(event)
self._commit(finished_at=self.finished_at, error=self.error)
db.model_query(db.Attribute, session=session).filter_by(
node_uuid=self.uuid).delete()
db.model_query(db.Option, session=session).filter_by(
uuid=self.uuid).delete()
def add_attribute(self, name, value, session=None):
"""Store look up attribute for a node in the database.
:param name: attribute name
:param value: attribute value or list of possible values
:param session: optional existing database session
"""
if not isinstance(value, list):
value = [value]
with db.ensure_transaction(session) as session:
for v in value:
db.Attribute(uuid=uuidutils.generate_uuid(), name=name,
value=v, node_uuid=self.uuid).save(session)
# Invalidate attributes so they're loaded on next usage
self._attributes = None
@classmethod
def from_row(cls, row, ironic=None, lock=None, node=None):
"""Construct NodeInfo from a database row."""
fields = {key: row[key]
for key in ('uuid', 'version_id', 'state', 'started_at',
'finished_at', 'error', 'manage_boot')}
return cls(ironic=ironic, lock=lock, node=node, **fields)
def invalidate_cache(self):
"""Clear all cached info, so that it's reloaded next time."""
self._options = None
self._node = None
self._ports = None
self._attributes = None
self._ironic = None
self._fsm = None
self._state = None
self._version_id = None
def node(self, ironic=None):
"""Get Ironic node object associated with the cached node record."""
if self._node is None:
ironic = ironic or self.ironic
self._node = ir_utils.get_node(self.uuid, ironic=ironic)
return self._node
def create_ports(self, ports, ironic=None):
"""Create one or several ports for this node.
:param ports: List of ports with all their attributes
e.g [{'mac': xx, 'ip': xx, 'client_id': None},
{'mac': xx, 'ip': None, 'client_id': None}]
It also support the old style of list of macs.
A warning is issued if port already exists on a node.
:param ironic: Ironic client to use instead of self.ironic
"""
existing_macs = []
for port in ports:
mac = port
extra = {}
pxe_enabled = True
if isinstance(port, dict):
mac = port['mac']
client_id = port.get('client_id')
if client_id:
extra = {'client-id': client_id}
pxe_enabled = port.get('pxe', True)
if mac not in self.ports():
self._create_port(mac, ironic=ironic, extra=extra,
pxe_enabled=pxe_enabled)
else:
existing_macs.append(mac)
if existing_macs:
LOG.warning('Did not create ports %s as they already exist',
existing_macs, node_info=self)
def ports(self, ironic=None):
"""Get Ironic port objects associated with the cached node record.
This value is cached as well, use invalidate_cache() to clean.
:return: dict MAC -> port object
"""
if self._ports is None:
ironic = ironic or self.ironic
port_list = ironic.node.list_ports(self.uuid, limit=0, detail=True)
self._ports = {p.address: p for p in port_list}
return self._ports
def _create_port(self, mac, ironic=None, **kwargs):
ironic = ironic or self.ironic
try:
port = ironic.port.create(
node_uuid=self.uuid, address=mac, **kwargs)
LOG.info('Port %(uuid)s was created successfully, MAC: %(mac)s,'
'attributes: %(attrs)s',
{'uuid': port.uuid, 'mac': port.address,
'attrs': kwargs},
node_info=self)
except exceptions.Conflict:
LOG.warning('Port %s already exists, skipping',
mac, node_info=self)
# NOTE(dtantsur): we didn't get port object back, so we have to
# reload ports on next access
self._ports = None
else:
self._ports[mac] = port
def patch(self, patches, ironic=None, **kwargs):
"""Apply JSON patches to a node.
Refreshes cached node instance.
:param patches: JSON patches to apply
:param ironic: Ironic client to use instead of self.ironic
:param kwargs: Arguments to pass to ironicclient.
:raises: ironicclient exceptions
"""
ironic = ironic or self.ironic
# NOTE(aarefiev): support path w/o ahead forward slash
# as Ironic cli does
for patch in patches:
if patch.get('path') and not patch['path'].startswith('/'):
patch['path'] = '/' + patch['path']
LOG.debug('Updating node with patches %s', patches, node_info=self)
self._node = ironic.node.update(self.uuid, patches, **kwargs)
def patch_port(self, port, patches, ironic=None):
"""Apply JSON patches to a port.
:param port: port object or its MAC
:param patches: JSON patches to apply
:param ironic: Ironic client to use instead of self.ironic
"""
ironic = ironic or self.ironic
ports = self.ports()
if isinstance(port, six.string_types):
port = ports[port]
LOG.debug('Updating port %(mac)s with patches %(patches)s',
{'mac': port.address, 'patches': patches},
node_info=self)
new_port = ironic.port.update(port.uuid, patches)
ports[port.address] = new_port
def update_properties(self, ironic=None, **props):
"""Update properties on a node.
:param props: properties to update
:param ironic: Ironic client to use instead of self.ironic
"""
ironic = ironic or self.ironic
patches = [{'op': 'add', 'path': '/properties/%s' % k, 'value': v}
for k, v in props.items()]
self.patch(patches, ironic)
def update_capabilities(self, ironic=None, **caps):
"""Update capabilities on a node.
:param caps: capabilities to update
:param ironic: Ironic client to use instead of self.ironic
"""
existing = ir_utils.capabilities_to_dict(
self.node().properties.get('capabilities'))
existing.update(caps)
self.update_properties(
ironic=ironic,
capabilities=ir_utils.dict_to_capabilities(existing))
def add_trait(self, trait, ironic=None):
"""Add a trait to the node.
:param trait: trait to add
:param ironic: Ironic client to use instead of self.ironic
"""
ironic = ironic or self.ironic
ironic.node.add_trait(self.uuid, trait)
def remove_trait(self, trait, ironic=None):
"""Remove a trait from the node.
:param trait: trait to add
:param ironic: Ironic client to use instead of self.ironic
"""
ironic = ironic or self.ironic
try:
ironic.node.remove_trait(self.uuid, trait)
except exceptions.NotFound:
LOG.debug('Trait %s is not set, cannot remove', trait,
node_info=self)
def delete_port(self, port, ironic=None):
"""Delete port.
:param port: port object or its MAC
:param ironic: Ironic client to use instead of self.ironic
"""
ironic = ironic or self.ironic
ports = self.ports()
if isinstance(port, six.string_types):
port = ports[port]
ironic.port.delete(port.uuid)
del ports[port.address]
def get_by_path(self, path):
"""Get field value by ironic-style path (e.g. /extra/foo).
:param path: path to a field
:returns: field value
:raises: KeyError if field was not found
"""
path = path.strip('/')
try:
if '/' in path:
prop, key = path.split('/', 1)
return getattr(self.node(), prop)[key]
else:
return getattr(self.node(), path)
except AttributeError:
raise KeyError(path)
def replace_field(self, path, func, **kwargs):
"""Replace a field on ironic node.
:param path: path to a field as used by the ironic client
:param func: function accepting an old value and returning a new one
:param kwargs: if 'default' value is passed here, it will be used when
no existing value is found.
:raises: KeyError if value is not found and default is not set
:raises: everything that patch() may raise
"""
ironic = kwargs.pop("ironic", None) or self.ironic
try:
value = self.get_by_path(path)
op = 'replace'
except KeyError:
if 'default' in kwargs:
value = kwargs['default']
op = 'add'
else:
raise
ref_value = copy.deepcopy(value)
value = func(value)
if value != ref_value:
self.patch([{'op': op, 'path': path, 'value': value}], ironic)
def triggers_fsm_error_transition(errors=(Exception,),
no_errors=(utils.NodeStateInvalidEvent,
utils.NodeStateRaceCondition)):
"""Trigger an fsm error transition upon certain errors.
It is assumed the first function arg of the decorated function is always a
NodeInfo instance.
:param errors: a tuple of exceptions upon which an error
event is triggered. Re-raised.
:param no_errors: a tuple of exceptions that won't trigger the
error event.
"""
def outer(func):
@six.wraps(func)
def inner(node_info, *args, **kwargs):
ret = None
try:
ret = func(node_info, *args, **kwargs)
except no_errors as exc:
LOG.debug('Not processing error event for the '
'exception: %(exc)s raised by %(func)s',
{'exc': exc,
'func': reflection.get_callable_name(func)},
node_info=node_info)
except errors as exc:
with excutils.save_and_reraise_exception():
LOG.error('Processing the error event because of an '
'exception %(exc_type)s: %(exc)s raised by '
'%(func)s',
{'exc_type': type(exc), 'exc': exc,
'func': reflection.get_callable_name(func)},
node_info=node_info)
# an error event should be possible from all states
node_info.finished(istate.Events.error, error=str(exc))
return ret
return inner
return outer
def fsm_event_before(event, strict=False):
"""Trigger an fsm event before the function execution.
It is assumed the first function arg of the decorated function is always a
NodeInfo instance.
:param event: the event to process before the function call
:param strict: make an invalid fsm event trigger an error event
"""
def outer(func):
@six.wraps(func)
def inner(node_info, *args, **kwargs):
LOG.debug('Processing event %(event)s before calling '
'%(func)s', {'event': event, 'func': func},
node_info=node_info)
node_info.fsm_event(event, strict=strict)
return func(node_info, *args, **kwargs)
return inner
return outer
def fsm_event_after(event, strict=False):
"""Trigger an fsm event after the function execution.
It is assumed the first function arg of the decorated function is always a
NodeInfo instance.
:param event: the event to process after the function call
:param strict: make an invalid fsm event trigger an error event
"""
def outer(func):
@six.wraps(func)
def inner(node_info, *args, **kwargs):
ret = func(node_info, *args, **kwargs)
LOG.debug('Processing event %(event)s after calling '
'%(func)s', {'event': event, 'func': func},
node_info=node_info)
node_info.fsm_event(event, strict=strict)
return ret
return inner
return outer
def fsm_transition(event, reentrant=True, **exc_kwargs):
"""Decorate a function to perform a (non-)reentrant transition.
If True, reentrant transition will be performed at the end of a function
call. If False, the transition will be performed before the function call.
The function is decorated with the triggers_fsm_error_transition decorator
as well.
:param event: the event to bind the transition to.
:param reentrant: whether the transition is reentrant.
:param exc_kwargs: passed on to the triggers_fsm_error_transition decorator
"""
def outer(func):
inner = triggers_fsm_error_transition(**exc_kwargs)(func)
if not reentrant:
return fsm_event_before(event, strict=True)(inner)
return fsm_event_after(event)(inner)
return outer
def release_lock(func):
"""Decorate a node_info-function to release the node_info lock.
Assumes the first parameter of the function func is always a NodeInfo
instance.
"""
@six.wraps(func)
def inner(node_info, *args, **kwargs):
try:
return func(node_info, *args, **kwargs)
finally:
# FIXME(milan) hacking the test cases to work
# with release_lock.assert_called_once...
if node_info._locked:
node_info.release_lock()
return inner
def start_introspection(uuid, **kwargs):
"""Start the introspection of a node.
If a node_info record exists in the DB, a start transition is used rather
than dropping the record in order to check for the start transition
validity in particular node state.
:param uuid: Ironic node UUID
:param kwargs: passed on to add_node()
:raises: NodeStateInvalidEvent in case the start transition is invalid in
the current node state
:raises: NodeStateRaceCondition if a mismatch was detected between the
node_info cache and the DB
:returns: NodeInfo
"""
with db.ensure_transaction():
node_info = NodeInfo(uuid)
# check that the start transition is possible
try:
node_info.fsm_event(istate.Events.start)
except utils.NotFoundInCacheError:
# node not found while in the fsm_event handler
LOG.debug('Node missing in the cache; adding it now',
node_info=node_info)
state = istate.States.starting
else:
state = node_info.state
return add_node(uuid, state, **kwargs)
def add_node(uuid, state, manage_boot=True, **attributes):
"""Store information about a node under introspection.
All existing information about this node is dropped.
Empty values are skipped.
:param uuid: Ironic node UUID
:param state: The initial state of the node
:param manage_boot: whether to manage boot for this node
:param attributes: attributes known about this node (like macs, BMC etc);
also ironic client instance may be passed under 'ironic'
:returns: NodeInfo
"""
started_at = timeutils.utcnow()
with db.ensure_transaction() as session:
_delete_node(uuid)
version_id = uuidutils.generate_uuid()
db.Node(uuid=uuid, state=state, version_id=version_id,
started_at=started_at, manage_boot=manage_boot).save(session)
node_info = NodeInfo(uuid=uuid, state=state, started_at=started_at,
version_id=version_id, manage_boot=manage_boot,
ironic=attributes.pop('ironic', None))
for (name, value) in attributes.items():
if not value:
continue
node_info.add_attribute(name, value, session=session)
return node_info
def delete_nodes_not_in_list(uuids):
"""Delete nodes which don't exist in Ironic node UUIDs.
:param uuids: Ironic node UUIDs
"""
inspector_uuids = _list_node_uuids()
for uuid in inspector_uuids - uuids:
LOG.warning('Node %s was deleted from Ironic, dropping from Ironic '
'Inspector database', uuid)
with _get_lock_ctx(uuid):
_delete_node(uuid)
def _delete_node(uuid, session=None):
"""Delete information about a node.
:param uuid: Ironic node UUID
:param session: optional existing database session
"""
with db.ensure_transaction(session) as session:
db.model_query(db.Attribute, session=session).filter_by(
node_uuid=uuid).delete()
for model in (db.Option, db.IntrospectionData, db.Node):
db.model_query(model,
session=session).filter_by(uuid=uuid).delete()
def introspection_active():
"""Check if introspection is active for at least one node."""
# FIXME(dtantsur): is there a better way to express it?
return (db.model_query(db.Node.uuid).filter_by(finished_at=None).first()
is not None)
def active_macs():
"""List all MAC's that are on introspection right now."""
query = (db.model_query(db.Attribute.value).join(db.Node)
.filter(db.Attribute.name == MACS_ATTRIBUTE))
return {x.value for x in query}
def _list_node_uuids():
"""Get all nodes' uuid from cache.
:returns: Set of nodes' uuid.
"""
return {x.uuid for x in db.model_query(db.Node.uuid)}
def get_node(node_id, ironic=None, locked=False):
"""Get node from cache.
:param node_id: node UUID or name.
:param ironic: optional ironic client instance
:param locked: if True, get a lock on node before fetching its data
:returns: structure NodeInfo.
"""
if uuidutils.is_uuid_like(node_id):
node = None
uuid = node_id
else:
node = ir_utils.get_node(node_id, ironic=ironic)
uuid = node.uuid
if locked:
lock = _get_lock(uuid)
lock.acquire()
else:
lock = None
try:
row = db.model_query(db.Node).filter_by(uuid=uuid).first()
if row is None:
raise utils.Error(_('Could not find node %s in cache') % uuid,
code=404)
return NodeInfo.from_row(row, ironic=ironic, lock=lock, node=node)
except Exception:
with excutils.save_and_reraise_exception():
if lock is not None:
lock.release()
def find_node(**attributes):
"""Find node in cache.
Looks up a node based on attributes in a best-match fashion.
This function acquires a lock on a node.
:param attributes: attributes known about this node (like macs, BMC etc)
also ironic client instance may be passed under 'ironic'
:returns: structure NodeInfo with attributes ``uuid`` and ``created_at``
:raises: Error if node is not found or multiple nodes match the attributes
"""
ironic = attributes.pop('ironic', None)
# NOTE(dtantsur): sorting is not required, but gives us predictability
found = collections.Counter()
for (name, value) in sorted(attributes.items()):
if not value:
LOG.debug('Empty value for attribute %s', name)
continue
if not isinstance(value, list):
value = [value]
LOG.debug('Trying to use %s of value %s for node look up',
name, value)
query = db.model_query(db.Attribute.node_uuid)
pairs = [(db.Attribute.name == name) &
(db.Attribute.value == v) for v in value]
query = query.filter(six.moves.reduce(operator.or_, pairs))
found.update(row.node_uuid for row in query.distinct().all())
if not found:
raise utils.NotFoundInCacheError(_(
'Could not find a node for attributes %s') % attributes)
most_common = found.most_common()
LOG.debug('The following nodes match the attributes: %(attributes)s, '
'scoring: %(most_common)s',
{'most_common': ', '.join('%s: %d' % tpl for tpl in most_common),
'attributes': ', '.join('%s=%s' % tpl for tpl in
attributes.items())})
# NOTE(milan) most_common is sorted, higher scores first
highest_score = most_common[0][1]
found = [item[0] for item in most_common if highest_score == item[1]]
if len(found) > 1:
raise utils.Error(_(
'Multiple nodes match the same number of attributes '
'%(attr)s: %(found)s')
% {'attr': attributes, 'found': found}, code=404)
uuid = found.pop()
node_info = NodeInfo(uuid=uuid, ironic=ironic)
node_info.acquire_lock()
try:
row = (db.model_query(db.Node.started_at, db.Node.finished_at).
filter_by(uuid=uuid).first())
if not row:
raise utils.Error(_(
'Could not find node %s in introspection cache, '
'probably it\'s not on introspection now') % uuid, code=404)
if row.finished_at:
raise utils.Error(_(
'Introspection for node %(node)s already finished on '
'%(finish)s') % {'node': uuid, 'finish': row.finished_at})
node_info.started_at = row.started_at
return node_info
except Exception:
with excutils.save_and_reraise_exception():
node_info.release_lock()
def clean_up():
"""Clean up the cache.
Finish introspection for timed out nodes.
:return: list of timed out node UUID's
"""
timeout = CONF.timeout
if timeout <= 0:
return []
threshold = timeutils.utcnow() - datetime.timedelta(seconds=timeout)
uuids = [row.uuid for row in
db.model_query(db.Node.uuid).filter(
db.Node.started_at < threshold,
db.Node.finished_at.is_(None)).all()]
if not uuids:
return []
LOG.error('Introspection for nodes %s has timed out', uuids)
for u in uuids:
node_info = get_node(u, locked=True)
try:
if node_info.finished_at or node_info.started_at > threshold:
continue
if node_info.state != istate.States.waiting:
LOG.error('Something went wrong, timeout occurred '
'while introspection in "%s" state',
node_info.state,
node_info=node_info)
node_info.finished(
istate.Events.timeout, error='Introspection timeout')
finally:
node_info.release_lock()
return uuids
def create_node(driver, ironic=None, **attributes):
"""Create ironic node and cache it.
* Create new node in ironic.
* Cache it in inspector.
* Sets node_info state to enrolling.
:param driver: driver for Ironic node.
:param ironic: ronic client instance.
:param attributes: dict, additional keyword arguments to pass
to the ironic client on node creation.
:return: NodeInfo, or None in case error happened.
"""
if ironic is None:
ironic = ir_utils.get_client()
try:
node = ironic.node.create(driver=driver, **attributes)
except exceptions.InvalidAttribute as e:
LOG.error('Failed to create new node: %s', e)
else:
LOG.info('Node %s was created successfully', node.uuid)
return add_node(node.uuid, istate.States.enrolling, ironic=ironic)
def get_node_list(ironic=None, marker=None, limit=None):
"""Get node list from the cache.
The list of the nodes is ordered based on the (started_at, uuid)
attribute pair, newer items first.
:param ironic: optional ironic client instance
:param marker: pagination marker (an UUID or None)
:param limit: pagination limit; None for default CONF.api_max_limit
:returns: a list of NodeInfo instances.
"""
if marker is not None:
# uuid marker -> row marker for pagination
marker = db.model_query(db.Node).get(marker)
if marker is None:
raise utils.Error(_('Node not found for marker: %s') % marker,
code=404)
rows = db.model_query(db.Node)
# ordered based on (started_at, uuid); newer first
rows = db_utils.paginate_query(rows, db.Node, limit,
('started_at', 'uuid'),
marker=marker, sort_dir='desc')
return [NodeInfo.from_row(row, ironic=ironic) for row in rows]
def store_introspection_data(node_id, introspection_data, processed=True):
"""Store introspection data for this node.
:param node_id: node UUID.
:param introspection_data: A dictionary of introspection data
:param processed: Specify the type of introspected data, set to False
indicates the data is unprocessed.
"""
with db.ensure_transaction() as session:
record = db.model_query(db.IntrospectionData,
session=session).filter_by(
uuid=node_id, processed=processed).first()
if record is None:
row = db.IntrospectionData()
row.update({'uuid': node_id, 'processed': processed,
'data': introspection_data})
session.add(row)
else:
record.update({'data': introspection_data})
session.flush()
def get_introspection_data(node_id, processed=True):
"""Get introspection data for this node.
:param node_id: node UUID.
:param processed: Specify the type of introspected data, set to False
indicates retrieving the unprocessed data.
:return: A dictionary representation of intropsected data
"""
try:
ref = db.model_query(db.IntrospectionData).filter_by(
uuid=node_id, processed=processed).one()
return ref['data']
except orm_errors.NoResultFound:
msg = _('Introspection data not found for node %(node)s, '
'processed=%(processed)s') % {'node': node_id,
'processed': processed}
raise utils.IntrospectionDataNotFound(msg)
| 36.755142
| 79
| 0.603725
|
7be45c6a4732cbdb274cca2412c5c49157bcea40
| 7,339
|
py
|
Python
|
indicators/tests/test_generate_periodic_target.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | null | null | null |
indicators/tests/test_generate_periodic_target.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | 5
|
2021-02-08T20:42:48.000Z
|
2022-03-12T00:19:38.000Z
|
indicators/tests/test_generate_periodic_target.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | null | null | null |
import datetime
from indicators.models import Indicator
from indicators.views.view_utils import (
generate_periodic_target_single,
generate_periodic_targets
)
from django import test
@test.tag('targets', 'fast')
class GenerateSinglePeriodicTarget(test.TestCase):
def setUp(self):
self.start_date = datetime.datetime(2018, 10, 5, 18, 00)
self.nth_target_period = 10
self.event_name = 5
def test_lop_generate_periodic_target_single(self):
"""Do we get back the expected result when we have an LOP?"""
tf = Indicator.LOP
expected = {'period': "Life of Program (LoP) only", 'period_name': u'Life of Program (LoP) only'}
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name=self.event_name)
self.assertDictEqual(expected, result)
def test_mid_generate_periodic_target_single(self):
"""Do we get back the expected result when we have an MID_END?"""
tf = Indicator.MID_END
expected = [{'period': 'Midline', 'period_name': u'Midline'}, {'period': 'Endline', 'period_name': u'Endline'}]
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name=self.event_name)
self.assertEqual(expected, result)
def test_event_generate_periodic_target_single(self):
"""Do we get back the expected result when we have an EVENT?"""
tf = Indicator.EVENT
expected = {'period': self.event_name, 'period_name': self.event_name}
result = generate_periodic_target_single(tf, self.start_date,
0,
event_name=self.event_name)
self.assertEqual(expected, result)
def test_annual(self):
"""Do we get back the correct period name back for ANNUAL frequency"""
tf = Indicator.ANNUAL
expected = {'period': 'Year 11', 'end_date': '2029-10-04',
'start_date': '2028-10-01',
'period_name': u'Year 11',}
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name='')
self.assertDictEqual(expected, result)
def test_semi_annual(self):
"""Do we get the correct period name back for SEMI_ANNUAL frequency"""
tf = Indicator.SEMI_ANNUAL
expected = {'end_date': '2024-04-04',
'period': 'Semi-annual period 11',
'start_date': '2023-10-01',
'period_name': u'Semi-annual period 11',}
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name='')
self.assertDictEqual(expected, result)
def test_tri_annual(self):
"""Do we get the correct period name back for TRI_ANNUAL frequency"""
tf = Indicator.TRI_ANNUAL
expected = {'end_date': '2022-06-04',
'period': 'Tri-annual period 11',
'start_date': '2022-02-01',
'period_name': u'Tri-annual period 11',}
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name='')
self.assertDictEqual(expected, result)
def test_quarterly(self):
"""Do we get the correct period name back for QUARTERLY frequency"""
tf = Indicator.QUARTERLY
expected = {'end_date': '2021-07-04',
'period': 'Quarter 11',
'start_date': '2021-04-01',
'period_name': u'Quarter 11',}
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name='')
self.assertDictEqual(expected, result)
def test_monthly(self):
"""Do we get the correct period name back for MONTHLY frequency"""
tf = Indicator.MONTHLY
expected = {'end_date': '2019-09-04',
'period': 'August 2019',
'start_date': '2019-08-01',
'period_name': u'August 2019',}
result = generate_periodic_target_single(tf, self.start_date,
self.nth_target_period,
event_name='')
self.assertDictEqual(expected, result)
@test.tag('targets', 'fast')
class GenerateMultiplePeriodicTargets(test.TestCase):
"""generate_periodic_targets tests for period name and quantity"""
def setUp(self):
self.start_date = datetime.datetime(2018, 10, 5, 18, 00)
self.total_targets = 10
self.event_name = ''
def test_generate(self):
"""Can we bulk generate periodic targets?"""
tf = Indicator.MONTHLY
result = generate_periodic_targets(tf, self.start_date, self.total_targets,
self.event_name)
self.assertTrue(len(result) == 10)
def test_lop(self):
"""Do we get back the correct response if we are doing
Life of Project?"""
tf = Indicator.LOP
expected = {'period': u'Life of Program (LoP) only', 'period_name': u'Life of Program (LoP) only'}
result = generate_periodic_targets(tf, self.start_date,
self.total_targets,
self.event_name)
self.assertDictEqual(expected, result)
def test_mid(self):
"""Do we get back the correct response if we are doing MID?"""
tf = Indicator.MID_END
expected = [{'period': 'Midline', 'period_name': u'Midline'}, {'period': 'Endline', 'period_name': u'Endline'}]
result = generate_periodic_targets(tf, self.start_date,
self.total_targets,
self.event_name)
self.assertEqual(expected, result)
@test.tag('targets', 'fast')
class GenerateNewPeriodicTargetsWithExisting(test.TestCase):
"""adding periodic targets when targets exist"""
DATE_AWARE_FREQUENCIES = [
Indicator.ANNUAL,
Indicator.SEMI_ANNUAL,
Indicator.TRI_ANNUAL,
Indicator.QUARTERLY,
]
start_date = datetime.datetime(2018, 10, 5)
def test_generate_third_annual(self):
"""does the generated period name for a new target end with the right number (counting up from existing)?"""
for tf in self.DATE_AWARE_FREQUENCIES:
result = generate_periodic_targets(tf, self.start_date, 1, '', 2)
self.assertEqual(result[0]['period'][-1], '3',
"third {0} target period name should end with 3, got {1}".format(
tf, result[0]['period']))
| 42.178161
| 119
| 0.556615
|
e68767e81b0e3b34bd20f1464488df1d1b25c5a4
| 22,151
|
py
|
Python
|
q_encodings/strongly_constrained_encoding.py
|
irfansha/Q-Planner
|
14adad480f2627a3893796b89b54f8c2622d1618
|
[
"MIT"
] | null | null | null |
q_encodings/strongly_constrained_encoding.py
|
irfansha/Q-Planner
|
14adad480f2627a3893796b89b54f8c2622d1618
|
[
"MIT"
] | null | null | null |
q_encodings/strongly_constrained_encoding.py
|
irfansha/Q-Planner
|
14adad480f2627a3893796b89b54f8c2622d1618
|
[
"MIT"
] | null | null | null |
# Irfansha Shaik, 10.04.2021, Aarhus.
from utils.variables_dispatcher import VarDispatcher as vd
from utils.gates import GatesGen as gg
from tarski.syntax import formulas as fr
# TODO: Update to adapt to strong constraints
'''
WARNING: It is possible that empty or gates might cause some problem,
not sure but better to check in testing
'''
class StronglyConstrainedEncoding:
def print_gate_tofile(self, gate, f):
if len(gate) == 1:
f.write(gate[0] + '\n')
else:
f.write(str(gate[1]) + ' = ' + gate[0] + '(' + ', '.join(str(x) for x in gate[2]) + ')\n')
def print_encoding_tofile(self, file_path):
f = open(file_path, 'w')
for gate in self.quantifier_block:
self.print_gate_tofile(gate, f)
f.write('output(' + str(self.final_output_gate) + ')\n')
for gate in self.encoding:
self.print_gate_tofile(gate, f)
# Generates quanifier blocks:
def generate_quantifier_blocks(self):
# Action and parameter variables are first existential layer:
first_layer_variables = []
self.quantifier_block.append(['# Action and parameter variables :'])
for i in range(self.tfunc.parsed_instance.args.plan_length):
self.quantifier_block.append(['# Time step ' + str(i) + ' :'])
self.quantifier_block.append(['# ' + str(self.action_variables[i])])
self.quantifier_block.append(['# ' + str(self.parameter_variables[i])])
first_layer_variables.extend(self.action_variables[i])
for j in range(len(self.parameter_variables[i])):
first_layer_variables.extend(self.parameter_variables[i][j])
self.quantifier_block.append(['exists(' + ', '.join(str(x) for x in first_layer_variables) + ')'])
# Object variables are second forall layer:
second_layer_variables = []
self.quantifier_block.append(['# Forall object variables :'])
self.quantifier_block.append(['# ' + str(self.forall_variables_list)])
for i in range(len(self.forall_variables_list)):
second_layer_variables.extend(self.forall_variables_list[i])
self.quantifier_block.append(['forall(' + ', '.join(str(x) for x in second_layer_variables) + ')'])
# Predicate variables are third existential layer:
third_layer_variables = []
self.quantifier_block.append(['# non-static predicate variables :'])
for i in range(self.tfunc.parsed_instance.args.plan_length + 1):
self.quantifier_block.append(['# ' + str(self.non_static_variables[i])])
third_layer_variables.extend(self.non_static_variables[i])
self.quantifier_block.append(['exists(' + ', '.join(str(x) for x in third_layer_variables) + ')'])
def generate_k_transitions(self):
# Generating transition function for each step:
for i in range(self.tfunc.parsed_instance.args.plan_length):
self.encoding.append(['# Transition funciton for step ' + str(i) + ':'])
# Generating auxilary vars:
step_aux_vars = self.encoding_variables.get_vars(self.tfunc.num_auxilary_variables)
# Appending transition output gates:
self.transition_step_output_gates.append(step_aux_vars[-1])
# Appending all variables required for one time step:
all_vars = []
all_vars.extend(self.action_variables[i])
# Parameter variables:
for j in range(len(self.parameter_variables[i])):
all_vars.extend(self.parameter_variables[i][j])
# Forall variables:
for j in range(len(self.forall_variables_list)):
all_vars.extend(self.forall_variables_list[j])
# i, i+1 th non-static predicates:
all_vars.extend(self.non_static_variables[i])
all_vars.extend(self.non_static_variables[i+1])
# Auxilary variables:
all_vars.extend(step_aux_vars)
self.tfunc.new_transition_copy(all_vars, self.encoding)
# Finds object instantiations of a predicate and computes or-of-and gate:
def generate_initial_predicate_constraints(self, predicate):
list_obj_instances = []
for atom in self.tfunc.parsed_instance.parsed_problem.init.as_atoms():
if (atom.predicate.name == predicate):
self.encoding.append(['# Current atom constraint: ' + str(atom)])
# Gates for one proposition:
single_instance_gates = []
# We generate and gates for each parameter:
for i in range(len(atom.subterms)):
subterm = atom.subterms[i]
cur_variables = self.forall_variables_list[i]
# Finding object index:
obj_index = self.tfunc.probleminfo.object_names.index(subterm.name)
gate_variables = self.tfunc.generate_binary_format(cur_variables, obj_index)
self.gates_generator.and_gate(gate_variables)
single_instance_gates.append(self.gates_generator.output_gate)
self.gates_generator.and_gate(single_instance_gates)
list_obj_instances.append(self.gates_generator.output_gate)
# Finally an or gates for all the instances:
self.gates_generator.or_gate(list_obj_instances)
return self.gates_generator.output_gate
# TODO: Testing is needed
def generate_initial_gate(self):
initial_step_output_gates = []
self.encoding.append(["# ------------------------------------------------------------------------"])
self.encoding.append(['# Initial state: '])
# Constraints for non-static variables:
self.encoding.append(['# Non-static predicate constraints: '])
for non_static_predicate in self.tfunc.probleminfo.non_static_predicates:
self.encoding.append(['# non-static predicate: ' + str(non_static_predicate)])
single_predicate_final_gate = self.generate_initial_predicate_constraints(non_static_predicate)
# Fetching corresponding non-static variable
self.encoding.append(['# iff condition for the predicate '])
# We look at the initial state, so 0th index predicates:
cur_nonstatic_variable = self.non_static_variables[0][self.tfunc.probleminfo.non_static_predicates.index(non_static_predicate)]
self.gates_generator.single_equality_gate(single_predicate_final_gate, cur_nonstatic_variable)
initial_step_output_gates.append(self.gates_generator.output_gate)
# Final and gates of all constraints:
self.encoding.append(['# Final inital output gate:'])
self.gates_generator.and_gate(initial_step_output_gates)
self.initial_output_gate = self.gates_generator.output_gate
# Finds object instantiations of a predicate and computes or-of-and gate:
def generate_goal_predicate_constraints(self, predicate):
# We generate gates for positive and negative clauses separately,
pos_final_gate = 0
neg_final_gate = 0
zero_arity_predicate = 0
list_obj_instances_pos = []
list_obj_instances_neg = []
if (fr.is_atom(self.tfunc.parsed_instance.parsed_problem.goal)):
list_subformulas = [self.tfunc.parsed_instance.parsed_problem.goal]
else:
list_subformulas = self.tfunc.parsed_instance.parsed_problem.goal.subformulas
assert(self.tfunc.parsed_instance.parsed_problem.goal.connective == fr.Connective.And)
for atom in list_subformulas:
# If it is negative atom, then we need to consider as
# compund formula:
if(fr.is_neg(atom)):
# Asserting negation connective:
assert(atom.connective == fr.Connective.Not)
# Asserting single atom:
assert(len(atom.subformulas) == 1)
cur_atom = atom.subformulas[0]
else:
# If not negative, we do not change:
cur_atom = atom
if (cur_atom.predicate.name == predicate):
# If it a zero arity predicate, then either positive or negative must
# be present so returning directly if found:
if (len(cur_atom.subterms) == 0):
# again checking if positive or negative:
if(fr.is_neg(atom)):
zero_arity_predicate = -1
else:
zero_arity_predicate = 1
# We do not look further:
break
# Gates for one proposition:
single_instance_gates = []
# We generate and gates for each parameter:
for i in range(len(cur_atom.subterms)):
subterm = cur_atom.subterms[i]
cur_variables = self.forall_variables_list[i]
# Finding object index:
obj_index = self.tfunc.probleminfo.object_names.index(subterm.name)
gate_variables = self.tfunc.generate_binary_format(cur_variables, obj_index)
self.gates_generator.and_gate(gate_variables)
single_instance_gates.append(self.gates_generator.output_gate)
# We only generate of some instantiation occurs:
if (len(single_instance_gates) != 0):
self.gates_generator.and_gate(single_instance_gates)
# Appending to the right list:
if (fr.is_neg(atom)):
list_obj_instances_neg.append(self.gates_generator.output_gate)
else:
list_obj_instances_pos.append(self.gates_generator.output_gate)
if (len(list_obj_instances_pos) != 0):
# Finally an or gate for all the pos instances:
self.encoding.append(['# Or gate for pos instances:'])
self.gates_generator.or_gate(list_obj_instances_pos)
pos_final_gate = self.gates_generator.output_gate
if (len(list_obj_instances_neg) != 0):
# Finally an or gates for all the neg instances:
self.encoding.append(['# Or gate for neg instances:'])
self.gates_generator.or_gate(list_obj_instances_neg)
neg_final_gate = self.gates_generator.output_gate
return [pos_final_gate, neg_final_gate, zero_arity_predicate]
# Generating goal constraints:
def generate_goal_gate(self):
goal_step_output_gates = []
self.encoding.append(["# ------------------------------------------------------------------------"])
self.encoding.append(['# Goal state: '])
# Only non-static predicates are considered:
# WARNING: we might be missig something, test here if something is wrong
self.encoding.append(['# Non-static predicate constraints: '])
for non_static_predicate in self.tfunc.probleminfo.non_static_predicates:
self.encoding.append(['# non-static predicate: ' + str(non_static_predicate)])
[pos_gate, neg_gate, zero_var] = self.generate_goal_predicate_constraints(non_static_predicate)
# Fetching corresponding non-static variable
# We look at the goal state, so plan length index predicates:
cur_nonstatic_variable = self.non_static_variables[self.tfunc.parsed_instance.args.plan_length][self.tfunc.probleminfo.non_static_predicates.index(non_static_predicate)]
if (pos_gate != 0):
# positive if condition:
self.encoding.append(['# if then condition for the pos predicate '])
self.gates_generator.if_then_gate(pos_gate, cur_nonstatic_variable)
goal_step_output_gates.append(self.gates_generator.output_gate)
if (neg_gate != 0):
# negative if condition:
self.encoding.append(['# if then condition for the neg predicate '])
self.gates_generator.if_then_gate(neg_gate, -cur_nonstatic_variable)
goal_step_output_gates.append(self.gates_generator.output_gate)
if (zero_var == 1):
# if positive zero arity predicate condition:
self.encoding.append(['# positive zero airty predicate '])
goal_step_output_gates.append(cur_nonstatic_variable)
if (zero_var == -1):
# if negative zero arity predicate condition:
self.encoding.append(['# negative zero airty predicate '])
goal_step_output_gates.append(-cur_nonstatic_variable)
# Final and gates of all constraints:
self.encoding.append(['# Final goal output gate:'])
self.gates_generator.and_gate(goal_step_output_gates)
self.goal_output_gate = self.gates_generator.output_gate
# TODO: We might be over-engineering, might work well for strongly constrained
# transition function:
def generate_restricted_forall_constraints(self):
self.encoding.append(["# ------------------------------------------------------------------------"])
self.encoding.append(['# Conditional forall constraints: '])
# All conditional output gates:
all_conditional_output_gates = []
# Generating an object type index, where object type is the key
# and all the object indexs of that types is the value list:
obj_type_index = dict()
# For each type we look at the set of objects with same type and add
# it into out dictionary as indexes:
for tp in self.tfunc.parsed_instance.lang.sorts:
obj_list = list(self.tfunc.parsed_instance.lang.get(tp.name).domain())
obj_index_list = []
for obj in obj_list:
obj_index = self.tfunc.probleminfo.object_names.index(obj.name)
obj_index_list.append(obj_index)
obj_index_list.sort()
obj_type_index[tp] = obj_index_list
# we do not want to iterate through again:
local_valid_type_names_list = []
# Since variables for types always have one parameter,
# we choose first set of forall variables:
cur_variables = self.forall_variables_list[0]
# Constraint for types:
for valid_type in self.tfunc.parsed_instance.valid_types:
single_type_output_gates = []
# We consider only static predicate types:
local_valid_type_names_list.append(valid_type.name)
# gathering all the types for or gate:
cur_gates = []
# if there are no objects, we ignore:
if(len(obj_type_index[valid_type]) == 0):
continue
# Generating conditional clauses:
self.encoding.append(['# Conditional for type ' + str(valid_type.name) + ': '])
for valid_index in obj_type_index[valid_type]:
gate_variables = self.tfunc.generate_binary_format(cur_variables, valid_index)
self.gates_generator.and_gate(gate_variables)
cur_gates.append(self.gates_generator.output_gate)
self.encoding.append(['# Overall or gate for all possiblities: '])
self.gates_generator.or_gate(cur_gates)
single_type_output_gates.append(self.gates_generator.output_gate)
# We need to restrict the other position forall variables for speed up:
for i in range(1, self.tfunc.probleminfo.max_predicate_parameters):
temp_forall_variables = self.forall_variables_list[i]
# We go with first object by default, nothing special:
self.encoding.append(['# restricted object clause: '])
gate_variables = self.tfunc.generate_binary_format(temp_forall_variables, 0)
self.gates_generator.and_gate(gate_variables)
single_type_output_gates.append(self.gates_generator.output_gate)
self.encoding.append(['# And gate for all parameters of single type: '])
self.gates_generator.and_gate(single_type_output_gates)
all_conditional_output_gates.append(self.gates_generator.output_gate)
#print(all_conditional_output_gates)
# Perhaps easier to just go through all the predicates at once:
all_valid_predicates = []
all_valid_predicates.extend(self.tfunc.probleminfo.non_static_predicates)
# Adding constraints for the forall variables based on predicates:
for predicate in all_valid_predicates:
if (predicate not in local_valid_type_names_list):
self.encoding.append(['# Conditional for predicate ' + str(predicate) + ': '])
cur_parameter_types = self.tfunc.parsed_instance.lang.get(predicate).sort
single_predicate_output_gates = []
for i in range(len(cur_parameter_types)):
# depending on the position we fetch forall variables:
cur_variables = self.forall_variables_list[i]
cur_gates = []
# generating or gate for all the possible objects of specified type:
valid_objects_cur_type = obj_type_index[cur_parameter_types[i]]
for valid_index in valid_objects_cur_type:
gate_variables = self.tfunc.generate_binary_format(cur_variables, valid_index)
self.gates_generator.and_gate(gate_variables)
cur_gates.append(self.gates_generator.output_gate)
self.encoding.append(['# Overall or gate for all possiblities for ' + str(i) + 'th parameter:'])
self.gates_generator.or_gate(cur_gates)
single_predicate_output_gates.append(self.gates_generator.output_gate)
# We set rest of the parameters to 0 objects:
for i in range(len(cur_parameter_types), self.tfunc.probleminfo.max_predicate_parameters):
temp_forall_variables = self.forall_variables_list[i]
# We go with first object by default, nothing special:
self.encoding.append(['# restricted object clause: '])
gate_variables = self.tfunc.generate_binary_format(temp_forall_variables, 0)
self.gates_generator.and_gate(gate_variables)
single_predicate_output_gates.append(self.gates_generator.output_gate)
self.encoding.append(['# And gate for all parameter possibilities:'])
self.gates_generator.and_gate(single_predicate_output_gates)
all_conditional_output_gates.append(self.gates_generator.output_gate)
self.encoding.append(['# Final conditional gate: '])
self.gates_generator.or_gate(all_conditional_output_gates)
self.conditional_final_output_gate = self.gates_generator.output_gate
self.encoding.append(["# ------------------------------------------------------------------------"])
def generate_simple_restricted_forall_constraints(self):
self.encoding.append(["# ------------------------------------------------------------------------"])
self.encoding.append(['# Conditional forall constraints: '])
# All conditional output gates:
all_conditional_output_gates = []
for cur_variables in self.forall_variables_list:
for i in range(self.tfunc.probleminfo.num_objects, self.tfunc.probleminfo.num_possible_parameter_values):
cur_invalid_forall_vars_list = self.tfunc.generate_binary_format(cur_variables, i)
self.gates_generator.and_gate(cur_invalid_forall_vars_list)
all_conditional_output_gates.append(self.gates_generator.output_gate)
self.encoding.append(['# Final conditional gate: '])
self.gates_generator.or_gate(all_conditional_output_gates)
self.conditional_final_output_gate = -self.gates_generator.output_gate
self.encoding.append(["# ------------------------------------------------------------------------"])
# Final output gate is an and-gate with inital, goal and transition gates:
def generate_final_gate(self):
final_gates_list = []
final_gates_list.append(self.initial_output_gate)
final_gates_list.append(self.goal_output_gate)
final_gates_list.extend(self.transition_step_output_gates)
self.encoding.append(["# ------------------------------------------------------------------------"])
self.encoding.append(['# Final output gate:'])
self.encoding.append(['# And gate for initial, output and transition functions:'])
self.gates_generator.and_gate(final_gates_list)
# Restricting forall seems expensive, making it optional:
if (self.tfunc.parsed_instance.args.restricted_forall >= 1):
self.encoding.append(['# Conditional gate for forall restriction:'])
self.gates_generator.if_then_gate(self.conditional_final_output_gate, self.gates_generator.output_gate)
self.final_output_gate = self.gates_generator.output_gate
self.encoding.append(["# ------------------------------------------------------------------------"])
def __init__(self, tfunc):
self.tfunc = tfunc
self.encoding_variables = vd()
self.quantifier_block = []
self.encoding = []
self.initial_output_gate = 0 # initial output gate can never be 0
self.goal_output_gate = 0 # goal output gate can never be 0
self.transition_step_output_gates = []
self.conditional_final_output_gate = 0 # Can never be 0
self.final_output_gate = 0 # Can never be 0
# Generating k sets of action and parameter variables:
self.action_variables = []
self.parameter_variables = []
for i in range(tfunc.parsed_instance.args.plan_length):
# Generating logarithmic action variables (along with noop):
single_step_action_vars = self.encoding_variables.get_vars(tfunc.probleminfo.num_action_variables)
self.action_variables.append(single_step_action_vars)
# Generating logarithmic parameter variables for max parameter arity:
single_step_parameter_variable_list = []
for j in range(tfunc.probleminfo.max_action_parameters):
step_parameter_variables = self.encoding_variables.get_vars(tfunc.probleminfo.num_parameter_variables)
single_step_parameter_variable_list.append(step_parameter_variables)
self.parameter_variables.append(single_step_parameter_variable_list)
# generating forall varibles with max non static predicate arity:
self.forall_variables_list = []
for i in range(tfunc.probleminfo.max_non_static_predicate_parameters):
# number of parameter variables is same as predicate parameters:
step_forall_variables = self.encoding_variables.get_vars(tfunc.probleminfo.num_parameter_variables)
self.forall_variables_list.append(step_forall_variables)
# generating k+1 sets of non-static variables for propagation:
self.non_static_variables = []
for i in range(tfunc.parsed_instance.args.plan_length + 1):
step_non_static_variables = self.encoding_variables.get_vars(tfunc.probleminfo.num_non_static_predicates)
self.non_static_variables.append(step_non_static_variables)
# Generating quantifer blocks:
self.generate_quantifier_blocks()
# Generating k steps i.e., plan length number of transitions:
self.generate_k_transitions()
#print(self.transition_step_output_gates)
self.gates_generator = gg(self.encoding_variables, self.encoding)
self.generate_initial_gate()
self.generate_goal_gate()
# Restricting forall seems expensive, making it optional:
if (self.tfunc.parsed_instance.args.restricted_forall == 1):
self.generate_simple_restricted_forall_constraints()
elif(self.tfunc.parsed_instance.args.restricted_forall == 2):
self.generate_restricted_forall_constraints()
self.generate_final_gate()
| 49.55481
| 175
| 0.706289
|
d7cab5e474e1274b1ed0d3abb9436018e3a9dfef
| 7,658
|
py
|
Python
|
data_steward/cdr_cleaner/cleaning_rules/ensure_date_datetime_consistency.py
|
jp3477/curation
|
41f98d57c8273d9963ad6d466a237c99b63c74be
|
[
"MIT"
] | null | null | null |
data_steward/cdr_cleaner/cleaning_rules/ensure_date_datetime_consistency.py
|
jp3477/curation
|
41f98d57c8273d9963ad6d466a237c99b63c74be
|
[
"MIT"
] | null | null | null |
data_steward/cdr_cleaner/cleaning_rules/ensure_date_datetime_consistency.py
|
jp3477/curation
|
41f98d57c8273d9963ad6d466a237c99b63c74be
|
[
"MIT"
] | null | null | null |
"""
Ensuring there are no null datetimes.
Original Issues: DC-614, DC-509, and DC-432
The intent is to copy the date over to the datetime field if the datetime
field is null or incorrect.
"""
# Python imports
import logging
import common
# Project imports
import constants.bq_utils as bq_consts
import constants.cdr_cleaner.clean_cdr as cdr_consts
from cdr_cleaner.cleaning_rules import field_mapping, temporal_consistency
from cdr_cleaner.cleaning_rules.base_cleaning_rule import BaseCleaningRule
LOGGER = logging.getLogger(__name__)
TABLE_DATES = {
common.CONDITION_OCCURRENCE: {
'condition_start_datetime': 'condition_start_date',
'condition_end_datetime': 'condition_end_date'
},
common.DRUG_EXPOSURE: {
'drug_exposure_start_datetime': 'drug_exposure_start_date',
'drug_exposure_end_datetime': 'drug_exposure_end_date'
},
common.DEVICE_EXPOSURE: {
'device_exposure_start_datetime': 'device_exposure_start_date',
'device_exposure_end_datetime': 'device_exposure_end_date'
},
common.MEASUREMENT: {
'measurement_datetime': 'measurement_date'
},
common.OBSERVATION: {
'observation_datetime': 'observation_date'
},
common.PROCEDURE_OCCURRENCE: {
'procedure_datetime': 'procedure_date'
},
common.DEATH: {
'death_datetime': 'death_date'
},
common.SPECIMEN: {
'specimen_datetime': 'specimen_date'
},
common.OBSERVATION_PERIOD: {
'observation_period_start_datetime': 'observation_period_start_date',
'observation_period_end_datetime': 'observation_period_end_date'
},
common.VISIT_OCCURRENCE: {
'visit_start_datetime': 'visit_start_date',
'visit_end_datetime': 'visit_end_date'
}
}
FIX_DATETIME_QUERY = """
SELECT {cols}
FROM `{project_id}.{dataset_id}.{table_id}`
"""
FIX_NULL_OR_INCORRECT_DATETIME_QUERY = """
CASE
WHEN {field} IS NULL
THEN CAST(DATETIME({date_field}, TIME(00,00,00)) AS TIMESTAMP)
WHEN EXTRACT(DATE FROM {field}) = {date_field}
THEN {field}
ELSE CAST(DATETIME({date_field}, EXTRACT(TIME FROM {field})) AS TIMESTAMP)
END AS {field}
"""
class EnsureDateDatetimeConsistency(BaseCleaningRule):
"""
Ensure no nulls and consistency in the datetime and date fields
"""
def __init__(self, project_id, dataset_id, sandbox_dataset_id):
"""
Initialize the class with proper information.
Set the issue numbers, description and affected datasets. As other tickets may affect this SQL,
append them to the list of Jira Issues.
DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS!
"""
desc = (
'Ensures consistency when the datetime field is null or when the date and datetime fields are equal by '
'(1) If the datetime field is null: Setting the datetime field\'s values to the date field\'s date and '
'midnight (00:00:00); or '
'(2) If the date and datetime fields are equal: Setting the datetime field\'s values to the date from the '
'date field and the time from the datetime field.')
super().__init__(issue_numbers=['DC-614', 'DC-509', 'DC-432'],
description=desc,
affected_datasets=[
cdr_consts.RDR, cdr_consts.UNIONED,
cdr_consts.COMBINED
],
project_id=project_id,
dataset_id=dataset_id,
sandbox_dataset_id=sandbox_dataset_id,
affected_tables=[
common.CONDITION_OCCURRENCE, common.DRUG_EXPOSURE,
common.DEVICE_EXPOSURE, common.MEASUREMENT,
common.OBSERVATION, common.PROCEDURE_OCCURRENCE,
common.SPECIMEN, common.DEATH,
common.OBSERVATION_PERIOD, common.VISIT_OCCURRENCE
],
depends_on=[temporal_consistency.TemporalConsistency])
def get_cols(self, table):
"""
Generates the fields to choose along with case statements to generate datetime
And ensures no null datetime values
:param self: instance of EnsureDateDatetimeConsistency class
:param table: table for which the fields are pulled
:return: cols
"""
table_fields = field_mapping.get_domain_fields(table)
col_exprs = []
for field in table_fields:
if field in TABLE_DATES[table]:
col_expr = FIX_NULL_OR_INCORRECT_DATETIME_QUERY.format(
field=field, date_field=TABLE_DATES[table][field])
else:
col_expr = field
col_exprs.append(col_expr)
cols = ', '.join(col_exprs)
return cols
def get_query_specs(self):
"""
This function generates a list of query dicts for ensuring the dates and datetimes are consistent
:return: a list of query dicts for ensuring the dates and datetimes are consistent
"""
queries = []
for table in TABLE_DATES:
query = dict()
query[cdr_consts.QUERY] = FIX_DATETIME_QUERY.format(
project_id=self.project_id,
dataset_id=self.dataset_id,
table_id=table,
cols=self.get_cols(table))
query[cdr_consts.DESTINATION_TABLE] = table
query[cdr_consts.DISPOSITION] = bq_consts.WRITE_TRUNCATE
query[cdr_consts.DESTINATION_DATASET] = self.dataset_id
queries.append(query)
return queries
def setup_rule(self, client):
"""
Function to run any data upload options before executing a query.
"""
pass
def get_sandbox_tablenames(self):
"""
Returns an empty list because this rule does not use sandbox tables.
"""
return []
def setup_validation(self, client):
"""
Run required steps for validation setup
This abstract method was added to the base class after this rule was authored.
This rule needs to implement logic to setup validation on cleaning rules that
will be updating or deleting the values.
Until done no issue exists for this yet.
"""
raise NotImplementedError("Please fix me.")
def validate_rule(self, client):
"""
Validates the cleaning rule which deletes or updates the data from the tables
This abstract method was added to the base class after this rule was authored.
This rule needs to implement logic to run validation on cleaning rules that will
be updating or deleting the values.
Until done no issue exists for this yet.
"""
raise NotImplementedError("Please fix me.")
if __name__ == '__main__':
import cdr_cleaner.clean_cdr_engine as clean_engine
import cdr_cleaner.args_parser as parser
ARGS = parser.parse_args()
if ARGS.list_queries:
clean_engine.add_console_logging()
query_list = clean_engine.get_query_list(
ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id,
[(EnsureDateDatetimeConsistency,)])
for query in query_list:
LOGGER.info(query)
else:
clean_engine.add_console_logging(ARGS.console_log)
clean_engine.clean_dataset(ARGS.project_id, ARGS.dataset_id,
ARGS.sandbox_dataset_id,
[(EnsureDateDatetimeConsistency,)])
| 36.995169
| 119
| 0.639462
|
f4a7de87fba623caa76c27ebaf48641f8dfef5e6
| 6,647
|
py
|
Python
|
np_mnist.py
|
jusonn/Neural-Process
|
7e76811ff90d4ff2ba57f36cfd56b2e1d9a18652
|
[
"MIT"
] | 5
|
2019-02-02T14:03:26.000Z
|
2020-11-07T10:23:26.000Z
|
np_mnist.py
|
jusonn/Neural-Process
|
7e76811ff90d4ff2ba57f36cfd56b2e1d9a18652
|
[
"MIT"
] | null | null | null |
np_mnist.py
|
jusonn/Neural-Process
|
7e76811ff90d4ff2ba57f36cfd56b2e1d9a18652
|
[
"MIT"
] | 1
|
2021-03-17T11:32:35.000Z
|
2021-03-17T11:32:35.000Z
|
import torch
import torch.utils.data
from torch import nn, optim
from torch.nn import functional as F
from torchvision import datasets, transforms
from torchvision.utils import save_image
import random
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
train_loader = torch.utils.data.DataLoader(
datasets.MNIST('./data', train=True, download=True,
transform=transforms.ToTensor()),
batch_size=64, shuffle=True)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('./data', train=False, transform=transforms.ToTensor()),
batch_size=64, shuffle=False)
def get_context_idx(N):
idx = random.sample(range(0, 784), N)
idx = torch.tensor(idx, device=device)
return idx
def generate_grid(h, w):
rows = torch.linspace(0, 1, h, device=device)
cols = torch.linspace(0, 1, w, device=device)
grid = torch.stack( [cols.repeat(h, 1).t().contiguous().view(-1), rows.repeat(w)], dim=1)
grid = grid.unsqueeze(0)
return grid
def idx_to_y(idx, data):
y = torch.index_select(data, dim=1, index=idx)
return y
def idx_to_x(idx, batch_size):
x = torch.index_select(x_grid, dim=1, index=idx)
x = x.expand(batch_size, -1, -1)
return x
class NP(nn.Module):
def __init__(self, r_dim, z_dim):
super(NP, self).__init__()
self.r_dim = r_dim
self.z_dim = z_dim
self.h_1 = nn.Linear(3, 40)
self.h_2 = nn.Linear(40, 40)
self.h_3 = nn.Linear(40, self.r_dim)
self.r_to_z_mean = nn.Linear(self.r_dim, self.z_dim)
self.r_to_z_std = nn.Linear(self.r_dim, self.z_dim)
self.g_1 = nn.Linear(self.z_dim + 2, 40)
self.g_2 = nn.Linear(40, 40)
self.g_3 = nn.Linear(40, 40)
self.g_4 = nn.Linear(40, 40)
self.g_5 = nn.Linear(40, 1)
def h(self, x_y):
x_y = F.relu(self.h_1(x_y))
x_y = F.relu(self.h_2(x_y))
x_y = F.relu(self.h_3(x_y))
return x_y
def aggregate(self, r):
return torch.mean(r, dim=1)
def reparametrize(self, z):
mu, logvar = z
std = torch.exp(0.5 * logvar)
eps = torch.randn_like(std)
z_sample = eps.mul(std).add_(mu)
z_sample = z_sample.unsqueeze(1).expand(-1, 784, -1)
return z_sample
def g(self, z_sample, x_target):
z_x = torch.cat([z_sample, x_target], dim=2)
input = F.relu(self.g_1(z_x))
input = F.relu(self.g_2(input))
input = F.relu(self.g_3(input))
input = F.relu(self.g_4(input))
input = torch.sigmoid(self.g_5(input))
return input
def xy_to_z_params(self, x, y):
x_y = torch.cat([x, y], dim=2)
r_i = self.h(x_y)
r = self.aggregate(r_i)
mu = self.r_to_z_mean(r)
logvar = self.r_to_z_std(r)
return mu, logvar
def forward(self, x_context, y_context, x_all=None, y_all=None):
z_context = self.xy_to_z_params(x_context, y_context)
if self.training:
z_all = self.xy_to_z_params(x_all, y_all)
else:
z_all = z_context
z_sample = self.reparametrize(z_all)
x_target = x_grid.expand(y_context.shape[0], -1, -1)
y_hat = self.g(z_sample, x_target)
return y_hat, z_all, z_context
def kl_div_gaussian(mu_q, logvar_q, mu_p, logvar_p):
var_p = torch.exp(logvar_p)
var_q = torch.exp(logvar_q)
kl_div = (var_q + (mu_q - mu_p)**2) / var_p - 1.0 + logvar_p - logvar_q
kl_div = 0.5 * kl_div.sum()
return kl_div
def np_loss(y_hat, y, z_all, z_context):
BCE = F.binary_cross_entropy(y_hat, y, reduction='sum')
KLD = kl_div_gaussian(z_all[0], z_all[1], z_context[0], z_context[1])
return BCE + KLD
model = NP(300, 300).to(device)
optimizer = optim.Adam(model.parameters(), lr=0.001)
x_grid = generate_grid(28, 28)
def train(epoch):
model.train()
train_loss = 0
for batch_idx, (y_all, _) in enumerate(train_loader):
batch_size = y_all.shape[0]
y_all = y_all.to(device).view(batch_size, -1, 1)
N = random.randint(1, 784)
context_idx = get_context_idx(N)
x_context = idx_to_x(context_idx, batch_size)
y_context = idx_to_y(context_idx, y_all)
x_all = x_grid.expand(batch_size, -1, -1)
optimizer.zero_grad()
y_hat, z_all, z_context = model(x_context, y_context, x_all, y_all)
loss = np_loss(y_hat, y_all, z_all, z_context)
loss.backward()
train_loss += loss.item()
optimizer.step()
print("Epoch {} Average Loss: {:.4f}".format(epoch, train_loss/len(train_loader.dataset)))
def test(epoch):
model.eval()
test_loss = 0
with torch.no_grad():
for i, (y_all, _) in enumerate(test_loader):
y_all = y_all.to(device).view(y_all.shape[0], -1, 1)
batch_size = y_all.shape[0]
N = 30
context_idx = get_context_idx(N)
x_context = idx_to_x(context_idx, batch_size)
y_context = idx_to_y(context_idx, y_all)
y_hat, z_all, z_context = model(x_context, y_context)
test_loss += np_loss(y_hat, y_all, z_all, z_context).item()
if i == 0:
plot_Ns = [10, 100, 300, 784]
num_examples = min(batch_size, 16)
for N in plot_Ns:
recons = []
context_idx = get_context_idx(N)
x_context = idx_to_x(context_idx, batch_size)
y_context = idx_to_y(context_idx, y_all)
for d in range(5):
y_hat, _, _ = model(x_context, y_context)
recons.append(y_hat[:num_examples])
recons = torch.cat(recons).view(-1, 1, 28, 28).expand(-1, 3, -1, -1)
background = torch.tensor([0., 0., 1.], device=device)
background = background.view(1, -1, 1).expand(num_examples, 3, 784).contiguous()
context_pixels = y_all[:num_examples].view(num_examples, 1, -1)[:, :, context_idx]
context_pixels = context_pixels.expand(num_examples, 3, -1)
background[:, :, context_idx] = context_pixels
comparison = torch.cat([background.view(-1, 3, 28, 28), recons])
save_image(comparison.cpu(), 'result/ep_' + str(epoch) + '_cps_' + str(N) + '.png',
nrow=num_examples)
test_loss /= len(test_loader.dataset)
print('====> Test set loss: {:.4f}'.format(test_loss))
for epoch in range(10):
train(epoch)
test(epoch)
| 34.984211
| 103
| 0.591395
|
c083f7e39257f336c2c555756daaae4d05ba24d7
| 227
|
py
|
Python
|
30/01/1.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | null | null | null |
30/01/1.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | 70
|
2017-06-01T11:02:51.000Z
|
2017-06-30T00:35:32.000Z
|
30/01/1.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | null | null | null |
from array import array
ary = array('B', [0, 255, 127]) # B: unsigned char 0〜255(0x00〜0xFF)
#ary = array('B', [0, 255, -1]) # OverflowError: unsigned byte integer is less than minimum
for a in ary: print(a)
print(sorted(ary))
| 37.833333
| 91
| 0.674009
|
f5eed238404a957910b2fdc14fe482193784f494
| 10,364
|
py
|
Python
|
spyder/utils/misc.py
|
Earthman100/spyder
|
949ce0f9100a69504c70a5678e8589a05aee7d38
|
[
"MIT"
] | 7,956
|
2015-02-17T01:19:09.000Z
|
2022-03-31T21:52:15.000Z
|
spyder/utils/misc.py
|
Earthman100/spyder
|
949ce0f9100a69504c70a5678e8589a05aee7d38
|
[
"MIT"
] | 16,326
|
2015-02-16T23:15:21.000Z
|
2022-03-31T23:34:34.000Z
|
spyder/utils/misc.py
|
Earthman100/spyder
|
949ce0f9100a69504c70a5678e8589a05aee7d38
|
[
"MIT"
] | 1,918
|
2015-02-20T19:26:26.000Z
|
2022-03-31T19:03:25.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Miscellaneous utilities"""
import functools
import logging
import os
import os.path as osp
import re
import sys
import stat
import socket
from spyder.py3compat import is_text_string, getcwd
from spyder.config.base import get_home_dir
logger = logging.getLogger(__name__)
def __remove_pyc_pyo(fname):
"""Eventually remove .pyc and .pyo files associated to a Python script"""
if osp.splitext(fname)[1] == '.py':
for ending in ('c', 'o'):
if osp.exists(fname+ending):
os.remove(fname+ending)
def rename_file(source, dest):
"""
Rename file from *source* to *dest*
If file is a Python script, also rename .pyc and .pyo files if any
"""
os.rename(source, dest)
__remove_pyc_pyo(source)
def remove_file(fname):
"""
Remove file *fname*
If file is a Python script, also rename .pyc and .pyo files if any
"""
os.remove(fname)
__remove_pyc_pyo(fname)
def move_file(source, dest):
"""
Move file from *source* to *dest*
If file is a Python script, also rename .pyc and .pyo files if any
"""
import shutil
shutil.copy(source, dest)
remove_file(source)
def onerror(function, path, excinfo):
"""Error handler for `shutil.rmtree`.
If the error is due to an access error (read-only file), it
attempts to add write permission and then retries.
If the error is for another reason, it re-raises the error.
Usage: `shutil.rmtree(path, onerror=onerror)"""
if not os.access(path, os.W_OK):
# Is the error an access error?
os.chmod(path, stat.S_IWUSR)
function(path)
else:
raise
def select_port(default_port=20128):
"""Find and return a non used port"""
import socket
while True:
try:
sock = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
# sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind( ("127.0.0.1", default_port) )
except socket.error as _msg: # analysis:ignore
default_port += 1
else:
break
finally:
sock.close()
sock = None
return default_port
def count_lines(path, extensions=None, excluded_dirnames=None):
"""Return number of source code lines for all filenames in subdirectories
of *path* with names ending with *extensions*
Directory names *excluded_dirnames* will be ignored"""
if extensions is None:
extensions = ['.py', '.pyw', '.ipy', '.enaml', '.c', '.h', '.cpp',
'.hpp', '.inc', '.', '.hh', '.hxx', '.cc', '.cxx',
'.cl', '.f', '.for', '.f77', '.f90', '.f95', '.f2k',
'.f03', '.f08']
if excluded_dirnames is None:
excluded_dirnames = ['build', 'dist', '.hg', '.svn']
def get_filelines(path):
dfiles, dlines = 0, 0
if osp.splitext(path)[1] in extensions:
dfiles = 1
with open(path, 'rb') as textfile:
dlines = len(textfile.read().strip().splitlines())
return dfiles, dlines
lines = 0
files = 0
if osp.isdir(path):
for dirpath, dirnames, filenames in os.walk(path):
for d in dirnames[:]:
if d in excluded_dirnames:
dirnames.remove(d)
if excluded_dirnames is None or \
osp.dirname(dirpath) not in excluded_dirnames:
for fname in filenames:
dfiles, dlines = get_filelines(osp.join(dirpath, fname))
files += dfiles
lines += dlines
else:
dfiles, dlines = get_filelines(path)
files += dfiles
lines += dlines
return files, lines
def remove_backslashes(path):
"""Remove backslashes in *path*
For Windows platforms only.
Returns the path unchanged on other platforms.
This is especially useful when formatting path strings on
Windows platforms for which folder paths may contain backslashes
and provoke unicode decoding errors in Python 3 (or in Python 2
when future 'unicode_literals' symbol has been imported)."""
if os.name == 'nt':
# Removing trailing single backslash
if path.endswith('\\') and not path.endswith('\\\\'):
path = path[:-1]
# Replacing backslashes by slashes
path = path.replace('\\', '/')
path = path.replace('/\'', '\\\'')
return path
def get_error_match(text):
"""Return error match"""
import re
return re.match(r' File "(.*)", line (\d*)', text)
def get_python_executable():
"""Return path to Python executable"""
executable = sys.executable.replace("pythonw.exe", "python.exe")
if executable.endswith("spyder.exe"):
# py2exe distribution
executable = "python.exe"
return executable
def monkeypatch_method(cls, patch_name):
# This function's code was inspired from the following thread:
# "[Python-Dev] Monkeypatching idioms -- elegant or ugly?"
# by Robert Brewer <fumanchu at aminus.org>
# (Tue Jan 15 19:13:25 CET 2008)
"""
Add the decorated method to the given class; replace as needed.
If the named method already exists on the given class, it will
be replaced, and a reference to the old method is created as
cls._old<patch_name><name>. If the "_old_<patch_name>_<name>" attribute
already exists, KeyError is raised.
"""
def decorator(func):
fname = func.__name__
old_func = getattr(cls, fname, None)
if old_func is not None:
# Add the old func to a list of old funcs.
old_ref = "_old_%s_%s" % (patch_name, fname)
old_attr = getattr(cls, old_ref, None)
if old_attr is None:
setattr(cls, old_ref, old_func)
else:
raise KeyError("%s.%s already exists."
% (cls.__name__, old_ref))
setattr(cls, fname, func)
return func
return decorator
def is_python_script(fname):
"""Is it a valid Python script?"""
return osp.isfile(fname) and fname.endswith(('.py', '.pyw', '.ipy'))
def abspardir(path):
"""Return absolute parent dir"""
return osp.abspath(osp.join(path, os.pardir))
def get_common_path(pathlist):
"""Return common path for all paths in pathlist"""
common = osp.normpath(osp.commonprefix(pathlist))
if len(common) > 1:
if not osp.isdir(common):
return abspardir(common)
else:
for path in pathlist:
if not osp.isdir(osp.join(common, path[len(common)+1:])):
# `common` is not the real common prefix
return abspardir(common)
else:
return osp.abspath(common)
def add_pathlist_to_PYTHONPATH(env, pathlist, drop_env=True):
"""
Add a PYTHONPATH entry to a list of environment variables.
This allows to extend the environment of an external process
created with QProcess with our additions to PYTHONPATH.
Parameters
----------
env: list
List of environment variables in the format of
QProcessEnvironment.
pathlist: list
List of paths to add to PYTHONPATH
drop_env: bool
Whether to drop PYTHONPATH previously found in the environment.
"""
# PyQt API 1/2 compatibility-related tests:
assert isinstance(env, list)
assert all([is_text_string(path) for path in env])
pypath = "PYTHONPATH"
pathstr = os.pathsep.join(pathlist)
if not drop_env:
for index, var in enumerate(env[:]):
if var.startswith(pypath + '='):
env[index] = var.replace(
pypath + '=',
pypath + '=' + pathstr + os.pathsep
)
else:
env.append(pypath + '=' + pathstr)
def memoize(obj):
"""
Memoize objects to trade memory for execution speed
Use a limited size cache to store the value, which takes into account
The calling args and kwargs
See https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
"""
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
# only keep the most recent 100 entries
if len(cache) > 100:
cache.popitem(last=False)
return cache[key]
return memoizer
def getcwd_or_home():
"""Safe version of getcwd that will fallback to home user dir.
This will catch the error raised when the current working directory
was removed for an external program.
"""
try:
return getcwd()
except OSError:
logger.debug("WARNING: Current working directory was deleted, "
"falling back to home dirertory")
return get_home_dir()
def regexp_error_msg(pattern):
"""
Return None if the pattern is a valid regular expression or
a string describing why the pattern is invalid.
"""
try:
re.compile(pattern)
except re.error as e:
return str(e)
return None
def check_connection_port(address, port):
"""Verify if `port` is available in `address`."""
# Create a TCP socket
s = socket.socket()
s.settimeout(2)
logger.debug("Attempting to connect to {} on port {}".format(
address, port))
try:
s.connect((address, port))
logger.debug("Connected to {} on port {}".format(address, port))
return True
except socket.error as e:
logger.debug("Connection to {} on port {} failed: {}".format(
address, port, e))
return False
finally:
s.close()
| 31.791411
| 78
| 0.584523
|
992c4a1c6123b0b72902d1e73d1d62e0581ef60e
| 25
|
py
|
Python
|
MockBot/__init__.py
|
dgisolfi/MockBot
|
2b7cae9d0241c3a8fcd5035356e596e0ebc3a4a2
|
[
"MIT"
] | null | null | null |
MockBot/__init__.py
|
dgisolfi/MockBot
|
2b7cae9d0241c3a8fcd5035356e596e0ebc3a4a2
|
[
"MIT"
] | null | null | null |
MockBot/__init__.py
|
dgisolfi/MockBot
|
2b7cae9d0241c3a8fcd5035356e596e0ebc3a4a2
|
[
"MIT"
] | null | null | null |
from MockBot.bot import *
| 25
| 25
| 0.8
|
816aaa405033e9ca9a6b5fbc38e7d5869d041952
| 299
|
py
|
Python
|
apps/static/models/__init__.py
|
Sult/daf
|
a4da9e8c96f70577e2490c05e82bdf7d0de1a563
|
[
"MIT"
] | null | null | null |
apps/static/models/__init__.py
|
Sult/daf
|
a4da9e8c96f70577e2490c05e82bdf7d0de1a563
|
[
"MIT"
] | null | null | null |
apps/static/models/__init__.py
|
Sult/daf
|
a4da9e8c96f70577e2490c05e82bdf7d0de1a563
|
[
"MIT"
] | null | null | null |
from .agt import *
from .cert import *
from .char import *
from .crp import *
from .dmg import *
from .eve import *
from .industry import *
from .inv import *
from .mapt import *
from .planet import *
from .ram import *
from .sta import *
from .trn import *
from .war import *
#from .temp import *
| 17.588235
| 23
| 0.692308
|
fb9e061424fcf5231137e981e095a483274e46b5
| 7,126
|
py
|
Python
|
openstackclient/tests/functional/identity/v3/test_role.py
|
alvarosimon/python-openstackclient
|
2ab3396f19796935ddcb281b865d37839a4f84f7
|
[
"Apache-2.0"
] | null | null | null |
openstackclient/tests/functional/identity/v3/test_role.py
|
alvarosimon/python-openstackclient
|
2ab3396f19796935ddcb281b865d37839a4f84f7
|
[
"Apache-2.0"
] | null | null | null |
openstackclient/tests/functional/identity/v3/test_role.py
|
alvarosimon/python-openstackclient
|
2ab3396f19796935ddcb281b865d37839a4f84f7
|
[
"Apache-2.0"
] | 1
|
2020-07-21T02:18:23.000Z
|
2020-07-21T02:18:23.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.common.utils import data_utils
from openstackclient.tests.functional.identity.v3 import common
class RoleTests(common.IdentityTests):
def test_role_create(self):
self._create_dummy_role()
def test_role_delete(self):
role_name = self._create_dummy_role(add_clean_up=False)
raw_output = self.openstack('role delete %s' % role_name)
self.assertEqual(0, len(raw_output))
def test_role_list(self):
self._create_dummy_role()
raw_output = self.openstack('role list')
items = self.parse_listing(raw_output)
self.assert_table_structure(items, common.BASIC_LIST_HEADERS)
def test_role_list_with_user_project(self):
role_name = self._create_dummy_role()
username = self._create_dummy_user()
raw_output = self.openstack(
'role add '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'%(role)s' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name,
'role': role_name})
self.addCleanup(
self.openstack,
'role remove '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'%(role)s' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name,
'role': role_name})
self.assertEqual(0, len(raw_output))
raw_output = self.openstack(
'role list '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name})
items = self.parse_listing(raw_output)
self.assert_table_structure(items, common.BASIC_LIST_HEADERS)
self.assertEqual(1, len(items))
def test_role_show(self):
role_name = self._create_dummy_role()
raw_output = self.openstack('role show %s' % role_name)
items = self.parse_show(raw_output)
self.assert_show_fields(items, self.ROLE_FIELDS)
def test_role_set(self):
role_name = self._create_dummy_role()
new_role_name = data_utils.rand_name('NewTestRole')
raw_output = self.openstack(
'role set --name %s %s' % (new_role_name, role_name))
self.assertEqual(0, len(raw_output))
raw_output = self.openstack('role show %s' % new_role_name)
role = self.parse_show_as_object(raw_output)
self.assertEqual(new_role_name, role['name'])
def test_role_add(self):
role_name = self._create_dummy_role()
username = self._create_dummy_user()
raw_output = self.openstack(
'role add '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'%(role)s' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name,
'role': role_name})
self.addCleanup(
self.openstack,
'role remove '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'%(role)s' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name,
'role': role_name})
self.assertEqual(0, len(raw_output))
def test_role_remove(self):
role_name = self._create_dummy_role()
username = self._create_dummy_user()
add_raw_output = self.openstack(
'role add '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'%(role)s' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name,
'role': role_name})
remove_raw_output = self.openstack(
'role remove '
'--project %(project)s '
'--project-domain %(project_domain)s '
'--user %(user)s '
'--user-domain %(user_domain)s '
'%(role)s' % {'project': self.project_name,
'project_domain': self.domain_name,
'user': username,
'user_domain': self.domain_name,
'role': role_name})
self.assertEqual(0, len(add_raw_output))
self.assertEqual(0, len(remove_raw_output))
def test_implied_role_list(self):
self._create_dummy_implied_role()
raw_output = self.openstack('implied role list')
items = self.parse_listing(raw_output)
self.assert_table_structure(items, self.IMPLIED_ROLE_LIST_HEADERS)
self.assertEqual(3, len(items))
def test_implied_role_create(self):
role_name = self._create_dummy_role()
implied_role_name = self._create_dummy_role()
self.openstack(
'implied role create '
'--implied-role %(implied_role)s '
'%(role)s' % {'implied_role': implied_role_name,
'role': role_name})
def test_implied_role_delete(self):
implied_role_name, role_name = self._create_dummy_implied_role()
raw_output = self.openstack(
'implied role delete '
'--implied-role %(implied_role)s '
'%(role)s' % {'implied_role': implied_role_name,
'role': role_name})
self.assertEqual(0, len(raw_output))
| 41.672515
| 78
| 0.561184
|
51f1fe44f84c9725c6ad5fb8a2c1aa97faaf867b
| 2,503
|
py
|
Python
|
lfp_atn_simuran/multi_runs/run_lfp_rate.py
|
seankmartin/lfp_atn
|
647889eddfa9ba3910c74df7e61f10fd98c61854
|
[
"MIT"
] | null | null | null |
lfp_atn_simuran/multi_runs/run_lfp_rate.py
|
seankmartin/lfp_atn
|
647889eddfa9ba3910c74df7e61f10fd98c61854
|
[
"MIT"
] | null | null | null |
lfp_atn_simuran/multi_runs/run_lfp_rate.py
|
seankmartin/lfp_atn
|
647889eddfa9ba3910c74df7e61f10fd98c61854
|
[
"MIT"
] | null | null | null |
"""
simuran_multi_params describes running main.py for multiple
different recordings, functions, etc.
In theory, this could describe a full experiment.
"""
import os
def create_new_entry(batch_param_loc, fn_param_loc, add=""):
def make_default_dict(add=""):
param_names = {}
# file_list_name is often specific to each analysis
# as it lists all recordings found under the given regex
param_names["file_list_name"] = "file_list{}.txt".format(add)
param_names["cell_list_name"] = "cell_list{}.txt".format(add)
return param_names
output_dict = make_default_dict(add=add)
output_dict["batch_param_loc"] = batch_param_loc
output_dict["fn_param_loc"] = fn_param_loc
return output_dict
def set_file_locations():
import os
output = []
for val in [1, 2, 3, 4, 5, 6]:
output.append(
(
os.path.join(
"__thisdirname__", "..", "batch_params", "CSR{}-openfield.py"
).format(val),
os.path.join(
"__thisdirname__", "..", "functions", "fn_lfp_rate.py"
),
"CSR{}".format(val),
)
)
for val in [1, 3, 4, 5, 6]:
output.append(
(
os.path.join(
"__thisdirname__", "..", "batch_params", "LSR{}-openfield.py"
).format(val),
os.path.join(
"__thisdirname__", "..", "functions", "fn_lfp_rate.py"
),
"LSR{}".format(val),
)
)
return output
def set_fixed_params(in_dict):
in_dict["default_param_folder"] = None
# Can set a function to run after all analysis here
# For example, it could plot a summary of all the data
in_dict["after_batch_fn"] = None
# If the after batch function needs the full dataset
# Pass this as true
# For example, this could be used to concatenate
# EEG signals that were recorded in two second long trials
in_dict["keep_all_data"] = False
return in_dict
# Setup the actual parameters
params = {"run_list": [], "to_merge": []}
params = set_fixed_params(params)
for val in set_file_locations():
param_dict = create_new_entry(val[0], val[1], val[2])
fn_name = os.path.splitext(os.path.basename(val[1]))[0]
if fn_name not in params["to_merge"]:
params["to_merge"].append(fn_name)
params["run_list"].append(param_dict)
| 28.123596
| 81
| 0.59129
|
574b9a799c7616273ccc8fe8ca0ca1ca26e2a90a
| 255
|
py
|
Python
|
manage.py
|
staciajohanna/squadbox
|
4701062e64c2e1d3ad75ac01fb75cf55caa1ee10
|
[
"MIT"
] | 23
|
2017-10-29T17:36:30.000Z
|
2020-12-04T17:44:07.000Z
|
manage.py
|
anantb/voicex
|
9c22f8bf02818c6fdda6a99717bc1a1ccc9bdecd
|
[
"MIT"
] | 60
|
2017-10-10T21:49:26.000Z
|
2021-06-10T19:12:42.000Z
|
manage.py
|
anantb/voicex
|
9c22f8bf02818c6fdda6a99717bc1a1ccc9bdecd
|
[
"MIT"
] | 8
|
2017-12-09T16:18:53.000Z
|
2021-07-12T04:08:29.000Z
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "http_handler.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 23.181818
| 76
| 0.776471
|
b66e99b413bcc59f2f5c73d98ef98eace1f7cdb5
| 6,071
|
py
|
Python
|
reference-implementation/cogs/eval.py
|
tilda/lolbot
|
9d03ce1cbd506329d3538ec16a62895f8693016a
|
[
"MIT"
] | 3
|
2017-07-18T11:49:58.000Z
|
2018-02-25T00:10:16.000Z
|
reference-implementation/cogs/eval.py
|
tilda/lolbot
|
9d03ce1cbd506329d3538ec16a62895f8693016a
|
[
"MIT"
] | 6
|
2017-08-27T02:51:54.000Z
|
2018-01-22T03:09:06.000Z
|
reference-implementation/cogs/eval.py
|
tilda/lolbot
|
9d03ce1cbd506329d3538ec16a62895f8693016a
|
[
"MIT"
] | 5
|
2017-10-07T16:41:21.000Z
|
2018-02-05T22:01:48.000Z
|
"""
The MIT License (MIT)
Copyright (c) 2018 tilda
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
"""
Handy exec (eval, debug) cog. Allows you to run code on the bot during runtime. This cog
is a combination of the exec commands of other bot authors:
Credit:
- Rapptz (Danny)
- https://github.com/Rapptz/RoboDanny/blob/master/cogs/repl.py#L31-L75
- b1naryth1ef (B1nzy, Andrei)
- https://github.com/b1naryth1ef/b1nb0t/blob/master/plugins/util.py#L220-L257
Features:
- Strips code markup (code blocks, inline code markup)
- Access to last result with _
- _get and _find instantly available without having to import discord
- Redirects stdout so you can print()
- Sane syntax error reporting
"""
import io
import logging
import textwrap
import traceback
from contextlib import redirect_stdout
# noinspection PyPackageRequirements
import aiohttp
# noinspection PyPackageRequirements
import discord
# noinspection PyPackageRequirements
from discord.ext import commands
# noinspection PyPackageRequirements
from cogs.utils import paste
log = logging.getLogger(__name__)
def strip_code_markup(content: str) -> str:
""" Strips code markup from a string. """
# ```py
# code
# ```
if content.startswith("```") and content.endswith("```"):
# grab the lines in the middle
return "\n".join(content.split("\n")[1:-1])
# `code`
return content.strip("` \n")
def format_syntax_error(e: SyntaxError) -> str:
""" Formats a SyntaxError. """
if e.text is None:
return "```py\n{0.__class__.__name__}: {0}\n```".format(e)
# display a nice arrow
return "```py\n{0.text}{1:>{0.offset}}\n{2}: {0}```".format(
e, "^", type(e).__name__
)
class Exec(commands.Cog):
def __init__(self, bot, *args, **kwargs):
self.bot = bot
self.last_result = None
@commands.command(name="eval", aliases=["exec", "debug"])
@commands.is_owner()
async def _eval(self, ctx, *, code: str):
""" Executes Python code. """
async def upload(file_name: str):
with open(file_name, "rb") as fp:
await ctx.send(file=discord.File(fp))
async def send(*args, **kwargs):
await ctx.send(*args, **kwargs)
env = {
"self": self,
"bot": ctx.bot,
"ctx": ctx,
"msg": ctx.message,
"guild": ctx.guild,
"channel": ctx.channel,
"me": ctx.message.author,
# utilities
"_get": discord.utils.get,
"_find": discord.utils.find,
"_upload": upload,
"_send": send,
# last result
"_": self.last_result,
}
env.update(globals())
# remove any markup that might be in the message
code = strip_code_markup(code)
# add an implicit return at the end
lines = code.split("\n")
if not lines[-1].startswith("return") and not lines[-1].startswith(" "):
lines[-1] = "return " + lines[-1]
code = "\n".join(lines)
# simulated stdout
stdout = io.StringIO()
# wrap the code in a function, so that we can use await
wrapped_code = "async def func():\n" + textwrap.indent(code, " ")
if code == "bot.http.token":
await ctx.message.add_reaction(ctx.bot.emoji.success)
return await ctx.send("```py\n'Nice try!'\n```")
try:
exec(compile(wrapped_code, "<exec>", "exec"), env)
except SyntaxError as e:
return await ctx.send(format_syntax_error(e))
func = env["func"]
try:
with redirect_stdout(stdout):
ret = await func()
except Exception:
# something went wrong
stream = stdout.getvalue()
await ctx.send("```py\n{}{}\n```".format(stream, traceback.format_exc()))
else:
# successful
stream = stdout.getvalue()
try:
await ctx.message.add_reaction(ctx.bot.emoji.success)
except discord.Forbidden:
# couldn't add the reaction, ignore
log.warning("Failed to add reaction to eval message, ignoring.")
try:
self.last_result = self.last_result if ret is None else ret
await ctx.send("```py\n{}{}\n```".format(stream, repr(ret)))
except discord.HTTPException:
# too long
try:
url = await paste.haste(ctx.bot.session, stream + repr(ret))
await ctx.send("Result was too long. " + url)
except KeyError:
# even hastebin couldn't handle it
await ctx.send("Result was too long, even for Hastebin.")
except aiohttp.ClientError:
await ctx.send(
"Unable to send the result to Hastebin, it's probably down."
)
def setup(bot):
bot.add_cog(Exec(bot))
| 33.174863
| 88
| 0.607643
|
c5f4d5e39f8b870f5232187a302a675a23642d5f
| 5,445
|
py
|
Python
|
evaluation/jenkins/second_run_gear_images_without_cache.py
|
seveirbian/gear-old
|
8d3529a9bf42e652a9d7475c9d14e9a6afc69a76
|
[
"Apache-2.0"
] | null | null | null |
evaluation/jenkins/second_run_gear_images_without_cache.py
|
seveirbian/gear-old
|
8d3529a9bf42e652a9d7475c9d14e9a6afc69a76
|
[
"Apache-2.0"
] | null | null | null |
evaluation/jenkins/second_run_gear_images_without_cache.py
|
seveirbian/gear-old
|
8d3529a9bf42e652a9d7475c9d14e9a6afc69a76
|
[
"Apache-2.0"
] | null | null | null |
import sys
# package need to be installed, pip install docker
import docker
import time
import yaml
import os
import random
import subprocess
import signal
import urllib2
import shutil
import xlwt
# package need to be installed, pip install hvac
import hvac
auto = False
private_registry = "202.114.10.146:9999/"
suffix = "-gearmd"
apppath = ""
# run paraments
hostPort = 8080
localVolume = ""
pwd = os.path.split(os.path.realpath(__file__))[0]
runEnvironment = []
runPorts = {"8080/tcp": hostPort, "50000/tcp": 50000, }
runVolumes = {}
runWorking_dir = ""
runCommand = ""
waitline = ""
# result
result = [["tag", "finishTime", "local data", "pull data"], ]
class Runner:
def __init__(self, images):
self.images_to_pull = images
def check(self):
# detect whether the file exists, if true, delete it
if os.path.exists("./images_run.txt"):
os.remove("./images_run.txt")
def run(self):
self.check()
client = docker.from_env()
# if don't give a tag, then all image under this registry will be pulled
repos = self.images_to_pull[0]["repo"]
for repo in repos:
tags = self.images_to_pull[1][repo]
for tag in tags:
private_repo = private_registry + repo + suffix + ":" + tag
if localVolume != "":
if os.path.exists(localVolume) == False:
os.makedirs(localVolume)
print "start running: ", private_repo
# create a random name
runName = '%d' % (random.randint(1,100000000))
# get present time
startTime = time.time()
# get present net data
cnetdata = get_net_data()
# run images
container = client.containers.create(image=private_repo, environment=runEnvironment,
ports=runPorts, volumes=runVolumes, working_dir=runWorking_dir,
command=runCommand, name=runName, detach=True, )
container.start()
while True:
if time.time() - startTime > 600:
break
try:
req = urllib2.urlopen('http://localhost:%d'%hostPort, timeout = 10)
if req.code == 200:
print "OK!"
req.close()
break
except urllib2.HTTPError, e:
if e.code == 403:
print "also OK!"
break
else:
time.sleep(0.1) # wait 100ms
pass
except:
time.sleep(0.1) # wait 100ms
pass
# print run time
finishTime = time.time() - startTime
print "finished in " , finishTime, "s"
container_path = os.path.join("/var/lib/gear/private", private_repo)
local_data = subprocess.check_output(['du','-ms', container_path]).split()[0].decode('utf-8')
print "local data: ", local_data
pull_data = get_net_data() - cnetdata
print "pull data: ", pull_data
try:
container.kill()
except:
print "kill fail!"
pass
container.remove(force=True)
# delete files under /var/lib/gear/public/
shutil.rmtree('/var/lib/gear/public/')
os.mkdir('/var/lib/gear/public/')
print "empty cache! \n"
# record the image and its Running time
result.append([tag, finishTime, int(local_data), pull_data])
if auto != True:
raw_input("Next?")
else:
time.sleep(5)
if localVolume != "":
shutil.rmtree(localVolume)
class Generator:
def __init__(self, profilePath=""):
self.profilePath = profilePath
def generateFromProfile(self):
if self.profilePath == "":
print "Error: profile path is null"
with open(self.profilePath, 'r') as f:
self.images = yaml.load(f, Loader=yaml.FullLoader)
return self.images
def get_net_data():
netCard = "/proc/net/dev"
fd = open(netCard, "r")
for line in fd.readlines():
if line.find("enp0s3") >= 0:
field = line.split()
data = float(field[1]) / 1024.0 / 1024.0
fd.close()
return data
if __name__ == "__main__":
if len(sys.argv) == 2:
auto = True
generator = Generator(os.path.split(os.path.realpath(__file__))[0]+"/image_versions.yaml")
images = generator.generateFromProfile()
runner = Runner(images)
runner.run()
# create a workbook sheet
workbook = xlwt.Workbook()
sheet = workbook.add_sheet("run_time")
for row in range(len(result)):
for column in range(len(result[row])):
sheet.write(row, column, result[row][column])
workbook.save(os.path.split(os.path.realpath(__file__))[0]+"/second_run_without_cache.xls")
| 28.809524
| 109
| 0.51405
|
6e10c8085e7ee2b0272707a6984a4a1b2e88e290
| 21,079
|
py
|
Python
|
snake.py
|
andyhou965/SnakeAI
|
8520c4c9a2dcd8a844e298adc3f939ec8b7ce3a5
|
[
"MIT"
] | null | null | null |
snake.py
|
andyhou965/SnakeAI
|
8520c4c9a2dcd8a844e298adc3f939ec8b7ce3a5
|
[
"MIT"
] | null | null | null |
snake.py
|
andyhou965/SnakeAI
|
8520c4c9a2dcd8a844e298adc3f939ec8b7ce3a5
|
[
"MIT"
] | null | null | null |
import numpy as np
from typing import Tuple, Optional, Union, Set, Dict, Any
from fractions import Fraction
import random
from collections import deque
import sys
import os
import json
from misc import *
from genetic_algorithm.individual import Individual
from neural_network import FeedForwardNetwork, linear, sigmoid, tanh, relu, leaky_relu, ActivationFunction, get_activation_by_name
class Vision(object):
__slots__ = ('dist_to_wall', 'dist_to_apple', 'dist_to_self')
def __init__(self,
dist_to_wall: Union[float, int],
dist_to_apple: Union[float, int],
dist_to_self: Union[float, int]
):
self.dist_to_wall = float(dist_to_wall)
self.dist_to_apple = float(dist_to_apple)
self.dist_to_self = float(dist_to_self)
class DrawableVision(object):
__slots__ = ('wall_location', 'apple_location', 'self_location')
def __init__(self,
wall_location: Point,
apple_location: Optional[Point] = None,
self_location: Optional[Point] = None,
):
self.wall_location = wall_location
self.apple_location = apple_location
self.self_location = self_location
class Snake(Individual):
def __init__(self, board_size: Tuple[int, int],
chromosome: Optional[Dict[str, List[np.ndarray]]] = None,
start_pos: Optional[Point] = None,
apple_seed: Optional[int] = None,
initial_velocity: Optional[str] = None,
starting_direction: Optional[str] = None,
hidden_layer_architecture: Optional[List[int]] = [1123125, 9],
hidden_activation: Optional[ActivationFunction] = 'relu',
output_activation: Optional[ActivationFunction] = 'sigmoid',
lifespan: Optional[Union[int, float]] = np.inf,
apple_and_self_vision: Optional[str] = 'binary'
):
self.lifespan = lifespan
self.apple_and_self_vision = apple_and_self_vision.lower()
self.score = 0 # Number of apples snake gets
self._fitness = 0 # Overall fitness
self._frames = 0 # Number of frames that the snake has been alive
self._frames_since_last_apple = 0
self.possible_directions = ('u', 'd', 'l', 'r')
self.board_size = board_size
self.hidden_layer_architecture = hidden_layer_architecture
self.hidden_activation = hidden_activation
self.output_activation = output_activation
if not start_pos:
#@TODO: undo this
# x = random.randint(10, self.board_size[0] - 9)
# y = random.randint(10, self.board_size[1] - 9)
x = random.randint(2, self.board_size[0] - 3)
y = random.randint(2, self.board_size[1] - 3)
start_pos = Point(x, y)
self.start_pos = start_pos
self._vision_type = VISION_8
self._vision: List[Vision] = [None] * len(self._vision_type)
# This is just used so I can draw and is not actually used in the NN
self._drawable_vision: List[DrawableVision] = [None] * len(self._vision_type)
# Setting up network architecture
# Each "Vision" has 3 distances it tracks: wall, apple and self
# there are also one-hot encoded direction and one-hot encoded tail direction,
# each of which have 4 possibilities.
num_inputs = len(self._vision_type) * 3 + 4 + 4 #@TODO: Add one-hot back in
self.vision_as_array: np.ndarray = np.zeros((num_inputs, 1))
self.network_architecture = [num_inputs] # Inputs
self.network_architecture.extend(self.hidden_layer_architecture) # Hidden layers
self.network_architecture.append(4) # 4 outputs, ['u', 'd', 'l', 'r']
self.network = FeedForwardNetwork(self.network_architecture,
get_activation_by_name(self.hidden_activation),
get_activation_by_name(self.output_activation)
)
# If chromosome is set, take it
if chromosome:
# self._chromosome = chromosome
self.network.params = chromosome
# self.decode_chromosome()
else:
# self._chromosome = {}
# self.encode_chromosome()
pass
# For creating the next apple
if apple_seed is None:
apple_seed = np.random.randint(-1000000000, 1000000000)
self.apple_seed = apple_seed # Only needed for saving/loading replay
self.rand_apple = random.Random(self.apple_seed)
self.apple_location = None
if starting_direction:
starting_direction = starting_direction[0].lower()
else:
starting_direction = self.possible_directions[random.randint(0, 3)]
self.starting_direction = starting_direction # Only needed for saving/loading replay
self.init_snake(self.starting_direction)
self.initial_velocity = initial_velocity
self.init_velocity(self.starting_direction, self.initial_velocity)
self.generate_apple()
@property
def fitness(self):
return self._fitness
def calculate_fitness(self):
# Give positive minimum fitness for roulette wheel selection
self._fitness = (self._frames) + ((2**self.score) + (self.score**2.1)*500) - (((.25 * self._frames)**1.3) * (self.score**1.2))
# self._fitness = (self._frames) + ((2**self.score) + (self.score**2.1)*500) - (((.25 * self._frames)) * (self.score))
self._fitness = max(self._fitness, .1)
@property
def chromosome(self):
# return self._chromosome
pass
def encode_chromosome(self):
# # L = len(self.network.params) // 2
# L = len(self.network.layer_nodes)
# # Encode weights and bias
# for layer in range(1, L):
# l = str(layer)
# self._chromosome['W' + l] = self.network.params['W' + l].flatten()
# self._chromosome['b' + l] = self.network.params['b' + l].flatten()
pass
def decode_chromosome(self):
# # L = len(self.network.params) // 2
# L = len(self.network.layer_nodes)
# # Decode weights and bias
# for layer in range(1, L):
# l = str(layer)
# w_shape = (self.network_architecture[layer], self.network_architecture[layer-1])
# b_shape = (self.network_architecture[layer], 1)
# self.network.params['W' + l] = self._chromosome['W' + l].reshape(w_shape)
# self.network.params['b' + l] = self._chromosome['b' + l].reshape(b_shape)
pass
def look(self):
# Look all around
for i, slope in enumerate(self._vision_type):
vision, drawable_vision = self.look_in_direction(slope)
self._vision[i] = vision
self._drawable_vision[i] = drawable_vision
# Update the input array
self._vision_as_input_array()
def look_in_direction(self, slope: Slope) -> Tuple[Vision, DrawableVision]:
dist_to_wall = None
dist_to_apple = np.inf
dist_to_self = np.inf
wall_location = None
apple_location = None
self_location = None
position = self.snake_array[0].copy()
distance = 1.0
total_distance = 0.0
# Can't start by looking at yourself
position.x += slope.run
position.y += slope.rise
total_distance += distance
body_found = False # Only need to find the first occurance since it's the closest
food_found = False # Although there is only one food, stop looking once you find it
# Keep going until the position is out of bounds
while self._within_wall(position):
if not body_found and self._is_body_location(position):
dist_to_self = total_distance
self_location = position.copy()
body_found = True
if not food_found and self._is_apple_location(position):
dist_to_apple = total_distance
apple_location = position.copy()
food_found = True
wall_location = position
position.x += slope.run
position.y += slope.rise
total_distance += distance
assert(total_distance != 0.0)
# @TODO: May need to adjust numerator in case of VISION_16 since step size isn't always going to be on a tile
dist_to_wall = 1.0 / total_distance
if self.apple_and_self_vision == 'binary':
dist_to_apple = 1.0 if dist_to_apple != np.inf else 0.0
dist_to_self = 1.0 if dist_to_self != np.inf else 0.0
elif self.apple_and_self_vision == 'distance':
dist_to_apple = 1.0 / dist_to_apple
dist_to_self = 1.0 / dist_to_self
vision = Vision(dist_to_wall, dist_to_apple, dist_to_self)
drawable_vision = DrawableVision(wall_location, apple_location, self_location)
return (vision, drawable_vision)
def _vision_as_input_array(self) -> None:
# Split _vision into np array where rows [0-2] are _vision[0].dist_to_wall, _vision[0].dist_to_apple, _vision[0].dist_to_self,
# rows [3-5] are _vision[1].dist_to_wall, _vision[1].dist_to_apple, _vision[1].dist_to_self, etc. etc. etc.
for va_index, v_index in zip(range(0, len(self._vision) * 3, 3), range(len(self._vision))):
vision = self._vision[v_index]
self.vision_as_array[va_index, 0] = vision.dist_to_wall
self.vision_as_array[va_index + 1, 0] = vision.dist_to_apple
self.vision_as_array[va_index + 2, 0] = vision.dist_to_self
i = len(self._vision) * 3 # Start at the end
direction = self.direction[0].lower()
# One-hot encode direction
direction_one_hot = np.zeros((len(self.possible_directions), 1))
direction_one_hot[self.possible_directions.index(direction), 0] = 1
self.vision_as_array[i: i + len(self.possible_directions)] = direction_one_hot
i += len(self.possible_directions)
# One-hot tail direction
tail_direction_one_hot = np.zeros((len(self.possible_directions), 1))
tail_direction_one_hot[self.possible_directions.index(self.tail_direction), 0] = 1
self.vision_as_array[i: i + len(self.possible_directions)] = tail_direction_one_hot
def _within_wall(self, position: Point) -> bool:
return position.x >= 0 and position.y >= 0 and \
position.x < self.board_size[0] and \
position.y < self.board_size[1]
def generate_apple(self) -> None:
width = self.board_size[0]
height = self.board_size[1]
# Find all possible points where the snake is not currently
possibilities = [divmod(i, height) for i in range(width * height) if divmod(i, height) not in self._body_locations]
if possibilities:
loc = self.rand_apple.choice(possibilities)
self.apple_location = Point(loc[0], loc[1])
else:
# I guess you win?
print('you won!')
pass
def init_snake(self, starting_direction: str) -> None:
"""
Initialize teh snake.
starting_direction: ('u', 'd', 'l', 'r')
direction that the snake should start facing. Whatever the direction is, the head
of the snake will begin pointing that way.
"""
head = self.start_pos
snake = None
# Body is below
if starting_direction == 'u':
snake = [head, Point(head.x, head.y + 1), Point(head.x, head.y + 2)]
# Body is above
elif starting_direction == 'd':
snake = [head, Point(head.x, head.y - 1), Point(head.x, head.y - 2)]
# Body is to the right
elif starting_direction == 'l':
snake = [head, Point(head.x + 1, head.y), Point(head.x + 2, head.y)]
# Body is to the left
elif starting_direction == 'r':
snake = [head, Point(head.x - 1, head.y), Point(head.x - 2, head.y)]
self.snake_array = deque(snake)
self._body_locations = set(snake)
self.is_alive = True
def update(self):
if self.is_alive:
self._frames += 1
self.look()
self.network.feed_forward(self.vision_as_array)
self.direction = self.possible_directions[np.argmax(self.network.out)]
return True
else:
return False
def move(self) -> bool:
if not self.is_alive:
return False
direction = self.direction[0].lower()
# Is the direction valid?
if direction not in self.possible_directions:
return False
# Find next position
# tail = self.snake_array.pop() # Pop tail since we can technically move to the tail
head = self.snake_array[0]
next_pos = None
if direction == 'u':
next_pos = Point(head.x, head.y - 1)
elif direction == 'd':
next_pos = Point(head.x, head.y + 1)
elif direction == 'r':
next_pos = Point(head.x + 1, head.y)
elif direction == 'l':
next_pos = Point(head.x - 1, head.y)
# Is the next position we want to move valid?
if self._is_valid(next_pos):
# Tail
if next_pos == self.snake_array[-1]:
# Pop tail and add next_pos (same as tail) to front
# No need to remove tail from _body_locations since it will go back in anyway
self.snake_array.pop()
self.snake_array.appendleft(next_pos)
# Eat the apple
elif next_pos == self.apple_location:
self.score += 1
self._frames_since_last_apple = 0
# Move head
self.snake_array.appendleft(next_pos)
self._body_locations.update({next_pos})
# Don't remove tail since the snake grew
self.generate_apple()
# Normal movement
else:
# Move head
self.snake_array.appendleft(next_pos)
self._body_locations.update({next_pos})
# Remove tail
tail = self.snake_array.pop()
self._body_locations.symmetric_difference_update({tail})
# Figure out which direction the tail is moving
p2 = self.snake_array[-2]
p1 = self.snake_array[-1]
diff = p2 - p1
if diff.x < 0:
self.tail_direction = 'l'
elif diff.x > 0:
self.tail_direction = 'r'
elif diff.y > 0:
self.tail_direction = 'd'
elif diff.y < 0:
self.tail_direction = 'u'
self._frames_since_last_apple += 1
#@NOTE: If you have different sized grids you may want to change this
if self._frames_since_last_apple > 100:
self.is_alive = False
return False
return True
else:
self.is_alive = False
return False
def _is_apple_location(self, position: Point) -> bool:
return position == self.apple_location
def _is_body_location(self, position: Point) -> bool:
return position in self._body_locations
def _is_valid(self, position: Point) -> bool:
"""
Determine whether a given position is valid.
Return True if the position is on the board and does not intersect the snake.
Return False otherwise
"""
if (position.x < 0) or (position.x > self.board_size[0] - 1):
return False
if (position.y < 0) or (position.y > self.board_size[1] - 1):
return False
if position == self.snake_array[-1]:
return True
# If the position is a body location, not valid.
# @NOTE: _body_locations will contain tail, so need to check tail first
elif position in self._body_locations:
return False
# Otherwise you good
else:
return True
def init_velocity(self, starting_direction, initial_velocity: Optional[str] = None) -> None:
if initial_velocity:
self.direction = initial_velocity[0].lower()
# Whichever way the starting_direction is
else:
self.direction = starting_direction
# Tail starts moving the same direction
self.tail_direction = self.direction
def save_snake(population_folder: str, individual_name: str, snake: Snake, settings: Dict[str, Any]) -> None:
# Make population folder if it doesn't exist
if not os.path.exists(population_folder):
os.makedirs(population_folder)
# Save off settings
if 'settings.json' not in os.listdir(population_folder):
f = os.path.join(population_folder, 'settings.json')
with open(f, 'w', encoding='utf-8') as out:
json.dump(settings, out, sort_keys=True, indent=4)
# Make directory for the individual
individual_dir = os.path.join(population_folder, individual_name)
os.makedirs(individual_dir)
# Save some constructor information for replay
# @NOTE: No need to save chromosome since that is saved as .npy
# @NOTE: No need to save board_size or hidden_layer_architecture
# since these are taken from settings
constructor = {}
constructor['start_pos'] = snake.start_pos.to_dict()
constructor['apple_seed'] = snake.apple_seed
constructor['initial_velocity'] = snake.initial_velocity
constructor['starting_direction'] = snake.starting_direction
snake_constructor_file = os.path.join(individual_dir, 'constructor_params.json')
# Save
with open(snake_constructor_file, 'w', encoding='utf-8') as out:
json.dump(constructor, out, sort_keys=True, indent=4)
L = len(snake.network.layer_nodes)
for l in range(1, L):
w_name = 'W' + str(l)
b_name = 'b' + str(l)
weights = snake.network.params[w_name]
bias = snake.network.params[b_name]
np.save(os.path.join(individual_dir, w_name), weights)
np.save(os.path.join(individual_dir, b_name), bias)
def load_snake(population_folder: str, individual_name: str, settings: Optional[Union[Dict[str, Any], str]] = None) -> Snake:
if not settings:
f = os.path.join(population_folder, 'settings.json')
if not os.path.exists(f):
raise Exception("settings needs to be passed as an argument if 'settings.json' does not exist under population folder")
with open(f, 'r', encoding='utf-8') as fp:
settings = json.load(fp)
elif isinstance(settings, dict):
settings = settings
elif isinstance(settings, str):
filepath = settings
with open(filepath, 'r', encoding='utf-8') as fp:
settings = json.load(fp)
params = {}
for fname in os.listdir(os.path.join(population_folder, individual_name)):
extension = fname.rsplit('.npy', 1)
if len(extension) == 2:
param = extension[0]
params[param] = np.load(os.path.join(population_folder, individual_name, fname))
else:
continue
# Load constructor params for the specific snake
constructor_params = {}
snake_constructor_file = os.path.join(population_folder, individual_name, 'constructor_params.json')
with open(snake_constructor_file, 'r', encoding='utf-8') as fp:
constructor_params = json.load(fp)
snake = Snake(settings['board_size'], chromosome=params,
start_pos=Point.from_dict(constructor_params['start_pos']),
apple_seed=constructor_params['apple_seed'],
initial_velocity=constructor_params['initial_velocity'],
starting_direction=constructor_params['starting_direction'],
hidden_layer_architecture=settings['hidden_network_architecture'],
hidden_activation=settings['hidden_layer_activation'],
output_activation=settings['output_layer_activation'],
lifespan=settings['lifespan'],
apple_and_self_vision=settings['apple_and_self_vision']
)
return snake
| 42.327309
| 135
| 0.59462
|
f169154333f29cc1ac783b539b5a22037e28b2d6
| 432
|
py
|
Python
|
projects/views.py
|
aminyaraghi/portfolio
|
dabef787f36eb80563b6a0246a55d3495b126cb5
|
[
"MIT"
] | null | null | null |
projects/views.py
|
aminyaraghi/portfolio
|
dabef787f36eb80563b6a0246a55d3495b126cb5
|
[
"MIT"
] | null | null | null |
projects/views.py
|
aminyaraghi/portfolio
|
dabef787f36eb80563b6a0246a55d3495b126cb5
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from projects.models import Project
def project_index(request):
projects = Project.objects.all()
context = {
'projects': projects
}
return render(request, 'project_index.html', context)
def project_detail(request, pk):
project = Project.objects.get(pk=pk)
context = {
'project': project
}
return render(request, 'project_detail.html', context)
| 22.736842
| 58
| 0.68287
|
d89dc2402f97bbb8cc742987ddaf32554db003cc
| 11,716
|
py
|
Python
|
tests/table/test_join.py
|
0scarB/piccolo
|
27539219431874bae99b7206df48133fbe1a27eb
|
[
"MIT"
] | 750
|
2019-01-03T16:02:48.000Z
|
2022-03-30T19:53:03.000Z
|
tests/table/test_join.py
|
0scarB/piccolo
|
27539219431874bae99b7206df48133fbe1a27eb
|
[
"MIT"
] | 311
|
2019-01-14T13:07:13.000Z
|
2022-03-31T07:43:08.000Z
|
tests/table/test_join.py
|
0scarB/piccolo
|
27539219431874bae99b7206df48133fbe1a27eb
|
[
"MIT"
] | 48
|
2020-12-18T08:13:50.000Z
|
2022-03-24T03:18:06.000Z
|
import decimal
from unittest import TestCase
from tests.example_apps.music.tables import (
Band,
Concert,
Manager,
Ticket,
Venue,
)
TABLES = [Manager, Band, Venue, Concert]
class TestCreateJoin:
def test_create_join(self):
for table in TABLES:
table.create_table().run_sync()
for table in reversed(TABLES):
table.alter().drop_table().run_sync()
class TestJoin(TestCase):
tables = [Manager, Band, Venue, Concert, Ticket]
def setUp(self):
for table in self.tables:
table.create_table().run_sync()
manager_1 = Manager(name="Guido")
manager_1.save().run_sync()
band_1 = Band(
name="Pythonistas", manager=manager_1.id, popularity=1000
)
band_1.save().run_sync()
manager_2 = Manager(name="Graydon")
manager_2.save().run_sync()
band_2 = Band(name="Rustaceans", manager=manager_2.id)
band_2.save().run_sync()
venue = Venue(name="Grand Central", capacity=1000)
venue.save().run_sync()
concert = Concert(band_1=band_1.id, band_2=band_2.id, venue=venue.id)
concert.save().run_sync()
ticket = Ticket(concert=concert, price=decimal.Decimal(50.0))
ticket.save().run_sync()
def tearDown(self):
for table in reversed(self.tables):
table.alter().drop_table().run_sync()
###########################################################################
def test_join(self):
select_query = Concert.select(
Concert.band_1.name,
Concert.band_2.name,
Concert.venue.name,
Concert.band_1.manager,
)
response = select_query.run_sync()
self.assertEqual(
response,
[
{
"band_1.name": "Pythonistas",
"band_2.name": "Rustaceans",
"venue.name": "Grand Central",
"band_1.manager": 1,
}
],
)
# Now make sure that even deeper joins work:
select_query = Concert.select(Concert.band_1.manager.name)
response = select_query.run_sync()
self.assertEqual(response, [{"band_1.manager.name": "Guido"}])
def test_select_all_columns(self):
"""
Make sure you can retrieve all columns from a related table, without
explicitly specifying them.
"""
result = (
Band.select(Band.name, Band.manager.all_columns())
.first()
.run_sync()
)
self.assertDictEqual(
result,
{
"name": "Pythonistas",
"manager.id": 1,
"manager.name": "Guido",
},
)
def test_select_all_columns_deep(self):
"""
Make sure that ``all_columns`` can be used several layers deep.
"""
result = (
Concert.select(
Concert.venue.all_columns(),
Concert.band_1.manager.all_columns(),
Concert.band_2.manager.all_columns(),
)
.first()
.run_sync()
)
self.assertDictEqual(
result,
{
"venue.id": 1,
"venue.name": "Grand Central",
"venue.capacity": 1000,
"band_1.manager.id": 1,
"band_1.manager.name": "Guido",
"band_2.manager.id": 2,
"band_2.manager.name": "Graydon",
},
)
def test_select_all_columns_root(self):
"""
Make sure that using ``all_columns`` at the root doesn't interfere
with using it for referenced tables.
"""
result = (
Band.select(
Band.all_columns(),
Band.manager.all_columns(),
)
.first()
.run_sync()
)
self.assertDictEqual(
result,
{
"id": 1,
"name": "Pythonistas",
"manager": 1,
"popularity": 1000,
"manager.id": 1,
"manager.name": "Guido",
},
)
def test_select_all_columns_root_nested(self):
"""
Make sure that using ``all_columns`` at the root doesn't interfere
with using it for referenced tables.
"""
result = (
Band.select(Band.all_columns(), Band.manager.all_columns())
.output(nested=True)
.first()
.run_sync()
)
self.assertDictEqual(
result,
{
"id": 1,
"name": "Pythonistas",
"manager": {"id": 1, "name": "Guido"},
"popularity": 1000,
},
)
def test_select_all_columns_exclude(self):
"""
Make sure we can get all columns, except the ones we specify.
"""
result = (
Band.select(
Band.all_columns(exclude=[Band.id]),
Band.manager.all_columns(exclude=[Band.manager.id]),
)
.output(nested=True)
.first()
.run_sync()
)
result_str_args = (
Band.select(
Band.all_columns(exclude=["id"]),
Band.manager.all_columns(exclude=["id"]),
)
.output(nested=True)
.first()
.run_sync()
)
for data in (result, result_str_args):
self.assertDictEqual(
data,
{
"name": "Pythonistas",
"manager": {"name": "Guido"},
"popularity": 1000,
},
)
###########################################################################
def test_objects_nested(self):
"""
Make sure the prefetch argument works correctly for objects.
"""
band = Band.objects(Band.manager).first().run_sync()
self.assertIsInstance(band.manager, Manager)
def test_objects__all_related__root(self):
"""
Make sure that ``all_related`` works correctly when called from the
root table of the query.
"""
concert = Concert.objects(Concert.all_related()).first().run_sync()
self.assertIsInstance(concert.band_1, Band)
self.assertIsInstance(concert.band_2, Band)
self.assertIsInstance(concert.venue, Venue)
def test_objects_nested_deep(self):
"""
Make sure that ``prefetch`` works correctly with deeply nested tables.
"""
ticket = (
Ticket.objects(
Ticket.concert,
Ticket.concert.band_1,
Ticket.concert.band_2,
Ticket.concert.venue,
Ticket.concert.band_1.manager,
Ticket.concert.band_2.manager,
)
.first()
.run_sync()
)
self.assertIsInstance(ticket.concert, Concert)
self.assertIsInstance(ticket.concert.band_1, Band)
self.assertIsInstance(ticket.concert.band_2, Band)
self.assertIsInstance(ticket.concert.venue, Venue)
self.assertIsInstance(ticket.concert.band_1.manager, Manager)
self.assertIsInstance(ticket.concert.band_2.manager, Manager)
def test_objects__all_related__deep(self):
"""
Make sure that ``all_related`` works correctly when called on a deeply
nested table.
"""
ticket = (
Ticket.objects(
Ticket.all_related(),
Ticket.concert.all_related(),
Ticket.concert.band_1.all_related(),
Ticket.concert.band_2.all_related(),
)
.first()
.run_sync()
)
self.assertIsInstance(ticket.concert, Concert)
self.assertIsInstance(ticket.concert.band_1, Band)
self.assertIsInstance(ticket.concert.band_2, Band)
self.assertIsInstance(ticket.concert.venue, Venue)
self.assertIsInstance(ticket.concert.band_1.manager, Manager)
self.assertIsInstance(ticket.concert.band_2.manager, Manager)
def test_objects_prefetch_clause(self):
"""
Make sure that ``prefetch`` clause works correctly.
"""
ticket = (
Ticket.objects()
.prefetch(
Ticket.all_related(),
Ticket.concert.all_related(),
Ticket.concert.band_1.all_related(),
Ticket.concert.band_2.all_related(),
)
.first()
.run_sync()
)
self.assertIsInstance(ticket.concert, Concert)
self.assertIsInstance(ticket.concert.band_1, Band)
self.assertIsInstance(ticket.concert.band_2, Band)
self.assertIsInstance(ticket.concert.venue, Venue)
self.assertIsInstance(ticket.concert.band_1.manager, Manager)
self.assertIsInstance(ticket.concert.band_2.manager, Manager)
def test_objects_prefetch_intermediate(self):
"""
Make sure when using ``prefetch`` on a deeply nested table, all of the
intermediate objects are also retrieved properly.
"""
ticket = (
Ticket.objects()
.prefetch(
Ticket.concert.band_1.manager,
)
.first()
.run_sync()
)
self.assertIsInstance(ticket.price, decimal.Decimal)
self.assertIsInstance(ticket.concert, Concert)
self.assertIsInstance(ticket.concert.id, int)
self.assertIsInstance(ticket.concert.band_1, Band)
self.assertIsInstance(ticket.concert.band_2, int)
self.assertIsInstance(ticket.concert.venue, int)
self.assertIsInstance(ticket.concert.band_1.id, int)
self.assertIsInstance(ticket.concert.band_1.name, str)
self.assertIsInstance(ticket.concert.band_1.manager, Manager)
self.assertIsInstance(ticket.concert.band_1.manager.id, int)
self.assertIsInstance(ticket.concert.band_1.manager.name, str)
def test_objects_prefetch_multiple_intermediate(self):
"""
Make sure that if we're fetching multiple deeply nested tables, the
intermediate tables are still created correctly.
"""
ticket = (
Ticket.objects()
.prefetch(
Ticket.concert.band_1.manager,
Ticket.concert.band_2.manager,
)
.first()
.run_sync()
)
self.assertIsInstance(ticket.price, decimal.Decimal)
self.assertIsInstance(ticket.concert, Concert)
self.assertIsInstance(ticket.concert.id, int)
self.assertIsInstance(ticket.concert.band_1, Band)
self.assertIsInstance(ticket.concert.band_2, Band)
self.assertIsInstance(ticket.concert.venue, int)
self.assertIsInstance(ticket.concert.band_1.id, int)
self.assertIsInstance(ticket.concert.band_1.name, str)
self.assertIsInstance(ticket.concert.band_1.manager, Manager)
self.assertIsInstance(ticket.concert.band_1.manager.id, int)
self.assertIsInstance(ticket.concert.band_1.manager.name, str)
self.assertIsInstance(ticket.concert.band_2.id, int)
self.assertIsInstance(ticket.concert.band_2.name, str)
self.assertIsInstance(ticket.concert.band_2.manager, Manager)
self.assertIsInstance(ticket.concert.band_2.manager.id, int)
self.assertIsInstance(ticket.concert.band_2.manager.name, str)
| 31.923706
| 79
| 0.550444
|
0ae2dd48c7ad3d7bfdeb431cc77b8b8be1bba784
| 2,972
|
py
|
Python
|
Tiana.py
|
TEAMOFDEVIL-X/TIANACHATBOT
|
7a6c85c12d506b79a6788646fd4e3c210b3c1166
|
[
"MIT"
] | 3
|
2021-09-04T07:01:11.000Z
|
2021-10-17T19:25:13.000Z
|
Tiana.py
|
TEAMOFDEVIL-X/TIANACHATBOT
|
7a6c85c12d506b79a6788646fd4e3c210b3c1166
|
[
"MIT"
] | null | null | null |
Tiana.py
|
TEAMOFDEVIL-X/TIANACHATBOT
|
7a6c85c12d506b79a6788646fd4e3c210b3c1166
|
[
"MIT"
] | 6
|
2021-08-06T18:36:49.000Z
|
2022-01-05T11:00:57.000Z
|
print("[INFO]: Importing Your API_ID, API_HASH, BOT_TOKEN")
import re
from asyncio import (gather, get_event_loop, sleep)
from aiohttp import ClientSession
from pyrogram import (Client, filters, idle)
from Python_ARQ import ARQ
from config import bot, BOT_TOKEN, ARQ_API_KEY, ARQ_API_BASE_URL, LANGUAGE
bot_token= BOT_TOKEN
print("[INFO]: Checking... Your Details")
bot_id = int(bot_token.split(":")[0])
print("[INFO]: Code running by master Prince Op")
arq = None
async def lunaQuery(query: str, user_id: int):
query = (
query
if LANGUAGE == "en"
else (await arq.translate(query, "en")).result.translatedText
)
resp = (await arq.luna(query, user_id)).result
return (
resp
if LANGUAGE == "en"
else (
await arq.translate(resp, LANGUAGE)
).result.translatedText
)
async def type_and_send(message):
chat_id = message.chat.id
user_id = message.from_user.id if message.from_user else 0
query = message.text.strip()
await message._client.send_chat_action(chat_id, "typing")
response, _ = await gather(lunaQuery(query, user_id), sleep(2))
if "Luna" in response:
responsee = response.replace("Luna", "Tiana")
else:
responsee = response
if "Aco" in responsee:
responsess = responsee.replace("Aco", "Tiana")
else:
responsess = responsee
if "Who is Tiana?" in responsess:
responsess2 = responsess.replace("Who is Tiana?", "Heroine Of Telegram")
else:
responsess2 = responsess
await message.reply_text(responsess2)
await message._client.send_chat_action(chat_id, "cancel")
@bot.on_message(
~filters.private
& filters.text
& ~filters.command("start")
& ~filters.edited,
group=69,
)
async def chat(_, message):
if message.reply_to_message:
if not message.reply_to_message.from_user:
return
from_user_id = message.reply_to_message.from_user.id
if from_user_id != bot_id:
return
else:
match = re.search(
"[.|\n]{0,}iris[.|\n]{0,}",
message.text.strip(),
flags=re.IGNORECASE,
)
if not match:
return
await type_and_send(message)
@bot.on_message(
filters.private
& ~filters.command("start")
& ~filters.edited
)
async def chatpm(_, message):
if not message.text:
await message.reply_text("Ufff... Ignoring .... ¯\_(ツ)_/¯")
return
await type_and_send(message)
@bot.on_message(filters.command("start") & ~filters.edited)
async def startt(_, message):
await message.reply_text("Hi, I'm Alive ╮(. ❛ ᴗ ❛.)╭")
async def main():
global arq
session = ClientSession()
arq = ARQ(ARQ_API_BASE_URL, ARQ_API_KEY, session)
await bot.start()
print(
"""
Your TianaChatBot Is Deployed Successfully.
"""
)
await idle()
loop = get_event_loop()
loop.run_until_complete(main())
| 25.843478
| 80
| 0.64031
|
7240cf093e2eeba8e74fa05114e309f3dd93e24d
| 636
|
py
|
Python
|
exploitTemplate.py
|
ArchCWithClasses/VanillaX86BufferOverflow
|
d154ceae6237cec82df834b0eeba7400510e28f1
|
[
"MIT"
] | null | null | null |
exploitTemplate.py
|
ArchCWithClasses/VanillaX86BufferOverflow
|
d154ceae6237cec82df834b0eeba7400510e28f1
|
[
"MIT"
] | null | null | null |
exploitTemplate.py
|
ArchCWithClasses/VanillaX86BufferOverflow
|
d154ceae6237cec82df834b0eeba7400510e28f1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import socket
ip = "MACHINE_IP"
port = 1337
#Testing chars for shellcode, 0x00(Null byte) is default bad.
charslist = ""
badchars = [0x00]
for i in range (0x00, 0xFF+1):
if i not in badchars:
charslist += chr(i)
#shellcode = ()
offset = 0
junk = "A" * offset
eip = ""
nopsled = ""
payload = ""
buffer = junk + eip + nopsled + payload
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((ip, port))
print("Sending buffer.")
s.send(buffer + "\r\n")
print("Done, let's rock and roll!")
except:
print("Couldn't connect.")
| 19.272727
| 61
| 0.591195
|
2dbf035c011ed16dd4913befdce6c7c824bb3053
| 632
|
py
|
Python
|
manage.py
|
Parth-Shah-99/Polling-Project
|
b9d1548dc801a0b02d1fd8b925276d9349bb10fe
|
[
"MIT"
] | 1
|
2021-06-23T11:24:01.000Z
|
2021-06-23T11:24:01.000Z
|
manage.py
|
Parth-Shah-99/Polling-Project
|
b9d1548dc801a0b02d1fd8b925276d9349bb10fe
|
[
"MIT"
] | null | null | null |
manage.py
|
Parth-Shah-99/Polling-Project
|
b9d1548dc801a0b02d1fd8b925276d9349bb10fe
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'PollsProject.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.727273
| 76
| 0.685127
|
98eae49ad844740f1e0251c1e23b2e1c4380b48c
| 952
|
py
|
Python
|
assignment_7/src/train.py
|
amitbcp/tsai-vision
|
14a66d4c3295714fdcc97db13804ffba9d6f06cc
|
[
"Apache-2.0"
] | null | null | null |
assignment_7/src/train.py
|
amitbcp/tsai-vision
|
14a66d4c3295714fdcc97db13804ffba9d6f06cc
|
[
"Apache-2.0"
] | null | null | null |
assignment_7/src/train.py
|
amitbcp/tsai-vision
|
14a66d4c3295714fdcc97db13804ffba9d6f06cc
|
[
"Apache-2.0"
] | 2
|
2021-07-25T10:24:11.000Z
|
2021-08-13T09:23:30.000Z
|
import torch.nn as nn
import torch.nn.functional as F
from tqdm import tqdm
def train(model, device, train_loader, optimizer,train_acc,train_losses):
model.train()
pbar = tqdm(train_loader)
correct = 0
processed = 0
for batch_idx, (data, target) in enumerate(pbar):
data = data["image"].to(device)
target = target.to(device)
optimizer.zero_grad()
y_pred = model(data)
# loss = F.nll_loss(y_pred, target)
loss = F.cross_entropy(y_pred, target)
train_losses.append(loss.item())
# Backpropagation
loss.backward()
optimizer.step()
pred = y_pred.argmax(dim=1, keepdim=True)
correct += pred.eq(target.view_as(pred)).sum().item()
processed += len(data)
pbar.set_description(
desc=f'Loss={loss.item()} Batch_id={batch_idx} Accuracy={100*correct/processed:0.2f}')
train_acc.append(100*correct/processed)
| 27.2
| 98
| 0.633403
|
00dbd4284076dad57d9f3a1590123acb842a3603
| 906
|
py
|
Python
|
service/microservice.py
|
SFDigitalServices/solarpanel
|
1e9850a286dc389ed392cdaf5a0ac04a3bd0798b
|
[
"MIT"
] | null | null | null |
service/microservice.py
|
SFDigitalServices/solarpanel
|
1e9850a286dc389ed392cdaf5a0ac04a3bd0798b
|
[
"MIT"
] | null | null | null |
service/microservice.py
|
SFDigitalServices/solarpanel
|
1e9850a286dc389ed392cdaf5a0ac04a3bd0798b
|
[
"MIT"
] | null | null | null |
"""Main application module"""
import os
import json
import jsend
import sentry_sdk
import falcon
from .resources.welcome import Welcome
from .resources.solarpanel import SolarPanel
from .resources.staticresource import StaticResource
def start_service():
"""Start this service
set SENTRY_DSN environmental variable to enable logging with Sentry
"""
# Initialize Sentry
sentry_sdk.init(os.environ.get('SENTRY_DSN'))
# Initialize Falcon
api = falcon.API()
api.add_route('/welcome', Welcome())
api.add_route('/solar-panel', SolarPanel())
api.add_route('/static/{filename}', StaticResource())
api.add_sink(default_error, '')
return api
def default_error(_req, resp):
"""Handle default error"""
resp.status = falcon.HTTP_404
msg_error = jsend.error('404 - Not Found')
sentry_sdk.capture_message(msg_error)
resp.body = json.dumps(msg_error)
| 28.3125
| 71
| 0.722958
|
7bcdcf7636c3b3eb1a293748ea16e349bf7adb6b
| 4,629
|
py
|
Python
|
paddlevideo/utils/record.py
|
Hzx66666/BDCI_Padddle_5
|
5283d8b9d3db3eb828e401faf42b62f71770d0ff
|
[
"Apache-2.0"
] | 1
|
2021-11-24T09:23:21.000Z
|
2021-11-24T09:23:21.000Z
|
paddlevideo/utils/record.py
|
Hzx66666/BDCI_Padddle_5
|
5283d8b9d3db3eb828e401faf42b62f71770d0ff
|
[
"Apache-2.0"
] | null | null | null |
paddlevideo/utils/record.py
|
Hzx66666/BDCI_Padddle_5
|
5283d8b9d3db3eb828e401faf42b62f71770d0ff
|
[
"Apache-2.0"
] | 1
|
2021-11-24T09:49:26.000Z
|
2021-11-24T09:49:26.000Z
|
# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
from collections import OrderedDict
from .logger import get_logger, coloring
logger = get_logger("paddlevideo")
__all__ = ['AverageMeter', 'build_record', 'log_batch', 'log_epoch']
def build_record(cfg):
framework_type = cfg.get('framework')
record_list = [
("loss", AverageMeter('loss', '7.5f')),
("loss_ce", AverageMeter('loss_ce', '7.5f')),
("loss_tri", AverageMeter('loss_tri', '7.5f')),
("lr", AverageMeter('lr', 'f', need_avg=False)),
]
if 'Recognizer1D' in cfg.framework: #TODO: required specify str in framework
record_list.append(("hit_at_one", AverageMeter("hit_at_one", '.5f')))
record_list.append(("perr", AverageMeter("perr", '.5f')))
record_list.append(("gap", AverageMeter("gap", '.5f')))
elif 'Recognizer' in cfg.framework:
record_list.append(("top1", AverageMeter("top1", '.5f')))
record_list.append(("top5", AverageMeter("top5", '.5f')))
record_list.append(("batch_time", AverageMeter('batch_cost', '.5f')))
record_list.append(("reader_time", AverageMeter('reader_cost', '.5f')))
record_list = OrderedDict(record_list)
return record_list
class AverageMeter(object):
"""
Computes and stores the average and current value
"""
def __init__(self, name='', fmt='f', need_avg=True):
self.name = name
self.fmt = fmt
self.need_avg = need_avg
self.reset()
def reset(self):
""" reset """
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
""" update """
if isinstance(val, paddle.Tensor):
val = val.numpy()[0]
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
@property
def total(self):
return '{self.name}_sum: {self.sum:{self.fmt}}'.format(self=self)
@property
def total_minute(self):
return '{self.name}_sum: {s:{self.fmt}} min'.format(s=self.sum / 60,
self=self)
@property
def mean(self):
return '{self.name}_avg: {self.avg:{self.fmt}}'.format(
self=self) if self.need_avg else ''
@property
def value(self):
return '{self.name}: {self.val:{self.fmt}}'.format(self=self)
def log_batch(metric_list, batch_id, epoch_id, total_epoch, mode, ips):
batch_cost = str(metric_list['batch_time'].value) + ' sec,'
reader_cost = str(metric_list['reader_time'].value) + ' sec,'
metric_values = []
for m in metric_list:
if not (m == 'batch_time' or m == 'reader_time'):
metric_values.append(metric_list[m].value)
metric_str = ' '.join([str(v) for v in metric_values])
epoch_str = "epoch:[{:>3d}/{:<3d}]".format(epoch_id, total_epoch)
step_str = "{:s} step:{:<4d}".format(mode, batch_id)
logger.info("{:s} {:s} {:s} {:s} {:s} {}".format(
coloring(epoch_str, "HEADER") if batch_id == 0 else epoch_str,
coloring(step_str, "PURPLE"), coloring(metric_str, 'OKGREEN'),
coloring(batch_cost, "OKGREEN"), coloring(reader_cost, 'OKGREEN'), ips))
def log_epoch(metric_list, epoch, mode, ips):
batch_cost = 'avg_' + str(metric_list['batch_time'].value) + ' sec,'
reader_cost = 'avg_' + str(metric_list['reader_time'].value) + ' sec,'
batch_sum = str(metric_list['batch_time'].total) + ' sec,'
metric_values = []
for m in metric_list:
if not (m == 'batch_time' or m == 'reader_time'):
metric_values.append(metric_list[m].mean)
metric_str = ' '.join([str(v) for v in metric_values])
end_epoch_str = "END epoch:{:<3d}".format(epoch)
logger.info("{:s} {:s} {:s} {:s} {:s} {:s} {}".format(
coloring(end_epoch_str, "RED"), coloring(mode, "PURPLE"),
coloring(metric_str, "OKGREEN"), coloring(batch_cost, "OKGREEN"),
coloring(reader_cost, "OKGREEN"), coloring(batch_sum, "OKGREEN"), ips))
| 36.738095
| 81
| 0.620436
|
6e29081b4f1bcd34cc71cc5a00d56e91e3693b82
| 1,832
|
py
|
Python
|
dist/Platform.app/Contents/Resources/lib/python3.7/wx/py/tests/test_interpreter.py
|
njalloul90/Genomics_Oncology_Platform
|
9bf6d0edca5df783f4e371fa1bc46b7b1576fe70
|
[
"MIT"
] | 6
|
2021-07-26T14:21:25.000Z
|
2021-07-26T14:32:01.000Z
|
dist/Platform.app/Contents/Resources/lib/python3.7/wx/py/tests/test_interpreter.py
|
njalloul90/Genomics_Oncology_Platform
|
9bf6d0edca5df783f4e371fa1bc46b7b1576fe70
|
[
"MIT"
] | 9
|
2021-03-18T23:10:27.000Z
|
2022-03-11T23:43:55.000Z
|
dist/Platform.app/Contents/Resources/lib/python3.7/wx/py/tests/test_interpreter.py
|
njalloul90/Genomics_Oncology_Platform
|
9bf6d0edca5df783f4e371fa1bc46b7b1576fe70
|
[
"MIT"
] | 2
|
2019-03-11T05:06:49.000Z
|
2019-03-22T21:48:49.000Z
|
#!/usr/bin/env python
__author__ = "Patrick K. O'Brien <pobrien@orbtech.com>"
import unittest
from wx.py import interpreter
"""
These unittest methods are preferred:
-------------------------------------
self.assert_(expr, msg=None)
self.assertEqual(first, second, msg=None)
self.assertRaises(excClass, callableObj, *args, **kwargs)
self.fail(msg=None)
self.failIf(expr, msg=None)
"""
class ModuleTestCase(unittest.TestCase):
def test_module(self):
module = interpreter
self.assert_(module.__author__)
self.assert_(module.Interpreter)
self.assert_(module.Interpreter.push)
self.assert_(module.Interpreter.runsource)
self.assert_(module.Interpreter.getAutoCompleteList)
self.assert_(module.Interpreter.getCallTip)
self.assert_(module.InterpreterAlaCarte)
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.output = ''
self.i = interpreter.Interpreter(stdout=self)
def write(self, text):
"""Capture output from self.i.push()."""
self.output += text
def tearDown(self):
self.output = ''
self.i = None
del self.i
def test_more(self):
self.assertEqual(self.i.push('dir()'), 0)
self.assertEqual(self.i.push('for n in range(3):'), 1)
def test_push(self):
values = (
('dir', '<built-in function dir>'),
('dir()', "['__builtins__', '__doc__', '__name__']"),
('2 + 2', '4'),
('d = {}', ''),
('d', '{}'),
('del d', ''),
('len([4,5,6])', '3'),
)
for input, output in values:
if output: output += '\n'
self.i.push(input)
self.assertEqual(self.output, output)
self.output = ''
if __name__ == '__main__':
unittest.main()
| 25.444444
| 62
| 0.582424
|
6d65e55b35d6c50fd364f4b7234eb87be70280a0
| 2,161
|
py
|
Python
|
src/visualize/visualize_nturefined.py
|
Immocat/ACTOR
|
c7237e82e333bf2c57f7d8e12f27d0831233befc
|
[
"MIT"
] | 164
|
2021-09-06T12:43:39.000Z
|
2022-03-29T02:33:38.000Z
|
src/visualize/visualize_nturefined.py
|
Immocat/ACTOR
|
c7237e82e333bf2c57f7d8e12f27d0831233befc
|
[
"MIT"
] | 14
|
2021-09-17T00:42:24.000Z
|
2022-03-07T04:18:12.000Z
|
src/visualize/visualize_nturefined.py
|
Immocat/ACTOR
|
c7237e82e333bf2c57f7d8e12f27d0831233befc
|
[
"MIT"
] | 27
|
2021-09-07T04:38:38.000Z
|
2022-03-29T00:37:10.000Z
|
import matplotlib.pyplot as plt
import torch
from src.datasets.get_dataset import get_dataset
from src.utils.anim import plot_3d_motion
import src.utils.fixseed # noqa
plt.switch_backend('agg')
def viz_ntu13(dataset, device):
""" Generate & viz samples """
print("Visualization of the ntu13")
from src.models.rotation2xyz import Rotation2xyz
rot2xyz = Rotation2xyz(device)
realsamples = []
pose18samples = []
pose24samples = []
translation = True
dataset.glob = True
dataset.translation = translation
for i in range(1, 2):
dataset.pose_rep = "xyz"
x_xyz = dataset[i][0]
realsamples.append(x_xyz)
dataset.pose_rep = "rotvec"
pose = dataset[i][0]
mask = torch.ones(pose.shape[2], dtype=bool)
# from src.models.smpl import SMPL
# smplmodel = SMPL().eval().to(device)
# import ipdb; ipdb.set_trace()
pose24 = rot2xyz(pose[None], mask[None], pose_rep="rotvec", jointstype="smpl", glob=True, translation=translation)[0]
pose18 = rot2xyz(pose[None], mask[None], pose_rep="rotvec", jointstype="a2m", glob=True, translation=translation)[0]
translation = True
dataset.glob = True
dataset.translation = translation
# poseT = dataset[i][0]
# pose18T = rot2xyz(poseT[None], mask[None], pose_rep="rotvec", jointstype="action2motion", glob=True, translation=translation)[0]
# import ipdb; ipdb.set_trace()
pose18samples.append(pose18)
pose24samples.append(pose24)
params = {"pose_rep": "xyz"}
for i in [0]:
for x_xyz, title in zip([pose24samples[i], pose18samples[i], realsamples[i]], ["pose_to_24", "pose_to_18", "action2motion_18"]):
save_path = title + ".gif"
plot_3d_motion(x_xyz, x_xyz.shape[-1], save_path, params, title=title)
print(f"saving {save_path}")
if __name__ == '__main__':
# get device
device = torch.device('cpu')
# get data
DATA = get_dataset(name="ntu13")
dataset = DATA(split="train")
viz_ntu13(dataset, device)
| 31.318841
| 138
| 0.62795
|
d12ee5d8c8be93b34aa01ee940a4d5932c56a7e7
| 86,253
|
py
|
Python
|
pybit/__init__.py
|
jerrydboonstra/pybit
|
c4e945d43bf1c3d6134e80ea969d0dd195d124e1
|
[
"MIT"
] | 118
|
2020-03-31T21:13:48.000Z
|
2022-03-26T15:23:05.000Z
|
pybit/__init__.py
|
msjahan47/pybit
|
c4e945d43bf1c3d6134e80ea969d0dd195d124e1
|
[
"MIT"
] | 53
|
2020-04-04T00:34:29.000Z
|
2022-03-08T22:50:43.000Z
|
pybit/__init__.py
|
msjahan47/pybit
|
c4e945d43bf1c3d6134e80ea969d0dd195d124e1
|
[
"MIT"
] | 48
|
2020-11-10T15:41:51.000Z
|
2022-03-18T19:49:32.000Z
|
# -*- coding: utf-8 -*-
"""
pybit
------------------------
pybit is a lightweight and high-performance API connector for the
RESTful and WebSocket APIs of the Bybit exchange.
Documentation can be found at
https://github.com/verata-veritatis/pybit
:copyright: (c) 2020-2021 verata-veritatis
:license: MIT License
"""
import time
import hmac
import json
import logging
import threading
import requests
import websocket
from datetime import datetime as dt
from concurrent.futures import ThreadPoolExecutor
from .exceptions import FailedRequestError, InvalidRequestError
# Requests will use simplejson if available.
try:
from simplejson.errors import JSONDecodeError
except ImportError:
from json.decoder import JSONDecodeError
# Versioning.
VERSION = '1.3.4'
class HTTP:
"""
Connector for Bybit's HTTP API.
:param endpoint: The endpoint URL of the HTTP API, e.g.
'https://api-testnet.bybit.com'.
:type endpoint: str
:param api_key: Your API key. Required for authenticated endpoints. Defaults
to None.
:type api_key: str
:param api_secret: Your API secret key. Required for authenticated
endpoints. Defaults to None.
:type api_secret: str
:param logging_level: The logging level of the built-in logger. Defaults to
logging.INFO. Options are CRITICAL (50), ERROR (40), WARNING (30),
INFO (20), DEBUG (10), or NOTSET (0).
:type logging_level: Union[int, logging.level]
:param log_requests: Whether or not pybit should log each HTTP request.
:type log_requests: bool
:param request_timeout: The timeout of each API request in seconds. Defaults
to 10 seconds.
:type request_timeout: int
:param recv_window: How long an HTTP request is valid in ms. Default is
5000.
:type recv_window: int
:param force_retry: Whether or not pybit should retry a timed-out request.
:type force_retry: bool
:param retry_codes: A list of non-fatal status codes to retry on.
:type retry_codes: set
:param ignore_codes: A list of non-fatal status codes to ignore.
:type ignore_codes: set
:param max_retries: The number of times to re-attempt a request.
:type max_retries: int
:param retry_delay: Seconds between retries for returned error or timed-out
requests. Default is 3 seconds.
:type retry_delay: int
:param referral_id: An optional referer ID can be added to each request for
identification.
:type referral_id: str
:returns: pybit.HTTP session.
"""
def __init__(self, endpoint=None, api_key=None, api_secret=None,
logging_level=logging.INFO, log_requests=False,
request_timeout=10, recv_window=5000, force_retry=False,
retry_codes=None, ignore_codes=None, max_retries=3,
retry_delay=3, referral_id=None, spot=False):
"""Initializes the HTTP class."""
# Set the endpoint.
if endpoint is None:
self.endpoint = 'https://api.bybit.com'
else:
self.endpoint = endpoint
# Setup logger.
self.logger = logging.getLogger(__name__)
if len(logging.root.handlers) == 0:
#no handler on root logger set -> we add handler just for this logger to not mess with custom logic from outside
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
)
handler.setLevel(logging_level)
self.logger.addHandler(handler)
self.logger.debug('Initializing HTTP session.')
self.log_requests = log_requests
# Set API keys.
self.api_key = api_key
self.api_secret = api_secret
# Set timeout.
self.timeout = request_timeout
self.recv_window = recv_window
self.force_retry = force_retry
self.max_retries = max_retries
self.retry_delay = retry_delay
# Set whitelist of non-fatal Bybit status codes to retry on.
if retry_codes is None:
self.retry_codes = {10002, 10006, 30034, 30035, 130035, 130150}
else:
self.retry_codes = retry_codes
# Set whitelist of non-fatal Bybit status codes to ignore.
if ignore_codes is None:
self.ignore_codes = set()
else:
self.ignore_codes = ignore_codes
# Initialize requests session.
self.client = requests.Session()
self.client.headers.update(
{
'User-Agent': 'pybit-' + VERSION,
'Content-Type': 'application/json',
'Accept': 'application/json',
}
)
# Add referral ID to header.
if referral_id:
self.client.headers.update({'Referer': referral_id})
# If True, calls spot endpoints rather than futures endpoints.
self.spot = spot
def _exit(self):
"""Closes the request session."""
self.client.close()
self.logger.debug('HTTP session closed.')
def orderbook(self, **kwargs):
"""
Get the orderbook.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-orderbook.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/quote/v1/depth'
else:
suffix = '/v2/public/orderBook/L2'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def merged_orderbook(self, **kwargs):
"""
Get the merged orderbook.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-mergedorderbook.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/spot/quote/v1/depth/merged',
query=kwargs
)
def query_kline(self, **kwargs):
"""
Get kline.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-querykline.
:returns: Request results as dictionary.
"""
# Replace query param 'from_time' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/quote/v1/kline'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/kline'
else:
suffix = '/v2/public/kline/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def latest_information_for_symbol(self, **kwargs):
"""
Get the latest information for symbol.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-latestsymbolinfo.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/quote/v1/ticker/24hr'
else:
suffix = '/v2/public/tickers'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def last_traded_price(self, **kwargs):
"""
Get the last traded price.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-lasttradedprice.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/spot/quote/v1/ticker/price',
query=kwargs
)
def best_bid_ask_price(self, **kwargs):
"""
Get the best bid/ask price.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-bestbidask.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/spot/quote/v1/ticker/book_ticker',
query=kwargs
)
def public_trading_records(self, **kwargs):
"""
Get recent trades. You can find a complete history of trades on Bybit
at https://public.bybit.com/.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-publictradingrecords.
:returns: Request results as dictionary.
"""
# Replace query param 'from_id' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/quote/v1/trades'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/recent-trading-records'
else:
suffix = '/v2/public/trading-records'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def query_symbol(self, **kwargs):
"""
Get symbol info.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/symbols'
else:
suffix = '/v2/public/symbols'
return self._submit_request(
method='GET',
path=self.endpoint + suffix
)
def liquidated_orders(self, **kwargs):
"""
Retrieve the liquidated orders. The query range is the last seven days
of data.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-query_liqrecords.
:returns: Request results as dictionary.
"""
# Replace query param 'from_id' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/liq-records',
query=kwargs
)
def query_mark_price_kline(self, **kwargs):
"""
Query mark price kline (like query_kline but for mark price).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-markpricekline.
:returns: Request results as dictionary.
"""
# Replace query param 'from_time' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/mark-price-kline'
else:
suffix = '/v2/public/mark-price-kline'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def query_index_price_kline(self, **kwargs):
"""
Query index price kline (like query_kline but for index price).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-queryindexpricekline.
:returns: Request results as dictionary.
"""
# Replace query param 'from_time' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/index-price-kline'
else:
suffix = '/v2/public/index-price-kline'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def query_premium_index_kline(self, **kwargs):
"""
Query premium index kline (like query_kline but for the premium index
discount).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-querypremiumindexkline.
:returns: Request results as dictionary.
"""
# Replace query param 'from_time' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/premium-index-kline'
else:
suffix = '/v2/public/premium-index-kline'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def open_interest(self, **kwargs):
"""
Gets the total amount of unsettled contracts. In other words, the total
number of contracts held in open positions.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marketopeninterest.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/open-interest',
query=kwargs
)
def latest_big_deal(self, **kwargs):
"""
Obtain filled orders worth more than 500,000 USD within the last 24h.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marketbigdeal.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/big-deal',
query=kwargs
)
def long_short_ratio(self, **kwargs):
"""
Gets the Bybit long-short ratio.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marketaccountratio.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/account-ratio',
query=kwargs
)
def place_active_order(self, **kwargs):
"""
Places an active order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/order'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/create'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/create'
else:
suffix = '/v2/private/order/create'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_active_order_bulk(self, orders: list, max_in_parallel=10):
"""
Places multiple active orders in bulk using multithreading. For more
information on place_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.place_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def get_active_order(self, endpoint="", **kwargs):
"""
Gets an active order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-getactive.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getactive.
:param endpoint: The endpoint path, such as "/spot/v1/order".
This allows the user to choose between which endpoint to use to
fetch a spot order.
:returns: Request results as dictionary.
"""
if endpoint:
suffix = endpoint
else:
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/history-orders'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/list'
else:
suffix = '/v2/private/order/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_active_order(self, **kwargs):
"""
Cancels an active order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-cancelactive.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelactive.
:returns: Request results as dictionary.
"""
method = 'POST'
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/order'
method = 'DELETE'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/cancel'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/cancel'
else:
suffix = '/v2/private/order/cancel'
return self._submit_request(
method=method,
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def fast_cancel_active_order(self, **kwargs):
"""
Fast cancels an active order.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-fastcancelactiveorder.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='DELETE',
path=self.endpoint + '/spot/v1/order/fast',
query=kwargs,
auth=True
)
def cancel_active_order_bulk(self, orders: list, max_in_parallel=10):
"""
Cancels multiple active orders in bulk using multithreading. For more
information on cancel_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.cancel_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def cancel_all_active_orders(self, **kwargs):
"""
Cancel all active orders that are unfilled or partially filled. Fully
filled orders cannot be cancelled.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelallactive.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/cancel-all'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/cancelAll'
else:
suffix = '/v2/private/order/cancelAll'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def batch_cancel_active_order(self, **kwargs):
"""
Batch cancels active orders.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-batchcancelactiveorder.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='DELETE',
path=self.endpoint + '/spot/order/batch-cancel',
query=kwargs,
auth=True
)
def batch_fast_cancel_active_order(self, **kwargs):
"""
Batch fast cancels active orders.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-batchfastcancelactiveorder.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='DELETE',
path=self.endpoint + '/spot/order/batch-fast-cancel',
query=kwargs,
auth=True
)
def batch_cancel_active_order_by_ids(self, **kwargs):
"""
Batch cancels active order by ids.
:param kwargs: See
https://bybit-exchange.github.io/docs/spot/#t-batchcancelactiveorderbyids.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='DELETE',
path=self.endpoint + '/spot/order/batch-cancel-by-ids',
query=kwargs,
auth=True
)
def replace_active_order(self, **kwargs):
"""
Replace order can modify/amend your active orders.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-replaceactive.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/replace'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/replace'
else:
suffix = '/v2/private/order/replace'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_active_order_bulk(self, orders: list, max_in_parallel=10):
"""
Replaces multiple active orders in bulk using multithreading. For more
information on replace_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-replaceactive.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.replace_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def query_active_order(self, **kwargs):
"""
Query real-time active order information.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-queryactive.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/open-orders'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/search'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order'
else:
suffix = '/v2/private/order'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_conditional_order(self, **kwargs):
"""
Places a conditional order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-placecond.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-placecond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/create'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/create'
else:
suffix = '/v2/private/stop-order/create'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_conditional_order_bulk(self, orders: list, max_in_parallel=10):
"""
Places multiple conditional orders in bulk using multithreading. For
more information on place_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-placecond.
:param orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.place_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def get_conditional_order(self, **kwargs):
"""
Gets a conditional order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-getcond.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getcond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/list'
else:
suffix = '/v2/private/stop-order/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_conditional_order(self, **kwargs):
"""
Cancels a conditional order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-cancelcond.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelcond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/cancel'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/cancel'
else:
suffix = '/v2/private/stop-order/cancel'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_conditional_order_bulk(self, orders: list, max_in_parallel=10):
"""
Cancels multiple conditional orders in bulk using multithreading. For
more information on cancel_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-cancelcond.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.cancel_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def cancel_all_conditional_orders(self, **kwargs):
"""
Cancel all conditional orders that are unfilled or partially filled.
Fully filled orders cannot be cancelled.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelallcond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/cancel-all'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/cancelAll'
else:
suffix = '/v2/private/stop-order/cancelAll'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_conditional_order(self, **kwargs):
"""
Replace conditional order can modify/amend your conditional orders.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-replacecond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/replace'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/replace'
else:
suffix = '/v2/private/stop-order/replace'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_conditional_order_bulk(self, orders: list, max_in_parallel=10):
"""
Replaces multiple conditional orders in bulk using multithreading. For
more information on replace_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-replacecond.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.replace_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def query_conditional_order(self, **kwargs):
"""
Query real-time conditional order information.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-querycond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/search'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order'
else:
suffix = '/v2/private/stop-order'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def my_position(self, endpoint="", **kwargs):
"""
Get my position list.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-myposition.
:param endpoint: The endpoint path, such as "/v2/private/position/list".
This allows the user to bypass the "symbol" arg, and instead specify
the desired market contract type (inverse perp, linear perp, etc)
and receive multiple symbols in the response.
:returns: Request results as dictionary.
"""
if endpoint:
suffix = endpoint
else:
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/list'
else:
suffix = '/v2/private/position/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def set_auto_add_margin(self, **kwargs):
"""
For linear markets only. Set auto add margin, or Auto-Margin
Replenishment.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-setautoaddmargin.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='POST',
path=self.endpoint + '/private/linear/position/set-auto-add-margin',
query=kwargs,
auth=True
)
def set_leverage(self, **kwargs):
"""
Change user leverage.
If you want to switch between cross margin and isolated margin, please
see cross_isolated_margin_switch.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-setleverage.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/set-leverage'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/leverage/save'
else:
suffix = '/v2/private/position/leverage/save'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cross_isolated_margin_switch(self, **kwargs):
"""
Switch Cross/Isolated; must be leverage value when switching from Cross
to Isolated.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marginswitch.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/switch-isolated'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/switch-isolated'
else:
suffix = '/v2/private/position/switch-isolated'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def position_mode_switch(self, **kwargs):
"""
If you are in One-Way Mode, you can only open one position on Buy or
Sell side;
If you are in Hedge Mode, you can open both Buy and Sell side positions
simultaneously.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse_futures/#t-switchpositionmode.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/switch-mode'
else:
suffix = '/v2/private/position/switch-mode'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def full_partial_position_tp_sl_switch(self, **kwargs):
"""
Switch mode between Full or Partial
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-switchmode.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/tpsl/switch-mode'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/tpsl/switch-mode'
else:
suffix = '/v2/private/tpsl/switch-mode'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def change_margin(self, **kwargs):
"""
Update margin.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-changemargin.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/change-position-margin'
else:
suffix = '/v2/private/position/change-position-margin'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def set_trading_stop(self, **kwargs):
"""
Set take profit, stop loss, and trailing stop for your open position.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-tradingstop.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/trading-stop'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/trading-stop'
else:
suffix = '/v2/private/position/trading-stop'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def add_reduce_margin(self, **kwargs):
"""
For linear markets only. Add margin.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-addmargin.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/private/linear/position/add-margin',
query=kwargs,
auth=True
)
def user_leverage(self, **kwargs):
"""
ABANDONED! Please use my_position instead. Fetches user leverage by
fetching user position.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getleverage.
:returns: Request results as dictionary.
"""
self.logger.warning('This endpoint is deprecated and will be removed. Use my_position()')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/position/list',
query=kwargs,
auth=True
)
def change_user_leverage(self, **kwargs):
"""
Change user leverage.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-changeleverage.
:returns: Request results as dictionary.
"""
self.logger.warning('This endpoint is deprecated and will be removed. Use set_leverage()')
return self._submit_request(
method='POST',
path=self.endpoint + '/user/leverage/save',
query=kwargs,
auth=True
)
def user_trade_records(self, **kwargs):
"""
Get user's trading records. The results are ordered in ascending order
(the first item is the oldest).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-usertraderecords.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/myTrades'
elif kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/trade/execution/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/execution/list'
else:
suffix = '/v2/private/execution/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def closed_profit_and_loss(self, **kwargs):
"""
Get user's closed profit and loss records. The results are ordered in
descending order (the first item is the latest).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-closedprofitandloss.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/trade/closed-pnl/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/trade/closed-pnl/list'
else:
suffix = '/v2/private/trade/closed-pnl/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def get_risk_limit(self, endpoint="", **kwargs):
"""
Get risk limit.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getrisklimit.
:param endpoint: The endpoint path, such as "/v2/public/risk-limit/list".
This allows the user to bypass the "symbol" arg, and instead specify
the desired market contract type (inverse perp, linear perp, etc)
and receive multiple symbols in the response.
:returns: Request results as dictionary.
"""
if kwargs.get('is_linear') in (False, True):
self.logger.warning("The is_linear argument is obsolete.")
if endpoint:
suffix = endpoint
else:
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/risk-limit'
else:
suffix = '/v2/public/risk-limit/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def set_risk_limit(self, **kwargs):
"""
Set risk limit.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-setrisklimit.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/set-risk'
else:
suffix = '/v2/private/position/risk-limit'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def get_the_last_funding_rate(self, **kwargs):
"""
The funding rate is generated every 8 hours at 00:00 UTC, 08:00 UTC and
16:00 UTC. For example, if a request is sent at 12:00 UTC, the funding
rate generated earlier that day at 08:00 UTC will be sent.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-fundingrate.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/funding/prev-funding-rate'
else:
suffix = '/v2/public/funding/prev-funding-rate'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def my_last_funding_fee(self, **kwargs):
"""
Funding settlement occurs every 8 hours at 00:00 UTC, 08:00 UTC and
16:00 UTC. The current interval's fund fee settlement is based on the
previous interval's fund rate. For example, at 16:00, the settlement is
based on the fund rate generated at 8:00. The fund rate generated at
16:00 will be used at 0:00 the next day.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-mylastfundingfee.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/funding/prev-funding'
else:
suffix = '/v2/private/funding/prev-funding'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def predicted_funding_rate(self, **kwargs):
"""
Get predicted funding rate and my funding fee.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-predictedfunding.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/funding/predicted-funding'
else:
suffix = '/v2/private/funding/predicted-funding'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def api_key_info(self):
"""
Get user's API key info.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/account/api-key',
auth=True
)
def lcp_info(self, **kwargs):
"""
Get user's LCP (data refreshes once an hour). Only supports inverse
perpetual at present. See
https://bybit-exchange.github.io/docs/inverse/#t-liquidity to learn
more.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-lcp.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/account/lcp',
query=kwargs,
auth=True
)
def get_wallet_balance(self, **kwargs):
"""
Get wallet balance info.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-balance.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/account'
else:
suffix = '/v2/private/wallet/balance'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def wallet_fund_records(self, **kwargs):
"""
Get wallet fund records. This endpoint also shows exchanges from the
Asset Exchange, where the types for the exchange are
ExchangeOrderWithdraw and ExchangeOrderDeposit.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-walletrecords.
:returns: Request results as dictionary.
"""
# Replace query param 'from_id' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/fund/records',
query=kwargs,
auth=True
)
def withdraw_records(self, **kwargs):
"""
Get withdrawal records.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-withdrawrecords.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/withdraw/list',
query=kwargs,
auth=True
)
def asset_exchange_records(self, **kwargs):
"""
Get asset exchange records.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-assetexchangerecords.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/exchange-order/list',
query=kwargs,
auth=True
)
def server_time(self, **kwargs):
"""
Get Bybit server time.
:returns: Request results as dictionary.
"""
if self.spot is True or kwargs.get('spot', '') is True:
suffix = '/spot/v1/time'
else:
suffix = '/v2/public/time'
return self._submit_request(
method='GET',
path=self.endpoint + suffix
)
def announcement(self):
"""
Get Bybit OpenAPI announcements in the last 30 days by reverse order.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/announcement'
)
'''
Additional Methods
These methods use two or more requests to perform a specific
function and are exclusive to pybit.
'''
def close_position(self, symbol):
"""
Closes your open position. Makes two requests (position, order).
Parameters
------------------------
symbol : str
Required parameter. The symbol of the market as a string,
e.g. 'BTCUSD'.
"""
# First we fetch the user's position.
try:
r = self.my_position(symbol=symbol)['result']
# If there is no returned position, we want to handle that.
except KeyError:
return self.logger.error('No position detected.')
# Next we generate a list of market orders
orders = [
{
'symbol': symbol,
'order_type': 'Market',
'side': 'Buy' if p['side'] == 'Sell' else 'Sell',
'qty': p['size'],
'time_in_force': 'ImmediateOrCancel',
'reduce_only': True,
'close_on_trigger': True
} for p in (r if isinstance(r, list) else [r]) if p['size'] > 0
]
if len(orders) == 0:
return self.logger.error('No position detected.')
# Submit a market order against each open position for the same qty.
return self.place_active_order_bulk(orders)
'''
Below are methods under https://bybit-exchange.github.io/docs/account_asset
'''
def create_internal_transfer(self, **kwargs):
"""
Create internal transfer.
:param kwargs: See
https://bybit-exchange.github.io/docs/account_asset/#t-createinternaltransfer.
:returns: Request results as dictionary.
"""
suffix="/asset/v1/private/transfer"
if self._verify_string(kwargs,'amount'):
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
else:
self.logger.error('amount must be in string format')
def create_subaccount_transfer(self, **kwargs):
"""
Create internal transfer.
:param kwargs: See
https://bybit-exchange.github.io/docs/account_asset/#t-createsubaccounttransfer.
:returns: Request results as dictionary.
"""
suffix="/asset/v1/private/sub-member/transfer"
if self._verify_string(kwargs, 'amount'):
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
else:
self.logger.error('amount must be in string format')
def query_transfer_list(self, **kwargs):
"""
Create internal transfer.
:param kwargs: See
https://bybit-exchange.github.io/docs/account_asset/#t-querytransferlist.
:returns: Request results as dictionary.
"""
suffix="/asset/v1/private/transfer/list"
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def query_subaccount_list(self):
"""
Create internal transfer.
:param kwargs: See
https://bybit-exchange.github.io/docs/account_asset/#t-querysubaccountlist.
:returns: Request results as dictionary.
"""
suffix="/asset/v1/private/sub-member/member-ids"
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query={},
auth=True
)
def query_subaccount_transfer_list(self,**kwargs):
"""
Create internal transfer.
:param kwargs: See
https://bybit-exchange.github.io/docs/account_asset/#t-querysubaccounttransferlist.
:returns: Request results as dictionary.
"""
suffix="/asset/v1/private/sub-member/transfer/list"
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
'''
Internal methods; signature and request submission.
For more information about the request signature, see
https://bybit-exchange.github.io/docs/inverse/#t-authentication.
'''
def _auth(self, method, params, recv_window):
"""
Generates authentication signature per Bybit API specifications.
Notes
-------------------
Since the POST method requires a JSONified dict, we need to ensure
the signature uses lowercase booleans instead of Python's
capitalized booleans. This is done in the bug fix below.
"""
api_key = self.api_key
api_secret = self.api_secret
if api_key is None or api_secret is None:
raise PermissionError('Authenticated endpoints require keys.')
# Append required parameters.
params['api_key'] = api_key
params['recv_window'] = recv_window
params['timestamp'] = int(time.time() * 10 ** 3)
# Sort dictionary alphabetically to create querystring.
_val = '&'.join(
[str(k) + '=' + str(v) for k, v in sorted(params.items()) if
(k != 'sign') and (v is not None)]
)
# Bug fix. Replaces all capitalized booleans with lowercase.
if method == 'POST':
_val = _val.replace('True', 'true').replace('False', 'false')
# Return signature.
return str(hmac.new(
bytes(api_secret, 'utf-8'),
bytes(_val, 'utf-8'), digestmod='sha256'
).hexdigest())
def _verify_string(self,params,key):
if key in params:
if not isinstance(params[key], str):
return False
else:
return True
return True
def _submit_request(self, method=None, path=None, query=None, auth=False):
"""
Submits the request to the API.
Notes
-------------------
We use the params argument for the GET method, and data argument for
the POST method. Dicts passed to the data argument must be
JSONified prior to submitting request.
"""
if query is None:
query = {}
# Remove internal spot arg
query.pop('spot', '')
# Store original recv_window.
recv_window = self.recv_window
# Bug fix: change floating whole numbers to integers to prevent
# auth signature errors.
if query is not None:
for i in query.keys():
if isinstance(query[i], float) and query[i] == int(query[i]):
query[i] = int(query[i])
# Send request and return headers with body. Retry if failed.
retries_attempted = self.max_retries
req_params = None
while True:
retries_attempted -= 1
if retries_attempted < 0:
raise FailedRequestError(
request=f'{method} {path}: {req_params}',
message='Bad Request. Retries exceeded maximum.',
status_code=400,
time=dt.utcnow().strftime("%H:%M:%S")
)
retries_remaining = f'{retries_attempted} retries remain.'
# Authenticate if we are using a private endpoint.
if auth:
# Prepare signature.
signature = self._auth(
method=method,
params=query,
recv_window=recv_window,
)
# Sort the dictionary alphabetically.
query = dict(sorted(query.items(), key=lambda x: x))
# Append the signature to the dictionary.
query['sign'] = signature
# Define parameters and log the request.
if query is not None:
req_params = {k: v for k, v in query.items() if
v is not None}
else:
req_params = {}
# Log the request.
if self.log_requests:
self.logger.debug(f'Request -> {method} {path}: {req_params}')
# Prepare request; use 'params' for GET and 'data' for POST.
if method == 'GET':
headers = {
'Content-Type': 'application/x-www-form-urlencoded'
}
r = self.client.prepare_request(
requests.Request(method, path, params=req_params,
headers=headers)
)
else:
if 'spot' in path:
full_param_str = '&'.join(
[str(k) + '=' + str(v) for k, v in
sorted(query.items()) if v is not None]
)
headers = {
'Content-Type': 'application/x-www-form-urlencoded'
}
r = self.client.prepare_request(
requests.Request(method, path + f"?{full_param_str}",
headers=headers)
)
else:
r = self.client.prepare_request(
requests.Request(method, path,
data=json.dumps(req_params))
)
# Attempt the request.
try:
s = self.client.send(r, timeout=self.timeout)
# If requests fires an error, retry.
except (
requests.exceptions.ReadTimeout,
requests.exceptions.SSLError,
requests.exceptions.ConnectionError
) as e:
if self.force_retry:
self.logger.error(f'{e}. {retries_remaining}')
time.sleep(self.retry_delay)
continue
else:
raise e
# Convert response to dictionary, or raise if requests error.
try:
s_json = s.json()
# If we have trouble converting, handle the error and retry.
except JSONDecodeError as e:
if self.force_retry:
self.logger.error(f'{e}. {retries_remaining}')
time.sleep(self.retry_delay)
continue
else:
raise FailedRequestError(
request=f'{method} {path}: {req_params}',
message='Conflict. Could not decode JSON.',
status_code=409,
time=dt.utcnow().strftime("%H:%M:%S")
)
# If Bybit returns an error, raise.
if s_json['ret_code']:
# Generate error message.
error_msg = (
f'{s_json["ret_msg"]} (ErrCode: {s_json["ret_code"]})'
)
# Set default retry delay.
err_delay = self.retry_delay
# Retry non-fatal whitelisted error requests.
if s_json['ret_code'] in self.retry_codes:
# 10002, recv_window error; add 2.5 seconds and retry.
if s_json['ret_code'] == 10002:
error_msg += '. Added 2.5 seconds to recv_window'
recv_window += 2500
# 10006, ratelimit error; wait until rate_limit_reset_ms
# and retry.
elif s_json['ret_code'] == 10006:
self.logger.error(
f'{error_msg}. Ratelimited on current request. '
f'Sleeping, then trying again. Request: {path}'
)
# Calculate how long we need to wait.
limit_reset = s_json['rate_limit_reset_ms'] / 1000
reset_str = time.strftime(
'%X', time.localtime(limit_reset)
)
err_delay = int(limit_reset) - int(time.time())
error_msg = (
f'Ratelimit will reset at {reset_str}. '
f'Sleeping for {err_delay} seconds'
)
# Log the error.
self.logger.error(f'{error_msg}. {retries_remaining}')
time.sleep(err_delay)
continue
elif s_json['ret_code'] in self.ignore_codes:
pass
else:
raise InvalidRequestError(
request=f'{method} {path}: {req_params}',
message=s_json["ret_msg"],
status_code=s_json["ret_code"],
time=dt.utcnow().strftime("%H:%M:%S")
)
else:
return s_json
class WebSocket:
"""
Connector for Bybit's WebSocket API.
"""
def __init__(self, endpoint, api_key=None, api_secret=None,
subscriptions=None, logging_level=logging.INFO,
max_data_length=200, ping_interval=30, ping_timeout=10,
restart_on_error=True, purge_on_fetch=True,
trim_data=True):
"""
Initializes the websocket session.
:param endpoint: Required parameter. The endpoint of the remote
websocket.
:param api_key: Your API key. Required for authenticated endpoints.
Defaults to None.
:param api_secret: Your API secret key. Required for authenticated
endpoints. Defaults to None.
:param subscriptions: A list of desired topics to subscribe to. See API
documentation for more information. Defaults to an empty list, which
will raise an error.
:param logging_level: The logging level of the built-in logger. Defaults
to logging.INFO. Options are CRITICAL (50), ERROR (40),
WARNING (30), INFO (20), DEBUG (10), or NOTSET (0).
:param max_data_length: The maximum number of rows for the stored
dataset. A smaller number will prevent performance or memory issues.
:param ping_interval: The number of seconds between each automated ping.
:param ping_timeout: The number of seconds to wait for 'pong' before an
Exception is raised.
:param restart_on_error: Whether or not the connection should restart on
error.
:param purge_on_fetch: Whether or not stored data should be purged each
fetch. For example, if the user subscribes to the 'trade' topic, and
fetches, should the data show all trade history up to the maximum
length or only get the data since the last fetch?
:param trim_data: Decide whether the returning data should be
trimmed to only provide the data value.
:returns: WebSocket session.
"""
self.spot = True if "spot" in endpoint else False
self.spot_unauth = True if [True for v in ['v1', 'v2'] if v in
endpoint] else False
self.spot_auth = True if "spot" in endpoint and not \
self.spot_unauth else False
if not self.spot_auth:
if not subscriptions:
raise Exception('Subscription list cannot be empty!')
if not self.spot:
# Require symbol on 'trade' topic.
if 'trade' in subscriptions:
raise Exception('\'trade\' requires a ticker, e.g. '
'\'trade.BTCUSD\'.')
# Require currency on 'insurance' topic.
if 'insurance' in subscriptions:
raise Exception('\'insurance\' requires a currency, e.g. '
'\'insurance.BTC\'.')
# Require timeframe and ticker on 'klineV2' topic.
if 'klineV2' in subscriptions:
raise Exception('\'klineV2\' requires a timeframe and ticker, e.g.'
' \'klineV2.5.BTCUSD\'.')
# Check if subscriptions are in the correct format
if self.spot and not self.spot_auth:
for subscription in subscriptions.copy():
if isinstance(subscription, str):
try:
subscriptions.pop(subscriptions.index(subscription))
subscriptions.append(json.loads(subscription))
except JSONDecodeError:
raise Exception('Spot subscriptions should be dicts, '
'or strings that are valid JSONs.')
elif not self.spot:
for subscription in subscriptions:
if not isinstance(subscription, str):
raise Exception('Futures subscriptions should be strings.')
for subscription in subscriptions:
if ('v2' in endpoint and 'symbol' in subscription) or \
('v1' in endpoint and 'symbol' in subscription['params']):
raise Exception('Cannot subscribe to v1 topics with v2 '
'endpoint, or vice versa.')
# set websocket name for logging purposes
self.wsName = 'Authenticated' if api_key else 'Non-Authenticated'
# Setup logger.
self.logger = logging.getLogger(__name__)
if len(logging.root.handlers) == 0:
# no handler on root logger set -> we add handler just for this logger to not mess with custom logic from outside
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
)
handler.setLevel(logging_level)
self.logger.addHandler(handler)
self.logger.debug(f'Initializing {self.wsName} WebSocket.')
# Ensure authentication for private topics.
if not self.spot and any(i in subscriptions for i in [
'position',
'execution',
'order',
'stop_order',
'wallet'
]) and api_key is None:
raise PermissionError('You must be authorized to use '
'private topics!')
# Set endpoint.
self.endpoint = endpoint
# Set API keys.
self.api_key = api_key
self.api_secret = api_secret
# Set topic subscriptions for WebSocket.
self.subscriptions = subscriptions
# Checking if using auth spot connection.
if '/spot/ws' in self.endpoint:
self.subscriptions = ['outboundAccountInfo', 'executionReport',
'ticketInfo']
self.max_length = max_data_length
# Set ping settings.
self.ping_interval = ping_interval
self.ping_timeout = ping_timeout
# Other optional data handling settings.
self.handle_error = restart_on_error
self.purge = purge_on_fetch
self.trim = trim_data
# Set initial state, initialize dictionary and connect.
self._reset()
self._connect(self.endpoint)
def fetch(self, topic):
"""
Fetches data from the subscribed topic.
:param topic: Required parameter. The subscribed topic to poll.
:returns: Filtered data as dict.
"""
if self.spot and self.spot_unauth:
topic = self.conform_topic(topic)
# If the topic given isn't in the initial subscribed list.
if topic not in self.subscriptions:
raise Exception(f"You aren\'t subscribed to the {topic} topic.")
# Pop all trade or execution data on each poll.
# don't pop order or stop_order data as we will lose valuable state
if any(i in topic for i in ['trade', 'execution']) \
and not topic.startswith('orderBook') and \
"executionReport" not in topic:
data = self.data[topic].copy()
if self.purge:
self.data[topic] = []
return data
else:
try:
return self.data[topic]
except KeyError:
return []
def ping(self):
"""
Pings the remote server to test the connection. The status of the
connection can be monitored using ws.ping().
"""
self.ws.send(json.dumps({'op': 'ping'}))
def exit(self):
"""
Closes the websocket connection.
"""
self.ws.close()
while self.ws.sock:
continue
self.exited = True
def _auth(self):
"""
Authorize websocket connection.
"""
# Generate expires.
expires = int((time.time() + 1) * 1000)
# Generate signature.
_val = f'GET/realtime{expires}'
signature = str(hmac.new(
bytes(self.api_secret, 'utf-8'),
bytes(_val, 'utf-8'), digestmod='sha256'
).hexdigest())
# Authenticate with API.
self.ws.send(
json.dumps({
'op': 'auth',
'args': [self.api_key, expires, signature]
})
)
def _connect(self, url):
"""
Open websocket in a thread.
"""
self.ws = websocket.WebSocketApp(
url=url,
on_message=lambda ws, msg: self._on_message(msg),
on_close=self._on_close(),
on_open=self._on_open(),
on_error=lambda ws, err: self._on_error(err)
)
# Setup the thread running WebSocketApp.
self.wst = threading.Thread(target=lambda: self.ws.run_forever(
ping_interval=self.ping_interval,
ping_timeout=self.ping_timeout
))
# Configure as daemon; start.
self.wst.daemon = True
self.wst.start()
# Attempt to connect for X seconds.
retries = 10
while retries > 0 and (not self.ws.sock or not self.ws.sock.connected):
retries -= 1
time.sleep(1)
# If connection was not successful, raise error.
if retries <= 0:
self.exit()
raise websocket.WebSocketTimeoutException('Connection failed.')
# If given an api_key, authenticate.
if self.api_key and self.api_secret and not self.spot_unauth:
self._auth()
# Check if subscriptions is a list.
if isinstance(self.subscriptions, (str, dict)):
self.subscriptions = [self.subscriptions]
# Subscribe to the requested topics.
if not self.spot_auth and self.spot_unauth:
for subscription in self.subscriptions:
if not subscription.get('event'):
subscription['event'] = 'sub'
if not subscription.get('params'):
subscription['params'] = {}
if 'v2' in self.endpoint:
raise Exception('v2 spot websocket topics require the '
'"symbol" key within "params"')
if not subscription.get('binary') or \
subscription['params'].get('binary'):
subscription['params']['binary'] = False
self.ws.send(json.dumps(subscription))
elif not self.spot:
self.ws.send(
json.dumps({
'op': 'subscribe',
'args': self.subscriptions
})
)
# Initialize the topics.
if not self.spot_auth and self.spot:
# Strip the subscription dict
for subscription in self.subscriptions:
index = self.subscriptions.index(subscription)
subscription = subscription if isinstance(subscription, dict) \
else json.loads(subscription)
subscription.pop('event')
subscription['params']['binary'] = str(subscription['params'][
'binary']).lower()
if subscription['params'].get('dumpScale'):
subscription['params']['dumpScale'] = str(subscription[
'params']['dumpScale'])
self.subscriptions[index] = \
self.conform_topic(subscription)
topics = self.subscriptions
for topic in topics:
if topic not in self.data:
self.data[topic] = {}
@staticmethod
def _find_index(source, target, key):
"""
Find the index in source list of the targeted ID.
"""
return next(i for i, j in enumerate(source) if j[key] == target[key])
def _on_message(self, message):
"""
Parse incoming messages. Similar structure to the
official WS connector.
"""
# Load dict of message.
msg_json = json.loads(message)
# Did we receive a message regarding auth or subscription?
auth_message = True if isinstance(msg_json, dict) and \
(msg_json.get('auth') or
msg_json.get('request', {}).get('op') == 'auth') else False
subscription_message = True if isinstance(msg_json, dict) and \
((msg_json.get('event') == 'sub' or msg_json.get('code')) or
msg_json.get('request', {}).get('op') == 'subscribe') else False
# Check auth
if auth_message:
# If we get successful futures/spot auth, notify user.
if msg_json.get('success') is True or \
msg_json.get('auth') == 'success':
self.logger.debug('Authorization successful.')
self.auth = True
# If we get unsuccessful auth, notify user.
elif msg_json.get('auth') == 'fail' or \
msg_json.get('success') is False:
self.logger.debug('Authorization failed. Please check your '
'API keys and restart.')
# Check subscription
if subscription_message:
# If we get successful futures/spot subscription, notify user.
if msg_json.get('success') is True or \
msg_json.get('msg') == 'Success':
sub = msg_json['topic'] if self.spot else msg_json[
'request']['args']
self.logger.debug(f'Subscription to {sub} successful.')
# Futures subscription fail
elif msg_json.get('success') is False:
response = msg_json['ret_msg']
if 'unknown topic' in response:
self.logger.error('Couldn\'t subscribe to topic.'
f' Error: {response}.')
# Spot subscription fail
elif msg_json.get('code'):
self.logger.error('Couldn\'t subscribe to topic.'
f' Error code: {msg_json["code"]}.'
f' Error message: {msg_json.get("desc")}.')
elif 'topic' in msg_json:
if self.spot:
# Conform received topic data so that we can match with our
# subscribed topic
topic = self.conform_topic(msg_json.copy())
else:
topic = msg_json['topic']
# If incoming 'orderbookL2' data.
if 'orderBook' in topic:
# Make updates according to delta response.
if 'delta' in msg_json['type']:
# Delete.
for entry in msg_json['data']['delete']:
index = self._find_index(self.data[topic], entry, 'id')
self.data[topic].pop(index)
# Update.
for entry in msg_json['data']['update']:
index = self._find_index(self.data[topic], entry, 'id')
self.data[topic][index] = entry
# Insert.
for entry in msg_json['data']['insert']:
self.data[topic].append(entry)
# Record the initial snapshot.
elif 'snapshot' in msg_json['type']:
if 'order_book' in msg_json['data']:
self.data[topic] = msg_json['data']['order_book'] if self.trim else msg_json
else:
self.data[topic] = msg_json['data'] if self.trim else msg_json
#self.data[topic] = msg_json['data']
# If incoming 'diffDepth' data.
elif 'diffDepth' in topic:
book_sides = {'b': msg_json['data'][0]['b'],
'a': msg_json['data'][0]['a']}
if not self.data[topic]:
self.data[topic] = book_sides
return
for side, entries in book_sides.items():
for entry in entries:
# Delete.
if float(entry[1]) == 0:
index = self._find_index(
self.data[topic][side], entry, 0)
self.data[topic][side].pop(index)
continue
# Insert.
price_level_exists = entry[0] in \
[level[0] for level in self.data[topic][side]]
if not price_level_exists:
self.data[topic][side].append(entry)
continue
# Update.
qty_changed = entry[1] != next(
level[1] for level in self.data[topic][side] if
level[0] == entry[0])
if price_level_exists and qty_changed:
index = self._find_index(
self.data[topic][side], entry, 0)
self.data[topic][side][index] = entry
continue
# For incoming 'order' and 'stop_order' data.
elif any(i in topic for i in ['order', 'stop_order']):
# record incoming data
for i in msg_json['data']:
try:
# update existing entries
# temporary workaround for field anomaly in stop_order data
ord_id = topic + '_id' if i['symbol'].endswith('USDT') else 'order_id'
index = self._find_index(self.data[topic], i, ord_id)
self.data[topic][index] = i
except StopIteration:
# Keep appending or create new list if not already created.
try:
self.data[topic].append(i)
except AttributeError:
self.data[topic] = msg_json['data']
# For incoming 'trade' and 'execution' data.
elif any(i in topic for i in ['trade', 'execution']):
# Keep appending or create new list if not already created.
try:
trades = [msg_json['data']] if isinstance(
msg_json['data'], dict) else msg_json['data']
for i in trades:
self.data[topic].append(i)
except AttributeError:
self.data[topic] = msg_json['data']
# If list is too long, pop the first entry.
if len(self.data[topic]) > self.max_length:
self.data[topic].pop(0)
# If incoming data is in a topic which only pushes messages in
# the snapshot format
elif any(i in topic for i in ['insurance', 'kline', 'wallet',
'candle', 'realtimes', '"depth"',
'"mergedDepth"', 'bookTicker']):
# Record incoming data.
if 'v2' in self.endpoint:
self.data[topic] = msg_json['data'] if self.trim else msg_json
else:
self.data[topic] = msg_json['data'][0] if self.trim else msg_json
# If incoming 'instrument_info' data.
elif 'instrument_info' in topic:
# Make updates according to delta response.
if 'delta' in msg_json['type']:
for i in msg_json['data']['update'][0]:
self.data[topic][i] = msg_json['data']['update'][0][i]
# Record the initial snapshot.
elif 'snapshot' in msg_json['type']:
self.data[topic] = msg_json['data'] if self.trim else msg_json
# If incoming 'position' data.
elif 'position' in topic:
# Record incoming position data.
for p in msg_json['data']:
# linear (USDT) positions have Buy|Sell side and
# updates contain all USDT positions.
# For linear tickers...
if p['symbol'].endswith('USDT'):
try:
self.data[topic][p['symbol']][p['side']] = p
# if side key hasn't been created yet...
except KeyError:
self.data[topic][p['symbol']] = {p['side']: p}
# For non-linear tickers...
else:
self.data[topic][p['symbol']] = p
elif isinstance(msg_json, list):
for item in msg_json:
topic = item.get('e')
if topic == "outboundAccountInfo":
self.data[topic] = item
elif any(i in topic for i in ['executionReport', 'ticketInfo']):
# Keep appending or create new list if not already created.
try:
self.data[topic].append(item)
except AttributeError:
self.data[topic] = item
self.data[topic] = item
def _on_error(self, error):
"""
Exit on errors and raise exception, or attempt reconnect.
"""
if not self.exited:
self.logger.error(f'WebSocket {self.wsName} encountered error: {error}.')
self.exit()
# Reconnect.
if self.handle_error:
self._reset()
self._connect(self.endpoint)
def _on_open(self):
"""
Log WS open.
"""
self.logger.debug(f'WebSocket {self.wsName} opened.')
def _on_close(self):
"""
Log WS close.
"""
self.logger.debug(f'WebSocket {self.wsName} closed.')
def _reset(self):
"""
Set state booleans and initialize dictionary.
"""
self.exited = False
self.auth = False
self.data = {}
@staticmethod
def conform_topic(topic):
"""
For spot API. Due to the fact that the JSON received in update
messages does not include a simple "topic" key, and parameters all
have their own separate keys, we need to compare the entire JSON.
Therefore, we need to strip the JSON of any unnecessary keys,
cast some values, and dump the JSON with sort_keys.
"""
if isinstance(topic, str):
topic = json.loads(topic)
topic.pop('symbolName', '')
topic['params'].pop('realtimeInterval', '')
topic['params'].pop('symbolName', '')
if topic['params'].get('klineType'):
topic['topic'] += "_" + topic['params'].get('klineType')
topic['params'].pop('klineType')
topic.pop('data', '')
topic.pop('f', '')
topic.pop('sendTime', '')
topic.pop('shared', '')
return json.dumps(topic, sort_keys=True, separators=(',', ':'))
| 34.849697
| 125
| 0.550149
|
d327ef81625cd9c2487c8476d1180277ae2a8720
| 6,725
|
py
|
Python
|
osf_tests/management_commands/test_EGAP_import.py
|
gaybro8777/osf.io
|
30408511510a40bc393565817b343ef5fd76ab14
|
[
"Apache-2.0"
] | 628
|
2015-01-15T04:33:22.000Z
|
2022-03-30T06:40:10.000Z
|
osf_tests/management_commands/test_EGAP_import.py
|
gaybro8777/osf.io
|
30408511510a40bc393565817b343ef5fd76ab14
|
[
"Apache-2.0"
] | 4,712
|
2015-01-02T01:41:53.000Z
|
2022-03-30T14:18:40.000Z
|
osf_tests/management_commands/test_EGAP_import.py
|
Johnetordoff/osf.io
|
de10bf249c46cede04c78f7e6f7e352c69e6e6b5
|
[
"Apache-2.0"
] | 371
|
2015-01-12T16:14:08.000Z
|
2022-03-31T18:58:29.000Z
|
# encoding: utf-8
import os
import shutil
import pytest
import responses
HERE = os.path.dirname(os.path.abspath(__file__))
from osf_tests.factories import (
AuthUserFactory,
NodeFactory,
ApiOAuth2PersonalTokenFactory
)
from osf.models import (
RegistrationSchema,
ApiOAuth2PersonalToken
)
from osf.management.commands.import_EGAP import (
get_egap_assets,
ensure_egap_schema,
create_node_from_project_json,
recursive_upload,
get_creator_auth_header
)
from api_tests.utils import create_test_file
from website.settings import WATERBUTLER_INTERNAL_URL
@pytest.mark.django_db
class TestEGAPImport:
@pytest.fixture()
def greg(self):
return AuthUserFactory(username='greg@greg.com')
@pytest.fixture()
def node(self, greg):
return NodeFactory(creator=greg)
@pytest.fixture()
def node_with_file(self):
node = NodeFactory()
file = create_test_file(node, node.creator)
file.save()
node.save()
return node
@pytest.fixture()
def egap_assets_path(self):
return os.path.join(HERE, 'test_directory', 'EGAP')
@pytest.fixture()
def zip_data(self, egap_assets_path):
test_zip_path = os.path.join(egap_assets_path, 'test-egap.zip')
with open(test_zip_path, 'rb') as fp:
return fp.read()
@pytest.fixture()
def egap_project_name(self):
return '20120220AA'
def test_get_creator_auth_header(self, greg):
greg, auth_header = get_creator_auth_header(greg.username)
gregs_token = ApiOAuth2PersonalToken.objects.get(owner=greg).token_id
assert auth_header['Authorization'] == 'Bearer {}'.format(gregs_token)
def test_ensure_egap_schema(self):
ensure_egap_schema()
assert RegistrationSchema.objects.get(name='EGAP Registration', schema_version=3)
def test_create_node_from_project_json(self, egap_assets_path, egap_project_name, greg):
node = create_node_from_project_json(egap_assets_path, egap_project_name, greg)
assert node.title == 'Home Security and Infidelity: a case study by Fletcher Cox'
assert node.creator == greg
assert len(node.contributors.all()) == 5
contrib = node.contributors.exclude(username='greg@greg.com').first()
assert contrib.fullname == 'Fletcher Cox'
assert node.get_permissions(contrib) == ['read', 'write']
assert not node.get_visible(greg)
@responses.activate
def test_recursive_upload(self, node, greg, egap_assets_path, egap_project_name):
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/?name=test-1.txt&kind=file',
json={'metadata': 'for test-1!'},
status=201,
)
)
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/?name=test_folder&kind=folder',
json={'data': {'attributes': {'path': 'parent'}}},
status=201,
)
)
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/parent?name=test-2.txt&kind=file',
json={'metadata': 'for test-2!'},
status=201,
)
)
token = ApiOAuth2PersonalTokenFactory(owner=greg)
token.save()
auth = {'Authorization': 'Bearer {}'.format(token.token_id)}
egap_project_path = os.path.join(egap_assets_path, egap_project_name, 'data', 'nonanonymous')
metadata = recursive_upload(auth, node, egap_project_path)
assert {'metadata': 'for test-1!'} in metadata
assert {'data': {'attributes': {'path': 'parent'}}} in metadata
assert {'metadata': 'for test-2!'} in metadata
@responses.activate
def test_recursive_upload_retry(self, node, greg, egap_assets_path, egap_project_name):
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/?name=test_folder&kind=folder',
json={'data': {'attributes': {'path': 'parent'}}},
status=201,
)
)
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/parent?name=test-2.txt&kind=file',
status=500,
)
)
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/parent?name=test-2.txt&kind=file',
json={'metadata': 'for test-2!'},
status=201,
)
)
responses.add(
responses.Response(
method=responses.PUT,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node._id}/providers/osfstorage/?name=test-1.txt&kind=file',
json={'metadata': 'for test-1!'},
status=201,
)
)
token = ApiOAuth2PersonalTokenFactory(owner=greg)
token.save()
auth = {'Authorization': 'Bearer {}'.format(token.token_id)}
egap_project_path = os.path.join(egap_assets_path, egap_project_name, 'data', 'nonanonymous')
metadata = recursive_upload(auth, node, egap_project_path)
assert {'metadata': 'for test-2!'} in metadata
assert {'data': {'attributes': {'path': 'parent'}}} in metadata
assert {'metadata': 'for test-1!'} in metadata
@responses.activate
def test_get_egap_assets(self, node_with_file, zip_data):
file_node = node_with_file.files.first()
responses.add(
responses.Response(
method=responses.GET,
url=f'{WATERBUTLER_INTERNAL_URL}/v1/resources/{node_with_file._id}/providers/osfstorage/{file_node._id}',
body=zip_data,
status=200,
)
)
asset_path = get_egap_assets(node_with_file._id, {'fake auth': 'sadasdadsdasdsds'})
directory_list = os.listdir(asset_path)
# __MACOSX is a hidden file created by the os when zipping
assert set(directory_list) == set(['20110307AA', '__MACOSX', '20110302AA', 'egap_assets.zip', '20120117AA'])
shutil.rmtree(asset_path)
| 36.155914
| 128
| 0.625725
|
96e2ad47fceb41d887d116d9f5fba9befbc2a704
| 3,637
|
py
|
Python
|
load_data.py
|
cocoaaa/ml_gesture
|
a23dd7b9d13bbcb5a1ee049a7f1b026f81a4ba9d
|
[
"MIT"
] | null | null | null |
load_data.py
|
cocoaaa/ml_gesture
|
a23dd7b9d13bbcb5a1ee049a7f1b026f81a4ba9d
|
[
"MIT"
] | null | null | null |
load_data.py
|
cocoaaa/ml_gesture
|
a23dd7b9d13bbcb5a1ee049a7f1b026f81a4ba9d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 16 14:26:09 2015
@author: LLP-admin
"""
import sys
import pandas as pd
from sklearn.preprocessing import StandardScaler
def divideByClass(filepath, toSave = True):
"""
Input: a complete filepath
toSave (bool parameter): if true, save each class file to current directory.
It can handle delimiter of ',' or '\t' (i.e. tab).
The file to be read must have 'class' feature.
Divide the dataset by class and save each class's dataset into a csv file named by the class value,
in the same directory.
Returns 0 if successful divide and save.
"""
# print "current filepath: ", filepath;
try:
df = pd.io.parsers.read_csv(filepath,delimiter=',');
class_column = df['class'];
except KeyError:
df = pd.io.parsers.read_csv(filepath, delimiter='\t');
class_column = df['class'];
#print an error message and then re-raise the exception
#(allowing a caller to handle the exception as well):
except:
print "Unexpected error:", sys.exc_info()[0];
raise;
class_values = class_column.unique();
#Initialize a dictionary whose key is class value and value is dataframe of the class value.
dict_df_class = {};
for (i, value) in enumerate(class_values):
#write to a csv file
#Extract the directory path from the filepath.
#Be careful since the filepath might end with '\'.
splitAt = filepath.rfind('\\',0,len(filepath)-3);
dirpath = filepath[:splitAt +1];
outpath = dirpath + class_values[i] + ".csv";
df_class = df[df['class']==value];
dict_df_class[class_values[i]] = (df_class);
#If toSave is set to true, we save each file
if toSave:
# print "\nNow, saving ", class_values[i], ".....";
df_class.to_csv(outpath, index = False);
# print "Done saving this class value dataframe";
#Return the dictionary after the successful save.
return dict_df_class;
def splitXY(df):
"""Given a dataFrame of data,
split it into X (features and values) and Y (label vector).
Assume the last column is the class column.
"""
return df[df.columns[0:-1]], df[df.columns[-1]];
def piped_standardize(train_x, test_x):
"""
Input: Pandas DataFrame with all numeric values. It does not have the label feature.
Each column is a feature, each row is an instance (example).
Returns the standardized train_x (as a data frame), and the test (data frame) standardized based on the train data's mean and std.
"""
header = train_x.columns
scaler = StandardScaler().fit(train_x);
# std_train_x = scaler.transform(train_x);
# std_test_x = scaler.transform(test_x);
return ( pd.DataFrame(scaler.transform(train_x), columns = header), pd.DataFrame(scaler.transform(test_x), columns = header));
def findFirstPeak(report):
"""Given the report dictionary whose key is number of batch training vs %accuracy,
return the first peak: (firstPeak_x, %accuracy)
report starts at key = 1 (i.e. training session index is based on 1)
"""
#Add boundaries to the report
report[0] = -float('inf') #Any value <0 will do
report[len(report)+1] = -float('inf')
#Do search starting at the smaller index
for i in range(1, len(report) +1):
if (report[i-1] <=report[i]) and (report[i] >= report[i+1]):
return (i, float("{0:.2f}".format(report[i])) )
return None #No peak found. This should never happen.
| 37.494845
| 134
| 0.636514
|
93b45eb07fa4cbf662e5eba4013866afb34ed1c3
| 114,954
|
py
|
Python
|
kinow_client/apis/videos_api.py
|
kinow-io/kinow-python-sdk
|
4c1699a3c78048b84287bd049a669651a5b4e2d5
|
[
"Apache-2.0"
] | 1
|
2019-06-26T14:24:54.000Z
|
2019-06-26T14:24:54.000Z
|
kinow_client/apis/videos_api.py
|
kinow-io/kinow-python-sdk
|
4c1699a3c78048b84287bd049a669651a5b4e2d5
|
[
"Apache-2.0"
] | null | null | null |
kinow_client/apis/videos_api.py
|
kinow-io/kinow-python-sdk
|
4c1699a3c78048b84287bd049a669651a5b4e2d5
|
[
"Apache-2.0"
] | 1
|
2018-02-01T10:08:40.000Z
|
2018-02-01T10:08:40.000Z
|
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 1.4.58
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class VideosApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def attach_cover_to_video(self, video_id, id_image, **kwargs):
"""
Attach cover to video (the image need to be attached to the product)
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_cover_to_video(video_id, id_image, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int id_image: Image ID to attach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.attach_cover_to_video_with_http_info(video_id, id_image, **kwargs)
else:
(data) = self.attach_cover_to_video_with_http_info(video_id, id_image, **kwargs)
return data
def attach_cover_to_video_with_http_info(self, video_id, id_image, **kwargs):
"""
Attach cover to video (the image need to be attached to the product)
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_cover_to_video_with_http_info(video_id, id_image, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int id_image: Image ID to attach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'id_image']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method attach_cover_to_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `attach_cover_to_video`")
# verify the required parameter 'id_image' is set
if ('id_image' not in params) or (params['id_image'] is None):
raise ValueError("Missing the required parameter `id_image` when calling `attach_cover_to_video`")
collection_formats = {}
resource_path = '/videos/{video_id}/cover'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'id_image' in params:
form_params.append(('id_image', params['id_image']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def attach_features_to_video(self, video_id, features, **kwargs):
"""
Attach feature to video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_features_to_video(video_id, features, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: (required)
:param str features: To attach existing FeatureValue to Product: ``` [{ \"id_feature\":3, \"id_feature_value\":5 }] ``` To create a custom FeatureValue: ``` [{ \"id_feature\":3, \"custom_value\":[{ \"lang\": 1, \"value\": \"string\" }] }] ``` (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.attach_features_to_video_with_http_info(video_id, features, **kwargs)
else:
(data) = self.attach_features_to_video_with_http_info(video_id, features, **kwargs)
return data
def attach_features_to_video_with_http_info(self, video_id, features, **kwargs):
"""
Attach feature to video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_features_to_video_with_http_info(video_id, features, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: (required)
:param str features: To attach existing FeatureValue to Product: ``` [{ \"id_feature\":3, \"id_feature_value\":5 }] ``` To create a custom FeatureValue: ``` [{ \"id_feature\":3, \"custom_value\":[{ \"lang\": 1, \"value\": \"string\" }] }] ``` (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'features']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method attach_features_to_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `attach_features_to_video`")
# verify the required parameter 'features' is set
if ('features' not in params) or (params['features'] is None):
raise ValueError("Missing the required parameter `features` when calling `attach_features_to_video`")
collection_formats = {}
resource_path = '/videos/{video_id}/features'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'features' in params:
form_params.append(('features', params['features']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def attach_video_to_product(self, product_id, video_id, **kwargs):
"""
Attach video to product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_video_to_product(product_id, video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int video_id: Video ID to attach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.attach_video_to_product_with_http_info(product_id, video_id, **kwargs)
else:
(data) = self.attach_video_to_product_with_http_info(product_id, video_id, **kwargs)
return data
def attach_video_to_product_with_http_info(self, product_id, video_id, **kwargs):
"""
Attach video to product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_video_to_product_with_http_info(product_id, video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int video_id: Video ID to attach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['product_id', 'video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method attach_video_to_product" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'product_id' is set
if ('product_id' not in params) or (params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `attach_video_to_product`")
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `attach_video_to_product`")
collection_formats = {}
resource_path = '/products/{product_id}/videos'.replace('{format}', 'json')
path_params = {}
if 'product_id' in params:
path_params['product_id'] = params['product_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'video_id' in params:
form_params.append(('video_id', params['video_id']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_video(self, body, **kwargs):
"""
Create new video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_video(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Video body: (required)
:return: Video
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_video_with_http_info(body, **kwargs)
else:
(data) = self.create_video_with_http_info(body, **kwargs)
return data
def create_video_with_http_info(self, body, **kwargs):
"""
Create new video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_video_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Video body: (required)
:return: Video
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_video`")
collection_formats = {}
resource_path = '/videos'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
self.api_client.set_default_header('Content-Type', 'application/json')
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Video',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_video_subtitle(self, video_id, body, **kwargs):
"""
Create new Video Subtitle
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_video_subtitle(video_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to attach the created Subtitle (required)
:param CreateVideoSubtitleRequest body: Subtitle settings (required)
:return: Subtitle
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_video_subtitle_with_http_info(video_id, body, **kwargs)
else:
(data) = self.create_video_subtitle_with_http_info(video_id, body, **kwargs)
return data
def create_video_subtitle_with_http_info(self, video_id, body, **kwargs):
"""
Create new Video Subtitle
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_video_subtitle_with_http_info(video_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to attach the created Subtitle (required)
:param CreateVideoSubtitleRequest body: Subtitle settings (required)
:return: Subtitle
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_video_subtitle" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `create_video_subtitle`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_video_subtitle`")
collection_formats = {}
resource_path = '/videos/{video_id}/subtitle'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
self.api_client.set_default_header('Content-Type', 'application/json')
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Subtitle',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_video(self, video_id, **kwargs):
"""
Delete video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_video(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_video_with_http_info(video_id, **kwargs)
else:
(data) = self.delete_video_with_http_info(video_id, **kwargs)
return data
def delete_video_with_http_info(self, video_id, **kwargs):
"""
Delete video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_video_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `delete_video`")
collection_formats = {}
resource_path = '/videos/{video_id}'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_customer_has_access_to_video(self, customer_id, video_id, **kwargs):
"""
Get customer access to video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_customer_has_access_to_video(customer_id, video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int customer_id: Customer ID to fetch (required)
:param int video_id: Video ID to fetch (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_customer_has_access_to_video_with_http_info(customer_id, video_id, **kwargs)
else:
(data) = self.get_customer_has_access_to_video_with_http_info(customer_id, video_id, **kwargs)
return data
def get_customer_has_access_to_video_with_http_info(self, customer_id, video_id, **kwargs):
"""
Get customer access to video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_customer_has_access_to_video_with_http_info(customer_id, video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int customer_id: Customer ID to fetch (required)
:param int video_id: Video ID to fetch (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['customer_id', 'video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_customer_has_access_to_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params) or (params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_has_access_to_video`")
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_customer_has_access_to_video`")
collection_formats = {}
resource_path = '/customers/{customer_id}/videos/{video_id}/has-access'.replace('{format}', 'json')
path_params = {}
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id']
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_customer_has_access_to_videos(self, customer_id, body, **kwargs):
"""
Get customer access to Videos
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_customer_has_access_to_videos(customer_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int customer_id: Customer ID to fetch (required)
:param VideoIDList body: List of Video IDs separated by comma, eg. '42,21,84' (required)
:return: list[VideoAccessInfo]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_customer_has_access_to_videos_with_http_info(customer_id, body, **kwargs)
else:
(data) = self.get_customer_has_access_to_videos_with_http_info(customer_id, body, **kwargs)
return data
def get_customer_has_access_to_videos_with_http_info(self, customer_id, body, **kwargs):
"""
Get customer access to Videos
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_customer_has_access_to_videos_with_http_info(customer_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int customer_id: Customer ID to fetch (required)
:param VideoIDList body: List of Video IDs separated by comma, eg. '42,21,84' (required)
:return: list[VideoAccessInfo]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['customer_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_customer_has_access_to_videos" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params) or (params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_has_access_to_videos`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `get_customer_has_access_to_videos`")
collection_formats = {}
resource_path = '/customers/{customer_id}/videos/has-access'.replace('{format}', 'json')
path_params = {}
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
self.api_client.set_default_header('Content-Type', 'application/json')
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[VideoAccessInfo]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_disabled_subscriptions(self, video_id, **kwargs):
"""
Get disabled subscriptions list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_disabled_subscriptions(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: Subscriptions
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_disabled_subscriptions_with_http_info(video_id, **kwargs)
else:
(data) = self.get_disabled_subscriptions_with_http_info(video_id, **kwargs)
return data
def get_disabled_subscriptions_with_http_info(self, video_id, **kwargs):
"""
Get disabled subscriptions list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_disabled_subscriptions_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: Subscriptions
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_disabled_subscriptions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_disabled_subscriptions`")
collection_formats = {}
resource_path = '/videos/{video_id}/disabled-subscriptions'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Subscriptions',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video(self, video_id, **kwargs):
"""
Get video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: Video
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_with_http_info(video_id, **kwargs)
return data
def get_video_with_http_info(self, video_id, **kwargs):
"""
Get video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: Video
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video`")
collection_formats = {}
resource_path = '/videos/{video_id}'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Video',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_access(self, video_id, customer_id, **kwargs):
"""
Get video access
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_access(video_id, customer_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int customer_id: Customer ID to fetch (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_access_with_http_info(video_id, customer_id, **kwargs)
else:
(data) = self.get_video_access_with_http_info(video_id, customer_id, **kwargs)
return data
def get_video_access_with_http_info(self, video_id, customer_id, **kwargs):
"""
Get video access
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_access_with_http_info(video_id, customer_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int customer_id: Customer ID to fetch (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'customer_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_access" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_access`")
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params) or (params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `get_video_access`")
collection_formats = {}
resource_path = '/videos/{video_id}/customers/{customer_id}/access'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_bonus(self, video_id, **kwargs):
"""
Get bonus list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_bonus(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: list[Bonus]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_bonus_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_bonus_with_http_info(video_id, **kwargs)
return data
def get_video_bonus_with_http_info(self, video_id, **kwargs):
"""
Get bonus list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_bonus_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: list[Bonus]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_bonus" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_bonus`")
collection_formats = {}
resource_path = '/videos/{video_id}/bonus'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Bonus]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_cover(self, video_id, **kwargs):
"""
Get video cover
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_cover(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_cover_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_cover_with_http_info(video_id, **kwargs)
return data
def get_video_cover_with_http_info(self, video_id, **kwargs):
"""
Get video cover
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_cover_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_cover" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_cover`")
collection_formats = {}
resource_path = '/videos/{video_id}/cover'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Image',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_download_url(self, video_id, **kwargs):
"""
Get video download informations
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_download_url(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int customer_id: Customer ID to fetch
:param str ip_address: IP address
:return: DownloadInformations
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_download_url_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_download_url_with_http_info(video_id, **kwargs)
return data
def get_video_download_url_with_http_info(self, video_id, **kwargs):
"""
Get video download informations
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_download_url_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int customer_id: Customer ID to fetch
:param str ip_address: IP address
:return: DownloadInformations
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'customer_id', 'ip_address']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_download_url" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_download_url`")
collection_formats = {}
resource_path = '/videos/{video_id}/download-url'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
if 'customer_id' in params:
query_params['customer_id'] = params['customer_id']
if 'ip_address' in params:
query_params['ip_address'] = params['ip_address']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DownloadInformations',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_features(self, video_id, **kwargs):
"""
Get video features
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_features(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: Features
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_features_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_features_with_http_info(video_id, **kwargs)
return data
def get_video_features_with_http_info(self, video_id, **kwargs):
"""
Get video features
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_features_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: Features
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_features" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_features`")
collection_formats = {}
resource_path = '/videos/{video_id}/features'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Features',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_geolocation(self, video_id, **kwargs):
"""
Get geoloc list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_geolocation(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: Geolocs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_geolocation_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_geolocation_with_http_info(video_id, **kwargs)
return data
def get_video_geolocation_with_http_info(self, video_id, **kwargs):
"""
Get geoloc list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_geolocation_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: Geolocs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_geolocation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_geolocation`")
collection_formats = {}
resource_path = '/videos/{video_id}/geolocation'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Geolocs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_geolocation_by_ip(self, video_id, ip_address, **kwargs):
"""
Check access to a video by geolocation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_geolocation_by_ip(video_id, ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param str ip_address: IP address (required)
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_geolocation_by_ip_with_http_info(video_id, ip_address, **kwargs)
else:
(data) = self.get_video_geolocation_by_ip_with_http_info(video_id, ip_address, **kwargs)
return data
def get_video_geolocation_by_ip_with_http_info(self, video_id, ip_address, **kwargs):
"""
Check access to a video by geolocation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_geolocation_by_ip_with_http_info(video_id, ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param str ip_address: IP address (required)
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'ip_address', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_geolocation_by_ip" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_geolocation_by_ip`")
# verify the required parameter 'ip_address' is set
if ('ip_address' not in params) or (params['ip_address'] is None):
raise ValueError("Missing the required parameter `ip_address` when calling `get_video_geolocation_by_ip`")
collection_formats = {}
resource_path = '/videos/{video_id}/geolocations/{ip_address}'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
if 'ip_address' in params:
path_params['ip_address'] = params['ip_address']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_player(self, video_id, **kwargs):
"""
Get video player
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_player(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int customer_id: Customer ID to fetch
:param int country_id: Country ID to use in video analytics
:param str ip_address: IP address
:return: PlayerConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_player_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_player_with_http_info(video_id, **kwargs)
return data
def get_video_player_with_http_info(self, video_id, **kwargs):
"""
Get video player
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_player_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int customer_id: Customer ID to fetch
:param int country_id: Country ID to use in video analytics
:param str ip_address: IP address
:return: PlayerConfiguration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'customer_id', 'country_id', 'ip_address']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_player" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_player`")
collection_formats = {}
resource_path = '/videos/{video_id}/player'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
if 'customer_id' in params:
query_params['customer_id'] = params['customer_id']
if 'country_id' in params:
query_params['country_id'] = params['country_id']
if 'ip_address' in params:
query_params['ip_address'] = params['ip_address']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PlayerConfiguration',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_subtitles(self, video_id, **kwargs):
"""
Get subtitles of a video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_subtitles(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: VideoSubtitlesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_subtitles_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_subtitles_with_http_info(video_id, **kwargs)
return data
def get_video_subtitles_with_http_info(self, video_id, **kwargs):
"""
Get subtitles of a video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_subtitles_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int page:
:param int per_page:
:return: VideoSubtitlesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_subtitles" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_subtitles`")
collection_formats = {}
resource_path = '/videos/{video_id}/subtitles'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VideoSubtitlesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_views(self, video_id, **kwargs):
"""
Get video views
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_views(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: VideoViews
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_views_with_http_info(video_id, **kwargs)
else:
(data) = self.get_video_views_with_http_info(video_id, **kwargs)
return data
def get_video_views_with_http_info(self, video_id, **kwargs):
"""
Get video views
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_views_with_http_info(video_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:return: VideoViews
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_views" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_views`")
collection_formats = {}
resource_path = '/videos/{video_id}/views'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VideoViews',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_videos(self, **kwargs):
"""
Get video list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_videos(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page:
:param int per_page:
:param str features: ``` features[*][value]=string&features[*][operator]=strict&features[1][value]=string&features[1][operator]=strict _______________ { \"*\": { \"value\": \"string\", \"operator\": \"strict\" }, \"1\": { \"value\": \"string\", \"operator\": \"contains\" } } ``` Operator can be: strict, contains, between, in, gt (greater than), lt (lower than). To search on all features, you can pass * as featureId.
:param str filters: ``` name[value]=string&name[operator]=strict&duration[value]=string&duration[operator]=gt _______________ { \"name\": { \"value\": \"string\", \"operator\": \"strict\" }, \"duration\": { \"value\": \"string\", \"operator\": \"gt\" } } ``` Operator can be: strict, contains, between, in, gt (greater than), lt (lower than).
:param str ip: Filter by user IP
:return: Videos2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_videos_with_http_info(**kwargs)
else:
(data) = self.get_videos_with_http_info(**kwargs)
return data
def get_videos_with_http_info(self, **kwargs):
"""
Get video list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_videos_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page:
:param int per_page:
:param str features: ``` features[*][value]=string&features[*][operator]=strict&features[1][value]=string&features[1][operator]=strict _______________ { \"*\": { \"value\": \"string\", \"operator\": \"strict\" }, \"1\": { \"value\": \"string\", \"operator\": \"contains\" } } ``` Operator can be: strict, contains, between, in, gt (greater than), lt (lower than). To search on all features, you can pass * as featureId.
:param str filters: ``` name[value]=string&name[operator]=strict&duration[value]=string&duration[operator]=gt _______________ { \"name\": { \"value\": \"string\", \"operator\": \"strict\" }, \"duration\": { \"value\": \"string\", \"operator\": \"gt\" } } ``` Operator can be: strict, contains, between, in, gt (greater than), lt (lower than).
:param str ip: Filter by user IP
:return: Videos2
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'per_page', 'features', 'filters', 'ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_videos" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/videos'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
if 'features' in params:
query_params['features'] = params['features']
if 'filters' in params:
query_params['filters'] = params['filters']
if 'ip' in params:
query_params['ip'] = params['ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Videos2',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_videos_from_product(self, product_id, **kwargs):
"""
Get videos attached to product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_videos_from_product(product_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int page:
:param int per_page:
:param str filters: ``` name[value]=string&name[operator]=strict&duration[value]=string&duration[operator]=gt _______________ { \"name\": { \"value\": \"string\", \"operator\": \"strict\" }, \"duration\": { \"value\": \"string\", \"operator\": \"gt\" } } ``` Operator can be: strict, contains, between, in, gt (greater than), lt (lower than).
:param str ip: Filter by user IP
:param str sort_by: Sort by this attribute (default is ID)
:param str sort_direction: Sorting direction (asc by default)
:return: Videos2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_videos_from_product_with_http_info(product_id, **kwargs)
else:
(data) = self.get_videos_from_product_with_http_info(product_id, **kwargs)
return data
def get_videos_from_product_with_http_info(self, product_id, **kwargs):
"""
Get videos attached to product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_videos_from_product_with_http_info(product_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int page:
:param int per_page:
:param str filters: ``` name[value]=string&name[operator]=strict&duration[value]=string&duration[operator]=gt _______________ { \"name\": { \"value\": \"string\", \"operator\": \"strict\" }, \"duration\": { \"value\": \"string\", \"operator\": \"gt\" } } ``` Operator can be: strict, contains, between, in, gt (greater than), lt (lower than).
:param str ip: Filter by user IP
:param str sort_by: Sort by this attribute (default is ID)
:param str sort_direction: Sorting direction (asc by default)
:return: Videos2
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['product_id', 'page', 'per_page', 'filters', 'ip', 'sort_by', 'sort_direction']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_videos_from_product" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'product_id' is set
if ('product_id' not in params) or (params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `get_videos_from_product`")
collection_formats = {}
resource_path = '/products/{product_id}/videos'.replace('{format}', 'json')
path_params = {}
if 'product_id' in params:
path_params['product_id'] = params['product_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
if 'filters' in params:
query_params['filters'] = params['filters']
if 'ip' in params:
query_params['ip'] = params['ip']
if 'sort_by' in params:
query_params['sort_by'] = params['sort_by']
if 'sort_direction' in params:
query_params['sort_direction'] = params['sort_direction']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Videos2',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_video_geolocation(self, video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs):
"""
Handle geolocation for videos by countries
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_video_geolocation(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int enabled: Enabled (required)
:param str behavior_detected_countries: Behavior for detected countries (required)
:param str behavior_non_detected_countries: Behavior for non-detected countries (required)
:param str countries: IDs of the non-detected countries separated by comma
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.set_video_geolocation_with_http_info(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs)
else:
(data) = self.set_video_geolocation_with_http_info(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs)
return data
def set_video_geolocation_with_http_info(self, video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs):
"""
Handle geolocation for videos by countries
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_video_geolocation_with_http_info(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int enabled: Enabled (required)
:param str behavior_detected_countries: Behavior for detected countries (required)
:param str behavior_non_detected_countries: Behavior for non-detected countries (required)
:param str countries: IDs of the non-detected countries separated by comma
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'enabled', 'behavior_detected_countries', 'behavior_non_detected_countries', 'countries']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_video_geolocation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `set_video_geolocation`")
# verify the required parameter 'enabled' is set
if ('enabled' not in params) or (params['enabled'] is None):
raise ValueError("Missing the required parameter `enabled` when calling `set_video_geolocation`")
# verify the required parameter 'behavior_detected_countries' is set
if ('behavior_detected_countries' not in params) or (params['behavior_detected_countries'] is None):
raise ValueError("Missing the required parameter `behavior_detected_countries` when calling `set_video_geolocation`")
# verify the required parameter 'behavior_non_detected_countries' is set
if ('behavior_non_detected_countries' not in params) or (params['behavior_non_detected_countries'] is None):
raise ValueError("Missing the required parameter `behavior_non_detected_countries` when calling `set_video_geolocation`")
collection_formats = {}
resource_path = '/videos/{video_id}/geolocations'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'countries' in params:
form_params.append(('countries', params['countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'enabled' in params:
form_params.append(('enabled', params['enabled']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'behavior_detected_countries' in params:
form_params.append(('behavior_detected_countries', params['behavior_detected_countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'behavior_non_detected_countries' in params:
form_params.append(('behavior_non_detected_countries', params['behavior_non_detected_countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_video(self, video_id, body, **kwargs):
"""
Update video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_video(video_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to update (required)
:param Video body: (required)
:return: Video
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_video_with_http_info(video_id, body, **kwargs)
else:
(data) = self.update_video_with_http_info(video_id, body, **kwargs)
return data
def update_video_with_http_info(self, video_id, body, **kwargs):
"""
Update video
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_video_with_http_info(video_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to update (required)
:param Video body: (required)
:return: Video
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_video" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `update_video`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_video`")
collection_formats = {}
resource_path = '/videos/{video_id}'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
self.api_client.set_default_header('Content-Type', 'application/json')
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Video',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 44.607683
| 489
| 0.568888
|
9d7f76dcca0831dac26a2110d75072cc9f9470df
| 2,414
|
py
|
Python
|
validation/certs/scripts/chains/issuer-no-match-subject/generate.py
|
Werxis/usable-cert-validation
|
d166af6accbd1054f1ecd9db7a70600ca9c1bf8e
|
[
"MIT"
] | 13
|
2019-07-11T11:41:14.000Z
|
2021-12-18T23:50:56.000Z
|
validation/certs/scripts/chains/issuer-no-match-subject/generate.py
|
Werxis/usable-cert-validation
|
d166af6accbd1054f1ecd9db7a70600ca9c1bf8e
|
[
"MIT"
] | 115
|
2019-07-11T13:46:38.000Z
|
2022-03-08T18:28:31.000Z
|
validation/certs/scripts/chains/issuer-no-match-subject/generate.py
|
Werxis/usable-cert-validation
|
d166af6accbd1054f1ecd9db7a70600ca9c1bf8e
|
[
"MIT"
] | 9
|
2020-05-05T17:11:17.000Z
|
2021-12-19T15:04:48.000Z
|
import asn1tools
from utils import x509
from utils import io
from utils import misc
from utils import crypto
VALID_ASN_FILE = 'valid.asn'
EXPORTED_KEY_NAME = 'key.pem'
EXPORTED_CHAIN_NAME = 'chain.pem'
def main():
args = misc.parse_arguments()
# Compile the ASN.1 specification
asn = asn1tools.compile_files(args.asn_dir + VALID_ASN_FILE, 'der')
# Import the root private key and cert
root_privkey = io.import_rsa_private_key(args.root_key_file)
root_pubkey = root_privkey.publickey()
# Generate an RSA public key pair for intermediate CA
(sub_privkey, sub_pubkey) = crypto.new_rsa_keypair(2048)
# Encode intermediate tbsCertificate
sub_tbs = x509.default_tbs(issuer_public_key=root_pubkey,
subject_public_key=sub_pubkey,
issuer_cn='root',
subject_cn='intermediate',
is_ca=True,
additional_extensions=[],
asn=asn)
sub_tbs_der = asn.encode('TBSCertificate', sub_tbs)
# Sign the intermediate tbsCertificate
sub_sig = crypto.rsa_sha256_sign(root_privkey, sub_tbs_der)
# Encode the intermediate CA Certificate
sub_cert_der = x509.certificate(sub_tbs, sub_sig, asn)
# Generate an RSA public key pair for end entity certificate
(end_privkey, end_pubkey) = crypto.new_rsa_keypair(2048)
# Encode end entity tbsCertificate
end_tbs = x509.default_tbs(issuer_public_key=sub_pubkey,
subject_public_key=end_pubkey,
issuer_cn='unknown intermediate',
subject_cn='localhost',
is_ca=False,
additional_extensions=[],
asn=asn)
end_tbs_der = asn.encode('TBSCertificate', end_tbs)
# Sign the end entity tbsCertificate
end_sig = crypto.rsa_sha256_sign(sub_privkey, end_tbs_der)
# Encode the end entity Certificate
end_cert_der = x509.certificate(end_tbs, end_sig, asn)
# Write the chain into file
io.export_chain([end_cert_der, sub_cert_der],
args.build_dir + EXPORTED_CHAIN_NAME)
# Export the private key
io.export_private_key(end_privkey, args.build_dir + EXPORTED_KEY_NAME)
if __name__ == "__main__":
main()
| 34.485714
| 74
| 0.635046
|
b9589eaf4d72108188b155a7257a18baeb08c97f
| 863
|
py
|
Python
|
analysis/out_periods_elo_test.py
|
damienld/Tennis-predict
|
be5658d70bd4751bf9f11aa8ef51f1d77d77120a
|
[
"Apache-2.0"
] | null | null | null |
analysis/out_periods_elo_test.py
|
damienld/Tennis-predict
|
be5658d70bd4751bf9f11aa8ef51f1d77d77120a
|
[
"Apache-2.0"
] | null | null | null |
analysis/out_periods_elo_test.py
|
damienld/Tennis-predict
|
be5658d70bd4751bf9f11aa8ef51f1d77d77120a
|
[
"Apache-2.0"
] | null | null | null |
import out_periods_elo
import pandas as pd
from datetime import datetime
from brier_score import calc_brier
class Test_Analyse_out_periods_predictions:
def test_analyse_out_periods_predictions_1(self):
dfWithElos = pd.read_csv("./results/dfWithElos.csv", parse_dates=["Date"])
# dont keep year 1 as it served to make elo stable rankings
dfWithElos = dfWithElos[dfWithElos["Date"] > datetime(2013, 12, 10)]
# dont predict/test at lower levels ( ATP level only)
dfWithElos = dfWithElos[(dfWithElos["TrnRk"] >= 2) & (dfWithElos["TrnRk"] <= 5)]
dfWithElos = calc_brier(dfWithElos, "IndexP", "ProbaElo")
dfWithElos["Proba_odds"] = 1 / dfWithElos["Odds1"]
dfWithElos = calc_brier(dfWithElos, "IndexP", "Proba_odds", "brier_odds")
out_periods_elo.analyse_out_periods_predictions(dfWithElos)
| 45.421053
| 88
| 0.71263
|
9d09f30f85bfddc5e6dd1dbefcba7364494feb5a
| 38
|
py
|
Python
|
src/apps/devices/cubelib/__init__.py
|
ajintom/music_sync
|
0d7bc302502d28e4be4f0a0be1fc9bafb706f651
|
[
"MIT"
] | null | null | null |
src/apps/devices/cubelib/__init__.py
|
ajintom/music_sync
|
0d7bc302502d28e4be4f0a0be1fc9bafb706f651
|
[
"MIT"
] | null | null | null |
src/apps/devices/cubelib/__init__.py
|
ajintom/music_sync
|
0d7bc302502d28e4be4f0a0be1fc9bafb706f651
|
[
"MIT"
] | null | null | null |
__all__ = ["emulator", "mywireframe"]
| 19
| 37
| 0.684211
|
ac61daa3c54495624b8682899688bd4fd36deaca
| 13,110
|
py
|
Python
|
api/config/h5Template/tanmuContent.py
|
jimbunny/wedding-invitation
|
a3648454e1105d9362f95d9f6e69055a7522e15b
|
[
"MIT"
] | null | null | null |
api/config/h5Template/tanmuContent.py
|
jimbunny/wedding-invitation
|
a3648454e1105d9362f95d9f6e69055a7522e15b
|
[
"MIT"
] | null | null | null |
api/config/h5Template/tanmuContent.py
|
jimbunny/wedding-invitation
|
a3648454e1105d9362f95d9f6e69055a7522e15b
|
[
"MIT"
] | null | null | null |
tanmuContent = '''
<style>
.barrage-input-tip {
z-index: 1999;
position: absolute;
left: 10px;
width: 179.883px;
height: 35.7422px;
line-height: 35.7422px;
border-radius: 35.7422px;
box-sizing: border-box;
color: rgb(255, 255, 255);
margin-left: 45.7031px;
background-color: {{ data.tanmuBtnColor }};
opacity: 0.65;
pointer-events: initial;
padding: 0px 16.9922px;
font-size: 14.0625px;
display: block;
}
.data-box{display:none}
.barrage_box_top{width:100%;height:160px;margin:0px auto;}
.barrage_box_top .barrage-row{margin-bottom:20px;}
.barrage_box_top .barrage-item{
background-color: {{ data.tanmuColor }};margin-bottom:10px; white-space:nowrap;color:{{ data.fontColor }}; font-size: 12px; transform: scale(1); opacity: 1; transition: all 0.65s ease-in 0s;padding: 6px 8px 0px 8px; height: 32px;display: inline-block;border-radius: 25px;
}
</style>
<div class="maka-barrage-dom" style="top: 0px; left: 0px; background-color: transparent; z-index: 1000;">
<div class="barrage-content" style="position: fixed; box-sizing: border-box; padding: 11.7188px; right: 0px; bottom: 0px; z-index: 1000; width: 100%; pointer-events: none; background: linear-gradient(rgba(0, 0, 0, 0) 0%, rgba(0, 0, 0, 0.2) 100%);">
<div class="barrage-words row" style="margin-top: 11.7188px; height: 212.695px;"><div class="barrage-word" style="min-height: 32.2266px; line-height: 32.2266px; font-size: 12.8906px; padding: 4.10156px; border-radius: 22.8516px; bottom: 94.3359px; max-width: 310.547px; background-color: rgba(47, 50, 52, 0.6); transform: scale(1); opacity: 0; transition: bottom 2s ease-out 0s, opacity 0.75s linear 0.75s;">
</div>
</div>
<div class="barrage-bottom row" id="barrageBtn" style="padding-bottom: env(safe-area-inset-bottom); margin-top: 14.0625px; position: fixed; left: 11.7188px; bottom: 47px; pointer-events: initial;">
<div class="barrage-input-tip" data-toggle="modal" data-target="#myModal" style="background:{{ data.tanmuColor }}; width: 179.883px; height: 35.7422px; line-height: 35.7422px; border-radius: 35.7422px; box-sizing: border-box; color: rgb(255, 255, 255); margin-left: 45.7031px; background-color: rgb(47, 50, 52); opacity: 0.65; pointer-events: initial; padding: 0px 16.9922px; font-size: 14.0625px;">ฝากคำอวยพร...</div>
</div>
<div class="backdrop" style="position: fixed; width: 100%; height: 100%; background-color: rgba(0, 0, 0, 0); z-index: 999; display: none; top: 0px; left: 0px; pointer-events: initial;"></div>
<div class="barrage-btn tanBtn" style="padding-bottom: env(safe-area-inset-bottom); margin-top: 14.0625px; position: fixed; left: 11.7188px; bottom: 11.7188px; pointer-events: initial;">
<div class="correct-icon" id="tanmuOpen" style="background: url("https://i.ibb.co/1QmGHWV/danmu-open1.png") 0% 0% / contain no-repeat; border-radius: 100%; width: 35.7422px; height: 35.7422px;"></div>
<div class="close-icon" id="tanmuClose" style="background: url("https://i.ibb.co/QNwcxLx/danmu-close1.png") 0% 0% / contain no-repeat; border-radius: 100%; width: 35.7422px; height: 35.7422px; display: none;">
<b style="position: absolute; color: rgb(255, 255, 255); top: 2.92969px; left: 19.9219px; font-weight: 600; font-size: 8.78906px; transform: scale(0.8);">{{ data.greetings | length }}</b>
</div>
</div>
<div id="j-barrage-top" class="barrage_box barrage_box_top" style="position: fixed; box-sizing: border-box; padding: 0px; right: 0px; bottom: 0px; z-index: 1000; width: 100%; pointer-events: none;"></div>
</div>
<div class="barrage-input-wrap" id="modalShow" style="display: none; position: fixed; left: 0px; bottom: 0px;height: 0px; width: 100%; background-color:transparent; padding: 9.375px 11.7188px; box-sizing: border-box; z-index: 2000; pointer-events: initial;">
<!-- 模态框(Modal) -->
<div class="modal fade" id="myModal" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
<div style="width:100%;" class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" style="cursor: pointer;" data-dismiss="modal" aria-hidden="true">×</button>
<h4 class="modal-title" id="myModalLabel">อวยพร</h4>
</div>
<div class="modal-body">
<form action="" id="form" class="form-horizontal">
<div class="form-group">
<div class="col-md-24" style="padding-left:10px;padding-right: 10px;">
<input type="text" class="form-control" style="width:100% !important;" name="name" placeholder="ชื่อ-นามสกุล" />
</div>
</div>
<div class="form-group">
<div class="col-md-24" style="padding-left:10px;padding-right: 10px;">
<input type="text" class="form-control" style="width:100% !important;" name="greetings" placeholder="คำอวยพร" />
</div>
</div>
<div class="form-group">
<div class="col-md-24 col-md-offset-2" style="padding-left:10px;padding-right: 10px;">
<button id="subBtn" type="submit" class="btn btn-primary" style="width:100%;">ส่ง</button>
</div>
</div>
</form>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
</div>
<!-- /.modal -->
</div>
</div>
<div class="alert alert-danger hide">ส่งคำอวยพรล้มเหลว!</div>
<div class="alert alert-success hide">ส่งคำอวยพรสำเร็จ!</div>
<script src="/static/js/bootstrap.min.js"></script>
<script src="/static/js/bootstrapValidator.min.js"></script>
<script type="text/javascript" src="/static/js/index.js"></script>
<style type="text/css">
*{
padding:0;
margin:0;
}
a{
text-decoration: none;
}
.form-control{
display: inline-block;
width: auto;
padding: 6px 12px;
font-size: 14px;
line-height: 1.42857143;
color: #555;
background-color: #fff;
background-image: none;
border: 1px solid #ccc;
border-radius: 4px;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075);
-webkit-transition: border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;
-o-transition: border-color ease-in-out .15s,box-shadow ease-in-out .15s;
transition: border-color ease-in-out .15s,box-shadow ease-in-out .15s;
}
.btn{
display: inline-block;
padding: 6px 12px;
margin-bottom: 0;
font-size: 14px;
font-weight: 400;
line-height: 1.42857143;
text-align: center;
white-space: nowrap;
vertical-align: middle;
-ms-touch-action: manipulation;
touch-action: manipulation;
cursor: pointer;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
background-image: none;
border: 1px solid transparent;
border-radius: 4px;
}
.btn-primary {
color: #fff;
background-color: #337ab7;
border-color: #2e6da4;
}
/*组件主样式*/
.overflow-text{
display: block;
white-space:nowrap;
overflow:hidden;
text-overflow:ellipsis;
opacity:0;
clear: both;
padding:0 10px;
border-radius: 10px;
box-sizing: border-box;
max-width: 100%;
color:#fff;
animation:colorchange 3s infinite alternate;
-webkit-animation:colorchange 3s infinite alternate; /*Safari and Chrome*/
}
@keyframes colorchange{
0%{
color:red;
}
50%{
color:green;
}
100%{
color:#6993f9;
}
}
/*组件主样式*/
.alert{
position: fixed;
width: 50%;
margin-left: 20%;
z-index: 2000;
}
</style>
<script type="text/javascript">
var Obj;
$.ajax({
//几个参数需要注意一下
type: "GET",//方法类型
dataType: "json",//预期服务器返回的数据类型
url: "/api/v1/h5/greetings/"+{{ data.id }},//url
success: function (result) {
console.log(result);//打印服务端返回的数据(调试用)
if (result.code == 0) {
// 数据初始化
Obj = $('#j-barrage-top').barrage({
data : result.data, //数据列表
row : 1, //显示行数
time : 2500, //间隔时间
gap : 100, //每一个的间隙
position : 'fixed', //绝对定位
direction : 'bottom left', //方向
ismoseoverclose : true, //悬浮是否停止
height : 30, //设置单个div的高度
})
Obj.start();
} else {
alert("tanmu Error");
};
},
error : function() {
alert("tanmu Error");
}
});
</script>
<script>
$("#barrageBtn").click(function() {
var modalShowDiv = document.getElementById('modalShow');
modalShowDiv.style.display = 'block';
})
var kg = true; //给一个开关并赋值,用来进行后面的 if else 条件判断
$(".tanBtn").click(function() { //给button按钮一个点击事件
if (kg) { //进行判断
var tanmuOpenDiv= document.getElementById('tanmuOpen');
tanmuOpenDiv.style.display = 'block';
var tanmuCloseDiv= document.getElementById('tanmuClose');
tanmuCloseDiv.style.display='none';
Obj.start();
var barrageBtnDiv= document.getElementById('barrageBtn');
barrageBtnDiv.style.display = 'block';
} else {
var tanmuOpenDiv= document.getElementById('tanmuOpen');
tanmuOpenDiv.style.display = 'none';
var tanmuCloseDiv= document.getElementById('tanmuClose');
tanmuCloseDiv.style.display='block';
Obj.close();
var barrageBtnDiv= document.getElementById('barrageBtn');
barrageBtnDiv.style.display = 'none';
}
kg = !kg; //这里的感叹号是取反的意思,如果你没有写,当你点击切换回第一张图片时,就会不生效
})
$('#myModal').on('hidden.bs.modal', function (e) {
// 清空表单和验证
// Reset a form
document.getElementById("form").reset();
$('#form').bootstrapValidator("resetForm",true);
})
$('form').bootstrapValidator({
//默认提示
message: 'This value is not valid',
// 表单框里右侧的icon
feedbackIcons: {
valid: 'glyphicon glyphicon-ok',
invalid: 'glyphicon glyphicon-remove',
validating: 'glyphicon glyphicon-refresh'
},
excluded: [':disabled'],
submitHandler: function (validator, form, submitButton) {
// 表单提交成功时会调用此方法
// validator: 表单验证实例对象
// form jq对象 指定表单对象
// submitButton jq对象 指定提交按钮的对象
},
fields: {
name: {
message: 'ปรดกรอกชื่อ, ความยาวไม่เกิน 20 ตัวอักษร',
validators: {
notEmpty: { //不能为空
message: 'โปรดกรอกชื่อ'
},
stringLength: {
max: 20,
message: 'ความยาวไม่เกิน 20 ตัวอักษร'
},
}
},
greetings: {
message: 'โปรดกรอกคำอวยพร, ความยาวไม่เกิน 40 ตัวอักษร',
validators: {
notEmpty: {
message: 'โปรดกรอกคำอวยพร'
},
stringLength: {
max: 40,
message: 'ความยาวไม่เกิน 40 ตัวอักษร'
},
}
},
}
});
var that = this
$("#subBtn").click(function () { //非submit按钮点击后进行验证,如果是submit则无需此句直接验证
$("form").bootstrapValidator('validate'); //提交验证
if ($("form").data('bootstrapValidator').isValid()) { //获取验证结果,如果成功,执行下面代码
$.ajax({
//几个参数需要注意一下
type: "POST",//方法类型
dataType: "json",//预期服务器返回的数据类型
url: "/api/v1/h5/greetings/"+{{ data.id }},//url
data: $('#form').serialize(),
success: function (result) {
console.log(result);//打印服务端返回的数据(调试用)
if (result.code == 0) {
$("#myModal").modal('hide');
//添加评论
//此格式与dataa.js的数据格式必须一致
var addVal = {
text : result.data
}
//添加进数组
Obj.data.unshift(addVal);
$(".alert-success").addClass("show");
window.setTimeout(function(){
$(".alert-success").removeClass("show");
},1000);//显示的时间
} else {
$(".alert-danger").addClass("show");
window.setTimeout(function(){
$(".alert-danger").removeClass("show");
},1000);//显示的时间
};
},
error : function() {
{#alert("Error!");#}
$(".alert-danger").addClass("show");
window.setTimeout(function(){
$(".alert-danger").removeClass("show");
},1000);//显示的时间
}
});
}
});
</script>
'''
| 39.017857
| 427
| 0.564607
|
8919c9ec891776336e37810e06435914b153bbfc
| 2,669
|
py
|
Python
|
train.py
|
Derolik666/im2latex
|
458a33f13e876389c54136c5103fe3718d2f91d1
|
[
"Apache-2.0"
] | 2
|
2020-09-14T19:01:16.000Z
|
2020-09-14T19:45:48.000Z
|
train.py
|
Derolik666/im2latex
|
458a33f13e876389c54136c5103fe3718d2f91d1
|
[
"Apache-2.0"
] | null | null | null |
train.py
|
Derolik666/im2latex
|
458a33f13e876389c54136c5103fe3718d2f91d1
|
[
"Apache-2.0"
] | 1
|
2020-12-09T06:39:44.000Z
|
2020-12-09T06:39:44.000Z
|
import click
from model.utils.data_generator import DataGenerator
from model.img2seq import Img2SeqModel
from model.utils.lr_schedule import LRSchedule
from model.utils.general import Config
from model.utils.text import Vocab
from model.utils.image import greyscale
@click.command()
@click.option('--data', default="configs/data_small.json",
help='Path to data json config')
@click.option('--vocab', default="configs/vocab_small.json",
help='Path to vocab json config')
@click.option('--training', default="configs/training_small.json",
help='Path to training json config')
@click.option('--model', default="configs/model.json",
help='Path to model json config')
@click.option('--output', default="results/small/",
help='Dir for results and model weights')
def main(data, vocab, training, model, output):
# Load configs
dir_output = output
config = Config([data, vocab, training, model])
config.save(dir_output)
vocab = Vocab(config)
# Load datasets
train_set = DataGenerator(path_formulas=config.path_formulas_train,
dir_images=config.dir_images_train, img_prepro=greyscale,
max_iter=config.max_iter, bucket=config.bucket_train,
path_matching=config.path_matching_train,
max_len=config.max_length_formula,
form_prepro=vocab.form_prepro)
val_set = DataGenerator(path_formulas=config.path_formulas_val,
dir_images=config.dir_images_val, img_prepro=greyscale,
max_iter=config.max_iter, bucket=config.bucket_val,
path_matching=config.path_matching_val,
max_len=config.max_length_formula,
form_prepro=vocab.form_prepro)
# Define learning rate schedule
n_batches_epoch = ((len(train_set) + config.batch_size - 1) //
config.batch_size)
lr_schedule = LRSchedule(lr_init=config.lr_init,
start_decay=config.start_decay*n_batches_epoch,
end_decay=config.end_decay*n_batches_epoch,
end_warm=config.end_warm*n_batches_epoch,
lr_warm=config.lr_warm,
lr_min=config.lr_min)
# Build model and train
model = Img2SeqModel(config, dir_output, vocab)
model.build_train(config)
model.train(config, train_set, val_set, lr_schedule)
model._add_summary()
if __name__ == "__main__":
main()
| 43.048387
| 87
| 0.626827
|
a2ec172f37543b2c0b960d4df4b0cb3a2ab750a9
| 12,779
|
py
|
Python
|
karix/models/inline_response2001.py
|
karixtech/karix-python
|
e9162d41249a639fdb35cba1263416d76d7b79c9
|
[
"MIT"
] | 5
|
2019-02-27T07:22:40.000Z
|
2021-03-28T23:17:23.000Z
|
karix/models/inline_response2001.py
|
sendbee/karix-python
|
0f2d72a6949dce5d5db5d3f48b37d3b6fc65a50e
|
[
"MIT"
] | 2
|
2019-12-03T13:55:07.000Z
|
2020-03-31T03:44:40.000Z
|
karix/models/inline_response2001.py
|
sendbee/karix-python
|
0f2d72a6949dce5d5db5d3f48b37d3b6fc65a50e
|
[
"MIT"
] | 5
|
2019-04-22T17:33:29.000Z
|
2021-01-12T19:57:44.000Z
|
# coding: utf-8
"""
karix api
# Overview Karix API lets you interact with the Karix platform using an omnichannel messaging API. It also allows you to query your account, set up webhooks and buy phone numbers. # API and Clients Versioning Karix APIs are versioned using the format vX.Y where X is the major version number and Y is minor. All minor version releases are backwards compatible but major releases are not, please be careful when upgrading. Version header `api-version` is used by Karix platform to determine the version of the API request. To use Karix API v2 you can send `api-version` as `\"2.0\"`. If an API request does not contain `api-version` header then Karix platform uses the pinned API version of the account as the default verison. Your account defaults to the latest API version release at the time of signup. You can check the pinned API version form the [dashboard](https://cloud.karix.io/dashboard). Karix also provides Helper Libraries for all major languages. Release versions of these libraries correspond to their API Version supported. Client version vX.Y.Z supports API version vX.Y. Helper libraries are configured to send `api-version` header based on the library version. When using official Karix helper libraries, you dont need to concern yourself with pinned version. Using helper library of latest version will give you access to latest features. # Supported Channels Karix omnichannel messaging API supports the following channels: - sms - whatsapp ## SMS Channel To send a message to one or more destinations over SMS channel set `channel` to `sms` in the [Messaging API](#operation/sendMessage). In trial mode, your account can only send messages to numbers within the sandbox. ## Whatsapp Channel To send a message to a destination over WhatsApp channel set `channel` to `whatsapp` in the [Messaging API](#operation/sendMessage). Currently WhatsApp channel can only be used from within the sandbox. Contact [support](mailto:support@karix.io) for an early access outside the sandbox. Any messages you initiate over WhatsApp to end users must conform to a template configured in WhatsApp. These messages are called \"Notification Messages\". Currently only text messages can be sent as Notification Messages. Any responses you receive from end users and all replies you send within 24 hours of the last received response are called \"Conversation Messages\". When using the sandbox for testing and development purposes, we have provided for the following pre-approved templates for \"Notification Messages\": - Your order * has been dispatched. Please expect delivery by * - OTP requested by you on * is * - Thank you for your payment of * * on *. Your transaction ID is * You can replace `*` with any text of your choice. Both Notification and Conversation messages are priced differently, please refer to the [pricing page](http://karix.io/messaging/pricing/) for more details. # Common Request Structures All Karix APIs follow a common REST format with the following resources: - account - message - webhook - number ## Creating a resource To create a resource send a `POST` request with the desired parameters in a JSON object to `/<resource>/` url. A successful response will contain the details of the single resource created with HTTP status code `201 Created`. Note: An exception to this is the `Create Message` API which is a bulk API and returns a list of message records. ## Fetching a resource To fetch a resource by its Unique ID send a `GET` request to `/<resource>/<uid>/` where `uid` is the Alphanumeric Unique ID of the resource. A successful response will contain the details of the single resource fetched with HTTP status code `200 OK` ## Editing a resource To edit certain parameters of a resource send a `PATCH` request to `/<resource>/<uid>/` where `uid` is the Alphanumeric Unique ID of the resource, with a JSON object containing only the parameters which need to be updated. Edit resource APIs generally have no required parameters. A successful response will contain all the details of the single resource after editing. ## Deleting a resource To delete a resource send a `DELETE` request to `/<resource>/<uid>/` where `uid` is the Alphanumeric Unique ID of the resource. A successful response will return HTTP status code `204 No Content` with no body. ## Fetching a list of resources To fetch a list of resources send a `GET` request to `/<resource>/` with filters as GET parameters. A successful response will contain a list of filtered paginated objects with HTTP status code `200 OK`. ### Pagination Pagination for list APIs are controlled using GET parameters: - `limit`: Number of objects to be returned - `offset`: Number of objects to skip before collecting the output list. # Common Response Structures All Karix APIs follow a common respose structure. ## Success Responses ### Single Resource Response Responses returning a single object will have the following keys | Key | Child Key | Description | |:------------- |:------------- |:----------------------------------------- | | meta | | Meta Details about request and response | | | request_uuid | Unique request identifier | | data | | Details of the object | ### List Resource Response Responses returning a list of objects will have the following keys | Key | Child Key | Description | |:------------- |:------------- |:----------------------------------------- | | meta | | Meta Details about request and response | | | request_uuid | Unique request identifier | | | previous | Link to the previous page of the list | | | next | Link to the next page of the list | | | total | Total number of objects over all pages | | objects | | List of objects with details | ## Error Responses ### Validation Error Response Responses for requests which failed due to validation errors will have the follwing keys: | Key | Child Key | Description | |:------------- |:------------- |:------------------------------------------ | | meta | | Meta Details about request and response | | | request_uuid | Unique request identifier | | error | | Details for the error | | | message | Error message | | | param | (Optional) parameter this error relates to | Validation error responses will return HTTP Status Code `400 Bad Request` ### Insufficient Balance Response Some requests will require to consume account credits. In case of insufficient balance the following keys will be returned: | Key | Child Key | Description | |:------------- |:------------- |:----------------------------------------- | | meta | | Meta Details about request and response | | | request_uuid | Unique request identifier | | error | | Details for the error | | | message | `Insufficient Balance` | Insufficient balance response will return HTTP Status Code `402 Payment Required` # Events and Webhooks All asynchronous events generated by Karix platform follow a common structure: | Key | Child Key | Description | |:------------- |:------------- |:------------------------------------------- | | uid | | Alphanumeric unique ID of the event | | api_version | | 2.0 | | type | | Type of the event. | | data | | Details of the object attached to the event | Currently implemented event types are: - `message`: These events are generated when a message is created or its status is changed. When event `type` is `message` the `data` parameter contains the Message object (check out the response.data of [Get Message](#operation/getMessageById) API). - For outbound messages, `message` events are sent to `events_url` parameter of [Send Message](#operation/sendMessage) API - For inbound messages, `message` events are sent to the webhook attached to the phone number resource using [Edit Number](#tag/Number) API - For inbound messages to whatsapp sandbox, `message` events are sent to Webhook URL set on the [Dashboard](https://cloud.karix.io/dashboard/#whatsapp-demo). # noqa: E501
OpenAPI spec version: 2.0
Contact: support@karix.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from karix.models.array_meta_response import ArrayMetaResponse # noqa: F401,E501
from karix.models.message import Message # noqa: F401,E501
class InlineResponse2001(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'objects': 'list[Message]',
'meta': 'ArrayMetaResponse'
}
attribute_map = {
'objects': 'objects',
'meta': 'meta'
}
def __init__(self, objects=None, meta=None): # noqa: E501
"""InlineResponse2001 - a model defined in Swagger""" # noqa: E501
self._objects = None
self._meta = None
self.discriminator = None
if objects is not None:
self.objects = objects
if meta is not None:
self.meta = meta
@property
def objects(self):
"""Gets the objects of this InlineResponse2001. # noqa: E501
:return: The objects of this InlineResponse2001. # noqa: E501
:rtype: list[Message]
"""
return self._objects
@objects.setter
def objects(self, objects):
"""Sets the objects of this InlineResponse2001.
:param objects: The objects of this InlineResponse2001. # noqa: E501
:type: list[Message]
"""
self._objects = objects
@property
def meta(self):
"""Gets the meta of this InlineResponse2001. # noqa: E501
:return: The meta of this InlineResponse2001. # noqa: E501
:rtype: ArrayMetaResponse
"""
return self._meta
@meta.setter
def meta(self, meta):
"""Sets the meta of this InlineResponse2001.
:param meta: The meta of this InlineResponse2001. # noqa: E501
:type: ArrayMetaResponse
"""
self._meta = meta
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(InlineResponse2001, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InlineResponse2001):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 88.131034
| 8,932
| 0.622193
|
d1df6116b6aae7ad456af6b519d7f5908a36c5b4
| 2,313
|
py
|
Python
|
scripts/deps.py
|
wolfgangwazzlestrauss/canvas
|
87d577ec2edfe1188d8ecd176acc5a284b1dc81f
|
[
"MIT"
] | null | null | null |
scripts/deps.py
|
wolfgangwazzlestrauss/canvas
|
87d577ec2edfe1188d8ecd176acc5a284b1dc81f
|
[
"MIT"
] | 2
|
2022-02-04T05:06:16.000Z
|
2022-02-04T05:06:27.000Z
|
scripts/deps.py
|
wolfgangwazzlestrauss/canvas
|
87d577ec2edfe1188d8ecd176acc5a284b1dc81f
|
[
"MIT"
] | null | null | null |
"""Script for checking and updating package dependencies."""
import pathlib
from typing import Any, Dict, List
import requests
import toml
import typer
app = typer.Typer(help=__doc__)
# Mypy incorrectly thinks that toml.TomlEncoder is not defined.
class CustomEncoder(toml.TomlEncoder): # type: ignore
"""Custom TOML encoder that does not use trailing commas."""
def dump_list(self, list_: List[Any]) -> str:
"""Format list as string without trailing commas.
Args:
list_: List to format.
Returns:
Formatted list.
"""
elems = ",".join(str(self.dump_value(elem)) for elem in list_)
return f"[{elems}]"
def latest(session: requests.Session, package: str) -> str:
"""Find latest version of package on PyPI.
Args:
package: Python package name.
Returns:
Latest package version.
"""
endpoint = f"https://pypi.org/pypi/{package}/json"
with session.get(endpoint) as resp:
json = resp.json()
return f"^{json['info']['version']}"
def update(packages: Dict[str, str]) -> None:
"""Mutate dictionary of packages to contain latest version requirements.
Args:
packages: Package dictionary to update.
"""
session = requests.Session()
for package, current in packages.items():
if package == "python":
continue
latest_ = latest(session, package)
if current != latest_:
print(f"updating {package}: {current} -> {latest_}")
packages[package] = latest_
@app.command()
def main(
dry_run: bool = typer.Option(
False, help="Show dependencies that would be updated but do not write."
)
) -> None:
"""Update pyproject.toml to use latest package dependencies."""
file_path = pathlib.Path(__file__).parents[1] / "pyproject.toml"
with open(file_path, "r") as handle:
config = toml.load(handle)
update(config["tool"]["poetry"]["dependencies"])
update(config["tool"]["poetry"]["dev-dependencies"])
if not dry_run:
with open(file_path, "w") as handle:
# Mypy incorrectly thinks that toml.dump can only take 2 arguments.
toml.dump(config, handle, CustomEncoder()) # type: ignore
if __name__ == "__main__":
app()
| 25.417582
| 79
| 0.629918
|
cba35b1a0a0f8dac5e3f747841af5d359686fd79
| 6,034
|
py
|
Python
|
fm/models/eventclassificationrule.py
|
xUndero/noc
|
9fb34627721149fcf7064860bd63887e38849131
|
[
"BSD-3-Clause"
] | 1
|
2019-09-20T09:36:48.000Z
|
2019-09-20T09:36:48.000Z
|
fm/models/eventclassificationrule.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
fm/models/eventclassificationrule.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# EventClassificationRule model
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
from __future__ import absolute_import
import os
# Third-party modules
import six
from mongoengine import fields
from mongoengine.document import EmbeddedDocument, Document
# NOC modules
from .eventclass import EventClass
from .datasource import DataSource
from noc.core.mongo.fields import PlainReferenceField
from noc.core.escape import json_escape as jq
from noc.core.text import quote_safe_path
@six.python_2_unicode_compatible
class EventClassificationRuleVar(EmbeddedDocument):
meta = {"strict": False}
name = fields.StringField(required=True)
value = fields.StringField(required=False)
def __str__(self):
return self.name
def __eq__(self, other):
return self.name == other.name and self.value == other.value
@six.python_2_unicode_compatible
class EventClassificationRuleCategory(Document):
meta = {
"collection": "noc.eventclassificationrulecategories",
"strict": False,
"auto_create_index": False,
}
name = fields.StringField()
parent = fields.ObjectIdField(required=False)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
if " | " in self.name:
p_name = " | ".join(self.name.split(" | ")[:-1])
p = EventClassificationRuleCategory.objects.filter(name=p_name).first()
if not p:
p = EventClassificationRuleCategory(name=p_name)
p.save()
self.parent = p.id
else:
self.parent = None
super(EventClassificationRuleCategory, self).save(*args, **kwargs)
@six.python_2_unicode_compatible
class EventClassificationPattern(EmbeddedDocument):
meta = {"strict": False}
key_re = fields.StringField(required=True)
value_re = fields.StringField(required=True)
def __str__(self):
return "'%s' : '%s'" % (self.key_re, self.value_re)
def __eq__(self, other):
return self.key_re == other.key_re and self.value_re == other.value_re
@six.python_2_unicode_compatible
class EventClassificationRule(Document):
"""
Classification rules
"""
meta = {
"collection": "noc.eventclassificationrules",
"strict": False,
"auto_create_index": False,
"json_collection": "fm.eventclassificationrules",
"json_depends_on": ["fm.eventclasses"],
"json_unique_fields": ["name"],
}
name = fields.StringField(required=True, unique=True)
uuid = fields.UUIDField(binary=True)
description = fields.StringField(required=False)
event_class = PlainReferenceField(EventClass, required=True)
preference = fields.IntField(required=True, default=1000)
patterns = fields.ListField(fields.EmbeddedDocumentField(EventClassificationPattern))
datasources = fields.ListField(fields.EmbeddedDocumentField(DataSource))
vars = fields.ListField(fields.EmbeddedDocumentField(EventClassificationRuleVar))
#
category = fields.ObjectIdField()
def __str__(self):
return self.name
def save(self, *args, **kwargs):
c_name = " | ".join(self.name.split(" | ")[:-1])
c = EventClassificationRuleCategory.objects.filter(name=c_name).first()
if not c:
c = EventClassificationRuleCategory(name=c_name)
c.save()
self.category = c.id
super(EventClassificationRule, self).save(*args, **kwargs)
@property
def short_name(self):
return self.name.split(" | ")[-1]
def to_json(self):
r = ["{"]
r += [' "name": "%s",' % jq(self.name)]
r += [' "$collection": "%s",' % jq(self._meta["json_collection"])]
r += [' "uuid": "%s",' % self.uuid]
if self.description:
r += [' "description": "%s",' % jq(self.description)]
r += [' "event_class__name": "%s",' % jq(self.event_class.name)]
r += [' "preference": %d,' % self.preference]
# Dump datasources
if self.datasources:
r += [' "datasources": [']
jds = []
for ds in self.datasources:
x = [' "name": "%s"' % jq(ds.name)]
x += [' "datasource": "%s"' % jq(ds.datasource)]
ss = []
for k in sorted(ds.search):
ss += [' "%s": "%s"' % (jq(k), jq(ds.search[k]))]
x += [' "search": {']
x += [",\n".join(ss)]
x += [" }"]
jds += [" {", ",\n".join(x), " }"]
r += [",\n\n".join(jds)]
r += [" ],"]
# Dump vars
if self.vars:
r += [' "vars": [']
vars = []
for v in self.vars:
vd = [" {"]
vd += [' "name": "%s",' % jq(v.name)]
vd += [' "value": "%s"' % jq(v.value)]
vd += [" }"]
vars += ["\n".join(vd)]
r += [",\n\n".join(vars)]
r += [" ],"]
# Dump patterns
r += [' "patterns": [']
patterns = []
for p in self.patterns:
pt = []
pt += [" {"]
pt += [' "key_re": "%s",' % jq(p.key_re)]
pt += [' "value_re": "%s"' % jq(p.value_re)]
pt += [" }"]
patterns += ["\n".join(pt)]
r += [",\n".join(patterns)]
r += [" ]"]
r += ["}"]
return "\n".join(r)
def get_json_path(self):
p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
return os.path.join(*p) + ".json"
| 34.678161
| 89
| 0.524196
|
0d9d0fc071ab858362508d459fffcda0a807ec04
| 8,401
|
py
|
Python
|
examples/vision/python_yolov4/image_processing.py
|
raramakr/AMDMIGraphX
|
83e7425367f6ce850ec28fe716fe7c23ce34c79f
|
[
"MIT"
] | 72
|
2018-12-06T18:31:17.000Z
|
2022-03-30T15:01:02.000Z
|
examples/vision/python_yolov4/image_processing.py
|
raramakr/AMDMIGraphX
|
83e7425367f6ce850ec28fe716fe7c23ce34c79f
|
[
"MIT"
] | 1,006
|
2018-11-30T16:32:33.000Z
|
2022-03-31T22:43:39.000Z
|
examples/vision/python_yolov4/image_processing.py
|
raramakr/AMDMIGraphX
|
83e7425367f6ce850ec28fe716fe7c23ce34c79f
|
[
"MIT"
] | 36
|
2019-05-07T10:41:46.000Z
|
2022-03-28T15:59:56.000Z
|
# All pre- and post-processing methods used below are borrowed from the ONNX MOdel Zoo
# https://github.com/onnx/models/tree/master/vision/object_detection_segmentation/yolov4
import numpy as np
import cv2
from scipy import special
import colorsys
import random
# this function is from tensorflow-yolov4-tflite/core/utils.py
def image_preprocess(image, target_size, gt_boxes=None):
ih, iw = target_size
h, w, _ = image.shape
scale = min(iw / w, ih / h)
nw, nh = int(scale * w), int(scale * h)
image_resized = cv2.resize(image, (nw, nh))
image_padded = np.full(shape=[ih, iw, 3], fill_value=128.0)
dw, dh = (iw - nw) // 2, (ih - nh) // 2
image_padded[dh:nh + dh, dw:nw + dw, :] = image_resized
image_padded = image_padded / 255.
if gt_boxes is None:
return image_padded
else:
gt_boxes[:, [0, 2]] = gt_boxes[:, [0, 2]] * scale + dw
gt_boxes[:, [1, 3]] = gt_boxes[:, [1, 3]] * scale + dh
return image_padded, gt_boxes
def get_anchors(anchors_path, tiny=False):
'''loads the anchors from a file'''
with open(anchors_path) as f:
anchors = f.readline()
anchors = np.array(anchors.split(','), dtype=np.float32)
return anchors.reshape(3, 3, 2)
def postprocess_bbbox(pred_bbox, ANCHORS, STRIDES, XYSCALE=[1, 1, 1]):
'''define anchor boxes'''
for i, pred in enumerate(pred_bbox):
conv_shape = pred.shape
output_size = conv_shape[1]
conv_raw_dxdy = pred[:, :, :, :, 0:2]
conv_raw_dwdh = pred[:, :, :, :, 2:4]
xy_grid = np.meshgrid(np.arange(output_size), np.arange(output_size))
xy_grid = np.expand_dims(np.stack(xy_grid, axis=-1), axis=2)
xy_grid = np.tile(np.expand_dims(xy_grid, axis=0), [1, 1, 1, 3, 1])
xy_grid = xy_grid.astype(np.float)
pred_xy = ((special.expit(conv_raw_dxdy) * XYSCALE[i]) - 0.5 *
(XYSCALE[i] - 1) + xy_grid) * STRIDES[i]
pred_wh = (np.exp(conv_raw_dwdh) * ANCHORS[i])
pred[:, :, :, :, 0:4] = np.concatenate([pred_xy, pred_wh], axis=-1)
pred_bbox = [np.reshape(x, (-1, np.shape(x)[-1])) for x in pred_bbox]
pred_bbox = np.concatenate(pred_bbox, axis=0)
return pred_bbox
def postprocess_boxes(pred_bbox, org_img_shape, input_size, score_threshold):
'''remove boundary boxs with a low detection probability'''
valid_scale = [0, np.inf]
pred_bbox = np.array(pred_bbox)
pred_xywh = pred_bbox[:, 0:4]
pred_conf = pred_bbox[:, 4]
pred_prob = pred_bbox[:, 5:]
# (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
pred_coor = np.concatenate([
pred_xywh[:, :2] - pred_xywh[:, 2:] * 0.5,
pred_xywh[:, :2] + pred_xywh[:, 2:] * 0.5
],
axis=-1)
# (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
org_h, org_w = org_img_shape
resize_ratio = min(input_size / org_w, input_size / org_h)
dw = (input_size - resize_ratio * org_w) / 2
dh = (input_size - resize_ratio * org_h) / 2
pred_coor[:, 0::2] = 1.0 * (pred_coor[:, 0::2] - dw) / resize_ratio
pred_coor[:, 1::2] = 1.0 * (pred_coor[:, 1::2] - dh) / resize_ratio
# (3) clip some boxes that are out of range
pred_coor = np.concatenate([
np.maximum(pred_coor[:, :2], [0, 0]),
np.minimum(pred_coor[:, 2:], [org_w - 1, org_h - 1])
],
axis=-1)
invalid_mask = np.logical_or((pred_coor[:, 0] > pred_coor[:, 2]),
(pred_coor[:, 1] > pred_coor[:, 3]))
pred_coor[invalid_mask] = 0
# (4) discard some invalid boxes
bboxes_scale = np.sqrt(
np.multiply.reduce(pred_coor[:, 2:4] - pred_coor[:, 0:2], axis=-1))
scale_mask = np.logical_and((valid_scale[0] < bboxes_scale),
(bboxes_scale < valid_scale[1]))
# (5) discard some boxes with low scores
classes = np.argmax(pred_prob, axis=-1)
scores = pred_conf * pred_prob[np.arange(len(pred_coor)), classes]
score_mask = scores > score_threshold
mask = np.logical_and(scale_mask, score_mask)
coors, scores, classes = pred_coor[mask], scores[mask], classes[mask]
return np.concatenate(
[coors, scores[:, np.newaxis], classes[:, np.newaxis]], axis=-1)
def bboxes_iou(boxes1, boxes2):
'''calculate the Intersection Over Union value'''
boxes1 = np.array(boxes1)
boxes2 = np.array(boxes2)
boxes1_area = (boxes1[..., 2] - boxes1[..., 0]) * (boxes1[..., 3] -
boxes1[..., 1])
boxes2_area = (boxes2[..., 2] - boxes2[..., 0]) * (boxes2[..., 3] -
boxes2[..., 1])
left_up = np.maximum(boxes1[..., :2], boxes2[..., :2])
right_down = np.minimum(boxes1[..., 2:], boxes2[..., 2:])
inter_section = np.maximum(right_down - left_up, 0.0)
inter_area = inter_section[..., 0] * inter_section[..., 1]
union_area = boxes1_area + boxes2_area - inter_area
ious = np.maximum(1.0 * inter_area / union_area, np.finfo(np.float32).eps)
return ious
def nms(bboxes, iou_threshold, sigma=0.3, method='nms'):
"""
:param bboxes: (xmin, ymin, xmax, ymax, score, class)
Note: soft-nms, https://arxiv.org/pdf/1704.04503.pdf
https://github.com/bharatsingh430/soft-nms
"""
classes_in_img = list(set(bboxes[:, 5]))
best_bboxes = []
for cls in classes_in_img:
cls_mask = (bboxes[:, 5] == cls)
cls_bboxes = bboxes[cls_mask]
while len(cls_bboxes) > 0:
max_ind = np.argmax(cls_bboxes[:, 4])
best_bbox = cls_bboxes[max_ind]
best_bboxes.append(best_bbox)
cls_bboxes = np.concatenate(
[cls_bboxes[:max_ind], cls_bboxes[max_ind + 1:]])
iou = bboxes_iou(best_bbox[np.newaxis, :4], cls_bboxes[:, :4])
weight = np.ones((len(iou), ), dtype=np.float32)
assert method in ['nms', 'soft-nms']
if method == 'nms':
iou_mask = iou > iou_threshold
weight[iou_mask] = 0.0
if method == 'soft-nms':
weight = np.exp(-(1.0 * iou**2 / sigma))
cls_bboxes[:, 4] = cls_bboxes[:, 4] * weight
score_mask = cls_bboxes[:, 4] > 0.
cls_bboxes = cls_bboxes[score_mask]
return best_bboxes
def read_class_names(class_file_name):
'''loads class name from a file'''
names = {}
with open(class_file_name, 'r') as data:
for ID, name in enumerate(data):
names[ID] = name.strip('\n')
return names
def draw_bbox(image,
bboxes,
classes=read_class_names("./utilities/coco.names"),
show_label=True):
"""
bboxes: [x_min, y_min, x_max, y_max, probability, cls_id] format coordinates.
"""
num_classes = len(classes)
image_h, image_w, _ = image.shape
hsv_tuples = [(1.0 * x / num_classes, 1., 1.) for x in range(num_classes)]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(
map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
colors))
random.seed(0)
random.shuffle(colors)
random.seed(None)
for i, bbox in enumerate(bboxes):
coor = np.array(bbox[:4], dtype=np.int32)
fontScale = 0.5
score = bbox[4]
class_ind = int(bbox[5])
bbox_color = colors[class_ind]
bbox_thick = int(0.6 * (image_h + image_w) / 600)
c1, c2 = (coor[0], coor[1]), (coor[2], coor[3])
cv2.rectangle(image, c1, c2, bbox_color, bbox_thick)
if show_label:
bbox_mess = '%s: %.2f' % (classes[class_ind], score)
t_size = cv2.getTextSize(bbox_mess,
0,
fontScale,
thickness=bbox_thick // 2)[0]
cv2.rectangle(image, c1,
(c1[0] + t_size[0], c1[1] - t_size[1] - 3),
bbox_color, -1)
cv2.putText(image,
bbox_mess, (c1[0], c1[1] - 2),
cv2.FONT_HERSHEY_SIMPLEX,
fontScale, (0, 0, 0),
bbox_thick // 2,
lineType=cv2.LINE_AA)
return image
| 35.748936
| 88
| 0.559576
|
43190b0457d4fa58556fe9e6cff0fb6df20dfa63
| 2,437
|
py
|
Python
|
rlpyt/ul/algos/ul_for_rl/replay_saver.py
|
traffic-lights/rlpyt
|
ec4689cddd55d98c037194685cfd6ca8e6785014
|
[
"MIT"
] | 2,122
|
2019-07-02T13:19:10.000Z
|
2022-03-22T09:59:42.000Z
|
rlpyt/ul/algos/ul_for_rl/replay_saver.py
|
traffic-lights/rlpyt
|
ec4689cddd55d98c037194685cfd6ca8e6785014
|
[
"MIT"
] | 206
|
2019-07-02T14:19:42.000Z
|
2022-02-15T02:34:28.000Z
|
rlpyt/ul/algos/ul_for_rl/replay_saver.py
|
traffic-lights/rlpyt
|
ec4689cddd55d98c037194685cfd6ca8e6785014
|
[
"MIT"
] | 369
|
2019-07-02T13:38:28.000Z
|
2022-03-28T11:16:39.000Z
|
from rlpyt.algos.base import RlAlgorithm
from rlpyt.replays.non_sequence.uniform import UniformReplayBuffer
from rlpyt.replays.non_sequence.frame import UniformReplayFrameBuffer
from rlpyt.utils.collections import namedarraytuple
SamplesToBuffer = namedarraytuple("SamplesToBuffer",
["observation", "action", "reward", "done"])
class ReplaySaverAlgo(RlAlgorithm):
"""Doesn't actually learn anything, just builds replay buffer and fits into
existing interfaces."""
opt_info_fields = ()
def __init__(self, replay_size, discount=0.99, n_step_return=1, frame_buffer=False):
self.replay_size = replay_size
self.discount = discount
self.n_step_return = n_step_return
self.frame_buffer = frame_buffer
self.optimizer = DummyOptimizer()
def initialize(self, agent, n_itr, batch_spec, mid_batch_reset=False,
examples=None, world_size=1, rank=0):
example_to_buffer = self.examples_to_buffer(examples)
ReplayCls = UniformReplayFrameBuffer if self.frame_buffer else UniformReplayBuffer
self.replay_buffer = ReplayCls(
example=example_to_buffer,
size=self.replay_size,
B=batch_spec.B,
discount=self.discount,
n_step_return=self.n_step_return,
)
self._batch_size = batch_spec.B * batch_spec.T # snapshot saving
def optimize_agent(self, itr, samples):
samples_to_buffer = self.samples_to_buffer(samples)
self.replay_buffer.append_samples(samples_to_buffer)
# maybe return empyt tuple? rather than None
def examples_to_buffer(self, examples):
return SamplesToBuffer(
observation=examples["observation"],
action=examples["action"],
reward=examples["reward"],
done=examples["done"],
)
def samples_to_buffer(self, samples):
"""Defines how to add data from sampler into the replay buffer. Called
in optimize_agent() if samples are provided to that method. In
asynchronous mode, will be called in the memory_copier process."""
return SamplesToBuffer(
observation=samples.env.observation,
action=samples.agent.action,
reward=samples.env.reward,
done=samples.env.done,
)
class DummyOptimizer:
"""So that snapshot can be saved."""
def state_dict(self):
return None
| 35.838235
| 90
| 0.681986
|
936f0683fc29c257e045c8e058d647034eb5d05b
| 1,327
|
py
|
Python
|
app/core/models.py
|
justinwkUKM/django-app-api
|
a55231ac1d938ff3678ff06d47cfdbcc5301f371
|
[
"MIT"
] | null | null | null |
app/core/models.py
|
justinwkUKM/django-app-api
|
a55231ac1d938ff3678ff06d47cfdbcc5301f371
|
[
"MIT"
] | 4
|
2021-03-19T01:28:34.000Z
|
2021-09-22T18:49:45.000Z
|
app/core/models.py
|
justinwkUKM/django-app-api
|
a55231ac1d938ff3678ff06d47cfdbcc5301f371
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, \
BaseUserManager, PermissionsMixin
class UserManager(BaseUserManager):
def create_user(self, email, password=None, **extra_fields):
'''creates and saves a new user'''
if not email:
raise ValueError('User must have a valid email address')
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self.db)
return user
def create_superuser(self, email, password=None):
'''creates and saves a super user'''
if not email:
raise ValueError('User must have a valid email address')
user = self.create_user(email=self.normalize_email(email),)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save(using=self.db)
return user
class User(AbstractBaseUser, PermissionsMixin):
'''Custom user model that supports using email instead of username'''
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=True)
objects = UserManager()
USERNAME_FIELD = 'email'
| 32.365854
| 76
| 0.683497
|
4ba82c71a030c321256ee495a4ac971c57e43edf
| 3,687
|
py
|
Python
|
dict_minimize/torch_api.py
|
anukaal/dict_minimize
|
df1934ddde4e64da88e39cc41e2e1a70f32488f3
|
[
"Apache-2.0"
] | 64
|
2020-08-26T17:39:14.000Z
|
2022-03-30T09:50:26.000Z
|
dict_minimize/torch_api.py
|
anukaal/dict_minimize
|
df1934ddde4e64da88e39cc41e2e1a70f32488f3
|
[
"Apache-2.0"
] | 10
|
2020-08-24T19:14:05.000Z
|
2021-09-29T18:31:55.000Z
|
dict_minimize/torch_api.py
|
anukaal/dict_minimize
|
df1934ddde4e64da88e39cc41e2e1a70f32488f3
|
[
"Apache-2.0"
] | 5
|
2021-01-13T13:50:34.000Z
|
2022-03-21T01:02:28.000Z
|
from collections import OrderedDict
from typing import Callable, Optional, Sequence
import numpy as np
import torch
from dict_minimize.core._scipy import _minimize
def _get_dtype(X):
dtype = X.dtype
return dtype
def _from_np(X, dtype):
assert X.dtype.kind == "f"
dtype_ = X.dtype
X = np.asarray(X) # In case a scalar was given
assert X.dtype == dtype_
Xt = torch.from_numpy(X).type(dtype)
# Do this weird way to avoid `UserWarning` from torch.
Xt = Xt.clone().detach().requires_grad_(True)
return Xt
def _to_np(X):
Xn = X.detach().numpy()
assert Xn.dtype.kind == "f"
return Xn
def minimize(
fun: Callable,
x0_dict: OrderedDict,
*,
lb_dict: Optional[OrderedDict] = None,
ub_dict: Optional[OrderedDict] = None,
args: Sequence = (),
method: Optional[str] = None,
tol: Optional[float] = None,
callback: Optional[Callable] = None,
options: Optional[dict] = None,
) -> OrderedDict:
"""Minimization of a scalar function with a dictionary of variables as the input. It can interface to functions
written for `torch`.
This is a wrapper around `scipy.optimize.minimize`.
Args:
fun (callable): The objective function to be minimized, in the form of \
``fun(x, *args) -> (float, OrderedDict)`` \
where `x` is an `OrderedDict` in the format of `x0_dict`, and `args` is a tuple of the fixed \
parameters needed to completely specify the function. The second returned variable is the \
gradients. It should be an `OrderedDict` with the same keys and shapes as `x`. The values \
should be `torch` `Tensor`.
x0_dict (OrderedDict): Initial guess. Dictionary of variables from variable name to \
`torch` variables.
lb_dict (OrderedDict): Dictionary with same keys and shapes as `x0_dict` with lower bounds for \
each variable. Set to `None` in an unconstrained problem.
ub_dict (OrderedDict): Dictionary with same keys and shapes as `x0_dict` with upper bounds for \
each variable. Set to `None` in an unconstrained problem.
args (tuple): Extra arguments passed to the objective function.
method (str): Type of solver. Should be one of: ``CG``, ``BFGS``, ``L-BFGS-B``, ``TNC``, \
``SLSQP``, or ``trust-constr``. If not given, chosen to be one of ``BFGS``, ``L-BFGS-B``, \
``SLSQP``, depending if the problem has bounds. Note, only ``L-BFGS-B``, ``TNC``, ``SLSQP`` \
seem to strictly respect the bounds ``lb_dict`` and ``ub_dict``.
tol (float): Tolerance for termination. For detailed control, use solver-specific options.
callback (callable): Called after each iteration. The signature is: \
``callback(xk)`` \
where `xk` is the current parameter as an `OrderedDict` with the same form as the final \
solution `x`.
options (dict): A dictionary of solver options. All methods accept the following generic \
options: \
maxiter : int \
Maximum number of iterations to perform. Depending on the method each iteration may use \
several function evaluations. \
disp : bool
Set to `True` to print convergence messages.
Returns:
x (OrderedDict): Final solution found by the optimizer. It has the same keys and shapes as `x0_dict`.
"""
x = _minimize(
fun,
x0_dict,
from_np=_from_np,
get_dtype=_get_dtype,
to_np=_to_np,
lb_dict=lb_dict,
ub_dict=ub_dict,
args=args,
method=method,
tol=tol,
callback=callback,
options=options,
)
return x
| 36.87
| 115
| 0.643341
|
cfeb4691eff89c3c33a147b0da65451d28aebc18
| 442
|
py
|
Python
|
azusa/__tests__/test_curve_probabilities.py
|
ihowell/mtgcurve
|
d6b2f59be257e84025e0322b10f957418fa493f1
|
[
"MIT"
] | null | null | null |
azusa/__tests__/test_curve_probabilities.py
|
ihowell/mtgcurve
|
d6b2f59be257e84025e0322b10f957418fa493f1
|
[
"MIT"
] | 6
|
2021-03-18T23:25:11.000Z
|
2021-03-27T02:05:47.000Z
|
azusa/__tests__/test_curve_probabilities.py
|
ihowell/mtgcurve
|
d6b2f59be257e84025e0322b10f957418fa493f1
|
[
"MIT"
] | null | null | null |
from azusa.curve_probabilities import calculate_cmc_probs, display_prob_table
def test_artifact_probs():
num_lands = 4
num_other = 7
mana_producers = {'Mana Crypt': 1, 'Sol Ring': 1}
probs = calculate_cmc_probs(len(mana_producers) + num_lands + num_other,
mana_producers,
num_lands,
max_turns=2)
display_prob_table(probs)
| 29.466667
| 77
| 0.588235
|
7de919af6fb9fa507282f40617e7134bbb73f2a7
| 37,779
|
py
|
Python
|
packages/netsuite-adapter/scripts/types_generator.py
|
tomermevorach/salto
|
f159518d52e62075f9720a8d2ce83918e6243304
|
[
"Apache-2.0"
] | null | null | null |
packages/netsuite-adapter/scripts/types_generator.py
|
tomermevorach/salto
|
f159518d52e62075f9720a8d2ce83918e6243304
|
[
"Apache-2.0"
] | null | null | null |
packages/netsuite-adapter/scripts/types_generator.py
|
tomermevorach/salto
|
f159518d52e62075f9720a8d2ce83918e6243304
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
from selenium import webdriver
import os
import pyotp
import re
import sys
import time
import logging
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
SCRIPT_DIR = os.path.dirname(__file__)
SRC_DIR = os.path.join(SCRIPT_DIR, '../src/')
TYPES_DIR = os.path.join(SRC_DIR, 'types/')
CUSTOM_TYPES_DIR = os.path.join(TYPES_DIR, 'custom_types/')
enums_link_template = 'https://{account_id}.app.netsuite.com/app/help/helpcenter.nl?fid=SDFxml_2405618192.html'
script_ids_prefix_link_template = 'https://{account_id}.app.netsuite.com/app/help/helpcenter.nl?fid=subsect_1537555588.html&whence='
sdf_xml_definitions_link_template = 'https://{account_id}.app.netsuite.com/app/help/helpcenter.nl?fid=SDFxml.html'
SCRIPT_ID_FIELD_NAME = 'scriptid'
FIELDS = 'fields'
ANNOTATIONS = 'annotations'
NAME = 'name'
IS_LIST = 'is_list'
TYPE = 'type'
DESCRIPTION = 'description'
INNER_TYPE_NAME_TO_DEF = 'inner_type_name_to_def'
TYPE_DEF = 'type_def'
LICENSE_HEADER = '''/*
* Copyright 2021 Salto Labs Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'''
list_type_import = ' ListType,'
enums_import = '''import { enums } from '../enums'
'''
field_types_import = '''import { fieldTypes } from '../field_types'
'''
import_statements_for_type_def_template = '''import {{
BuiltinTypes, CORE_ANNOTATIONS, ElemID, ObjectType, createRestriction,{list_type_import}
}} from '@salto-io/adapter-api'
import * as constants from '../../constants'
{enums_import}{field_types_import}
'''
type_inner_types_array_template = '''export const {type_name}InnerTypes: ObjectType[] = []
'''
COMMON_IMPORT_STATEMENTS_FOR_ENUMS_DEF = '''import {{ CORE_ANNOTATIONS, createRestriction, ElemID, PrimitiveType, PrimitiveTypes }} from '@salto-io/adapter-api'
import * as constants from '../constants'
'''
DISABLE_LINT_CAMEL_CASE = '''/* eslint-disable @typescript-eslint/camelcase */
'''
DISABLE_LINT_LINE_LENGTH = '''/* eslint-disable max-len */
'''
HEADER_FOR_DEFS = LICENSE_HEADER + DISABLE_LINT_LINE_LENGTH + DISABLE_LINT_CAMEL_CASE
type_elem_id_template = '''const {type_name}ElemID = new ElemID(constants.NETSUITE, '{type_name}')
'''
SUBTYPES_FOLDER_PATH_DEF = '''const enumsFolderPath = [constants.NETSUITE, constants.TYPES_PATH, constants.SUBTYPES_PATH]
'''
enum_restriction_annotation_template = '''
[CORE_ANNOTATIONS.RESTRICTION]: createRestriction({{
values: {values},
enforce_value: false,
}}),'''
primitive_string_type_entry_template = ''' {type_name}: new PrimitiveType({{
elemID: {type_name}ElemID,
primitive: PrimitiveTypes.STRING,
annotations: {{{annotations}
}},
path: [...enumsFolderPath, {type_name}ElemID.name],
}}),
'''
enums_file_template = HEADER_FOR_DEFS + COMMON_IMPORT_STATEMENTS_FOR_ENUMS_DEF + SUBTYPES_FOLDER_PATH_DEF + '''{enums_elem_ids}
export const enums: Record<string, PrimitiveType> = {{
{enums_entries}}}
'''
inner_types_def_template = '''const {inner_type_name}ElemID = new ElemID(constants.NETSUITE, '{inner_type_name}')
{type_def}
{type_name}InnerTypes.push({inner_type_name})
'''
type_annotation_template = '''
{annotation_name}: '{annotation_value}','''
type_annotations_template = '''
annotations: {{{annotations}
}},'''
type_template = '''
{export}const {type_name} = new ObjectType({{
elemID: {type_name}ElemID,{annotations}
fields: {{
{field_definitions}
}},
path: {path},
}})
'''
type_path_template = '[constants.NETSUITE, constants.TYPES_PATH, {type_name}ElemID.name]'
field_template = ''' {field_name}: {{
type: {field_type},
annotations: {{{annotations}
}},
}},'''
field_annotation_template = '''
{annotation_name}: {annotation_value},'''
import_type_statement_template = '''import {{ {type_name}, {type_name}InnerTypes }} from './types/custom_types/{type_name}'
'''
custom_types_map_entry_template = ''' {type_name},
'''
type_inner_types_vars_template = ''' ...{type_name}InnerTypes,
'''
types_file_template = LICENSE_HEADER + '''import {{ ObjectType, TypeElement }} from '@salto-io/adapter-api'
import _ from 'lodash'
import {{ file, folder }} from './types/file_cabinet_types'
{import_types_statements}import {{ fieldTypes }} from './types/field_types'
import {{ enums }} from './types/enums'
/**
* generated using types_generator.py as Netsuite don't expose a metadata API for them.
*/
export const customTypes: Readonly<Record<string, ObjectType>> = {{
{custom_types_map_entries}}}
const innerCustomTypes: ObjectType[] = [
{all_inner_types_vars}]
export const fileCabinetTypes: Readonly<Record<string, ObjectType>> = {{
file,
folder,
}}
export const isCustomType = (type: ObjectType): boolean =>
!_.isUndefined(customTypes[type.elemID.name])
export const isFileCabinetType = (type: ObjectType): boolean =>
!_.isUndefined(fileCabinetTypes[type.elemID.name])
export const getAllTypes = (): TypeElement[] => [
...Object.values(customTypes),
...innerCustomTypes,
...Object.values(enums),
...Object.values(fileCabinetTypes),
...Object.values(fieldTypes),
]
'''
default_value_pattern = re.compile("[\s\S]*The default value is '?‘?([-|#\w]*)’?'?\.[\s\S]*") # e.g. ‘MIDDLE’, 'NORMAL', T, '|', '#000000', 'windows-1252'
possible_values_pattern = re.compile("[\s\S]*For information about possible values, see ('*\w*'*)\.[\s\S]*")
def extract_default_value_from_field_description(description):
regex_matches = default_value_pattern.match(description)
if regex_matches:
return regex_matches.groups()[0]
return None
def parse_field_def(type_name, cells, is_attribute, is_inner_type, script_id_prefix):
def to_field_type(field_name, netsuite_field_type, description):
field_full_name = type_name + '_' + field_name
if field_full_name in field_name_to_type_name:
return field_name_to_type_name[field_full_name]
if field_name == SCRIPT_ID_FIELD_NAME:
return 'BuiltinTypes.SERVICE_ID'
if netsuite_field_type in ['string', 'date', 'time', 'rgb']:
return 'BuiltinTypes.STRING'
if netsuite_field_type == 'boolean':
return 'BuiltinTypes.BOOLEAN'
if netsuite_field_type == 'integer' or netsuite_field_type.startswith('float'): # in kpiscorecard.highlighting the field type (float) contains description
return 'BuiltinTypes.NUMBER'
if netsuite_field_type == 'single-select list':
references_to_single_enum = 'For information about possible values, see' in description
if references_to_single_enum:
regex_matches = possible_values_pattern.match(description)
if regex_matches:
enum_name = regex_matches.groups()[0]
return "enums.{0}".format(enum_name)
return 'BuiltinTypes.STRING /* Original type was {0} */'.format(' '.join(netsuite_field_type.splitlines()))
def is_required(is_required_from_doc, field_name):
field_full_name = type_name + '_' + field_name
return is_required_from_doc and not (field_full_name in should_not_be_required)
def create_script_id_regex(description):
def remove_script_id_underscore_suffix(script_id_prefix):
return script_id_prefix[:-1] if script_id_prefix.endswith('_') else script_id_prefix
# extract script_id for top level types from the description since the script_ids prefixes aren't accurate in https://{account_id}.app.netsuite.com/app/help/helpcenter.nl?fid=subsect_1537555588.html
script_id_prefix_from_description = extract_default_value_from_field_description(description)
script_id_prefix_from_doc = remove_script_id_underscore_suffix(script_id_prefix_from_description if script_id_prefix_from_description is not None else script_id_prefix)
correct_script_id_prefix = type_name_to_special_script_id_prefix[type_name] if (type_name in type_name_to_special_script_id_prefix) else script_id_prefix_from_doc
return '^{0}[0-9a-z_]+'.format(correct_script_id_prefix)
field_name = cells[0].text
description = cells[3].text
field_type = to_field_type(field_name, cells[1].text, description)
is_required_from_doc = cells[2].text.lower() == 'required'
has_length_limitations = 'value can be up to' in description and 'BuiltinTypes.STRING' in field_type
annotations = {}
if is_required(is_required_from_doc, field_name):
annotations['[CORE_ANNOTATIONS.REQUIRED]'] = 'true'
if is_attribute:
annotations['[constants.IS_ATTRIBUTE]'] = 'true'
if field_name == SCRIPT_ID_FIELD_NAME and not is_inner_type:
annotations['[CORE_ANNOTATIONS.RESTRICTION]'] = "createRestriction({{ regex: '{0}' }})".format(create_script_id_regex(description))
if has_length_limitations:
regex_matches = re.match("[\s\S]*value can be up to (\d*) characters long\.[\s\S]*", description)
length_limit = regex_matches.groups()[0]
annotations['// [CORE_ANNOTATIONS.LENGTH_LIMIT]'] = length_limit
return { NAME: field_name, TYPE: field_type, ANNOTATIONS: annotations, DESCRIPTION: ' '.join(description.splitlines()) }
def parse_enums(account_id):
webpage.get(enums_link_template.format(account_id = account_id))
enums_list_items = webpage.find_elements_by_xpath('//*[@id="nshelp"]/div[2]/div/ul/li/p/a')
enum_name_to_page_link = { enum_link.text : enum_link.get_attribute('href') for enum_link in enums_list_items }
enum_to_possible_values = {}
for enum_name, page_link in enum_name_to_page_link.items():
try:
webpage.get(page_link)
enum_to_possible_values[enum_name] = [possible_value.text.split()[0] for possible_value in webpage.find_elements_by_xpath('//*[@id="nshelp"]/div[2]/div/div/ul/li/p')]
except Exception as e:
logging.error('Failed to extract possible values for enum: ' + enum_name + '. Error: ', e)
return enum_to_possible_values
def parse_type_for_inner_structured_field(type_name, inner_type_name_to_def, top_level_type_name):
type_def = parse_type(type_name, None, inner_type_name_to_def, top_level_type_name)
inner_type_name_to_def[type_name] = type_def
return type_name
def parse_type(type_name, script_id_prefix, inner_type_name_to_def, top_level_type_name = None):
if top_level_type_name is None:
top_level_type_name = type_name
is_inner_type = type_name != top_level_type_name
def is_structured_list_field(fields_tables_len, structured_fields_len):
type_description_sections = webpage.find_elements_by_xpath('//*[@id="nshelp"]/div[2]/div/p')
is_explicitly_not_a_list = any([('field group is a DEFAULT' in type_description_section.text) for type_description_section in type_description_sections])
is_explicitly_a_list = any([('field group is a COLLECTION' in type_description_section.text) for type_description_section in type_description_sections])
return is_explicitly_a_list or (fields_tables_len == 1 and structured_fields_len == 1 and not is_explicitly_not_a_list)
fields_tables = webpage.find_elements_by_xpath('//*[@class="nshelp_section"]')
field_definitions = []
annotations = {}
for fields_table in fields_tables:
fields_section_headline = fields_table.find_element_by_xpath('.//h2').text
if fields_section_headline == 'Feature Dependencies':
# we don't have anything interesting to extract here
continue
if fields_section_headline == 'Additional Files':
additional_file_suffix = fields_table.find_element_by_xpath('.//ul/li/p/strong').text[len('Object-Script-ID.template.'):]
field_definitions.append({ NAME: 'content', TYPE: 'fieldTypes.fileContent',
ANNOTATIONS: {'[constants.ADDITIONAL_FILE_SUFFIX]': "'{0}'".format(additional_file_suffix)}, IS_LIST: False })
continue
if fields_section_headline == 'Structured Fields':
inner_structured_field_name_to_link = { inner_structured_field.text : inner_structured_field.get_attribute('href')
for inner_structured_field in fields_table.find_elements_by_xpath('.//ul/li/p/a') }
is_list_from_doc = is_structured_list_field(len(fields_tables), len(inner_structured_field_name_to_link.items()))
for inner_structured_field_name, link in inner_structured_field_name_to_link.items():
webpage.get(link)
# we create inner types with their parent's type_name so there will be no Salto element naming collisions
created_inner_type_name = parse_type_for_inner_structured_field(type_name + '_' + inner_structured_field_name, inner_type_name_to_def, top_level_type_name)
is_list = False
if (is_list_from_doc and created_inner_type_name not in should_not_be_list) or created_inner_type_name in should_be_list:
is_list = True
field_definitions.append({ NAME: inner_structured_field_name, TYPE: created_inner_type_name, ANNOTATIONS: {}, IS_LIST: is_list })
continue
if fields_section_headline == 'Attributes':
is_attribute = True
elif fields_section_headline == 'Fields':
is_attribute = False
else:
raise Exception('unknown fields section ', fields_section_headline)
for field_row in fields_table.find_elements_by_xpath('.//tbody/tr'):
cells = field_row.find_elements_by_xpath('.//td')
field_def = parse_field_def(type_name, cells, is_attribute, is_inner_type, script_id_prefix)
field_def[IS_LIST] = False
field_definitions.append(field_def)
return { NAME: type_name, ANNOTATIONS: annotations, FIELDS: field_definitions }
def parse_types_definitions(account_id, type_name_to_script_id_prefix):
def get_script_id_prefix(type_name):
if type_name.lower() in type_name_to_script_id_prefix:
return type_name_to_script_id_prefix[type_name.lower()]
return "'FIX_ME!'"
webpage.get(sdf_xml_definitions_link_template.format(account_id = account_id))
types_list_items = webpage.find_elements_by_xpath('//*[@id="nshelp"]/div[2]/div/ul/li/p/a')
type_name_to_page_link = { type_link.text : type_link.get_attribute('href') for type_link in types_list_items }
type_name_to_types_defs = {}
for type_name, page_link in type_name_to_page_link.items():
try:
webpage.get(page_link)
script_id_prefix = get_script_id_prefix(type_name)
inner_type_name_to_def = {}
type_def = parse_type(type_name, script_id_prefix, inner_type_name_to_def)
type_name_to_types_defs[type_name] = { TYPE_DEF: type_def, INNER_TYPE_NAME_TO_DEF: inner_type_name_to_def }
except Exception as e:
logging.error('Failed to parse type: ' + type_name + '. Error: ', sys.exc_info())
return type_name_to_types_defs
def generate_type_name_to_script_id_prefix():
type_name_to_script_id_prefix = {}
for row in webpage.find_elements_by_xpath('//*[@id="nshelp"]/div[2]/div/div[2]/table/tbody/tr'):
cells = row.find_elements_by_xpath('.//td/p')
type_name_to_script_id_prefix[cells[0].text] = cells[1].text
return type_name_to_script_id_prefix
def login(username, password, secret_key_2fa):
logging.info('Trying to login to NetSuite documentation')
# submit username & password
time.sleep(1)
webpage.find_element_by_xpath('/html/body/div/div/div[2]/form/div[2]/input').send_keys(username)
webpage.find_element_by_xpath('/html/body/div/div/div[2]/form/div[3]/input').send_keys(password)
webpage.find_element_by_xpath('//*[@id="login-submit"]').click()
time.sleep(2)
# generate 2FA token and submit
token2fa = pyotp.TOTP(secret_key_2fa).now()
webpage.find_element_by_xpath('//*[@id="n-id-component-20"]').send_keys(token2fa)
webpage.find_element_by_xpath('//*[@id="n-id-component-41"]').click()
time.sleep(1)
def create_types_file(type_names):
import_types_statements = ''.join([import_type_statement_template.format(type_name = type_name) for type_name in type_names])
custom_types_map_entries = ''.join([custom_types_map_entry_template.format(type_name = type_name) for type_name in type_names])
all_inner_types_vars = ''.join([type_inner_types_vars_template.format(type_name = type_name) for type_name in type_names])
file_content = types_file_template.format(import_types_statements = import_types_statements, custom_types_map_entries = custom_types_map_entries, all_inner_types_vars = all_inner_types_vars)
with open(SRC_DIR + 'types.ts', 'w') as file:
file.write(file_content)
def parse_netsuite_types(account_id, username, password, secret_key_2fa):
try:
logging.info('Starting to parse Netsuite types')
webpage.get(script_ids_prefix_link_template.format(account_id = account_id))
login(username, password, secret_key_2fa)
logging.info('Logged in')
type_name_to_script_id_prefix = generate_type_name_to_script_id_prefix()
type_name_to_types_defs = parse_types_definitions(account_id, type_name_to_script_id_prefix)
logging.info('Parsed objects definitions')
enum_to_possible_values = parse_enums(account_id)
logging.info('Parsed enums definitions')
return type_name_to_types_defs, enum_to_possible_values
finally:
webpage.quit()
def generate_enums_file(enum_to_possible_values):
def create_restriction_annotation(values):
return enum_restriction_annotation_template.format(values = values) if len(values) > 0 else ''
enums_elem_ids_list = [type_elem_id_template.format(type_name = enum_name) for enum_name in enum_to_possible_values.keys()]
enums_entries_list = [primitive_string_type_entry_template.format(type_name = enum_name, annotations = create_restriction_annotation(values)) for enum_name, values in enum_to_possible_values.items()]
file_content = enums_file_template.format(enums_elem_ids = ''.join(enums_elem_ids_list), enums_entries = ''.join(enums_entries_list))
with open(TYPES_DIR + 'enums.ts', 'w') as file:
file.write(file_content)
def format_type_def(type_name, type_def, top_level_type_name = None):
def format_type_annotations():
formatted_type_annotations = ''
for key, val in type_def[ANNOTATIONS].items():
formatted_type_annotations += type_annotation_template.format(annotation_name = key, annotation_value = val)
return type_annotations_template.format(annotations = formatted_type_annotations)
def format_field_annotations(field_annotations):
formatted_field_annotations = ''
for key, val in field_annotations.items():
formatted_field_annotations += field_annotation_template.format(annotation_name = key, annotation_value = val)
return formatted_field_annotations
def format_field_def(field_def):
formatted_field_annotations = format_field_annotations(field_def[ANNOTATIONS])
field_type = 'new ListType({0})'.format(field_def[TYPE]) if field_def[IS_LIST] else field_def[TYPE]
formatted_field = field_template.format(field_name = field_def[NAME], type_name = type_name, field_type = field_type, annotations = formatted_field_annotations)
field_description_comment = ' /* Original description: {0} */'.format(field_def[DESCRIPTION]) if (DESCRIPTION in field_def and field_def[DESCRIPTION] != '') else ''
return formatted_field + field_description_comment
def should_export_type_def(is_inner_type, type_name):
return not is_inner_type or type_name in inner_types_to_export
is_inner_type = top_level_type_name != None
annotations = format_type_annotations()
field_definitions = []
for field_def in type_def[FIELDS]:
field_definitions.append(format_field_def(field_def))
path = type_path_template.format(type_name = top_level_type_name if is_inner_type else type_name) # all inner_types will be located in the same file as their parent
export = 'export ' if should_export_type_def(is_inner_type, type_name) else ''
return type_template.format(type_name = type_name, export = export, annotations = annotations,
field_definitions = '\n'.join(field_definitions), path = path)
def format_inner_types_defs(top_level_type_name, inner_type_name_to_def):
inner_types_defs = []
for inner_type_name, inner_type_def in inner_type_name_to_def.items():
formatted_inner_type_def = format_type_def(inner_type_name, inner_type_def, top_level_type_name)
inner_types_defs.append(inner_types_def_template.format(type_name = top_level_type_name, inner_type_name = inner_type_name, type_def = formatted_inner_type_def))
return ''.join(inner_types_defs)
# in addressForm, entryForm and transactionForm the order of the fields matters in the sent XML to SDF
# and thus we order it on the type definition so the adapter will be able to sort the values based on that order
def order_types_fields(type_name_to_types_defs):
type_name_to_fields_order = {
'addressForm': ['scriptid', 'standard', 'name', 'mainFields', 'customCode', 'addressTemplate', 'countries'],
'addressForm_mainFields': ['fieldGroup', 'defaultFieldGroup'],
'entryForm': ['scriptid', 'standard', 'name', 'recordType', 'inactive', 'preferred',
'storedWithRecord', 'mainFields', 'tabs', 'customCode', 'quickViewFields', 'actionbar','useForPopup',
'editingInList', 'buttons'], # Not sure where buttons field should be located. In case it exists it might fail to deploy but it's preferred that it'll fail than it will delete the existing value without letting the user to know.
'entryForm_mainFields': ['fieldGroup', 'defaultFieldGroup'],
'entryForm_tabs_tab_fieldGroups': ['fieldGroup', 'defaultFieldGroup'],
'entryForm_tabs_tab_subItems_subTab_fieldGroups': ['fieldGroup', 'defaultFieldGroup'],
'transactionForm': ['scriptid', 'standard', 'name', 'recordType', 'inactive', 'preferred',
'storedWithRecord', 'mainFields', 'tabs', 'customCode', 'quickViewFields', 'actionbar', 'disclaimer',
'address', 'allowAddMultiple', 'emailMessageTemplate', 'printingType', 'totalBox', 'linkedForms', 'roles', 'preferences', 'buttons'], # Not sure where buttons field should be located. In case it exists it might fail to deploy but it's preferred that it'll fail than it will delete the existing value without letting the user to know.
'transactionForm_mainFields': ['fieldGroup', 'defaultFieldGroup'],
'transactionForm_tabs_tab_fieldGroups': ['fieldGroup', 'defaultFieldGroup'],
'transactionForm_tabs_tab_subItems_subTab_fieldGroups': ['fieldGroup', 'defaultFieldGroup'],
'transactionForm_linkedForms_linkedForm': ['type', 'form']
}
for type_name, fields_order in type_name_to_fields_order.items():
top_level_type_name = type_name.split('_')[0]
type_defs = type_name_to_types_defs[top_level_type_name]
type_def = type_defs[INNER_TYPE_NAME_TO_DEF][type_name] if top_level_type_name != type_name else type_defs[TYPE_DEF]
type_def_fields = type_def[FIELDS]
if len(fields_order) != len(type_def_fields):
logging.warning('Mismatch in the order of {0} type fields! len(fields_order)={1} len(type_def_fields)={2}'.format(type_name, len(fields_order), len(type_def_fields)))
field_name_to_def = dict((field[NAME], field) for field in type_def_fields)
ordered_fields = []
for field_name in fields_order:
if (field_name in field_name_to_def):
ordered_fields.append(field_name_to_def[field_name])
else:
logging.warning('Field {0} is not defined in type {1} definition'.format(field_name, type_name))
type_def[FIELDS] = ordered_fields
def generate_file_per_type(type_name_to_types_defs):
order_types_fields(type_name_to_types_defs)
for type_name, type_defs in type_name_to_types_defs.items():
inner_type_name_to_def = type_defs[INNER_TYPE_NAME_TO_DEF]
type_def = type_defs[TYPE_DEF]
elem_id_def = type_elem_id_template.format(type_name = type_name)
formatted_type_def = format_type_def(type_name, type_def)
file_data = type_inner_types_array_template.format(type_name = type_name) + elem_id_def + format_inner_types_defs(type_name, inner_type_name_to_def) + formatted_type_def
import_statements = import_statements_for_type_def_template.format(
list_type_import = list_type_import if 'new ListType(' in file_data else '',
enums_import = enums_import if 'enums.' in file_data else '',
field_types_import = field_types_import if 'fieldTypes.' in file_data else '')
type_def_file_content = HEADER_FOR_DEFS + import_statements + file_data
with open(CUSTOM_TYPES_DIR + type_name + '.ts', 'w') as file:
file.write(type_def_file_content)
inner_types_to_export = {
'dataset_dependencies',
'savedcsvimport_filemappings',
'customsegment_segmentapplication_transactionline_applications',
'customsegment_segmentapplication_transactionbody_applications',
'bundleinstallationscript_scriptdeployments',
'clientscript_scriptdeployments',
'customrecordactionscript_scriptdeployments',
'mapreducescript_scriptdeployments',
'massupdatescript_scriptdeployments',
'portlet_scriptdeployments',
'restlet_scriptdeployments',
'scheduledscript_scriptdeployments',
'sdfinstallationscript_scriptdeployments',
'suitelet_scriptdeployments',
'usereventscript_scriptdeployments',
'workflowactionscript_scriptdeployments',
'customrecordtype_permissions_permission',
}
should_not_be_required = {
'publisheddashboard_dashboards_dashboard_centercolumn_customsearch_savedsearch',
'publisheddashboard_dashboards_dashboard_centercolumn_customportlet_source',
'publisheddashboard_dashboards_dashboard_centercolumn_list_type',
'workflow_workflowstates_workflowstate_workflowactions_sendcampaignemailaction_recipientfield',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_sendcampaignemailaction_recipientfield',
}
should_not_be_list = {
'transactionForm_printingType_advanced',
'transactionForm_printingType_basic',
}
should_be_list = {
'addressForm_mainFields_fieldGroup',
'entryForm_mainFields_fieldGroup',
'entryForm_tabs_tab_fieldGroups_fieldGroup',
'entryForm_tabs_tab_subItems_subTab_fieldGroups_fieldGroup',
'transactionForm_mainFields_fieldGroup',
'transactionForm_tabs_tab_fieldGroups_fieldGroup',
'transactionForm_tabs_tab_subItems_subTab_fieldGroups_fieldGroup',
'workflow_workflowstates_workflowstate_workflowactions',
'workflow_workflowstates_workflowstate_workflowactions_addbuttonaction',
'workflow_workflowstates_workflowstate_workflowactions_confirmaction',
'workflow_workflowstates_workflowstate_workflowactions_createlineaction',
'workflow_workflowstates_workflowstate_workflowactions_createrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_customaction',
'workflow_workflowstates_workflowstate_workflowactions_gotopageaction',
'workflow_workflowstates_workflowstate_workflowactions_gotorecordaction',
'workflow_workflowstates_workflowstate_workflowactions_initiateworkflowaction',
'workflow_workflowstates_workflowstate_workflowactions_lockrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_removebuttonaction',
'workflow_workflowstates_workflowstate_workflowactions_returnusererroraction',
'workflow_workflowstates_workflowstate_workflowactions_sendcampaignemailaction',
'workflow_workflowstates_workflowstate_workflowactions_sendemailaction',
'workflow_workflowstates_workflowstate_workflowactions_setdisplaylabelaction',
'workflow_workflowstates_workflowstate_workflowactions_setdisplaytypeaction',
'workflow_workflowstates_workflowstate_workflowactions_setfieldmandatoryaction',
'workflow_workflowstates_workflowstate_workflowactions_setfieldvalueaction',
'workflow_workflowstates_workflowstate_workflowactions_showmessageaction',
'workflow_workflowstates_workflowstate_workflowactions_subscribetorecordaction',
'workflow_workflowstates_workflowstate_workflowactions_transformrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_addbuttonaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_createlineaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_createrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_customaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_gotopageaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_gotorecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_initiateworkflowaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_lockrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_removebuttonaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_returnusererroraction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_sendcampaignemailaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_sendemailaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_setdisplaylabelaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_setdisplaytypeaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_setfieldmandatoryaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_setfieldvalueaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_subscribetorecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_transformrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_createrecordaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_returnusererroraction',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_sendemailaction',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_setfieldvalueaction',
}
field_name_to_type_name = {
'addressForm_addressTemplate': 'fieldTypes.cdata',
'dataset_definition': 'fieldTypes.cdata',
'dataset_dependencies_dependency': 'new ListType(BuiltinTypes.STRING)',
'savedsearch_dependencies_dependency': 'new ListType(BuiltinTypes.STRING)',
'workbook_definition': 'fieldTypes.cdata',
'workflow_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_addbuttonaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_confirmaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_createlineaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_createrecordaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_customaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_gotopageaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_gotorecordaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_initiateworkflowaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_lockrecordaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_removebuttonaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_returnusererroraction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_sendcampaignemailaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_sendemailaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_setdisplaylabelaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_setdisplaytypeaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_setfieldmandatoryaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_setfieldvalueaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_showmessageaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_subscribetorecordaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_transformrecordaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_workflowactiongroup_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_createrecordaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_returnusererroraction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_sendemailaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowactions_workflowsublistactiongroup_setfieldvalueaction_initcondition_formula': 'fieldTypes.cdata',
'workflow_workflowstates_workflowstate_workflowtransitions_workflowtransition_initcondition_formula': 'fieldTypes.cdata',
}
type_name_to_special_script_id_prefix = {
'customtransactiontype': '(customtransaction|customsale|custompurchase)', # https://{account_id}.app.netsuite.com/app/help/helpcenter.nl?fid=section_1520439377.html
'kpiscorecard': '(custkpiscorecard|kpiscorecard)', # The kpiscorecard prefix appeared when fetching the Extended Dev account
}
webpage = webdriver.Chrome() # the web page is defined here to avoid passing it to all inner methods
def main():
account_id, username, password, secret_key_2fa = (sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
type_name_to_types_defs, enum_to_possible_values = parse_netsuite_types(account_id, username, password, secret_key_2fa)
generate_enums_file(enum_to_possible_values)
logging.info('Generated enums file')
generate_file_per_type(type_name_to_types_defs)
logging.info('Generated file per Netsuite type')
create_types_file(type_name_to_types_defs.keys())
logging.info('Generated Types file')
logging.info('Done!')
logging.info('VALIDATE file & folder TYPES THEY WERE NOT GENERATED USING THE SCRIPT!')
main()
# --- known issues that were handled in the script: ---
# lists are not identified correctly -> should use also manual mappings (should_be_list, should_not_be_list)
# script_id prefixes table is not accurate and not complete -> we are calculating it also from the scriptid field's description column
# we add a regex restriction for the scriptid field handling also special cases in type_name_to_special_script_id_prefix
# we set the type of SCRIPT_ID_FIELD_NAME as BuiltinTypes.SERVICE_ID
# emailtemplate & advancedpdftemplate types have an additional file containing the template data. We add the file's extension as an annotation to the type and added a 'content' field to the type.
# there are fields that suppose to have a certain type but in fact they have another type, handled using field_name_to_type_name
# in addressForm, entryForm and transactionForm the order of the fields matters in the sent XML to SDF (https://{account_id}.app.netsuite.com/app/help/helpcenter.nl?fid=section_1497980303.html)
# we order it manually in order_types_fields.
# in addressForm and transactionForm, customCode & buttons fields are intentionally omitted as it seems that they do not exist and if they are sent to SDF they cause errors no matter in which order
# file cabinet types (file & folder) are not generated by the script and should be validated when generating types for new Netsuite API version
| 56.9819
| 345
| 0.774557
|
c5e1f3102d15fec97688f3c0082a4dde4344e558
| 2,785
|
py
|
Python
|
petastorm/tests/test_metadata_read.py
|
VivekPanyam/petastorm
|
d8dcee4541f26d58195e9cb119ac5acf53d0a58d
|
[
"Apache-2.0"
] | null | null | null |
petastorm/tests/test_metadata_read.py
|
VivekPanyam/petastorm
|
d8dcee4541f26d58195e9cb119ac5acf53d0a58d
|
[
"Apache-2.0"
] | null | null | null |
petastorm/tests/test_metadata_read.py
|
VivekPanyam/petastorm
|
d8dcee4541f26d58195e9cb119ac5acf53d0a58d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017-2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from shutil import move, rmtree
from tempfile import mkdtemp
from petastorm import make_reader
from petastorm.etl.dataset_metadata import PetastormMetadataError
from petastorm.tests.test_common import create_test_dataset
# Tiny count of rows in a fake dataset
ROWS_COUNT = 10
class MetadataUnischemaReadTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Initializes dataset once per test. All tests in this class will use the same fake dataset."""
# Write a fake dataset to this location
cls._dataset_dir = mkdtemp('test_metadata_read')
cls._dataset_url = 'file://{}'.format(cls._dataset_dir)
cls._dataset_dicts = create_test_dataset(cls._dataset_url, range(ROWS_COUNT))
@classmethod
def tearDownClass(cls):
""" Remove everything created in setUpClass. """
rmtree(cls._dataset_dir)
def vanish_metadata(self, filename='_common_metadata'):
""" Move the already generated _metadata to a different name, leveraging tempdir uniqueness. """
move('{}/{}'.format(self._dataset_dir, filename), '{}{}'.format(self._dataset_dir, filename + '_gone'))
def restore_metadata(self, filename='_common_metadata'):
""" Restore _metadata file for other tests. """
move('{}{}'.format(self._dataset_dir, filename + '_gone'), '{}/{}'.format(self._dataset_dir, filename))
def test_no_common_metadata_crc(self):
"""We add our own entries to the _common_metadata file, unfortunatelly, the .crc file is not updated by
current pyarrow implementation, so we delete the .crc to make sure there is no mismatch with the content of
_common_metadata file"""
self.assertFalse(os.path.exists(os.path.join(MetadataUnischemaReadTest._dataset_dir, '._common_metadata.crc')))
def test_no_metadata(self):
self.vanish_metadata()
with self.assertRaises(PetastormMetadataError) as e:
make_reader(self._dataset_url, reader_pool_type='dummy')
self.assertTrue('Could not find _common_metadata file' in str(e.exception))
self.restore_metadata()
if __name__ == '__main__':
unittest.main()
| 42.846154
| 119
| 0.725314
|
ad2922b20dc00412bdaf075ba02292be7edc0dc5
| 10,224
|
py
|
Python
|
ladim_plugins/sedimentation/test_ibm.py
|
pnsaevik/ladim_plugins
|
2097a451346e2517e50f735be8b31862f24e64e2
|
[
"MIT"
] | null | null | null |
ladim_plugins/sedimentation/test_ibm.py
|
pnsaevik/ladim_plugins
|
2097a451346e2517e50f735be8b31862f24e64e2
|
[
"MIT"
] | null | null | null |
ladim_plugins/sedimentation/test_ibm.py
|
pnsaevik/ladim_plugins
|
2097a451346e2517e50f735be8b31862f24e64e2
|
[
"MIT"
] | 1
|
2020-07-09T08:18:36.000Z
|
2020-07-09T08:18:36.000Z
|
import numpy as np
from ladim_plugins.sedimentation import ibm
import pytest
class Stub:
def __init__(self, **kwargs):
self._dic = kwargs
def __getattr__(self, item):
return self._dic[item]
class Test_update:
@staticmethod
def gsf(num, hvel=0, wvel=0, dt=1):
zr = np.zeros(num)
zrl = np.zeros_like
grid = Stub(
sample_depth=lambda x, y: zrl(x) + 10,
lonlat=lambda x, y: (x, y),
)
forcing = Stub(velocity=lambda x, y, z, tstep=0: [zrl(x) + hvel] * 2)
state = Stub(
X=zr*0, Y=zr*0, Z=zr + 10, active=zr*0, alive=zr == 0, age=zr*0,
sink_vel=zr + wvel, dt=dt, timestep=0,
)
return grid, state, forcing
def test_does_not_resuspend_when_zero_velocity(self):
ibmconf = dict(lifespan=100, taucrit=0.12, vertical_mixing=0.01)
grid, state, forcing = self.gsf(num=5)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.all(state.Z == 10)
def test_does_resuspend_when_high_velocity(self):
ibmconf = dict(lifespan=100, taucrit=0.12, vertical_mixing=0.01)
grid, state, forcing = self.gsf(num=5, hvel=1)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.all(state.Z < 10)
def test_resuspended_particles_have_altered_active_flag(self):
ibmconf = dict(lifespan=100, taucrit=0.12, vertical_mixing=0.01)
grid, state, forcing = self.gsf(num=5, hvel=1)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.all(state.active == 2)
def test_does_not_resuspend_when_zero_diffusion(self):
ibmconf = dict(lifespan=100, taucrit=0.12, vertical_mixing=0)
grid, state, forcing = self.gsf(num=5, hvel=1)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.all(state.Z == 10)
def test_sinking_when_mix_of_pelagic_and_benthic_particles(self):
ibmconf = dict(lifespan=100, taucrit=1, vertical_mixing=0)
grid, state, forcing = self.gsf(num=5, wvel=1)
state.Z[:] = [0, 1, 5, 10, 10]
state.active[:] = 1
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.int32(state.active).tolist() == [1, 1, 1, 0, 0]
assert state.Z.tolist() == [1, 2, 6, 10, 10]
def test_does_not_resuspend_when_large_sinkvel(self):
# On rare occasions, vertical mixing can overcome the sinking velocity
np.random.seed(0)
ibmconf = dict(lifespan=100, taucrit=0.12, vertical_mixing=0.01)
grid, state, forcing = self.gsf(num=5, hvel=1, wvel=1)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.all(state.Z == 10)
class Test_ladis:
def test_exact_when_trivial(self):
x0 = np.array([[1, 2, 3], [4, 5, 6]])
t0 = 0
dt = 1
def advect_fn(x, _):
return np.zeros_like(x)
def diffuse_fn(x, _):
return np.zeros_like(x)
sol = ibm.ladis(x0, t0, t0 + dt, advect_fn, diffuse_fn)
assert sol.shape == x0.shape
assert sol.tolist() == x0.tolist()
def test_exact_when_linear(self):
x0 = np.array([[1, 2, 3], [4, 5, 6]])
t0 = 0
dt = 1
def advect_fn(x, _):
return np.ones_like(x) * [1, 2, 3]
def diffuse_fn(x, _):
return np.zeros_like(x)
sol = ibm.ladis(x0, t0, t0 + dt, advect_fn, diffuse_fn)
assert sol.tolist() == [[2, 4, 6], [5, 7, 9]]
def test_exact_when_linear_onedim(self):
x0 = np.array([1, 2, 3])
t0 = 0
dt = 1
def advect_fn(x, _):
return np.ones_like(x)
def diffuse_fn(x, _):
return np.zeros_like(x)
sol = ibm.ladis(x0, t0, t0 + dt, advect_fn, diffuse_fn)
assert sol.tolist() == [2, 3, 4]
def test_well_mixed_when_sqrt(self):
np.random.seed(0)
x0 = np.linspace(0, 1, 1001)[1:]
t0 = 0
dt = .001
numsteps = 1000
def advect_fn(x, _):
return np.zeros_like(x)
def diffuse_fn(x, _):
return np.sqrt(2*x)
sol = x0
t = t0
for i in range(numsteps):
sol = ibm.ladis(x0, t, t + dt, advect_fn, diffuse_fn)
# Reflective boundaries
sol[sol > 1] = 2 - sol[sol > 1]
sol[sol < 0] *= -1
# Check distribution
hist_num, hist_edges = np.histogram(sol, bins=np.arange(0, 1.05, .1))
too_low = hist_num < (len(x0) / len(hist_num)) * .8
too_high = hist_num > (len(x0) / len(hist_num)) * 1.2
assert not np.any(too_low)
assert not np.any(too_high)
def test_well_mixed_when_linear(self):
np.random.seed(0)
x0 = np.linspace(0, 1, 1001)[1:]
t0 = 0
dt = .001
numsteps = 1000
def advect_fn(x, _):
return np.zeros_like(x)
def diffuse_fn(x, _):
return x
sol = x0
t = t0
for i in range(numsteps):
sol = ibm.ladis(x0, t, t + dt, advect_fn, diffuse_fn)
# Reflective boundaries
sol[sol > 1] = 2 - sol[sol > 1]
sol[sol < 0] *= -1
# Check distribution
hist_num, hist_edges = np.histogram(sol, bins=np.arange(0, 1.05, .1))
too_low = hist_num < (len(x0) / len(hist_num)) * .8
too_high = hist_num > (len(x0) / len(hist_num)) * 1.2
assert not np.any(too_low)
assert not np.any(too_high)
def get_grainsize_fixture_fname():
import ladim_plugins.tests
import os
return os.path.join(
os.path.dirname(ladim_plugins.sedimentation.__file__), 'grainsize.nc')
class Test_taucrit_grain_size_bin:
@pytest.mark.parametrize("method", ['grain_size_bin', 'grain_size_poly'])
def test_varying_taucrit_when_regular_grid(self, method):
grainsize_fname = get_grainsize_fixture_fname()
ibmconf = dict(
lifespan=100,
taucrit=dict(
method=method,
source=grainsize_fname,
varname='grain_size',
),
vertical_mixing=0.01,
)
config = dict(dt=1, ibm=ibmconf)
my_ibm = ibm.IBM(config)
num = 5
vel = .12 # Tuned to give tau ~ 0.09
w = 0
zr = np.zeros(num)
rng = np.arange(num)
zrl = np.zeros_like
grid = Stub(
sample_depth=lambda x, y: zrl(x) + 10,
lonlat=lambda x, y: (
5.651 + x*0.01,
59.021 + y*0.01,
),
)
forcing = Stub(velocity=lambda x, y, z, tstep=0: [zrl(x) + vel] * 2)
state = Stub(
X=rng, Y=rng, Z=zr + 10, active=zr, alive=zr == 0, age=zr,
sink_vel=zr + w, dt=config['dt'], timestep=1,
)
my_ibm.update_ibm(grid, state, forcing)
assert np.any(state.Z < 10)
assert np.any(state.Z == 10)
class Test_vertical_mixing:
@staticmethod
def gfs(num, hvel=0, wvel=0, dt=1):
zr = np.zeros(num)
zrl = np.zeros_like
grid = Stub(
sample_depth=lambda x, y: zrl(x) + 10,
lonlat=lambda x, y: (x, y),
)
forcing = Stub(velocity=lambda x, y, z, tstep=0: [zrl(x) + hvel] * 2)
state = Stub(
X=zrl(zr), Y=zrl(zr), Z=zr + 10, active=zrl(zr), alive=zr == 0,
age=zrl(zr), sink_vel=zr + wvel, dt=dt, timestep=0,
)
return grid, forcing, state
def test_vertical_movement_when_method_constant(self):
ibmconf = dict(
lifespan=100,
taucrit=0,
vertical_mixing=dict(
method='constant',
value=0.01,
),
)
grid, forcing, state = self.gfs(num=5)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
assert np.all(state.Z == 10)
my_ibm.update_ibm(grid, state, forcing)
assert np.all(state.Z != 10)
def test_some_initial_lift_when_method_bounded_linear(self):
np.random.seed(0)
ibmconf = dict(
lifespan=100,
taucrit=0.12,
vertical_mixing=dict(
method='bounded_linear',
max_diff=0.01,
),
)
grid, forcing, state = self.gfs(num=5, hvel=1)
config = dict(dt=state.dt, ibm=ibmconf)
my_ibm = ibm.IBM(config)
my_ibm.update_ibm(grid, state, forcing)
assert np.any(state.Z != 10)
assert np.any(state.Z == 10)
class Test_get_settled_particles:
def test_selects_settled_particles(self):
import xarray as xr
ladim_dset = xr.Dataset(
data_vars=dict(
X=xr.Variable('particle_instance', np.arange(6)),
Y=xr.Variable('particle_instance', np.arange(10, 16)),
Z=xr.Variable('particle_instance', np.arange(20, 26)),
pid=xr.Variable('particle_instance', [0, 1, 2, 4, 0, 1]),
group_id=xr.Variable('particle', [0, 1, 0, 2, 1]),
particle_count=xr.Variable('time', [4, 2]),
),
coords=dict(
time=xr.Variable(
'time',
np.array(["2000-01-01", "2000-01-02"]).astype('datetime64[D]')
),
)
)
settled_dset = ibm.get_settled_particles(ladim_dset)
assert list(settled_dset.data_vars) == ['group_id', 'X', 'Y', 'Z']
assert settled_dset.pid.values.tolist() == [0, 1, 2, 4]
assert settled_dset.X.dims == ('pid', )
assert settled_dset.X.values.tolist() == [4, 5, 2, 3]
assert settled_dset.group_id.values.tolist() == [0, 1, 0, 1]
| 30.338279
| 82
| 0.548611
|
7b363bb14890a1c746ac9f5c1ce7508c007e2dd7
| 2,294
|
py
|
Python
|
networkapi/equipamento/resource/EquipmentTypeGetAllResource.py
|
brunodevel/GloboNetworkAPI
|
ea8eebc0337636f9250e628cc392514934db8edd
|
[
"Apache-2.0"
] | null | null | null |
networkapi/equipamento/resource/EquipmentTypeGetAllResource.py
|
brunodevel/GloboNetworkAPI
|
ea8eebc0337636f9250e628cc392514934db8edd
|
[
"Apache-2.0"
] | null | null | null |
networkapi/equipamento/resource/EquipmentTypeGetAllResource.py
|
brunodevel/GloboNetworkAPI
|
ea8eebc0337636f9250e628cc392514934db8edd
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from networkapi.admin_permission import AdminPermission
from networkapi.auth import has_perm
from networkapi.infrastructure.xml_utils import dumps_networkapi
import logging
from networkapi.rest import RestResource, UserNotAuthorizedError
from networkapi.equipamento.models import TipoEquipamento, EquipamentoError
from django.forms.models import model_to_dict
class EquipmentTypeGetAllResource(RestResource):
log = logging.getLogger('EquipmentTypeGetAllResource')
def handle_get(self, request, user, *args, **kwargs):
"""Treat requests GET to list all Equipment Type.
URL: equipmenttype/all
"""
try:
self.log.info("GET to list all Equipment Type")
# User permission
if not has_perm(user, AdminPermission.EQUIPMENT_MANAGEMENT, AdminPermission.READ_OPERATION):
self.log.error(
u'User does not have permission to perform the operation.')
raise UserNotAuthorizedError(None)
map_list = []
for equipment_type in TipoEquipamento.objects.all():
eq_tp = {
'id': equipment_type.id, 'nome': equipment_type.tipo_equipamento}
map_list.append(eq_tp)
return self.response(dumps_networkapi({'equipment_type': map_list}))
except UserNotAuthorizedError:
return self.not_authorized()
except EquipamentoError:
return self.response_error(1)
| 38.233333
| 104
| 0.710549
|
71eb9e5a7ff4709067eb66e4dd361c8d02d86e3f
| 178
|
py
|
Python
|
doc/CyberSecurity/Modern_Cryptography/Enigma/Python_Enigma/ReflectorA.py
|
tanducmai/.dotfiles
|
13cad9bc7fcecff6108eb5df34635d4748712532
|
[
"Unlicense"
] | null | null | null |
doc/CyberSecurity/Modern_Cryptography/Enigma/Python_Enigma/ReflectorA.py
|
tanducmai/.dotfiles
|
13cad9bc7fcecff6108eb5df34635d4748712532
|
[
"Unlicense"
] | null | null | null |
doc/CyberSecurity/Modern_Cryptography/Enigma/Python_Enigma/ReflectorA.py
|
tanducmai/.dotfiles
|
13cad9bc7fcecff6108eb5df34635d4748712532
|
[
"Unlicense"
] | null | null | null |
#Reflector A
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
reflector = "EJMZALYXVBWFCRQUONTSPIKHGD"
def reflect(letter):
spot = alphabet.find(letter)
return reflector[spot]
| 17.8
| 40
| 0.764045
|
abe1efcfb7137a47a36aba247b9e4b259b082ea1
| 3,336
|
py
|
Python
|
venv/Lib/site-packages/networkx/algorithms/community/tests/test_centrality.py
|
Richoor/HEFT
|
8422bfc5e9abf132c409a0ae299cbde29eb6e5fc
|
[
"BSD-3-Clause"
] | 4
|
2018-10-19T04:36:20.000Z
|
2020-02-13T16:14:09.000Z
|
venv/Lib/site-packages/networkx/algorithms/community/tests/test_centrality.py
|
Richoor/HEFT
|
8422bfc5e9abf132c409a0ae299cbde29eb6e5fc
|
[
"BSD-3-Clause"
] | null | null | null |
venv/Lib/site-packages/networkx/algorithms/community/tests/test_centrality.py
|
Richoor/HEFT
|
8422bfc5e9abf132c409a0ae299cbde29eb6e5fc
|
[
"BSD-3-Clause"
] | 1
|
2018-08-23T14:45:15.000Z
|
2018-08-23T14:45:15.000Z
|
# -*- coding: utf-8 -*-
# test_centrality.py - unit tests for algorithms.community.centrality
#
# Copyright 2015, 2016 NetworkX developers.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Unit tests for the :mod:`networkx.algorithms.community.centrality`
module.
"""
from operator import itemgetter
from nose.tools import assert_equal
from nose.tools import assert_true
import networkx as nx
from networkx.algorithms.community import girvan_newman
def set_of_sets(iterable):
return set(map(frozenset, iterable))
def validate_communities(result, expected):
assert_equal(set_of_sets(result), set_of_sets(expected))
def validate_possible_communities(result, *expected):
assert_true(any(set_of_sets(result) == set_of_sets(p) for p in expected))
class TestGirvanNewman(object):
"""Unit tests for the
:func:`networkx.algorithms.community.centrality.girvan_newman`
function.
"""
def test_no_edges(self):
G = nx.empty_graph(3)
communities = list(girvan_newman(G))
assert_equal(len(communities), 1)
validate_communities(communities[0], [{0}, {1}, {2}])
def test_undirected(self):
# Start with the graph .-.-.-.
G = nx.path_graph(4)
communities = list(girvan_newman(G))
assert_equal(len(communities), 3)
# After one removal, we get the graph .-. .-.
validate_communities(communities[0], [{0, 1}, {2, 3}])
# After the next, we get the graph .-. . ., but there are two
# symmetric possible verisons.
validate_possible_communities(communities[1], [{0}, {1}, {2, 3}],
[{0, 1}, {2}, {3}])
# After the last removal, we alway get the empty graph.
validate_communities(communities[2], [{0}, {1}, {2}, {3}])
def test_directed(self):
G = nx.DiGraph(nx.path_graph(4))
communities = list(girvan_newman(G))
assert_equal(len(communities), 3)
validate_communities(communities[0], [{0, 1}, {2, 3}])
validate_possible_communities(communities[1], [{0}, {1}, {2, 3}],
[{0, 1}, {2}, {3}])
validate_communities(communities[2], [{0}, {1}, {2}, {3}])
def test_selfloops(self):
G = nx.path_graph(4)
G.add_edge(0, 0)
G.add_edge(2, 2)
communities = list(girvan_newman(G))
assert_equal(len(communities), 3)
validate_communities(communities[0], [{0, 1}, {2, 3}])
validate_possible_communities(communities[1], [{0}, {1}, {2, 3}],
[{0, 1}, {2}, {3}])
validate_communities(communities[2], [{0}, {1}, {2}, {3}])
def test_most_valuable_edge(self):
G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)])
# Let the most valuable edge be the one with the highest weight.
def heaviest(G): return max(G.edges(data='weight'), key=itemgetter(2))[:2]
communities = list(girvan_newman(G, heaviest))
assert_equal(len(communities), 3)
validate_communities(communities[0], [{0}, {1, 2, 3}])
validate_communities(communities[1], [{0}, {1}, {2, 3}])
validate_communities(communities[2], [{0}, {1}, {2}, {3}])
| 35.870968
| 82
| 0.61301
|
625064129186841687f03065c106ae96463aeb08
| 3,974
|
py
|
Python
|
alipay/aop/api/request/AlipayUserCertdocSyncRequest.py
|
articuly/alipay-sdk-python-all
|
0259cd28eca0f219b97dac7f41c2458441d5e7a6
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/request/AlipayUserCertdocSyncRequest.py
|
articuly/alipay-sdk-python-all
|
0259cd28eca0f219b97dac7f41c2458441d5e7a6
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/request/AlipayUserCertdocSyncRequest.py
|
articuly/alipay-sdk-python-all
|
0259cd28eca0f219b97dac7f41c2458441d5e7a6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayUserCertdocSyncModel import AlipayUserCertdocSyncModel
class AlipayUserCertdocSyncRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayUserCertdocSyncModel):
self._biz_content = value
else:
self._biz_content = AlipayUserCertdocSyncModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.user.certdoc.sync'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
| 27.406897
| 166
| 0.643181
|
7ee13083aaf65d1936a1919ade839d3f576c28b7
| 12,607
|
py
|
Python
|
pycas/cas/core/CasFactory.py
|
dkpro/dkpro-pycas
|
caeeea1613a689ce51c14c11cbc285dfe7af6521
|
[
"Apache-2.0"
] | 5
|
2017-08-18T04:00:08.000Z
|
2018-06-01T12:57:21.000Z
|
pycas/cas/core/CasFactory.py
|
dkpro/dkpro-pycas
|
caeeea1613a689ce51c14c11cbc285dfe7af6521
|
[
"Apache-2.0"
] | 7
|
2018-05-14T10:44:47.000Z
|
2019-11-04T12:13:02.000Z
|
pycas/cas/core/CasFactory.py
|
dkpro/dkpro-pycas
|
caeeea1613a689ce51c14c11cbc285dfe7af6521
|
[
"Apache-2.0"
] | 4
|
2018-03-23T15:28:39.000Z
|
2019-03-26T14:52:11.000Z
|
'''
Created on Jan 23, 2017
@author: Dibyojyoti
'''
from pycas.cas.parse.CasXmiParser import CasXmiParser
from pycas.cas.core.CAS import CAS
from pycas.type.cas.CAS_Type import CAS_Type
from pycas.type.cas.TypeSystemFactory import TypeSystemFactory
from pycas.type.cas.TypeDescription import TypeDescription
class CasFactory(object):
'''
This class creates a CASS object from a given CAS xmi file and type system file
'''
def __init__(self):
'''
Constructor
'''
'''
returns a CAS object
Args:
xmifilepath : cas xmi file path
typefilepath : type system xml file path
'''
def buildCAS(self,xmifilepath,typefilepath):
#create type ystem object
typesystem = TypeSystemFactory.readTypeSystem(self, typefilepath)
#create a CAS object
cas = CAS(typesystem)
#create cas xmi perser object to fetch elements from xmi file
casXmiParser = CasXmiParser()
casXmiParser.setXmiAsFile(xmifilepath)
return self.__build(cas, casXmiParser)
def buildCASfromString(self,xmistring,typefilepath):
#create type ystem object
typesystem = TypeSystemFactory.readTypeSystem(self, typefilepath)
#create a CAS object
cas = CAS(typesystem)
#create cas xmi perser object to fetch elements from xmi file
casXmiParser = CasXmiParser()
casXmiParser.setXmiAsString(xmistring)
return self.__build(cas, casXmiParser)
def buildCASfromStrings(self, xmistring, typesysstemString):
# create type ystem object
typesystem = TypeSystemFactory.readTypeSystemString(self, typesysstemString)
# create a CAS object
cas = CAS(typesystem)
# create cas xmi perser object to fetch elements from xmi file
casXmiParser = CasXmiParser()
casXmiParser.setXmiAsString(xmistring)
return self.__build(cas, casXmiParser)
def __build(self,cas,casXmiParser):
#get xmi root element as string
rootstr=casXmiParser.getRootElementAsString()
#get cas sofa element
for k in casXmiParser.getChildAttributesAsDict(casXmiParser.getCasSofaChild()):
if(casXmiParser.getLocalname(k) == 'id'):
cas.freeId(cas.sofaFS.FSid)
cas.sofaFS.FSid = casXmiParser.getChildAttributesAsDict(casXmiParser.getCasSofaChild()).get(k)
if(casXmiParser.getLocalname(k) == 'sofaNum'):
pass
elif(casXmiParser.getLocalname(k) == 'sofaID'):
pass
elif(casXmiParser.getLocalname(k) == 'mimeType'):
cas.sofaMimeType = casXmiParser.getChildAttributesAsDict(casXmiParser.getCasSofaChild()).get(k)
elif(casXmiParser.getLocalname(k) == 'sofaString'):
cas.documentText = casXmiParser.getChildAttributesAsDict(casXmiParser.getCasSofaChild()).get(k)
#"set type system file path for other feature structures"
#these contains the map of features names which refers other FS with FSid and the FS in which the feature is to be added
FtobeAddedDict = []
FSnotInIndexList = []
#loop each non cas feature structure elements
for fs in casXmiParser.getNonCasChildren():
if not (casXmiParser.getLocalname(fs) == 'XMI'): #or
#casXmiParser.getLocalname(fs) == 'TagsetDescription' or
# casXmiParser.getLocalname(fs) == 'DocumentMetaData'):
#get the name space url of the element and convert into domain
domain = casXmiParser.getNamespace(fs)[8:casXmiParser.getNamespace(fs).index('.ecore')].replace('/','.')
domain = domain+'.'+casXmiParser.getLocalname(fs)
#get the type description from type system for the domain
typedesc = cas.typesystem.getType(domain)
if(typedesc == None):
raise ValueError('bad xml',casXmiParser.getNamespace,'not in type system' )
return
#loop through attributes of the feature structure element, add build feature dict
featureDict = {}
# list to hold the attributes and values those refer to other FS
referenceList = {}
for k in casXmiParser.getChildAttributesAsDict(fs):
if(casXmiParser.getLocalname(k) == 'id'):
featureDict[casXmiParser.getLocalname(k)] = int(casXmiParser.getChildAttributesAsDict(fs).get(k))
#add sofa attribute
elif(casXmiParser.getLocalname(k) == 'sofa'):
featureDict[casXmiParser.getLocalname(k)] = cas.sofaFS
#add begin and end attribute
elif((casXmiParser.getLocalname(k) == 'begin') or (casXmiParser.getLocalname(k) == 'end')):
featureDict[casXmiParser.getLocalname(k)] = int(casXmiParser.getChildAttributesAsDict(fs).get(k))
#add other attributes
else:
#get the attribute value
value = casXmiParser.getChildAttributesAsDict(fs).get(k)
#get the feature description from type system
theFeature = cas.typesystem.getFeature(casXmiParser.getLocalname(k),domain)
if(theFeature == None):
raise ValueError('bad xml,', casXmiParser.getLocalname(k),'is not a feature of',casXmiParser.getLocalname(fs))
return
#if both rang and element type does not exists, throw error
if((theFeature.rangeType == None) and (theFeature.elementType == None)):
raise ValueError(casXmiParser.getLocalname(k),'range type and element type does not exists in type system')
return
# if range type does not exist, determine type from element type'
if(theFeature.rangeType == None):
#if type is cas primitive just add it to the feature dictionary
if (theFeature.elementType in (CAS_Type.TYPE_NAME_BOOLEAN,CAS_Type.TYPE_NAME_FLOAT,
CAS_Type.TYPE_NAME_DOUBLE,CAS_Type.TYPE_NAME_INTEGER,CAS_Type.TYPE_NAME_LONG,
CAS_Type.TYPE_NAME_STRING)):
featureDict[casXmiParser.getLocalname(k)] = value
else:
'value is a FS, so the referenced FS may not b created now , save it to referenceList'
referenceList[casXmiParser.getLocalname(k)] = value
#if rang type is FSARRAY determine type from element type
elif((not theFeature.rangeType == None) and
(isinstance(theFeature.rangeType,TypeDescription)) and
(theFeature.rangeType.name == CAS_Type.TYPE_NAME_FSARRAY)):
#if type is cas primitive just add it to the feature dictionary
#even if its a list just send the string the TOP.__setAttribute will convert to list
if (theFeature.elementType in (CAS_Type.TYPE_NAME_BOOLEAN,CAS_Type.TYPE_NAME_FLOAT,
CAS_Type.TYPE_NAME_DOUBLE,CAS_Type.TYPE_NAME_INTEGER,CAS_Type.TYPE_NAME_LONG,
CAS_Type.TYPE_NAME_STRING)):
featureDict[casXmiParser.getLocalname(k)] = value
else:
'value is a FS array, so the referenced FS may not b created now , save it to referenceList'
referenceList[casXmiParser.getLocalname(k)] = value
#'if rang type is not FSARRAY determine type from range type'
else:
#if type is cas primitive just add it to the feature dictionary
#even if its a list just send the string the TOP.__setAttribute will convert to list
if (theFeature.rangeType in (CAS_Type.TYPE_NAME_BOOLEAN,CAS_Type.TYPE_NAME_FLOAT,
CAS_Type.TYPE_NAME_DOUBLE,CAS_Type.TYPE_NAME_INTEGER,CAS_Type.TYPE_NAME_LONG,
CAS_Type.TYPE_NAME_STRING)):
featureDict[casXmiParser.getLocalname(k)] = value
else:
'value is a FS, so the referenced FS may not b created now , save it to referenceList'
referenceList[casXmiParser.getLocalname(k)] = value
#if the element's super type is TOP in type system create a TOP FS, otherwise create a Annotation FS
if not(typedesc.superType =='uima.cas.TOP'):
if('DocumentMetaData' in domain):
anewFs = cas.createDocumentAnnotation(domain,len(cas.documentText),featureDict)
else:
anewFs= cas.createAnnotation(domain,featureDict)
cas.addToIndex(anewFs)
else:
anewFs= cas.createFS(domain,featureDict)
FSnotInIndexList.append(anewFs)
#add to the list of reference to be added after all FS are created from referenceList
for e in referenceList:
FtobeAddedDict.append({'refby': anewFs.FSid,'fname': e,'refto': referenceList[e]})
#set the references
#iterate the features to be added dict
for element in FtobeAddedDict:
#initialize FS reference list
refFS = []
refto = None
refby = None
#if reference to string contains list of ids , create a int list of ids
refarray = []
if ' ' in element['refto']:
refarray = element['refto'].split()
refarray = list(map(int, refarray))
#iterate through all FS added to index in CAS
for fs in cas.getAnnotationIndex():
# find the FS for reference ids in CAS, if there are more than one references
if len(refarray) > 0 :
for ref in refarray:
if(fs.FSid == ref):
refFS.append(fs)
#if there is only one reference
else:
if(fs.FSid == int(element['refto'])):
refto = fs
if(fs.FSid == int(element['refby'])):
refby = fs
if len(refFS) > 0:
setattr(refby,element['fname'],refFS)
if not refto == None:
setattr(refby,element['fname'],refto)
#set the references, #iterate the features to be added dict
for element in FtobeAddedDict:
#initialize FS reference list
refFS = []
refto = None
refby = None
for fs in cas.getAnnotationIndex():
if(fs.FSid == int(element['refby'])):
refby = fs
#if reference to string contains list of ids , create a int list of ids
refarray = []
if ' ' in element['refto']:
refarray = element['refto'].split()
refarray = list(map(int, refarray))
#iterate through all FS added to index in CAS
for fs in FSnotInIndexList:
# find the FS for reference ids in CAS, if there are more than one references
if len(refarray) > 0 :
for ref in refarray:
if(fs.FSid == ref):
refFS.append(fs)
#if there is only one reference
else:
if(fs.FSid == int(element['refto'])):
refto = fs
if len(refFS) > 0:
setattr(refby,element['fname'],refFS)
if not refto == None:
setattr(refby,element['fname'],refto)
return cas
| 53.876068
| 138
| 0.555009
|
7f1842197c18298ae10e00ef469c0516703864bd
| 5,209
|
py
|
Python
|
tests/test_crop_foregroundd.py
|
benduffy1/MONAI
|
046e625b09262261373d7b8039fb652547201368
|
[
"Apache-2.0"
] | 3
|
2020-06-22T20:59:14.000Z
|
2021-04-09T21:24:45.000Z
|
tests/test_crop_foregroundd.py
|
Borda/MONAI
|
e0db5a564225a7cb62e7a23df97267019006302f
|
[
"Apache-2.0"
] | null | null | null |
tests/test_crop_foregroundd.py
|
Borda/MONAI
|
e0db5a564225a7cb62e7a23df97267019006302f
|
[
"Apache-2.0"
] | 1
|
2020-06-22T19:22:59.000Z
|
2020-06-22T19:22:59.000Z
|
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import torch
from parameterized import parameterized
from monai.transforms import CropForegroundd
from tests.utils import TEST_NDARRAYS, assert_allclose
TEST_POSITION, TESTS = [], []
for p in TEST_NDARRAYS:
TEST_POSITION.append(
[
{
"keys": ["img", "label"],
"source_key": "label",
"select_fn": lambda x: x > 0,
"channel_indices": None,
"margin": 0,
},
{
"img": p(
np.array([[[1, 0, 2, 0, 1], [0, 1, 2, 1, 0], [2, 2, 3, 2, 2], [0, 1, 2, 1, 0], [1, 0, 2, 0, 1]]])
),
"label": p(
np.array([[[0, 0, 0, 0, 0], [0, 1, 0, 1, 0], [0, 0, 1, 0, 0], [0, 1, 0, 1, 0], [0, 0, 0, 0, 0]]])
),
},
p(np.array([[[1, 2, 1], [2, 3, 2], [1, 2, 1]]])),
]
)
TESTS.append(
[
{"keys": ["img"], "source_key": "img", "select_fn": lambda x: x > 1, "channel_indices": None, "margin": 0},
{
"img": p(
np.array([[[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 3, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]])
)
},
p(np.array([[[3]]])),
]
)
TESTS.append(
[
{"keys": ["img"], "source_key": "img", "select_fn": lambda x: x > 0, "channel_indices": 0, "margin": 0},
{
"img": p(
np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]])
)
},
p(np.array([[[1, 2, 1], [2, 3, 2], [1, 2, 1]]])),
]
)
TESTS.append(
[
{"keys": ["img"], "source_key": "img", "select_fn": lambda x: x > 0, "channel_indices": None, "margin": 1},
{
"img": p(
np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]])
)
},
p(np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0]]])),
]
)
TESTS.append(
[
{
"keys": ["img"],
"source_key": "img",
"select_fn": lambda x: x > 0,
"channel_indices": None,
"margin": [2, 1],
},
{
"img": p(
np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]])
)
},
p(np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]])),
]
)
TESTS.append(
[
{
"keys": ["img"],
"source_key": "img",
"select_fn": lambda x: x > 0,
"channel_indices": 0,
"margin": 0,
"k_divisible": [4, 6],
"mode": "edge",
},
{
"img": p(
np.array(
[[[0, 2, 1, 2, 0], [1, 1, 2, 1, 1], [2, 2, 3, 2, 2], [1, 1, 2, 1, 1], [0, 0, 0, 0, 0]]],
dtype=np.float32,
)
)
},
p(np.array([[[0, 2, 1, 2, 0, 0], [1, 1, 2, 1, 1, 1], [2, 2, 3, 2, 2, 2], [1, 1, 2, 1, 1, 1]]])),
]
)
class TestCropForegroundd(unittest.TestCase):
@parameterized.expand(TEST_POSITION + TESTS)
def test_value(self, argments, input_data, expected_data):
result = CropForegroundd(**argments)(input_data)
r, i = result["img"], input_data["img"]
self.assertEqual(type(r), type(i))
if isinstance(r, torch.Tensor):
self.assertEqual(r.device, i.device)
assert_allclose(r, expected_data)
@parameterized.expand(TEST_POSITION)
def test_foreground_position(self, argments, input_data, _):
result = CropForegroundd(**argments)(input_data)
np.testing.assert_allclose(result["foreground_start_coord"], np.array([1, 1]))
np.testing.assert_allclose(result["foreground_end_coord"], np.array([4, 4]))
argments["start_coord_key"] = "test_start_coord"
argments["end_coord_key"] = "test_end_coord"
result = CropForegroundd(**argments)(input_data)
np.testing.assert_allclose(result["test_start_coord"], np.array([1, 1]))
np.testing.assert_allclose(result["test_end_coord"], np.array([4, 4]))
if __name__ == "__main__":
unittest.main()
| 36.426573
| 119
| 0.444231
|
620117126deef89eb70e8e531a68e9183c97cb7e
| 9,349
|
py
|
Python
|
Script_Extraction_API/quadrillage_recurssif_opti.py
|
CazabetLyon1/lyon_resto_2018_Autumn
|
2d9f495b1fdcea3e9f9e0945baf4d49f24b17de4
|
[
"DOC"
] | 1
|
2020-10-25T08:21:19.000Z
|
2020-10-25T08:21:19.000Z
|
Script_Extraction_API/quadrillage_recurssif_opti.py
|
CazabetLyon1/lyon_resto_2018_Autumn
|
2d9f495b1fdcea3e9f9e0945baf4d49f24b17de4
|
[
"DOC"
] | null | null | null |
Script_Extraction_API/quadrillage_recurssif_opti.py
|
CazabetLyon1/lyon_resto_2018_Autumn
|
2d9f495b1fdcea3e9f9e0945baf4d49f24b17de4
|
[
"DOC"
] | null | null | null |
from math import *
import copy
import json
import time
from geopy import distance
import matplotlib.pyplot as plt
import numpy as np
#liste en variable globale qui contient les coordonnées et le rayon associé de tout les appels qu'il faut faire pour couvrir la meme zone que l'ancien json donné
l=[]
NB_MAX_RES = 30
#fonction qui lit le json, on obtient une liste qui contient des dictionnaire, une clé ["lat"] ["lng"] ["nom"] pour chaque
def lect_json (nom_fich):
with open(nom_fich,"r", encoding="utf8") as f:
restaurants=json.load(f)
print(type(restaurants))
liste=[]
for resto in restaurants:
dict = {}
dict["lat"]=resto["geometry"]["location"]["lat"]
dict["lng"]=resto["geometry"]["location"]["lng"]
dict["nom"]=resto["name"]
liste.append(dict)
return liste
def calc_min_max_2 (liste):
""" recherche du min et du max pour les points afin de générer le point en haut a gauche,a droite en bas a gauche et droite """
dict= {}
for resto in liste:
if "latmax" not in dict:
dict["lngmax"]=resto.copy()
dict["lngmin"]=resto.copy()
dict["latmax"]=resto.copy()
dict["latmin"]=resto.copy()
if ((float)(resto["lng"])>dict["lngmax"]["lng"]):
dict["lngmax"]=resto.copy()
if((float)(resto["lng"])<dict["lngmin"]["lng"]):
dict["lngmin"]=resto.copy()
if ((float)(resto["lat"])>dict["latmax"]["lat"]):
dict["latmax"]=resto.copy()
if ((float)(resto["lat"])<dict["latmin"]["lat"]):
dict["latmin"]=resto.copy()
dict_point={}
dict_point["haut_gauche"]={}
dict_point["haut_gauche"]["lat"]=dict["latmax"]["lat"] # ATTENTION : lng va du - vers le plus de gauche a droite , et lat va du - vers le + de haut en bas
dict_point["haut_gauche"]["lng"]=dict["lngmin"]["lng"]
dict_point["bas_gauche"]={}
dict_point["bas_gauche"]["lat"]=dict["latmin"]["lat"]
dict_point["bas_gauche"]["lng"]=dict["lngmin"]["lng"]
dict_point["haut_droite"]={}
dict_point["haut_droite"]["lat"]=dict["latmax"]["lat"]
dict_point["haut_droite"]["lng"]=dict["lngmax"]["lng"]
dict_point["bas_droite"]={}
dict_point["bas_droite"]["lat"]=dict["latmin"]["lat"]
dict_point["bas_droite"]["lng"]=dict["lngmax"]["lng"]
print("haut_gauche: ",dict_point["haut_gauche"]["lat"],dict_point["haut_gauche"]["lng"],"haut_droite : ",dict_point["haut_droite"]["lat"],dict_point["haut_droite"]["lng"],"bas_gauche :" ,dict_point["bas_gauche"]["lat"],dict_point["bas_gauche"]["lng"],"bas_droite :",dict_point["bas_droite"]["lat"],dict_point["bas_droite"]["lng"])
return dict_point
#calcul de distance entre deux point : appel a la bibliothèque geopy , les coordonnées données a la fonction sont de type (lng , lat )
def dist(x1,y1,x2,y2) :
#res=sqrt(pow(y2-y1,2)+pow(x2-x1,2))
couple1 =(x1,y1)
couple2 =(x2,y2)
res=(distance.distance(couple1, couple2).km)
return res
#fonction qui selon, un point , un radius , une liste de resto, renvoie le nb de point dans le cercle de rayon radius de centre le point donné
def nb_res_appel(x,y,radius,liste):
compteur=0
for res in liste:
resultat=dist(x,y,res["lng"],res["lat"])
if resultat<=radius:
compteur+=1
return compteur
def decoupage(x,y,radius,radius_reel,liste_resto):
""" Focntion appelé au cas ou il y aurait plus de 60 restaurant (contenue dans lite_resto) dans le cercle de centre (x,y) de rayon radius, découpe ce cercle en plusieurs cercle plus petit sur lesquels on va faire des appels, le but étant que dans chaque cercle il y ai moins de 60 restaurants"""
#code pas propre : faire une fonction pour le contenue qui se repete
new_x=x+radius /2
new_y=y+radius /(2)
nb_res=nb_res_appel(new_x,new_y,radius_reel/2,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/2,radius_reel/2,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/2
dict["r"]=radius/2
l.append(dict)
new_x=x-radius /2
new_y=y+radius /(2)
nb_res=nb_res_appel(new_x,new_y,radius_reel/2,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/2,radius_reel/2,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/2
dict["r"]=radius/2
l.append(dict)
new_x=x+radius /2
new_y=y-radius /(2)
nb_res=nb_res_appel(new_x,new_y,radius_reel/2,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/2,radius_reel/2,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/2
dict["r"]=radius/2
l.append(dict)
new_x=x-radius /2
new_y=y-radius /(2)
nb_res=nb_res_appel(new_x,new_y,radius_reel/2,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/2,radius_reel/2,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/2
dict["r"]=radius/2
l.append(dict)
new_x=x
new_y=y
nb_res=nb_res_appel(new_x,new_y,radius_reel/2,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/2,radius_reel/2,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/2
dict["r"]=radius/2
l.append(dict)
new_x=x+radius
new_y=y
nb_res=nb_res_appel(new_x,new_y,radius_reel/3,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/3,radius_reel/3,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/3
dict["r"]=radius/3
l.append(dict)
new_x=x
new_y=y+radius
nb_res=nb_res_appel(new_x,new_y,radius_reel/3,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/3,radius_reel/3,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/3
dict["r"]=radius/3
l.append(dict)
new_x=x-radius
new_y=y
nb_res=nb_res_appel(new_x,new_y,radius_reel/3,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/3,radius_reel/3,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/3
dict["r"]=radius/3
l.append(dict)
new_x=x
new_y=y-radius
nb_res=nb_res_appel(new_x,new_y,radius_reel/3,liste_resto)
if nb_res > NB_MAX_RES :
#print("appel rec en plus")
decoupage(new_x,new_y,radius/3,radius_reel/3,liste_resto)
else :
if nb_res != 0:
dict={}
dict["lng"]=new_x
dict["lat"]=new_y
dict["rr"]=radius_reel/3
dict["r"]=radius/3
l.append(dict)
return
def shrek(nb=5,fichier_res='liste_point_genere.txt',ancien_fichier="C:/Users/franc/OneDrive/Documents/FAC/LIFPROJET/LIFPROJET/JSON/restaurants.json"):
""" fonction ultime qui genère un fichier contenant les points ou faire nos appels """
liste_resto=[]
liste_resto=copy.deepcopy(lect_json(ancien_fichier)) #fichier json de reference dans lequel sont contenus tout les anciens restaurants
dict= calc_min_max_2(liste_resto)
radius=((dict["haut_droite"]["lng"]-dict["haut_gauche"]["lng"])) #on a besoin du radius en terme distance entre des points de type (lng , lat) afin de faire evoluer i et j , qui seront utiliser comme coordonnées de type (lng , lat) pour faire nos appels fictifs,décoper en cercle plus petit ect
radius=radius/(nb*2)
print(radius)
nb_ligne=(int)((dict["haut_gauche"]["lat"]-dict["bas_gauche"]["lat"])/radius) #on adapte le nombre de ligne sur lesquels on fait nos appels afin de quadriller toute la zone correctement (cf potentielle image fournies pour mieux voir)
nb_ligne=(nb_ligne+1)*2
#calcul du radius en distance réelle :
radius_reel=dist(dict["haut_gauche"]["lng"],dict["haut_gauche"]["lat"],dict["haut_gauche"]["lng"]+radius,dict["haut_gauche"]["lat"])# on en a besoin pour evaluer si un restaurant est dans le cercle ou non, comme la distance entre le restaurant et le centre du cercle sera dans cette unité
print(radius_reel)
for i in range(nb_ligne+1):
for j in range(nb+2) :
if i%2==0 :
x=dict["haut_gauche"]["lng"]+ 2*j*radius - radius
y=dict["haut_gauche"]["lat"]- i * radius
print("----")
if i%2==1 :
x=dict["haut_gauche"]["lng"]+ j*radius*2 + radius -radius
y=y=dict["haut_gauche"]["lat"]- i * radius
print("--")
nb_res=nb_res_appel(x,y,radius_reel,liste_resto)
if nb_res>NB_MAX_RES:
decoupage(x,y,radius,radius_reel,liste_resto)
else :
if nb_res != 0:
dict_res={}
dict_res["lng"]=x
dict_res["lat"]=y
dict_res["rr"]=radius_reel
dict_res["r"]=radius
l.append(dict_res)
print ("fini :)\n")
with open(fichier_res, 'w') as f:
f.write(json.dumps(l, indent=4))
print("Fini : nb points = ",len(l))
fig, ax=plt.subplots()
for d in l :
C=plt.Circle((d["lng"],d["lat"]),d["r"])
ax.add_artist(C)
print(d["lng"])
print(d["lat"])
print(d["r"])
ax.set_xlim((dict["haut_gauche"]["lng"]-0.01,dict["haut_droite"]["lng"]+0.01))
ax.set_ylim((dict["bas_gauche"]["lat"]-0.01,dict["haut_gauche"]["lat"]+0.01))
plt.show()
shrek(10,'liste_point_pour_bars.txt',"C:/Users/franc/OneDrive/Documents/FAC/LIFPROJET/LIFPROJET/JSON/bars.json")
| 32.688811
| 331
| 0.683603
|
3dcf6d8300d04b5e3276145e21d9eca8843dadf7
| 42,004
|
py
|
Python
|
saleor/graphql/product/tests/test_category.py
|
nestfiy/saleor
|
6fce3bc5c0ca72ac28db99553e6d2b49249c6dac
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/graphql/product/tests/test_category.py
|
nestfiy/saleor
|
6fce3bc5c0ca72ac28db99553e6d2b49249c6dac
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/graphql/product/tests/test_category.py
|
nestfiy/saleor
|
6fce3bc5c0ca72ac28db99553e6d2b49249c6dac
|
[
"CC-BY-4.0"
] | null | null | null |
import os
from unittest.mock import Mock, patch
import graphene
import pytest
from django.utils.functional import SimpleLazyObject
from django.utils.text import slugify
from graphql_relay import to_global_id
from ....product.error_codes import ProductErrorCode
from ....product.models import Category, Product, ProductChannelListing
from ....product.tests.utils import create_image, create_pdf_file_with_image_ext
from ....tests.utils import dummy_editorjs
from ....webhook.event_types import WebhookEventAsyncType
from ...tests.utils import (
get_graphql_content,
get_graphql_content_from_response,
get_multipart_request_body,
)
QUERY_CATEGORY = """
query ($id: ID, $slug: String, $channel: String){
category(
id: $id,
slug: $slug,
) {
id
name
ancestors(first: 20) {
edges {
node {
name
}
}
}
children(first: 20) {
edges {
node {
name
}
}
}
products(first: 10, channel: $channel) {
edges {
node {
id
}
}
}
}
}
"""
def test_category_query_by_id(user_api_client, product, channel_USD):
category = Category.objects.first()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
content = get_graphql_content(response)
category_data = content["data"]["category"]
assert category_data is not None
assert category_data["name"] == category.name
assert len(category_data["ancestors"]["edges"]) == category.get_ancestors().count()
assert len(category_data["children"]["edges"]) == category.get_children().count()
def test_category_query_invalid_id(user_api_client, product, channel_USD):
category_id = "'"
variables = {
"id": category_id,
"channel": channel_USD.slug,
}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables)
content = get_graphql_content_from_response(response)
assert len(content["errors"]) == 1
assert content["errors"][0]["message"] == f"Couldn't resolve id: {category_id}."
assert content["data"]["category"] is None
def test_category_query_object_with_given_id_does_not_exist(
user_api_client, product, channel_USD
):
category_id = graphene.Node.to_global_id("Category", -1)
variables = {
"id": category_id,
"channel": channel_USD.slug,
}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables)
content = get_graphql_content(response)
assert content["data"]["category"] is None
def test_category_query_object_with_invalid_object_type(
user_api_client, product, channel_USD
):
category = Category.objects.first()
category_id = graphene.Node.to_global_id("Product", category.pk)
variables = {
"id": category_id,
"channel": channel_USD.slug,
}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables)
content = get_graphql_content(response)
assert content["data"]["category"] is None
def test_category_query_doesnt_show_not_available_products(
user_api_client, product, channel_USD
):
category = Category.objects.first()
variant = product.variants.get()
# Set product as not visible due to lack of price.
variant.channel_listings.update(price_amount=None)
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
content = get_graphql_content(response)
category_data = content["data"]["category"]
assert category_data is not None
assert category_data["name"] == category.name
assert not category_data["products"]["edges"]
def test_category_query_description(user_api_client, product, channel_USD):
category = Category.objects.first()
description = dummy_editorjs("Test description.", json_format=True)
category.description = dummy_editorjs("Test description.")
category.save()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
query = """
query ($id: ID, $slug: String){
category(
id: $id,
slug: $slug,
) {
id
name
description
descriptionJson
}
}
"""
response = user_api_client.post_graphql(query, variables=variables)
content = get_graphql_content(response)
category_data = content["data"]["category"]
assert category_data["description"] == description
assert category_data["descriptionJson"] == description
def test_category_query_without_description(user_api_client, product, channel_USD):
category = Category.objects.first()
category.save()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
query = """
query ($id: ID, $slug: String){
category(
id: $id,
slug: $slug,
) {
id
name
description
descriptionJson
}
}
"""
response = user_api_client.post_graphql(query, variables=variables)
content = get_graphql_content(response)
category_data = content["data"]["category"]
assert category_data["description"] is None
assert category_data["descriptionJson"] == "{}"
def test_category_query_by_slug(user_api_client, product, channel_USD):
category = Category.objects.first()
variables = {"slug": category.slug, "channel": channel_USD.slug}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
content = get_graphql_content(response)
category_data = content["data"]["category"]
assert category_data is not None
assert category_data["name"] == category.name
assert len(category_data["ancestors"]["edges"]) == category.get_ancestors().count()
assert len(category_data["children"]["edges"]) == category.get_children().count()
def test_category_query_error_when_id_and_slug_provided(
user_api_client, product, graphql_log_handler, channel_USD
):
category = Category.objects.first()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"slug": category.slug,
"channel": channel_USD.slug,
}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
assert graphql_log_handler.messages == [
"saleor.graphql.errors.handled[INFO].GraphQLError"
]
content = get_graphql_content(response, ignore_errors=True)
assert len(content["errors"]) == 1
def test_category_query_error_when_no_param(
user_api_client, product, graphql_log_handler
):
variables = {}
response = user_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
assert graphql_log_handler.messages == [
"saleor.graphql.errors.handled[INFO].GraphQLError"
]
content = get_graphql_content(response, ignore_errors=True)
assert len(content["errors"]) == 1
def test_query_category_product_only_visible_in_listings_as_customer(
user_api_client, product_list, channel_USD
):
# given
category = Category.objects.first()
product_list[0].channel_listings.all().update(visible_in_listings=False)
product_count = Product.objects.count()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
# when
response = user_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
# then
content = get_graphql_content(response, ignore_errors=True)
assert len(content["data"]["category"]["products"]["edges"]) == product_count - 1
def test_query_category_product_visible_in_listings_as_staff_without_manage_products(
staff_api_client, product_list, channel_USD
):
# given
category = Category.objects.first()
product_list[0].channel_listings.all().update(visible_in_listings=False)
product_count = Product.objects.count()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
# when
response = staff_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
# then
content = get_graphql_content(response, ignore_errors=True)
assert (
len(content["data"]["category"]["products"]["edges"]) == product_count - 1
) # invisible doesn't count
def test_query_category_product_only_visible_in_listings_as_staff_with_perm(
staff_api_client, product_list, permission_manage_products
):
# given
staff_api_client.user.user_permissions.add(permission_manage_products)
category = Category.objects.first()
product_list[0].channel_listings.all().update(visible_in_listings=False)
product_count = Product.objects.count()
variables = {"id": graphene.Node.to_global_id("Category", category.pk)}
# when
response = staff_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
# then
content = get_graphql_content(response, ignore_errors=True)
assert len(content["data"]["category"]["products"]["edges"]) == product_count
def test_query_category_product_only_visible_in_listings_as_app_without_manage_products(
app_api_client, product_list, channel_USD
):
# given
category = Category.objects.first()
product_list[0].channel_listings.all().update(visible_in_listings=False)
product_count = Product.objects.count()
variables = {
"id": graphene.Node.to_global_id("Category", category.pk),
"channel": channel_USD.slug,
}
# when
response = app_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
# then
content = get_graphql_content(response, ignore_errors=True)
assert (
len(content["data"]["category"]["products"]["edges"]) == product_count - 1
) # invisible doesn't count
def test_query_category_product_only_visible_in_listings_as_app_with_perm(
app_api_client, product_list, permission_manage_products
):
# given
app_api_client.app.permissions.add(permission_manage_products)
category = Category.objects.first()
product_list[0].channel_listings.all().update(visible_in_listings=False)
product_count = Product.objects.count()
variables = {"id": graphene.Node.to_global_id("Category", category.pk)}
# when
response = app_api_client.post_graphql(QUERY_CATEGORY, variables=variables)
# then
content = get_graphql_content(response, ignore_errors=True)
assert len(content["data"]["category"]["products"]["edges"]) == product_count
CATEGORY_CREATE_MUTATION = """
mutation(
$name: String, $slug: String,
$description: JSONString, $backgroundImage: Upload,
$backgroundImageAlt: String, $parentId: ID) {
categoryCreate(
input: {
name: $name
slug: $slug
description: $description
backgroundImage: $backgroundImage
backgroundImageAlt: $backgroundImageAlt
},
parent: $parentId
) {
category {
id
name
slug
description
parent {
name
id
}
backgroundImage{
alt
}
}
errors {
field
code
message
}
}
}
"""
def test_category_create_mutation(
monkeypatch, staff_api_client, permission_manage_products, media_root
):
query = CATEGORY_CREATE_MUTATION
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
(
"saleor.product.thumbnails."
"create_category_background_image_thumbnails.delay"
),
mock_create_thumbnails,
)
category_name = "Test category"
description = "description"
category_slug = slugify(category_name)
category_description = dummy_editorjs(description, True)
image_file, image_name = create_image()
image_alt = "Alt text for an image."
# test creating root category
variables = {
"name": category_name,
"description": category_description,
"backgroundImage": image_name,
"backgroundImageAlt": image_alt,
"slug": category_slug,
}
body = get_multipart_request_body(query, variables, image_file, image_name)
response = staff_api_client.post_multipart(
body, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryCreate"]
assert data["errors"] == []
assert data["category"]["name"] == category_name
assert data["category"]["description"] == category_description
assert not data["category"]["parent"]
category = Category.objects.get(name=category_name)
assert category.description_plaintext == description
assert category.background_image.file
img_name, format = os.path.splitext(image_file._name)
file_name = category.background_image.name
assert file_name != image_file._name
assert file_name.startswith(f"category-backgrounds/{img_name}")
assert file_name.endswith(format)
mock_create_thumbnails.assert_called_once_with(category.pk)
assert data["category"]["backgroundImage"]["alt"] == image_alt
# test creating subcategory
parent_id = data["category"]["id"]
variables = {
"name": category_name,
"description": category_description,
"parentId": parent_id,
"slug": f"{category_slug}-2",
}
response = staff_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
data = content["data"]["categoryCreate"]
assert data["errors"] == []
assert data["category"]["parent"]["id"] == parent_id
@patch("saleor.plugins.webhook.plugin._get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_category_create_trigger_webhook(
mocked_webhook_trigger,
mocked_get_webhooks_for_event,
any_webhook,
monkeypatch,
staff_api_client,
permission_manage_products,
media_root,
settings,
):
query = CATEGORY_CREATE_MUTATION
mocked_get_webhooks_for_event.return_value = [any_webhook]
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
(
"saleor.product.thumbnails."
"create_category_background_image_thumbnails.delay"
),
mock_create_thumbnails,
)
category_name = "Test category"
description = "description"
category_slug = slugify(category_name)
category_description = dummy_editorjs(description, True)
image_file, image_name = create_image()
image_alt = "Alt text for an image."
# test creating root category
variables = {
"name": category_name,
"description": category_description,
"backgroundImage": image_name,
"backgroundImageAlt": image_alt,
"slug": category_slug,
}
body = get_multipart_request_body(query, variables, image_file, image_name)
response = staff_api_client.post_multipart(
body, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryCreate"]
category = Category.objects.first()
assert category
assert data["errors"] == []
mocked_webhook_trigger.assert_called_once_with(
{"id": graphene.Node.to_global_id("Category", category.id)},
WebhookEventAsyncType.CATEGORY_CREATED,
[any_webhook],
category,
SimpleLazyObject(lambda: staff_api_client.user),
)
@pytest.mark.parametrize(
"input_slug, expected_slug",
(
("test-slug", "test-slug"),
(None, "test-category"),
("", "test-category"),
("わたし-わ-にっぽん-です", "わたし-わ-にっぽん-です"),
),
)
def test_create_category_with_given_slug(
staff_api_client, permission_manage_products, input_slug, expected_slug
):
query = CATEGORY_CREATE_MUTATION
name = "Test category"
variables = {"name": name, "slug": input_slug}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryCreate"]
assert not data["errors"]
assert data["category"]["slug"] == expected_slug
def test_create_category_name_with_unicode(
staff_api_client, permission_manage_products
):
query = CATEGORY_CREATE_MUTATION
name = "わたし-わ にっぽん です"
variables = {"name": name}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryCreate"]
assert not data["errors"]
assert data["category"]["name"] == name
assert data["category"]["slug"] == "わたし-わ-にっぽん-です"
def test_category_create_mutation_without_background_image(
monkeypatch, staff_api_client, permission_manage_products
):
query = CATEGORY_CREATE_MUTATION
description = dummy_editorjs("description", True)
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
(
"saleor.product.thumbnails."
"create_category_background_image_thumbnails.delay"
),
mock_create_thumbnails,
)
# test creating root category
category_name = "Test category"
variables = {
"name": category_name,
"description": description,
"slug": slugify(category_name),
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryCreate"]
assert data["errors"] == []
assert mock_create_thumbnails.call_count == 0
MUTATION_CATEGORY_UPDATE_MUTATION = """
mutation($id: ID!, $name: String, $slug: String,
$backgroundImage: Upload, $backgroundImageAlt: String,
$description: JSONString) {
categoryUpdate(
id: $id
input: {
name: $name
description: $description
backgroundImage: $backgroundImage
backgroundImageAlt: $backgroundImageAlt
slug: $slug
}
) {
category {
id
name
description
parent {
id
}
backgroundImage{
alt
}
}
errors {
field
message
}
}
}
"""
def test_category_update_mutation(
monkeypatch, staff_api_client, category, permission_manage_products, media_root
):
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
(
"saleor.product.thumbnails."
"create_category_background_image_thumbnails.delay"
),
mock_create_thumbnails,
)
# create child category and test that the update mutation won't change
# it's parent
child_category = category.children.create(name="child")
category_name = "Updated name"
description = "description"
category_slug = slugify(category_name)
category_description = dummy_editorjs(description, True)
image_file, image_name = create_image()
image_alt = "Alt text for an image."
category_id = graphene.Node.to_global_id("Category", child_category.pk)
variables = {
"name": category_name,
"description": category_description,
"backgroundImage": image_name,
"backgroundImageAlt": image_alt,
"id": category_id,
"slug": category_slug,
}
body = get_multipart_request_body(
MUTATION_CATEGORY_UPDATE_MUTATION, variables, image_file, image_name
)
response = staff_api_client.post_multipart(
body, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]
assert data["errors"] == []
assert data["category"]["id"] == category_id
assert data["category"]["name"] == category_name
assert data["category"]["description"] == category_description
parent_id = graphene.Node.to_global_id("Category", category.pk)
assert data["category"]["parent"]["id"] == parent_id
category = Category.objects.get(name=category_name)
assert category.description_plaintext == description
assert category.background_image.file
mock_create_thumbnails.assert_called_once_with(category.pk)
assert data["category"]["backgroundImage"]["alt"] == image_alt
@patch("saleor.plugins.webhook.plugin._get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_category_update_trigger_webhook(
mocked_webhook_trigger,
mocked_get_webhooks_for_event,
any_webhook,
monkeypatch,
staff_api_client,
category,
permission_manage_products,
media_root,
settings,
):
mocked_get_webhooks_for_event.return_value = [any_webhook]
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
(
"saleor.product.thumbnails."
"create_category_background_image_thumbnails.delay"
),
mock_create_thumbnails,
)
category_name = "Updated name"
description = "description"
category_slug = slugify(category_name)
category_description = dummy_editorjs(description, True)
image_file, image_name = create_image()
image_alt = "Alt text for an image."
variables = {
"name": category_name,
"description": category_description,
"backgroundImage": image_name,
"backgroundImageAlt": image_alt,
"id": graphene.Node.to_global_id("Category", category.pk),
"slug": category_slug,
}
body = get_multipart_request_body(
MUTATION_CATEGORY_UPDATE_MUTATION, variables, image_file, image_name
)
response = staff_api_client.post_multipart(
body, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]
assert data["errors"] == []
mocked_webhook_trigger.assert_called_once_with(
{"id": variables["id"]},
WebhookEventAsyncType.CATEGORY_UPDATED,
[any_webhook],
category,
SimpleLazyObject(lambda: staff_api_client.user),
)
def test_category_update_mutation_invalid_background_image(
staff_api_client, category, permission_manage_products
):
image_file, image_name = create_pdf_file_with_image_ext()
image_alt = "Alt text for an image."
variables = {
"name": "new-name",
"slug": "new-slug",
"id": to_global_id("Category", category.id),
"backgroundImage": image_name,
"backgroundImageAlt": image_alt,
"isPublished": True,
}
body = get_multipart_request_body(
MUTATION_CATEGORY_UPDATE_MUTATION, variables, image_file, image_name
)
response = staff_api_client.post_multipart(
body, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]
assert data["errors"][0]["field"] == "backgroundImage"
assert data["errors"][0]["message"] == "Invalid file type."
def test_category_update_mutation_without_background_image(
monkeypatch, staff_api_client, category, permission_manage_products
):
query = """
mutation($id: ID!, $name: String, $slug: String, $description: JSONString) {
categoryUpdate(
id: $id
input: {
name: $name
description: $description
slug: $slug
}
) {
errors {
field
message
}
}
}
"""
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
(
"saleor.product.thumbnails."
"create_category_background_image_thumbnails.delay"
),
mock_create_thumbnails,
)
category_name = "Updated name"
variables = {
"id": graphene.Node.to_global_id(
"Category", category.children.create(name="child").pk
),
"name": category_name,
"description": dummy_editorjs("description", True),
"slug": slugify(category_name),
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]
assert data["errors"] == []
assert mock_create_thumbnails.call_count == 0
UPDATE_CATEGORY_SLUG_MUTATION = """
mutation($id: ID!, $slug: String) {
categoryUpdate(
id: $id
input: {
slug: $slug
}
) {
category{
name
slug
}
errors {
field
message
code
}
}
}
"""
@pytest.mark.parametrize(
"input_slug, expected_slug, error_message",
[
("test-slug", "test-slug", None),
("", "", "Slug value cannot be blank."),
(None, "", "Slug value cannot be blank."),
],
)
def test_update_category_slug(
staff_api_client,
category,
permission_manage_products,
input_slug,
expected_slug,
error_message,
):
query = UPDATE_CATEGORY_SLUG_MUTATION
old_slug = category.slug
assert old_slug != input_slug
node_id = graphene.Node.to_global_id("Category", category.id)
variables = {"slug": input_slug, "id": node_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]
errors = data["errors"]
if not error_message:
assert not errors
assert data["category"]["slug"] == expected_slug
else:
assert errors
assert errors[0]["field"] == "slug"
assert errors[0]["code"] == ProductErrorCode.REQUIRED.name
def test_update_category_slug_exists(
staff_api_client, category, permission_manage_products
):
query = UPDATE_CATEGORY_SLUG_MUTATION
input_slug = "test-slug"
second_category = Category.objects.get(pk=category.pk)
second_category.pk = None
second_category.slug = input_slug
second_category.save()
assert input_slug != category.slug
node_id = graphene.Node.to_global_id("Category", category.id)
variables = {"slug": input_slug, "id": node_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]
errors = data["errors"]
assert errors
assert errors[0]["field"] == "slug"
assert errors[0]["code"] == ProductErrorCode.UNIQUE.name
@pytest.mark.parametrize(
"input_slug, expected_slug, input_name, error_message, error_field",
[
("test-slug", "test-slug", "New name", None, None),
("", "", "New name", "Slug value cannot be blank.", "slug"),
(None, "", "New name", "Slug value cannot be blank.", "slug"),
("test-slug", "", None, "This field cannot be blank.", "name"),
("test-slug", "", "", "This field cannot be blank.", "name"),
(None, None, None, "Slug value cannot be blank.", "slug"),
],
)
def test_update_category_slug_and_name(
staff_api_client,
category,
permission_manage_products,
input_slug,
expected_slug,
input_name,
error_message,
error_field,
):
query = """
mutation($id: ID!, $name: String, $slug: String) {
categoryUpdate(
id: $id
input: {
name: $name
slug: $slug
}
) {
category{
name
slug
}
errors {
field
message
code
}
}
}
"""
old_name = category.name
old_slug = category.slug
assert input_slug != old_slug
assert input_name != old_name
node_id = graphene.Node.to_global_id("Category", category.id)
variables = {"slug": input_slug, "name": input_name, "id": node_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
category.refresh_from_db()
data = content["data"]["categoryUpdate"]
errors = data["errors"]
if not error_message:
assert data["category"]["name"] == input_name == category.name
assert data["category"]["slug"] == input_slug == category.slug
else:
assert errors
assert errors[0]["field"] == error_field
assert errors[0]["code"] == ProductErrorCode.REQUIRED.name
MUTATION_CATEGORY_DELETE = """
mutation($id: ID!) {
categoryDelete(id: $id) {
category {
name
}
errors {
field
message
}
}
}
"""
@patch("saleor.product.signals.delete_versatile_image")
def test_category_delete_mutation(
delete_versatile_image_mock,
staff_api_client,
category,
permission_manage_products,
):
variables = {"id": graphene.Node.to_global_id("Category", category.id)}
response = staff_api_client.post_graphql(
MUTATION_CATEGORY_DELETE, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryDelete"]
assert data["category"]["name"] == category.name
with pytest.raises(category._meta.model.DoesNotExist):
category.refresh_from_db()
delete_versatile_image_mock.assert_not_called()
@patch("saleor.product.signals.delete_versatile_image")
@patch("saleor.plugins.webhook.plugin._get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_category_delete_trigger_webhook(
mocked_webhook_trigger,
mocked_get_webhooks_for_event,
delete_versatile_image_mock,
any_webhook,
staff_api_client,
category,
permission_manage_products,
settings,
):
mocked_get_webhooks_for_event.return_value = [any_webhook]
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
variables = {"id": graphene.Node.to_global_id("Category", category.id)}
response = staff_api_client.post_graphql(
MUTATION_CATEGORY_DELETE, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryDelete"]
assert data["category"]["name"] == category.name
assert not Category.objects.first()
delete_versatile_image_mock.assert_not_called()
mocked_webhook_trigger.assert_called_once_with(
{"id": variables["id"]},
WebhookEventAsyncType.CATEGORY_DELETED,
[any_webhook],
category,
SimpleLazyObject(lambda: staff_api_client.user),
)
@patch("saleor.product.signals.delete_versatile_image")
def test_delete_category_with_background_image(
delete_versatile_image_mock,
staff_api_client,
category_with_image,
permission_manage_products,
media_root,
):
"""Ensure deleting category deletes background image from storage."""
category = category_with_image
variables = {"id": graphene.Node.to_global_id("Category", category.id)}
response = staff_api_client.post_graphql(
MUTATION_CATEGORY_DELETE, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryDelete"]
assert data["category"]["name"] == category.name
with pytest.raises(category._meta.model.DoesNotExist):
category.refresh_from_db()
delete_versatile_image_mock.assert_called_once_with(category.background_image)
@patch("saleor.product.utils.update_products_discounted_prices_task")
def test_category_delete_mutation_for_categories_tree(
mock_update_products_discounted_prices_task,
staff_api_client,
categories_tree_with_published_products,
permission_manage_products,
):
parent = categories_tree_with_published_products
parent_product = parent.products.first()
child_product = parent.children.first().products.first()
product_list = [child_product, parent_product]
variables = {"id": graphene.Node.to_global_id("Category", parent.id)}
response = staff_api_client.post_graphql(
MUTATION_CATEGORY_DELETE, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryDelete"]
assert data["category"]["name"] == parent.name
with pytest.raises(parent._meta.model.DoesNotExist):
parent.refresh_from_db()
mock_update_products_discounted_prices_task.delay.assert_called_once()
(
_call_args,
call_kwargs,
) = mock_update_products_discounted_prices_task.delay.call_args
assert set(call_kwargs["product_ids"]) == set(p.pk for p in product_list)
product_channel_listings = ProductChannelListing.objects.filter(
product__in=product_list
)
for product_channel_listing in product_channel_listings:
assert product_channel_listing.is_published is False
assert not product_channel_listing.publication_date
assert product_channel_listings.count() == 4
@patch("saleor.product.utils.update_products_discounted_prices_task")
def test_category_delete_mutation_for_children_from_categories_tree(
mock_update_products_discounted_prices_task,
staff_api_client,
categories_tree_with_published_products,
permission_manage_products,
):
parent = categories_tree_with_published_products
child = parent.children.first()
parent_product = parent.products.first()
child_product = child.products.first()
variables = {"id": graphene.Node.to_global_id("Category", child.id)}
response = staff_api_client.post_graphql(
MUTATION_CATEGORY_DELETE, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryDelete"]
assert data["category"]["name"] == child.name
with pytest.raises(child._meta.model.DoesNotExist):
child.refresh_from_db()
mock_update_products_discounted_prices_task.delay.assert_called_once_with(
product_ids=[child_product.pk]
)
parent_product.refresh_from_db()
assert parent_product.category
product_channel_listings = ProductChannelListing.objects.filter(
product=parent_product
)
for product_channel_listing in product_channel_listings:
assert product_channel_listing.is_published is True
assert product_channel_listing.publication_date
child_product.refresh_from_db()
assert not child_product.category
product_channel_listings = ProductChannelListing.objects.filter(
product=child_product
)
for product_channel_listing in product_channel_listings:
assert product_channel_listing.is_published is False
assert not product_channel_listing.publication_date
LEVELED_CATEGORIES_QUERY = """
query leveled_categories($level: Int) {
categories(level: $level, first: 20) {
edges {
node {
name
parent {
name
}
}
}
}
}
"""
def test_category_level(user_api_client, category):
query = LEVELED_CATEGORIES_QUERY
child = Category.objects.create(name="child", slug="chi-ld", parent=category)
variables = {"level": 0}
response = user_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
category_data = content["data"]["categories"]["edges"][0]["node"]
assert category_data["name"] == category.name
assert category_data["parent"] is None
variables = {"level": 1}
response = user_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
category_data = content["data"]["categories"]["edges"][0]["node"]
assert category_data["name"] == child.name
assert category_data["parent"]["name"] == category.name
NOT_EXISTS_IDS_CATEGORIES_QUERY = """
query ($filter: CategoryFilterInput!) {
categories(first: 5, filter: $filter) {
edges {
node {
id
name
}
}
}
}
"""
def test_categories_query_ids_not_exists(user_api_client, category):
query = NOT_EXISTS_IDS_CATEGORIES_QUERY
variables = {"filter": {"ids": ["W3KATGDn3fq3ZH4=", "zH9pYmz7yWD3Hy8="]}}
response = user_api_client.post_graphql(query, variables)
content = get_graphql_content(response, ignore_errors=True)
message_error = '{"ids": [{"message": "Invalid ID specified.", "code": ""}]}'
assert len(content["errors"]) == 1
assert content["errors"][0]["message"] == message_error
assert content["data"]["categories"] is None
FETCH_CATEGORY_QUERY = """
query fetchCategory($id: ID!){
category(id: $id) {
name
backgroundImage(size: 120) {
url
alt
}
}
}
"""
def test_category_image_query(user_api_client, non_default_category, media_root):
alt_text = "Alt text for an image."
category = non_default_category
image_file, image_name = create_image()
category.background_image = image_file
category.background_image_alt = alt_text
category.save()
category_id = graphene.Node.to_global_id("Category", category.pk)
variables = {"id": category_id}
response = user_api_client.post_graphql(FETCH_CATEGORY_QUERY, variables)
content = get_graphql_content(response)
data = content["data"]["category"]
thumbnail_url = category.background_image.thumbnail["120x120"].url
assert thumbnail_url in data["backgroundImage"]["url"]
assert data["backgroundImage"]["alt"] == alt_text
def test_category_image_query_without_associated_file(
user_api_client, non_default_category
):
category = non_default_category
category_id = graphene.Node.to_global_id("Category", category.pk)
variables = {"id": category_id}
response = user_api_client.post_graphql(FETCH_CATEGORY_QUERY, variables)
content = get_graphql_content(response)
data = content["data"]["category"]
assert data["name"] == category.name
assert data["backgroundImage"] is None
def test_update_category_mutation_remove_background_image(
staff_api_client, category_with_image, permission_manage_products
):
query = """
mutation updateCategory($id: ID!, $backgroundImage: Upload) {
categoryUpdate(
id: $id, input: {
backgroundImage: $backgroundImage
}
) {
category {
backgroundImage{
url
}
}
errors {
field
message
}
}
}
"""
assert category_with_image.background_image
variables = {
"id": to_global_id("Category", category_with_image.id),
"backgroundImage": None,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
data = content["data"]["categoryUpdate"]["category"]
assert not data["backgroundImage"]
category_with_image.refresh_from_db()
assert not category_with_image.background_image
def test_query_category_for_federation(api_client, non_default_category):
category_id = graphene.Node.to_global_id("Category", non_default_category.pk)
variables = {
"representations": [
{
"__typename": "Category",
"id": category_id,
},
],
}
query = """
query GetCategoryInFederation($representations: [_Any]) {
_entities(representations: $representations) {
__typename
... on Category {
id
name
}
}
}
"""
response = api_client.post_graphql(query, variables)
content = get_graphql_content(response)
assert content["data"]["_entities"] == [
{
"__typename": "Category",
"id": category_id,
"name": non_default_category.name,
}
]
| 32.236378
| 88
| 0.654842
|
cb30382a453e8230414a67ff376a0ff8b1f918a1
| 1,350
|
py
|
Python
|
model_training/unet_vae/trainer.py
|
ostapViniavskyi/brain_tumor_segmentation
|
c367155bd8eb3e4f950da824385641d2dc8c063a
|
[
"MIT"
] | 7
|
2019-12-18T20:07:03.000Z
|
2021-04-28T09:19:11.000Z
|
model_training/unet_vae/trainer.py
|
ostapViniavskyi/brain_tumor_segmentation
|
c367155bd8eb3e4f950da824385641d2dc8c063a
|
[
"MIT"
] | null | null | null |
model_training/unet_vae/trainer.py
|
ostapViniavskyi/brain_tumor_segmentation
|
c367155bd8eb3e4f950da824385641d2dc8c063a
|
[
"MIT"
] | 1
|
2022-01-17T11:21:50.000Z
|
2022-01-17T11:21:50.000Z
|
import tqdm
import numpy as np
from model_training.unet_addnet.trainer import Trainer
class TrainerVAE(Trainer):
def __init__(self, model, vae, config, train_dl, val_dl, device):
super(TrainerVAE, self).__init__(model, config, train_dl, val_dl, device)
self.vae = vae
def _init_params(self):
super(TrainerVAE, self)._init_params()
self.vae.to(self.device)
def _run_epoch(self, epoch):
self.model.train()
self.vae.train()
losses = []
lr = self.optimizer.param_groups[0]['lr']
status_bar = tqdm.tqdm(total=len(self.train_dl))
status_bar.set_description(f'Epoch {epoch}, lr {lr}')
for X, y in self.train_dl:
self.model.zero_grad()
self.vae.zero_grad()
X, y = X.to(self.device), y.to(self.device)
y_pred, embedding = self.model(X)
X_pred, mu, sigma = self.vae(embedding)
loss = self.criterion(y_pred, y, X_pred, X, mu, sigma)
loss.backward()
self.optimizer.step()
losses.append(loss.item())
status_bar.update()
status_bar.set_postfix(loss=losses[-1])
status_bar.close()
return np.mean(losses)
def _get_params(self):
return list(self.model.parameters()) + list(self.vae.parameters())
| 30
| 81
| 0.603704
|
f3104cd45a6b2360c64481838cb49d7da68b8a43
| 1,234
|
py
|
Python
|
odbx/api/views/alarms.py
|
kilinger/odbx
|
78fa0ed60db4debb9398b9e197961c7a758dd9f4
|
[
"BSD-3-Clause"
] | null | null | null |
odbx/api/views/alarms.py
|
kilinger/odbx
|
78fa0ed60db4debb9398b9e197961c7a758dd9f4
|
[
"BSD-3-Clause"
] | null | null | null |
odbx/api/views/alarms.py
|
kilinger/odbx
|
78fa0ed60db4debb9398b9e197961c7a758dd9f4
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding:utf-8 -*-
from rest_framework import viewsets
from odbx.alarms.filters import AlarmMenuFilter
from odbx.alarms.models import AlarmMenu
from odbx.alarms.serializers import AlarmMenuSerializer
class AlarmViewSet(viewsets.ModelViewSet):
queryset = AlarmMenu.objects.all()
serializer_class = AlarmMenuSerializer
filter_class = AlarmMenuFilter
http_method_names = ['get', 'post', 'put', 'patch', 'head', 'options', 'trace']
def perform_create(self, serializer):
"""
:type serializer: `odbx.alarms.serializers.AlarmMenuSerializer`
:return:
"""
serializer.initial_data.pop('course', None)
serializer.save(user=self.request.user)
def perform_update(self, serializer):
"""
:type serializer: `odbx.alarms.serializers.AlarmMenuSerializer`
:return:
"""
new_course = serializer.initial_data.get('course', None)
if not serializer.instance.allow_update_course(new_course, self.request.user):
serializer.initial_data.pop('course', None)
if not serializer.instance.allow_update_valuation(self.request.user):
serializer.initial_data.pop('valuation', None)
serializer.save()
| 36.294118
| 86
| 0.690438
|
c4eddad37eeefabcff7e096394f351ac9aceb65c
| 5,497
|
py
|
Python
|
tests/test_config_builder.py
|
TheoLaudatQM/py-qua-tools
|
60c005c651148bafee8f437ce1be69d2340a265b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_config_builder.py
|
TheoLaudatQM/py-qua-tools
|
60c005c651148bafee8f437ce1be69d2340a265b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_config_builder.py
|
TheoLaudatQM/py-qua-tools
|
60c005c651148bafee8f437ce1be69d2340a265b
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
import numpy as np
from qualang_tools.config.configuration import *
from qualang_tools.config.components import *
from qualang_tools.config.builder import ConfigBuilder
@pytest.fixture
def config_resonator():
cont = Controller("con1")
res = ReadoutResonator(
"res1",
outputs=[cont.analog_output(0), cont.analog_output(1)],
inputs=[cont.analog_input(0), cont.analog_input(1)],
intermediate_frequency=2e6,
)
res.lo_frequency = 4e9
wfs = [
ArbitraryWaveform("wf1", np.linspace(0, -0.5, 16).tolist()),
ArbitraryWaveform("wf2", np.linspace(0, -0.5, 16).tolist()),
]
ro_pulse = MeasurePulse("ro_pulse", wfs, 16)
ro_pulse.add(
Weights(ConstantIntegrationWeights("integ_w1_I", cosine=1, sine=0, duration=16))
)
ro_pulse.add(
Weights(
ConstantIntegrationWeights("integ_w1_Q", cosine=0, sine=-1, duration=16)
)
)
ro_pulse.add(
Weights(ConstantIntegrationWeights("integ_w2_I", cosine=0, sine=1, duration=16))
)
ro_pulse.add(
Weights(ConstantIntegrationWeights("integ_w2_Q", cosine=1, sine=0, duration=16))
)
res.add(Operation(ro_pulse))
cb = ConfigBuilder()
cb.add(cont)
cb.add(res)
return cb.build()
def test_controller(config_resonator):
config = config_resonator
assert config["version"] == 1
assert "con1" in [*config["controllers"]]
assert config["controllers"]["con1"]["type"] == "opx1"
assert [*config["controllers"]["con1"]["analog_outputs"]] == [0, 1]
assert [*config["controllers"]["con1"]["analog_inputs"]] == [0, 1]
assert config["controllers"]["con1"]["analog_outputs"][0]["offset"] == 0
assert config["controllers"]["con1"]["analog_outputs"][1]["offset"] == 0
assert config["controllers"]["con1"]["analog_inputs"][0]["offset"] == 0
assert config["controllers"]["con1"]["analog_inputs"][1]["offset"] == 0
def test_element(config_resonator):
config = config_resonator
assert "res1" in [*config["elements"]]
assert config["elements"]["res1"]["time_of_flight"] == 0
assert config["elements"]["res1"]["smearing"] == 0
assert config["elements"]["res1"]["intermediate_frequency"] == 2e6
assert config["elements"]["res1"]["mixInputs"]["lo_frequency"] == 4e9
assert config["elements"]["res1"]["mixInputs"]["I"] == ("con1", 0)
assert config["elements"]["res1"]["mixInputs"]["Q"] == ("con1", 1)
assert "ro_pulse" in [*config["elements"]["res1"]["operations"]]
def test_pulses(config_resonator):
config = config_resonator
assert [*config["pulses"]] == ["ro_pulse"]
assert config["pulses"]["ro_pulse"]["operation"] == "measure"
assert config["pulses"]["ro_pulse"]["length"] == 16
assert config["pulses"]["ro_pulse"]["waveforms"]["I"] == "wf1"
assert config["pulses"]["ro_pulse"]["waveforms"]["Q"] == "wf2"
assert [*config["pulses"]["ro_pulse"]["integration_weights"]] == [
"integ_w1_I",
"integ_w1_Q",
"integ_w2_I",
"integ_w2_Q",
]
def test_integration_weights(config_resonator):
config = config_resonator
assert [*config["integration_weights"]] == [
"integ_w1_I",
"integ_w1_Q",
"integ_w2_I",
"integ_w2_Q",
]
assert config["integration_weights"]["integ_w1_I"]["cosine"] == [(1, 16)]
assert config["integration_weights"]["integ_w1_I"]["sine"] == [(0, 16)]
assert config["integration_weights"]["integ_w1_Q"]["cosine"] == [(0, 16)]
assert config["integration_weights"]["integ_w1_Q"]["sine"] == [(-1, 16)]
assert config["integration_weights"]["integ_w2_I"]["cosine"] == [(0, 16)]
assert config["integration_weights"]["integ_w2_I"]["sine"] == [(1, 16)]
assert config["integration_weights"]["integ_w2_Q"]["cosine"] == [(1, 16)]
assert config["integration_weights"]["integ_w2_Q"]["sine"] == [(0, 16)]
@pytest.fixture
def config_transmon():
cb = ConfigBuilder()
cont = Controller("con1")
cb.add(cont)
wf1 = ConstantWaveform("wf1", 1.0)
wf2 = ArbitraryWaveform("wf2", np.linspace(0, -0.5, 16).tolist())
qb1 = Transmon(
"qb1",
I=cont.analog_output(0),
Q=cont.analog_output(1),
intermediate_frequency=5e6,
)
qb1.lo_frequency = 4e9
qb1.add(Operation(ControlPulse("pi_pulse", [wf1, wf2], 16)))
cb.add(qb1)
qb2 = FluxTunableTransmon(
"qb2",
I=cont.analog_output(2),
Q=cont.analog_output(3),
fl_port=cont.analog_output(4),
intermediate_frequency=5e6,
)
qb2.lo_frequency = 4.5e9
qb2.add(Operation(ControlPulse("pi_pulse", [wf1, wf2], 16)))
qb2.add(Operation(ControlPulse("fl_pulse", [wf1], 16)))
cb.add(qb2)
qb1.mixer = Mixer(
"mx1",
intermediate_frequency=5e6,
lo_frequency=4e9,
correction=Matrix2x2([[1.0, 0.0], [1.0, 0.0]]),
)
return cb.build()
def test_transmon(config_transmon):
config = config_transmon
assert [*config["elements"]] == ["qb1", "qb2", "qb2_flux_line"]
assert [*config["mixers"]] == ["mx1"]
assert [*config["waveforms"]] == ["wf1", "wf2"]
assert config["mixers"]["mx1"] == [
{
"intermediate_frequency": 5e6,
"lo_frequency": 4e9,
"correction": [1.0, 0.0, 1.0, 0.0],
}
]
assert config["elements"]["qb2_flux_line"]["singleInput"]["port"] == ("con1", 4)
assert [*config["pulses"]] == ["pi_pulse", "fl_pulse"]
| 33.723926
| 88
| 0.616882
|
b6066628072ec200299de963f6a63afc059b70be
| 2,239
|
py
|
Python
|
tests/demo/demoproject/settings.py
|
saxix/django-adminfilters
|
14fd8dba6629752e64eccc5bec651eefb2c6c701
|
[
"BSD-1-Clause"
] | 17
|
2015-03-03T23:15:31.000Z
|
2022-03-02T16:55:18.000Z
|
tests/demo/demoproject/settings.py
|
saxix/django-adminfilters
|
14fd8dba6629752e64eccc5bec651eefb2c6c701
|
[
"BSD-1-Clause"
] | 9
|
2015-11-10T15:30:27.000Z
|
2022-02-12T20:55:39.000Z
|
tests/demo/demoproject/settings.py
|
saxix/django-adminfilters
|
14fd8dba6629752e64eccc5bec651eefb2c6c701
|
[
"BSD-1-Clause"
] | 14
|
2015-04-07T13:52:42.000Z
|
2022-02-03T17:54:42.000Z
|
# Django settings for demoproject project.
import os
import sys
here = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(here, '..', '..')))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
# Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '~DEMODB.sqlite', # Not used with sqlite3.
# Set to empty string for localhost. Not used with sqlite3.
'HOST': '',
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
TIME_ZONE = 'Asia/Bangkok'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
MEDIA_ROOT = os.path.join(here, 'media')
MEDIA_URL = ''
STATIC_ROOT = ''
STATIC_URL = '/static/'
SECRET_KEY = 'c73*n!y=)tziu^2)y*@5i2^)$8z$tx#b9*_r3i6o1ohxo%*2^a'
MIDDLEWARE = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',)
ROOT_URLCONF = 'demoproject.urls'
WSGI_APPLICATION = 'demoproject.wsgi.application'
AUTHENTICATION_BACKENDS = ('demoproject.backends.AnyUserBackend',)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'adminfilters',
'demoproject.demoapp')
# Django 1.9
TEMPLATES = [
{'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': DEBUG,
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
}
]
| 29.460526
| 78
| 0.662349
|
dc848883e0ae2b00907882d275eb8d0a7b2c9da8
| 2,866
|
py
|
Python
|
warbler/configuration.py
|
heckj/warbler
|
9f79ce4d279a3c166d9b8d244b645d74124eee00
|
[
"Apache-2.0"
] | 3
|
2020-09-30T05:37:09.000Z
|
2021-11-06T07:01:12.000Z
|
warbler/configuration.py
|
heckj/warbler
|
9f79ce4d279a3c166d9b8d244b645d74124eee00
|
[
"Apache-2.0"
] | null | null | null |
warbler/configuration.py
|
heckj/warbler
|
9f79ce4d279a3c166d9b8d244b645d74124eee00
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 Joseph Heck. See LICENSE
import os
# import shutil
from pathlib import Path as P
import configparser
import logging
logger = logging.getLogger(__name__)
sbg_config_dir = P("heckj", "warbler")
# https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
xdg_config_dir = {
"env": "XDG_CONFIG_HOME",
"default": P(P.home(), ".config")
}
# There is a raging debate on this and people want to add a new field to the XDG spec
# Me, I think logs are user data ...
xdg_data_home = {
"env": "XDG_DATA_HOME",
"default": P(P.home(), ".local", "share")
}
class Configuration(configparser.ConfigParser):
def __init__(self):
super().__init__()
self.cfg_path = P(os.getenv(xdg_config_dir["env"], xdg_config_dir["default"]), sbg_config_dir)
self.log_path = P(os.getenv(xdg_data_home["env"], xdg_data_home["default"]), sbg_config_dir, "logs")
# self.scratch_path = P(os.getenv(xdg_data_home["env"], xdg_data_home["default"]), sbg_config_dir, "scratch")
if not self.cfg_path.exists():
self.cfg_path.mkdir(parents=True)
if not self.log_path.exists():
self.log_path.mkdir(parents=True)
# if not self.scratch_path.exists():
# self.scratch_path.mkdir(parents=True)
# self.lang_models = {}
# We do this separately to give the caller a chance to set up logging
def initialize(self):
# logging.info("Copying language schema files ...")
# self._copy_missing_language_files()
# # TODO: allow multiple language specifications
# logging.info("Loading language model ...")
# self._load_language_files()
pass
# https://stackoverflow.com/questions/1611799/preserve-case-in-configparser
def optionxform(self, optionstr):
return optionstr
def getpath(self, section, option):
return self._resolve_path(P(self.get(section, option)))
def _resolve_path(self, path: P):
"""Paths in the config file can be absolute or relative. Absolute paths are left untouched
relative paths are resolved relative to the configuration file location"""
path = path.expanduser()
if path.is_absolute():
return path
else:
return P(self.cfg_path, path)
# def _copy_missing_language_files(self):
# for src_file in default_config_data_dir.glob("schema-*.json"):
# dst_file = P(self.cfg_path, src_file.name)
# if not dst_file.exists():
# shutil.copy(str(src_file), str(dst_file))
# def _load_language_files(self):
# for fname in self.cfg_path.glob("schema-*.json"):
# version = fname.name[7:-5]
# self.lang_models[version] = parse_schema(fname)
# logger.info(f"Loaded language schema {version}")
| 33.717647
| 117
| 0.652477
|
335d2287ae2481c3d4ae75b92b323b2c454c270a
| 706
|
py
|
Python
|
blamepipeline/tokenizers/__init__.py
|
Shuailong/BlamePipeline
|
bbd508dd0afc2e2c579f6afea5a3acd4c5c47956
|
[
"MIT"
] | 5
|
2019-02-12T14:47:40.000Z
|
2021-10-24T00:39:05.000Z
|
blamepipeline/tokenizers/__init__.py
|
Shuailong/BlamePipeline
|
bbd508dd0afc2e2c579f6afea5a3acd4c5c47956
|
[
"MIT"
] | 1
|
2020-08-10T10:16:29.000Z
|
2020-08-10T10:16:29.000Z
|
blamepipeline/tokenizers/__init__.py
|
Shuailong/BlamePipeline
|
bbd508dd0afc2e2c579f6afea5a3acd4c5c47956
|
[
"MIT"
] | 3
|
2019-02-12T14:48:01.000Z
|
2022-02-07T15:27:09.000Z
|
#!/usr/bin/env python3
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import os
DEFAULTS = {
'corenlp_classpath': os.getenv('CLASSPATH')
}
def set_default(key, value):
global DEFAULTS
DEFAULTS[key] = value
from .corenlp_tokenizer import CoreNLPTokenizer
# Spacy is optional
try:
from .spacy_tokenizer import SpacyTokenizer
except ImportError:
pass
def get_class(name):
if name == 'spacy':
return SpacyTokenizer
if name == 'corenlp':
return CoreNLPTokenizer
raise RuntimeError('Invalid tokenizer: %s' % name)
| 19.081081
| 61
| 0.709632
|
b36868ccced89a9828e1f3ed666c963011828486
| 9,517
|
py
|
Python
|
generate_data/task2_data_quiz.py
|
shshen-closer/TOP1-for-Answer-Prediction-task-2-in-the-NeurIPS-2020-Education-Challenge
|
2add1704909a46c443681d8e1701ee54ddc43039
|
[
"Apache-2.0"
] | 3
|
2021-01-18T15:02:12.000Z
|
2021-05-16T10:47:33.000Z
|
generate_data/task2_data_quiz.py
|
shshen-closer/TOP1-for-Answer-Prediction-task-2-in-the-NeurIPS-2020-Education-Challenge
|
2add1704909a46c443681d8e1701ee54ddc43039
|
[
"Apache-2.0"
] | null | null | null |
generate_data/task2_data_quiz.py
|
shshen-closer/TOP1-for-Answer-Prediction-task-2-in-the-NeurIPS-2020-Education-Challenge
|
2add1704909a46c443681d8e1701ee54ddc43039
|
[
"Apache-2.0"
] | 2
|
2021-02-22T01:24:54.000Z
|
2022-03-12T00:29:17.000Z
|
import numpy as np
import pandas as pd
from tqdm import tqdm
import random
import json
#df_train = pd.read_csv('train_data/train_task_1_2.csv')
pd.set_option('display.float_format',lambda x : '%.2f' % x)
np.set_printoptions(suppress=True)
seq_len = 10
train_all = pd.read_csv('../data_preprocess/train_task_1_2_new.csv')
test_all = pd.read_csv('../data_preprocess/submission_task_1_2_new.csv')
train_all = train_all.drop(['CorrectAnswer'], axis = 1)
test_all = test_all.drop(['Unnamed: 0'], axis = 1)
train_id = set(np.array(train_all['UserId']))
test_id = set(np.array(test_all['UserId']))
all_data1 = pd.merge(train_all,test_all,how="outer")
all_data1['isc'] = all_data1['IsCorrect']
all_data1['isc'].fillna(2,inplace=True)
all_data = all_data1
order = ['AnswerId','UserId','QuestionId','IsCorrect','AnswerValue','timestamp','quizid','isc']
all_data = all_data[order]
ques_kc = pd.read_csv('../data_preprocess/question_metadata_processed_1_2.csv')
ques_kc = np.array(ques_kc)
ques_dict = {}
for item in ques_kc:
ques_dict[float(item[0])] = eval(item[1])
length_train = []
q_a_train = []
q_a_test = []
for item in tqdm(test_id):
idx = all_data[(all_data.UserId==item)].index.tolist()
temp1 = all_data.iloc[idx]
temp1 = temp1.sort_values(by=['quizid', 'timestamp'])
temp1['IsCorrect'].fillna(2,inplace=True)
temp1['AnswerValue'].fillna(5,inplace=True)
temp_ori = np.array(temp1)
temp_1=[]
temp_2 = []
temp_random = []
reidx = 0
# sorted by quizid and timestamp
for i in range(len(temp_ori)-1):
if temp_ori[i+1][-2] != temp_ori[i][-2]:
temp_random.append(temp_ori[i])
temp_quiz = []
if len(temp_random) >=seq_len:
for tt in range(len(temp_random) - 1):
if temp_random[tt+1][-3] - temp_random[tt][-3] > 3600*24*7: # 7 days
temp_quiz.append(temp_random[tt])
if len(temp_quiz) >= seq_len:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_1.append(temp_quiz_2)
temp_quiz = []
else:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_2.append(temp_quiz_2)
temp_quiz = []
else:
temp_quiz.append(temp_random[tt])
temp_quiz.append(temp_random[-1])
if len(temp_quiz) >= seq_len:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_1.append(temp_quiz_2)
else:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_2.append(temp_quiz_2)
else:
temp_quiz_2 = [np.append(x, reidx) for x in temp_random]
reidx += 1
temp_2.append(temp_quiz_2)
temp_random = []
else:
temp_random.append(temp_ori[i])
temp_random.append(temp_ori[-1])
temp_quiz = []
if len(temp_random) >=seq_len:
for tt in range(len(temp_random) - 1):
if temp_random[tt+1][-3] - temp_random[tt][-3] > 3600*24*7:
temp_quiz.append(temp_random[tt])
if len(temp_quiz) >= seq_len:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_1.append(temp_quiz_2)
temp_quiz = []
else:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_2.append(temp_quiz_2)
temp_quiz = []
else:
temp_quiz.append(temp_random[tt])
temp_quiz.append(temp_random[-1])
if len(temp_quiz) >= seq_len:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_1.append(temp_quiz_2)
else:
temp_quiz_2 = [np.append(x, reidx) for x in temp_quiz]
reidx += 1
temp_2.append(temp_quiz_2)
else:
temp_quiz_2 = [np.append(x, reidx) for x in temp_random]
reidx += 1
temp_2.append(temp_quiz_2)
temp = []
for t1 in temp_2:
temp.extend(t1)
for t2 in temp_1:
temp.extend(t2)
temp = np.array(temp)
temp_front = []
# data process
for i in range(len(temp)-1):
if temp[i+1][-1] != temp[i][-1]:
temp_front.append(temp[i])
if len(temp_front) >= seq_len:
for tt in range(len(temp_front)):
a = [] # questions
b = [0]*seq_len #answer correctly or not
c = [0]*seq_len #answer choice
if tt < seq_len:
for ff in range(seq_len):
if ff<tt:
a.append(int(temp_front[ff][2]))
b[ff] = int(temp_front[ff][3])
c[ff] = int(temp_front[ff][4]) - 1
if ff>tt:
a.append(int(temp_front[ff][2]))
b[ff - 1] = int(temp_front[ff][3])
c[ff - 1] = int(temp_front[ff][4]) - 1
a.append(int(temp_front[tt][2]))
b[seq_len-1] = int(temp_front[tt][3])
c[seq_len-1] = int(temp_front[tt][4]) - 1
if tt >= seq_len:
for ff in range(tt - seq_len+1, tt+1):
a.append(int(temp_front[ff][2]))
b[ff - tt + seq_len -1] = int(temp_front[ff][3])
c[ff - tt + seq_len -1] = int(temp_front[ff][4]) - 1
if len(a)>seq_len:
print('iii')
if int(temp_front[tt][-2]) == 2:
q_a_test.append([a,b,c, temp_front[tt][0]])
else:
q_a_train.append([a,b, c,int(temp_front[tt][3]), int(temp_front[tt][4]) - 1])
temp_front = []
else:
temp_front.append(temp[i])
temp_front.append(temp[-1])
if len(temp_front) >= seq_len:
for tt in range(len(temp_front)):
a = []
b = [0]*seq_len
c = [0]*seq_len
if tt < seq_len:
for ff in range(seq_len):
if ff<tt:
a.append(int(temp_front[ff][2]))
b[ff] = int(temp_front[ff][3])
c[ff] = int(temp_front[ff][4]) - 1
if ff>tt:
a.append(int(temp_front[ff][2]))
b[ff - 1] = int(temp_front[ff][3])
c[ff - 1] = int(temp_front[ff][4]) - 1
a.append(int(temp_front[tt][2]))
b[seq_len-1] = int(temp_front[tt][3])
c[seq_len-1] = int(temp_front[tt][4]) - 1
if tt >= seq_len:
for ff in range(tt - seq_len+1, tt+1):
a.append(int(temp_front[ff][2]))
b[ff - tt + seq_len -1] = int(temp_front[ff][3])
c[ff - tt + seq_len -1] = int(temp_front[ff][4]) - 1
if len(a)>seq_len:
print('iii')
if int(temp_front[tt][-2]) == 2:
q_a_test.append([a,b,c, temp_front[tt][0]])
else:
q_a_train.append([a,b, c,int(temp_front[tt][3]), int(temp_front[tt][4]) - 1])
elif len(temp_front) > 0:
for tt in range(len(temp) - len(temp_front), len(temp)):
a = []
b = [0]*seq_len
c = [0]*seq_len
for ff in range(len(temp) - seq_len,len(temp)):
if ff<tt:
a.append(int(temp[ff][2]))
b[ff - len(temp) + seq_len] = int(temp[ff][3])
c[ff - len(temp) + seq_len] = int(temp[ff][4]) - 1
if ff>tt:
a.append(int(temp[ff][2]))
b[ff - 1 - len(temp) + seq_len] = int(temp[ff][3])
c[ff - 1 - len(temp) + seq_len] = int(temp[ff][4]) - 1
a.append(int(temp[tt][2]))
b[seq_len-1] = int(temp[tt][3])
c[seq_len-1] = int(temp[tt][4]) - 1
if int(temp[tt][-2]) == 2:
q_a_test.append([a,b,c, temp[tt][0]])
else:
q_a_train.append([a,b, c, int(temp[tt][3]), int(temp[tt][4]) - 1])
print(len(q_a_test))
print(len(q_a_train))
np.save("q_a_train.npy",np.array(q_a_train))
np.save("q_a_test.npy",np.array(q_a_test))
print('complete')
| 38.068
| 102
| 0.450772
|
14caa73b91ed1abad6eb9acf99129977013df021
| 4,917
|
py
|
Python
|
pw_console/py/help_window_test.py
|
Robor-Electronics/pigweed
|
f0a3564dc72361e9e0e4af77ababc8114e7c4763
|
[
"Apache-2.0"
] | null | null | null |
pw_console/py/help_window_test.py
|
Robor-Electronics/pigweed
|
f0a3564dc72361e9e0e4af77ababc8114e7c4763
|
[
"Apache-2.0"
] | null | null | null |
pw_console/py/help_window_test.py
|
Robor-Electronics/pigweed
|
f0a3564dc72361e9e0e4af77ababc8114e7c4763
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for pw_console.console_app"""
import inspect
import logging
import unittest
from unittest.mock import MagicMock
from jinja2 import Environment, PackageLoader, make_logging_undefined
from prompt_toolkit.key_binding import KeyBindings
from pw_console.help_window import HelpWindow
_jinja_env = Environment(
loader=PackageLoader('pw_console'),
undefined=make_logging_undefined(logger=logging.getLogger('pw_console')),
trim_blocks=True,
lstrip_blocks=True,
)
def _create_app_mock():
template = _jinja_env.get_template('keybind_list.jinja')
mock_app = MagicMock()
mock_app.get_template = MagicMock(return_value=template)
return mock_app
class TestHelpWindow(unittest.TestCase):
"""Tests for HelpWindow text and keybind lists."""
def setUp(self):
self.maxDiff = None # pylint: disable=invalid-name
def test_instantiate(self) -> None:
app = _create_app_mock()
help_window = HelpWindow(app)
self.assertIsNotNone(help_window)
# pylint: disable=unused-variable,unused-argument
def test_add_keybind_help_text(self) -> None:
bindings = KeyBindings()
@bindings.add('f1')
def show_help(event):
"""Toggle help window."""
@bindings.add('c-w')
@bindings.add('c-q')
def exit_(event):
"""Quit the application."""
app = _create_app_mock()
help_window = HelpWindow(app)
help_window.add_keybind_help_text('Global', bindings)
self.assertEqual(
help_window.help_text_sections,
{
'Global': {
'Quit the application.': ['Ctrl-Q', 'Ctrl-W'],
'Toggle help window.': ['F1'],
}
},
)
def test_generate_help_text(self) -> None:
"""Test keybind list template generation."""
global_bindings = KeyBindings()
@global_bindings.add('f1')
def show_help(event):
"""Toggle help window."""
@global_bindings.add('c-w')
@global_bindings.add('c-q')
def exit_(event):
"""Quit the application."""
focus_bindings = KeyBindings()
@focus_bindings.add('s-tab')
@focus_bindings.add('c-right')
@focus_bindings.add('c-down')
def app_focus_next(event):
"""Move focus to the next widget."""
@focus_bindings.add('c-left')
@focus_bindings.add('c-up')
def app_focus_previous(event):
"""Move focus to the previous widget."""
app = _create_app_mock()
help_window = HelpWindow(
app,
preamble='Pigweed CLI v0.1',
additional_help_text=inspect.cleandoc("""
Welcome to the Pigweed Console!
Please enjoy this extra help text.
"""),
)
help_window.add_keybind_help_text('Global', global_bindings)
help_window.add_keybind_help_text('Focus', focus_bindings)
help_window.generate_help_text()
self.assertIn(
inspect.cleandoc("""
Welcome to the Pigweed Console!
Please enjoy this extra help text.
"""),
help_window.help_text,
)
self.assertIn(
inspect.cleandoc("""
==== Global Keys ====
"""),
help_window.help_text,
)
self.assertIn(
inspect.cleandoc("""
Toggle help window. ----------------- F1
Quit the application. --------------- Ctrl-Q
Ctrl-W
"""),
help_window.help_text,
)
self.assertIn(
inspect.cleandoc("""
==== Focus Keys ====
"""),
help_window.help_text,
)
self.assertIn(
inspect.cleandoc("""
Move focus to the next widget. ------ Ctrl-Down
Ctrl-Right
Shift-Tab
Move focus to the previous widget. -- Ctrl-Left
Ctrl-Up
"""),
help_window.help_text,
)
if __name__ == '__main__':
unittest.main()
| 30.73125
| 79
| 0.570673
|
771f22eeb9ca175037e9e72bdb034cfc5e59a661
| 3,349
|
py
|
Python
|
agent/agent/settings.py
|
parker-pu/octopus-plus
|
8533f9c8a5b895e8851d2bece408864cc12c01b2
|
[
"Apache-2.0"
] | null | null | null |
agent/agent/settings.py
|
parker-pu/octopus-plus
|
8533f9c8a5b895e8851d2bece408864cc12c01b2
|
[
"Apache-2.0"
] | null | null | null |
agent/agent/settings.py
|
parker-pu/octopus-plus
|
8533f9c8a5b895e8851d2bece408864cc12c01b2
|
[
"Apache-2.0"
] | null | null | null |
# Scrapy settings for agent project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
BOT_NAME = 'agent'
SPIDER_MODULES = ['agent.spiders']
NEWSPIDER_MODULE = 'agent.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'agent (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
# CONCURRENT_REQUESTS_PER_DOMAIN = 16
# CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
# COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
# TELNETCONSOLE_ENABLED = False
# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
# }
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
# 'agent.middlewares.AgentSpiderMiddleware': 543,
# }
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
'scrapy.downloadermiddlewares.retry.RetryMiddleware': 200, # 重试中间件
'agent.middlewares.AgentDownloaderMiddleware': 543,
}
# 重试中间件参数
RETRY_ENABLED: True
RETRY_TIMES: 2 # 重试次数
RETRY_HTTP_CODECS: [408, ]
# 408--超时
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
# }
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# ITEM_PIPELINES = {
# 'agent.pipelines.AgentPipeline': 300,
# }
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# HTTPCACHE_ENABLED = True
# HTTPCACHE_EXPIRATION_SECS = 0
# HTTPCACHE_DIR = 'httpcache'
# HTTPCACHE_IGNORE_HTTP_CODES = []
# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
PROJECT_CODE = "T1234"
PROJECT_NAME = "测试"
| 33.49
| 103
| 0.768289
|
50ea575a79ffa4a58b582885aefe89646e2ac670
| 2,297
|
py
|
Python
|
pyhole/core/config.py
|
jk0/pyhole
|
67ea7212fbbb8731a5cf649168b296d2dcd04059
|
[
"Apache-2.0"
] | 12
|
2016-01-29T14:47:41.000Z
|
2020-03-11T04:09:14.000Z
|
pyhole/core/config.py
|
jk0/pyhole
|
67ea7212fbbb8731a5cf649168b296d2dcd04059
|
[
"Apache-2.0"
] | 24
|
2016-01-13T18:50:33.000Z
|
2016-10-07T15:39:06.000Z
|
pyhole/core/config.py
|
jk0/pyhole
|
67ea7212fbbb8731a5cf649168b296d2dcd04059
|
[
"Apache-2.0"
] | 9
|
2015-01-10T18:18:30.000Z
|
2016-05-21T01:37:00.000Z
|
# Copyright 2010-2011 Josh Kearney
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pyhole Configuration Manager"""
import ConfigParser
import os
import sys
import utils
class Config(object):
"""A configuration object."""
def __init__(self, config, section):
self.config = os.path.abspath(config)
self.config_parser = ConfigParser.ConfigParser()
self.section = section
for _i in range(0, 2):
try:
with open(self.config) as conf_file:
self.config_parser.readfp(conf_file)
except IOError:
print "Unable to load configuration file: %s" % self.config
utils.prepare_config()
def __str__(self):
"""Make the config object readable for logging."""
return self.section
def sections(self):
"""Return a list of sections."""
return self.config_parser.sections()
def get(self, option, **kwargs):
"""Retrieve configuration values."""
_type = kwargs.get("type", "str")
try:
if _type == "int":
return self.config_parser.getint(self.section, option)
elif _type == "bool":
return self.config_parser.getboolean(self.section, option)
elif _type == "list":
return self.config_parser.get(self.section, option).split(", ")
else:
return self.config_parser.get(self.section, option)
except ConfigParser.NoOptionError:
if "default" in kwargs:
return kwargs["default"]
print "Unable to locate '%s' in %s" % (option, self.config)
print "[%s]" % self.section
print "%s: value" % option
sys.exit(1)
| 33.289855
| 79
| 0.612538
|
9ae6226c29396d38b9178faf335f0e9ff21ca1e8
| 3,007
|
py
|
Python
|
kittycad/api/file/get_file_conversion.py
|
KittyCAD/kittycad.py
|
7f7460d366dbd55fce50e5faa4a032b62e4baae4
|
[
"MIT"
] | 1
|
2022-02-06T05:07:25.000Z
|
2022-02-06T05:07:25.000Z
|
kittycad/api/file/get_file_conversion.py
|
KittyCAD/kittycad.py
|
7f7460d366dbd55fce50e5faa4a032b62e4baae4
|
[
"MIT"
] | 7
|
2022-02-04T11:29:25.000Z
|
2022-03-07T01:37:26.000Z
|
kittycad/api/file/get_file_conversion.py
|
KittyCAD/kittycad.py
|
7f7460d366dbd55fce50e5faa4a032b62e4baae4
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict, Optional, Union, cast
import httpx
from ...client import Client
from ...models.file_conversion_with_output import FileConversionWithOutput
from ...models.error import Error
from ...types import Response
def _get_kwargs(
id: str,
*,
client: Client,
) -> Dict[str, Any]:
url = "{}/file/conversions/{id}".format(client.base_url, id=id)
headers: Dict[str, Any] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
return {
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
}
def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
if response.status_code == 200:
response_200 = FileConversionWithOutput.from_dict(response.json())
return response_200
if response.status_code == 400:
response_4XX = Error.from_dict(response.json())
return response_4XX
if response.status_code == 500:
response_5XX = Error.from_dict(response.json())
return response_5XX
return None
def _build_response(*, response: httpx.Response) -> Response[Union[Any, FileConversionWithOutput, Error]]:
return Response(
status_code=response.status_code,
content=response.content,
headers=response.headers,
parsed=_parse_response(response=response),
)
def sync_detailed(
id: str,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
id=id,
client=client,
)
response = httpx.get(
verify=client.verify_ssl,
**kwargs,
)
return _build_response(response=response)
def sync(
id: str,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Get the status and output of an async file conversion.
This endpoint requires authentication by any KittyCAD user. It returns details of the requested file conversion for the user.
If the user is not authenticated to view the specified file conversion, then it is not returned.
Only KittyCAD employees with the proper access can view file conversions for other users. """
return sync_detailed(
id=id,
client=client,
).parsed
async def asyncio_detailed(
id: str,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
id=id,
client=client,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
response = await _client.get(**kwargs)
return _build_response(response=response)
async def asyncio(
id: str,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Get the status and output of an async file conversion.
This endpoint requires authentication by any KittyCAD user. It returns details of the requested file conversion for the user.
If the user is not authenticated to view the specified file conversion, then it is not returned.
Only KittyCAD employees with the proper access can view file conversions for other users. """
return (
await asyncio_detailed(
id=id,
client=client,
)
).parsed
| 25.922414
| 125
| 0.745261
|
fa42cbeb9cd3faf5fdf8c1ae734320bbbb6234f9
| 102,341
|
py
|
Python
|
tests/validation/cattlevalidationtest/core/common_fixtures.py
|
bmdepesa/validation-tests
|
23e7ab95ce76744483a0657f790b42a88a93436d
|
[
"Apache-2.0"
] | 7
|
2015-11-18T17:43:08.000Z
|
2021-07-14T09:48:18.000Z
|
tests/validation/cattlevalidationtest/core/common_fixtures.py
|
bmdepesa/validation-tests
|
23e7ab95ce76744483a0657f790b42a88a93436d
|
[
"Apache-2.0"
] | 175
|
2015-07-09T18:41:24.000Z
|
2021-06-10T21:23:27.000Z
|
tests/validation/cattlevalidationtest/core/common_fixtures.py
|
bmdepesa/validation-tests
|
23e7ab95ce76744483a0657f790b42a88a93436d
|
[
"Apache-2.0"
] | 25
|
2015-08-08T04:54:24.000Z
|
2021-05-25T21:10:37.000Z
|
from cattle import from_env
import pytest
import random
import requests
import os
import time
import logging
import paramiko
import inspect
import re
import json
from docker import Client
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
TEST_IMAGE_UUID = os.environ.get('CATTLE_TEST_AGENT_IMAGE',
'docker:cattle/test-agent:v7')
SSH_HOST_IMAGE_UUID = os.environ.get('CATTLE_SSH_HOST_IMAGE',
'docker:rancher/ssh-host-container:' +
'v0.1.0')
SOCAT_IMAGE_UUID = os.environ.get('CATTLE_CLUSTER_SOCAT_IMAGE',
'docker:rancher/socat-docker:v0.2.0')
do_access_key = os.environ.get('DIGITALOCEAN_KEY')
do_install_url = os.environ.get(
'DOCKER_INSTALL_URL',
'https://releases.rancher.com/install-docker/1.10.sh')
WEB_IMAGE_UUID = "docker:sangeetha/testlbsd:latest"
SSH_IMAGE_UUID = "docker:sangeetha/testclient:latest"
LB_HOST_ROUTING_IMAGE_UUID = "docker:sangeetha/testnewhostrouting:latest"
SSH_IMAGE_UUID_HOSTNET = "docker:sangeetha/testclient33:latest"
HOST_ACCESS_IMAGE_UUID = "docker:sangeetha/testclient44:latest"
HEALTH_CHECK_IMAGE_UUID = "docker:sangeetha/testhealthcheck:v2"
MULTIPLE_EXPOSED_PORT_UUID = "docker:sangeetha/testmultipleport:v1"
DEFAULT_TIMEOUT = 45
DEFAULT_MACHINE_TIMEOUT = 900
RANCHER_DNS_SERVER = "169.254.169.250"
RANCHER_DNS_SEARCH = "rancher.internal"
RANCHER_FQDN = "rancher.internal"
SERVICE_WAIT_TIMEOUT = 120
SSLCERT_SUBDIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'resources/sslcerts')
K8_SUBDIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'resources/k8s')
PRIVATE_KEY_FILENAME = "/tmp/private_key_host_ssh"
HOST_SSH_TEST_ACCOUNT = "ranchertest"
HOST_SSH_PUBLIC_PORT = 2222
socat_container_list = []
host_container_list = []
ha_host_list = []
ha_host_count = 4
kube_host_count = 2
kube_host_list = []
rancher_compose_con = {"container": None, "host": None, "port": "7878"}
kubectl_client_con = {"container": None, "host": None, "port": "9999"}
kubectl_version = os.environ.get('KUBECTL_VERSION', "v1.2.2")
CONTAINER_STATES = ["running", "stopped", "stopping"]
cert_list = {}
MANAGED_NETWORK = "managed"
UNMANAGED_NETWORK = "bridge"
dns_labels = {"io.rancher.container.dns": "true",
"io.rancher.scheduler.affinity:container_label_ne":
"io.rancher.stack_service.name=${stack_name}/${service_name}"}
@pytest.fixture(scope='session')
def cattle_url():
default_url = 'http://localhost:8080/v1/schemas'
return os.environ.get('CATTLE_TEST_URL', default_url)
def _admin_client():
access_key = os.environ.get("CATTLE_ACCESS_KEY", 'admin')
secret_key = os.environ.get("CATTLE_SECRET_KEY", 'adminpass')
return from_env(url=cattle_url(),
cache=False,
access_key=access_key,
secret_key=secret_key)
def _client_for_user(name, accounts):
return from_env(url=cattle_url(),
cache=False,
access_key=accounts[name][0],
secret_key=accounts[name][1])
def create_user(admin_client, user_name, kind=None):
if kind is None:
kind = user_name
password = user_name + 'pass'
account = create_type_by_uuid(admin_client, 'account', user_name,
kind=user_name,
name=user_name)
active_cred = None
for cred in account.credentials():
if cred.kind == 'apiKey' and cred.publicValue == user_name:
active_cred = cred
break
if active_cred is None:
active_cred = admin_client.create_api_key({
'accountId': account.id,
'publicValue': user_name,
'secretValue': password
})
active_cred = wait_success(admin_client, active_cred)
if active_cred.state != 'active':
wait_success(admin_client, active_cred.activate())
return [user_name, password, account]
def acc_id(client):
obj = client.list_api_key()[0]
return obj.account().id
def client_for_project(project):
access_key = random_str()
secret_key = random_str()
admin_client = _admin_client()
active_cred = None
account = project
for cred in account.credentials():
if cred.kind == 'apiKey' and cred.publicValue == access_key:
active_cred = cred
break
if active_cred is None:
active_cred = admin_client.create_api_key({
'accountId': account.id,
'publicValue': access_key,
'secretValue': secret_key
})
active_cred = wait_success(admin_client, active_cred)
if active_cred.state != 'active':
wait_success(admin_client, active_cred.activate())
return from_env(url=cattle_url(),
cache=False,
access_key=access_key,
secret_key=secret_key)
def wait_success(client, obj, timeout=DEFAULT_TIMEOUT):
return client.wait_success(obj, timeout=timeout)
def create_type_by_uuid(admin_client, type, uuid, activate=True, validate=True,
**kw):
opts = dict(kw)
opts['uuid'] = uuid
objs = admin_client.list(type, uuid=uuid)
obj = None
if len(objs) == 0:
obj = admin_client.create(type, **opts)
else:
obj = objs[0]
obj = wait_success(admin_client, obj)
if activate and obj.state == 'inactive':
obj.activate()
obj = wait_success(admin_client, obj)
if validate:
for k, v in opts.items():
assert getattr(obj, k) == v
return obj
@pytest.fixture(scope='session')
def accounts():
result = {}
admin_client = _admin_client()
for user_name in ['admin', 'agent', 'user', 'agentRegister', 'test',
'readAdmin', 'token', 'superadmin', 'service']:
result[user_name] = create_user(admin_client,
user_name,
kind=user_name)
result['admin'] = create_user(admin_client, 'admin')
system_account = admin_client.list_account(kind='system', uuid='system')[0]
result['system'] = [None, None, system_account]
return result
@pytest.fixture(scope='session')
def client(admin_client):
client = client_for_project(
admin_client.list_project(uuid="adminProject")[0])
assert client.valid()
return client
@pytest.fixture(scope='session')
def admin_client():
admin_client = _admin_client()
assert admin_client.valid()
return admin_client
@pytest.fixture(scope='session')
def super_client(accounts):
ret = _client_for_user('superadmin', accounts)
return ret
@pytest.fixture
def test_name():
return random_str()
@pytest.fixture
def random_str():
return 'test-{0}'.format(random_num())
@pytest.fixture
def random_num():
return random.randint(0, 1000000)
def wait_all_success(client, items, timeout=DEFAULT_TIMEOUT):
result = []
for item in items:
item = client.wait_success(item, timeout=timeout)
result.append(item)
return result
@pytest.fixture
def managed_network(client):
networks = client.list_network(uuid='managed-docker0')
assert len(networks) == 1
return networks[0]
@pytest.fixture(scope='session')
def unmanaged_network(client):
networks = client.list_network(uuid='unmanaged')
assert len(networks) == 1
return networks[0]
@pytest.fixture
def one_per_host(client, test_name):
instances = []
hosts = client.list_host(kind='docker', removed_null=True)
assert len(hosts) > 2
for host in hosts:
c = client.create_container(name=test_name,
ports=['3000:3000'],
networkMode=MANAGED_NETWORK,
imageUuid=TEST_IMAGE_UUID,
requestedHostId=host.id)
instances.append(c)
instances = wait_all_success(
client, instances, timeout=SERVICE_WAIT_TIMEOUT)
for i in instances:
ports = i.ports_link()
assert len(ports) == 1
port = ports[0]
assert port.privatePort == 3000
assert port.publicPort == 3000
ping_port(port)
return instances
def delete_all(client, items):
wait_for = []
for i in items:
client.delete(i)
wait_for.append(client.reload(i))
wait_all_success(client, items, timeout=180)
def delete_by_id(self, type, id):
url = self.schema.types[type].links.collection
if url.endswith('/'):
url = url + id
else:
url = '/'.join([url, id])
return self._delete(url)
def get_port_content(port, path, params={}):
assert port.publicPort is not None
assert port.publicIpAddressId is not None
url = 'http://{}:{}/{}'.format(port.publicIpAddress().address,
port.publicPort,
path)
e = None
for i in range(60):
try:
return requests.get(url, params=params, timeout=5).text
except Exception as e1:
e = e1
logger.exception('Failed to call %s', url)
time.sleep(1)
pass
if e is not None:
raise e
raise Exception('failed to call url {0} for port'.format(url))
def ping_port(port):
pong = get_port_content(port, 'ping')
assert pong == 'pong'
def ping_link(src, link_name, var=None, value=None):
src_port = src.ports_link()[0]
links = src.instanceLinks()
assert len(links) == 1
assert len(links[0].ports) == 1
assert links[0].linkName == link_name
for i in range(3):
from_link = get_port_content(src_port, 'get', params={
'link': link_name,
'path': 'env?var=' + var,
'port': links[0].ports[0].privatePort
})
if from_link == value:
continue
else:
time.sleep(1)
assert from_link == value
def generate_RSA(bits=2048):
'''
Generate an RSA keypair
'''
from Crypto.PublicKey import RSA
new_key = RSA.generate(bits)
public_key = new_key.publickey().exportKey('OpenSSH')
private_key = new_key.exportKey()
return private_key, public_key
@pytest.fixture(scope='session')
def host_ssh_containers(request, client):
keys = generate_RSA()
host_key = keys[0]
os.system("echo '" + host_key + "' >" + PRIVATE_KEY_FILENAME)
hosts = client.list_host(kind='docker', removed_null=True)
ssh_containers = []
for host in hosts:
env_var = {"SSH_KEY": keys[1]}
docker_vol_value = ["/usr/bin/docker:/usr/bin/docker",
"/var/run/docker.sock:/var/run/docker.sock"
]
c = client.create_container(name="host_ssh_container",
networkMode=MANAGED_NETWORK,
imageUuid=SSH_HOST_IMAGE_UUID,
requestedHostId=host.id,
dataVolumes=docker_vol_value,
environment=env_var,
ports=[str(HOST_SSH_PUBLIC_PORT)+":22"]
)
ssh_containers.append(c)
for c in ssh_containers:
c = client.wait_success(c, 180)
assert c.state == "running"
def fin():
for c in ssh_containers:
client.delete(c)
os.system("rm " + PRIVATE_KEY_FILENAME)
request.addfinalizer(fin)
def get_ssh_to_host_ssh_container(host):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username=HOST_SSH_TEST_ACCOUNT,
key_filename=PRIVATE_KEY_FILENAME, port=HOST_SSH_PUBLIC_PORT)
return ssh
@pytest.fixture
def wait_for_condition(client, resource, check_function, fail_handler=None,
timeout=180):
start = time.time()
resource = client.reload(resource)
while not check_function(resource):
if time.time() - start > timeout:
exceptionMsg = 'Timeout waiting for ' + resource.kind + \
' to satisfy condition: ' + \
inspect.getsource(check_function)
if (fail_handler):
exceptionMsg = exceptionMsg + fail_handler(resource)
raise Exception(exceptionMsg)
time.sleep(.5)
resource = client.reload(resource)
return resource
def wait_for(callback, timeout=DEFAULT_TIMEOUT, timeout_message=None):
start = time.time()
ret = callback()
while ret is None or ret is False:
time.sleep(.5)
if time.time() - start > timeout:
if timeout_message:
raise Exception(timeout_message)
else:
raise Exception('Timeout waiting for condition')
ret = callback()
return ret
@pytest.fixture(scope='session')
def ha_hosts(client, admin_client):
hosts = client.list_host(
kind='docker', removed_null=True, state="active",
include="physicalHost")
do_host_count = 0
if len(hosts) >= ha_host_count:
for i in range(0, len(hosts)):
if hosts[i].physicalHost.driver == "digitalocean":
do_host_count += 1
ha_host_list.append(hosts[i])
if do_host_count < ha_host_count:
host_list = \
add_digital_ocean_hosts(client, ha_host_count - do_host_count)
ha_host_list.extend(host_list)
@pytest.fixture(scope='session')
def kube_hosts(request, client, admin_client):
project = admin_client.list_project(uuid="adminProject")[0]
# If Default project is not kubernetes, make it a kubernetes environment
if not project.kubernetes:
project = admin_client.update(
project, kubernetes=True)
project = wait_success(admin_client, project)
hosts = client.list_host(
kind='docker', removed_null=True, state="active",
include="physicalHost")
do_host_count = 0
if len(hosts) >= kube_host_count:
for i in range(0, len(hosts)):
if hosts[i].physicalHost.driver == "digitalocean":
do_host_count += 1
kube_host_list.append(hosts[i])
if do_host_count < kube_host_count:
host_list = \
add_digital_ocean_hosts(
client, kube_host_count - do_host_count)
kube_host_list.extend(host_list)
# Wait for Kubernetes environment to get created successfully
start = time.time()
env = client.list_environment(name="Kubernetes")
while len(env) != 1:
time.sleep(.5)
env = client.list_environment(name="Kubernetes")
if time.time() - start > 30:
raise Exception(
'Timed out waiting for Kubernetes env to get created')
environment = env[0]
wait_for_condition(
admin_client, environment,
lambda x: x.state == "active",
lambda x: 'State is: ' + x.state,
timeout=600)
if kubectl_client_con["container"] is None:
test_client_con = create_kubectl_client_container(client, "9999")
kubectl_client_con["container"] = test_client_con["container"]
kubectl_client_con["host"] = test_client_con["host"]
def remove():
delete_all(client, [kubectl_client_con["container"]])
request.addfinalizer(remove)
@pytest.fixture(scope='session')
def glusterfs_glusterconvoy(client, admin_client, request):
catalog_url = cattle_url() + "/v1-catalog/templates/library:"
# Deploy GlusterFS template from catalog
r = requests.get(catalog_url + "glusterfs:0")
template = json.loads(r.content)
r.close()
dockerCompose = template["dockerCompose"]
rancherCompose = template["rancherCompose"]
environment = {}
questions = template["questions"]
for question in questions:
label = question["variable"]
value = question["default"]
environment[label] = value
env = client.create_environment(name="glusterfs",
dockerCompose=dockerCompose,
rancherCompose=rancherCompose,
environment=environment,
startOnCreate=True)
env = client.wait_success(env, timeout=300)
assert env.state == "active"
for service in env.services():
wait_for_condition(
admin_client, service,
lambda x: x.state == "active",
lambda x: 'State is: ' + x.state,
timeout=600)
# Deploy ConvoyGluster template from catalog
r = requests.get(catalog_url + "convoy-gluster:1")
template = json.loads(r.content)
r.close()
dockerCompose = template["dockerCompose"]
rancherCompose = template["rancherCompose"]
environment = {}
questions = template["questions"]
print questions
for question in questions:
label = question["variable"]
value = question["default"]
environment[label] = value
environment["GLUSTERFS_SERVICE"] = "glusterfs/glusterfs-server"
env = client.create_environment(name="convoy-gluster",
dockerCompose=dockerCompose,
rancherCompose=rancherCompose,
environment=environment,
startOnCreate=True)
env = client.wait_success(env, timeout=300)
for service in env.services():
wait_for_condition(
admin_client, service,
lambda x: x.state == "active",
lambda x: 'State is: ' + x.state,
timeout=600)
# Verify that storage pool is created
storagepools = client.list_storage_pool(removed_null=True,
include="hosts",
kind="storagePool")
print storagepools
assert len(storagepools) == 1
def remove():
env1 = client.list_environment(name="glusterfs")
assert len(env1) == 1
env2 = client.list_environment(name="convoy-gluster")
assert len(env2) == 1
delete_all(client, [env1[0], env2[0]])
request.addfinalizer(remove)
@pytest.fixture(scope='session')
def socat_containers(client, request):
# When these tests run in the CI environment, the hosts don't expose the
# docker daemon over tcp, so we need to create a container that binds to
# the docker socket and exposes it on a port
if len(socat_container_list) != 0:
return
hosts = client.list_host(kind='docker', removed_null=True, state='active')
for host in hosts:
socat_container = client.create_container(
name='socat-%s' % random_str(),
networkMode=MANAGED_NETWORK,
imageUuid=SOCAT_IMAGE_UUID,
ports='2375:2375/tcp',
stdinOpen=False,
tty=False,
publishAllPorts=True,
privileged=True,
dataVolumes='/var/run/docker.sock:/var/run/docker.sock',
requestedHostId=host.id,
restartPolicy={"name": "always"})
socat_container_list.append(socat_container)
for socat_container in socat_container_list:
wait_for_condition(
client, socat_container,
lambda x: x.state == 'running',
lambda x: 'State is: ' + x.state)
time.sleep(10)
for host in hosts:
host_container = client.create_container(
name='host-%s' % random_str(),
networkMode="host",
imageUuid=HOST_ACCESS_IMAGE_UUID,
privileged=True,
requestedHostId=host.id,
restartPolicy={"name": "always"})
host_container_list.append(host_container)
for host_container in host_container_list:
wait_for_condition(
client, host_container,
lambda x: x.state in ('running', 'stopped'),
lambda x: 'State is: ' + x.state)
time.sleep(10)
def remove_socat():
delete_all(client, socat_container_list)
delete_all(client, host_container_list)
request.addfinalizer(remove_socat)
def get_docker_client(host):
ip = host.ipAddresses()[0].address
port = '2375'
params = {}
params['base_url'] = 'tcp://%s:%s' % (ip, port)
api_version = os.getenv('DOCKER_API_VERSION', '1.18')
params['version'] = api_version
return Client(**params)
def wait_for_scale_to_adjust(admin_client, service):
service = admin_client.wait_success(service)
instance_maps = admin_client.list_serviceExposeMap(serviceId=service.id,
state="active",
managed=1)
start = time.time()
while len(instance_maps) != \
get_service_instance_count(admin_client, service):
time.sleep(.5)
instance_maps = admin_client.list_serviceExposeMap(
serviceId=service.id, state="active")
if time.time() - start > 30:
raise Exception('Timed out waiting for Service Expose map to be ' +
'created for all instances')
for instance_map in instance_maps:
c = admin_client.by_id('container', instance_map.instanceId)
wait_for_condition(
admin_client, c,
lambda x: x.state == "running",
lambda x: 'State is: ' + x.state)
def check_service_map(admin_client, service, instance, state):
instance_service_map = admin_client.\
list_serviceExposeMap(serviceId=service.id, instanceId=instance.id,
state=state)
assert len(instance_service_map) == 1
def get_container_names_list(admin_client, services):
container_names = []
for service in services:
containers = get_service_container_list(admin_client, service)
for c in containers:
if c.state == "running":
container_names.append(c.externalId[:12])
return container_names
def validate_add_service_link(admin_client, service, consumedService):
service_maps = admin_client. \
list_serviceConsumeMap(serviceId=service.id,
consumedServiceId=consumedService.id)
assert len(service_maps) == 1
service_map = service_maps[0]
wait_for_condition(
admin_client, service_map,
lambda x: x.state == "active",
lambda x: 'State is: ' + x.state)
def validate_remove_service_link(admin_client, service, consumedService):
service_maps = admin_client. \
list_serviceConsumeMap(serviceId=service.id,
consumedServiceId=consumedService.id)
assert len(service_maps) == 1
service_map = service_maps[0]
wait_for_condition(
admin_client, service_map,
lambda x: x.state == "removed",
lambda x: 'State is: ' + x.state)
def get_service_container_list(admin_client, service, managed=None):
container = []
if managed is not None:
all_instance_maps = \
admin_client.list_serviceExposeMap(serviceId=service.id,
managed=managed)
else:
all_instance_maps = \
admin_client.list_serviceExposeMap(serviceId=service.id)
instance_maps = []
for instance_map in all_instance_maps:
if instance_map.state not in ("removed", "removing"):
instance_maps.append(instance_map)
for instance_map in instance_maps:
c = admin_client.by_id('container', instance_map.instanceId)
assert c.state in CONTAINER_STATES
containers = admin_client.list_container(
externalId=c.externalId,
include="hosts")
assert len(containers) == 1
container.append(containers[0])
return container
def link_svc_with_port(admin_client, service, linkservices, port):
for linkservice in linkservices:
service_link = {"serviceId": linkservice.id, "ports": [port]}
service = service.addservicelink(serviceLink=service_link)
validate_add_service_link(admin_client, service, linkservice)
return service
def link_svc(admin_client, service, linkservices):
for linkservice in linkservices:
service_link = {"serviceId": linkservice.id}
service = service.addservicelink(serviceLink=service_link)
validate_add_service_link(admin_client, service, linkservice)
return service
def activate_svc(client, service):
service.activate()
service = client.wait_success(service, SERVICE_WAIT_TIMEOUT)
assert service.state == "active"
return service
def validate_exposed_port(admin_client, service, public_port):
con_list = get_service_container_list(admin_client, service)
assert len(con_list) == service.scale
time.sleep(5)
for con in con_list:
con_host = admin_client.by_id('host', con.hosts[0].id)
for port in public_port:
response = get_http_response(con_host, port, "/service.html")
assert response == con.externalId[:12]
def validate_exposed_port_and_container_link(admin_client, con, link_name,
link_port, exposed_port):
time.sleep(10)
# Validate that the environment variables relating to link containers are
# set
containers = admin_client.list_container(externalId=con.externalId,
include="hosts",
removed_null=True)
assert len(containers) == 1
con = containers[0]
host = admin_client.by_id('host', con.hosts[0].id)
docker_client = get_docker_client(host)
inspect = docker_client.inspect_container(con.externalId)
response = inspect["Config"]["Env"]
logger.info(response)
address = None
port = None
env_name_link_address = link_name + "_PORT_" + str(link_port) + "_TCP_ADDR"
env_name_link_name = link_name + "_PORT_" + str(link_port) + "_TCP_PORT"
for env_var in response:
if env_name_link_address in env_var:
address = env_var[env_var.index("=")+1:]
if env_name_link_name in env_var:
port = env_var[env_var.index("=")+1:]
logger.info(address)
logger.info(port)
assert address and port is not None
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=exposed_port)
# Validate link containers
cmd = "wget -O result.txt --timeout=20 --tries=1 http://" + \
address+":"+port+"/name.html" + ";cat result.txt"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
assert len(response) == 1
resp = response[0].strip("\n")
logger.info(resp)
assert link_name == resp
def wait_for_lb_service_to_become_active(admin_client, client,
services, lb_service,
unmanaged_con_count=None):
wait_for_config_propagation(admin_client, lb_service)
lb_containers = get_service_container_list(admin_client, lb_service)
assert len(lb_containers) == lb_service.scale
# Get haproxy config from Lb Agents
for lb_con in lb_containers:
host = admin_client.by_id('host', lb_con.hosts[0].id)
docker_client = get_docker_client(host)
haproxy = docker_client.copy(
lb_con.externalId, "/etc/haproxy/haproxy.cfg")
print "haproxy: " + haproxy.read()
# Get iptable entries from host
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=44)
cmd = "iptables-save"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
responses = stdout.readlines()
for response in responses:
print response
def validate_lb_service_for_external_services(admin_client, client, lb_service,
port, container_list,
hostheader=None, path=None):
container_names = []
for con in container_list:
container_names.append(con.externalId[:12])
validate_lb_service_con_names(admin_client, client, lb_service, port,
container_names, hostheader, path)
def validate_lb_service(admin_client, client, lb_service, port,
target_services, hostheader=None, path=None,
domain=None, test_ssl_client_con=None,
unmanaged_cons=None):
target_count = 0
for service in target_services:
target_count = \
target_count + get_service_instance_count(client, service)
container_names = get_container_names_list(admin_client,
target_services)
logger.info(container_names)
# Check that unmanaged containers for each services in present in
# container_names
if unmanaged_cons is not None:
unmanaged_con_count = 0
for service in target_services:
if service.id in unmanaged_cons.keys():
unmanaged_con_list = unmanaged_cons[service.id]
unmanaged_con_count = unmanaged_con_count + 1
for con in unmanaged_con_list:
if con not in container_names:
assert False
assert len(container_names) == target_count + unmanaged_con_count
else:
assert len(container_names) == target_count
validate_lb_service_con_names(admin_client, client, lb_service, port,
container_names, hostheader, path, domain,
test_ssl_client_con)
def validate_lb_service_con_names(admin_client, client, lb_service, port,
container_names,
hostheader=None, path=None, domain=None,
test_ssl_client_con=None):
lb_containers = get_service_container_list(admin_client, lb_service)
assert len(lb_containers) == get_service_instance_count(client, lb_service)
for lb_con in lb_containers:
host = client.by_id('host', lb_con.hosts[0].id)
if domain:
# Validate for ssl listeners
# wait_until_lb_is_active(host, port, is_ssl=True)
if hostheader is not None or path is not None:
check_round_robin_access_for_ssl(container_names, host, port,
domain, test_ssl_client_con,
hostheader, path)
else:
check_round_robin_access_for_ssl(container_names, host, port,
domain, test_ssl_client_con)
else:
wait_until_lb_is_active(host, port)
if hostheader is not None or path is not None:
check_round_robin_access(container_names, host, port,
hostheader, path)
else:
check_round_robin_access(container_names, host, port)
def validate_cert_error(admin_client, client, lb_service, port, domain,
default_domain, cert,
hostheader=None, path=None,
test_ssl_client_con=None):
lb_containers = get_service_container_list(admin_client, lb_service)
for lb_con in lb_containers:
host = client.by_id('host', lb_con.hosts[0].id)
check_for_cert_error(host, port, domain, default_domain, cert,
test_ssl_client_con)
def wait_until_lb_is_active(host, port, timeout=30, is_ssl=False):
start = time.time()
while check_for_no_access(host, port, is_ssl):
time.sleep(.5)
print "No access yet"
if time.time() - start > timeout:
raise Exception('Timed out waiting for LB to become active')
return
def check_for_no_access(host, port, is_ssl=False):
if is_ssl:
protocol = "https://"
else:
protocol = "http://"
try:
url = protocol+host.ipAddresses()[0].address+":"+port+"/name.html"
requests.get(url)
return False
except requests.ConnectionError:
logger.info("Connection Error - " + url)
return True
def wait_until_lb_ip_is_active(lb_ip, port, timeout=30, is_ssl=False):
start = time.time()
while check_for_no_access_ip(lb_ip, port, is_ssl):
time.sleep(.5)
print "No access yet"
if time.time() - start > timeout:
raise Exception('Timed out waiting for LB to become active')
return
def check_for_no_access_ip(lb_ip, port, is_ssl=False):
if is_ssl:
protocol = "https://"
else:
protocol = "http://"
try:
url = protocol+lb_ip+":"+port+"/name.html"
requests.get(url)
return False
except requests.ConnectionError:
logger.info("Connection Error - " + url)
return True
def validate_linked_service(admin_client, service, consumed_services,
exposed_port, exclude_instance=None,
exclude_instance_purged=False,
unmanaged_cons=None, linkName=None):
time.sleep(5)
containers = get_service_container_list(admin_client, service)
assert len(containers) == service.scale
for container in containers:
host = admin_client.by_id('host', container.hosts[0].id)
for consumed_service in consumed_services:
expected_dns_list = []
expected_link_response = []
dns_response = []
consumed_containers = get_service_container_list(admin_client,
consumed_service)
if exclude_instance_purged:
assert len(consumed_containers) == consumed_service.scale - 1
else:
if unmanaged_cons is not None \
and consumed_service.id in unmanaged_cons.keys():
unmanaged_con_list = \
unmanaged_cons[consumed_service.id]
assert \
len(consumed_containers) == \
consumed_service.scale + len(unmanaged_con_list)
for con in unmanaged_con_list:
print "Checking for container : " + con.name
found = False
for consumed_con in consumed_containers:
if con.id == consumed_con.id:
found = True
break
assert found
else:
assert len(consumed_containers) == consumed_service.scale
for con in consumed_containers:
if (exclude_instance is not None) \
and (con.id == exclude_instance.id):
logger.info("Excluded from DNS and wget list:" + con.name)
else:
if con.networkMode == "host":
con_host = admin_client.by_id('host', con.hosts[0].id)
expected_dns_list.append(
con_host.ipAddresses()[0].address)
expected_link_response.append(con_host.hostname)
else:
expected_dns_list.append(con.primaryIpAddress)
expected_link_response.append(con.externalId[:12])
logger.info("Expected dig response List" + str(expected_dns_list))
logger.info("Expected wget response List" +
str(expected_link_response))
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(exposed_port))
if linkName is None:
linkName = consumed_service.name
# Validate link containers
cmd = "wget -O result.txt --timeout=20 --tries=1 http://" + \
linkName + ":80/name.html;cat result.txt"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
assert len(response) == 1
resp = response[0].strip("\n")
logger.info("Actual wget Response" + str(resp))
assert resp in (expected_link_response)
# Validate DNS resolution using dig
cmd = "dig " + linkName + " +short"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
logger.info("Actual dig Response" + str(response))
unmanaged_con_count = 0
if (unmanaged_cons is not None) and \
(consumed_service.id in unmanaged_cons.keys()):
unmanaged_con_count = len(unmanaged_cons[consumed_service.id])
expected_entries_dig = consumed_service.scale + unmanaged_con_count
if exclude_instance is not None:
expected_entries_dig = expected_entries_dig - 1
assert len(response) == expected_entries_dig
for resp in response:
dns_response.append(resp.strip("\n"))
for address in expected_dns_list:
assert address in dns_response
def validate_dns_service(admin_client, service, consumed_services,
exposed_port, dnsname, exclude_instance=None,
exclude_instance_purged=False, unmanaged_cons=None):
time.sleep(5)
service_containers = get_service_container_list(admin_client, service)
assert len(service_containers) == service.scale
for con in service_containers:
host = admin_client.by_id('host', con.hosts[0].id)
containers = []
expected_dns_list = []
expected_link_response = []
dns_response = []
for consumed_service in consumed_services:
cons = get_service_container_list(admin_client, consumed_service)
if exclude_instance_purged:
assert len(cons) == consumed_service.scale - 1
else:
if unmanaged_cons is not None \
and consumed_service.id in unmanaged_cons.keys():
unmanaged_con_list = unmanaged_cons[consumed_service.id]
if unmanaged_con_list is not None:
assert len(cons) == \
consumed_service.scale + \
len(unmanaged_con_list)
for con in unmanaged_con_list:
print "Checking for container : " + con.name
found = False
for consumed_con in cons:
if con.id == consumed_con.id:
found = True
break
assert found
else:
assert len(cons) == consumed_service.scale
containers = containers + cons
for con in containers:
if (exclude_instance is not None) \
and (con.id == exclude_instance.id):
logger.info("Excluded from DNS and wget list:" + con.name)
else:
if con.networkMode == "host":
con_host = admin_client.by_id('host', con.hosts[0].id)
expected_dns_list.append(con_host.ipAddresses()[0].address)
expected_link_response.append(con_host.hostname)
else:
expected_dns_list.append(con.primaryIpAddress)
expected_link_response.append(con.externalId[:12])
logger.info("Expected dig response List" + str(expected_dns_list))
logger.info("Expected wget response List" +
str(expected_link_response))
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(exposed_port))
# Validate link containers
cmd = "wget -O result.txt --timeout=20 --tries=1 http://" + dnsname + \
":80/name.html;cat result.txt"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
assert len(response) == 1
resp = response[0].strip("\n")
logger.info("Actual wget Response" + str(resp))
assert resp in (expected_link_response)
# Validate DNS resolution using dig
cmd = "dig " + dnsname + " +short"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
logger.info("Actual dig Response" + str(response))
assert len(response) == len(expected_dns_list)
for resp in response:
dns_response.append(resp.strip("\n"))
for address in expected_dns_list:
assert address in dns_response
def validate_external_service(admin_client, service, ext_services,
exposed_port, container_list,
exclude_instance=None,
exclude_instance_purged=False):
time.sleep(5)
containers = get_service_container_list(admin_client, service)
assert len(containers) == service.scale
for container in containers:
print "Validation for container -" + str(container.name)
host = admin_client.by_id('host', container.hosts[0].id)
for ext_service in ext_services:
expected_dns_list = []
expected_link_response = []
dns_response = []
for con in container_list:
if (exclude_instance is not None) \
and (con.id == exclude_instance.id):
print "Excluded from DNS and wget list:" + con.name
else:
expected_dns_list.append(con.primaryIpAddress)
expected_link_response.append(con.externalId[:12])
print "Expected dig response List" + str(expected_dns_list)
print "Expected wget response List" + str(expected_link_response)
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(exposed_port))
# Validate link containers
cmd = "wget -O result.txt --timeout=20 --tries=1 http://" + \
ext_service.name + ":80/name.html;cat result.txt"
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
assert len(response) == 1
resp = response[0].strip("\n")
print "Actual wget Response" + str(resp)
assert resp in (expected_link_response)
# Validate DNS resolution using dig
cmd = "dig " + ext_service.name + " +short"
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
print "Actual dig Response" + str(response)
expected_entries_dig = len(container_list)
if exclude_instance is not None:
expected_entries_dig = expected_entries_dig - 1
assert len(response) == expected_entries_dig
for resp in response:
dns_response.append(resp.strip("\n"))
for address in expected_dns_list:
assert address in dns_response
def validate_external_service_for_hostname(admin_client, service, ext_services,
exposed_port):
time.sleep(5)
containers = get_service_container_list(admin_client, service)
assert len(containers) == service.scale
for container in containers:
print "Validation for container -" + str(container.name)
host = admin_client.by_id('host', container.hosts[0].id)
for ext_service in ext_services:
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(exposed_port))
cmd = "ping -c 2 " + ext_service.name + \
"> result.txt;cat result.txt"
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
print "Actual wget Response" + str(response)
assert ext_service.hostname in str(response) and \
"0% packet loss" in str(response)
@pytest.fixture(scope='session')
def rancher_compose_container(admin_client, client, request):
if rancher_compose_con["container"] is not None:
return
setting = admin_client.by_id_setting(
"default.cattle.rancher.compose.linux.url")
rancher_compose_url = setting.value
cmd1 = \
"wget " + rancher_compose_url
compose_file = rancher_compose_url.split("/")[-1]
# cmd2 = "tar xvf rancher-compose-linux-amd64.tar.gz"
cmd2 = "tar xvf " + compose_file
hosts = client.list_host(kind='docker', removed_null=True, state="active")
assert len(hosts) > 0
host = hosts[0]
port = rancher_compose_con["port"]
c = client.create_container(name="rancher-compose-client",
networkMode=MANAGED_NETWORK,
imageUuid="docker:sangeetha/testclient",
ports=[port+":22/tcp"],
requestedHostId=host.id
)
c = client.wait_success(c, SERVICE_WAIT_TIMEOUT)
assert c.state == "running"
time.sleep(5)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(port))
cmd = cmd1+";"+cmd2
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
found = False
for resp in response:
if "/rancher-compose" in resp:
found = True
assert found
rancher_compose_con["container"] = c
rancher_compose_con["host"] = host
def remove_rancher_compose_container():
delete_all(client, [rancher_compose_con["container"]])
request.addfinalizer(remove_rancher_compose_container)
def launch_rancher_compose(client, env):
compose_configs = env.exportconfig()
docker_compose = compose_configs["dockerComposeConfig"]
rancher_compose = compose_configs["rancherComposeConfig"]
execute_rancher_compose(client, env.name + "rancher",
docker_compose, rancher_compose,
"up -d", "Creating stack")
def execute_rancher_compose(client, env_name, docker_compose,
rancher_compose, command, expected_resp,
timeout=SERVICE_WAIT_TIMEOUT):
access_key = client._access_key
secret_key = client._secret_key
docker_filename = env_name + "-docker-compose.yml"
rancher_filename = env_name + "-rancher-compose.yml"
project_name = env_name
cmd1 = "export RANCHER_URL=" + cattle_url()
cmd2 = "export RANCHER_ACCESS_KEY=" + access_key
cmd3 = "export RANCHER_SECRET_KEY=" + secret_key
cmd4 = "cd rancher-compose-v*"
cmd5 = "echo '" + docker_compose + "' > " + docker_filename
if rancher_compose is not None:
rcmd = "echo '" + rancher_compose + "' > " + rancher_filename + ";"
cmd6 = rcmd + "./rancher-compose -p " + project_name + " -f " \
+ docker_filename + " -r " + rancher_filename + \
" " + command
else:
cmd6 = "./rancher-compose -p " + project_name + \
" -f " + docker_filename + " " + command
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
rancher_compose_con["host"].ipAddresses()[0].address, username="root",
password="root", port=int(rancher_compose_con["port"]))
cmd = cmd1+";"+cmd2+";"+cmd3+";"+cmd4+";"+cmd5+";"+cmd6
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd, timeout=timeout)
response = stdout.readlines()
print "Obtained Response: " + str(response)
print "Expected Response: " + expected_resp
found = False
for resp in response:
if expected_resp in resp:
found = True
assert found
def launch_rancher_compose_from_file(client, subdir, docker_compose,
env_name, command, response,
rancher_compose=None):
docker_compose = readDataFile(subdir, docker_compose)
if rancher_compose is not None:
rancher_compose = readDataFile(subdir, rancher_compose)
execute_rancher_compose(client, env_name, docker_compose,
rancher_compose, command, response)
def create_env_with_svc_and_lb(client, scale_svc, scale_lb, port,
internal=False, lb_config=None):
launch_config_svc = {"imageUuid": WEB_IMAGE_UUID}
if internal:
launch_config_lb = {"expose": [port+":80"]}
else:
launch_config_lb = {"ports": [port+":80"]}
# Create Environment
env = create_env(client)
# Create Service
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
# Create LB Service
random_name = random_str()
service_name = "LB-" + random_name.replace("-", "")
lb_service = client.create_loadBalancerService(
name=service_name,
environmentId=env.id,
launchConfig=launch_config_lb,
scale=scale_lb,
loadBalancerConfig=lb_config)
lb_service = client.wait_success(lb_service)
assert lb_service.state == "inactive"
return env, service, lb_service
def create_env_with_ext_svc_and_lb(client, scale_lb, port):
launch_config_lb = {"ports": [port+":80"]}
env, service, ext_service, con_list = create_env_with_ext_svc(
client, 1, port)
# Create LB Service
random_name = random_str()
service_name = "LB-" + random_name.replace("-", "")
lb_service = client.create_loadBalancerService(
name=service_name,
environmentId=env.id,
launchConfig=launch_config_lb,
scale=scale_lb)
lb_service = client.wait_success(lb_service)
assert lb_service.state == "inactive"
return env, lb_service, ext_service, con_list
def create_env_with_2_svc(client, scale_svc, scale_consumed_svc, port):
launch_config_svc = {"imageUuid": SSH_IMAGE_UUID,
"ports": [port+":22/tcp"]}
launch_config_consumed_svc = {"imageUuid": WEB_IMAGE_UUID}
# Create Environment
env = create_env(client)
# Create Service
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
# Create Consumed Service
random_name = random_str()
service_name = random_name.replace("-", "")
consumed_service = client.create_service(
name=service_name, environmentId=env.id,
launchConfig=launch_config_consumed_svc, scale=scale_consumed_svc)
consumed_service = client.wait_success(consumed_service)
assert consumed_service.state == "inactive"
return env, service, consumed_service
def create_env_with_2_svc_dns(client, scale_svc, scale_consumed_svc, port,
cross_linking=False):
launch_config_svc = {"imageUuid": SSH_IMAGE_UUID,
"ports": [port+":22/tcp"]}
launch_config_consumed_svc = {"imageUuid": WEB_IMAGE_UUID}
# Create Environment for dns service and client service
env = create_env(client)
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
# Create Consumed Service1
if cross_linking:
env_id = create_env(client).id
else:
env_id = env.id
random_name = random_str()
service_name = random_name.replace("-", "")
consumed_service = client.create_service(
name=service_name, environmentId=env_id,
launchConfig=launch_config_consumed_svc, scale=scale_consumed_svc)
consumed_service = client.wait_success(consumed_service)
assert consumed_service.state == "inactive"
# Create Consumed Service2
if cross_linking:
env_id = create_env(client).id
else:
env_id = env.id
random_name = random_str()
service_name = random_name.replace("-", "")
consumed_service1 = client.create_service(
name=service_name, environmentId=env_id,
launchConfig=launch_config_consumed_svc, scale=scale_consumed_svc)
consumed_service1 = client.wait_success(consumed_service1)
assert consumed_service1.state == "inactive"
# Create DNS service
dns = client.create_dnsService(name='WEB1',
environmentId=env.id)
dns = client.wait_success(dns)
return env, service, consumed_service, consumed_service1, dns
def create_env_with_ext_svc(client, scale_svc, port, hostname=False):
launch_config_svc = {"imageUuid": SSH_IMAGE_UUID,
"ports": [port+":22/tcp"]}
# Create Environment
env = create_env(client)
# Create Service
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
con_list = None
# Create external Service
random_name = random_str()
ext_service_name = random_name.replace("-", "")
if not hostname:
# Create 2 containers which would be the applications that need to be
# serviced by the external service
c1 = client.create_container(name=random_str(),
imageUuid=WEB_IMAGE_UUID)
c2 = client.create_container(name=random_str(),
imageUuid=WEB_IMAGE_UUID)
c1 = client.wait_success(c1, SERVICE_WAIT_TIMEOUT)
assert c1.state == "running"
c2 = client.wait_success(c2, SERVICE_WAIT_TIMEOUT)
assert c2.state == "running"
con_list = [c1, c2]
ips = [c1.primaryIpAddress, c2.primaryIpAddress]
ext_service = client.create_externalService(
name=ext_service_name, environmentId=env.id,
externalIpAddresses=ips)
else:
ext_service = client.create_externalService(
name=ext_service_name, environmentId=env.id, hostname="google.com")
ext_service = client.wait_success(ext_service)
assert ext_service.state == "inactive"
return env, service, ext_service, con_list
def create_env_and_svc(client, launch_config, scale=None, retainIp=False):
env = create_env(client)
service = create_svc(client, env, launch_config, scale, retainIp)
return service, env
def check_container_in_service(admin_client, service):
container_list = get_service_container_list(admin_client, service,
managed=1)
assert len(container_list) == service.scale
for container in container_list:
assert container.state == "running"
containers = admin_client.list_container(
externalId=container.externalId,
include="hosts",
removed_null=True)
docker_client = get_docker_client(containers[0].hosts[0])
inspect = docker_client.inspect_container(container.externalId)
logger.info("Checked for containers running - " + container.name)
assert inspect["State"]["Running"]
def create_svc(client, env, launch_config, scale=None, retainIp=False):
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config,
scale=scale,
retainIp=retainIp)
service = client.wait_success(service)
assert service.state == "inactive"
return service
def wait_until_instances_get_stopped(admin_client, service, timeout=60):
stopped_count = 0
start = time.time()
while stopped_count != service.scale:
time.sleep(.5)
container_list = get_service_container_list(admin_client, service)
stopped_count = 0
for con in container_list:
if con.state == "stopped":
stopped_count = stopped_count + 1
if time.time() - start > timeout:
raise Exception(
'Timed out waiting for instances to get to stopped state')
def get_service_containers_with_name(
admin_client, service, name, managed=None):
nameformat = re.compile(name + "_[0-9]{1,2}")
start = time.time()
instance_list = []
while len(instance_list) != service.scale:
instance_list = []
print "sleep for .5 sec"
time.sleep(.5)
if managed is not None:
all_instance_maps = \
admin_client.list_serviceExposeMap(serviceId=service.id,
managed=managed)
else:
all_instance_maps = \
admin_client.list_serviceExposeMap(serviceId=service.id)
for instance_map in all_instance_maps:
if instance_map.state == "active":
c = admin_client.by_id('container', instance_map.instanceId)
if nameformat.match(c.name) \
and c.state in ("running", "stopped"):
instance_list.append(c)
print c.name
if time.time() - start > 30:
raise Exception('Timed out waiting for Service Expose map to be ' +
'created for all instances')
container = []
for instance in instance_list:
assert instance.externalId is not None
containers = admin_client.list_container(
externalId=instance.externalId,
include="hosts")
assert len(containers) == 1
container.append(containers[0])
return container
def wait_until_instances_get_stopped_for_service_with_sec_launch_configs(
admin_client, service, timeout=60):
stopped_count = 0
start = time.time()
container_count = service.scale*(len(service.secondaryLaunchConfigs)+1)
while stopped_count != container_count:
time.sleep(.5)
container_list = get_service_container_list(admin_client, service)
stopped_count = 0
for con in container_list:
if con.state == "stopped":
stopped_count = stopped_count + 1
if time.time() - start > timeout:
raise Exception(
'Timed out waiting for instances to get to stopped state')
def validate_lb_service_for_no_access(admin_client, lb_service, port,
hostheader, path):
lb_containers = get_service_container_list(admin_client, lb_service)
for lb_con in lb_containers:
host = admin_client.by_id('host', lb_con.hosts[0].id)
wait_until_lb_is_active(host, port)
check_for_service_unavailable(host, port, hostheader, path)
def check_for_service_unavailable(host, port, hostheader, path):
url = "http://" + host.ipAddresses()[0].address +\
":" + port + path
logger.info(url)
headers = {"host": hostheader}
logger.info(headers)
r = requests.get(url, headers=headers)
response = r.text.strip("\n")
logger.info(response)
r.close()
assert "503 Service Unavailable" in response
def get_http_response(host, port, path):
url = "http://" + host.ipAddresses()[0].address +\
":" + str(port) + path
logger.info(url)
r = requests.get(url)
response = r.text.strip("\n")
logger.info(response)
r.close()
return response
def check_round_robin_access(container_names, host, port,
hostheader=None, path="/name.html"):
check_round_robin_access_lb_ip(container_names,
host.ipAddresses()[0].address, port,
hostheader=hostheader, path=path)
def check_round_robin_access_lb_ip(container_names, lb_ip, port,
hostheader=None, path="/name.html"):
con_hostname = container_names[:]
con_hostname_ordered = []
url = "http://" + lb_ip +\
":" + port + path
logger.info(url)
headers = None
if hostheader is not None:
headers = {"host": hostheader}
logger.info(headers)
for n in range(0, len(con_hostname)):
if headers is not None:
r = requests.get(url, headers=headers)
else:
r = requests.get(url)
response = r.text.strip("\n")
logger.info(response)
r.close()
assert response in con_hostname
con_hostname.remove(response)
con_hostname_ordered.append(response)
logger.info(con_hostname_ordered)
i = 0
for n in range(0, 10):
if headers is not None:
r = requests.get(url, headers=headers)
else:
r = requests.get(url)
response = r.text.strip("\n")
r.close()
logger.info("Response received-" + response)
assert response == con_hostname_ordered[i]
i = i + 1
if i == len(con_hostname_ordered):
i = 0
def check_cert_using_openssl(host, port, domain, test_ssl_client_con):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
test_ssl_client_con["host"].ipAddresses()[0].address, username="root",
password="root", port=int(test_ssl_client_con["port"]))
cmd = "openssl s_client" + \
" -connect " + host.ipAddresses()[0].address + ":" + port + \
" -servername " + domain + "</dev/null > result.out;cat result.out"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
logger.info(response)
responseLen = len(response)
assert responseLen > 3
assert "CN="+domain in response[3]
def check_round_robin_access_for_ssl(container_names, host, port, domain,
test_ssl_client_con,
hostheader=None, path="/name.html"):
check_round_robin_access_for_ssl_lb_ip(container_names,
host.ipAddresses()[0].address,
port, domain,
test_ssl_client_con,
hostheader, path)
def check_round_robin_access_for_ssl_lb_ip(container_names, lb_ip,
port, domain,
test_ssl_client_con,
hostheader, path):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
test_ssl_client_con["host"].ipAddresses()[0].address, username="root",
password="root", port=int(test_ssl_client_con["port"]))
cmd = "echo '" + lb_ip + \
" " + domain + "'> /etc/hosts;grep " + domain + " /etc/hosts"
response = execute_command(ssh, cmd)
logger.info(response)
domain_cert = domain + ".crt "
cert_str = " --ca-certificate=" + domain_cert
if hostheader is None:
host_header_str = ""
else:
host_header_str = "--header=host:" + hostheader + " "
url_str = " https://" + domain + ":" + port + path
cmd = "wget -O result.txt --timeout=20 --tries=1" + \
cert_str + host_header_str + url_str + ";cat result.txt"
con_hostname = container_names[:]
con_hostname_ordered = []
for n in range(0, len(con_hostname)):
response = execute_command(ssh, cmd)
assert response in con_hostname
con_hostname.remove(response)
con_hostname_ordered.append(response)
logger.info(con_hostname_ordered)
i = 0
for n in range(0, 5):
response = execute_command(ssh, cmd)
logger.info(response)
assert response == con_hostname_ordered[i]
i = i + 1
if i == len(con_hostname_ordered):
i = 0
def check_for_cert_error(host, port, domain, default_domain, cert,
test_ssl_client_con, path="/name.html"):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
test_ssl_client_con["host"].ipAddresses()[0].address, username="root",
password="root", port=int(test_ssl_client_con["port"]))
cmd = "echo '" + host.ipAddresses()[0].address + \
" " + domain + "'> /etc/hosts;grep " + domain + " /etc/hosts"
response = execute_command(ssh, cmd)
logger.info(response)
domain_cert = cert + ".crt "
cert_str = " --ca-certificate=" + domain_cert
url_str = " https://" + domain + ":" + port + path
cmd = "wget -O result.txt --timeout=20 --tries=1" + \
cert_str + url_str + ";cat result.txt"
error_string = "ERROR: cannot verify " + domain + "'s certificate"
stdin, stdout, stderr = ssh.exec_command(cmd)
errors = stderr.readlines()
logger.info(errors)
found_error = False
for error in errors:
if error_string in error:
found_error = True
assert found_error
def execute_command(ssh, cmd):
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
logger.info(response)
assert len(response) == 1
resp = response[0].strip("\n")
logger.info("Response" + str(resp))
return resp
def create_env_with_multiple_svc_and_lb(client, scale_svc, scale_lb,
ports, count, crosslinking=False):
target_port = ["80", "81"]
launch_config_svc = \
{"imageUuid": LB_HOST_ROUTING_IMAGE_UUID}
assert len(ports) in (1, 2)
launch_port = []
for i in range(0, len(ports)):
listening_port = ports[i]+":"+target_port[i]
if "/" in ports[i]:
port_mode = ports[i].split("/")
listening_port = port_mode[0]+":"+target_port[i]+"/"+port_mode[1]
launch_port.append(listening_port)
launch_config_lb = {"ports": launch_port}
services = []
# Create Environment
env = create_env(client)
# Create Service
for i in range(0, count):
random_name = random_str()
service_name = random_name.replace("-", "")
if crosslinking:
env_serv = create_env(client)
env_id = env_serv.id
else:
env_id = env.id
service = client.create_service(name=service_name,
environmentId=env_id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
services.append(service)
# Create LB Service
random_name = random_str()
service_name = "LB-" + random_name.replace("-", "")
lb_service = client.create_loadBalancerService(
name=service_name,
environmentId=env.id,
launchConfig=launch_config_lb,
scale=scale_lb)
lb_service = client.wait_success(lb_service)
assert lb_service.state == "inactive"
env = env.activateservices()
env = client.wait_success(env, SERVICE_WAIT_TIMEOUT)
if not crosslinking:
for service in services:
service = client.wait_success(service, SERVICE_WAIT_TIMEOUT)
assert service.state == "active"
lb_service = client.wait_success(lb_service, SERVICE_WAIT_TIMEOUT)
assert lb_service.state == "active"
return env, services, lb_service
def create_env_with_multiple_svc_and_ssl_lb(client, scale_svc, scale_lb,
ports, count, ssl_ports,
default_cert, certs=[]):
target_port = ["80", "81"]
launch_config_svc = \
{"imageUuid": LB_HOST_ROUTING_IMAGE_UUID}
assert len(ports) in (1, 2)
launch_port = []
for i in range(0, len(ports)):
listening_port = ports[i]+":"+target_port[i]
if "/" in ports[i]:
port_mode = ports[i].split("/")
listening_port = port_mode[0]+":"+target_port[i]+"/"+port_mode[1]
launch_port.append(listening_port)
launch_config_lb = {"ports": launch_port,
"labels":
{'io.rancher.loadbalancer.ssl.ports': ssl_ports}}
services = []
# Create Environment
env = create_env(client)
# Create Service
for i in range(0, count):
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
services.append(service)
# Create LB Service
random_name = random_str()
service_name = "LB-" + random_name.replace("-", "")
supported_cert_list = []
for cert in certs:
supported_cert_list.append(cert.id)
lb_service = client.create_loadBalancerService(
name=service_name,
environmentId=env.id,
launchConfig=launch_config_lb,
scale=scale_lb,
certificateIds=supported_cert_list,
defaultCertificateId=default_cert.id)
lb_service = client.wait_success(lb_service)
assert lb_service.state == "inactive"
env = env.activateservices()
env = client.wait_success(env, SERVICE_WAIT_TIMEOUT)
for service in services:
service = client.wait_success(service, SERVICE_WAIT_TIMEOUT)
assert service.state == "active"
lb_service = client.wait_success(lb_service, SERVICE_WAIT_TIMEOUT)
assert lb_service.state == "active"
return env, services, lb_service
def wait_for_config_propagation(admin_client, lb_service, timeout=30):
lb_instances = get_service_container_list(admin_client, lb_service)
assert len(lb_instances) == lb_service.scale
for lb_instance in lb_instances:
agentId = lb_instance.agentId
agent = admin_client.by_id('agent', agentId)
assert agent is not None
item = get_config_item(agent, "haproxy")
start = time.time()
print "requested_version " + str(item.requestedVersion)
print "applied_version " + str(item.appliedVersion)
while item.requestedVersion != item.appliedVersion:
print "requested_version " + str(item.requestedVersion)
print "applied_version " + str(item.appliedVersion)
time.sleep(.1)
agent = admin_client.reload(agent)
item = get_config_item(agent, "haproxy")
if time.time() - start > timeout:
raise Exception('Timed out waiting for config propagation')
def wait_for_metadata_propagation(admin_client, timeout=30):
networkAgents = admin_client.list_container(
name='Network Agent', removed_null=True)
assert len(networkAgents) == len(admin_client.list_host(kind='docker',
removed_null=True))
for networkAgent in networkAgents:
agentId = networkAgent.agentId
agent = admin_client.by_id('agent', agentId)
assert agent is not None
item = get_config_item(agent, "hosts")
start = time.time()
print "agent_id " + str(agentId)
print "requested_version " + str(item.requestedVersion)
print "applied_version " + str(item.appliedVersion)
while item.requestedVersion != item.appliedVersion:
print "requested_version " + str(item.requestedVersion)
print "applied_version " + str(item.appliedVersion)
time.sleep(.1)
agent = admin_client.reload(agent)
item = get_config_item(agent, "hosts")
if time.time() - start > timeout:
raise Exception('Timed out waiting for config propagation')
def get_config_item(agent, config_name):
item = None
for config_items in agent.configItemStatuses():
if config_items.name == config_name:
item = config_items
break
assert item is not None
return item
def get_plain_id(admin_client, obj=None):
if obj is None:
obj = admin_client
admin_client = super_client(None)
ret = admin_client.list(obj.type, uuid=obj.uuid, _plainId='true')
assert len(ret) == 1
return ret[0].id
def create_env(client):
random_name = random_str()
env_name = random_name.replace("-", "")
env = client.create_environment(name=env_name)
env = client.wait_success(env)
assert env.state == "active"
return env
def get_env(admin_client, service):
e = admin_client.by_id('environment', service.environmentId)
return e
def get_service_container_with_label(admin_client, service, name, label):
containers = []
found = False
instance_maps = admin_client.list_serviceExposeMap(serviceId=service.id,
state="active")
nameformat = re.compile(name + "_[0-9]{1,2}")
for instance_map in instance_maps:
c = admin_client.by_id('container', instance_map.instanceId)
if nameformat.match(c.name) \
and c.labels["io.rancher.service.deployment.unit"] == label:
containers = admin_client.list_container(
externalId=c.externalId,
include="hosts")
assert len(containers) == 1
found = True
break
assert found
return containers[0]
def get_side_kick_container(admin_client, container, service, service_name):
label = container.labels["io.rancher.service.deployment.unit"]
print container.name + " - " + label
secondary_con = get_service_container_with_label(
admin_client, service, service_name, label)
return secondary_con
def validate_internal_lb(admin_client, lb_service, services,
host, con_port, lb_port):
# Access each of the LB Agent from the client container
lb_containers = get_service_container_list(admin_client, lb_service)
assert len(lb_containers) == lb_service.scale
for lb_con in lb_containers:
lb_ip = lb_con.primaryIpAddress
target_count = 0
for service in services:
target_count = target_count + service.scale
expected_lb_response = get_container_names_list(admin_client,
services)
assert len(expected_lb_response) == target_count
# Validate port mapping
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(con_port))
# Validate lb service from this container using LB agent's ip address
cmd = "wget -O result.txt --timeout=20 --tries=1 http://" + lb_ip + \
":"+lb_port+"/name.html;cat result.txt"
logger.info(cmd)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
assert len(response) == 1
resp = response[0].strip("\n")
logger.info("Actual wget Response" + str(resp))
assert resp in (expected_lb_response)
def create_env_with_2_svc_hostnetwork(
client, scale_svc, scale_consumed_svc, port, sshport,
isnetworkModeHost_svc=False,
isnetworkModeHost_consumed_svc=False):
launch_config_svc = {"imageUuid": SSH_IMAGE_UUID_HOSTNET}
launch_config_consumed_svc = {"imageUuid": WEB_IMAGE_UUID}
if isnetworkModeHost_svc:
launch_config_svc["networkMode"] = "host"
launch_config_svc["labels"] = dns_labels
else:
launch_config_svc["ports"] = [port+":"+sshport+"/tcp"]
if isnetworkModeHost_consumed_svc:
launch_config_consumed_svc["networkMode"] = "host"
launch_config_consumed_svc["labels"] = dns_labels
launch_config_consumed_svc["ports"] = []
# Create Environment
env = create_env(client)
# Create Service
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
# Create Consumed Service
random_name = random_str()
service_name = random_name.replace("-", "")
consumed_service = client.create_service(
name=service_name, environmentId=env.id,
launchConfig=launch_config_consumed_svc, scale=scale_consumed_svc)
consumed_service = client.wait_success(consumed_service)
assert consumed_service.state == "inactive"
return env, service, consumed_service
def create_env_with_2_svc_dns_hostnetwork(
client, scale_svc, scale_consumed_svc, port,
cross_linking=False, isnetworkModeHost_svc=False,
isnetworkModeHost_consumed_svc=False):
launch_config_svc = {"imageUuid": SSH_IMAGE_UUID_HOSTNET}
launch_config_consumed_svc = {"imageUuid": WEB_IMAGE_UUID}
if isnetworkModeHost_svc:
launch_config_svc["networkMode"] = "host"
launch_config_svc["labels"] = dns_labels
else:
launch_config_svc["ports"] = [port+":33/tcp"]
if isnetworkModeHost_consumed_svc:
launch_config_consumed_svc["networkMode"] = "host"
launch_config_consumed_svc["labels"] = dns_labels
launch_config_consumed_svc["ports"] = []
# Create Environment for dns service and client service
env = create_env(client)
random_name = random_str()
service_name = random_name.replace("-", "")
service = client.create_service(name=service_name,
environmentId=env.id,
launchConfig=launch_config_svc,
scale=scale_svc)
service = client.wait_success(service)
assert service.state == "inactive"
# Force containers of 2 different services to be in different hosts
hosts = client.list_host(kind='docker', removed_null=True, state='active')
assert len(hosts) > 1
# Create Consumed Service1
if cross_linking:
env_id = create_env(client).id
else:
env_id = env.id
random_name = random_str()
service_name = random_name.replace("-", "")
launch_config_consumed_svc["requestedHostId"] = hosts[0].id
consumed_service = client.create_service(
name=service_name, environmentId=env_id,
launchConfig=launch_config_consumed_svc, scale=scale_consumed_svc)
consumed_service = client.wait_success(consumed_service)
assert consumed_service.state == "inactive"
# Create Consumed Service2
if cross_linking:
env_id = create_env(client).id
else:
env_id = env.id
random_name = random_str()
service_name = random_name.replace("-", "")
launch_config_consumed_svc["requestedHostId"] = hosts[1].id
consumed_service1 = client.create_service(
name=service_name, environmentId=env_id,
launchConfig=launch_config_consumed_svc, scale=scale_consumed_svc)
consumed_service1 = client.wait_success(consumed_service1)
assert consumed_service1.state == "inactive"
# Create DNS service
dns = client.create_dnsService(name='WEB1',
environmentId=env.id)
dns = client.wait_success(dns)
return env, service, consumed_service, consumed_service1, dns
def cleanup_images(client, delete_images):
hosts = client.list_host(kind='docker', removed_null=True, state='active')
print "To delete" + delete_images[0]
for host in hosts:
docker_client = get_docker_client(host)
images = docker_client.images()
for image in images:
print image["RepoTags"][0]
if image["RepoTags"][0] in delete_images:
print "Found Match"
docker_client.remove_image(image, True)
images = docker_client.images()
for image in images:
assert ["RepoTags"][0] not in delete_images
@pytest.fixture(scope='session')
def certs(client, admin_client, request):
if len(cert_list.keys()) > 0:
return
domain_list = get_domains()
print domain_list
for domain in domain_list:
cert = create_cert(client, domain)
cert_list[domain] = cert
def remove_certs():
delete_all(client, cert_list.values())
request.addfinalizer(remove_certs)
def get_cert(domain):
return cert_list[domain]
def create_client_container_for_ssh(client, port):
super_client_con = {}
domain_list = get_domains()
hosts = client.list_host(kind='docker', removed_null=True, state="active")
assert len(hosts) > 0
host = hosts[0]
c = client.create_container(name="lb-test-client" + port,
networkMode=MANAGED_NETWORK,
imageUuid="docker:sangeetha/testclient",
ports=[port+":22/tcp"],
requestedHostId=host.id
)
c = client.wait_success(c, SERVICE_WAIT_TIMEOUT)
assert c.state == "running"
time.sleep(5)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(port))
cmd = ""
for domain in domain_list:
cert, key, certChain = get_cert_for_domain(domain)
if certChain:
cp_cmd_cert = "echo '"+cert+"' > "+domain+"_chain.crt;"
else:
cp_cmd_cert = "echo '"+cert+"' > "+domain+".crt;"
cmd = cmd + cp_cmd_cert
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
print response
super_client_con["container"] = c
super_client_con["host"] = host
super_client_con["port"] = port
return super_client_con
def create_kubectl_client_container(client, port):
test_kubectl_client_con = {}
hosts = client.list_host(kind='docker', removed_null=True, state="active")
assert len(hosts) > 0
host = hosts[0]
c = client.create_container(name="test-kubctl-client",
networkMode=MANAGED_NETWORK,
imageUuid="docker:sangeetha/testclient",
ports=[port+":22/tcp"],
requestedHostId=host.id
)
c = client.wait_success(c, SERVICE_WAIT_TIMEOUT)
assert c.state == "running"
time.sleep(5)
kube_config = readDataFile(K8_SUBDIR, "config.txt")
kube_config = kube_config.replace("uuuuu", client._access_key)
kube_config = kube_config.replace("ppppp", client._secret_key)
server_ip = \
cattle_url()[cattle_url().index("//") + 2:cattle_url().index(":8080")]
kube_config = kube_config.replace("sssss", server_ip)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host.ipAddresses()[0].address, username="root",
password="root", port=int(port))
cmd1 = "wget https://storage.googleapis.com/kubernetes-release" + \
"/release/"+kubectl_version+"/bin/linux/amd64/kubectl"
cmd2 = "chmod +x kubectl"
cmd3 = "mkdir .kube"
cmd4 = "echo '" + kube_config + "'> .kube/config"
cmd5 = "./kubectl version"
cmd = cmd1 + ";" + cmd2 + ";" + cmd3 + ";" + cmd4 + ";" + cmd5
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
print response
test_kubectl_client_con["container"] = c
test_kubectl_client_con["host"] = host
test_kubectl_client_con["port"] = port
return test_kubectl_client_con
def execute_kubectl_cmds(command, expected_resps=None, file_name=None,
expected_error=None):
cmd = "./kubectl " + command
if file_name is not None:
file_content = readDataFile(K8_SUBDIR, file_name)
cmd1 = cmd + " -f " + file_name
cmd2 = "echo '" + file_content + "'> " + file_name
cmd = cmd2 + ";" + cmd1
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
kubectl_client_con["host"].ipAddresses()[0].address, username="root",
password="root", port=int(kubectl_client_con["port"]))
print cmd
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
error = stderr.readlines()
str_response = ""
for resp in response:
str_response += resp
print "Obtained Response: " + str_response
# Validate Expected Response
if expected_resps is not None:
found = False
for resp in response:
for exp_resp in expected_resps:
if exp_resp in resp:
print "Found in response " + exp_resp
expected_resps.remove(exp_resp)
if len(expected_resps) == 0:
found = True
else:
print "Not found in response " + str(expected_resps)
assert found
if expected_error is not None:
found = False
for err_str in error:
if expected_error in err_str:
found = True
print "Found in Error Response " + err_str
assert found
return str_response
def create_cert(client, domainname, certname=None):
cert, key, certChain = get_cert_for_domain(domainname)
if certname is None:
certname = random_str()
cert1 = client. \
create_certificate(name=certname,
cert=cert,
key=key,
certChain=certChain)
cert1 = client.wait_success(cert1)
assert cert1.state == 'active'
return cert1
def get_cert_for_domain(name):
cert = readDataFile(SSLCERT_SUBDIR, name+".crt")
key = readDataFile(SSLCERT_SUBDIR, name+".key")
certChain = None
if os.path.isfile(os.path.join(SSLCERT_SUBDIR, name + "_chain.crt")):
certChain = readDataFile(SSLCERT_SUBDIR, name+"_chain.crt")
return cert, key, certChain
def get_domains():
domain_list_str = readDataFile(SSLCERT_SUBDIR, "certlist.txt").rstrip()
domain_list = domain_list_str.split(",")
return domain_list
def base_url():
base_url = cattle_url()
if (base_url.endswith('/v1/schemas')):
base_url = base_url[:-7]
elif (not base_url.endswith('/v1/')):
if (not base_url.endswith('/')):
base_url = base_url + '/v1/'
else:
base_url = base_url + 'v1/'
return base_url
def readDataFile(data_dir, name):
fname = os.path.join(data_dir, name)
print fname
is_file = os.path.isfile(fname)
assert is_file
with open(fname) as f:
return f.read()
def get_env_service_by_name(client, env_name, service_name):
env = client.list_environment(name=env_name)
assert len(env) == 1
service = client.list_service(name=service_name,
environmentId=env[0].id,
removed_null=True)
assert len(service) == 1
return env[0], service[0]
def check_for_appcookie_policy(admin_client, client, lb_service, port,
target_services, cookie_name):
container_names = get_container_names_list(admin_client,
target_services)
lb_containers = get_service_container_list(admin_client, lb_service)
for lb_con in lb_containers:
host = client.by_id('host', lb_con.hosts[0].id)
url = "http://" + host.ipAddresses()[0].address + \
":" + port + "/name.html"
headers = {"Cookie": cookie_name + "=test123"}
check_for_stickiness(url, container_names, headers=headers)
def check_for_lbcookie_policy(admin_client, client, lb_service, port,
target_services):
container_names = get_container_names_list(admin_client,
target_services)
lb_containers = get_service_container_list(admin_client, lb_service)
for lb_con in lb_containers:
host = client.by_id('host', lb_con.hosts[0].id)
url = "http://" + host.ipAddresses()[0].address + \
":" + port + "/name.html"
session = requests.Session()
r = session.get(url)
sticky_response = r.text.strip("\n")
logger.info("request: " + url)
logger.info(sticky_response)
r.close()
assert sticky_response in container_names
for n in range(0, 10):
r = session.get(url)
response = r.text.strip("\n")
r.close()
logger.info("request: " + url)
logger.info(response)
assert response == sticky_response
def check_for_balancer_first(admin_client, client, lb_service, port,
target_services):
container_names = get_container_names_list(admin_client,
target_services)
lb_containers = get_service_container_list(admin_client, lb_service)
for lb_con in lb_containers:
host = client.by_id('host', lb_con.hosts[0].id)
url = "http://" + host.ipAddresses()[0].address + \
":" + port + "/name.html"
check_for_stickiness(url, container_names)
def check_for_stickiness(url, expected_responses, headers=None):
r = requests.get(url, headers=headers)
sticky_response = r.text.strip("\n")
logger.info(sticky_response)
r.close()
assert sticky_response in expected_responses
for n in range(0, 10):
r = requests.get(url, headers=headers)
response = r.text.strip("\n")
r.close()
logger.info("request: " + url + " Header -" + str(headers))
logger.info(response)
assert response == sticky_response
def add_digital_ocean_hosts(client, count, size="1gb"):
# Create a Digital Ocean Machine
machines = []
hosts = []
for i in range(0, count):
create_args = {"name": random_str(),
"digitaloceanConfig": {"accessToken": do_access_key,
"size": size,
"image": "ubuntu-14-04-x64"},
"engineInstallUrl": do_install_url}
machine = client.create_machine(**create_args)
machines.append(machine)
for machine in machines:
machine = client.wait_success(machine, timeout=DEFAULT_MACHINE_TIMEOUT)
assert machine.state == 'active'
machine = wait_for_host(client, machine)
host = machine.hosts()[0]
assert host.state == 'active'
hosts.append(host)
return hosts
def wait_for_host(client, machine):
wait_for_condition(client,
machine,
lambda x: len(x.hosts()) == 1,
lambda x: 'Number of hosts associated with machine ' +
str(len(x.hosts())),
DEFAULT_MACHINE_TIMEOUT)
host = machine.hosts()[0]
host = wait_for_condition(client,
host,
lambda x: x.state == 'active',
lambda x: 'Host state is ' + x.state
)
return machine
def wait_for_host_agent_state(client, host, state):
host = wait_for_condition(client,
host,
lambda x: x.agentState == state,
lambda x: 'Host state is ' + x.agentState
)
return host
def get_service_instance_count(client, service):
scale = service.scale
# Check for Global Service
if "labels" in service.launchConfig.keys():
labels = service.launchConfig["labels"]
if "io.rancher.scheduler.global" in labels.keys():
if labels["io.rancher.scheduler.global"] == "true":
active_hosts = client.list_host(
kind='docker', removed_null=True, agentState="active",
state="active")
hosts = client.list_host(
kind='docker', removed_null=True, agentState_null=True,
state="active")
scale = len(hosts) + len(active_hosts)
return scale
def check_config_for_service(admin_client, service, labels, managed):
containers = get_service_container_list(admin_client, service, managed)
assert len(containers) == service.scale
for con in containers:
for key in labels.keys():
assert con.labels[key] == labels[key]
if managed == 1:
assert con.state == "running"
else:
assert con.state == "stopped"
if managed:
for key in labels.keys():
service_labels = service.launchConfig["labels"]
assert service_labels[key] == labels[key]
# Creating Environment namespace
def create_ns(namespace):
expected_result = ['namespace "'+namespace+'" created']
execute_kubectl_cmds(
"create namespace "+namespace, expected_result)
# Verify namespace is created
get_response = execute_kubectl_cmds(
"get namespace "+namespace+" -o json")
secret = json.loads(get_response)
assert secret["metadata"]["name"] == namespace
assert secret["status"]["phase"] == "Active"
# Teardown Environment namespace
def teardown_ns(namespace):
timeout = 0
expected_result = ['namespace "'+namespace+'" deleted']
execute_kubectl_cmds(
"delete namespace "+namespace, expected_result)
while True:
get_response = execute_kubectl_cmds("get namespaces")
if namespace not in get_response:
break
else:
time.sleep(5)
timeout += 5
if timeout == 300:
raise ValueError('Timeout Exception: for deleting namespace')
# Waitfor Pod
def waitfor_pods(selector=None,
namespace="default",
number=1,
state="Running"):
timeout = 0
all_running = True
get_response = execute_kubectl_cmds(
"get pod --selector="+selector+" -o json -a --namespace="+namespace)
pod = json.loads(get_response)
pods = pod['items']
pods_no = len(pod['items'])
while True:
if pods_no >= number:
for pod in pods:
if pod['status']['phase'] != state:
all_running = False
if all_running:
break
time.sleep(5)
timeout += 5
if timeout == 300:
raise ValueError('Timeout Exception: pods did not run properly')
get_response = execute_kubectl_cmds(
"get pod --selector="+selector+" -o"
" json -a --namespace="+namespace)
pod = json.loads(get_response)
pods = pod['items']
pods_no = len(pod['items'])
all_running = True
# Create K8 service
def create_k8_service(file_name, namespace, service_name, rc_name,
selector_name, scale=2, wait_for_service=True):
expected_result = ['replicationcontroller "'+rc_name+'" created',
'service "'+service_name+'" created']
execute_kubectl_cmds(
"create --namespace="+namespace, expected_result,
file_name=file_name)
if wait_for_service:
waitfor_pods(selector=selector_name, namespace=namespace, number=scale)
# Collect names of the pods in the service1
def get_pod_names_for_selector(selector_name, namespace, scale=2):
pod_names = []
get_response = execute_kubectl_cmds(
"get pod --selector="+selector_name+" -o json --namespace="+namespace)
pod = json.loads(get_response)
assert len(pod["items"]) == scale
for pod in pod["items"]:
pod_names.append(pod["metadata"]["name"])
return pod_names
# Collect names of the pods in the service1
def create_ingress(file_name, ingress_name, namespace, scale=1,
wait_for_ingress=True):
expected_result = ['ingress "'+ingress_name+'" created']
execute_kubectl_cmds(
"create --namespace="+namespace, expected_result,
file_name=file_name)
if wait_for_ingress:
return wait_for_ingress_to_become_active(ingress_name, namespace,
scale=1)
def wait_for_ingress_to_become_active(ingress_name, namespace, scale=1):
# Returns a list of lb_ips [Supports ingress scaling]
lb_ip = []
startTime = time.time()
while len(lb_ip) < scale:
if time.time() - startTime > 60:
raise \
ValueError("Timed out waiting "
"for Ip to be assigned for Ingress")
ingress_response = execute_kubectl_cmds(
"get ingress "+ingress_name+" -o json --namespace="+namespace)
ingress = json.loads(ingress_response)
print ingress
if "ingress" in ingress["status"]["loadBalancer"]:
for item in ingress["status"]["loadBalancer"]["ingress"]:
print item["ip"]
lb_ip.append(item["ip"])
time.sleep(.5)
return lb_ip
# Delete an ingress
def delete_ingress(ingress_name, namespace):
timeout = 0
expected_result = ['ingress "'+ingress_name+'" deleted']
execute_kubectl_cmds(
"delete ing " + ingress_name + " --namespace=" +
namespace, expected_result)
while True:
get_response = execute_kubectl_cmds(
"get ing " + ingress_name + " -o json --namespace=" + namespace,
)
if ingress_name not in get_response:
break
else:
time.sleep(5)
timeout += 5
if timeout == 300:
raise ValueError('Timeout Exception: for deleting ingress')
# Create service and ingress
def create_service_ingress(ingresses, services, port, namespace,
scale=2):
podnames = []
for i in range(0, len(services)):
create_k8_service(services[i]["filename"], namespace,
services[i]["name"], services[i]["rc_name"],
services[i]["selector"], scale=scale,
wait_for_service=True)
podnameslist = get_pod_names_for_selector(services[i]["selector"],
namespace, scale=scale)
podnames.append(podnameslist)
lbips = []
for i in range(0, len(ingresses)):
lb_ip = create_ingress(ingresses[i]["filename"],
ingresses[i]["name"], namespace,
wait_for_ingress=True)
wait_until_lb_ip_is_active(lb_ip[i], port)
lbips.append(lb_ip)
return(podnames, lbips)
| 35.448909
| 79
| 0.607928
|
1fd3b671a467d34c73d236d968add37499b665be
| 5,764
|
py
|
Python
|
maskrcnn_benchmark/modeling/rpn/rpn.py
|
huajianni666/maskrcnn-benchmark
|
8543f26c44a8f0a4d178548bd08471bf53498381
|
[
"MIT"
] | 2
|
2019-02-12T10:02:17.000Z
|
2019-02-12T14:42:08.000Z
|
maskrcnn_benchmark/modeling/rpn/rpn.py
|
huajianni666/maskrcnn-benchmark
|
8543f26c44a8f0a4d178548bd08471bf53498381
|
[
"MIT"
] | null | null | null |
maskrcnn_benchmark/modeling/rpn/rpn.py
|
huajianni666/maskrcnn-benchmark
|
8543f26c44a8f0a4d178548bd08471bf53498381
|
[
"MIT"
] | 1
|
2020-03-27T11:21:10.000Z
|
2020-03-27T11:21:10.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import torch.nn.functional as F
from torch import nn
import pdb
from maskrcnn_benchmark.modeling import registry
from maskrcnn_benchmark.modeling.box_coder import BoxCoder
from .loss import make_rpn_loss_evaluator
from .anchor_generator import make_anchor_generator
from .inference import make_rpn_postprocessor
@registry.RPN_HEADS.register("SingleConvRPNHead")
class RPNHead(nn.Module):
"""
Adds a simple RPN Head with classification and regression heads
"""
def __init__(self, cfg, in_channels, num_anchors):
"""
Arguments:
cfg : config
in_channels (int): number of channels of the input feature
num_anchors (int): number of anchors to be predicted
"""
super(RPNHead, self).__init__()
self.conv = nn.Conv2d(
in_channels, in_channels, kernel_size=3, stride=1, padding=1
)
self.cls_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1, stride=1)
self.bbox_pred = nn.Conv2d(
in_channels, num_anchors * 4, kernel_size=1, stride=1
)
for l in [self.conv, self.cls_logits, self.bbox_pred]:
torch.nn.init.normal_(l.weight, std=0.01)
torch.nn.init.constant_(l.bias, 0)
def forward(self, x):
logits = []
bbox_reg = []
for feature in x:
t = F.relu(self.conv(feature))
logits.append(self.cls_logits(t))
bbox_reg.append(self.bbox_pred(t))
return logits, bbox_reg
class RPNModule(torch.nn.Module):
"""
Module for RPN computation. Takes feature maps from the backbone and RPN
proposals and losses. Works for both FPN and non-FPN.
"""
def __init__(self, cfg):
super(RPNModule, self).__init__()
self.cfg = cfg.clone()
anchor_generator = make_anchor_generator(cfg)
in_channels = cfg.MODEL.BACKBONE.OUT_CHANNELS
rpn_head = registry.RPN_HEADS[cfg.MODEL.RPN.RPN_HEAD]
head = rpn_head(
cfg, in_channels, anchor_generator.num_anchors_per_location()[0]
)
rpn_box_coder = BoxCoder(weights=(1.0, 1.0, 1.0, 1.0))
box_selector_train = make_rpn_postprocessor(cfg, rpn_box_coder, is_train=True)
box_selector_test = make_rpn_postprocessor(cfg, rpn_box_coder, is_train=False)
loss_evaluator = make_rpn_loss_evaluator(cfg, rpn_box_coder)
self.anchor_generator = anchor_generator
self.head = head
self.box_selector_train = box_selector_train
self.box_selector_test = box_selector_test
self.loss_evaluator = loss_evaluator
def forward(self, images, features, targets=None,train=True):
"""
Arguments:
images (ImageList): images for which we want to compute the predictions
features (list[Tensor]): features computed from the images that are
used for computing the predictions. Each tensor in the list
correspond to different feature levels
targets (list[BoxList): ground-truth boxes present in the image (optional)
Returns:
boxes (list[BoxList]): the predicted boxes from the RPN, one BoxList per
image.
losses (dict[Tensor]): the losses for the model during training. During
testing, it is an empty dict.
"""
objectness, rpn_box_regression = self.head(features)
anchors = self.anchor_generator(images, features)
if not train or not self.training :
return self._forward_test(anchors, objectness, rpn_box_regression)
return self._forward_train(anchors, objectness, rpn_box_regression, targets)
def _forward_train(self, anchors, objectness, rpn_box_regression, targets):
if self.cfg.MODEL.RPN_ONLY:
# When training an RPN-only model, the loss is determined by the
# predicted objectness and rpn_box_regression values and there is
# no need to transform the anchors into predicted boxes; this is an
# optimization that avoids the unnecessary transformation.
boxes = anchors
else:
# For end-to-end models, anchors must be transformed into boxes and
# sampled into a training batch.
with torch.no_grad():
boxes = self.box_selector_train(
anchors, objectness, rpn_box_regression, targets
)
loss_objectness, loss_rpn_box_reg = self.loss_evaluator(
anchors, objectness, rpn_box_regression, targets
)
losses = {
"loss_objectness": loss_objectness,
"loss_rpn_box_reg": loss_rpn_box_reg,
}
return boxes, anchors, objectness, rpn_box_regression, losses
def _forward_test(self, anchors, objectness, rpn_box_regression):
boxes = self.box_selector_test(anchors, objectness, rpn_box_regression)
if self.cfg.MODEL.RPN_ONLY:
# For end-to-end models, the RPN proposals are an intermediate state
# and don't bother to sort them in decreasing score order. For RPN-only
# models, the proposals are the final output and we return them in
# high-to-low confidence order.
inds = [
box.get_field("objectness").sort(descending=True)[1] for box in boxes
]
boxes = [box[ind] for box, ind in zip(boxes, inds)]
return boxes, anchors, objectness, rpn_box_regression, {}
def build_rpn(cfg):
"""
This gives the gist of it. Not super important because it doesn't change as much
"""
return RPNModule(cfg)
| 40.027778
| 86
| 0.650416
|
4585b0fcd22acbbab59888eef94f5bf829f1b747
| 5,140
|
py
|
Python
|
packages/M2Crypto-0.21.1/demo/medusa/default_handler.py
|
RaphaelPrevost/Back2Shops
|
5f2d369e82fe2a7b9b3a6c55782319b23d142dfd
|
[
"CECILL-B"
] | null | null | null |
packages/M2Crypto-0.21.1/demo/medusa/default_handler.py
|
RaphaelPrevost/Back2Shops
|
5f2d369e82fe2a7b9b3a6c55782319b23d142dfd
|
[
"CECILL-B"
] | 6
|
2021-03-31T19:21:50.000Z
|
2022-01-13T01:46:09.000Z
|
packages/M2Crypto-0.21.1/demo/medusa/default_handler.py
|
RaphaelPrevost/Back2Shops
|
5f2d369e82fe2a7b9b3a6c55782319b23d142dfd
|
[
"CECILL-B"
] | null | null | null |
# -*- Mode: Python; tab-width: 4 -*-
#
# Author: Sam Rushing <rushing@nightmare.com>
# Copyright 1997 by Sam Rushing
# All Rights Reserved.
#
# standard python modules
import os
import re
import posixpath
import stat
import string
import time
# medusa modules
import http_date
import http_server
import mime_type_table
import status_handler
import producers
unquote = http_server.unquote
# This is the 'default' handler. it implements the base set of
# features expected of a simple file-delivering HTTP server. file
# services are provided through a 'filesystem' object, the very same
# one used by the FTP server.
#
# You can replace or modify this handler if you want a non-standard
# HTTP server. You can also derive your own handler classes from
# it.
#
# support for handling POST requests is available in the derived
# class <default_with_post_handler>, defined below.
#
from counter import counter
class default_handler:
valid_commands = ['get', 'head']
IDENT = 'Default HTTP Request Handler'
# Pathnames that are tried when a URI resolves to a directory name
directory_defaults = [
'index.html',
'default.html'
]
default_file_producer = producers.file_producer
def __init__ (self, filesystem):
self.filesystem = filesystem
# count total hits
self.hit_counter = counter()
# count file deliveries
self.file_counter = counter()
# count cache hits
self.cache_counter = counter()
hit_counter = 0
def __repr__ (self):
return '<%s (%s hits) at %x>' % (
self.IDENT,
self.hit_counter,
id (self)
)
# always match, since this is a default
def match (self, request):
return 1
# handle a file request, with caching.
def handle_request (self, request):
if request.command not in self.valid_commands:
request.error (400) # bad request
return
self.hit_counter.increment()
path, params, query, fragment = request.split_uri()
if '%' in path:
path = unquote (path)
# strip off all leading slashes
while path and path[0] == '/':
path = path[1:]
if self.filesystem.isdir (path):
if path and path[-1] != '/':
request['Location'] = 'http://%s/%s/' % (
request.channel.server.server_name,
path
)
request.error (301)
return
# we could also generate a directory listing here,
# may want to move this into another method for that
# purpose
found = 0
if path and path[-1] != '/':
path = path + '/'
for default in self.directory_defaults:
p = path + default
if self.filesystem.isfile (p):
path = p
found = 1
break
if not found:
request.error (404) # Not Found
return
elif not self.filesystem.isfile (path):
request.error (404) # Not Found
return
file_length = self.filesystem.stat (path)[stat.ST_SIZE]
ims = get_header_match (IF_MODIFIED_SINCE, request.header)
length_match = 1
if ims:
length = ims.group (4)
if length:
try:
length = string.atoi (length)
if length != file_length:
length_match = 0
except:
pass
ims_date = 0
if ims:
ims_date = http_date.parse_http_date (ims.group (1))
try:
mtime = self.filesystem.stat (path)[stat.ST_MTIME]
except:
request.error (404)
return
if length_match and ims_date:
if mtime <= ims_date:
request.reply_code = 304
request.done()
self.cache_counter.increment()
return
try:
file = self.filesystem.open (path, 'rb')
except IOError:
request.error (404)
return
request['Last-Modified'] = http_date.build_http_date (mtime)
request['Content-Length'] = file_length
self.set_content_type (path, request)
if request.command == 'get':
request.push (self.default_file_producer (file))
self.file_counter.increment()
request.done()
def set_content_type (self, path, request):
ext = string.lower (get_extension (path))
if mime_type_table.content_type_map.has_key (ext):
request['Content-Type'] = mime_type_table.content_type_map[ext]
else:
# TODO: test a chunk off the front of the file for 8-bit
# characters, and use application/octet-stream instead.
request['Content-Type'] = 'text/plain'
def status (self):
return producers.simple_producer (
'<li>%s' % status_handler.html_repr (self)
+ '<ul>'
+ ' <li><b>Total Hits:</b> %s' % self.hit_counter
+ ' <li><b>Files Delivered:</b> %s' % self.file_counter
+ ' <li><b>Cache Hits:</b> %s' % self.cache_counter
+ '</ul>'
)
# HTTP/1.0 doesn't say anything about the "; length=nnnn" addition
# to this header. I suppose it's purpose is to avoid the overhead
# of parsing dates...
IF_MODIFIED_SINCE = re.compile (
'If-Modified-Since: ([^;]+)((; length=([0-9]+)$)|$)',
re.IGNORECASE
)
USER_AGENT = re.compile ('User-Agent: (.*)', re.IGNORECASE)
CONTENT_TYPE = re.compile (
r'Content-Type: ([^;]+)((; boundary=([A-Za-z0-9\'\(\)+_,./:=?-]+)$)|$)',
re.IGNORECASE
)
get_header = http_server.get_header
get_header_match = http_server.get_header_match
def get_extension (path):
dirsep = string.rfind (path, '/')
dotsep = string.rfind (path, '.')
if dotsep > dirsep:
return path[dotsep+1:]
else:
return ''
| 23.796296
| 73
| 0.677432
|
e6d2a9ad9bc8376df7184e244bdec40a3ca9c6ac
| 1,999
|
py
|
Python
|
ssseg/modules/datasets/lip.py
|
skydengyao/sssegmentation
|
606b05983fa967bb3c98d1120f44dfc516532dad
|
[
"MIT"
] | 1
|
2021-05-28T06:42:37.000Z
|
2021-05-28T06:42:37.000Z
|
ssseg/modules/datasets/lip.py
|
skydengyao/sssegmentation
|
606b05983fa967bb3c98d1120f44dfc516532dad
|
[
"MIT"
] | null | null | null |
ssseg/modules/datasets/lip.py
|
skydengyao/sssegmentation
|
606b05983fa967bb3c98d1120f44dfc516532dad
|
[
"MIT"
] | null | null | null |
'''
Function:
load the LIP dataset
Author:
Zhenchao Jin
'''
import os
import pandas as pd
from .base import *
'''LIP dataset'''
class LIPDataset(BaseDataset):
num_classes = 20
classnames = ['__background__', 'hat', 'hair', 'glove', 'sunglasses', 'upperclothes', 'dress',
'coat', 'socks', 'pants', 'jumpsuits', 'scarf', 'skirt', 'face',
'leftArm', 'rightArm', 'leftLeg', 'rightLeg', 'leftShoe', 'rightShoe']
assert num_classes == len(classnames)
def __init__(self, mode, logger_handle, dataset_cfg, **kwargs):
super(LIPDataset, self).__init__(mode, logger_handle, dataset_cfg, **kwargs)
# obtain the dirs
rootdir = dataset_cfg['rootdir']
setmap_dict = {'train': 'train', 'val': 'val', 'test': 'testing'}
self.image_dir = os.path.join(rootdir, f"{setmap_dict[dataset_cfg['set']]}_images")
self.ann_dir = os.path.join(rootdir, f"{setmap_dict[dataset_cfg['set']]}_segmentations")
# obatin imageids
df = pd.read_csv(os.path.join(rootdir, dataset_cfg['set']+'_id.txt'), names=['imageids'])
self.imageids = df['imageids'].values
self.imageids = [str(_id) for _id in self.imageids]
'''pull item'''
def __getitem__(self, index):
imageid = self.imageids[index]
imagepath = os.path.join(self.image_dir, imageid+'.jpg')
annpath = os.path.join(self.ann_dir, imageid+'.png')
sample = self.read(imagepath, annpath, self.dataset_cfg.get('with_ann', True))
sample.update({'id': imageid})
if self.mode == 'TRAIN':
sample = self.synctransform(sample, 'without_totensor_normalize_pad')
sample['edge'] = self.generateedge(sample['segmentation'].copy())
sample = self.synctransform(sample, 'only_totensor_normalize_pad')
else:
sample = self.synctransform(sample, 'all')
return sample
'''length'''
def __len__(self):
return len(self.imageids)
| 43.456522
| 99
| 0.624312
|
0f440debf44f018eccc4e398418786361a1994de
| 8,287
|
py
|
Python
|
plata/payment/modules/ogone.py
|
fsw/plata
|
c704f2626da1c233e5f915c34b95d576915fe607
|
[
"BSD-3-Clause"
] | null | null | null |
plata/payment/modules/ogone.py
|
fsw/plata
|
c704f2626da1c233e5f915c34b95d576915fe607
|
[
"BSD-3-Clause"
] | null | null | null |
plata/payment/modules/ogone.py
|
fsw/plata
|
c704f2626da1c233e5f915c34b95d576915fe607
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Payment module for Ogone integration
Needs the following settings to work correctly::
OGONE = {
'PSPID': 'your_shop_id',
'LIVE': True, # Or False
'SHA1_IN': 'yourhash',
'SHA1_OUT': 'yourotherhash',
}
"""
from decimal import Decimal
from hashlib import sha1
import locale
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseForbidden
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _, get_language, to_locale
from django.views.decorators.csrf import csrf_exempt
import plata
from plata.payment.modules.base import ProcessorBase
from plata.shop.models import OrderPayment
logger = logging.getLogger('plata.payment.ogone')
csrf_exempt_m = method_decorator(csrf_exempt)
# Copied from ogone test account backend
STATUSES = """\
0 Incomplete or invalid
1 Cancelled by client
2 Authorization refused
4 Order stored
40 Stored waiting external result
41 Waiting client payment
5 Authorized
50 Authorized waiting external result
51 Authorization waiting
52 Authorization not known
55 Stand-by
56 OK with scheduled payments
57 Error in scheduled payments
59 Authoriz. to get manually
6 Authorized and cancelled
61 Author. deletion waiting
62 Author. deletion uncertain
63 Author. deletion refused
64 Authorized and cancelled
7 Payment deleted
71 Payment deletion pending
72 Payment deletion uncertain
73 Payment deletion refused
74 Payment deleted
75 Deletion processed by merchant
8 Refund
81 Refund pending
82 Refund uncertain
83 Refund refused
84 Payment declined by the acquirer
85 Refund processed by merchant
9 Payment requested
91 Payment processing
92 Payment uncertain
93 Payment refused
94 Refund declined by the acquirer
95 Payment processed by merchant
99 Being processed"""
STATUS_DICT = dict(line.split('\t') for line in STATUSES.splitlines())
class PaymentProcessor(ProcessorBase):
key = 'ogone'
default_name = _('Ogone')
def get_urls(self):
from django.conf.urls import patterns, url
return patterns('',
url(r'^payment/ogone/ipn/$', self.ipn, name='plata_payment_ogone_ipn'),
)
def process_order_confirmed(self, request, order):
OGONE = settings.OGONE
if not order.balance_remaining:
return self.already_paid(order)
logger.info('Processing order %s using Ogone' % order)
payment = self.create_pending_payment(order)
if plata.settings.PLATA_STOCK_TRACKING:
StockTransaction = plata.stock_model()
self.create_transactions(order, _('payment process reservation'),
type=StockTransaction.PAYMENT_PROCESS_RESERVATION,
negative=True, payment=payment)
# params that will be hashed
form_params = {
'PSPID': OGONE['PSPID'],
'orderID': 'Order-%d-%d' % (order.id, payment.id),
'amount': u'%s' % int(order.balance_remaining.quantize(Decimal('0.00'))*100),
'currency': order.currency,
'language': locale.normalize(to_locale(get_language())).split('.')[0],
'CN': u'%s %s' % (order.billing_first_name, order.billing_last_name),
'EMAIL': order.email,
'ownerZIP': order.billing_zip_code,
'owneraddress': order.billing_address,
'ownertown': order.billing_city,
'accepturl': u'http://%s%s' % (
request.META.get('HTTP_HOST'),
reverse('plata_order_success')),
'declineurl': u'http://%s%s' % (
request.META.get('HTTP_HOST'),
reverse('plata_order_payment_failure')),
'exceptionurl': u'http://%s%s' % (
request.META.get('HTTP_HOST'),
reverse('plata_order_payment_failure')),
'cancelurl': u'http://%s%s' % (
request.META.get('HTTP_HOST'),
reverse('plata_order_payment_failure')),
}
# create hash
value_strings = [u'{0}={1}{2}'.format(key.upper(), value, OGONE['SHA1_IN'])
for key, value in form_params.items()]
hash_string = u''.join(sorted(value_strings))
encoded_hash_string = sha1(hash_string.encode('utf-8')).hexdigest()
# add hash and additional params
form_params.update({
'SHASign': encoded_hash_string.upper(),
'mode': OGONE['LIVE'] and 'prod' or 'test',
})
return self.shop.render(request, 'payment/ogone_form.html', {
'order': order,
'HTTP_HOST': request.META.get('HTTP_HOST'),
'form_params': form_params,
'locale': form_params['language'],
})
@csrf_exempt_m
def ipn(self, request):
OGONE = settings.OGONE
try:
parameters_repr = repr(request.POST.copy()).encode('utf-8')
logger.info('IPN: Processing request data %s' % parameters_repr)
try:
orderID = request.POST['orderID']
currency = request.POST['currency']
amount = request.POST['amount']
STATUS = request.POST['STATUS']
PAYID = request.POST['PAYID']
BRAND = request.POST['BRAND']
SHASIGN = request.POST['SHASIGN']
except KeyError, e:
logger.error('IPN: Missing data in %s' % parameters_repr)
return HttpResponseForbidden('Missing data')
value_strings = [u'{0}={1}{2}'.format(key.upper(), value, OGONE['SHA1_OUT'])
for key, value in request.POST.iteritems()
if value and not key == 'SHASIGN']
sha1_out = sha1((u''.join(sorted(value_strings))).encode('utf-8')).hexdigest()
if sha1_out.lower() != SHASIGN.lower():
logger.error('IPN: Invalid hash in %s' % parameters_repr)
return HttpResponseForbidden('Hash did not validate')
try:
order, order_id, payment_id = orderID.split('-')
except ValueError:
logger.error('IPN: Error getting order for %s' % orderID)
return HttpResponseForbidden('Malformed order ID')
# Try fetching the order and order payment objects
# We create a new order payment object in case the old one
# cannot be found.
try:
order = self.shop.order_model.objects.get(pk=order_id)
except self.shop.order_model.DoesNotExist:
logger.error('IPN: Order %s does not exist' % order_id)
return HttpResponseForbidden('Order %s does not exist' % order_id)
try:
payment = order.payments.get(pk=payment_id)
except order.payments.model.DoesNotExist:
payment = order.payments.model(
order=order,
payment_module=u'%s' % self.name,
)
payment.status = OrderPayment.PROCESSED
payment.currency = currency
payment.amount = Decimal(amount)
payment.data = request.POST.copy()
payment.transaction_id = PAYID
payment.payment_method = BRAND
payment.notes = STATUS_DICT.get(STATUS)
if STATUS in ('5', '9'):
payment.authorized = timezone.now()
payment.status = OrderPayment.AUTHORIZED
payment.save()
order = order.reload()
logger.info('IPN: Successfully processed IPN request for %s' % order)
if payment.authorized and plata.settings.PLATA_STOCK_TRACKING:
StockTransaction = plata.stock_model()
self.create_transactions(order, _('sale'),
type=StockTransaction.SALE, negative=True, payment=payment)
if not order.balance_remaining:
self.order_paid(order, payment=payment)
return HttpResponse('OK')
except Exception, e:
logger.error('IPN: Processing failure %s' % unicode(e))
raise
| 35.719828
| 90
| 0.615663
|
85cd31aed0af562c9e93ede5068ff14d43328b4f
| 5,263
|
py
|
Python
|
foundation_setting_out_rev01.py
|
suben-mk/FoundationSetting-Out-Rev01
|
e4b47bba88e2e33b81486350197e02203fe79159
|
[
"MIT"
] | null | null | null |
foundation_setting_out_rev01.py
|
suben-mk/FoundationSetting-Out-Rev01
|
e4b47bba88e2e33b81486350197e02203fe79159
|
[
"MIT"
] | null | null | null |
foundation_setting_out_rev01.py
|
suben-mk/FoundationSetting-Out-Rev01
|
e4b47bba88e2e33b81486350197e02203fe79159
|
[
"MIT"
] | null | null | null |
# Foundation Setting Out Program Rev.01
import numpy as np
import csv
################################## Function List ##################################
# Convert Angle : แปลงมุม (Credit Prajuab Riabroy's Blog)
PI = np.pi
DEG2RAD = PI / 180.0
RAD2DEG= 180.0 / PI
# แปลง degree > deg,min,sec (Credit Prajuab Riabroy's Blog)
def deg2dms(dd):
sign=1
if (dd < 0):
sign = -1
dd=abs(dd)
minutes, seconds = divmod(dd*3600,60)
degrees, minutes = divmod(minutes,60)
return (sign, degrees, minutes, seconds)
# แปลง deg,min,sec > deg - min - sec (Credit Prajuab Riabroy's Blog)
def DMS2str(degree, minute, second, numdec):
degree = abs(degree); minute = abs(minute); second = abs(second)
s ='{:.%df}' % numdec
ss = s.format(second)
smin = s.format(60.0)
mm ='{:.0f}'.format(minute)
if (ss == smin):
minute += 1
ss = s.format(0.0)
if (minute >= 60):
mm = '0'
degree += 1
else:
mm ='{:.0f}'.format(minute)
return '{:.0f}'.format(degree)+"-"+mm+"-"+ss
# Write csv file : เขียนข้อมูลลงไฟล์ csv
def WriteCSV (export_pile_schedule_result):
# 'a' is append, newline='' ขึ้นบรรทัดใหม่, encoding='utf-8' อักขระทุกอย่างของ Unicode นำมาใช้หมด
with open ('export_pile_schedule_result.csv', 'a', newline='', encoding='utf-8') as file:
#fw is file writer
fw = csv.writer(file)
fw.writerow(export_pile_schedule_result)
# Foudation Position : คำนวณตำแหน่งเสาเข็ม/มุมฐานราก
def Foudation_Position(Ncenter, Ecenter, Azimuth, Chainage, Offset):
Ni = Ncenter + Chainage * np.cos(Azimuth * DEG2RAD) + Offset * np.cos((Azimuth + 90) * DEG2RAD)
Ei = Ecenter + Chainage * np.sin(Azimuth * DEG2RAD) + Offset * np.sin((Azimuth + 90) * DEG2RAD)
return Ni, Ei
############################# Pile Schedule Calculation ############################
# นำเข้าข้อมูล Pier Schedule.csv (ประเภทข้อความ)
PierSC = np.loadtxt('01_Pier_Schedule.csv', delimiter=',', skiprows=1, dtype=str)
# นำเข้าข้อมูล Chinage&Offset Axis.csv (ประเภทข้อความ)
CHOS_Axis = np.loadtxt('02_CH&OS_Axis.csv', delimiter=',', skiprows=1, dtype=str)
# ตั้งชื่อหัวตารางและเขียนลงใน CSV (Export)
Head_Column = ['Point', 'N', 'E', 'Pier No', 'Sta', 'Pier Az', 'F Type']
WriteCSV(Head_Column)
# นับจำนวณข้อมูลที่อยู่ใน PierSC Array
count_array1 = len(PierSC)
# กำหนดชื่อข้อมูลตาม Column ใน PierSC Array
for i in range(count_array1):
Pier_No = PierSC[i][0] #Column 1 is Pier No. (ประเภทข้อความ)
Sta = float(PierSC[i][1]) #Column 2 is Station (ประเภทตัวเลข)
Nc = float(PierSC[i][2]) #Column 3 is Northing (ประเภทตัวเลข
Ec = float(PierSC[i][3]) #Column 4 is Easting (ประเภทตัวเลข
Pier_Az = float(PierSC[i][4]) #Column 5 is Pier Azimuth (ประเภทตัวเลข
FSkew = float(PierSC[i][5]) #Column 6 is Footing Skew (ประเภทตัวเลข)
Found_Type1 = PierSC[i][6] #Column 7 is Foundation Type (ประเภทข้อความ)
# แปลง Pier Azimuth Deg>DMS
sn, d, m, s = deg2dms(Pier_Az)
Pier_AzDMS = DMS2str(d, m, s, 2) # (ประเภทข้อความ)
# ตรวจสอบ Foundation type ใน PierSC Array
if Found_Type1 == 'N/A':
continue # ข้ามข้อมูลในกรณีไม่มี Foundation type
else:
# นับจำนวณข้อมูลที่อยู่ใน CHOS_Axis Array
count_array2 = len(CHOS_Axis)
# กำหนดชื่อข้อมูล Column ของ Foundation type ใน CHOS_Axis Array
for j in range(count_array2):
Found_Type2 = CHOS_Axis[j][0]
# ตรวจสอบ Foundation type ทั้ง 2 Array
if Found_Type1 != Found_Type2:
continue # ข้ามข้อมูลในกรณี Foundation type ไม่ตรงกัน
else:
# กรณี Foundation type ตรงกัน
print('{}, N: {:.3f} E: {:.3f}, {}, Sta: {:.3f}, Az: {}, {}'.format('CL', Nc, Ec, Pier_No, Sta, Pier_AzDMS, Found_Type1))
# เขียนข้อมูล Pier Center ลง CSV file (Export)
Result_1 = ['CL', Nc, Ec, Pier_No, Sta, Pier_AzDMS, Found_Type1]
WriteCSV(Result_1)
for k in range(1,30,3): # range(Start, End, Step), 1 คือ Column ที่ 1, 30 คือ Column ที่ 30, 3 : คือ นับเลขที่ละ 3 เช่น 0, 2, 5, 8 .......
# หาตัวเลขคู่อันดับ เพื่อดึ่งข้อมูลใน CHOS_Axis Array
Index_P = k # ประเภทข้อความ
Index_Ch = Index_P + 1 # ประเภทข้อความ
Index_Os = Index_Ch + 1 # ประเภทข้อความ
Final_Az = Pier_Az + FSkew # ประเภทตัวเลข
# ตรวจสอบเสาเข็มของ Foundation type
if CHOS_Axis[j][Index_Ch] and CHOS_Axis[j][Index_Ch] == 'N/A':
continue # ข้ามข้อมูลเสาเข็มหรือข้อมูลมุมฐานราก ในกรณีไม่มีค่า Chainage / Offset Axis
else:
P = Pier_No + '/' + CHOS_Axis[j][Index_P] # ประเภทข้อความ
Ch = float(CHOS_Axis[j][Index_Ch]) # ประเภทตัวเลข
Os = float(CHOS_Axis[j][Index_Os]) # ประเภทตัวเลข
# คำนวณหาตำแหน่งเสาเข็มหรือมุมฐานราก แต่ละ Foundation type
Pile_Pos = Foudation_Position(Nc, Ec, Final_Az, Ch, Os)
print('{} N: {:.3f} E: {:.3f}'.format(P, Pile_Pos[0], Pile_Pos[1]))
# เขียนข้อมูลตำแหน่งเสาเข็มหรือมุมฐานรากลงใน CSV file (Export)
Result_2 = [P, Pile_Pos[0], Pile_Pos[1]]
WriteCSV(Result_2)
| 41.769841
| 151
| 0.586358
|
da63931547da9d005f57cf986aad1ad1f8080914
| 558
|
py
|
Python
|
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/SUN/slice_accum.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/SUN/slice_accum.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/SUN/slice_accum.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_SUN_slice_accum'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_SUN_slice_accum',error_checker=_errors._error_checker)
GL_SLICE_ACCUM_SUN=_C('GL_SLICE_ACCUM_SUN',0x85CC)
| 34.875
| 113
| 0.775986
|
a2f4f8abdf7783427ea7ed9d939e9b0f5e2670f9
| 12,856
|
py
|
Python
|
recognition/triplet_loss.py
|
chicm/landmark
|
6cb2672e9a042050721f9e557b9827316b205bc2
|
[
"Apache-2.0"
] | null | null | null |
recognition/triplet_loss.py
|
chicm/landmark
|
6cb2672e9a042050721f9e557b9827316b205bc2
|
[
"Apache-2.0"
] | null | null | null |
recognition/triplet_loss.py
|
chicm/landmark
|
6cb2672e9a042050721f9e557b9827316b205bc2
|
[
"Apache-2.0"
] | 1
|
2020-01-20T13:42:29.000Z
|
2020-01-20T13:42:29.000Z
|
# encoding: utf-8
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
import torch
from torch import nn
from torch.autograd import Variable
def normalize(x, axis=-1):
"""Normalizing to unit length along the specified dimension.
Args:
x: pytorch Variable
Returns:
x: pytorch Variable, same shape as input
"""
x = 1. * x / (torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12)
return x
def euclidean_dist(x, y):
"""
Args:
x: pytorch Variable, with shape [m, d]
y: pytorch Variable, with shape [n, d]
Returns:
dist: pytorch Variable, with shape [m, n]
"""
m, n = x.size(0), y.size(0)
xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)
yy = torch.pow(y, 2).sum(1, keepdim=True).expand(n, m).t()
dist = xx + yy
dist.addmm_(1, -2, x, y.t())
dist = dist.clamp(min=1e-12).sqrt() # for numerical stability
return dist
def hard_example_mining(dist_mat, labels, return_inds=False):
"""For each anchor, find the hardest positive and negative sample.
Args:
dist_mat: pytorch Variable, pair wise distance between samples, shape [N, N]
labels: pytorch LongTensor, with shape [N]
return_inds: whether to return the indices. Save time if `False`(?)
Returns:
dist_ap: pytorch Variable, distance(anchor, positive); shape [N]
dist_an: pytorch Variable, distance(anchor, negative); shape [N]
p_inds: pytorch LongTensor, with shape [N];
indices of selected hard positive samples; 0 <= p_inds[i] <= N - 1
n_inds: pytorch LongTensor, with shape [N];
indices of selected hard negative samples; 0 <= n_inds[i] <= N - 1
NOTE: Only consider the case in which all labels have same num of samples,
thus we can cope with all anchors in parallel.
"""
assert len(dist_mat.size()) == 2
assert dist_mat.size(0) == dist_mat.size(1)
N = dist_mat.size(0)
# shape [N, N]
#new_whale_indexs = (labels == 5004 * 2).nonzero()
is_pos = labels.expand(N, N).eq(labels.expand(N, N).t())
is_neg = labels.expand(N, N).ne(labels.expand(N, N).t())
#for i in new_whale_indexs:
# is_pos[i, :] = 0
# is_pos[:, i] = 0
# is_pos[i, i] = 1
# `dist_ap` means distance(anchor, positive)
# both `dist_ap` and `relative_p_inds` with shape [N, 1]
dist_ap, relative_p_inds = torch.max(
(dist_mat * is_pos.float()).contiguous().view(N, -1), 1, keepdim=True)
# `dist_an` means distance(anchor, negative)
# both `dist_an` and `relative_n_inds` with shape [N, 1]
temp = dist_mat * is_neg.float()
temp[temp == 0] = 10e5
dist_an, relative_n_inds = torch.min(
(temp).contiguous().view(N, -1), 1, keepdim=True)
# shape [N]
dist_ap = dist_ap.squeeze(1)
dist_an = dist_an.squeeze(1)
if return_inds:
# shape [N, N]
ind = (labels.new().resize_as_(labels)
.copy_(torch.arange(0, N).long())
.unsqueeze(0).expand(N, N))
# shape [N, 1]
p_inds = torch.gather(
ind[is_pos].contiguous().view(N, -1), 1, relative_p_inds.data)
n_inds = torch.gather(
ind[is_neg].contiguous().view(N, -1), 1, relative_n_inds.data)
# shape [N]
p_inds = p_inds.squeeze(1)
n_inds = n_inds.squeeze(1)
return dist_ap, dist_an, p_inds, n_inds
return dist_ap, dist_an
def shortest_dist(dist_mat):
"""Parallel version.
Args:
dist_mat: pytorch Variable, available shape:
1) [m, n]
2) [m, n, N], N is batch size
3) [m, n, *], * can be arbitrary additional dimensions
Returns:
dist: three cases corresponding to `dist_mat`:
1) scalar
2) pytorch Variable, with shape [N]
3) pytorch Variable, with shape [*]
"""
m, n = dist_mat.size()[:2]
# Just offering some reference for accessing intermediate distance.
dist = [[0 for _ in range(n)] for _ in range(m)]
for i in range(m):
for j in range(n):
if (i == 0) and (j == 0):
dist[i][j] = dist_mat[i, j]
elif (i == 0) and (j > 0):
dist[i][j] = dist[i][j - 1] + dist_mat[i, j]
elif (i > 0) and (j == 0):
dist[i][j] = dist[i - 1][j] + dist_mat[i, j]
else:
dist[i][j] = torch.min(dist[i - 1][j], dist[i][j - 1]) + dist_mat[i, j]
dist = dist[-1][-1]
return dist
def local_dist(x, y):
"""
Args:
x: pytorch Variable, with shape [M, m, d]
y: pytorch Variable, with shape [N, n, d]
Returns:
dist: pytorch Variable, with shape [M, N]
"""
M, m, d = x.size()
N, n, d = y.size()
x = x.contiguous().view(M * m, d)
y = y.contiguous().view(N * n, d)
# shape [M * m, N * n]
dist_mat = euclidean_dist(x, y)
dist_mat = (torch.exp(dist_mat) - 1.) / (torch.exp(dist_mat) + 1.)
# shape [M * m, N * n] -> [M, m, N, n] -> [m, n, M, N]
dist_mat = dist_mat.contiguous().view(M, m, N, n).permute(1, 3, 0, 2)
# shape [M, N]
dist_mat = shortest_dist(dist_mat)
return dist_mat
# class TripletLoss(object):
# """Modified from Tong Xiao's open-reid (https://github.com/Cysu/open-reid).
# Related Triplet Loss theory can be found in paper 'In Defense of the Triplet
# Loss for Person Re-Identification'."""
#
# def __init__(self, margin=None):
# self.margin = margin
# if margin is not None:
# self.ranking_loss = nn.MarginRankingLoss(margin=margin)
# else:
# self.ranking_loss = nn.SoftMarginLoss()
#
# def __call__(self, feat, labels, normalize_feature=False):
# # indexs = (labels != 5004).nonzero().view(-1)
# # global_feat = global_feat[indexs].contiguous()
# # labels = labels[indexs].contiguous()
# if normalize_feature:
# feat = normalize(feat, axis=-1)
# if len(feat.size()) == 3:
# dist_mat = local_dist(feat, feat)
# else:
# dist_mat = euclidean_dist(feat, feat)
# dist_ap, dist_an = hard_example_mining(
# dist_mat, labels)
# y = dist_an.new().resize_as_(dist_an).fill_(1)
# if self.margin is not None:
# loss = self.ranking_loss(dist_an, dist_ap, y)
# else:
# loss = self.ranking_loss(dist_an - dist_ap, y)
# return loss, dist_ap, dist_an
class TripletLoss(object):
"""Modified from Tong Xiao's open-reid (https://github.com/Cysu/open-reid).
Related Triplet Loss theory can be found in paper 'In Defense of the Triplet
Loss for Person Re-Identification'."""
def __init__(self, margin=None):
self.margin = margin
if margin is not None:
self.ranking_loss = nn.MarginRankingLoss(margin=margin)
else:
self.ranking_loss = nn.SoftMarginLoss()
def __call__(self, dist_ap, dist_an):
"""
Args:
dist_ap: pytorch Variable, distance between anchor and positive sample,
shape [N]
dist_an: pytorch Variable, distance between anchor and negative sample,
shape [N]
Returns:
loss: pytorch Variable, with shape [1]
"""
y = Variable(dist_an.data.new().resize_as_(dist_an.data).fill_(1))
if self.margin is not None:
loss = self.ranking_loss(dist_an, dist_ap, y)
else:
loss = self.ranking_loss(dist_an - dist_ap, y)
return loss
def normalize(x, axis=-1):
"""Normalizing to unit length along the specified dimension.
Args:
x: pytorch Variable
Returns:
x: pytorch Variable, same shape as input
"""
x = 1. * x / (torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12)
return x
def euclidean_dist(x, y):
"""
Args:
x: pytorch Variable, with shape [m, d]
y: pytorch Variable, with shape [n, d]
Returns:
dist: pytorch Variable, with shape [m, n]
"""
m, n = x.size(0), y.size(0)
xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)
yy = torch.pow(y, 2).sum(1, keepdim=True).expand(n, m).t()
dist = xx + yy
dist.addmm_(1, -2, x, y.t())
dist = dist.clamp(min=1e-12).sqrt() # for numerical stability
return dist
def batch_euclidean_dist(x, y):
"""
Args:
x: pytorch Variable, with shape [N, m, d]
y: pytorch Variable, with shape [N, n, d]
Returns:
dist: pytorch Variable, with shape [N, m, n]
"""
assert len(x.size()) == 3
assert len(y.size()) == 3
assert x.size(0) == y.size(0)
assert x.size(-1) == y.size(-1)
N, m, d = x.size()
N, n, d = y.size()
# shape [N, m, n]
xx = torch.pow(x, 2).sum(-1, keepdim=True).expand(N, m, n)
yy = torch.pow(y, 2).sum(-1, keepdim=True).expand(N, n, m).permute(0, 2, 1)
dist = xx + yy
dist.baddbmm_(1, -2, x, y.permute(0, 2, 1))
dist = dist.clamp(min=1e-12).sqrt() # for numerical stability
return dist
def batch_local_dist(x, y):
"""
Args:
x: pytorch Variable, with shape [N, m, d]
y: pytorch Variable, with shape [N, n, d]
Returns:
dist: pytorch Variable, with shape [N]
"""
assert len(x.size()) == 3
assert len(y.size()) == 3
assert x.size(0) == y.size(0)
assert x.size(-1) == y.size(-1)
# shape [N, m, n]
dist_mat = batch_euclidean_dist(x, y)
dist_mat = (torch.exp(dist_mat) - 1.) / (torch.exp(dist_mat) + 1.)
# shape [N]
dist = shortest_dist(dist_mat.permute(1, 2, 0))
return dist
def global_loss(tri_loss, global_feat, labels, normalize_feature=False):
"""
Args:
tri_loss: a `TripletLoss` object
global_feat: pytorch Variable, shape [N, C]
labels: pytorch LongTensor, with shape [N]
normalize_feature: whether to normalize feature to unit length along the
Channel dimension
Returns:
loss: pytorch Variable, with shape [1]
p_inds: pytorch LongTensor, with shape [N];
indices of selected hard positive samples; 0 <= p_inds[i] <= N - 1
n_inds: pytorch LongTensor, with shape [N];
indices of selected hard negative samples; 0 <= n_inds[i] <= N - 1
=============
For Debugging
=============
dist_ap: pytorch Variable, distance(anchor, positive); shape [N]
dist_an: pytorch Variable, distance(anchor, negative); shape [N]
===================
For Mutual Learning
===================
dist_mat: pytorch Variable, pairwise euclidean distance; shape [N, N]
"""
if normalize_feature:
global_feat = normalize(global_feat, axis=-1)
# shape [N, N]
dist_mat = euclidean_dist(global_feat, global_feat)
dist_ap, dist_an = hard_example_mining(
dist_mat, labels, return_inds=False)
loss = tri_loss(dist_ap, dist_an)
return loss, dist_ap, dist_an, dist_mat
def local_loss(
tri_loss,
local_feat,
labels=None,
p_inds=None,
n_inds=None,
normalize_feature=False):
"""
Args:
tri_loss: a `TripletLoss` object
local_feat: pytorch Variable, shape [N, H, c] (NOTE THE SHAPE!)
p_inds: pytorch LongTensor, with shape [N];
indices of selected hard positive samples; 0 <= p_inds[i] <= N - 1
n_inds: pytorch LongTensor, with shape [N];
indices of selected hard negative samples; 0 <= n_inds[i] <= N - 1
labels: pytorch LongTensor, with shape [N]
normalize_feature: whether to normalize feature to unit length along the
Channel dimension
If hard samples are specified by `p_inds` and `n_inds`, then `labels` is not
used. Otherwise, local distance finds its own hard samples independent of
global distance.
Returns:
loss: pytorch Variable,with shape [1]
=============
For Debugging
=============
dist_ap: pytorch Variable, distance(anchor, positive); shape [N]
dist_an: pytorch Variable, distance(anchor, negative); shape [N]
===================
For Mutual Learning
===================
dist_mat: pytorch Variable, pairwise local distance; shape [N, N]
"""
if normalize_feature:
local_feat = normalize(local_feat, axis=-1)
if p_inds is None or n_inds is None:
dist_mat = local_dist(local_feat, local_feat)
dist_ap, dist_an = hard_example_mining(dist_mat, labels, return_inds=False)
loss = tri_loss(dist_ap, dist_an)
return loss, dist_ap, dist_an, dist_mat
else:
dist_ap = batch_local_dist(local_feat, local_feat[p_inds])
dist_an = batch_local_dist(local_feat, local_feat[n_inds])
loss = tri_loss(dist_ap, dist_an)
return loss, dist_ap, dist_an
if __name__ == '__main__':
global_feat = torch.randn(4, 2048)
local_feat = torch.randn(4, 7, 512)
labels = torch.tensor([10,2,10,2])
triple_loss = global_loss(TripletLoss(margin=0.3), global_feat, labels)[0] + \
local_loss(TripletLoss(margin=0.3), local_feat, labels)[0]
print(triple_loss)
| 34.282667
| 83
| 0.604698
|
7c54b5d7e0d7c3159ef91378a80cb5d9bfb8e43e
| 38
|
py
|
Python
|
samreetira2/__init__.py
|
samreetira123/samreetira2
|
38004bbfe9f698407ad816b29f7ce728c3472f58
|
[
"MIT"
] | null | null | null |
samreetira2/__init__.py
|
samreetira123/samreetira2
|
38004bbfe9f698407ad816b29f7ce728c3472f58
|
[
"MIT"
] | null | null | null |
samreetira2/__init__.py
|
samreetira123/samreetira2
|
38004bbfe9f698407ad816b29f7ce728c3472f58
|
[
"MIT"
] | null | null | null |
from samreetira2.samree import Samree
| 19
| 37
| 0.868421
|
a1d98ddc7c88635a63b6c5ee293fd490d5cd7b51
| 22,776
|
py
|
Python
|
fastybird_fb_bus_connector/connector.py
|
FastyBird/fb-bus-connector-plugin
|
71568874243578a37a01bd3f0cbb3306c331d11f
|
[
"Apache-2.0"
] | null | null | null |
fastybird_fb_bus_connector/connector.py
|
FastyBird/fb-bus-connector-plugin
|
71568874243578a37a01bd3f0cbb3306c331d11f
|
[
"Apache-2.0"
] | null | null | null |
fastybird_fb_bus_connector/connector.py
|
FastyBird/fb-bus-connector-plugin
|
71568874243578a37a01bd3f0cbb3306c331d11f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
# Copyright 2021. FastyBird s.r.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
FastyBird BUS connector module
"""
# Python base dependencies
import asyncio
import logging
import re
import uuid
from datetime import datetime
from typing import Dict, Optional, Union
# Library dependencies
from fastybird_devices_module.connectors.connector import IConnector
from fastybird_devices_module.entities.channel import (
ChannelControlEntity,
ChannelDynamicPropertyEntity,
ChannelEntity,
ChannelPropertyEntity,
)
from fastybird_devices_module.entities.connector import ConnectorControlEntity
from fastybird_devices_module.entities.device import (
DeviceAttributeEntity,
DeviceControlEntity,
DeviceDynamicPropertyEntity,
DevicePropertyEntity,
DeviceStaticPropertyEntity,
)
from fastybird_devices_module.exceptions import RestartConnectorException
from fastybird_devices_module.utils import normalize_value
from fastybird_metadata.devices_module import ConnectionState
from fastybird_metadata.types import (
ButtonPayload,
ControlAction,
DataType,
SwitchPayload,
)
from kink import inject
from fastybird_fb_bus_connector.clients.client import Client
from fastybird_fb_bus_connector.consumers.consumer import Consumer
from fastybird_fb_bus_connector.entities import FbBusConnectorEntity, FbBusDeviceEntity
from fastybird_fb_bus_connector.events.listeners import EventsListener
from fastybird_fb_bus_connector.exceptions import InvalidStateException
from fastybird_fb_bus_connector.logger import Logger
from fastybird_fb_bus_connector.registry.model import (
DevicesAttributesRegistry,
DevicesRegistry,
RegistersRegistry,
)
# Library libs
from fastybird_fb_bus_connector.registry.records import (
InputRegisterRecord,
OutputRegisterRecord,
)
from fastybird_fb_bus_connector.transporters.transporter import ITransporter
from fastybird_fb_bus_connector.types import ConnectorAction, RegisterName, RegisterType
@inject(alias=IConnector)
class FbBusConnector(IConnector): # pylint: disable=too-many-instance-attributes,too-many-public-methods
"""
FastyBird BUS connector
@package FastyBird:FbBusConnector!
@module connector
@author Adam Kadlec <adam.kadlec@fastybird.com>
"""
__stopped: bool = False
__connector_id: uuid.UUID
__packets_to_be_sent: int = 0
__consumer: Consumer
__client: Client
__devices_registry: DevicesRegistry
__registers_registry: RegistersRegistry
__devices_attributes_registry: DevicesAttributesRegistry
__transporter: ITransporter
__events_listener: EventsListener
__logger: Union[Logger, logging.Logger]
# -----------------------------------------------------------------------------
def __init__( # pylint: disable=too-many-arguments
self,
connector_id: uuid.UUID,
consumer: Consumer,
client: Client,
devices_registry: DevicesRegistry,
registers_registry: RegistersRegistry,
devices_attributes_registry: DevicesAttributesRegistry,
transporter: ITransporter,
events_listener: EventsListener,
logger: Union[Logger, logging.Logger] = logging.getLogger("dummy"),
) -> None:
self.__connector_id = connector_id
self.__client = client
self.__consumer = consumer
self.__devices_registry = devices_registry
self.__registers_registry = registers_registry
self.__devices_attributes_registry = devices_attributes_registry
self.__transporter = transporter
self.__events_listener = events_listener
self.__logger = logger
# -----------------------------------------------------------------------------
@property
def id(self) -> uuid.UUID: # pylint: disable=invalid-name
"""Connector identifier"""
return self.__connector_id
# -----------------------------------------------------------------------------
def initialize(self, connector: FbBusConnectorEntity) -> None:
"""Set connector to initial state"""
self.__devices_registry.reset()
for device in connector.devices:
self.initialize_device(device=device)
# -----------------------------------------------------------------------------
def initialize_device(self, device: FbBusDeviceEntity) -> None:
"""Initialize device in connector registry"""
device_record = self.__devices_registry.append(
device_id=device.id,
device_enabled=False,
device_serial_number=device.identifier,
)
for device_property in device.properties:
self.initialize_device_property(device=device, device_property=device_property)
for device_attribute in device.attributes:
self.initialize_device_attribute(device=device, device_attribute=device_attribute)
for channel in device.channels:
self.initialize_device_channel(device=device, channel=channel)
self.__devices_registry.enable(device=device_record)
# -----------------------------------------------------------------------------
def remove_device(self, device_id: uuid.UUID) -> None:
"""Remove device from connector registry"""
self.__devices_registry.remove(device_id=device_id)
# -----------------------------------------------------------------------------
def reset_devices(self) -> None:
"""Reset devices registry to initial state"""
self.__devices_registry.reset()
# -----------------------------------------------------------------------------
def initialize_device_property(self, device: FbBusDeviceEntity, device_property: DevicePropertyEntity) -> None:
"""Initialize device property aka attribute register in connector registry"""
match = re.compile("(?P<name>[a-zA-Z-]+)_(?P<address>[0-9]+)")
parsed_property_identifier = match.fullmatch(device_property.identifier)
if parsed_property_identifier is not None:
if isinstance(device_property, DeviceDynamicPropertyEntity):
self.__registers_registry.append_attribute_register(
device_id=device.id,
register_id=device_property.id,
register_address=int(parsed_property_identifier.group("address")),
register_data_type=device_property.data_type,
register_invalid=device_property.invalid,
register_name=str(parsed_property_identifier.group("name")),
register_settable=device_property.settable,
register_queryable=device_property.queryable,
)
elif isinstance(device_property, DeviceStaticPropertyEntity):
self.__registers_registry.append_attribute_register(
device_id=device.id,
register_id=device_property.id,
register_address=int(parsed_property_identifier.group("address")),
register_data_type=device_property.data_type,
register_invalid=device_property.invalid,
register_name=str(parsed_property_identifier.group("name")),
register_settable=device_property.settable,
register_queryable=device_property.queryable,
register_value=device_property.value,
)
# -----------------------------------------------------------------------------
def notify_device_property(self, device: FbBusDeviceEntity, device_property: DevicePropertyEntity) -> None:
"""Notify device property was reported to connector"""
# -----------------------------------------------------------------------------
def remove_device_property(self, device: FbBusDeviceEntity, property_id: uuid.UUID) -> None:
"""Remove device property from connector registry"""
self.__registers_registry.remove(register_id=property_id)
# -----------------------------------------------------------------------------
def reset_devices_properties(self, device: FbBusDeviceEntity) -> None:
"""Reset devices properties registry to initial state"""
self.__registers_registry.reset(device_id=device.id, registers_type=RegisterType.ATTRIBUTE)
# -----------------------------------------------------------------------------
def initialize_device_attribute(self, device: FbBusDeviceEntity, device_attribute: DeviceAttributeEntity) -> None:
"""Initialize device attribute"""
if isinstance(device_attribute, DeviceAttributeEntity):
self.__devices_attributes_registry.append(
device_id=device_attribute.device.id,
attribute_id=device_attribute.id,
attribute_identifier=device_attribute.identifier,
attribute_name=device_attribute.name,
attribute_value=device_attribute.content
if isinstance(device_attribute.content, str) or device_attribute.content is None
else str(device_attribute.content),
)
# -----------------------------------------------------------------------------
def notify_device_attribute(self, device: FbBusDeviceEntity, device_attribute: DeviceAttributeEntity) -> None:
"""Notify device attribute was reported to connector"""
# -----------------------------------------------------------------------------
def remove_device_attribute(self, device: FbBusDeviceEntity, attribute_id: uuid.UUID) -> None:
"""Remove device attribute from connector registry"""
self.__devices_attributes_registry.remove(attribute_id=attribute_id, propagate=False)
# -----------------------------------------------------------------------------
def reset_devices_attributes(self, device: FbBusDeviceEntity) -> None:
"""Reset devices attributes registry to initial state"""
self.__devices_attributes_registry.reset(device_id=device.id)
# -----------------------------------------------------------------------------
def initialize_device_channel(self, device: FbBusDeviceEntity, channel: ChannelEntity) -> None:
"""Initialize device channel aka registers group in connector registry"""
for channel_property in channel.properties:
self.initialize_device_channel_property(channel=channel, channel_property=channel_property)
# -----------------------------------------------------------------------------
def remove_device_channel(self, device: FbBusDeviceEntity, channel_id: uuid.UUID) -> None:
"""Remove device channel from connector registry"""
io_registers = self.__registers_registry.get_all_for_device(
device_id=device.id,
register_type=[RegisterType.OUTPUT, RegisterType.INPUT],
)
for register in io_registers:
if (
isinstance(register, (OutputRegisterRecord, InputRegisterRecord))
and register.channel_id is not None
and register.channel_id.__eq__(channel_id)
):
self.__registers_registry.remove(register_id=register.id)
# -----------------------------------------------------------------------------
def reset_devices_channels(self, device: FbBusDeviceEntity) -> None:
"""Reset devices channels registry to initial state"""
self.__registers_registry.reset(device_id=device.id, registers_type=RegisterType.OUTPUT)
self.__registers_registry.reset(device_id=device.id, registers_type=RegisterType.INPUT)
# -----------------------------------------------------------------------------
def initialize_device_channel_property(
self,
channel: ChannelEntity,
channel_property: ChannelPropertyEntity,
) -> None:
"""Initialize device channel property aka input or output register in connector registry"""
match_channel = re.compile("(?P<type>[a-zA-Z-]+)_(?P<counter>[0-9]+)")
parsed_channel_identifier = match_channel.fullmatch(channel.identifier)
channel_type: Optional[RegisterName] = None
if parsed_channel_identifier is not None and RegisterName.has_value(
str(parsed_channel_identifier.group("type"))
):
channel_type = RegisterName(str(parsed_channel_identifier.group("type")))
elif RegisterName.has_value(channel.identifier):
channel_type = RegisterName(channel.identifier)
if channel_type is None:
self.__logger.warning(
"Channel identifier is not as expected. Register couldn't be mapped",
extra={
"device": {
"id": channel.device.id.__str__(),
},
"channel": {
"id": channel.device.id.__str__(),
},
},
)
return
match_property = re.compile("(?P<name>[a-zA-Z-]+)_(?P<address>[0-9]+)")
parsed_property_identifier = match_property.fullmatch(channel_property.identifier)
if parsed_property_identifier is not None:
if channel_type == RegisterName.OUTPUT:
self.__registers_registry.append_output_register(
device_id=channel.device.id,
register_id=channel_property.id,
register_address=int(parsed_property_identifier.group("address")),
register_data_type=channel_property.data_type,
register_invalid=channel_property.invalid,
channel_id=channel.id,
)
elif channel_type == RegisterName.INPUT:
self.__registers_registry.append_input_register(
device_id=channel.device.id,
register_id=channel_property.id,
register_address=int(parsed_property_identifier.group("address")),
register_data_type=channel_property.data_type,
register_invalid=channel_property.invalid,
channel_id=channel.id,
)
else:
self.__logger.warning(
"Channel identifier is not as expected. Register couldn't be mapped",
extra={
"device": {
"id": channel.device.id.__str__(),
},
"channel": {
"id": channel.device.id.__str__(),
},
},
)
# -----------------------------------------------------------------------------
def notify_device_channel_property(
self,
channel: ChannelEntity,
channel_property: ChannelPropertyEntity,
) -> None:
"""Notify device channel property was reported to connector"""
# -----------------------------------------------------------------------------
def remove_device_channel_property(self, channel: ChannelEntity, property_id: uuid.UUID) -> None:
"""Remove device channel property from connector registry"""
self.__registers_registry.remove(register_id=property_id)
# -----------------------------------------------------------------------------
def reset_devices_channels_properties(self, channel: ChannelEntity) -> None:
"""Reset devices channels properties registry to initial state"""
if channel.identifier == RegisterName.OUTPUT.value:
self.__registers_registry.reset(device_id=channel.device.id, registers_type=RegisterType.OUTPUT)
elif channel.identifier == RegisterName.INPUT.value:
self.__registers_registry.reset(device_id=channel.device.id, registers_type=RegisterType.INPUT)
# -----------------------------------------------------------------------------
def start(self) -> None:
"""Start connector services"""
# When connector is starting...
self.__events_listener.open()
for device in self.__devices_registry:
try:
# ...set device state to unknown
self.__devices_registry.set_state(device=device, state=ConnectionState.UNKNOWN)
except InvalidStateException:
self.__logger.error(
"Device state could not be updated. Device is disabled and have to be re-discovered",
extra={
"device": {
"id": device.id.__str__(),
"serial_number": device.serial_number,
},
},
)
self.__devices_registry.disable(device=device)
registers = self.__registers_registry.get_all_for_device(
device_id=device.id,
register_type=[RegisterType.OUTPUT, RegisterType.INPUT, RegisterType.ATTRIBUTE],
)
for register in registers:
self.__registers_registry.set_valid_state(register=register, state=False)
self.__logger.info("Connector has been started")
self.__stopped = False
# Register connector coroutine
asyncio.ensure_future(self.__worker())
# -----------------------------------------------------------------------------
def stop(self) -> None:
"""Close all opened connections & stop connector"""
# When connector is closing...
for device in self.__devices_registry:
try:
# ...set device state to disconnected
self.__devices_registry.set_state(device=device, state=ConnectionState.DISCONNECTED)
except InvalidStateException:
self.__logger.error(
"Device state could not be updated. Device is disabled and have to be re-discovered",
extra={
"device": {
"id": device.id.__str__(),
"serial_number": device.serial_number,
},
},
)
self.__devices_registry.disable(device=device)
registers = self.__registers_registry.get_all_for_device(
device_id=device.id,
register_type=[RegisterType.OUTPUT, RegisterType.INPUT, RegisterType.ATTRIBUTE],
)
for register in registers:
self.__registers_registry.set_valid_state(register=register, state=False)
self.__events_listener.close()
self.__logger.info("Connector has been stopped")
self.__stopped = True
# -----------------------------------------------------------------------------
def has_unfinished_tasks(self) -> bool:
"""Check if connector has some unfinished task"""
return not self.__consumer.is_empty()
# -----------------------------------------------------------------------------
def write_property(self, property_item: Union[DevicePropertyEntity, ChannelPropertyEntity], data: Dict) -> None:
"""Write device or channel property value to device"""
if self.__stopped:
self.__logger.warning("Connector is stopped, value can't be written")
return
if isinstance(property_item, (DeviceDynamicPropertyEntity, ChannelDynamicPropertyEntity)):
register_record = self.__registers_registry.get_by_id(register_id=property_item.id)
if register_record is None:
return
if property_item.data_type is not None:
value_to_write = normalize_value(
data_type=property_item.data_type,
value=data.get("expected_value", None),
value_format=property_item.format,
value_invalid=property_item.invalid,
)
else:
value_to_write = data.get("expected_value", None)
if (
isinstance(value_to_write, (str, int, float, bool, datetime, ButtonPayload, SwitchPayload))
or value_to_write is None
):
if (
isinstance(value_to_write, SwitchPayload)
and register_record.data_type == DataType.SWITCH
and value_to_write == SwitchPayload.TOGGLE
):
if register_record.actual_value == SwitchPayload.ON:
value_to_write = SwitchPayload.OFF
else:
value_to_write = SwitchPayload.ON
self.__registers_registry.set_expected_value(register=register_record, value=value_to_write)
return
# -----------------------------------------------------------------------------
def write_control(
self,
control_item: Union[ConnectorControlEntity, DeviceControlEntity, ChannelControlEntity],
data: Optional[Dict],
action: ControlAction,
) -> None:
"""Write connector control action"""
if isinstance(control_item, ConnectorControlEntity):
if not ConnectorAction.has_value(control_item.name):
return
control_action = ConnectorAction(control_item.name)
if control_action == ConnectorAction.DISCOVER:
self.__client.discover()
if control_action == ConnectorAction.RESTART:
raise RestartConnectorException("Restarting connector")
# -----------------------------------------------------------------------------
async def __worker(self) -> None:
"""Run connector service"""
while True:
if self.__stopped and self.has_unfinished_tasks():
return
self.__consumer.handle()
# Continue processing devices
self.__client.handle()
self.__transporter.handle()
# Be gentle to server
await asyncio.sleep(0.01)
| 40.311504
| 118
| 0.587592
|
91866e298c1a32f1ab98b7bcf43edf35ce6d8158
| 3,428
|
py
|
Python
|
Chapter_3/visits_microservice/visits_microservice_stack.py
|
jonahjon/spring-petclinic
|
df5213dc715c88679bd290b07d4461ac1b511ffa
|
[
"Apache-2.0"
] | null | null | null |
Chapter_3/visits_microservice/visits_microservice_stack.py
|
jonahjon/spring-petclinic
|
df5213dc715c88679bd290b07d4461ac1b511ffa
|
[
"Apache-2.0"
] | null | null | null |
Chapter_3/visits_microservice/visits_microservice_stack.py
|
jonahjon/spring-petclinic
|
df5213dc715c88679bd290b07d4461ac1b511ffa
|
[
"Apache-2.0"
] | null | null | null |
from aws_cdk import (
aws_ec2 as ec2,
aws_ecs as ecs,
aws_rds as rds,
aws_ecs as ecs,
aws_ecr_assets as ecr_assets,
aws_autoscaling as autoscaling,
aws_elasticloadbalancingv2 as elbv2,
core
)
from random import randint
class VisitsMicroserviceStack(core.Stack):
def __init__(self, scope: core.Construct, id: str, props: rds.DatabaseInstance, **kwargs) -> None:
super().__init__(scope, id, **kwargs)
fluxvpc = ec2.Vpc(
self, "FluxVpc",
max_azs=2,
cidr="10.1.0.0/16"
)
fluxcluster = ecs.Cluster(
self, 'FluxEcsCluster',
vpc=fluxvpc
)
fluxcluster.add_capacity("DefaultAutoScalingGroup",
instance_type=ec2.InstanceType('t2.large'),
vpc_subnets = ec2.SubnetSelection(subnet_type=ec2.SubnetType.PUBLIC),
min_capacity = 1)
Fluxalb = elbv2.ApplicationLoadBalancer(self, 'FluxEcsLb', vpc=fluxvpc, internet_facing=True)
Fluxlistener = Fluxalb.add_listener('FluxEcsListener', port=80)
Fluxlistener.add_fixed_response('Default-Fix', status_code= '404')
Fluxlistener.node.default_child.default_action=[{
"type": "fixed-response",
"fixedResponseConfig": {"statusCode": "404"}
}]
##########
#New Visit App
##########
for s in ['newvisits']:
asset = ecr_assets.DockerImageAsset(self, 'spring-petclinic-' + s,
directory='./work/build/spring-petclinic-' + 'visits' + '-service',
build_args={
'JAR_FILE': 'spring-petclinic-' + 'visits' + '-service-2.1.4.jar'
})
ecs_task = ecs.Ec2TaskDefinition(self, 'TaskDef-' + s)
env={}
if s != 'static':
env = {
'SPRING_DATASOURCE_PASSWORD': 'Welcome#123456',
'SPRING_DATASOURCE_USERNAME': 'master',
'SPRING_PROFILES_ACTIVE': 'mysql',
'SPRING_DATASOURCE_URL': 'jdbc:mysql://' + props.db_instance_endpoint_address + '/petclinic?useUnicode=true',
'SERVER_SERVLET_CONTEXT_PATH': '/api/' + s.rstrip('s')
}
ecs_container = ecs_task.add_container(
'Container-' + s,
memory_limit_mib=512,
image=ecs.ContainerImage.from_docker_image_asset(asset),
logging=ecs.LogDriver.aws_logs(stream_prefix=s),
environment=env
)
ecs_container.add_port_mappings(ecs.PortMapping(container_port=8080))
ecs_service = ecs.Ec2Service(
self, 'Ec2Service-' + s,
cluster = fluxcluster,
service_name = 'spring-petclinic-' + s,
desired_count = 2,
task_definition = ecs_task
)
parttern = '/api/' + s.rstrip('s') + '/*'
priority = randint(1, 1000)
check={'path': '/api/' + s.rstrip('s') + '/manage'}
target = Fluxlistener.add_targets(
'ECS-' + s,
path_pattern=parttern,
priority = priority,
port=80,
targets=[ecs_service],
health_check=check
)
core.CfnOutput(self,"FluxLoadBalancer",export_name="FluxLoadBalancer",value=Fluxalb.load_balancer_dns_name)
| 32.647619
| 127
| 0.556301
|
0b3a02c58998543f31103df321e9653c13fc9ccd
| 429
|
py
|
Python
|
checkout/migrations/0003_alter_order_country.py
|
sanson0/obelisk_jewellery
|
27f811f5d163c6fd6336fba56d7f54d5570fc7a8
|
[
"PostgreSQL"
] | null | null | null |
checkout/migrations/0003_alter_order_country.py
|
sanson0/obelisk_jewellery
|
27f811f5d163c6fd6336fba56d7f54d5570fc7a8
|
[
"PostgreSQL"
] | null | null | null |
checkout/migrations/0003_alter_order_country.py
|
sanson0/obelisk_jewellery
|
27f811f5d163c6fd6336fba56d7f54d5570fc7a8
|
[
"PostgreSQL"
] | 1
|
2021-10-30T11:10:51.000Z
|
2021-10-30T11:10:51.000Z
|
# Generated by Django 3.2.7 on 2021-10-01 11:31
from django.db import migrations
import django_countries.fields
class Migration(migrations.Migration):
dependencies = [
('checkout', '0002_auto_20210928_1357'),
]
operations = [
migrations.AlterField(
model_name='order',
name='country',
field=django_countries.fields.CountryField(max_length=2),
),
]
| 21.45
| 69
| 0.634033
|
2e7adbddb57cc23200e1d042adbccf44d404f3b1
| 253
|
py
|
Python
|
commands/newClass.py
|
AsherSeiling/jvclass
|
6a279145165994bfb2e37dee7cac626e6b0526ba
|
[
"MIT"
] | null | null | null |
commands/newClass.py
|
AsherSeiling/jvclass
|
6a279145165994bfb2e37dee7cac626e6b0526ba
|
[
"MIT"
] | null | null | null |
commands/newClass.py
|
AsherSeiling/jvclass
|
6a279145165994bfb2e37dee7cac626e6b0526ba
|
[
"MIT"
] | null | null | null |
import os
import sys
def new_class(args):
name = str(args[2].split(".")[1])
codetamplate = [f"public class {name}" + "{", "}"]
os.system(f"touch {args[2]}.java")
files = open(f"{args[2]}.java", "w+")
for i in codetamplate:
files.write(f"{i}\n")
| 23
| 51
| 0.604743
|
3d7052cb94e66ff3ef323488d2c7fa93e3345859
| 5,364
|
py
|
Python
|
deep_sort/tracker.py
|
weishengtoh/computervision_assignment
|
98964e765527956b15ac2cf075914956ea4e7ec4
|
[
"Apache-2.0"
] | null | null | null |
deep_sort/tracker.py
|
weishengtoh/computervision_assignment
|
98964e765527956b15ac2cf075914956ea4e7ec4
|
[
"Apache-2.0"
] | null | null | null |
deep_sort/tracker.py
|
weishengtoh/computervision_assignment
|
98964e765527956b15ac2cf075914956ea4e7ec4
|
[
"Apache-2.0"
] | null | null | null |
# vim: expandtab:ts=4:sw=4
from __future__ import absolute_import
import numpy as np
from . import kalman_filter
from . import linear_assignment
from . import iou_matching
from .track import Track, TrackState
class Tracker:
"""
This is the multi-target tracker.
Parameters
----------
metric : nn_matching.NearestNeighborDistanceMetric
A distance metric for measurement-to-track association.
max_age : int
Maximum number of missed misses before a track is deleted.
n_init : int
Number of consecutive detections before the track is confirmed. The
track state is set to `Deleted` if a miss occurs within the first
`n_init` frames.
Attributes
----------
metric : nn_matching.NearestNeighborDistanceMetric
The distance metric used for measurement to track association.
max_age : int
Maximum number of missed misses before a track is deleted.
n_init : int
Number of frames that a track remains in initialization phase.
kf : kalman_filter.KalmanFilter
A Kalman filter to filter target trajectories in image space.
tracks : List[Track]
The list of active tracks at the current time step.
"""
def __init__(self, metric, max_iou_distance=0.7, max_age=30, n_init=3):
self.metric = metric
self.max_iou_distance = max_iou_distance
self.max_age = max_age
self.n_init = n_init
self.kf = kalman_filter.KalmanFilter()
self.tracks = []
self._next_id = 1
def predict(self):
"""Propagate track state distributions one time step forward.
This function should be called once every time step, before `update`.
"""
for track in self.tracks:
track.predict(self.kf)
def update(self, detections):
"""Perform measurement update and track management.
Parameters
----------
detections : List[deep_sort.detection.Detection]
A list of detections at the current time step.
"""
# Run matching cascade.
matches, unmatched_tracks, unmatched_detections = \
self._match(detections)
# Update track set.
for track_idx, detection_idx in matches:
self.tracks[track_idx].update(self.kf, detections[detection_idx])
for track_idx in unmatched_tracks:
self.tracks[track_idx].mark_missed()
for detection_idx in unmatched_detections:
self._initiate_track(detections[detection_idx])
self.tracks = [t for t in self.tracks if not t.is_deleted()]
# Update distance metric.
active_targets = [t.track_id for t in self.tracks if t.is_confirmed()]
features, targets = [], []
for track in self.tracks:
if not track.is_confirmed():
continue
features += track.features
targets += [track.track_id for _ in track.features]
track.features = []
self.metric.partial_fit(np.asarray(features), np.asarray(targets),
active_targets)
def _match(self, detections):
def gated_metric(tracks, dets, track_indices, detection_indices):
features = np.array([dets[i].feature for i in detection_indices])
targets = np.array([tracks[i].track_id for i in track_indices])
cost_matrix = self.metric.distance(features, targets)
cost_matrix = linear_assignment.gate_cost_matrix(
self.kf, cost_matrix, tracks, dets, track_indices,
detection_indices)
return cost_matrix
# Split track set into confirmed and unconfirmed tracks.
confirmed_tracks = [
i for i, t in enumerate(self.tracks) if t.is_confirmed()
]
unconfirmed_tracks = [
i for i, t in enumerate(self.tracks) if not t.is_confirmed()
]
# Associate confirmed tracks using appearance features.
matches_a, unmatched_tracks_a, unmatched_detections = \
linear_assignment.matching_cascade(
gated_metric, self.metric.matching_threshold, self.max_age,
self.tracks, detections, confirmed_tracks)
# Associate remaining tracks together with unconfirmed tracks using IOU.
iou_track_candidates = unconfirmed_tracks + [
k for k in unmatched_tracks_a
if self.tracks[k].time_since_update == 1
]
unmatched_tracks_a = [
k for k in unmatched_tracks_a
if self.tracks[k].time_since_update != 1
]
matches_b, unmatched_tracks_b, unmatched_detections = \
linear_assignment.min_cost_matching(
iou_matching.iou_cost, self.max_iou_distance, self.tracks,
detections, iou_track_candidates, unmatched_detections)
matches = matches_a + matches_b
unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b))
return matches, unmatched_tracks, unmatched_detections
def _initiate_track(self, detection):
mean, covariance = self.kf.initiate(detection.to_xyah())
self.tracks.append(
Track(mean, covariance, self._next_id, self.n_init, self.max_age,
detection.feature, detection.label))
self._next_id += 1
| 37.774648
| 80
| 0.645041
|
1fb20d685fd94a6e1a5bab513ab753ec67eae398
| 3,059
|
py
|
Python
|
tests/python/test_local_atomics.py
|
ppwwyyxx/taichi
|
ef0c3367bb06ad78b3457b8f93b5370f14b1d9c4
|
[
"MIT"
] | 1
|
2020-05-19T06:21:39.000Z
|
2020-05-19T06:21:39.000Z
|
tests/python/test_local_atomics.py
|
zf38473013/taichi
|
ad4d7ae04f4e559e84f6dee4a64ad57c3cf0c7fb
|
[
"MIT"
] | null | null | null |
tests/python/test_local_atomics.py
|
zf38473013/taichi
|
ad4d7ae04f4e559e84f6dee4a64ad57c3cf0c7fb
|
[
"MIT"
] | null | null | null |
import taichi as ti
@ti.all_archs
def test_explicit_local_atomic_add():
A = ti.var(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
ti.atomic_add(a, i)
A[None] = a
func()
assert A[None] == 45
@ti.all_archs
def test_implicit_local_atomic_add():
A = ti.var(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
a += i
A[None] = a
func()
assert A[None] == 45
@ti.all_archs
def test_explicit_local_atomic_sub():
A = ti.var(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
ti.atomic_sub(a, i)
A[None] = a
func()
assert A[None] == -45
@ti.all_archs
def test_implicit_local_atomic_sub():
A = ti.var(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
a -= i
A[None] = a
func()
assert A[None] == -45
@ti.all_archs
def test_explicit_local_atomic_min():
A = ti.var(ti.f32, shape=())
@ti.kernel
def func():
a = 1000
for i in range(10):
ti.atomic_min(a, i)
A[None] = a
func()
assert A[None] == 0
@ti.all_archs
def test_explicit_local_atomic_max():
A = ti.var(ti.f32, shape=())
@ti.kernel
def func():
a = -1000
for i in range(10):
ti.atomic_max(a, i)
A[None] = a
func()
assert A[None] == 9
@ti.all_archs
def test_explicit_local_atomic_and():
A = ti.var(ti.i32, shape=())
max_int = 2147483647
@ti.kernel
def func():
a = 1023
for i in range(10):
ti.atomic_and(a, max_int - 2**i)
A[None] = a
func()
assert A[None] == 0
@ti.all_archs
def test_implicit_local_atomic_and():
A = ti.var(ti.i32, shape=())
max_int = 2147483647
@ti.kernel
def func():
a = 1023
for i in range(10):
a &= max_int - 2**i
A[None] = a
func()
assert A[None] == 0
@ti.all_archs
def test_explicit_local_atomic_or():
A = ti.var(ti.i32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
ti.atomic_or(a, 2**i)
A[None] = a
func()
assert A[None] == 1023
@ti.all_archs
def test_implicit_local_atomic_or():
A = ti.var(ti.i32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
a |= 2**i
A[None] = a
func()
assert A[None] == 1023
@ti.all_archs
def test_explicit_local_atomic_xor():
A = ti.var(ti.i32, shape=())
@ti.kernel
def func():
a = 1023
for i in range(10):
ti.atomic_xor(a, 2**i)
A[None] = a
func()
assert A[None] == 0
@ti.all_archs
def test_implicit_local_atomic_xor():
A = ti.var(ti.i32, shape=())
@ti.kernel
def func():
a = 1023
for i in range(10):
a ^= 2**i
A[None] = a
func()
assert A[None] == 0
| 16.625
| 44
| 0.505721
|
7ae2ed5d7d3c943ab18dac8ce2415b59e77f2b3e
| 320
|
py
|
Python
|
25 1000-digit Fibonacci number.py
|
Polar1ty/euler_problems
|
bc1cd917d95d1b63b80a0b182dbd5e9f90a95d90
|
[
"MIT"
] | 2
|
2020-06-09T10:35:12.000Z
|
2020-06-09T11:32:16.000Z
|
25 1000-digit Fibonacci number.py
|
Polar1ty/euler_problems
|
bc1cd917d95d1b63b80a0b182dbd5e9f90a95d90
|
[
"MIT"
] | null | null | null |
25 1000-digit Fibonacci number.py
|
Polar1ty/euler_problems
|
bc1cd917d95d1b63b80a0b182dbd5e9f90a95d90
|
[
"MIT"
] | null | null | null |
# 1000-digit Fibonacci number
import time
start = time.time()
fibo = [1, 1]
while 1:
new_f = fibo[-2] + fibo[-1]
fibo.append(new_f)
if len(str(new_f)) == 1000:
print('Result =', fibo.index(new_f) + 1)
break
end = time.time()
print('Time of execution = ' + str(end - start))
| 20
| 49
| 0.559375
|
602fcb07447ad09561d80028e3c7c194afdbe4d3
| 13,849
|
py
|
Python
|
utils/eulerangles.py
|
Hao-FANG-92/3D_PSPNet
|
2821a3181cbdb70ee3291c29ebac19af38e6c8dc
|
[
"MIT"
] | 6
|
2020-06-13T08:34:49.000Z
|
2021-11-09T04:09:48.000Z
|
utils/eulerangles.py
|
jtpils/3D_PSPNet
|
2821a3181cbdb70ee3291c29ebac19af38e6c8dc
|
[
"MIT"
] | null | null | null |
utils/eulerangles.py
|
jtpils/3D_PSPNet
|
2821a3181cbdb70ee3291c29ebac19af38e6c8dc
|
[
"MIT"
] | 4
|
2020-03-08T17:00:19.000Z
|
2022-03-18T15:42:14.000Z
|
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the NiBabel package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
''' Module implementing Euler angle rotations and their conversions
See:
* http://en.wikipedia.org/wiki/Rotation_matrix
* http://en.wikipedia.org/wiki/Euler_angles
* http://mathworld.wolfram.com/EulerAngles.html
See also: *Representing Attitude with Euler Angles and Quaternions: A
Reference* (2006) by James Diebel. A cached PDF link last found here:
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.110.5134
Euler's rotation theorem tells us that any rotation in 3D can be
described by 3 angles. Let's call the 3 angles the *Euler angle vector*
and call the angles in the vector :math:`alpha`, :math:`beta` and
:math:`gamma`. The vector is [ :math:`alpha`,
:math:`beta`. :math:`gamma` ] and, in this description, the order of the
parameters specifies the order in which the rotations occur (so the
rotation corresponding to :math:`alpha` is applied first).
In order to specify the meaning of an *Euler angle vector* we need to
specify the axes around which each of the rotations corresponding to
:math:`alpha`, :math:`beta` and :math:`gamma` will occur.
There are therefore three axes for the rotations :math:`alpha`,
:math:`beta` and :math:`gamma`; let's call them :math:`i` :math:`j`,
:math:`k`.
Let us express the rotation :math:`alpha` around axis `i` as a 3 by 3
rotation matrix `A`. Similarly :math:`beta` around `j` becomes 3 x 3
matrix `B` and :math:`gamma` around `k` becomes matrix `G`. Then the
whole rotation expressed by the Euler angle vector [ :math:`alpha`,
:math:`beta`. :math:`gamma` ], `R` is given by::
R = np.dot(G, np.dot(B, A))
See http://mathworld.wolfram.com/EulerAngles.html
The order :math:`G B A` expresses the fact that the rotations are
performed in the order of the vector (:math:`alpha` around axis `i` =
`A` first).
To convert a given Euler angle vector to a meaningful rotation, and a
rotation matrix, we need to define:
* the axes `i`, `j`, `k`
* whether a rotation matrix should be applied on the left of a vector to
be transformed (vectors are column vectors) or on the right (vectors
are row vectors).
* whether the rotations move the axes as they are applied (intrinsic
rotations) - compared the situation where the axes stay fixed and the
vectors move within the axis frame (extrinsic)
* the handedness of the coordinate system
See: http://en.wikipedia.org/wiki/Rotation_matrix#Ambiguities
We are using the following conventions:
* axes `i`, `j`, `k` are the `z`, `y`, and `x` axes respectively. Thus
an Euler angle vector [ :math:`alpha`, :math:`beta`. :math:`gamma` ]
in our convention implies a :math:`alpha` radian rotation around the
`z` axis, followed by a :math:`beta` rotation around the `y` axis,
followed by a :math:`gamma` rotation around the `x` axis.
* the rotation matrix applies on the left, to column vectors on the
right, so if `R` is the rotation matrix, and `v` is a 3 x N matrix
with N column vectors, the transformed vector set `vdash` is given by
``vdash = np.dot(R, v)``.
* extrinsic rotations - the axes are fixed, and do not move with the
rotations.
* a right-handed coordinate system
The convention of rotation around ``z``, followed by rotation around
``y``, followed by rotation around ``x``, is known (confusingly) as
"xyz", pitch-roll-yaw, Cardan angles, or Tait-Bryan angles.
'''
import math
import sys
if sys.version_info >= (3,0):
from functools import reduce
import numpy as np
_FLOAT_EPS_4 = np.finfo(float).eps * 4.0
def euler2mat(z=0, y=0, x=0):
''' Return matrix for rotations around z, y and x axes
Uses the z, then y, then x convention above
Parameters
----------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Returns
-------
M : array shape (3,3)
Rotation matrix giving same rotation as for given angles
Examples
--------
>>> zrot = 1.3 # radians
>>> yrot = -0.1
>>> xrot = 0.2
>>> M = euler2mat(zrot, yrot, xrot)
>>> M.shape == (3, 3)
True
The output rotation matrix is equal to the composition of the
individual rotations
>>> M1 = euler2mat(zrot)
>>> M2 = euler2mat(0, yrot)
>>> M3 = euler2mat(0, 0, xrot)
>>> composed_M = np.dot(M3, np.dot(M2, M1))
>>> np.allclose(M, composed_M)
True
You can specify rotations by named arguments
>>> np.all(M3 == euler2mat(x=xrot))
True
When applying M to a vector, the vector should column vector to the
right of M. If the right hand side is a 2D array rather than a
vector, then each column of the 2D array represents a vector.
>>> vec = np.array([1, 0, 0]).reshape((3,1))
>>> v2 = np.dot(M, vec)
>>> vecs = np.array([[1, 0, 0],[0, 1, 0]]).T # giving 3x2 array
>>> vecs2 = np.dot(M, vecs)
Rotations are counter-clockwise.
>>> zred = np.dot(euler2mat(z=np.pi/2), np.eye(3))
>>> np.allclose(zred, [[0, -1, 0],[1, 0, 0], [0, 0, 1]])
True
>>> yred = np.dot(euler2mat(y=np.pi/2), np.eye(3))
>>> np.allclose(yred, [[0, 0, 1],[0, 1, 0], [-1, 0, 0]])
True
>>> xred = np.dot(euler2mat(x=np.pi/2), np.eye(3))
>>> np.allclose(xred, [[1, 0, 0],[0, 0, -1], [0, 1, 0]])
True
Notes
-----
The direction of rotation is given by the right-hand rule (orient
the thumb of the right hand along the axis around which the rotation
occurs, with the end of the thumb at the positive end of the axis;
curl your fingers; the direction your fingers curl is the direction
of rotation). Therefore, the rotations are counterclockwise if
looking along the axis of rotation from positive to negative.
'''
Ms = []
if z:
cosz = math.cos(z)
sinz = math.sin(z)
Ms.append(np.array(
[[cosz, -sinz, 0],
[sinz, cosz, 0],
[0, 0, 1]]))
if y:
cosy = math.cos(y)
siny = math.sin(y)
Ms.append(np.array(
[[cosy, 0, siny],
[0, 1, 0],
[-siny, 0, cosy]]))
if x:
cosx = math.cos(x)
sinx = math.sin(x)
Ms.append(np.array(
[[1, 0, 0],
[0, cosx, -sinx],
[0, sinx, cosx]]))
if Ms:
return reduce(np.dot, Ms[::-1])
return np.eye(3)
def mat2euler(M, cy_thresh=None):
''' Discover Euler angle vector from 3x3 matrix
Uses the conventions above.
Parameters
----------
M : array-like, shape (3,3)
cy_thresh : None or scalar, optional
threshold below which to give up on straightforward arctan for
estimating x rotation. If None (default), estimate from
precision of input.
Returns
-------
z : scalar
y : scalar
x : scalar
Rotations in radians around z, y, x axes, respectively
Notes
-----
If there was no numerical error, the routine could be derived using
Sympy expression for z then y then x rotation matrix, which is::
[ cos(y)*cos(z), -cos(y)*sin(z), sin(y)],
[cos(x)*sin(z) + cos(z)*sin(x)*sin(y), cos(x)*cos(z) - sin(x)*sin(y)*sin(z), -cos(y)*sin(x)],
[sin(x)*sin(z) - cos(x)*cos(z)*sin(y), cos(z)*sin(x) + cos(x)*sin(y)*sin(z), cos(x)*cos(y)]
with the obvious derivations for z, y, and x
z = atan2(-r12, r11)
y = asin(r13)
x = atan2(-r23, r33)
Problems arise when cos(y) is close to zero, because both of::
z = atan2(cos(y)*sin(z), cos(y)*cos(z))
x = atan2(cos(y)*sin(x), cos(x)*cos(y))
will be close to atan2(0, 0), and highly unstable.
The ``cy`` fix for numerical instability below is from: *Graphics
Gems IV*, Paul Heckbert (editor), Academic Press, 1994, ISBN:
0123361559. Specifically it comes from EulerAngles.c by Ken
Shoemake, and deals with the case where cos(y) is close to zero:
See: http://www.graphicsgems.org/
The code appears to be licensed (from the website) as "can be used
without restrictions".
'''
M = np.asarray(M)
if cy_thresh is None:
try:
cy_thresh = np.finfo(M.dtype).eps * 4
except ValueError:
cy_thresh = _FLOAT_EPS_4
r11, r12, r13, r21, r22, r23, r31, r32, r33 = M.flat
# cy: sqrt((cos(y)*cos(z))**2 + (cos(x)*cos(y))**2)
cy = math.sqrt(r33*r33 + r23*r23)
if cy > cy_thresh: # cos(y) not close to zero, standard form
z = math.atan2(-r12, r11) # atan2(cos(y)*sin(z), cos(y)*cos(z))
y = math.atan2(r13, cy) # atan2(sin(y), cy)
x = math.atan2(-r23, r33) # atan2(cos(y)*sin(x), cos(x)*cos(y))
else: # cos(y) (close to) zero, so x -> 0.0 (see above)
# so r21 -> sin(z), r22 -> cos(z) and
z = math.atan2(r21, r22)
y = math.atan2(r13, cy) # atan2(sin(y), cy)
x = 0.0
return z, y, x
def euler2quat(z=0, y=0, x=0):
''' Return quaternion corresponding to these Euler angles
Uses the z, then y, then x convention above
Parameters
----------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Returns
-------
quat : array shape (4,)
Quaternion in w, x, y z (real, then vector) format
Notes
-----
We can derive this formula in Sympy using:
1. Formula giving quaternion corresponding to rotation of theta radians
about arbitrary axis:
http://mathworld.wolfram.com/EulerParameters.html
2. Generated formulae from 1.) for quaternions corresponding to
theta radians rotations about ``x, y, z`` axes
3. Apply quaternion multiplication formula -
http://en.wikipedia.org/wiki/Quaternions#Hamilton_product - to
formulae from 2.) to give formula for combined rotations.
'''
z = z/2.0
y = y/2.0
x = x/2.0
cz = math.cos(z)
sz = math.sin(z)
cy = math.cos(y)
sy = math.sin(y)
cx = math.cos(x)
sx = math.sin(x)
return np.array([
cx*cy*cz - sx*sy*sz,
cx*sy*sz + cy*cz*sx,
cx*cz*sy - sx*cy*sz,
cx*cy*sz + sx*cz*sy])
def quat2euler(q):
''' Return Euler angles corresponding to quaternion `q`
Parameters
----------
q : 4 element sequence
w, x, y, z of quaternion
Returns
-------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Notes
-----
It's possible to reduce the amount of calculation a little, by
combining parts of the ``quat2mat`` and ``mat2euler`` functions, but
the reduction in computation is small, and the code repetition is
large.
'''
# delayed import to avoid cyclic dependencies
import nibabel.quaternions as nq
return mat2euler(nq.quat2mat(q))
def euler2angle_axis(z=0, y=0, x=0):
''' Return angle, axis corresponding to these Euler angles
Uses the z, then y, then x convention above
Parameters
----------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Returns
-------
theta : scalar
angle of rotation
vector : array shape (3,)
axis around which rotation occurs
Examples
--------
>>> theta, vec = euler2angle_axis(0, 1.5, 0)
>>> print(theta)
1.5
>>> np.allclose(vec, [0, 1, 0])
True
'''
# delayed import to avoid cyclic dependencies
import nibabel.quaternions as nq
return nq.quat2angle_axis(euler2quat(z, y, x))
def angle_axis2euler(theta, vector, is_normalized=False):
''' Convert angle, axis pair to Euler angles
Parameters
----------
theta : scalar
angle of rotation
vector : 3 element sequence
vector specifying axis for rotation.
is_normalized : bool, optional
True if vector is already normalized (has norm of 1). Default
False
Returns
-------
z : scalar
y : scalar
x : scalar
Rotations in radians around z, y, x axes, respectively
Examples
--------
>>> z, y, x = angle_axis2euler(0, [1, 0, 0])
>>> np.allclose((z, y, x), 0)
True
Notes
-----
It's possible to reduce the amount of calculation a little, by
combining parts of the ``angle_axis2mat`` and ``mat2euler``
functions, but the reduction in computation is small, and the code
repetition is large.
'''
# delayed import to avoid cyclic dependencies
import nibabel.quaternions as nq
M = nq.angle_axis2mat(theta, vector, is_normalized)
return mat2euler(M)
| 33.052506
| 100
| 0.594195
|
bd3b8066be6f55e107635852b6699094b1c8a64e
| 4,424
|
py
|
Python
|
qiskit/chemistry/algorithms/eigen_solvers/q_eom_ee.py
|
hushaohan/aqua
|
8512bc6ce246a8b3cca1e5edb1703b6885aa7c5d
|
[
"Apache-2.0"
] | 1
|
2020-08-01T21:07:54.000Z
|
2020-08-01T21:07:54.000Z
|
qiskit/chemistry/algorithms/eigen_solvers/q_eom_ee.py
|
hushaohan/aqua
|
8512bc6ce246a8b3cca1e5edb1703b6885aa7c5d
|
[
"Apache-2.0"
] | null | null | null |
qiskit/chemistry/algorithms/eigen_solvers/q_eom_ee.py
|
hushaohan/aqua
|
8512bc6ce246a8b3cca1e5edb1703b6885aa7c5d
|
[
"Apache-2.0"
] | 1
|
2022-01-25T07:09:10.000Z
|
2022-01-25T07:09:10.000Z
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" QEomEE algorithm """
from typing import Union, List, Optional
import logging
import numpy as np
from qiskit.aqua.operators import LegacyBaseOperator, Z2Symmetries
from qiskit.aqua.algorithms import NumPyMinimumEigensolver
from qiskit.aqua.utils.validation import validate_min, validate_in_set
from .q_equation_of_motion import QEquationOfMotion
logger = logging.getLogger(__name__)
class QEomEE(NumPyMinimumEigensolver):
""" QEomEE algorithm (classical) """
def __init__(self, operator: LegacyBaseOperator, num_orbitals: int,
num_particles: Union[List[int], int],
qubit_mapping: str = 'parity',
two_qubit_reduction: bool = True,
active_occupied: Optional[List[int]] = None,
active_unoccupied: Optional[List[int]] = None,
is_eom_matrix_symmetric: bool = True,
se_list: Optional[List[List[int]]] = None,
de_list: Optional[List[List[int]]] = None,
z2_symmetries: Optional[Z2Symmetries] = None,
untapered_op: Optional[LegacyBaseOperator] = None,
aux_operators: Optional[List[LegacyBaseOperator]] = None) -> None:
"""
Args:
operator: qubit operator
num_orbitals: total number of spin orbitals, has a min. value of 1.
num_particles: number of particles, if it is a list,
the first number is alpha and the second
number if beta.
qubit_mapping: qubit mapping type
two_qubit_reduction: two qubit reduction is applied or not
active_occupied: list of occupied orbitals to include, indices are
0 to n where n is num particles // 2
active_unoccupied: list of unoccupied orbitals to include, indices are
0 to m where m is (num_orbitals - num particles) // 2
is_eom_matrix_symmetric: is EoM matrix symmetric
se_list: single excitation list, overwrite the setting in active space
de_list: double excitation list, overwrite the setting in active space
z2_symmetries: represent the Z2 symmetries
untapered_op: if the operator is tapered, we need untapered operator
to build element of EoM matrix
aux_operators: Auxiliary operators to be evaluated at
each eigenvalue
Raises:
ValueError: invalid parameter
"""
validate_min('num_orbitals', num_orbitals, 1)
validate_in_set('qubit_mapping', qubit_mapping,
{'jordan_wigner', 'parity', 'bravyi_kitaev'})
if isinstance(num_particles, list) and len(num_particles) != 2:
raise ValueError('Num particles value {}. Number of values allowed is 2'.format(
num_particles))
super().__init__(operator, aux_operators)
self.qeom = QEquationOfMotion(operator, num_orbitals, num_particles, qubit_mapping,
two_qubit_reduction, active_occupied, active_unoccupied,
is_eom_matrix_symmetric, se_list, de_list,
z2_symmetries, untapered_op)
def _run(self):
super()._run()
wave_fn = self._ret['eigvecs'][0]
excitation_energies_gap, eom_matrices = self.qeom.calculate_excited_states(wave_fn)
excitation_energies = excitation_energies_gap + self._ret['energy']
all_energies = np.concatenate(([self._ret['energy']], excitation_energies))
self._ret['energy_gap'] = excitation_energies_gap
self._ret['energies'] = all_energies
self._ret['eom_matrices'] = eom_matrices
return self._ret
| 48.615385
| 94
| 0.62726
|
86748563683dfff1f127d811a9418843553f2764
| 1,662
|
py
|
Python
|
catalyst/rl/core/exploration.py
|
162/catalyst
|
b4ba36be52c51160e0fabecdcb084a8d5cd96cb7
|
[
"MIT"
] | null | null | null |
catalyst/rl/core/exploration.py
|
162/catalyst
|
b4ba36be52c51160e0fabecdcb084a8d5cd96cb7
|
[
"MIT"
] | null | null | null |
catalyst/rl/core/exploration.py
|
162/catalyst
|
b4ba36be52c51160e0fabecdcb084a8d5cd96cb7
|
[
"MIT"
] | null | null | null |
from typing import List
from copy import deepcopy
import numpy as np
from catalyst.rl.registry import EXPLORATION
from catalyst.rl.core import EnvironmentSpec
class ExplorationStrategy:
"""
Base class for working with various exploration strategies.
In discrete case must contain method get_action(q_values).
In continuous case must contain method get_action(action).
"""
def __init__(self, power=1.0):
self._power = power
def set_power(self, value):
assert 0. <= value <= 1.0
self._power = value
class ExplorationHandler:
def __init__(self, *exploration_params, env: EnvironmentSpec):
params = deepcopy(exploration_params)
self.exploration_strategies: List[ExplorationStrategy] = []
self.probs = []
for params_ in params:
exploration_name = params_.pop("exploration")
probability = params_.pop("probability")
strategy_fn = EXPLORATION.get(exploration_name)
strategy = strategy_fn(**params_)
self.exploration_strategies.append(strategy)
self.probs.append(probability)
self.num_strategies = len(self.probs)
assert np.isclose(np.sum(self.probs), 1.0)
def set_power(self, value):
assert 0. <= value <= 1.0
for exploration in self.exploration_strategies:
exploration.set_power(value=value)
def get_exploration_strategy(self):
strategy_idx = np.random.choice(self.num_strategies, p=self.probs)
strategy = self.exploration_strategies[strategy_idx]
return strategy
__all__ = ["ExplorationStrategy", "ExplorationHandler"]
| 31.961538
| 74
| 0.683514
|
1596f9e2d19c328135331402283f4ec8caf8c784
| 982
|
py
|
Python
|
outlook/setup.py
|
penelopec/indico-plugins-cern
|
bb67d2fb9e3d24faeeff2b78a5e9bcff52ac5f26
|
[
"MIT"
] | null | null | null |
outlook/setup.py
|
penelopec/indico-plugins-cern
|
bb67d2fb9e3d24faeeff2b78a5e9bcff52ac5f26
|
[
"MIT"
] | null | null | null |
outlook/setup.py
|
penelopec/indico-plugins-cern
|
bb67d2fb9e3d24faeeff2b78a5e9bcff52ac5f26
|
[
"MIT"
] | null | null | null |
# This file is part of the CERN Indico plugins.
# Copyright (C) 2014 - 2018 CERN
#
# The CERN Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License; see
# the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import find_packages, setup
setup(
name='indico-plugin-outlook',
version='1.0.1',
url='https://github.com/indico/indico-plugins-cern',
license='MIT',
author='Indico Team',
author_email='indico-team@cern.ch',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=[
'indico>=2.0'
],
entry_points={
'indico.plugins': {'outlook = indico_outlook.plugin:OutlookPlugin'},
},
classifiers=[
'Environment :: Plugins',
'Environment :: Web Environment',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
| 27.277778
| 76
| 0.662933
|
14552793cee58d25d39650fd853b7bff55726a8c
| 146
|
py
|
Python
|
payment_handler/handler/apps.py
|
KartashevaAnna/Payment-platform
|
c229894de5dd2b0f745b1d363fac2fcc9372413c
|
[
"MIT"
] | null | null | null |
payment_handler/handler/apps.py
|
KartashevaAnna/Payment-platform
|
c229894de5dd2b0f745b1d363fac2fcc9372413c
|
[
"MIT"
] | 1
|
2021-11-04T13:47:28.000Z
|
2021-11-04T13:47:28.000Z
|
payment_handler/handler/apps.py
|
KartashevaAnna/Payment-platform
|
c229894de5dd2b0f745b1d363fac2fcc9372413c
|
[
"MIT"
] | 1
|
2021-10-20T14:44:19.000Z
|
2021-10-20T14:44:19.000Z
|
from django.apps import AppConfig
class HandlerConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'handler'
| 20.857143
| 56
| 0.760274
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.